Migrate completely to Nix configs.
parent cd3aecb67d
commit 9113741910
Makefile | 8

@@ -13,10 +13,6 @@ reboot: boot
switch:
nixos-rebuild switch $(options) --flake $(flake)

.PHONY: trace
trace: check
nixos-rebuild boot $(options) --show-trace --flake $(flake)

.PHONY: update
update:
nix flake update

@@ -33,6 +29,10 @@ android:
check:
nix flake check

.PHONY: trace
trace:
nix flake check --show-trace

.PHONY: show
show:
nix flake show

flake.nix | 22

@@ -139,7 +139,7 @@
];

# Function to create a host.
mkHost = { system, hostname, modules }: nixpkgs.lib.nixosSystem {
mkHost = { system, hostname, modules } @args: nixpkgs.lib.nixosSystem {
inherit system;

modules = [

@@ -151,12 +151,18 @@
stylix.nixosModules.stylix
] ++ modules;

specialArgs = {
specialArgs = let
pkgs = nixpkgs.legacyPackages.${system}.pkgs;
config = self.nixosConfigurations.${hostname}.config;
in {
const = self.nixosModules.const;
flake = self;
inputs = inputs;
style = import ./part/Style.nix { config = self.nixosConfigurations.${hostname}.config; };
wallpaper = import ./part/Wallpaper.nix { pkgs = nixpkgs.legacyPackages.${system}.pkgs; };
key = import ./part/Key.nix {};
setting = import ./part/Setting.nix {};
style = import ./part/Style.nix { config = config; };
util = import ./part/Util.nix { pkgs = pkgs; };
wallpaper = import ./part/Wallpaper.nix { pkgs = pkgs; };
};
};

@@ -265,11 +271,17 @@
{ system.stateVersion = inputs.self.nixosModules.const.droidStateVersion; }
./module/NixOnDroid.nix
];
extraSpecialArgs = {

extraSpecialArgs = let
pkgs = nixpkgs.legacyPackages."aarch64-linux".pkgs;
in {
const = self.nixosModules.const;
flake = self;
inputs = inputs;
key = import ./part/Key.nix {};
setting = import ./part/Setting.nix {};
style = import ./part/Style.nix { config = import ./part/style/Gruvbox.nix {}; };
util = import ./part/Util.nix { pkgs = pkgs; };
};
};
};

@@ -1,4 +1,4 @@
{ ... }: {
{ lib, ... }: {
imports = [
./Filesystem.nix
../dasha/Tablet.nix

@@ -1,4 +1,7 @@
{ ... }: {
environment.variables.DOCKER_CONFIG = ./docker;
{ pkgs, key, util, ... } @args: let
docker = import ./docker/Init.nix args;
config = pkgs.writeText "dockerConfig" docker.text;
in {
environment.variables.DOCKER_CONFIG = config;
virtualisation.docker.enable = true;
}

@@ -1,5 +1,8 @@
{ ... }: {
environment.variables.DOCKER_CONFIG = ./docker;
{ pkgs, key, util, ... } @args: let
docker = import ./docker/Init.nix args;
config = pkgs.writeText "dockerConfig" docker.text;
in {
environment.variables.DOCKER_CONFIG = config;
virtualisation.docker.enable = true;
virtualisation.docker.rootless = {
enable = true;

@@ -1,7 +1,17 @@
{ pkgs, inputs, const, style, ... }: let
{ pkgs, inputs, const, style, util, key, setting, ... } @args: let
homePath = "/data/data/com.termux.nix/files/home";
tmuxScript = pkgs.writeShellScriptBin "tmux_script" (builtins.readFile ./common/tmux/Script.sh);
bash = import ./common/bash/Bash.nix { style = style; };
tmux = import ./common/tmux/Init.nix args;
tmuxScript = pkgs.writeShellScriptBin "tmux_script" tmux.script;
bash = import ./common/bash/Init.nix args;
nvim = import ./common/nvim/Init.nix args;
ssh = import ./common/ssh/Init.nix args;
font = pkgs.runCommandNoCC "font" {} ''
cp ${pkgs.nerdfonts.override { fonts = [ "Terminus" ]; }}/share/fonts/truetype/NerdFonts/TerminessNerdFontMono-Regular.ttf $out
'';
colors = ''
background=#${style.color.bg.dark}
foreground=#${style.color.fg.light}
'';
in {
# NOTE: Split into modules?
environment.packages = with pkgs; [

@@ -53,17 +63,11 @@ in {
home.stateVersion = const.droidStateVersion;
home.file = {
".dotfiles".source = inputs.self;
".ssh/config".source = ./common/ssh/config;
".termux/_font.ttf".source = pkgs.runCommandNoCC "font" {} ''
cp ${pkgs.nerdfonts.override { fonts = [ "Terminus" ]; }}/share/fonts/truetype/NerdFonts/TerminessNerdFontMono-Regular.ttf $out
'';
".termux/_colors.properties".text = ''
background=#${style.color.bg_dark}
foreground=#${style.color.fg}
'';
".ssh/config".text = ssh.config;
".termux/_font.ttf".source = font;
".termux/_colors.properties".text = colors;
};
home.sessionVariables = {
BASH_PATH = ./common/bash;
EDITOR = "nvim";
MANPAGER = "nvim +Man!";
NIXPKGS_ALLOW_UNFREE = "1";

@@ -82,7 +86,7 @@ in {
};
programs.tmux = {
enable = true;
extraConfig = builtins.readFile ./common/tmux/tmux.conf;
extraConfig = tmux.config;
};
programs.git = {
enable = true;

@@ -98,7 +102,7 @@ in {
enable = true;
viAlias = true;
vimAlias = true;
extraConfig = (import ./common/nvim/Init.nix { inputs = inputs; }).customRc;
extraConfig = nvim.config;
};
};
}

@@ -1,5 +1,6 @@
{ pkgs, lib, wallpaper, style, ... }: let
sway = import ./sway/Config.nix { pkgs = pkgs; wallpaper = wallpaper; style = style; };
{ pkgs, lib, wallpaper, style, ... } @args: let
sway = import ./sway/Init.nix args;
config = pkgs.writeText "swayConfig" sway.config;
in {
imports = [
./desktop/App.nix

@@ -28,13 +29,12 @@ in {
gtk = true;
};
extraOptions = [
"--config=${sway.config}"
"--config=${config}"
];
};

environment = {
variables = {
SWAY_CONFIG = ./sway/module;
SWAY_IWT_PATH = "${pkgs.sway-contrib.inactive-windows-transparency}/bin/inactive-windows-transparency.py";
# PATH = [ "/etc/swaybin" ]; # NOTE: Kept as an example on PATH modification.
};

@@ -1,10 +1,9 @@
{ lib, style, ... }: let
bash = import ./bash/Bash.nix { style = style; };
{ lib, style, util, pkgs, ... } @args: let
bash = import ./bash/Init.nix args;
in {
programs.bash.interactiveShellInit = bash.config;
environment.shellAliases = lib.mkForce {};
environment.variables = {
BASH_PATH = ./bash;
TERM = "xterm-256color";
};
}

@@ -1,4 +1,6 @@
{ pkgs, ... }: {
{ pkgs, util, ... } @args: let
firefox = import ./firefox/Init.nix args;
in {
# Disable profile switching on rebuild.
environment.variables = {
MOZ_LEGACY_PROFILES = "1";

@@ -8,7 +10,7 @@
enable = true;
package = pkgs.firefox-esr;
languagePacks = [ "en-US" "ru" ];
autoConfig = builtins.readFile ./firefox/Config.js;
autoConfig = firefox.config;
policies = {
ManagedBookmarks = [
{

@@ -1,5 +1,5 @@
{ inputs, pkgs, ... }: let
nvimCfg = import ./nvim/Init.nix { inputs = inputs; };
{ inputs, pkgs, util, key, setting, ... } @args: let
nvim = import ./nvim/Init.nix args;
in {
environment = {
variables = {

@@ -15,7 +15,7 @@ in {
viAlias = true;
vimAlias = true;
configure = {
customRC = nvimCfg.customRc;
customRC = nvim.config;
};
};
}

@@ -1,4 +1,6 @@
{ pkgs, ... }: {
{ pkgs, util, ... } @args: let
ssh = import ./ssh/Init.nix args;
in {
environment.systemPackages = with pkgs; [ sshfs ];
programs.ssh.extraConfig = builtins.readFile ./ssh/config;
programs.ssh.extraConfig = ssh.config;
}

@@ -1,9 +1,10 @@
{ pkgs, ... }: let
script = pkgs.writeShellScriptBin "tmux_script" (builtins.readFile ./tmux/Script.sh);
{ pkgs, style, key, util, ... } @args: let
tmux = import ./tmux/Init.nix args;
script = pkgs.writeShellScriptBin "tmux_script" tmux.script;
in {
programs.tmux = {
enable = true;
extraConfig = builtins.readFile ./tmux/tmux.conf;
extraConfig = tmux.config;
};
environment.systemPackages = [ script ];
}

@@ -1,60 +0,0 @@
{ style, ... }: let
accent-b = style.color.accent-b;
accent-g = style.color.accent-g;
accent-r = style.color.accent-r;
negative-b = style.color.negative-b;
negative-g = style.color.negative-g;
negative-r = style.color.negative-r;
neutral-b = style.color.neutral-b;
neutral-g = style.color.neutral-g;
neutral-r = style.color.neutral-r;
positive-b = style.color.positive-b;
positive-g = style.color.positive-g;
positive-r = style.color.positive-r;

accent = style.color.accent;
bg = style.color.bg_dark;
fg = style.color.fg_light;

fontNamePopup = style.font.serif.name;
fontSizePopup = style.font.size.popup;
in {
config = ''
# If not running interactively, don't do anything.
[[ "$-" != *i* ]] && return

# Src system bashrc.
[[ -f /etc/bashrc ]] && source /etc/bashrc

# Define colors.
export negative_rgb="${negative-r};${negative-g};${negative-b}"
export neutral_rgb="${neutral-r};${neutral-g};${neutral-b}"
export positive_rgb="${positive-r};${positive-g};${positive-b}"
export accent_rgb="${accent-r};${accent-g};${accent-b}"
export bg="${bg}"
export fg="${fg}"
export accent="${accent}"

# Define font.
export font_name_popup="${fontNamePopup}"
export font_size_popup="${toString(fontSizePopup)}"

# Src custom modules.
for module in $BASH_PATH/module/*.sh; do
source "$module"
done

# Alias to reload.
function bashrc() {
source $BASH_PATH/Bashrc.sh
}

# Export all functions.
export -f $(find_function | tr '\n' ' ')

# Autostart Sway.
if [[ -z $DISPLAY ]] && [[ "$(tty)" = "/dev/tty1" ]]; then
exec sway
fi
'';
}

module/common/bash/Init.nix | 25 (new file)

@@ -0,0 +1,25 @@
{ style, util, pkgs, ... } @args: let
bashRc = pkgs.writeText "bashRc" (util.trimTabs (builtins.foldl' (acc: mod:
acc + (import mod args).text
) "" (util.ls ./module)));
in {
config = util.trimTabs (''
# If not running interactively, don't do anything.
[[ "$-" != *i* ]] && return

'' + builtins.readFile bashRc + ''

# Find all functions.
function find_function() {
/usr/bin/env cat ${bashRc} | /usr/bin/env grep "^function.*()" | /usr/bin/env sed -e "s/^function //" -e "s/().*//"
}

# Export all functions.
export -f $(find_function | tr '\n' ' ')

# Autostart Sway.
if [[ -z $DISPLAY ]] && [[ "$(tty)" = "/dev/tty1" ]]; then
exec sway
fi
'');
}

module/common/bash/module/Android.nix | 20 (new file)

@@ -0,0 +1,20 @@
{ ... }: {
text = ''
# Start an Android emulator.
# Default name is `main`.
# Usage: emulator [NAME]
function emulator() {
local name="$1"

[[ "$name" = "" ]] && name="main"

steam-run ~/.android/sdk/emulator/emulator -avd "$name" &> /dev/null & disown
}

function _android_emulators() {
_autocomplete_first $(ls --classify ~/.android/avd/ | grep \/$ | sed -e "s/.avd\/$//")
}

complete -F _android_emulators emulator
'';
}

@@ -1,16 +0,0 @@
# Start an Android emulator.
# Default name is `main`.
# Usage: emulator [NAME]
function emulator() {
local name="${1}"

[[ "${name}" = "" ]] && name="main"

steam-run ~/.android/sdk/emulator/emulator -avd "${name}" &> /dev/null & disown
}

function _android_emulators() {
_autocomplete_first $(ls --classify ~/.android/avd/ | grep \/$ | sed -e "s/.avd\/$//")
}

complete -F _android_emulators emulator

module/common/bash/module/Archive.nix | 397 (new file)
|
@ -0,0 +1,397 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
export _archive_pattern="_[0-9]{12}-[[:alnum:]]{40}.t[xg]z$"
|
||||
export _archive_pattern_fast="_[0-9]{12}-[[:alnum:]]{40}.tgz$"
|
||||
|
||||
# Archive directories.
|
||||
# All directories by default.
|
||||
# Supports .archiveignore exclude file.
|
||||
# Usage: archive [DIRS]
|
||||
function archive() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_dir))
|
||||
|
||||
process() {
|
||||
local date=$(_archive_date)
|
||||
|
||||
# Parse name.
|
||||
local name=$(parse_pascal ''${target})
|
||||
|
||||
# Exclude support.
|
||||
local exclude=""
|
||||
[[ -f ".archiveignore" ]] && exclude="--exclude-from=.archiveignore"
|
||||
[[ -f "''${target}/.archiveignore" ]] && exclude="--exclude-from=''${target}/.archiveignore"
|
||||
|
||||
# Create archive.
|
||||
local hash=$(tar ''${exclude} -c ''${target} | pv -s $(/usr/bin/env du -sb ''${target} | awk '{print $1}') | xz -9e --threads=1 | tee ''${name}.txz | sha1sum | cut -d\ -f1)
|
||||
|
||||
# Append hash to target name.
|
||||
local new_name="''${name}_''${date}-''${hash}.txz"
|
||||
mv -- ''${name}.txz ''${new_name} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Archive using multiple threads. Uses 75% of free RAM.
|
||||
# All directories by default.
|
||||
# Supports .archiveignore exclude file.
|
||||
# Usage: archive_mt [DIRS]
|
||||
function archive_mt() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_dir))
|
||||
|
||||
process() {
|
||||
local date=$(_archive_date)
|
||||
|
||||
# Parse name.
|
||||
local name=$(parse_pascal ''${target})
|
||||
|
||||
# Exclude support.
|
||||
local exclude=""
|
||||
[[ -f ".archiveignore" ]] && exclude="--exclude-from=.archiveignore"
|
||||
[[ -f "''${target}/.archiveignore" ]] && exclude="--exclude-from=''${target}/.archiveignore"
|
||||
|
||||
# Determine memory limit.
|
||||
local mem_free=$(_mem_free)
|
||||
local mem_limit=$((mem_free*3/4))
|
||||
|
||||
# Create archive.
|
||||
local hash=$(tar ''${exclude} -c ''${target} | pv -s $(/usr/bin/env du -sb ''${target} | awk '{print $1}') | xz -9e --threads=0 --memlimit=''${mem_limit}MiB | tee ''${name}.txz | sha1sum | cut -d\ -f1)
|
||||
|
||||
# Append hash to target name.
|
||||
local new_name="''${name}_''${date}-''${hash}.txz"
|
||||
mv -- ''${name}.txz ''${new_name} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Archive directories with fast compression.
|
||||
# All directories by default.
|
||||
# Supports .archiveignore exclude file.
|
||||
# Usage: archive_fast [DIRS]
|
||||
function archive_fast() {
|
||||
local IFS=$'\n'
|
||||
local targets=("''${@}")
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_dir))
|
||||
|
||||
process() {
|
||||
# Start timestamp.
|
||||
local date=$(_archive_date)
|
||||
|
||||
# Parse name.
|
||||
local name=$(parse_pascal "''${target}")
|
||||
|
||||
# Exclude support.
|
||||
local exclude=""
|
||||
[[ -f ".archiveignore" ]] && exclude="--exclude-from=.archiveignore"
|
||||
[[ -f "''${target}/.archiveignore" ]] && exclude="--exclude-from=''${target}/.archiveignore"
|
||||
|
||||
# Create archive.
|
||||
local hash=$(tar ''${exclude} -c "''${target}" | pv -s $(/usr/bin/env du -sb "''${target}" | awk '{print $1}') | gzip -1 | tee "''${name}".tgz | sha1sum | cut -d\ -f1)
|
||||
|
||||
# Append hash to target name.
|
||||
local new_name="''${name}_''${date}-''${hash}.tgz"
|
||||
mv -- "''${name}".tgz ''${new_name} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Check archives integrity.
|
||||
# Checks all archives by default.
|
||||
# Usage: archive_check [FILES]
|
||||
function archive_check() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_archive))
|
||||
|
||||
process() {
|
||||
_archive_check "''${target}"
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Delete old versions of an archive.
|
||||
# All archives with 1 version by default.
|
||||
# Usage: archive_prune [NAME] [VERSIONS]
|
||||
function archive_prune() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${1})
|
||||
local versions=''${2}
|
||||
|
||||
[[ "''${targets}" = "" ]] && targets=($(_archive_names))
|
||||
[[ "''${versions}" = "" ]] && versions=1
|
||||
|
||||
if [[ ''${#} -gt 2 ]]; then
|
||||
help archive_prune
|
||||
return 2
|
||||
fi
|
||||
|
||||
process() {
|
||||
local prune=($(ls | grep -E "^''${target}''${_archive_pattern}" | sort -r | sed -e "1,''${versions}d"))
|
||||
|
||||
for archive in ''${prune[@]}; do
|
||||
rm -- "''${archive}" && echo "''${archive}"
|
||||
done
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Delete specified or all archive files.
|
||||
# Usage: archive_rm [FILES]
|
||||
function archive_rm() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_archive))
|
||||
|
||||
process() {
|
||||
rm -- "''${target}" && echo "''${target}"
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Recompress previously created archive_fast with better compression.
|
||||
# Usage: archive_xz [FILES]
|
||||
function archive_xz() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=$(_ls_archive_fast)
|
||||
|
||||
process() {
|
||||
local data=($(_archive_parse "''${target}"))
|
||||
local tmp="''${data[0]}.txz"
|
||||
|
||||
# Check that old format.
|
||||
if [[ "''${data[3]}" != "tgz" ]]; then
|
||||
_error "Not in .tgz format!"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Check integrity.
|
||||
_archive_check "''${target}" || return 1
|
||||
|
||||
# Recompress.
|
||||
local hash=$(pv "''${target}" | gzip -d | xz -9e --threads=1 | tee "''${tmp}" | sha1sum | cut -d\ -f1)
|
||||
|
||||
# Rename.
|
||||
local new_name="''${data[0]}_''${data[1]}-''${hash}.txz"
|
||||
mv -- ''${tmp} ''${new_name} && rm ''${target} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Rename archives.
|
||||
# If no name specified, it simplifies archive's name.
|
||||
# If no archives specified, apply to all archives.
|
||||
# Usage: archive_name [ARCHIVE] [NAME]
|
||||
function archive_name() {
|
||||
local IFS=$'\n'
|
||||
local targets="''${1}"
|
||||
local name="''${2}"
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_archive))
|
||||
|
||||
process() {
|
||||
# Simplify name by default.
|
||||
if [[ "''${name}" = "" || ''${count} -gt 1 ]]; then
|
||||
name="''${target%_*}"
|
||||
name="$(parse_pascal ''${name})"
|
||||
fi
|
||||
|
||||
# Remove old name.
|
||||
local data="''${target##*_}"
|
||||
local new_name="''${name}_''${data}"
|
||||
|
||||
# Check for the same name.
|
||||
[[ "''${target}" = "''${new_name}" ]] && return 0
|
||||
|
||||
# Check for existing target.
|
||||
if [[ -f "''${new_name}" ]]; then
|
||||
_error "''${new_name}: Already exists!"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Rename.
|
||||
mv -- ''${target} ''${new_name} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Extract previously created archive with checksum validation.
|
||||
# Supports unarchiving exact paths from the remote machines (rsync syntax).
|
||||
# Usage: unarchive [HOST:FILES]
|
||||
function unarchive() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=$(_ls_archive)
|
||||
|
||||
process() {
|
||||
# Validate.
|
||||
# _archive_check "''${target}" || return 1
|
||||
if ! _is_archive "''${target}"; then
|
||||
_iterate_skip "Not an archive."
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Remote archives.
|
||||
local remote
|
||||
local file="''${target}"
|
||||
|
||||
if [[ "''${target//\\:/}" == *:* ]]; then
|
||||
local host="''${target%%:*}"
|
||||
file="''${target#*:}"
|
||||
remote=(sudo ssh ''${host})
|
||||
fi
|
||||
|
||||
# Extract.
|
||||
case "''${file##*.}" in
|
||||
"txz")
|
||||
''${remote[@]} pv -f ''${file} | xz -d | tar -xf -
|
||||
;;
|
||||
"tgz")
|
||||
''${remote[@]} pv -f ''${file} | gzip -d | tar -xf -
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Change archive's filesystem time to match creation date.
|
||||
# Usage: archive_touch [FILES]
|
||||
function archive_touch() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=$(_ls_archive)
|
||||
|
||||
process() {
|
||||
local data=($(_archive_parse "''${target}"))
|
||||
local date=''${data[1]}
|
||||
touch -t ''${date} -- ''${target}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Parse archive file name to get: name, date, hash and format.
|
||||
# Usage: _archive_parse <FILENAME>
|
||||
function _archive_parse() {
|
||||
local input="''${1}"
|
||||
local name="''${input%_*}"
|
||||
local format="''${input##*.}"
|
||||
local data="''${input##*_}"; data="''${data%.*}"
|
||||
local date="''${data%%-*}"
|
||||
local hash="''${data##*-}"
|
||||
|
||||
echo "''${name}"
|
||||
echo "''${date}"
|
||||
echo "''${hash}"
|
||||
echo "''${format}"
|
||||
}
|
||||
|
||||
# Autocomplete for archive_name function.
|
||||
# First arg is the archives list, second one is selected archive's current name.
|
||||
function _comp_archive_name() {
|
||||
local IFS=$'\n'
|
||||
COMPREPLY=()
|
||||
|
||||
local cur="''${COMP_WORDS[COMP_CWORD]}"
|
||||
local prev="''${COMP_WORDS[COMP_CWORD-1]}"
|
||||
local command="''${COMP_WORDS[0]}"
|
||||
|
||||
if [[ "''${prev}" = "''${command}" ]]; then
|
||||
COMPREPLY=( $(compgen -W "$(ls | grep -E ''${_archive_pattern})" -- ''${cur}) )
|
||||
return 0
|
||||
else
|
||||
local data=($(_archive_parse ''${prev}))
|
||||
local name="''${data[0]}"
|
||||
COMPREPLY=( $(compgen -W "''${name}" -- ''${cur}) )
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# Autocomplete with archives in current dir.
|
||||
function _comp_archive_grep() {
|
||||
_autocomplete_grep ''${_archive_pattern}
|
||||
}
|
||||
|
||||
# Autocomplete with fast archives in current dir.
|
||||
function _comp_archive_grep_fast() {
|
||||
_autocomplete_grep ''${_archive_pattern_fast}
|
||||
}
|
||||
|
||||
# Get date for a new archive.
|
||||
function _archive_date() {
|
||||
date +%Y%m%d%H%M
|
||||
}
|
||||
|
||||
# Get names of all archives.
|
||||
function _archive_names() {
|
||||
local IFS=$'\n'
|
||||
local archives=($(_ls_archive))
|
||||
local names=()
|
||||
|
||||
for archive in ''${archives[@]}; do
|
||||
local data=($(_archive_parse ''${archive}))
|
||||
names+=(''${data[0]})
|
||||
done
|
||||
|
||||
# Remove copies.
|
||||
names=($(printf '%s\n' "''${names[@]}" | sort -u))
|
||||
|
||||
printf '%s\n' "''${names[@]}"
|
||||
}
|
||||
|
||||
# Autocomplete with names of all archives.
|
||||
function _comp_archive_names() {
|
||||
_autocomplete_first $(_archive_names)
|
||||
}
|
||||
|
||||
# Check if file is an archive.
|
||||
function _is_archive() {
|
||||
local out=$(echo "''${*}" | grep -E ''${_archive_pattern})
|
||||
|
||||
[[ "''${out}" != "" ]]
|
||||
}
|
||||
|
||||
# List all archives.
|
||||
function _ls_archive() {
|
||||
ls | grep -E ''${_archive_pattern}
|
||||
}
|
||||
|
||||
# List fast archives.
|
||||
function _ls_archive_fast() {
|
||||
ls | grep -E ''${_archive_pattern_fast}
|
||||
}
|
||||
|
||||
# Filter input for archives only.
|
||||
function _filter_archive() {
|
||||
grep -E ''${_archive_pattern}
|
||||
}
|
||||
|
||||
function _archive_check() {
|
||||
# Extract hash from name.
|
||||
local data=($(_archive_parse ''${target}))
|
||||
local saved=''${data[2]}
|
||||
|
||||
# Calculate actual hash.
|
||||
local actual=$(pv ''${target} | sha1sum | cut -d\ -f1)
|
||||
|
||||
# Compare hashes.
|
||||
[[ "''${actual}" = "''${saved}" ]]
|
||||
}
|
||||
|
||||
# complete -o filenames -F _comp_archive_grep archive_check unarchive archive_rm archive_touch
|
||||
# complete -o filenames -F _comp_archive_grep_fast archive_xz
|
||||
complete -o filenames -F _comp_archive_name archive_name
|
||||
complete -o filenames -F _comp_archive_names archive_prune
|
||||
'';
|
||||
}
|
|
@ -1,393 +0,0 @@
|
|||
export _archive_pattern="_[0-9]{12}-[[:alnum:]]{40}.t[xg]z$"
|
||||
export _archive_pattern_fast="_[0-9]{12}-[[:alnum:]]{40}.tgz$"
|
||||
|
||||
# Archive directories.
|
||||
# All directories by default.
|
||||
# Supports .archiveignore exclude file.
|
||||
# Usage: archive [DIRS]
|
||||
function archive() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_dir))
|
||||
|
||||
process() {
|
||||
local date=$(_archive_date)
|
||||
|
||||
# Parse name.
|
||||
local name=$(parse_pascal ${target})
|
||||
|
||||
# Exclude support.
|
||||
local exclude=""
|
||||
[[ -f ".archiveignore" ]] && exclude="--exclude-from=.archiveignore"
|
||||
[[ -f "${target}/.archiveignore" ]] && exclude="--exclude-from=${target}/.archiveignore"
|
||||
|
||||
# create archive.
|
||||
local hash=$(tar ${exclude} -c ${target} | pv -s $(/usr/bin/env du -sb ${target} | awk '{print $1}') | xz -9e --threads=1 | tee ${name}.txz | sha1sum | cut -d\ -f1)
|
||||
|
||||
# append hash to target name.
|
||||
local new_name="${name}_${date}-${hash}.txz"
|
||||
mv -- ${name}.txz ${new_name} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Archive using multiple threads. Uses 75% of free RAM.
|
||||
# All directories by default.
|
||||
# Supports .archiveignore exclude file.
|
||||
# Usage: archive_mt [DIRS]
|
||||
function archive_mt() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_dir))
|
||||
|
||||
process() {
|
||||
local date=$(_archive_date)
|
||||
|
||||
# Parse name.
|
||||
local name=$(parse_pascal ${target})
|
||||
|
||||
# Exclude support.
|
||||
local exclude=""
|
||||
[[ -f ".archiveignore" ]] && exclude="--exclude-from=.archiveignore"
|
||||
[[ -f "${target}/.archiveignore" ]] && exclude="--exclude-from=${target}/.archiveignore"
|
||||
|
||||
# Determine memory limit.
|
||||
local mem_free=$(_mem_free)
|
||||
local mem_limit=$((mem_free*3/4))
|
||||
|
||||
# create archive.
|
||||
local hash=$(tar ${exclude} -c ${target} | pv -s $(/usr/bin/env du -sb ${target} | awk '{print $1}') | xz -9e --threads=0 --memlimit=${mem_limit}MiB | tee ${name}.txz | sha1sum | cut -d\ -f1)
|
||||
|
||||
# append hash to target name.
|
||||
local new_name="${name}_${date}-${hash}.txz"
|
||||
mv -- ${name}.txz ${new_name} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Archive directories with fast compression.
|
||||
# All directories by default.
|
||||
# Supports .archiveignore exclude file.
|
||||
# Usage: archive_fast [DIRS]
|
||||
function archive_fast() {
|
||||
local IFS=$'\n'
|
||||
local targets=("${@}")
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_dir))
|
||||
|
||||
process() {
|
||||
# Start timestamp.
|
||||
local date=$(_archive_date)
|
||||
|
||||
# Parse name.
|
||||
local name=$(parse_pascal "${target}")
|
||||
|
||||
# Exclude support.
|
||||
local exclude=""
|
||||
[[ -f ".archiveignore" ]] && exclude="--exclude-from=.archiveignore"
|
||||
[[ -f "${target}/.archiveignore" ]] && exclude="--exclude-from=${target}/.archiveignore"
|
||||
|
||||
# create archive.
|
||||
local hash=$(tar ${exclude} -c "${target}" | pv -s $(/usr/bin/env du -sb "${target}" | awk '{print $1}') | gzip -1 | tee "${name}".tgz | sha1sum | cut -d\ -f1)
|
||||
|
||||
# append hash to target name.
|
||||
local new_name="${name}_${date}-${hash}.tgz"
|
||||
mv -- "${name}".tgz ${new_name} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Check archives integrity.
|
||||
# Checks all archives by default.
|
||||
# Usage: archive_check [FILES]
|
||||
function archive_check() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_archive))
|
||||
|
||||
process() {
|
||||
_archive_check "${target}"
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Delete old versions of an archive.
|
||||
# All archives with 1 version by default.
|
||||
# Usage: archive_prune [NAME] [VERSIONS]
|
||||
function archive_prune() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${1})
|
||||
local versions=${2}
|
||||
|
||||
[[ "${targets}" = "" ]] && targets=($(_archive_names))
|
||||
[[ "${versions}" = "" ]] && versions=1
|
||||
|
||||
if [[ ${#} -gt 2 ]]; then
|
||||
help archive_prune
|
||||
return 2
|
||||
fi
|
||||
|
||||
process() {
|
||||
local prune=($(ls | grep -E "^${target}${_archive_pattern}" | sort -r | sed -e "1,${versions}d"))
|
||||
|
||||
for archive in ${prune[@]}; do
|
||||
rm -- "${archive}" && echo "${archive}"
|
||||
done
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Delete specified or all archive files.
|
||||
# Usage: archive_rm [FILES]
|
||||
function archive_rm() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_archive))
|
||||
|
||||
process() {
|
||||
rm -- "${target}" && echo "${target}"
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Recompress previously created archive_fast with better compression.
|
||||
# Usage: archive_xz [FILES]
|
||||
function archive_xz() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=$(_ls_archive_fast)
|
||||
|
||||
process() {
|
||||
local data=($(_archive_parse "${target}"))
|
||||
local tmp="${data[0]}.txz"
|
||||
|
||||
# Check that old format.
|
||||
if [[ "${data[3]}" != "tgz" ]]; then
|
||||
_error "Not in .tgz format!"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Check integrity.
|
||||
_archive_check "${target}" || return 1
|
||||
|
||||
# Recompress.
|
||||
local hash=$(pv "${target}" | gzip -d | xz -9e --threads=1 | tee "${tmp}" | sha1sum | cut -d\ -f1)
|
||||
|
||||
# Rename.
|
||||
local new_name="${data[0]}_${data[1]}-${hash}.txz"
|
||||
mv -- ${tmp} ${new_name} && rm ${target} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Rename archives.
|
||||
# If no name specified, it simplifies archive's name.
|
||||
# If no archives specified, apply to all archives.
|
||||
# Usage: archive_name [ARCHIVE] [NAME]
|
||||
function archive_name() {
|
||||
local IFS=$'\n'
|
||||
local targets="${1}"
|
||||
local name="${2}"
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_archive))
|
||||
|
||||
process() {
|
||||
# simplify name by default.
|
||||
if [[ "${name}" = "" || ${count} -gt 1 ]]; then
|
||||
name="${target%_*}"
|
||||
name="$(parse_pascal ${name})"
|
||||
fi
|
||||
|
||||
# remove old name.
|
||||
local data="${target##*_}"
|
||||
local new_name="${name}_${data}"
|
||||
|
||||
# check for the same name.
|
||||
[[ "${target}" = "${new_name}" ]] && return 0
|
||||
|
||||
# check for existing target.
|
||||
if [[ -f "${new_name}" ]]; then
|
||||
_error "${new_name}: Already exists!"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# rename.
|
||||
mv -- ${target} ${new_name} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Extract previously created archive with checksum validation.
|
||||
# Supports unarchiving exact paths from the remote machines (rsync syntax).
|
||||
# Usage: unarchive [HOST:FILES]
|
||||
function unarchive() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=$(_ls_archive)
|
||||
|
||||
process() {
|
||||
# Validate.
|
||||
# _archive_check "${target}" || return 1
|
||||
if ! _is_archive "${target}"; then
|
||||
_iterate_skip "Not an archive."
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Remote archives.
|
||||
local remote
|
||||
local file="${target}"
|
||||
|
||||
if [[ "${target//\\:/}" == *:* ]]; then
|
||||
local host="${target%%:*}"
|
||||
file="${target#*:}"
|
||||
remote=(sudo ssh ${host})
|
||||
fi
|
||||
|
||||
# Extract.
|
||||
case "${file##*.}" in
|
||||
"txz")
|
||||
${remote[@]} pv -f ${file} | xz -d | tar -xf -
|
||||
;;
|
||||
"tgz")
|
||||
${remote[@]} pv -f ${file} | gzip -d | tar -xf -
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Change archive's filesystem time to match creation date.
|
||||
# Usage: archive_touch [FILES]
|
||||
function archive_touch() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=$(_ls_archive)
|
||||
|
||||
process() {
|
||||
local data=($(_archive_parse "${target}"))
|
||||
local date=${data[1]}
|
||||
touch -t ${date} -- ${target}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Parse archive file name to get: name, date, hash and format.
|
||||
# Usage: _archive_parse <FILENAME>
|
||||
function _archive_parse() {
|
||||
local input="${1}"
|
||||
local name="${input%_*}"
|
||||
local format="${input##*.}"
|
||||
local data="${input##*_}"; data="${data%.*}"
|
||||
local date="${data%%-*}"
|
||||
local hash="${data##*-}"
|
||||
|
||||
echo "${name}"
|
||||
echo "${date}"
|
||||
echo "${hash}"
|
||||
echo "${format}"
|
||||
}
|
||||
|
||||
# Autocomplete for archive_name function.
|
||||
# First arg is the archives list, second one is selected archive's current name.
|
||||
function _comp_archive_name() {
|
||||
local IFS=$'\n'
|
||||
COMPREPLY=()
|
||||
|
||||
local cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
local prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||
local command="${COMP_WORDS[0]}"
|
||||
|
||||
if [[ "${prev}" = "${command}" ]]; then
|
||||
COMPREPLY=( $(compgen -W "$(ls | grep -E ${_archive_pattern})" -- ${cur}) )
|
||||
return 0
|
||||
else
|
||||
local data=($(_archive_parse ${prev}))
|
||||
local name="${data[0]}"
|
||||
COMPREPLY=( $(compgen -W "${name}" -- ${cur}) )
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# Autocomplete with archives in current dir.
|
||||
function _comp_archive_grep() {
|
||||
_autocomplete_grep ${_archive_pattern}
|
||||
}
|
||||
|
||||
# Autocomplete with fast archives in current dir.
|
||||
function _comp_archive_grep_fast() {
|
||||
_autocomplete_grep ${_archive_pattern_fast}
|
||||
}
|
||||
|
||||
# Get date for a new archive.
|
||||
function _archive_date() {
|
||||
date +%Y%m%d%H%M
|
||||
}
|
||||
|
||||
# Get names of all archives.
|
||||
function _archive_names() {
|
||||
local IFS=$'\n'
|
||||
local archives=($(_ls_archive))
|
||||
local names=()
|
||||
|
||||
for archive in ${archives[@]}; do
|
||||
local data=($(_archive_parse ${archive}))
|
||||
names+=(${data[0]})
|
||||
done
|
||||
|
||||
# Remove copies.
|
||||
names=($(printf '%s\n' "${names[@]}" | sort -u))
|
||||
|
||||
printf '%s\n' "${names[@]}"
|
||||
}
|
||||
|
||||
# Autocomplete with names of all archives.
|
||||
function _comp_archive_names() {
|
||||
_autocomplete_first $(_archive_names)
|
||||
}
|
||||
|
||||
# Check if file is an archive.
|
||||
function _is_archive() {
|
||||
local out=$(echo "${*}" | grep -E ${_archive_pattern})
|
||||
|
||||
[[ "${out}" != "" ]]
|
||||
}
|
||||
|
||||
# List all archives.
|
||||
function _ls_archive() {
|
||||
ls | grep -E ${_archive_pattern}
|
||||
}
|
||||
|
||||
# List fast archives.
|
||||
function _ls_archive_fast() {
|
||||
ls | grep -E ${_archive_pattern_fast}
|
||||
}
|
||||
|
||||
# Filter input for archives only.
|
||||
function _filter_archive() {
|
||||
grep -E ${_archive_pattern}
|
||||
}
|
||||
|
||||
function _archive_check() {
|
||||
# extract hash from name.
|
||||
local data=($(_archive_parse ${target}))
|
||||
local saved=${data[2]}
|
||||
|
||||
# calculate actual hash.
|
||||
local actual=$(pv ${target} | sha1sum | cut -d\ -f1)
|
||||
|
||||
# compare hashes.
|
||||
[[ "${actual}" = "${saved}" ]]
|
||||
}
|
||||
|
||||
# complete -o filenames -F _comp_archive_grep archive_check unarchive archive_rm archive_touch
|
||||
# complete -o filenames -F _comp_archive_grep_fast archive_xz
|
||||
complete -o filenames -F _comp_archive_name archive_name
|
||||
complete -o filenames -F _comp_archive_names archive_prune
|
module/common/bash/module/Ask.nix | 28 (new file)
|
@ -0,0 +1,28 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
export _ask_model="mistral"
|
||||
|
||||
# Ask general AI.
|
||||
# Usage: ask <QUERY>
|
||||
function ask() {
|
||||
curl http://localhost:11434/api/generate -d "{
|
||||
\"model\": \"''${_ask_model}\",
|
||||
\"prompt\":\"''${*}\"
|
||||
}" 2> /dev/null | parallel -j1 -- "echo {} | jq -r .response | tr -d '\n'"
|
||||
echo
|
||||
}
|
||||
|
||||
# Specify ask model.
|
||||
function ask_model() {
|
||||
export _ask_model="''${1}"
|
||||
}
|
||||
|
||||
function _complete_ask_model() {
|
||||
local IFS=$'\n'
|
||||
local models=($(ollama list | sed -e "1d" | cut -f1))
|
||||
_autocomplete_first ''${models[@]}
|
||||
}
|
||||
|
||||
complete -F _complete_ask_model ask_model
|
||||
'';
|
||||
}
|
|
@ -1,24 +0,0 @@
|
|||
export _ask_model="mistral"
|
||||
|
||||
# Ask general AI.
|
||||
# Usage: ask <QUERY>
|
||||
function ask() {
|
||||
curl http://localhost:11434/api/generate -d "{
|
||||
\"model\": \"${_ask_model}\",
|
||||
\"prompt\":\"${*}\"
|
||||
}" 2> /dev/null | parallel -j1 -- "echo {} | jq -r .response | tr -d '\n'"
|
||||
echo
|
||||
}
|
||||
|
||||
# Specify ask model.
|
||||
function ask_model() {
|
||||
export _ask_model="${1}"
|
||||
}
|
||||
|
||||
function _complete_ask_model() {
|
||||
local IFS=$'\n'
|
||||
local models=($(ollama list | sed -e "1d" | cut -f1))
|
||||
_autocomplete_first ${models[@]}
|
||||
}
|
||||
|
||||
complete -F _complete_ask_model ask_model
|
module/common/bash/module/Autocomplete.nix | 87 (new file)
|
@ -0,0 +1,87 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Bash autocomplete.
|
||||
# There are also options like -o nospace. see man for more info.
|
||||
# Usage: _foo() { _autocomplete "{foo,bar}" } ; complete -F _foo foo
|
||||
function _autocomplete() {
|
||||
local IFS=$'\n'
|
||||
local commands="''${*}"
|
||||
|
||||
COMPREPLY=()
|
||||
|
||||
local cur="''${COMP_WORDS[COMP_CWORD]}"
|
||||
local prev="''${COMP_WORDS[COMP_CWORD-1]}"
|
||||
local command="''${COMP_WORDS[0]}"
|
||||
|
||||
COMPREPLY=( $(compgen -W "''${commands}" -- ''${cur}) )
|
||||
return 0
|
||||
}
|
||||
|
||||
# Autocomplete only first argument.
|
||||
function _autocomplete_first() {
|
||||
local IFS=$'\n'
|
||||
local commands="''${*}"
|
||||
|
||||
COMPREPLY=()
|
||||
|
||||
local cur="''${COMP_WORDS[COMP_CWORD]}"
|
||||
local prev="''${COMP_WORDS[COMP_CWORD-1]}"
|
||||
local command="''${COMP_WORDS[0]}"
|
||||
|
||||
if [[ "''${prev}" = "''${command}" ]]; then
|
||||
COMPREPLY=( $(compgen -W "''${commands}" -- ''${cur}) )
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# Autocomplete only first argument and the rest with files.
|
||||
function _autocomplete_first_ls() {
|
||||
local IFS=$'\n'
|
||||
local commands="''${*}"
|
||||
|
||||
COMPREPLY=()
|
||||
|
||||
local cur="''${COMP_WORDS[COMP_CWORD]}"
|
||||
local prev="''${COMP_WORDS[COMP_CWORD-1]}"
|
||||
local command="''${COMP_WORDS[0]}"
|
||||
|
||||
if [[ "''${prev}" = "''${command}" ]]; then
|
||||
COMPREPLY=( $(compgen -W "''${commands}" -- ''${cur}) )
|
||||
return 0
|
||||
else
|
||||
COMPREPLY=( $(compgen -W "$(ls)" -- ''${cur}) )
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# Autocomplete by grepping file names.
|
||||
function _autocomplete_grep() {
|
||||
local IFS=$'\n'
|
||||
COMPREPLY=()
|
||||
|
||||
local pattern="''${1}"
|
||||
local cur="''${COMP_WORDS[COMP_CWORD]}"
|
||||
local prev="''${COMP_WORDS[COMP_CWORD-1]}"
|
||||
local command="''${COMP_WORDS[0]}"
|
||||
|
||||
COMPREPLY=( $(compgen -W "$(ls | grep -E ''${pattern})" -- ''${cur}) )
|
||||
return 0
|
||||
}
|
||||
|
||||
# Autocomplete nested program.
|
||||
function _autocomplete_nested() {
|
||||
# local IFS=$'\n'
|
||||
local cur prev words cword split i
|
||||
_init_completion -s || return
|
||||
|
||||
for ((i = 1; i <= cword; i++)); do
|
||||
if [[ ''${words[i]} != -* ]]; then
|
||||
local PATH=$PATH:/sbin:/usr/sbin:/usr/local/sbin
|
||||
local root_command=''${words[i]}
|
||||
_command_offset ''${i}
|
||||
return
|
||||
fi
|
||||
done
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,83 +0,0 @@
|
|||
# Bash autocomplete.
|
||||
# There are also options like -o nospace. see man for more info.
|
||||
# Usage: _foo() { _autocomplete "{foo,bar}" } ; complete -F _foo foo
|
||||
function _autocomplete() {
|
||||
local IFS=$'\n'
|
||||
local commands="${*}"
|
||||
|
||||
COMPREPLY=()
|
||||
|
||||
local cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
local prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||
local command="${COMP_WORDS[0]}"
|
||||
|
||||
COMPREPLY=( $(compgen -W "${commands}" -- ${cur}) )
|
||||
return 0
|
||||
}
|
||||
|
||||
# Autocomplete only first argument.
|
||||
function _autocomplete_first() {
|
||||
local IFS=$'\n'
|
||||
local commands="${*}"
|
||||
|
||||
COMPREPLY=()
|
||||
|
||||
local cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
local prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||
local command="${COMP_WORDS[0]}"
|
||||
|
||||
if [[ "${prev}" = "${command}" ]]; then
|
||||
COMPREPLY=( $(compgen -W "${commands}" -- ${cur}) )
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# Autocomplete only first argument and the rest with files.
|
||||
function _autocomplete_first_ls() {
|
||||
local IFS=$'\n'
|
||||
local commands="${*}"
|
||||
|
||||
COMPREPLY=()
|
||||
|
||||
local cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
local prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||
local command="${COMP_WORDS[0]}"
|
||||
|
||||
if [[ "${prev}" = "${command}" ]]; then
|
||||
COMPREPLY=( $(compgen -W "${commands}" -- ${cur}) )
|
||||
return 0
|
||||
else
|
||||
COMPREPLY=( $(compgen -W "$(ls)" -- ${cur}) )
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# Autocomplete by grepping file names.
|
||||
function _autocomplete_grep() {
|
||||
local IFS=$'\n'
|
||||
COMPREPLY=()
|
||||
|
||||
local pattern="${1}"
|
||||
local cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
local prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||
local command="${COMP_WORDS[0]}"
|
||||
|
||||
COMPREPLY=( $(compgen -W "$(ls | grep -E ${pattern})" -- ${cur}) )
|
||||
return 0
|
||||
}
|
||||
|
||||
# Autocomplete nested program.
|
||||
function _autocomplete_nested() {
|
||||
# local IFS=$'\n'
|
||||
local cur prev words cword split i
|
||||
_init_completion -s || return
|
||||
|
||||
for ((i = 1; i <= cword; i++)); do
|
||||
if [[ ${words[i]} != -* ]]; then
|
||||
local PATH=$PATH:/sbin:/usr/sbin:/usr/local/sbin
|
||||
local root_command=${words[i]}
|
||||
_command_offset ${i}
|
||||
return
|
||||
fi
|
||||
done
|
||||
}
|
module/common/bash/module/Battery.nix | 16 (new file)
|
@ -0,0 +1,16 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Print current battery charge level in percents.
|
||||
function battery_level() {
|
||||
head -c -1 /sys/class/power_supply/BAT*/capacity
|
||||
echo '%'
|
||||
}
|
||||
|
||||
# Get battery's info.
|
||||
function battery_info() {
|
||||
local IFS=$'\n'
|
||||
local battery=("$(upower --enumerate | grep battery_BAT)")
|
||||
upower -i "''${battery[0]}"
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,12 +0,0 @@
|
|||
# Print current battery charge in percents.
|
||||
function battery_charge() {
|
||||
head -c -1 /sys/class/power_supply/BAT*/capacity
|
||||
echo '%'
|
||||
}
|
||||
|
||||
# Get battery's info.
|
||||
function battery_info() {
|
||||
local IFS=$'\n'
|
||||
local battery=("$(upower --enumerate | grep battery_BAT)")
|
||||
upower -i "${battery[0]}"
|
||||
}
|
module/common/bash/module/Bootstrap.nix | 45 (new file)
|
@ -0,0 +1,45 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Install nixos to specified drive. To be run from Live ISO.
|
||||
# Usage: bootstrap_nixos <DRIVE> [HOST]
|
||||
function bootstrap_nixos() {
|
||||
local target="''${1}"
|
||||
local host="''${2}"
|
||||
|
||||
if [[ "''${target}" = "" ]]; then
|
||||
help bootstrap_nixos
|
||||
return 2
|
||||
fi
|
||||
|
||||
# Create partitions.
|
||||
parted -s "''${target}" mktable gpt
|
||||
parted -s "''${target}" mkpart primary 0% 512MB
|
||||
parted -s "''${target}" mkpart primary 512MB 100%
|
||||
parted -s "''${target}" name 1 NIXBOOT
|
||||
parted -s "''${target}" name 2 NIXROOT
|
||||
parted -s "''${target}" set 1 esp on
|
||||
|
||||
# Format.
|
||||
mkfs.fat -F 32 /dev/disk/by-partlabel/NIXBOOT
|
||||
mkfs.ext4 -F /dev/disk/by-partlabel/NIXROOT
|
||||
|
||||
# Mount.
|
||||
mount /dev/disk/by-partlabel/NIXROOT /mnt
|
||||
mkdir /mnt/boot
|
||||
mount /dev/disk/by-partlabel/NIXBOOT /mnt/boot
|
||||
|
||||
# Generate config.
|
||||
nixos-generate-config --root /mnt
|
||||
|
||||
# Install.
|
||||
cd /mnt
|
||||
if [[ "''${host}" = "" ]]; then
|
||||
_warn "Warning: Installing basic system."
|
||||
nixos-install
|
||||
else
|
||||
_info "Installing ''${host}..."
|
||||
nixos-install --no-root-password --no-channel-copy --flake "''${_nix_system_config}#''${host}"
|
||||
fi
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,70 +0,0 @@
|
|||
# Install Editorconfig file (with tabs) in current directory.
|
||||
function bootstrap_editorconfig() {
|
||||
echo "\
|
||||
[*]
|
||||
end_of_line = lf
|
||||
charset = utf-8
|
||||
indent_style = tab
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
" > .editorconfig
|
||||
}
|
||||
|
||||
# Install Editorconfig file (with specified spaces, 8 by default) in current directory.
|
||||
# Usage: bootstrap_editorconfig_space [AMOUNT]
|
||||
function bootstrap_editorconfig_space() {
|
||||
local spaces="${1}"
|
||||
[[ "${spaces}" = "" ]] && spaces=8
|
||||
|
||||
echo "\
|
||||
[*]
|
||||
end_of_line = lf
|
||||
charset = utf-8
|
||||
indent_style = space
|
||||
indent_size = ${spaces}
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
" > .editorconfig
|
||||
}
|
||||
|
||||
# Install nixos to specified drive. To be run from Live ISO.
|
||||
# Usage: bootstrap_nixos <DRIVE> [HOST]
|
||||
function bootstrap_nixos() {
|
||||
local target="${1}"
|
||||
local host="${2}"
|
||||
|
||||
if [[ "${target}" = "" ]]; then
|
||||
help bootstrap_nixos
|
||||
return 2
|
||||
fi
|
||||
|
||||
# Create partitions.
|
||||
parted -s "${target}" mktable gpt
|
||||
parted -s "${target}" mkpart primary 0% 512MB
|
||||
parted -s "${target}" mkpart primary 512MB 100%
|
||||
parted -s "${target}" name 1 NIXBOOT
|
||||
parted -s "${target}" name 2 NIXROOT
|
||||
parted -s "${target}" set 1 esp on
|
||||
|
||||
# Format.
|
||||
mkfs.fat -F 32 /dev/disk/by-partlabel/NIXBOOT
|
||||
mkfs.ext4 -F /dev/disk/by-partlabel/NIXROOT
|
||||
|
||||
# Mount.
|
||||
mount /dev/disk/by-partlabel/NIXROOT /mnt
|
||||
mkdir /mnt/boot
|
||||
mount /dev/disk/by-partlabel/NIXBOOT /mnt/boot
|
||||
|
||||
# Generate config.
|
||||
nixos-generate-config --root /mnt
|
||||
|
||||
# Install.
|
||||
cd /mnt
|
||||
if [[ "${host}" = "" ]]; then
|
||||
_warn "Warning: Installing basic system."
|
||||
nixos-install
|
||||
else
|
||||
_info "Installing ${host}..."
|
||||
nixos-install --no-root-password --no-channel-copy --flake "${_nix_system_config}#${host}"
|
||||
fi
|
||||
}
|
module/common/bash/module/Brightness.nix | 22 (new file)
|
@ -0,0 +1,22 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Set display brightness to a minimum.
|
||||
function brmin() {
|
||||
light -S 0.01
|
||||
}
|
||||
|
||||
# Set display brightness to a maximum.
|
||||
function brmax() {
|
||||
light -S 100
|
||||
}
|
||||
|
||||
# Set display brightness in percent, 50% default.
|
||||
# Usage: brset [LEVEL]
|
||||
function brset() {
|
||||
local level=''${1}
|
||||
[[ "''${level}" = "" ]] && level=50
|
||||
|
||||
light -S ''${level}
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,18 +0,0 @@
|
|||
# Set display brightness to a minimum.
|
||||
function brmin() {
|
||||
light -S 0.01
|
||||
}
|
||||
|
||||
# Set display brightness to a maximum.
|
||||
function brmax() {
|
||||
light -S 100
|
||||
}
|
||||
|
||||
# Set display brightness in percent, 50% default.
|
||||
# Usage: brset [LEVEL]
|
||||
function brset() {
|
||||
local level=${1}
|
||||
[[ "${level}" = "" ]] && level=50
|
||||
|
||||
light -S ${level}
|
||||
}
|
module/common/bash/module/Cd.nix | 63 (new file)
|
@ -0,0 +1,63 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# CD (back to) directory.
|
||||
# Goes to the exact-match dir first. If no exact match found, it finds first directory that contains the input (case-insensitive).
|
||||
# Usage: cdd <DIR>
|
||||
function cdd() {
|
||||
local target="''${1}"
|
||||
|
||||
if [[ "''${target}" = "" ]]; then
|
||||
help cdd
|
||||
return 2
|
||||
fi
|
||||
|
||||
local array=($(_cdd_directories))
|
||||
local result
|
||||
|
||||
# Check for exact match ELSE look for containing.
|
||||
if _contains ''${target} ''${array[@]}; then
|
||||
local current="''${PWD%/*}"
|
||||
result="''${current%\/$target\/*}/''${target}"
|
||||
else
|
||||
# Make search case-insensitive.
|
||||
shopt -s nocasematch
|
||||
|
||||
# Find dir name that contains input.
|
||||
local found=1
|
||||
for (( idx=''${#array[@]}-1 ; idx>=0 ; idx-- )); do
|
||||
dir="''${array[idx]}"
|
||||
[[ "''${dir}" =~ "''${target}" ]] && found=0
|
||||
[[ ''${found} = 0 ]] && result="/''${dir}''${result}"
|
||||
done
|
||||
|
||||
# Clean-up???
|
||||
shopt -u nocasematch
|
||||
fi
|
||||
|
||||
# Go there!
|
||||
if [[ "''${result}" != "" ]]; then
|
||||
echo "''${result}"
|
||||
cd "''${result}"
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Get list of all parent dirs.
|
||||
function _cdd_directories() {
|
||||
local array
|
||||
IFS='/' read -r -a array <<< "''${PWD}"
|
||||
array=("''${array[@]:1}")
|
||||
unset array[-1]
|
||||
printf "%s\n" "''${array[@]}"
|
||||
}
|
||||
|
||||
function _comp_cdd() {
|
||||
local IFS=$'\n'
|
||||
local dirs=($(_cdd_directories))
|
||||
_autocomplete_first ''${dirs[@]}
|
||||
}
|
||||
|
||||
complete -o nosort -o filenames -F _comp_cdd cdd
|
||||
'';
|
||||
}
|
|
@ -1,59 +0,0 @@
|
|||
# CD (back to) directory.
|
||||
# Goes to the exact-match dir first. If no exact match found, it finds first directory that contains the input (case-insensitive).
|
||||
# Usage: cdd <DIR>
|
||||
function cdd() {
|
||||
local target="${1}"
|
||||
|
||||
if [[ "${target}" = "" ]]; then
|
||||
help cdd
|
||||
return 2
|
||||
fi
|
||||
|
||||
local array=($(_cdd_directories))
|
||||
local result
|
||||
|
||||
# Check for exact match ELSE look for containing.
|
||||
if _contains ${target} ${array[@]}; then
|
||||
local current="${PWD%/*}"
|
||||
result="${current%\/$target\/*}/${target}"
|
||||
else
|
||||
# Make search case-insensitive.
|
||||
shopt -s nocasematch
|
||||
|
||||
# Find dir name that contains input.
|
||||
local found=1
|
||||
for (( idx=${#array[@]}-1 ; idx>=0 ; idx-- )); do
|
||||
dir="${array[idx]}"
|
||||
[[ "${dir}" =~ "${target}" ]] && found=0
|
||||
[[ ${found} = 0 ]] && result="/${dir}${result}"
|
||||
done
|
||||
|
||||
# Clean-up???
|
||||
shopt -u nocasematch
|
||||
fi
|
||||
|
||||
# Go there!
|
||||
if [[ "${result}" != "" ]]; then
|
||||
echo "${result}"
|
||||
cd "${result}"
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Get list of all parent dirs.
|
||||
function _cdd_directories() {
|
||||
local array
|
||||
IFS='/' read -r -a array <<< "${PWD}"
|
||||
array=("${array[@]:1}")
|
||||
unset array[-1]
|
||||
printf "%s\n" "${array[@]}"
|
||||
}
|
||||
|
||||
function _comp_cdd() {
|
||||
local IFS=$'\n'
|
||||
local dirs=($(_cdd_directories))
|
||||
_autocomplete_first ${dirs[@]}
|
||||
}
|
||||
|
||||
complete -o nosort -o filenames -F _comp_cdd cdd
|
module/common/bash/module/Checksum.nix | 87 (new file)
|
@ -0,0 +1,87 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Save file checksums.
|
||||
# For file with a name `file` it will create a new file called `.file.sha1` with hash in it.
|
||||
# All files by default.
|
||||
# Usage: checksum_create [FILES]
|
||||
function checksum_create() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
process() {
|
||||
local hashfile=".''${target#./}.sha1"
|
||||
|
||||
# Skip if hash exists.
|
||||
[[ -f "''${hashfile}" ]] && return 0
|
||||
|
||||
# Calculate hash.
|
||||
pv ''${target} | sha1sum > ''${hashfile}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Check stored values against actual files.
|
||||
# All files by default.
|
||||
# Usage: checksum_check [FILES]
|
||||
function checksum_check() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
process() {
|
||||
local hashfile=".''${target#./}.sha1"
|
||||
|
||||
# Skip if hash doesn't exist.
|
||||
[[ -f "''${hashfile}" ]] || { _iterate_skip "No hash found."; return 0; }
|
||||
|
||||
# Calculate hash.
|
||||
local stored=$(cat "''${hashfile}" | cut -d\ -f1)
|
||||
local actual=$(pv "''${target}" | sha1sum | cut -d\ -f1)
|
||||
|
||||
if [[ "''${stored}" != "''${actual}" ]]; then
|
||||
_error "Failed."
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Calculate hashes for all files recursively and store in a file called `checksum.sha1`.
|
||||
function checksum() {
|
||||
find -type f | parallel -j $(_core_count) -- sha1sum {} >> checksum.sha1
|
||||
}
|
||||
|
||||
# Create checksum for a file.
|
||||
# Usage: _checksum_create <FILE>
|
||||
function _checksum_create() {
|
||||
local path="''${1%/*}"
|
||||
local name="''${1##*/}"
|
||||
sha1sum "''${path}/''${name}" > "''${path}/.''${name}.sha1"
|
||||
}
|
||||
|
||||
# Check checksum for a file.
|
||||
# Usage: _checksum_check <FILE>
|
||||
function _checksum_check() {
|
||||
local file="''${1##*\ \ }"
|
||||
local stored="''${1%%\ \ *}"
|
||||
|
||||
# Skip if no file.
|
||||
[[ -f "''${file}" ]] || return 0
|
||||
|
||||
# Check file hash.
|
||||
local actual=$(sha1sum "''${file}")
|
||||
actual="''${actual%%\ \ *}"
|
||||
|
||||
# Compare values.
|
||||
if [[ "''${stored}" != "''${actual}" ]]; then
|
||||
_error "''${file}: Failed."
|
||||
return 1
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,83 +0,0 @@
|
|||
# Save file checksums.
|
||||
# For file with a name `file` it will create a new file called `.file.sha1` with hash in it.
|
||||
# All files by default.
|
||||
# Usage: checksum_create [FILES]
|
||||
function checksum_create() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
process() {
|
||||
local hashfile=".${target#./}.sha1"
|
||||
|
||||
# Skip if hash exists.
|
||||
[[ -f "${hashfile}" ]] && return 0
|
||||
|
||||
# Calculate hash.
|
||||
pv ${target} | sha1sum > ${hashfile}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Check stored values against actual files.
|
||||
# All files by default.
|
||||
# Usage: checksum_check [FILES]
|
||||
function checksum_check() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
process() {
|
||||
local hashfile=".${target#./}.sha1"
|
||||
|
||||
# Skip if hash doesn't exist.
|
||||
[[ -f "${hashfile}" ]] || { _iterate_skip "No hash found."; return 0; }
|
||||
|
||||
# Calculate hash.
|
||||
local stored=$(cat "${hashfile}" | cut -d\ -f1)
|
||||
local actual=$(pv "${target}" | sha1sum | cut -d\ -f1)
|
||||
|
||||
if [[ "${stored}" != "${actual}" ]]; then
|
||||
_error "Failed."
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Calculate hashes for all files recursively and store in a file called `checksum.sha1`.
|
||||
function checksum() {
|
||||
find -type f | parallel -j $(_core_count) -- sha1sum {} >> checksum.sha1
|
||||
}
|
||||
|
||||
# Create checksum for a file.
|
||||
# Usage: _checksum_create <FILE>
|
||||
function _checksum_create() {
|
||||
local path="${1%/*}"
|
||||
local name="${1##*/}"
|
||||
sha1sum "${path}/${name}" > "${path}/.${name}.sha1"
|
||||
}
|
||||
|
||||
# Check checksum for a file.
|
||||
# Usage: _checksum_check <FILE>
|
||||
function _checksum_check() {
|
||||
local file="${1##*\ \ }"
|
||||
local stored="${1%%\ \ *}"
|
||||
|
||||
# Skip if no file.
|
||||
[[ -f "${file}" ]] || return 0
|
||||
|
||||
# Check file hash.
|
||||
local actual=$(sha1sum "${file}")
|
||||
actual="${actual%%\ \ *}"
|
||||
|
||||
# Compare values.
|
||||
if [[ "${stored}" != "${actual}" ]]; then
|
||||
_error "${file}: Failed."
|
||||
return 1
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
9
module/common/bash/module/Chmod.nix
Normal file
|
@ -0,0 +1,9 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Add executable flag to file.
|
||||
# Usage: x <FILES>
|
||||
function x() {
|
||||
chmod +x -- "''${@}"
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,5 +0,0 @@
|
|||
# Add executable flag to file.
|
||||
# Usage: x <FILES>
|
||||
function x() {
|
||||
chmod +x -- "${@}"
|
||||
}
|
28
module/common/bash/module/Color.nix
Normal file
|
@ -0,0 +1,28 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Collection of available CLI colors.
|
||||
# They may differ depending on the terminal used.
|
||||
# Colors with 'b' prefix are bold colors.
|
||||
|
||||
export color_default="\033[0m"
|
||||
export color_blue="\033[0;34m"
|
||||
export color_bblue="\033[1;34m"
|
||||
export color_cyan="\033[0;36m"
|
||||
export color_bcyan="\033[1;36m"
|
||||
export color_green="\033[0;32m"
|
||||
export color_bgreen="\033[1;32m"
|
||||
export color_purple="\033[0;35m"
|
||||
export color_bpurple="\033[1;35m"
|
||||
export color_red="\033[0;31m"
|
||||
export color_bred="\033[1;31m"
|
||||
export color_white="\033[0;37m"
|
||||
export color_bwhite="\033[1;37m"
|
||||
export color_yellow="\033[0;33m"
|
||||
export color_byellow="\033[1;33m"
|
||||
|
||||
# Print all available colors with their names colored in the corresponding color.
|
||||
function color_test() {
|
||||
echo -e "''${color_default}color_default\n''${color_blue}color_blue\n''${color_bblue}color_bblue\n''${color_cyan}color_cyan\n''${color_bcyan}color_bcyan\n''${color_green}color_green\n''${color_bgreen}color_bgreen\n''${color_purple}color_purple\n''${color_bpurple}color_bpurple\n''${color_red}color_red\n''${color_bred}color_bred\n''${color_white}color_white\n''${color_bwhite}color_bwhite\n''${color_yellow}color_yellow\n''${color_byellow}color_byellow"
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,24 +0,0 @@
|
|||
# Collection of available CLI colors.
|
||||
# They may differ depending on the terminal used.
|
||||
# Colors with 'b' prefix are bold colors.
|
||||
|
||||
export color_default="\033[0m"
|
||||
export color_blue="\033[0;34m"
|
||||
export color_bblue="\033[1;34m"
|
||||
export color_cyan="\033[0;36m"
|
||||
export color_bcyan="\033[1;36m"
|
||||
export color_green="\033[0;32m"
|
||||
export color_bgreen="\033[1;32m"
|
||||
export color_purple="\033[0;35m"
|
||||
export color_bpurple="\033[1;35m"
|
||||
export color_red="\033[0;31m"
|
||||
export color_bred="\033[1;31m"
|
||||
export color_white="\033[0;37m"
|
||||
export color_bwhite="\033[1;37m"
|
||||
export color_yellow="\033[0;33m"
|
||||
export color_byellow="\033[1;33m"
|
||||
|
||||
# Print all available colors with their names colored in corresponding color.
|
||||
function color_test() {
|
||||
echo -e "${color_default}color_default\n${color_blue}color_blue\n${color_bblue}color_bblue\n${color_cyan}color_cyan\n${color_bcyan}color_bcyan\n${color_green}color_green\n${color_bgreen}color_bgreen\n${color_purple}color_purple\n${color_bpurple}color_bpurple\n${color_red}color_red\n${color_bred}color_bred\n${color_white}color_white\n${color_bwhite}color_bwhite\n${color_yellow}color_yellow\n${color_byellow}color_byellow"
|
||||
}
|
84
module/common/bash/module/Container.nix
Normal file
|
@ -0,0 +1,84 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Attach/create container box in current directory with specified name.
|
||||
# By default uses current dir name.
|
||||
# Usage: ca [NAME]
|
||||
function ca() {
|
||||
local name="''${1}"
|
||||
|
||||
# Set default name.
|
||||
# [[ "''${name}" = "" ]] && name="main"
|
||||
[[ "''${name}" = "" ]] && name=$(parse_alnum "''${PWD##*/}")
|
||||
|
||||
# Append box prefix.
|
||||
name="box-''${name}"
|
||||
|
||||
# Create container.
|
||||
docker run \
|
||||
--privileged \
|
||||
-d -it \
|
||||
-h "''${HOSTNAME}''${name#box}" --name "''${name}" \
|
||||
--workdir /data \
|
||||
-e XDG_RUNTIME_DIR=/tmp \
|
||||
-e WAYLAND_DISPLAY=''${WAYLAND_DISPLAY} \
|
||||
-v ''${XDG_RUNTIME_DIR}/''${WAYLAND_DISPLAY}:/tmp/''${WAYLAND_DISPLAY} \
|
||||
-v ''${PWD}:/data \
|
||||
-v ''${HOME}:/root \
|
||||
debian:latest bash -c bash &> /dev/null
|
||||
|
||||
# Attempt to start container.
|
||||
docker start "''${name}" &> /dev/null
|
||||
|
||||
# Attach to running container.
|
||||
docker attach "''${name}"
|
||||
}
|
||||
|
||||
# Remove container box with specified name.
|
||||
# By default uses current dir name.
|
||||
# Usage: ck [NAME]
|
||||
function ck() {
|
||||
local name="''${1}"
|
||||
|
||||
# Set default name.
|
||||
[[ "''${name}" = "" ]] && name=$(parse_alnum "''${PWD##*/}")
|
||||
|
||||
# Append box prefix.
|
||||
name="box-''${name}"
|
||||
|
||||
# Kill container.
|
||||
docker kill "''${name}" &> /dev/null
|
||||
docker rm "''${name}" &> /dev/null
|
||||
}
|
||||
|
||||
# Remove all container boxes.
|
||||
function cka() {
|
||||
local IFS=$'\n'
|
||||
local boxes=$(_get_boxes)
|
||||
|
||||
for box in ''${boxes[@]}; do
|
||||
ck "''${box}"
|
||||
done
|
||||
}
|
||||
|
||||
# List all container boxes.
|
||||
function cl() {
|
||||
_get_boxes
|
||||
}
|
||||
|
||||
# Print all boxes.
|
||||
function _get_boxes() {
|
||||
local IFS=$'\n'
|
||||
local boxes=$(docker ps -a | grep "box-" | sed -e "s/.*box-//")
|
||||
|
||||
[[ "''${boxes[@]}" != "" ]] && echo "''${boxes[@]}" || true
|
||||
}
|
||||
|
||||
# Autocomplete with boxes.
|
||||
function _comp_get_boxes() {
|
||||
local IFS=$'\n'
|
||||
_autocomplete_first $(_get_boxes)
|
||||
}
|
||||
|
||||
complete -F _comp_get_boxes ca ck
|
||||
'';
|
||||
}
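A hypothetical session with the box helpers above (assumes docker and this module are available; the project path and box name are placeholders):
cd ~/src/myproject
ca          # attach/create the container named box-myproject
ca devbox   # or name it explicitly: box-devbox
cl          # list all box-* containers
ck devbox   # kill and remove box-devbox
cka         # remove every box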
|
|
@ -1,80 +0,0 @@
|
|||
# Attach/create container box in current directory with specified name.
|
||||
# By default uses current dir name.
|
||||
# Usage: ca [NAME]
|
||||
function ca() {
|
||||
local name="${1}"
|
||||
|
||||
# Set default name.
|
||||
# [[ "${name}" = "" ]] && name="main"
|
||||
[[ "${name}" = "" ]] && name=$(parse_alnum "${PWD##*/}")
|
||||
|
||||
# Append box prefix.
|
||||
name="box-${name}"
|
||||
|
||||
# Create container.
|
||||
docker run \
|
||||
--privileged \
|
||||
-d -it \
|
||||
-h "${HOSTNAME}${name#box}" --name "${name}" \
|
||||
--workdir /data \
|
||||
-e XDG_RUNTIME_DIR=/tmp \
|
||||
-e WAYLAND_DISPLAY=${WAYLAND_DISPLAY} \
|
||||
-v ${XDG_RUNTIME_DIR}/${WAYLAND_DISPLAY}:/tmp/${WAYLAND_DISPLAY} \
|
||||
-v ${PWD}:/data \
|
||||
-v ${HOME}:/root \
|
||||
debian:latest bash -c bash &> /dev/null
|
||||
|
||||
# Attempt to start container.
|
||||
docker start "${name}" &> /dev/null
|
||||
|
||||
# Attach to running container.
|
||||
docker attach "${name}"
|
||||
}
|
||||
|
||||
# Remove container box with specified name.
|
||||
# By default uses current dir name.
|
||||
# Usage: ck [NAME]
|
||||
function ck() {
|
||||
local name="${1}"
|
||||
|
||||
# Set default name.
|
||||
[[ "${name}" = "" ]] && name=$(parse_alnum "${PWD##*/}")
|
||||
|
||||
# Append box prefix.
|
||||
name="box-${name}"
|
||||
|
||||
# Kill container.
|
||||
docker kill "${name}" &> /dev/null
|
||||
docker rm "${name}" &> /dev/null
|
||||
}
|
||||
|
||||
# Remove all container boxes.
|
||||
function cka() {
|
||||
local IFS=$'\n'
|
||||
local boxes=$(_get_boxes)
|
||||
|
||||
for box in ${boxes[@]}; do
|
||||
ck "${box}"
|
||||
done
|
||||
}
|
||||
|
||||
# List all container boxes.
|
||||
function cl() {
|
||||
_get_boxes
|
||||
}
|
||||
|
||||
# Print all boxes.
|
||||
function _get_boxes() {
|
||||
local IFS=$'\n'
|
||||
local boxes=$(docker ps -a | grep "box-" | sed -e "s/.*box-//")
|
||||
|
||||
[[ "${boxes[@]}" != "" ]] && echo "${boxes[@]}" || true
|
||||
}
|
||||
|
||||
# Autocomplete with boxes.
|
||||
function _comp_get_boxes() {
|
||||
local IFS=$'\n'
|
||||
_autocomplete_first $(_get_boxes)
|
||||
}
|
||||
|
||||
complete -F _comp_get_boxes ca ck
|
13
module/common/bash/module/Copypaste.nix
Normal file
|
@ -0,0 +1,13 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Copy stdin to system clipboard. *Example:* `echo hi \| copy`.
|
||||
function copy() {
|
||||
wl-copy
|
||||
}
|
||||
|
||||
# Paste system clipboard to stdout. *Example:* `paste > file.txt`.
|
||||
function paste() {
|
||||
wl-paste
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,9 +0,0 @@
|
|||
# Copy stdin to system clipboard. *Example:* `echo hi \| copy`.
|
||||
function copy() {
|
||||
wl-copy
|
||||
}
|
||||
|
||||
# Paste system clipboard to stdout. *Example:* `paste > file.txt`.
|
||||
function paste() {
|
||||
wl-paste
|
||||
}
|
28
module/common/bash/module/Cp.nix
Normal file
|
@ -0,0 +1,28 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Replaces default cp with rsync.
|
||||
# Usage: rcp <FROM> <TO>
|
||||
function rcp() {
|
||||
rsync -ahP --chmod=u+w "''${@}"
|
||||
}
|
||||
|
||||
# Copy and also merge all changes (delete dst files that do not exist in src).
|
||||
# Usage: rcp_merge <FROM> <TO>
|
||||
function rcp_merge() {
|
||||
rsync -ahP --chmod=u+w --delete "''${@}"
|
||||
}
|
||||
|
||||
# Copy by creating hardlinks.
|
||||
# Works for directories, too.
|
||||
# Usage: cp_link <FROM> <TO>
|
||||
function cp_link() {
|
||||
/usr/bin/env cp -lr "''${@}"
|
||||
}
|
||||
|
||||
# Print the output of rcp_merge without writing anything.
|
||||
# Usage: rcp_test <FROM> <TO>
|
||||
function rcp_test() {
|
||||
rsync -ahP --chmod=u+w --delete -n "''${@}"
|
||||
}
|
||||
'';
|
||||
}
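Usage sketch (paths are placeholders):
rcp ~/music/ /mnt/backup/music/         # rsync-based copy with progress
rcp_test ~/music/ /mnt/backup/music/    # dry run of the mirroring copy
rcp_merge ~/music/ /mnt/backup/music/   # mirror: also deletes files missing from the source
cp_link big_dir/ big_dir_copy/          # hardlink copy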
|
|
@ -1,24 +0,0 @@
|
|||
# Replaces default cp with rsync.
|
||||
# Usage: rcp <FROM> <TO>
|
||||
function rcp() {
|
||||
rsync -ahP --chmod=u+w "${@}"
|
||||
}
|
||||
|
||||
# Copy and also merge all changes (delete dst files that do not exist in src).
|
||||
# Usage: rcp_merge <FROM> <TO>
|
||||
function rcp_merge() {
|
||||
rsync -ahP --chmod=u+w --delete "${@}"
|
||||
}
|
||||
|
||||
# Copy by creating hardlinks.
|
||||
# Works for directories, too.
|
||||
# Usage: cp_link <FROM> <TO>
|
||||
function cp_link() {
|
||||
/usr/bin/env cp -lr "${@}"
|
||||
}
|
||||
|
||||
# Print output of cp_merge without writing anything.
|
||||
# Usage: rcp_test <FROM> <TO>
|
||||
function rcp_test() {
|
||||
rsync -ahP --chmod=u+w --delete -n "${@}"
|
||||
}
|
13
module/common/bash/module/Date.nix
Normal file
|
@ -0,0 +1,13 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Print today date in yyyyMMdd format.
|
||||
function today() {
|
||||
date +%Y%m%d
|
||||
}
|
||||
|
||||
# Current day of week number.
|
||||
function dow() {
|
||||
date +%u
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,9 +0,0 @@
|
|||
# Print today date in yyyyMMdd format.
|
||||
function today() {
|
||||
date +%Y%m%d
|
||||
}
|
||||
|
||||
# Current day of week number.
|
||||
function dow() {
|
||||
date +%u
|
||||
}
|
20
module/common/bash/module/Dconf.nix
Normal file
|
@ -0,0 +1,20 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
export _gdconf_path="''${HOME}/.config/linux/Gnome.dconf"
|
||||
|
||||
# Load Gnome settings.
|
||||
function dconf_load() {
|
||||
sed -i -e s/voronind/$(whoami)/g ''${_gdconf_path} ; dconf load / < ''${_gdconf_path}
|
||||
}
|
||||
|
||||
# Dump Gnome settings into the file.
|
||||
# Default name is `gnome.dconf`.
|
||||
# Do this before changing settings and after, and then run `diff` to find out what to add to the main `gnome.dconf`.
|
||||
# Usage: dconf_save [FILE]
|
||||
function dconf_save() {
|
||||
local name="''${1}"
|
||||
[[ "''${name}" = "" ]] && name="gnome.dconf"
|
||||
dconf dump / > "''${name}"
|
||||
}
|
||||
'';
|
||||
}
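The intended workflow, as a sketch (file names are placeholders):
dconf_save before.dconf         # dump current Gnome settings
# change something in Gnome Settings, then:
dconf_save after.dconf
diff before.dconf after.dconf   # pick the keys worth keeping in Gnome.dconf
dconf_load                      # re-apply ~/.config/linux/Gnome.dconf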
|
|
@ -1,16 +0,0 @@
|
|||
export _gdconf_path="${HOME}/.config/linux/Gnome.dconf"
|
||||
|
||||
# Load Gnome settings.
|
||||
function dconf_load() {
|
||||
sed -i -e s/voronind/$(whoami)/g ${_gdconf_path} ; dconf load / < ${_gdconf_path}
|
||||
}
|
||||
|
||||
# Dump Gnome settings into the file.
|
||||
# Default name is `gnome.dconf`.
|
||||
# Do this before changing settings and after, an then run `diff` to find out what to add to the main `gnome.dconf`.
|
||||
# Usage: dconf_save [FILE]
|
||||
function dconf_save() {
|
||||
local name="${1}"
|
||||
[[ "${name}" = "" ]] && name="gnome.dconf"
|
||||
dconf dump / > "${name}"
|
||||
}
|
15
module/common/bash/module/Disk.nix
Normal file
|
@ -0,0 +1,15 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Show only physical drives info.
|
||||
function pdf() {
|
||||
df --si | sed -e '1p' -e '/^\/dev\//!d'
|
||||
}
|
||||
|
||||
# Show total size in SI.
|
||||
# Current dir by default.
|
||||
# Usage: tdu [DIRS]
|
||||
function tdu() {
|
||||
du -sh --si "''${@}"
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,11 +0,0 @@
|
|||
# Show only physical drives info.
|
||||
function pdf() {
|
||||
df --si | sed -e '1p' -e '/^\/dev\//!d'
|
||||
}
|
||||
|
||||
# Show total size in SI.
|
||||
# Current dir by default.
|
||||
# Usage: tdu [DIRS]
|
||||
function tdu() {
|
||||
du -sh --si "${@}"
|
||||
}
|
19
module/common/bash/module/Distrobox.nix
Normal file
|
@ -0,0 +1,19 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Create/Attach to the box.
|
||||
# Uses name `main` by default.
|
||||
# Usage: da [BOX]
|
||||
function da() {
|
||||
local name="''${1}"
|
||||
|
||||
[[ "''${name}" = "" ]] && name="main"
|
||||
# if [[ "''${name}" = "" ]]; then
|
||||
# help da
|
||||
# return 2
|
||||
# fi
|
||||
|
||||
# --user 0 is required for rootless docker.
|
||||
distrobox enter -a '--user=0' "''${name}"
|
||||
}
|
||||
'';
|
||||
}
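Usage sketch (the box name is a placeholder):
da        # enter (or create) the default box `main`
da work   # enter a named box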
|
|
@ -1,15 +0,0 @@
|
|||
# Create/Attach to the box.
|
||||
# Uses name `main` by default.
|
||||
# Usage: da [BOX]
|
||||
function da() {
|
||||
local name="${1}"
|
||||
|
||||
[[ "${name}" = "" ]] && name="main"
|
||||
# if [[ "${name}" = "" ]]; then
|
||||
# help da
|
||||
# return 2
|
||||
# fi
|
||||
|
||||
# --user 0 is required for rootless docker.
|
||||
distrobox enter -a '--user=0' "${name}"
|
||||
}
|
15
module/common/bash/module/Dmenu.nix
Normal file
|
@ -0,0 +1,15 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Wrapped dmenu_path to include my functions.
|
||||
function _dmenu_path_wrapped() {
|
||||
c=0
|
||||
while [ ''${c} -lt 1000 ]; do
|
||||
echo
|
||||
((c++))
|
||||
done
|
||||
|
||||
dmenu_path
|
||||
find_function | grep -v ^_
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,11 +0,0 @@
|
|||
# Wrapped dmenu_path to include my functions.
|
||||
function _dmenu_path_wrapped() {
|
||||
c=0
|
||||
while [ ${c} -lt 1000 ]; do
|
||||
echo
|
||||
((c++))
|
||||
done
|
||||
|
||||
dmenu_path
|
||||
find_function | grep -v ^_
|
||||
}
|
99
module/common/bash/module/Docker.nix
Normal file
|
@ -0,0 +1,99 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Show container's volumes.
|
||||
# Usage: docker_volumes <CONTAINER>
|
||||
function docker_volumes() {
|
||||
docker inspect -f '{{ .Mounts }}' "''${@}"
|
||||
}
|
||||
|
||||
# Check if any container exited.
|
||||
function docker_health() {
|
||||
docker ps -a | grep Exited
|
||||
}
|
||||
|
||||
# Find out container's IP address.
|
||||
# Usage: docker_ip <CONTAINER>
|
||||
function docker_ip() {
|
||||
docker inspect -f '\'''{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}'\' "''${1}" | sed "s/^.//" | sed "s/.$//"
|
||||
}
|
||||
|
||||
# Update all docker images.
|
||||
function docker_update() {
|
||||
docker images --format "{{.Repository}}:{{.Tag}}" | xargs -L1 docker pull
|
||||
}
|
||||
|
||||
# Docker compose shortcut.
|
||||
function dc() {
|
||||
docker compose "''${@}"
|
||||
}
|
||||
|
||||
# Docker compose up.
|
||||
# Usage: dcu [SERVICES]
|
||||
function dcu() {
|
||||
docker compose up -d "''${@}"
|
||||
}
|
||||
|
||||
# Docker compose down.
|
||||
# Usage: dcd [SERVICES]
|
||||
function dcd() {
|
||||
docker compose down "''${@}"
|
||||
}
|
||||
|
||||
# Docker compose pull.
|
||||
# Usage: dcp [SERVICES]
|
||||
function dcp() {
|
||||
docker compose pull "''${@}"
|
||||
}
|
||||
|
||||
# Docker compose logs.
|
||||
# Usage: dcl [SERVICES]
|
||||
function dcl() {
|
||||
docker compose logs -f "''${@}"
|
||||
}
|
||||
|
||||
# Docker compose restart.
|
||||
# Usage: dcr [SERVICES]
|
||||
function dcr() {
|
||||
docker compose restart "''${@}"
|
||||
}
|
||||
|
||||
# Docker compose stop.
|
||||
# Usage: dcs [SERVICES]
|
||||
function dcs() {
|
||||
docker compose stop "''${@}"
|
||||
}
|
||||
|
||||
# Docker compose down & up specified services.
|
||||
# Usage: dcdu [SERVICES]
|
||||
function dcdu() {
|
||||
dcd "''${@}"
|
||||
dcu "''${@}"
|
||||
}
|
||||
|
||||
# Docker compose pull & up specified services.
|
||||
# Usage: dcpu [SERVICES]
|
||||
function dcpu() {
|
||||
dcp "''${@}"
|
||||
dcu "''${@}"
|
||||
}
|
||||
|
||||
# Docker compose up & attach to logs for specified services.
|
||||
# Usage: dcul [SERVICES]
|
||||
function dcul() {
|
||||
dcu "''${@}" && dcl "''${@}"
|
||||
}
|
||||
|
||||
# Autocomplete with available services.
|
||||
function _dc_services() {
|
||||
_autocomplete "$(docker compose config --services 2> /dev/null)"
|
||||
}
|
||||
|
||||
# Autocomplete with available container names.
|
||||
function _dc_containers() {
|
||||
_autocomplete "$(docker ps --format "\""{{.Names}}"\"")"
|
||||
}
|
||||
|
||||
complete -F _dc_services dcu dcd dcp dcl dcul dcdu dcr dcs dcpu
|
||||
complete -F _dc_containers docker_volumes docker_ip
|
||||
'';
|
||||
}
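A few hypothetical invocations of the compose shortcuts (service and container names are placeholders):
dcpu            # pull new images and recreate the whole stack
dcul web        # start only the web service and follow its logs
docker_ip web   # resolve a container's IP; names autocomplete via _dc_containers
docker_health   # list anything stuck in Exited state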
|
|
@ -1,95 +0,0 @@
|
|||
# Show container's volumes.
|
||||
# Usage: docker_volumes <CONTAINER>
|
||||
function docker_volumes() {
|
||||
docker inspect -f '{{ .Mounts }}' "${@}"
|
||||
}
|
||||
|
||||
# Check if any container exited.
|
||||
function docker_health() {
|
||||
docker ps -a | grep Exited
|
||||
}
|
||||
|
||||
# Find out container's IP address.
|
||||
# Usage: docker_ip <CONTAINER>
|
||||
function docker_ip() {
|
||||
docker inspect -f '\''{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}'\' "${1}" | sed "s/^.//" | sed "s/.$//"
|
||||
}
|
||||
|
||||
# Update all docker images.
|
||||
function docker_update() {
|
||||
docker images --format "{{.Repository}}:{{.Tag}}" | xargs -L1 docker pull
|
||||
}
|
||||
|
||||
# Docker compose shortcut.
|
||||
function dc() {
|
||||
docker compose "${@}"
|
||||
}
|
||||
|
||||
# Docker compose up.
|
||||
# Usage: dcu [SERVICES]
|
||||
function dcu() {
|
||||
docker compose up -d "${@}"
|
||||
}
|
||||
|
||||
# Docker compose down.
|
||||
# Usage: dcd [SERVICES]
|
||||
function dcd() {
|
||||
docker compose down "${@}"
|
||||
}
|
||||
|
||||
# Docker compose pull.
|
||||
# Usage: dcp [SERVICES]
|
||||
function dcp() {
|
||||
docker compose pull "${@}"
|
||||
}
|
||||
|
||||
# Docker compose logs.
|
||||
# Usage: dcl [SERVICES]
|
||||
function dcl() {
|
||||
docker compose logs -f "${@}"
|
||||
}
|
||||
|
||||
# Docker compose restart.
|
||||
# Usage: dcr [SERVICES]
|
||||
function dcr() {
|
||||
docker compose restart "${@}"
|
||||
}
|
||||
|
||||
# Docker compose stop.
|
||||
# Usage: dcs [SERVICES]
|
||||
function dcs() {
|
||||
docker compose stop "${@}"
|
||||
}
|
||||
|
||||
# Docker compose down & up specified services.
|
||||
# Usage: dcdu [SERVICES]
|
||||
function dcdu() {
|
||||
dcd "${@}"
|
||||
dcu "${@}"
|
||||
}
|
||||
|
||||
# Docker compose pull & up specified services.
|
||||
# Usage: dcpu [SERVICES]
|
||||
function dcpu() {
|
||||
dcp "${@}"
|
||||
dcu "${@}"
|
||||
}
|
||||
|
||||
# Docker compose up & attach to logs for specified services.
|
||||
# Usage: dcul [SERVICES]
|
||||
function dcul() {
|
||||
dcu "${@}" && dcl "${@}"
|
||||
}
|
||||
|
||||
# Autocomplete with available services.
|
||||
function _dc_services() {
|
||||
_autocomplete "$(docker compose config --services 2> /dev/null)"
|
||||
}
|
||||
|
||||
# Autocomplete with available container names.
|
||||
function _dc_containers() {
|
||||
_autocomplete "$(docker ps --format "\""{{.Names}}"\"")"
|
||||
}
|
||||
|
||||
complete -F _dc_services dcu dcd dcp dcl dcul dcdu dcr dcs dcpu
|
||||
complete -F _dc_containers docker_volumes docker_ip
|
43
module/common/bash/module/Dvd.nix
Normal file
|
@ -0,0 +1,43 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Burn specified iso file to DVD.
|
||||
# Usage: dvd_burn_iso <FILE.iso>
|
||||
function dvd_burn_iso() {
|
||||
local iso="''${1}"
|
||||
if [[ "''${iso}" = "" ]]; then
|
||||
help dvd_burn_iso
|
||||
return 2
|
||||
fi
|
||||
|
||||
growisofs -dvd-compat -speed=8 -use-the-force-luke=tty -Z /dev/sr0="''${iso}"
|
||||
}
|
||||
|
||||
# Burn specified iso file to CD.
|
||||
# Usage: cd_burn_iso <FILE.iso>
|
||||
function cd_burn_iso() {
|
||||
local iso="''${1}"
|
||||
if [[ "''${iso}" = "" ]]; then
|
||||
help cd_burn_iso
|
||||
return 2
|
||||
fi
|
||||
|
||||
wodim speed=8 -tao dev=/dev/sr0 "''${iso}"
|
||||
}
|
||||
|
||||
# Burn specified audio files to CD.
|
||||
# Usage: cd_burn_audio <FILES.wav>
|
||||
function cd_burn_audio() {
|
||||
if [[ "''${*}" = "" ]]; then
|
||||
help cd_burn_audio
|
||||
return 2
|
||||
fi
|
||||
|
||||
cdrecord -v dev=/dev/sr0 speed=8 -audio -pad "''${*}"
|
||||
}
|
||||
|
||||
# Spawn Nix shell with required tools.
|
||||
function dvd_shell() {
|
||||
NIX_SHELL="dvd" tmpshell dvdplusrwtools cdrkit
|
||||
}
|
||||
'';
|
||||
}
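Usage sketch (paths are placeholders; `tmpshell` is provided by another module):
dvd_shell                      # temporary nix shell with growisofs/wodim/cdrecord
dvd_burn_iso ~/iso/image.iso
cd_burn_audio *.wav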
|
|
@ -1,39 +0,0 @@
|
|||
# Burn specified iso file to DVD.
|
||||
# Usage: dvd_burn_iso <FILE.iso>
|
||||
function dvd_burn_iso() {
|
||||
local iso="${1}"
|
||||
if [[ "${iso}" = "" ]]; then
|
||||
help dvd_burn_iso
|
||||
return 2
|
||||
fi
|
||||
|
||||
growisofs -dvd-compat -speed=8 -use-the-force-luke=tty -Z /dev/sr0="${iso}"
|
||||
}
|
||||
|
||||
# Burn specified iso file to CD.
|
||||
# Usage: cd_burn_iso <FILE.iso>
|
||||
function cd_burn_iso() {
|
||||
local iso="${1}"
|
||||
if [[ "${iso}" = "" ]]; then
|
||||
help cd_burn_iso
|
||||
return 2
|
||||
fi
|
||||
|
||||
wodim speed=8 -tao dev=/dev/sr0 "${iso}"
|
||||
}
|
||||
|
||||
# Burn specified audio files to CD.
|
||||
# Usage: cd_burn_audio <FILES.wav>
|
||||
function cd_burn_audio() {
|
||||
if [[ "${*}" = "" ]]; then
|
||||
help cd_burn_audio
|
||||
return 2
|
||||
fi
|
||||
|
||||
cdrecord -v dev=/dev/sr0 speed=8 -audio -pad "${*}"
|
||||
}
|
||||
|
||||
# Spawn Nix shell with required tools.
|
||||
function dvd_shell() {
|
||||
NIX_SHELL="dvd" tmpshell dvdplusrwtools cdrkit
|
||||
}
|
99
module/common/bash/module/Ffmpeg.nix
Normal file
|
@ -0,0 +1,99 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Mux audio into containers. File names in the sound and current directories must match.
|
||||
# Usage: ffmpeg_mux_audio <SOUND> <OUTPUT DIR>
|
||||
function ffmpeg_mux_audio() {
|
||||
if [[ "''${1}" = "" ]]; then
|
||||
help ffmpeg_mux_audio
|
||||
return 2
|
||||
fi
|
||||
|
||||
for file in *; do ffmpeg -i "$file" -i "$1"/"$file" -c copy -map 0:v:0 -map 1:a:0 -shortest "$2"/"$file"; done
|
||||
}
|
||||
|
||||
# Mux cover into music file.
|
||||
# Usage: ffmpeg_mux_cover <FORMAT> <COVER>
|
||||
function ffmpeg_mux_cover() {
|
||||
if [[ "''${1}" = "" ]]; then
|
||||
help ffmpeg_mux_cover
|
||||
return 2
|
||||
fi
|
||||
|
||||
local format="''${1}"
|
||||
local cover="''${2}"
|
||||
|
||||
mkdir out
|
||||
|
||||
case "''${format}" in
|
||||
# "mka"|"mkv")
|
||||
# for file in *.''${format}; do
|
||||
# ffmpeg -i "''${file}" -attach "''${cover}" -map 0 -c copy -metadata:s:t mimetype="image/''${cover##*.}" -metadata:s:t:0 filename="cover.''${cover##*.}" "./out/''${file}" || return 1
|
||||
# done
|
||||
# ;;
|
||||
*)
|
||||
for file in *.''${format}; do
|
||||
# ffmpeg -i "''${file}" -i "''${cover}" -map 0 -map 0:-v? -map 1 -codec copy -metadata:s:v title="Album cover" -metadata:s:v comment="Cover (front)" -disposition:v attached_pic ./out/"''${file}" || return 1
|
||||
ffmpeg -i "''${file}" -i "''${cover}" -map 0 -map 1 -codec copy -metadata:s:v title="Album cover" -metadata:s:v comment="Cover (front)" -disposition:v attached_pic ./out/"''${file}" || return 1
|
||||
done
|
||||
;;
|
||||
esac
|
||||
|
||||
mv out/* .
|
||||
rm -d out/ && rm "''${2}"
|
||||
}
|
||||
|
||||
# Generate music metadata from directory structure.
|
||||
# Top dir is the Artist name like this: `The_Beatles`.
|
||||
# Next are albums like this: `2010_My_love`.
|
||||
# Inside are songs like this: `01_sample.flac`.
|
||||
# Usage: ffmpeg_music_meta <FORMAT>
|
||||
function ffmpeg_music_meta() {
|
||||
if [[ "''${1}" = "" ]]; then
|
||||
help ffmpeg_music_meta
|
||||
return 2
|
||||
fi
|
||||
|
||||
local format="''${1}"
|
||||
|
||||
ls *.''${format} &> /dev/null || return 1
|
||||
|
||||
local artist="''${PWD%/*}"; artist="''${artist##*/}"; artist="''${artist//_/ }"
|
||||
local album="''${PWD##*/}"; album="''${album#*_}"; album="''${album//_/ }"
|
||||
local year="''${PWD##*/}"; year="''${year%%_*}"
|
||||
# local total=$(ls *.''${format} | wc -l)
|
||||
|
||||
mkdir out
|
||||
|
||||
for file in *.''${format}; do
|
||||
local track="''${file%%_*}"; track=$((10#''${track})); [[ "''${track}" = "" ]] && track=0
|
||||
local title="''${file#*_}"; title="''${title%.*}"; title="''${title//_/ }"
|
||||
|
||||
# echo "''${artist}; ''${album}; ''${year}; ''${track}; ''${title}"
|
||||
# TODO: make it format-specific.
|
||||
ffmpeg -i "''${file}" -map 0 -c copy -metadata "artist=''${artist}" -metadata "album_artist=''${artist}" -metadata "album=''${album}" -metadata "date=''${year}" -metadata "year=''${year}" -metadata "date_released=''${year}" -metadata "track=''${track}" -metadata "part_number=''${track}" -metadata "title=''${title}" ./out/"''${file}" || return 1
|
||||
done
|
||||
|
||||
mv out/* .
|
||||
rm -d out/
|
||||
}
|
||||
|
||||
# Get video FPS.
|
||||
function _ffprobe_fps() {
|
||||
local fps=$(ffprobe -v 0 -of csv=p=0 -select_streams v:0 -show_entries stream=r_frame_rate "''${1}")
|
||||
[[ "''${fps}" = "" ]] && fps=30 || fps=$((fps))
|
||||
echo "''${fps}"
|
||||
}
|
||||
|
||||
# Get recommended keyframe interval for a file.
|
||||
_ffprobe_keyint() {
|
||||
local fps=$(_ffprobe_fps "''${1}")
|
||||
echo $((fps*5))
|
||||
}
|
||||
|
||||
# Get audio bitrate. 128 by default.
|
||||
function _ffprobe_ba() {
|
||||
local ba=$(ffprobe -v error -select_streams a:0 -show_entries stream=bit_rate -of default=noprint_wrappers=1:nokey=1 "''${1}")
|
||||
[[ "''${ba}" != "N/A" ]] && echo $((ba/1024)) || echo 128
|
||||
}
|
||||
'';
|
||||
}
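A hypothetical run of the metadata helper, following the Artist/Year_Album/NN_Title layout described above:
cd The_Beatles/2010_My_love/
ffmpeg_music_meta flac            # derives artist/album/year/track/title from the paths
ffmpeg_mux_cover flac cover.jpg   # then embed the album art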
|
|
@ -1,95 +0,0 @@
|
|||
# Mux audio into containers. File names in sound and current dirrectories must match.
|
||||
# Usage: ffmpeg_mux_audio <SOUND> <OUTPUT DIR>
|
||||
function ffmpeg_mux_audio() {
|
||||
if [[ "${1}" = "" ]]; then
|
||||
help ffmpeg_mux_audio
|
||||
return 2
|
||||
fi
|
||||
|
||||
for file in *; do ffmpeg -i "$file" -i "$1"/"$file" -c copy -map 0:v:0 -map 1:a:0 -shortest "$2"/"$file"; done
|
||||
}
|
||||
|
||||
# Mux cover into music file.
|
||||
# Usage: ffmpeg_mux_cover <FORMAT> <COVER>
|
||||
function ffmpeg_mux_cover() {
|
||||
if [[ "${1}" = "" ]]; then
|
||||
help ffmpeg_mux_cover
|
||||
return 2
|
||||
fi
|
||||
|
||||
local format="${1}"
|
||||
local cover="${2}"
|
||||
|
||||
mkdir out
|
||||
|
||||
case "${format}" in
|
||||
# "mka"|"mkv")
|
||||
# for file in *.${format}; do
|
||||
# ffmpeg -i "${file}" -attach "${cover}" -map 0 -c copy -metadata:s:t mimetype="image/${cover##*.}" -metadata:s:t:0 filename="cover.${cover##*.}" "./out/${file}" || return 1
|
||||
# done
|
||||
# ;;
|
||||
*)
|
||||
for file in *.${format}; do
|
||||
# ffmpeg -i "${file}" -i "${cover}" -map 0 -map 0:-v? -map 1 -codec copy -metadata:s:v title="Album cover" -metadata:s:v comment="Cover (front)" -disposition:v attached_pic ./out/"${file}" || return 1
|
||||
ffmpeg -i "${file}" -i "${cover}" -map 0 -map 1 -codec copy -metadata:s:v title="Album cover" -metadata:s:v comment="Cover (front)" -disposition:v attached_pic ./out/"${file}" || return 1
|
||||
done
|
||||
;;
|
||||
esac
|
||||
|
||||
mv out/* .
|
||||
rm -d out/ && rm "${2}"
|
||||
}
|
||||
|
||||
# Generate music metadata from directory structure.
|
||||
# Top dir is the Artist name like this: `The_Beatles`.
|
||||
# Next are albums like this: `2010_My_love`.
|
||||
# Inside are songs like this: `01_sample.flac`.
|
||||
# Usage: ffmpeg_music_meta <FORMAT>
|
||||
function ffmpeg_music_meta() {
|
||||
if [[ "${1}" = "" ]]; then
|
||||
help ffmpeg_music_meta
|
||||
return 2
|
||||
fi
|
||||
|
||||
local format="${1}"
|
||||
|
||||
ls *.${format} &> /dev/null || return 1
|
||||
|
||||
local artist="${PWD%/*}"; artist="${artist##*/}"; artist="${artist//_/ }"
|
||||
local album="${PWD##*/}"; album="${album#*_}"; album="${album//_/ }"
|
||||
local year="${PWD##*/}"; year="${year%%_*}"
|
||||
# local total=$(ls *.${format} | wc -l)
|
||||
|
||||
mkdir out
|
||||
|
||||
for file in *.${format}; do
|
||||
local track="${file%%_*}"; track=$((10#${track})); [[ "${track}" = "" ]] && track=0
|
||||
local title="${file#*_}"; title="${title%.*}"; title="${title//_/ }"
|
||||
|
||||
# echo "${artist}; ${album}; ${year}; ${track}; ${title}"
|
||||
# TODO: make it format-specific.
|
||||
ffmpeg -i "${file}" -map 0 -c copy -metadata "artist=${artist}" -metadata "album_artist=${artist}" -metadata "album=${album}" -metadata "date=${year}" -metadata "year=${year}" -metadata "date_released=${year}" -metadata "track=${track}" -metadata "part_number=${track}" -metadata "title=${title}" ./out/"${file}" || return 1
|
||||
done
|
||||
|
||||
mv out/* .
|
||||
rm -d out/
|
||||
}
|
||||
|
||||
# Get video FPS.
|
||||
function _ffprobe_fps() {
|
||||
local fps=$(ffprobe -v 0 -of csv=p=0 -select_streams v:0 -show_entries stream=r_frame_rate "${1}")
|
||||
[[ "${fps}" = "" ]] && fps=30 || fps=$((fps))
|
||||
echo "${fps}"
|
||||
}
|
||||
|
||||
# Get recommended keyframe interval for a file.
|
||||
_ffprobe_keyint() {
|
||||
local fps=$(_ffprobe_fps "${1}")
|
||||
echo $((fps*5))
|
||||
}
|
||||
|
||||
# Get audio bitrage. 128 by default.
|
||||
function _ffprobe_ba() {
|
||||
local ba=$(ffprobe -v error -select_streams a:0 -show_entries stream=bit_rate -of default=noprint_wrappers=1:nokey=1 "${1}")
|
||||
[[ "${ba}" != "N/A" ]] && echo $((ba/1024)) || echo 128
|
||||
}
|
28
module/common/bash/module/File.nix
Normal file
|
@ -0,0 +1,28 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Open file/dir in GUI.
|
||||
# Usage: open <FILE>
|
||||
function open() {
|
||||
xdg-open "''${@}"
|
||||
}
|
||||
alias o="open"
|
||||
|
||||
# Play media file from CLI. All files by default.
|
||||
# Usage: play [FILE]
|
||||
function play() {
|
||||
local targets=''${*}
|
||||
[[ "''${targets}" = "" ]] && targets=$(_ls_file)
|
||||
|
||||
mpv --no-video ''${targets}
|
||||
}
|
||||
|
||||
# Play media files shuffled from CLI. All files by default.
|
||||
# Usage: play_shuffle [FILE]
|
||||
function play_shuffle() {
|
||||
local targets=''${*}
|
||||
[[ "''${targets}" = "" ]] && targets=$(_ls_file)
|
||||
|
||||
mpv --no-video --shuffle ''${targets}
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,24 +0,0 @@
|
|||
# Open file/dir in GUI.
|
||||
# Usage: open <FILE>
|
||||
function open() {
|
||||
xdg-open "${@}"
|
||||
}
|
||||
alias o="open"
|
||||
|
||||
# Play media file from CLI. All files by default.
|
||||
# Usage: play [FILE]
|
||||
function play() {
|
||||
local targets=${*}
|
||||
[[ "${targets}" = "" ]] && targets=$(_ls_file)
|
||||
|
||||
mpv --no-video ${targets}
|
||||
}
|
||||
|
||||
# Play media files shuffled from CLI. All files by default.
|
||||
# Usage: play_shuffle [FILE]
|
||||
function play_shuffle() {
|
||||
local targets=${*}
|
||||
[[ "${targets}" = "" ]] && targets=$(_ls_file)
|
||||
|
||||
mpv --no-video --shuffle ${targets}
|
||||
}
|
9
module/common/bash/module/Find.nix
Normal file
|
@ -0,0 +1,9 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Find all file extensions.
|
||||
function find_ext() {
|
||||
local types=($(find -type f | sed -e "s/.*\///" -e "s/^\.//" -e "/\./!d" -e "s/.*\.//"))
|
||||
echo "''${types[@]}" | tr ' ' '\n' | sort -u
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,26 +0,0 @@
|
|||
# Find all file extensions.
|
||||
function find_ext() {
|
||||
local types=($(find -type f | sed -e "s/.*\///" -e "s/^\.//" -e "/\./!d" -e "s/.*\.//"))
|
||||
echo "${types[@]}" | tr ' ' '\n' | sort -u
|
||||
}
|
||||
|
||||
# Find all modules.
|
||||
function find_module() {
|
||||
/usr/bin/env ls "${BASH_PATH}/module" | /usr/bin/env sed -e "s/.sh$//"
|
||||
}
|
||||
|
||||
# Find all module functions.
|
||||
# Functions from all modules by default.
|
||||
# Usage: find_function [MODULE]
|
||||
function find_function() {
|
||||
local module="${1}"
|
||||
[[ "${module}" = "" ]] && module="*"
|
||||
/usr/bin/env cat "${BASH_PATH}/module"/${module}.sh | /usr/bin/env grep "^function.*()" | /usr/bin/env sed -e "s/^function //" -e "s/().*//"
|
||||
}
|
||||
|
||||
# Autocomplete with module.
|
||||
_find_module() {
|
||||
_autocomplete_first $(find_module)
|
||||
}
|
||||
|
||||
complete -F _find_module find_function
|
70
module/common/bash/module/Fix.nix
Normal file
|
@ -0,0 +1,70 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Fix when ethernet mistakenly detects 100 Mb instead of 1000 Mb.
|
||||
# SPEED is one of 10/100/1000 etc.
|
||||
# Usage: fix_ethernet_speed <DEVICE> <SPEED>
|
||||
function fix_ethernet_speed() {
|
||||
local device="''${1}"
|
||||
local speed="''${2}"
|
||||
|
||||
if [[ "''${device}" = "" || "''${speed}" = "" ]]; then
|
||||
help fix_ethernet_speed
|
||||
return 2
|
||||
fi
|
||||
|
||||
ethtool -s "''${device}" speed "''${speed}"
|
||||
}
|
||||
|
||||
# Fix nautilus after typing wrong sftp password.
|
||||
function fix_files_sftp() {
|
||||
secret-tool clear protocol sftp
|
||||
}
|
||||
|
||||
# Delete lost Gradle lock files.
|
||||
function fix_gradle_lock() {
|
||||
cd "''${HOME}/.gradle" && find -type f | grep \\.lock$ | xargs -- rm
|
||||
cd -
|
||||
}
|
||||
|
||||
# Fix Gnome's broken RDP ffs.
|
||||
# Usage: fix_gnome_rdp <PASSWORD>
|
||||
function fix_gnome_rdp() {
|
||||
local user="''${USERNAME}"
|
||||
local password="''${1}"
|
||||
|
||||
# Check params.
|
||||
if [[ "''${password}" = "" ]]; then
|
||||
help fix_gnome_rdp
|
||||
return 2
|
||||
fi
|
||||
|
||||
# Unlock keyring. PROTIP: Disable password for it in password manager.
|
||||
pkill -9 -f gnome-keyring-daemon
|
||||
echo -n "''${user}" | gnome-keyring-daemon --unlock
|
||||
|
||||
# Generate keys.
|
||||
cd /tmp
|
||||
openssl genrsa -out rdp-tls.key 4096
|
||||
openssl req -new -key rdp-tls.key -subj "/C=US" | openssl x509 -req -days 730 -signkey rdp-tls.key -out rdp-tls.crt
|
||||
mkdir -p "''${HOME}/.local/share/gnome-remote-desktop/"
|
||||
mv rdp-tls.key rdp-tls.crt "''${HOME}/.local/share/gnome-remote-desktop/"
|
||||
|
||||
# Configure RDP.
|
||||
grdctl rdp set-tls-cert "''${HOME}/.local/share/gnome-remote-desktop/rdp-tls.crt"
|
||||
grdctl rdp set-tls-key "''${HOME}/.local/share/gnome-remote-desktop/rdp-tls.key"
|
||||
grdctl rdp set-credentials "''${user}" "''${password}"
|
||||
grdctl rdp disable-view-only
|
||||
|
||||
# Start service.
|
||||
grdctl rdp enable
|
||||
systemctl --user start gnome-remote-desktop.service
|
||||
|
||||
# Show status.
|
||||
grdctl status --show-credentials
|
||||
systemctl --user status gnome-remote-desktop.service
|
||||
|
||||
# Show extra info.
|
||||
_warn "You may need to manually restart Desktop sharing via Gnome Settings."
|
||||
}
|
||||
'';
|
||||
}
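Usage sketch (device name and password are placeholders):
fix_ethernet_speed enp3s0 1000   # force the link back to 1000 Mb
fix_files_sftp                   # clear the cached sftp credentials
fix_gnome_rdp 'secret'           # regenerate TLS keys and re-enable gnome-remote-desktop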
|
|
@ -1,66 +0,0 @@
|
|||
# Fix when ethernet mistakenly detects 100 Mb instead of 1000 Mb.
|
||||
# SPEED is one of 10/100/1000 etc.
|
||||
# Usage: fix_ethernet_speed <DEVICE> <SPEED>
|
||||
function fix_ethernet_speed() {
|
||||
local device="${1}"
|
||||
local speed="${2}"
|
||||
|
||||
if [[ "${device}" = "" || "${speed}" = "" ]]; then
|
||||
help fix_ethernet_speed
|
||||
return 2
|
||||
fi
|
||||
|
||||
ethtool -s "${device}" speed "${speed}"
|
||||
}
|
||||
|
||||
# Fix nautilus after typing wrong sftp password.
|
||||
function fix_files_sftp() {
|
||||
secret-tool clear protocol sftp
|
||||
}
|
||||
|
||||
# Delete lost Gradle lock files.
|
||||
function fix_gradle_lock() {
|
||||
cd "${HOME}/.gradle" && find -type f | grep \\.lock$ | xargs -- rm
|
||||
cd -
|
||||
}
|
||||
|
||||
# Fix Gnome's broken RDP ffs.
|
||||
# Usage: fix_gnome_rdp <PASSWORD>
|
||||
function fix_gnome_rdp() {
|
||||
local user="${USERNAME}"
|
||||
local password="${1}"
|
||||
|
||||
# Check params.
|
||||
if [[ "${password}" = "" ]]; then
|
||||
help fix_gnome_rdp
|
||||
return 2
|
||||
fi
|
||||
|
||||
# Unlock keyring. PROTIP: Disable password for it in password manager.
|
||||
pkill -9 -f gnome-keyring-daemon
|
||||
echo -n "${user}" | gnome-keyring-daemon --unlock
|
||||
|
||||
# Generate keys.
|
||||
cd /tmp
|
||||
openssl genrsa -out rdp-tls.key 4096
|
||||
openssl req -new -key rdp-tls.key -subj "/C=US" | openssl x509 -req -days 730 -signkey rdp-tls.key -out rdp-tls.crt
|
||||
mkdir -p "${HOME}/.local/share/gnome-remote-desktop/"
|
||||
mv rdp-tls.key rdp-tls.crt "${HOME}/.local/share/gnome-remote-desktop/"
|
||||
|
||||
# Configure RDP.
|
||||
grdctl rdp set-tls-cert "${HOME}/.local/share/gnome-remote-desktop/rdp-tls.crt"
|
||||
grdctl rdp set-tls-key "${HOME}/.local/share/gnome-remote-desktop/rdp-tls.key"
|
||||
grdctl rdp set-credentials "${user}" "${password}"
|
||||
grdctl rdp disable-view-only
|
||||
|
||||
# Start service.
|
||||
grdctl rdp enable
|
||||
systemctl --user start gnome-remote-desktop.service
|
||||
|
||||
# Show status.
|
||||
grdctl status --show-credentials
|
||||
systemctl --user status gnome-remote-desktop.service
|
||||
|
||||
# Show extra info.
|
||||
_warn "You may need to manually restart Desktop sharing via Gnome Settings."
|
||||
}
|
254
module/common/bash/module/Git.nix
Normal file
|
@ -0,0 +1,254 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Git push.
|
||||
function gps() {
|
||||
git push "''${@}"
|
||||
}
|
||||
|
||||
# Git push all (branches). Useful for pushing all stuff to a new remote.
|
||||
function gpsa() {
|
||||
local remotes=($(git remote))
|
||||
for remote in ''${remotes[@]}; do
|
||||
echo -n "''${remote}: "
|
||||
git push "''${remote}" --tags "refs/remotes/origin/*:refs/heads/*"
|
||||
done
|
||||
}
|
||||
|
||||
# Git force push.
|
||||
function gpsf() {
|
||||
git push --force "''${@}"
|
||||
}
|
||||
|
||||
# Git pull.
|
||||
function gpl() {
|
||||
git pull "''${@}"
|
||||
}
|
||||
|
||||
# Git log.
|
||||
function gl() {
|
||||
git log "''${@}"
|
||||
}
|
||||
|
||||
# Git status.
|
||||
function gs() {
|
||||
git status "''${@}"
|
||||
}
|
||||
|
||||
# Git stash.
|
||||
function gst() {
|
||||
git stash "''${@}"
|
||||
}
|
||||
|
||||
# Git diff.
|
||||
function gd() {
|
||||
git diff "''${@}"
|
||||
}
|
||||
|
||||
# Git commit.
|
||||
function gc() {
|
||||
git commit -m "''${@}"
|
||||
}
|
||||
|
||||
# Git checkout.
|
||||
function gch() {
|
||||
git checkout "''${@}"
|
||||
}
|
||||
|
||||
# Git checkout branch.
|
||||
# Usage: gchb <BRANCH>
|
||||
function gchb() {
|
||||
git checkout -b "''${@}"
|
||||
}
|
||||
|
||||
# Git branch.
|
||||
function gb() {
|
||||
git branch --all "''${@}"
|
||||
}
|
||||
|
||||
# Git branch delete.
|
||||
# Usage: gbd <BRANCH>
|
||||
function gbd() {
|
||||
git branch -D "''${@}"
|
||||
}
|
||||
|
||||
# Git branch delete all except current.
|
||||
function gbda() {
|
||||
git branch | grep -v ^* | xargs git branch -D
|
||||
}
|
||||
|
||||
# Git fetch all.
|
||||
function gf() {
|
||||
git fetch --all -v -p
|
||||
}
|
||||
|
||||
# Git tag.
|
||||
function gt() {
|
||||
git tag "''${@}"
|
||||
}
|
||||
|
||||
# Git ignore files.
|
||||
function gi() {
|
||||
git ls-files -ci --exclude-standard -z | xargs -0 git rm --cached
|
||||
}
|
||||
|
||||
# Git patch create.
|
||||
# Usage: gpc <FILE>
|
||||
function gpc() {
|
||||
git diff > "''${@}"
|
||||
}
|
||||
|
||||
# Git patch (apply).
|
||||
# Usage: gp <FILE>
|
||||
function gp() {
|
||||
git apply "''${@}"
|
||||
}
|
||||
|
||||
# Run git garbage collection.
|
||||
function ggc() {
|
||||
git gc --aggressive --no-cruft --prune=now
|
||||
}
|
||||
|
||||
# Preview diff while adding. Adds current dir by default.
|
||||
# Usage: ga [FILES]
|
||||
function ga() {
|
||||
local target=''${@}
|
||||
|
||||
if [[ "''${target}" = "" ]]; then
|
||||
target="."
|
||||
fi
|
||||
|
||||
git diff ''${target}
|
||||
git add ''${target}
|
||||
}
|
||||
|
||||
# Rebase the last COUNT commits, or from root when COUNT is 0. Default is 2.
|
||||
# Usage: gr [COMMIT COUNT]
|
||||
function gr() {
|
||||
local base="''${1}"
|
||||
|
||||
# rebase last 2 commits by default.
|
||||
if [[ "''${base}" = "" ]]; then
|
||||
base="2"
|
||||
fi
|
||||
|
||||
# if 0, rebase from root. else from specified base.
|
||||
if [[ "''${base}" = "0" ]]; then
|
||||
git rebase -i --root
|
||||
else
|
||||
git rebase -i HEAD~''${base}
|
||||
fi
|
||||
}
|
||||
|
||||
# Specify git user as Dmitry Voronin with provided email.
|
||||
# Usage: gu [EMAIL]
|
||||
function gu() {
|
||||
local name="Dmitry Voronin"
|
||||
local email="''${1}"
|
||||
|
||||
if [[ "''${name}" = "" || "''${email}" = "" ]]; then
|
||||
echo "usage: gu [EMAIL]"
|
||||
return 2
|
||||
fi
|
||||
|
||||
git config user.name "''${name}"
|
||||
git config user.email "''${email}"
|
||||
}
|
||||
|
||||
# Get my git repo.
|
||||
# Usage: gg <REPO>
|
||||
function gg() {
|
||||
local repo="''${1}"
|
||||
|
||||
if [[ "''${repo}" = "" ]]; then
|
||||
help gg
|
||||
return 2
|
||||
fi
|
||||
|
||||
git clone https://git.voronind.com/voronind/"''${repo}"
|
||||
}
|
||||
|
||||
# See diff for a specific commit. Last commit by default.
|
||||
# Usage: gdc [COMMITHASH]
|
||||
function gdc() {
|
||||
local hash="''${1}"
|
||||
[[ "''${hash}" = "" ]] && hash="HEAD"
|
||||
git diff "''${hash}^!"
|
||||
}
|
||||
|
||||
# Get version number based on commit count.
|
||||
function gv() {
|
||||
git rev-list HEAD --count
|
||||
}
|
||||
|
||||
# Open the remote web url in default browser.
|
||||
# Usage: gw [REMOTE]
|
||||
function gw() {
|
||||
local remote="''${1}"
|
||||
[[ "''${remote}" = "" ]] && remote="$(git remote | head -n1)"
|
||||
|
||||
local url="$(git remote get-url ''${remote})"
|
||||
open "''${url}"
|
||||
}
|
||||
|
||||
# Show current branch.
|
||||
function _git_current_branch() {
|
||||
git branch --show-current 2> /dev/null
|
||||
}
|
||||
|
||||
# Show origin's url.
|
||||
function _git_origin_url() {
|
||||
git remote get-url origin
|
||||
}
|
||||
|
||||
# Get this dotfiles url.
|
||||
function _git_dotfiles_url() {
|
||||
echo 'https://git.voronind.com/voronind/linux.git'
|
||||
}
|
||||
|
||||
# Check if current git repo is this dotfiles.
|
||||
function _git_is_dotfiles() {
|
||||
# [[ "$(_git_origin_url)" = "$(_git_dotfiles_url)" ]]
|
||||
local dir="''${PWD}"
|
||||
|
||||
while [[ "''${dir}" != "" ]]; do
|
||||
if [[ -d "''${dir}/.git" ]]; then
|
||||
if [[ "''${dir}" = "''${HOME}" ]] || [[ "''${dir}" = "$(realpath ''${HOME})" ]]; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
dir="''${dir%/*}"
|
||||
done
|
||||
}
|
||||
|
||||
# Autocomplete.
|
||||
_completion_loader git &> /dev/null
|
||||
__git_complete gps _git_push &> /dev/null
|
||||
__git_complete gpsf _git_push &> /dev/null
|
||||
__git_complete gpl _git_pull &> /dev/null
|
||||
__git_complete gl _git_log &> /dev/null
|
||||
__git_complete gs _git_status &> /dev/null
|
||||
__git_complete gst _git_stash &> /dev/null
|
||||
__git_complete gd _git_diff &> /dev/null
|
||||
__git_complete gdc _git_diff &> /dev/null
|
||||
__git_complete gc _git_commit &> /dev/null
|
||||
__git_complete gch _git_checkout &> /dev/null
|
||||
__git_complete gchb _git_checkout &> /dev/null
|
||||
__git_complete gb _git_branch &> /dev/null
|
||||
__git_complete gbd _git_branch &> /dev/null
|
||||
__git_complete gf _git_fetch &> /dev/null
|
||||
__git_complete gt _git_tag &> /dev/null
|
||||
__git_complete gp _git_apply &> /dev/null
|
||||
__git_complete ga _git_add &> /dev/null
|
||||
__git_complete gw _git_pull &> /dev/null
|
||||
|
||||
# Autocomplete with my git emails.
|
||||
function _gu() {
|
||||
_autocomplete_first account@voronind.com dd.voronin@fsight.ru
|
||||
}
|
||||
|
||||
complete -F _gu gu
|
||||
'';
|
||||
}
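A short workflow with the shortcuts above (repo name, staged path and commit message are placeholders):
gg linux                      # clone https://git.voronind.com/voronind/linux
gu account@voronind.com       # set the commit identity for this clone
ga module/ && gc "Message."   # review the diff, stage, then commit
gr 3                          # interactive rebase of the last 3 commits
gdc                           # diff of the last commit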
|
|
@ -1,250 +0,0 @@
|
|||
# Git push.
|
||||
function gps() {
|
||||
git push "${@}"
|
||||
}
|
||||
|
||||
# Git push all (branches). Useful for pushing all stuff to a new remote.
|
||||
function gpsa() {
|
||||
local remotes=($(git remote))
|
||||
for remote in ${remotes[@]}; do
|
||||
echo -n "${remote}: "
|
||||
git push "${remote}" --tags "refs/remotes/origin/*:refs/heads/*"
|
||||
done
|
||||
}
|
||||
|
||||
# Git force push.
|
||||
function gpsf() {
|
||||
git push --force "${@}"
|
||||
}
|
||||
|
||||
# Git pull.
|
||||
function gpl() {
|
||||
git pull "${@}"
|
||||
}
|
||||
|
||||
# Git log.
|
||||
function gl() {
|
||||
git log "${@}"
|
||||
}
|
||||
|
||||
# Git status.
|
||||
function gs() {
|
||||
git status "${@}"
|
||||
}
|
||||
|
||||
# Git stash.
|
||||
function gst() {
|
||||
git stash "${@}"
|
||||
}
|
||||
|
||||
# Git diff.
|
||||
function gd() {
|
||||
git diff "${@}"
|
||||
}
|
||||
|
||||
# Git commit.
|
||||
function gc() {
|
||||
git commit -m "${@}"
|
||||
}
|
||||
|
||||
# Git checkout.
|
||||
function gch() {
|
||||
git checkout "${@}"
|
||||
}
|
||||
|
||||
# Git checkout branch.
|
||||
# Usage: gchb <BRANCH>
|
||||
function gchb() {
|
||||
git checkout -b "${@}"
|
||||
}
|
||||
|
||||
# Git branch.
|
||||
function gb() {
|
||||
git branch --all "${@}"
|
||||
}
|
||||
|
||||
# Git branch delete.
|
||||
# Usage: gbd <BRANCH>
|
||||
function gbd() {
|
||||
git branch -D "${@}"
|
||||
}
|
||||
|
||||
# Git branch delete all except current.
|
||||
function gbda() {
|
||||
git branch | grep -v ^* | xargs git branch -D
|
||||
}
|
||||
|
||||
# Git fetch all.
|
||||
function gf() {
|
||||
git fetch --all -v -p
|
||||
}
|
||||
|
||||
# Git tag.
|
||||
function gt() {
|
||||
git tag "${@}"
|
||||
}
|
||||
|
||||
# Git ignore files.
|
||||
function gi() {
|
||||
git ls-files -ci --exclude-standard -z | xargs -0 git rm --cached
|
||||
}
|
||||
|
||||
# Git patch create.
|
||||
# Usage: gpc <FILE>
|
||||
function gpc() {
|
||||
git diff > "${@}"
|
||||
}
|
||||
|
||||
# Git patch (apply).
|
||||
# Usage: gp <FILE>
|
||||
function gp() {
|
||||
git apply "${@}"
|
||||
}
|
||||
|
||||
# Run git garbage collection.
|
||||
function ggc() {
|
||||
git gc --aggressive --no-cruft --prune=now
|
||||
}
|
||||
|
||||
# Preview diff while adding. Adds current dir by default.
|
||||
# Usage: ga [FILES]
|
||||
function ga() {
|
||||
local target=${@}
|
||||
|
||||
if [[ "${target}" = "" ]]; then
|
||||
target="."
|
||||
fi
|
||||
|
||||
git diff ${target}
|
||||
git add ${target}
|
||||
}
|
||||
|
||||
# Rebase by X commits or from root. When COUNT is 0 - rebase from root. Default is 2.
|
||||
# Usage: gr [COMMIT COUNT]
|
||||
function gr() {
|
||||
local base="${1}"
|
||||
|
||||
# rebase last 2 commits by default.
|
||||
if [[ "${base}" = "" ]]; then
|
||||
base="2"
|
||||
fi
|
||||
|
||||
# if 0, rebase from root. else from specified base.
|
||||
if [[ "${base}" = "0" ]]; then
|
||||
git rebase -i --root
|
||||
else
|
||||
git rebase -i HEAD~${base}
|
||||
fi
|
||||
}
|
||||
|
||||
# Specify git user as Dmitry Voronin with provided email.
|
||||
# Usage: gu [EMAIL]
|
||||
function gu() {
|
||||
local name="Dmitry Voronin"
|
||||
local email="${1}"
|
||||
|
||||
if [[ "${name}" = "" || "${email}" = "" ]]; then
|
||||
echo "usage: gu [EMAIL]"
|
||||
return 2
|
||||
fi
|
||||
|
||||
git config user.name "${name}"
|
||||
git config user.email "${email}"
|
||||
}
|
||||
|
||||
# Get my git repo.
|
||||
# Usage: gg <REPO>
|
||||
function gg() {
|
||||
local repo="${1}"
|
||||
|
||||
if [[ "${repo}" = "" ]]; then
|
||||
help gg
|
||||
return 2
|
||||
fi
|
||||
|
||||
git clone https://git.voronind.com/voronind/"${repo}"
|
||||
}
|
||||
|
||||
# See diff for a specific commit. Last commit by default.
|
||||
# Usage: gdc [COMMITHASH]
|
||||
function gdc() {
|
||||
local hash="${1}"
|
||||
[[ "${hash}" = "" ]] && hash="HEAD"
|
||||
git diff "${hash}^!"
|
||||
}
|
||||
|
||||
# Get version number based on commit count.
|
||||
function gv() {
|
||||
git rev-list HEAD --count
|
||||
}
|
||||
|
||||
# Open the remote web url in default browser.
|
||||
# Usage: gw [REMOTE]
|
||||
function gw() {
|
||||
local remote="${1}"
|
||||
[[ "${remote}" = "" ]] && remote="$(git remote | head -n1)"
|
||||
|
||||
local url="$(git remote get-url ${remote})"
|
||||
open "${url}"
|
||||
}
|
||||
|
||||
# Show current branch.
|
||||
function _git_current_branch() {
|
||||
git branch --show-current 2> /dev/null
|
||||
}
|
||||
|
||||
# Show origin's url.
|
||||
function _git_origin_url() {
|
||||
git remote get-url origin
|
||||
}
|
||||
|
||||
# Get this dotfiles url.
|
||||
function _git_dotfiles_url() {
|
||||
echo 'https://git.voronind.com/voronind/linux.git'
|
||||
}
|
||||
|
||||
# Check if current git repo is this dotfiles.
|
||||
function _git_is_dotfiles() {
|
||||
# [[ "$(_git_origin_url)" = "$(_git_dotfiles_url)" ]]
|
||||
local dir="${PWD}"
|
||||
|
||||
while [[ "${dir}" != "" ]]; do
|
||||
if [[ -d "${dir}/.git" ]]; then
|
||||
if [[ "${dir}" = "${HOME}" ]] || [[ "${dir}" = "$(realpath ${HOME})" ]]; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
dir="${dir%/*}"
|
||||
done
|
||||
}
|
||||
|
||||
# Autocomplete.
|
||||
_completion_loader git &> /dev/null
|
||||
__git_complete gps _git_push &> /dev/null
|
||||
__git_complete gpsf _git_push &> /dev/null
|
||||
__git_complete gpl _git_pull &> /dev/null
|
||||
__git_complete gl _git_log &> /dev/null
|
||||
__git_complete gs _git_status &> /dev/null
|
||||
__git_complete gst _git_stash &> /dev/null
|
||||
__git_complete gd _git_diff &> /dev/null
|
||||
__git_complete gdc _git_diff &> /dev/null
|
||||
__git_complete gc _git_commit &> /dev/null
|
||||
__git_complete gch _git_checkout &> /dev/null
|
||||
__git_complete gchb _git_checkout &> /dev/null
|
||||
__git_complete gb _git_branch &> /dev/null
|
||||
__git_complete gbd _git_branch &> /dev/null
|
||||
__git_complete gf _git_fetch &> /dev/null
|
||||
__git_complete gt _git_tag &> /dev/null
|
||||
__git_complete gp _git_apply &> /dev/null
|
||||
__git_complete ga _git_add &> /dev/null
|
||||
__git_complete gw _git_pull &> /dev/null
|
||||
|
||||
# Autocomplete with my git emails.
|
||||
function _gu() {
|
||||
_autocomplete_first account@voronind.com dd.voronin@fsight.ru
|
||||
}
|
||||
|
||||
complete -F _gu gu
|
73
module/common/bash/module/Group.nix
Normal file
|
@ -0,0 +1,73 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Group files by extension.
|
||||
# Usage: group_ext [FILES]
|
||||
function group_ext() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
process() {
|
||||
local ext=''${target##*.}
|
||||
[[ -d "''${target}" ]] && { _iterate_skip "Is a directory."; return 0; }
|
||||
[[ "''${ext}" = "''${target}" ]] && { _iterate_skip "No extension."; return 0; }
|
||||
|
||||
mkdir ''${ext} 2> /dev/null
|
||||
|
||||
mv -- ''${target} ./''${ext}/''${target}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Group files and dirs by year.
|
||||
# Usage: group_year [FILES]
|
||||
function group_year() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(ls))
|
||||
|
||||
process() {
|
||||
local year=$(stat --format=%y ''${target})
|
||||
year=''${year%%-*}
|
||||
|
||||
mkdir ''${year} 2> /dev/null
|
||||
|
||||
mv -- ''${target} ./''${year}/''${target}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Copy files from the given year into a dir named after it.
|
||||
# Usage: group_year_copy <YEAR> [FILES]
|
||||
function group_year_copy() {
|
||||
local IFS=$'\n'
|
||||
local selected_year="''${1}"
|
||||
local targets=(''${@:2})
|
||||
|
||||
if [[ "''${selected_year}" = "" ]]; then
|
||||
help group_year_copy
|
||||
return 2
|
||||
fi
|
||||
|
||||
# All files by default.
|
||||
[[ "''${targets}" = "" ]] && targets=($(ls))
|
||||
|
||||
mkdir ''${selected_year} 2> /dev/null
|
||||
|
||||
process() {
|
||||
local year=$(stat --format=%y ''${target})
|
||||
year=''${year%%-*}
|
||||
|
||||
if [[ "''${year}" = "''${selected_year}" ]]; then
|
||||
rcp -- ''${target} ./''${selected_year}/
|
||||
else
|
||||
_iterate_skip
|
||||
fi
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
'';
|
||||
}
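Usage sketch (run inside a directory of mixed files):
group_ext              # *.jpg into ./jpg/, *.pdf into ./pdf/, and so on
group_year             # sort files and dirs into ./2023/, ./2024/, ... by mtime
group_year_copy 2024   # copy only files from 2024 into ./2024/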
|
|
@ -1,69 +0,0 @@
|
|||
# Group files by extension.
|
||||
# Usage: group_ext [FILES]
|
||||
function group_ext() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
process() {
|
||||
local ext=${target##*.}
|
||||
[[ -d "${target}" ]] && { _iterate_skip "Is a directory."; return 0; }
|
||||
[[ "${ext}" = "${target}" ]] && { _iterate_skip "No extension."; return 0; }
|
||||
|
||||
mkdir ${ext} 2> /dev/null
|
||||
|
||||
mv -- ${target} ./${ext}/${target}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Group files and dirs by year.
|
||||
# Usage: group_year [FILES]
|
||||
function group_year() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(ls))
|
||||
|
||||
process() {
|
||||
local year=$(stat --format=%y ${target})
|
||||
year=${year%%-*}
|
||||
|
||||
mkdir ${year} 2> /dev/null
|
||||
|
||||
mv -- ${target} ./${year}/${target}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Copy files from current year to the named dir.
|
||||
# Usage: group_year_copy <YEAR> [FILES]
|
||||
function group_year_copy() {
|
||||
local IFS=$'\n'
|
||||
local selected_year="${1}"
|
||||
local targets=(${@:2})
|
||||
|
||||
if [[ "${selected_year}" = "" ]]; then
|
||||
help group_year_copy
|
||||
return 2
|
||||
fi
|
||||
|
||||
# All files by default.
|
||||
[[ "${targets}" = "" ]] && targets=($(ls))
|
||||
|
||||
mkdir ${selected_year} 2> /dev/null
|
||||
|
||||
process() {
|
||||
local year=$(stat --format=%y ${target})
|
||||
year=${year%%-*}
|
||||
|
||||
if [[ "${year}" = "${selected_year}" ]]; then
|
||||
rcp -- ${target} ./${selected_year}/
|
||||
else
|
||||
_iterate_skip
|
||||
fi
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
29
module/common/bash/module/Help.nix
Normal file
|
@ -0,0 +1,29 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Get help about dotfiles bash function.
|
||||
# Usage: help <FUNCTION>
|
||||
function help() {
|
||||
local fun="''${1}"
|
||||
|
||||
if [[ "''${fun}" = "" ]] || [[ "$(find_function | /usr/bin/env grep ''${fun})" = "" ]]; then
|
||||
help help
|
||||
return 2
|
||||
fi
|
||||
|
||||
/usr/bin/env sed -e '$s/$/\n/' -s "''${BASH_PATH}/module"/* | /usr/bin/env sed -n -e "/function ''${fun}()/q;p" | /usr/bin/env tac | /usr/bin/env sed -n -e "/^$/q;p" | /usr/bin/env tac | /usr/bin/env sed -e "s/^# \+//" -e "\$i \ "
|
||||
}
|
||||
|
||||
# Short for help.
|
||||
# Usage: h <FUNCTION>
|
||||
function h() {
|
||||
help "''${@}"
|
||||
}
|
||||
|
||||
# Autocomplete with available functions.
|
||||
function _help_functions() {
|
||||
_autocomplete_first $(find_function)
|
||||
}
|
||||
|
||||
complete -F _help_functions help h
|
||||
'';
|
||||
}
|
|
@ -1,25 +0,0 @@
|
|||
# Get help about dotfiles bash function.
|
||||
# Usage: help <FUNCTION>
|
||||
function help() {
|
||||
local fun="${1}"
|
||||
|
||||
if [[ "${fun}" = "" ]] || [[ "$(find_function | /usr/bin/env grep ${fun})" = "" ]]; then
|
||||
help help
|
||||
return 2
|
||||
fi
|
||||
|
||||
/usr/bin/env sed -e '$s/$/\n/' -s "${BASH_PATH}/module"/* | /usr/bin/env sed -n -e "/function ${fun}()/q;p" | /usr/bin/env tac | /usr/bin/env sed -n -e "/^$/q;p" | /usr/bin/env tac | /usr/bin/env sed -e "s/^# \+//" -e "\$i \ "
|
||||
}
|
||||
|
||||
# Short for help.
|
||||
# Usage: h <FUNCTION>
|
||||
function h() {
|
||||
help "${@}"
|
||||
}
|
||||
|
||||
# Autocomplete with available functions.
|
||||
function _help_functions() {
|
||||
_autocomplete_first $(find_function)
|
||||
}
|
||||
|
||||
complete -F _help_functions help h
|
68
module/common/bash/module/Ls.nix
Normal file
|
@ -0,0 +1,68 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Unset possible system-defined aliases.
|
||||
unalias l ll lll llll la lla &> /dev/null
|
||||
unset l ll lll llll la lla &> /dev/null
|
||||
|
||||
# List files in dirs.
|
||||
# Current dir by default.
|
||||
# Usage: l [DIRS]
|
||||
function l() {
|
||||
ls -lhv --si --group-directories-first --color=auto -- "$@"
|
||||
}
|
||||
|
||||
# List last modified files first.
|
||||
# Current dir by default.
|
||||
# Usage: ll [DIRS]
|
||||
function ll() {
|
||||
ls -lhvtr --si --color=auto -- "$@"
|
||||
}
|
||||
|
||||
# List files in tree structure.
|
||||
# Current dir by default.
|
||||
# Depth can be omitted by passing `-` (dash).
|
||||
# Usage: lll [DEPTH] [DIRS]
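# Example (illustrative): lll 2 ~/src   # tree limited to two levels; pass "-" to skip the depth limit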
|
||||
function lll() {
|
||||
local IFS=$'\n'
|
||||
local depth="''${1}"
|
||||
local target=("''${@:2}")
|
||||
|
||||
[[ "''${target}" = "" ]] && target="."
|
||||
[[ "''${depth}" = "" ]] && depth=666
|
||||
[[ "''${depth}" = "-" ]] && depth=666
|
||||
|
||||
tree -a -L "''${depth}" -- "''${target[@]}"
|
||||
}
|
||||
|
||||
# List files recursively.
|
||||
# Current dir by default.
|
||||
# Usage: llll [DIRS]
|
||||
function llll() {
|
||||
ls -RlAhv --si --group-directories-first --color=auto -- "$@"
|
||||
}
|
||||
|
||||
# List all files in dirs, incl. hidden files.
|
||||
# Current dir by default.
|
||||
# Usage: la [DIRS]
|
||||
function la() {
|
||||
ls -lAh --si --group-directories-first --color=auto -- "$@"
|
||||
}
|
||||
|
||||
# List all files in dirs, incl. hidden files, sorted by mtime.
|
||||
# Current dir by default.
|
||||
# Usage: lla [DIRS]
|
||||
function lla() {
|
||||
ls -lAhtr --si --color=auto -- "$@"
|
||||
}
|
||||
|
||||
# List only files.
|
||||
function _ls_file() {
|
||||
ls --classify | grep -v \/$
|
||||
}
|
||||
|
||||
# List only dirs.
|
||||
function _ls_dir() {
|
||||
ls --classify | grep \/$ | sed -e "s/\/$//"
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,64 +0,0 @@
|
|||
# Unset possible system-defined aliases.
|
||||
unalias l ll lll llll la lla &> /dev/null
|
||||
unset l ll lll llll la lla &> /dev/null
|
||||
|
||||
# List files in dirs.
|
||||
# Current dir by default.
|
||||
# Usage: l [DIRS]
|
||||
function l() {
|
||||
ls -lhv --si --group-directories-first --color=auto -- "$@"
|
||||
}
|
||||
|
||||
# List last modified files first.
|
||||
# Current dir by default.
|
||||
# Usage: ll [DIRS]
|
||||
function ll() {
|
||||
ls -lhvtr --si --color=auto -- "$@"
|
||||
}
|
||||
|
||||
# List files in tree structure.
|
||||
# Current dir by default.
|
||||
# Depth can be omitted by passing `-` (dash).
|
||||
# Usage: lll [DEPTH] [DIRS]
|
||||
function lll() {
|
||||
local IFS=$'\n'
|
||||
local depth="${1}"
|
||||
local target=("${@:2}")
|
||||
|
||||
[[ "${target}" = "" ]] && target="."
|
||||
[[ "${depth}" = "" ]] && depth=666
|
||||
[[ "${depth}" = "-" ]] && depth=666
|
||||
|
||||
tree -a -L "${depth}" -- "${target[@]}"
|
||||
}
|
||||
|
||||
# List files recursively.
|
||||
# Current dir by default.
|
||||
# Usage: llll [DIRS]
|
||||
function llll() {
|
||||
ls -RlAhv --si --group-directories-first --color=auto -- "$@"
|
||||
}
|
||||
|
||||
# List all files in dirs, incl. hidden files.
|
||||
# Current dir by default.
|
||||
# Usage: la [DIRS]
|
||||
function la() {
|
||||
ls -lAh --si --group-directories-first --color=auto -- "$@"
|
||||
}
|
||||
|
||||
# List all files in dirs, incl. hidden files, sorted by mtime.
|
||||
# Current dir by default.
|
||||
# Usage: lla [DIRS]
|
||||
function lla() {
|
||||
ls -lAhtr --si --color=auto -- "$@"
|
||||
}
|
||||
|
||||
# List only files.
|
||||
function _ls_file() {
|
||||
ls --classify | grep -v \/$
|
||||
}
|
||||
|
||||
# List only dirs.
|
||||
function _ls_dir() {
|
||||
ls --classify | grep \/$ | sed -e "s/\/$//"
|
||||
}
|
19
module/common/bash/module/Markdown.nix
Normal file
|
@ -0,0 +1,19 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Render markdown in the browser using the Gitea API, for consistency with Gitea's web rendering.
|
||||
# Works only inside LAN.
|
||||
# Usage: markdown_render <FILE.md>
|
||||
function markdown_render() {
|
||||
local IFS=$'\n'
|
||||
local file="''${1}"
|
||||
local render="markdown_render.html"
|
||||
|
||||
if [[ "''${file}" = "" ]]; then
|
||||
help markdown_render
|
||||
return 2
|
||||
fi
|
||||
|
||||
curl -X POST https://git.voronind.com/markdown -d "$(cat ''${file})" > "''${render}" && o "''${render}" && sleep 2 && rm "''${render}"
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,15 +0,0 @@
|
|||
# Render markdown in browser using Gitea API. Because I want consistency with Gitea web render.
|
||||
# Works only inside LAN.
|
||||
# Usage: markdown_render <FILE.md>
|
||||
function markdown_render() {
|
||||
local IFS=$'\n'
|
||||
local file="${1}"
|
||||
local render="markdown_render.html"
|
||||
|
||||
if [[ "${file}" = "" ]]; then
|
||||
help markdown_render
|
||||
return 2
|
||||
fi
|
||||
|
||||
curl -X POST https://git.voronind.com/markdown -d "$(cat ${file})" > "${render}" && o "${render}" && sleep 2 && rm "${render}"
|
||||
}
|
13
module/common/bash/module/Monitor.nix
Normal file
|
@ -0,0 +1,13 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Enable monitors.
|
||||
function monon() {
|
||||
swaymsg 'output "ASUSTek COMPUTER INC ASUS VA24E R2LMTF127165" power on'
|
||||
}
|
||||
|
||||
# Disable monitors.
|
||||
function monoff() {
|
||||
swaymsg 'output "ASUSTek COMPUTER INC ASUS VA24E R2LMTF127165" power off'
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,9 +0,0 @@
|
|||
# Enable monitors.
|
||||
function monon() {
|
||||
swaymsg 'output "ASUSTek COMPUTER INC ASUS VA24E R2LMTF127165" power on'
|
||||
}
|
||||
|
||||
# Disable monitors.
|
||||
function monoff() {
|
||||
swaymsg 'output "ASUSTek COMPUTER INC ASUS VA24E R2LMTF127165" power off'
|
||||
}
|
399
module/common/bash/module/Name.nix
Normal file
|
@ -0,0 +1,399 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Rename dirs to `snake_case` and files to `PascalCase`. Careful with structured file names like archives!
|
||||
# Usage: name [FILES]
|
||||
function name() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(ls))
|
||||
|
||||
process() {
|
||||
# Skip archive.
|
||||
if $(_is_archive ''${target}); then
|
||||
_iterate_skip "File is an archive, skip."
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [[ -d "''${target}" ]]; then
|
||||
local new_name=$(parse_snake ''${target})
|
||||
[[ -e "''${new_name}" ]] && return 0
|
||||
|
||||
mv -- ''${target} ''${new_name} && echo ''${new_name}
|
||||
else
|
||||
local ext=".''${target##*.}"
|
||||
local name=''${target%.*}
|
||||
[[ "''${ext}" = ".''${target}" ]] && ext=""
|
||||
|
||||
local new_name="$(parse_pascal ''${name})''${ext}"
|
||||
[[ -e "''${new_name}" ]] && return 0
|
||||
|
||||
mv -- ''${target} ''${new_name} && echo ''${new_name}
|
||||
fi
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Rename files with provided parser, i.e. `parse_simple`.
|
||||
# All files by default.
|
||||
# Usage: name_parse <PARSER> [FILES]
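# Example (illustrative): name_parse parse_kebab *.txt   # "My File.txt" becomes "my-file.txt"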
|
||||
function name_parse() {
|
||||
local IFS=$'\n'
|
||||
local parser=''${1}
|
||||
local targets=(''${@:2})
|
||||
[[ "''${targets}" = "" ]] && targets=([^.]*)
|
||||
|
||||
if [[ "''${parser}" = "" ]]; then
|
||||
help name_parse
|
||||
return 2
|
||||
fi
|
||||
|
||||
process() {
|
||||
# Skip archive.
|
||||
if $(_is_archive ''${target}); then
|
||||
_iterate_skip "File is an archive, skip."
|
||||
return 0
|
||||
fi
|
||||
|
||||
# parse new name.
|
||||
local ext=""
|
||||
local name="''${target}"
|
||||
|
||||
# ext only for files.
|
||||
if [[ -f "''${target}" ]]; then
|
||||
ext=".''${target##*.}"
|
||||
name="''${target%.*}"
|
||||
fi
|
||||
|
||||
# Files w/o extension support.
|
||||
[[ "''${ext#.}" = "''${name}" ]] && ext=""
|
||||
|
||||
# Get new name.
|
||||
local new_name=$(''${parser} "''${name}")''${ext,,}
|
||||
|
||||
# check if same name.
|
||||
[[ "''${target}" = "''${new_name}" ]] && return 0
|
||||
|
||||
# check if target name already exists.
|
||||
if [[ -f "''${new_name}" ]]; then
|
||||
_error "''${new_name}: Already exists!"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# rename target.
|
||||
mv -- "''${target}" "''${new_name}" && echo "''${new_name}"
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Rename all files to their hashes while keeping extensions.
|
||||
# All files by default.
|
||||
# Usage: name_hash [FILES]
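# Example (illustrative): name_hash Photo.JPG   # renames the file to <sha1-of-contents>.jpg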
|
||||
function name_hash() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
process() {
|
||||
# extract extension.
|
||||
local extension="''${target##*.}"
|
||||
if [[ "''${extension}" = "''${target}" ]]; then
|
||||
extension=""
|
||||
else
|
||||
extension=".''${extension}"
|
||||
fi
|
||||
|
||||
# hash the new name.
|
||||
local hash=$(pv "''${target}" | sha1sum | cut -d\ -f1)
|
||||
new_name="''${hash,,}''${extension,,}"
|
||||
|
||||
# check if same name.
|
||||
[[ "''${target}" = "''${new_name}" ]] && return 0
|
||||
|
||||
# rename target.
|
||||
mv -- ''${target} ''${new_name} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Check hashes for previously renamed files.
|
||||
# All files by default.
|
||||
# Usage: name_hash_check [FILES]
|
||||
function name_hash_check() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=([^.]*)
|
||||
|
||||
process() {
|
||||
# extract hashes.
|
||||
local stored="''${target%%.*}"
|
||||
local actual=$(pv "''${target}" | sha1sum | cut -d\ -f1)
|
||||
|
||||
# compare hashes.
|
||||
if [[ "''${stored}" != "''${actual}" ]]; then
|
||||
_error "Failed."
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Rename files for Jellyfin shows, i.e. `Episode S01E01.mkv`
|
||||
# All files by default.
|
||||
# Usage: name_show [FILES]
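# Example (illustrative): run inside a dir named "Season 01" to get Episode S01E01.mkv, Episode S01E02.mkv, and so on.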
|
||||
function name_show() {
|
||||
local IFS=$'\n'
|
||||
local season="$(realpath .)"; season="''${season##*\ }"
|
||||
local episode=0
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
# Error when no season number specified.
|
||||
if [[ "''${season}" = "" ]]; then
|
||||
_error "Could not determine season number."
|
||||
return 2
|
||||
fi
|
||||
|
||||
process() {
|
||||
((episode++))
|
||||
|
||||
# extract new name.
|
||||
local new_name="Episode S''${season}E$(printf %02d ''${episode}).''${target##*.}"
|
||||
|
||||
# Skip on no change.
|
||||
[[ "''${target}" = "''${new_name}" ]] && return 0
|
||||
|
||||
# rename target.
|
||||
mv -- ''${target} ''${new_name} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Rename files for Kavita manga format.
|
||||
# All files by default.
|
||||
# Usage: name_manga <SEASON> [FILES]
|
||||
function name_manga() {
|
||||
local IFS=$'\n'
|
||||
local manga=''${PWD##*/}
|
||||
local season=''${1}
|
||||
local episode=0
|
||||
local targets=(''${@:2})
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
# Error when no season number specified.
|
||||
if [[ "''${season}" = "" ]]; then
|
||||
help name_manga
|
||||
return 2
|
||||
fi
|
||||
|
||||
process() {
|
||||
((episode++))
|
||||
|
||||
# Extract new name.
|
||||
local new_name="''${manga} Vol.''${season} Ch.''${episode}.''${target##*.}"
|
||||
|
||||
# Skip on no change.
|
||||
[[ "''${target}" = "''${new_name}" ]] && return 0
|
||||
|
||||
# Rename target.
|
||||
mv -- ''${target} ''${new_name} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Rename files for ffmpeg_music_meta format.
|
||||
# All files by default.
|
||||
# Usage: name_music [FILES]
|
||||
function name_music() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(ls))
|
||||
|
||||
process() {
|
||||
# Extract new name.
|
||||
local ext=''${target##*.}
|
||||
|
||||
if [[ -d "''${target}" ]]; then
|
||||
local new_name="$(parse_titlecase $(parse_simple ''${target%.*}))"
|
||||
else
|
||||
local new_name="$(parse_titlecase $(parse_simple ''${target%.*})).''${ext}"
|
||||
fi
|
||||
|
||||
# Skip on no change.
|
||||
[[ "''${target%/}" = "''${new_name}" ]] && return 0
|
||||
|
||||
# Rename target.
|
||||
mv -- ''${target} ''${new_name} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Rename files with new extension.
|
||||
# All files by default.
|
||||
# Usage: name_ext <EXTENSION> [FILES]
|
||||
function name_ext() {
|
||||
local IFS=$'\n'
|
||||
local extension=''${1}
|
||||
local targets=(''${@:2})
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
# Error when no new extension specified.
|
||||
if [[ "''${extension}" = "" ]]; then
|
||||
help name_ext
|
||||
return 2
|
||||
fi
|
||||
|
||||
process() {
|
||||
# Extract new name.
|
||||
local new_name="''${target%.*}"."''${extension}"
|
||||
|
||||
# Skip on no change.
|
||||
[[ "''${target}" = "''${new_name}" ]] && return 0
|
||||
|
||||
# Rename target.
|
||||
mv -- ''${target} ''${new_name} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Change file name prefix.
|
||||
# All matching files by default.
|
||||
# Usage: name_prefix <OLD> <NEW> [FILES]
|
||||
function name_prefix() {
|
||||
local IFS=$'\n'
|
||||
local old=''${1}
|
||||
local new=''${2}
|
||||
local targets=(''${@:3})
|
||||
[[ "''${targets}" = "" ]] && targets=(''${old}*)
|
||||
|
||||
process() {
|
||||
# Create new name.
|
||||
local new_name="''${new}''${target#$old}"
|
||||
|
||||
# Skip on no change.
|
||||
[[ "''${target}" = "''${new_name}" ]] && return 0
|
||||
|
||||
# Rename.
|
||||
mv -- ''${target} ''${new_name} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Change file name postfix.
|
||||
# All matching files by default.
|
||||
# Usage: name_postfix <OLD> <NEW> [FILES]
|
||||
function name_postfix() {
|
||||
local IFS=$'\n'
|
||||
local old=''${1}
|
||||
local new=''${2}
|
||||
local targets=(''${@:3})
|
||||
[[ "''${targets}" = "" ]] && targets=(*''${old})
|
||||
|
||||
process() {
|
||||
# Create new name.
|
||||
local new_name="''${target%$old}''${new}"
|
||||
|
||||
# Skip on no change.
|
||||
[[ "''${target}" = "''${new_name}" ]] && return 0
|
||||
|
||||
# Rename.
|
||||
mv -- ''${target} ''${new_name} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Replace part of the name.
|
||||
# All matching files by default.
|
||||
# Usage: name_replace <OLD> <NEW> [FILES]
|
||||
function name_replace() {
|
||||
local IFS=$'\n'
|
||||
local old=''${1}
|
||||
local new=''${2}
|
||||
local targets=(''${@:3})
|
||||
[[ "''${targets}" = "" ]] && targets=(*''${old}*)
|
||||
|
||||
process() {
|
||||
# Create new name.
|
||||
local new_name="''${target//$old/$new}"
|
||||
|
||||
# Skip on no change.
|
||||
[[ "''${target}" = "''${new_name}" ]] && return 0
|
||||
|
||||
# Rename.
|
||||
mv -- ''${target} ''${new_name} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
# Fix numbering for numbered files, i.e. if there are 10 items and some lack a leading zero, pad them: 1..10 -> 01..10.
|
||||
# Usage: name_fix_numbering [FILES]
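# Example (illustrative): with 1.flac through 12.flac present, 1.flac is renamed to 01.flac, 2.flac to 02.flac, and so on.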
|
||||
function name_fix_numbering() {
|
||||
local IFS=$'\n'
|
||||
local highest=0
|
||||
local power=0
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(ls | grep "^[0-9]"))
|
||||
|
||||
# Count leading zeroes.
|
||||
for target in "''${targets[@]}"; do
|
||||
# Check that starts with a digit.
|
||||
[[ "''${target}" =~ ^[0-9] ]] || continue
|
||||
|
||||
local digits=($(parse_ints "''${target}"))
|
||||
local digit="''${digits[0]}"
|
||||
digit=$((10#''${digit}))
|
||||
|
||||
[[ "''${digit}" -gt "''${highest}" ]] && highest="''${digit}"
|
||||
done
|
||||
|
||||
local i=''${highest}
|
||||
while [[ "''${i}" -gt 0 ]]; do
|
||||
((power++))
|
||||
i=$((''${i}/10))
|
||||
done
|
||||
|
||||
process() {
|
||||
# Check that starts with a digit.
|
||||
if [[ ! "''${target}" =~ ^[0-9] ]]; then
|
||||
_error "Does not start with a digit!"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Prepare new name.
|
||||
local digits=($(parse_ints "''${target}"))
|
||||
local digit="''${digits[0]}"
|
||||
digit=$((10#''${digit}))
|
||||
local new_name=$(printf "%0''${power}d" "''${digit}")"''${target#''${digits[0]}}"
|
||||
|
||||
# Skip if the same name.
|
||||
[[ "''${target}" = "''${new_name}" ]] && return 0
|
||||
|
||||
# Check that file does not exist.
|
||||
if [[ -e "''${new_name}" ]]; then
|
||||
_error "''${new_name}: File exists!"
|
||||
return 1
|
||||
fi
|
||||
|
||||
mv -- ''${target} ''${new_name} && echo ''${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
function _comp_name_parse() {
|
||||
_autocomplete_first_ls $(find_function | grep ^parse)
|
||||
}
|
||||
|
||||
complete -o filenames -F _comp_name_parse name_parse
|
||||
'';
|
||||
}
|
|
@ -1,395 +0,0 @@
|
|||
# Rename dirs to `snake_case` and files to `PascalCase`. Careful with structured file names like archives!
|
||||
# Usage: name [FILES]
|
||||
function name() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(ls))
|
||||
|
||||
process() {
|
||||
# Skip archive.
|
||||
if $(_is_archive ${target}); then
|
||||
_iterate_skip "File is an archive, skip."
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [[ -d "${target}" ]]; then
|
||||
local new_name=$(parse_snake ${target})
|
||||
[[ -e "${new_name}" ]] && return 0
|
||||
|
||||
mv -- ${target} ${new_name} && echo ${new_name}
|
||||
else
|
||||
local ext=".${target##*.}"
|
||||
local name=${target%.*}
|
||||
[[ "${ext}" = ".${target}" ]] && ext=""
|
||||
|
||||
local new_name="$(parse_pascal ${name})${ext}"
|
||||
[[ -e "${new_name}" ]] && return 0
|
||||
|
||||
mv -- ${target} ${new_name} && echo ${new_name}
|
||||
fi
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Rename files with provided parser, i.e. `parse_simple`.
|
||||
# All files by default.
|
||||
# Usage: name_parse <PARSER> [FILES]
|
||||
function name_parse() {
|
||||
local IFS=$'\n'
|
||||
local parser=${1}
|
||||
local targets=(${@:2})
|
||||
[[ "${targets}" = "" ]] && targets=([^.]*)
|
||||
|
||||
if [[ "${parser}" = "" ]]; then
|
||||
help name_parse
|
||||
return 2
|
||||
fi
|
||||
|
||||
process() {
|
||||
# Skip archive.
|
||||
if $(_is_archive ${target}); then
|
||||
_iterate_skip "File is an archive, skip."
|
||||
return 0
|
||||
fi
|
||||
|
||||
# parse new name.
|
||||
local ext=""
|
||||
local name="${target}"
|
||||
|
||||
# ext only for files.
|
||||
if [[ -f "${target}" ]]; then
|
||||
ext=".${target##*.}"
|
||||
name="${target%.*}"
|
||||
fi
|
||||
|
||||
# Files w/o extension support.
|
||||
[[ "${ext#.}" = "${name}" ]] && ext=""
|
||||
|
||||
# Get new name.
|
||||
local new_name=$(${parser} "${name}")${ext,,}
|
||||
|
||||
# check if same name.
|
||||
[[ "${target}" = "${new_name}" ]] && return 0
|
||||
|
||||
# check if target name already exists.
|
||||
if [[ -f "${new_name}" ]]; then
|
||||
_error "${new_name}: Already exists!"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# rename target.
|
||||
mv -- "${target}" "${new_name}" && echo "${new_name}"
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Rename all files to their hashes while keeping extensions.
|
||||
# All files by default.
|
||||
# Usage: name_hash [FILES]
|
||||
function name_hash() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
process() {
|
||||
# extract extension.
|
||||
local extension="${target##*.}"
|
||||
if [[ "${extension}" = "${target}" ]]; then
|
||||
extension=""
|
||||
else
|
||||
extension=".${extension}"
|
||||
fi
|
||||
|
||||
# hash the new name.
|
||||
local hash=$(pv "${target}" | sha1sum | cut -d\ -f1)
|
||||
new_name="${hash,,}${extension,,}"
|
||||
|
||||
# check if same name.
|
||||
[[ "${target}" = "${new_name}" ]] && return 0
|
||||
|
||||
# rename target.
|
||||
mv -- ${target} ${new_name} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Check hashes for previously renamed files.
|
||||
# All files by default.
|
||||
# Usage: name_hash_check [FILES]
|
||||
function name_hash_check() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=([^.]*)
|
||||
|
||||
process() {
|
||||
# extract hashes.
|
||||
local stored="${target%%.*}"
|
||||
local actual=$(pv "${target}" | sha1sum | cut -d\ -f1)
|
||||
|
||||
# compare hashes.
|
||||
if [[ "${stored}" != "${actual}" ]]; then
|
||||
_error "Failed."
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Rename files for Jellyfin shows, i.e. `Episode S01E01.mkv`
|
||||
# All files by default.
|
||||
# Usage: name_show [FILES]
|
||||
function name_show() {
|
||||
local IFS=$'\n'
|
||||
local season="$(realpath .)"; season="${season##*\ }"
|
||||
local episode=0
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
# Error when no season number specified.
|
||||
if [[ "${season}" = "" ]]; then
|
||||
_error "Could not determine season number."
|
||||
return 2
|
||||
fi
|
||||
|
||||
process() {
|
||||
((episode++))
|
||||
|
||||
# extract new name.
|
||||
local new_name="Episode S${season}E$(printf %02d ${episode}).${target##*.}"
|
||||
|
||||
# Skip on no change.
|
||||
[[ "${target}" = "${new_name}" ]] && return 0
|
||||
|
||||
# rename target.
|
||||
mv -- ${target} ${new_name} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Rename files for Kavita manga format.
|
||||
# All files by default.
|
||||
# Usage: name_manga <SEASON> [FILES]
|
||||
function name_manga() {
|
||||
local IFS=$'\n'
|
||||
local manga=${PWD##*/}
|
||||
local season=${1}
|
||||
local episode=0
|
||||
local targets=(${@:2})
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
# Error when no season number specified.
|
||||
if [[ "${season}" = "" ]]; then
|
||||
help name_manga
|
||||
return 2
|
||||
fi
|
||||
|
||||
process() {
|
||||
((episode++))
|
||||
|
||||
# Extract new name.
|
||||
local new_name="${manga} Vol.${season} Ch.${episode}.${target##*.}"
|
||||
|
||||
# Skip on no change.
|
||||
[[ "${target}" = "${new_name}" ]] && return 0
|
||||
|
||||
# Rename target.
|
||||
mv -- ${target} ${new_name} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Rename files for ffmpeg_music_meta format.
|
||||
# All files by default.
|
||||
# Usage: name_music [FILES]
|
||||
function name_music() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(ls))
|
||||
|
||||
process() {
|
||||
# Extract new name.
|
||||
local ext=${target##*.}
|
||||
|
||||
if [[ -d "${target}" ]]; then
|
||||
local new_name="$(parse_titlecase $(parse_simple ${target%.*}))"
|
||||
else
|
||||
local new_name="$(parse_titlecase $(parse_simple ${target%.*})).${ext}"
|
||||
fi
|
||||
|
||||
# Skip on no change.
|
||||
[[ "${target%/}" = "${new_name}" ]] && return 0
|
||||
|
||||
# Rename target.
|
||||
mv -- ${target} ${new_name} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Rename files with new extension.
|
||||
# All files by default.
|
||||
# Usage: name_ext <EXTENSION> [FILES]
|
||||
function name_ext() {
|
||||
local IFS=$'\n'
|
||||
local extension=${1}
|
||||
local targets=(${@:2})
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_file))
|
||||
|
||||
# Error when no new extension specified.
|
||||
if [[ "${extension}" = "" ]]; then
|
||||
help name_ext
|
||||
return 2
|
||||
fi
|
||||
|
||||
process() {
|
||||
# Extract new name.
|
||||
local new_name="${target%.*}"."${extension}"
|
||||
|
||||
# Skip on no change.
|
||||
[[ "${target}" = "${new_name}" ]] && return 0
|
||||
|
||||
# Rename target.
|
||||
mv -- ${target} ${new_name} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Change file name prefix.
|
||||
# All matching files by default.
|
||||
# Usage: name_prefix <OLD> <NEW> [FILES]
|
||||
function name_prefix() {
|
||||
local IFS=$'\n'
|
||||
local old=${1}
|
||||
local new=${2}
|
||||
local targets=(${@:3})
|
||||
[[ "${targets}" = "" ]] && targets=(${old}*)
|
||||
|
||||
process() {
|
||||
# Create new name.
|
||||
local new_name="${new}${target#$old}"
|
||||
|
||||
# Skip on no change.
|
||||
[[ "${target}" = "${new_name}" ]] && return 0
|
||||
|
||||
# Rename.
|
||||
mv -- ${target} ${new_name} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Change file name postfix.
|
||||
# All matching files by default.
|
||||
# Usage: name_postfix <OLD> <NEW> [FILES]
|
||||
function name_postfix() {
|
||||
local IFS=$'\n'
|
||||
local old=${1}
|
||||
local new=${2}
|
||||
local targets=(${@:3})
|
||||
[[ "${targets}" = "" ]] && targets=(*${old})
|
||||
|
||||
process() {
|
||||
# Create new name.
|
||||
local new_name="${target%$old}${new}"
|
||||
|
||||
# Skip on no change.
|
||||
[[ "${target}" = "${new_name}" ]] && return 0
|
||||
|
||||
# Rename.
|
||||
mv -- ${target} ${new_name} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Replace part of the name.
|
||||
# All matching files by default.
|
||||
# Usage: name_replace <OLD> <NEW> [FILES]
|
||||
function name_replace() {
|
||||
local IFS=$'\n'
|
||||
local old=${1}
|
||||
local new=${2}
|
||||
local targets=(${@:3})
|
||||
[[ "${targets}" = "" ]] && targets=(*${old}*)
|
||||
|
||||
process() {
|
||||
# Create new name.
|
||||
local new_name="${target//$old/$new}"
|
||||
|
||||
# Skip on no change.
|
||||
[[ "${target}" = "${new_name}" ]] && return 0
|
||||
|
||||
# Rename.
|
||||
mv -- ${target} ${new_name} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
# Fix numbering for numbered files. I.e if there are 10 items and some of them start without zero, then append zero to it. 1..10 -> 01..10.
|
||||
# Usage: name_fix_numbering [FILES]
|
||||
function name_fix_numbering() {
|
||||
local IFS=$'\n'
|
||||
local highest=0
|
||||
local power=0
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(ls | grep "^[0-9]"))
|
||||
|
||||
# Count leading zeroes.
|
||||
for target in "${targets[@]}"; do
|
||||
# Check that starts with a digit.
|
||||
[[ "${target}" =~ ^[0-9] ]] || continue
|
||||
|
||||
local digits=($(parse_ints "${target}"))
|
||||
local digit="${digits[0]}"
|
||||
digit=$((10#${digit}))
|
||||
|
||||
[[ "${digit}" -gt "${highest}" ]] && highest="${digit}"
|
||||
done
|
||||
|
||||
local i=${highest}
|
||||
while [[ i -gt 0 ]]; do
|
||||
((power++))
|
||||
i=$((${i}/10))
|
||||
done
|
||||
|
||||
process() {
|
||||
# Check that starts with a digit.
|
||||
if [[ ! "${target}" =~ ^[0-9] ]]; then
|
||||
_error "Does not start with a digit!"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Prepare new name.
|
||||
local digits=($(parse_ints "${target}"))
|
||||
local digit="${digits[0]}"
|
||||
digit=$((10#${digit}))
|
||||
local new_name=$(printf "%0${power}d" "${digit}")"${target#${digits[0]}}"
|
||||
|
||||
# Skip if the same name.
|
||||
[[ "${target}" = "${new_name}" ]] && return 0
|
||||
|
||||
# Check that file does not exist.
|
||||
if [[ -e "${new_name}" ]]; then
|
||||
_error "${new_name}: File exists!"
|
||||
return 1
|
||||
fi
|
||||
|
||||
mv -- ${target} ${new_name} && echo ${new_name}
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
function _comp_name_parse() {
|
||||
_autocomplete_first_ls $(find_function | grep ^parse)
|
||||
}
|
||||
|
||||
complete -o filenames -F _comp_name_parse name_parse
|
36
module/common/bash/module/Network.nix
Normal file
|
@ -0,0 +1,36 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Open bluetooth control panel.
|
||||
function bluetooth() {
|
||||
blueman-manager
|
||||
}
|
||||
|
||||
# Open network control panel.
|
||||
function network() {
|
||||
nm-connection-editor
|
||||
}
|
||||
|
||||
# Show active connections.
|
||||
function nms() {
|
||||
nmcli connection show
|
||||
}
|
||||
|
||||
# Start the connection.
|
||||
# Usage: nmu <CONNECTION>
|
||||
function nmu() {
|
||||
nmcli connection up "''${@}"
|
||||
}
|
||||
|
||||
# Stop the connection.
|
||||
# Usage: nmd <CONNECTION>
|
||||
function nmd() {
|
||||
nmcli connection down "''${@}"
|
||||
}
|
||||
|
||||
function _complete_connections() {
|
||||
_autocomplete $(nmcli connection show | sed "1d" | cut -d\ -f1)
|
||||
}
|
||||
|
||||
complete -F _complete_connections nmd nmu
|
||||
'';
|
||||
}
|
|
@ -1,32 +0,0 @@
|
|||
# Open bluetooth control panel.
|
||||
function bluetooth() {
|
||||
blueman-manager
|
||||
}
|
||||
|
||||
# Open network control panel.
|
||||
function network() {
|
||||
nm-connection-editor
|
||||
}
|
||||
|
||||
# Show active connections.
|
||||
function nms() {
|
||||
nmcli connection show
|
||||
}
|
||||
|
||||
# Start the connection.
|
||||
# Usage: nu <CONNECTION>
|
||||
function nmu() {
|
||||
nmcli connection up "${@}"
|
||||
}
|
||||
|
||||
# Stop the connection.
|
||||
# Usage: nd <CONNECTION>
|
||||
function nmd() {
|
||||
nmcli connection down "${@}"
|
||||
}
|
||||
|
||||
function _complete_connections() {
|
||||
_autocomplete $(nmcli connection show | sed "1d" | cut -d\ -f1)
|
||||
}
|
||||
|
||||
complete -F _complete_connections nmd nmu
|
87
module/common/bash/module/Nix.nix
Normal file
|
@ -0,0 +1,87 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
export _nix_system_config="git+https://git.voronind.com/voronind/nixos.git"
|
||||
|
||||
# Rebuild system.
|
||||
# Optionally force the hostname.
|
||||
# Usage: nixos_rebuild [HOSTNAME]
|
||||
function nixos_rebuild() {
|
||||
local target="''${1}"
|
||||
[[ "''${target}" = "" ]] && target="''${HOSTNAME}"
|
||||
|
||||
nixos-rebuild boot --refresh --option eval-cache false --flake "''${_nix_system_config}#''${target}"
|
||||
}
|
||||
|
||||
# Rebuild and switch system.
|
||||
# Optionally force the hostname.
|
||||
# Usage: nixos_switch [HOSTNAME]
|
||||
function nixos_switch() {
|
||||
local target="''${1}"
|
||||
[[ "''${target}" = "" ]] && target="''${HOSTNAME}"
|
||||
|
||||
nixos-rebuild switch --refresh --option eval-cache false --flake "''${_nix_system_config}#''${target}"
|
||||
}
|
||||
|
||||
# Spawn shell with specified nix environment.
|
||||
# Uses flake.nix in current dir by default.
|
||||
# Usage: nix_shell [NAME]
|
||||
function nix_shell() {
|
||||
local target="''${1}"
|
||||
[[ "''${target}" = "" ]] && target="default"
|
||||
|
||||
# Create Nix GC root in .NixRoot{NAME}.
|
||||
nix build ".#devShells.''${NIX_CURRENT_SYSTEM}.''${target}" -o ".NixRoot''${target^}"
|
||||
|
||||
NIX_SHELL="''${target}" nix develop ".#devShells.''${NIX_CURRENT_SYSTEM}.''${target}"
|
||||
}
|
||||
alias shell="nix_shell"
|
||||
|
||||
# Spawn temporary nix-shell with specified packages.
|
||||
# Usage: nix_tmpshell <PACKAGES>
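# Example (illustrative): nix_tmpshell htop ripgrep   # shell with nixpkgs#htop and nixpkgs#ripgrep available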
|
||||
function nix_tmpshell() {
|
||||
local IFS=$'\n'
|
||||
local input=("''${@}")
|
||||
local pkgs=()
|
||||
local tag="''${NIX_SHELL}"
|
||||
|
||||
if [[ "''${input}" = "" ]]; then
|
||||
help nix_tmpshell
|
||||
return 2
|
||||
fi
|
||||
|
||||
[[ "''${tag}" = "" ]] && tag="''${1}"
|
||||
|
||||
for pkg in ''${input[@]}; do
|
||||
pkgs+=("nixpkgs#''${pkg}")
|
||||
done
|
||||
|
||||
NIX_SHELL="''${tag}" NIXPKGS_ALLOW_UNFREE=1 nix shell --impure ''${pkgs[@]}
|
||||
}
|
||||
alias tmpshell="nix_tmpshell"
|
||||
|
||||
# Build live image.
|
||||
function nixos_live() {
|
||||
nix build "''${_nix_system_config}#nixosConfigurations.live.config.system.build.isoImage" --refresh ''${@}
|
||||
}
|
||||
|
||||
# List nixos generations.
|
||||
function nixos_generations() {
|
||||
nix-env -p /nix/var/nix/profiles/system --list-generations
|
||||
}
|
||||
|
||||
# Switch nix-on-droid.
|
||||
function nixdroid_switch() {
|
||||
nix-on-droid switch --flake "''${_nix_system_config}" ''${@}
|
||||
}
|
||||
|
||||
# Autocomplete with available hosts.
|
||||
function _comp_hosts() {
|
||||
local IFS=$'\n'
|
||||
local targets=($(ls ~/.config/linux/system/host/))
|
||||
|
||||
_autocomplete_first ''${targets[@]}
|
||||
}
|
||||
|
||||
complete -F _comp_hosts nixos_switch nixos_rebuild
|
||||
'';
|
||||
}
|
|
@ -1,83 +0,0 @@
|
|||
export _nix_system_config="git+https://git.voronind.com/voronind/nixos.git"
|
||||
|
||||
# Rebuild system.
|
||||
# Optionally force the hostname.
|
||||
# Usage: nixos_rebuild [HOSTNAME]
|
||||
function nixos_rebuild() {
|
||||
local target="${1}"
|
||||
[[ "${target}" = "" ]] && target="${HOSTNAME}"
|
||||
|
||||
nixos-rebuild boot --refresh --option eval-cache false --flake "${_nix_system_config}#${target}"
|
||||
}
|
||||
|
||||
# Rebuild and switch system.
|
||||
# Optionally force the hostname.
|
||||
# Usage: nixos_switch [HOSTNAME]
|
||||
function nixos_switch() {
|
||||
local target="${1}"
|
||||
[[ "${target}" = "" ]] && target="${HOSTNAME}"
|
||||
|
||||
nixos-rebuild switch --refresh --option eval-cache false --flake "${_nix_system_config}#${target}"
|
||||
}
|
||||
|
||||
# Spawn shell with specified nix environment.
|
||||
# Uses flake.nix in current dir by default.
|
||||
# Usage: nix_shell [NAME]
|
||||
function nix_shell() {
|
||||
local target="${1}"
|
||||
[[ "${target}" = "" ]] && target="default"
|
||||
|
||||
# Create Nix GC root in .NixRoot{NAME}.
|
||||
nix build ".#devShells.${NIX_CURRENT_SYSTEM}.${target}" -o ".NixRoot${target^}"
|
||||
|
||||
NIX_SHELL="${target}" nix develop ".#devShells.${NIX_CURRENT_SYSTEM}.${target}"
|
||||
}
|
||||
alias shell="nix_shell"
|
||||
|
||||
# Spawn temporary nix-shell with specified packages.
|
||||
# Usage: nix_tmpshell <PACKAGES>
|
||||
function nix_tmpshell() {
|
||||
local IFS=$'\n'
|
||||
local input=("${@}")
|
||||
local pkgs=()
|
||||
local tag="${NIX_SHELL}"
|
||||
|
||||
if [[ "${input}" = "" ]]; then
|
||||
help nix_tmpshell
|
||||
return 2
|
||||
fi
|
||||
|
||||
[[ "${tag}" = "" ]] && tag="${1}"
|
||||
|
||||
for pkg in ${input[@]}; do
|
||||
pkgs+=("nixpkgs#${pkg}")
|
||||
done
|
||||
|
||||
NIX_SHELL="${tag}" NIXPKGS_ALLOW_UNFREE=1 nix shell --impure ${pkgs[@]}
|
||||
}
|
||||
alias tmpshell="nix_tmpshell"
|
||||
|
||||
# Build live image.
|
||||
function nixos_live() {
|
||||
nix build "${_nix_system_config}#nixosConfigurations.live.config.system.build.isoImage" --refresh ${@}
|
||||
}
|
||||
|
||||
# List nixos generations.
|
||||
function nixos_generations() {
|
||||
nix-env -p /nix/var/nix/profiles/system --list-generations
|
||||
}
|
||||
|
||||
# Switch nix-on-droid.
|
||||
function nixdroid_switch() {
|
||||
nix-on-droid switch --flake "${_nix_system_config}" ${@}
|
||||
}
|
||||
|
||||
# Autocomplete with available hosts.
|
||||
function _comp_hosts() {
|
||||
local IFS=$'\n'
|
||||
local targets=($(ls ~/.config/linux/system/host/))
|
||||
|
||||
_autocomplete_first ${targets[@]}
|
||||
}
|
||||
|
||||
complete -F _comp_hosts nix_switch nix_rebuild
|
15
module/common/bash/module/Notify.nix
Normal file
|
@ -0,0 +1,15 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Send Telegram notification.
|
||||
# Usage: notify <MESSAGE>
|
||||
function notify() {
|
||||
curl -X POST -H 'Content-Type: Application/json' -d "{\"chat_id\":\"155897358\",\"text\":\"$1\"}" https://api.telegram.org/bot2046849441:AAHQpjRK4xpL8tEUyN4JTSDUUze4J0wSIy4/sendMessage &> /dev/null
|
||||
}
|
||||
|
||||
# Send silent Telegram notification.
|
||||
# Usage: notify_silent <MESSAGE>
|
||||
function notify_silent() {
|
||||
curl -X POST -H 'Content-Type: Application/json' -d "{\"chat_id\":\"155897358\",\"text\":\"$1\",\"disable_notification\":\"true\"}" https://api.telegram.org/bot2046849441:AAHQpjRK4xpL8tEUyN4JTSDUUze4J0wSIy4/sendMessage &> /dev/null
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,11 +0,0 @@
|
|||
# Send Telegram notification.
|
||||
# Usage: notify <MESSAGE>
|
||||
function notify() {
|
||||
curl -X POST -H 'Content-Type: Application/json' -d "{\"chat_id\":\"155897358\",\"text\":\"$1\"}" https://api.telegram.org/bot2046849441:AAHQpjRK4xpL8tEUyN4JTSDUUze4J0wSIy4/sendMessage &> /dev/null
|
||||
}
|
||||
|
||||
# Send silent Telegram notification.
|
||||
# Usage: notify_silent <MESSAGE>
|
||||
function notify_silent() {
|
||||
curl -X POST -H 'Content-Type: Application/json' -d "{\"chat_id\":\"155897358\",\"text\":\"$1\",\"disable_notification\":\"true\"}" https://api.telegram.org/bot2046849441:AAHQpjRK4xpL8tEUyN4JTSDUUze4J0wSIy4/sendMessage &> /dev/null
|
||||
}
|
40
module/common/bash/module/Own.nix
Normal file
|
@ -0,0 +1,40 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Change file ownership to the specified user id and restrict access to that user.
|
||||
# Current user by default. This directory recursively by default.
|
||||
# Usage: own [USER] [FILES]
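# Example (illustrative): own 0 /srv/data   # give root ownership and strip group/other access recursively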
|
||||
function own() {
|
||||
local IFS=$'\n'
|
||||
local files=("''${@:2}")
|
||||
local user="''${1}"
|
||||
local group="''${1}"
|
||||
|
||||
# default to current dir.
|
||||
if [ "''${files[*]}" = "" ]; then
|
||||
files=(".")
|
||||
fi
|
||||
|
||||
# default to current user.
|
||||
if [ "''${user}" = "" ]; then
|
||||
user="''${UID}"
|
||||
fi
|
||||
|
||||
# If not root, default to users group.
|
||||
[[ "''${user}" = 0 ]] && group="0" || group="100"
|
||||
|
||||
for file in "''${files[@]}"; do
|
||||
# set ownership.
|
||||
chown "''${user}":"''${group}" -R "''${file}" &> /dev/null
|
||||
|
||||
# remove access from group and others.
|
||||
chmod -077 -R "''${file}"
|
||||
done
|
||||
}
|
||||
|
||||
function _complete_own() {
|
||||
_autocomplete_first_ls $(_get_users)
|
||||
}
|
||||
|
||||
complete -F _complete_own own
|
||||
'';
|
||||
}
|
|
@ -1,36 +0,0 @@
|
|||
# Change file ownership to specified user id and restrict access to him.
|
||||
# Root user by default. This directory recursively by default.
|
||||
# Usage: own [USER] [FILES]
|
||||
function own() {
|
||||
local IFS=$'\n'
|
||||
local files=("${@:2}")
|
||||
local user="${1}"
|
||||
local group="${1}"
|
||||
|
||||
# default to current dir.
|
||||
if [ "${files[*]}" = "" ]; then
|
||||
files=(".")
|
||||
fi
|
||||
|
||||
# default to current user.
|
||||
if [ "${user}" = "" ]; then
|
||||
user="${UID}"
|
||||
fi
|
||||
|
||||
# If not root, default to users group.
|
||||
[[ "${user}" = 0 ]] && group="0" || group="100"
|
||||
|
||||
for file in "${files[@]}"; do
|
||||
# set ownership.
|
||||
chown "${user}":"${group}" -R "${file}" &> /dev/null
|
||||
|
||||
# remove access from group and others.
|
||||
chmod -077 -R "${file}"
|
||||
done
|
||||
}
|
||||
|
||||
function _complete_own() {
|
||||
_autocomplete_first_ls $(_get_users)
|
||||
}
|
||||
|
||||
complete -F _complete_own own
|
191
module/common/bash/module/Pack.nix
Normal file
|
@ -0,0 +1,191 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
export _unpack_supported=".tar$|.tgz$|.txz$|.tar.gz$|.tar.xz$|.zip$|.iso$|.rar$"
|
||||
|
||||
# Pack files into desired format.
|
||||
# All files and directories by default.
|
||||
# Usage: pack <TARGET.ext> [FILES]
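# Example (illustrative): pack backup.txz Documents Pictures   # xz-compressed tarball of both dirs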
|
||||
function pack() {
|
||||
local IFS=$'\n'
|
||||
local output="''${1}"
|
||||
local targets=("''${@:2}")
|
||||
local format="''${output##*.}"
|
||||
local name="''${output%.*}"
|
||||
|
||||
# report no output.
|
||||
if [[ "''${output}" = "" ]]; then
|
||||
help pack
|
||||
return 2
|
||||
fi
|
||||
|
||||
# report no format.
|
||||
if [[ "''${format}" = "" ]]; then
|
||||
_error "Could not determine output format."
|
||||
help pack
|
||||
return 2
|
||||
fi
|
||||
|
||||
# All targets by default.
|
||||
[[ "''${targets}" = "" ]] && targets=(*)
|
||||
|
||||
case "''${format}" in
|
||||
"tgz")
|
||||
_pack_tgz "''${output}" "''${targets[@]}"
|
||||
;;
|
||||
"txz")
|
||||
_pack_txz "''${output}" "''${targets[@]}"
|
||||
;;
|
||||
"tar")
|
||||
_pack_tar "''${output}" "''${targets[@]}"
|
||||
;;
|
||||
"zip")
|
||||
_pack_zip "''${output}" "''${targets[@]}"
|
||||
;;
|
||||
"gz")
|
||||
_pack_gz "''${output}" "''${targets[@]}"
|
||||
;;
|
||||
"xz")
|
||||
_pack_xz "''${output}" "''${targets[@]}"
|
||||
;;
|
||||
"iso")
|
||||
_pack_iso "''${output}" "''${targets[@]}"
|
||||
;;
|
||||
*)
|
||||
_error "''${target}: Format not supported."
|
||||
return 2
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Attempt to unpack.
|
||||
# All supported formats by default.
|
||||
# Usage: unpack [FILES]
|
||||
function unpack() {
|
||||
local IFS=$'\n'
|
||||
local targets=(''${@})
|
||||
[[ "''${targets}" = "" ]] && targets=($(_ls_files | grep -E ''${_unpack_supported}))
|
||||
|
||||
process() {
|
||||
# unpack file type.
|
||||
local type="''${target##*.}"
|
||||
|
||||
[[ "''${target}" =~ .tar.gz$ ]] && type="tar.gz"
|
||||
[[ "''${target}" =~ .tar.xz$ ]] && type="tar.xz"
|
||||
|
||||
# unpack content.
|
||||
case "''${type,,}" in
|
||||
"zip")
|
||||
_unpack_zip "''${target}"
|
||||
;;
|
||||
"7z")
|
||||
_unpack_7z "''${target}"
|
||||
;;
|
||||
"tgz"|"tar.gz")
|
||||
_unpack_tgz "''${target}"
|
||||
;;
|
||||
"txz"|"tar.xz")
|
||||
_unpack_txz "''${target}"
|
||||
;;
|
||||
"tar")
|
||||
_unpack_tar "''${target}"
|
||||
;;
|
||||
"iso")
|
||||
_unpack_iso "''${target}"
|
||||
;;
|
||||
"rar")
|
||||
_unpack_rar "''${target}"
|
||||
;;
|
||||
"xz")
|
||||
_unpack_xz "''${target}"
|
||||
;;
|
||||
"gz")
|
||||
_unpack_gz "''${target}"
|
||||
;;
|
||||
*)
|
||||
_error "''${target}: Format not supported."
|
||||
return 2
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
_iterate_targets process ''${targets[@]}
|
||||
}
|
||||
|
||||
function _pack_zip() {
|
||||
zip -9 -r "''${@}"
|
||||
}
|
||||
|
||||
function _pack_tgz() {
|
||||
tar -c "''${@:2}" | pv -s $(/usr/bin/env du -csb "''${@:2}" | sed -n -e '$p' | awk '{print $1}') | gzip -1 > "''${1}"
|
||||
}
|
||||
|
||||
function _pack_txz() {
|
||||
tar -c "''${@:2}" | pv -s $(/usr/bin/env du -csb "''${@:2}" | sed -n -e '$p' | awk '{print $1}') | xz -9e > "''${1}"
|
||||
}
|
||||
|
||||
function _pack_tar() {
|
||||
tar -c "''${@:2}" | pv -s $(/usr/bin/env du -csb "''${@:2}" | sed -n -e '$p' | awk '{print $1}') > "''${1}"
|
||||
}
|
||||
|
||||
function _pack_gz() {
|
||||
pv "''${2}" | gzip -1 > "''${1}"
|
||||
}
|
||||
|
||||
function _pack_xz() {
|
||||
pv "''${2}" | xz -9e > "''${1}"
|
||||
}
|
||||
|
||||
function _pack_iso() {
|
||||
local input=("''${@:2}")
|
||||
local output="''${1}"
|
||||
local args=()
|
||||
|
||||
for arg in ''${input[@]}; do
|
||||
[[ -d "''${arg}" ]] || {
|
||||
_error "''${arg} is not a directory."
|
||||
return 1
|
||||
};
|
||||
|
||||
args+=("''${arg}=''${arg}")
|
||||
done
|
||||
|
||||
genisoimage -J -r -pad -o "''${output}" -graft-points "''${args[@]}"
|
||||
}
|
||||
|
||||
function _unpack_zip() {
|
||||
unzip "''${1}"
|
||||
}
|
||||
|
||||
function _unpack_7z() {
|
||||
7za x "''${1}"
|
||||
}
|
||||
|
||||
function _unpack_tgz() {
|
||||
pv "''${1}" | gzip -d | tar -xf -
|
||||
}
|
||||
|
||||
function _unpack_txz() {
|
||||
pv "''${1}" | xz -d | tar -xf -
|
||||
}
|
||||
|
||||
function _unpack_tar() {
|
||||
pv "''${1}" | tar -xf -
|
||||
}
|
||||
|
||||
function _unpack_iso() {
|
||||
7za x "''${1}"
|
||||
}
|
||||
|
||||
function _unpack_rar() {
|
||||
unrar x "''${1}"
|
||||
}
|
||||
|
||||
function _unpack_gz() {
|
||||
pv "''${1}" | gzip -d > "''${1%.gz}"
|
||||
}
|
||||
|
||||
function _unpack_xz() {
|
||||
pv "''${1}" | xz -d > "''${1%.xz}"
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,187 +0,0 @@
|
|||
export _unpack_supported=".tar$|.tgz$|.txz$|.tar.gz$|.tar.xz$|.zip$|.iso$|.rar$"
|
||||
|
||||
# Pack files into desired format.
|
||||
# All files and directories by default.
|
||||
# Usage: pack <TARGET.ext> [FILES]
|
||||
function pack() {
|
||||
local IFS=$'\n'
|
||||
local output="${1}"
|
||||
local targets=("${@:2}")
|
||||
local format="${output##*.}"
|
||||
local name="${output%.*}"
|
||||
|
||||
# report no output.
|
||||
if [[ "${output}" = "" ]]; then
|
||||
help pack
|
||||
return 2
|
||||
fi
|
||||
|
||||
# report no format.
|
||||
if [[ "${format}" = "" ]]; then
|
||||
_error "Could not determine output format."
|
||||
help pack
|
||||
return 2
|
||||
fi
|
||||
|
||||
# All targets by default.
|
||||
[[ "${targets}" = "" ]] && targets=(*)
|
||||
|
||||
case "${format}" in
|
||||
"tgz")
|
||||
_pack_tgz "${output}" "${targets[@]}"
|
||||
;;
|
||||
"txz")
|
||||
_pack_txz "${output}" "${targets[@]}"
|
||||
;;
|
||||
"tar")
|
||||
_pack_tar "${output}" "${targets[@]}"
|
||||
;;
|
||||
"zip")
|
||||
_pack_zip "${output}" "${targets[@]}"
|
||||
;;
|
||||
"gz")
|
||||
_pack_gz "${output}" "${targets[@]}"
|
||||
;;
|
||||
"xz")
|
||||
_pack_xz "${output}" "${targets[@]}"
|
||||
;;
|
||||
"iso")
|
||||
_pack_iso "${output}" "${targets[@]}"
|
||||
;;
|
||||
*)
|
||||
_error "${target}: Format not supported."
|
||||
return 2
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Attempt to unpack.
|
||||
# All supported formats by default.
|
||||
# Usage: unpack [FILES]
|
||||
function unpack() {
|
||||
local IFS=$'\n'
|
||||
local targets=(${@})
|
||||
[[ "${targets}" = "" ]] && targets=($(_ls_files | grep -E ${_unpack_supported}))
|
||||
|
||||
process() {
|
||||
# unpack file type.
|
||||
local type="${target##*.}"
|
||||
|
||||
[[ "${target}" =~ .tar.gz$ ]] && type="tar.gz"
|
||||
[[ "${target}" =~ .tar.xz$ ]] && type="tar.xz"
|
||||
|
||||
# unpack content.
|
||||
case "${type,,}" in
|
||||
"zip")
|
||||
_unpack_zip "${target}"
|
||||
;;
|
||||
"7z")
|
||||
_unpack_7z "${target}"
|
||||
;;
|
||||
"tgz"|"tar.gz")
|
||||
_unpack_tgz "${target}"
|
||||
;;
|
||||
"txz"|"tar.xz")
|
||||
_unpack_txz "${target}"
|
||||
;;
|
||||
"tar")
|
||||
_unpack_tar "${target}"
|
||||
;;
|
||||
"iso")
|
||||
_unpack_iso "${target}"
|
||||
;;
|
||||
"rar")
|
||||
_unpack_rar "${target}"
|
||||
;;
|
||||
"xz")
|
||||
_unpack_xz "${target}"
|
||||
;;
|
||||
"gz")
|
||||
_unpack_gz "${target}"
|
||||
;;
|
||||
*)
|
||||
_error "${target}: Format not supported."
|
||||
return 2
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
_iterate_targets process ${targets[@]}
|
||||
}
|
||||
|
||||
function _pack_zip() {
|
||||
zip -9 -r "${@}"
|
||||
}
|
||||
|
||||
function _pack_tgz() {
|
||||
tar -c "${@:2}" | pv -s $(/usr/bin/env du -csb "${@:2}" | sed -n -e '$p' | awk '{print $1}') | gzip -1 > "${1}"
|
||||
}
|
||||
|
||||
function _pack_txz() {
|
||||
tar -c "${@:2}" | pv -s $(/usr/bin/env du -csb "${@:2}" | sed -n -e '$p' | awk '{print $1}') | xz -9e > "${1}"
|
||||
}
|
||||
|
||||
function _pack_tar() {
|
||||
tar -c "${@:2}" | pv -s $(/usr/bin/env du -csb "${@:2}" | sed -n -e '$p' | awk '{print $1}') > "${1}"
|
||||
}
|
||||
|
||||
function _pack_gz() {
|
||||
pv "${2}" | gzip -1 > "${1}"
|
||||
}
|
||||
|
||||
function _pack_xz() {
|
||||
pv "${2}" | xz -9e > "${1}"
|
||||
}
|
||||
|
||||
function _pack_iso() {
|
||||
local input=("${@:2}")
|
||||
local output="${1}"
|
||||
local args=()
|
||||
|
||||
for arg in ${input[@]}; do
|
||||
[[ -d "${arg}" ]] || {
|
||||
_error "${arg} is not a directory."
|
||||
return 1
|
||||
};
|
||||
|
||||
args+=("${arg}=${arg}")
|
||||
done
|
||||
|
||||
genisoimage -J -r -pad -o "${output}" -graft-points "${args[@]}"
|
||||
}
|
||||
|
||||
function _unpack_zip() {
|
||||
unzip "${1}"
|
||||
}
|
||||
|
||||
function _unpack_7z() {
|
||||
7za x "${1}"
|
||||
}
|
||||
|
||||
function _unpack_tgz() {
|
||||
pv "${1}" | gzip -d | tar -xf -
|
||||
}
|
||||
|
||||
function _unpack_txz() {
|
||||
pv "${1}" | xz -d | tar -xf -
|
||||
}
|
||||
|
||||
function _unpack_tar() {
|
||||
pv "${1}" | tar -xf -
|
||||
}
|
||||
|
||||
function _unpack_iso() {
|
||||
7za x "${1}"
|
||||
}
|
||||
|
||||
function _unpack_rar() {
|
||||
unrar x "${1}"
|
||||
}
|
||||
|
||||
function _unpack_gz() {
|
||||
pv "${1}" | gzip -d > "${1%.gz}"
|
||||
}
|
||||
|
||||
function _unpack_xz() {
|
||||
pv "${1}" | xz -d > "${1%.xz}"
|
||||
}
|
171
module/common/bash/module/Parse.nix
Normal file
|
@ -0,0 +1,171 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
_PARSE_ALLOWED_CHARS="_-"
|
||||
_PARSE_SPLIT_CHARS="\.\ _-"
|
||||
|
||||
# Parse data and output simplified format.
|
||||
# Usage: parse_simple <STRING>
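# Example (illustrative): parse_simple "My File (v2).txt"   # -> My_File_v2_txt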
|
||||
function parse_simple() {
|
||||
echo "''${*}" | sed \
|
||||
-e "s/[''${_PARSE_SPLIT_CHARS}]/_/g" \
|
||||
-e "s/[^[:alnum:]''${_PARSE_ALLOWED_CHARS}]//g" \
|
||||
-e "s/_\+/_/g" -e "s/-\+/-/g" \
|
||||
-e "s/_-/_/g" -e "s/-_/_/g" \
|
||||
-e "s/_\+/_/g" \
|
||||
-e "s/^_//" -e "s/_$//"
|
||||
}
|
||||
|
||||
# Parse to PascalCase.
|
||||
# Usage: parse_pascal <STRING>
|
||||
function parse_pascal() {
|
||||
local parts=($(_get_parts $(parse_simple "''${*}")))
|
||||
local result
|
||||
|
||||
for part in "''${parts[@]}"; do
|
||||
local word="''${part,,}"
|
||||
word="''${word^}"
|
||||
result="''${result}''${word}"
|
||||
done
|
||||
|
||||
echo "''${result}"
|
||||
}
|
||||
|
||||
# Parse to snake_case.
|
||||
# Usage: parse_snake <STRING>
|
||||
function parse_snake() {
|
||||
local parts=($(_get_parts $(parse_simple "''${*}")))
|
||||
local result
|
||||
|
||||
for part in "''${parts[@]}"; do
|
||||
local word="''${part,,}"
|
||||
result="''${result}_''${word}"
|
||||
done
|
||||
|
||||
echo "''${result#_}"
|
||||
}
|
||||
|
||||
# Parse to kebab-case.
|
||||
# Usage: parse_kebab <STRING>
|
||||
function parse_kebab() {
|
||||
local parts=($(_get_parts $(parse_simple "''${*}")))
|
||||
local result
|
||||
|
||||
for part in "''${parts[@]}"; do
|
||||
local word="''${part,,}"
|
||||
result="''${result}-''${word}"
|
||||
done
|
||||
|
||||
echo "''${result#-}"
|
||||
}
|
||||
|
||||
# Parse to camelCase.
|
||||
# Usage: parse_camel <STRING>
|
||||
function parse_camel() {
|
||||
local parts=($(_get_parts $(parse_simple "''${*}")))
|
||||
local result
|
||||
|
||||
for part in "''${parts[@]}"; do
|
||||
local word="''${part,,}"
|
||||
word="''${word^}"
|
||||
result="''${result}''${word}"
|
||||
done
|
||||
|
||||
echo "''${result,}"
|
||||
}
|
||||
|
||||
# Parse to SNAKE_CASE_UPPERCASE. **NOT STABLE! Repeating results in different output.**
|
||||
# Usage: parse_snake_uppercase <STRING>
|
||||
function parse_snake_uppercase() {
|
||||
local parts=($(_get_parts $(parse_simple "''${*}")))
|
||||
local result
|
||||
|
||||
for part in "''${parts[@]}"; do
|
||||
local word="''${part^^}"
|
||||
result="''${result}_''${word}"
|
||||
done
|
||||
|
||||
echo "''${result#_}"
|
||||
}
|
||||
|
||||
# Parse data keeping only alphanumeric characters.
|
||||
# Usage: parse_alnum <STRING>
|
||||
function parse_alnum() {
|
||||
echo "''${*}" | sed -e "s/[^[:alnum:]]//g"
|
||||
}
|
||||
|
||||
# Parse integers from mixed string.
|
||||
# Usage: parse_ints <STRING>
|
||||
function parse_ints() {
|
||||
echo "''${*}" | tr '\n' ' ' | sed -e 's/[^0-9]/ /g' -e 's/^ *//g' -e 's/ *$//g' | tr -s ' ' | sed 's/ /\n/g'
|
||||
}
|
||||
|
||||
# Parse string to lowercase.
|
||||
# Usage: parse_lowercase <STRING>
|
||||
function parse_lowercase() {
|
||||
echo "''${*,,}"
|
||||
}
|
||||
|
||||
# Parse string to uppercase.
|
||||
# Usage: parse_uppercase <STRING>
|
||||
function parse_uppercase() {
|
||||
echo "''${*^^}"
|
||||
}
|
||||
|
||||
# Parse string to title case.
|
||||
# Usage: parse_titlecase <STRING>
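# Example (illustrative): parse_titlecase "war and peace"   # -> War and Peace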
|
||||
function parse_titlecase() {
|
||||
local IFS=$'\n'
|
||||
local parts=($(_parse_split ''${@}))
|
||||
local minors=("is" "at" "of" "to" "in" "for" "the" "a" "an" "and" "but" "or" "on" "was" "were" "been" "be" "do" "did" "does")
|
||||
|
||||
echo -n "$(parse_sentencecase ''${parts[0]})"
|
||||
for part in ''${parts[@]:1}; do
|
||||
if _contains $(echo ''${part,,} | sed -e "s/[''${_PARSE_SPLIT_CHARS}]//g") ''${minors[@]}; then
|
||||
echo -n "''${part,,}"
|
||||
else
|
||||
echo -n "$(parse_sentencecase ''${part})"
|
||||
fi
|
||||
done
|
||||
|
||||
echo
|
||||
}
|
||||
|
||||
# Parse string to sentence case.
|
||||
# Usage: parse_sentencecase <STRING>
|
||||
function parse_sentencecase() {
|
||||
local lower="''${*,,}"
|
||||
echo "''${lower^}"
|
||||
}
|
||||
|
||||
# Parse string to start case.
|
||||
# Usage: parse_startcase <STRING>
|
||||
function parse_startcase() {
|
||||
local IFS=$'\n'
|
||||
local parts=($(_parse_split ''${*}))
|
||||
|
||||
for part in ''${parts[@]}; do
|
||||
echo -n "''${part^}"
|
||||
done
|
||||
|
||||
echo
|
||||
}
|
||||
|
||||
# Parse string to pretty Json.
|
||||
# Usage: parse_json <STRING>
|
||||
function parse_json() {
|
||||
echo "''${*}" | jq
|
||||
}
|
||||
|
||||
# Split string by separators.
|
||||
# Usage: _parse_split <STRING>
|
||||
function _parse_split() {
|
||||
echo "''${*}" | sed -e "s/[A-Z]\+/\n&/g" -e "s/[0-9]\+/\n&\n/g" -e "s/[''${_PARSE_SPLIT_CHARS}]/&\n/g" | sed -e "/^$/d"
|
||||
}
|
||||
|
||||
# Get name parts.
|
||||
# Usage: _get_parts <STRING>
|
||||
function _get_parts() {
|
||||
_parse_split "''${*}" | sed -e "s/[''${_PARSE_SPLIT_CHARS}]//g" | sed -e "/^$/d"
|
||||
}
|
||||
'';
|
||||
}
|
|
@ -1,167 +0,0 @@
|
|||
_PARSE_ALLOWED_CHARS="_-"
|
||||
_PARSE_SPLIT_CHARS="\.\ _-"
|
||||
|
||||
# Parse data and output simplified format.
|
||||
# Usage: parse_simple <STRING>
|
||||
function parse_simple() {
|
||||
echo "${*}" | sed \
|
||||
-e "s/[${_PARSE_SPLIT_CHARS}]/_/g" \
|
||||
-e "s/[^[:alnum:]${_PARSE_ALLOWED_CHARS}]//g" \
|
||||
-e "s/_\+/_/g" -e "s/-\+/-/g" \
|
||||
-e "s/_-/_/g" -e "s/-_/_/g" \
|
||||
-e "s/_\+/_/g" \
|
||||
-e "s/^_//" -e "s/_$//"
|
||||
}
|
||||
|
||||
# Parse to PascalCase.
|
||||
# Usage: parse_pascal <STRING>
|
||||
function parse_pascal() {
|
||||
local parts=($(_get_parts $(parse_simple "${*}")))
|
||||
local result
|
||||
|
||||
for part in "${parts[@]}"; do
|
||||
local word="${part,,}"
|
||||
word="${word^}"
|
||||
result="${result}${word}"
|
||||
done
|
||||
|
||||
echo "${result}"
|
||||
}
|
||||
|
||||
# Parse to snake_case.
|
||||
# Usage: parse_snake <STRING>
|
||||
function parse_snake() {
|
||||
local parts=($(_get_parts $(parse_simple "${*}")))
|
||||
local result
|
||||
|
||||
for part in "${parts[@]}"; do
|
||||
local word="${part,,}"
|
||||
result="${result}_${word}"
|
||||
done
|
||||
|
||||
echo "${result#_}"
|
||||
}
|
||||
|
||||
# Parse to kebab-case.
|
||||
# Usage: parse_kebab <STRING>
|
||||
function parse_kebab() {
|
||||
local parts=($(_get_parts $(parse_simple "${*}")))
|
||||
local result
|
||||
|
||||
for part in "${parts[@]}"; do
|
||||
local word="${part,,}"
|
||||
result="${result}-${word}"
|
||||
done
|
||||
|
||||
echo "${result#-}"
|
||||
}
|
||||
|
||||
# Parse to camelCase.
|
||||
# Usage: parse_camel <STRING>
|
||||
function parse_camel() {
|
||||
local parts=($(_get_parts $(parse_simple "${*}")))
|
||||
local result
|
||||
|
||||
for part in "${parts[@]}"; do
|
||||
local word="${part,,}"
|
||||
word="${word^}"
|
||||
result="${result}${word}"
|
||||
done
|
||||
|
||||
echo "${result,}"
|
||||
}
|
||||
|
||||
# Parse to SNAKE_CASE_UPPERCASE. **NOT STABLE! Repeating results in different output.**
|
||||
# Usage: parse_snake_uppercase <STRING>
|
||||
function parse_snake_uppercase() {
|
||||
local parts=($(_get_parts $(parse_simple "${*}")))
|
||||
local result
|
||||
|
||||
for part in "${parts[@]}"; do
|
||||
local word="${part^^}"
|
||||
result="${result}_${word}"
|
||||
done
|
||||
|
||||
echo "${result#_}"
|
||||
}
|
||||
|
||||
# Parse data keeping only alphanumeric characters.
|
||||
# Usage: parse_alnum <STRING>
|
||||
function parse_alnum() {
|
||||
echo "${*}" | sed -e "s/[^[:alnum:]]//g"
|
||||
}
|
||||
|
||||
# Parse integers from mixed string.
|
||||
# Usage: parse_ints <STRING>
|
||||
function parse_ints() {
|
||||
echo "${*}" | tr '\n' ' ' | sed -e 's/[^0-9]/ /g' -e 's/^ *//g' -e 's/ *$//g' | tr -s ' ' | sed 's/ /\n/g'
|
||||
}
|
||||
|
||||
# Parse string to lowercase.
|
||||
# Usage: parse_lowercase <STRING>
|
||||
function parse_lowercase() {
|
||||
echo "${*,,}"
|
||||
}
|
||||
|
||||
# Parse string to uppercase.
|
||||
# Usage: parse_uppercase <STRING>
|
||||
function parse_uppercase() {
|
||||
echo "${*^^}"
|
||||
}
|
||||
|
||||
# Parse string to title case.
|
||||
# Usage: parse_titlecase <STRING>
|
||||
function parse_titlecase() {
|
||||
local IFS=$'\n'
|
||||
local parts=($(_parse_split ${@}))
|
||||
local minors=("is" "at" "of" "to" "in" "for" "the" "a" "an" "and" "but" "or" "on" "was" "were" "been" "be" "do" "did" "does")
|
||||
|
||||
echo -n "$(parse_sentencecase ${parts[0]})"
|
||||
for part in ${parts[@]:1}; do
|
||||
if _contains $(echo ${part,,} | sed -e "s/[${_PARSE_SPLIT_CHARS}]//g") ${minors[@]}; then
|
||||
echo -n "${part,,}"
|
||||
else
|
||||
echo -n "$(parse_sentencecase ${part})"
|
||||
fi
|
||||
done
|
||||
|
||||
echo
|
||||
}
|
||||
|
||||
# Parse string to sentence case.
|
||||
# Usage: parse_sentencecase <STRING>
|
||||
function parse_sentencecase() {
|
||||
local lower="${*,,}"
|
||||
echo "${lower^}"
|
||||
}
|
||||
|
||||
# Parse string to start case.
|
||||
# Usage: parse_startcase <STRING>
|
||||
function parse_startcase() {
|
||||
local IFS=$'\n'
|
||||
local parts=($(_parse_split ${*}))
|
||||
|
||||
for part in ${parts[@]}; do
|
||||
echo -n "${part^}"
|
||||
done
|
||||
|
||||
echo
|
||||
}
|
||||
|
||||
# Parse string to pretty Json.
|
||||
# Usage: parse_json <STRING>
|
||||
function parse_json() {
|
||||
echo "${*}" | jq
|
||||
}
|
||||
|
||||
# Split string by separators.
|
||||
# Usage: _parse_split <STRING>
|
||||
function _parse_split() {
|
||||
echo "${*}" | sed -e "s/[A-Z]\+/\n&/g" -e "s/[0-9]\+/\n&\n/g" -e "s/[${_PARSE_SPLIT_CHARS}]/&\n/g" | sed -e "/^$/d"
|
||||
}
|
||||
|
||||
# Get name parts.
|
||||
# Usage: _get_parts <STRING>
|
||||
function _get_parts() {
|
||||
_parse_split "${*}" | sed -e "s/[${_PARSE_SPLIT_CHARS}]//g" | sed -e "/^$/d"
|
||||
}
|
13
module/common/bash/module/Permission.nix
Normal file
|
@@ -0,0 +1,13 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Recursively change permissions to allow read sharing with group and others.
|
||||
function perm_share() {
|
||||
find . -type d -exec chmod 755 {} \;; find . -type f -exec chmod 644 {} \;
|
||||
}
|
||||
|
||||
# Recursively change permissions to restrict access for group and others.
|
||||
function perm() {
|
||||
find . -type d -exec chmod 700 {} \;; find . -type f -exec chmod 600 {} \;
|
||||
}
|
||||
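# Note: both helpers operate on the current directory tree (find .), so run them from the directory whose permissions you want to change.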
'';
|
||||
}
|
|
@@ -1,9 +0,0 @@
|
|||
# Recursively change permissions to allow read sharing with group and others.
|
||||
function perm_share() {
|
||||
find . -type d -exec chmod 755 {} \;; find . -type f -exec chmod 644 {} \;
|
||||
}
|
||||
|
||||
# Recursively change permissions to restrict access for group and others.
|
||||
function perm() {
|
||||
find . -type d -exec chmod 700 {} \;; find . -type f -exec chmod 600 {} \;
|
||||
}
|
13
module/common/bash/module/Power.nix
Normal file
|
@@ -0,0 +1,13 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Suspend system.
|
||||
function slp() {
|
||||
systemctl suspend -i
|
||||
}
|
||||
|
||||
# Power off system.
|
||||
function bye() {
|
||||
systemctl poweroff -i
|
||||
}
|
||||
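# Note (assumption, not from the original comments): the -i flag asks systemctl to ignore
# inhibitor locks, so suspend/poweroff proceed even if other programs hold inhibitors.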
'';
|
||||
}
|
|
@@ -1,9 +0,0 @@
|
|||
# Suspend system.
|
||||
function slp() {
|
||||
systemctl suspend -i
|
||||
}
|
||||
|
||||
# Poweroff.
|
||||
function bye() {
|
||||
systemctl poweroff -i
|
||||
}
|
28
module/common/bash/module/Prune.nix
Normal file
|
@@ -0,0 +1,28 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
export _flatpakcfg_path="''${HOME}/.config/linux/Flatpak.txt"
|
||||
|
||||
# Prune everything unused in docker.
|
||||
function prune_docker() {
|
||||
docker system prune --volumes --all
|
||||
}
|
||||
|
||||
# Prune Nix Store.
|
||||
function prune_nix() {
|
||||
nix-store --gc
|
||||
}
|
||||
|
||||
# Uninstall flatpaks not listed in the config.
|
||||
function prune_flatpak() {
|
||||
local IFS=$'\n'
|
||||
local config=($(cat ''${_flatpakcfg_path} | cut -f2))
|
||||
local installed=($(flatpak list --app | cut -f2))
|
||||
|
||||
process() {
|
||||
_contains ''${target} ''${config[@]} || flatpak uninstall ''${target}
|
||||
}
|
||||
|
||||
_iterate_targets process ''${installed[@]}
|
||||
}
|
||||
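# Note (assumption): Flatpak.txt is expected to be tab-separated with the application id
# in the second column, matching the cut -f2 calls above.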
'';
|
||||
}
|
|
@@ -1,24 +0,0 @@
|
|||
export _flatpakcfg_path="${HOME}/.config/linux/Flatpak.txt"
|
||||
|
||||
# Prune everything unused in docker.
|
||||
function prune_docker() {
|
||||
docker system prune --volumes --all
|
||||
}
|
||||
|
||||
# Prune Nix Store.
|
||||
function prune_nix() {
|
||||
nix-store --gc
|
||||
}
|
||||
|
||||
# Uninstall flatpaks not listed in the config.
|
||||
function prune_flatpak() {
|
||||
local IFS=$'\n'
|
||||
local config=($(cat ${_flatpakcfg_path} | cut -f2))
|
||||
local installed=($(flatpak list --app | cut -f2))
|
||||
|
||||
process() {
|
||||
_contains ${target} ${config[@]} || flatpak uninstall ${target}
|
||||
}
|
||||
|
||||
_iterate_targets process ${installed[@]}
|
||||
}
|
15
module/common/bash/module/Ps.nix
Normal file
|
@@ -0,0 +1,15 @@
|
|||
{ ... }: {
|
||||
text = ''
|
||||
# Find process and filter.
|
||||
# Usage: fps [PROCESS]
|
||||
function fps() {
|
||||
local process="''${1}"
|
||||
|
||||
if [[ "''${process}" = "" ]]; then
|
||||
ps aux
|
||||
else
|
||||
ps aux | sed -n -e "1p" -e "/''${process}/Ip" | sed -e "/sed -n -e 1p -e/d"
|
||||
fi
|
||||
}
|
||||
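# Illustrative example (assumed process name): fps ssh prints the ps header plus any
# lines matching "ssh" case-insensitively, excluding the filter command itself.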
'';
|
||||
}
|
|
@@ -1,11 +0,0 @@
|
|||
# Find process and filter.
|
||||
# Usage: fps [PROCESS]
|
||||
function fps() {
|
||||
local process="${1}"
|
||||
|
||||
if [[ "${process}" = "" ]]; then
|
||||
ps aux
|
||||
else
|
||||
ps aux | sed -n -e "1p" -e "/${process}/Ip" | sed -e "/sed -n -e 1p -e/d"
|
||||
fi
|
||||
}
|
157
module/common/bash/module/Ps1.nix
Normal file
|
@@ -0,0 +1,157 @@
|
|||
{ style, ... }: let
|
||||
accent = "${style.color.accent-r};${style.color.accent-g};${style.color.accent-b};";
|
||||
negative = "${style.color.negative-r};${style.color.negative-g};${style.color.negative-b};";
|
||||
neutral = "${style.color.neutral-r};${style.color.neutral-g};${style.color.neutral-b};";
|
||||
positive = "${style.color.positive-r};${style.color.positive-g};${style.color.positive-b};";
|
||||
in {
|
||||
text = ''
|
||||
export PROMPT_COMMAND=(__prompt_command "''${PROMPT_COMMAND[@]}")
|
||||
|
||||
function __prompt_color() {
|
||||
local color="''${1}"
|
||||
if [[ "''${color}" = "" ]]; then
|
||||
printf "\[\x1b[0m\]"
|
||||
else
|
||||
printf "\[\x1b[38;2;''${color}m\]"
|
||||
fi
|
||||
}
|
||||
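# Illustrative example: __prompt_color "255;0;0" emits a truecolor escape for a red
# foreground; calling it with no argument resets the color (sketch, based on the code above).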
|
||||
# Custom terminal prompt format.
|
||||
function __prompt_command() {
|
||||
local last_status="''${?}"
|
||||
local is_error=false
|
||||
local is_root=false
|
||||
|
||||
if [[ ''${last_status} != 0 && ''${last_status} != 130 ]]; then
|
||||
is_error=true
|
||||
fi
|
||||
if [[ "''${UID}" -eq 0 ]]; then
|
||||
is_root=true
|
||||
fi
|
||||
|
||||
# Add newline.
|
||||
PS1="\n"
|
||||
|
||||
# Set error red.
|
||||
if ''${is_error}; then
|
||||
PS1+="$(__prompt_color '${negative}')"
|
||||
PS1+="["
|
||||
else
|
||||
PS1+="$(__prompt_color)"
|
||||
PS1+="["
|
||||
fi
|
||||
|
||||
# Add time.
|
||||
# PS1+="$(__prompt_color '${accent}')"
|
||||
# PS1+="$(date +%H:%M) "
|
||||
|
||||
# Set root red.
|
||||
if ''${is_root}; then
|
||||
PS1+="$(__prompt_color '${negative}')"
|
||||
else
|
||||
PS1+="$(__prompt_color '${neutral}')"
|
||||
fi
|
||||
|
||||
# Add user, host and working dir.
|
||||
PS1+="\u@\h "
|
||||
PS1+="$(__prompt_color '${positive}')"
|
||||
PS1+="\w"
|
||||
# PS1+="\''${PWD}"
|
||||
|
||||
# Add git branch if available.
|
||||
local git_branch="$(_git_current_branch)"
|
||||
if [[ "''${git_branch}" != "" ]]; then
|
||||
PS1+=" $(__prompt_color '${accent}')@''${git_branch}"
|
||||
fi
|
||||
|
||||
# Set error red.
|
||||
if ''${is_error}; then
|
||||
PS1+="$(__prompt_color '${negative}')"
|
||||
PS1+="] "
|
||||
else
|
||||
PS1+="$(__prompt_color)"
|
||||
PS1+="] "
|
||||
fi
|
||||
|
||||
# If error, show code.
|
||||
if ''${is_error}; then
|
||||
PS1+="$(__prompt_color '${negative}')("
|
||||
PS1+="''${last_status}"
|
||||
local error_type="$(_ps1error ''${last_status})"
|
||||
[[ "''${error_type}" != "" ]] && PS1+=" ''${error_type}"
|
||||
PS1+=")$(__prompt_color) "
|
||||
fi
|
||||
|
||||
# Command on new line.
|
||||
PS1+="\n"
|
||||
|
||||
# Show nix shell name.
|
||||
if [ -n "''${NIX_SHELL}" ]; then
|
||||
PS1+="''${NIX_SHELL} "
|
||||
fi
|
||||
|
||||
# Show remote connections.
|
||||
if [ -n "''${SSH_TTY}" ]; then
|
||||
PS1+=">"
|
||||
fi
|
||||
|
||||
PS1+="$(__prompt_color)"
|
||||
|
||||
# Set user tag.
|
||||
if ''${is_root}; then
|
||||
PS1+="# "
|
||||
else
|
||||
PS1+="$ "
|
||||
fi
|
||||
|
||||
# Reset color.
|
||||
PS1+="\[\033[0m\]"
|
||||
}
|
||||
|
||||
# Convert error code into short description.
|
||||
# Usage: _ps1error <CODE>
|
||||
function _ps1error() {
|
||||
local type
|
||||
case ''${1} in
|
||||
1) type="GENERAL" ;;
|
||||
2) type="MISUSE" ;;
|
||||
126) type="CANTEXEC" ;;
|
||||
127) type="CMDNF" ;;
|
||||
129) type="SIGHUP" ;;
|
||||
130) type="SIGINT" ;;
|
||||
131) type="SIGQUIT" ;;
|
||||
132) type="SIGILL" ;;
|
||||
133) type="SIGTRAP" ;;
|
||||
134) type="SIGABRT" ;;
|
||||
135) type="SIGBUS" ;;
|
||||
136) type="SIGFPE" ;;
|
||||
137) type="SIGKILL" ;;
|
||||
138) type="SIGUSR1" ;;
|
||||
139) type="SIGSEGV" ;;
|
||||
140) type="SIGUSR2" ;;
|
||||
141) type="SIGPIPE" ;;
|
||||
142) type="SIGALRM" ;;
|
||||
143) type="SIGTERM" ;;
|
||||
144) type="" ;;
|
||||
145) type="SIGCHLD" ;;
|
||||
146) type="SIGCONT" ;;
|
||||
147) type="SIGSTOP" ;;
|
||||
148) type="SIGTSTP" ;;
|
||||
149) type="SIGTTIN" ;;
|
||||
150) type="SIGTTOU" ;;
|
||||
151) type="SIGURG" ;;
|
||||
152) type="SIGXCPU" ;;
|
||||
153) type="SIGXFSZ" ;;
|
||||
154) type="SIGVTALRM" ;;
|
||||
155) type="SIGPROF" ;;
|
||||
156) type="SIGWINCH" ;;
|
||||
157) type="SIGIO" ;;
|
||||
158) type="SIGPWR" ;;
|
||||
159) type="SIGSYS" ;;
|
||||
*) type="" ;;
|
||||
esac
|
||||
|
||||
echo -n "''${type}"
|
||||
}
|
||||
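# Illustrative example: _ps1error 127 prints CMDNF (command not found); _ps1error 130 prints SIGINT.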
'';
|
||||
}
|
|
@@ -1,151 +0,0 @@
|
|||
export PROMPT_COMMAND=(__prompt_command "${PROMPT_COMMAND[@]}")
|
||||
|
||||
function __prompt_color() {
|
||||
local color="${1}"
|
||||
if [[ "${color}" = "" ]]; then
|
||||
printf "\[\x1b[0m\]"
|
||||
else
|
||||
printf "\[\x1b[38;2;${color}m\]"
|
||||
fi
|
||||
# echo "\[\033[48;5;${COLOR_BACKGROUND};38;5;${color}m\]" # With backgroud.
|
||||
# echo "\[\033[38;5;${color}m\]" # Only foreground.
|
||||
}
|
||||
|
||||
# Custom terminal prompt format.
|
||||
function __prompt_command() {
|
||||
local last_status="${?}"
|
||||
local is_error=false
|
||||
local is_root=false
|
||||
|
||||
if [[ ${last_status} != 0 && ${last_status} != 130 ]]; then
|
||||
is_error=true
|
||||
fi
|
||||
if [[ "${UID}" -eq 0 ]]; then
|
||||
is_root=true
|
||||
fi
|
||||
|
||||
# Add newline.
|
||||
PS1="\n"
|
||||
|
||||
# Set error red.
|
||||
if ${is_error}; then
|
||||
PS1+="$(__prompt_color ${negative_rgb})"
|
||||
PS1+="["
|
||||
else
|
||||
PS1+="$(__prompt_color)"
|
||||
PS1+="["
|
||||
fi
|
||||
|
||||
# Add time.
|
||||
# PS1+="$(__prompt_color ${accent_rgb})"
|
||||
# PS1+="$(date +%H:%M) "
|
||||
|
||||
# Set root red.
|
||||
if ${is_root}; then
|
||||
PS1+="$(__prompt_color ${negative_rgb})"
|
||||
else
|
||||
PS1+="$(__prompt_color ${neutral_rgb})"
|
||||
fi
|
||||
|
||||
# Add user, host and working dir.
|
||||
PS1+="\u@\h "
|
||||
PS1+="$(__prompt_color ${positive_rgb})"
|
||||
PS1+="\w"
|
||||
# PS1+="\${PWD}"
|
||||
|
||||
# Add git branch if available.
|
||||
local git_branch="$(_git_current_branch)"
|
||||
if [[ "${git_branch}" != "" ]]; then
|
||||
PS1+=" $(__prompt_color ${accent_rgb})@${git_branch}"
|
||||
fi
|
||||
|
||||
# Set error red.
|
||||
if ${is_error}; then
|
||||
PS1+="$(__prompt_color ${negative_rgb})"
|
||||
PS1+="] "
|
||||
else
|
||||
PS1+="$(__prompt_color)"
|
||||
PS1+="] "
|
||||
fi
|
||||
|
||||
# If error, show code.
|
||||
if ${is_error}; then
|
||||
PS1+="$(__prompt_color ${negative_rgb})("
|
||||
PS1+="${last_status}"
|
||||
local error_type="$(_ps1error ${last_status})"
|
||||
[[ "${error_type}" != "" ]] && PS1+=" ${error_type}"
|
||||
PS1+=")$(__prompt_color) "
|
||||
fi
|
||||
|
||||
# Command on new line.
|
||||
PS1+="\n"
|
||||
PS1+="$(__prompt_color ${fg_3_rgb})"
|
||||
|
||||
# Show nix shell name.
|
||||
if [ -n "${NIX_SHELL}" ]; then
|
||||
PS1+="${NIX_SHELL} "
|
||||
fi
|
||||
|
||||
# Show remote connections.
|
||||
if [ -n "${SSH_TTY}" ]; then
|
||||
PS1+=">"
|
||||
fi
|
||||
|
||||
PS1+="$(__prompt_color)"
|
||||
|
||||
# Set user tag.
|
||||
if ${is_root}; then
|
||||
PS1+="# "
|
||||
else
|
||||
PS1+="$ "
|
||||
fi
|
||||
|
||||
# Reset color.
|
||||
PS1+="\[\033[0m\]"
|
||||
}
|
||||
|
||||
# Convert error code into short description.
|
||||
# Usage: _ps1error <CODE>
|
||||
function _ps1error() {
|
||||
local type
|
||||
case ${1} in
|
||||
1) type="GENERAL" ;;
|
||||
2) type="MISUSE" ;;
|
||||
126) type="CANTEXEC" ;;
|
||||
127) type="CMDNF" ;;
|
||||
129) type="SIGHUP" ;;
|
||||
130) type="SIGINT" ;;
|
||||
131) type="SIGQUIT" ;;
|
||||
132) type="SIGILL" ;;
|
||||
133) type="SIGTRAP" ;;
|
||||
134) type="SIGABRT" ;;
|
||||
135) type="SIGBUS" ;;
|
||||
136) type="SIGFPE" ;;
|
||||
137) type="SIGKILL" ;;
|
||||
138) type="SIGUSR1" ;;
|
||||
139) type="SIGSEGV" ;;
|
||||
140) type="SIGUSR2" ;;
|
||||
141) type="SIGPIPE" ;;
|
||||
142) type="SIGALRM" ;;
|
||||
143) type="SIGTERM" ;;
|
||||
144) type="" ;;
|
||||
145) type="SIGCHLD" ;;
|
||||
146) type="SIGCONT" ;;
|
||||
147) type="SIGSTOP" ;;
|
||||
148) type="SIGTSTP" ;;
|
||||
149) type="SIGTTIN" ;;
|
||||
150) type="SIGTTOU" ;;
|
||||
151) type="SIGURG" ;;
|
||||
152) type="SIGXCPU" ;;
|
||||
153) type="SIGXFSZ" ;;
|
||||
154) type="SIGVTALRM" ;;
|
||||
155) type="SIGPROF" ;;
|
||||
156) type="SIGWINCH" ;;
|
||||
157) type="SIGIO" ;;
|
||||
158) type="SIGPWR" ;;
|
||||
159) type="SIGSYS" ;;
|
||||
*) type="" ;;
|
||||
esac
|
||||
|
||||
echo -n "${type}"
|
||||
}
|
Some files were not shown because too many files have changed in this diff.