Throw shfmt against bash scripts
Signed-off-by: Knut Ahlers <knut@ahlers.me>
parent 76b40bb59b
commit efc8bc4f3c
25 changed files with 181 additions and 168 deletions
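
The commit message does not record how shfmt was invoked. A minimal sketch of how such a cleanup could be reproduced, assuming shfmt's default settings (tab indentation) and that the affected scripts carry a #!/bin/bash shebang; the path and flags below are assumptions, not taken from the commit:

    # Find bash scripts by their shebang and rewrite them in place with shfmt.
    # "bin" is an assumed location; -w writes the formatted output back to the files.
    grep -rIl '^#!/bin/bash' bin | xargs shfmt -w
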
@@ -16,8 +16,7 @@ PASSES="2"
 ###
 ###
 
-if ! [ -f "$1" ]
-then
+if ! [ -f "$1" ]; then
 echo ""$!" is not a file. Nothing to convert."
 exit
 fi
@@ -45,8 +44,7 @@ TEMPFILE="$DIRECTORY/.${FILENAME%.*}_${NOW}_ffmpeg.${EXT}"
 
 #METADATA="-metadata creation_time=\"$TIMESTAMP\""
 
-if [ "$PASSES" == "1" ]
-then
+if [ "$PASSES" == "1" ]; then
 # 1 pass encoding
 $NICE ffmpeg -i "$1" -threads auto -loglevel "$LOGLEVEL" -vcodec libx264 -b:v "${BITRATE}k" -vf yadif=1 -acodec libfaac -ab 192k -ar 48000 -sn -metadata creation_time="$TIMESTAMP" -f mp4 -y "${TEMPFILE}"
 else
@@ -60,8 +58,7 @@ fi
 
 touch -r "$1" "${TEMPFILE}"
 
-if [ -f "${1%.*}.${EXT}" ]
-then
+if [ -f "${1%.*}.${EXT}" ]; then
 mv -f "${TEMPFILE}" "${1%.*}_${NOW}.${EXT}"
 echo
 echo "File "${1%.*}.${EXT}" already exist, moved to: "${1%.*}_${NOW}.${EXT}""

@@ -7,15 +7,21 @@ REPOS=( public secret )
 SSH_KEY=${SSH_KEY:-fafnir}
 
 # Print debug messages if enabled by ${DEBUG}
-function debug {
-[[ "${DEBUG}" = "false" ]] && return
+function debug() {
+[[ ${DEBUG} == "false" ]] && return
 echo "$@" >&2
 }
 
-function join_by { local d=$1; shift; echo -n "$1"; shift; printf "%s" "${@/#/$d}"; }
+function join_by() {
+local d=$1
+shift
+echo -n "$1"
+shift
+printf "%s" "${@/#/$d}"
+}
 
 # Wrap git to work with git-dir and work-tree being in other locations
-function gwrap {
+function gwrap() {
 hub --git-dir=${HOME}/.cfg/${REPO} --work-tree=${HOME} $@
 }
 
@@ -26,23 +32,26 @@ if ! ( ping -q -c 1 8.8.8.8 >/dev/null ); then
 fi
 
 STAT_PARM="-c %Y"
-[[ "$(uname -s)" = "Darwin" ]] && STAT_PARM="-f %m"
+[[ "$(uname -s)" == "Darwin" ]] && STAT_PARM="-f %m"
 
 NEED_UPDATE=()
 # Check repos for updates
 for REPO in ${REPOS[@]}; do
 LAST_FETCH=0
 [ -f ~/.cfg/${REPO}/FETCH_HEAD ] && LAST_FETCH=$(stat ${STAT_PARM} ~/.cfg/${REPO}/FETCH_HEAD)
-if [ $(( $(date +%s) - ${LAST_FETCH} )) -gt ${FETCH_INTERVAL} ] || \
+if [ $(($(date +%s) - LAST_FETCH)) -gt ${FETCH_INTERVAL} ] ||
 [ $(wc -c ~/.cfg/${REPO}/FETCH_HEAD | cut -d' ' -f1) -eq 0 ]; then
-vault-sshadd ${SSH_KEY} 2>&1 >/dev/null || { echo "Unable to load key ${SSH_KEY}"; exit 1; }
+vault-sshadd ${SSH_KEY} 2>&1 >/dev/null || {
+echo "Unable to load key ${SSH_KEY}"
+exit 1
+}
 gwrap fetch -q origin master
 fi
 
 LOCAL=$(gwrap rev-parse HEAD)
 REMOTE=$(gwrap rev-parse FETCH_HEAD)
 
-if ! [[ "${LOCAL}" = "${REMOTE}" ]]; then
+if ! [[ ${LOCAL} == "${REMOTE}" ]]; then
 NEED_UPDATE+=("'${REPO}'")
 fi
 done

@@ -4,7 +4,7 @@ set -euo pipefail
 if [ -e "${HOME}/bin/script_framework.sh" ]; then
 source "${HOME}/bin/script_framework.sh"
 else
-function step { echo $@; }
+function step() { echo $@; }
 fi
 
 step "Removing containers created / exited >= ~1h ago..."

@@ -34,11 +34,14 @@ shift $((OPTIND-1))
 if [ -e ${HOME}/bin/script_framework.sh ]; then
 source ${HOME}/bin/script_framework.sh
 else
-function step { echo "$@"; }
-function fatal { echo "$@"; exit 1; }
+function step() { echo "$@"; }
+function fatal() {
+echo "$@"
+exit 1
+}
 fi
 
-function config {
+function config() {
 git --git-dir="${HOME}/.cfg/${repo_name}" --work-tree="${HOME}" $@
 }
 

@@ -6,8 +6,8 @@ if ! [ -e Makefile ]; then
 exit 0
 fi
 
-TARGETS=$(make -pRrq : 2>/dev/null | \
-awk -v RS= -F: '/^# File/,/^# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' | \
+TARGETS=$(make -pRrq : 2>/dev/null |
+awk -v RS= -F: '/^# File/,/^# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' |
 sort | egrep -v -e '^[^[:alnum:]]' | sed 's/:$//' | xargs)
 
 if (echo $TARGETS | grep -q "auto-hook-${HOOKTYPE}"); then

@@ -4,11 +4,11 @@
 
 startpath=$(pwd)
 
-for repo in $(find . -name ".git")
-do
+for repo in $(find . -name ".git"); do
 repodir=$(dirname $repo)
 reponame=$(basename $repodir)
-cd $repodir; LANG=C git status | grep -q "nothing to commit"
+cd $repodir
+LANG=C git status | grep -q "nothing to commit"
 state=$?
 cd $startpath
 echo -n "$reponame: "

@@ -1,10 +1,13 @@
 #!/bin/bash
 set -euo pipefail
 
-function require_gotool {
+function require_gotool() {
 toolname=$(basename $1)
 if ! (which ${toolname} >/dev/null 2>&1); then
-go version || { echo "${basename} not found and no usable go environment"; exit 1; }
+go version || {
+echo "${basename} not found and no usable go environment"
+exit 1
+}
 go get -u $1
 fi
 }
@@ -23,7 +26,10 @@ step "Vault is not authenticated, trying to authenticate... "
 rm -f "${HOME}/.vault-token"
 
 VUT=$(pgrep -f vault-user-token || echo "" | xargs)
-[ -n "$VUT" ] && { step "Killing old vault-user-token processes..."; kill ${VUT}; }
+[ -n "$VUT" ] && {
+step "Killing old vault-user-token processes..."
+kill ${VUT}
+}
 
 # Start new vault-user-token daemon
 vault-user-token --full-hostname=false >/dev/null 2>&1 &

@@ -8,7 +8,7 @@ source ${HOME}/bin/script_framework.sh
 [ $# -lt 1 ] && fail "You need to supply at least password as argument"
 
 # Check against online API using range request not to disclose the password hash
-function check_password {
+function check_password() {
 checksum=$(echo -n "${1}" | sha1sum | tr 'a-z' 'A-Z')
 curl -s https://api.pwnedpasswords.com/range/${checksum:0:5} |
 awk -F: "/${checksum:5:35}/{ print \$2 }" | tr -d '\n\r'

@@ -4,27 +4,27 @@ COLOR_CYAN="\033[0;36m"
 COLOR_YELLOW="\033[0;33m"
 COLOR_PLAIN="\033[0m"
 
-function error {
+function error() {
 echo -e "${COLOR_RED}$@${COLOR_PLAIN}"
 }
 
-function fail {
+function fail() {
 error "$@"
 exit 1
 }
 
-function info {
+function info() {
 echo -e "${COLOR_CYAN}$@${COLOR_PLAIN}"
 }
 
-function step {
+function step() {
 info "[$(date +%H:%M:%S)] $@"
 }
 
-function success {
+function success() {
 echo -e "${COLOR_GREEN}$@${COLOR_PLAIN}"
 }
 
-function warn {
+function warn() {
 echo -e "${COLOR_YELLOW}$@${COLOR_PLAIN}"
 }

@@ -6,8 +6,9 @@ set -e
 BASE_PATH=/tmp/system-audit
 GIT_DIR=/var/local/system-audit
 
-function collect_hashes {
-target=$1; shift
+function collect_hashes() {
+target=$1
+shift
 for dir in $@; do
 if ! [ -e "${dir}" ]; then
 echo "${dir}" >>${BASE_PATH}/missing
@@ -23,7 +24,7 @@ function collect_hashes {
 fi
 }
 
-function wrap_git {
+function wrap_git() {
 git --work-tree=${BASE_PATH} --git-dir=${GIT_DIR} "$@"
 return $?
 }

@@ -3,7 +3,7 @@
 set -e
 set -o pipefail
 
-function unexpose {
+function unexpose() {
 expose -d 8888
 }
 trap unexpose EXIT

@@ -5,7 +5,6 @@ if ! ( which vault > /dev/null ); then
 exit 2
 fi
 
-
 # If we can list the environments there is no need to unlock the database
 if (awsenv list >/dev/null 2>&1); then
 echo "Database already unlocked."
@@ -38,4 +37,3 @@ else
 echo "Found passphrase but could not unlock database."
 exit 1
 fi
-

bin/venv

@@ -7,4 +7,3 @@ set -o pipefail
 source .venv/bin/activate
 
 [ -f requirements.txt ] && pip install -r requirements.txt
-