add local scripts
This commit is contained in:
parent 2258a1256d
commit 4f19253c3f
37 changed files with 1026 additions and 0 deletions
73 bin/avhcd2x264 Normal file
@@ -0,0 +1,73 @@
#!/bin/bash
#
set -e

# loglevels: quiet, panic, fatal, error, warning, info, verbose, debug
LOGLEVEL=info

# set nice level, 20 lowest, -20 highest
NICE="nice -n 19"

# one pass or two pass encoding
PASSES="2"

###
###
###

if ! [ -f "$1" ]
then
  echo "\"$1\" is not a file. Nothing to convert."
  exit
fi

trap cleanup EXIT
cleanup() {
  set +e
  rm -f "${PASSLOGFILE}-0.log" "${PASSLOGFILE}-0.log.mbtree" "${TEMPFILE}"
}

# get creation date
TIMESTAMP=$(stat -l -t "%Y-%m-%d %H:%M:%S" "$1" | awk '{ print $6" "$7 }')
# get bitrate
BITRATE=$(ffmpeg -i "$1" 2>&1 | grep bitrate | awk '{print $6}')

# set task priority to low
NICE="nice -n 19"

EXT="mp4"

DIRECTORY=$(dirname "$1")
FILENAME=$(basename "$1")
NOW=$(date "+%Y%m%d_%H%M%S")
TEMPFILE="$DIRECTORY/.${FILENAME%.*}_${NOW}_ffmpeg.${EXT}"

#METADATA="-metadata creation_time=\"$TIMESTAMP\""

if [ "$PASSES" == "1" ]
then
  # 1 pass encoding
  $NICE ffmpeg -i "$1" -threads auto -loglevel "$LOGLEVEL" -vcodec libx264 -b:v "${BITRATE}k" -vf yadif=1 -acodec libfaac -ab 192k -ar 48000 -sn -metadata creation_time="$TIMESTAMP" -f mp4 -y "${TEMPFILE}"
else
  # 2 pass encoding
  PASSLOGFILE="$DIRECTORY/.${FILENAME%.*}_${NOW}_ffmpeg"
  $NICE ffmpeg -i "$1" -threads auto -loglevel "$LOGLEVEL" -vcodec libx264 -b:v "${BITRATE}k" -vf yadif=1 -pass 1 -passlogfile "$PASSLOGFILE" -an -sn -f rawvideo -y /dev/null
  $NICE ffmpeg -i "$1" -threads auto -loglevel "$LOGLEVEL" -vcodec libx264 -b:v "${BITRATE}k" -vf yadif=1 -pass 2 -passlogfile "$PASSLOGFILE" -acodec libfaac -ab 192k -ar 48000 -sn -metadata creation_time="$TIMESTAMP" -f mp4 -y "${TEMPFILE}"
  rm "${PASSLOGFILE}-0.log"
  rm "${PASSLOGFILE}-0.log.mbtree"
fi

touch -r "$1" "${TEMPFILE}"

if [ -f "${1%.*}.${EXT}" ]
then
  mv -f "${TEMPFILE}" "${1%.*}_${NOW}.${EXT}"
  echo
  echo "File ${1%.*}.${EXT} already exists, moved to: ${1%.*}_${NOW}.${EXT}"
  echo
else
  mv -i "${TEMPFILE}" "${1%.*}.${EXT}"
fi

exit
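A possible invocation (the input path is made up): the script takes a single camera clip as argument and, with PASSES="2", runs the two-pass branch to produce a deinterlaced x264 MP4 next to the source file:
$ avhcd2x264 /path/to/clip.MTS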
14 bin/backup Executable file
@@ -0,0 +1,14 @@
#!/bin/bash -e

if [ -e /Volumes ]; then
  SHACHECK="2688057d3ddc1b9b5a5a4309f89a2bcb57cc033b /Volumes/Duplicity/mountcheck"
  if ! ( echo "${SHACHECK}" | shasum -cs ); then
    echo "/Volumes/Duplicity does not seem to be mounted or wrong volume"
    exit 1
  fi
fi

CMD=${1:-backup}

exec vault2env /secret/private/duplicity/$(hostname -s) -- \
  duplicity-backup ${CMD}
40 bin/clean-keys Executable file
@@ -0,0 +1,40 @@
#!/bin/bash
# clean_keyring.sh - clean up all the excess keys

# my key should probably be the first secret key listed
mykey=$(gpg --list-secret-keys | grep '^sec' | cut -c 13-20)
if [ -z "$mykey" ]; then
  # exit if no key string
  echo "Can't get user's key ID"
  exit 1
fi

# all of the people who have signed my key
mysigners=$(gpg --list-sigs $mykey | grep '^sig' | cut -c 14-21 | sort -u)

# keep also the keys of persons I'm tracking on keybase.io
keybase_tracks=""
for person in $(keybase list-tracking); do
  id=$(keybase id $person 2>&1 | grep "public key fingerprint" | cut -d ':' -f 2 | cut -c 41-50 | sed "s/ //g")
  keybase_tracks="$keybase_tracks $id"
done

# keep all of the signers, plus my key (if I haven't self-signed)
keepers=$(echo $mykey $mysigners $keybase_tracks | tr ' ' '\012' | sort -u)

# the keepers list in egrep syntax: ^(key|key|…)
keepers_egrep=$(echo $keepers | sed 's/^/^(/; s/$/)/; s/ /|/g;')

# show all the keepers as a comment so this script's output is shell-able
echo '# Keepers: ' $keepers

# everyone who isn't on the keepers list is deleted
deleters=$(gpg --list-keys | grep '^pub' | cut -c 13-20 | egrep -v ${keepers_egrep})

# echo the command if there are any to delete
# command is interactive
if [ -z "$deleters" ]; then
  echo "# Nothing to delete!"
else
  echo 'gpg --delete-keys' $deleters
fi
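A sketch of how this is meant to be used (the key IDs below are placeholders): the script only prints the cleanup command, so you review it and run it yourself:
$ clean-keys
# Keepers:  AAAAAAAA BBBBBBBB
gpg --delete-keys CCCCCCCC DDDDDDDD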
4 bin/cleanup_directories Executable file
@@ -0,0 +1,4 @@
#!/bin/bash

find /Users/luzifer/Downloads ! -name '.*' -mtime +12h -exec mv "{}" /Users/luzifer/.Trash/ \;
find /Users/luzifer/.Trash ! -name '.*' -mtime +2d -exec rm -rf "{}" \;
3 bin/docker-clean Executable file
@@ -0,0 +1,3 @@
#!/bin/bash

docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /etc:/etc spotify/docker-gc
20 bin/edgedist.py Executable file
@@ -0,0 +1,20 @@
#!/usr/bin/env python

import json, sys, urllib

data = json.loads(open(sys.argv[1], 'r').read())

keys = sorted(data.keys())
last = data[keys[0]]
distsum = 0

for key in keys[1:]:
    start = '%s,%s' % tuple(last)
    end = '%s,%s' % tuple(data[key])
    url = 'http://maps.googleapis.com/maps/api/directions/json?origin=%s&destination=%s&sensor=false' % (start, end)
    result = json.loads(urllib.urlopen(url).read())
    dist = result['routes'][0]['legs'][0]['distance']['value']
    distsum = distsum + dist
    last = data[key]

print 'Distance of list: %.2f km' % (float(distsum) / 1000.0)
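The expected input format is only implied by the code: a JSON object whose keys sort into visiting order and whose values are [lat, lon] pairs. A sketch with a made-up file name and coordinates:
$ cat > waypoints.json <<'EOF'
{"2016-01-01T10:00": [52.5200, 13.4050], "2016-01-01T14:00": [53.5511, 9.9937]}
EOF
$ edgedist.py waypoints.json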
23 bin/exportstickies Executable file
@@ -0,0 +1,23 @@
#!/usr/bin/env python

import json, subprocess, os, re, HTMLParser

exportcmd = 'plutil -convert json -o - %s' % os.path.expanduser('~/Library/Preferences/widget-com.apple.widget.stickies.plist')
out = json.loads(subprocess.check_output(exportcmd.split(' ')))

stickies = []
htmlparser = HTMLParser.HTMLParser()

for key in out.keys():
    if not '-data' in key:
        continue

    html = out[key]
    text = re.sub('\n+', '\n', '\n'.join(re.split(r'<[^>]*div>', html))).strip()
    text = '\n'.join(re.split(r'<br[^>]*>\n?', text))
    lines = text.split('\n')
    lines = map((lambda parm: htmlparser.unescape(parm).strip()), lines)
    text = '\n'.join(lines).strip()
    stickies.append(text)

print '\n\n=====\n\n'.join(stickies)
5 bin/fix-ssh Executable file
@@ -0,0 +1,5 @@
#!/bin/bash

for host in $@; do
  grep -v ${host} ~/.ssh/known_hosts > ~/.ssh/known_hosts.tmp && mv ~/.ssh/known_hosts.tmp ~/.ssh/known_hosts
done
77 bin/gen-dockerfile-1.6 Executable file
@@ -0,0 +1,77 @@
#!/bin/bash

PWD=$(pwd)
PACKAGE=${PWD/${GOPATH}\/src\//}
BINARY=$(basename ${PACKAGE})

GIT_NAME=$(git config --get user.name)
GIT_MAIL=$(git config --get user.email)

if [ -e "${PWD}/Godeps/_workspace/src" ]; then
  OVERRIDE_GOPATH="/go:/go/src/${PACKAGE}/Godeps/_workspace"
fi

EXPOSE="EXPOSE"

while getopts ":e:t:v:" opt; do
  case $opt in
    e)
      EXPOSE="${EXPOSE} ${OPTARG}"
      ;;
    t)
      TIMEZONE=${OPTARG}
      ;;
    v)
      VOLUME="${VOLUME}, \"${OPTARG}\""
      ;;
    :)
      echo "Option -$OPTARG requires an argument." >&2
      exit 1
      ;;
  esac
done

# Header
cat <<EOF
FROM golang:alpine

MAINTAINER ${GIT_NAME} <${GIT_MAIL}>

EOF

# Support old (pre-1.6) Godep style
[ -z "${OVERRIDE_GOPATH}" ] || echo -e "ENV GOPATH ${OVERRIDE_GOPATH}\n"

# Allow setting timezone using `-t Europe/Berlin`
[ -z "${TIMEZONE}" ] || cat <<EOF
RUN set -ex \\
 && apk add --update tzdata \\
 && cp /usr/share/zoneinfo/${TIMEZONE} /etc/localtime \\
 && echo "${TIMEZONE}" > /etc/timezone \\
 && apk del --purge tzdata

EOF

# Build binary
cat <<EOF
ADD . /go/src/${PACKAGE}
WORKDIR /go/src/${PACKAGE}

RUN set -ex \\
 && apk add --update git ca-certificates \\
 && go install -ldflags "-X main.version=\$(git describe --tags || git rev-parse --short HEAD || echo dev)" \\
 && apk del --purge git

EOF

# Allow exposing ports using `-e 3000`
( test "EXPOSE" != "${EXPOSE}" ) && echo -e "${EXPOSE}\n"

# Allow adding volumes using `-v <path>`
[ -z "${VOLUME}" ] || echo -e "VOLUME [${VOLUME/, /}]\n"

# Execution information
cat <<EOF
ENTRYPOINT ["/go/bin/${BINARY}"]
CMD ["--"]
EOF
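A hedged usage sketch, run from inside a Go project below ${GOPATH}/src (the package path is hypothetical; the flags mirror the comments in the script):
$ cd ${GOPATH}/src/github.com/example/mytool
$ gen-dockerfile-1.6 -t Europe/Berlin -e 3000 -v /data > Dockerfile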
10 bin/git-c Executable file
@@ -0,0 +1,10 @@
#!/bin/bash -e

vault-gpg $(grep default-key ~/.gnupg/gpg.conf | cut -d ' ' -f 2)
git commit -S -v $@

PROD_TASK=$(habitica https://habitica.com/api/v3/tasks/user | jq -r '.data | map(select(.text=="productivity").id)[0]')

if ! ( habitica -sS -o /dev/null -X POST https://habitica.com/api/v3/tasks/${PROD_TASK}/score/up ); then
  echo "Scoring failed."
fi
5 bin/git-pot Executable file
@@ -0,0 +1,5 @@
#!/bin/bash -ex

CURRENT_BRANCH=$(git branch --list | grep '^\*' | cut -d ' ' -f 2)

exec git push origin ${CURRENT_BRANCH} --tags
25 bin/git-ps Executable file
@@ -0,0 +1,25 @@
#!/bin/bash

STEP_COLOR="\033[0;36m"
NO_COLOR="\033[0m"

function step {
  echo -e ${STEP_COLOR}[$(date +%H:%M:%S)] $1${NO_COLOR}
}

step "Fetching data from remote..."
for remote in $(git remote -v | awk '{print $1}' | sort | uniq); do
  step "+++ Remote: '${remote}'"
  git fetch -p ${remote}
done

step "Rebasing branch / updating submodules..."
git pull --rebase && git submodule update --init --recursive

step "Cleaning local branches..."
for branch in $(git branch --merged | grep -v '^*'); do
  # Do not delete master as the main branch
  if ( test "${branch}" != "master" ); then
    git branch -d ${branch}
  fi
done
3 bin/git-pushall Executable file
@@ -0,0 +1,3 @@
#!/bin/bash

exec git push origin '+refs/heads/*:refs/heads/*' '+refs/tags/*:refs/tags/*'
16 bin/git_status_recursive Executable file
@@ -0,0 +1,16 @@
#!/bin/bash

[ -e ~/.bashcolors ] && . ~/.bashcolors

startpath=$(pwd)

for repo in $(find . -name ".git")
do
  repodir=$(dirname $repo)
  reponame=$(basename $repodir)
  cd $repodir; LANG=C git status | grep -q "nothing to commit"
  state=$?
  cd $startpath
  echo -n "$reponame: "
  [ $state -eq 0 ] && echo -e "${PR_GREEN}Up-2-date${PR_NC}" || echo -e "${PR_BR_RED}Changes available${PR_NC}"
done
200 bin/gpx2kml Executable file
@@ -0,0 +1,200 @@
#!/usr/bin/python
#
# Copyright (c) 2007 Brian "Beej Jorgensen" Hall
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#

import sys
import os.path
import xml.dom.minidom

symbolMap = {
    "Dot":"wpt_dot",
    "Amusement Park":"amuse_pk",
    "Ball Park":"ball",
    "Bank":"dollar",
    "Bar":"mug",
    "Campground":"camp",
    "Large City":"lrg_cty",
    "Medium City":"med_cty",
    "Small City":"sml_cty",
    "Convenience Store":"gas_plus",
    "Danger Area":"skull",
    "Dangerous Area":"danger",
    "Department Store":"store",
    "Drinking Water":"drinking_wtr",
    "Fast Food":"fastfood",
    "Fishing Area":"fish",
    "Fitness Center":"fitness",
    "Gas Station":"fuel",
    "Glider Area":"glider",
    "Mine":"mine",
}

class AppInfo:
    def __init__(self, argv):
        self.scriptname = os.path.basename(argv.pop(0))
        self.outfilename = None
        self.infilename = None

        self.infile = None
        self.outfile = None

        while len(argv) > 0:
            if argv[0] == "-h" or argv[0] == "--help" or argv[0] == "-?":
                self.usageExit()

            elif argv[0] == "-o":
                argv.pop(0)
                if len(argv) == 0: self.usageExit()
                self.outfilename = argv[0]

            elif self.infilename == None:
                self.infilename = argv[0]

            argv.pop(0)

        if self.infilename == None:
            self.infile = sys.stdin
            self.infilename = "<stdin>"
        else:
            try:
                self.infile = file(self.infilename, "r")
            except IOError, (no, str):
                self.errorExit("error opening %s: %s" % \
                    (self.infilename, str), 2)

        try:
            self.dom = xml.dom.minidom.parse(self.infile)
        except Exception, (str):
            self.errorExit("%s: %s" % (self.infilename, str), 4)

        if self.outfilename == None:
            self.outfile = sys.stdout
            self.outfilename = "<stdout>"
        else:
            try:
                self.outfile = file(self.outfilename, "w")
            except IOError, (no, str):
                self.errorExit("error opening %s: %s" % \
                    (self.outfilename, str), 3)

    def __del__(self):
        if self.infile != None: self.infile.close()
        if self.outfile != None: self.outfile.close()

    def errorExit(self, str, status=1):
        sys.stderr.write("%s: %s\n" % (self.scriptname, str))
        sys.exit(status)

    def usageExit(self):
        sys.stderr.write("usage: %s [-o outfile] [infile]\n" % self.scriptname)
        sys.exit(1)

def getTextBelow(node, subnodename, handleNone=False):
    subnodes = node.getElementsByTagName(subnodename)
    if subnodes != []:
        return subnodes[0].firstChild.data

    if handleNone: return ""

    return None

def symTranslate(name):
    if not symbolMap.has_key(name): name = "Dot"
    return symbolMap[name]

def main(argv):
    ai = AppInfo(argv)

    gpx = ai.dom.firstChild

    waypoints = gpx.getElementsByTagName("wpt")

    ai.outfile.write("""<?xml version="1.0" encoding="UTF-8"?>
<!-- Generated by gpx2kml -->
<kml xmlns="http://earth.google.com/kml/2.1">
\t<Document>
\t\t<Style id="trackStyle">
\t\t\t<LineStyle>
\t\t\t\t<color>7fff0000</color>
\t\t\t\t<width>4</width>
\t\t\t</LineStyle>
\t\t</Style>
""")

    if waypoints != []:
        for wp in waypoints:
            lat = wp.getAttribute("lat")
            lon = wp.getAttribute("lon")
            name = getTextBelow(wp, "name", True)
            sym = getTextBelow(wp, "sym")
            desc = getTextBelow(wp, "desc")
            ele = getTextBelow(wp, "ele")

            ai.outfile.write('\t\t<Placemark>\n')
            ai.outfile.write('\t\t\t<name>%s</name>\n' % name)
            if desc != '' and desc != None:
                ai.outfile.write('\t\t\t<description>%s</description>\n' \
                    % desc)
            ai.outfile.write("\t\t\t<styleUrl>#trackStyle</styleUrl>\n")
            ai.outfile.write('\t\t\t<Point>\n')
            ai.outfile.write('\t\t\t\t<coordinates>')
            ai.outfile.write('%s,%s</coordinates>\n' % (lon, lat))
            ai.outfile.write('\t\t\t</Point>\n')

            ai.outfile.write('\t\t</Placemark>\n')

    tracks = gpx.getElementsByTagName("trk")

    if tracks != []:
        for trk in tracks:

            name = getTextBelow(trk, "name", True)
            desc = getTextBelow(trk, "desc")

            ai.outfile.write('\t\t<Placemark>\n')
            ai.outfile.write('\t\t\t<name>%s</name>\n' % name)
            if desc != '' and desc != None:
                ai.outfile.write('\t\t\t<description>%s</description>\n' \
                    % desc)
            ai.outfile.write("\t\t\t<styleUrl>#trackStyle</styleUrl>\n")
            ai.outfile.write("\t\t\t<LineString>\n")
            ai.outfile.write("\t\t\t\t<coordinates>\n")

            for seg in trk.getElementsByTagName("trkseg"):
                for trkpt in seg.getElementsByTagName("trkpt"):
                    lat = trkpt.getAttribute("lat")
                    lon = trkpt.getAttribute("lon")
                    ele = getTextBelow(trkpt, "ele")

                    ai.outfile.write("\t\t\t\t\t%s,%s\n" % (lon, lat))

            ai.outfile.write("\t\t\t\t</coordinates>\n")
            ai.outfile.write("\t\t\t</LineString>\n")
            ai.outfile.write('\t\t</Placemark>\n')

    ai.outfile.write('\t</Document>\n')
    ai.outfile.write('</kml>\n')

    return 0

if __name__ == "__main__": sys.exit(main(sys.argv))
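Per the usage string in the script, a typical call (file names are examples) reads a GPX file and writes KML either to stdout or to the file given with -o:
$ gpx2kml -o track.kml track.gpx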
6 bin/habitica Executable file
@@ -0,0 +1,6 @@
#!/bin/bash

HABITICA_API_TOKEN=$(vault read -field=api_token /secret/private/habitica)
HABITICA_USER_ID=$(vault read -field=user_id /secret/private/habitica)

curl -s -H "x-api-key: ${HABITICA_API_TOKEN}" -H "x-api-user: ${HABITICA_USER_ID}" "$@" <&0
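bin/git-c below uses this wrapper to call the Habitica API; the same pattern works standalone, for example:
$ habitica https://habitica.com/api/v3/tasks/user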
56 bin/instacopy Executable file
@@ -0,0 +1,56 @@
#!/opt/local/bin/python2.7

# Install: easy_install gdata

# Config:

google_email = 'configure@me.com'
google_passwd = 'yoursecretpassword'

instagram_token = 'some instagram_token'

# Code:

import gdata.photos.service, gdata.photos
import gdata.media
import gdata.geo
import json, tempfile, sys, urllib, os

gd_client = gdata.photos.service.PhotosService()
gd_client.email = google_email
gd_client.password = google_passwd
gd_client.source = 'InstaCopy v0.1'
gd_client.ProgrammaticLogin()

album = gd_client.InsertAlbum(title='Instagram', summary='EditMe')
album_url = '/data/feed/api/user/default/albumid/%s' % album.gphoto_id.text

url = 'https://api.instagram.com/v1/users/self/media/recent?access_token=%s&count=100' % (instagram_token)
images = json.loads(urllib.urlopen(url).read())

if not 'data' in images:
    sys.exit(0)

i = 0

for image in images['data']:
    i = i + 1
    if not image['type'] == 'image':
        continue

    _, temp_path = tempfile.mkstemp(suffix='.jpg')
    f = open(temp_path, 'w')
    f.write(urllib.urlopen(image['images']['standard_resolution']['url']).read())
    f.close()

    photo = gd_client.InsertPhotoSimple(album_url, image['link'].split('/')[4], image['caption']['text'], temp_path, content_type='image/jpeg')
    photo.timestamp = gdata.photos.Timestamp(text=str(int(image['caption']['created_time']) * 1000))
    try:
        gd_client.UpdatePhotoMetadata(photo)
    except:
        pass
    os.unlink(temp_path)

    sys.stdout.write('Transferred %d of %d...\r' % (i, len(images['data'])))
    sys.stdout.flush()
47 bin/issh Executable file
@@ -0,0 +1,47 @@
#!/usr/bin/env python2

import os, sys, argparse, boto.ec2

def main():
    parser = argparse.ArgumentParser(description='Searches instance IP and connects via SSH')
    parser.add_argument('-p', type=int, default=22, metavar='port', help='Port to connect to (default 22)')
    parser.add_argument('-u', type=str, default=None, metavar='user', help='User to use for connection (default current username)')
    parser.add_argument('instance_id', type=str, help='ID of the instance in format "i-XXXXX"')
    args = parser.parse_args()

    if None in [os.getenv('EC2_REGION'), os.getenv('AWS_ACCESS_KEY_ID'), os.getenv('AWS_SECRET_ACCESS_KEY')]:
        print 'Please provide these environment variables: EC2_REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY'
        sys.exit(1)

    if os.getenv('EC2_REGION') not in [r.name for r in boto.ec2.regions()]:
        print 'Region "{}" derived from environment variable EC2_REGION is not a valid region.'.format(os.getenv('EC2_REGION'))
        sys.exit(1)

    ec2_connection = boto.ec2.connect_to_region(os.getenv('EC2_REGION'),
                                                aws_access_key_id=os.getenv('AWS_ACCESS_KEY_ID'),
                                                aws_secret_access_key=os.getenv('AWS_SECRET_ACCESS_KEY'))

    instances = ec2_connection.get_only_instances([args.instance_id])
    if len(instances) != 1:
        print 'Did not find a single instance for ID {}'.format(args.instance_id)
        sys.exit(1)
    instance = instances[0]

    if instance.private_ip_address is not None:
        _call_ssh(instance.private_ip_address, args.u, args.p)
    else:
        _call_ssh(instance.ip_address, args.u, args.p)

def _call_ssh(ip, user=None, port=None):
    args = ['/usr/bin/ssh']
    if port is not None:
        args.append('-p {}'.format(port))
    if user is not None:
        args.append('{}@{}'.format(user, ip))
    else:
        args.append('{}'.format(ip))

    os.execv('/usr/bin/ssh', args)

if __name__ == '__main__':
    main()
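A usage sketch; the region is an example, the credentials are left elided, and the instance ID placeholder comes from the script's own help text:
$ EC2_REGION=eu-west-1 AWS_ACCESS_KEY_ID=… AWS_SECRET_ACCESS_KEY=… issh -u admin -p 22 i-XXXXX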
13 bin/local-vault-auth Executable file
@@ -0,0 +1,13 @@
#!/bin/bash

if (vault status | grep -q "Sealed: true"); then
  echo -n "Vault is sealed, trying to unseal... "
  UNSEAL_TOKEN=$(lpass show --field=Passphrase "vault-unseal")
  vault unseal "${UNSEAL_TOKEN}" > /dev/null && echo "OK" || echo "FAIL"
fi

if ! (vault token-lookup 1>/dev/null 2>&1); then
  echo -n "Vault is not authenticated, trying to authenticate... "
  AUTH_TOKEN=$(lpass show --field=Passphrase "vault-auth")
  echo "${AUTH_TOKEN}" | vault auth - > /dev/null && echo "OK" || echo "FAIL"
fi
5 bin/ls-unsafe-ssh Executable file
@@ -0,0 +1,5 @@
#!/bin/bash

for key in $(find . -type f | grep -vE "\.(pub|pem)" | grep -vE '(config|authorized_keys|known_hosts)' | sort); do
  grep -q ENCRYPTED ${key} || echo "Key unsafe: ${key}"
done
22 bin/migrate-godeps-1.6 Executable file
@@ -0,0 +1,22 @@
#!/bin/bash

PWD=$(pwd)

if ! [ -e "${PWD}/Godeps/_workspace/src" ]; then
  echo "Path ${PWD}/Godeps/_workspace/src not found, stopping."
  exit 1
fi

if [ -e "${PWD}/vendor" ]; then
  echo "You already have a vendor directory, stopping."
  exit 1
fi

set -ex

mv "${PWD}/Godeps/_workspace/src" "${PWD}/vendor"
rm -rf "${PWD}/Godeps/_workspace"
godep update -goversion

git add vendor
git rm -rf Godeps/_workspace
5 bin/pbfold Executable file
@@ -0,0 +1,5 @@
#!/bin/bash

LEN=${1:-72}

pbpaste | fold -sw $LEN | pbcopy
7 bin/pdfsort Executable file
@@ -0,0 +1,7 @@
#!/bin/bash

for filename in *.pdf; do
  target=$(echo ${filename} | sed 's/^\(\([0-9]\{4\}\)\([0-9]\{2\}\).*\)$/\2\/\3\/\1/')
  mkdir -p $(dirname ${target})
  mv ${filename} ${target}
done
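Worked example of the sed expression (the file name is made up): a PDF whose name starts with YYYYMM is sorted into a year/month directory:
$ pdfsort    # 20160105_invoice.pdf -> 2016/01/20160105_invoice.pdf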
5 bin/read_timestamp Executable file
@@ -0,0 +1,5 @@
#!/usr/bin/env python
import sys
from email.Utils import formatdate

print formatdate(float(sys.argv[1]))
18 bin/short_path Executable file
@@ -0,0 +1,18 @@
#!/usr/bin/env python

import os

path = os.getcwd()
path = path.replace(os.environ["HOME"], "~")

path_parts = path.split("/")
path = ""
for part in path_parts[:-2]:
    if len(part) > 0:
        path = "{}/{}".format(path, part[0])
    else:
        path = "{}/{}".format(path, part)

path = "{}/{}".format(path, "/".join(path_parts[-2:]))

print path[1:]
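A worked example with a hypothetical working directory: the script keeps the last two path components and shortens every earlier one to its first character:
$ cd ~/gocode/src/github.com/foo/bar && short_path
~/g/s/g/foo/bar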
47 bin/ssh-knownhosts-cleanup.py Executable file
@@ -0,0 +1,47 @@
#!/usr/bin/env python3
# encoding: utf-8
# By Joonas Kuorilehto 2011, MIT license
# https://gist.github.com/joneskoo/1306614
#
# The script combines .ssh/known_hosts so that each fingerprint is only
# listed once.

import re
import sys
import os
import shutil

HOME = os.environ['HOME']
KNOWN_HOSTS = os.path.join(HOME, '.ssh', 'known_hosts')

# Backup known hosts file
shutil.copyfile(KNOWN_HOSTS, KNOWN_HOSTS+".old")

# Read known hosts to memory
with open(KNOWN_HOSTS) as f:
    knownhosts = dict()
    oldlines = 0
    for line in f:
        if line.strip() == "" or line.strip().startswith("#"):
            continue
        oldlines += 1
        hosts, keytype, fingerprint = line.strip().split(" ")
        dictkey = keytype + fingerprint
        hosts = hosts.split(",")
        if knownhosts.get(dictkey) == None:
            knownhosts[dictkey] = dict(hosts=set(), keytype=keytype,
                                       fingerprint=fingerprint)
        knownhosts[dictkey]['hosts'].update(hosts)

lines = []
for key, host in knownhosts.items():
    host['hosts_joined'] = ",".join(sorted(host['hosts'], reverse=True))
    lines.append("%(hosts_joined)s %(keytype)s %(fingerprint)s" % host)

# Replace known hosts with a cleaned version
with open(KNOWN_HOSTS, 'w') as f:
    f.write("\n".join(sorted(lines)))
    f.write("\n")

print("OK. Cleaned up", KNOWN_HOSTS)
print("Change: from %d lines to %d lines." % (oldlines, len(knownhosts)))
22 bin/sslcert Executable file
@@ -0,0 +1,22 @@
#!/bin/bash

selfsigned=false

while getopts s opt; do
  case $opt in
    s)
      selfsigned=true
      ;;
  esac
done

shift $(( OPTIND - 1 ))

domain=$1

openssl genrsa -out ${domain}.key 2048
openssl req -new -sha256 -key ${domain}.key -out ${domain}.csr

if $selfsigned; then
  openssl x509 -req -days 365 -in ${domain}.csr -signkey ${domain}.key -out ${domain}.crt
fi
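Usage sketch with a placeholder domain: without -s this leaves ${domain}.key and ${domain}.csr; with -s it additionally writes a self-signed ${domain}.crt:
$ sslcert -s example.com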
3 bin/supervisorctl Executable file
@@ -0,0 +1,3 @@
#!/bin/bash

exec /usr/local/bin/supervisorctl -c /Users/luzifer/config/supervisord/supervisord.conf "$@"
9 bin/tmux-reboot-required Executable file
@@ -0,0 +1,9 @@
#!/bin/bash

if ! [ -f /var/run/reboot-required ]; then
  exit
fi

PKG_COUNT=$(wc -l /var/run/reboot-required.pkgs | awk '{ print $1 }')

echo -n "Reboot required (${PKG_COUNT} pkgs)"
16 bin/traveltime Executable file
@@ -0,0 +1,16 @@
#!/usr/bin/env python

import urllib, json, sys

if len(sys.argv) < 3:
    print 'Usage: %s <lat,lon> <lat,lon>' % sys.argv[0]
    sys.exit(2)

url = 'http://maps.googleapis.com/maps/api/directions/json?origin=%s&destination=%s&sensor=false' % (sys.argv[1], sys.argv[2])

result = json.loads(urllib.urlopen(url).read())

duration = result['routes'][0]['legs'][0]['duration']['text']
distance = result['routes'][0]['legs'][0]['distance']['text']

print '%s for %s' % (duration, distance)
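A usage sketch; the coordinates are made-up examples, and the output is the Directions API's textual "<duration> for <distance>":
$ traveltime 52.5200,13.4050 53.5511,9.9937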
27 bin/update-gotools Executable file
@@ -0,0 +1,27 @@
#!/bin/bash -ex

export GOPATH="${HOME}/gocode"

# Re-Install gocode
rm -rf "${GOPATH}/bin/gocode"
go get -u github.com/nsf/gocode

# Re-Install gometalinter
rm -rf "${GOPATH}/bin/gometalinter"
go get -u github.com/alecthomas/gometalinter
${GOPATH}/bin/gometalinter --install --update

# Re-Install godebug
rm -rf "${GOPATH}/bin/godebug"
go get -u github.com/mailgun/godebug

# Re-Install godep
rm -rf "${GOPATH}/bin/godep"
go get -u github.com/tools/godep

# Re-Install go-bindata
rm -rf "${GOPATH}/bin/go-bindata"
go get -u github.com/jteeuwen/go-bindata/...

# Ensure vim-go can work
vim +:GoInstallBinaries +:qall
41 bin/vault-awsenv Executable file
@@ -0,0 +1,41 @@
#!/bin/bash

if ! ( which vault > /dev/null ); then
  echo "vault is required."
  exit 2
fi


# If we can list the environments there is no need to unlock the database
if ( awsenv list > /dev/null 2>&1 ); then
  echo "Database already unlocked."
  exit 0
fi

# Retrieve passphrase from Vault
PWD=$(vault read -field=passphrase "/secret/private/awsenv")

# In case Vault exited non-zero we have no password
if ! [ $? -eq 0 ]; then
  echo "Unable to get password. Not trying to unlock."
  exit 1
fi

# Feed the password to awsenv unlock via expect
expect <<EOF >/dev/null
spawn -ignore HUP awsenv unlock
expect "Password: "
send "$PWD\n"
expect "Database unlocked."
expect eof
EOF

# Check whether awsenv was unlocked
if ( awsenv list > /dev/null 2>&1 ); then
  echo "Database unlocked successfully"
  exit 0
else
  echo "Found passphrase but could not unlock database."
  exit 1
fi
bin/vault-gpg
Executable file
32
bin/vault-gpg
Executable file
|
@ -0,0 +1,32 @@
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
KEY=$1
|
||||||
|
|
||||||
|
if [ -z "${KEY}" ] || ! (gpg --list-secret-keys | grep -q ${KEY}); then
|
||||||
|
echo "No key given or no secret key found for '${KEY}'"
|
||||||
|
exit 2
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Read password for this key
|
||||||
|
PWD=$(vault read --field=passphrase "/secret/gpg-key/${KEY}")
|
||||||
|
|
||||||
|
if [ -z "${PWD}" ]; then
|
||||||
|
echo "Could not read passphrase from vault."
|
||||||
|
exit 2
|
||||||
|
fi
|
||||||
|
|
||||||
|
HEXPWD=$(python -c "print '${PWD}'.encode('hex')")
|
||||||
|
|
||||||
|
# Get keygrip of secret key
|
||||||
|
for KEYGRIP in $(gpg2 --with-keygrip -k ${KEY} | grep Keygrip | cut -d '=' -f 2 | xargs); do
|
||||||
|
|
||||||
|
# Set password for keygrip
|
||||||
|
if ! ( gpg-connect-agent -q "PRESET_PASSPHRASE ${KEYGRIP} -1 ${HEXPWD}" /bye >/dev/null 2>&1 ); then
|
||||||
|
echo "An error occurred while caching password in GPG agent"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "Successfully cached password in GPG agent"
|
||||||
|
exit 0
|
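bin/git-c above shows the intended invocation, priming the agent for the configured default key:
$ vault-gpg $(grep default-key ~/.gnupg/gpg.conf | cut -d ' ' -f 2)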
12 bin/vault-muttpwd Executable file
@@ -0,0 +1,12 @@
#!/bin/bash

USER=$(vault read -field=gmail_user /secret/private/mutt)
PASS=$(vault read -field=gmail_pass /secret/private/mutt)

cat <<EOM
set imap_user="${USER}"
set imap_pass="${PASS}"

set smtp_url="smtp://${USER}@smtp.gmail.com:587/"
set smtp_pass="${PASS}"
EOM
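One way to consume this (an assumption, not shown in this commit) is mutt's pipe-source syntax, so the credentials never land in the muttrc itself:
source "~/bin/vault-muttpwd|"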
35 bin/vault-rotate-sshkey-passphrase Executable file
@@ -0,0 +1,35 @@
#!/bin/bash -e

keyfile=$1

if [ -z "$keyfile" ] || [ ! -e "${keyfile}" ]; then
  echo "Keyfile not provided or not found: '${keyfile}'"
  exit 1
fi

KEYNAME=$(basename ${keyfile})

OLDPASS=$(vault read -field=passphrase "/secret/ssh-key/${KEYNAME}")

if [ $? -gt 0 ]; then
  echo "Unable to retrieve old passphrase."
  exit 1
fi

NEWPASS=$(password get -l 64)

if [ -z "$NEWPASS" ]; then
  echo "Unable to generate a new passphrase"
  exit 1
fi

vault write "/secret/ssh-key/${KEYNAME}" passphrase="${NEWPASS}"

if ! (ssh-keygen -p -P "${OLDPASS}" -N "${NEWPASS}" -f "${keyfile}"); then
  echo "Key has not been changed successfully. Writing old secret back to vault."
  echo "A backup of the new password has been written to 'tmp_passphrase' attribute."
  vault write "/secret/ssh-key/${KEYNAME}" passphrase="${OLDPASS}" tmp_passphrase="${NEWPASS}"
  exit 1
fi

echo "Everything was fine, key has been changed."
76 bin/vault-sshadd Executable file
@@ -0,0 +1,76 @@
#!/bin/bash

COLOR_RED="\033[0;31m"
COLOR_GREEN="\033[0;32m"
COLOR_CYAN="\033[0;36m"
COLOR_PLAIN="\033[0m"

function error {
  echo -e "${COLOR_RED}$@${COLOR_PLAIN}"
}

function success {
  echo -e "${COLOR_GREEN}$@${COLOR_PLAIN}"
}

function info {
  echo -e "${COLOR_CYAN}$@${COLOR_PLAIN}"
}

if ! ( which vault > /dev/null ); then
  error "vault is required."
  exit 2
fi

# Require something to be passed to this command
if [ $# -eq 0 ]; then
  error "You need to specify a key name."
  exit 2
fi

for KEY_NAME in $@; do
  KEYNAME_IN=${KEY_NAME}

  # Try to find the passed key path / name
  if ! [ -e "${KEY_NAME}" ]; then
    if [ -e "${HOME}/.ssh/${KEY_NAME}" ]; then
      KEY_NAME="${HOME}/.ssh/${KEY_NAME}"
    else
      error "[${KEYNAME_IN}] Could not find key file."
      continue
    fi
  fi

  # If this key is already in the agent we don't need to do anything
  if ( ssh-add -l | grep -q "${KEY_NAME}" ); then
    info "[${KEYNAME_IN}] Key already present."
    continue
  fi

  # Retrieve the passphrase from Vault
  PWD=$(vault read -field=passphrase "/secret/ssh-key/$(basename ${KEY_NAME})")

  # In case Vault exited non-zero we have no password
  if ! [ $? -eq 0 ]; then
    error "[${KEYNAME_IN}] Unable to get password. Not trying to unlock."
    continue
  fi

  # Fill password to ssh-add utility
  expect <<EOF >/dev/null
spawn ssh-add ${KEY_NAME}
expect "Enter passphrase"
send "$PWD\n"
expect eof
EOF

  # Check whether the key was added to the agent
  if ( ssh-add -l | grep -q "${KEY_NAME}" ); then
    success "[${KEYNAME_IN}] Key successfully added."
    continue
  else
    error "[${KEYNAME_IN}] Found passphrase but could not add key."
    continue
  fi

done
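A usage sketch; the key names are hypothetical and are looked up under ~/.ssh/ when they are not given as paths:
$ vault-sshadd id_rsa deploy_key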
4 bin/zsh-refresh-godirs Executable file
@@ -0,0 +1,4 @@
#!/bin/bash

find $HOME -type f -name '.gopath' | sed 's!/.gopath$!!' > /tmp/godirs
mv /tmp/godirs $HOME/.config/godirs