# public bashrc snippets
## from CoffeeOps Slack, Rob Evans
# History behaviour — see HISTCONTROL / HISTSIZE / HISTFILESIZE in bash(1):
# skip duplicate commands and commands starting with a space, append to the
# history file rather than overwriting it on shell exit, and keep a generous
# amount of history both in memory and on disk.
HISTCONTROL=ignoreboth
shopt -s histappend
HISTSIZE=100000
HISTFILESIZE=200000
# ip-public: print this machine's public IP address. #courtesy xoraxiom
# Queries two independent resolvers (OpenDNS and Google); prints a single
# address when they agree, or both labelled answers when they differ
# (e.g. split DNS, captive portal, or one resolver misbehaving).
ip-public ()
{
    # Declare and assign separately so dig's exit status isn't masked by 'local'.
    local ip_opendns ip_google
    ip_opendns="$(dig +short myip.opendns.com @resolver2.opendns.com)"
    ip_google="$(dig TXT +short o-o.myaddr.l.google.com @ns2.google.com | awk -F '"' '{print $2}')"
    # Proper if/else: the original 'cond && {…} || {…}' would fall through to
    # the || branch if anything in the && branch returned non-zero.
    if [[ "${ip_opendns}" == "${ip_google}" ]]; then
        echo "${ip_opendns}"
    else
        echo "OpenDNS: ${ip_opendns}"
        echo "GoogleDNS: ${ip_google}"
    fi
}
# du-list: write a sorted, human-readable disk-usage snapshot of a folder's
# immediate contents to <folder>/du.list, so you can later find out
# "how big was this folder last time I checked". Defaults to the current dir.
# Usage: du-list [foldername]
# Returns 0 on success, 1 if the folder does not exist.
du-list ()
{
    local folder="${1:-.}"
    # The original 'if unset $1' never tested for a missing argument: for a
    # simple-word arg it succeeded (always printing usage) and for a path it
    # just errored. Validate the folder instead; $1 is optional anyway.
    if [[ ! -d "${folder}" ]]; then
        # ${FUNCNAME[0]}, not $0: inside a sourced function $0 is the shell name.
        echo "Usage: ${FUNCNAME[0]} [foldername]" >&2
        return 1
    fi
    # Glob deliberately unquoted outside the variable so /* still expands.
    du -hs "${folder}"/* | sort -h | tee "${folder}/du.list"
    return 0
}
# Where's the bottleneck? Break cURL output up into sections, in an infinite
# loop to check variance between requests (DNS vs TCP vs TLS vs transfer).
# Usage: curlspeed [url]   (defaults to https://google.com)
# Runs forever; stop with Ctrl-C.
function curlspeed() {
  # Quote the URL (the original left ${1:-...} unquoted) and hoist the long
  # write-out format so the curl invocation stays readable.
  local url="${1:-https://google.com}"
  local fmt="\nTiming:\n-~-~-~-~-~-~-~-~-~-~-\nDNS Resolution Time=%{time_namelookup}\nTCP Handshake=%{time_connect}\nSSL Handshake=%{time_appconnect}\ntime_pre_transfer=%{time_pretransfer}\ntime_redirect=%{time_redirect}\ntime_starttransfer=%{time_starttransfer}\nnum_connects=%{num_connects}\n-~- Total Time Spent=%{time_total}\nHTTP Details:\nHTTP Status Code:%{http_code}\nRequest Size:%{size_request}\n==========================================\n"
  while true; do
    date
    curl -L -so /dev/null -w "${fmt}" "${url}"
    sleep 5
  done
}
# aliases
# giff: colorized git diff between two arbitrary paths, outside any repo.
# NOTE: aliases never take parameters — the shell appends the caller's
# arguments after the expanded text, so the original '${@}' placeholder was
# expanding the *interactive shell's* positional params, not the alias args.
alias giff='git diff --no-index --color=always'