Diffstat (limited to 'includes')
-rw-r--r--  includes  691
1 files changed, 691 insertions, 0 deletions
diff --git a/includes b/includes
new file mode 100644
index 0000000..b080e51
--- /dev/null
+++ b/includes
@@ -0,0 +1,691 @@
+# vi:syntax=bash
+
+# Heavily inspired from https://github.com/sorin-ionescu/prezto/blob/master/modules/git/alias.zsh
+# Git aliases
+# Log
+zstyle -s ':prezto:module:git:log:medium' format '_git_log_medium_format' \
+ || _git_log_medium_format='%C(bold)Commit:%C(reset) %C(green)%H%C(red)%d%n%C(bold)Author:%C(reset) %C(cyan)%an <%ae>%n%C(bold)Date:%C(reset) %C(blue)%ai (%ar)%C(reset)%n%w(80,1,2)%+B'
+zstyle -s ':prezto:module:git:log:oneline' format '_git_log_oneline_format' \
+ || _git_log_oneline_format='%C(green)%h%C(reset) %><(55,trunc)%s%C(red)%d%C(reset) %C(blue)[%an]%C(reset) %C(yellow)%ad%C(reset)%n'
+zstyle -s ':prezto:module:git:log:brief' format '_git_log_brief_format' \
+ || _git_log_brief_format='%C(green)%h%C(reset) %s%n%C(blue)(%ar by %an)%C(red)%d%C(reset)%n'
+
+# Status
+zstyle -s ':prezto:module:git:status:ignore' submodules '_git_status_ignore_submodules' \
+ || _git_status_ignore_submodules='none'
+
+# Aliases
+
+# Branch (b)
+alias gb='git branch'
+alias gba='git rev-parse --abbrev-ref HEAD'
+alias gbc='git checkout -b'
+alias gbx='git branch -d'
+alias gbX='git branch -D'
+# Show the differences of the current head since it diverged from master
+gbd() {
+  git diff $(git merge-base master HEAD)...HEAD
+}
+gbds() {
+  git diff --stat $(git merge-base master HEAD)...HEAD
+}
+alias gdiff='git diff --no-index'
+
+# Commit (c)
+alias gc='git commit --verbose'
+alias gcam='git commit --verbose --amend'
+alias ga='git add'
+alias gcu='git add -u; git commit --verbose'
+alias gca='git add -A; git commit --verbose'
+alias gco='git checkout'
+gcoo() {
+ if [[ "$#" -eq 0 ]]; then
+ local FILES=(${(f)"$(git diff --name-only| fzf --multi --reverse)"})
+ for FILE in ${FILES[@]}; do
+ git checkout "$FILE"
+ done
+ else
+ git checkout "$@"
+ fi
+}
+gsq() {
+  if [[ "$#" -ne 1 ]]; then
+    echo "requires an int arg representing the number of commits to squash"
+    return 1
+  fi
+ MSG=$(git log --format=%B HEAD~"${1}"..HEAD)
+ git reset --soft HEAD~"${1}"
+ git commit --verbose --edit -m"${MSG}"
+}
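+# Usage sketch (commit count is hypothetical): squash the last 3 commits into
+# one, prefilling the editor with their combined messages:
+#   gsq 3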
+gfo() {
+ git commit -m 'temp'
+ MSG=$(git log --format=%B HEAD~2..HEAD~1)
+ git reset --soft HEAD~"2"
+ git commit --verbose --edit -m"${MSG}"
+}
+alias gcp='git cherry-pick --ff'
+alias gcm='git commit --amend'
+
+# Fetch (f)
+alias gf='git fetch'
+alias gfc='git clone'
+
+# Log (l)
+alias gl='git log --topo-order --pretty=format:${_git_log_medium_format}'
+alias glp='git log --topo-order --pretty=format:${_git_log_medium_format} -p'
+alias gls='git log --topo-order --stat --pretty=format:${_git_log_medium_format}'
+alias gld='git log --topo-order --stat --patch --full-diff --pretty=format:${_git_log_medium_format}'
+alias glo='git log --topo-order --date=local --pretty=format:${_git_log_oneline_format}'
+# Graph view across all branches
+alias glg='git log --topo-order --all --graph --date=local --pretty=format:${_git_log_oneline_format}'
+alias glb='git log --topo-order --pretty=format:${_git_log_brief_format}'
+alias glc='git shortlog --summary --numbered'
+
+# Rebase (r)
+alias gr='git rebase'
+alias gra='git rebase --abort'
+alias grc='git rebase --continue'
+alias gri='git rebase --interactive'
+alias grs='git rebase --skip'
+
+# Merge (m)
+alias gm='git merge'
+
+# Push (p)
+alias gp='git push'
+alias gpl='git fetch origin master && git rebase origin/master'
+
+# Stash (s)
+alias gs='git stash'
+alias gsa='git stash apply'
+alias gsx='git stash drop'
+alias gsX='git-stash-clear-interactive'
+alias gsl='git stash list'
+alias gss='git stash save --include-untracked'
+
+# Working Copy (w)
+alias gws='git status --ignore-submodules=${_git_status_ignore_submodules} --short'
+alias gwS='git status --ignore-submodules=${_git_status_ignore_submodules}'
+alias gwd='git diff --no-ext-diff'
+alias gwsd='git diff --cached'
+alias gwD='git diff --no-ext-diff --word-diff'
+alias gwr='git reset'
+alias gwR='git reset --hard'
+alias gwc='git clean -f'
+gwu() {
+ local FILES=(${(f)"$(git ls-files --others --exclude-standard| fzf --multi --reverse)"})
+ for FILE in ${FILES[@]}; do
+ rm "$FILE"
+ done
+}
+
+
+# Personal Aliases
+alias s='ssh'
+alias n='nvim'
+alias ks='kitty +kitten ssh'
+alias sco='nvim ~/.ssh/config'
+alias nvimf='nvim $(fzf)'
+alias cdf='cd $(find . -type d | fzf)'
+alias ..='cd ../'
+alias ...='cd ../../'
+alias ....='cd ../../../'
+alias t='tmux'
+alias ta='tmux attach'
+alias tre='~/.tmux/window_renum.sh'
+alias ncdu='ncdu --color dark -x'
+alias rcp='rsync --verbose --progress --human-readable -zz --archive --hard-links --one-file-system'
+alias rmv='rsync --verbose --progress --human-readable -zz --archive --hard-links --one-file-system --remove-source-files'
+alias rmvu='rsync --verbose --progress --human-readable -zz --archive --hard-links --one-file-system --remove-source-files --update'
+alias rsynchronize='rsync --verbose --progress --human-readable --compress --archive --hard-links --one-file-system --remove-source-files --update --delete'
+
+
+###########
+# Functions
+###########
+
+# Color shortcuts
+G="\e[32m"
+R="\e[31m"
+C="\e[36m"
+NC="\e[39m"
+
+# Docker functions
+alias dl="docker logs"
+alias dlf="docker logs --follow"
+dpa() { docker ps -a } # List all containers
+di() { docker images } # Show images
+drm() { docker rm $(docker ps -a -q) 2> /dev/null; } # Remove dead containers
+drv() { docker volume rm $(docker volume ls -qf dangling=true) } # remove dangling volumes
+dri() { docker rmi -f $(docker images -q) } # Remove unused images
+dstop() { docker stop $(docker ps -a -q); } # Stop all containers
+dip() { docker inspect --format '{{ .NetworkSettings.IPAddress }}' "$1" }
+dfp() { # Function to get forwarded port of docker container
+ if [ -z "$2" ]; then
+ docker inspect --format='{{(index (index .NetworkSettings.Ports "8000/tcp") 0).HostPort}}' $1
+ else
+ docker inspect --format='{{(index (index .NetworkSettings.Ports "'$2'/tcp") 0).HostPort}}' $1
+ fi
+}
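+# Usage sketch (container names/ports are hypothetical): print the host port
+# mapped to 8000/tcp by default, or to an explicitly named container port:
+#   dfp myapp
+#   dfp mydb 5432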
+dfpo() { # Open chrome to a forwarded container port
+ if [ "$#" -ne 1 ]; then
+ echo 'Usage: dfpo $port'
+ else
+ /usr/bin/open -a '/Applications/Google Chrome.app' 'http://localhost:'"$1"
+ fi
+}
+dex() { # Enter a container
+  if [ -z "$2" ]; then
+    docker exec -it "$1" /bin/bash
+  else
+    docker exec -it "$1" "$2"
+  fi
+}
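+# Usage sketch (names are hypothetical): open bash in a running container, or
+# pass a second arg when the image ships a different shell:
+#   dex myapp
+#   dex myalpine /bin/sh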
+
+
+# Tmux functions
+t4() {
+ tmux split-window
+ tmux split-window
+ tmux split-window
+ tmux select-layout tiled
+}
+tat() { # Creating a second window on a session
+ if [ ! -z "$1" ]; then
+ tmux new -t "$1" -s "$1"1
+ fi
+}
+tla() {
+ if [[ -z "$1" ]]; then
+ return
+ fi
+ # Search in all folders under 'Work'
+ local PROJS=($(find "$HOME"/Work/* -mindepth 1 -maxdepth 1 -type d))
+ local PROJ_NAME=''
+ local PROJ_DIR=''
+ # Find if we have a match
+ for dir in "${PROJS[@]}"; do
+ if [[ $(basename "$dir") == "$1" ]]; then
+ PROJ_NAME=$(basename "$dir")
+ PROJ_DIR="$dir/code"
+ fi
+ done
+ if [[ -z "$PROJ_NAME" ]]; then
+ echo 'Project not found'
+ return
+ fi
+ PROJECT="$PROJ_NAME" DIR="$PROJ_DIR" tmuxp load ~/.tmux/templates/alternative.yaml
+}
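+# Usage sketch (project name is hypothetical): assuming ~/Work/<group>/myproj
+# with a code/ subdir, load the tmuxp template with PROJECT and DIR set for it:
+#   tla myproj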
+
+# Nixos
+alias nrs="sudo -i nixos-rebuild switch"
+alias nco="sudo nixos-container"
+alias ns="nix-shell -p"
+
+# Git
+gup() { # Loop through args, branches, and update them
+ orig_head="$(git name-rev --name-only HEAD)"
+ for var in "$@"
+ do
+ git checkout "$var"
+ git pull origin "$var"
+ git merge origin/"$var"
+ done
+ git checkout "$orig_head"
+}
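+# Usage sketch (branch names are hypothetical): pull several local branches up
+# to date with origin, then return to the branch you started on:
+#   gup master develop feature/login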
+
+gupr() {
+ git checkout "$1"
+ git fetch origin master
+ git rebase origin/master
+}
+
+gpf() {
+ current_branch="$(git name-rev --name-only HEAD)"
+ git push origin "$current_branch" --force
+}
+
+gpu() {
+ current_branch="$(git name-rev --name-only HEAD)"
+ git fetch origin "$current_branch"
+ git reset --hard origin/"$current_branch"
+}
+
+
+# GPG
+gpgen() { # Function for compressing and encrypting a file
+  tar -zc "$1" | gpg --encrypt --sign --armor --recipient cody@hiar.ca > "$2"
+}
+gpgde() { # Function for uncompressing and decrypting a file
+  gpg -d "$1" | tar -zx
+}
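+# Usage sketch (paths are hypothetical): compress and encrypt a directory to an
+# armored file, then decrypt and extract it again:
+#   gpgen secrets/ secrets.tar.gz.asc
+#   gpgde secrets.tar.gz.asc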
+
+
+# SSH
+sl() { # Handy ssh forwarding commands, pull a port down
+  if [ $# -eq 0 ]; then
+    echo 'Usage: sl $host $port $bindingaddress(optional)'
+  else
+    while true
+    do
+      if [ -z "$3" ]; then
+        ssh -nNT -L "$2":localhost:"$2" "$1"
+      else
+        ssh -nNT -L "$2":"$3":"$2" "$1"
+      fi
+      sleep 10
+    done &
+  fi
+}
+sr() { # Handy ssh forwarding commands, push a port up
+  if [ $# -eq 0 ]; then
+    echo 'Usage: sr $host $port $bindingaddress(optional)'
+  else
+    while true
+    do
+      if [ -z "$3" ]; then
+        ssh -nNT -R "$2":localhost:"$2" "$1"
+      else
+        ssh -nNT -R "$2":"$3":"$2" "$1"
+      fi
+      sleep 10
+    done &
+  fi
+}
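+# Usage sketch (host/ports are hypothetical): keep re-establishing a tunnel in
+# the background; sl pulls remote port 8080 down to localhost:8080, sr pushes
+# local port 9000 up to the remote host as port 9000:
+#   sl myserver 8080
+#   sr myserver 9000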
+
+
+pgdumpr() { # Dump remote postgres database.
+ ssh -t "$1" "sudo -u postgres bash -c \"pg_dump --no-acl --no-owner -d "$2" > /tmp/"$2"-$(date +%F).sql\""
+ scp "$1":/tmp/"$2"-$(date +%F).sql .
+}
+pgimport() { # SCP file remotely and import it.
+ scp "$2" "$1":/tmp
+ SHORTNAME=$(echo "$2" | cut -d'-' -f1)
+ ssh -t "$1" "sudo -u postgres bash -c \"psql -d "$SHORTNAME" < /tmp/"$2"\""
+}
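+# Usage sketch (hosts/db names are hypothetical): dump a remote database into a
+# dated .sql file locally, then import such a dump elsewhere (pgimport derives
+# the database name from the filename up to the first '-'):
+#   pgdumpr db1.example.com myapp
+#   pgimport db2.example.com myapp-2024-01-01.sql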
+pgls() { # List commands on a server
+ HOST="$1"
+ ssh -tt "$HOST" 'sudo -u postgres bash -c "psql --list"'
+}
+mysqldumpr() { # Dump remote mysql database
+ ssh -t "$1" "mysqldump "$2" > /tmp/"$2"-$(date +%F).sql" && scp "$1":/tmp/"$2"-$(date +%F).sql .
+}
+fwkill() { # Kill all of the forwarded ports on the machine
+ ps aux | grep 'ssh -nNT -L' | grep -v 'grep' | awk '{ print $2 }' | xargs -n 1 kill
+}
+j() { # Jump to project code
+ PROJ=$(find "$HOME/Work" -mindepth 2 -maxdepth 2 -type d -name "$1")
+ if [[ -d "$PROJ/code" ]]; then
+ cd "$PROJ/code"
+ fi
+}
+ch() { # Force ownership on a projects files. Sometimes docker generates root owned files
+ PROJ=$(find "$HOME/Work" -mindepth 2 -maxdepth 2 -type d -name "$1")
+ sudo chown -R thorny:users "$PROJ/code"
+}
+finalurl() { # check that redirects are properly happening
+ curl http://"$1" -s -L -o /dev/null -w '%{url_effective}'
+ echo ''
+ curl http://www."$1" -s -L -o /dev/null -w '%{url_effective}'
+ echo ''
+ curl https://"$1" -s -L -o /dev/null -w '%{url_effective}'
+ echo ''
+ curl https://www."$1" -s -L -o /dev/null -w '%{url_effective}'
+ echo ''
+}
+newproj() { # Create a new cookiecuter project
+ cookiecutter https://git.codyhiar.com/docker/cookiecutter-docker
+ PROJECT=$(ls -t1 --color=never | head -1)
+ mv "$PROJECT" code
+ mkdir "$PROJECT"
+ mv code "$PROJECT"
+}
+p() { # List all projects
+ find "$HOME"/Work/* -mindepth 1 -maxdepth 1 -type d | xargs -n1 basename | sort
+}
+ssl() {
+ echo | openssl s_client -servername "$1" -connect "$1":443 2>/dev/null | openssl x509 -noout -issuer -dates -subject -fingerprint
+}
+
+gfcc () {
+ # This function assumes urls of one of the following formats. All others
+ # will not work:
+ #
+ # git@github.com:user/repo.git
+ # https://github.com/user/repo
+ PROTOCOL=$(echo "$1" | cut -c1-3)
+ if [[ "$PROTOCOL" == 'git' ]]; then
+ REPO=$(echo "$1" | cut -d'/' -f2 | cut -d'.' -f1)
+ elif [[ "$PROTOCOL" == 'htt' ]]; then
+ REPO=$(echo "$1" | cut -d'/' -f5 )
+ fi
+ git clone "$1" "$REPO"/code
+}
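+# Usage sketch (repo URL is hypothetical): clone into <repo>/code to match the
+# <project>/code layout expected by j and tla:
+#   gfcc git@github.com:user/repo.git   # clones into repo/code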
+
+heartbeat() { # Keep a heartbeat on a website
+ while true; do
+ STATUS=$(nice curl -I "$1" 2> /dev/null | grep '200 OK')
+ if [[ -n $STATUS ]]; then
+ echo -e "$(date) ${G}$1 is up${NC}"
+ else
+ STATUS=$(nice curl -I "$1" 2> /dev/null | grep 'HTTP/2 200')
+ if [[ -n $STATUS ]]; then
+ echo -e "$(date) ${G}$1 is up${NC}"
+ else
+ echo -e "$(date) ${R}$1 is down${NC}"
+ fi
+ fi
+ sleep 2
+ done
+}
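+# Usage sketch (URL is hypothetical): poll a site every 2 seconds and print a
+# coloured up/down line per check:
+#   heartbeat https://example.com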
+mvw() { # i3 move workspace to monitor
+ i3-msg "workspace ${1}, move workspace to output ${2}"
+}
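+# Usage sketch (output name is hypothetical): send i3 workspace 3 to a monitor:
+#   mvw 3 HDMI-1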
+getip() { # Get ip for website, ignore cloudflare ips
+ IS_CLOUDFLARE=$(dig +short NS "$1" | grep 'cloudflare')
+ if [[ -n "$IS_CLOUDFLARE" ]]; then
+ echo 'Behind Cloudflare'
+ return
+ fi
+ IP_ADDR=$(dig +short "$1")
+ echo "$IP_ADDR"
+ grep -B 2 "$IP_ADDR" ~/.ssh/config | grep 'Host '
+}
+lorem() {
+ WORD_LENGTH=$(((RANDOM % 5) + 5))
+ WORD=$(openssl rand -base64 12| head -n 1 | cut -c1-"$WORD_LENGTH")
+ SENTENCE="$WORD"
+ for i in {1..40}; do
+ WORD_LENGTH=$(((RANDOM % 5) + 5))
+ WORD=$(openssl rand -base64 12| head -n 1 | cut -c1-"$WORD_LENGTH")
+ SENTENCE="$SENTENCE $WORD"
+ done
+ echo $SENTENCE
+}
+amis() {
+ aws ec2 describe-images --owners self | jq '.Images[] | {id: .ImageId, name: .Name, state: .State, snapshot: .BlockDeviceMappings[0].Ebs.SnapshotId}'
+}
+rm_ami() {
+ AMI_NAME="$1"
+ DATA=$(aws ec2 describe-images --owners self)
+ AMI_ID=$(echo "$DATA"| jq '.Images[] | select(.Name | contains("'"$AMI_NAME"'")) | .ImageId' | sed -e 's/^"//' -e 's/"$//')
+ SNAPSHOT_ID=$(echo "$DATA"| jq '.Images[] | select(.Name | contains("'"$AMI_NAME"'")) | .BlockDeviceMappings[0].Ebs.SnapshotId' | sed -e 's/^"//' -e 's/"$//')
+ aws ec2 deregister-image --image-id "$AMI_ID"
+ aws ec2 delete-snapshot --snapshot-id "$SNAPSHOT_ID"
+}
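+# Usage sketch (AMI name is hypothetical): deregister the AMI whose name matches
+# and delete its backing EBS snapshot:
+#   rm_ami myapp-base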
+settitle() {
+ xdotool set_window --name "$*" $(xdotool getactivewindow)
+}
+csv() {
+ clear; csvlook -d ',' --no-inference "$1" | less -s
+}
+y() {
+ yank | xp
+}
+gbxm() { # Clear out branches
+ git branch | egrep -v "(master)" | xargs -n 1 git branch -D
+ rm -rf .git/refs/remotes/origin/*
+ git fetch origin master
+}
+d() { # reset monitors on desktop
+ i3-msg "workspace 1, move workspace to output DVI-D-1"
+ i3-msg "workspace 2, move workspace to output DVI-I-1"
+ i3-msg "workspace 3, move workspace to output HDMI-4"
+}
+mkv2mp4() { # Create an mp4 of mkv
+ ffmpeg -i "$1" -codec copy "${1%.*}.mp4"
+}
+rzf() {
+ local FILENAME=$(fzf)
+ local DIRNAME=$(dirname "${FILENAME}")
+ ranger "${DIRNAME}"
+}
+
+btc() { # Get current btc
+ curl -s https://bitpay.com/api/rates | python -c "import json, sys; print(json.load(sys.stdin)[6]['rate'])"
+}
+btcc() { # convert btc to cad
+ BTC_RATE=$(curl -s https://bitpay.com/api/rates | python -c 'import json, sys; print(json.load(sys.stdin)[6]["rate"])')
+ echo $(($1 * $BTC_RATE))
+}
+brightd() {
+ sudo python3 "$HOME/.dotfiles/repos/additional/scripts/brightness_daemon.py"
+}
+brightness_up() {
+ echo 'up' | nc -U /tmp/brightd.sock
+}
+brightness_down() {
+ echo 'down' | nc -U /tmp/brightd.sock
+}
+tlo () {
+ if [[ -z "$1" ]]; then
+ return
+ fi
+ local PROJS=($(find "$HOME"/Work/* -mindepth 1 -maxdepth 1 -type d))
+ local PROJ_NAME=''
+ local PROJ_DIR=''
+ for dir in "${PROJS[@]}"; do
+ if [[ $(basename "$dir") == "$1" ]]; then
+ PROJ_NAME=$(basename "$dir")
+ PROJ_DIR="$dir/code"
+ tmux new-session -c "${PROJ_DIR}" -s "${PROJ_NAME}"
+ return
+ fi
+ done
+ echo "Project '${1}' was not found"
+}
+
+gpp() {
+ MESSAGE=${1:-auto}
+ git commit -m "$MESSAGE" && git push origin master
+}
+gppa() {
+ MESSAGE=${1:-auto}
+ git add -A; git commit -m "$MESSAGE" && git push origin master
+}
+cwh() {
+ cat $(which "${1}")
+}
+vwh() {
+ nvim $(which "${1}")
+}
+xephyr() {
+ Xephyr -br -ac -noreset -screen 1080x1080 :1
+}
+pwgen() {
+ date +%s | sha256sum | base64 | head -c 32 | cut -c1-10
+}
+# rotate uses ImageMagick
+rotate() { convert "$1" -rotate 90 "$1" }
+rotate90() { convert "$1" -rotate 90 "$1" }
+rotate180() { convert "$1" -rotate 180 "$1" }
+rotate270() { convert "$1" -rotate 270 "$1" }
+# task warrior commands
+tkwa() { task add "$1" +work; task sync; }
+tkpa() { task add "$1" +personal; task sync; }
+tkw() { task +work; }
+tkp() { task +personal; }
+ts() { task sync; }
+ttotal() {
+ task "$1" information | grep 'duration' | awk -F'duration: ' '{ print $2 }' | cut -d')' -f1 | iso8601_adder
+}
+twstart() {
+ TAG=$(cat ~/.timewarrior/my_tags | fzf)
+ timew start "$TAG"
+}
+twstop() {
+ TAG=$(cat ~/.timewarrior/my_tags | fzf)
+ timew stop "$TAG"
+}
+twremove() {
+ TAGS=(${(f)"$(cat ~/.timewarrior/my_tags | fzf --multi)"})
+ for TAG in ${TAGS[@]}; do
+ sed -i '/'"$TAG"'/d' ~/.timewarrior/my_tags
+ done
+}
+twsummary() {
+ timew summary
+}
+twsummaryt() {
+ TAG=$(cat ~/.timewarrior/my_tags | fzf)
+ timew summary "$TAG"
+}
+inc() {
+ nvim ~/.dotfiles/zsh/includes
+}
+dot2png() {
+ dot "${1}" -Tpng -o "${1%.*}.png"
+}
+dot2svg() {
+ dot "${1}" -Tsvg -o "${1%.*}.svg"
+}
+alias vbm="vboxmanage"
+xlsx2csv() {
+ in2csv "${1}" > "${1%.*}.csv"
+}
+xls2csv() {
+ in2csv "${1}" > "${1%.*}.csv"
+}
+klu() {
+ CLUSTER=$(kubectl config get-contexts | tail -n +2 | awk '{ print $2 }' | fzf)
+ kubectl config use-context "$CLUSTER"
+}
+ksl() {
+ kubectl get svc
+}
+kdl() {
+ clear
+ kubectl get deployment -o wide | less -S
+}
+kex() {
+ POD=$(kubectl get pods | tail -n +2 | awk '{ print $1 }' | fzf)
+ kubectl exec -it "$POD" -- bash
+}
+klf() {
+ POD=$(kubectl get pods | tail -n +2 | awk '{ print $1 }' | fzf)
+ kubectl logs --follow "$POD"
+}
+mrs() {
+ TOKEN=$(sops -d /not/real/path/to/keys.yaml | yq -r .gitlab_token)
+ curl --header "PRIVATE-TOKEN: ${TOKEN}" -X GET "https://gitlab.com/api/v4/projects/someproject/merge_requests?state=opened" 2> /dev/null | \
+ jq ".[] | {title: .title, branch: .source_branch, author: .author.name, web_url: .web_url, labels: .labels}"
+}
+mrsc() {
+ TOKEN=$(sops -d /not/real/path/to/keys.yaml | yq -r .gitlab_token)
+ curl --header "PRIVATE-TOKEN: ${TOKEN}" -X GET "https://gitlab.com/api/v4/projects/someproject/merge_requests?state=merged&order_by=updated_at" 2> /dev/null | \
+ jq "limit(4;.[]) | {title: .title, branch: .source_branch, author: .author.name, web_url: .web_url}"
+}
+glab_ci() {
+ TOKEN=$(sops -d /not/real/path/to/keys.yaml | yq -r .gitlab_token)
+ RUNNING_BRANCH=$(curl --header "PRIVATE-TOKEN: ${TOKEN}" -X GET "https://gitlab.com/api/v4/projects/someproject/pipelines?status=running" 2> /dev/null | jq -r ".[0].ref")
+ glab ci view $RUNNING_BRANCH
+}
+
+2faimport() {
+ if [[ "$#" -ne 2 ]]; then
+ echo 'error in num args'
+ else
+ # 1 is image 2 is account
+ zbarimg -q --raw "${1}" | pass otp append "${2}"
+ fi
+}
+2fadisplay() {
+ pass otp uri -q "${1}"
+}
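+# Usage sketch (file/account names are hypothetical): import a TOTP secret from
+# a QR code image into pass-otp, then print its otpauth:// URI back out:
+#   2faimport qr.png github
+#   2fadisplay github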
+rst2md() {
+ pandoc -s -o "${1%.*}.md" "${1}"
+}
+rtf2txt() {
+ unoconv -f txt "${1}"
+}
+zshpure() {
+ zsh -d -f -i
+}
+cheat() {
+ curl https://cheat.sh/"${1}"
+}
+fz() {
+ DIR=$(_z 2>&1 -l "${1}" | rg -v 'common:' | awk '{ print $2 }' | fzf --tac)
+ cd $DIR
+}
+alias icat="kitty +kitten icat"
+rwh() {
+ readlink $(which "${1}")
+}
+cdrwh() {
+ cd $(dirname $(dirname $(rwh "${1}")))
+}
+cdr() {
+ cd $(dirname $(dirname "${1}"))
+}
+nums() {
+ # set IFS to deal with dirs having spaces
+ local IFS=$'\n\t'
+ for DIR in $(find . -mindepth 1 -maxdepth 1 -type d | sort); do
+ local MYDIR=$(basename "${DIR}")
+ local NUMFILES=$(find ${MYDIR} -type f | wc -l)
+ printf "%6s %s\n" $NUMFILES $MYDIR
+ done
+}
+l() {
+  if [ -x "$(command -v exa)" ]; then
+    exa "$@"
+  else
+    command ls "$@"
+  fi
+}
+# Use 'command ls' in the fallback so this doesn't recurse into itself when exa
+# is not installed
+ls() {
+  if [ -x "$(command -v exa)" ]; then
+    exa "$@"
+  else
+    command ls "$@"
+  fi
+}
+ll() {
+ l -l
+}
+l1() {
+ l -1
+}
+
+clean_nix_generations() {
+ sudo nix-env -p /nix/var/nix/profiles/system --delete-generations +5
+ sudo nix-collect-garbage
+}
+ghprl() {
+ gh pr list --author @me --json number,title,headRefName,url | jq -r 'map({number,title,headRefName,url}) | (first | keys_unsorted) as $keys | map([to_entries[] | .value]) as $rows | $keys,$rows[] | @csv' | csvlook -d ',' --no-inference | less
+ gh pr list --search "is:open is:pr review-requested:@me" --json number,title,headRefName,url | jq -r 'map({number,title,headRefName,url}) | (first | keys_unsorted) as $keys | map([to_entries[] | .value]) as $rows | $keys,$rows[] | @csv' | csvlook -d ',' --no-inference
+}
+ghpra() {
+ gh pr list --search "is:open is:pr author:@me review:approved" --json number,title,headRefName,url | jq -r 'map({number,title,headRefName,url}) | (first | keys_unsorted) as $keys | map([to_entries[] | .value]) as $rows | $keys,$rows[] | @csv' | csvlook -d ',' --no-inference
+}
+ghpr() {
+ local PR=$(gh pr list --author @me --json number,title -q '.[] | "\(.number) \(.title)"' | fzf --prompt "Which PR do you want to check out?" | awk '{ print $1 }')
+ export GH_PR=$PR
+}
+ghprs() {
+ gh pr checks $1
+ echo ""
+ gh pr view $1
+}
+ghprd() {
+ gh pr diff "$1"
+}
+ghprm() {
+ echo "Merge PR: $1?"
+ read choice
+ case "$choice" in
+ y|Y ) gh pr merge --auto --rebase --delete-branch $1;;
+ n|N ) return;;
+ * ) echo "invalid";;
+ esac
+}
+ghprr() {
+ gh pr list -S 'review-requested:@me'
+}
+ghil() {
+ gh issue list -a @me
+}
+ghprw() {
+  while true; do clear; gh pr checks "$1"; sleep 7; done
+}