#!/bin/bash
#
# Synopsis:
#
#     #!/bin/bash
#     . export-json.bash
#     export_JSON
#
# Example:
#
#     $ (
#         source export-json.bash &&
#         export_JSON SSH_TTY \
#                     SSH_CONNECTION \
#                     SSH_CLIENT \
#                     SSH_AUTH_SOCK
#       )
#
#     {
#       "SSH_TTY": "/dev/pts/0",
#       "SSH_CONNECTION": "192.0.2.140 41564 198.51.100.25 22",
#       "SSH_CLIENT": "192.0.2.140 41564 22",
#       "SSH_AUTH_SOCK": "/tmp/ssh-XXXXcePKJl/agent.36326"
#     }
#
# This bash function exports shell variables as
# JSON ("JavaScript Object Notation") strings.
#
# The string is printed to stdout.
#
# JSON is a format that represents data as
# JavaScript source code, so modern programmers
# can read it -- unlike bash source code.
#
# Variable names are given to the function as its
# argument list:
#
#     $ source export-json.bash &&
#       export_JSON PATH
#
# The output is a single JSON object containing
# key-value mappings between JSON strings:
#
#     {
#       "PATH": "/usr/local/sbin:[...]"
#     }
#
# It is possible to use a different name for the
# variable in the JSON object key field than in
# the shell environment, like this:
#
#     $ export_JSON DBUS_SESSION_BUS_ADDRESS=p
#     {
#       "p": "unix:path=/run/user/1000/bus"
#     }
#
# It uses the external tool "jq" to parse string
# values placed in jq's argument list by bash and
# then encode them as JSON string values.  This
# is no accidental dependency.  The jq program is
# the foundation of the trustworthiness of this
# code.  If we were encoding JSON strings in bash
# we would have to be a lot more careful.

# Validate one "VAR" or "VAR=key" argument and, when the
# shell variable is set, emit one NUL-terminated
# "key=value" record on stdout.
#
# Returns 30 for arguments with more than one '=',
# 10 for anything that is not a valid variable word,
# and warns (without failing) about unset variables.
arg1_to_env0() {
  case "$1" in
    *=*=* )
      # More than one '=': report the leftover text after
      # the first '=' as the offending part.
      set -- "${1#*=}"
      echo "Error: invalid variable: ${1@Q}" >&2
      return 30
      ;;
    *[^a-zA-Z0-9_=]* | [^a-zA-Z_]* | '' )
      # Bad character, bad first character, or empty word.
      echo "Error: invalid variable: ${1@Q}" >&2
      return 10
      ;;
  esac

  # Split "VAR=key" (or plain "VAR") into:
  #   $1 - the JSON key   (text after '=', or the whole word)
  #   $2 - the shell name (text before '=', or the whole word)
  set -- "${1##*=}" "${1%%=*}"

  if [ -v "$2" ]
  then
    printf '%s=%s\0' "$1" "${!2}"
  else
    echo "Warning: ignoring unset variable: ${2@Q}" >&2
  fi
}

# Hygienic loop.  Doesn't touch the shell
# environment.
# Hygienic loop over $2.. applying $1 one argument at a
# time.  The work list lives in the positional parameters,
# so no loop variable leaks into the shell environment.
# Stops and propagates the status of the first failing call.
for_each() {
  while [ $# -ge 2 ]
  do
    $1 "$2" || return
    set -- "$1" "${@:3}"
  done
}

# Print every array variable -- and, with -a, every scalar
# variable too -- merged into one JSON object.
to_JSON_all() {
  (
    if [ "$1" = '-a' ]
    then
      # FIX: pass an explicit identity filter.  A bare
      # "jq_env" made jq_env run "jq" with no program
      # argument, which jq (1.6 and earlier at least)
      # rejects with a usage error, silently dropping all
      # scalar variables from the -a output.
      jq_env .
    fi
    # compgen emits one identifier per line; word splitting
    # is safe because identifiers contain no whitespace.
    for_each to_JSON1 $(compgen -A arrayvar)
  ) | jq -s 'add'
}

# Emit a jq program that zips the second half of
# $ARGS.positional (keys) with the first half (values)
# into one object: [v1..vn, k1..kn] -> {k1: v1, ...}.
jq_zip2() {
  cat <<'END'
$ARGS.positional
| [ .[ length/2 : ], .[ : length/2 ] ]
| transpose
| map ({ (.[0]): .[1] })
| add
END
}

# Encode associative array $1 as {"name": {key: value, ...}}.
# NOTE(review): the eval interpolates the array name;
# callers reach this only via to_JSON1, whose ${!1@a} probe
# already chokes on non-identifier input, but the name is
# not re-validated here -- confirm before exposing directly.
json_encode_associative_array() {
  # After the eval: $1 is the array name, then all values,
  # then all keys (bash expands both in the same order).
  eval \
    'set -- "$1" \
            "${'"$1"'[@]}" \
            "${!'"$1"'[@]}"; ' &&
  if [ $# -ge 3 ]
  then
    jq -n "{ (\$k): $(jq_zip2) }" \
       --arg k "$1" \
       --args "${@:2}"
  else
    # Empty associative array: encode as an empty object.
    jq -n '{ ($k): {} }' --arg k "$1"
  fi
}

# Encode indexed array $1 as {"name": [v0, v1, ...]}.
# Sparse indices are not preserved; only the values are
# kept, in order.
json_encode_indexed_array() {
  eval \
    'set -- "$1" \
            "${'"$1"'[@]}" \
    ' &&
  jq -n '{ ($k): $ARGS.positional }' \
     --arg k "$1" \
     --args "${@:2}"
}

# This uses the more complex implementation that,
# when used with multiple arguments, does not
# require extra calls to jq.  Since this is only
# using a single argument, a more straightforward
# implementation could be used just as well.
#
# NOTE(review): this definition is immediately shadowed by
# the next one (in bash the later definition wins); it is
# kept as documentation of the alternative.
json_encode_string() {
  [ -v "$1" ] && export_JSON_unsafe "$1"
}

# The straightforward implementation -- the one actually in
# effect:
json_encode_string() {
  [ -v "$1" ] && jq -n '{ ($k): $v }' --arg k "$1" --arg v "${!1}"
}

# But is having two implementations really
# straightforward?  And what about this
# parenthetical commentary?  Moreso wobbly, if not
# winding, error-prone complexity finding itself
# grinding up on the accidents of time passing it
# might be anyway but the takeaway is don't touch
# it till it croak, the tests will run and the
# result will speak, may truth spread through the
# dendrites of Samizdat!
# User-facing dispatcher.
#
#   to_JSON            -> usage on stderr, non-zero status
#   to_JSON -a         -> all variables, merged
#   to_JSON -A         -> all array variables, merged
#   to_JSON NAME...    -> one merged object per run
to_JSON() {
  case "$#$1" in
    0 )
      printf \
        '%s\n' \
        'usage: to_JSON -a # print all' \
        '       to_JSON -A # print arrays' \
        '       to_JSON [...]' \
        >&2
      # FIX: was "return -1", which bash < 4.4 rejects as an
      # invalid option; newer bash maps -1 to 255, so 255
      # preserves the observable status everywhere.
      return 255
      ;;
    1-a | 1-A )
      to_JSON_all "$1"
      ;;
    * )
      for_each to_JSON1 "$@"
      ;;
  esac
}

# Encode one variable, dispatching on its attributes:
# indexed array (a), associative array (A), else scalar.
to_JSON1() {
  case "${!1@a}" in
    *a* ) json_encode_indexed_array "$1" ;;
    *A* ) json_encode_associative_array "$1" ;;
    * ) json_encode_string "$1" ;;
  esac
}

# Read NUL-terminated "key=value" records from stdin and
# print them as one JSON object, letting jq's --arg do all
# the string encoding.
env0_to_JSON() {
  set --
  # FIX: -r added; without it read eats backslashes,
  # corrupting values such as 'C:\path' before they ever
  # reach jq.
  while read -r -d ''
  do
    set -- "$@" --arg "${REPLY%%=*}" "${REPLY#*=}"
  done
  jq -n -r '$ARGS.named' "$@"
}

# Pipe variant: fails if either side of the pipe fails, but
# (unlike export_JSON below) the consumer may start reading
# before the producer has finished validating.
export_JSON_unsafe() {
  (
    set -o pipefail
    for_each arg1_to_env0 "$@" | env0_to_JSON
  )
}

# Run producer "${@:3}" to completion into a temp file and
# only then feed consumer $2 from it -- so the consumer
# never sees partial output from a failed producer.
safety_pipe() {
  [ $# -ge 2 ] || return
  local tmp
  # FIX: check mktemp directly.  The old
  #   set -- "$(mktemp)" "$@" || return
  # never failed, because set's own status is 0 no matter
  # what the command substitution did.
  tmp=$(mktemp) || return
  set -- "$tmp" "$@"
  (
    trap "rm ${1@Q}" EXIT
    # $2 is deliberately unquoted: it may carry a command
    # plus arguments as one word-split string.
    "${@:3}" > "$1" && $2 < "$1"
  )
}

# Export the named variables as one JSON object, emitting
# no JSON at all when validation fails (see safety_pipe).
export_JSON() {
  safety_pipe env0_to_JSON for_each arg1_to_env0 "$@"
}

# Pass through only the names (one per line on stdin) that
# are set in the current shell.
filter_vars() {
  while IFS= read -r
  do
    if [ -v "$REPLY" ]
    then
      printf '%s\n' "$REPLY"
    fi
  done
}

# All set variables as JSON, filtered through jq "$@".
jq_env() {
  export_JSON $(compgen -v | filter_vars) | jq "$@"
}

# All exported variables as JSON, filtered through jq "$@".
jq_exports() {
  export_JSON $(compgen -e | filter_vars) | jq "$@"
}

# Run "$@" and report its exit status on stderr, preserving
# the command's own stdout/stderr.
try() {
  "$@"
  printf '(Exit %s) <- [%s]\n' "$?" "${*@Q}" >&2
}

# Smoke tests: happy path, unset variables, invalid names,
# empty args, and the array encoders.
runtest() {
  set -- SSH_CLIENT SSH_TTY SSH_AUTH_SOCK SSH_CONNECTION
  try export_JSON "$@"
  unset unsetvar
  try export_JSON SSH_TTY unsetvar
  try export_JSON unsetvar SSH_TTY
  try export_JSON
  try export_JSON ''
  try export_JSON 'invalid!' SSH_TTY
  try export_JSON SSH_TTY 'invalid!'
  try jq_env .unsetvar
  emptyvar=
  try jq_env .emptyvar
  try jq_exports '.|{TERM,LANG,HOSTTYPE,EDITOR,SHELL}'
  try jq_env '.|{TERM,LANG,HOSTTYPE,EDITOR,SHELL}'
  try to_JSON PATH BASH_ARGV BASH_VERSINFO BASH_ALIASES BASH_CMDS
}

# code poetry flowetry toiletry coiled spring load
# the home row write tight c with terry davis down
# in a basement univalent foundation concrete on
# the bottom with donald while they all on top of
# things they found up there still we founding
# here building tall stacks of calls out to all to
# any accepting returns of types unknown or known
#
# defensively typed values for anomic millennial
# hackers
#
# atomized castaways of aging social bodies
# reviving ancient code bodies encoding ancient
# knowns into own bodies endomorphing endorphins
# of discovery over arrows of time chance and
# finger dance into machine flow that won't halt
# or falter to a byzantine emperors structures
# span relativities messages