#!/bin/bash

# Globbing is disabled for the whole script: arrays are repeatedly built from
# unquoted command substitutions (jq/sort output) whose words must not be
# expanded against the filesystem.
set -f
# Original IFS, restored after the temporary IFS=$'\n' sections in main.
OIFS=$IFS
LIBEXEC_DIR=${LIBEXEC_DIR:-${0%/*}/../libexec}
PROGNAME=${0##*/}
# GitHub location of the Elastic Common Schema releases (download_ecs_tarball).
ECS_GITHUB_OWNER=elastic
ECS_GITHUB_REPO=ecs
# Prefix used when naming installed objects (eg: ECS component templates).
OBJECT_PREFIX=zlc
OVERWRITE=

# Private working directory, removed by on_exit (after --keep copies, if any).
# NOTE(review): $RANDOM is predictable; mktemp -d would be safer on
# multi-user hosts — confirm whether that matters for this deployment.
declare TMPDIR="${TMPDIR:-/tmp}/$PROGNAME.$RANDOM"
[[ ${TMPDIR:0:1} == / ]] || TMPDIR="$PWD/$TMPDIR"
declare -r TMPDIR

# Force simulating curl --fail-with-body if disabled in es-curl, usually
# because it is unsupported by the version of curl (easier for troubleshooting
# errors, server output gets printed when enabling --debug).
export ES_LOCAL_CURL_FAIL=2

# $1: exit status (default: 1)
# Print usage and exit with the given status, so --help (exit_usage 0) can
# succeed while usage errors exit 1.  Previously the argument was accepted
# but ignored and the function always exited 1.
function exit_usage() {
    echo "\
Usage: $PROGNAME normed-file-name.extension
Helper to install all sort of objects in Elasticsearch

Available options:
    -t, --type   VALUE      Force object type
    -n, --name   VALUE      Force object name
    -k, --keep              Keep downloaded files
    -P, --proxy  VALUE      HTTP proxy for external requests, eg: downloads
    --overwrite             (Re)install protected objects even if exist
    --dry-run               Do not install anything, print what would be done
    -d, --debug             Enable debug
    -h, --help              Display this help

Regarding --overwrite option:
    - for elasticsearch, inject @custom objects even if exist
    - for kibana saved objects, import with overwrite=true instead of false
"
    exit "${1:-1}"
}

# Logging helpers.  Each prints one line of the form
#   "<LABEL>: <progname>[ ** DRY-RUN]: <message>"
# LABEL defaults to INFO and is overridden per-call by the wrappers below.
function info() {
    printf '%s: %s%s: %s\n' \
        "${LABEL:-INFO}" "$PROGNAME" "${DRY_RUN:+ ** DRY-RUN}" "$*"
}
function debug() { LABEL=DEBUG info "$@"; }
function error() { LABEL=ERROR info "$@"; }
# fatal logs the message then aborts the whole script with status 2
function fatal() { LABEL=FATAL info "$@"; exit 2; }

# Thin wrappers around es-curl/kbn-curl that become successful no-ops when
# --dry-run is active.
function es-curl-dry-run() {
    if [[ -n $DRY_RUN ]]; then return 0; fi
    es-curl "$@"
}
function kbn-curl-dry-run() {
    if [[ -n $DRY_RUN ]]; then return 0; fi
    kbn-curl "$@"
}

# EXIT trap: copy downloaded artifacts out to the caller's directory when
# --keep was given, then drop the private temporary directory.
function on_exit() {
    if [[ -n $KEEP_DOWNLOADED_FILES ]] && [[ -n $DOWNLOADED_FILES ]]; then
        cp -a "${DOWNLOADED_FILES[@]}" "$PWD/"
    fi
    if [[ -d $TMPDIR ]]; then
        rm -rf -- "$TMPDIR"
    fi
}

# curl wrapper for external (non Elastic-stack) requests: silent but keep
# error output, fail on HTTP errors, optional --proxy, bounded connect time.
function mycurl() {
    local -a base_opts=(-sS -f)
    [[ -n $EXTERNAL_PROXY ]] && base_opts+=(-x "$EXTERNAL_PROXY")
    base_opts+=(--connect-timeout "${CURL_CONNECT_TIMEOUT:-10}")
    curl "${base_opts[@]}" "$@"
}

# $1: file, or a directory holding an integration package (manifest.yml)
# Guess the installable object type from content and leave it in REPLY.
# Fails (return 1) when no type can be determined or when the guessed type
# has no matching do_file_type_* handler defined.
function guess_object_type_from_content() {
    if [[ -d $1/. && -f $1/manifest.yml ]]; then
        # directory case: only integration package sources are recognized
        REPLY=$(convert_to_json "$1/manifest.yml" |jq -r 'try
            if .type == "integration" then "integration"
            else "" end
        ')
    else
        # file case: slurp the whole JSON stream (-s) so multi-document
        # kibana ndjson exports can be inspected via their first and last
        # documents (for single-document files first == last).  Marker
        # fields map to object types; a numeric .exportedCount on the last
        # document identifies a kibana saved-objects export, refined by the
        # first object's type when only one object was exported.
        REPLY=$(convert_to_json "$1" |jq -sr 'try
            first as $first |
            last |
            if .script != null then "script"
            elif .policy?.phases != null then "lifecycle"
            elif .processors != null then "pipeline"
            elif .index_patterns != null then "template"
            elif .template != null then "component"
            elif .ep_download_url != null then "integration"
            elif .epr_package != null then "integration"
            elif (.exportedCount |type) == "number" then
                if .exportedCount == 1 then
                    if ($first.type == "tag") then "kibana_tag"
                    elif ($first.type == "index-pattern") then "kibana_dataview"
                    elif ($first.type == "dashboard") then "kibana_dashboard"
                    else "kibana_saved_object" end
                else "kibana_saved_object" end
            else "" end
        ')
    fi
    [[ -z $REPLY ]] && return 1
    # only report types for which a handler actually exists
    declare -F -f "do_file_type_$REPLY" >/dev/null || return 1
    return 0
}

# $1: file
# Print the file content converted to JSON on stdout.  The input format is
# selected by extension: json/ndjson (passthrough), yaml/yml (via yq),
# painless (wrapped into a stored-script body).  Returns non-zero for any
# other extension, or whatever status the converter exits with.
function convert_to_json() {
    local file=$1; shift

    case "$file" in
        *.json|*.ndjson)
            cat "$file"
            ;;
        *.yaml|*.yml)
            yq -Moj "$file"
            ;;
        *.painless)
            jq -MRs '{"script":{"lang":"painless","source":.}}' < "$file"
            ;;
        *)
            return 1
            ;;
    esac
}

# $1: method (eg: PUT)
# $2: URI
# $3: input file (any format supported by convert_to_json)
# Convert the file to JSON, run it through the external es-object-preprocessor
# and push the result to Elasticsearch; honors --dry-run via es-curl-dry-run.
# On failure REPLY holds the error message; on success REPLY is cleared.
function push_to_es() {
    local data curlout curlret
    # convert_to_json "$3" |es-curl-dry-run "$2" -X "$1" -d @-

    if [[ -n $DEBUG ]]; then
        debug '>> begin raw input'
        cat "$3"
        debug '<< end raw input'
    fi

    if ! data=$(convert_to_json "$3"); then
        REPLY='Convert to JSON failed'
        return 1
    fi
    # NOTE(review): rewrite semantics live in libexec/es-object-preprocessor
    if ! data=$(echo "$data" |"$LIBEXEC_DIR/es-object-preprocessor"); then
        REPLY='Preprocessor failed'
        return 1
    fi
    if [[ -n $DEBUG ]]; then
        debug '>> begin data piped to curl'
        [[ -n $data ]] && echo "$data"
        debug '<< end data piped to curl'
    fi

    curlout=$(echo "$data" |es-curl-dry-run "$2" -X "$1" -d @-)
    curlret=$?
    if [[ -n $DEBUG ]]; then
        debug '>> begin curl output'
        [[ -n $curlout ]] && echo "$curlout"
        debug '<< end curl output'
    fi
    if (( curlret != 0 )); then
        REPLY='Elasticsearch query failed'
        return 1
    fi

    REPLY=
    return 0
}

# $1: URI
# Only meaningful for "...@custom" objects: sets REPLY=200 when the object
# already exists, leaves REPLY=404 when it does not, and clears REPLY for
# non-@custom URIs.  Returns 1 (with an error message in REPLY) on any other
# HTTP status.
function check_custom_override() {
    if [[ $1 != *@custom ]]; then
        REPLY=
        return 0
    fi
    # disable both curl --fail-with-body (el9) and -f (el8) so the status
    # code is always printed instead of curl erroring out early
    REPLY=$(ES_LOCAL_CURL_FAIL_WITH_BODY= \
            ES_LOCAL_CURL_FAIL= \
        es-curl "$1" -o /dev/null -w '%{http_code}')
    case $REPLY in
        2??) REPLY=200; return 0 ;;
        404) return 0 ;;
    esac
    REPLY='Failed to check object existence'
    return 1
}

# $1: API base path (without the object name)
# $2: object name
# $3: file
# Shared implementation for the simple PUT-style Elasticsearch objects below:
# protected @custom objects that already exist are skipped unless --overwrite.
function _do_es_object() {
    local base=$1 name=$2 file=$3
    check_custom_override "$base/$name" || return 1
    [[ $REPLY == 200 && -z $OVERWRITE ]] && { REPLY=Skip; return 0; }
    push_to_es PUT "$base/$name" "$file"
}

# All handlers below take: $1: object name, $2: file.
# They were five identical copies of the same skip-or-PUT logic; each is now
# a one-line binding of _do_es_object to its API endpoint.
function do_file_type_component() { _do_es_object /_component_template "$@"; }
function do_file_type_lifecycle() { _do_es_object /_ilm/policy "$@"; }
function do_file_type_pipeline() { _do_es_object /_ingest/pipeline "$@"; }
function do_file_type_template() { _do_es_object /_index_template "$@"; }
function do_file_type_script() { _do_es_object /_scripts "$@"; }

# $1: object name (unused here: kibana derives ids from the export content)
# $2: file (kibana saved-objects export)
# Import the export through the saved_objects _import API and summarize the
# outcome in REPLY (OK / Skip / Failed, possibly with counts).
function do_file_type_kibana_saved_object() {
    local overwrite=false
    [[ -n $OVERWRITE ]] && overwrite=true
    local kbn_output kbn_retval kbn_stats
    # re-pack the export as compact ndjson and upload it as a form file
    kbn_output=$(convert_to_json "$2" |jq -c |
        kbn-curl-dry-run "/api/saved_objects/_import?overwrite=$overwrite" \
            -H 'content-type: multipart/form-data' \
            --form 'file=@-;filename=data.ndjson' \
            -X POST)
    kbn_retval=$?
    if [[ -n $DEBUG ]]; then
        debug '>> begin curl output'
        [[ -n $kbn_output ]] && echo "$kbn_output"
        debug '<< end curl output'
    fi
    if (( kbn_retval != 0 )); then
        REPLY='Kibana query failed'
        return 1
    fi
    # skip response analysis on dry-run
    [[ -n $DRY_RUN ]] && { REPLY=OK; return 0; }
    # kbn_stats = (hard errors, skipped conflicts, successes); conflicts only
    # count as "skipped" when not overwriting, otherwise they are real errors
    kbn_stats=($(echo "$kbn_output" |
        jq -r --argjson overwrite "$overwrite" 'try
([(.errors//[])[] |select(.error?.type == "conflict")]|length) as $conflicts |
(if $overwrite then 0 else $conflicts end) as $skip |
((.errors|length)-$conflicts) as $error |
"\($error) \($skip) \(.successCount)"'))
    # map the three counters to a one-word verdict, or a mixed summary when
    # a single import produced several different outcomes
    case "${kbn_stats[0]},${kbn_stats[1]},${kbn_stats[2]}" in
        ,,) REPLY='Unknown, could not parse response'; return 1 ;;
        *,0,0) REPLY=Failed; return 1 ;;
        0,*,0) REPLY=Skip; return 0 ;;
        0,0,*) REPLY=OK; return 0 ;;
        *)  local reply=()
            [[ ${kbn_stats[0]} != 0 ]] && reply+=( "Failed:${kbn_stats[0]}" )
            [[ ${kbn_stats[1]} != 0 ]] && reply+=( "Skip:${kbn_stats[1]}" )
            [[ ${kbn_stats[2]} != 0 ]] && reply+=( "OK:${kbn_stats[2]}" )
            REPLY=${reply[*]}
            [[ ${kbn_stats[0]} != 0 ]] && return 1
    esac
    return 0
}
# More specific kibana types (detected by guess_object_type_from_content for
# nicer priorities/labels) all share the same import implementation.
function do_file_type_kibana_tag() { do_file_type_kibana_saved_object "$@"; }
function do_file_type_kibana_dataview() { do_file_type_kibana_saved_object "$@"; }
function do_file_type_kibana_dashboard() { do_file_type_kibana_saved_object "$@"; }

# $1: object name (unused: the package defines its own name/version)
# $2: file: either a directory containing an integration package source
#     (manifest.yml) or a json/yaml spec pointing at a downloadable package
#     (.ep_download_url, or .epr_url + .epr_package, optional .pick)
function do_file_type_integration() {
    # NB: pick_meta must be local — it was previously leaked as a global
    # (the local list declared a never-used "meta_meta" instead and the
    # directory branch cleared "meta" instead of "pick_meta"), so a stale
    # value could survive into the next integration file processed.
    local spec package pick_spec pick_meta

    if [[ -d $2/. ]]; then
        spec=($(convert_to_json "$2/manifest.yml" |jq -r '"\(.name//"") \(.version//"")"'))
        if [[ -z ${spec[0]} || -z ${spec[1]} ]]; then
            REPLY='Failed to get name/version from manifest.yml'
            return 1
        fi

        package="$TMPDIR/${spec[0]}-${spec[1]}.tar.gz"
        info "Create tarball ${package##*/}"
        # fleet expects the content under a "<name>-<version>/" top directory
        if ! tar czf "$package" -C "$2/" --transform "s,^\.,${spec[0]}-${spec[1]}," ./; then
            REPLY='Failed to create tarball'
            return 1
        fi
        pick_spec=
        pick_meta=
    else
        local download_url download_path
        # values are @sh-quoted by jq, safe to eval into the spec array
        eval spec=($(convert_to_json "$2" |jq -r '[
            .ep_download_url//"",
            .epr_url//"",
            .epr_package//"",
            (if .pick == null then "" else .pick |@json end)
        ] |@sh'))

        if [[ -n ${spec[0]} ]]; then
            download_url=${spec[0]}
        elif [[ -n ${spec[1]} && -n ${spec[2]} ]]; then
            # get download url
            info "Query ${spec[1]} for latest version of package ${spec[2]}"
            download_path=$(mycurl -L "${spec[1]}/search?package=${spec[2]}&release=ga&type=integration" |
                jq -r 'try (.[0].download // "")')
            if [[ -z $download_path ]]; then
                REPLY='Failed to get url for latest version'
                return 1
            fi
            download_url="${spec[1]}${download_path}"
        else
            REPLY='Invalid integration specs'
            return 1
        fi

        package="$TMPDIR/${download_url##*/}"
        info "Downloading $download_url..."
        if ! mycurl -L "$download_url" -o "$package"; then
            REPLY='Download failed'
            return 1
        fi
        pick_spec=${spec[3]}
        pick_meta=$(jq -n --arg u "$download_url" '{zlc:{origin:{type:"integration",url:$u}}}')
        DOWNLOADED_FILES+=( "$package" )
    fi

    if [[ -n $pick_spec ]]; then
        # install only the files selected by the pick spec
        pick_from_integration_tarball "$package" "$pick_spec" "$pick_meta"
    else
        # install the whole package through fleet
        do_tarball_type_integration "$package"
    fi
}

# $1: integration tarball
# $2: pick spec (json array: [{file, target_name, modifiers?}, ...])
# $3: pick meta (json object, exposed as $meta to the "meta" modifier)
# Extract the tarball and install only the listed files, applying any
# pick_modifier_* transformations first.
function pick_from_integration_tarball() {
    local tarball=$1 pick=$2 meta=$3
    local nfiles=$(echo "$pick" |jq -r 'length')
    local i spec j m position file object_name modifier

    local xdir="$TMPDIR/i.$RANDOM"
    extract_tarball "$tarball" "$xdir" || return 1

    for (( i = 0; i < nfiles; i++ )); do
        # human-readable locator for error messages (the "$." is literal)
        position="$.pick[$i]"
        # jq flattens each entry to: file target_name then, per modifier,
        # its element count followed by its elements — all @sh-quoted so the
        # eval safely rebuilds the spec array
        eval spec=($(echo "$pick" |jq -r --argjson i "$i" '
            .[$i] |
            [ .file, .target_name ] +
                if .modifiers
                then [(.modifiers |map([length, .[]]) [][])]
                else [] end |
            @sh
        '))
        file=$(find -L "$xdir/" -type f -path "*/${spec[0]}")
        [[ -e $file ]] || { REPLY="$position: not found, ${spec[0]}"; return 1; }
        object_name=${spec[1]}
        # walk the flattened modifiers: spec[j] is the size of the next
        # modifier (name + its arguments), which starts at spec[j+1]
        for (( j = 2, m = 0; j < ${#spec[@]}; j += (${spec[j]}+1), m++ )); do
            position="${position%.modifiers*}.modifiers[$m]"
            modifier=( "${spec[@]:j+1:${spec[j]}}" )
            declare -f -F "pick_modifier_${modifier[0]}" >/dev/null ||
                { REPLY="$position: not supported, ${modifier[0]}"; return 1; }
            # $meta is passed through the environment to the modifier
            meta="$meta" "pick_modifier_${modifier[0]}" "$file" "${modifier[@]:1}" ||
                { REPLY="$position: modifier failed, ${modifier[0]}"; return 1; }
        done
        FORCED_OBJECT_NAME="$object_name" do_file "$file" || return $?
    done
}

# $1: file
# $2: extended regex to replace
# $3: replacement
# In-place global substitution.  '!' (0x21, the former $sep) is the sed
# delimiter, so pattern/replacement must not contain an unescaped '!'.
# NOTE(review): $2/$3 are spliced verbatim into the sed program — pick specs
# must be trusted input.
function pick_modifier_gsub() {
    local file=$1 pattern=$2 replacement=$3
    sed -i -re "s!${pattern}!${replacement}!g" "$file"
}

# $1: file (json or yaml object)
# $2: optional extra meta (json object)
# env: meta - base meta (json object) set by pick_from_integration_tarball
# Deep-merge $meta and $2 into the object's _meta field, rewriting the file
# in place while preserving its original format (yaml stays yaml).
function pick_modifier_meta() {
    local noarg='{}'
    convert_to_json "$1" |
        jq --argjson m1 "$meta" --argjson m2 "${2:-$noarg}" '. * {_meta:($m1 * $m2)}' |
        if [[ $1 == *.yml || $1 == *.yaml ]]; then yq -pj -oy; else cat; fi \
            > "$1.tmp" && mv "$1"{.tmp,}
}

# $1: "ecs" or "ecs:version" (a leading "v" in the version is accepted)
# Download the matching ECS release tarball from GitHub into TMPDIR, register
# it in DOWNLOADED_FILES and leave its path in REPLY.  Without an explicit
# version the latest GitHub release is resolved first.
function download_ecs_tarball() {
    local version=${1#*:}
    # quoted "$1": literal comparison (the unquoted form was a pattern match)
    [[ $version == "$1" ]] && version=      # no ":version" suffix given
    version=${version#v}

    if [[ -z $version ]]; then
        version=$(mycurl "https://api.github.com/repos/$ECS_GITHUB_OWNER/$ECS_GITHUB_REPO/releases/latest" |
            jq -r '.tag_name |sub("^v"; "")')
        if [[ $? != 0 || -z $version ]]; then
            # (typo fix: was "lastest")
            REPLY='Failed to find latest ecs version number'
            return 1
        fi
    fi

    local url="https://github.com/$ECS_GITHUB_OWNER/$ECS_GITHUB_REPO/archive/refs/tags/v$version.tar.gz"
    local tarball="$TMPDIR/ecs-$version.tar.gz"
    info "Downloading $url..."
    if ! mycurl -L "$url" -o "$tarball"; then
        REPLY='Download failed'
        return 1
    fi

    DOWNLOADED_FILES+=( "$tarball" )
    REPLY=$tarball
    return 0
}

# $1: file
# Identify what kind of archive this is and leave the type in REPLY
# ("ecs" or "integration"); returns 1 when unrecognized.
function guess_tarball() {
    # ecs release: the generated composable component file is the marker
    if list_tarball "$1" 2>/dev/null |grep -qE \
        '^ecs-[^/]+/generated/elasticsearch/composable/component/ecs\.json$'; then
        REPLY=ecs
        return 0
    fi

    # elastic integration package: manifest.yml declares type: integration
    local pkg_type
    pkg_type=$(cat_tarball "$1" '*/manifest.yml' 2>/dev/null |yq -py '.type')
    if [[ $pkg_type == integration ]]; then
        REPLY=integration
        return 0
    fi

    return 1
}

# $1: file
# Print the archive member names on stdout, one per line.  On failure REPLY
# holds an error message and 1 is returned.
function list_tarball() {
    case "$1" in
        *.tar.*)
            tar taf "$1"
            ;;
        *.zip)
            unzip -l "$1" |awk '$1 ~ /^[0-9]+$/ && NF > 3 { print $NF }'
            ;;
        *)
            REPLY='Unsupported extension'
            return 1
            ;;
    esac
    # PIPESTATUS[0]: check the archiver itself, not awk
    if (( PIPESTATUS[0] != 0 )); then
        REPLY='Failed to list archive content'
        return 1
    fi
}

# $1: file
# $2: destination directory (created if needed)
# Extract the archive into the destination.  On failure REPLY holds an error
# message and 1 is returned.
function extract_tarball() {
    local rc
    if ! mkdir -p "$2"; then
        REPLY='Failed to create destination directory'
        return 1
    fi
    case "$1" in
        *.tar.*)
            tar xaf "$1" -C "$2"; rc=$?
            ;;
        *.zip)
            unzip -q -d "$2" "$1"; rc=$?
            ;;
        *)
            REPLY='Unsupported extension'
            return 1
            ;;
    esac
    if (( rc != 0 )); then
        REPLY='Failed to extract archive'
        return 1
    fi
}

# $1: archive file
# $@: remaining args are forwarded to the extractor (member names/patterns)
# Print selected archive members' content on stdout.  On failure REPLY holds
# an error message and 1 is returned.
function cat_tarball() {
    case "$1" in
        *.tar.*)
            tar xaOf "$@"
            ;;
        *.zip)
            unzip -qp "$@"
            ;;
        *)
            REPLY='Unsupported extension'
            return 1
            ;;
    esac
    local rc=$?
    if (( rc != 0 )); then
        REPLY='Failed to cat archive content'
        return 1
    fi
}

# $1: ecs release tarball
# Extract the tarball and install each generated composable component file
# as an Elasticsearch component template.
function do_tarball_type_ecs() {
    local file=$1
    extract_tarball "$file" "$TMPDIR/ecs" || return 1

    local retval=0
    # Two fixes versus the previous version:
    # - the script runs under "set -f", so the "ecs-*" glob in the find start
    #   path never expanded and find was handed a literal "ecs-*" directory;
    #   the release directory is now matched by find's own -path pattern
    #   (depth 6 == ecs-*/generated/elasticsearch/composable/component/*.json);
    # - the loop is fed from process substitution instead of a pipeline, so
    #   "retval=1" is no longer lost in a subshell and failures propagate.
    while read -r file; do
        local fname=${file##*/}
        FORCED_OBJECT_TYPE=component \
        FORCED_OBJECT_NAME="${OBJECT_PREFIX}-component-ecs-${fname%.json}" \
           do_file "$file" || retval=1
    done < <(find "$TMPDIR/ecs" -mindepth 6 -maxdepth 6 -type f -name '*.json' \
        -path '*/ecs-*/generated/elasticsearch/composable/component/*')

    REPLY=
    return "$retval"
}

# $1: integration package archive (.zip or .tar.gz)
# Upload the package archive to fleet for installation; honors --dry-run.
function do_tarball_type_integration() {
    local mime curlout curlret

    case "$1" in
        *.zip)    mime=application/zip ;;
        *.tar.gz) mime=application/gzip ;;
        *)
            REPLY='Require .zip or .tar.gz extension'
            return 1
            ;;
    esac

    curlout=$(kbn-curl-dry-run /api/fleet/epm/packages --data-binary "@$1" \
                -H "content-type: $mime" -X POST)
    curlret=$?
    if [[ -n $DEBUG ]]; then
        debug '>> begin curl output'
        [[ -n $curlout ]] && echo "$curlout"
        debug '<< end curl output'
    fi
    if (( curlret != 0 )); then
        REPLY='Kibana query failed'
        return 1
    fi

    REPLY=
    return 0
}

# $1: tarball file
# Identify the tarball type and dispatch to its do_tarball_type_* handler.
function do_tarball() {
    # NB: take the file from $1 (callers already pass it) instead of silently
    # relying on the caller's "file" local through dynamic scoping; the
    # fallback keeps any legacy argument-less call working.
    local file=${1:-$file}

    if ! guess_tarball "$file"; then
        error "Unsupported tarball file, $file"
        return 1
    fi
    local tarball_type=$REPLY

    if ! declare -f -F "do_tarball_type_${tarball_type}" >/dev/null; then
        error "Unsupported tarball type, $file ($tarball_type)"
        return 1
    fi

    if ! "do_tarball_type_${tarball_type}" "$file"; then
        error "${REPLY:-Failed}, $file ($tarball_type)"
        return 1
    fi

    info "${REPLY:-OK}, $file ($tarball_type)"
    return 0
}

# $1: file, directory, archive, or the special "ecs[:version]" keyword
# Install a single object: resolve the ecs keyword to a downloaded tarball,
# dispatch archives to do_tarball, otherwise determine the object type
# (forced or guessed from content) and run the do_file_type_* handler.
# Honors FORCED_OBJECT_TYPE / FORCED_OBJECT_NAME, which internal callers
# (do_tarball_type_ecs, pick_from_integration_tarball) also set per-call.
function do_file() {
    local file=$1

    # "ecs" / "ecs:8.x" pseudo-files are fetched from GitHub first
    if [[ $file == ecs || $file == ecs:* ]]; then
        if ! download_ecs_tarball "$file"; then
            error "${REPLY:-Failed}, $file (download)"
            return 1
        fi
        file=$REPLY
    fi

    if [[ ! -r $file ]]; then
        error "Cannot read file, $file"
        return 1
    fi

    local filename=${file##*/}
    local object_type=$FORCED_OBJECT_TYPE
    # default object name: file name stripped of its last extension
    local object_name=${FORCED_OBJECT_NAME:-${filename%.*}}

    # archives take a completely different path (fleet upload / ecs install)
    if [[ $file == *.tar.* || $file == *.zip ]]; then
        do_tarball "$file"
        return $?
    fi

    if [[ -z $object_type ]]; then
        if ! guess_object_type_from_content "$file"; then
            error "Cannot guess object type, $file"
            return 1
        fi
        object_type=$REPLY
    fi

    if ! declare -f -F "do_file_type_${object_type}" >/dev/null; then
        error "Unsupported object type, $file ($object_type)"
        return 1
    fi

    if ! "do_file_type_${object_type}" "$object_name" "$file"; then
        error "${REPLY:-Failed}, $file ($object_type, $object_name)"
        return 1
    fi

    info "${REPLY:-OK}, $file ($object_type, $object_name)"
    return 0
}

# --- main: option parsing, argument expansion, install ordering ---

DEBUG=
FORCED_OBJECT_TYPE=
FORCED_OBJECT_NAME=
KEEP_DOWNLOADED_FILES=
DOWNLOADED_FILES=()
EXTERNAL_PROXY=
DRY_RUN=
args=()
while (( $# > 0 )); do
    case "$1" in
        -t|--type) FORCED_OBJECT_TYPE=$2; shift ;;
        -n|--name) FORCED_OBJECT_NAME=$2; shift ;;
        -k|--keep) KEEP_DOWNLOADED_FILES=1 ;;
        -P|--proxy) EXTERNAL_PROXY=$2; shift ;;
        --overwrite) OVERWRITE=1 ;;
        --dry-run) DRY_RUN=1 ;;
        -d|--debug) DEBUG=1; export ES_DEBUG=1 ;;
        -h|--help) exit_usage 0 ;;
        --) shift; break ;;
        -*) exit_usage 1 ;;
        *) args+=( "$1" ) ;;
    esac
    shift
done
args+=( "$@" )

[[ -z $args ]] && exit_usage 1

# Handle directories, expand to files
IFS=$'\n'
for (( i=0,len=${#args[@]}; i<len; i++ )); do
    [[ -d ${args[i]}/. ]] || continue
    args+=( $(find -L "${args[i]}" -type f \( -name '*.json' -o \
        -name '*.ndjson' -o -name '*.yaml' -o -name '*.yml' -o \
        -name '*.painless' \)) )
    unset 'args[i]'
done
IFS=$OIFS

# Sort files to minimize the chance of failure due to reference of an object
# that does not exist yet, eg: pipeline referencing a script.
declare -A type2prio=(
    [integration]=30
    [script]=40
    [lifecycle]=50
    [pipeline]=60
    [component]=70
    [kibana_tag]=82
    [kibana_dataview]=84
    [kibana_saved_object]=86
    [kibana_dashboard]=88
    [_]=79
)
IFS=$'\n'
args=($(
    for i in "${args[@]}"; do
        # NB: reset prio each iteration — previously a file whose type could
        # not be guessed inherited the priority of the preceding file
        prio=
        guess_object_type_from_content "$i" && prio=${type2prio[$REPLY]}
        [[ -z $prio ]] && prio=${type2prio[_]}
        echo "$prio $i"
    done |sort -n |sed -re 's,^[0-9]+ ,,'
))
IFS=$OIFS

if (( ${#args[@]} > 1 )) && [[ -n $FORCED_OBJECT_NAME ]]; then
    fatal "Forced name unsupported with multiple files arguments"
fi

trap on_exit EXIT

mkdir -p "$TMPDIR" || fatal 'Failed to create temporary directory'

retval=0
for i in "${args[@]}"; do
    do_file "$i" || retval=1
done

exit "$retval"
