#!/bin/sh

# Copyright (C) 2012-2013 Aleksey Lim
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

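# Set V=1 in the environment to trace every executed command (set -x) and to
# make wget/curl verbose; abort() reminds the user about this mode on failure.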
[ "${V}" ] && set -x

info() {
    echo "-- $@"
}

phase() {
    echo "   $@"
}

warning() {
    echo "-- $@"
}

abort() {
    rm -f *.sync .sync  # clean up partial downloads, including the hidden .sync a pull leaves behind
    echo "-- $@"
    [ "${V}" ] || echo "   run \"V=1 $0 ${cmdline}\" to get full debugging output"
    exit 1
}

help() {
    cat <<EOF
Usage: $(basename $0) [PATH] [URL]

Sugar Network sneakernet synchronization utility.

Command arguments:
  PATH      if specified, the utility recursively searches PATH for
            synchronization packet files (files with the ".sneakernet"
            suffix); each file is uploaded with wget or curl to the
            Sugar Network server it targets, and the resulting packets
            are downloaded; on success, uploaded packets are removed and
            the resulting packets are placed in PATH instead
  URL       if specified (PATH must be given as well), should be a Sugar
            Network API URL, e.g., http://api-testing.network.sugarlabs.org;
            the script will download a full data dump from that server

The utility is intended to upload request packet files (generated by Sugar
Network node servers) to the Sugar Network master server and to download
response packets for delivery back to the nodes.

See http://wiki.sugarlabs.org/go/Sugar_Network for details.
EOF
}
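
# Example invocations (the USB-stick path is illustrative, and the script name
# assumes it is installed as "sugar-network-sync"):
#   sugar-network-sync /media/usb/sugar-network-sync
#       push *.sneakernet packets found under that path and fetch results
#   sugar-network-sync /media/usb/sugar-network-sync http://api-testing.network.sugarlabs.org
#       additionally clone a full data dump from the given master server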

get_hostname() {
    # extract the host part of an URL, e.g.,
    # "http://api-testing.network.sugarlabs.org/path" -> "api-testing.network.sugarlabs.org"
    echo "$@" | awk -F/ '{print $3}'
}
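
# Every synchronization packet is a gzip stream whose first line is a JSON
# header; get_header_key() below pulls one key's value out of that line.
# An illustrative (not real) header:
#   {"filename": "response.sneakernet", "api_url": "http://api-testing.network.sugarlabs.org"}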

get_header_key() {
    local package="$1"
    local key="$2"

    zcat "${package}" | head -n1 | \
        grep -o "\"${key}\":[^,}]\+" | \
        sed 's/^[^:]*://; s/^[ "]*//; s/"$//'
}

upload() {
    local url="$1"
    local cookie="$2"
    local in_packet="$3"
    local out_packet="${in_packet}.sync"

    [ -e "${out_packet}" ] && abort "Destination ${out_packet} already exists"

    if which wget >/dev/null 2>&1; then
        cmd="wget --output-document=${out_packet} --load-cookies=${cookie} --save-cookies=${cookie}"
        [ "${in_packet}" ] && cmd="${cmd} --post-file=${in_packet}"
        [ "${V}" ] && cmd="${cmd} --server-response" || cmd="${cmd} --quiet"
    elif which curl >/dev/null 2>&1; then
        cmd="curl --output ${out_packet} --cookie ${cookie} --cookie-jar ${cookie}"
        [ "${in_packet}" ] && cmd="${cmd} -XPOST --data-binary @${in_packet}"
        [ "${V}" ] || cmd="${cmd} --silent"
    else
        abort "Neither wget nor curl is available"
    fi

    ${cmd} "${url}" || abort "Upload command failed"

    if [ -e "${out_packet}" ]; then
        if [ $(stat -c %s "${out_packet}") -eq 0 ]; then
            rm "${out_packet}"
        else
            out_filename="$(get_header_key "${out_packet}" filename)"
            phase "Store results in ${out_filename}"
            mv "${out_packet}" "${out_filename}" || abort "Cannot write ${out_filename}"
        fi
    fi

    if [ -e "${cookie}" ]; then
        if grep -q unset_sugar_network_pull "${cookie}"; then
            # server finished processing our pull; drop the session
            rm "${cookie}"
        elif grep -q sugar_network_pull "${cookie}"; then
            # server is still preparing results; keep the cookie to retry later
            phase "Postpone pull with ${cookie}"
        else
            rm "${cookie}"
        fi
    fi

    if [ "${in_packet}" ]; then
        phase "Remove uploaded file"
        rm "${in_packet}"
    fi
}
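
# upload() is used in two shapes (both appear later in this script; <host>
# stands for a master server's host name):
#   upload "http://<host>?cmd=push" "<host>.cookie" "packet.sneakernet"     # push a packet
#   upload "http://<host>?cmd=pull&accept_length=<bytes>" "<host>.cookie"   # pull updates
# The cookie jar file carries the server's session state between calls.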

pull() {
    local api_url="$1?cmd=pull"
    local cookie="$(get_hostname $1).cookie"

    while true; do
        # -P (portability) keeps each filesystem on a single output line so
        # the "available" column can be parsed reliably
        local disk_free=$(df -P --block-size=1 . | tail -n1 | awk '{print $4}')
        [ $disk_free -gt $disk_limit ] || abort \
            "No free disk space on $PWD, copy all *.cookie files" \
            "to directory with more free space and re-run $0 from there"

        upload "${api_url}&accept_length=${disk_free}" "${cookie}"
        [ -e "${cookie}" ] || break

        # the server advertises its processing delay via a "sugar_network_delay"
        # cookie; in the Netscape cookie-jar format, name and value are the
        # last two tab-separated fields of the line
        delay=$(grep -o 'sugar_network_delay[[:space:]]*[0-9]\+' "${cookie}" 2>/dev/null | awk '{print $2}')
        if [ "${delay}" ]; then
            phase "Server started processing pull, check for results in ${delay} seconds"
            sleep ${delay}
        else
            phase "Pull postponed updates"
        fi
    done
}

sync_path="$1"
clone_url="$2"
cmdline="$*"
disk_limit=$((1024 * 1024 * 10)) # minimum free disk space (10 MiB) required to start a pull

if [ $# -eq 0 ]; then
    if [ "$(basename $(dirname $0))" = "sugar-network-sync" ]; then
        # Script was launched from sync directory, so, process sync
        sync_path="$(dirname $0)"
    else
        help
        exit 0
    fi
fi

mkdir -p "${sync_path}" || abort "Cannot create ${sync_path} sync directory"
cd "${sync_path}" || abort "Cannot switch to ${sync_path} sync directory"

mountpoint="$(stat --printf %m .)"
[ "${mountpoint}/sugar-network-sync" = "$PWD" ] || \
    info "NOTICE To enable further automatic synchronization, move the $PWD directory to the root of its mount point"
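
# A layout capable of auto synchronization might look like this (the mount
# point name is illustrative):
#   /media/usb/sugar-network-sync/
#       <host>.cookie       session state per master server
#       *.sneakernet        request/response packets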

if [ "${clone_url}" ]; then
    info "Clone master"
    pull "${clone_url}"
    exit 0
fi

# Upload push packets first; keep full paths so packets found in
# subdirectories can be read
for package in $(find . -type f -name '*.sneakernet'); do
    api_url="$(get_header_key "${package}" api_url)"
    if [ -z "${api_url}" ]; then
        info "Skip ${package}, it is not intended for uploading"
    else
        info "Push ${package} to ${api_url}"
        upload "${api_url}?cmd=push" "$(get_hostname ${api_url}).cookie" "${package}"
    fi
done

# Using cookies from uploaded packets, download master data
while true; do
    found=
    for cookie in $(find . -type f -name '*.cookie'); do
        api_url="http://$(basename "${cookie}" .cookie)"
        info "Pull updates from ${api_url}"
        pull "${api_url}"
        found=1
    done
    [ "${found}" ] || break
done
