scripts: recover on partially downloaded files
author "Yann E. MORIN" <yann.morin.1998@anciens.enib.fr>
Mon Nov 29 00:26:39 2010 +0100 (2010-11-29)
changeset 2205 62b3f52315b3
parent 2204 ea1c9143e1e3
child 2206 2b5f5173daa0
scripts: recover on partially downloaded files

Download to an intermediate temp file, and rename it to its final
name only if the download succeeds.

This catches both a failed download, and also the case where the user
interrupts the download. Thus, a partial download gets discarded,
and we no longer try to extract a partial tarball, which we would
previously have done.

Suggested by Thomas PETAZZONI.

Signed-off-by: "Yann E. MORIN" <yann.morin.1998@anciens.enib.fr>
scripts/functions
     1.1 --- a/scripts/functions	Tue Nov 16 17:49:15 2010 +0100
     1.2 +++ b/scripts/functions	Mon Nov 29 00:26:39 2010 +0100
     1.3 @@ -359,6 +359,8 @@
     1.4  # to find the requested URL (think about snapshots, different layouts
     1.5  # for different gcc versions, etc...).
     1.6  CT_DoGetFile() {
     1.7 +    local dest="${1##*/}"
     1.8 +    local tmp="${dest}.tmp-dl"
     1.9      # OK, just look if we have them...
    1.10      # We are sure at least one is available, ./configure checked for it.
    1.11      local _curl=$(CT_Which curl)
    1.12 @@ -366,6 +368,9 @@
    1.13      _curl="${_curl:-false}"
    1.14      _wget="${_wget:-false}"
    1.15  
    1.16 +    # Remove potential left-over from a previous run
    1.17 +    rm -f "${tmp}"
    1.18 +
    1.19      # Some (very old!) FTP server might not support the passive mode, thus
    1.20      # retry without.
    1.21      # We also retry a few times, in case there is a transient error (eg. behind
    1.22 @@ -377,11 +382,17 @@
    1.23      # not easy to detect them, and wget does not timeout by default while
    1.24      # connecting, so force a global ${CT_CONNECT_TIMEOUT}-second timeout.
    1.25      # For curl, no good progress indicator is available. So, be silent.
    1.26 -       CT_DoExecLog ALL "${_curl}" --ftp-pasv    --retry 3 --connect-timeout ${CT_CONNECT_TIMEOUT} -L -f -s -O "$1" \
    1.27 -    || CT_DoExecLog ALL "${_curl}"               --retry 3 --connect-timeout ${CT_CONNECT_TIMEOUT} -L -f -s -O "$1" \
    1.28 -    || CT_DoExecLog ALL "${_wget}" --passive-ftp --tries=3 -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary  "$1" \
    1.29 -    || CT_DoExecLog ALL "${_wget}"               --tries=3 -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary  "$1" \
    1.30 -    || rm -f "${1##*/}"
    1.31 +    if CT_DoExecLog ALL "${_curl}" --ftp-pasv    --retry 3 --connect-timeout ${CT_CONNECT_TIMEOUT} -L -f -s -o "${tmp}"   "$1"  \
    1.32 +    || CT_DoExecLog ALL "${_curl}"               --retry 3 --connect-timeout ${CT_CONNECT_TIMEOUT} -L -f -s -o "${tmp}"   "$1"  \
    1.33 +    || CT_DoExecLog ALL "${_wget}" --passive-ftp --tries=3 -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary -O "${tmp}" "$1"  \
    1.34 +    || CT_DoExecLog ALL "${_wget}"               --tries=3 -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary -O "${tmp}" "$1"  \
    1.35 +    ; then
    1.36 +        # One of them succeeded, good!
    1.37 +        mv "${tmp}" "${dest}"
    1.38 +    else
    1.39 +        # Woops...
    1.40 +        rm -f "${tmp}"
    1.41 +    fi
    1.42  }
    1.43  
    1.44  # This function tries to retrieve a tarball form a local directory