scripts/functions: only use one download program
author"Yann E. MORIN" <yann.morin.1998@anciens.enib.fr>
Fri Sep 09 15:34:04 2011 +0200 (2011-09-09)
changeset 2660 2a44af825e60
parent 2654 32209f462bbb
child 2661 95ad28b9dea6
scripts/functions: only use one download program

Currently, we use either wget or curl, whichever is installed.
When both are installed, both are tried in turn for every download,
so stepping through all the tarball extensions takes needlessly long.

Remove the use of wget, and use only curl.

Signed-off-by: "Yann E. MORIN" <yann.morin.1998@anciens.enib.fr>
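
For reference, here is a minimal, self-contained sketch of the download
step as it looks after this change. The helper name do_get_file is only
for illustration; CT_DoExecLog, CT_CONNECT_TIMEOUT and CT_TARBALLS_DIR
are the existing crosstool-NG helpers and variables visible in the hunk
below:

    # Sketch only: fetch ${url} into ${CT_TARBALLS_DIR}, first forcing
    # passive FTP, then letting curl use its default mode.
    do_get_file() {
        local url="${1}"
        local dest="${CT_TARBALLS_DIR}/${url##*/}"
        local tmp="${dest}.tmp-dl"

        # Remove potential left-over from a previous run
        rm -f "${tmp}"

        if CT_DoExecLog ALL curl --ftp-pasv --retry 3 --connect-timeout ${CT_CONNECT_TIMEOUT} -L -f -s -o "${tmp}" "${url}"  \
        || CT_DoExecLog ALL curl            --retry 3 --connect-timeout ${CT_CONNECT_TIMEOUT} -L -f -s -o "${tmp}" "${url}"  \
        ; then
            mv "${tmp}" "${dest}"
            return 0
        fi
        rm -f "${tmp}"
        return 1
    }

The two invocations keep the existing fallback: try passive FTP first,
and only retry without forcing it if that attempt fails.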
configure
scripts/functions
     1.1 --- a/configure	Fri Sep 09 14:15:10 2011 +0200
     1.2 +++ b/configure	Fri Sep 09 15:34:04 2011 +0200
     1.3 @@ -496,7 +496,7 @@
     1.4               ver='\(GNU libtool.*\) (2[[:digit:]]*\.|1\.6[[:digit:]]*\.|1\.5\.[2-9][[:digit:]]+)'   \
     1.5               err="'libtoolize' 1.5.26 or above was not found"
     1.6  has_or_abort prog=stat
     1.7 -has_or_abort prog="curl wget"
     1.8 +has_or_abort prog="curl"
     1.9  has_or_abort prog=patch
    1.10  has_or_abort prog=tar
    1.11  has_or_abort prog=gzip
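
The configure hunk above tightens the prerequisite check: before, either
curl or wget satisfied it; now curl alone is required. As a rough,
hypothetical approximation (not the actual configure code), has_or_abort
with a prog= argument behaves like this:

    # Hypothetical sketch: succeed if any of the listed programs is in
    # the PATH, abort the configure run otherwise.
    has_or_abort() {
        local a prog p
        for a in "$@"; do
            case "${a}" in
                prog=*) prog="${a#prog=}";;
            esac
        done
        for p in ${prog}; do
            command -v "${p}" >/dev/null 2>&1 && return 0
        done
        printf "Error: none of '%s' was found\n" "${prog}" >&2
        exit 1
    }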
     2.1 --- a/scripts/functions	Fri Sep 09 14:15:10 2011 +0200
     2.2 +++ b/scripts/functions	Fri Sep 09 15:34:04 2011 +0200
     2.3 @@ -444,12 +444,6 @@
     2.4      local url="${1}"
     2.5      local dest="${CT_TARBALLS_DIR}/${url##*/}"
     2.6      local tmp="${dest}.tmp-dl"
     2.7 -    # OK, just look if we have them...
     2.8 -    # We are sure at least one is available, ./configure checked for it.
     2.9 -    local _curl=$(CT_Which curl)
    2.10 -    local _wget=$(CT_Which wget)
    2.11 -    _curl="${_curl:-false}"
    2.12 -    _wget="${_wget:-false}"
    2.13  
    2.14      # Remove potential left-over from a previous run
    2.15      rm -f "${tmp}"
    2.16 @@ -462,13 +456,11 @@
    2.17      # continue the downloads. It's far better to simply overwrite the
    2.18      # destination file.
    2.19      # Some company networks have firewalls to connect to the internet, but it's
    2.20 -    # not easy to detect them, and wget does not timeout by default while
    2.21 -    # connecting, so force a global ${CT_CONNECT_TIMEOUT}-second timeout.
    2.22 +    # not easy to detect them, so force a global ${CT_CONNECT_TIMEOUT}-second
    2.23 +    # timeout.
    2.24      # For curl, no good progress indicator is available. So, be silent.
    2.25 -    if CT_DoExecLog ALL "${_curl}" --ftp-pasv    --retry 3 --connect-timeout ${CT_CONNECT_TIMEOUT} -L -f -s -o "${tmp}"   "${url}"  \
    2.26 -    || CT_DoExecLog ALL "${_curl}"               --retry 3 --connect-timeout ${CT_CONNECT_TIMEOUT} -L -f -s -o "${tmp}"   "${url}"  \
    2.27 -    || CT_DoExecLog ALL "${_wget}" --passive-ftp --tries=3 -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary -O "${tmp}" "${url}"  \
    2.28 -    || CT_DoExecLog ALL "${_wget}"               --tries=3 -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary -O "${tmp}" "${url}"  \
    2.29 +    if CT_DoExecLog ALL curl --ftp-pasv --retry 3 --connect-timeout ${CT_CONNECT_TIMEOUT} -L -f -s -o "${tmp}" "${url}"  \
    2.30 +    || CT_DoExecLog ALL curl            --retry 3 --connect-timeout ${CT_CONNECT_TIMEOUT} -L -f -s -o "${tmp}" "${url}"  \
    2.31      ; then
    2.32          # One of them succeeded, good!
    2.33          mv "${tmp}" "${dest}"
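
As a quick sanity check outside the build scripts, the first of the two
curl invocations can be reproduced by hand; the URL and timeout below
are only examples:

    # Manually reproduce the passive-FTP attempt: 3 retries, a connect
    # timeout, follow redirects (-L), fail on server errors (-f), stay
    # silent (-s), and write to a local file named after the URL.
    CT_CONNECT_TIMEOUT=10    # example value
    url="ftp://ftp.gnu.org/gnu/gcc/gcc-4.6.1/gcc-4.6.1.tar.bz2"    # example URL
    curl --ftp-pasv --retry 3 --connect-timeout ${CT_CONNECT_TIMEOUT} \
         -L -f -s -o "${url##*/}" "${url}"
    echo "curl exit status: $?"

A non-zero exit status is what makes the "||" fallback in the hunk above
try the second, non-passive invocation.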