1.1 --- a/scripts/functions Sun Apr 13 18:03:28 2008 +0000
1.2 +++ b/scripts/functions Sun Apr 13 18:16:58 2008 +0000
1.3 @@ -283,16 +283,23 @@
1.4 # With automated download as we are doing, it can be very dangerous to use
1.5 # -c to continue the downloads. It's far better to simply overwrite the
1.6 # destination file
1.7 - wget -nc --progress=dot:binary --tries=3 --passive-ftp "$1" || wget -nc --progress=dot:binary --tries=3 "$1" || true
1.8 + # Some company networks have proxies to connect to the internet, but it's
1.9 + # not easy to detect them, and wget may never timeout while connecting, so
1.10 + # force a global 120s timeout.
1.11 + wget -T 120 -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \
1.12 + || wget -T 120 -nc --progress=dot:binary --tries=3 "$1" \
1.13 + || true
1.14 }
1.15
1.16 # Download an URL using curl
1.17 # Usage: CT_DoGetFileCurl <URL>
1.18 CT_DoGetFileCurl() {
1.19 - # Note: comments about wget method are also valid here
1.20 - # Plus: no good progreess indicator is available with curl,
1.21 - # so output is consigned to oblivion
1.22 - curl --ftp-pasv -O --retry 3 "$1" >/dev/null || curl -O --retry 3 "$1" >/dev/null || true
1.23 + # Note: comments about wget method are also valid here
1.24 + # Plus: no good progress indicator is available with curl,
1.25 + # so output is consigned to oblivion
1.26 + curl --ftp-pasv -O --retry 3 "$1" --connect-timeout 120 >/dev/null \
1.27 + || curl -O --retry 3 "$1" --connect-timeout 120 >/dev/null \
1.28 + || true
1.29 }
1.30
1.31 _wget=`CT_Which wget`