diff -r 548b7aa23385 -r dd62fca2d6fd scripts/functions
--- a/scripts/functions	Thu Feb 14 22:44:34 2008 +0000
+++ b/scripts/functions	Sun Apr 13 18:16:58 2008 +0000
@@ -283,16 +283,23 @@
     # With automated download as we are doing, it can be very dangerous to use
     # -c to continue the downloads. It's far better to simply overwrite the
     # destination file
-    wget -nc --progress=dot:binary --tries=3 --passive-ftp "$1" || wget -nc --progress=dot:binary --tries=3 "$1" || true
+    # Some company networks have proxies to connect to the internet, but it's
+    # not easy to detect them, and wget may never time out while connecting,
+    # so force a global 120s timeout.
+    wget -T 120 -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \
+    || wget -T 120 -nc --progress=dot:binary --tries=3 "$1"            \
+    || true
 }
 
 # Download an URL using curl
 # Usage: CT_DoGetFileCurl
 CT_DoGetFileCurl() {
-    # Note: comments about wget method are also valid here
-    # Plus: no good progreess indicator is available with curl,
-    # so output is consigned to oblivion
-    curl --ftp-pasv -O --retry 3 "$1" >/dev/null || curl -O --retry 3 "$1" >/dev/null || true
+    # Note: comments about wget method are also valid here
+    # Plus: no good progress indicator is available with curl,
+    # so output is consigned to oblivion
+    curl --ftp-pasv -O --retry 3 "$1" --connect-timeout 120 >/dev/null \
+    || curl -O --retry 3 "$1" --connect-timeout 120 >/dev/null         \
+    || true
 }
 
 _wget=`CT_Which wget`
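
For context, the timeout-plus-fallback pattern this patch introduces can be sketched in isolation. The do_download name and the chaining of the curl fallback onto the wget attempts are illustrative only, not part of crosstool-NG's actual CT_DoGetFileWget/CT_DoGetFileCurl helpers:

# Sketch: bounded download helper, assuming wget and curl are on PATH.
do_download() {
    # Cap connection setup at 120s so an undetected proxy cannot hang the
    # build forever; try wget with passive FTP, then default mode, then the
    # curl equivalents, and finally give up quietly (|| true) so the caller
    # can simply check whether the file landed in the current directory.
    wget -T 120 -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \
        || wget -T 120 -nc --progress=dot:binary --tries=3 "$1" \
        || curl --ftp-pasv -O --retry 3 --connect-timeout 120 "$1" >/dev/null \
        || curl -O --retry 3 --connect-timeout 120 "$1" >/dev/null \
        || true
}

# Example (hypothetical URL):
# do_download "ftp://ftp.example.org/pub/some-tarball.tar.bz2"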