From b3841f0aa933120e284c91bded8e3fb76932b055 Mon Sep 17 00:00:00 2001
From: "Yann E. MORIN"
Date: Sun, 13 Apr 2008 18:16:58 +0000
Subject: Matthias Kaehlcke reported hung downloads on his network, most
 probably due to proxies.

Have the downloaders (wget and curl) time out on too-slow connections
(they don't by default).
---
 scripts/functions | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)

diff --git a/scripts/functions b/scripts/functions
index caca281..c4b6803 100644
--- a/scripts/functions
+++ b/scripts/functions
@@ -283,16 +283,23 @@ CT_DoGetFileWget() {
     # With automated download as we are doing, it can be very dangerous to use
     # -c to continue the downloads. It's far better to simply overwrite the
     # destination file
-    wget -nc --progress=dot:binary --tries=3 --passive-ftp "$1" || wget -nc --progress=dot:binary --tries=3 "$1" || true
+    # Some company networks have proxies to connect to the internet, but it's
+    # not easy to detect them, and wget may never time out while connecting, so
+    # force a global 120s timeout.
+    wget -T 120 -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \
+    || wget -T 120 -nc --progress=dot:binary --tries=3 "$1" \
+    || true
 }
 
 # Download an URL using curl
 # Usage: CT_DoGetFileCurl <URL>
 CT_DoGetFileCurl() {
-    # Note: comments about wget method are also valid here
-    # Plus: no good progreess indicator is available with curl,
-    # so output is consigned to oblivion
-    curl --ftp-pasv -O --retry 3 "$1" >/dev/null || curl -O --retry 3 "$1" >/dev/null || true
+    # Note: comments about wget method are also valid here
+    # Plus: no good progress indicator is available with curl,
+    # so output is consigned to oblivion
+    curl --ftp-pasv -O --retry 3 "$1" --connect-timeout 120 >/dev/null \
+    || curl -O --retry 3 "$1" --connect-timeout 120 >/dev/null \
+    || true
 }
 
 _wget=`CT_Which wget`
--
cgit v0.10.2-6-g49f6
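
A note on the pattern this patch touches: both helpers deliberately end in
|| true, so a failed download never aborts the script; the caller is expected
to check whether the file actually landed (and, per the comment kept in the
hunk, the destination is overwritten rather than resumed with -c, since
resuming a stale partial file during automated downloads is dangerous).
Reduced to a standalone POSIX sh sketch, where the function name
try_download, the URL, and the caller-side check are hypothetical
illustrations rather than crosstool-NG code:

    #!/bin/sh
    # Sketch of the fallback-plus-timeout pattern from the patch above.
    # 'try_download' and the URL are hypothetical; the real helpers are
    # CT_DoGetFileWget / CT_DoGetFileCurl in scripts/functions.
    try_download() {
        # -T 120 bounds wget's DNS, connect and read phases at 120s each,
        # so a silent proxy can no longer hang the build forever.
        wget -T 120 -nc --tries=3 --passive-ftp "$1" \
        || wget -T 120 -nc --tries=3 "$1" \
        || true   # never fail: the caller decides what a missing file means
    }

    url="ftp://example.org/pub/src/foo-1.0.tar.gz"
    try_download "${url}"
    [ -f "${url##*/}" ] || { echo "download failed: ${url}" >&2; exit 1; }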
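
One asymmetry worth noting between the two hunks: wget's -T applies the 120s
cap to the DNS, connect and read phases alike, while curl's --connect-timeout
only bounds the connection phase, so a curl transfer that connects and then
stalls mid-stream is still unbounded (curl's --max-time would cap the whole
operation). The timeout behaviour is easy to check against an unrouted
address; 192.0.2.1 below is from the reserved TEST-NET-1 block and is assumed
to blackhole packets, which mimics a hanging proxy:

    # Both commands should give up after ~5s instead of hanging forever.
    # 192.0.2.1 (TEST-NET-1) is assumed unrouted here; some networks may
    # reject it immediately instead, ending the test early.
    wget -T 5 --tries=1 "http://192.0.2.1/"          ; echo "wget: $?"
    curl --connect-timeout 5 -O "http://192.0.2.1/f" ; echo "curl: $?"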