Add a new option to set connection timeout while downloading.
/trunk/scripts/functions | 16 8 8 0 ++++++++--------
/trunk/config/global/download_extract.in | 25 25 0 0 +++++++++++++++++++++++++
2 files changed, 33 insertions(+), 8 deletions(-)
1.1 --- a/config/global/download_extract.in Sun May 04 10:46:32 2008 +0000
1.2 +++ b/config/global/download_extract.in Tue May 06 20:30:49 2008 +0000
1.3 @@ -20,6 +20,31 @@
1.4
1.5 Usefull to pre-retrieve the tarballs before going off-line.
1.6
1.7 +config CONNECT_TIMEOUT
1.8 + int
1.9 + prompt "connection timeout"
1.10 + default 10
1.11 + help
1.12 + From the curl manual:
1.13 + Maximum time in seconds that you allow the connection to the server to take.
1.14 +
1.15 +	  The scenario is as follows:
1.16 + - some enterprise networks have firewalls that prohibit FTP traffic, while
1.17 + still allowing HTTP
1.18 + - most download sites have http:// equivalent for the ftp:// URL
1.19 + - after this number of seconds, it is considered that the connection could
1.20 + not be established, and the next URL in the list is tried, until we reach
1.21 + an URL that will go through the firewall, most probably an http:// URL.
1.22 +
1.23 + If you have a slow network, you'd better set this value higher than the default
1.24 + 10s. If you know a firewall is blocking connections, but your network is globally
1.25 + fast, you can try to lower this value to jump more quickly to allowed URLs. YMMV.
1.26 +
1.27 + Note that this value applies equally to wget if you have that installed.
1.28 +
1.29 +	  Of course, you'd be better off using a proxy, as offered by the following
1.30 +	  choice of options.
1.31 +
1.32 choice
1.33 bool
1.34 prompt "Proxy type"
2.1 --- a/scripts/functions Sun May 04 10:46:32 2008 +0000
2.2 +++ b/scripts/functions Tue May 06 20:30:49 2008 +0000
2.3 @@ -283,22 +283,22 @@
2.4 # With automated download as we are doing, it can be very dangerous to use
2.5 # -c to continue the downloads. It's far better to simply overwrite the
2.6 # destination file
2.7 - # Some company networks have proxies to connect to to the internet, but
2.8 - # it's not easy to detect them, and wget may never timeout while connecting,
2.9 - # so force a global 120s timeout.
2.10 - wget -T 120 -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \
2.11 - || wget -T 120 -nc --progress=dot:binary --tries=3 "$1" \
2.12 + # Some company networks have firewalls to connect to the internet, but it's
2.13 + # not easy to detect them, and wget does not timeout by default while
2.14 + # connecting, so force a global ${CT_CONNECT_TIMEOUT}-second timeout.
2.15 + wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \
2.16 + || wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 "$1" \
2.17 || true
2.18 }
2.19
2.20 # Download an URL using curl
2.21 # Usage: CT_DoGetFileCurl <URL>
2.22 CT_DoGetFileCurl() {
2.23 - # Note: comments about wget method are also valid here
2.24 + # Note: comments about wget method (above) are also valid here
2.25 # Plus: no good progress indicator is available with curl,
2.26 # so output is consigned to oblivion
2.27 - curl --ftp-pasv -O --retry 3 "$1" --connect-timeout 120 >/dev/null \
2.28 - || curl -O --retry 3 "$1" --connect-timeout 120 >/dev/null \
2.29 + curl --ftp-pasv -O --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} >/dev/null \
2.30 + || curl -O --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} >/dev/null \
2.31 || true
2.32 }
2.33