authorYann E. MORIN" <yann.morin.1998@anciens.enib.fr>2008-05-06 20:30:49 (GMT)
committerYann E. MORIN" <yann.morin.1998@anciens.enib.fr>2008-05-06 20:30:49 (GMT)
commit770bed1f53afed679f938d2b8521f667d8cbdf86 (patch)
treeb7aa3b123a031c827efb41ae9c4927897c044b5d
parent281eb725420a8c1a97cbe34df14e5fd74950d4b5 (diff)
Add a new option to set the connection timeout while downloading.
-rw-r--r--  config/global/download_extract.in | 25 +++++++++++++++++++++++++
-rw-r--r--  scripts/functions                  | 16 ++++++++--------
2 files changed, 33 insertions(+), 8 deletions(-)
diff --git a/config/global/download_extract.in b/config/global/download_extract.in
index b23f88e..d638ab0 100644
--- a/config/global/download_extract.in
+++ b/config/global/download_extract.in
@@ -20,6 +20,31 @@ config ONLY_DOWNLOAD
Useful to pre-retrieve the tarballs before going off-line.
+config CONNECT_TIMEOUT
+ int
+ prompt "connection timeout"
+ default 10
+ help
+ From the curl manual:
+ Maximum time in seconds that you allow the connection to the server to take.
+
+ The scenario is as follows:
+ - some enterprise networks have firewalls that prohibit FTP traffic, while
+ still allowing HTTP
+ - most download sites have an http:// equivalent for the ftp:// URL
+ - after this number of seconds, the connection is deemed to have failed, and
+ the next URL in the list is tried, until we reach an URL that goes through
+ the firewall, most probably an http:// URL.
+
+ If you have a slow network, you'd better set this value higher than the
+ default 10s. If you know a firewall is blocking connections, but your network
+ is otherwise fast, you can try lowering this value to skip to allowed URLs
+ more quickly. YMMV.
+
+ Note that this value applies equally to wget if you have that installed.
+
+ Of course, you'd be better off using a proxy, as offered by the following
+ choice of options.
+
choice
bool
prompt "Proxy type"
diff --git a/scripts/functions b/scripts/functions
index c4f739a..a5a4f94 100644
--- a/scripts/functions
+++ b/scripts/functions
@@ -283,22 +283,22 @@ CT_DoGetFileWget() {
# With automated download as we are doing, it can be very dangerous to use
# -c to continue the downloads. It's far better to simply overwrite the
# destination file
- # Some company networks have proxies to connect to to the internet, but
- # it's not easy to detect them, and wget may never timeout while connecting,
- # so force a global 120s timeout.
- wget -T 120 -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \
- || wget -T 120 -nc --progress=dot:binary --tries=3 "$1" \
+ # Some company networks have firewalls that filter internet access, but it's
+ # not easy to detect them, and wget does not time out by default while
+ # connecting, so force a global ${CT_CONNECT_TIMEOUT}-second timeout.
+ wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \
+ || wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 "$1" \
|| true
}
# Download an URL using curl
# Usage: CT_DoGetFileCurl <URL>
CT_DoGetFileCurl() {
- # Note: comments about wget method are also valid here
+ # Note: the comments about the wget method (above) are also valid here
# Plus: no good progress indicator is available with curl,
# so output is consigned to oblivion
- curl --ftp-pasv -O --retry 3 "$1" --connect-timeout 120 >/dev/null \
- || curl -O --retry 3 "$1" --connect-timeout 120 >/dev/null \
+ curl --ftp-pasv -O --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} >/dev/null \
+ || curl -O --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} >/dev/null \
|| true
}
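For context, here is a hedged sketch of how the two helpers above might be
driven; the dispatcher name and the selection logic are assumptions for
illustration, not taken from this patch:

    # Hypothetical dispatcher: use curl if present, else fall back to wget.
    # Both helpers end in "|| true", so a failed download does not abort the
    # script; the caller is expected to check whether the file actually
    # arrived.
    CT_DoGetFile() {
        if which curl >/dev/null 2>&1; then
            CT_DoGetFileCurl "$1"
        elif which wget >/dev/null 2>&1; then
            CT_DoGetFileWget "$1"
        fi
    }

    CT_DoGetFile "ftp://ftp.example.org/pub/pkg-1.0.tar.bz2"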