# HG changeset patch # User "Yann E. MORIN" # Date 1262183782 -3600 # Node ID 61edd9d19e3c99e16e4e88ef5fdc834c15c35fe4 # Parent 383c37e754df0dae2e04a3f7e2daecf7f1864022 scripts/functions: add aria2, a powerful downloader aria2 is a powerful downloader that is capable of chunking and parallel retrieval. Due to limitations in crosstool-NG retrieval facilities, it's not possible to take full advantage of aria2. It might happen that, in the future, those limitations get lifted away, so we can use features such as parallel downloading from more than one server at the same time. For now, it should still speed up downloads thanks to parallel downloading of chunks. diff -r 383c37e754df -r 61edd9d19e3c config/global/download.in --- a/config/global/download.in Tue Dec 29 22:11:09 2009 +0100 +++ b/config/global/download.in Wed Dec 30 15:36:22 2009 +0100 @@ -89,7 +89,7 @@ config CONNECT_TIMEOUT int - prompt "connection timeout" + prompt "Connection timeout" default 10 help From the curl manual: @@ -109,6 +109,21 @@ Note that this value applies equally to wget if you have that installed. +config DOWNLOAD_MAX_CHUNKS + int + prompt "Maximum number of // chunks" + default 5 + range 1 10 + help + If you have aria2 installed, then it will be used to download files. + Aria2 can split the download in chunks, and download those chunks in // + which can be interesting to speed up the download. + + On the other hand, using many chunks, or even chunking in general, may + be seen by some site admins as being kind of unfair, or even as a DoS. + That's why the range of acceptable values is [1..10], and the default + is 5 (aria2's default). 
+ config ONLY_DOWNLOAD bool prompt "Stop after downloading tarballs" diff -r 383c37e754df -r 61edd9d19e3c configure --- a/configure Tue Dec 29 22:11:09 2009 +0100 +++ b/configure Wed Dec 30 15:36:22 2009 +0100 @@ -354,7 +354,7 @@ ver='\(GNU libtool.*\) (2[[:digit:]]*\.|1\.6[[:digit:]]*\.|1\.5\.[2-9][[:digit:]]+)' \ err="'libtool' 1.5.26 or above was not found" has_or_abort prog=stat ver='GNU coreutils' -has_or_abort prog="curl wget" +has_or_abort prog="aria2c curl wget" has_or_abort prog=cvs has_or_abort prog=patch has_or_abort prog=tar diff -r 383c37e754df -r 61edd9d19e3c scripts/functions --- a/scripts/functions Tue Dec 29 22:11:09 2009 +0100 +++ b/scripts/functions Wed Dec 30 15:36:22 2009 +0100 @@ -348,12 +348,31 @@ || true } +# Download using aria2 +# Usage: CT_DoGetFileAria2 +CT_DoGetFileAria2() { + # Note: comments about curl method (above) are also valid here + # Plus: default progress indicator is a single line, so use verbose log + # so that the CT-NG's output is 'live'. + CT_DoExecLog ALL aria2c -l - -s ${CT_DOWNLOAD_MAX_CHUNKS} -m 3 --retry-wait 5 -t ${CT_CONNECT_TIMEOUT} -p "$1" \ + || CT_DoExecLog ALL aria2c -l - -s ${CT_DOWNLOAD_MAX_CHUNKS} -m 3 --retry-wait 5 -t ${CT_CONNECT_TIMEOUT} "$1" \ + || true +} + +# OK, just look if we have them... +_aria2c=$(CT_Which aria2c) _wget=$(CT_Which wget) _curl=$(CT_Which curl) + +# Wrapper function to call one of, in order of preference: +# aria2 +# curl +# wget # Usage: CT_DoGetFile CT_DoGetFile() { - if [ -n "${_curl}" ]; then + if [ -n "${_aria2c}" ]; then + CT_DoGetFileAria2 "$1" + elif [ -n "${_curl}" ]; then CT_DoGetFileCurl "$1" elif [ -n "${_wget}" ]; then CT_DoGetFileWget "$1"