buildx.sh: download tarballs using keepalive

Executing wget with multiple URLs, like

  wget http://example.com/file1 http://example.com/file2 ...

lets it reuse a single HTTP connection (keepalive) for all the downloads.

Quit calling wget with a single file at a time. Downloading multiple
files over one HTTP connection is a little bit faster than executing
wget more than 100 times. Also execute two instances of wget in
parallel to increase download speed.
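
As a minimal illustration of the pattern (the host and file names here
are placeholders, not taken from the script):

  # one URL per line, split into two parallel wget runs of two URLs
  # each; every run reuses a single HTTP connection for its batch
  printf '%s\n' file1 file2 file3 file4 | \
      sed -e 's|^|http://example.com/|' | \
      xargs -P2 -n 2 wget --no-verbose --continue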
commit 6b589569ee
parent cf1a99b4e2
Author: Koichiro IWAO
Date:   2016-10-20 18:33:57 +09:00

@@ -23,23 +23,19 @@
 # debian packages needed
 # flex bison libxml2-dev intltool xsltproc xutils-dev python-libxml2 g++ xutils
-download_file()
+download_all_files()
 {
-    local file url status
-    file=$1
-    # if we already have the file, don't download it
-    if [ -r downloads/$file ]; then
-        return 0
-    fi
-    echo "downloading file $download_url/$file"
-    cd downloads
-    wget -cq $download_url/$file
-    status=$?
-    cd ..
-    return $status
+    # download files parallelly using keepalive
+    # a little bit faster than calling wget with single file more than 100 times
+    < x11_file_list.txt cut -f1 -d: | sed -e "s|^|${download_url}/|" | \
+        xargs -P2 -n $(expr $num_modules / 2 + 1) \
+        wget \
+            --directory-prefix=downloads \
+            --no-verbose \
+            --timestamping \
+            --continue
 }
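
The xargs batch size is chosen so the URL list splits into at most two
batches; without the "+ 1", an odd-length list would produce a third
batch that -P2 could not start until one of the first two finished. A
worked example with a hypothetical num_modules of 103:

  num_modules=103
  expr $num_modules / 2 + 1   # prints 52: batches of 52 and 51 URLs,
                              # downloaded by two concurrent wget runs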
@@ -74,15 +70,6 @@ extract_it()
         return 0
     fi
-    # download file
-    if ! download_file $mod_file
-    then
-        echo ""
-        echo "failed to download $mod_file - aborting build"
-        echo ""
-        exit 1
-    fi
     cd build_dir
     # if pkg has not yet been extracted, do so now
@@ -263,6 +250,14 @@ if ! NPROC=`nproc`; then
     NPROC=1
 fi
+
+if ! download_all_files; then
+    echo ""
+    echo "download failed - aborting build"
+    echo "rerun this script to resume download/build"
+    echo ""
+    exit 1
+fi
 while IFS=: read mod_file mod_dir mod_args
 do
     mod_args=`eval echo $mod_args`
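
Not part of the commit, but a quick way to confirm the batch download
left nothing behind before the build loop runs (file names come from
the same x11_file_list.txt the script already reads):

  # list any tarballs still missing after download_all_files;
  # rerunning the script resumes these via --continue/--timestamping
  < x11_file_list.txt cut -f1 -d: | while read f; do
      [ -r "downloads/$f" ] || echo "missing: $f"
  done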