--- a/scripts/build/tools/200-sstrip.sh +++ b/scripts/build/tools/200-sstrip.sh @@ -34,8 +34,8 @@ case "${CT_SSTRIP_FROM}" in } do_tools_sstrip_get() { # Note: the space between sstrip and .c is on purpose. - CT_GetFile sstrip .c \ - "http://buildroot.uclibc.org/cgi-bin/viewcvs.cgi/*checkout*/trunk/buildroot/toolchain/sstrip/" + CT_GetFile sstrip .c?view=co \ + "http://sources.busybox.net/index.py/trunk/buildroot/toolchain/sstrip/" } do_tools_sstrip_extract() { # We'll let buildroot guys take care of sstrip maintenance and patching. --- a/scripts/functions +++ b/scripts/functions @@ -391,7 +391,7 @@ CT_DoSetProxy() { } # Download an URL using wget -# Usage: CT_DoGetFileWget <URL> +# Usage: CT_DoGetFileWget <URL> <local file name> CT_DoGetFileWget() { # Need to return true because it is legitimate to not find the tarball at # some of the provided URLs (think about snapshots, different layouts for @@ -404,18 +404,18 @@ CT_DoGetFileWget() { # Some company networks have firewalls to connect to the internet, but it's # not easy to detect them, and wget does not timeout by default while # connecting, so force a global ${CT_CONNECT_TIMEOUT}-second timeout.
- wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \ - || wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 "$1" \ + wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 --passive-ftp -O "$2" "$1" \ + || wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 -O "$2" "$1" \ || true } # Download an URL using curl -# Usage: CT_DoGetFileCurl <URL> +# Usage: CT_DoGetFileCurl <URL> <local file name> CT_DoGetFileCurl() { # Note: comments about wget method (above) are also valid here # Plus: no good progress indicator is available with curl, # so output is consigned to oblivion - curl --ftp-pasv -O --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} >/dev/null \ - || curl -O --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} >/dev/null \ + curl --ftp-pasv -o "$2" --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} >/dev/null \ + || curl -o "$2" --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} >/dev/null \ || true } @@ -423,12 +423,12 @@ CT_DoGetFileCurl() { _wget=$(CT_Which wget) _curl=$(CT_Which curl) # Wrapper function to call one of curl or wget -# Usage: CT_DoGetFile <URL> +# Usage: CT_DoGetFile <URL> <local file name> CT_DoGetFile() { case "${_wget},${_curl}" in ,) CT_DoError "Could find neither wget nor curl";; - ,*) CT_DoExecLog ALL CT_DoGetFileCurl "$1" 2>&1;; - *) CT_DoExecLog ALL CT_DoGetFileWget "$1" 2>&1;; + ,*) CT_DoExecLog ALL CT_DoGetFileCurl "$1" "$2" 2>&1;; + *) CT_DoExecLog ALL CT_DoGetFileWget "$1" "$2" 2>&1;; esac } @@ -499,7 +499,9 @@ CT_GetFile() { # Try all urls in turn for url in ${URLS}; do CT_DoLog DEBUG "Trying '${url}/${file}${ext}'" - CT_DoGetFile "${url}/${file}${ext}" + real_ext=$(echo ${ext} | cut -d'?' -f1)
+ CT_DoGetFile "${url}/${file}${ext}" "${file}${real_ext}" + ext=${real_ext} if [ -f "${file}${ext}" ]; then CT_DoLog DEBUG "Got '${file}' from the Internet" if [ "${CT_SAVE_TARBALLS}" = "y" ]; then @@ -507,7 +509,7 @@ CT_GetFile() { CT_DoLog EXTRA "Saving '${file}' to local storage" CT_DoExecLog ALL rm -f "${CT_LOCAL_TARBALLS_DIR}/${file}${ext}" CT_DoExecLog ALL mv -f "${file}${ext}" "${CT_LOCAL_TARBALLS_DIR}" - CT_DoExecLog ALL ln -s "${CT_LOCAL_TARBALLS_DIR}/${file}${ext}" "${file}${ext}" + CT_DoExecLog ALL ln -s "${CT_LOCAL_TARBALLS_DIR}/${file}${real_ext}" "${file}${ext}" fi return 0 fi