download: put most of the infra in dl-wrapper
The goal here is to simplify the infrastructure by putting most of the code in
the dl-wrapper, as it is easier to implement and to read. Most of the functions
were common already; this patch finalizes the move by making pkg-download.mk
pass all the needed parameters to the dl-wrapper, which in turn passes
everything to every backend. Each backend then cherry-picks what it needs from
these arguments and acts accordingly.

This eases the transition to the addition of a subdirectory per package in the
DL_DIR and, later on, a git cache.

[Peter: drop ';' in BR_NO_CHECK_HASH_FOR in DOWNLOAD macro and swap cd/rm -rf
as mentioned by Yann, fix typos]

Signed-off-by: Maxime Hadjinlian <maxime.hadjinlian@gmail.com>
Tested-by: Luca Ceresoli <luca@lucaceresoli.net>
Reviewed-by: Luca Ceresoli <luca@lucaceresoli.net>
Reviewed-by: "Yann E. MORIN" <yann.morin.1998@free.fr>
Signed-off-by: Peter Korsgaard <peter@korsgaard.com>
committed by Peter Korsgaard
parent 91e776b5af
commit c8ef0c03b0
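
For orientation, a rough sketch of what a single wrapper invocation looks like
under the new scheme (editorial illustration, not code from this commit; every
value is a made-up placeholder, and the option letters simply follow the new
getopts string shown in the dl-wrapper hunk below):

    support/download/dl-wrapper \
        -c "1.2.3" \
        -n "foo-1.2.3" \
        -N "foo" \
        -f "foo-1.2.3.tar.gz" \
        -H "package/foo/foo.hash" \
        -o "dl/foo-1.2.3.tar.gz" \
        -u git+https://git.example.com/foo.git \
        -u http+http://mirror.example.com/pkgs

The wrapper tries each '-u backend+URI' in turn and forwards the whole
parameter set to the selected backend, which ignores whatever it does not need;
anything after a literal '--' is still handed to the backend untouched.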
@@ -21,7 +21,7 @@ while getopts "${BR_BACKEND_DL_GETOPTS}" OPT; do
     case "${OPT}" in
     q)  verbose=-Q;;
     o)  output="${OPTARG}";;
-    u)  uri="${OPTARG}";;
+    u)  uri="${OPTARG#*://}";;
     c)  rev="${OPTARG}";;
     N)  rawname="${OPTARG}";;
     n)  basename="${OPTARG}";;
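Editorial aside, not part of the diff: the '#*://' expansion added above merely
strips the scheme prefix before the backend uses the location, e.g. with a
made-up value:

    OPTARG='proto://example.com/repo'
    printf '%s\n' "${OPTARG#*://}"    # prints: example.com/repo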
@@ -19,31 +19,34 @@
 # We want to catch any unexpected failure, and exit immediately.
 set -e
 
-export BR_BACKEND_DL_GETOPTS=":hc:o:n:N:H:ru:q"
+export BR_BACKEND_DL_GETOPTS=":hc:o:n:N:H:ru:qf:e"
 
 main() {
     local OPT OPTARG
     local backend output hfile recurse quiet rc
+    local -a uris
 
     # Parse our options; anything after '--' is for the backend
-    while getopts :hb:o:H:rq OPT; do
+    while getopts ":hc:o:n:N:H:rf:u:q" OPT; do
         case "${OPT}" in
         h)  help; exit 0;;
-        b)  backend="${OPTARG}";;
+        c)  cset="${OPTARG}";;
         o)  output="${OPTARG}";;
+        n)  raw_base_name="${OPTARG}";;
+        N)  base_name="${OPTARG}";;
         H)  hfile="${OPTARG}";;
         r)  recurse="-r";;
+        f)  filename="${OPTARG}";;
+        u)  uris+=( "${OPTARG}" );;
         q)  quiet="-q";;
         :)  error "option '%s' expects a mandatory argument\n" "${OPTARG}";;
         \?) error "unknown option '%s'\n" "${OPTARG}";;
         esac
     done
 
     # Forget our options, and keep only those for the backend
     shift $((OPTIND-1))
 
-    if [ -z "${backend}" ]; then
-        error "no backend specified, use -b\n"
-    fi
     if [ -z "${output}" ]; then
         error "no output specified, use -o\n"
     fi
@@ -66,48 +69,85 @@ main() {
         warn "Re-downloading '%s'...\n" "${output##*/}"
     fi
 
-    # tmpd is a temporary directory in which backends may store intermediate
-    # by-products of the download.
-    # tmpf is the file in which the backends should put the downloaded content.
-    # tmpd is located in $(BUILD_DIR), so as not to clutter the (precious)
-    # $(BR2_DL_DIR)
-    # We let the backends create tmpf, so they are able to set whatever
-    # permission bits they want (although we're only really interested in
-    # the executable bit.)
-    tmpd="$(mktemp -d "${BUILD_DIR}/.${output##*/}.XXXXXX")"
-    tmpf="${tmpd}/output"
+    # Look through all the uris that we were given to download the package
+    # source
+    download_and_check=0
+    rc=1
+    for uri in "${uris[@]}"; do
+        backend=${uri%+*}
+        case "${backend}" in
+            git|svn|cvs|bzr|file|scp|hg) ;;
+            *) backend="wget" ;;
+        esac
+        uri=${uri#*+}
 
-    # Helpers expect to run in a directory that is *really* trashable, so
-    # they are free to create whatever files and/or sub-dirs they might need.
-    # Doing the 'cd' here rather than in all backends is easier.
-    cd "${tmpd}"
+        urlencode=${backend#*|}
+        # urlencode must be "urlencode"
+        [ "${urlencode}" != "urlencode" ] && urlencode=""
 
-    # If the backend fails, we can just remove the temporary directory to
-    # remove all the cruft it may have left behind. Then we just exit in
-    # error too.
-    if ! "${OLDPWD}/support/download/${backend}" \
-         ${quiet} ${recurse} \
-         -o "${tmpf}" "${@}"
-    then
-        rm -rf "${tmpd}"
-        exit 1
-    fi
+        # tmpd is a temporary directory in which backends may store
+        # intermediate by-products of the download.
+        # tmpf is the file in which the backends should put the downloaded
+        # content.
+        # tmpd is located in $(BUILD_DIR), so as not to clutter the (precious)
+        # $(BR2_DL_DIR)
+        # We let the backends create tmpf, so they are able to set whatever
+        # permission bits they want (although we're only really interested in
+        # the executable bit.)
+        tmpd="$(mktemp -d "${BUILD_DIR}/.${output##*/}.XXXXXX")"
+        tmpf="${tmpd}/output"
 
-    # cd back to free the temp-dir, so we can remove it later
-    cd "${OLDPWD}"
+        # Helpers expect to run in a directory that is *really* trashable, so
+        # they are free to create whatever files and/or sub-dirs they might need.
+        # Doing the 'cd' here rather than in all backends is easier.
+        cd "${tmpd}"
 
-    # Check if the downloaded file is sane, and matches the stored hashes
-    # for that file
-    if support/download/check-hash ${quiet} "${hfile}" "${tmpf}" "${output##*/}"; then
-        rc=0
-    else
-        if [ ${?} -ne 3 ]; then
+        # If the backend fails, we can just remove the content of the temporary
+        # directory to remove all the cruft it may have left behind, and try
+        # the next URI until it succeeds. Once out of URI to try, we need to
+        # cleanup and exit.
+        if ! "${OLDPWD}/support/download/${backend}" \
+            $([ -n "${urlencode}" ] && printf %s '-e') \
+            -c "${cset}" \
+            -n "${raw_base_name}" \
+            -N "${raw_name}" \
+            -f "${filename}" \
+            -u "${uri}" \
+            -o "${tmpf}" \
+            ${quiet} ${recurse} "${@}"
+        then
+            # cd back to keep path coherence
+            cd "${OLDPWD}"
             rm -rf "${tmpd}"
-            exit 1
+            continue
         fi
 
-        # the hash file exists and there was no hash to check the file against
-        rc=1
+        # cd back to free the temp-dir, so we can remove it later
+        cd "${OLDPWD}"
+
+        # Check if the downloaded file is sane, and matches the stored hashes
+        # for that file
+        if support/download/check-hash ${quiet} "${hfile}" "${tmpf}" "${output##*/}"; then
+            rc=0
+        else
+            if [ ${?} -ne 3 ]; then
+                rm -rf "${tmpd}"
+                continue
+            fi
+
+            # the hash file exists and there was no hash to check the file
+            # against
+            rc=1
+        fi
+        download_and_check=1
+        break
+    done
+
+    # We tried every URI possible, none seems to work or to check against the
+    # available hash. *ABORT MISSION*
+    if [ "${download_and_check}" -eq 0 ]; then
+        rm -rf "${tmpd}"
+        exit 1
     fi
 
     # tmp_output is in the same directory as the final output, so we can
@@ -173,16 +213,13 @@ DESCRIPTION
 
 -h  This help text.
 
--b BACKEND
-    Wrap the specified BACKEND. Known backends are:
-        bzr     Bazaar
-        cp      Local files
-        cvs     Concurrent Versions System
-        git     Git
-        hg      Mercurial
-        scp     Secure copy
-        svn     Subversion
-        wget    HTTP download
+-u URIs
+    The URI to get the file from, the URI must respect the format given in
+    the example.
+    You may give as many '-u URI' as you want, the script will stop at the
+    frist successful download.
+
+    Example: backend+URI; git+http://example.com or http+http://example.com
 
 -o FILE
     Store the downloaded archive in FILE.
 
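Editorial note on the 'backend+URI' format described above: the wrapper splits
it with plain parameter expansion (see the main-loop hunk further up), and any
prefix that is not a known VCS backend falls through to wget. With a made-up
URI:

    uri='git+http://example.com/foo.git'
    backend=${uri%+*}    # -> git
    uri=${uri#*+}        # -> http://example.com/foo.git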
@@ -8,7 +8,9 @@ set -e
 # Options:
 #   -q          Be quiet.
 #   -o FILE     Save into file FILE.
+#   -f FILENAME The filename of the tarball to get at URL
 #   -u URL      Download file at URL.
+#   -e ENCODE   Tell wget to urlencode the filename passed to it
 #
 # Environment:
 #   WGET     : the wget command to call
@@ -18,7 +20,9 @@ while getopts "${BR_BACKEND_DL_GETOPTS}" OPT; do
     case "${OPT}" in
     q)  verbose=-q;;
     o)  output="${OPTARG}";;
+    f)  filename="${OPTARG}";;
     u)  url="${OPTARG}";;
+    e)  encode="-e";;
     :)  printf "option '%s' expects a mandatory argument\n" "${OPTARG}"; exit 1;;
     \?) printf "unknown option '%s'\n" "${OPTARG}" >&2; exit 1;;
     esac
@@ -32,4 +36,8 @@ _wget() {
     eval ${WGET} "${@}"
 }
 
-_wget ${verbose} "${@}" -O "'${output}'" "'${url}'"
+# Replace every '?' with '%3F' in the filename; only for the PRIMARY and BACKUP
+# mirror
+[ -n "${encode}" ] && filename=${filename//\?/%3F}
+
+_wget ${verbose} "${@}" -O "'${output}'" "'${url}/${filename}'"
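Editorial note: the substitution added to the wget backend above
percent-encodes only the '?' characters of the filename before it is appended
to the mirror URL. With a made-up filename:

    filename='download.php?id=foo-1.2.3.tar.gz'
    filename=${filename//\?/%3F}
    printf '%s\n' "${filename}"    # prints: download.php%3Fid=foo-1.2.3.tar.gz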