@@ -131,6 +131,7 @@ noconfig_targets := menuconfig nconfig gconfig xconfig config oldconfig randconf
# (default target is to build), or when MAKECMDGOALS contains
# something else than one of the nobuild_targets.
nobuild_targets := source %-source \
+ source-check %-source-check %-all-source-check \
legal-info %-legal-info external-deps _external-deps \
clean distclean help show-targets graph-depends \
%-graph-depends %-show-depends %-show-version \
@@ -818,6 +819,9 @@ target-post-image: $(TARGETS_ROOTFS) target-finalize staging-finalize
.PHONY: source
source: $(foreach p,$(PACKAGES),$(p)-all-source)
+.PHONY: source-check
+source-check: $(foreach p,$(PACKAGES),$(p)-all-source-check)
+
.PHONY: _external-deps external-deps
_external-deps: $(foreach p,$(PACKAGES),$(p)-all-external-deps)
external-deps:
@@ -1103,6 +1107,8 @@ help:
@echo ' <pkg>-dirclean - Remove <pkg> build directory'
@echo ' <pkg>-reconfigure - Restart the build from the configure step'
@echo ' <pkg>-rebuild - Restart the build from the build step'
+ @echo ' <pkg>-source-check - Check package for valid download URLs'
+ @echo ' <pkg>-all-source-check - Check package and its dependencies for valid download URLs'
$(foreach p,$(HELP_PACKAGES), \
@echo $(sep) \
@echo '$($(p)_NAME):' $(sep) \
@@ -1122,6 +1128,7 @@ help:
@echo
@echo 'Miscellaneous:'
@echo ' source - download all sources needed for offline-build'
+ @echo ' source-check - check selected packages for valid download URLs'
@echo ' external-deps - list external packages used'
@echo ' legal-info - generate info about license compliance'
@echo ' printvars - dump all the internal variables'
@@ -70,6 +70,7 @@ export BR_NO_CHECK_HASH_FOR =
# 3) BR2_BACKUP_SITE if enabled, unless BR2_PRIMARY_SITE_ONLY is set
#
# Argument 1 is the source location
+# Argument 2 (optionally) provides extra arguments to pass to DL_WRAPPER
#
################################################################################
@@ -92,6 +93,7 @@ endif
define DOWNLOAD
$(Q)mkdir -p $($(PKG)_DL_DIR)
$(Q)$(EXTRA_ENV) $(FLOCK) $(DL_WRAPPER) \
+ $(2) \
-c '$($(PKG)_DL_VERSION)' \
-d '$($(PKG)_DL_DIR)' \
-D '$(DL_DIR)' \
@@ -106,3 +108,7 @@ define DOWNLOAD
-- \
$($(PKG)_DL_OPTS)
endef
+
+define SOURCE_CHECK
+ $(call DOWNLOAD,$(1),-C)
+endef
@@ -788,6 +788,10 @@ $(1)-legal-source: $$($(2)_TARGET_ACTUAL_SOURCE)
endif # actual sources != sources
endif # actual sources != ""
+$(1)-source-check: PKG=$(2)
+$(1)-source-check:
+ $$(foreach p,$$($(2)_ALL_DOWNLOADS),$$(call SOURCE_CHECK,$$(p))$$(sep))
+
$(1)-external-deps:
@for p in $$($(2)_SOURCE) $$($(2)_PATCH) $$($(2)_EXTRA_DOWNLOADS) ; do \
echo `basename $$$$p` ; \
@@ -812,6 +816,9 @@ $(1)-rsync: $$($(2)_TARGET_RSYNC)
$(1)-source:
$(1)-legal-source:
+$(1)-source-check:
+ test -d $$($(2)_OVERRIDE_SRCDIR)
+
$(1)-external-deps:
@echo "file://$$($(2)_OVERRIDE_SRCDIR)"
endif
@@ -846,6 +853,9 @@ $(1)-graph-rdepends: graph-depends-requirements
$(1)-all-source: $(1)-source
$(1)-all-source: $$(foreach p,$$($(2)_FINAL_ALL_DEPENDENCIES),$$(p)-all-source)
+$(1)-all-source-check: $(1)-source-check
+$(1)-all-source-check: $$(foreach p,$$($(2)_FINAL_ALL_DEPENDENCIES),$$(p)-all-source-check)
+
$(1)-all-external-deps: $(1)-external-deps
$(1)-all-external-deps: $$(foreach p,$$($(2)_FINAL_ALL_DEPENDENCIES),$$(p)-all-external-deps)
@@ -1044,6 +1054,7 @@ DL_TOOLS_DEPENDENCIES += $$(call extractor-dependency,$$($(2)_SOURCE))
$(1)-all-external-deps \
$(1)-all-legal-info \
$(1)-all-source \
+ $(1)-all-source-check \
$(1)-build \
$(1)-clean-for-rebuild \
$(1)-clean-for-reconfigure \
@@ -1068,7 +1079,8 @@ DL_TOOLS_DEPENDENCIES += $$(call extractor-dependency,$$($(2)_SOURCE))
$(1)-rsync \
$(1)-show-depends \
$(1)-show-version \
- $(1)-source
+ $(1)-source \
+ $(1)-source-check
ifneq ($$($(2)_SOURCE),)
ifeq ($$($(2)_SITE),)
@@ -17,7 +17,7 @@
# We want to catch any unexpected failure, and exit immediately.
set -e
-export BR_BACKEND_DL_GETOPTS=":hc:d:o:n:N:H:ru:qf:e"
+export BR_BACKEND_DL_GETOPTS=":hc:Cd:o:n:N:H:ru:qf:e"
main() {
local OPT OPTARG
@@ -25,9 +25,10 @@ main() {
local -a uris
# Parse our options; anything after '--' is for the backend
- while getopts ":c:d:D:o:n:N:H:rf:u:q" OPT; do
+ while getopts ":c:Cd:D:o:n:N:H:rf:u:q" OPT; do
case "${OPT}" in
c) cset="${OPTARG}";;
+ C) checkonly=-C;;
d) dl_dir="${OPTARG}";;
D) old_dl_dir="${OPTARG}";;
o) output="${OPTARG}";;
@@ -46,38 +47,40 @@ main() {
# Forget our options, and keep only those for the backend
shift $((OPTIND-1))
- if [ -z "${output}" ]; then
- error "no output specified, use -o\n"
- fi
+ if [ -z "${checkonly}" ]; then
+ if [ -z "${output}" ]; then
+ error "no output specified, use -o\n"
+ fi
- # Legacy handling: check if the file already exists in the global
- # download directory. If it does, hard-link it. If it turns out it
- # was an incorrect download, we'd still check it below anyway.
- # If we can neither link nor copy, fallback to doing a download.
- # NOTE! This is not atomic, is subject to TOCTTOU, but the whole
- # dl-wrapper runs under an flock, so we're safe.
-    if [ ! -e "${output}" -a -e "${old_dl_dir}/${filename}" ]; then
-        ln "${old_dl_dir}/${filename}" "${output}" || \
-        cp "${old_dl_dir}/${filename}" "${output}" || \
- true
- fi
+ # Legacy handling: check if the file already exists in the global
+ # download directory. If it does, hard-link it. If it turns out it
+ # was an incorrect download, we'd still check it below anyway.
+ # If we can neither link nor copy, fallback to doing a download.
+ # NOTE! This is not atomic, is subject to TOCTTOU, but the whole
+ # dl-wrapper runs under an flock, so we're safe.
+        if [ ! -e "${output}" -a -e "${old_dl_dir}/${filename}" ]; then
+            ln "${old_dl_dir}/${filename}" "${output}" || \
+            cp "${old_dl_dir}/${filename}" "${output}" || \
+ true
+ fi
- # If the output file already exists and:
- # - there's no .hash file: do not download it again and exit promptly
- # - matches all its hashes: do not download it again and exit promptly
- # - fails at least one of its hashes: force a re-download
- # - there's no hash (but a .hash file): consider it a hard error
- if [ -e "${output}" ]; then
- if support/download/check-hash ${quiet} "${hfile}" "${output}" "${output##*/}"; then
- exit 0
- elif [ ${?} -ne 2 ]; then
- # Do not remove the file, otherwise it might get re-downloaded
- # from a later location (i.e. primary -> upstream -> mirror).
- # Do not print a message, check-hash already did.
- exit 1
+ # If the output file already exists and:
+ # - there's no .hash file: do not download it again and exit promptly
+ # - matches all its hashes: do not download it again and exit promptly
+ # - fails at least one of its hashes: force a re-download
+ # - there's no hash (but a .hash file): consider it a hard error
+ if [ -e "${output}" ]; then
+ if support/download/check-hash ${quiet} "${hfile}" "${output}" "${output##*/}"; then
+ exit 0
+ elif [ ${?} -ne 2 ]; then
+ # Do not remove the file, otherwise it might get re-downloaded
+ # from a later location (i.e. primary -> upstream -> mirror).
+ # Do not print a message, check-hash already did.
+ exit 1
+ fi
+ rm -f "${output}"
+ warn "Re-downloading '%s'...\n" "${output##*/}"
fi
- rm -f "${output}"
- warn "Re-downloading '%s'...\n" "${output##*/}"
fi
# Look through all the uris that we were given to download the package
@@ -127,7 +130,7 @@ main() {
-f "${filename}" \
-u "${uri}" \
-o "${tmpf}" \
- ${quiet} ${recurse} -- "${@}"
+ ${quiet} ${recurse} ${checkonly} -- "${@}"
then
# cd back to keep path coherence
cd "${OLDPWD}"
@@ -138,19 +141,21 @@ main() {
# cd back to free the temp-dir, so we can remove it later
cd "${OLDPWD}"
- # Check if the downloaded file is sane, and matches the stored hashes
- # for that file
- if support/download/check-hash ${quiet} "${hfile}" "${tmpf}" "${output##*/}"; then
- rc=0
- else
- if [ ${?} -ne 3 ]; then
- rm -rf "${tmpd}"
- continue
+ if [ -z "${checkonly}" ]; then
+ # Check if the downloaded file is sane, and matches the stored hashes
+ # for that file
+ if support/download/check-hash ${quiet} "${hfile}" "${tmpf}" "${output##*/}"; then
+ rc=0
+ else
+ if [ ${?} -ne 3 ]; then
+ rm -rf "${tmpd}"
+ continue
+ fi
+
+ # the hash file exists and there was no hash to check the file
+ # against
+ rc=1
fi
-
- # the hash file exists and there was no hash to check the file
- # against
- rc=1
fi
download_and_check=1
break
@@ -163,6 +168,13 @@ main() {
exit 1
fi
+ # If we only need to check the presence of sources, stop here.
+ # No need to handle output files.
+ if [ -n "${checkonly}" ]; then
+ rm -rf "${tmpd}"
+ exit 0
+ fi
+
# tmp_output is in the same directory as the final output, so we can
# later move it atomically.
tmp_output="$(mktemp "${output}.XXXXXX")"