fi
(
+ # do not depend on generated URLs
for v in $(env | sed "s/=.*$//" | grep "^REDO") ; do unset $v ; done
redo-ifchange "$metadir"/urls
)
trap "rm -fr $tmp" HUP PIPE INT QUIT TERM EXIT
cd $tmp
case "$DISTFILE_FETCHER" in
-meta4ra)
- [ -s "$metadir"/size ] && read size <"$metadir"/size || size=0
+justdl)
FETCHER_URLS_SORT <"$metadir"/urls |
while read url ; do
echo trying ${url}... >&2
+ url="${url##*|}"
if [ -s "$metadir"/hashes ] ; then
- meta4ra-dl -progress -size $size "$url" |
+ $JUSTDL "$url" |
"$BASS_ROOT"/build/bin/hashes-check "$metadir"/hashes "$BASS_HASHERS" \
>$fn || {
echo hash check failed >&2
}
else
echo warning: no $metadir/hashes >&2
- if [ "$size" -eq 0 ] ; then
- meta4ra-dl -progress "$url" >$fn || continue
- else
- meta4ra-dl -progress -size $size "$url" >$fn || continue
+ $JUSTDL "$url" >$fn || continue
+ if [ -s "$metadir"/size ] ; then
+ read size <"$metadir"/size
sizeGot=$("$BASS_ROOT"/bin/filessize $fn)
[ "$sizeGot" -eq "$size" ] || {
echo size differs: $sizeGot '!=' $size >&2
[ -s $fn ]
cat $fn
;;
-wget)
+wget-meta4)
get-meta4 >meta4
wget --input-metalink=meta4 >&2
cat "$fn.meta4.#1"
for ext in info mod zip ziphash ziphash.sig ; do
fn=v6.1.0.$ext
[ ! -s $fn ] || continue
- $JUSTDL tmp http://proxy.go.cypherpunks.su/go.cypherpunks.su/gogost/v6/@v/$fn
+ $JUSTDL http://proxy.go.cypherpunks.su/go.cypherpunks.su/gogost/v6/@v/$fn >tmp
$FSYNC tmp
mv tmp $fn
done
mkdir -p $vdir
cd $vdir
url=http://proxy.go.stargrave.org/go.stargrave.org/gosha3/@v
-$JUSTDL tmp $url/v1.1.0.zip
+$JUSTDL $url/v1.1.0.zip >tmp
$FSYNC tmp
[ "$("$BASS_ROOT"/build/lib/zip-hash/zip-hash tmp)" = "h1:9FDmAEU07dVhWV5GYoaetbspzU5u+2amPXClwvHVZ6Q=" ]
mv tmp v1.1.0.zip
for ext in info mod ziphash ; do
fn=v1.1.0.$ext
- $JUSTDL tmp $url/$fn
+ $JUSTDL $url/$fn >tmp
$FSYNC tmp
mv tmp $fn
done
to convert .meta4 file to "metadir".
$DISTFILES/bin/metadir-to-meta4 generates .meta4 back.
-* $DISTFILE_FETCHER=meta4ra
- => http://www.meta4ra.stargrave.org/\r
- Use meta4ra-dl for downloading. URLs are sorted with FETCHER_URLS_SORT
- function, which is by default:
+A [Index/Variables] JUSTDL
+* $DISTFILE_FETCHER=justdl
+ * Get the list of possible download URLs. It is sorted with
+   the FETCHER_URLS_SORT function, which is by default:
FETCHER_URLS_SORT() {
$DISTFILES/lib/urls-sort "" rand
}
- This is by default, because meta4ra utilities are installed by default.
-
-* $DISTFILE_FETCHER=wget
+ * For each URL, until we succeed, run the $JUSTDL program to fetch it.
+   If JUSTDL is not set, then BASS will check for the existence of the
+   following programs: fetch, wget, curl, meta4ra-dl.
+ * Check the downloaded result with the $BASS_ROOT/build/bin/hashes-check
+   utility against $metadir/hashes, if it exists. Otherwise check
+   only the size.
+* $DISTFILE_FETCHER=wget-meta4
=> https://www.gnu.org/software/wget/ GNU Wget\r
- Use wget compiled with --with-metalink option. The only drawback is
- that most OS distributions contain Wget without that (--input-metalink)
- option.
+ Use wget compiled with --with-metalink option.
* $DISTFILE_FETCHER=aria2c
=> http://aria2.github.io/ Aria2\r
* all links should use HTTP as higher priority than HTTPS
* distcache.FreeBSD.org and cdn.NetBSD.org/pub/pkgsrc/distfiles
are used as least priority fallback
+* be aware that codeload.github.com downloads have no guarantees
+ of format/compression stability (their hashes may change)
There are special country codes for identifying and specifying CDNs:
xa -> Akamai
$ $BASS_ROOT/build/bin/hashes-gen <$tarball >$DISTFILES/meta/$tarball/hashes
+Or we can use a helper script that does exactly the same:
+
+ $ cd $DISTFILES/meta
+ $ ../bin/metadir-from-file /path/to/$tarball
+ $ ../bin/metadir-from-file /path/to/$tarball.sig
+
Then we must store a list of URLs where it can be fetched. That may be
just trivial:
generate your urls file instead then.
$ cat >$DISTFILES/meta/$tarball/urls.do <<EOF
- redo-ifchange ../../utils/urls-for-gnu
- ../../utils/urls-for-gnu parallel/$(basename $(pwd))
+ redo-ifchange ../../lib/urls-for-gnu
+ ../../lib/urls-for-gnu parallel/$(basename $(pwd))
EOF
+ $ git add $DISTFILES/meta/$tarball*
That is all! You may "redo $DISTFILES/meta/$tarball.meta4" to get the
generated Metalink4 file based on metainformation you supplied.
package, containing libarchive-based bsdtar utility, that perfectly
deals with any compressed archive transparently.
- A [Index/Programs] meta4ra\r
-=> http://www.meta4ra.stargrave.org/ meta4ra\r
- Utilities for making and checking .meta4 files. They are just a
- wrapper over XML and external hasher commands interoperation. They
- also can be used for downloading.
-
A [Index/Programs] Perl\r
=> https://www.perl.org/ Perl\r
Shell scripts are hard to write in a portable way. For example there
build Go-related software. Actually Go-written utilities can be
replaced and no Go dependency will be required at all.
+ A [Index/Programs] meta4ra\r
+=> http://www.meta4ra.stargrave.org/ meta4ra\r
+  Optional utilities for making and checking .meta4 files.
+  They are just a wrapper around XML processing and external
+  hasher commands. They can also be used for downloading.
+
A [Index/Programs] fetch\r
A [Index/Programs] Wget\r
A [Index/Programs] cURL\r
* FreeBSD's fetch, or
=> https://www.gnu.org/software/wget/ GNU Wget\r
=> https://curl.se/ cURL\r
- Although meta4ra can be used instead all of them.
+ => http://www.meta4ra.stargrave.org/ meta4ra-dl\r
+ To fetch distfiles.
export TMPDIR=${TMPDIR:-/tmp}
MAKE_JOBS=${MAKE_JOBS:-$(nproc)}
-if command -v fetch >/dev/null 2>/dev/null ; then
- JUSTDL="fetch -o"
-else
- command -v wget >/dev/null 2>/dev/null && JUSTDL="wget -O" || JUSTDL="curl -o"
-fi
+[ -n "$JUSTDL" ] || {
+ command -v fetch >/dev/null 2>/dev/null && JUSTDL="fetch -v -o -"
+}
+[ -n "$JUSTDL" ] || {
+ command -v wget >/dev/null 2>/dev/null && JUSTDL="wget -O -"
+}
+[ -n "$JUSTDL" ] || {
+ command -v curl >/dev/null 2>/dev/null && JUSTDL="curl -f -v"
+}
+[ -n "$JUSTDL" ] || {
+ command -v meta4ra-dl >/dev/null 2>/dev/null && JUSTDL="meta4ra-dl"
+}
+[ -n "$JUSTDL" ] || {
+ echo unable to find suitable \$JUSTDL utility >&2
+ exit 1
+}
export DISTFILES=${DISTFILES:-"$BASS_ROOT"/build/distfiles}
export SKELBINS=${SKELBINS:-/tmp/skelbins}
[ -n "$PV" ] || command -v pv >/dev/null 2>/dev/null && PV=pv || PV=cat
-# Program to use for downloading distfiles: "meta4ra", "wget", "aria2c".
-export DISTFILE_FETCHER="${DISTFILE_FETCHER:-meta4ra}"
+# Program to use for downloading distfiles: "justdl", "wget-meta4", "aria2c".
+export DISTFILE_FETCHER="${DISTFILE_FETCHER:-justdl}"
if ! type FETCHER_URLS_SORT 2>/dev/null >/dev/null ; then
FETCHER_URLS_SORT() {
$DISTFILES/lib/urls-sort "" rand