X-Git-Url: http://www.git.stargrave.org/?a=blobdiff_plain;f=cmd%2Fmeta4ra%2Fcheck.go;fp=cmd%2Fmeta4ra%2Fcheck.go;h=95511c088d7123ad1b30c60f3870e98f7f16734b;hb=90e09ac2efff6f16246eb54698fdf5966307a181;hp=3b3e0a916087635136fd4ff6e9db74145aa63e2b;hpb=86b1aad4e7ce975279cd897effcfebd587039d25;p=meta4ra.git

diff --git a/cmd/meta4ra/check.go b/cmd/meta4ra/check.go
index 3b3e0a9..95511c0 100644
--- a/cmd/meta4ra/check.go
+++ b/cmd/meta4ra/check.go
@@ -23,6 +23,7 @@ import (
 	"io"
 	"io/fs"
 	"log"
+	"net/http"
 	"os"
 	"path"
 	"strings"
@@ -37,6 +38,7 @@ func runCheck() {
 		"hash-name:commandline[,...]")
 	extractSig := flag.Bool("extract-sig", false, "Extract signature files")
 	metaPath := flag.String("meta4", "file.meta4", "Metalink file")
+	dl := flag.Int("dl", -1, "URL index to download, instead of reading from stdin")
 	flag.Usage = func() {
 		fmt.Fprintf(flag.CommandLine.Output(),
 			"Usage: %s [options] [FILE ...]\n", os.Args[0])
@@ -46,6 +48,10 @@ If no FILEs are specified, then all <file>s from metalink are searched
 and verified if they exist. Otherwise only specified FILEs are checked.
 If you want to skip any verification (for example only to validate the format
 and -extract-sig, then you can just specify an empty ("") FILE.
+
+If -dl (> 0) is specified, then it automatically implies -pipe, but
+downloads data by specified URLs index, instead of reading from stdin.
+That can be used as a downloading utility.
 `)
 	}
 	flag.Parse()
@@ -73,6 +79,9 @@ and -extract-sig, then you can just specify an empty ("") FILE.
 	for _, fn := range flag.Args() {
 		toCheck[path.Base(fn)] = fn
 	}
+	if *dl != -1 {
+		*pipe = true
+	}
 	if *pipe && len(toCheck) != 1 {
 		log.Fatalln("exactly single FILE must be specified when using -pipe")
 	}
@@ -155,19 +164,38 @@ and -extract-sig, then you can just specify an empty ("") FILE.
 		continue
 	HashFound:
-		fd := os.Stdin
+		var src io.ReadCloser
+		src = os.Stdin
 		if !*pipe {
-			fd, err = os.Open(fullPath)
+			src, err = os.Open(fullPath)
 			if err != nil {
 				log.Println("Error:", f.Name, err)
 				bad = true
 				continue
 			}
 		}
+		if *dl != -1 {
+			resp, err := http.Get(f.URLs[*dl].URL)
+			if err != nil {
+				log.Println("Error:", f.Name, err)
+				bad = true
+				continue
+			}
+			log.Println("HTTP response:")
+			for k := range resp.Header {
+				log.Println("\t"+k+":", resp.Header.Get(k))
+			}
+			if resp.StatusCode != http.StatusOK {
+				log.Println("Bad status code:", f.Name, resp.Status)
+				bad = true
+				continue
+			}
+			src = resp.Body
+		}
 		err = hasher.Start()
 		if err != nil {
 			if !*pipe {
-				fd.Close()
+				src.Close()
 			}
 			hasher.Stop()
 			log.Println("Error:", f.Name, err)
@@ -180,9 +208,9 @@ and -extract-sig, then you can just specify an empty ("") FILE.
 		} else {
 			w = hasher
 		}
-		_, err = io.Copy(w, bufio.NewReaderSize(fd, meta4ra.BufLen))
-		if !*pipe {
-			fd.Close()
-		}
+		_, err = io.Copy(w, bufio.NewReaderSize(src, meta4ra.BufLen))
+		if !*pipe || *dl != -1 {
+			src.Close()
+		}
 		if err != nil {
 			hasher.Stop()
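
Note: the hunks above add a -dl option to the check command. Instead of hashing data
read from stdin (-pipe, which -dl implies) or from files on disk, the data is fetched
with net/http from the <url> element at the given index and the response body becomes
the source fed into the hasher. The standalone program below is only a sketch of that
download-and-verify pattern, not meta4ra code: the URL, the expected SHA-256 digest
and the decision to copy the payload to stdout are placeholders chosen for the sketch.

// download_verify_sketch.go -- hypothetical illustration, not part of meta4ra.
// Fetch one URL, dump the HTTP headers, refuse non-200 answers, and hash the
// body while it is being streamed, so no temporary file is needed.
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"io"
	"log"
	"net/http"
	"os"
)

func main() {
	// Placeholder values: in meta4ra they come from the .meta4 file
	// (f.URLs[*dl].URL and its <hash> elements).
	url := "http://example.com/file.iso"
	expected := "put-the-expected-sha256-hex-digest-here"

	resp, err := http.Get(url)
	if err != nil {
		log.Fatalln("Error:", err)
	}
	defer resp.Body.Close()
	for k := range resp.Header {
		log.Println("\t"+k+":", resp.Header.Get(k))
	}
	if resp.StatusCode != http.StatusOK {
		log.Fatalln("Bad status code:", resp.Status)
	}

	// Hash while downloading; additionally copy the payload to stdout so the
	// program doubles as a downloader. Whether meta4ra itself writes the body
	// anywhere in -dl mode is not visible in the hunks above.
	h := sha256.New()
	if _, err := io.Copy(io.MultiWriter(h, os.Stdout), resp.Body); err != nil {
		log.Fatalln("Error:", err)
	}
	if hex.EncodeToString(h.Sum(nil)) != expected {
		log.Fatalln("hash mismatch")
	}
	log.Println("OK")
}

The flow is the same in the patched check.go, except that (judging by the
"hash-name:commandline[,...]" flag) meta4ra's hasher drives external hash commands
rather than crypto/sha256, and it verifies every configured hash at once.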