"io"
"io/fs"
"log"
+ "net/http"
"os"
"path"
"strings"
"hash-name:commandline[,...]")
extractSig := flag.Bool("extract-sig", false, "Extract signature files")
metaPath := flag.String("meta4", "file.meta4", "Metalink file")
+ dl := flag.Int("dl", -1, "URL index to download, instead of reading from stdin")
flag.Usage = func() {
fmt.Fprintf(flag.CommandLine.Output(),
"Usage: %s [options] [FILE ...]\n", os.Args[0])
verified if they exist. Otherwise only specified FILEs are checked. If you
want to skip any <file> verification (for example only to validate the format
and -extract-sig, then you can just specify an empty ("") FILE.
+
+If -dl (>= 0) is specified, then it automatically implies -pipe, but
+downloads data by specified URLs index, instead of reading from stdin.
+That can be used as a downloading utility.
`)
}
flag.Parse()
for _, fn := range flag.Args() {
toCheck[path.Base(fn)] = fn
}
+ if *dl != -1 {
+ *pipe = true
+ }
if *pipe && len(toCheck) != 1 {
log.Fatalln("exactly single FILE must be specified when using -pipe")
}
continue
HashFound:
- fd := os.Stdin
+ var src io.ReadCloser
+ src = os.Stdin
if !*pipe {
- fd, err = os.Open(fullPath)
+ src, err = os.Open(fullPath)
if err != nil {
log.Println("Error:", f.Name, err)
bad = true
continue
}
}
+ if *dl != -1 {
+ // Guard the index: -dl is user input and f.URLs may be shorter,
+ // so indexing without a check would panic.
+ if *dl < 0 || *dl >= len(f.URLs) {
+ log.Println("Error:", f.Name, "-dl index out of range")
+ bad = true
+ continue
+ }
+ resp, err := http.Get(f.URLs[*dl].URL)
+ if err != nil {
+ log.Println("Error:", f.Name, err)
+ bad = true
+ continue
+ }
+ log.Println("HTTP response:")
+ for k := range resp.Header {
+ log.Println("\t"+k+":", resp.Header.Get(k))
+ }
+ if resp.StatusCode != http.StatusOK {
+ // Close the body before bailing out, otherwise the
+ // connection (and its body reader) is leaked.
+ resp.Body.Close()
+ log.Println("Bad status code:", f.Name, resp.Status)
+ bad = true
+ continue
+ }
+ src = resp.Body
+ }
err = hasher.Start()
if err != nil {
- if !*pipe {
- fd.Close()
- }
+ // -dl forces *pipe = true, but then src is an HTTP response body
+ // that still must be closed — mirror the condition used after io.Copy.
+ if !*pipe || *dl != -1 {
+ src.Close()
+ }
hasher.Stop()
log.Println("Error:", f.Name, err)
} else {
w = hasher
}
- _, err = io.Copy(w, bufio.NewReaderSize(fd, meta4ra.BufLen))
- if !*pipe {
- fd.Close()
+ _, err = io.Copy(w, bufio.NewReaderSize(src, meta4ra.BufLen))
+ if !*pipe || *dl != -1 {
+ src.Close()
}
if err != nil {
hasher.Stop()