From 645bc063eb6f8f6e446d5527ff6432559b266461 Mon Sep 17 00:00:00 2001
From: Sergey Matveev
Date: Thu, 19 Sep 2024 16:54:31 +0300
Subject: [PATCH] Unify redirection operator usage

---
 cmd/do-in-parallel      |  2 +-
 cmd/download            | 10 +++++-----
 cmd/encs                |  2 +-
 cmd/env.rc              |  2 +-
 cmd/muttrc-gen          |  2 +-
 cmd/parse               |  2 +-
 cmd/warcs               |  2 +-
 doc/usage/clear.texi    |  2 +-
 doc/usage/feedsdir.texi |  4 ++--
 doc/usage/warcs.texi    |  2 +-
 feeds-browse            |  2 +-
 opml2feeds              |  2 +-
 urls2feeds              |  2 +-
 13 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/cmd/do-in-parallel b/cmd/do-in-parallel
index dc14114..5c0ff3f 100755
--- a/cmd/do-in-parallel
+++ b/cmd/do-in-parallel
@@ -6,5 +6,5 @@ log=$1.log
 ${=PARALLEL} --jobs ${(P)2} --joblog $log $3 ::: feeds/* || rc=$?
 fpath=($cmds/functions.zsh $fpath)
 autoload print-joblog-failed
-print-joblog-failed < $log
+print-joblog-failed <$log
 exit ${rc:-0}
diff --git a/cmd/download b/cmd/download
index cee1b45..26ee3a4 100755
--- a/cmd/download
+++ b/cmd/download
@@ -3,7 +3,7 @@
 cmds="$(dirname "$(realpath -- "$0")")"
 . "$cmds/env.rc"
 cd "$1"
-read url < url
+read url <url
 [...]
@@ [...] @@
 [...] >&2
 if [ -s out ] ; then
-	$ZSTD < out > feed.zst
-	touch -r out feed.zst
+	cp out feed
+	touch -r out feed
 	truncate -s 0 out
-	touch -r feed.zst out
+	touch -r feed out
 fi
-$SHA512 < feed.zst > download.hash
+$SHA512 <feed >download.hash
diff --git a/cmd/encs b/cmd/encs
index d16ab5e..0b3d630 100755
--- a/cmd/encs
+++ b/cmd/encs
@@ -20,5 +20,5 @@ for new (new/*(N)) {
 		${=WGET} --user-agent=$FEEDER_USER_AGENT \
 			--output-document=$dst/$fn $url 2>&2 2>encs.log
 		print $dst/$fn
-	done < $new
+	done <$new
 }
diff --git a/cmd/env.rc b/cmd/env.rc
index b33b15c..4932680 100644
--- a/cmd/env.rc
+++ b/cmd/env.rc
@@ -16,7 +16,7 @@ command -v sha512 >/dev/null && SHA512="sha512" || SHA512="sha512sum --binary"
 
 _feeder_warc_compress() {
 	$HOME/work/tofuproxy/warc-extract.cmd -for-enzstd "$1" |
-		$HOME/work/tofuproxy/cmd/zstd/enzstd > "$1".zst
+		$HOME/work/tofuproxy/cmd/zstd/enzstd >"$1".zst
 	rm "$1"
 }
 
diff --git a/cmd/muttrc-gen b/cmd/muttrc-gen
index a64d62d..97e4485 100755
--- a/cmd/muttrc-gen
+++ b/cmd/muttrc-gen
@@ -63,7 +63,7 @@ for f in feeds/* ; do
 		echo unreadable $f/title >&2
 		continue
 	}
-	read title < $f/title
+	read title <$f/title
 	[ -n "$title" ] && label="-label \"$title\"" || :
 	echo mailboxes $label $f
 	echo "folder-hook $f \"macro index r '$cmds/dnp $f\n=$f\n'\""
diff --git a/cmd/parse b/cmd/parse
index 5b59893..ab830b5 100755
--- a/cmd/parse
+++ b/cmd/parse
@@ -9,4 +9,4 @@ cd "$1"
 [ -s max ] && max=`cat max` || max=$FEEDER_MAX_ITEMS
 $ZSTD -d < feed.zst | $cmds/feed2mdir/feed2mdir -max-entries $max . > title.tmp
 mv title.tmp title
-echo "$hash_their" > parse.hash
+echo "$hash_their" >parse.hash
diff --git a/cmd/warcs b/cmd/warcs
index 7934b63..42516af 100755
--- a/cmd/warcs
+++ b/cmd/warcs
@@ -34,5 +34,5 @@ for new (new/*(N)) {
 		${=WGET} $wget_opts --output-file=warcs.log --warc-file=$dst/$fn $url
 		$FEEDER_WARC_COMPRESS $dst/$fn.warc
 		print $dst/$fn.warc*
-	done < $new
+	done <$new
 }
diff --git a/doc/usage/clear.texi b/doc/usage/clear.texi
index 59f6cdd..537b91a 100644
--- a/doc/usage/clear.texi
+++ b/doc/usage/clear.texi
@@ -14,7 +14,7 @@ Parser only appends posts, but does not remove obsolete ones.
 You can set that limit on per-feed basis:
 
 @example
-$ echo 50 > feed/FEED/max
+$ echo 50 >feed/FEED/max
 @end example
 
 @strong{0} means no limit and keep all the messages.
diff --git a/doc/usage/feedsdir.texi b/doc/usage/feedsdir.texi
index 48fcb8a..a2df992 100644
--- a/doc/usage/feedsdir.texi
+++ b/doc/usage/feedsdir.texi
@@ -6,14 +6,14 @@ manually:
 
 @example
 $ mkdir -p feeds/my_first_feed/@{cur,new,tmp@}
-$ echo http://example.com/feed.atom > feeds/my_first_feed/url
+$ echo http://example.com/feed.atom >feeds/my_first_feed/url
 @end example
 
 Or you can convert Newsboat @file{urls} file (containing many lines
 with URLs) with @command{urls2feeds} to subdirectories hierarchy:
 
 @example
-$ ./urls2feeds < ~/.newsboat/urls
+$ ./urls2feeds <~/.newsboat/urls
 $ cat feeds/blog.stargrave.org_russian_feed.atom/url
 http://blog.stargrave.org/russian/feed.atom
 @end example
diff --git a/doc/usage/warcs.texi b/doc/usage/warcs.texi
index 33c206c..9288b7f 100644
--- a/doc/usage/warcs.texi
+++ b/doc/usage/warcs.texi
@@ -22,7 +22,7 @@ help as an option.
 After you get pile of various @file{*.warc} files, you can simply add
 them to running @command{tofuproxy}:
 @example
-$ for w (feeds/*/warcs/*.warc) print $w:a > path/to/tofuproxy/fifos/add-warcs
+$ for w (feeds/*/warcs/*.warc) print $w:a >path/to/tofuproxy/fifos/add-warcs
 @end example
 
 And then visit @url{http://warc/} URL (when @command{tofuproxy} already
diff --git a/feeds-browse b/feeds-browse
index 8ccfda6..252b608 100755
--- a/feeds-browse
+++ b/feeds-browse
@@ -2,7 +2,7 @@
 cmds="$(dirname "$(realpath -- "$0")")"/cmd
 muttrc_their="$($cmds/muttrc-gen)"
 [ -r mutt.rc ] && muttrc_our="$(cat mutt.rc)" || :
-[ "$muttrc_our" = "$muttrc_their" ] || cat > mutt.rc <<EOF
+[ "$muttrc_our" = "$muttrc_their" ] || cat >mutt.rc <<EOF
 $muttrc_their
 EOF
 [...]
diff --git a/opml2feeds b/opml2feeds
index [...]
--- a/opml2feeds
+++ b/opml2feeds
@@ [...] @@
 [...]
-	echo "$url" > "$dir"/url
+	echo "$url" >"$dir"/url
 	printf "%s\n" "$dir"
 done
diff --git a/urls2feeds b/urls2feeds
index 90b1341..9761228 100755
--- a/urls2feeds
+++ b/urls2feeds
@@ -10,7 +10,7 @@ while read url ; do
 	seen[$dir]=1
 	[[ -e $dir ]] && continue || :
 	mkdir -p $dir/{cur,new,tmp} # make it maildir
-	print -- "$url" > $dir/url
+	print -- "$url" >$dir/url
 	print $dir
 done
 for dir (feeds/*) [[ $seen[$dir] -eq 1 ]] || print disappeared: $dir
--
2.48.1
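
Note: in POSIX sh and in zsh, whitespace between a redirection operator and
its word is insignificant, so "< $log" and "<$log" parse identically and the
style change above is behavior-preserving. A minimal sketch of the
equivalence (not part of the patch; the file name "url" merely mirrors
cmd/download):

	#!/bin/sh -e
	# Both redirection spellings are parsed into the same redirection:
	# the operator and its word need no separating space.
	echo http://example.com/feed.atom > url    # spaced form (old style)
	read u1 < url
	echo http://example.com/feed.atom >url     # compact form (new style)
	read u2 <url
	[ "$u1" = "$u2" ] && echo "identical: $u1"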