#!/bin/sh -e
cmds="$(dirname "$(realpath -- "$0")")"
-"$cmds"/download.sh "$1"
-"$cmds"/parse.sh "$1"
+"$cmds"/download "$1"
+"$cmds"/parse "$1"
# hook run to post-process each produced WARC
FEEDER_WARC_COMPRESS=_feeder_warc_compress
FEEDER_WARC_COMPRESS=: # ":" is a no-op: leave WARCs uncompressed
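Judging by its name, _feeder_warc_compress is a compression hook; a
minimal sketch of such a helper (hypothetical, assuming zstd as the
compressor and the WARC path as its first argument) might be:

# hypothetical helper: compress a finished WARC in place,
# removing the uncompressed original
_feeder_warc_compress() {
    zstd --rm -q -- "$1"
}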
+export http_proxy=http://localhost:8080
+export https_proxy=$http_proxy
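Both curl and wget honour these lowercase proxy variables. To bypass
the proxy for a single command, clear them for that invocation only:

$ http_proxy= https_proxy= cmd/download feeds/FEED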
set sort_browser_mailboxes=reverse-unread
set folder_format="%3C %t %N [%3n|%3m] %D %f"
macro browser q "<exit><quit>"
-macro browser R "<shell-escape>../feeds-dnp.zsh feeds ; printf \"\\\\a\"\n<check-new><check-stats>"
+macro browser R "<shell-escape>../feeds-dnp feeds ; printf \"\\\\a\"\n<check-new><check-stats>"
set sort=reverse-date
folder-hook . "set sort = reverse-date"
folder-hook . "set index_format = \"%4C %Z [%D] %s (%?l?%4l&%c?)\""
bind pager o noop
-macro pager o "|$cmds/x-urlview.sh\n"
+macro pager o "|$cmds/x-urlview\n"
set pager_index_lines = 6
set query_command = "mu find --muhome mu --clearlinks --format=links --linksdir=search %s"
read title < $f/title
[ -n "$title" ] && label="-label \"$title\"" || :
echo mailboxes $label $f
- echo "folder-hook $f \"macro index r '<shell-escape>$cmds/dnp.sh $f\n<change-folder>=$f\n'\""
+ echo "folder-hook $f \"macro index r '<shell-escape>$cmds/dnp $f\n<change-folder>=$f\n'\""
done
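For a feed directory carrying a title, each loop iteration emits lines
like these (hypothetical title and feed shown):

mailboxes -label "Example feed" feeds/example.org_feed.atom
folder-hook feeds/example.org_feed.atom "macro index r '<shell-escape>/path/to/cmd/dnp feeds/example.org_feed.atom\n<change-folder>=feeds/example.org_feed.atom\n'"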
Generate @file{mutt.rc} and run the mailbox browser with:
@example
-$ ./feeds-browse.sh
+$ ./feeds-browse
@end example
@file{mutt.rc} should list all feed mailboxes with human-readable
labels, making it easy to see which ones were touched.
Press @code{o} in pager mode to open link and enclosure URLs. Your
-message will be piped to @command{cmd/x-urlview.sh}, that will show all
+message will be piped to @command{cmd/x-urlview}, which will show all
@code{X-URL} and @code{X-Enclosure} links.
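A minimal stand-in for such a filter could look like this (a sketch
only; the real @command{cmd/x-urlview} may do more, e.g. feed the URLs
to an interactive chooser):

@example
#!/bin/sh -e
# print URLs carried in X-URL and X-Enclosure headers of the message on stdin
sed -n -e 's/^X-URL: *//p' -e 's/^X-Enclosure: *//p'
@end example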
Press @code{F1} to go to the next unread mailbox.
Press @code{r} in pager mode to download and parse the current feed by
-running (@command{cmd/dnp.sh}).
+running @command{cmd/dnp}.
Press @code{R} in mailbox browser mode to refresh all feeds by running
-(@command{feeds-dnp.zsh}).
+@command{feeds-dnp}.
Trim each mailbox down to the allowed number of messages with:
@example
-$ ./feeds-clear.zsh
-$ cmd/clear.zsh feeds/FEED # to clear single feed
+$ ./feeds-clear
+$ cmd/clear feeds/FEED # to clear single feed
@end example
By default only 100 entries are kept, as set by @env{$FEEDER_MAX_ITEMS}.
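The limit can be raised for a single run by overriding the variable,
assuming the scripts take it from the environment:

@example
$ FEEDER_MAX_ITEMS=500 ./feeds-clear
@end example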
If you need to reset a feed's download state:
@example
-$ cmd/download-clean.sh feed/FEED
+$ cmd/download-clean feeds/FEED
@end example
Download your feed data with:
@example
-$ cmd/download.sh feeds/blog.stargrave.org_russian_feed.atom
-$ ./feeds-download.zsh # to invoke parallel downloading of everything
+$ cmd/download feeds/blog.stargrave.org_russian_feed.atom
+$ ./feeds-download # to invoke parallel downloading of everything
@end example
It uses @command{curl}, which honours @code{If-Modified-Since} based on
the saved file's timestamp.
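Conceptually the conditional fetch is similar to the following
@command{curl} invocation (a sketch, not necessarily the exact flags
used):

@example
$ curl --remote-time --time-cond feed.xml --output feed.xml \
    http://blog.stargrave.org/russian/feed.atom
@end example

Download the enclosures with: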
@example
-$ ./feeds-encs.zsh
+$ ./feeds-encs
[...]
monsterfeet.com_grue.rss/encs/20220218-152822-traffic.libsyn.com_monsterfeet_grue_018.mp3
www.astronews.ru_astronews.xml/encs/20220219-115710-www.astronews.ru_news_2022_20220216125238.jpg
JPEG image data, JFIF standard 1.01, ...
@end example
-@command{feeds-encs.zsh} does not parallelize jobs, because enclosure are
+@command{feeds-encs} does not parallelize jobs, because enclosures are
often heavy enough to saturate your Internet link. @command{wget}'s
progress is also printed both to stderr and @file{feeds/FEED/encs.log}.
Of course you can download only a single feed's enclosures:
@example
-$ cmd/encs.zsh path/to/FEED [optional overriden destination directory]
+$ cmd/encs path/to/FEED [optional overridden destination directory]
@end example
Or you can convert a Newsboat @file{urls} file (containing many lines with
-URLs) with @command{urls2feeds.zsh} to subdirectories hierarchy:
+URLs) with @command{urls2feeds} into a hierarchy of subdirectories:
@example
-$ ./urls2feeds.zsh < ~/.newsboat/urls
+$ ./urls2feeds < ~/.newsboat/urls
$ cat feeds/blog.stargrave.org_russian_feed.atom/url
http://blog.stargrave.org/russian/feed.atom
@end example
-@command{urls2feeds.zsh} won't touch already existing directories and will
+@command{urls2feeds} won't touch existing directories and will
warn if any of their URLs disappears from @file{urls}.
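Judging by the example above, directory names are derived from URLs
roughly like this (a sketch of the apparent mapping):

@example
$ url=http://blog.stargrave.org/russian/feed.atom
$ echo "$url" | sed -e 's#^[a-z]*://##' -e 's#/#_#g'
blog.stargrave.org_russian_feed.atom
@end example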
Quick overview of feeds with new posts:
@example
-$ ./feeds-news.zsh
+$ ./feeds-news
habr.com_ru_rss_interesting: 7
habr.com_ru_rss_news: 3
lobste.rs_rss: 3
@end example
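These counts reflect the new messages in each feed's maildir; assuming
the standard maildir layout you can get roughly the same numbers by
hand:

@example
$ for d in feeds/*/new ; do echo "$d: $(ls $d | wc -l)" ; done
@end example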
Parse your feeds with:
@example
-$ cmd/parse.sh feeds/blog.stargrave.org_russian_feed.atom
-$ ./feeds-parse.zsh # to parse all feeds in parallel
+$ cmd/parse feeds/blog.stargrave.org_russian_feed.atom
+$ ./feeds-parse # to parse all feeds in parallel
@end example
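After parsing, each feed directory is an ordinary maildir alongside the
bookkeeping files seen above; the layout looks roughly like this (an
assumed listing, details may differ):

@example
$ ls -F feeds/blog.stargrave.org_russian_feed.atom
cur/  encs/  new/  tmp/  title  url
@end example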
You can also download and parse the feeds at once:
@example
-$ ./feeds-dnp.zsh
+$ ./feeds-dnp
@end example
(re)Index your messages with:
@example
-$ ./feeds-index.sh
+$ ./feeds-index
@end example
That will create @file{mu/} and @file{search/} directories and run the
@command{mu index} job.
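The @file{search/} directory is used as the linksdir for query results;
you can query the index directly with the same options as the
@code{query_command} shown earlier:

@example
$ mu find --muhome mu --clearlinks --format=links --linksdir=search stargrave
@end example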
Entries' web pages can also be downloaded and saved in
@url{https://en.wikipedia.org/wiki/Web_ARChive, WARC} format:
@example
-$ ./feeds-warcs.zsh
+$ ./feeds-warcs
[...]
www.darkside.ru_news_rss/warcs/20220218-145755-www.darkside.ru_news_140480.warc
[...]
@end example
Of course you can download only a single feed:
@example
-$ cmd/warcs.zsh path/to/FEED [optional overriden destination directory]
+$ cmd/warcs path/to/FEED [optional overridden destination directory]
@end example
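To inspect a produced WARC you can use a third-party tool such as
@command{warcio} (not part of this project):

@example
$ warcio index www.darkside.ru_news_rss/warcs/20220218-145755-www.darkside.ru_news_140480.warc
@end example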
#!/bin/sh -e
cmds="$(dirname "$(realpath -- "$0")")"/cmd
-muttrc_their="$($cmds/muttrc-gen.sh)"
+muttrc_their="$($cmds/muttrc-gen)"
[ -r mutt.rc ] && muttrc_our="$(cat mutt.rc)" || :
[ "$muttrc_our" = "$muttrc_their" ] || cat > mutt.rc <<EOF
$muttrc_their
EOF
#!/usr/bin/env zsh
set -e
cmds=$0:h:a/cmd
-for f (feeds/*) $cmds/clear.zsh $f
+for f (feeds/*) $cmds/clear $f
--- /dev/null
+#!/usr/bin/env zsh
+cmds=$0:h:a/cmd
+exec $cmds/do-in-parallel dnp FEEDER_DOWNLOAD_JOBS "$cmds/dnp {}"
+++ /dev/null
-#!/usr/bin/env zsh
-cmds=$0:h:a/cmd
-exec $cmds/do-in-parallel.zsh dnp FEEDER_DOWNLOAD_JOBS "$cmds/dnp.sh {}"
--- /dev/null
+#!/usr/bin/env zsh
+cmds=$0:h:a/cmd
+exec $cmds/do-in-parallel download FEEDER_DOWNLOAD_JOBS "$cmds/download {}"
+++ /dev/null
-#!/usr/bin/env zsh
-cmds=$0:h:a/cmd
-exec $cmds/do-in-parallel.zsh download FEEDER_DOWNLOAD_JOBS "$cmds/download.sh {}"
--- /dev/null
+#!/usr/bin/env zsh
+cmds=$0:h:a/cmd
+JOBS=1 exec $cmds/do-in-parallel encs JOBS "$cmds/encs {} $1"
+++ /dev/null
-#!/usr/bin/env zsh
-cmds=$0:h:a/cmd
-JOBS=1 exec $cmds/do-in-parallel.zsh encs JOBS "$cmds/encs.zsh {} $1"
--- /dev/null
+#!/usr/bin/env zsh
+cmds=$0:h:a/cmd
+exec $cmds/do-in-parallel parse FEEDER_PARSE_JOBS "$cmds/parse {}"
+++ /dev/null
-#!/usr/bin/env zsh
-cmds=$0:h:a/cmd
-exec $cmds/do-in-parallel.zsh parse FEEDER_PARSE_JOBS "$cmds/parse.sh {}"
--- /dev/null
+#!/usr/bin/env zsh
+cmds=$0:h:a/cmd
+exec $cmds/do-in-parallel warcs FEEDER_DOWNLOAD_JOBS "$cmds/warcs {} $1"
+++ /dev/null
-#!/usr/bin/env zsh
-cmds=$0:h:a/cmd
-exec $cmds/do-in-parallel.zsh warcs FEEDER_DOWNLOAD_JOBS "$cmds/warcs.zsh {} $1"
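All of the feeds-* wrappers above delegate to cmd/do-in-parallel, which
is not shown here. A hypothetical sketch of its contract, assuming GNU
parallel underneath: the first argument names the job (compare
feeds/FEED/encs.log), the second is the *name* of the environment
variable holding the job count (hence "JOBS=1 exec ... encs JOBS ..."
above), and the third is a command template run once per feed with {}
replaced by the feed directory.

#!/usr/bin/env zsh
# hypothetical sketch of cmd/do-in-parallel, not the real script
set -e
name=$1       # job name, presumably used for per-feed logs (omitted here)
jobs=${(P)2}  # indirect expansion: value of the variable named by $2
cmd=$3        # template with {} standing for the feed directory
print -rl -- feeds/* | parallel --jobs ${jobs:-1} "$cmd"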