From 53dcb3a8cb96935316e4986b4e8501e8b5d290d8 Mon Sep 17 00:00:00 2001
From: Sergey Matveev
Date: Wed, 18 Jan 2023 15:50:00 +0300
Subject: [PATCH] Remove unnecessary filename extensions

---
 cmd/{clear.zsh => clear}                   | 0
 cmd/{dnp.sh => dnp}                        | 4 ++--
 cmd/{do-in-parallel.zsh => do-in-parallel} | 0
 cmd/{download.sh => download}              | 0
 cmd/{download-clean.sh => download-clean}  | 0
 cmd/{encs.zsh => encs}                     | 0
 cmd/env.rc                                 | 2 ++
 cmd/{muttrc-gen.sh => muttrc-gen}          | 6 +++---
 cmd/{parse.sh => parse}                    | 0
 cmd/{warcs.zsh => warcs}                   | 0
 cmd/{x-urlview.sh => x-urlview}            | 0
 doc/usage/browse.texi                      | 8 ++++----
 doc/usage/clear.texi                       | 6 +++---
 doc/usage/download.texi                    | 4 ++--
 doc/usage/encs.texi                        | 6 +++---
 doc/usage/feedsdir.texi                    | 6 +++---
 doc/usage/news.texi                        | 2 +-
 doc/usage/parse.texi                       | 6 +++---
 doc/usage/reindex.texi                     | 2 +-
 doc/usage/warcs.texi                       | 4 ++--
 feeds-browse.sh => feeds-browse            | 2 +-
 feeds-clear.zsh => feeds-clear             | 2 +-
 feeds-dnp                                  | 3 +++
 feeds-dnp.zsh                              | 3 ---
 feeds-download                             | 3 +++
 feeds-download.zsh                         | 3 ---
 feeds-encs                                 | 3 +++
 feeds-encs.zsh                             | 3 ---
 feeds-index.sh => feeds-index              | 0
 feeds-news.zsh => feeds-news               | 0
 feeds-parse                                | 3 +++
 feeds-parse.zsh                            | 3 ---
 feeds-warcs                                | 3 +++
 feeds-warcs.zsh                            | 3 ---
 urls2feeds.zsh => urls2feeds               | 0
 35 files changed, 46 insertions(+), 44 deletions(-)
 rename cmd/{clear.zsh => clear} (100%)
 rename cmd/{dnp.sh => dnp} (53%)
 rename cmd/{do-in-parallel.zsh => do-in-parallel} (100%)
 rename cmd/{download.sh => download} (100%)
 rename cmd/{download-clean.sh => download-clean} (100%)
 rename cmd/{encs.zsh => encs} (100%)
 rename cmd/{muttrc-gen.sh => muttrc-gen} (91%)
 rename cmd/{parse.sh => parse} (100%)
 rename cmd/{warcs.zsh => warcs} (100%)
 rename cmd/{x-urlview.sh => x-urlview} (100%)
 rename feeds-browse.sh => feeds-browse (84%)
 rename feeds-clear.zsh => feeds-clear (54%)
 create mode 100755 feeds-dnp
 delete mode 100755 feeds-dnp.zsh
 create mode 100755 feeds-download
 delete mode 100755 feeds-download.zsh
 create mode 100755 feeds-encs
 delete mode 100755 feeds-encs.zsh
 rename feeds-index.sh => feeds-index (100%)
 rename feeds-news.zsh => feeds-news (100%)
 create mode 100755 feeds-parse
 delete mode 100755 feeds-parse.zsh
 create mode 100755 feeds-warcs
 delete mode 100755 feeds-warcs.zsh
 rename urls2feeds.zsh => urls2feeds (100%)

diff --git a/cmd/clear.zsh b/cmd/clear
similarity index 100%
rename from cmd/clear.zsh
rename to cmd/clear
diff --git a/cmd/dnp.sh b/cmd/dnp
similarity index 53%
rename from cmd/dnp.sh
rename to cmd/dnp
index 303620f..70f3302 100755
--- a/cmd/dnp.sh
+++ b/cmd/dnp
@@ -1,5 +1,5 @@
 #!/bin/sh -e
 
 cmds="$(dirname "$(realpath -- "$0")")"
-"$cmds"/download.sh "$1"
-"$cmds"/parse.sh "$1"
+"$cmds"/download "$1"
+"$cmds"/parse "$1"
diff --git a/cmd/do-in-parallel.zsh b/cmd/do-in-parallel
similarity index 100%
rename from cmd/do-in-parallel.zsh
rename to cmd/do-in-parallel
diff --git a/cmd/download.sh b/cmd/download
similarity index 100%
rename from cmd/download.sh
rename to cmd/download
diff --git a/cmd/download-clean.sh b/cmd/download-clean
similarity index 100%
rename from cmd/download-clean.sh
rename to cmd/download-clean
diff --git a/cmd/encs.zsh b/cmd/encs
similarity index 100%
rename from cmd/encs.zsh
rename to cmd/encs
diff --git a/cmd/env.rc b/cmd/env.rc
index eccc933..619d60d 100644
--- a/cmd/env.rc
+++ b/cmd/env.rc
@@ -22,3 +22,5 @@ _feeder_warc_compress() {
 
 FEEDER_WARC_COMPRESS=_feeder_warc_compress
 FEEDER_WARC_COMPRESS=: # do not compress
+export http_proxy=http://localhost:8080
+export https_proxy=$http_proxy
diff --git a/cmd/muttrc-gen.sh b/cmd/muttrc-gen
similarity index 91%
rename from cmd/muttrc-gen.sh
rename to cmd/muttrc-gen
index 7d89ba0..8d47d38 100755
--- a/cmd/muttrc-gen.sh
+++ b/cmd/muttrc-gen
@@ -7,7 +7,7 @@ set mail_check_stats_interval=5
 set sort_browser_mailboxes=reverse-unread
 set folder_format="%3C %t %N [%3n|%3m] %D %f"
 macro browser q ""
-macro browser R "../feeds-dnp.zsh feeds ; printf \"\\\\a\"\n"
+macro browser R "../feeds-dnp feeds ; printf \"\\\\a\"\n"
 
 set sort=reverse-date
 folder-hook . "set sort = reverse-date"
@@ -18,7 +18,7 @@ set date_format="%Y-%m-%d %H:%M"
 folder-hook . "set index_format = \"%4C %Z [%D] %s (%?l?%4l&%c?)\""
 
 bind pager o noop
-macro pager o "|$cmds/x-urlview.sh\n"
+macro pager o "|$cmds/x-urlview\n"
 
 set pager_index_lines = 6
 set query_command = "mu find --muhome mu --clearlinks --format=links --linksdir=search %s"
@@ -66,5 +66,5 @@ for f in feeds/* ; do
 	read title < $f/title
 	[ -n "$title" ] && label="-label \"$title\"" || :
 	echo mailboxes $label $f
-	echo "folder-hook $f \"macro index r '$cmds/dnp.sh $f\n=$f\n'\""
+	echo "folder-hook $f \"macro index r '$cmds/dnp $f\n=$f\n'\""
 done
diff --git a/cmd/parse.sh b/cmd/parse
similarity index 100%
rename from cmd/parse.sh
rename to cmd/parse
diff --git a/cmd/warcs.zsh b/cmd/warcs
similarity index 100%
rename from cmd/warcs.zsh
rename to cmd/warcs
diff --git a/cmd/x-urlview.sh b/cmd/x-urlview
similarity index 100%
rename from cmd/x-urlview.sh
rename to cmd/x-urlview
diff --git a/doc/usage/browse.texi b/doc/usage/browse.texi
index 16050f4..2cc3b59 100644
--- a/doc/usage/browse.texi
+++ b/doc/usage/browse.texi
@@ -4,7 +4,7 @@
 Generate @file{mutt.rc} and run it with:
 
 @example
-$ ./feeds-browse.sh
+$ ./feeds-browse
 @end example
 
 @file{mutt.rc} should contain all feeds mailboxes with human readable
@@ -50,13 +50,13 @@ convenience. It will mark both new (@strong{N}) and old-but-unread message
 to understand what was touched.
 
 Press @code{o} in pager mode to open links and enclosures URLs. Your
-message will be piped to @command{cmd/x-urlview.sh}, that will show all
+message will be piped to @command{cmd/x-urlview}, that will show all
 @code{X-URL} and @code{X-Enclosure} links.
 
 Press @code{F1} to go to next unread mailbox.
 
 Press @code{r} in pager mode to download and parse current feed by
-running (@command{cmd/dnp.sh}).
+running (@command{cmd/dnp}).
 
 Press @code{R} in mailbox browser mode to refresh all feeds by running
-(@command{feeds-dnp.zsh}).
+(@command{feeds-dnp}).
diff --git a/doc/usage/clear.texi b/doc/usage/clear.texi
index d525aba..59f6cdd 100644
--- a/doc/usage/clear.texi
+++ b/doc/usage/clear.texi
@@ -4,8 +4,8 @@
 Clear excess number of messages with:
 
 @example
-$ ./feeds-clear.zsh
-$ cmd/clear.zsh feeds/FEED # to clear single feed
+$ ./feeds-clear
+$ cmd/clear feeds/FEED # to clear single feed
 @end example
 
 By default (@env{$FEEDER_MAX_ITEMS}) only 100 entries are processed.
@@ -26,5 +26,5 @@ left @command{cron}-ed workers.
 To clean download state for some reason:
 
 @example
-$ cmd/download-clean.sh feed/FEED
+$ cmd/download-clean feed/FEED
 @end example
diff --git a/doc/usage/download.texi b/doc/usage/download.texi
index e7cae31..3abd9d3 100644
--- a/doc/usage/download.texi
+++ b/doc/usage/download.texi
@@ -4,8 +4,8 @@
 Download your feed data with:
 
 @example
-$ cmd/download.sh feeds/blog.stargrave.org_russian_feed.atom
-$ ./feeds-download.zsh # to invoke parallel downloading of everything
+$ cmd/download feeds/blog.stargrave.org_russian_feed.atom
+$ ./feeds-download # to invoke parallel downloading of everything
 @end example
 
 It uses @command{curl}, that is aware of @code{If-Modified-Since} and
diff --git a/doc/usage/encs.texi b/doc/usage/encs.texi
index 42bba52..2a0cc41 100644
--- a/doc/usage/encs.texi
+++ b/doc/usage/encs.texi
@@ -9,7 +9,7 @@ enclosure's filename is more or less filesystem-friendly with the current
 timestamp.
 
 @example
-$ ./feeds-encs.zsh
+$ ./feeds-encs
 [...]
 monsterfeet.com_grue.rss/encs/20220218-152822-traffic.libsyn.com_monsterfeet_grue_018.mp3
 www.astronews.ru_astronews.xml/encs/20220219-115710-www.astronews.ru_news_2022_20220216125238.jpg
@@ -21,12 +21,12 @@ www.astronews.ru_astronews.xml/encs/20220219-115710-www.astronews.ru_news_2022_2
 JPEG image data, JFIF standard 1.01, ...
 @end example
 
-@command{feeds-encs.zsh} does not parallelize jobs, because enclosure are
+@command{feeds-encs} does not parallelize jobs, because enclosure are
 often heavy enough to satiate your Internet link. @command{wget}'s
 progress is also printed both to stderr and @file{feeds/FEED/encs.log}.
 
 Of course you can download only single feed's enclosures:
 
 @example
-$ cmd/encs.zsh path/to/FEED [optional overriden destination directory]
+$ cmd/encs path/to/FEED [optional overriden destination directory]
 @end example
diff --git a/doc/usage/feedsdir.texi b/doc/usage/feedsdir.texi
index 0518009..48fcb8a 100644
--- a/doc/usage/feedsdir.texi
+++ b/doc/usage/feedsdir.texi
@@ -10,13 +10,13 @@ $ echo http://example.com/feed.atom > feeds/my_first_feed/url
 @end example
 
 Or you can convert Newsboat @file{urls} file (containing many lines with
-URLs) with @command{urls2feeds.zsh} to subdirectories hierarchy:
+URLs) with @command{urls2feeds} to subdirectories hierarchy:
 
 @example
-$ ./urls2feeds.zsh < ~/.newsboat/urls
+$ ./urls2feeds < ~/.newsboat/urls
 $ cat feeds/blog.stargrave.org_russian_feed.atom/url
 http://blog.stargrave.org/russian/feed.atom
 @end example
 
-@command{urls2feeds.zsh} won't touch already existing directories and will
+@command{urls2feeds} won't touch already existing directories and will
 warn if any of them disappears from @file{urls}.
diff --git a/doc/usage/news.texi b/doc/usage/news.texi
index 8bcc8df..6e49a2d 100644
--- a/doc/usage/news.texi
+++ b/doc/usage/news.texi
@@ -4,7 +4,7 @@
 Quick overview of feeds with new posts:
 
 @example
-$ ./feeds-news.zsh
+$ ./feeds-news
 habr.com_ru_rss_interesting: 7
 habr.com_ru_rss_news: 3
 lobste.rs_rss: 3
diff --git a/doc/usage/parse.texi b/doc/usage/parse.texi
index 874dc4d..187b272 100644
--- a/doc/usage/parse.texi
+++ b/doc/usage/parse.texi
@@ -4,12 +4,12 @@
 Parse your feeds with:
 
 @example
-$ cmd/parse.sh feeds/blog.stargrave.org_russian_feed.atom
-$ ./feeds-parse.zsh # to parse all feeds in parallel
+$ cmd/parse feeds/blog.stargrave.org_russian_feed.atom
+$ ./feeds-parse # to parse all feeds in parallel
 @end example
 
 You can also download and parse the feeds at once:
 
 @example
-$ ./feeds-dnp.zsh
+$ ./feeds-dnp
 @end example
diff --git a/doc/usage/reindex.texi b/doc/usage/reindex.texi
index fceeb6f..d0f2400 100644
--- a/doc/usage/reindex.texi
+++ b/doc/usage/reindex.texi
@@ -4,7 +4,7 @@
 (re)Index your messages with:
 
 @example
-$ ./feeds-index.sh
+$ ./feeds-index
 @end example
 
 That will create @file{mu/} and @file{search/} directories and run
diff --git a/doc/usage/warcs.texi b/doc/usage/warcs.texi
index e8142df..33c206c 100644
--- a/doc/usage/warcs.texi
+++ b/doc/usage/warcs.texi
@@ -10,7 +10,7 @@ able to output the whole document in
 @url{https://en.wikipedia.org/wiki/Web_ARChive, WARC} format.
 
 @example
-$ ./feeds-warcs.zsh
+$ ./feeds-warcs
 [...]
 www.darkside.ru_news_rss/warcs/20220218-145755-www.darkside.ru_news_140480.warc
 [...]
@@ -31,5 +31,5 @@ acts as a proxy) to view and visit existing URLs.
 Of course you can download only single feed:
 
 @example
-$ cmd/warcs.zsh path/to/FEED [optional overriden destination directory]
+$ cmd/warcs path/to/FEED [optional overriden destination directory]
 @end example
diff --git a/feeds-browse.sh b/feeds-browse
similarity index 84%
rename from feeds-browse.sh
rename to feeds-browse
index c728c0c..6882e32 100755
--- a/feeds-browse.sh
+++ b/feeds-browse
@@ -1,6 +1,6 @@
 #!/bin/sh -e
 
 cmds="$(dirname "$(realpath -- "$0")")"/cmd
-muttrc_their="$($cmds/muttrc-gen.sh)"
+muttrc_their="$($cmds/muttrc-gen)"
 [ -r mutt.rc ] && muttrc_our="$(cat mutt.rc)" || :
 [ "$muttrc_our" = "$muttrc_their" ] || cat > mutt.rc <