Sergey Matveev's repositories - feeder.git/commitdiff
Remove unnecessary filename extensions
authorSergey Matveev <stargrave@stargrave.org>
Wed, 18 Jan 2023 12:50:00 +0000 (15:50 +0300)
committerSergey Matveev <stargrave@stargrave.org>
Wed, 18 Jan 2023 12:50:00 +0000 (15:50 +0300)
35 files changed:
cmd/clear [moved from cmd/clear.zsh with 100% similarity]
cmd/dnp [moved from cmd/dnp.sh with 53% similarity]
cmd/do-in-parallel [moved from cmd/do-in-parallel.zsh with 100% similarity]
cmd/download [moved from cmd/download.sh with 100% similarity]
cmd/download-clean [moved from cmd/download-clean.sh with 100% similarity]
cmd/encs [moved from cmd/encs.zsh with 100% similarity]
cmd/env.rc
cmd/muttrc-gen [moved from cmd/muttrc-gen.sh with 91% similarity]
cmd/parse [moved from cmd/parse.sh with 100% similarity]
cmd/warcs [moved from cmd/warcs.zsh with 100% similarity]
cmd/x-urlview [moved from cmd/x-urlview.sh with 100% similarity]
doc/usage/browse.texi
doc/usage/clear.texi
doc/usage/download.texi
doc/usage/encs.texi
doc/usage/feedsdir.texi
doc/usage/news.texi
doc/usage/parse.texi
doc/usage/reindex.texi
doc/usage/warcs.texi
feeds-browse [moved from feeds-browse.sh with 84% similarity]
feeds-clear [moved from feeds-clear.zsh with 54% similarity]
feeds-dnp [new file with mode: 0755]
feeds-dnp.zsh [deleted file]
feeds-download [new file with mode: 0755]
feeds-download.zsh [deleted file]
feeds-encs [new file with mode: 0755]
feeds-encs.zsh [deleted file]
feeds-index [moved from feeds-index.sh with 100% similarity]
feeds-news [moved from feeds-news.zsh with 100% similarity]
feeds-parse [new file with mode: 0755]
feeds-parse.zsh [deleted file]
feeds-warcs [new file with mode: 0755]
feeds-warcs.zsh [deleted file]
urls2feeds [moved from urls2feeds.zsh with 100% similarity]

similarity index 100%
rename from cmd/clear.zsh
rename to cmd/clear
similarity index 53%
rename from cmd/dnp.sh
rename to cmd/dnp
index 303620f0d3b9796e8d3d416803706969705cb200..70f3302c3a92ff5260eebbbbc48c4b0db65c2386 100755 (executable)
+++ b/cmd/dnp
@@ -1,5 +1,5 @@
 #!/bin/sh -e
 
 cmds="$(dirname "$(realpath -- "$0")")"
-"$cmds"/download.sh "$1"
-"$cmds"/parse.sh "$1"
+"$cmds"/download "$1"
+"$cmds"/parse "$1"
similarity index 100%
rename from cmd/do-in-parallel.zsh
rename to cmd/do-in-parallel
similarity index 100%
rename from cmd/download.sh
rename to cmd/download
similarity index 100%
rename from cmd/download-clean.sh
rename to cmd/download-clean
similarity index 100%
rename from cmd/encs.zsh
rename to cmd/encs
index eccc9338d857810e703091040917988468ecde54..619d60df0812798291d22d0658e0a4a9684f11de 100644 (file)
@@ -22,3 +22,5 @@ _feeder_warc_compress() {
 
 FEEDER_WARC_COMPRESS=_feeder_warc_compress
 FEEDER_WARC_COMPRESS=: # do not compress
+export http_proxy=http://localhost:8080
+export https_proxy=$http_proxy
similarity index 91%
rename from cmd/muttrc-gen.sh
rename to cmd/muttrc-gen
index 7d89ba035439cdaba945e525fb52a660ae05ca31..8d47d3839e048bc01aba53f199c0f79cc9ca8c08 100755 (executable)
@@ -7,7 +7,7 @@ set mail_check_stats_interval=5
 set sort_browser_mailboxes=reverse-unread
 set folder_format="%3C %t %N [%3n|%3m] %D %f"
 macro browser q "<exit><quit>"
-macro browser R "<shell-escape>../feeds-dnp.zsh feeds ; printf \"\\\\a\"\n<check-new><check-stats>"
+macro browser R "<shell-escape>../feeds-dnp feeds ; printf \"\\\\a\"\n<check-new><check-stats>"
 
 set sort=reverse-date
 folder-hook . "set sort = reverse-date"
@@ -18,7 +18,7 @@ set date_format="%Y-%m-%d %H:%M"
 folder-hook . "set index_format = \"%4C %Z [%D] %s (%?l?%4l&%c?)\""
 
 bind pager o noop
-macro pager o "|$cmds/x-urlview.sh\n"
+macro pager o "|$cmds/x-urlview\n"
 set pager_index_lines = 6
 
 set query_command = "mu find --muhome mu --clearlinks --format=links --linksdir=search %s"
@@ -66,5 +66,5 @@ for f in feeds/* ; do
     read title < $f/title
     [ -n "$title" ] && label="-label \"$title\"" || :
     echo mailboxes $label $f
-    echo "folder-hook $f \"macro index r '<shell-escape>$cmds/dnp.sh $f\n<change-folder>=$f\n'\""
+    echo "folder-hook $f \"macro index r '<shell-escape>$cmds/dnp $f\n<change-folder>=$f\n'\""
 done
similarity index 100%
rename from cmd/parse.sh
rename to cmd/parse
similarity index 100%
rename from cmd/warcs.zsh
rename to cmd/warcs
similarity index 100%
rename from cmd/x-urlview.sh
rename to cmd/x-urlview
index 16050f4fc5860836d6fe3f203ed8e456799391eb..2cc3b59b99fccea4111e88b35d925830eba95cce 100644 (file)
@@ -4,7 +4,7 @@
 Generate @file{mutt.rc} and run it with:
 
 @example
-$ ./feeds-browse.sh
+$ ./feeds-browse
 @end example
 
 @file{mutt.rc} should contain all feeds mailboxes with human readable
@@ -50,13 +50,13 @@ convenience. It will mark both new (@strong{N}) and old-but-unread
 message to understand what was touched.
 
 Press @code{o} in pager mode to open links and enclosures URLs. Your
-message will be piped to @command{cmd/x-urlview.sh}, that will show all
+message will be piped to @command{cmd/x-urlview}, that will show all
 @code{X-URL} and @code{X-Enclosure} links.
 
 Press @code{F1} to go to next unread mailbox.
 
 Press @code{r} in pager mode to download and parse current feed by
-running (@command{cmd/dnp.sh}).
+running (@command{cmd/dnp}).
 
 Press @code{R} in mailbox browser mode to refresh all feeds by running
-(@command{feeds-dnp.zsh}).
+(@command{feeds-dnp}).
index d525abad048b02d2d4f42a17ad3b7d0c92bbe06a..59f6cddc28cf4629004e87a3e5f5d79086b44111 100644 (file)
@@ -4,8 +4,8 @@
 Clear excess number of messages with:
 
 @example
-$ ./feeds-clear.zsh
-$ cmd/clear.zsh feeds/FEED # to clear single feed
+$ ./feeds-clear
+$ cmd/clear feeds/FEED # to clear single feed
 @end example
 
 By default (@env{$FEEDER_MAX_ITEMS}) only 100 entries are processed.
@@ -26,5 +26,5 @@ left @command{cron}-ed workers.
 To clean download state for some reason:
 
 @example
-$ cmd/download-clean.sh feed/FEED
+$ cmd/download-clean feed/FEED
 @end example
index e7cae31d0b96945f0aaaec5636a35ffdf98e0d31..3abd9d32b326608627de494d5773439424960a58 100644 (file)
@@ -4,8 +4,8 @@
 Download your feed data with:
 
 @example
-$ cmd/download.sh feeds/blog.stargrave.org_russian_feed.atom
-$ ./feeds-download.zsh # to invoke parallel downloading of everything
+$ cmd/download feeds/blog.stargrave.org_russian_feed.atom
+$ ./feeds-download # to invoke parallel downloading of everything
 @end example
 
 It uses @command{curl}, that is aware of @code{If-Modified-Since} and
index 42bba52313d0a72a4c6997eff2ed25a670802fc4..2a0cc419db1a68ea73d695efb62d8704c7feadd9 100644 (file)
@@ -9,7 +9,7 @@ enclosure's filename is more or less filesystem-friendly with the
 current timestamp.
 
 @example
-$ ./feeds-encs.zsh
+$ ./feeds-encs
 [...]
 monsterfeet.com_grue.rss/encs/20220218-152822-traffic.libsyn.com_monsterfeet_grue_018.mp3
 www.astronews.ru_astronews.xml/encs/20220219-115710-www.astronews.ru_news_2022_20220216125238.jpg
@@ -21,12 +21,12 @@ www.astronews.ru_astronews.xml/encs/20220219-115710-www.astronews.ru_news_2022_2
   JPEG image data, JFIF standard 1.01, ...
 @end example
 
-@command{feeds-encs.zsh} does not parallelize jobs, because enclosure are
+@command{feeds-encs} does not parallelize jobs, because enclosure are
 often heavy enough to satiate your Internet link. @command{wget}'s
 progress is also printed both to stderr and @file{feeds/FEED/encs.log}.
 
 Of course you can download only single feed's enclosures:
 
 @example
-$ cmd/encs.zsh path/to/FEED [optional overriden destination directory]
+$ cmd/encs path/to/FEED [optional overriden destination directory]
 @end example
index 05180091975c101eb783cf5f4cb908a6b72e8b9e..48fcb8ac81c77aafecb46bbe3b4e24f9731a6732 100644 (file)
@@ -10,13 +10,13 @@ $ echo http://example.com/feed.atom > feeds/my_first_feed/url
 @end example
 
 Or you can convert Newsboat @file{urls} file (containing many lines with
-URLs) with @command{urls2feeds.zsh} to subdirectories hierarchy:
+URLs) with @command{urls2feeds} to subdirectories hierarchy:
 
 @example
-$ ./urls2feeds.zsh < ~/.newsboat/urls
+$ ./urls2feeds < ~/.newsboat/urls
 $ cat feeds/blog.stargrave.org_russian_feed.atom/url
 http://blog.stargrave.org/russian/feed.atom
 @end example
 
-@command{urls2feeds.zsh} won't touch already existing directories and will
+@command{urls2feeds} won't touch already existing directories and will
 warn if any of them disappears from @file{urls}.
index 8bcc8dfd7114a0be7db916f0b6c46a673d88a960..6e49a2d7d188b31fd45ace386f9044abce07b897 100644 (file)
@@ -4,7 +4,7 @@
 Quick overview of feeds with new posts:
 
 @example
-$ ./feeds-news.zsh
+$ ./feeds-news
 habr.com_ru_rss_interesting: 7
 habr.com_ru_rss_news: 3
 lobste.rs_rss: 3
index 874dc4dbf55d70f67df3ab3bfc560c4e7c597fa8..187b2726dd406ab77663c74b8828e3c50025e184 100644 (file)
@@ -4,12 +4,12 @@
 Parse your feeds with:
 
 @example
-$ cmd/parse.sh feeds/blog.stargrave.org_russian_feed.atom
-$ ./feeds-parse.zsh # to parse all feeds in parallel
+$ cmd/parse feeds/blog.stargrave.org_russian_feed.atom
+$ ./feeds-parse # to parse all feeds in parallel
 @end example
 
 You can also download and parse the feeds at once:
 
 @example
-$ ./feeds-dnp.zsh
+$ ./feeds-dnp
 @end example
index fceeb6f993428e40338e7d824667e172f8778067..d0f240068fd57d628efe118dbaf5ab8cb8278974 100644 (file)
@@ -4,7 +4,7 @@
 (re)Index your messages with:
 
 @example
-$ ./feeds-index.sh
+$ ./feeds-index
 @end example
 
 That will create @file{mu/} and @file{search/} directories and run
index e8142dff6592dd45125ca79085f60be7f53c489d..33c206c1054cf1c82ca9098fa2ea94dbad72fc1f 100644 (file)
@@ -10,7 +10,7 @@ able to output the whole document in
 @url{https://en.wikipedia.org/wiki/Web_ARChive, WARC} format.
 
 @example
-$ ./feeds-warcs.zsh
+$ ./feeds-warcs
 [...]
 www.darkside.ru_news_rss/warcs/20220218-145755-www.darkside.ru_news_140480.warc
 [...]
@@ -31,5 +31,5 @@ acts as a proxy) to view and visit existing URLs.
 Of course you can download only single feed:
 
 @example
-$ cmd/warcs.zsh path/to/FEED [optional overriden destination directory]
+$ cmd/warcs path/to/FEED [optional overriden destination directory]
 @end example
similarity index 84%
rename from feeds-browse.sh
rename to feeds-browse
index c728c0ce3a21a7e9f9e0dc7a62a9ddc3358ef2d3..6882e32734d3437795b5470d491a413ca3dd9586 100755 (executable)
@@ -1,6 +1,6 @@
 #!/bin/sh -e
 cmds="$(dirname "$(realpath -- "$0")")"/cmd
-muttrc_their="$($cmds/muttrc-gen.sh)"
+muttrc_their="$($cmds/muttrc-gen)"
 [ -r mutt.rc ] && muttrc_our="$(cat mutt.rc)" || :
 [ "$muttrc_our" = "$muttrc_their" ] || cat > mutt.rc <<EOF
 $muttrc_their
similarity index 54%
rename from feeds-clear.zsh
rename to feeds-clear
index e9424ca7554ed296fdba5695e071deed99cf4f73..62e73f2bdaba97fca28b12ba06be738f20a683ee 100755 (executable)
@@ -1,4 +1,4 @@
 #!/usr/bin/env zsh
 set -e
 cmds=$0:h:a/cmd
-for f (feeds/*) $cmds/clear.zsh $f
+for f (feeds/*) $cmds/clear $f
diff --git a/feeds-dnp b/feeds-dnp
new file mode 100755 (executable)
index 0000000..7e4cae2
--- /dev/null
+++ b/feeds-dnp
@@ -0,0 +1,3 @@
+#!/usr/bin/env zsh
+cmds=$0:h:a/cmd
+exec $cmds/do-in-parallel dnp FEEDER_DOWNLOAD_JOBS "$cmds/dnp {}"
diff --git a/feeds-dnp.zsh b/feeds-dnp.zsh
deleted file mode 100755 (executable)
index bfd672f..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env zsh
-cmds=$0:h:a/cmd
-exec $cmds/do-in-parallel.zsh dnp FEEDER_DOWNLOAD_JOBS "$cmds/dnp.sh {}"
diff --git a/feeds-download b/feeds-download
new file mode 100755 (executable)
index 0000000..b9d84a0
--- /dev/null
@@ -0,0 +1,3 @@
+#!/usr/bin/env zsh
+cmds=$0:h:a/cmd
+exec $cmds/do-in-parallel download FEEDER_DOWNLOAD_JOBS "$cmds/download {}"
diff --git a/feeds-download.zsh b/feeds-download.zsh
deleted file mode 100755 (executable)
index 06ba96c..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env zsh
-cmds=$0:h:a/cmd
-exec $cmds/do-in-parallel.zsh download FEEDER_DOWNLOAD_JOBS "$cmds/download.sh {}"
diff --git a/feeds-encs b/feeds-encs
new file mode 100755 (executable)
index 0000000..21ed33e
--- /dev/null
@@ -0,0 +1,3 @@
+#!/usr/bin/env zsh
+cmds=$0:h:a/cmd
+JOBS=1 exec $cmds/do-in-parallel encs JOBS "$cmds/encs {} $1"
diff --git a/feeds-encs.zsh b/feeds-encs.zsh
deleted file mode 100755 (executable)
index fb6e4e1..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env zsh
-cmds=$0:h:a/cmd
-JOBS=1 exec $cmds/do-in-parallel.zsh encs JOBS "$cmds/encs.zsh {} $1"
similarity index 100%
rename from feeds-index.sh
rename to feeds-index
similarity index 100%
rename from feeds-news.zsh
rename to feeds-news
diff --git a/feeds-parse b/feeds-parse
new file mode 100755 (executable)
index 0000000..4bca512
--- /dev/null
@@ -0,0 +1,3 @@
+#!/usr/bin/env zsh
+cmds=$0:h:a/cmd
+exec $cmds/do-in-parallel parse FEEDER_PARSE_JOBS "$cmds/parse {}"
diff --git a/feeds-parse.zsh b/feeds-parse.zsh
deleted file mode 100755 (executable)
index f5ca7c1..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env zsh
-cmds=$0:h:a/cmd
-exec $cmds/do-in-parallel.zsh parse FEEDER_PARSE_JOBS "$cmds/parse.sh {}"
diff --git a/feeds-warcs b/feeds-warcs
new file mode 100755 (executable)
index 0000000..670b1e5
--- /dev/null
@@ -0,0 +1,3 @@
+#!/usr/bin/env zsh
+cmds=$0:h:a/cmd
+exec $cmds/do-in-parallel warcs FEEDER_DOWNLOAD_JOBS "$cmds/warcs {} $1"
diff --git a/feeds-warcs.zsh b/feeds-warcs.zsh
deleted file mode 100755 (executable)
index 8a24dab..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env zsh
-cmds=$0:h:a/cmd
-exec $cmds/do-in-parallel.zsh warcs FEEDER_DOWNLOAD_JOBS "$cmds/warcs.zsh {} $1"
similarity index 100%
rename from urls2feeds.zsh
rename to urls2feeds