From c72a225a43148176a11271a29c6cb18e2e780d4d Mon Sep 17 00:00:00 2001
From: makeworld
Date: Wed, 18 Nov 2020 16:10:22 -0500
Subject: [PATCH] =?UTF-8?q?=F0=9F=9A=A7=20Fix=20duplicate=20entries=20in?=
 =?UTF-8?q?=20feed=20page?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 NOTES.md         |  9 +++------
 display/feeds.go | 15 ++++++++++-----
 feeds/feeds.go   |  7 +++++--
 3 files changed, 18 insertions(+), 13 deletions(-)

diff --git a/NOTES.md b/NOTES.md
index 974e1ad..f79e8d9 100644
--- a/NOTES.md
+++ b/NOTES.md
@@ -1,17 +1,14 @@
 # Notes
 
-## Temp
-- Recalculating `about:feeds` adds pages multiple times to the view
-- Only options for feed files is the download modal - there should be a feed modal before that one
-- Auto feed detection fails on `ebc.li/atom.xml`
-
+## Feeds (temp)
+- Only options for "non-text" feed files is the download modal - there should be a feed modal before that one
+- Auto feed detection fails on `ebc.li/atom.xml` - which is text
 - TODO: remove all logger lines
 
 ## Issues
 - URL for each tab should not be stored as a string - in the current code there's lots of reparsing the URL
-
 
 ## Regressions
 
 
 
diff --git a/display/feeds.go b/display/feeds.go
index 0203746..21c04ed 100644
--- a/display/feeds.go
+++ b/display/feeds.go
@@ -19,9 +19,6 @@ import (
 	"github.com/spf13/viper"
 )
 
-var feedPageRaw = "# Feeds & Pages\n\nUpdates" + strings.Repeat(" ", 80-25) + "[Newest -> Oldest]\n" +
-	strings.Repeat("-", 80) + "\nSee the help (by pressing ?) for details on how to use this page.\n\n"
-
 var feedPageUpdated time.Time
 
 // toLocalDay truncates the provided time to a date only,
@@ -38,11 +35,17 @@ func Feeds(t *tab) {
 	// Retrieve cached version if there hasn't been any updates
 	p, ok := cache.GetPage("about:feeds")
 	if feedPageUpdated.After(feeds.LastUpdated) && ok {
+		logger.Log.Println("using cached feeds page")
 		setPage(t, p)
 		t.applyBottomBar()
 		return
 	}
 
+	logger.Log.Println("started rendering feeds page")
+
+	feedPageRaw := "# Feeds & Pages\n\nUpdates" + strings.Repeat(" ", 80-25) + "[Newest -> Oldest]\n" +
+		strings.Repeat("-", 80) + "\nSee the help (by pressing ?) for details on how to use this page.\n\n"
+
 	// curDay represents what day of posts the loop is on.
 	// It only goes backwards in time.
 	// Its initial setting means:
@@ -62,7 +65,7 @@ func Feeds(t *tab) {
 
 		if pub.Before(curDay) {
 			// This post is on a new day, add a day header
-			curDay := pub
+			curDay = pub
 			feedPageRaw += fmt.Sprintf("\n## %s\n\n", curDay.Format("Jan 02, 2006"))
 		}
 		feedPageRaw += fmt.Sprintf("=>%s %s - %s\n", entry.URL, entry.Author, entry.Title)
@@ -82,6 +85,8 @@ func Feeds(t *tab) {
 	t.applyBottomBar()
 
 	feedPageUpdated = time.Now()
+
+	logger.Log.Println("done rendering feeds page")
 }
 
 // openFeedModal displays the "Add feed/page" modal
@@ -159,7 +164,7 @@ func addFeedDirect(u string, feed *gofeed.Feed, tracked bool) {
 	}
 }
 
-// addFeed goes through the process of adding a bookmark for the current page.
+// addFeed goes through the process of tracking the current page/feed.
 // It is the high-level way of doing it. It should be called in a goroutine.
 func addFeed() {
 	logger.Log.Println("display.addFeed called")
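Review notes on the display/feeds.go hunks above: the duplicate entries trace back to two bugs. First, `feedPageRaw` was a package-level string that `Feeds` grew with `+=`, so every recalculation of `about:feeds` appended the full entry list onto whatever the previous render left behind; declaring it as a local rebuilds the page from just the header on each call. A minimal sketch of that failure mode (illustrative names, not Amfora code):

```go
package main

import "fmt"

// page is package-level state, mirroring the old feedPageRaw.
var page = "# Header\n"

// render appends onto the package-level string, so a second call
// re-appends the same entries after the first call's output.
func render(entries []string) string {
	for _, e := range entries {
		page += e + "\n"
	}
	return page
}

func main() {
	entries := []string{"=> gemini://a Post A", "=> gemini://b Post B"}
	fmt.Print(render(entries)) // header, A, B
	fmt.Print(render(entries)) // header, A, B, A, B - duplicated
}
```

Second, `curDay := pub` inside the `if` block used `:=`, which declares a new `curDay` shadowing the loop's variable, so the outer day tracker never advanced and the day-header branch fired for every entry. The fixed line assigns instead of declaring. The same pitfall in isolation (again an illustrative sketch):

```go
package main

import "fmt"

func main() {
	cur := 100 // stands in for the outer curDay
	for _, v := range []int{50, 70, 20} {
		if v < cur {
			cur := v // BUG: ':=' declares a block-scoped copy; outer cur stays 100
			fmt.Println("new low:", cur)
		}
	}
	fmt.Println("final:", cur)
	// Prints "new low" for all three values and "final: 100".
	// With 'cur = v' the 70 would be skipped and it would end with "final: 20".
}
```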
diff --git a/feeds/feeds.go b/feeds/feeds.go
index 224a797..a813eff 100644
--- a/feeds/feeds.go
+++ b/feeds/feeds.go
@@ -42,10 +42,12 @@ var LastUpdated time.Time
 func Init() error {
 	f, err := os.Open(config.FeedPath)
 	if err == nil {
+		// File exists and could be opened
 		defer f.Close()
 
 		fi, err := f.Stat()
 		if err == nil && fi.Size() > 0 {
+			// File is not empty
 			dec := json.NewDecoder(f)
 			err = dec.Decode(&data)
 			if err != nil && err != io.EOF {
@@ -156,13 +158,13 @@ func AddFeed(url string, feed *gofeed.Feed) error {
 
 	if !ok || !reflect.DeepEqual(feed, oldFeed) {
 		// Feeds are different, or there was never an old one
+		LastUpdated = time.Now()
 		data.Feeds[url] = feed
 		data.feedMu.Unlock()
 		err := writeJSON()
 		if err != nil {
 			return ErrSaving
 		}
-		LastUpdated = time.Now()
 	} else {
 		data.feedMu.Unlock()
 	}
@@ -189,6 +191,8 @@ func AddPage(url string, r io.Reader) error {
 	_, ok := data.Pages[url]
 	if !ok || data.Pages[url].Hash != newHash {
 		// Page content is different, or it didn't exist
+
+		LastUpdated = time.Now()
 		data.Pages[url] = &pageJSON{
 			Hash:    newHash,
 			Changed: time.Now().UTC(),
@@ -199,7 +203,6 @@ func AddPage(url string, r io.Reader) error {
 		if err != nil {
 			return ErrSaving
 		}
-		LastUpdated = time.Now()
 	} else {
 		data.pageMu.Unlock()
 	}
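Review note on the feeds/feeds.go ordering change: `LastUpdated` is what the display layer compares against, since `Feeds` serves the cached page whenever `feedPageUpdated.After(feeds.LastUpdated)`. Previously the timestamp was only bumped after `writeJSON()` succeeded, so a save that failed with `ErrSaving` left `LastUpdated` stale even though the in-memory data had already changed, and the stale cached page would keep being served. Bumping it before the mutate-and-save sequence guarantees any change invalidates the cache. A compact model of this timestamp-based invalidation (illustrative names, not Amfora's actual API):

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

var (
	lastUpdated  time.Time // when the tracked data last changed
	pageRendered time.Time // when the cached page was last built
)

// addEntry bumps lastUpdated before mutating and saving, so even a
// failed save leaves the cache correctly marked as stale.
func addEntry(save func() error) error {
	lastUpdated = time.Now()
	// ... mutate the in-memory data here ...
	return save()
}

func cachedPageFresh() bool {
	return pageRendered.After(lastUpdated)
}

func main() {
	pageRendered = time.Now()
	fmt.Println(cachedPageFresh()) // true: nothing changed since the render

	_ = addEntry(func() error { return errors.New("disk full") })
	fmt.Println(cachedPageFresh()) // false: the page must be re-rendered
}
```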