🚧 Added updateAll

This commit is contained in:
makeworld 2020-08-17 13:31:45 -04:00
parent d254917b36
commit c63861230e
2 changed files with 96 additions and 11 deletions

View File

@ -20,6 +20,9 @@ import (
"github.com/mmcdole/gofeed" "github.com/mmcdole/gofeed"
) )
// TODO: Test for deadlocks and whether there should be more
// goroutines for file writing or other things.
var ( var (
ErrSaving = errors.New("couldn't save JSON to disk") ErrSaving = errors.New("couldn't save JSON to disk")
ErrNotSuccess = errors.New("status 20 not returned") ErrNotSuccess = errors.New("status 20 not returned")
@ -93,11 +96,9 @@ func writeJson() error {
enc.SetEscapeHTML(false) enc.SetEscapeHTML(false)
enc.SetIndent("", " ") enc.SetIndent("", " ")
data.feedMu.Lock() data.Lock()
data.pageMu.Lock()
err = enc.Encode(&data) err = enc.Encode(&data)
data.feedMu.Unlock() data.Unlock()
data.pageMu.Unlock()
return err return err
} }
@ -191,10 +192,15 @@ func updatePage(url string) error {
if _, err := io.Copy(h, res.Body); err != nil { if _, err := io.Copy(h, res.Body); err != nil {
return err return err
} }
newHash := fmt.Sprintf("%x", h.Sum(nil))
data.pageMu.Lock() data.pageMu.Lock()
data.Pages[url] = &pageJson{ if data.Pages[url].Hash != newHash {
Hash: fmt.Sprintf("%x", h.Sum(nil)), // Page content is different
Updated: time.Now().UTC(), data.Pages[url] = &pageJson{
Hash: newHash,
Changed: time.Now().UTC(),
}
} }
data.pageMu.Unlock() data.pageMu.Unlock()
@ -211,7 +217,62 @@ func updatePage(url string) error {
} }
// updateAll updates all feeds and pages. // updateAll updates all feeds and pages.
// It should run in goroutine at a regular interval.
func updateAll() { func updateAll() {
// TODO: Is two goroutines the right amount?
worker := func(jobs <-chan [2]string, wg *sync.WaitGroup) {
// Each job is: []string{<type>, "url"}
// where <type> is "feed" or "page"
defer wg.Done()
for j := range jobs {
if j[0] == "feed" {
updateFeed(j[1])
}
if j[0] == "page" {
updatePage(j[1])
}
}
}
var wg sync.WaitGroup
data.RLock()
numJobs := len(data.Feeds) + len(data.Pages)
jobs := make(chan [2]string, numJobs)
// Start 2 workers, waiting for jobs
for w := 0; w < 2; w++ {
wg.Add(1)
go worker(jobs, &wg)
}
// Get map keys in a slice
feedKeys := make([]string, len(data.Feeds))
i := 0
for k := range data.Feeds {
feedKeys[i] = k
i++
}
pageKeys := make([]string, len(data.Pages))
i = 0
for k := range data.Pages {
pageKeys[i] = k
i++
}
data.RUnlock()
for j := 0; j < numJobs; j++ {
if j < len(feedKeys) {
jobs <- [2]string{"feed", feedKeys[j]}
} else {
// In the Pages
jobs <- [2]string{"page", pageKeys[j-len(feedKeys)]}
}
}
wg.Wait()
} }

View File

@ -17,11 +17,11 @@ Example JSON.
"pages": { "pages": {
"url1": { "url1": {
"hash": <hash>, "hash": <hash>,
"updated": <time> "changed": <time>
}, },
"url2": { "url2": {
"hash": <hash>, "hash": <hash>,
"updated": <time> "changed": <time>
} }
} }
} }
@ -39,9 +39,33 @@ type jsonData struct {
Pages map[string]*pageJson `json:"pages,omitempty"` Pages map[string]*pageJson `json:"pages,omitempty"`
} }
// Lock locks both feed and page mutexes.
//
// Both mutexes are always acquired in the same order, feedMu then pageMu;
// keeping that order consistent across every locking helper on jsonData is
// what prevents two goroutines from deadlocking against each other.
func (j *jsonData) Lock() {
	j.feedMu.Lock()
	j.pageMu.Lock()
}
// Unlock unlocks both feed and page mutexes.
//
// Counterpart to Lock. Release order is not significant for correctness
// (unlocking never blocks), but it mirrors Lock's feedMu-then-pageMu order
// for readability.
func (j *jsonData) Unlock() {
	j.feedMu.Unlock()
	j.pageMu.Unlock()
}
// RLock read-locks both feed and page mutexes.
//
// Acquires feedMu then pageMu — the same order Lock uses — so readers and
// writers contending for both mutexes cannot deadlock each other.
func (j *jsonData) RLock() {
	j.feedMu.RLock()
	j.pageMu.RLock()
}
// RUnlock read-unlocks both feed and page mutexes.
//
// Counterpart to RLock; releases the read locks taken there.
func (j *jsonData) RUnlock() {
	j.feedMu.RUnlock()
	j.pageMu.RUnlock()
}
type pageJson struct { type pageJson struct {
Hash string `json:"hash"` Hash string `json:"hash"`
Updated time.Time `json:"updated"` Changed time.Time `json:"changed"` // When the latest change happened
} }
var data jsonData // Global instance of jsonData - loaded from JSON and used var data jsonData // Global instance of jsonData - loaded from JSON and used