2023-06-19 17:42:47 -04:00
|
|
|
// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
|
|
|
|
// SPDX-License-Identifier: Apache-2.0
|
2017-11-20 00:10:04 -05:00
|
|
|
|
2023-08-10 22:46:45 -04:00
|
|
|
package model // import "miniflux.app/v2/internal/model"
|
2017-11-20 00:10:04 -05:00
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
2023-10-22 19:07:06 -04:00
|
|
|
"io"
|
2020-05-25 17:06:56 -04:00
|
|
|
"math"
|
2017-11-20 00:10:04 -05:00
|
|
|
"time"
|
2018-10-15 00:43:48 -04:00
|
|
|
|
2023-08-10 22:46:45 -04:00
|
|
|
"miniflux.app/v2/internal/config"
|
2017-11-20 00:10:04 -05:00
|
|
|
)
|
|
|
|
|
2021-01-04 16:49:28 -05:00
|
|
|
// List of supported schedulers.
const (
	// SchedulerRoundRobin refreshes every feed at the global polling frequency.
	SchedulerRoundRobin = "round_robin"
	// SchedulerEntryFrequency adapts the refresh interval to how often the
	// feed publishes entries (see Feed.ScheduleNextCheck).
	SchedulerEntryFrequency = "entry_frequency"

	// Default settings for the feed query builder
	DefaultFeedSorting          = "parsing_error_count"
	DefaultFeedSortingDirection = "desc"
)
|
|
|
|
|
2018-10-15 00:43:48 -04:00
|
|
|
// Feed represents a feed in the application.
type Feed struct {
	ID                 int64     `json:"id"`
	UserID             int64     `json:"user_id"`
	FeedURL            string    `json:"feed_url"`
	SiteURL            string    `json:"site_url"`
	Title              string    `json:"title"`
	CheckedAt          time.Time `json:"checked_at"`
	NextCheckAt        time.Time `json:"next_check_at"`
	EtagHeader         string    `json:"etag_header"`
	LastModifiedHeader string    `json:"last_modified_header"`

	// Parsing error state; see WithTranslatedErrorMessage and ResetErrorCounter.
	ParsingErrorMsg   string `json:"parsing_error_message"`
	ParsingErrorCount int    `json:"parsing_error_count"`

	ScraperRules                string `json:"scraper_rules"`
	RewriteRules                string `json:"rewrite_rules"`
	Crawler                     bool   `json:"crawler"`
	BlocklistRules              string `json:"blocklist_rules"`
	KeeplistRules               string `json:"keeplist_rules"`
	UrlRewriteRules             string `json:"urlrewrite_rules"`
	UserAgent                   string `json:"user_agent"`
	Cookie                      string `json:"cookie"`
	Username                    string `json:"username"`
	Password                    string `json:"password"`
	Disabled                    bool   `json:"disabled"`
	NoMediaPlayer               bool   `json:"no_media_player"`
	IgnoreHTTPCache             bool   `json:"ignore_http_cache"`
	AllowSelfSignedCertificates bool   `json:"allow_self_signed_certificates"`
	FetchViaProxy               bool   `json:"fetch_via_proxy"`
	HideGlobally                bool   `json:"hide_globally"`
	AppriseServiceURLs          string `json:"apprise_service_urls"`

	// Non persisted attributes
	Category *Category `json:"category,omitempty"`
	Icon     *FeedIcon `json:"icon"`
	Entries  Entries   `json:"entries,omitempty"`

	// Runtime-only values, excluded from JSON serialization.
	TTL                    int    `json:"-"`
	IconURL                string `json:"-"`
	UnreadCount            int    `json:"-"`
	ReadCount              int    `json:"-"`
	NumberOfVisibleEntries int    `json:"-"`
}
|
|
|
|
|
2022-05-21 14:44:56 -04:00
|
|
|
// FeedCounters holds read and unread entry counters keyed by feed ID.
type FeedCounters struct {
	ReadCounters   map[int64]int `json:"reads"`
	UnreadCounters map[int64]int `json:"unreads"`
}
|
|
|
|
|
2017-11-20 00:10:04 -05:00
|
|
|
func (f *Feed) String() string {
|
|
|
|
return fmt.Sprintf("ID=%d, UserID=%d, FeedURL=%s, SiteURL=%s, Title=%s, Category={%s}",
|
|
|
|
f.ID,
|
|
|
|
f.UserID,
|
|
|
|
f.FeedURL,
|
|
|
|
f.SiteURL,
|
|
|
|
f.Title,
|
|
|
|
f.Category,
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2018-10-15 00:43:48 -04:00
|
|
|
// WithCategoryID initializes the category attribute of the feed.
|
|
|
|
func (f *Feed) WithCategoryID(categoryID int64) {
|
|
|
|
f.Category = &Category{ID: categoryID}
|
|
|
|
}
|
|
|
|
|
2023-10-21 22:50:29 -04:00
|
|
|
// WithTranslatedErrorMessage adds a new error message and increment the error counter.
|
|
|
|
func (f *Feed) WithTranslatedErrorMessage(message string) {
|
2018-10-15 00:43:48 -04:00
|
|
|
f.ParsingErrorCount++
|
|
|
|
f.ParsingErrorMsg = message
|
|
|
|
}
|
|
|
|
|
|
|
|
// ResetErrorCounter removes all previous errors.
|
|
|
|
func (f *Feed) ResetErrorCounter() {
|
|
|
|
f.ParsingErrorCount = 0
|
|
|
|
f.ParsingErrorMsg = ""
|
|
|
|
}
|
|
|
|
|
|
|
|
// CheckedNow set attribute values when the feed is refreshed.
|
|
|
|
func (f *Feed) CheckedNow() {
|
|
|
|
f.CheckedAt = time.Now()
|
|
|
|
|
|
|
|
if f.SiteURL == "" {
|
|
|
|
f.SiteURL = f.FeedURL
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-05-25 17:06:56 -04:00
|
|
|
// ScheduleNextCheck set "next_check_at" of a feed based on the scheduler selected from the configuration.
|
|
|
|
func (f *Feed) ScheduleNextCheck(weeklyCount int) {
|
2020-05-25 17:59:15 -04:00
|
|
|
switch config.Opts.PollingScheduler() {
|
|
|
|
case SchedulerEntryFrequency:
|
2020-05-25 17:06:56 -04:00
|
|
|
var intervalMinutes int
|
|
|
|
if weeklyCount == 0 {
|
2020-05-25 17:59:15 -04:00
|
|
|
intervalMinutes = config.Opts.SchedulerEntryFrequencyMaxInterval()
|
2020-05-25 17:06:56 -04:00
|
|
|
} else {
|
2023-08-24 06:02:46 -04:00
|
|
|
intervalMinutes = int(math.Round(float64(7*24*60) / float64(weeklyCount*config.Opts.SchedulerEntryFrequencyFactor())))
|
|
|
|
intervalMinutes = int(math.Min(float64(intervalMinutes), float64(config.Opts.SchedulerEntryFrequencyMaxInterval())))
|
|
|
|
intervalMinutes = int(math.Max(float64(intervalMinutes), float64(config.Opts.SchedulerEntryFrequencyMinInterval())))
|
2020-05-25 17:06:56 -04:00
|
|
|
}
|
2020-05-25 17:59:15 -04:00
|
|
|
f.NextCheckAt = time.Now().Add(time.Minute * time.Duration(intervalMinutes))
|
2020-05-25 17:06:56 -04:00
|
|
|
default:
|
2023-10-17 00:20:58 -04:00
|
|
|
f.NextCheckAt = time.Now().Add(time.Minute * time.Duration(config.Opts.PollingFrequency()))
|
2020-05-25 17:06:56 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-01-04 16:49:28 -05:00
|
|
|
// FeedCreationRequest represents the request to create a feed.
type FeedCreationRequest struct {
	FeedURL                     string `json:"feed_url"`
	CategoryID                  int64  `json:"category_id"`
	UserAgent                   string `json:"user_agent"`
	Cookie                      string `json:"cookie"`
	Username                    string `json:"username"`
	Password                    string `json:"password"`
	Crawler                     bool   `json:"crawler"`
	Disabled                    bool   `json:"disabled"`
	NoMediaPlayer               bool   `json:"no_media_player"`
	IgnoreHTTPCache             bool   `json:"ignore_http_cache"`
	AllowSelfSignedCertificates bool   `json:"allow_self_signed_certificates"`
	FetchViaProxy               bool   `json:"fetch_via_proxy"`
	ScraperRules                string `json:"scraper_rules"`
	RewriteRules                string `json:"rewrite_rules"`
	BlocklistRules              string `json:"blocklist_rules"`
	KeeplistRules               string `json:"keeplist_rules"`
	HideGlobally                bool   `json:"hide_globally"`
	UrlRewriteRules             string `json:"urlrewrite_rules"`
}
|
|
|
|
|
|
|
|
// FeedCreationRequestFromSubscriptionDiscovery is a feed creation request that
// additionally carries the feed document obtained during subscription
// discovery, together with the ETag and Last-Modified values of that download.
type FeedCreationRequestFromSubscriptionDiscovery struct {
	// Content is the already-downloaded feed document (not serialized to JSON).
	Content      io.ReadSeeker
	ETag         string
	LastModified string

	FeedURL                     string `json:"feed_url"`
	CategoryID                  int64  `json:"category_id"`
	UserAgent                   string `json:"user_agent"`
	Cookie                      string `json:"cookie"`
	Username                    string `json:"username"`
	Password                    string `json:"password"`
	Crawler                     bool   `json:"crawler"`
	Disabled                    bool   `json:"disabled"`
	NoMediaPlayer               bool   `json:"no_media_player"`
	IgnoreHTTPCache             bool   `json:"ignore_http_cache"`
	AllowSelfSignedCertificates bool   `json:"allow_self_signed_certificates"`
	FetchViaProxy               bool   `json:"fetch_via_proxy"`
	ScraperRules                string `json:"scraper_rules"`
	RewriteRules                string `json:"rewrite_rules"`
	BlocklistRules              string `json:"blocklist_rules"`
	KeeplistRules               string `json:"keeplist_rules"`
	HideGlobally                bool   `json:"hide_globally"`
	UrlRewriteRules             string `json:"urlrewrite_rules"`
}
|
|
|
|
|
|
|
|
// FeedModificationRequest represents the request to update a feed.
// Every field is a pointer so that a nil value means "leave the attribute
// unchanged" (see Patch), distinguishing it from an explicit zero value.
type FeedModificationRequest struct {
	FeedURL                     *string `json:"feed_url"`
	SiteURL                     *string `json:"site_url"`
	Title                       *string `json:"title"`
	ScraperRules                *string `json:"scraper_rules"`
	RewriteRules                *string `json:"rewrite_rules"`
	BlocklistRules              *string `json:"blocklist_rules"`
	KeeplistRules               *string `json:"keeplist_rules"`
	UrlRewriteRules             *string `json:"urlrewrite_rules"`
	Crawler                     *bool   `json:"crawler"`
	UserAgent                   *string `json:"user_agent"`
	Cookie                      *string `json:"cookie"`
	Username                    *string `json:"username"`
	Password                    *string `json:"password"`
	CategoryID                  *int64  `json:"category_id"`
	Disabled                    *bool   `json:"disabled"`
	NoMediaPlayer               *bool   `json:"no_media_player"`
	IgnoreHTTPCache             *bool   `json:"ignore_http_cache"`
	AllowSelfSignedCertificates *bool   `json:"allow_self_signed_certificates"`
	FetchViaProxy               *bool   `json:"fetch_via_proxy"`
	HideGlobally                *bool   `json:"hide_globally"`
}
|
|
|
|
|
|
|
|
// Patch updates a feed with modified values.
|
|
|
|
func (f *FeedModificationRequest) Patch(feed *Feed) {
|
|
|
|
if f.FeedURL != nil && *f.FeedURL != "" {
|
|
|
|
feed.FeedURL = *f.FeedURL
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.SiteURL != nil && *f.SiteURL != "" {
|
|
|
|
feed.SiteURL = *f.SiteURL
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.Title != nil && *f.Title != "" {
|
|
|
|
feed.Title = *f.Title
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.ScraperRules != nil {
|
|
|
|
feed.ScraperRules = *f.ScraperRules
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.RewriteRules != nil {
|
|
|
|
feed.RewriteRules = *f.RewriteRules
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.KeeplistRules != nil {
|
|
|
|
feed.KeeplistRules = *f.KeeplistRules
|
|
|
|
}
|
|
|
|
|
2022-07-12 00:12:26 -04:00
|
|
|
if f.UrlRewriteRules != nil {
|
|
|
|
feed.UrlRewriteRules = *f.UrlRewriteRules
|
|
|
|
}
|
|
|
|
|
2021-01-04 16:49:28 -05:00
|
|
|
if f.BlocklistRules != nil {
|
|
|
|
feed.BlocklistRules = *f.BlocklistRules
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.Crawler != nil {
|
|
|
|
feed.Crawler = *f.Crawler
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.UserAgent != nil {
|
|
|
|
feed.UserAgent = *f.UserAgent
|
|
|
|
}
|
|
|
|
|
2021-03-22 23:27:58 -04:00
|
|
|
if f.Cookie != nil {
|
|
|
|
feed.Cookie = *f.Cookie
|
|
|
|
}
|
|
|
|
|
2021-01-04 16:49:28 -05:00
|
|
|
if f.Username != nil {
|
|
|
|
feed.Username = *f.Username
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.Password != nil {
|
|
|
|
feed.Password = *f.Password
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.CategoryID != nil && *f.CategoryID > 0 {
|
|
|
|
feed.Category.ID = *f.CategoryID
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.Disabled != nil {
|
|
|
|
feed.Disabled = *f.Disabled
|
|
|
|
}
|
|
|
|
|
Add Media Player and resume to last playback position
In order to ease podcast listening, the player can be put on top of the feed entry as main content.
Use the `Use podcast player` option to enable that. It works on audio and video.
Also, when playing audio or video, progression will be saved in order to be able to resume listening later.
This position saving is done using the original attachement/enclosures player AND podcast player and do not rely on
the podcast player option ti be enabled.
Additionally, I made the player fill the width with the entry container to ease seeking and have a bigger video.
updateEnclosures now keep existing enclosures based on URL
When feeds get updated, enclosures entries are always wiped and re-created. This cause two issue
- enclosure progression get lost in the process
- enclosure ID changes
I used the URL as identifier of an enclosure. Not perfect but hopefully should work.
When an enclosure already exist, I simply do nothing and leave the entry as is in the database.
If anyone is listening/watching to this enclosure during the refresh, the id stay coherent and progression saving still works.
The updateEnclosures function got a bit more complex. I tried to make it the more clear I could.
Some optimisation are possible but would make the function harder to read in my opinion.
I'm not sure if this is often the case, but some feeds may include tracking or simply change the url each
time we update the feed. In those situation, enclosures ids and progression will be lost.
I have no idea how to handle this last situation. Use the size instead/alongside url to define the identity of an enclosure ?
Translation: english as placeholder for every language except French
Aside, I tested a video feed and fixed a few things for it. In fact, the MimeType was not working
at all on my side, and found a pretty old stackoverflow discussion that suggest to use an Apple non-standard MimeType for
m4v video format. I only did one substitution because I only have one feed to test. Any new video feed can make this go away
or evolve depending on the situation. Real video feeds does not tend to be easy to find and test extensively this.
Co-authored-by: toastal
2023-04-13 05:46:43 -04:00
|
|
|
if f.NoMediaPlayer != nil {
|
|
|
|
feed.NoMediaPlayer = *f.NoMediaPlayer
|
|
|
|
}
|
|
|
|
|
2021-01-04 16:49:28 -05:00
|
|
|
if f.IgnoreHTTPCache != nil {
|
|
|
|
feed.IgnoreHTTPCache = *f.IgnoreHTTPCache
|
|
|
|
}
|
|
|
|
|
2021-02-21 16:42:49 -05:00
|
|
|
if f.AllowSelfSignedCertificates != nil {
|
|
|
|
feed.AllowSelfSignedCertificates = *f.AllowSelfSignedCertificates
|
|
|
|
}
|
|
|
|
|
2021-01-04 16:49:28 -05:00
|
|
|
if f.FetchViaProxy != nil {
|
|
|
|
feed.FetchViaProxy = *f.FetchViaProxy
|
|
|
|
}
|
2021-08-15 11:32:43 -04:00
|
|
|
|
|
|
|
if f.HideGlobally != nil {
|
|
|
|
feed.HideGlobally = *f.HideGlobally
|
|
|
|
}
|
2021-01-04 16:49:28 -05:00
|
|
|
}
|
|
|
|
|
2017-11-20 00:10:04 -05:00
|
|
|
// Feeds is a list of feeds.
type Feeds []*Feed
|