2017-11-20 00:10:04 -05:00
|
|
|
// Copyright 2017 Frédéric Guillot. All rights reserved.
|
|
|
|
// Use of this source code is governed by the Apache 2.0
|
|
|
|
// license that can be found in the LICENSE file.
|
|
|
|
|
2018-08-25 00:51:50 -04:00
|
|
|
package storage // import "miniflux.app/storage"
|
2017-11-20 00:10:04 -05:00
|
|
|
|
|
|
|
import (
|
2021-01-18 16:22:09 -05:00
|
|
|
"database/sql"
|
2017-11-20 00:10:04 -05:00
|
|
|
"fmt"
|
|
|
|
"strings"
|
|
|
|
"time"
|
2017-11-21 20:40:29 -05:00
|
|
|
|
2017-12-03 20:44:27 -05:00
|
|
|
"github.com/lib/pq"
|
|
|
|
|
2018-08-25 00:51:50 -04:00
|
|
|
"miniflux.app/model"
|
|
|
|
"miniflux.app/timezone"
|
2017-11-20 00:10:04 -05:00
|
|
|
)
|
|
|
|
|
2017-11-21 20:40:29 -05:00
|
|
|
// EntryQueryBuilder builds a SQL query to fetch entries.
type EntryQueryBuilder struct {
	store           *Storage      // database handle used to run the final query
	args            []interface{} // positional SQL arguments ($1, $2, ...) paired with conditions
	conditions      []string      // WHERE clause fragments, joined with AND by buildCondition
	sortExpressions []string      // ORDER BY expressions, applied in insertion order
	limit           int           // LIMIT value; 0 means no LIMIT clause
	offset          int           // OFFSET value; 0 means no OFFSET clause
}
|
|
|
|
|
2018-07-04 20:40:03 -04:00
|
|
|
// WithSearchQuery adds full-text search query to the condition.
|
|
|
|
func (e *EntryQueryBuilder) WithSearchQuery(query string) *EntryQueryBuilder {
|
|
|
|
if query != "" {
|
2020-03-10 23:58:45 -04:00
|
|
|
nArgs := len(e.args) + 1
|
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("e.document_vectors @@ plainto_tsquery($%d)", nArgs))
|
2018-07-04 20:40:03 -04:00
|
|
|
e.args = append(e.args, query)
|
2020-06-30 23:50:34 -04:00
|
|
|
|
|
|
|
// 0.0000001 = 0.1 / (seconds_in_a_day)
|
2023-06-19 17:00:10 -04:00
|
|
|
e.WithSorting(
|
|
|
|
fmt.Sprintf("ts_rank(document_vectors, plainto_tsquery($%d)) - extract (epoch from now() - published_at)::float * 0.0000001", nArgs),
|
|
|
|
"DESC",
|
|
|
|
)
|
2018-07-04 20:40:03 -04:00
|
|
|
}
|
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2017-12-22 14:33:01 -05:00
|
|
|
// WithStarred adds starred filter.
|
2022-04-14 00:53:06 -04:00
|
|
|
func (e *EntryQueryBuilder) WithStarred(starred bool) *EntryQueryBuilder {
|
|
|
|
if starred {
|
|
|
|
e.conditions = append(e.conditions, "e.starred is true")
|
|
|
|
} else {
|
|
|
|
e.conditions = append(e.conditions, "e.starred is false")
|
|
|
|
}
|
2017-12-22 14:33:01 -05:00
|
|
|
return e
|
2017-12-03 20:44:27 -05:00
|
|
|
}
|
|
|
|
|
2018-06-09 16:40:20 -04:00
|
|
|
// BeforeDate adds a condition < published_at
|
|
|
|
func (e *EntryQueryBuilder) BeforeDate(date time.Time) *EntryQueryBuilder {
|
2018-06-08 21:24:41 -04:00
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("e.published_at < $%d", len(e.args)+1))
|
|
|
|
e.args = append(e.args, date)
|
2017-12-03 20:44:27 -05:00
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2018-06-09 16:40:20 -04:00
|
|
|
// AfterDate adds a condition > published_at
|
|
|
|
func (e *EntryQueryBuilder) AfterDate(date time.Time) *EntryQueryBuilder {
|
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("e.published_at > $%d", len(e.args)+1))
|
|
|
|
e.args = append(e.args, date)
|
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
|
|
|
// BeforeEntryID adds a condition < entryID.
|
|
|
|
func (e *EntryQueryBuilder) BeforeEntryID(entryID int64) *EntryQueryBuilder {
|
|
|
|
if entryID != 0 {
|
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("e.id < $%d", len(e.args)+1))
|
|
|
|
e.args = append(e.args, entryID)
|
|
|
|
}
|
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
|
|
|
// AfterEntryID adds a condition > entryID.
|
|
|
|
func (e *EntryQueryBuilder) AfterEntryID(entryID int64) *EntryQueryBuilder {
|
2018-06-08 21:24:41 -04:00
|
|
|
if entryID != 0 {
|
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("e.id > $%d", len(e.args)+1))
|
|
|
|
e.args = append(e.args, entryID)
|
|
|
|
}
|
2017-12-03 20:44:27 -05:00
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2020-09-13 00:17:55 -04:00
|
|
|
// WithEntryIDs filter by entry IDs.
|
2017-12-03 20:44:27 -05:00
|
|
|
func (e *EntryQueryBuilder) WithEntryIDs(entryIDs []int64) *EntryQueryBuilder {
|
2018-06-08 21:24:41 -04:00
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("e.id = ANY($%d)", len(e.args)+1))
|
2020-09-13 00:17:55 -04:00
|
|
|
e.args = append(e.args, pq.Int64Array(entryIDs))
|
2017-12-03 20:44:27 -05:00
|
|
|
return e
|
2017-11-20 00:10:04 -05:00
|
|
|
}
|
|
|
|
|
2020-09-13 00:17:55 -04:00
|
|
|
// WithEntryID filter by entry ID.
|
2017-11-20 00:10:04 -05:00
|
|
|
func (e *EntryQueryBuilder) WithEntryID(entryID int64) *EntryQueryBuilder {
|
2018-06-08 21:24:41 -04:00
|
|
|
if entryID != 0 {
|
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("e.id = $%d", len(e.args)+1))
|
|
|
|
e.args = append(e.args, entryID)
|
|
|
|
}
|
2017-11-20 00:10:04 -05:00
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2020-09-13 00:17:55 -04:00
|
|
|
// WithFeedID filter by feed ID.
|
2017-11-20 00:10:04 -05:00
|
|
|
func (e *EntryQueryBuilder) WithFeedID(feedID int64) *EntryQueryBuilder {
|
2020-09-14 00:38:11 -04:00
|
|
|
if feedID > 0 {
|
2018-06-08 21:24:41 -04:00
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("e.feed_id = $%d", len(e.args)+1))
|
|
|
|
e.args = append(e.args, feedID)
|
|
|
|
}
|
2017-11-20 00:10:04 -05:00
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2020-09-13 00:17:55 -04:00
|
|
|
// WithCategoryID filter by category ID.
|
2017-11-20 00:10:04 -05:00
|
|
|
func (e *EntryQueryBuilder) WithCategoryID(categoryID int64) *EntryQueryBuilder {
|
2019-11-18 01:53:11 -05:00
|
|
|
if categoryID > 0 {
|
2018-06-08 21:24:41 -04:00
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("f.category_id = $%d", len(e.args)+1))
|
|
|
|
e.args = append(e.args, categoryID)
|
|
|
|
}
|
2017-11-20 00:10:04 -05:00
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2020-09-13 00:17:55 -04:00
|
|
|
// WithStatus filter by entry status.
|
2017-11-20 00:10:04 -05:00
|
|
|
func (e *EntryQueryBuilder) WithStatus(status string) *EntryQueryBuilder {
|
2018-06-08 21:24:41 -04:00
|
|
|
if status != "" {
|
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("e.status = $%d", len(e.args)+1))
|
|
|
|
e.args = append(e.args, status)
|
|
|
|
}
|
2017-11-20 00:10:04 -05:00
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2020-09-13 00:17:55 -04:00
|
|
|
// WithStatuses filter by a list of entry statuses.
|
|
|
|
func (e *EntryQueryBuilder) WithStatuses(statuses []string) *EntryQueryBuilder {
|
|
|
|
if len(statuses) > 0 {
|
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("e.status = ANY($%d)", len(e.args)+1))
|
|
|
|
e.args = append(e.args, pq.StringArray(statuses))
|
|
|
|
}
|
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2023-02-24 23:52:45 -05:00
|
|
|
// WithTags filter by a list of entry tags.
|
|
|
|
func (e *EntryQueryBuilder) WithTags(tags []string) *EntryQueryBuilder {
|
|
|
|
if len(tags) > 0 {
|
|
|
|
for _, cat := range tags {
|
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("$%d = ANY(e.tags)", len(e.args)+1))
|
|
|
|
e.args = append(e.args, cat)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2017-11-21 20:40:29 -05:00
|
|
|
// WithoutStatus set the entry status that should not be returned.
|
|
|
|
func (e *EntryQueryBuilder) WithoutStatus(status string) *EntryQueryBuilder {
|
2018-06-08 21:24:41 -04:00
|
|
|
if status != "" {
|
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("e.status <> $%d", len(e.args)+1))
|
|
|
|
e.args = append(e.args, status)
|
|
|
|
}
|
2017-11-21 20:40:29 -05:00
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2020-03-17 23:22:02 -04:00
|
|
|
// WithShareCode set the entry share code.
|
2019-10-05 07:30:25 -04:00
|
|
|
func (e *EntryQueryBuilder) WithShareCode(shareCode string) *EntryQueryBuilder {
|
|
|
|
e.conditions = append(e.conditions, fmt.Sprintf("e.share_code = $%d", len(e.args)+1))
|
|
|
|
e.args = append(e.args, shareCode)
|
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2020-03-22 21:48:14 -04:00
|
|
|
// WithShareCodeNotEmpty adds a filter for non-empty share code.
|
|
|
|
func (e *EntryQueryBuilder) WithShareCodeNotEmpty() *EntryQueryBuilder {
|
|
|
|
e.conditions = append(e.conditions, "e.share_code <> ''")
|
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2023-06-19 17:00:10 -04:00
|
|
|
// WithSorting add a sort expression.
|
|
|
|
func (e *EntryQueryBuilder) WithSorting(column, direction string) *EntryQueryBuilder {
|
|
|
|
e.sortExpressions = append(e.sortExpressions, fmt.Sprintf("%s %s", column, direction))
|
2017-11-20 00:10:04 -05:00
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2017-11-21 20:40:29 -05:00
|
|
|
// WithLimit set the limit.
|
2017-11-20 00:10:04 -05:00
|
|
|
func (e *EntryQueryBuilder) WithLimit(limit int) *EntryQueryBuilder {
|
2021-01-04 00:20:21 -05:00
|
|
|
if limit > 0 {
|
|
|
|
e.limit = limit
|
|
|
|
}
|
2017-11-20 00:10:04 -05:00
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2017-11-21 20:40:29 -05:00
|
|
|
// WithOffset set the offset.
|
2017-11-20 00:10:04 -05:00
|
|
|
func (e *EntryQueryBuilder) WithOffset(offset int) *EntryQueryBuilder {
|
2021-01-04 00:20:21 -05:00
|
|
|
if offset > 0 {
|
|
|
|
e.offset = offset
|
|
|
|
}
|
2017-11-20 00:10:04 -05:00
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2021-06-02 20:39:47 -04:00
|
|
|
func (e *EntryQueryBuilder) WithGloballyVisible() *EntryQueryBuilder {
|
|
|
|
e.conditions = append(e.conditions, "not c.hide_globally")
|
2021-08-15 11:32:43 -04:00
|
|
|
e.conditions = append(e.conditions, "not f.hide_globally")
|
2021-06-02 20:39:47 -04:00
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
2017-11-21 20:40:29 -05:00
|
|
|
// CountEntries count the number of entries that match the condition.
|
2017-11-20 00:10:04 -05:00
|
|
|
func (e *EntryQueryBuilder) CountEntries() (count int, err error) {
|
2021-06-02 20:39:47 -04:00
|
|
|
query := `
|
|
|
|
SELECT count(*)
|
|
|
|
FROM entries e
|
|
|
|
JOIN feeds f ON f.id = e.feed_id
|
|
|
|
JOIN categories c ON c.id = f.category_id
|
|
|
|
WHERE %s
|
|
|
|
`
|
2018-06-08 21:24:41 -04:00
|
|
|
condition := e.buildCondition()
|
|
|
|
|
|
|
|
err = e.store.db.QueryRow(fmt.Sprintf(query, condition), e.args...).Scan(&count)
|
2017-11-20 00:10:04 -05:00
|
|
|
if err != nil {
|
|
|
|
return 0, fmt.Errorf("unable to count entries: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
return count, nil
|
|
|
|
}
|
|
|
|
|
2017-11-21 20:40:29 -05:00
|
|
|
// GetEntry returns a single entry that match the condition.
|
2017-11-20 00:10:04 -05:00
|
|
|
func (e *EntryQueryBuilder) GetEntry() (*model.Entry, error) {
|
|
|
|
e.limit = 1
|
|
|
|
entries, err := e.GetEntries()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(entries) != 1 {
|
|
|
|
return nil, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
entries[0].Enclosures, err = e.store.GetEnclosures(entries[0].ID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
return entries[0], nil
|
|
|
|
}
|
|
|
|
|
2017-11-21 20:40:29 -05:00
|
|
|
// GetEntries returns a list of entries that match the condition.
|
2017-11-20 00:10:04 -05:00
|
|
|
func (e *EntryQueryBuilder) GetEntries() (model.Entries, error) {
|
|
|
|
query := `
|
|
|
|
SELECT
|
2020-03-10 23:58:45 -04:00
|
|
|
e.id,
|
|
|
|
e.user_id,
|
|
|
|
e.feed_id,
|
|
|
|
e.hash,
|
|
|
|
e.published_at at time zone u.timezone,
|
|
|
|
e.title,
|
|
|
|
e.url,
|
|
|
|
e.comments_url,
|
|
|
|
e.author,
|
2019-10-05 07:30:25 -04:00
|
|
|
e.share_code,
|
2020-03-10 23:58:45 -04:00
|
|
|
e.content,
|
|
|
|
e.status,
|
|
|
|
e.starred,
|
2020-11-18 20:29:40 -05:00
|
|
|
e.reading_time,
|
2020-11-29 20:04:18 -05:00
|
|
|
e.created_at,
|
2021-05-26 01:13:38 -04:00
|
|
|
e.changed_at,
|
2023-02-24 23:52:45 -05:00
|
|
|
e.tags,
|
2020-03-10 23:58:45 -04:00
|
|
|
f.title as feed_title,
|
|
|
|
f.feed_url,
|
|
|
|
f.site_url,
|
|
|
|
f.checked_at,
|
|
|
|
f.category_id, c.title as category_title,
|
|
|
|
f.scraper_rules,
|
|
|
|
f.rewrite_rules,
|
|
|
|
f.crawler,
|
|
|
|
f.user_agent,
|
2021-03-22 23:27:58 -04:00
|
|
|
f.cookie,
|
Add Media Player and resume to last playback position
In order to ease podcast listening, the player can be put on top of the feed entry as main content.
Use the `Use podcast player` option to enable that. It works on audio and video.
Also, when playing audio or video, progression will be saved in order to be able to resume listening later.
This position saving is done using the original attachement/enclosures player AND podcast player and do not rely on
the podcast player option ti be enabled.
Additionally, I made the player fill the width with the entry container to ease seeking and have a bigger video.
updateEnclosures now keep existing enclosures based on URL
When feeds get updated, enclosures entries are always wiped and re-created. This cause two issue
- enclosure progression get lost in the process
- enclosure ID changes
I used the URL as identifier of an enclosure. Not perfect but hopefully should work.
When an enclosure already exist, I simply do nothing and leave the entry as is in the database.
If anyone is listening/watching to this enclosure during the refresh, the id stay coherent and progression saving still works.
The updateEnclosures function got a bit more complex. I tried to make it the more clear I could.
Some optimisation are possible but would make the function harder to read in my opinion.
I'm not sure if this is often the case, but some feeds may include tracking or simply change the url each
time we update the feed. In those situation, enclosures ids and progression will be lost.
I have no idea how to handle this last situation. Use the size instead/alongside url to define the identity of an enclosure ?
Translation: english as placeholder for every language except French
Aside, I tested a video feed and fixed a few things for it. In fact, the MimeType was not working
at all on my side, and found a pretty old stackoverflow discussion that suggest to use an Apple non-standard MimeType for
m4v video format. I only did one substitution because I only have one feed to test. Any new video feed can make this go away
or evolve depending on the situation. Real video feeds does not tend to be easy to find and test extensively this.
Co-authored-by: toastal
2023-04-13 05:46:43 -04:00
|
|
|
f.no_media_player,
|
2020-03-10 23:58:45 -04:00
|
|
|
fi.icon_id,
|
|
|
|
u.timezone
|
|
|
|
FROM
|
|
|
|
entries e
|
|
|
|
LEFT JOIN
|
|
|
|
feeds f ON f.id=e.feed_id
|
|
|
|
LEFT JOIN
|
|
|
|
categories c ON c.id=f.category_id
|
|
|
|
LEFT JOIN
|
|
|
|
feed_icons fi ON fi.feed_id=f.id
|
|
|
|
LEFT JOIN
|
|
|
|
users u ON u.id=e.user_id
|
2017-11-20 00:10:04 -05:00
|
|
|
WHERE %s %s
|
|
|
|
`
|
|
|
|
|
2018-06-08 21:24:41 -04:00
|
|
|
condition := e.buildCondition()
|
2018-06-09 16:40:20 -04:00
|
|
|
sorting := e.buildSorting()
|
|
|
|
query = fmt.Sprintf(query, condition, sorting)
|
2017-11-20 00:10:04 -05:00
|
|
|
|
2018-06-08 21:24:41 -04:00
|
|
|
rows, err := e.store.db.Query(query, e.args...)
|
2017-11-20 00:10:04 -05:00
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("unable to get entries: %v", err)
|
|
|
|
}
|
|
|
|
defer rows.Close()
|
|
|
|
|
|
|
|
entries := make(model.Entries, 0)
|
|
|
|
for rows.Next() {
|
|
|
|
var entry model.Entry
|
2021-01-18 16:22:09 -05:00
|
|
|
var iconID sql.NullInt64
|
2018-03-02 00:24:58 -05:00
|
|
|
var tz string
|
2017-11-20 00:10:04 -05:00
|
|
|
|
2018-06-08 21:24:41 -04:00
|
|
|
entry.Feed = &model.Feed{}
|
|
|
|
entry.Feed.Category = &model.Category{}
|
2017-11-20 00:10:04 -05:00
|
|
|
entry.Feed.Icon = &model.FeedIcon{}
|
|
|
|
|
|
|
|
err := rows.Scan(
|
|
|
|
&entry.ID,
|
|
|
|
&entry.UserID,
|
|
|
|
&entry.FeedID,
|
|
|
|
&entry.Hash,
|
|
|
|
&entry.Date,
|
|
|
|
&entry.Title,
|
|
|
|
&entry.URL,
|
2018-04-07 16:50:45 -04:00
|
|
|
&entry.CommentsURL,
|
2017-11-20 00:10:04 -05:00
|
|
|
&entry.Author,
|
2019-10-05 07:30:25 -04:00
|
|
|
&entry.ShareCode,
|
2017-11-20 00:10:04 -05:00
|
|
|
&entry.Content,
|
|
|
|
&entry.Status,
|
2017-12-22 14:33:01 -05:00
|
|
|
&entry.Starred,
|
2020-11-18 20:29:40 -05:00
|
|
|
&entry.ReadingTime,
|
2020-11-29 20:04:18 -05:00
|
|
|
&entry.CreatedAt,
|
2021-05-26 01:13:38 -04:00
|
|
|
&entry.ChangedAt,
|
2023-02-24 23:52:45 -05:00
|
|
|
pq.Array(&entry.Tags),
|
2017-11-20 00:10:04 -05:00
|
|
|
&entry.Feed.Title,
|
|
|
|
&entry.Feed.FeedURL,
|
|
|
|
&entry.Feed.SiteURL,
|
|
|
|
&entry.Feed.CheckedAt,
|
|
|
|
&entry.Feed.Category.ID,
|
|
|
|
&entry.Feed.Category.Title,
|
2017-12-10 23:51:04 -05:00
|
|
|
&entry.Feed.ScraperRules,
|
2017-12-12 01:16:32 -05:00
|
|
|
&entry.Feed.RewriteRules,
|
2017-12-12 22:19:36 -05:00
|
|
|
&entry.Feed.Crawler,
|
2018-09-19 21:19:24 -04:00
|
|
|
&entry.Feed.UserAgent,
|
2021-03-22 23:27:58 -04:00
|
|
|
&entry.Feed.Cookie,
|
Add Media Player and resume to last playback position
In order to ease podcast listening, the player can be put on top of the feed entry as main content.
Use the `Use podcast player` option to enable that. It works on audio and video.
Also, when playing audio or video, progression will be saved in order to be able to resume listening later.
This position saving is done using the original attachement/enclosures player AND podcast player and do not rely on
the podcast player option ti be enabled.
Additionally, I made the player fill the width with the entry container to ease seeking and have a bigger video.
updateEnclosures now keep existing enclosures based on URL
When feeds get updated, enclosures entries are always wiped and re-created. This cause two issue
- enclosure progression get lost in the process
- enclosure ID changes
I used the URL as identifier of an enclosure. Not perfect but hopefully should work.
When an enclosure already exist, I simply do nothing and leave the entry as is in the database.
If anyone is listening/watching to this enclosure during the refresh, the id stay coherent and progression saving still works.
The updateEnclosures function got a bit more complex. I tried to make it the more clear I could.
Some optimisation are possible but would make the function harder to read in my opinion.
I'm not sure if this is often the case, but some feeds may include tracking or simply change the url each
time we update the feed. In those situation, enclosures ids and progression will be lost.
I have no idea how to handle this last situation. Use the size instead/alongside url to define the identity of an enclosure ?
Translation: english as placeholder for every language except French
Aside, I tested a video feed and fixed a few things for it. In fact, the MimeType was not working
at all on my side, and found a pretty old stackoverflow discussion that suggest to use an Apple non-standard MimeType for
m4v video format. I only did one substitution because I only have one feed to test. Any new video feed can make this go away
or evolve depending on the situation. Real video feeds does not tend to be easy to find and test extensively this.
Co-authored-by: toastal
2023-04-13 05:46:43 -04:00
|
|
|
&entry.Feed.NoMediaPlayer,
|
2017-11-20 00:10:04 -05:00
|
|
|
&iconID,
|
2018-03-02 00:24:58 -05:00
|
|
|
&tz,
|
2017-11-20 00:10:04 -05:00
|
|
|
)
|
|
|
|
|
|
|
|
if err != nil {
|
2017-12-02 20:04:01 -05:00
|
|
|
return nil, fmt.Errorf("unable to fetch entry row: %v", err)
|
2017-11-20 00:10:04 -05:00
|
|
|
}
|
|
|
|
|
2021-01-18 16:22:09 -05:00
|
|
|
if iconID.Valid {
|
|
|
|
entry.Feed.Icon.IconID = iconID.Int64
|
2017-11-20 00:10:04 -05:00
|
|
|
} else {
|
2021-01-18 16:22:09 -05:00
|
|
|
entry.Feed.Icon.IconID = 0
|
2017-11-20 00:10:04 -05:00
|
|
|
}
|
|
|
|
|
2018-03-02 00:24:58 -05:00
|
|
|
// Make sure that timestamp fields contains timezone information (API)
|
|
|
|
entry.Date = timezone.Convert(tz, entry.Date)
|
2020-11-29 20:04:18 -05:00
|
|
|
entry.CreatedAt = timezone.Convert(tz, entry.CreatedAt)
|
2021-05-26 01:13:38 -04:00
|
|
|
entry.ChangedAt = timezone.Convert(tz, entry.ChangedAt)
|
2018-03-02 00:24:58 -05:00
|
|
|
entry.Feed.CheckedAt = timezone.Convert(tz, entry.Feed.CheckedAt)
|
|
|
|
|
2017-11-20 00:10:04 -05:00
|
|
|
entry.Feed.ID = entry.FeedID
|
2018-06-08 21:24:41 -04:00
|
|
|
entry.Feed.UserID = entry.UserID
|
2017-11-20 00:10:04 -05:00
|
|
|
entry.Feed.Icon.FeedID = entry.FeedID
|
2018-06-08 21:24:41 -04:00
|
|
|
entry.Feed.Category.UserID = entry.UserID
|
2017-11-20 00:10:04 -05:00
|
|
|
entries = append(entries, &entry)
|
|
|
|
}
|
|
|
|
|
|
|
|
return entries, nil
|
|
|
|
}
|
|
|
|
|
2017-12-03 20:44:27 -05:00
|
|
|
// GetEntryIDs returns a list of entry IDs that match the condition.
|
|
|
|
func (e *EntryQueryBuilder) GetEntryIDs() ([]int64, error) {
|
2018-06-08 21:24:41 -04:00
|
|
|
query := `SELECT e.id FROM entries e LEFT JOIN feeds f ON f.id=e.feed_id WHERE %s %s`
|
2017-12-03 20:44:27 -05:00
|
|
|
|
2018-06-08 21:24:41 -04:00
|
|
|
condition := e.buildCondition()
|
|
|
|
query = fmt.Sprintf(query, condition, e.buildSorting())
|
|
|
|
|
|
|
|
rows, err := e.store.db.Query(query, e.args...)
|
2017-12-03 20:44:27 -05:00
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("unable to get entries: %v", err)
|
|
|
|
}
|
|
|
|
defer rows.Close()
|
|
|
|
|
|
|
|
var entryIDs []int64
|
|
|
|
for rows.Next() {
|
|
|
|
var entryID int64
|
|
|
|
|
|
|
|
err := rows.Scan(&entryID)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("unable to fetch entry row: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
entryIDs = append(entryIDs, entryID)
|
|
|
|
}
|
|
|
|
|
|
|
|
return entryIDs, nil
|
|
|
|
}
|
|
|
|
|
2018-06-08 21:24:41 -04:00
|
|
|
// buildCondition joins all registered WHERE clause fragments with AND.
//
// NOTE(review): this returns an empty string when no condition was
// registered (possible with NewAnonymousQueryBuilder and no filters),
// which would produce an invalid `WHERE ` clause — callers are expected
// to add at least one filter before executing a query.
func (e *EntryQueryBuilder) buildCondition() string {
	return strings.Join(e.conditions, " AND ")
}
|
|
|
|
|
|
|
|
func (e *EntryQueryBuilder) buildSorting() string {
|
2018-06-09 16:40:20 -04:00
|
|
|
var parts []string
|
2017-11-20 00:10:04 -05:00
|
|
|
|
2023-06-19 17:00:10 -04:00
|
|
|
if len(e.sortExpressions) > 0 {
|
|
|
|
parts = append(parts, fmt.Sprintf(`ORDER BY %s`, strings.Join(e.sortExpressions, ", ")))
|
2017-11-20 00:10:04 -05:00
|
|
|
}
|
|
|
|
|
2021-01-04 00:20:21 -05:00
|
|
|
if e.limit > 0 {
|
2018-06-09 16:40:20 -04:00
|
|
|
parts = append(parts, fmt.Sprintf(`LIMIT %d`, e.limit))
|
2017-11-20 00:10:04 -05:00
|
|
|
}
|
|
|
|
|
2021-01-04 00:20:21 -05:00
|
|
|
if e.offset > 0 {
|
2018-06-09 16:40:20 -04:00
|
|
|
parts = append(parts, fmt.Sprintf(`OFFSET %d`, e.offset))
|
2017-11-20 00:10:04 -05:00
|
|
|
}
|
|
|
|
|
2018-06-09 16:40:20 -04:00
|
|
|
return strings.Join(parts, " ")
|
2017-11-20 00:10:04 -05:00
|
|
|
}
|
|
|
|
|
2017-11-21 20:40:29 -05:00
|
|
|
// NewEntryQueryBuilder returns a new EntryQueryBuilder.
|
2017-12-28 22:20:14 -05:00
|
|
|
func NewEntryQueryBuilder(store *Storage, userID int64) *EntryQueryBuilder {
|
2017-11-20 00:10:04 -05:00
|
|
|
return &EntryQueryBuilder{
|
2018-06-08 21:24:41 -04:00
|
|
|
store: store,
|
|
|
|
args: []interface{}{userID},
|
|
|
|
conditions: []string{"e.user_id = $1"},
|
2017-11-20 00:10:04 -05:00
|
|
|
}
|
|
|
|
}
|
2019-10-05 07:30:25 -04:00
|
|
|
|
|
|
|
// NewAnonymousQueryBuilder returns a new EntryQueryBuilder suitable for anonymous users.
|
|
|
|
func NewAnonymousQueryBuilder(store *Storage) *EntryQueryBuilder {
|
|
|
|
return &EntryQueryBuilder{
|
|
|
|
store: store,
|
|
|
|
}
|
|
|
|
}
|