// Copyright 2017 Frédéric Guillot. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.

package storage // import "miniflux.app/storage"

import (
	"database/sql"
	"fmt"
	"strings"
	"time"

	"github.com/lib/pq"

	"miniflux.app/model"
	"miniflux.app/timezone"
)

// EntryQueryBuilder builds a SQL query to fetch entries.
type EntryQueryBuilder struct {
	store      *Storage
	args       []interface{}
	conditions []string
	order      string
	direction  string
	limit      int
	offset     int
}
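
// Usage sketch (illustrative only; actual call sites across the codebase differ):
//
//	builder := NewEntryQueryBuilder(store, userID)
//	builder.WithFeedID(feedID)
//	builder.WithStatus("unread")
//	builder.WithOrder("published_at")
//	builder.WithDirection("DESC")
//	builder.WithLimit(50)
//	entries, err := builder.GetEntries()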

// WithSearchQuery adds a full-text search query to the conditions.
func (e *EntryQueryBuilder) WithSearchQuery(query string) *EntryQueryBuilder {
	if query != "" {
		nArgs := len(e.args) + 1
		e.conditions = append(e.conditions, fmt.Sprintf("e.document_vectors @@ plainto_tsquery($%d)", nArgs))
		e.args = append(e.args, query)

		// Penalize older entries: subtract 0.0000001 per second of age (roughly 0.0086 per day) from the rank.
		e.WithOrder(fmt.Sprintf("ts_rank(document_vectors, plainto_tsquery($%d)) - extract (epoch from now() - published_at)::float * 0.0000001", nArgs))
		e.WithDirection("DESC")
	}
	return e
}
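
// For illustration: on a builder created with NewEntryQueryBuilder (which already binds
// the user ID as $1), WithSearchQuery("golang") appends the condition
// "e.document_vectors @@ plainto_tsquery($2)" and orders the results by ts_rank minus
// the age penalty, descending.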

// WithStarred adds the starred filter.
func (e *EntryQueryBuilder) WithStarred() *EntryQueryBuilder {
	e.conditions = append(e.conditions, "e.starred is true")
	return e
}

// BeforeDate adds the condition published_at < date.
func (e *EntryQueryBuilder) BeforeDate(date time.Time) *EntryQueryBuilder {
	e.conditions = append(e.conditions, fmt.Sprintf("e.published_at < $%d", len(e.args)+1))
	e.args = append(e.args, date)
	return e
}

// AfterDate adds the condition published_at > date.
func (e *EntryQueryBuilder) AfterDate(date time.Time) *EntryQueryBuilder {
	e.conditions = append(e.conditions, fmt.Sprintf("e.published_at > $%d", len(e.args)+1))
	e.args = append(e.args, date)
	return e
}

// BeforeEntryID adds the condition id < entryID.
func (e *EntryQueryBuilder) BeforeEntryID(entryID int64) *EntryQueryBuilder {
	if entryID != 0 {
		e.conditions = append(e.conditions, fmt.Sprintf("e.id < $%d", len(e.args)+1))
		e.args = append(e.args, entryID)
	}
	return e
}

// AfterEntryID adds the condition id > entryID.
func (e *EntryQueryBuilder) AfterEntryID(entryID int64) *EntryQueryBuilder {
	if entryID != 0 {
		e.conditions = append(e.conditions, fmt.Sprintf("e.id > $%d", len(e.args)+1))
		e.args = append(e.args, entryID)
	}
	return e
}

// WithEntryIDs filters by entry IDs.
func (e *EntryQueryBuilder) WithEntryIDs(entryIDs []int64) *EntryQueryBuilder {
	e.conditions = append(e.conditions, fmt.Sprintf("e.id = ANY($%d)", len(e.args)+1))
	e.args = append(e.args, pq.Int64Array(entryIDs))
	return e
}

// WithEntryID filters by entry ID.
func (e *EntryQueryBuilder) WithEntryID(entryID int64) *EntryQueryBuilder {
	if entryID != 0 {
		e.conditions = append(e.conditions, fmt.Sprintf("e.id = $%d", len(e.args)+1))
		e.args = append(e.args, entryID)
	}
	return e
}

// WithFeedID filters by feed ID.
func (e *EntryQueryBuilder) WithFeedID(feedID int64) *EntryQueryBuilder {
	if feedID > 0 {
		e.conditions = append(e.conditions, fmt.Sprintf("e.feed_id = $%d", len(e.args)+1))
		e.args = append(e.args, feedID)
	}
	return e
}

// WithCategoryID filters by category ID.
func (e *EntryQueryBuilder) WithCategoryID(categoryID int64) *EntryQueryBuilder {
	if categoryID > 0 {
		e.conditions = append(e.conditions, fmt.Sprintf("f.category_id = $%d", len(e.args)+1))
		e.args = append(e.args, categoryID)
	}
	return e
}

// WithStatus filters by entry status.
func (e *EntryQueryBuilder) WithStatus(status string) *EntryQueryBuilder {
	if status != "" {
		e.conditions = append(e.conditions, fmt.Sprintf("e.status = $%d", len(e.args)+1))
		e.args = append(e.args, status)
	}
	return e
}

// WithStatuses filters by a list of entry statuses.
func (e *EntryQueryBuilder) WithStatuses(statuses []string) *EntryQueryBuilder {
	if len(statuses) > 0 {
		e.conditions = append(e.conditions, fmt.Sprintf("e.status = ANY($%d)", len(e.args)+1))
		e.args = append(e.args, pq.StringArray(statuses))
	}
	return e
}

// WithoutStatus sets the entry status that should not be returned.
func (e *EntryQueryBuilder) WithoutStatus(status string) *EntryQueryBuilder {
	if status != "" {
		e.conditions = append(e.conditions, fmt.Sprintf("e.status <> $%d", len(e.args)+1))
		e.args = append(e.args, status)
	}
	return e
}

// WithShareCode filters by the entry share code.
func (e *EntryQueryBuilder) WithShareCode(shareCode string) *EntryQueryBuilder {
	e.conditions = append(e.conditions, fmt.Sprintf("e.share_code = $%d", len(e.args)+1))
	e.args = append(e.args, shareCode)
	return e
}

// WithShareCodeNotEmpty adds a filter for non-empty share code.
func (e *EntryQueryBuilder) WithShareCodeNotEmpty() *EntryQueryBuilder {
	e.conditions = append(e.conditions, "e.share_code <> ''")
	return e
}

// WithOrder sets the sorting order.
func (e *EntryQueryBuilder) WithOrder(order string) *EntryQueryBuilder {
	e.order = order
	return e
}

// WithDirection sets the sorting direction.
func (e *EntryQueryBuilder) WithDirection(direction string) *EntryQueryBuilder {
	e.direction = direction
	return e
}

// WithLimit sets the limit.
func (e *EntryQueryBuilder) WithLimit(limit int) *EntryQueryBuilder {
	if limit > 0 {
		e.limit = limit
	}
	return e
}

// WithOffset sets the offset.
func (e *EntryQueryBuilder) WithOffset(offset int) *EntryQueryBuilder {
	if offset > 0 {
		e.offset = offset
	}
	return e
}

// WithGloballyVisible excludes entries whose feed or category is hidden globally.
func (e *EntryQueryBuilder) WithGloballyVisible() *EntryQueryBuilder {
	e.conditions = append(e.conditions, "not c.hide_globally")
	e.conditions = append(e.conditions, "not f.hide_globally")
	return e
}

// CountEntries counts the number of entries that match the condition.
func (e *EntryQueryBuilder) CountEntries() (count int, err error) {
	query := `
		SELECT count(*)
		FROM entries e
			JOIN feeds f ON f.id = e.feed_id
			JOIN categories c ON c.id = f.category_id
		WHERE %s
	`
	condition := e.buildCondition()

	err = e.store.db.QueryRow(fmt.Sprintf(query, condition), e.args...).Scan(&count)
	if err != nil {
		return 0, fmt.Errorf("unable to count entries: %v", err)
	}

	return count, nil
}

// GetEntry returns a single entry that matches the condition.
func (e *EntryQueryBuilder) GetEntry() (*model.Entry, error) {
	e.limit = 1
	entries, err := e.GetEntries()
	if err != nil {
		return nil, err
	}

	if len(entries) != 1 {
		return nil, nil
	}

	entries[0].Enclosures, err = e.store.GetEnclosures(entries[0].ID)
	if err != nil {
		return nil, err
	}

	return entries[0], nil
}

// GetEntries returns a list of entries that match the condition.
func (e *EntryQueryBuilder) GetEntries() (model.Entries, error) {
	query := `
		SELECT
			e.id,
			e.user_id,
			e.feed_id,
			e.hash,
			e.published_at at time zone u.timezone,
			e.title,
			e.url,
			e.comments_url,
			e.author,
			e.share_code,
			e.content,
			e.status,
			e.starred,
			e.reading_time,
			e.created_at,
			e.changed_at,
			f.title as feed_title,
			f.feed_url,
			f.site_url,
			f.checked_at,
			f.category_id, c.title as category_title,
			f.scraper_rules,
			f.rewrite_rules,
			f.crawler,
			f.user_agent,
			f.cookie,
			fi.icon_id,
			u.timezone
		FROM
			entries e
		LEFT JOIN
			feeds f ON f.id=e.feed_id
		LEFT JOIN
			categories c ON c.id=f.category_id
		LEFT JOIN
			feed_icons fi ON fi.feed_id=f.id
		LEFT JOIN
			users u ON u.id=e.user_id
		WHERE %s %s
	`
	condition := e.buildCondition()
	sorting := e.buildSorting()
	query = fmt.Sprintf(query, condition, sorting)

	rows, err := e.store.db.Query(query, e.args...)
	if err != nil {
		return nil, fmt.Errorf("unable to get entries: %v", err)
	}
	defer rows.Close()

	entries := make(model.Entries, 0)
	for rows.Next() {
		var entry model.Entry
		var iconID sql.NullInt64
		var tz string

		entry.Feed = &model.Feed{}
		entry.Feed.Category = &model.Category{}
		entry.Feed.Icon = &model.FeedIcon{}

		err := rows.Scan(
			&entry.ID,
			&entry.UserID,
			&entry.FeedID,
			&entry.Hash,
			&entry.Date,
			&entry.Title,
			&entry.URL,
			&entry.CommentsURL,
			&entry.Author,
			&entry.ShareCode,
			&entry.Content,
			&entry.Status,
			&entry.Starred,
			&entry.ReadingTime,
			&entry.CreatedAt,
			&entry.ChangedAt,
			&entry.Feed.Title,
			&entry.Feed.FeedURL,
			&entry.Feed.SiteURL,
			&entry.Feed.CheckedAt,
			&entry.Feed.Category.ID,
			&entry.Feed.Category.Title,
			&entry.Feed.ScraperRules,
			&entry.Feed.RewriteRules,
			&entry.Feed.Crawler,
			&entry.Feed.UserAgent,
			&entry.Feed.Cookie,
			&iconID,
			&tz,
		)

		if err != nil {
			return nil, fmt.Errorf("unable to fetch entry row: %v", err)
		}

		if iconID.Valid {
			entry.Feed.Icon.IconID = iconID.Int64
		} else {
			entry.Feed.Icon.IconID = 0
		}

		// Make sure that timestamp fields contain timezone information (API)
		entry.Date = timezone.Convert(tz, entry.Date)
		entry.CreatedAt = timezone.Convert(tz, entry.CreatedAt)
		entry.ChangedAt = timezone.Convert(tz, entry.ChangedAt)
		entry.Feed.CheckedAt = timezone.Convert(tz, entry.Feed.CheckedAt)

		entry.Feed.ID = entry.FeedID
		entry.Feed.UserID = entry.UserID
		entry.Feed.Icon.FeedID = entry.FeedID
		entry.Feed.Category.UserID = entry.UserID
		entries = append(entries, &entry)
	}

	return entries, nil
}

// GetEntryIDs returns a list of entry IDs that match the condition.
func (e *EntryQueryBuilder) GetEntryIDs() ([]int64, error) {
	query := `SELECT e.id FROM entries e LEFT JOIN feeds f ON f.id=e.feed_id WHERE %s %s`

	condition := e.buildCondition()
	query = fmt.Sprintf(query, condition, e.buildSorting())

	rows, err := e.store.db.Query(query, e.args...)
	if err != nil {
		return nil, fmt.Errorf("unable to get entries: %v", err)
	}
	defer rows.Close()

	var entryIDs []int64
	for rows.Next() {
		var entryID int64

		err := rows.Scan(&entryID)
		if err != nil {
			return nil, fmt.Errorf("unable to fetch entry row: %v", err)
		}

		entryIDs = append(entryIDs, entryID)
	}

	return entryIDs, nil
}

func (e *EntryQueryBuilder) buildCondition() string {
	return strings.Join(e.conditions, " AND ")
}
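
// For illustration: a builder created with NewEntryQueryBuilder and narrowed with
// WithStatus("unread") produces the condition "e.user_id = $1 AND e.status = $2"
// (assuming no other filters were added).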

func (e *EntryQueryBuilder) buildSorting() string {
	var parts []string

	if e.order != "" {
		parts = append(parts, fmt.Sprintf(`ORDER BY %s`, e.order))
	}

	if e.direction != "" {
		parts = append(parts, e.direction)
	}

	if e.limit > 0 {
		parts = append(parts, fmt.Sprintf(`LIMIT %d`, e.limit))
	}

	if e.offset > 0 {
		parts = append(parts, fmt.Sprintf(`OFFSET %d`, e.offset))
	}

	return strings.Join(parts, " ")
}
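
// For example, with order "published_at", direction "DESC", limit 50, and offset 100,
// buildSorting returns "ORDER BY published_at DESC LIMIT 50 OFFSET 100".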

// NewEntryQueryBuilder returns a new EntryQueryBuilder.
func NewEntryQueryBuilder(store *Storage, userID int64) *EntryQueryBuilder {
	return &EntryQueryBuilder{
		store:      store,
		args:       []interface{}{userID},
		conditions: []string{"e.user_id = $1"},
	}
}

// NewAnonymousQueryBuilder returns a new EntryQueryBuilder suitable for anonymous users.
func NewAnonymousQueryBuilder(store *Storage) *EntryQueryBuilder {
	return &EntryQueryBuilder{
		store: store,
	}
}