2017-11-20 00:10:04 -05:00
|
|
|
// Copyright 2017 Frédéric Guillot. All rights reserved.
|
|
|
|
// Use of this source code is governed by the Apache 2.0
|
|
|
|
// license that can be found in the LICENSE file.
|
|
|
|
|
2018-08-25 00:51:50 -04:00
|
|
|
package subscription // import "miniflux.app/reader/subscription"
|
2017-11-20 00:10:04 -05:00
|
|
|
|
|
|
|
import (
|
2020-08-02 14:24:02 -04:00
|
|
|
"fmt"
|
2017-11-20 20:12:37 -05:00
|
|
|
"io"
|
2020-08-02 14:24:02 -04:00
|
|
|
"regexp"
|
2018-10-14 14:46:41 -04:00
|
|
|
"strings"
|
2017-11-20 20:12:37 -05:00
|
|
|
|
2020-09-27 17:29:48 -04:00
|
|
|
"miniflux.app/config"
|
2018-08-25 00:51:50 -04:00
|
|
|
"miniflux.app/errors"
|
|
|
|
"miniflux.app/http/client"
|
2018-10-15 00:43:48 -04:00
|
|
|
"miniflux.app/reader/browser"
|
2018-10-14 14:46:41 -04:00
|
|
|
"miniflux.app/reader/parser"
|
2018-08-25 00:51:50 -04:00
|
|
|
"miniflux.app/url"
|
2017-11-20 00:10:04 -05:00
|
|
|
|
|
|
|
"github.com/PuerkitoBio/goquery"
|
|
|
|
)
|
|
|
|
|
|
|
|
var (
	// errUnreadableDoc is the localized error format used when the fetched
	// HTML page cannot be parsed by goquery (see parseWebPage).
	errUnreadableDoc = "Unable to analyze this page: %v"

	// youtubeChannelRegex matches YouTube channel URLs and captures the
	// channel ID after "/channel/".
	youtubeChannelRegex = regexp.MustCompile(`youtube\.com/channel/(.*)`)

	// youtubeVideoRegex matches YouTube video watch URLs ("watch?v=...").
	youtubeVideoRegex = regexp.MustCompile(`youtube\.com/watch\?v=(.*)`)
)
|
|
|
|
|
|
|
|
// FindSubscriptions downloads and try to find one or more subscriptions from an URL.
|
2020-09-10 02:28:54 -04:00
|
|
|
func FindSubscriptions(websiteURL, userAgent, username, password string, fetchViaProxy bool) (Subscriptions, *errors.LocalizedError) {
|
2020-08-02 14:24:02 -04:00
|
|
|
websiteURL = findYoutubeChannelFeed(websiteURL)
|
2020-08-02 15:16:17 -04:00
|
|
|
websiteURL = parseYoutubeVideoPage(websiteURL)
|
2020-08-02 14:24:02 -04:00
|
|
|
|
2020-09-27 17:29:48 -04:00
|
|
|
clt := client.NewClientWithConfig(websiteURL, config.Opts)
|
|
|
|
clt.WithCredentials(username, password)
|
|
|
|
clt.WithUserAgent(userAgent)
|
2020-09-10 02:28:54 -04:00
|
|
|
|
|
|
|
if fetchViaProxy {
|
2020-09-27 17:29:48 -04:00
|
|
|
clt.WithProxy()
|
2020-09-10 02:28:54 -04:00
|
|
|
}
|
|
|
|
|
2020-09-27 17:29:48 -04:00
|
|
|
response, err := browser.Exec(clt)
|
2017-11-20 00:10:04 -05:00
|
|
|
if err != nil {
|
2018-01-20 01:42:55 -05:00
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2019-12-26 18:26:23 -05:00
|
|
|
body := response.BodyAsString()
|
2018-10-14 14:46:41 -04:00
|
|
|
if format := parser.DetectFeedFormat(body); format != parser.FormatUnknown {
|
2017-11-20 00:10:04 -05:00
|
|
|
var subscriptions Subscriptions
|
|
|
|
subscriptions = append(subscriptions, &Subscription{
|
|
|
|
Title: response.EffectiveURL,
|
|
|
|
URL: response.EffectiveURL,
|
|
|
|
Type: format,
|
|
|
|
})
|
|
|
|
|
|
|
|
return subscriptions, nil
|
|
|
|
}
|
|
|
|
|
2020-08-02 15:16:17 -04:00
|
|
|
subscriptions, err := parseWebPage(response.EffectiveURL, strings.NewReader(body))
|
2020-06-16 16:52:20 -04:00
|
|
|
if err != nil || subscriptions != nil {
|
|
|
|
return subscriptions, err
|
|
|
|
}
|
2020-08-02 15:16:17 -04:00
|
|
|
|
2020-06-16 16:52:20 -04:00
|
|
|
return tryWellKnownUrls(websiteURL, userAgent, username, password)
|
2017-11-20 00:10:04 -05:00
|
|
|
}
|
|
|
|
|
2020-08-02 15:16:17 -04:00
|
|
|
func parseWebPage(websiteURL string, data io.Reader) (Subscriptions, *errors.LocalizedError) {
|
2017-11-20 00:10:04 -05:00
|
|
|
var subscriptions Subscriptions
|
|
|
|
queries := map[string]string{
|
|
|
|
"link[type='application/rss+xml']": "rss",
|
|
|
|
"link[type='application/atom+xml']": "atom",
|
|
|
|
"link[type='application/json']": "json",
|
|
|
|
}
|
|
|
|
|
|
|
|
doc, err := goquery.NewDocumentFromReader(data)
|
|
|
|
if err != nil {
|
|
|
|
return nil, errors.NewLocalizedError(errUnreadableDoc, err)
|
|
|
|
}
|
|
|
|
|
|
|
|
for query, kind := range queries {
|
|
|
|
doc.Find(query).Each(func(i int, s *goquery.Selection) {
|
|
|
|
subscription := new(Subscription)
|
|
|
|
subscription.Type = kind
|
|
|
|
|
|
|
|
if title, exists := s.Attr("title"); exists {
|
|
|
|
subscription.Title = title
|
|
|
|
} else {
|
|
|
|
subscription.Title = "Feed"
|
|
|
|
}
|
|
|
|
|
|
|
|
if feedURL, exists := s.Attr("href"); exists {
|
2017-12-02 01:29:18 -05:00
|
|
|
subscription.URL, _ = url.AbsoluteURL(websiteURL, feedURL)
|
2017-11-20 00:10:04 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
if subscription.Title == "" {
|
|
|
|
subscription.Title = subscription.URL
|
|
|
|
}
|
|
|
|
|
|
|
|
if subscription.URL != "" {
|
|
|
|
subscriptions = append(subscriptions, subscription)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
return subscriptions, nil
|
|
|
|
}
|
2020-06-16 16:52:20 -04:00
|
|
|
|
2020-08-02 14:24:02 -04:00
|
|
|
func findYoutubeChannelFeed(websiteURL string) string {
|
|
|
|
matches := youtubeChannelRegex.FindStringSubmatch(websiteURL)
|
|
|
|
|
|
|
|
if len(matches) == 2 {
|
|
|
|
return fmt.Sprintf(`https://www.youtube.com/feeds/videos.xml?channel_id=%s`, matches[1])
|
|
|
|
}
|
|
|
|
return websiteURL
|
|
|
|
}
|
|
|
|
|
2020-08-02 15:16:17 -04:00
|
|
|
func parseYoutubeVideoPage(websiteURL string) string {
|
|
|
|
if !youtubeVideoRegex.MatchString(websiteURL) {
|
|
|
|
return websiteURL
|
|
|
|
}
|
|
|
|
|
2020-09-27 17:29:48 -04:00
|
|
|
clt := client.NewClientWithConfig(websiteURL, config.Opts)
|
|
|
|
response, browserErr := browser.Exec(clt)
|
2020-08-02 15:16:17 -04:00
|
|
|
if browserErr != nil {
|
|
|
|
return websiteURL
|
|
|
|
}
|
|
|
|
|
|
|
|
doc, docErr := goquery.NewDocumentFromReader(response.Body)
|
|
|
|
if docErr != nil {
|
|
|
|
return websiteURL
|
|
|
|
}
|
|
|
|
|
|
|
|
if channelID, exists := doc.Find(`meta[itemprop="channelId"]`).First().Attr("content"); exists {
|
|
|
|
return fmt.Sprintf(`https://www.youtube.com/feeds/videos.xml?channel_id=%s`, channelID)
|
|
|
|
}
|
|
|
|
|
|
|
|
return websiteURL
|
|
|
|
}
|
|
|
|
|
2020-06-16 16:52:20 -04:00
|
|
|
func tryWellKnownUrls(websiteURL, userAgent, username, password string) (Subscriptions, *errors.LocalizedError) {
|
|
|
|
var subscriptions Subscriptions
|
|
|
|
knownURLs := map[string]string{
|
|
|
|
"/atom.xml": "atom",
|
|
|
|
"/feed.xml": "atom",
|
|
|
|
"/feed/": "atom",
|
|
|
|
"/rss.xml": "rss",
|
|
|
|
}
|
|
|
|
|
|
|
|
lastCharacter := websiteURL[len(websiteURL)-1:]
|
|
|
|
if lastCharacter == "/" {
|
|
|
|
websiteURL = websiteURL[:len(websiteURL)-1]
|
|
|
|
}
|
|
|
|
|
|
|
|
for knownURL, kind := range knownURLs {
|
|
|
|
fullURL, err := url.AbsoluteURL(websiteURL, knownURL)
|
|
|
|
if err != nil {
|
|
|
|
continue
|
|
|
|
}
|
2020-09-27 17:29:48 -04:00
|
|
|
clt := client.NewClientWithConfig(fullURL, config.Opts)
|
|
|
|
clt.WithCredentials(username, password)
|
|
|
|
clt.WithUserAgent(userAgent)
|
|
|
|
response, err := clt.Get()
|
2020-06-16 16:52:20 -04:00
|
|
|
if err != nil {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
if response != nil && response.StatusCode == 200 {
|
|
|
|
subscription := new(Subscription)
|
|
|
|
subscription.Type = kind
|
|
|
|
subscription.Title = fullURL
|
|
|
|
subscription.URL = fullURL
|
|
|
|
if subscription.URL != "" {
|
|
|
|
subscriptions = append(subscriptions, subscription)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return subscriptions, nil
|
|
|
|
}
|