processor.go

// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor

import (
    "errors"
    "fmt"
    "log/slog"
    "regexp"
    "slices"
    "strconv"
    "time"

    "miniflux.app/v2/internal/config"
    "miniflux.app/v2/internal/metric"
    "miniflux.app/v2/internal/model"
    "miniflux.app/v2/internal/reader/fetcher"
    "miniflux.app/v2/internal/reader/readingtime"
    "miniflux.app/v2/internal/reader/rewrite"
    "miniflux.app/v2/internal/reader/sanitizer"
    "miniflux.app/v2/internal/reader/scraper"
    "miniflux.app/v2/internal/storage"

    "github.com/PuerkitoBio/goquery"
)
var (
    youtubeRegex = regexp.MustCompile(`youtube\.com/watch\?v=(.*)$`)
    odyseeRegex  = regexp.MustCompile(`^https://odysee\.com`)
    iso8601Regex = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`)
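    // Matches URL rewrite rules of the form: rewrite("search-regex"|"replacement")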
    customReplaceRuleRegex = regexp.MustCompile(`rewrite\("(.*)"\|"(.*)"\)`)
)

// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
    var filteredEntries model.Entries

    // Process older entries first.
    for i := len(feed.Entries) - 1; i >= 0; i-- {
        entry := feed.Entries[i]

        slog.Debug("Processing entry",
            slog.Int64("user_id", user.ID),
            slog.Int64("entry_id", entry.ID),
            slog.String("entry_url", entry.URL),
            slog.Int64("feed_id", feed.ID),
            slog.String("feed_url", feed.FeedURL),
        )

        if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) {
            continue
        }

        websiteURL := getUrlFromEntry(feed, entry)
        entryIsNew := !store.EntryURLExists(feed.ID, entry.URL)

        if feed.Crawler && (entryIsNew || forceRefresh) {
            slog.Debug("Scraping entry",
                slog.Int64("user_id", user.ID),
                slog.Int64("entry_id", entry.ID),
                slog.String("entry_url", entry.URL),
                slog.Int64("feed_id", feed.ID),
                slog.String("feed_url", feed.FeedURL),
            )

            startTime := time.Now()

            requestBuilder := fetcher.NewRequestBuilder()
            requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
            requestBuilder.WithCookie(feed.Cookie)
            requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
            requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
            requestBuilder.UseProxy(feed.FetchViaProxy)
            requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
            requestBuilder.DisableHTTP2(feed.DisableHTTP2)

            content, scraperErr := scraper.ScrapeWebsite(
                requestBuilder,
                websiteURL,
                feed.ScraperRules,
            )

            if config.Opts.HasMetricsCollector() {
                status := "success"
                if scraperErr != nil {
                    status = "error"
                }
                metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
            }

            if scraperErr != nil {
                slog.Warn("Unable to scrape entry",
                    slog.Int64("user_id", user.ID),
                    slog.Int64("entry_id", entry.ID),
                    slog.String("entry_url", entry.URL),
                    slog.Int64("feed_id", feed.ID),
                    slog.String("feed_url", feed.FeedURL),
                    slog.Any("error", scraperErr),
                )
            } else if content != "" {
                // We replace the entry content only if the scraper doesn't return any error.
                entry.Content = content
            }
        }

        rewrite.Rewriter(websiteURL, entry, feed.RewriteRules)

        // The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered.
        entry.Content = sanitizer.Sanitize(websiteURL, entry.Content)

        updateEntryReadingTime(store, feed, entry, entryIsNew, user)
        filteredEntries = append(filteredEntries, entry)
    }

    feed.Entries = filteredEntries
}
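
// isBlockedEntry returns true when the entry URL, title, author, or one of
// its tags matches the feed's block list regex.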
func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
    if feed.BlocklistRules != "" {
        containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
            return matchField(feed.BlocklistRules, tag)
        })

        if matchField(feed.BlocklistRules, entry.URL) || matchField(feed.BlocklistRules, entry.Title) || matchField(feed.BlocklistRules, entry.Author) || containsBlockedTag {
            slog.Debug("Blocking entry based on rule",
                slog.Int64("entry_id", entry.ID),
                slog.String("entry_url", entry.URL),
                slog.Int64("feed_id", feed.ID),
                slog.String("feed_url", feed.FeedURL),
                slog.String("rule", feed.BlocklistRules),
            )
            return true
        }
    }
    return false
}
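
// isAllowedEntry returns true when the feed has no keep list, or when the
// entry URL, title, author, or one of its tags matches the keep list regex.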
func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
    if feed.KeeplistRules != "" {
        containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
            return matchField(feed.KeeplistRules, tag)
        })

        if matchField(feed.KeeplistRules, entry.URL) || matchField(feed.KeeplistRules, entry.Title) || matchField(feed.KeeplistRules, entry.Author) || containsAllowedTag {
            slog.Debug("Allow entry based on rule",
                slog.Int64("entry_id", entry.ID),
                slog.String("entry_url", entry.URL),
                slog.Int64("feed_id", feed.ID),
                slog.String("feed_url", feed.FeedURL),
                slog.String("rule", feed.KeeplistRules),
            )
            return true
        }
        return false
    }
    return true
}
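
// matchField reports whether value matches the given regular expression.
// Invalid patterns are logged and treated as non-matching.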
func matchField(pattern, value string) bool {
    match, err := regexp.MatchString(pattern, value)
    if err != nil {
        slog.Debug("Failed on regexp match",
            slog.String("pattern", pattern),
            slog.String("value", value),
            slog.Bool("match", match),
            slog.Any("error", err),
        )
    }
    return match
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
    startTime := time.Now()
    websiteURL := getUrlFromEntry(feed, entry)

    requestBuilder := fetcher.NewRequestBuilder()
    requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
    requestBuilder.WithCookie(feed.Cookie)
    requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
    requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
    requestBuilder.UseProxy(feed.FetchViaProxy)
    requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
    requestBuilder.DisableHTTP2(feed.DisableHTTP2)

    content, scraperErr := scraper.ScrapeWebsite(
        requestBuilder,
        websiteURL,
        feed.ScraperRules,
    )

    if config.Opts.HasMetricsCollector() {
        status := "success"
        if scraperErr != nil {
            status = "error"
        }
        metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
    }

    if scraperErr != nil {
        return scraperErr
    }

    if content != "" {
        entry.Content = content
        if user.ShowReadingTime {
            entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
        }
    }

    rewrite.Rewriter(websiteURL, entry, entry.Feed.RewriteRules)
    entry.Content = sanitizer.Sanitize(websiteURL, entry.Content)
    return nil
}
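
// getUrlFromEntry returns the entry URL, rewritten according to the feed's
// URL rewrite rule when one is configured. Note that MustCompile panics if
// the user-supplied search pattern is not a valid regular expression.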
func getUrlFromEntry(feed *model.Feed, entry *model.Entry) string {
    var url = entry.URL
    if feed.UrlRewriteRules != "" {
        parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)

        if len(parts) >= 3 {
            re := regexp.MustCompile(parts[1])
            url = re.ReplaceAllString(entry.URL, parts[2])
            slog.Debug("Rewriting entry URL",
                slog.Int64("entry_id", entry.ID),
                slog.String("original_entry_url", entry.URL),
                slog.String("rewritten_entry_url", url),
                slog.Int64("feed_id", feed.ID),
                slog.String("feed_url", feed.FeedURL),
            )
        } else {
            slog.Debug("Cannot find search and replace terms for replace rule",
                slog.Int64("entry_id", entry.ID),
                slog.String("original_entry_url", entry.URL),
                slog.String("rewritten_entry_url", url),
                slog.Int64("feed_id", feed.ID),
                slog.String("feed_url", feed.FeedURL),
                slog.String("url_rewrite_rules", feed.UrlRewriteRules),
            )
        }
    }
    return url
}
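
// updateEntryReadingTime sets the entry reading time: watch time for YouTube
// and Odysee videos, estimated reading time from the content otherwise.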
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
    if shouldFetchYouTubeWatchTime(entry) {
        if entryIsNew {
            watchTime, err := fetchYouTubeWatchTime(entry.URL)
            if err != nil {
                slog.Warn("Unable to fetch YouTube watch time",
                    slog.Int64("user_id", user.ID),
                    slog.Int64("entry_id", entry.ID),
                    slog.String("entry_url", entry.URL),
                    slog.Int64("feed_id", feed.ID),
                    slog.String("feed_url", feed.FeedURL),
                    slog.Any("error", err),
                )
            }
            entry.ReadingTime = watchTime
        } else {
            entry.ReadingTime = store.GetReadTime(entry, feed)
        }
    }

    if shouldFetchOdyseeWatchTime(entry) {
        if entryIsNew {
            watchTime, err := fetchOdyseeWatchTime(entry.URL)
            if err != nil {
                slog.Warn("Unable to fetch Odysee watch time",
                    slog.Int64("user_id", user.ID),
                    slog.Int64("entry_id", entry.ID),
                    slog.String("entry_url", entry.URL),
                    slog.Int64("feed_id", feed.ID),
                    slog.String("feed_url", feed.FeedURL),
                    slog.Any("error", err),
                )
            }
            entry.ReadingTime = watchTime
        } else {
            entry.ReadingTime = store.GetReadTime(entry, feed)
        }
    }

    // Handle the watch time error case and non-video entries.
    if entry.ReadingTime == 0 && user.ShowReadingTime {
        entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
    }
}
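
// shouldFetchYouTubeWatchTime returns true when watch time fetching is
// enabled and the entry URL looks like a YouTube video URL.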
func shouldFetchYouTubeWatchTime(entry *model.Entry) bool {
    if !config.Opts.FetchYouTubeWatchTime() {
        return false
    }
    matches := youtubeRegex.FindStringSubmatch(entry.URL)
    urlMatchesYouTubePattern := len(matches) == 2
    return urlMatchesYouTubePattern
}
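
// shouldFetchOdyseeWatchTime returns true when watch time fetching is
// enabled and the entry URL points to odysee.com.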
func shouldFetchOdyseeWatchTime(entry *model.Entry) bool {
    if !config.Opts.FetchOdyseeWatchTime() {
        return false
    }
    matches := odyseeRegex.FindStringSubmatch(entry.URL)
    return matches != nil
}
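
// fetchYouTubeWatchTime downloads the YouTube watch page and extracts the
// video duration, in minutes, from its meta[itemprop="duration"] tag.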
func fetchYouTubeWatchTime(websiteURL string) (int, error) {
    requestBuilder := fetcher.NewRequestBuilder()
    requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
    requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

    responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
    defer responseHandler.Close()

    if localizedError := responseHandler.LocalizedError(); localizedError != nil {
        slog.Warn("Unable to fetch YouTube page", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
        return 0, localizedError.Error()
    }

    doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
    if docErr != nil {
        return 0, docErr
    }

    durs, exists := doc.Find(`meta[itemprop="duration"]`).First().Attr("content")
    if !exists {
        return 0, errors.New("duration not found")
    }

    dur, err := parseISO8601(durs)
    if err != nil {
        return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
    }

    return int(dur.Minutes()), nil
}
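
// fetchOdyseeWatchTime downloads the Odysee page and extracts the video
// duration, in minutes, from its meta[property="og:video:duration"] tag.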
func fetchOdyseeWatchTime(websiteURL string) (int, error) {
    requestBuilder := fetcher.NewRequestBuilder()
    requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
    requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

    responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
    defer responseHandler.Close()

    if localizedError := responseHandler.LocalizedError(); localizedError != nil {
        slog.Warn("Unable to fetch Odysee watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
        return 0, localizedError.Error()
    }

    doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
    if docErr != nil {
        return 0, docErr
    }

    // The og:video:duration property contains the watch time in seconds.
    durs, exists := doc.Find(`meta[property="og:video:duration"]`).First().Attr("content")
    if !exists {
        return 0, errors.New("duration not found")
    }

    dur, err := strconv.ParseInt(durs, 10, 64)
    if err != nil {
        return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
    }

    return int(dur / 60), nil
}

// parseISO8601 parses an ISO 8601 duration string.
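// For example, "PT1H30M" yields a 90-minute duration. Only the hour, minute,
// and second components are supported; any other component is an error.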
func parseISO8601(from string) (time.Duration, error) {
    var match []string
    var d time.Duration

    if iso8601Regex.MatchString(from) {
        match = iso8601Regex.FindStringSubmatch(from)
    } else {
        return 0, errors.New("could not parse duration string")
    }

    for i, name := range iso8601Regex.SubexpNames() {
        part := match[i]
        if i == 0 || name == "" || part == "" {
            continue
        }

        val, err := strconv.ParseInt(part, 10, 64)
        if err != nil {
            return 0, err
        }

        switch name {
        case "hour":
            d += time.Duration(val) * time.Hour
        case "minute":
            d += time.Duration(val) * time.Minute
        case "second":
            d += time.Duration(val) * time.Second
        default:
            return 0, fmt.Errorf("unknown field %s", name)
        }
    }

    return d, nil
}