processor.go
// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor // import "miniflux.app/v2/internal/reader/processor"

import (
	"log/slog"
	"net/url"
	"regexp"
	"slices"
	"time"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/metric"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/proxyrotator"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/readingtime"
	"miniflux.app/v2/internal/reader/rewrite"
	"miniflux.app/v2/internal/reader/sanitizer"
	"miniflux.app/v2/internal/reader/scraper"
	"miniflux.app/v2/internal/reader/urlcleaner"
	"miniflux.app/v2/internal/storage"
)
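
// customReplaceRuleRegex matches URL rewrite rules of the form
// `rewrite("search-regex"|"replacement")`, capturing the search pattern
// and the replacement string.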
var customReplaceRuleRegex = regexp.MustCompile(`rewrite\("([^"]+)"\|"([^"]+)"\)`)

// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, userID int64, forceRefresh bool) {
	var filteredEntries model.Entries

	user, storeErr := store.UserByID(userID)
	if storeErr != nil {
		slog.Error("Database error", slog.Any("error", storeErr))
		return
	}

	// The errors are handled in RemoveTrackingParameters.
	parsedFeedURL, _ := url.Parse(feed.FeedURL)
	parsedSiteURL, _ := url.Parse(feed.SiteURL)

	// Process older entries first.
	for _, entry := range slices.Backward(feed.Entries) {
		slog.Debug("Processing entry",
			slog.Int64("user_id", user.ID),
			slog.String("entry_url", entry.URL),
			slog.String("entry_hash", entry.Hash),
			slog.String("entry_title", entry.Title),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
		)

		if isBlockedEntry(feed, entry, user) || !isAllowedEntry(feed, entry, user) || !isRecentEntry(entry) {
			continue
		}
		parsedInputURL, _ := url.Parse(entry.URL)
		if cleanedURL, err := urlcleaner.RemoveTrackingParameters(parsedFeedURL, parsedSiteURL, parsedInputURL); err == nil {
			entry.URL = cleanedURL
		}

		webpageBaseURL := ""
		entry.URL = rewriteEntryURL(feed, entry)
		entryIsNew := store.IsNewEntry(feed.ID, entry.Hash)
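
		// Scrape the original web page only for new entries, or when a refresh is forced.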
		if feed.Crawler && (entryIsNew || forceRefresh) {
			slog.Debug("Scraping entry",
				slog.Int64("user_id", user.ID),
				slog.String("entry_url", entry.URL),
				slog.String("entry_hash", entry.Hash),
				slog.String("entry_title", entry.Title),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.Bool("entry_is_new", entryIsNew),
				slog.Bool("force_refresh", forceRefresh),
			)

			startTime := time.Now()

			requestBuilder := fetcher.NewRequestBuilder()
			requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
			requestBuilder.WithCookie(feed.Cookie)
			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
			requestBuilder.WithProxyRotator(proxyrotator.ProxyRotatorInstance)
			requestBuilder.WithCustomFeedProxyURL(feed.ProxyURL)
			requestBuilder.WithCustomApplicationProxyURL(config.Opts.HTTPClientProxyURL())
			requestBuilder.UseCustomApplicationProxyURL(feed.FetchViaProxy)
			requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
			requestBuilder.DisableHTTP2(feed.DisableHTTP2)

			scrapedPageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
				requestBuilder,
				entry.URL,
				feed.ScraperRules,
			)
			if scrapedPageBaseURL != "" {
				webpageBaseURL = scrapedPageBaseURL
			}

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				slog.Warn("Unable to scrape entry",
					slog.Int64("user_id", user.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", scraperErr),
				)
			} else if extractedContent != "" {
				// We replace the entry content only if the scraper doesn't return any error.
				entry.Content = minifyContent(extractedContent)
			}
		}

		rewrite.ApplyContentRewriteRules(entry, feed.RewriteRules)

		if webpageBaseURL == "" {
			webpageBaseURL = entry.URL
		}

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered out.
		entry.Content = sanitizer.SanitizeHTML(webpageBaseURL, entry.Content, &sanitizer.SanitizerOptions{OpenLinksInNewTab: user.OpenExternalLinksInNewTab})

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}
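
	// When enabled, fetch YouTube watch time for the filtered entries in a single batch.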
	if user.ShowReadingTime && shouldFetchYouTubeWatchTimeInBulk() {
		fetchYouTubeWatchTimeInBulk(filteredEntries)
	}

	feed.Entries = filteredEntries
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	entry.URL = rewriteEntryURL(feed, entry)

	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
	requestBuilder.WithCookie(feed.Cookie)
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxyRotator(proxyrotator.ProxyRotatorInstance)
	requestBuilder.WithCustomFeedProxyURL(feed.ProxyURL)
	requestBuilder.WithCustomApplicationProxyURL(config.Opts.HTTPClientProxyURL())
	requestBuilder.UseCustomApplicationProxyURL(feed.FetchViaProxy)
	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
	requestBuilder.DisableHTTP2(feed.DisableHTTP2)

	webpageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
		requestBuilder,
		entry.URL,
		feed.ScraperRules,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if extractedContent != "" {
		entry.Content = minifyContent(extractedContent)

		if user.ShowReadingTime {
			entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
		}
	}

	rewrite.ApplyContentRewriteRules(entry, entry.Feed.RewriteRules)
	entry.Content = sanitizer.SanitizeHTML(webpageBaseURL, entry.Content, &sanitizer.SanitizerOptions{OpenLinksInNewTab: user.OpenExternalLinksInNewTab})
	return nil
}
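
// rewriteEntryURL applies the feed's URL rewrite rule, if any, to the entry URL
// and returns the result. The original URL is returned when no rule is set, the
// rule cannot be parsed, or its regular expression fails to compile.
//
// Illustrative example (not part of the original file): a feed whose
// UrlRewriteRules field is `rewrite("^http://"|"https://")` would have
// "http://example.org/article" rewritten to "https://example.org/article".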
func rewriteEntryURL(feed *model.Feed, entry *model.Entry) string {
	rewrittenURL := entry.URL

	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)
		if len(parts) >= 3 {
			re, err := regexp.Compile(parts[1])
			if err != nil {
				slog.Error("Failed on regexp compilation",
					slog.String("url_rewrite_rules", feed.UrlRewriteRules),
					slog.Any("error", err),
				)
				return rewrittenURL
			}

			rewrittenURL = re.ReplaceAllString(entry.URL, parts[2])
			slog.Debug("Rewriting entry URL",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", rewrittenURL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
			)
		} else {
			slog.Debug("Cannot find search and replace terms for replace rule",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", rewrittenURL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("url_rewrite_rules", feed.UrlRewriteRules),
			)
		}
	}

	return rewrittenURL
}
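
// isRecentEntry returns true when no maximum entry age is configured
// (FilterEntryMaxAgeDays is zero) or when the entry date falls within the
// configured window.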
func isRecentEntry(entry *model.Entry) bool {
	return config.Opts.FilterEntryMaxAgeDays() == 0 ||
		entry.Date.After(time.Now().AddDate(0, 0, -config.Opts.FilterEntryMaxAgeDays()))
}