processor.go

// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor // import "miniflux.app/v2/internal/reader/processor"

import (
	"log/slog"
	"net/url"
	"regexp"
	"time"

	"github.com/tdewolff/minify/v2"
	"github.com/tdewolff/minify/v2/html"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/metric"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/proxyrotator"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/readingtime"
	"miniflux.app/v2/internal/reader/rewrite"
	"miniflux.app/v2/internal/reader/sanitizer"
	"miniflux.app/v2/internal/reader/scraper"
	"miniflux.app/v2/internal/reader/urlcleaner"
	"miniflux.app/v2/internal/storage"
)
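
// customReplaceRuleRegex matches URL rewrite rules of the form
// rewrite("search-pattern"|"replacement"), capturing the search regex
// and the replacement string.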
var customReplaceRuleRegex = regexp.MustCompile(`rewrite\("([^"]+)"\|"([^"]+)"\)`)

// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, userID int64, forceRefresh bool) {
	var filteredEntries model.Entries

	user, storeErr := store.UserByID(userID)
	if storeErr != nil {
		slog.Error("Database error", slog.Any("error", storeErr))
		return
	}

	// The errors are handled in RemoveTrackingParameters.
	parsedFeedURL, _ := url.Parse(feed.FeedURL)
	parsedSiteURL, _ := url.Parse(feed.SiteURL)

	// Process older entries first.
	for i := len(feed.Entries) - 1; i >= 0; i-- {
		entry := feed.Entries[i]

		slog.Debug("Processing entry",
			slog.Int64("user_id", user.ID),
			slog.String("entry_url", entry.URL),
			slog.String("entry_hash", entry.Hash),
			slog.String("entry_title", entry.Title),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
		)

		if isBlockedEntry(feed, entry, user) || !isAllowedEntry(feed, entry, user) || !isRecentEntry(entry) {
			continue
		}
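
		// Strip known tracking parameters from the entry URL when possible.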
		parsedInputURL, _ := url.Parse(entry.URL)
		if cleanedURL, err := urlcleaner.RemoveTrackingParameters(parsedFeedURL, parsedSiteURL, parsedInputURL); err == nil {
			entry.URL = cleanedURL
		}

		pageBaseURL := ""
		rewrittenURL := rewriteEntryURL(feed, entry)
		entry.URL = rewrittenURL

		entryIsNew := store.IsNewEntry(feed.ID, entry.Hash)
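
		// Fetch and scrape the full page only for new entries, or when a refresh is forced.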
		if feed.Crawler && (entryIsNew || forceRefresh) {
			slog.Debug("Scraping entry",
				slog.Int64("user_id", user.ID),
				slog.String("entry_url", entry.URL),
				slog.String("entry_hash", entry.Hash),
				slog.String("entry_title", entry.Title),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.Bool("entry_is_new", entryIsNew),
				slog.Bool("force_refresh", forceRefresh),
				slog.String("rewritten_url", rewrittenURL),
			)

			startTime := time.Now()
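
			// Build the HTTP request with the feed-specific fetch options.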
			requestBuilder := fetcher.NewRequestBuilder()
			requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
			requestBuilder.WithCookie(feed.Cookie)
			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
			requestBuilder.WithProxyRotator(proxyrotator.ProxyRotatorInstance)
			requestBuilder.WithCustomFeedProxyURL(feed.ProxyURL)
			requestBuilder.WithCustomApplicationProxyURL(config.Opts.HTTPClientProxyURL())
			requestBuilder.UseCustomApplicationProxyURL(feed.FetchViaProxy)
			requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
			requestBuilder.DisableHTTP2(feed.DisableHTTP2)

			scrapedPageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
				requestBuilder,
				rewrittenURL,
				feed.ScraperRules,
			)
			if scrapedPageBaseURL != "" {
				pageBaseURL = scrapedPageBaseURL
			}

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				slog.Warn("Unable to scrape entry",
					slog.Int64("user_id", user.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", scraperErr),
				)
			} else if extractedContent != "" {
				// We replace the entry content only if the scraper doesn't return any error.
				entry.Content = minifyEntryContent(extractedContent)
			}
		}
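
		// Apply the feed's content rewrite rules before sanitizing.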
		rewrite.Rewriter(rewrittenURL, entry, feed.RewriteRules)

		if pageBaseURL == "" {
			pageBaseURL = rewrittenURL
		}

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered out.
		entry.Content = sanitizer.SanitizeHTML(pageBaseURL, entry.Content, &sanitizer.SanitizerOptions{OpenLinksInNewTab: user.OpenExternalLinksInNewTab})

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)

		filteredEntries = append(filteredEntries, entry)
	}

	if user.ShowReadingTime && shouldFetchYouTubeWatchTimeInBulk() {
		fetchYouTubeWatchTimeInBulk(filteredEntries)
	}

	feed.Entries = filteredEntries
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	rewrittenEntryURL := rewriteEntryURL(feed, entry)
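
	// Build the HTTP request with the same feed-specific fetch options as the crawler.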
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
	requestBuilder.WithCookie(feed.Cookie)
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxyRotator(proxyrotator.ProxyRotatorInstance)
	requestBuilder.WithCustomFeedProxyURL(feed.ProxyURL)
	requestBuilder.WithCustomApplicationProxyURL(config.Opts.HTTPClientProxyURL())
	requestBuilder.UseCustomApplicationProxyURL(feed.FetchViaProxy)
	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
	requestBuilder.DisableHTTP2(feed.DisableHTTP2)

	pageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
		requestBuilder,
		rewrittenEntryURL,
		feed.ScraperRules,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if extractedContent != "" {
		entry.Content = minifyEntryContent(extractedContent)
		if user.ShowReadingTime {
			entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
		}
	}

	rewrite.Rewriter(rewrittenEntryURL, entry, entry.Feed.RewriteRules)
	entry.Content = sanitizer.SanitizeHTML(pageBaseURL, entry.Content, &sanitizer.SanitizerOptions{OpenLinksInNewTab: user.OpenExternalLinksInNewTab})

	return nil
}
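
// rewriteEntryURL applies the feed's URL rewrite rule, if any, to the entry URL
// and returns the resulting URL. The original URL is returned when no rule
// matches or when the rule's search regex fails to compile.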
func rewriteEntryURL(feed *model.Feed, entry *model.Entry) string {
	var rewrittenURL = entry.URL
	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)

		if len(parts) >= 3 {
			re, err := regexp.Compile(parts[1])
			if err != nil {
				slog.Error("Failed on regexp compilation",
					slog.String("url_rewrite_rules", feed.UrlRewriteRules),
					slog.Any("error", err),
				)
				return rewrittenURL
			}
			rewrittenURL = re.ReplaceAllString(entry.URL, parts[2])
			slog.Debug("Rewriting entry URL",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", rewrittenURL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
			)
		} else {
			slog.Debug("Cannot find search and replace terms for replace rule",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", rewrittenURL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("url_rewrite_rules", feed.UrlRewriteRules),
			)
		}
	}

	return rewrittenURL
}
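
// isRecentEntry reports whether the entry date falls within the configured
// maximum entry age (config.Opts.FilterEntryMaxAgeDays()). A configured value
// of zero disables the age filter.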
func isRecentEntry(entry *model.Entry) bool {
	maxAgeDays := config.Opts.FilterEntryMaxAgeDays()
	return maxAgeDays == 0 || entry.Date.After(time.Now().AddDate(0, 0, -maxAgeDays))
}
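
// minifyEntryContent compresses the entry HTML while keeping end tags and
// attribute quotes so the markup is not broken for downstream sanitization.
// On a minification error, the content is returned unchanged.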
func minifyEntryContent(entryContent string) string {
	m := minify.New()

	// Options required to avoid breaking the HTML content.
	m.Add("text/html", &html.Minifier{
		KeepEndTags: true,
		KeepQuotes:  true,
	})

	if minifiedHTML, err := m.String("text/html", entryContent); err == nil {
		entryContent = minifiedHTML
	}

	return entryContent
}