processor.go

// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor // import "miniflux.app/v2/internal/reader/processor"

import (
	"log/slog"
	"net/url"
	"slices"
	"time"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/metric"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/proxyrotator"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/filter"
	"miniflux.app/v2/internal/reader/readingtime"
	"miniflux.app/v2/internal/reader/rewrite"
	"miniflux.app/v2/internal/reader/sanitizer"
	"miniflux.app/v2/internal/reader/scraper"
	"miniflux.app/v2/internal/reader/urlcleaner"
	"miniflux.app/v2/internal/storage"
)

// ProcessFeedEntries downloads the original web page for each entry and applies filter rules.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, userID int64, forceRefresh bool) {
	var filteredEntries model.Entries

	user, storeErr := store.UserByID(userID)
	if storeErr != nil {
		slog.Error("Database error", slog.Any("error", storeErr))
		return
	}

	// URL parse errors are tolerated here; they are handled in RemoveTrackingParameters.
	parsedFeedURL, _ := url.Parse(feed.FeedURL)
	parsedSiteURL, _ := url.Parse(feed.SiteURL)

	blockRules := filter.ParseRules(user.BlockFilterEntryRules, feed.BlockFilterEntryRules)
	allowRules := filter.ParseRules(user.KeepFilterEntryRules, feed.KeepFilterEntryRules)

	slog.Debug("Filter rules",
		slog.String("user_block_filter_rules", user.BlockFilterEntryRules),
		slog.String("feed_block_filter_rules", feed.BlockFilterEntryRules),
		slog.String("user_keep_filter_rules", user.KeepFilterEntryRules),
		slog.String("feed_keep_filter_rules", feed.KeepFilterEntryRules),
		slog.Any("block_rules", blockRules),
		slog.Any("allow_rules", allowRules),
		slog.Int64("user_id", user.ID),
		slog.Int64("feed_id", feed.ID),
	)
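
	// The request builder combines per-feed overrides (user agent, cookie,
	// proxy, TLS options) with the global HTTP client configuration.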
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
	requestBuilder.WithCookie(feed.Cookie)
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxyRotator(proxyrotator.ProxyRotatorInstance)
	requestBuilder.WithCustomFeedProxyURL(feed.ProxyURL)
	requestBuilder.WithCustomApplicationProxyURL(config.Opts.HTTPClientProxyURL())
	requestBuilder.UseCustomApplicationProxyURL(feed.FetchViaProxy)
	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
	requestBuilder.DisableHTTP2(feed.DisableHTTP2)

	// Processing older entries first ensures that their creation timestamps are lower than those of newer entries.
	for _, entry := range slices.Backward(feed.Entries) {
		slog.Debug("Processing entry",
			slog.Int64("user_id", user.ID),
			slog.String("entry_url", entry.URL),
			slog.String("entry_hash", entry.Hash),
			slog.String("entry_title", entry.Title),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
		)

		if filter.IsBlockedEntry(blockRules, allowRules, feed, entry) {
			slog.Debug("Entry is blocked by filter rules",
				slog.Int64("user_id", user.ID),
				slog.String("entry_url", entry.URL),
				slog.String("entry_hash", entry.Hash),
				slog.String("entry_title", entry.Title),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("filter_stage", "before_scrape"),
			)
			continue
		}

		// Strip known tracking parameters from the entry URL when possible.
		parsedInputURL, _ := url.Parse(entry.URL)
		if cleanedURL, err := urlcleaner.RemoveTrackingParameters(parsedFeedURL, parsedSiteURL, parsedInputURL); err == nil {
			entry.URL = cleanedURL
		}

		webpageBaseURL := ""
		entry.URL = rewrite.RewriteEntryURL(feed, entry)
		entryIsNew := store.IsNewEntry(feed.ID, entry.Hash)
		contentExtractedSuccessfully := false
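
		// Scrape the full web page only for crawler-enabled feeds, and only for
		// entries that are new (or when a refresh is forced) to avoid redundant fetches.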
		if feed.Crawler && (entryIsNew || forceRefresh) {
			slog.Debug("Scraping entry",
				slog.Int64("user_id", user.ID),
				slog.String("entry_url", entry.URL),
				slog.String("entry_hash", entry.Hash),
				slog.String("entry_title", entry.Title),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.Bool("entry_is_new", entryIsNew),
				slog.Bool("force_refresh", forceRefresh),
			)

			startTime := time.Now()
			scrapedPageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
				requestBuilder,
				entry.URL,
				feed.ScraperRules,
			)
			if scrapedPageBaseURL != "" {
				webpageBaseURL = scrapedPageBaseURL
			}
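
			// Record the scraper request duration, labeled by outcome, when metrics are enabled.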
			if config.Opts.HasMetricsCollector() {
				status := metric.StatusSuccess
				if scraperErr != nil {
					status = metric.StatusError
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				slog.Warn("Unable to scrape entry",
					slog.Int64("user_id", user.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", scraperErr),
				)
			} else if extractedContent != "" {
				// We replace the entry content only if the scraper doesn't return any error.
				entry.Content = minifyContent(extractedContent)
				contentExtractedSuccessfully = true
			}
		}

		rewrite.ApplyContentRewriteRules(entry, feed.RewriteRules)

		// Re-run filters only when extracted content replaced entry.Content.
		if contentExtractedSuccessfully && filter.IsBlockedEntry(blockRules, allowRules, feed, entry) {
			slog.Debug("Entry is blocked by filter rules",
				slog.Int64("user_id", user.ID),
				slog.String("entry_url", entry.URL),
				slog.String("entry_hash", entry.Hash),
				slog.String("entry_title", entry.Title),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("filter_stage", "after_scrape"),
			)
			continue
		}

		// Fall back to the entry URL as the base URL when the page was not scraped.
		if webpageBaseURL == "" {
			webpageBaseURL = entry.URL
		}

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered out.
		entry.Content = sanitizer.SanitizeHTML(webpageBaseURL, entry.Content, &sanitizer.SanitizerOptions{OpenLinksInNewTab: user.OpenExternalLinksInNewTab})

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}
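
	// When enabled, YouTube watch time is fetched for all filtered entries in a
	// single batched request rather than once per entry.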
	if user.ShowReadingTime && shouldFetchYouTubeWatchTimeInBulk() {
		fetchYouTubeWatchTimeInBulk(filteredEntries)
	}

	feed.Entries = filteredEntries
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	entry.URL = rewrite.RewriteEntryURL(feed, entry)
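
	// Same request configuration as in ProcessFeedEntries: per-feed overrides
	// applied on top of the global HTTP client defaults.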
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
	requestBuilder.WithCookie(feed.Cookie)
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxyRotator(proxyrotator.ProxyRotatorInstance)
	requestBuilder.WithCustomFeedProxyURL(feed.ProxyURL)
	requestBuilder.WithCustomApplicationProxyURL(config.Opts.HTTPClientProxyURL())
	requestBuilder.UseCustomApplicationProxyURL(feed.FetchViaProxy)
	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
	requestBuilder.DisableHTTP2(feed.DisableHTTP2)

	webpageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
		requestBuilder,
		entry.URL,
		feed.ScraperRules,
	)

	if config.Opts.HasMetricsCollector() {
		status := metric.StatusSuccess
		if scraperErr != nil {
			status = metric.StatusError
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if extractedContent != "" {
		entry.Content = minifyContent(extractedContent)

		if user.ShowReadingTime {
			entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
		}
	}

	rewrite.ApplyContentRewriteRules(entry, feed.RewriteRules)

	// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered out.
	entry.Content = sanitizer.SanitizeHTML(webpageBaseURL, entry.Content, &sanitizer.SanitizerOptions{OpenLinksInNewTab: user.OpenExternalLinksInNewTab})

	return nil
}
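
// Illustrative usage (a sketch, not part of this file): a feed refresh job
// would call ProcessFeedEntries after parsing the feed and before persisting
// its entries, for example:
//
//	processor.ProcessFeedEntries(store, feed, user.ID, forceRefresh)
//
// ProcessEntryWebPage suits on-demand fetching of a single entry and returns
// any scraper error to the caller:
//
//	if err := processor.ProcessEntryWebPage(feed, entry, user); err != nil {
//		// handle the scraper error
//	}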