processor.go

// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor

import (
	"log/slog"
	"regexp"
	"slices"
	"strings"
	"time"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/metric"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/readingtime"
	"miniflux.app/v2/internal/reader/rewrite"
	"miniflux.app/v2/internal/reader/sanitizer"
	"miniflux.app/v2/internal/reader/scraper"
	"miniflux.app/v2/internal/reader/urlcleaner"
	"miniflux.app/v2/internal/storage"

	"github.com/tdewolff/minify/v2"
	"github.com/tdewolff/minify/v2/html"
)
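
// customReplaceRuleRegex parses URL rewrite rules of the form
// rewrite("search-regex"|"replacement"), e.g. the hypothetical rule
// rewrite("^http://"|"https://").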
var customReplaceRuleRegex = regexp.MustCompile(`rewrite\("([^"]+)"\|"([^"]+)"\)`)

// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, userID int64, forceRefresh bool) {
	var filteredEntries model.Entries

	user, storeErr := store.UserByID(userID)
	if storeErr != nil {
		slog.Error("Database error", slog.Any("error", storeErr))
		return
	}

	// Process older entries first
	for i := len(feed.Entries) - 1; i >= 0; i-- {
		entry := feed.Entries[i]

		slog.Debug("Processing entry",
			slog.Int64("user_id", user.ID),
			slog.String("entry_url", entry.URL),
			slog.String("entry_hash", entry.Hash),
			slog.String("entry_title", entry.Title),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
		)

		if isBlockedEntry(feed, entry, user) || !isAllowedEntry(feed, entry, user) || !isRecentEntry(entry) {
			continue
		}

		if cleanedURL, err := urlcleaner.RemoveTrackingParameters(entry.URL); err == nil {
			entry.URL = cleanedURL
		}

		pageBaseURL := ""
		rewrittenURL := rewriteEntryURL(feed, entry)
		entry.URL = rewrittenURL
		entryIsNew := store.IsNewEntry(feed.ID, entry.Hash)

		if feed.Crawler && (entryIsNew || forceRefresh) {
			slog.Debug("Scraping entry",
				slog.Int64("user_id", user.ID),
				slog.String("entry_url", entry.URL),
				slog.String("entry_hash", entry.Hash),
				slog.String("entry_title", entry.Title),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.Bool("entry_is_new", entryIsNew),
				slog.Bool("force_refresh", forceRefresh),
				slog.String("rewritten_url", rewrittenURL),
			)

			startTime := time.Now()

			requestBuilder := fetcher.NewRequestBuilder()
			requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
			requestBuilder.WithCookie(feed.Cookie)
			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
			requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
			requestBuilder.UseProxy(feed.FetchViaProxy)
			requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
			requestBuilder.DisableHTTP2(feed.DisableHTTP2)

			scrapedPageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
				requestBuilder,
				rewrittenURL,
				feed.ScraperRules,
			)
			if scrapedPageBaseURL != "" {
				pageBaseURL = scrapedPageBaseURL
			}

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				slog.Warn("Unable to scrape entry",
					slog.Int64("user_id", user.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", scraperErr),
				)
			} else if extractedContent != "" {
				// We replace the entry content only if the scraper doesn't return any error.
				entry.Content = minifyEntryContent(extractedContent)
			}
		}

		rewrite.Rewriter(rewrittenURL, entry, feed.RewriteRules)

		if pageBaseURL == "" {
			pageBaseURL = rewrittenURL
		}

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered out.
		entry.Content = sanitizer.Sanitize(pageBaseURL, entry.Content)

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}

	feed.Entries = filteredEntries
}
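
// isBlockedEntry reports whether the entry matches the user's block filter
// rules or the feed's blocklist regex. User rules are newline-separated
// "field=regex" pairs where the field is one of EntryTitle, EntryURL,
// EntryCommentsURL, EntryContent, EntryAuthor, or EntryTag, e.g. the
// hypothetical rule "EntryTitle=(?i)sponsored".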
func isBlockedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
	if user.BlockFilterEntryRules != "" {
		rules := strings.Split(user.BlockFilterEntryRules, "\n")
		for _, rule := range rules {
			parts := strings.SplitN(rule, "=", 2)
			if len(parts) != 2 {
				// Skip empty lines and rules without a "field=regex" separator.
				continue
			}

			var match bool
			switch parts[0] {
			case "EntryTitle":
				match, _ = regexp.MatchString(parts[1], entry.Title)
			case "EntryURL":
				match, _ = regexp.MatchString(parts[1], entry.URL)
			case "EntryCommentsURL":
				match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
			case "EntryContent":
				match, _ = regexp.MatchString(parts[1], entry.Content)
			case "EntryAuthor":
				match, _ = regexp.MatchString(parts[1], entry.Author)
			case "EntryTag":
				match = slices.ContainsFunc(entry.Tags, func(tag string) bool {
					matched, _ := regexp.MatchString(parts[1], tag)
					return matched
				})
			}

			if match {
				slog.Debug("Blocking entry based on rule",
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.String("rule", rule),
				)
				return true
			}
		}
	}

	if feed.BlocklistRules == "" {
		return false
	}

	compiledBlocklist, err := regexp.Compile(feed.BlocklistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.BlocklistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledBlocklist.MatchString(tag)
	})

	if compiledBlocklist.MatchString(entry.URL) || compiledBlocklist.MatchString(entry.Title) || compiledBlocklist.MatchString(entry.Author) || containsBlockedTag {
		slog.Debug("Blocking entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.BlocklistRules),
		)
		return true
	}
	return false
}
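
// isAllowedEntry reports whether the entry passes the user's keep filter
// rules (same "field=regex" syntax as the block rules); when no user rules
// are defined, it falls back to the feed's keeplist regex, and an empty
// keeplist allows everything.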
func isAllowedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
	if user.KeepFilterEntryRules != "" {
		rules := strings.Split(user.KeepFilterEntryRules, "\n")
		for _, rule := range rules {
			parts := strings.SplitN(rule, "=", 2)
			if len(parts) != 2 {
				// Skip empty lines and rules without a "field=regex" separator.
				continue
			}

			var match bool
			switch parts[0] {
			case "EntryTitle":
				match, _ = regexp.MatchString(parts[1], entry.Title)
			case "EntryURL":
				match, _ = regexp.MatchString(parts[1], entry.URL)
			case "EntryCommentsURL":
				match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
			case "EntryContent":
				match, _ = regexp.MatchString(parts[1], entry.Content)
			case "EntryAuthor":
				match, _ = regexp.MatchString(parts[1], entry.Author)
			case "EntryTag":
				match = slices.ContainsFunc(entry.Tags, func(tag string) bool {
					matched, _ := regexp.MatchString(parts[1], tag)
					return matched
				})
			}

			if match {
				slog.Debug("Allowing entry based on rule",
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.String("rule", rule),
				)
				return true
			}
		}
		return false
	}

	if feed.KeeplistRules == "" {
		return true
	}

	compiledKeeplist, err := regexp.Compile(feed.KeeplistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.KeeplistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledKeeplist.MatchString(tag)
	})

	if compiledKeeplist.MatchString(entry.URL) || compiledKeeplist.MatchString(entry.Title) || compiledKeeplist.MatchString(entry.Author) || containsAllowedTag {
		slog.Debug("Allowing entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.KeeplistRules),
		)
		return true
	}
	return false
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	rewrittenEntryURL := rewriteEntryURL(feed, entry)

	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
	requestBuilder.WithCookie(feed.Cookie)
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
	requestBuilder.UseProxy(feed.FetchViaProxy)
	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
	requestBuilder.DisableHTTP2(feed.DisableHTTP2)

	pageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
		requestBuilder,
		rewrittenEntryURL,
		feed.ScraperRules,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if extractedContent != "" {
		entry.Content = minifyEntryContent(extractedContent)
		if user.ShowReadingTime {
			entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
		}
	}

	rewrite.Rewriter(rewrittenEntryURL, entry, entry.Feed.RewriteRules)
	entry.Content = sanitizer.Sanitize(pageBaseURL, entry.Content)
	return nil
}
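
// rewriteEntryURL applies the feed's rewrite("search"|"replace") rule to the
// entry URL and returns the URL unchanged when the rule is absent, malformed,
// or fails to compile.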
func rewriteEntryURL(feed *model.Feed, entry *model.Entry) string {
	rewrittenURL := entry.URL
	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)

		if len(parts) >= 3 {
			re, err := regexp.Compile(parts[1])
			if err != nil {
				slog.Error("Failed on regexp compilation",
					slog.String("url_rewrite_rules", feed.UrlRewriteRules),
					slog.Any("error", err),
				)
				return rewrittenURL
			}
			rewrittenURL = re.ReplaceAllString(entry.URL, parts[2])
			slog.Debug("Rewriting entry URL",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", rewrittenURL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
			)
		} else {
			slog.Debug("Cannot find search and replace terms for replace rule",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", rewrittenURL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("url_rewrite_rules", feed.UrlRewriteRules),
			)
		}
	}

	return rewrittenURL
}
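
// updateEntryReadingTime sets entry.ReadingTime, using the fetched or
// previously stored watch time for video entries (YouTube, Nebula, Odysee,
// Bilibili) and a speed-based estimate of the content otherwise.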
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
	if !user.ShowReadingTime {
		slog.Debug("Skip reading time estimation for this user", slog.Int64("user_id", user.ID))
		return
	}

	if shouldFetchYouTubeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchYouTubeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch YouTube watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchNebulaWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchNebulaWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Nebula watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchOdyseeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchOdyseeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Odysee watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchBilibiliWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchBilibiliWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Bilibili watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	// ReadingTime is still zero when a watch-time lookup failed or the entry
	// is not a video; fall back to an estimate based on the content.
	if entry.ReadingTime == 0 {
		entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
	}
}
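
// isRecentEntry reports whether the entry is newer than the configured
// maximum age; a FilterEntryMaxAgeDays value of 0 disables the check.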
func isRecentEntry(entry *model.Entry) bool {
	return config.Opts.FilterEntryMaxAgeDays() == 0 || entry.Date.After(time.Now().AddDate(0, 0, -config.Opts.FilterEntryMaxAgeDays()))
}
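
// minifyEntryContent compacts the scraped HTML while keeping end tags and
// attribute quotes to avoid breaking the markup; the content is returned
// unchanged if minification fails.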
func minifyEntryContent(entryContent string) string {
	m := minify.New()

	// Options required to avoid breaking the HTML content.
	m.Add("text/html", &html.Minifier{
		KeepEndTags: true,
		KeepQuotes:  true,
	})

	if minifiedHTML, err := m.String("text/html", entryContent); err == nil {
		entryContent = minifiedHTML
	}

	return entryContent
}