processor.go

// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor

import (
	"errors"
	"fmt"
	"log/slog"
	"regexp"
	"slices"
	"strconv"
	"strings"
	"time"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/metric"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/readingtime"
	"miniflux.app/v2/internal/reader/rewrite"
	"miniflux.app/v2/internal/reader/sanitizer"
	"miniflux.app/v2/internal/reader/scraper"
	"miniflux.app/v2/internal/storage"

	"github.com/PuerkitoBio/goquery"
	"github.com/tdewolff/minify/v2"
	"github.com/tdewolff/minify/v2/html"
)

var (
	youtubeRegex           = regexp.MustCompile(`youtube\.com/watch\?v=(.*)$`)
	nebulaRegex            = regexp.MustCompile(`^https://nebula\.tv`)
	odyseeRegex            = regexp.MustCompile(`^https://odysee\.com`)
	iso8601Regex           = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`)
	customReplaceRuleRegex = regexp.MustCompile(`rewrite\("(.*)"\|"(.*)"\)`)
)

// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
	var filteredEntries model.Entries

	// Process older entries first.
	for i := len(feed.Entries) - 1; i >= 0; i-- {
		entry := feed.Entries[i]

		slog.Debug("Processing entry",
			slog.Int64("user_id", user.ID),
			slog.String("entry_url", entry.URL),
			slog.String("entry_hash", entry.Hash),
			slog.String("entry_title", entry.Title),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
		)

		if isBlockedEntry(feed, entry, user) || !isAllowedEntry(feed, entry, user) || !isRecentEntry(entry) {
			continue
		}

		websiteURL := getUrlFromEntry(feed, entry)
		entryIsNew := store.IsNewEntry(feed.ID, entry.Hash)

		if feed.Crawler && (entryIsNew || forceRefresh) {
			slog.Debug("Scraping entry",
				slog.Int64("user_id", user.ID),
				slog.String("entry_url", entry.URL),
				slog.String("entry_hash", entry.Hash),
				slog.String("entry_title", entry.Title),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.Bool("entry_is_new", entryIsNew),
				slog.Bool("force_refresh", forceRefresh),
				slog.String("website_url", websiteURL),
			)

			startTime := time.Now()

			requestBuilder := fetcher.NewRequestBuilder()
			requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
			requestBuilder.WithCookie(feed.Cookie)
			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
			requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
			requestBuilder.UseProxy(feed.FetchViaProxy)
			requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
			requestBuilder.DisableHTTP2(feed.DisableHTTP2)

			content, scraperErr := scraper.ScrapeWebsite(
				requestBuilder,
				websiteURL,
				feed.ScraperRules,
			)

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				slog.Warn("Unable to scrape entry",
					slog.Int64("user_id", user.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", scraperErr),
				)
			} else if content != "" {
				// Replace the entry content only if the scraper did not return an error.
				entry.Content = minifyEntryContent(content)
			}
		}

		rewrite.Rewriter(websiteURL, entry, feed.RewriteRules)

		// The sanitizer must always run at the end of the process to make sure unsafe HTML is filtered out.
		entry.Content = sanitizer.Sanitize(websiteURL, entry.Content)

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}

	feed.Entries = filteredEntries
}
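
// isBlockedEntry reports whether the entry matches one of the user's block
// filter rules or the feed's blocklist regex. User rules use one
// "Field=regex" pair per line, e.g. "EntryTitle=(?i)sponsored" (that pattern
// is illustrative; the supported fields are the cases of the switch below).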
func isBlockedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
	if user.BlockFilterEntryRules != "" {
		rules := strings.Split(user.BlockFilterEntryRules, "\n")
		for _, rule := range rules {
			parts := strings.SplitN(rule, "=", 2)
			if len(parts) != 2 {
				// Skip malformed rules instead of indexing past a missing "=".
				continue
			}

			var match bool
			switch parts[0] {
			case "EntryTitle":
				match, _ = regexp.MatchString(parts[1], entry.Title)
			case "EntryURL":
				match, _ = regexp.MatchString(parts[1], entry.URL)
			case "EntryCommentsURL":
				match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
			case "EntryContent":
				match, _ = regexp.MatchString(parts[1], entry.Content)
			case "EntryAuthor":
				match, _ = regexp.MatchString(parts[1], entry.Author)
			case "EntryTag":
				match = slices.ContainsFunc(entry.Tags, func(tag string) bool {
					matched, _ := regexp.MatchString(parts[1], tag)
					return matched
				})
			}

			if match {
				slog.Debug("Blocking entry based on rule",
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.String("rule", rule),
				)
				return true
			}
		}
	}

	if feed.BlocklistRules == "" {
		return false
	}

	compiledBlocklist, err := regexp.Compile(feed.BlocklistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.BlocklistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledBlocklist.MatchString(tag)
	})

	if compiledBlocklist.MatchString(entry.URL) || compiledBlocklist.MatchString(entry.Title) || compiledBlocklist.MatchString(entry.Author) || containsBlockedTag {
		slog.Debug("Blocking entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.BlocklistRules),
		)
		return true
	}

	return false
}
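
// isAllowedEntry reports whether the entry passes the user's keep filter
// rules and the feed's keeplist regex. It uses the same one-rule-per-line
// "Field=regex" format as the block filter rules above.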
func isAllowedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
	if user.KeepFilterEntryRules != "" {
		rules := strings.Split(user.KeepFilterEntryRules, "\n")
		for _, rule := range rules {
			parts := strings.SplitN(rule, "=", 2)
			if len(parts) != 2 {
				// Skip malformed rules instead of indexing past a missing "=".
				continue
			}

			var match bool
			switch parts[0] {
			case "EntryTitle":
				match, _ = regexp.MatchString(parts[1], entry.Title)
			case "EntryURL":
				match, _ = regexp.MatchString(parts[1], entry.URL)
			case "EntryCommentsURL":
				match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
			case "EntryContent":
				match, _ = regexp.MatchString(parts[1], entry.Content)
			case "EntryAuthor":
				match, _ = regexp.MatchString(parts[1], entry.Author)
			case "EntryTag":
				match = slices.ContainsFunc(entry.Tags, func(tag string) bool {
					matched, _ := regexp.MatchString(parts[1], tag)
					return matched
				})
			}

			if match {
				slog.Debug("Allowing entry based on rule",
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.String("rule", rule),
				)
				return true
			}
		}
		return false
	}

	if feed.KeeplistRules == "" {
		return true
	}

	compiledKeeplist, err := regexp.Compile(feed.KeeplistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.KeeplistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledKeeplist.MatchString(tag)
	})

	if compiledKeeplist.MatchString(entry.URL) || compiledKeeplist.MatchString(entry.Title) || compiledKeeplist.MatchString(entry.Author) || containsAllowedTag {
		slog.Debug("Allowing entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.KeeplistRules),
		)
		return true
	}

	return false
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	websiteURL := getUrlFromEntry(feed, entry)

	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
	requestBuilder.WithCookie(feed.Cookie)
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
	requestBuilder.UseProxy(feed.FetchViaProxy)
	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
	requestBuilder.DisableHTTP2(feed.DisableHTTP2)

	content, scraperErr := scraper.ScrapeWebsite(
		requestBuilder,
		websiteURL,
		feed.ScraperRules,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if content != "" {
		entry.Content = minifyEntryContent(content)
		if user.ShowReadingTime {
			entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
		}
	}

	rewrite.Rewriter(websiteURL, entry, entry.Feed.RewriteRules)
	entry.Content = sanitizer.Sanitize(websiteURL, entry.Content)

	return nil
}
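
// getUrlFromEntry returns the entry URL, rewritten by the feed's URL rewrite
// rule when one is configured. The expected rule format follows
// customReplaceRuleRegex: rewrite("search-regex"|"replacement"), for example
// rewrite("^https://mobile\."|"https://") (the pattern is illustrative).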
func getUrlFromEntry(feed *model.Feed, entry *model.Entry) string {
	url := entry.URL
	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)
		if len(parts) >= 3 {
			re, err := regexp.Compile(parts[1])
			if err != nil {
				slog.Error("Failed on regexp compilation",
					slog.String("url_rewrite_rules", feed.UrlRewriteRules),
					slog.Any("error", err),
				)
				return url
			}
			url = re.ReplaceAllString(entry.URL, parts[2])
			slog.Debug("Rewriting entry URL",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", url),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
			)
		} else {
			slog.Debug("Cannot find search and replace terms for replace rule",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", url),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("url_rewrite_rules", feed.UrlRewriteRules),
			)
		}
	}
	return url
}
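
// updateEntryReadingTime stores a reading time on the entry: the watch time
// for YouTube, Nebula, and Odysee videos (reloaded from storage for entries
// already seen), or a word-count based estimate for regular articles.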
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
	if !user.ShowReadingTime {
		slog.Debug("Skip reading time estimation for this user", slog.Int64("user_id", user.ID))
		return
	}

	if shouldFetchYouTubeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchYouTubeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch YouTube watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchNebulaWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchNebulaWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Nebula watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchOdyseeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchOdyseeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Odysee watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	// Fall back to the content-based estimate for non-video entries and for
	// watch time fetch errors above (where ReadingTime is still zero).
	if entry.ReadingTime == 0 {
		entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
	}
}
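
// shouldFetchYouTubeWatchTime reports whether the entry URL looks like a
// YouTube video and the instance is configured to fetch YouTube watch time.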
func shouldFetchYouTubeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchYouTubeWatchTime() {
		return false
	}
	matches := youtubeRegex.FindStringSubmatch(entry.URL)
	return len(matches) == 2
}

func shouldFetchNebulaWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchNebulaWatchTime() {
		return false
	}
	return nebulaRegex.MatchString(entry.URL)
}

func shouldFetchOdyseeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchOdyseeWatchTime() {
		return false
	}
	return odyseeRegex.MatchString(entry.URL)
}
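
// fetchYouTubeWatchTime downloads the video page and parses the ISO 8601
// duration exposed in its meta[itemprop="duration"] tag, returned in minutes.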
func fetchYouTubeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch YouTube page", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	durs, exists := doc.Find(`meta[itemprop="duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := parseISO8601(durs)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur.Minutes()), nil
}
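
// fetchNebulaWatchTime downloads the video page and reads the duration in
// seconds from its meta[property="video:duration"] tag, returned in minutes.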
func fetchNebulaWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch Nebula watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	// durs contains the video duration in seconds.
	durs, exists := doc.Find(`meta[property="video:duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := strconv.ParseInt(durs, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur / 60), nil
}
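
// fetchOdyseeWatchTime downloads the video page and reads the duration in
// seconds from its meta[property="og:video:duration"] tag, returned in
// minutes.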
func fetchOdyseeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch Odysee watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	// durs contains the video duration in seconds.
	durs, exists := doc.Find(`meta[property="og:video:duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := strconv.ParseInt(durs, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur / 60), nil
}

// parseISO8601 parses an ISO 8601 duration string.
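// Only the time components are converted: for example, "PT1H30M5S" yields
// 1h30m5s, while durations carrying date components (years, months, weeks,
// or days) are rejected by the switch below.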
func parseISO8601(from string) (time.Duration, error) {
	match := iso8601Regex.FindStringSubmatch(from)
	if match == nil {
		return 0, errors.New("could not parse duration string")
	}

	var d time.Duration
	for i, name := range iso8601Regex.SubexpNames() {
		part := match[i]
		if i == 0 || name == "" || part == "" {
			continue
		}

		val, err := strconv.ParseInt(part, 10, 64)
		if err != nil {
			return 0, err
		}

		switch name {
		case "hour":
			d += time.Duration(val) * time.Hour
		case "minute":
			d += time.Duration(val) * time.Minute
		case "second":
			d += time.Duration(val) * time.Second
		default:
			return 0, fmt.Errorf("unknown field %s", name)
		}
	}

	return d, nil
}
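
// isRecentEntry reports whether the entry is newer than the age limit set by
// config.Opts.FilterEntryMaxAgeDays(); a limit of zero disables the check.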
func isRecentEntry(entry *model.Entry) bool {
	return config.Opts.FilterEntryMaxAgeDays() == 0 || entry.Date.After(time.Now().AddDate(0, 0, -config.Opts.FilterEntryMaxAgeDays()))
}
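
// minifyEntryContent minifies the entry HTML while keeping end tags and
// attribute quotes so the markup is not broken. The content is returned
// unchanged if minification fails.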
func minifyEntryContent(entryContent string) string {
	m := minify.New()

	// Options required to avoid breaking the HTML content.
	m.Add("text/html", &html.Minifier{
		KeepEndTags: true,
		KeepQuotes:  true,
	})

	if minifiedHTML, err := m.String("text/html", entryContent); err == nil {
		entryContent = minifiedHTML
	}

	return entryContent
}