// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor

import (
	"errors"
	"fmt"
	"log/slog"
	"regexp"
	"slices"
	"strconv"
	"time"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/metric"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/readingtime"
	"miniflux.app/v2/internal/reader/rewrite"
	"miniflux.app/v2/internal/reader/sanitizer"
	"miniflux.app/v2/internal/reader/scraper"
	"miniflux.app/v2/internal/storage"

	"github.com/PuerkitoBio/goquery"
	"github.com/tdewolff/minify/v2"
	"github.com/tdewolff/minify/v2/html"
)

var (
	youtubeRegex           = regexp.MustCompile(`youtube\.com/watch\?v=(.*)$`)
	nebulaRegex            = regexp.MustCompile(`^https://nebula\.tv`)
	odyseeRegex            = regexp.MustCompile(`^https://odysee\.com`)
	iso8601Regex           = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`)
	customReplaceRuleRegex = regexp.MustCompile(`rewrite\("(.*)"\|"(.*)"\)`)
)
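
// A feed URL rewrite rule uses the form matched by customReplaceRuleRegex:
// a search pattern and a replacement separated by a pipe. For example
// (hypothetical pattern and replacement, shown for illustration only):
//
//	rewrite("^https://example\.org/amp/(.*)$"|"https://example.org/$1")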

// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
	var filteredEntries model.Entries

	// Process older entries first.
	for i := len(feed.Entries) - 1; i >= 0; i-- {
		entry := feed.Entries[i]

		slog.Debug("Processing entry",
			slog.Int64("user_id", user.ID),
			slog.String("entry_url", entry.URL),
			slog.String("entry_hash", entry.Hash),
			slog.String("entry_title", entry.Title),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
		)

		if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) || !isRecentEntry(entry) {
			continue
		}

		websiteURL := getUrlFromEntry(feed, entry)
		entryIsNew := store.IsNewEntry(feed.ID, entry.Hash)

		if feed.Crawler && (entryIsNew || forceRefresh) {
			slog.Debug("Scraping entry",
				slog.Int64("user_id", user.ID),
				slog.String("entry_url", entry.URL),
				slog.String("entry_hash", entry.Hash),
				slog.String("entry_title", entry.Title),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.Bool("entry_is_new", entryIsNew),
				slog.Bool("force_refresh", forceRefresh),
				slog.String("website_url", websiteURL),
			)

			startTime := time.Now()

			requestBuilder := fetcher.NewRequestBuilder()
			requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
			requestBuilder.WithCookie(feed.Cookie)
			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
			requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
			requestBuilder.UseProxy(feed.FetchViaProxy)
			requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
			requestBuilder.DisableHTTP2(feed.DisableHTTP2)

			content, scraperErr := scraper.ScrapeWebsite(
				requestBuilder,
				websiteURL,
				feed.ScraperRules,
			)

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				slog.Warn("Unable to scrape entry",
					slog.Int64("user_id", user.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", scraperErr),
				)
			} else if content != "" {
				// We replace the entry content only if the scraper doesn't return any error.
				entry.Content = minifyEntryContent(content)
			}
		}

		rewrite.Rewriter(websiteURL, entry, feed.RewriteRules)

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered.
		entry.Content = sanitizer.Sanitize(websiteURL, entry.Content)

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}

	feed.Entries = filteredEntries
}
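
// isBlockedEntry reports whether the entry matches the feed's blocklist
// pattern. The pattern is tested against the entry URL, title, author, and
// tags; an invalid pattern is logged and blocks nothing.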
func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.BlocklistRules == "" {
		return false
	}

	compiledBlocklist, err := regexp.Compile(feed.BlocklistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.BlocklistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledBlocklist.MatchString(tag)
	})

	if compiledBlocklist.MatchString(entry.URL) || compiledBlocklist.MatchString(entry.Title) || compiledBlocklist.MatchString(entry.Author) || containsBlockedTag {
		slog.Debug("Blocking entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.BlocklistRules),
		)
		return true
	}

	return false
}
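
// isAllowedEntry reports whether the entry passes the feed's keeplist
// pattern. An empty keeplist allows everything; otherwise the pattern is
// tested against the entry URL, title, author, and tags.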
func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.KeeplistRules == "" {
		return true
	}

	compiledKeeplist, err := regexp.Compile(feed.KeeplistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.KeeplistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledKeeplist.MatchString(tag)
	})

	if compiledKeeplist.MatchString(entry.URL) || compiledKeeplist.MatchString(entry.Title) || compiledKeeplist.MatchString(entry.Author) || containsAllowedTag {
		slog.Debug("Allow entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.KeeplistRules),
		)
		return true
	}

	return false
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	websiteURL := getUrlFromEntry(feed, entry)

	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
	requestBuilder.WithCookie(feed.Cookie)
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
	requestBuilder.UseProxy(feed.FetchViaProxy)
	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
	requestBuilder.DisableHTTP2(feed.DisableHTTP2)

	content, scraperErr := scraper.ScrapeWebsite(
		requestBuilder,
		websiteURL,
		feed.ScraperRules,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if content != "" {
		entry.Content = minifyEntryContent(content)
		if user.ShowReadingTime {
			entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
		}
	}

	rewrite.Rewriter(websiteURL, entry, entry.Feed.RewriteRules)
	entry.Content = sanitizer.Sanitize(websiteURL, entry.Content)

	return nil
}
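
// getUrlFromEntry returns the entry URL, rewritten according to the feed's
// URL rewrite rule when one is defined.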
func getUrlFromEntry(feed *model.Feed, entry *model.Entry) string {
	var url = entry.URL
	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)

		if len(parts) >= 3 {
			// The search pattern comes from user-defined feed settings,
			// so compile it defensively rather than panicking on a bad pattern.
			re, compileErr := regexp.Compile(parts[1])
			if compileErr != nil {
				slog.Debug("Failed on regexp compilation",
					slog.String("url_rewrite_rules", feed.UrlRewriteRules),
					slog.Any("error", compileErr),
				)
				return url
			}
			url = re.ReplaceAllString(entry.URL, parts[2])
			slog.Debug("Rewriting entry URL",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", url),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
			)
		} else {
			slog.Debug("Cannot find search and replace terms for replace rule",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", url),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("url_rewrite_rules", feed.UrlRewriteRules),
			)
		}
	}
	return url
}
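
// updateEntryReadingTime computes the entry reading time: the video watch
// time for supported platforms (YouTube, Nebula, Odysee), or an estimate
// based on the content length otherwise.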
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
	if !user.ShowReadingTime {
		slog.Debug("Skip reading time estimation for this user", slog.Int64("user_id", user.ID))
		return
	}

	if shouldFetchYouTubeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchYouTubeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch YouTube watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchNebulaWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchNebulaWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Nebula watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchOdyseeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchOdyseeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Odysee watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	// Fall back to a text-based estimate for non-video entries and for
	// entries whose watch time could not be fetched above.
	if entry.ReadingTime == 0 {
		entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
	}
}
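
// shouldFetchYouTubeWatchTime reports whether the entry points to a YouTube
// video and watch time fetching is enabled in the configuration.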
func shouldFetchYouTubeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchYouTubeWatchTime() {
		return false
	}
	matches := youtubeRegex.FindStringSubmatch(entry.URL)
	urlMatchesYouTubePattern := len(matches) == 2
	return urlMatchesYouTubePattern
}
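
// shouldFetchNebulaWatchTime reports whether the entry points to a Nebula
// video and watch time fetching is enabled in the configuration.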
func shouldFetchNebulaWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchNebulaWatchTime() {
		return false
	}
	matches := nebulaRegex.FindStringSubmatch(entry.URL)
	return matches != nil
}
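
// shouldFetchOdyseeWatchTime reports whether the entry points to an Odysee
// video and watch time fetching is enabled in the configuration.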
func shouldFetchOdyseeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchOdyseeWatchTime() {
		return false
	}
	matches := odyseeRegex.FindStringSubmatch(entry.URL)
	return matches != nil
}
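
// fetchYouTubeWatchTime downloads the YouTube watch page and extracts the
// video duration, in minutes, from the ISO 8601 value of its duration meta tag.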
func fetchYouTubeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch YouTube page", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	durs, exists := doc.Find(`meta[itemprop="duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := parseISO8601(durs)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur.Minutes()), nil
}
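
// fetchNebulaWatchTime downloads the Nebula video page and extracts the
// duration, in minutes, from its video:duration meta tag.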
func fetchNebulaWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch Nebula watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	// durs contains the video watch time in seconds.
	durs, exists := doc.Find(`meta[property="video:duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := strconv.ParseInt(durs, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur / 60), nil
}
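
// fetchOdyseeWatchTime downloads the Odysee video page and extracts the
// duration, in minutes, from its og:video:duration meta tag.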
func fetchOdyseeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch Odysee watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	// durs contains the video watch time in seconds.
	durs, exists := doc.Find(`meta[property="og:video:duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := strconv.ParseInt(durs, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur / 60), nil
}

// parseISO8601 parses an ISO 8601 duration string.
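// For example, parseISO8601("PT1H30M15S") returns 1h30m15s. Date components
// (years, months, weeks, days) are not supported and yield an error.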
func parseISO8601(from string) (time.Duration, error) {
	var d time.Duration

	match := iso8601Regex.FindStringSubmatch(from)
	if match == nil {
		return 0, errors.New("could not parse duration string")
	}

	for i, name := range iso8601Regex.SubexpNames() {
		part := match[i]
		if i == 0 || name == "" || part == "" {
			continue
		}

		val, err := strconv.ParseInt(part, 10, 64)
		if err != nil {
			return 0, err
		}

		switch name {
		case "hour":
			d += time.Duration(val) * time.Hour
		case "minute":
			d += time.Duration(val) * time.Minute
		case "second":
			d += time.Duration(val) * time.Second
		default:
			return 0, fmt.Errorf("unknown field %s", name)
		}
	}

	return d, nil
}
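
// isRecentEntry reports whether the entry date falls within the configured
// maximum entry age; a maximum age of zero disables the filter.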
func isRecentEntry(entry *model.Entry) bool {
	return config.Opts.FilterEntryMaxAgeDays() == 0 ||
		entry.Date.After(time.Now().AddDate(0, 0, -config.Opts.FilterEntryMaxAgeDays()))
}
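
// minifyEntryContent minifies the entry HTML while keeping end tags and
// attribute quotes so the markup is not broken.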
func minifyEntryContent(entryContent string) string {
	m := minify.New()

	// Options required to avoid breaking the HTML content.
	m.Add("text/html", &html.Minifier{
		KeepEndTags: true,
		KeepQuotes:  true,
	})

	if minifiedHTML, err := m.String("text/html", entryContent); err == nil {
		entryContent = minifiedHTML
	}

	return entryContent
}