processor.go

// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor

import (
	"errors"
	"fmt"
	"log/slog"
	"regexp"
	"slices"
	"strconv"
	"time"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/metric"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/readingtime"
	"miniflux.app/v2/internal/reader/rewrite"
	"miniflux.app/v2/internal/reader/sanitizer"
	"miniflux.app/v2/internal/reader/scraper"
	"miniflux.app/v2/internal/storage"

	"github.com/PuerkitoBio/goquery"
	"github.com/tdewolff/minify/v2"
	"github.com/tdewolff/minify/v2/html"
)

var (
	youtubeRegex           = regexp.MustCompile(`youtube\.com/watch\?v=(.*)$`)
	odyseeRegex            = regexp.MustCompile(`^https://odysee\.com`)
	iso8601Regex           = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`)
	customReplaceRuleRegex = regexp.MustCompile(`rewrite\("(.*)"\|"(.*)"\)`)
)
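
// Note: customReplaceRuleRegex matches URL rewrite rules of the shape
// rewrite("search"|"replace"), where the first capture is compiled as a Go
// regular expression against the entry URL and the second is its replacement
// string (see getUrlFromEntry below).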

// ProcessFeedEntries downloads the original web page for entries and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
	var filteredEntries model.Entries

	minifier := minify.New()
	minifier.AddFunc("text/html", html.Minify)

	// Process older entries first.
	for i := len(feed.Entries) - 1; i >= 0; i-- {
		entry := feed.Entries[i]

		slog.Debug("Processing entry",
			slog.Int64("user_id", user.ID),
			slog.String("entry_url", entry.URL),
			slog.String("entry_hash", entry.Hash),
			slog.String("entry_title", entry.Title),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
		)

		if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) || !isRecentEntry(entry) {
			continue
		}

		websiteURL := getUrlFromEntry(feed, entry)
		entryIsNew := store.IsNewEntry(feed.ID, entry.Hash)
		if feed.Crawler && (entryIsNew || forceRefresh) {
			slog.Debug("Scraping entry",
				slog.Int64("user_id", user.ID),
				slog.String("entry_url", entry.URL),
				slog.String("entry_hash", entry.Hash),
				slog.String("entry_title", entry.Title),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.Bool("entry_is_new", entryIsNew),
				slog.Bool("force_refresh", forceRefresh),
				slog.String("website_url", websiteURL),
			)

			startTime := time.Now()

			requestBuilder := fetcher.NewRequestBuilder()
			requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
			requestBuilder.WithCookie(feed.Cookie)
			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
			requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
			requestBuilder.UseProxy(feed.FetchViaProxy)
			requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
			requestBuilder.DisableHTTP2(feed.DisableHTTP2)

			content, scraperErr := scraper.ScrapeWebsite(
				requestBuilder,
				websiteURL,
				feed.ScraperRules,
			)

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				slog.Warn("Unable to scrape entry",
					slog.Int64("user_id", user.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", scraperErr),
				)
			} else if content != "" {
				// We replace the entry content only if the scraper doesn't return any error.
				if minifiedHTML, err := minifier.String("text/html", content); err == nil {
					entry.Content = minifiedHTML
				} else {
					entry.Content = content
				}
			}
		}

		rewrite.Rewriter(websiteURL, entry, feed.RewriteRules)

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered.
		entry.Content = sanitizer.Sanitize(websiteURL, entry.Content)

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}

	feed.Entries = filteredEntries
}

// isBlockedEntry returns true when the entry URL, title, author, or one of
// its tags matches the feed blocklist regex.
func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.BlocklistRules == "" {
		return false
	}

	compiledBlocklist, err := regexp.Compile(feed.BlocklistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.BlocklistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledBlocklist.MatchString(tag)
	})

	if compiledBlocklist.MatchString(entry.URL) || compiledBlocklist.MatchString(entry.Title) || compiledBlocklist.MatchString(entry.Author) || containsBlockedTag {
		slog.Debug("Blocking entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.BlocklistRules),
		)
		return true
	}

	return false
}
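
// As a hypothetical illustration, a blocklist rule of `(?i)sponsored` drops
// any entry whose URL, title, author, or tags contain "sponsored",
// case-insensitively; keeplist rules below work the same way, but in reverse.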

// isAllowedEntry returns true when the feed has no keeplist, or when the
// entry URL, title, author, or one of its tags matches the keeplist regex.
func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.KeeplistRules == "" {
		return true
	}

	compiledKeeplist, err := regexp.Compile(feed.KeeplistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.KeeplistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledKeeplist.MatchString(tag)
	})

	if compiledKeeplist.MatchString(entry.URL) || compiledKeeplist.MatchString(entry.Title) || compiledKeeplist.MatchString(entry.Author) || containsAllowedTag {
		slog.Debug("Allowing entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.KeeplistRules),
		)
		return true
	}

	return false
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	minifier := minify.New()
	minifier.AddFunc("text/html", html.Minify)

	startTime := time.Now()
	websiteURL := getUrlFromEntry(feed, entry)

	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
	requestBuilder.WithCookie(feed.Cookie)
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
	requestBuilder.UseProxy(feed.FetchViaProxy)
	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
	requestBuilder.DisableHTTP2(feed.DisableHTTP2)

	content, scraperErr := scraper.ScrapeWebsite(
		requestBuilder,
		websiteURL,
		feed.ScraperRules,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if content != "" {
		if minifiedHTML, err := minifier.String("text/html", content); err == nil {
			entry.Content = minifiedHTML
		} else {
			entry.Content = content
		}

		if user.ShowReadingTime {
			entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
		}
	}

	rewrite.Rewriter(websiteURL, entry, entry.Feed.RewriteRules)
	entry.Content = sanitizer.Sanitize(websiteURL, entry.Content)

	return nil
}

// getUrlFromEntry returns the entry URL, rewritten according to the feed
// URL rewrite rule when one is defined.
func getUrlFromEntry(feed *model.Feed, entry *model.Entry) string {
	url := entry.URL
	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)
		if len(parts) >= 3 {
			// Compile the user-supplied pattern so that an invalid rule is
			// logged and skipped instead of panicking.
			re, err := regexp.Compile(parts[1])
			if err != nil {
				slog.Debug("Failed on regexp compilation",
					slog.String("url_rewrite_rules", feed.UrlRewriteRules),
					slog.Any("error", err),
				)
				return url
			}
			url = re.ReplaceAllString(entry.URL, parts[2])
			slog.Debug("Rewriting entry URL",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", url),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
			)
		} else {
			slog.Debug("Cannot find search and replace terms for replace rule",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", url),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("url_rewrite_rules", feed.UrlRewriteRules),
			)
		}
	}
	return url
}
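
// As a hypothetical illustration, a feed with UrlRewriteRules set to
//   rewrite("^https://example\.org/amp/(.*)"|"https://example.org/$1")
// would turn https://example.org/amp/some-article into
// https://example.org/some-article before scraping.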

// updateEntryReadingTime stores the estimated reading time for the entry,
// using the video watch time for YouTube and Odysee links and a text-based
// estimate otherwise.
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
	if !user.ShowReadingTime {
		slog.Debug("Skip reading time estimation for this user", slog.Int64("user_id", user.ID))
		return
	}

	if shouldFetchYouTubeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchYouTubeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch YouTube watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchOdyseeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchOdyseeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Odysee watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	// Fall back to the text-based estimate for non-video entries and for
	// videos whose watch time could not be fetched.
	if entry.ReadingTime == 0 {
		entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
	}
}

// shouldFetchYouTubeWatchTime reports whether the instance is configured to
// fetch YouTube watch time and the entry URL points to a YouTube watch page.
func shouldFetchYouTubeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchYouTubeWatchTime() {
		return false
	}
	matches := youtubeRegex.FindStringSubmatch(entry.URL)
	return len(matches) == 2
}

// shouldFetchOdyseeWatchTime reports whether the instance is configured to
// fetch Odysee watch time and the entry URL points to odysee.com.
func shouldFetchOdyseeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchOdyseeWatchTime() {
		return false
	}
	return odyseeRegex.MatchString(entry.URL)
}
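
// For example, youtubeRegex matches https://www.youtube.com/watch?v=dQw4w9WgXcQ
// and captures the video ID, while odyseeRegex matches any URL hosted on
// https://odysee.com.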

// fetchYouTubeWatchTime downloads the YouTube watch page and extracts the
// video duration, in minutes, from its schema.org metadata.
func fetchYouTubeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch YouTube page", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	durs, exists := doc.Find(`meta[itemprop="duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := parseISO8601(durs)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur.Minutes()), nil
}
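
// For reference, a YouTube watch page embeds the video duration as, e.g.,
//   <meta itemprop="duration" content="PT19M24S">
// which parseISO8601 converts to 19m24s, truncated above to 19 minutes.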

// fetchOdyseeWatchTime downloads the Odysee page and extracts the video
// duration, in minutes, from its Open Graph metadata.
func fetchOdyseeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch Odysee watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	// The og:video:duration property contains the watch time in seconds.
	durs, exists := doc.Find(`meta[property="og:video:duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := strconv.ParseInt(durs, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur / 60), nil
}
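
// For reference, an Odysee page exposes the duration in seconds, e.g.,
//   <meta property="og:video:duration" content="1260">
// which is converted above to 1260 / 60 = 21 minutes.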

// parseISO8601 parses an ISO 8601 duration string.
func parseISO8601(from string) (time.Duration, error) {
	var match []string
	var d time.Duration

	if iso8601Regex.MatchString(from) {
		match = iso8601Regex.FindStringSubmatch(from)
	} else {
		return 0, errors.New("could not parse duration string")
	}

	for i, name := range iso8601Regex.SubexpNames() {
		part := match[i]
		if i == 0 || name == "" || part == "" {
			continue
		}

		val, err := strconv.ParseInt(part, 10, 64)
		if err != nil {
			return 0, err
		}

		switch name {
		case "hour":
			d += time.Duration(val) * time.Hour
		case "minute":
			d += time.Duration(val) * time.Minute
		case "second":
			d += time.Duration(val) * time.Second
		default:
			// Date components (year, month, week, day) are not converted to
			// a duration; they never occur in YouTube video durations.
			return 0, fmt.Errorf("unknown field %s", name)
		}
	}

	return d, nil
}
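
// For example, parseISO8601("PT1H2M30S") returns 1h2m30s, while a duration
// with a date component such as "P1DT2H" is rejected by the default case.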

// isRecentEntry returns true when no maximum entry age is configured, or
// when the entry date falls within that maximum age.
func isRecentEntry(entry *model.Entry) bool {
	return config.Opts.FilterEntryMaxAgeDays() == 0 ||
		entry.Date.After(time.Now().AddDate(0, 0, -config.Opts.FilterEntryMaxAgeDays()))
}
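
// For example, with a 30-day limit configured, an entry dated 31 days ago is
// dropped by ProcessFeedEntries, while a limit of zero keeps entries of any age.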