processor.go

// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor

import (
	"errors"
	"fmt"
	"log/slog"
	"regexp"
	"slices"
	"strconv"
	"time"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/metric"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/readingtime"
	"miniflux.app/v2/internal/reader/rewrite"
	"miniflux.app/v2/internal/reader/sanitizer"
	"miniflux.app/v2/internal/reader/scraper"
	"miniflux.app/v2/internal/storage"

	"github.com/PuerkitoBio/goquery"
)

var (
	youtubeRegex           = regexp.MustCompile(`youtube\.com/watch\?v=(.*)$`)
	odyseeRegex            = regexp.MustCompile(`^https://odysee\.com`)
	iso8601Regex           = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`)
	customReplaceRuleRegex = regexp.MustCompile(`rewrite\("(.*)"\|"(.*)"\)`)
)

// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
	var filteredEntries model.Entries

	// Process older entries first.
	for i := len(feed.Entries) - 1; i >= 0; i-- {
		entry := feed.Entries[i]

		slog.Debug("Processing entry",
			slog.Int64("user_id", user.ID),
			slog.String("entry_url", entry.URL),
			slog.String("entry_hash", entry.Hash),
			slog.String("entry_title", entry.Title),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
		)

		if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) || !isRecentEntry(entry) {
			continue
		}

		websiteURL := getUrlFromEntry(feed, entry)
		entryIsNew := store.IsNewEntry(feed.ID, entry.Hash)

		if feed.Crawler && (entryIsNew || forceRefresh) {
			slog.Debug("Scraping entry",
				slog.Int64("user_id", user.ID),
				slog.String("entry_url", entry.URL),
				slog.String("entry_hash", entry.Hash),
				slog.String("entry_title", entry.Title),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.Bool("entry_is_new", entryIsNew),
				slog.Bool("force_refresh", forceRefresh),
				slog.String("website_url", websiteURL),
			)

			startTime := time.Now()

			requestBuilder := fetcher.NewRequestBuilder()
			requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
			requestBuilder.WithCookie(feed.Cookie)
			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
			requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
			requestBuilder.UseProxy(feed.FetchViaProxy)
			requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
			requestBuilder.DisableHTTP2(feed.DisableHTTP2)

			content, scraperErr := scraper.ScrapeWebsite(
				requestBuilder,
				websiteURL,
				feed.ScraperRules,
			)

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				slog.Warn("Unable to scrape entry",
					slog.Int64("user_id", user.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", scraperErr),
				)
			} else if content != "" {
				// Replace the entry content only if the scraper didn't return an error.
				entry.Content = content
			}
		}

		rewrite.Rewriter(websiteURL, entry, feed.RewriteRules)

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered out.
		entry.Content = sanitizer.Sanitize(websiteURL, entry.Content)

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}

	feed.Entries = filteredEntries
}

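// isBlockedEntry reports whether the entry matches the feed's blocklist rules.
// The pattern is matched against the entry URL, title, author, and tags.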
func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.BlocklistRules == "" {
		return false
	}

	compiledBlocklist, err := regexp.Compile(feed.BlocklistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.BlocklistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledBlocklist.MatchString(tag)
	})

	if compiledBlocklist.MatchString(entry.URL) || compiledBlocklist.MatchString(entry.Title) || compiledBlocklist.MatchString(entry.Author) || containsBlockedTag {
		slog.Debug("Blocking entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.BlocklistRules),
		)
		return true
	}

	return false
}

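// isAllowedEntry reports whether the entry matches the feed's keeplist rules.
// When no keeplist is defined, every entry is allowed.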
func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.KeeplistRules == "" {
		return true
	}

	compiledKeeplist, err := regexp.Compile(feed.KeeplistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.KeeplistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledKeeplist.MatchString(tag)
	})

	if compiledKeeplist.MatchString(entry.URL) || compiledKeeplist.MatchString(entry.Title) || compiledKeeplist.MatchString(entry.Author) || containsAllowedTag {
		slog.Debug("Allowing entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.KeeplistRules),
		)
		return true
	}

	return false
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	websiteURL := getUrlFromEntry(feed, entry)

	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
	requestBuilder.WithCookie(feed.Cookie)
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
	requestBuilder.UseProxy(feed.FetchViaProxy)
	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
	requestBuilder.DisableHTTP2(feed.DisableHTTP2)

	content, scraperErr := scraper.ScrapeWebsite(
		requestBuilder,
		websiteURL,
		feed.ScraperRules,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if content != "" {
		entry.Content = content
		if user.ShowReadingTime {
			entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
		}
	}

	rewrite.Rewriter(websiteURL, entry, entry.Feed.RewriteRules)
	entry.Content = sanitizer.Sanitize(websiteURL, entry.Content)

	return nil
}

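// getUrlFromEntry returns the entry URL, optionally rewritten according to the
// feed's URL rewrite rules, which use the form rewrite("search"|"replace").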
func getUrlFromEntry(feed *model.Feed, entry *model.Entry) string {
	var url = entry.URL
	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)

		if len(parts) >= 3 {
			re := regexp.MustCompile(parts[1])
			url = re.ReplaceAllString(entry.URL, parts[2])
			slog.Debug("Rewriting entry URL",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", url),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
			)
		} else {
			slog.Debug("Cannot find search and replace terms for replace rule",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", url),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("url_rewrite_rules", feed.UrlRewriteRules),
			)
		}
	}
	return url
}

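// updateEntryReadingTime sets the entry reading time: the video duration for
// new YouTube and Odysee entries, the stored value for known ones, and a
// content-based estimate otherwise.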
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
	if !user.ShowReadingTime {
		slog.Debug("Skip reading time estimation for this user", slog.Int64("user_id", user.ID))
		return
	}

	if shouldFetchYouTubeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchYouTubeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch YouTube watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchOdyseeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchOdyseeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Odysee watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	// Fall back to the content-based estimate for regular entries and for
	// videos whose watch time could not be fetched.
	if entry.ReadingTime == 0 {
		entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
	}
}

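// shouldFetchYouTubeWatchTime reports whether the entry points to a YouTube
// video and the instance is configured to fetch watch time.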
func shouldFetchYouTubeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchYouTubeWatchTime() {
		return false
	}
	matches := youtubeRegex.FindStringSubmatch(entry.URL)
	urlMatchesYouTubePattern := len(matches) == 2
	return urlMatchesYouTubePattern
}

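// shouldFetchOdyseeWatchTime reports whether the entry points to an Odysee
// video and the instance is configured to fetch watch time.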
func shouldFetchOdyseeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchOdyseeWatchTime() {
		return false
	}
	matches := odyseeRegex.FindStringSubmatch(entry.URL)
	return matches != nil
}

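// fetchYouTubeWatchTime downloads the YouTube page and returns the video
// duration in minutes, parsed from the ISO 8601 value of the duration meta tag.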
func fetchYouTubeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch YouTube page", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	durs, exists := doc.Find(`meta[itemprop="duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := parseISO8601(durs)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur.Minutes()), nil
}

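// fetchOdyseeWatchTime downloads the Odysee page and returns the video
// duration in minutes, extracted from the og:video:duration meta tag.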
func fetchOdyseeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch Odysee watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	// The og:video:duration meta tag contains the watch time in seconds.
	durs, exists := doc.Find(`meta[property="og:video:duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := strconv.ParseInt(durs, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur / 60), nil
}

// parseISO8601 parses an ISO 8601 duration string.
func parseISO8601(from string) (time.Duration, error) {
	var match []string
	var d time.Duration

	if iso8601Regex.MatchString(from) {
		match = iso8601Regex.FindStringSubmatch(from)
	} else {
		return 0, errors.New("could not parse duration string")
	}

	for i, name := range iso8601Regex.SubexpNames() {
		part := match[i]
		if i == 0 || name == "" || part == "" {
			continue
		}

		val, err := strconv.ParseInt(part, 10, 64)
		if err != nil {
			return 0, err
		}

		switch name {
		case "hour":
			d += time.Duration(val) * time.Hour
		case "minute":
			d += time.Duration(val) * time.Minute
		case "second":
			d += time.Duration(val) * time.Second
		default:
			return 0, fmt.Errorf("unknown field %s", name)
		}
	}

	return d, nil
}

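// isRecentEntry reports whether the entry is newer than the configured maximum
// age (config.Opts.FilterEntryMaxAgeDays); a value of 0 disables the filter.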
func isRecentEntry(entry *model.Entry) bool {
	if config.Opts.FilterEntryMaxAgeDays() == 0 || entry.Date.After(time.Now().AddDate(0, 0, -config.Opts.FilterEntryMaxAgeDays())) {
		return true
	}
	return false
}