processor.go

// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor

import (
	"errors"
	"fmt"
	"log/slog"
	"regexp"
	"strconv"
	"time"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/metric"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/readingtime"
	"miniflux.app/v2/internal/reader/rewrite"
	"miniflux.app/v2/internal/reader/sanitizer"
	"miniflux.app/v2/internal/reader/scraper"
	"miniflux.app/v2/internal/storage"

	"github.com/PuerkitoBio/goquery"
)

var (
	youtubeRegex           = regexp.MustCompile(`youtube\.com/watch\?v=(.*)`)
	odyseeRegex            = regexp.MustCompile(`^https://odysee\.com`)
	iso8601Regex           = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`)
	customReplaceRuleRegex = regexp.MustCompile(`rewrite\("(.*)"\|"(.*)"\)`)
)

// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
	var filteredEntries model.Entries

	// Process older entries first.
	for i := len(feed.Entries) - 1; i >= 0; i-- {
		entry := feed.Entries[i]

		slog.Debug("Processing entry",
			slog.Int64("user_id", user.ID),
			slog.Int64("entry_id", entry.ID),
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
		)

		if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) {
			continue
		}

		websiteURL := getUrlFromEntry(feed, entry)
		entryIsNew := !store.EntryURLExists(feed.ID, entry.URL)
		if feed.Crawler && (entryIsNew || forceRefresh) {
			slog.Debug("Scraping entry",
				slog.Int64("user_id", user.ID),
				slog.Int64("entry_id", entry.ID),
				slog.String("entry_url", entry.URL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
			)

			startTime := time.Now()

			requestBuilder := fetcher.NewRequestBuilder()
			requestBuilder.WithUserAgent(feed.UserAgent)
			requestBuilder.WithCookie(feed.Cookie)
			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
			requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
			requestBuilder.UseProxy(feed.FetchViaProxy)
			requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)

			content, scraperErr := scraper.ScrapeWebsite(
				requestBuilder,
				websiteURL,
				feed.ScraperRules,
			)

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				slog.Warn("Unable to scrape entry",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", scraperErr),
				)
			} else if content != "" {
				// We replace the entry content only if the scraper doesn't return any error.
				entry.Content = content
			}
		}

		rewrite.Rewriter(websiteURL, entry, feed.RewriteRules)

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered.
		entry.Content = sanitizer.Sanitize(websiteURL, entry.Content)

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}

	feed.Entries = filteredEntries
}
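
// isBlockedEntry reports whether the entry title matches the feed's
// blocklist regex. When no blocklist rule is configured, nothing is blocked.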
func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.BlocklistRules != "" {
		match, _ := regexp.MatchString(feed.BlocklistRules, entry.Title)
		if match {
			slog.Debug("Blocking entry based on rule",
				slog.Int64("entry_id", entry.ID),
				slog.String("entry_url", entry.URL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("rule", feed.BlocklistRules),
			)
			return true
		}
	}
	return false
}
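
// isAllowedEntry reports whether the entry title matches the feed's
// keeplist regex. When no keeplist rule is configured, every entry is allowed.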
func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.KeeplistRules != "" {
		match, _ := regexp.MatchString(feed.KeeplistRules, entry.Title)
		if match {
			slog.Debug("Allow entry based on rule",
				slog.Int64("entry_id", entry.ID),
				slog.String("entry_url", entry.URL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("rule", feed.KeeplistRules),
			)
			return true
		}
		return false
	}
	return true
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	websiteURL := getUrlFromEntry(feed, entry)

	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithUserAgent(feed.UserAgent)
	requestBuilder.WithCookie(feed.Cookie)
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
	requestBuilder.UseProxy(feed.FetchViaProxy)
	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)

	content, scraperErr := scraper.ScrapeWebsite(
		requestBuilder,
		websiteURL,
		feed.ScraperRules,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if content != "" {
		entry.Content = content
		entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
	}

	rewrite.Rewriter(websiteURL, entry, entry.Feed.RewriteRules)
	entry.Content = sanitizer.Sanitize(websiteURL, entry.Content)

	return nil
}
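
// getUrlFromEntry returns the entry URL, applying the feed's URL rewrite rule
// (a `rewrite("search"|"replace")` expression) when one is configured.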
func getUrlFromEntry(feed *model.Feed, entry *model.Entry) string {
	var url = entry.URL
	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)
		if len(parts) >= 3 {
			re := regexp.MustCompile(parts[1])
			url = re.ReplaceAllString(entry.URL, parts[2])
			slog.Debug("Rewriting entry URL",
				slog.Int64("entry_id", entry.ID),
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", url),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
			)
		} else {
			slog.Debug("Cannot find search and replace terms for replace rule",
				slog.Int64("entry_id", entry.ID),
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", url),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("url_rewrite_rules", feed.UrlRewriteRules),
			)
		}
	}
	return url
}
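
// updateEntryReadingTime computes the entry reading time: the video duration
// for YouTube and Odysee entries, otherwise an estimate based on the content
// length and the user's reading speed.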
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
	if shouldFetchYouTubeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchYouTubeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch YouTube watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(entry, feed)
		}
	}

	if shouldFetchOdyseeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchOdyseeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Odysee watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(entry, feed)
		}
	}

	// Fall back to an estimate when the watch time could not be fetched,
	// and for regular non-video entries.
	if entry.ReadingTime == 0 {
		entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
	}
}
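
// shouldFetchYouTubeWatchTime reports whether YouTube watch time fetching is
// enabled and the entry URL matches the YouTube watch URL pattern.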
func shouldFetchYouTubeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchYouTubeWatchTime() {
		return false
	}
	matches := youtubeRegex.FindStringSubmatch(entry.URL)
	urlMatchesYouTubePattern := len(matches) == 2
	return urlMatchesYouTubePattern
}
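
// shouldFetchOdyseeWatchTime reports whether Odysee watch time fetching is
// enabled and the entry URL points to odysee.com.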
func shouldFetchOdyseeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchOdyseeWatchTime() {
		return false
	}
	matches := odyseeRegex.FindStringSubmatch(entry.URL)
	return matches != nil
}
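
// fetchYouTubeWatchTime downloads the YouTube page and extracts the video
// duration, in minutes, from the `meta[itemprop="duration"]` tag.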
func fetchYouTubeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch YouTube page", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	durs, exists := doc.Find(`meta[itemprop="duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := parseISO8601(durs)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur.Minutes()), nil
}
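
// fetchOdyseeWatchTime downloads the Odysee page and extracts the video
// duration, in minutes, from the `meta[property="og:video:duration"]` tag.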
func fetchOdyseeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch Odysee watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	// durs contains the video watch time in seconds.
	durs, exists := doc.Find(`meta[property="og:video:duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := strconv.ParseInt(durs, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur / 60), nil
}

// parseISO8601 parses an ISO 8601 duration string.
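// Only the time components (hours, minutes, seconds) are supported; date
// components such as "P1D" produce an "unknown field" error. For example,
// "PT1H30M" yields a duration of 1h30m.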
func parseISO8601(from string) (time.Duration, error) {
	var match []string
	var d time.Duration

	if iso8601Regex.MatchString(from) {
		match = iso8601Regex.FindStringSubmatch(from)
	} else {
		return 0, errors.New("could not parse duration string")
	}

	for i, name := range iso8601Regex.SubexpNames() {
		part := match[i]
		if i == 0 || name == "" || part == "" {
			continue
		}

		val, err := strconv.ParseInt(part, 10, 64)
		if err != nil {
			return 0, err
		}

		switch name {
		case "hour":
			d += time.Duration(val) * time.Hour
		case "minute":
			d += time.Duration(val) * time.Minute
		case "second":
			d += time.Duration(val) * time.Second
		default:
			return 0, fmt.Errorf("unknown field %s", name)
		}
	}

	return d, nil
}