processor.go

// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor

import (
	"errors"
	"fmt"
	"math"
	"regexp"
	"strconv"
	"strings"
	"time"
	"unicode/utf8"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/http/client"
	"miniflux.app/v2/internal/integration"
	"miniflux.app/v2/internal/logger"
	"miniflux.app/v2/internal/metric"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/reader/browser"
	"miniflux.app/v2/internal/reader/rewrite"
	"miniflux.app/v2/internal/reader/sanitizer"
	"miniflux.app/v2/internal/reader/scraper"
	"miniflux.app/v2/internal/storage"

	"github.com/PuerkitoBio/goquery"
	"github.com/rylans/getlang"
)
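
// Patterns used to detect YouTube and Odysee video URLs, to parse ISO 8601
// durations, and to parse URL rewrite rules of the form rewrite("search"|"replace").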
var (
	youtubeRegex           = regexp.MustCompile(`youtube\.com/watch\?v=(.*)`)
	odyseeRegex            = regexp.MustCompile(`^https://odysee\.com`)
	iso8601Regex           = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`)
	customReplaceRuleRegex = regexp.MustCompile(`rewrite\("(.*)"\|"(.*)"\)`)
)

// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
	var filteredEntries model.Entries

	// array used for bulk push
	entriesToPush := model.Entries{}

	// Process older entries first
	for i := len(feed.Entries) - 1; i >= 0; i-- {
		entry := feed.Entries[i]

		logger.Debug("[Processor] Processing entry %q from feed %q", entry.URL, feed.FeedURL)

		if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) {
			continue
		}

		url := getUrlFromEntry(feed, entry)
		entryIsNew := !store.EntryURLExists(feed.ID, entry.URL)
		if feed.Crawler && (entryIsNew || forceRefresh) {
			logger.Debug("[Processor] Crawling entry %q from feed %q", url, feed.FeedURL)

			startTime := time.Now()
			content, scraperErr := scraper.Fetch(
				url,
				feed.ScraperRules,
				feed.UserAgent,
				feed.Cookie,
				feed.AllowSelfSignedCertificates,
				feed.FetchViaProxy,
			)

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				logger.Error(`[Processor] Unable to crawl this entry: %q => %v`, entry.URL, scraperErr)
			} else if content != "" {
				// We replace the entry content only if the scraper doesn't return any error.
				entry.Content = content
			}
		}

		rewrite.Rewriter(url, entry, feed.RewriteRules)

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered.
		entry.Content = sanitizer.Sanitize(url, entry.Content)

		if entryIsNew {
			// New entries are collected here and pushed to integrations in a
			// single batch once the whole feed has been processed (see below).
			entriesToPush = append(entriesToPush, entry)
		}

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}

	intg, err := store.Integration(feed.UserID)
	if err != nil {
		logger.Error("[Processor] Get integrations for user %d failed: %v; the refresh process will go on, but no integrations will run this time.", feed.UserID, err)
	} else if intg != nil && len(entriesToPush) > 0 {
		go func() {
			integration.PushEntries(entriesToPush, intg)
		}()
	}

	feed.Entries = filteredEntries
}
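
// isBlockedEntry reports whether the entry title matches the feed's blocklist rules.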
func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.BlocklistRules != "" {
		match, _ := regexp.MatchString(feed.BlocklistRules, entry.Title)
		if match {
			logger.Debug("[Processor] Blocking entry %q from feed %q based on rule %q", entry.Title, feed.FeedURL, feed.BlocklistRules)
			return true
		}
	}
	return false
}
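
// isAllowedEntry reports whether the entry title matches the feed's keeplist rules.
// When no keeplist is configured, every entry is allowed.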
func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.KeeplistRules != "" {
		match, _ := regexp.MatchString(feed.KeeplistRules, entry.Title)
		if match {
			logger.Debug("[Processor] Allow entry %q from feed %q based on rule %q", entry.Title, feed.FeedURL, feed.KeeplistRules)
			return true
		}
		return false
	}
	return true
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	url := getUrlFromEntry(feed, entry)

	content, scraperErr := scraper.Fetch(
		url,
		entry.Feed.ScraperRules,
		entry.Feed.UserAgent,
		entry.Feed.Cookie,
		feed.AllowSelfSignedCertificates,
		feed.FetchViaProxy,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if content != "" {
		entry.Content = content
		entry.ReadingTime = calculateReadingTime(content, user)
	}

	rewrite.Rewriter(url, entry, entry.Feed.RewriteRules)
	entry.Content = sanitizer.Sanitize(url, entry.Content)
	return nil
}
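
// getUrlFromEntry returns the entry URL, optionally rewritten according to the
// feed's URL rewrite rule of the form rewrite("search"|"replace").
// Hypothetical example of such a rule, for illustration only:
// rewrite("^https://example\.org/(.*)$"|"https://example.org/amp/$1")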
func getUrlFromEntry(feed *model.Feed, entry *model.Entry) string {
	var url = entry.URL
	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)

		if len(parts) >= 3 {
			re := regexp.MustCompile(parts[1])
			url = re.ReplaceAllString(entry.URL, parts[2])
			logger.Debug(`[Processor] Rewriting entry URL %s to %s`, entry.URL, url)
		} else {
			logger.Debug("[Processor] Cannot find search and replace terms for replace rule %s", feed.UrlRewriteRules)
		}
	}
	return url
}
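
// updateEntryReadingTime computes the entry reading time. For YouTube and
// Odysee videos it uses the video duration in minutes; for everything else it
// estimates the time from the entry content and the user's reading speed.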
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
	if shouldFetchYouTubeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchYouTubeWatchTime(entry.URL)
			if err != nil {
				logger.Error("[Processor] Unable to fetch YouTube watch time: %q => %v", entry.URL, err)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(entry, feed)
		}
	}

	if shouldFetchOdyseeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchOdyseeWatchTime(entry.URL)
			if err != nil {
				logger.Error("[Processor] Unable to fetch Odysee watch time: %q => %v", entry.URL, err)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(entry, feed)
		}
	}

	// Handle YT error case and non-YT entries.
	if entry.ReadingTime == 0 {
		entry.ReadingTime = calculateReadingTime(entry.Content, user)
	}
}
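
// shouldFetchYouTubeWatchTime reports whether the entry points to a YouTube
// video and fetching watch time is enabled in the configuration.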
func shouldFetchYouTubeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchYouTubeWatchTime() {
		return false
	}
	matches := youtubeRegex.FindStringSubmatch(entry.URL)
	urlMatchesYouTubePattern := len(matches) == 2
	return urlMatchesYouTubePattern
}
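
// shouldFetchOdyseeWatchTime reports whether the entry points to an Odysee
// video and fetching watch time is enabled in the configuration.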
func shouldFetchOdyseeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchOdyseeWatchTime() {
		return false
	}
	matches := odyseeRegex.FindStringSubmatch(entry.URL)
	return matches != nil
}
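
// fetchYouTubeWatchTime downloads the YouTube watch page and returns the video
// duration in minutes, parsed from the ISO 8601 value of the
// meta[itemprop="duration"] tag (e.g. "PT1H30M" yields 90).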
func fetchYouTubeWatchTime(url string) (int, error) {
	clt := client.NewClientWithConfig(url, config.Opts)
	response, browserErr := browser.Exec(clt)
	if browserErr != nil {
		return 0, browserErr
	}

	doc, docErr := goquery.NewDocumentFromReader(response.Body)
	if docErr != nil {
		return 0, docErr
	}

	durs, exists := doc.Find(`meta[itemprop="duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := parseISO8601(durs)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur.Minutes()), nil
}
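
// fetchOdyseeWatchTime downloads the Odysee page and returns the video
// duration in minutes, read from the og:video:duration meta tag (in seconds).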
func fetchOdyseeWatchTime(url string) (int, error) {
	clt := client.NewClientWithConfig(url, config.Opts)
	response, browserErr := browser.Exec(clt)
	if browserErr != nil {
		return 0, browserErr
	}

	doc, docErr := goquery.NewDocumentFromReader(response.Body)
	if docErr != nil {
		return 0, docErr
	}

	// durs contains the video watch time in seconds.
	durs, exists := doc.Find(`meta[property="og:video:duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := strconv.ParseInt(durs, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur / 60), nil
}

// parseISO8601 parses an ISO 8601 duration string.
func parseISO8601(from string) (time.Duration, error) {
	var match []string
	var d time.Duration

	if iso8601Regex.MatchString(from) {
		match = iso8601Regex.FindStringSubmatch(from)
	} else {
		return 0, errors.New("could not parse duration string")
	}

	for i, name := range iso8601Regex.SubexpNames() {
		part := match[i]
		if i == 0 || name == "" || part == "" {
			continue
		}

		val, err := strconv.ParseInt(part, 10, 64)
		if err != nil {
			return 0, err
		}

		switch name {
		case "hour":
			d = d + (time.Duration(val) * time.Hour)
		case "minute":
			d = d + (time.Duration(val) * time.Minute)
		case "second":
			d = d + (time.Duration(val) * time.Second)
		default:
			return 0, fmt.Errorf("unknown field %s", name)
		}
	}

	return d, nil
}
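
// calculateReadingTime estimates the reading time of the stripped entry
// content: it counts runes for Korean, Chinese and Japanese text (divided by
// the user's CJK reading speed) and words otherwise (divided by the user's
// default reading speed).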
func calculateReadingTime(content string, user *model.User) int {
	sanitizedContent := sanitizer.StripTags(content)
	languageInfo := getlang.FromString(sanitizedContent)

	var timeToReadInt int
	if languageInfo.LanguageCode() == "ko" || languageInfo.LanguageCode() == "zh" || languageInfo.LanguageCode() == "jp" {
		timeToReadInt = int(math.Ceil(float64(utf8.RuneCountInString(sanitizedContent)) / float64(user.CJKReadingSpeed)))
	} else {
		nbOfWords := len(strings.Fields(sanitizedContent))
		timeToReadInt = int(math.Ceil(float64(nbOfWords) / float64(user.DefaultReadingSpeed)))
	}

	return timeToReadInt
}