processor.go

// Copyright 2018 Frédéric Guillot. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.

package processor

import (
	"errors"
	"fmt"
	"math"
	"regexp"
	"strconv"
	"strings"
	"time"
	"unicode/utf8"

	"miniflux.app/config"
	"miniflux.app/http/client"
	"miniflux.app/integration"
	"miniflux.app/logger"
	"miniflux.app/metric"
	"miniflux.app/model"
	"miniflux.app/reader/browser"
	"miniflux.app/reader/rewrite"
	"miniflux.app/reader/sanitizer"
	"miniflux.app/reader/scraper"
	"miniflux.app/storage"

	"github.com/PuerkitoBio/goquery"
	"github.com/rylans/getlang"
)
var (
	youtubeRegex           = regexp.MustCompile(`youtube\.com/watch\?v=(.*)`)
	iso8601Regex           = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`)
	customReplaceRuleRegex = regexp.MustCompile(`rewrite\("(.*)"\|"(.*)"\)`)
)
// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User) {
	var filteredEntries model.Entries

	for _, entry := range feed.Entries {
		logger.Debug("[Processor] Processing entry %q from feed %q", entry.URL, feed.FeedURL)

		if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) {
			continue
		}

		url := getUrlFromEntry(feed, entry)
		entryIsNew := !store.EntryURLExists(feed.ID, entry.URL)
		if feed.Crawler && entryIsNew {
			logger.Debug("[Processor] Crawling entry %q from feed %q", url, feed.FeedURL)

			startTime := time.Now()
			content, scraperErr := scraper.Fetch(
				url,
				feed.ScraperRules,
				feed.UserAgent,
				feed.Cookie,
				feed.AllowSelfSignedCertificates,
				feed.FetchViaProxy,
			)

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				logger.Error(`[Processor] Unable to crawl this entry: %q => %v`, entry.URL, scraperErr)
			} else if content != "" {
				// We replace the entry content only if the scraper doesn't return any error.
				entry.Content = content
			}
		}

		entry.Content = rewrite.Rewriter(url, entry.Content, feed.RewriteRules)

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered.
		entry.Content = sanitizer.Sanitize(url, entry.Content)

		if entryIsNew {
			intg, err := store.Integration(feed.UserID)
			if err != nil {
				logger.Error("[Processor] Get integrations for user %d failed: %v; the refresh process will go on, but no integrations will run this time.", feed.UserID, err)
			} else if intg != nil {
				localEntry := entry
				go func() {
					integration.PushEntry(localEntry, intg)
				}()
			}
		}

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}

	feed.Entries = filteredEntries
}
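
// isBlockedEntry reports whether the entry title matches the feed's blocklist
// regular expression, if one is configured.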
func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.BlocklistRules != "" {
		match, _ := regexp.MatchString(feed.BlocklistRules, entry.Title)
		if match {
			logger.Debug("[Processor] Blocking entry %q from feed %q based on rule %q", entry.Title, feed.FeedURL, feed.BlocklistRules)
			return true
		}
	}
	return false
}
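
// isAllowedEntry reports whether the entry title matches the feed's keeplist
// regular expression. When no keeplist is configured, every entry is allowed.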
func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.KeeplistRules != "" {
		match, _ := regexp.MatchString(feed.KeeplistRules, entry.Title)
		if match {
			logger.Debug("[Processor] Allowing entry %q from feed %q based on rule %q", entry.Title, feed.FeedURL, feed.KeeplistRules)
			return true
		}
		return false
	}
	return true
}
// ProcessEntryWebPage downloads the entry web page and applies the rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	url := getUrlFromEntry(feed, entry)

	content, scraperErr := scraper.Fetch(
		url,
		entry.Feed.ScraperRules,
		entry.Feed.UserAgent,
		entry.Feed.Cookie,
		feed.AllowSelfSignedCertificates,
		feed.FetchViaProxy,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	content = rewrite.Rewriter(url, content, entry.Feed.RewriteRules)
	content = sanitizer.Sanitize(url, content)

	if content != "" {
		entry.Content = content
		entry.ReadingTime = calculateReadingTime(content, user)
	}

	return nil
}
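
// getUrlFromEntry returns the entry URL, optionally transformed by the feed's
// URL rewrite rule of the form rewrite("search-regex"|"replacement"). As a
// hypothetical example (not taken from the codebase), the rule
// rewrite("//mobile\."|"//www.") would point entries at the desktop version of
// a site.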
func getUrlFromEntry(feed *model.Feed, entry *model.Entry) string {
	var url = entry.URL
	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)

		if len(parts) >= 3 {
			re := regexp.MustCompile(parts[1])
			url = re.ReplaceAllString(entry.URL, parts[2])
			logger.Debug(`[Processor] Rewriting entry URL %s to %s`, entry.URL, url)
		} else {
			logger.Debug("[Processor] Cannot find search and replace terms for replace rule %s", feed.UrlRewriteRules)
		}
	}
	return url
}
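
// updateEntryReadingTime sets the entry reading time: the YouTube watch time
// for new YouTube entries, the stored value for existing ones, and otherwise
// an estimate derived from the entry content.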
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
	if shouldFetchYouTubeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchYouTubeWatchTime(entry.URL)
			if err != nil {
				logger.Error("[Processor] Unable to fetch YouTube watch time: %q => %v", entry.URL, err)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(entry, feed)
		}
	}

	// Handle the YouTube error case and non-YouTube entries.
	if entry.ReadingTime == 0 {
		entry.ReadingTime = calculateReadingTime(entry.Content, user)
	}
}
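
// shouldFetchYouTubeWatchTime reports whether fetching YouTube watch time is
// enabled in the configuration and the entry URL matches the YouTube watch URL
// pattern.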
func shouldFetchYouTubeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchYouTubeWatchTime() {
		return false
	}
	matches := youtubeRegex.FindStringSubmatch(entry.URL)
	urlMatchesYouTubePattern := len(matches) == 2
	return urlMatchesYouTubePattern
}
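
// fetchYouTubeWatchTime downloads the YouTube watch page and reads the video
// duration, in minutes, from the <meta itemprop="duration"> tag, whose content
// attribute holds an ISO 8601 duration such as "PT4M13S".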
func fetchYouTubeWatchTime(url string) (int, error) {
	clt := client.NewClientWithConfig(url, config.Opts)
	response, browserErr := browser.Exec(clt)
	if browserErr != nil {
		return 0, browserErr
	}

	doc, docErr := goquery.NewDocumentFromReader(response.Body)
	if docErr != nil {
		return 0, docErr
	}

	durs, exists := doc.Find(`meta[itemprop="duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration has not been found")
	}

	dur, err := parseISO8601(durs)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur.Minutes()), nil
}

// parseISO8601 parses an ISO 8601 duration string.
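// Only the time components are converted: "PT1H30M5S" yields 1h30m5s. Date
// components (years, months, weeks, days) are matched by the regular
// expression but rejected by the switch below.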
func parseISO8601(from string) (time.Duration, error) {
	var match []string
	var d time.Duration

	if iso8601Regex.MatchString(from) {
		match = iso8601Regex.FindStringSubmatch(from)
	} else {
		return 0, errors.New("could not parse duration string")
	}

	for i, name := range iso8601Regex.SubexpNames() {
		part := match[i]
		if i == 0 || name == "" || part == "" {
			continue
		}

		val, err := strconv.ParseInt(part, 10, 64)
		if err != nil {
			return 0, err
		}

		switch name {
		case "hour":
			d += time.Duration(val) * time.Hour
		case "minute":
			d += time.Duration(val) * time.Minute
		case "second":
			d += time.Duration(val) * time.Second
		default:
			return 0, fmt.Errorf("unknown field %s", name)
		}
	}

	return d, nil
}
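
// calculateReadingTime estimates the reading time of the content in minutes.
// CJK text is counted in characters against the user's CJK reading speed;
// other languages are counted in words against the default reading speed.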
func calculateReadingTime(content string, user *model.User) int {
	sanitizedContent := sanitizer.StripTags(content)
	languageInfo := getlang.FromString(sanitizedContent)

	var timeToReadInt int
	// getlang returns ISO 639-1 codes, so Japanese is "ja" (not "jp").
	if languageInfo.LanguageCode() == "ko" || languageInfo.LanguageCode() == "zh" || languageInfo.LanguageCode() == "ja" {
		timeToReadInt = int(math.Ceil(float64(utf8.RuneCountInString(sanitizedContent)) / float64(user.CJKReadingSpeed)))
	} else {
		nbOfWords := len(strings.Fields(sanitizedContent))
		timeToReadInt = int(math.Ceil(float64(nbOfWords) / float64(user.DefaultReadingSpeed)))
	}

	return timeToReadInt
}