processor.go

// Copyright 2018 Frédéric Guillot. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.

package processor

import (
	"errors"
	"fmt"
	"math"
	"regexp"
	"strconv"
	"strings"
	"time"
	"unicode/utf8"

	"miniflux.app/config"
	"miniflux.app/http/client"
	"miniflux.app/logger"
	"miniflux.app/metric"
	"miniflux.app/model"
	"miniflux.app/reader/browser"
	"miniflux.app/reader/rewrite"
	"miniflux.app/reader/sanitizer"
	"miniflux.app/reader/scraper"
	"miniflux.app/storage"

	"github.com/PuerkitoBio/goquery"
	"github.com/rylans/getlang"
)

var (
	youtubeRegex = regexp.MustCompile(`youtube\.com/watch\?v=(.*)`)
	iso8601Regex = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`)
)
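
// Examples of inputs matched by the regular expressions above:
//
//	youtubeRegex:  https://www.youtube.com/watch?v=VIDEO_ID
//	iso8601Regex:  PT1H30M15S (1 hour, 30 minutes, 15 seconds)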

// ProcessFeedEntries downloads the original web page for entries and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed) {
	var filteredEntries model.Entries

	for _, entry := range feed.Entries {
		logger.Debug("[Processor] Processing entry %q from feed %q", entry.URL, feed.FeedURL)

		if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) {
			continue
		}

		entryIsNew := !store.EntryURLExists(feed.ID, entry.URL)
		if feed.Crawler && entryIsNew {
			logger.Debug("[Processor] Crawling entry %q from feed %q", entry.URL, feed.FeedURL)

			startTime := time.Now()
			content, scraperErr := scraper.Fetch(
				entry.URL,
				feed.ScraperRules,
				feed.UserAgent,
				feed.AllowSelfSignedCertificates,
			)

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				logger.Error(`[Processor] Unable to crawl this entry: %q => %v`, entry.URL, scraperErr)
			} else if content != "" {
				// We replace the entry content only if the scraper doesn't return any error.
				entry.Content = content
			}
		}

		entry.Content = rewrite.Rewriter(entry.URL, entry.Content, feed.RewriteRules)

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered.
		entry.Content = sanitizer.Sanitize(entry.URL, entry.Content)

		updateEntryReadingTime(store, feed, entry, entryIsNew)
		filteredEntries = append(filteredEntries, entry)
	}

	feed.Entries = filteredEntries
}
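
// isBlockedEntry reports whether the entry title matches the feed block-list rules (a regular expression).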
func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.BlocklistRules != "" {
		match, _ := regexp.MatchString(feed.BlocklistRules, entry.Title)
		if match {
			logger.Debug("[Processor] Blocking entry %q from feed %q based on rule %q", entry.Title, feed.FeedURL, feed.BlocklistRules)
			return true
		}
	}
	return false
}
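
// isAllowedEntry reports whether the entry title matches the feed keep-list rules;
// entries are always allowed when no keep-list is defined.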
func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.KeeplistRules != "" {
		match, _ := regexp.MatchString(feed.KeeplistRules, entry.Title)
		if match {
			logger.Debug("[Processor] Allowing entry %q from feed %q based on rule %q", entry.Title, feed.FeedURL, feed.KeeplistRules)
			return true
		}
		return false
	}
	return true
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry) error {
	startTime := time.Now()
	content, scraperErr := scraper.Fetch(
		entry.URL,
		entry.Feed.ScraperRules,
		entry.Feed.UserAgent,
		feed.AllowSelfSignedCertificates,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	content = rewrite.Rewriter(entry.URL, content, entry.Feed.RewriteRules)
	content = sanitizer.Sanitize(entry.URL, content)

	if content != "" {
		entry.Content = content
		entry.ReadingTime = calculateReadingTime(content)
	}

	return nil
}
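
// updateEntryReadingTime sets the entry reading time: the YouTube watch duration
// for new YouTube entries, the stored value for existing ones, and a text-based
// estimate otherwise.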
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool) {
	if shouldFetchYouTubeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchYouTubeWatchTime(entry.URL)
			if err != nil {
				logger.Error("[Processor] Unable to fetch YouTube watch time: %q => %v", entry.URL, err)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(entry, feed)
		}
	}

	// Handle the YouTube error case and non-YouTube entries.
	if entry.ReadingTime == 0 {
		entry.ReadingTime = calculateReadingTime(entry.Content)
	}
}
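
// shouldFetchYouTubeWatchTime reports whether the watch time should be fetched:
// the feature must be enabled in the configuration and the entry URL must point
// to a YouTube video.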
func shouldFetchYouTubeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchYouTubeWatchTime() {
		return false
	}
	matches := youtubeRegex.FindStringSubmatch(entry.URL)
	urlMatchesYouTubePattern := len(matches) == 2
	return urlMatchesYouTubePattern
}
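
// fetchYouTubeWatchTime downloads the YouTube watch page and returns the video
// duration in minutes, parsed from the duration meta tag.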
func fetchYouTubeWatchTime(url string) (int, error) {
	clt := client.NewClientWithConfig(url, config.Opts)
	response, browserErr := browser.Exec(clt)
	if browserErr != nil {
		return 0, browserErr
	}

	doc, docErr := goquery.NewDocumentFromReader(response.Body)
	if docErr != nil {
		return 0, docErr
	}

	durs, exists := doc.Find(`meta[itemprop="duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := parseISO8601(durs)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur.Minutes()), nil
}

// parseISO8601 parses an ISO 8601 duration string, for example "PT1H30M15S".
func parseISO8601(from string) (time.Duration, error) {
	var match []string
	var d time.Duration

	if iso8601Regex.MatchString(from) {
		match = iso8601Regex.FindStringSubmatch(from)
	} else {
		return 0, errors.New("could not parse duration string")
	}

	for i, name := range iso8601Regex.SubexpNames() {
		part := match[i]
		if i == 0 || name == "" || part == "" {
			continue
		}

		val, err := strconv.ParseInt(part, 10, 64)
		if err != nil {
			return 0, err
		}

		switch name {
		case "hour":
			d = d + (time.Duration(val) * time.Hour)
		case "minute":
			d = d + (time.Duration(val) * time.Minute)
		case "second":
			d = d + (time.Duration(val) * time.Second)
		default:
			return 0, fmt.Errorf("unknown field %s", name)
		}
	}

	return d, nil
}
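
// calculateReadingTime returns an estimated reading time in minutes, based on
// roughly 500 characters per minute for Chinese, Japanese, and Korean text and
// 265 words per minute for other languages.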
func calculateReadingTime(content string) int {
	sanitizedContent := sanitizer.StripTags(content)
	languageInfo := getlang.FromString(sanitizedContent)

	var timeToReadInt int
	// Note: the ISO 639-1 code for Japanese is "ja" (not "jp").
	if languageInfo.LanguageCode() == "ko" || languageInfo.LanguageCode() == "zh" || languageInfo.LanguageCode() == "ja" {
		timeToReadInt = int(math.Ceil(float64(utf8.RuneCountInString(sanitizedContent)) / 500))
	} else {
		nbOfWords := len(strings.Fields(sanitizedContent))
		timeToReadInt = int(math.Ceil(float64(nbOfWords) / 265))
	}

	return timeToReadInt
}