// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor

import (
	"log/slog"
	"regexp"
	"slices"
	"strings"
	"time"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/metric"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/readingtime"
	"miniflux.app/v2/internal/reader/rewrite"
	"miniflux.app/v2/internal/reader/sanitizer"
	"miniflux.app/v2/internal/reader/scraper"
	"miniflux.app/v2/internal/reader/urlcleaner"
	"miniflux.app/v2/internal/storage"

	"github.com/tdewolff/minify/v2"
	"github.com/tdewolff/minify/v2/html"
)
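
// customReplaceRuleRegex captures the search and replacement parts of a URL
// rewrite rule of the form rewrite("search-regexp"|"replacement").
// An illustrative (hypothetical) rule: rewrite("^http://"|"https://")
// would upgrade matching entry URLs to HTTPS.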
var customReplaceRuleRegex = regexp.MustCompile(`rewrite\("(.*)"\|"(.*)"\)`)

// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
	var filteredEntries model.Entries

	// Process older entries first.
	for i := len(feed.Entries) - 1; i >= 0; i-- {
		entry := feed.Entries[i]

		slog.Debug("Processing entry",
			slog.Int64("user_id", user.ID),
			slog.String("entry_url", entry.URL),
			slog.String("entry_hash", entry.Hash),
			slog.String("entry_title", entry.Title),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
		)

		if isBlockedEntry(feed, entry, user) || !isAllowedEntry(feed, entry, user) || !isRecentEntry(entry) {
			continue
		}

		if cleanedURL, err := urlcleaner.RemoveTrackingParameters(entry.URL); err == nil {
			entry.URL = cleanedURL
		}

		pageBaseURL := ""
		rewrittenURL := rewriteEntryURL(feed, entry)
		entry.URL = rewrittenURL
		entryIsNew := store.IsNewEntry(feed.ID, entry.Hash)

		if feed.Crawler && (entryIsNew || forceRefresh) {
			slog.Debug("Scraping entry",
				slog.Int64("user_id", user.ID),
				slog.String("entry_url", entry.URL),
				slog.String("entry_hash", entry.Hash),
				slog.String("entry_title", entry.Title),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.Bool("entry_is_new", entryIsNew),
				slog.Bool("force_refresh", forceRefresh),
				slog.String("rewritten_url", rewrittenURL),
			)

			startTime := time.Now()

			requestBuilder := fetcher.NewRequestBuilder()
			requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
			requestBuilder.WithCookie(feed.Cookie)
			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
			requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
			requestBuilder.UseProxy(feed.FetchViaProxy)
			requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
			requestBuilder.DisableHTTP2(feed.DisableHTTP2)

			scrapedPageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
				requestBuilder,
				rewrittenURL,
				feed.ScraperRules,
			)

			if scrapedPageBaseURL != "" {
				pageBaseURL = scrapedPageBaseURL
			}

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				slog.Warn("Unable to scrape entry",
					slog.Int64("user_id", user.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", scraperErr),
				)
			} else if extractedContent != "" {
				// Replace the entry content only when the scraper did not return an error.
				entry.Content = minifyEntryContent(extractedContent)
			}
		}

		rewrite.Rewriter(rewrittenURL, entry, feed.RewriteRules)

		if pageBaseURL == "" {
			pageBaseURL = rewrittenURL
		}

		// The sanitizer must always run at the end of the process to ensure unsafe HTML is filtered out.
		entry.Content = sanitizer.Sanitize(pageBaseURL, entry.Content)

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}

	feed.Entries = filteredEntries
}
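
// isBlockedEntry reports whether the entry matches one of the user's block
// filter rules or the feed's blocklist regexp. User rules are newline-separated
// "Field=regexp" pairs; illustrative (hypothetical) examples:
//
//	EntryTitle=(?i)sponsored
//	EntryAuthor=^SomeBot$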
func isBlockedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
	if user.BlockFilterEntryRules != "" {
		rules := strings.Split(user.BlockFilterEntryRules, "\n")
		for _, rule := range rules {
			parts := strings.SplitN(rule, "=", 2)
			if len(parts) != 2 {
				// Skip malformed rules instead of panicking on a missing value.
				continue
			}

			var match bool
			switch parts[0] {
			case "EntryTitle":
				match, _ = regexp.MatchString(parts[1], entry.Title)
			case "EntryURL":
				match, _ = regexp.MatchString(parts[1], entry.URL)
			case "EntryCommentsURL":
				match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
			case "EntryContent":
				match, _ = regexp.MatchString(parts[1], entry.Content)
			case "EntryAuthor":
				match, _ = regexp.MatchString(parts[1], entry.Author)
			case "EntryTag":
				match = slices.ContainsFunc(entry.Tags, func(tag string) bool {
					matched, _ := regexp.MatchString(parts[1], tag)
					return matched
				})
			}

			if match {
				slog.Debug("Blocking entry based on rule",
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.String("rule", rule),
				)
				return true
			}
		}
	}

	if feed.BlocklistRules == "" {
		return false
	}

	compiledBlocklist, err := regexp.Compile(feed.BlocklistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.BlocklistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledBlocklist.MatchString(tag)
	})

	if compiledBlocklist.MatchString(entry.URL) || compiledBlocklist.MatchString(entry.Title) || compiledBlocklist.MatchString(entry.Author) || containsBlockedTag {
		slog.Debug("Blocking entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.BlocklistRules),
		)
		return true
	}

	return false
}
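
// isAllowedEntry reports whether the entry passes the user's keep filter rules
// or the feed's keeplist regexp. The rule syntax mirrors isBlockedEntry; when
// neither is configured, every entry is kept.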
func isAllowedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
	if user.KeepFilterEntryRules != "" {
		rules := strings.Split(user.KeepFilterEntryRules, "\n")
		for _, rule := range rules {
			parts := strings.SplitN(rule, "=", 2)
			if len(parts) != 2 {
				// Skip malformed rules instead of panicking on a missing value.
				continue
			}

			var match bool
			switch parts[0] {
			case "EntryTitle":
				match, _ = regexp.MatchString(parts[1], entry.Title)
			case "EntryURL":
				match, _ = regexp.MatchString(parts[1], entry.URL)
			case "EntryCommentsURL":
				match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
			case "EntryContent":
				match, _ = regexp.MatchString(parts[1], entry.Content)
			case "EntryAuthor":
				match, _ = regexp.MatchString(parts[1], entry.Author)
			case "EntryTag":
				match = slices.ContainsFunc(entry.Tags, func(tag string) bool {
					matched, _ := regexp.MatchString(parts[1], tag)
					return matched
				})
			}

			if match {
				slog.Debug("Allowing entry based on rule",
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.String("rule", rule),
				)
				return true
			}
		}
		return false
	}

	if feed.KeeplistRules == "" {
		return true
	}

	compiledKeeplist, err := regexp.Compile(feed.KeeplistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.KeeplistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledKeeplist.MatchString(tag)
	})

	if compiledKeeplist.MatchString(entry.URL) || compiledKeeplist.MatchString(entry.Title) || compiledKeeplist.MatchString(entry.Author) || containsAllowedTag {
		slog.Debug("Allowing entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.KeeplistRules),
		)
		return true
	}

	return false
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	rewrittenEntryURL := rewriteEntryURL(feed, entry)

	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
	requestBuilder.WithCookie(feed.Cookie)
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
	requestBuilder.UseProxy(feed.FetchViaProxy)
	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
	requestBuilder.DisableHTTP2(feed.DisableHTTP2)

	pageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
		requestBuilder,
		rewrittenEntryURL,
		feed.ScraperRules,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if extractedContent != "" {
		entry.Content = minifyEntryContent(extractedContent)
		if user.ShowReadingTime {
			entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
		}
	}

	rewrite.Rewriter(rewrittenEntryURL, entry, entry.Feed.RewriteRules)

	// The sanitizer must always run at the end of the process to ensure unsafe HTML is filtered out.
	entry.Content = sanitizer.Sanitize(pageBaseURL, entry.Content)

	return nil
}
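
// rewriteEntryURL applies the feed's UrlRewriteRules to the entry URL and
// returns the result; the original URL is returned when no valid rule is
// found or its regexp fails to compile.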
func rewriteEntryURL(feed *model.Feed, entry *model.Entry) string {
	rewrittenURL := entry.URL
	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)

		if len(parts) >= 3 {
			re, err := regexp.Compile(parts[1])
			if err != nil {
				slog.Error("Failed on regexp compilation",
					slog.String("url_rewrite_rules", feed.UrlRewriteRules),
					slog.Any("error", err),
				)
				return rewrittenURL
			}
			rewrittenURL = re.ReplaceAllString(entry.URL, parts[2])
			slog.Debug("Rewriting entry URL",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", rewrittenURL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
			)
		} else {
			slog.Debug("Cannot find search and replace terms for replace rule",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", rewrittenURL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("url_rewrite_rules", feed.UrlRewriteRules),
			)
		}
	}

	return rewrittenURL
}
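
// updateEntryReadingTime populates entry.ReadingTime: for video platforms
// (YouTube, Nebula, Odysee, Bilibili) it uses the fetched or stored watch
// time, and otherwise falls back to a word-count estimate.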
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
	if !user.ShowReadingTime {
		slog.Debug("Skip reading time estimation for this user", slog.Int64("user_id", user.ID))
		return
	}

	if shouldFetchYouTubeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchYouTubeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch YouTube watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchNebulaWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchNebulaWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Nebula watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchOdyseeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchOdyseeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Odysee watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchBilibiliWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchBilibiliWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Bilibili watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	// Fall back to a word-count estimate for regular articles and when a
	// watch-time fetch failed above (which leaves ReadingTime at zero).
	if entry.ReadingTime == 0 {
		entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
	}
}
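
// isRecentEntry reports whether the entry date is within the configured
// FilterEntryMaxAgeDays window; a value of zero disables age filtering.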
func isRecentEntry(entry *model.Entry) bool {
	return config.Opts.FilterEntryMaxAgeDays() == 0 || entry.Date.After(time.Now().AddDate(0, 0, -config.Opts.FilterEntryMaxAgeDays()))
}
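
// minifyEntryContent removes insignificant whitespace from scraped HTML while
// keeping end tags and attribute quotes so the markup stays well-formed; on
// minification errors the content is returned unchanged.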
func minifyEntryContent(entryContent string) string {
	m := minify.New()

	// These options are required to avoid breaking the HTML content.
	m.Add("text/html", &html.Minifier{
		KeepEndTags: true,
		KeepQuotes:  true,
	})

	if minifiedHTML, err := m.String("text/html", entryContent); err == nil {
		entryContent = minifiedHTML
	}

	return entryContent
}