@@ -14,9 +14,9 @@ import (
 	"miniflux.app/logger"
 	"miniflux.app/model"
 	"miniflux.app/reader/browser"
+	"miniflux.app/reader/filter"
 	"miniflux.app/reader/icon"
 	"miniflux.app/reader/parser"
-	"miniflux.app/reader/processor"
 	"miniflux.app/storage"
 	"miniflux.app/timer"
 )
@@ -63,9 +63,7 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool,
 	subscription.WithClientResponse(response)
 	subscription.CheckedNow()
 
-	feedProcessor := processor.NewFeedProcessor(userID, h.store, subscription)
-	feedProcessor.WithCrawler(crawler)
-	feedProcessor.Process()
+	filter.Apply(h.store, subscription)
 
 	if storeErr := h.store.CreateFeed(subscription); storeErr != nil {
 		return nil, storeErr
@@ -108,22 +106,18 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
 	if response.IsModified(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) {
 		logger.Debug("[Handler:RefreshFeed] Feed #%d has been modified", feedID)
 
-		subscription, parseErr := parser.ParseFeed(response.String())
+		updatedFeed, parseErr := parser.ParseFeed(response.String())
 		if parseErr != nil {
 			originalFeed.WithError(parseErr.Localize(printer))
 			h.store.UpdateFeed(originalFeed)
 			return parseErr
 		}
 
-		feedProcessor := processor.NewFeedProcessor(userID, h.store, subscription)
-		feedProcessor.WithScraperRules(originalFeed.ScraperRules)
-		feedProcessor.WithUserAgent(originalFeed.UserAgent)
-		feedProcessor.WithRewriteRules(originalFeed.RewriteRules)
-		feedProcessor.WithCrawler(originalFeed.Crawler)
-		feedProcessor.Process()
+		originalFeed.Entries = updatedFeed.Entries
+		filter.Apply(h.store, originalFeed)
 
-		// Note: We don't update existing entries when the crawler is enabled (we crawl only inexisting entries).
-		if storeErr := h.store.UpdateEntries(originalFeed.UserID, originalFeed.ID, subscription.Entries, !originalFeed.Crawler); storeErr != nil {
+		// We don't update existing entries when the crawler is enabled (we only crawl entries that do not exist yet).
+		if storeErr := h.store.UpdateEntries(originalFeed.UserID, originalFeed.ID, originalFeed.Entries, !originalFeed.Crawler); storeErr != nil {
 			return storeErr
 		}
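
The hunks above call filter.Apply(h.store, feed) but do not include the filter package itself. Below is a minimal sketch of what such a single entry point might look like; the Feed, Entry, and Store types and the scrape/applyRewriteRules helpers are illustrative assumptions, not the actual miniflux.app/reader/filter code.

// Sketch only: simplified stand-ins for the real miniflux types.
package filter

// Feed stands in for model.Feed; the per-feed settings the removed
// feedProcessor was configured with (WithCrawler, WithScraperRules, ...)
// are read straight from the feed here.
type Feed struct {
	ID           int64
	UserID       int64
	Crawler      bool
	UserAgent    string
	ScraperRules string
	RewriteRules string
	Entries      []*Entry
}

// Entry stands in for model.Entry.
type Entry struct {
	URL     string
	Content string
}

// Store is the small slice of *storage.Storage that Apply would need:
// a lookup used to crawl only entries that are not stored yet.
type Store interface {
	EntryURLExists(userID int64, entryURL string) bool
}

// Apply replaces the builder-style processor.NewFeedProcessor(...).Process()
// sequence with one call: it walks the parsed entries, fetches full content
// for new entries when the crawler is enabled, and applies rewrite rules.
func Apply(store Store, feed *Feed) {
	for _, entry := range feed.Entries {
		if feed.Crawler && !store.EntryURLExists(feed.UserID, entry.URL) {
			if content, err := scrape(entry.URL, feed.ScraperRules, feed.UserAgent); err == nil && content != "" {
				entry.Content = content
			}
		}
		entry.Content = applyRewriteRules(entry.Content, feed.RewriteRules)
	}
}

// scrape and applyRewriteRules are placeholders for the scraper and rewrite
// packages a real implementation would delegate to.
func scrape(url, rules, userAgent string) (string, error) { return "", nil }

func applyRewriteRules(content, rules string) string { return content }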