  1. // Copyright 2017 Frédéric Guillot. All rights reserved.
  2. // Use of this source code is governed by the Apache 2.0
  3. // license that can be found in the LICENSE file.
  4. package feed // import "miniflux.app/reader/feed"
  5. import (
  6. "fmt"
  7. "time"
  8. "miniflux.app/config"
  9. "miniflux.app/errors"
  10. "miniflux.app/http/client"
  11. "miniflux.app/locale"
  12. "miniflux.app/logger"
  13. "miniflux.app/model"
  14. "miniflux.app/reader/browser"
  15. "miniflux.app/reader/icon"
  16. "miniflux.app/reader/parser"
  17. "miniflux.app/reader/processor"
  18. "miniflux.app/storage"
  19. "miniflux.app/timer"
  20. )
// Localized error message templates for feed creation and refresh failures.
var (
	errDuplicate        = "This feed already exists (%s)" // %s: effective (post-redirect) feed URL
	errNotFound         = "Feed %d not found"             // %d: feed ID
	errCategoryNotFound = "Category not found for this user"
)
// Handler contains all the logic to create and refresh feeds.
type Handler struct {
	store *storage.Storage // persistence layer used for feeds, entries, and icons
}
  30. // CreateFeed fetch, parse and store a new feed.
  31. func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool, userAgent, username, password, scraperRules, rewriteRules, blocklistRules, keeplistRules string, fetchViaProxy bool) (*model.Feed, error) {
  32. defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[Handler:CreateFeed] feedUrl=%s", url))
  33. if !h.store.CategoryExists(userID, categoryID) {
  34. return nil, errors.NewLocalizedError(errCategoryNotFound)
  35. }
  36. request := client.NewClientWithConfig(url, config.Opts)
  37. request.WithCredentials(username, password)
  38. request.WithUserAgent(userAgent)
  39. if fetchViaProxy {
  40. request.WithProxy()
  41. }
  42. response, requestErr := browser.Exec(request)
  43. if requestErr != nil {
  44. return nil, requestErr
  45. }
  46. if h.store.FeedURLExists(userID, response.EffectiveURL) {
  47. return nil, errors.NewLocalizedError(errDuplicate, response.EffectiveURL)
  48. }
  49. subscription, parseErr := parser.ParseFeed(response.BodyAsString())
  50. if parseErr != nil {
  51. return nil, parseErr
  52. }
  53. subscription.UserID = userID
  54. subscription.WithCategoryID(categoryID)
  55. subscription.WithBrowsingParameters(crawler, userAgent, username, password, scraperRules, rewriteRules, blocklistRules, keeplistRules, fetchViaProxy)
  56. subscription.WithClientResponse(response)
  57. subscription.CheckedNow()
  58. processor.ProcessFeedEntries(h.store, subscription)
  59. if storeErr := h.store.CreateFeed(subscription); storeErr != nil {
  60. return nil, storeErr
  61. }
  62. logger.Debug("[Handler:CreateFeed] Feed saved with ID: %d", subscription.ID)
  63. checkFeedIcon(h.store, subscription.ID, subscription.SiteURL, fetchViaProxy)
  64. return subscription, nil
  65. }
// RefreshFeed fetches a feed again and upserts its entries when the remote
// content has changed since the last poll. On any failure, the error is also
// recorded on the feed itself so it is visible in the UI.
func (h *Handler) RefreshFeed(userID, feedID int64) error {
	defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[Handler:RefreshFeed] feedID=%d", feedID))

	// Errors stored on the feed are localized with the owner's language.
	userLanguage := h.store.UserLanguage(userID)
	printer := locale.NewPrinter(userLanguage)

	originalFeed, storeErr := h.store.FeedByID(userID, feedID)
	if storeErr != nil {
		return storeErr
	}

	if originalFeed == nil {
		return errors.NewLocalizedError(errNotFound, feedID)
	}

	// The entry-frequency scheduler needs the weekly entry count to compute
	// the next polling interval; other schedulers ignore it (stays zero).
	weeklyEntryCount := 0
	if config.Opts.PollingScheduler() == model.SchedulerEntryFrequency {
		var weeklyCountErr error
		weeklyEntryCount, weeklyCountErr = h.store.WeeklyFeedEntryCount(userID, feedID)
		if weeklyCountErr != nil {
			return weeklyCountErr
		}
	}

	// Mark the check time and schedule the next poll before fetching, so the
	// schedule advances even if the fetch below fails.
	originalFeed.CheckedNow()
	originalFeed.ScheduleNextCheck(weeklyEntryCount)

	request := client.NewClientWithConfig(originalFeed.FeedURL, config.Opts)
	request.WithCredentials(originalFeed.Username, originalFeed.Password)
	request.WithUserAgent(originalFeed.UserAgent)

	if !originalFeed.IgnoreHTTPCache {
		// Send conditional headers so an unmodified feed can answer with a 304.
		request.WithCacheHeaders(originalFeed.EtagHeader, originalFeed.LastModifiedHeader)
	}

	if originalFeed.FetchViaProxy {
		request.WithProxy()
	}

	response, requestErr := browser.Exec(request)
	if requestErr != nil {
		originalFeed.WithError(requestErr.Localize(printer))
		h.store.UpdateFeedError(originalFeed)
		return requestErr
	}

	// Redirects may change the effective URL; refuse to collide with another
	// feed already owned by the same user.
	if h.store.AnotherFeedURLExists(userID, originalFeed.ID, response.EffectiveURL) {
		storeErr := errors.NewLocalizedError(errDuplicate, response.EffectiveURL)
		originalFeed.WithError(storeErr.Error())
		h.store.UpdateFeedError(originalFeed)
		return storeErr
	}

	if originalFeed.IgnoreHTTPCache || response.IsModified(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) {
		logger.Debug("[Handler:RefreshFeed] Feed #%d has been modified", feedID)

		updatedFeed, parseErr := parser.ParseFeed(response.BodyAsString())
		if parseErr != nil {
			originalFeed.WithError(parseErr.Localize(printer))
			h.store.UpdateFeedError(originalFeed)
			return parseErr
		}

		originalFeed.Entries = updatedFeed.Entries
		processor.ProcessFeedEntries(h.store, originalFeed)

		// We don't update existing entries when the crawler is enabled (we crawl only inexisting entries).
		if storeErr := h.store.RefreshFeedEntries(originalFeed.UserID, originalFeed.ID, originalFeed.Entries, !originalFeed.Crawler); storeErr != nil {
			originalFeed.WithError(storeErr.Error())
			h.store.UpdateFeedError(originalFeed)
			return storeErr
		}

		// We update caching headers only if the feed has been modified,
		// because some websites don't return the same headers when replying with a 304.
		originalFeed.WithClientResponse(response)

		checkFeedIcon(h.store, originalFeed.ID, originalFeed.SiteURL, originalFeed.FetchViaProxy)
	} else {
		logger.Debug("[Handler:RefreshFeed] Feed #%d not modified", feedID)
	}

	// A successful refresh clears any previously recorded error state.
	originalFeed.ResetErrorCounter()

	if storeErr := h.store.UpdateFeed(originalFeed); storeErr != nil {
		originalFeed.WithError(storeErr.Error())
		h.store.UpdateFeedError(originalFeed)
		return storeErr
	}

	return nil
}
  140. // NewFeedHandler returns a feed handler.
  141. func NewFeedHandler(store *storage.Storage) *Handler {
  142. return &Handler{store}
  143. }
  144. func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL string, fetchViaProxy bool) {
  145. if !store.HasIcon(feedID) {
  146. icon, err := icon.FindIcon(websiteURL, fetchViaProxy)
  147. if err != nil {
  148. logger.Debug("CheckFeedIcon: %v (feedID=%d websiteURL=%s)", err, feedID, websiteURL)
  149. } else if icon == nil {
  150. logger.Debug("CheckFeedIcon: No icon found (feedID=%d websiteURL=%s)", feedID, websiteURL)
  151. } else {
  152. if err := store.CreateFeedIcon(feedID, icon); err != nil {
  153. logger.Debug("CheckFeedIcon: %v (feedID=%d websiteURL=%s)", err, feedID, websiteURL)
  154. }
  155. }
  156. }
  157. }