Browse Source

add proxy arg in scraper.Fetch

hulb 4 years ago
parent
commit
01f678c3b1
2 changed files with 6 additions and 1 deletion
  1. 2 0
      reader/processor/processor.go
  2. 4 1
      reader/scraper/scraper.go

+ 2 - 0
reader/processor/processor.go

@@ -56,6 +56,7 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed) {
 				feed.UserAgent,
 				feed.Cookie,
 				feed.AllowSelfSignedCertificates,
+				feed.FetchViaProxy,
 			)
 
 			if config.Opts.HasMetricsCollector() {
@@ -118,6 +119,7 @@ func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry) error {
 		entry.Feed.UserAgent,
 		entry.Feed.Cookie,
 		feed.AllowSelfSignedCertificates,
+		feed.FetchViaProxy,
 	)
 
 	if config.Opts.HasMetricsCollector() {

+ 4 - 1
reader/scraper/scraper.go

@@ -20,10 +20,13 @@ import (
 )
 
 // Fetch downloads a web page and returns relevant contents.
-func Fetch(websiteURL, rules, userAgent string, cookie string, allowSelfSignedCertificates bool) (string, error) {
+func Fetch(websiteURL, rules, userAgent string, cookie string, allowSelfSignedCertificates, useProxy bool) (string, error) {
 	clt := client.NewClientWithConfig(websiteURL, config.Opts)
 	clt.WithUserAgent(userAgent)
 	clt.WithCookie(cookie)
+	if useProxy {
+		clt.WithProxy()
+	}
 	clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
 
 	response, err := clt.Get()