  1. // Copyright 2017 Frédéric Guillot. All rights reserved.
  2. // Use of this source code is governed by the Apache 2.0
  3. // license that can be found in the LICENSE file.
  4. package scraper
  5. import (
  6. "errors"
  7. "io"
  8. "strings"
  9. "github.com/PuerkitoBio/goquery"
  10. "github.com/miniflux/miniflux/http"
  11. "github.com/miniflux/miniflux/logger"
  12. "github.com/miniflux/miniflux/reader/readability"
  13. "github.com/miniflux/miniflux/url"
  14. )
  15. // Fetch download a web page a returns relevant contents.
  16. func Fetch(websiteURL, rules string) (string, error) {
  17. client := http.NewClient(websiteURL)
  18. response, err := client.Get()
  19. if err != nil {
  20. return "", err
  21. }
  22. if response.HasServerFailure() {
  23. return "", errors.New("unable to download web page")
  24. }
  25. page, err := response.NormalizeBodyEncoding()
  26. if err != nil {
  27. return "", err
  28. }
  29. // The entry URL could be a redirect somewhere else.
  30. websiteURL = response.EffectiveURL
  31. if rules == "" {
  32. rules = getPredefinedScraperRules(websiteURL)
  33. }
  34. var content string
  35. if rules != "" {
  36. logger.Debug(`[Scraper] Using rules "%s" for "%s"`, rules, websiteURL)
  37. content, err = scrapContent(page, rules)
  38. } else {
  39. logger.Debug(`[Scraper] Using readability for "%s"`, websiteURL)
  40. content, err = readability.ExtractContent(page)
  41. }
  42. if err != nil {
  43. return "", err
  44. }
  45. return content, nil
  46. }
  47. func scrapContent(page io.Reader, rules string) (string, error) {
  48. document, err := goquery.NewDocumentFromReader(page)
  49. if err != nil {
  50. return "", err
  51. }
  52. contents := ""
  53. document.Find(rules).Each(func(i int, s *goquery.Selection) {
  54. var content string
  55. // For some inline elements, we get the parent.
  56. if s.Is("img") {
  57. content, _ = s.Parent().Html()
  58. } else {
  59. content, _ = s.Html()
  60. }
  61. contents += content
  62. })
  63. return contents, nil
  64. }
  65. func getPredefinedScraperRules(websiteURL string) string {
  66. urlDomain := url.Domain(websiteURL)
  67. for domain, rules := range predefinedRules {
  68. if strings.Contains(urlDomain, domain) {
  69. return rules
  70. }
  71. }
  72. return ""
  73. }