scraper.go

// Copyright 2017 Frédéric Guillot. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.

package scraper

import (
	"errors"
	"io"
	"log"
	"strings"

	"github.com/PuerkitoBio/goquery"
	"github.com/miniflux/miniflux/http"
	"github.com/miniflux/miniflux/reader/readability"
	"github.com/miniflux/miniflux/url"
)
// Fetch downloads a web page and returns its relevant contents.
func Fetch(websiteURL, rules string) (string, error) {
	client := http.NewClient(websiteURL)
	response, err := client.Get()
	if err != nil {
		return "", err
	}

	if response.HasServerFailure() {
		return "", errors.New("unable to download web page")
	}

	page, err := response.NormalizeBodyEncoding()
	if err != nil {
		return "", err
	}

	// Fall back to the package's predefined per-domain rules when the
	// caller did not supply any.
	if rules == "" {
		rules = getPredefinedScraperRules(websiteURL)
	}

	var content string
	if rules != "" {
		log.Printf(`[Scraper] Using rules "%s" for "%s"`, rules, websiteURL)
		content, err = scrapContent(page, rules)
	} else {
		log.Printf(`[Scraper] Using readability for "%s"`, websiteURL)
		content, err = readability.ExtractContent(page)
	}

	if err != nil {
		return "", err
	}

	return content, nil
}
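// A minimal usage sketch (not part of the original file; the URL below is
// hypothetical). Passing an empty rules string lets Fetch fall back to
// predefined rules or readability extraction:
//
//	content, err := Fetch("https://example.org/article", "")
//	if err != nil {
//		log.Println(err)
//	}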
// scrapContent returns the concatenated inner HTML of every element of the
// page matching the given CSS selector rules.
func scrapContent(page io.Reader, rules string) (string, error) {
	document, err := goquery.NewDocumentFromReader(page)
	if err != nil {
		return "", err
	}

	contents := ""
	document.Find(rules).Each(func(i int, s *goquery.Selection) {
		var content string

		// For some inline elements, we get the parent.
		if s.Is("img") {
			content, _ = s.Parent().Html()
		} else {
			content, _ = s.Html()
		}

		contents += content
	})

	return contents, nil
}
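// A minimal sketch of scrapContent's behavior (the HTML and selector below
// are hypothetical examples, not from the original file):
//
//	html := `<div><p class="post">Hello</p><p class="post">World</p></div>`
//	content, _ := scrapContent(strings.NewReader(html), "p.post")
//	// content == "HelloWorld"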
// getPredefinedScraperRules returns the rules registered for the first
// predefined domain found within the website's domain, or an empty string
// when no domain matches.
func getPredefinedScraperRules(websiteURL string) string {
	urlDomain := url.Domain(websiteURL)

	for domain, rules := range predefinedRules {
		if strings.Contains(urlDomain, domain) {
			return rules
		}
	}

	return ""
}
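// predefinedRules is declared elsewhere in this package. Judging from its use
// above, it maps domain substrings to selector rules; a hypothetical entry
// might look like:
//
//	var predefinedRules = map[string]string{
//		"example.com": "article.main",
//	}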