// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor

import (
	"errors"
	"fmt"
	"log/slog"
	"regexp"
	"slices"
	"strconv"
	"strings"
	"time"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/metric"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/readingtime"
	"miniflux.app/v2/internal/reader/rewrite"
	"miniflux.app/v2/internal/reader/sanitizer"
	"miniflux.app/v2/internal/reader/scraper"
	"miniflux.app/v2/internal/reader/urlcleaner"
	"miniflux.app/v2/internal/storage"

	"github.com/PuerkitoBio/goquery"
	"github.com/tdewolff/minify/v2"
	"github.com/tdewolff/minify/v2/html"
)
var (
	youtubeRegex           = regexp.MustCompile(`youtube\.com/watch\?v=(.*)$`)
	nebulaRegex            = regexp.MustCompile(`^https://nebula\.tv`)
	odyseeRegex            = regexp.MustCompile(`^https://odysee\.com`)
	iso8601Regex           = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`)
	customReplaceRuleRegex = regexp.MustCompile(`rewrite\("(.*)"\|"(.*)"\)`)
)
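
// A feed URL rewrite rule matched by customReplaceRuleRegex takes the form
// rewrite("search-regex"|"replacement"). A hypothetical example:
// rewrite("^https://example\.org/amp/(.*)$"|"https://example.org/$1").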
// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
	var filteredEntries model.Entries

	// Process older entries first.
	for i := len(feed.Entries) - 1; i >= 0; i-- {
		entry := feed.Entries[i]

		slog.Debug("Processing entry",
			slog.Int64("user_id", user.ID),
			slog.String("entry_url", entry.URL),
			slog.String("entry_hash", entry.Hash),
			slog.String("entry_title", entry.Title),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
		)

		if isBlockedEntry(feed, entry, user) || !isAllowedEntry(feed, entry, user) || !isRecentEntry(entry) {
			continue
		}

		if cleanedURL, err := urlcleaner.RemoveTrackingParameters(entry.URL); err == nil {
			entry.URL = cleanedURL
		}

		rewrittenURL := rewriteEntryURL(feed, entry)
		entryIsNew := store.IsNewEntry(feed.ID, entry.Hash)

		if feed.Crawler && (entryIsNew || forceRefresh) {
			slog.Debug("Scraping entry",
				slog.Int64("user_id", user.ID),
				slog.String("entry_url", entry.URL),
				slog.String("entry_hash", entry.Hash),
				slog.String("entry_title", entry.Title),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.Bool("entry_is_new", entryIsNew),
				slog.Bool("force_refresh", forceRefresh),
				slog.String("rewritten_url", rewrittenURL),
			)

			startTime := time.Now()

			requestBuilder := fetcher.NewRequestBuilder()
			requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
			requestBuilder.WithCookie(feed.Cookie)
			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
			requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
			requestBuilder.UseProxy(feed.FetchViaProxy)
			requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
			requestBuilder.DisableHTTP2(feed.DisableHTTP2)

			content, scraperErr := scraper.ScrapeWebsite(
				requestBuilder,
				rewrittenURL,
				feed.ScraperRules,
			)

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				slog.Warn("Unable to scrape entry",
					slog.Int64("user_id", user.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", scraperErr),
				)
			} else if content != "" {
				// Replace the entry content only if the scraper did not return an error.
				entry.Content = minifyEntryContent(content)
			}
		}

		rewrite.Rewriter(rewrittenURL, entry, feed.RewriteRules)

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered out.
		entry.Content = sanitizer.Sanitize(rewrittenURL, entry.Content)

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}

	feed.Entries = filteredEntries
}
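
// isBlockedEntry returns true when the entry matches one of the user's block
// filter rules or the feed's blocklist regex. Each user rule is one
// "Field=regex" pair per line; a hypothetical example: EntryTitle=(?i)sponsored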
func isBlockedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
	if user.BlockFilterEntryRules != "" {
		rules := strings.Split(user.BlockFilterEntryRules, "\n")
		for _, rule := range rules {
			parts := strings.SplitN(rule, "=", 2)
			if len(parts) != 2 {
				// Skip malformed rules instead of panicking on a missing regex part.
				continue
			}

			var match bool
			switch parts[0] {
			case "EntryTitle":
				match, _ = regexp.MatchString(parts[1], entry.Title)
			case "EntryURL":
				match, _ = regexp.MatchString(parts[1], entry.URL)
			case "EntryCommentsURL":
				match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
			case "EntryContent":
				match, _ = regexp.MatchString(parts[1], entry.Content)
			case "EntryAuthor":
				match, _ = regexp.MatchString(parts[1], entry.Author)
			case "EntryTag":
				match = slices.ContainsFunc(entry.Tags, func(tag string) bool {
					matched, _ := regexp.MatchString(parts[1], tag)
					return matched
				})
			}

			if match {
				slog.Debug("Blocking entry based on rule",
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.String("rule", rule),
				)
				return true
			}
		}
	}

	if feed.BlocklistRules == "" {
		return false
	}

	compiledBlocklist, err := regexp.Compile(feed.BlocklistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.BlocklistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledBlocklist.MatchString(tag)
	})

	if compiledBlocklist.MatchString(entry.URL) || compiledBlocklist.MatchString(entry.Title) || compiledBlocklist.MatchString(entry.Author) || containsBlockedTag {
		slog.Debug("Blocking entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.BlocklistRules),
		)
		return true
	}
	return false
}
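
// isAllowedEntry returns true when the entry matches one of the user's keep
// filter rules (same "Field=regex" format as the block rules) or the feed's
// keeplist regex. With no rules configured, every entry is allowed.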
func isAllowedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
	if user.KeepFilterEntryRules != "" {
		rules := strings.Split(user.KeepFilterEntryRules, "\n")
		for _, rule := range rules {
			parts := strings.SplitN(rule, "=", 2)
			if len(parts) != 2 {
				// Skip malformed rules instead of panicking on a missing regex part.
				continue
			}

			var match bool
			switch parts[0] {
			case "EntryTitle":
				match, _ = regexp.MatchString(parts[1], entry.Title)
			case "EntryURL":
				match, _ = regexp.MatchString(parts[1], entry.URL)
			case "EntryCommentsURL":
				match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
			case "EntryContent":
				match, _ = regexp.MatchString(parts[1], entry.Content)
			case "EntryAuthor":
				match, _ = regexp.MatchString(parts[1], entry.Author)
			case "EntryTag":
				match = slices.ContainsFunc(entry.Tags, func(tag string) bool {
					matched, _ := regexp.MatchString(parts[1], tag)
					return matched
				})
			}

			if match {
				slog.Debug("Allowing entry based on rule",
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.String("rule", rule),
				)
				return true
			}
		}
		return false
	}

	if feed.KeeplistRules == "" {
		return true
	}

	compiledKeeplist, err := regexp.Compile(feed.KeeplistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.KeeplistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledKeeplist.MatchString(tag)
	})

	if compiledKeeplist.MatchString(entry.URL) || compiledKeeplist.MatchString(entry.Title) || compiledKeeplist.MatchString(entry.Author) || containsAllowedTag {
		slog.Debug("Allowing entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.KeeplistRules),
		)
		return true
	}
	return false
}
// ProcessEntryWebPage downloads the entry's web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	rewrittenEntryURL := rewriteEntryURL(feed, entry)

	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
	requestBuilder.WithCookie(feed.Cookie)
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
	requestBuilder.UseProxy(feed.FetchViaProxy)
	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
	requestBuilder.DisableHTTP2(feed.DisableHTTP2)

	content, scraperErr := scraper.ScrapeWebsite(
		requestBuilder,
		rewrittenEntryURL,
		feed.ScraperRules,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if content != "" {
		entry.Content = minifyEntryContent(content)
		if user.ShowReadingTime {
			entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
		}
	}

	rewrite.Rewriter(rewrittenEntryURL, entry, entry.Feed.RewriteRules)
	entry.Content = sanitizer.Sanitize(rewrittenEntryURL, entry.Content)

	return nil
}
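
// rewriteEntryURL applies the feed's rewrite("search"|"replace") rule, if any,
// to the entry URL and returns the result; the original URL is returned when
// the rule is absent or malformed.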
func rewriteEntryURL(feed *model.Feed, entry *model.Entry) string {
	rewrittenURL := entry.URL
	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)

		if len(parts) >= 3 {
			re, err := regexp.Compile(parts[1])
			if err != nil {
				slog.Error("Failed on regexp compilation",
					slog.String("url_rewrite_rules", feed.UrlRewriteRules),
					slog.Any("error", err),
				)
				return rewrittenURL
			}
			rewrittenURL = re.ReplaceAllString(entry.URL, parts[2])
			slog.Debug("Rewriting entry URL",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", rewrittenURL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
			)
		} else {
			slog.Debug("Cannot find search and replace terms for replace rule",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", rewrittenURL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("url_rewrite_rules", feed.UrlRewriteRules),
			)
		}
	}

	return rewrittenURL
}
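
// updateEntryReadingTime stores a reading time for the entry: the watch time in
// minutes for supported video platforms, or an estimate based on the content
// length otherwise.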
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
	if !user.ShowReadingTime {
		slog.Debug("Skip reading time estimation for this user", slog.Int64("user_id", user.ID))
		return
	}

	if shouldFetchYouTubeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchYouTubeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch YouTube watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchNebulaWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchNebulaWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Nebula watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchOdyseeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchOdyseeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Odysee watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	// Fall back to a content-based estimate when no watch time was fetched.
	// This covers watch-time fetch errors as well as regular, non-video entries.
	if entry.ReadingTime == 0 {
		entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
	}
}
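
// shouldFetchYouTubeWatchTime reports whether the entry points to a YouTube
// video and watch time fetching is enabled in the configuration.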
func shouldFetchYouTubeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchYouTubeWatchTime() {
		return false
	}
	matches := youtubeRegex.FindStringSubmatch(entry.URL)
	return len(matches) == 2
}

func shouldFetchNebulaWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchNebulaWatchTime() {
		return false
	}
	return nebulaRegex.MatchString(entry.URL)
}

func shouldFetchOdyseeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchOdyseeWatchTime() {
		return false
	}
	return odyseeRegex.MatchString(entry.URL)
}
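
// fetchYouTubeWatchTime downloads the video page and reads its length from the
// meta[itemprop="duration"] tag, which typically holds an ISO 8601 duration
// such as "PT4M13S".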
func fetchYouTubeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch YouTube page", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	durs, exists := doc.Find(`meta[itemprop="duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := parseISO8601(durs)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur.Minutes()), nil
}
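
// fetchNebulaWatchTime downloads the video page and reads its length in seconds
// from the meta[property="video:duration"] tag.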
func fetchNebulaWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch Nebula watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	// durs contains the video watch time in seconds.
	durs, exists := doc.Find(`meta[property="video:duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := strconv.ParseInt(durs, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur / 60), nil
}
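
// fetchOdyseeWatchTime downloads the video page and reads its length in seconds
// from the meta[property="og:video:duration"] tag.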
func fetchOdyseeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch Odysee watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	// durs contains the video watch time in seconds.
	durs, exists := doc.Find(`meta[property="og:video:duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := strconv.ParseInt(durs, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur / 60), nil
}
// parseISO8601 parses an ISO 8601 duration string.
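// Only the time components are handled here: "PT1H30M" yields 1h30m0s, while a
// date component such as "P1D" results in an "unknown field" error.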
func parseISO8601(from string) (time.Duration, error) {
	var d time.Duration

	if !iso8601Regex.MatchString(from) {
		return 0, errors.New("could not parse duration string")
	}
	match := iso8601Regex.FindStringSubmatch(from)

	for i, name := range iso8601Regex.SubexpNames() {
		part := match[i]
		if i == 0 || name == "" || part == "" {
			continue
		}

		val, err := strconv.ParseInt(part, 10, 64)
		if err != nil {
			return 0, err
		}

		switch name {
		case "hour":
			d += time.Duration(val) * time.Hour
		case "minute":
			d += time.Duration(val) * time.Minute
		case "second":
			d += time.Duration(val) * time.Second
		default:
			return 0, fmt.Errorf("unknown field %s", name)
		}
	}

	return d, nil
}
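
// isRecentEntry returns true when the entry date falls within the configured
// maximum age (FilterEntryMaxAgeDays), or when that limit is disabled (0).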
func isRecentEntry(entry *model.Entry) bool {
	return config.Opts.FilterEntryMaxAgeDays() == 0 ||
		entry.Date.After(time.Now().AddDate(0, 0, -config.Opts.FilterEntryMaxAgeDays()))
}
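
// minifyEntryContent minifies the entry HTML to reduce its stored size, keeping
// end tags and attribute quotes so the markup is not broken; on error the
// content is returned unchanged.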
func minifyEntryContent(entryContent string) string {
	m := minify.New()

	// Options required to avoid breaking the HTML content.
	m.Add("text/html", &html.Minifier{
		KeepEndTags: true,
		KeepQuotes:  true,
	})

	if minifiedHTML, err := m.String("text/html", entryContent); err == nil {
		entryContent = minifiedHTML
	}

	return entryContent
}