processor.go

// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor

import (
    "errors"
    "fmt"
    "log/slog"
    "regexp"
    "slices"
    "strconv"
    "strings"
    "time"

    "miniflux.app/v2/internal/config"
    "miniflux.app/v2/internal/metric"
    "miniflux.app/v2/internal/model"
    "miniflux.app/v2/internal/reader/fetcher"
    "miniflux.app/v2/internal/reader/readingtime"
    "miniflux.app/v2/internal/reader/rewrite"
    "miniflux.app/v2/internal/reader/sanitizer"
    "miniflux.app/v2/internal/reader/scraper"
    "miniflux.app/v2/internal/reader/urlcleaner"
    "miniflux.app/v2/internal/storage"

    "github.com/PuerkitoBio/goquery"
    "github.com/tdewolff/minify/v2"
    "github.com/tdewolff/minify/v2/html"
)

var (
    youtubeRegex           = regexp.MustCompile(`youtube\.com/watch\?v=(.*)$`)
    nebulaRegex            = regexp.MustCompile(`^https://nebula\.tv`)
    odyseeRegex            = regexp.MustCompile(`^https://odysee\.com`)
    bilibiliRegex          = regexp.MustCompile(`bilibili\.com/video/(.*)$`)
    timelengthRegex        = regexp.MustCompile(`"timelength":\s*(\d+)`)
    iso8601Regex           = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`)
    customReplaceRuleRegex = regexp.MustCompile(`rewrite\("(.*)"\|"(.*)"\)`)
)
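
// Illustrative inputs matched by the patterns above (example values, not
// taken from any real feed):
//
//	youtubeRegex:           https://www.youtube.com/watch?v=dQw4w9WgXcQ
//	bilibiliRegex:          https://www.bilibili.com/video/BV1xx411c7mD
//	iso8601Regex:           PT1H30M5S
//	customReplaceRuleRegex: rewrite("^https://example\.org/(.*)$"|"https://example.org/$1/amp")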
// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
    var filteredEntries model.Entries

    // Process older entries first.
    for i := len(feed.Entries) - 1; i >= 0; i-- {
        entry := feed.Entries[i]

        slog.Debug("Processing entry",
            slog.Int64("user_id", user.ID),
            slog.String("entry_url", entry.URL),
            slog.String("entry_hash", entry.Hash),
            slog.String("entry_title", entry.Title),
            slog.Int64("feed_id", feed.ID),
            slog.String("feed_url", feed.FeedURL),
        )

        if isBlockedEntry(feed, entry, user) || !isAllowedEntry(feed, entry, user) || !isRecentEntry(entry) {
            continue
        }

        if cleanedURL, err := urlcleaner.RemoveTrackingParameters(entry.URL); err == nil {
            entry.URL = cleanedURL
        }

        pageBaseURL := ""
        rewrittenURL := rewriteEntryURL(feed, entry)
        entryIsNew := store.IsNewEntry(feed.ID, entry.Hash)
        if feed.Crawler && (entryIsNew || forceRefresh) {
            slog.Debug("Scraping entry",
                slog.Int64("user_id", user.ID),
                slog.String("entry_url", entry.URL),
                slog.String("entry_hash", entry.Hash),
                slog.String("entry_title", entry.Title),
                slog.Int64("feed_id", feed.ID),
                slog.String("feed_url", feed.FeedURL),
                slog.Bool("entry_is_new", entryIsNew),
                slog.Bool("force_refresh", forceRefresh),
                slog.String("rewritten_url", rewrittenURL),
            )

            startTime := time.Now()

            requestBuilder := fetcher.NewRequestBuilder()
            requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
            requestBuilder.WithCookie(feed.Cookie)
            requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
            requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
            requestBuilder.UseProxy(feed.FetchViaProxy)
            requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
            requestBuilder.DisableHTTP2(feed.DisableHTTP2)

            scrapedPageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
                requestBuilder,
                rewrittenURL,
                feed.ScraperRules,
            )
            if scrapedPageBaseURL != "" {
                pageBaseURL = scrapedPageBaseURL
            }

            if config.Opts.HasMetricsCollector() {
                status := "success"
                if scraperErr != nil {
                    status = "error"
                }
                metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
            }

            if scraperErr != nil {
                slog.Warn("Unable to scrape entry",
                    slog.Int64("user_id", user.ID),
                    slog.String("entry_url", entry.URL),
                    slog.Int64("feed_id", feed.ID),
                    slog.String("feed_url", feed.FeedURL),
                    slog.Any("error", scraperErr),
                )
            } else if extractedContent != "" {
                // Replace the entry content only when the scraper did not return an error.
                entry.Content = minifyEntryContent(extractedContent)
            }
        }

        rewrite.Rewriter(rewrittenURL, entry, feed.RewriteRules)

        if pageBaseURL == "" {
            pageBaseURL = rewrittenURL
        }

        // The sanitizer must always run at the end of the process to make sure unsafe HTML is filtered out.
        entry.Content = sanitizer.Sanitize(pageBaseURL, entry.Content)

        updateEntryReadingTime(store, feed, entry, entryIsNew, user)
        filteredEntries = append(filteredEntries, entry)
    }

    feed.Entries = filteredEntries
}
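
// Per-user filter rules (BlockFilterEntryRules and KeepFilterEntryRules) are
// newline-separated "Field=RegEx" pairs; the field names come from the switch
// cases below. Illustrative rules (example patterns only):
//
//	EntryTitle=(?i)sponsored
//	EntryAuthor=^Newsletter Bot$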
func isBlockedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
    if user.BlockFilterEntryRules != "" {
        rules := strings.Split(user.BlockFilterEntryRules, "\n")
        for _, rule := range rules {
            parts := strings.SplitN(rule, "=", 2)
            if len(parts) != 2 {
                // Skip malformed rules instead of panicking on a missing parts[1].
                continue
            }

            var match bool
            switch parts[0] {
            case "EntryTitle":
                match, _ = regexp.MatchString(parts[1], entry.Title)
            case "EntryURL":
                match, _ = regexp.MatchString(parts[1], entry.URL)
            case "EntryCommentsURL":
                match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
            case "EntryContent":
                match, _ = regexp.MatchString(parts[1], entry.Content)
            case "EntryAuthor":
                match, _ = regexp.MatchString(parts[1], entry.Author)
            case "EntryTag":
                match = slices.ContainsFunc(entry.Tags, func(tag string) bool {
                    matched, _ := regexp.MatchString(parts[1], tag)
                    return matched
                })
            }

            if match {
                slog.Debug("Blocking entry based on rule",
                    slog.String("entry_url", entry.URL),
                    slog.Int64("feed_id", feed.ID),
                    slog.String("feed_url", feed.FeedURL),
                    slog.String("rule", rule),
                )
                return true
            }
        }
    }

    if feed.BlocklistRules == "" {
        return false
    }

    compiledBlocklist, err := regexp.Compile(feed.BlocklistRules)
    if err != nil {
        slog.Debug("Failed on regexp compilation",
            slog.String("pattern", feed.BlocklistRules),
            slog.Any("error", err),
        )
        return false
    }

    containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
        return compiledBlocklist.MatchString(tag)
    })

    if compiledBlocklist.MatchString(entry.URL) || compiledBlocklist.MatchString(entry.Title) || compiledBlocklist.MatchString(entry.Author) || containsBlockedTag {
        slog.Debug("Blocking entry based on rule",
            slog.String("entry_url", entry.URL),
            slog.Int64("feed_id", feed.ID),
            slog.String("feed_url", feed.FeedURL),
            slog.String("rule", feed.BlocklistRules),
        )
        return true
    }
    return false
}
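
// Keep rules mirror the block rules above: when per-user keep rules are
// defined, an entry survives only if at least one rule matches; otherwise a
// feed-level KeeplistRules pattern must match the URL, title, author, or a tag.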
func isAllowedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
    if user.KeepFilterEntryRules != "" {
        rules := strings.Split(user.KeepFilterEntryRules, "\n")
        for _, rule := range rules {
            parts := strings.SplitN(rule, "=", 2)
            if len(parts) != 2 {
                // Skip malformed rules instead of panicking on a missing parts[1].
                continue
            }

            var match bool
            switch parts[0] {
            case "EntryTitle":
                match, _ = regexp.MatchString(parts[1], entry.Title)
            case "EntryURL":
                match, _ = regexp.MatchString(parts[1], entry.URL)
            case "EntryCommentsURL":
                match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
            case "EntryContent":
                match, _ = regexp.MatchString(parts[1], entry.Content)
            case "EntryAuthor":
                match, _ = regexp.MatchString(parts[1], entry.Author)
            case "EntryTag":
                match = slices.ContainsFunc(entry.Tags, func(tag string) bool {
                    matched, _ := regexp.MatchString(parts[1], tag)
                    return matched
                })
            }

            if match {
                slog.Debug("Allowing entry based on rule",
                    slog.String("entry_url", entry.URL),
                    slog.Int64("feed_id", feed.ID),
                    slog.String("feed_url", feed.FeedURL),
                    slog.String("rule", rule),
                )
                return true
            }
        }
        return false
    }

    if feed.KeeplistRules == "" {
        return true
    }

    compiledKeeplist, err := regexp.Compile(feed.KeeplistRules)
    if err != nil {
        slog.Debug("Failed on regexp compilation",
            slog.String("pattern", feed.KeeplistRules),
            slog.Any("error", err),
        )
        return false
    }

    containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
        return compiledKeeplist.MatchString(tag)
    })

    if compiledKeeplist.MatchString(entry.URL) || compiledKeeplist.MatchString(entry.Title) || compiledKeeplist.MatchString(entry.Author) || containsAllowedTag {
        slog.Debug("Allowing entry based on rule",
            slog.String("entry_url", entry.URL),
            slog.Int64("feed_id", feed.ID),
            slog.String("feed_url", feed.FeedURL),
            slog.String("rule", feed.KeeplistRules),
        )
        return true
    }
    return false
}
// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
    startTime := time.Now()
    rewrittenEntryURL := rewriteEntryURL(feed, entry)

    requestBuilder := fetcher.NewRequestBuilder()
    requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
    requestBuilder.WithCookie(feed.Cookie)
    requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
    requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
    requestBuilder.UseProxy(feed.FetchViaProxy)
    requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
    requestBuilder.DisableHTTP2(feed.DisableHTTP2)

    pageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
        requestBuilder,
        rewrittenEntryURL,
        feed.ScraperRules,
    )

    if config.Opts.HasMetricsCollector() {
        status := "success"
        if scraperErr != nil {
            status = "error"
        }
        metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
    }

    if scraperErr != nil {
        return scraperErr
    }

    if extractedContent != "" {
        entry.Content = minifyEntryContent(extractedContent)
        if user.ShowReadingTime {
            entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
        }
    }

    rewrite.Rewriter(rewrittenEntryURL, entry, entry.Feed.RewriteRules)
    entry.Content = sanitizer.Sanitize(pageBaseURL, entry.Content)
    return nil
}
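
// A feed's UrlRewriteRules value is a single rewrite("search"|"replace")
// expression (see customReplaceRuleRegex), where "search" is a regular
// expression and "replace" may reference capture groups. An illustrative
// value (example only):
//
//	rewrite("^https://example\.org/news/(.*)$"|"https://example.org/amp/news/$1")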
func rewriteEntryURL(feed *model.Feed, entry *model.Entry) string {
    var rewrittenURL = entry.URL
    if feed.UrlRewriteRules != "" {
        parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)
        if len(parts) >= 3 {
            re, err := regexp.Compile(parts[1])
            if err != nil {
                slog.Error("Failed on regexp compilation",
                    slog.String("url_rewrite_rules", feed.UrlRewriteRules),
                    slog.Any("error", err),
                )
                return rewrittenURL
            }
            rewrittenURL = re.ReplaceAllString(entry.URL, parts[2])
            slog.Debug("Rewriting entry URL",
                slog.String("original_entry_url", entry.URL),
                slog.String("rewritten_entry_url", rewrittenURL),
                slog.Int64("feed_id", feed.ID),
                slog.String("feed_url", feed.FeedURL),
            )
        } else {
            slog.Debug("Cannot find search and replace terms for replace rule",
                slog.String("original_entry_url", entry.URL),
                slog.String("rewritten_entry_url", rewrittenURL),
                slog.Int64("feed_id", feed.ID),
                slog.String("feed_url", feed.FeedURL),
                slog.String("url_rewrite_rules", feed.UrlRewriteRules),
            )
        }
    }
    return rewrittenURL
}
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
    if !user.ShowReadingTime {
        slog.Debug("Skip reading time estimation for this user", slog.Int64("user_id", user.ID))
        return
    }

    if shouldFetchYouTubeWatchTime(entry) {
        if entryIsNew {
            watchTime, err := fetchYouTubeWatchTime(entry.URL)
            if err != nil {
                slog.Warn("Unable to fetch YouTube watch time",
                    slog.Int64("user_id", user.ID),
                    slog.Int64("entry_id", entry.ID),
                    slog.String("entry_url", entry.URL),
                    slog.Int64("feed_id", feed.ID),
                    slog.String("feed_url", feed.FeedURL),
                    slog.Any("error", err),
                )
            }
            entry.ReadingTime = watchTime
        } else {
            entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
        }
    }

    if shouldFetchNebulaWatchTime(entry) {
        if entryIsNew {
            watchTime, err := fetchNebulaWatchTime(entry.URL)
            if err != nil {
                slog.Warn("Unable to fetch Nebula watch time",
                    slog.Int64("user_id", user.ID),
                    slog.Int64("entry_id", entry.ID),
                    slog.String("entry_url", entry.URL),
                    slog.Int64("feed_id", feed.ID),
                    slog.String("feed_url", feed.FeedURL),
                    slog.Any("error", err),
                )
            }
            entry.ReadingTime = watchTime
        } else {
            entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
        }
    }

    if shouldFetchOdyseeWatchTime(entry) {
        if entryIsNew {
            watchTime, err := fetchOdyseeWatchTime(entry.URL)
            if err != nil {
                slog.Warn("Unable to fetch Odysee watch time",
                    slog.Int64("user_id", user.ID),
                    slog.Int64("entry_id", entry.ID),
                    slog.String("entry_url", entry.URL),
                    slog.Int64("feed_id", feed.ID),
                    slog.String("feed_url", feed.FeedURL),
                    slog.Any("error", err),
                )
            }
            entry.ReadingTime = watchTime
        } else {
            entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
        }
    }

    if shouldFetchBilibiliWatchTime(entry) {
        if entryIsNew {
            watchTime, err := fetchBilibiliWatchTime(entry.URL)
            if err != nil {
                slog.Warn("Unable to fetch Bilibili watch time",
                    slog.Int64("user_id", user.ID),
                    slog.Int64("entry_id", entry.ID),
                    slog.String("entry_url", entry.URL),
                    slog.Int64("feed_id", feed.ID),
                    slog.String("feed_url", feed.FeedURL),
                    slog.Any("error", err),
                )
            }
            entry.ReadingTime = watchTime
        } else {
            entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
        }
    }

    // Fall back to estimating the reading time from the content when no watch
    // time could be determined (fetch errors and non-video entries).
    if entry.ReadingTime == 0 {
        entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
    }
}
func shouldFetchYouTubeWatchTime(entry *model.Entry) bool {
    if !config.Opts.FetchYouTubeWatchTime() {
        return false
    }
    matches := youtubeRegex.FindStringSubmatch(entry.URL)
    urlMatchesYouTubePattern := len(matches) == 2
    return urlMatchesYouTubePattern
}

func shouldFetchNebulaWatchTime(entry *model.Entry) bool {
    if !config.Opts.FetchNebulaWatchTime() {
        return false
    }
    matches := nebulaRegex.FindStringSubmatch(entry.URL)
    return matches != nil
}

func shouldFetchOdyseeWatchTime(entry *model.Entry) bool {
    if !config.Opts.FetchOdyseeWatchTime() {
        return false
    }
    matches := odyseeRegex.FindStringSubmatch(entry.URL)
    return matches != nil
}

func shouldFetchBilibiliWatchTime(entry *model.Entry) bool {
    if !config.Opts.FetchBilibiliWatchTime() {
        return false
    }
    matches := bilibiliRegex.FindStringSubmatch(entry.URL)
    urlMatchesBilibiliPattern := len(matches) == 2
    return urlMatchesBilibiliPattern
}
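
// The fetch*WatchTime helpers below return the video duration in whole
// minutes so it can be stored in entry.ReadingTime alongside text-based
// reading-time estimates.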
func fetchYouTubeWatchTime(websiteURL string) (int, error) {
    requestBuilder := fetcher.NewRequestBuilder()
    requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
    requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

    responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
    defer responseHandler.Close()

    if localizedError := responseHandler.LocalizedError(); localizedError != nil {
        slog.Warn("Unable to fetch YouTube page", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
        return 0, localizedError.Error()
    }

    doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
    if docErr != nil {
        return 0, docErr
    }

    durs, exists := doc.Find(`meta[itemprop="duration"]`).First().Attr("content")
    if !exists {
        return 0, errors.New("duration not found")
    }

    dur, err := parseISO8601(durs)
    if err != nil {
        return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
    }

    return int(dur.Minutes()), nil
}
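
// The watch page embeds the duration as an ISO 8601 value in a meta tag, e.g.
// <meta itemprop="duration" content="PT13M37S"> (illustrative value).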
func fetchNebulaWatchTime(websiteURL string) (int, error) {
    requestBuilder := fetcher.NewRequestBuilder()
    requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
    requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

    responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
    defer responseHandler.Close()

    if localizedError := responseHandler.LocalizedError(); localizedError != nil {
        slog.Warn("Unable to fetch Nebula watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
        return 0, localizedError.Error()
    }

    doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
    if docErr != nil {
        return 0, docErr
    }

    // durs contains the video duration in seconds.
    durs, exists := doc.Find(`meta[property="video:duration"]`).First().Attr("content")
    if !exists {
        return 0, errors.New("duration not found")
    }

    dur, err := strconv.ParseInt(durs, 10, 64)
    if err != nil {
        return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
    }

    return int(dur / 60), nil
}
func fetchOdyseeWatchTime(websiteURL string) (int, error) {
    requestBuilder := fetcher.NewRequestBuilder()
    requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
    requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

    responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
    defer responseHandler.Close()

    if localizedError := responseHandler.LocalizedError(); localizedError != nil {
        slog.Warn("Unable to fetch Odysee watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
        return 0, localizedError.Error()
    }

    doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
    if docErr != nil {
        return 0, docErr
    }

    // durs contains the video duration in seconds.
    durs, exists := doc.Find(`meta[property="og:video:duration"]`).First().Attr("content")
    if !exists {
        return 0, errors.New("duration not found")
    }

    dur, err := strconv.ParseInt(durs, 10, 64)
    if err != nil {
        return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
    }

    return int(dur / 60), nil
}
func fetchBilibiliWatchTime(websiteURL string) (int, error) {
    requestBuilder := fetcher.NewRequestBuilder()
    requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
    requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

    responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
    defer responseHandler.Close()

    if localizedError := responseHandler.LocalizedError(); localizedError != nil {
        slog.Warn("Unable to fetch Bilibili page", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
        return 0, localizedError.Error()
    }

    doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
    if docErr != nil {
        return 0, docErr
    }

    timelengthMatches := timelengthRegex.FindStringSubmatch(doc.Text())
    if len(timelengthMatches) < 2 {
        return 0, errors.New("duration not found")
    }

    durationMs, err := strconv.ParseInt(timelengthMatches[1], 10, 64)
    if err != nil {
        return 0, fmt.Errorf("unable to parse duration %s: %v", timelengthMatches[1], err)
    }

    durationSec := durationMs / 1000
    durationMin := durationSec / 60
    // Round up to the next full minute.
    if durationSec%60 != 0 {
        durationMin++
    }
    return int(durationMin), nil
}
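
// Bilibili pages embed the duration in milliseconds inside a JSON blob, e.g.
// "timelength": 815000 (an illustrative value: 815 seconds, rounded up to 14
// minutes by the logic above).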
// parseISO8601 parses an ISO 8601 duration string.
func parseISO8601(from string) (time.Duration, error) {
    var match []string
    var d time.Duration

    if iso8601Regex.MatchString(from) {
        match = iso8601Regex.FindStringSubmatch(from)
    } else {
        return 0, errors.New("could not parse duration string")
    }

    for i, name := range iso8601Regex.SubexpNames() {
        part := match[i]
        if i == 0 || name == "" || part == "" {
            continue
        }

        val, err := strconv.ParseInt(part, 10, 64)
        if err != nil {
            return 0, err
        }

        switch name {
        case "hour":
            d += time.Duration(val) * time.Hour
        case "minute":
            d += time.Duration(val) * time.Minute
        case "second":
            d += time.Duration(val) * time.Second
        default:
            // Only time components are supported; date components (years,
            // months, weeks, days) end up here and are rejected.
            return 0, fmt.Errorf("unknown field %s", name)
        }
    }

    return d, nil
}
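
// For example, parseISO8601("PT1H30M5S") yields a duration of 1h30m5s, so
// int(dur.Minutes()) in fetchYouTubeWatchTime comes out as 90.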
func isRecentEntry(entry *model.Entry) bool {
    return config.Opts.FilterEntryMaxAgeDays() == 0 || entry.Date.After(time.Now().AddDate(0, 0, -config.Opts.FilterEntryMaxAgeDays()))
}
func minifyEntryContent(entryContent string) string {
    m := minify.New()

    // Options required to avoid breaking the HTML content.
    m.Add("text/html", &html.Minifier{
        KeepEndTags: true,
        KeepQuotes:  true,
    })

    if minifiedHTML, err := m.String("text/html", entryContent); err == nil {
        entryContent = minifiedHTML
    }

    return entryContent
}
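
// Sketch of the effect (illustrative input, not from the test suite):
// minifying `<p class="a">  Hello   <b>world</b> </p>` collapses the runs of
// whitespace, while KeepEndTags preserves the closing </p> and KeepQuotes
// keeps the attribute quoted as class="a".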