// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor

import (
	"encoding/json"
	"errors"
	"fmt"
	"log/slog"
	"regexp"
	"slices"
	"strconv"
	"strings"
	"time"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/metric"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/readingtime"
	"miniflux.app/v2/internal/reader/rewrite"
	"miniflux.app/v2/internal/reader/sanitizer"
	"miniflux.app/v2/internal/reader/scraper"
	"miniflux.app/v2/internal/reader/urlcleaner"
	"miniflux.app/v2/internal/storage"

	"github.com/PuerkitoBio/goquery"
	"github.com/tdewolff/minify/v2"
	"github.com/tdewolff/minify/v2/html"
)

var (
	youtubeRegex           = regexp.MustCompile(`youtube\.com/watch\?v=(.*)$`)
	nebulaRegex            = regexp.MustCompile(`^https://nebula\.tv`)
	odyseeRegex            = regexp.MustCompile(`^https://odysee\.com`)
	bilibiliURLRegex       = regexp.MustCompile(`bilibili\.com/video/(.*)$`)
	bilibiliVideoIdRegex   = regexp.MustCompile(`/video/(?:av(\d+)|BV([a-zA-Z0-9]+))`)
	iso8601Regex           = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`)
	customReplaceRuleRegex = regexp.MustCompile(`rewrite\("(.*)"\|"(.*)"\)`)
)

// ProcessFeedEntries downloads the original web page for entries and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
	var filteredEntries model.Entries

	// Process older entries first
	for i := len(feed.Entries) - 1; i >= 0; i-- {
		entry := feed.Entries[i]

		slog.Debug("Processing entry",
			slog.Int64("user_id", user.ID),
			slog.String("entry_url", entry.URL),
			slog.String("entry_hash", entry.Hash),
			slog.String("entry_title", entry.Title),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
		)

		if isBlockedEntry(feed, entry, user) || !isAllowedEntry(feed, entry, user) || !isRecentEntry(entry) {
			continue
		}

		if cleanedURL, err := urlcleaner.RemoveTrackingParameters(entry.URL); err == nil {
			entry.URL = cleanedURL
		}

		pageBaseURL := ""
		rewrittenURL := rewriteEntryURL(feed, entry)
		entryIsNew := store.IsNewEntry(feed.ID, entry.Hash)
		if feed.Crawler && (entryIsNew || forceRefresh) {
			slog.Debug("Scraping entry",
				slog.Int64("user_id", user.ID),
				slog.String("entry_url", entry.URL),
				slog.String("entry_hash", entry.Hash),
				slog.String("entry_title", entry.Title),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.Bool("entry_is_new", entryIsNew),
				slog.Bool("force_refresh", forceRefresh),
				slog.String("rewritten_url", rewrittenURL),
			)

			startTime := time.Now()

			requestBuilder := fetcher.NewRequestBuilder()
			requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
			requestBuilder.WithCookie(feed.Cookie)
			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
			requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
			requestBuilder.UseProxy(feed.FetchViaProxy)
			requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
			requestBuilder.DisableHTTP2(feed.DisableHTTP2)

			scrapedPageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
				requestBuilder,
				rewrittenURL,
				feed.ScraperRules,
			)
			if scrapedPageBaseURL != "" {
				pageBaseURL = scrapedPageBaseURL
			}

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				slog.Warn("Unable to scrape entry",
					slog.Int64("user_id", user.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", scraperErr),
				)
			} else if extractedContent != "" {
				// We replace the entry content only if the scraper doesn't return any error.
				entry.Content = minifyEntryContent(extractedContent)
			}
		}

		rewrite.Rewriter(rewrittenURL, entry, feed.RewriteRules)

		if pageBaseURL == "" {
			pageBaseURL = rewrittenURL
		}

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered out.
		entry.Content = sanitizer.Sanitize(pageBaseURL, entry.Content)

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}

	feed.Entries = filteredEntries
}
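
// isBlockedEntry reports whether the entry matches the user's block filter
// rules or the feed's blocklist regex. User rules are newline-separated
// "Field=regex" pairs, where Field is one of EntryTitle, EntryURL,
// EntryCommentsURL, EntryContent, EntryAuthor, or EntryTag; for example
// (hypothetical pattern): EntryTitle=(?i)sponsored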
func isBlockedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
	if user.BlockFilterEntryRules != "" {
		rules := strings.Split(user.BlockFilterEntryRules, "\n")
		for _, rule := range rules {
			parts := strings.SplitN(rule, "=", 2)
			if len(parts) != 2 {
				// Skip empty lines and malformed rules without a "=" separator.
				continue
			}

			var match bool
			switch parts[0] {
			case "EntryTitle":
				match, _ = regexp.MatchString(parts[1], entry.Title)
			case "EntryURL":
				match, _ = regexp.MatchString(parts[1], entry.URL)
			case "EntryCommentsURL":
				match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
			case "EntryContent":
				match, _ = regexp.MatchString(parts[1], entry.Content)
			case "EntryAuthor":
				match, _ = regexp.MatchString(parts[1], entry.Author)
			case "EntryTag":
				match = slices.ContainsFunc(entry.Tags, func(tag string) bool {
					matched, _ := regexp.MatchString(parts[1], tag)
					return matched
				})
			}

			if match {
				slog.Debug("Blocking entry based on rule",
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.String("rule", rule),
				)
				return true
			}
		}
	}

	if feed.BlocklistRules == "" {
		return false
	}

	compiledBlocklist, err := regexp.Compile(feed.BlocklistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.BlocklistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsBlockedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledBlocklist.MatchString(tag)
	})

	if compiledBlocklist.MatchString(entry.URL) || compiledBlocklist.MatchString(entry.Title) || compiledBlocklist.MatchString(entry.Author) || containsBlockedTag {
		slog.Debug("Blocking entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.BlocklistRules),
		)
		return true
	}

	return false
}
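
// isAllowedEntry reports whether the entry passes the user's keep filter
// rules and the feed's keeplist regex. It uses the same newline-separated
// "Field=regex" rule syntax as isBlockedEntry. When no rules are configured,
// every entry is allowed.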
func isAllowedEntry(feed *model.Feed, entry *model.Entry, user *model.User) bool {
	if user.KeepFilterEntryRules != "" {
		rules := strings.Split(user.KeepFilterEntryRules, "\n")
		for _, rule := range rules {
			parts := strings.SplitN(rule, "=", 2)
			if len(parts) != 2 {
				// Skip empty lines and malformed rules without a "=" separator.
				continue
			}

			var match bool
			switch parts[0] {
			case "EntryTitle":
				match, _ = regexp.MatchString(parts[1], entry.Title)
			case "EntryURL":
				match, _ = regexp.MatchString(parts[1], entry.URL)
			case "EntryCommentsURL":
				match, _ = regexp.MatchString(parts[1], entry.CommentsURL)
			case "EntryContent":
				match, _ = regexp.MatchString(parts[1], entry.Content)
			case "EntryAuthor":
				match, _ = regexp.MatchString(parts[1], entry.Author)
			case "EntryTag":
				match = slices.ContainsFunc(entry.Tags, func(tag string) bool {
					matched, _ := regexp.MatchString(parts[1], tag)
					return matched
				})
			}

			if match {
				slog.Debug("Allowing entry based on rule",
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.String("rule", rule),
				)
				return true
			}
		}
		return false
	}

	if feed.KeeplistRules == "" {
		return true
	}

	compiledKeeplist, err := regexp.Compile(feed.KeeplistRules)
	if err != nil {
		slog.Debug("Failed on regexp compilation",
			slog.String("pattern", feed.KeeplistRules),
			slog.Any("error", err),
		)
		return false
	}

	containsAllowedTag := slices.ContainsFunc(entry.Tags, func(tag string) bool {
		return compiledKeeplist.MatchString(tag)
	})

	if compiledKeeplist.MatchString(entry.URL) || compiledKeeplist.MatchString(entry.Title) || compiledKeeplist.MatchString(entry.Author) || containsAllowedTag {
		slog.Debug("Allowing entry based on rule",
			slog.String("entry_url", entry.URL),
			slog.Int64("feed_id", feed.ID),
			slog.String("feed_url", feed.FeedURL),
			slog.String("rule", feed.KeeplistRules),
		)
		return true
	}

	return false
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	rewrittenEntryURL := rewriteEntryURL(feed, entry)

	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithUserAgent(feed.UserAgent, config.Opts.HTTPClientUserAgent())
	requestBuilder.WithCookie(feed.Cookie)
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
	requestBuilder.UseProxy(feed.FetchViaProxy)
	requestBuilder.IgnoreTLSErrors(feed.AllowSelfSignedCertificates)
	requestBuilder.DisableHTTP2(feed.DisableHTTP2)

	pageBaseURL, extractedContent, scraperErr := scraper.ScrapeWebsite(
		requestBuilder,
		rewrittenEntryURL,
		feed.ScraperRules,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if extractedContent != "" {
		entry.Content = minifyEntryContent(extractedContent)
		if user.ShowReadingTime {
			entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
		}
	}

	rewrite.Rewriter(rewrittenEntryURL, entry, entry.Feed.RewriteRules)
	entry.Content = sanitizer.Sanitize(pageBaseURL, entry.Content)
	return nil
}
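
// rewriteEntryURL applies the feed's URL rewrite rule to the entry URL and
// returns the result, or the original URL when no rule is configured or the
// rule's regex fails to compile. Rules use the form rewrite("search"|"replace"),
// as matched by customReplaceRuleRegex; for example (hypothetical rule):
// rewrite("//m\.example\.org/"|"//example.org/")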
func rewriteEntryURL(feed *model.Feed, entry *model.Entry) string {
	rewrittenURL := entry.URL
	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)

		if len(parts) >= 3 {
			re, err := regexp.Compile(parts[1])
			if err != nil {
				slog.Error("Failed on regexp compilation",
					slog.String("url_rewrite_rules", feed.UrlRewriteRules),
					slog.Any("error", err),
				)
				return rewrittenURL
			}
			rewrittenURL = re.ReplaceAllString(entry.URL, parts[2])
			slog.Debug("Rewriting entry URL",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", rewrittenURL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
			)
		} else {
			slog.Debug("Cannot find search and replace terms for replace rule",
				slog.String("original_entry_url", entry.URL),
				slog.String("rewritten_entry_url", rewrittenURL),
				slog.Int64("feed_id", feed.ID),
				slog.String("feed_url", feed.FeedURL),
				slog.String("url_rewrite_rules", feed.UrlRewriteRules),
			)
		}
	}

	return rewrittenURL
}
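
// updateEntryReadingTime sets the entry reading time: the video watch time
// for supported platforms (YouTube, Nebula, Odysee, Bilibili), or an estimate
// based on the entry content otherwise. For entries that are not new, the
// previously stored watch time is reused instead of being fetched again.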
func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
	if !user.ShowReadingTime {
		slog.Debug("Skip reading time estimation for this user", slog.Int64("user_id", user.ID))
		return
	}

	if shouldFetchYouTubeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchYouTubeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch YouTube watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchNebulaWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchNebulaWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Nebula watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchOdyseeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchOdyseeWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Odysee watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	if shouldFetchBilibiliWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchBilibiliWatchTime(entry.URL)
			if err != nil {
				slog.Warn("Unable to fetch Bilibili watch time",
					slog.Int64("user_id", user.ID),
					slog.Int64("entry_id", entry.ID),
					slog.String("entry_url", entry.URL),
					slog.Int64("feed_id", feed.ID),
					slog.String("feed_url", feed.FeedURL),
					slog.Any("error", err),
				)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(feed.ID, entry.Hash)
		}
	}

	// Fall back to estimating the reading time from the entry content for
	// non-video entries and for entries whose watch time could not be fetched.
	if entry.ReadingTime == 0 {
		entry.ReadingTime = readingtime.EstimateReadingTime(entry.Content, user.DefaultReadingSpeed, user.CJKReadingSpeed)
	}
}
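
// shouldFetchYouTubeWatchTime reports whether the watch time should be
// fetched for this entry: the feature is enabled and the entry URL looks
// like a YouTube watch URL.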
func shouldFetchYouTubeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchYouTubeWatchTime() {
		return false
	}
	matches := youtubeRegex.FindStringSubmatch(entry.URL)
	urlMatchesYouTubePattern := len(matches) == 2
	return urlMatchesYouTubePattern
}
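
// shouldFetchNebulaWatchTime reports whether the watch time should be
// fetched for this entry: the feature is enabled and the entry URL points
// to nebula.tv.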
func shouldFetchNebulaWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchNebulaWatchTime() {
		return false
	}
	matches := nebulaRegex.FindStringSubmatch(entry.URL)
	return matches != nil
}
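
// shouldFetchOdyseeWatchTime reports whether the watch time should be
// fetched for this entry: the feature is enabled and the entry URL points
// to odysee.com.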
func shouldFetchOdyseeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchOdyseeWatchTime() {
		return false
	}
	matches := odyseeRegex.FindStringSubmatch(entry.URL)
	return matches != nil
}
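
// shouldFetchBilibiliWatchTime reports whether the watch time should be
// fetched for this entry: the feature is enabled and the entry URL looks
// like a Bilibili video URL.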
func shouldFetchBilibiliWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchBilibiliWatchTime() {
		return false
	}
	matches := bilibiliURLRegex.FindStringSubmatch(entry.URL)
	urlMatchesBilibiliPattern := len(matches) == 2
	return urlMatchesBilibiliPattern
}
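
// fetchYouTubeWatchTime downloads the YouTube watch page and returns the
// video duration in minutes, parsed from the ISO 8601 value of the
// meta[itemprop="duration"] tag.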
func fetchYouTubeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch YouTube page", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	durs, exists := doc.Find(`meta[itemprop="duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := parseISO8601(durs)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur.Minutes()), nil
}
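
// fetchNebulaWatchTime downloads the Nebula video page and returns the video
// duration in minutes, read as seconds from the meta[property="video:duration"] tag.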
func fetchNebulaWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch Nebula watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	// durs contains the video duration in seconds.
	durs, exists := doc.Find(`meta[property="video:duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := strconv.ParseInt(durs, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur / 60), nil
}
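
// fetchOdyseeWatchTime downloads the Odysee video page and returns the video
// duration in minutes, read as seconds from the meta[property="og:video:duration"] tag.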
func fetchOdyseeWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch Odysee watch time", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return 0, docErr
	}

	// durs contains the video duration in seconds.
	durs, exists := doc.Find(`meta[property="og:video:duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := strconv.ParseInt(durs, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur / 60), nil
}
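
// extractBilibiliVideoID extracts the video identifier from a Bilibili video
// URL and returns the matching API query parameter name along with the ID:
// "aid" for av-style URLs and "bvid" for BV-style URLs. Illustrative
// (hypothetical) examples: /video/av12345 yields ("aid", "12345") and
// /video/BV1xxxxxx yields ("bvid", "1xxxxxx").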
func extractBilibiliVideoID(websiteURL string) (string, string, error) {
	matches := bilibiliVideoIdRegex.FindStringSubmatch(websiteURL)
	if matches == nil {
		return "", "", fmt.Errorf("no video ID found in URL: %s", websiteURL)
	}
	if matches[1] != "" {
		return "aid", matches[1], nil
	}
	if matches[2] != "" {
		return "bvid", matches[2], nil
	}
	return "", "", fmt.Errorf("unexpected regex match result for URL: %s", websiteURL)
}
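
// fetchBilibiliWatchTime queries the Bilibili web API
// (api.bilibili.com/x/web-interface/view) for the video metadata and returns
// the duration rounded up to whole minutes.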
func fetchBilibiliWatchTime(websiteURL string) (int, error) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	idType, videoID, extractErr := extractBilibiliVideoID(websiteURL)
	if extractErr != nil {
		return 0, extractErr
	}
	bilibiliApiURL := fmt.Sprintf("https://api.bilibili.com/x/web-interface/view?%s=%s", idType, videoID)

	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(bilibiliApiURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to fetch Bilibili API",
			slog.String("website_url", bilibiliApiURL),
			slog.Any("error", localizedError.Error()))
		return 0, localizedError.Error()
	}

	var result map[string]interface{}
	doc := json.NewDecoder(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr := doc.Decode(&result); docErr != nil {
		return 0, fmt.Errorf("failed to decode API response: %v", docErr)
	}

	if code, ok := result["code"].(float64); !ok || code != 0 {
		return 0, fmt.Errorf("API returned error code: %v", result["code"])
	}

	data, ok := result["data"].(map[string]interface{})
	if !ok {
		return 0, fmt.Errorf("data field not found or not an object")
	}

	duration, ok := data["duration"].(float64)
	if !ok {
		return 0, fmt.Errorf("duration not found or not a number")
	}

	intDuration := int(duration)
	durationMin := intDuration / 60
	if intDuration%60 != 0 {
		durationMin++
	}
	return durationMin, nil
}

// parseISO8601 parses an ISO 8601 duration string.
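// Only the hour, minute, and second components are supported: durations that
// use the year, month, week, or day components are rejected with an error.
// For example, "PT1H30M5S" parses to 1h30m5s.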
func parseISO8601(from string) (time.Duration, error) {
	var match []string
	var d time.Duration

	if iso8601Regex.MatchString(from) {
		match = iso8601Regex.FindStringSubmatch(from)
	} else {
		return 0, errors.New("could not parse duration string")
	}

	for i, name := range iso8601Regex.SubexpNames() {
		part := match[i]
		if i == 0 || name == "" || part == "" {
			continue
		}

		val, err := strconv.ParseInt(part, 10, 64)
		if err != nil {
			return 0, err
		}

		switch name {
		case "hour":
			d += time.Duration(val) * time.Hour
		case "minute":
			d += time.Duration(val) * time.Minute
		case "second":
			d += time.Duration(val) * time.Second
		default:
			return 0, fmt.Errorf("unknown field %s", name)
		}
	}

	return d, nil
}
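
// isRecentEntry reports whether the entry is younger than the configured
// maximum age. When FilterEntryMaxAgeDays is 0, every entry is considered
// recent.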
func isRecentEntry(entry *model.Entry) bool {
	maxAgeDays := config.Opts.FilterEntryMaxAgeDays()
	return maxAgeDays == 0 || entry.Date.After(time.Now().AddDate(0, 0, -maxAgeDays))
}
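
// minifyEntryContent minifies the entry HTML to reduce its size. The content
// is returned unchanged if minification fails.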
func minifyEntryContent(entryContent string) string {
	m := minify.New()

	// Options required to avoid breaking the HTML content.
	m.Add("text/html", &html.Minifier{
		KeepEndTags: true,
		KeepQuotes:  true,
	})

	if minifiedHTML, err := m.String("text/html", entryContent); err == nil {
		entryContent = minifiedHTML
	}

	return entryContent
}