adapter.go 11 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427
  1. // SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
  2. // SPDX-License-Identifier: Apache-2.0
  3. package rss // import "miniflux.app/v2/internal/reader/rss"
  4. import (
  5. "html"
  6. "log/slog"
  7. "path"
  8. "slices"
  9. "strconv"
  10. "strings"
  11. "time"
  12. "miniflux.app/v2/internal/crypto"
  13. "miniflux.app/v2/internal/model"
  14. "miniflux.app/v2/internal/reader/date"
  15. "miniflux.app/v2/internal/reader/sanitizer"
  16. "miniflux.app/v2/internal/urllib"
  17. )
// rssAdapter converts a parsed RSS document into the internal model.Feed
// representation used by the rest of the application.
type rssAdapter struct {
	rss *rss
}
// buildFeed converts the parsed RSS channel into a model.Feed.
//
// baseURL is the URL the feed was fetched from; it is used both as the
// default FeedURL and as the base for resolving relative URLs found in the
// document. Every item of the channel becomes a model.Entry appended to
// feed.Entries.
func (r *rssAdapter) buildFeed(baseURL string) *model.Feed {
	feed := &model.Feed{
		Title:       html.UnescapeString(strings.TrimSpace(r.rss.Channel.Title)),
		FeedURL:     strings.TrimSpace(baseURL),
		SiteURL:     strings.TrimSpace(r.rss.Channel.Link),
		Description: strings.TrimSpace(r.rss.Channel.Description),
	}

	// Ensure the Site URL is absolute. On resolution failure the raw
	// channel link is kept as-is.
	if absoluteSiteURL, err := urllib.ResolveToAbsoluteURL(baseURL, feed.SiteURL); err == nil {
		feed.SiteURL = absoluteSiteURL
	}

	// Try to find the canonical feed URL from the Atom <link rel="self"> links.
	for _, atomLink := range r.rss.Channel.Links {
		atomLinkHref := strings.TrimSpace(atomLink.Href)
		if atomLinkHref != "" && atomLink.Rel == "self" {
			if absoluteFeedURL, err := urllib.ResolveToAbsoluteURL(feed.FeedURL, atomLinkHref); err == nil {
				feed.FeedURL = absoluteFeedURL
				break
			}
		}
	}

	// Fallback to the site URL if the title is empty.
	if feed.Title == "" {
		feed.Title = feed.SiteURL
	}

	// Get TTL if defined. The RSS <ttl> element is expressed in minutes.
	if r.rss.Channel.TTL != "" {
		if ttl, err := strconv.Atoi(r.rss.Channel.TTL); err == nil {
			feed.TTL = time.Duration(ttl) * time.Minute
		}
	}

	// Get the feed icon URL if defined, resolved against the site URL.
	if r.rss.Channel.Image != nil {
		if absoluteIconURL, err := urllib.ResolveToAbsoluteURL(feed.SiteURL, r.rss.Channel.Image.URL); err == nil {
			feed.IconURL = absoluteIconURL
		}
	}

	// Track GUIDs already seen in this feed to disambiguate items from
	// non-conformant feeds that reuse the same <guid> for every entry.
	// The map value is the number of times the GUID has been seen so far.
	seenGUIDs := make(map[string]int)
	for _, item := range r.rss.Channel.Items {
		entry := model.NewEntry()
		entry.Date = findEntryDate(&item)
		entry.Content = findEntryContent(&item)
		entry.Enclosures = findEntryEnclosures(&item, feed.SiteURL)

		// Populate the entry URL.
		entryURL := findEntryURL(&item)
		if entryURL == "" {
			// Fallback to the first enclosure URL if it exists.
			if len(entry.Enclosures) > 0 && entry.Enclosures[0].URL != "" {
				entry.URL = entry.Enclosures[0].URL
			} else {
				// Fallback to the feed URL if no entry URL is found.
				entry.URL = feed.SiteURL
			}
		} else {
			// Keep the raw URL when it cannot be resolved to an absolute one.
			if absoluteEntryURL, err := urllib.ResolveToAbsoluteURL(feed.SiteURL, entryURL); err == nil {
				entry.URL = absoluteEntryURL
			} else {
				entry.URL = entryURL
			}
		}

		// Populate the entry title, falling back to a truncated version of
		// the content, then to the entry URL.
		entry.Title = findEntryTitle(&item)
		if entry.Title == "" {
			entry.Title = sanitizer.TruncateHTML(entry.Content, 100)
			if entry.Title == "" {
				entry.Title = entry.URL
			}
		}

		// Item-level author wins over the channel-level author.
		entry.Author = findEntryAuthor(&item)
		if entry.Author == "" {
			entry.Author = findFeedAuthor(&r.rss.Channel)
		}

		// Generate the entry hash.
		//
		// The RSS 2.0 spec requires <guid> to uniquely identify the item, but
		// some feeds ship the same GUID for every entry. Keep the first
		// occurrence stable (so existing stored entries still match) and
		// disambiguate later collisions using the entry URL or, as a last
		// resort, the item position.
		switch {
		case item.GUID.Data != "":
			n := seenGUIDs[item.GUID.Data]
			seenGUIDs[item.GUID.Data] = n + 1
			switch {
			case n == 0:
				entry.Hash = crypto.SHA256(item.GUID.Data)
			case entry.URL != "":
				entry.Hash = crypto.SHA256(item.GUID.Data + "|" + entry.URL)
			default:
				entry.Hash = crypto.SHA256(item.GUID.Data + "|" + strconv.Itoa(n))
			}
		case entryURL != "":
			entry.Hash = crypto.SHA256(entryURL)
		default:
			entry.Hash = crypto.SHA256(entry.Title + entry.Content)
		}

		// Find CommentsURL if defined. Only absolute URLs are accepted.
		if absoluteCommentsURL := strings.TrimSpace(item.CommentsURL); absoluteCommentsURL != "" && urllib.IsAbsoluteURL(absoluteCommentsURL) {
			entry.CommentsURL = absoluteCommentsURL
		}

		// Set podcast listening time from the iTunes duration tag.
		if item.ItunesDuration != "" {
			if duration, err := getDurationInMinutes(item.ItunesDuration); err == nil {
				entry.ReadingTime = duration
			}
		}

		// Populate entry categories, falling back to channel-level tags.
		entry.Tags = findEntryTags(&item)
		if len(entry.Tags) == 0 {
			entry.Tags = findFeedTags(&r.rss.Channel)
		}

		// Sort and deduplicate tags.
		slices.Sort(entry.Tags)
		entry.Tags = slices.Compact(entry.Tags)

		feed.Entries = append(feed.Entries, entry)
	}
	return feed
}
  141. func findFeedAuthor(rssChannel *rssChannel) string {
  142. var author string
  143. switch {
  144. case rssChannel.ItunesAuthor != "":
  145. author = rssChannel.ItunesAuthor
  146. case rssChannel.GooglePlayAuthor != "":
  147. author = rssChannel.GooglePlayAuthor
  148. case rssChannel.ItunesOwner.String() != "":
  149. author = rssChannel.ItunesOwner.String()
  150. case rssChannel.ManagingEditor != "":
  151. author = rssChannel.ManagingEditor
  152. case rssChannel.Webmaster != "":
  153. author = rssChannel.Webmaster
  154. default:
  155. return ""
  156. }
  157. return strings.TrimSpace(sanitizer.StripTags(author))
  158. }
  159. func findFeedTags(rssChannel *rssChannel) []string {
  160. tags := make([]string, 0)
  161. for _, tag := range rssChannel.Categories {
  162. tag = strings.TrimSpace(tag)
  163. if tag != "" {
  164. tags = append(tags, tag)
  165. }
  166. }
  167. for _, tag := range rssChannel.GetItunesCategories() {
  168. tag = strings.TrimSpace(tag)
  169. if tag != "" {
  170. tags = append(tags, tag)
  171. }
  172. }
  173. if tag := strings.TrimSpace(rssChannel.GooglePlayCategory.Text); tag != "" {
  174. tags = append(tags, tag)
  175. }
  176. return tags
  177. }
  178. func findEntryTitle(rssItem *rssItem) string {
  179. title := rssItem.Title.Content
  180. if rssItem.DublinCoreTitle != "" {
  181. title = rssItem.DublinCoreTitle
  182. }
  183. return html.UnescapeString(html.UnescapeString(strings.TrimSpace(title)))
  184. }
  185. func findEntryURL(rssItem *rssItem) string {
  186. for _, link := range []string{rssItem.FeedBurnerLink, rssItem.Link} {
  187. if link != "" {
  188. return strings.TrimSpace(link)
  189. }
  190. }
  191. for _, atomLink := range rssItem.Links {
  192. if atomLink.Href != "" && (strings.EqualFold(atomLink.Rel, "alternate") || atomLink.Rel == "") {
  193. return strings.TrimSpace(atomLink.Href)
  194. }
  195. }
  196. // Specs: https://cyber.harvard.edu/rss/rss.html#ltguidgtSubelementOfLtitemgt
  197. // isPermaLink is optional, its default value is true.
  198. // If its value is false, the guid may not be assumed to be a url, or a url to anything in particular.
  199. if rssItem.GUID.IsPermaLink == "true" || rssItem.GUID.IsPermaLink == "" {
  200. return strings.TrimSpace(rssItem.GUID.Data)
  201. }
  202. return ""
  203. }
  204. func findEntryContent(rssItem *rssItem) string {
  205. for _, value := range []string{
  206. rssItem.DublinCoreContent,
  207. rssItem.Description,
  208. rssItem.GooglePlayDescription,
  209. rssItem.ItunesSummary,
  210. rssItem.ItunesSubtitle,
  211. } {
  212. if value != "" {
  213. return value
  214. }
  215. }
  216. return ""
  217. }
  218. func findEntryDate(rssItem *rssItem) time.Time {
  219. value := rssItem.PubDate
  220. if rssItem.DublinCoreDate != "" {
  221. value = rssItem.DublinCoreDate
  222. }
  223. if value != "" {
  224. result, err := date.Parse(value)
  225. if err != nil {
  226. slog.Debug("Unable to parse date from RSS feed",
  227. slog.String("date", value),
  228. slog.String("guid", rssItem.GUID.Data),
  229. slog.Any("error", err),
  230. )
  231. return time.Now()
  232. }
  233. return result
  234. }
  235. return time.Now()
  236. }
  237. func findEntryAuthor(rssItem *rssItem) string {
  238. var author string
  239. switch {
  240. case rssItem.GooglePlayAuthor != "":
  241. author = rssItem.GooglePlayAuthor
  242. case rssItem.ItunesAuthor != "":
  243. author = rssItem.ItunesAuthor
  244. case rssItem.DublinCoreCreator != "":
  245. author = rssItem.DublinCoreCreator
  246. case rssItem.PersonName() != "":
  247. author = rssItem.PersonName()
  248. case strings.Contains(rssItem.Author.Inner, "<![CDATA["):
  249. author = rssItem.Author.Data
  250. case rssItem.Author.Inner != "":
  251. author = rssItem.Author.Inner
  252. default:
  253. return ""
  254. }
  255. return strings.TrimSpace(sanitizer.StripTags(author))
  256. }
  257. func findEntryTags(rssItem *rssItem) []string {
  258. tags := make([]string, 0)
  259. for _, tag := range rssItem.Categories {
  260. tag = strings.TrimSpace(tag)
  261. if tag != "" {
  262. tags = append(tags, tag)
  263. }
  264. }
  265. for _, tag := range rssItem.MediaCategories.Labels() {
  266. tag = strings.TrimSpace(tag)
  267. if tag != "" {
  268. tags = append(tags, tag)
  269. }
  270. }
  271. return tags
  272. }
  273. func findEntryEnclosures(rssItem *rssItem, siteURL string) model.EnclosureList {
  274. enclosures := make(model.EnclosureList, 0)
  275. duplicates := make(map[string]bool)
  276. for _, mediaThumbnail := range rssItem.AllMediaThumbnails() {
  277. mediaURL := strings.TrimSpace(mediaThumbnail.URL)
  278. if mediaURL == "" {
  279. continue
  280. }
  281. if _, found := duplicates[mediaURL]; !found {
  282. if mediaAbsoluteURL, err := urllib.ResolveToAbsoluteURL(siteURL, mediaURL); err != nil {
  283. slog.Debug("Unable to build absolute URL for media thumbnail",
  284. slog.String("url", mediaThumbnail.URL),
  285. slog.String("site_url", siteURL),
  286. slog.Any("error", err),
  287. )
  288. } else {
  289. duplicates[mediaAbsoluteURL] = true
  290. enclosures = append(enclosures, &model.Enclosure{
  291. URL: mediaAbsoluteURL,
  292. MimeType: mediaThumbnail.MimeType(),
  293. Size: mediaThumbnail.Size(),
  294. })
  295. }
  296. }
  297. }
  298. for _, enclosure := range rssItem.Enclosures {
  299. enclosureURL := enclosure.URL
  300. if rssItem.FeedBurnerEnclosureLink != "" {
  301. filename := path.Base(rssItem.FeedBurnerEnclosureLink)
  302. if strings.HasSuffix(enclosureURL, filename) {
  303. enclosureURL = rssItem.FeedBurnerEnclosureLink
  304. }
  305. }
  306. enclosureURL = strings.TrimSpace(enclosureURL)
  307. if enclosureURL == "" {
  308. continue
  309. }
  310. if absoluteEnclosureURL, err := urllib.ResolveToAbsoluteURL(siteURL, enclosureURL); err == nil {
  311. enclosureURL = absoluteEnclosureURL
  312. }
  313. if _, found := duplicates[enclosureURL]; !found {
  314. duplicates[enclosureURL] = true
  315. enclosures = append(enclosures, &model.Enclosure{
  316. URL: enclosureURL,
  317. MimeType: enclosure.Type,
  318. Size: enclosure.Size(),
  319. })
  320. }
  321. }
  322. for _, mediaContent := range rssItem.AllMediaContents() {
  323. mediaURL := strings.TrimSpace(mediaContent.URL)
  324. if mediaURL == "" {
  325. continue
  326. }
  327. if _, found := duplicates[mediaURL]; !found {
  328. mediaURL := strings.TrimSpace(mediaContent.URL)
  329. if mediaAbsoluteURL, err := urllib.ResolveToAbsoluteURL(siteURL, mediaURL); err != nil {
  330. slog.Debug("Unable to build absolute URL for media content",
  331. slog.String("url", mediaContent.URL),
  332. slog.String("site_url", siteURL),
  333. slog.Any("error", err),
  334. )
  335. } else {
  336. duplicates[mediaAbsoluteURL] = true
  337. enclosures = append(enclosures, &model.Enclosure{
  338. URL: mediaAbsoluteURL,
  339. MimeType: mediaContent.MimeType(),
  340. Size: mediaContent.Size(),
  341. })
  342. }
  343. }
  344. }
  345. for _, mediaPeerLink := range rssItem.AllMediaPeerLinks() {
  346. mediaURL := strings.TrimSpace(mediaPeerLink.URL)
  347. if mediaURL == "" {
  348. continue
  349. }
  350. if _, found := duplicates[mediaURL]; !found {
  351. mediaURL := strings.TrimSpace(mediaPeerLink.URL)
  352. if mediaAbsoluteURL, err := urllib.ResolveToAbsoluteURL(siteURL, mediaURL); err != nil {
  353. slog.Debug("Unable to build absolute URL for media peer link",
  354. slog.String("url", mediaPeerLink.URL),
  355. slog.String("site_url", siteURL),
  356. slog.Any("error", err),
  357. )
  358. } else {
  359. duplicates[mediaAbsoluteURL] = true
  360. enclosures = append(enclosures, &model.Enclosure{
  361. URL: mediaAbsoluteURL,
  362. MimeType: mediaPeerLink.MimeType(),
  363. Size: mediaPeerLink.Size(),
  364. })
  365. }
  366. }
  367. }
  368. return enclosures
  369. }