Add function to fetch and parse RSS feed

ekzyis 2023-08-23 00:38:50 +02:00
parent 4b4736c68d
commit 76cb7483f7
2 changed files with 74 additions and 0 deletions

rss.go Normal file
@ -0,0 +1,45 @@
package sn

import (
	"encoding/xml"
	"fmt"
	"log"
	"net/http"
	"time"
)

// UnmarshalXML implements xml.Unmarshaler so RSS timestamps
// like "Wed, 23 Aug 2023 00:38:50 GMT" decode into time.Time.
func (c *RssDate) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
	var v string
	dateFormat := "Mon, 02 Jan 2006 15:04:05 GMT"
	if err := d.DecodeElement(&v, &start); err != nil {
		return err
	}
	parse, err := time.Parse(dateFormat, v)
	if err != nil {
		return err
	}
	*c = RssDate{parse}
	return nil
}

var (
	StackerNewsRssFeedUrl = "https://stacker.news/rss"
)

// RssFeed fetches and decodes the stacker.news RSS feed.
func RssFeed() (*Rss, error) {
	resp, err := http.Get(StackerNewsRssFeedUrl)
	if err != nil {
		err = fmt.Errorf("error fetching RSS feed: %w", err)
		log.Println(err)
		return nil, err
	}
	defer resp.Body.Close()

	var rss Rss
	err = xml.NewDecoder(resp.Body).Decode(&rss)
	if err != nil {
		err = fmt.Errorf("error decoding RSS feed XML: %w", err)
		return nil, err
	}

	return &rss, nil
}
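
Not part of this commit, but a minimal sketch of how RssFeed could be called from a consumer program; the module import path is a placeholder and the loop is illustrative only:

package main

import (
	"fmt"
	"log"

	sn "example.com/your-module/sn" // placeholder import path; replace with the actual module path
)

func main() {
	rss, err := sn.RssFeed()
	if err != nil {
		log.Fatal(err)
	}
	// Print title, publication date, and author of each feed item.
	for _, item := range rss.Channel.Items {
		fmt.Printf("%s (%s) by %s\n", item.Title, item.PubDate.Format("2006-01-02"), item.Author.Name)
	}
}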


@ -91,3 +91,32 @@ type DupesError struct {
func (e *DupesError) Error() string {
	return fmt.Sprintf("found %d dupes for %s", len(e.Dupes), e.Url)
}
type RssItem struct {
	Guid        string    `xml:"guid"`
	Title       string    `xml:"title"`
	Link        string    `xml:"link"`
	Description string    `xml:"description"`
	PubDate     RssDate   `xml:"pubDate"`
	Author      RssAuthor `xml:"author"`
}

type RssChannel struct {
	Title         string    `xml:"title"`
	Description   string    `xml:"description"`
	Link          string    `xml:"link"`
	Items         []RssItem `xml:"item"`
	LastBuildDate RssDate   `xml:"lastBuildDate"`
}

type Rss struct {
	Channel RssChannel `xml:"channel"`
}

type RssDate struct {
	time.Time
}

type RssAuthor struct {
	Name string `xml:"name"`
}
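
To show how these struct tags line up with the feed's XML, here is a sketch of an example test (it would live in a _test.go file; the element values are made up, not taken from the actual feed) that decodes a small snippet with encoding/xml:

package sn

import (
	"encoding/xml"
	"fmt"
)

// Illustrative only: decode a made-up RSS snippet into the structs above.
func ExampleRss() {
	data := `<rss>
	  <channel>
	    <title>Stacker News</title>
	    <link>https://stacker.news</link>
	    <lastBuildDate>Wed, 23 Aug 2023 00:00:00 GMT</lastBuildDate>
	    <item>
	      <guid>https://stacker.news/items/1</guid>
	      <title>example item</title>
	      <link>https://stacker.news/items/1</link>
	      <description>example description</description>
	      <pubDate>Wed, 23 Aug 2023 00:00:00 GMT</pubDate>
	      <author><name>example author</name></author>
	    </item>
	  </channel>
	</rss>`
	var rss Rss
	if err := xml.Unmarshal([]byte(data), &rss); err != nil {
		panic(err)
	}
	fmt.Println(rss.Channel.Items[0].Title, "by", rss.Channel.Items[0].Author.Name)
	// Output: example item by example author
}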