// Feedget scrapes RSS feeds (and other sources)
// and spits the latest headline from each onto a static web page.
package main

import (
	"context"
	"fmt"
	"log"
	"sort"
	"sync"
	"time"

	"github.com/mmcdole/gofeed"
)

func main() {
	var sources = []*FeedSource{ // TODO: interface Source
		NewFeed("https://tilde.team/~dozens/dreams/rss.xml"),
		NewFeed("https://tilde.town/~magical/xkcd.xml"),
		// "https://xkcd.com/atom.xml",
	}

	var wg sync.WaitGroup
	wg.Add(len(sources))
	for i := range sources {
		src := sources[i]
		go func() {
			src.update()
			wg.Done()
		}()
	}
	wg.Wait()

	for _, src := range sources {
		fmt.Println(src.Title, src.Error)
	}
}

type Source interface {
	Title() string
	Link() string
	Error() error
	Update(context.Context)
}

// want to keep track of:
// - whether the most recent update succeeded
// - when the last successful update was
// - how many of the last N updates succeeded
// - status codes for the last N updates
// - response time for the last N updates
// - how frequently items are posted
type Cache struct {
}

type FeedSource struct {
	Items     []*gofeed.Item
	Title     string
	URL       string
	LastFetch time.Time
	Error     error

	mu sync.Mutex
}

func NewFeed(url string) *FeedSource {
	return &FeedSource{
		URL: url,
	}
}

// update fetches the feed and records its title and items, newest first.
func (src *FeedSource) update() {
	src.mu.Lock()
	defer src.mu.Unlock()

	fp := gofeed.NewParser()
	feed, err := fp.ParseURL(src.URL)
	if err != nil {
		err := fmt.Errorf("error parsing %q: %v", src.URL, err)
		log.Println(err)
		src.Error = err
		return // return err?
	}

	items := feed.Items
	sort.Slice(items, func(i, j int) bool {
		// sort.Slice needs a strict less-than function:
		// >= breaks the ordering contract when two items compare equal
		return items[i].Updated > items[j].Updated
	})

	src.Title = feed.Title
	src.Items = items
	src.Error = nil // clear any error from a previous attempt
	src.LastFetch = time.Now()
}
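
// A sketch toward the "TODO: interface Source" above, not a finished
// implementation: a context-aware Update using gofeed's
// ParseURLWithContext, so the caller can bound the fetch with a timeout
// or cancellation. Title() and Error() accessors are omitted because
// they would collide with the exported fields of the same names;
// fully satisfying Source would mean renaming those fields first.
func (src *FeedSource) Update(ctx context.Context) {
	src.mu.Lock()
	defer src.mu.Unlock()

	fp := gofeed.NewParser()
	feed, err := fp.ParseURLWithContext(src.URL, ctx)
	if err != nil {
		src.Error = fmt.Errorf("error parsing %q: %w", src.URL, err)
		log.Println(src.Error)
		return
	}

	items := feed.Items
	sort.Slice(items, func(i, j int) bool {
		return items[i].Updated > items[j].Updated
	})

	src.Title = feed.Title
	src.Items = items
	src.Error = nil
	src.LastFetch = time.Now()
}

// Link reports where the feed came from; part of the same sketch.
func (src *FeedSource) Link() string { return src.URL }

// Hypothetical usage from main, giving each run a deadline:
//
//	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
//	defer cancel()
//	go func() { src.Update(ctx); wg.Done() }()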