fetch the url ourselves

makes way for caching the response

also clear src.Error on success
master
magical 2021-12-31 06:20:56 +00:00
parent d260db550d
commit 6a8856c170
1 changed file with 51 additions and 3 deletions

main.go

@@ -6,6 +6,7 @@ import (
 	"context"
 	"fmt"
 	"log"
+	"net/http"
 	"sort"
 	"sync"
 	"time"
@@ -13,6 +14,8 @@ import (
 	"github.com/mmcdole/gofeed"
 )
 
+const UserAgent = "feedget/0.1"
+
 func main() {
 	var sources = []*FeedSource{ // TODO: interface Source
 		NewFeed("https://tilde.team/~dozens/dreams/rss.xml"),
@@ -31,7 +34,7 @@ func main() {
 	wg.Wait()
 	for _, src := range sources {
-		fmt.Println(src.Title, src.Error)
+		fmt.Println(src.Title, src.Error, src.LastStatus)
 	}
 }
@@ -60,6 +63,9 @@ type FeedSource struct {
 	LastFetch time.Time
 	Error     error
 
+	LastStatusCode int
+	LastStatus     string
+
 	mu sync.Mutex
 }
@@ -73,13 +79,54 @@ func (src *FeedSource) update() {
 	src.mu.Lock()
 	defer src.mu.Unlock()
 	fp := gofeed.NewParser()
-	feed, err := fp.ParseURL(src.URL)
+	ctx := context.TODO()
+	req, err := http.NewRequest("GET", src.URL, nil)
 	if err != nil {
-		err := fmt.Errorf("error parsing %q: %v", src.URL, err)
+		src.Error = fmt.Errorf("error fetching %q: %w", src.URL, err)
+		log.Println(src.Error)
+		return // return err?
+	}
+	req = req.WithContext(ctx)
+	req.Header.Set("User-Agent", UserAgent)
+	// TODO: If-Modified-Since, Etag
+	resp, err := http.DefaultClient.Do(req)
+	if err != nil {
+		err := fmt.Errorf("error fetching %q: %w", src.URL, err)
 		log.Println(err)
 		src.Error = err
 		return // return err?
 	}
+	if resp != nil && resp.Body != nil {
+		defer func() {
+			err := resp.Body.Close()
+			if err != nil {
+				log.Printf("error closing response body for %q: %v", src.URL, err)
+			}
+		}()
+	}
+	src.LastStatusCode = resp.StatusCode
+	src.LastStatus = resp.Status
+	if resp.StatusCode != 200 {
+		src.Error = fmt.Errorf("error fetching %q: status %s", src.URL, resp.Status)
+		log.Println(src.Error)
+		return
+	}
+	// TODO: cache body
+	feed, err := fp.Parse(resp.Body)
+	if err != nil {
+		err := fmt.Errorf("error parsing %q: %w", src.URL, err)
+		log.Println(err)
+		src.Error = err
+		return // return err?
+	}
 	items := feed.Items
 	sort.Slice(items, func(i, j int) bool {
 		return items[i].Updated >= items[j].Updated
@@ -87,4 +134,5 @@ func (src *FeedSource) update() {
 	src.Title = feed.Title
 	src.Items = items
 	src.LastFetch = time.Now()
+	src.Error = nil
 }
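
The "// TODO: If-Modified-Since, Etag" comment above is the caching hook this commit makes room for: now that the code builds its own *http.Request, a later change can replay the validators from the previous response and skip refetching when the server answers 304 Not Modified. Below is a minimal sketch of what that follow-up could look like; condFetch and its etag/lastModified parameters are hypothetical names, not part of this commit (note the ETag validator goes in the If-None-Match request header):

package main

import (
	"fmt"
	"io"
	"net/http"
)

const UserAgent = "feedget/0.1"

// condFetch is a hypothetical helper, not part of this commit. It issues a
// conditional GET using the validators saved from the previous response.
// A nil body with a nil error means 304 Not Modified: keep the cached copy.
func condFetch(url, etag, lastModified string) (body []byte, newEtag, newLastModified string, err error) {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, "", "", err
	}
	req.Header.Set("User-Agent", UserAgent)
	if etag != "" {
		req.Header.Set("If-None-Match", etag)
	}
	if lastModified != "" {
		req.Header.Set("If-Modified-Since", lastModified)
	}

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, "", "", err
	}
	defer resp.Body.Close()

	switch resp.StatusCode {
	case http.StatusNotModified:
		// Nothing changed upstream; the old validators stay valid.
		return nil, etag, lastModified, nil
	case http.StatusOK:
		body, err = io.ReadAll(resp.Body)
		if err != nil {
			return nil, "", "", err
		}
		// Remember the new validators for the next fetch.
		return body, resp.Header.Get("Etag"), resp.Header.Get("Last-Modified"), nil
	default:
		return nil, "", "", fmt.Errorf("fetching %q: status %s", url, resp.Status)
	}
}

func main() {
	body, etag, lm, err := condFetch("https://tilde.team/~dozens/dreams/rss.xml", "", "")
	if err != nil {
		fmt.Println("fetch failed:", err)
		return
	}
	if body == nil {
		fmt.Println("not modified; reuse cached copy")
		return
	}
	fmt.Printf("got %d bytes; Etag=%q Last-Modified=%q\n", len(body), etag, lm)
}

Paired with the "// TODO: cache body" comment, update() would keep the returned body and validators on the FeedSource and hand the cached bytes to fp.Parse whenever a 304 comes back, so a feed is only re-parsed when it actually changed.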