Initial commit
This commit is contained in:
77
internal/feeds/refresh.go
Normal file
77
internal/feeds/refresh.go
Normal file
@@ -0,0 +1,77 @@
|
||||
package feeds
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/mmcdole/gofeed"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
// fp is the package-wide gofeed parser shared by all RefreshFeed calls.
var fp = gofeed.NewParser()

// feedStore is the package-wide store used by Refresh/RefreshFeed to
// persist fetched feeds.
var feedStore = &FeedStore{}
|
||||
|
||||
func Refresh() error {
|
||||
interval := viper.GetInt("refreshTimeoutMinutes")
|
||||
|
||||
for {
|
||||
fmt.Println("Refreshing feeds...")
|
||||
|
||||
for _, feed := range *feedStore.GetFeeds() {
|
||||
go RefreshFeed(feed.FeedURL)
|
||||
}
|
||||
|
||||
fmt.Printf("Going to sleep for %d minutes\n", interval)
|
||||
time.Sleep(time.Duration(interval) * time.Minute)
|
||||
}
|
||||
}
|
||||
|
||||
func RefreshFeed(url string) error {
|
||||
fmt.Printf("Refreshing %s\n", url)
|
||||
f, err := fp.ParseURL(url)
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to refresh %s\n", url)
|
||||
} else {
|
||||
imageURL := ""
|
||||
if f.Image != nil {
|
||||
imageURL = f.Image.URL
|
||||
}
|
||||
|
||||
feed := Feed{
|
||||
ID: strings.ReplaceAll(base64.StdEncoding.EncodeToString([]byte(url)), "/", ""),
|
||||
Title: f.Title,
|
||||
Description: f.Description,
|
||||
HomepageURL: f.Link,
|
||||
FeedURL: url,
|
||||
ImageURL: imageURL,
|
||||
LastUpdated: f.UpdatedParsed,
|
||||
Items: []Item{},
|
||||
}
|
||||
for _, item := range f.Items {
|
||||
imageURL := ""
|
||||
if f.Image != nil {
|
||||
imageURL = f.Image.URL
|
||||
}
|
||||
|
||||
feed.Items = append(feed.Items, Item{
|
||||
ID: strings.ReplaceAll(base64.StdEncoding.EncodeToString([]byte(item.GUID)), "/", ""),
|
||||
Title: item.Title,
|
||||
Description: item.Description,
|
||||
Content: item.Content,
|
||||
URL: item.Link,
|
||||
ImageURL: imageURL,
|
||||
LastUpdated: item.UpdatedParsed,
|
||||
Created: item.PublishedParsed,
|
||||
GUID: item.GUID,
|
||||
FeedID: feed.ID,
|
||||
})
|
||||
}
|
||||
feedStore.SaveFeed(feed)
|
||||
|
||||
fmt.Printf("Finished refreshing '%s'\n", feed.Title)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
72
internal/feeds/store.go
Normal file
72
internal/feeds/store.go
Normal file
@@ -0,0 +1,72 @@
|
||||
package feeds
|
||||
|
||||
import (
	"fmt"
	"sync"

	"github.com/spf13/viper"
	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
	"gorm.io/gorm/clause"
)
|
||||
|
||||
func (fs *FeedStore) getDB() *gorm.DB {
|
||||
db, err := gorm.Open(sqlite.Open(viper.GetString("dbPath")), &gorm.Config{})
|
||||
if err != nil {
|
||||
panic("failed to connect database")
|
||||
}
|
||||
db.AutoMigrate(&Feed{})
|
||||
db.AutoMigrate(&Item{})
|
||||
|
||||
return db
|
||||
}
|
||||
|
||||
func (fs *FeedStore) GetFeed(id string) *Feed {
|
||||
feed := &Feed{}
|
||||
fs.getDB().Preload("Items").Where("id = ?", id).First(feed)
|
||||
return feed
|
||||
}
|
||||
|
||||
func (fs *FeedStore) GetItem(id string) *Item {
|
||||
item := &Item{}
|
||||
fs.getDB().Where("id = ?", id).First(item)
|
||||
return item
|
||||
}
|
||||
|
||||
func (fs *FeedStore) GetFeeds() *[]Feed {
|
||||
feeds := &[]Feed{}
|
||||
fs.getDB().Preload("Items").Order("title asc").Find(feeds)
|
||||
return feeds
|
||||
}
|
||||
|
||||
func (fs *FeedStore) GetUnread() *[]ItemWithFeed {
|
||||
items := &[]ItemWithFeed{}
|
||||
fs.getDB().Table("items").
|
||||
Where("read = ?", false).
|
||||
Select("items.*, feeds.title as feed_title, feeds.homepage_url as feed_homepage_url").
|
||||
Order("items.created desc, items.title").
|
||||
Joins("left join feeds on feeds.id = items.feed_id").
|
||||
Find(items)
|
||||
|
||||
return items
|
||||
}
|
||||
|
||||
func (fs *FeedStore) SaveFeed(feed Feed) {
|
||||
fs.getDB().Omit("Items").Clauses(clause.OnConflict{
|
||||
Columns: []clause.Column{{Name: "id"}},
|
||||
DoUpdates: clause.AssignmentColumns([]string{"title", "description", "homepage_url", "image_url", "last_updated"}),
|
||||
}).Create(feed)
|
||||
|
||||
for _, item := range feed.Items {
|
||||
fs.getDB().Clauses(clause.OnConflict{
|
||||
Columns: []clause.Column{{Name: "id"}},
|
||||
DoUpdates: clause.AssignmentColumns([]string{"title", "description", "content", "url", "image_url", "last_updated"}),
|
||||
}).Create(item)
|
||||
}
|
||||
}
|
||||
|
||||
func (fs *FeedStore) MarkAsRead(itemID string) {
|
||||
item := &Item{}
|
||||
fs.getDB().Where("id = ?", itemID).First(item)
|
||||
|
||||
item.Read = true
|
||||
|
||||
fs.getDB().Save(*item)
|
||||
}
|
67
internal/feeds/types.go
Normal file
67
internal/feeds/types.go
Normal file
@@ -0,0 +1,67 @@
|
||||
package feeds
|
||||
|
||||
import (
|
||||
"encoding/xml"
|
||||
"time"
|
||||
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// FeedStore provides persistence for feeds and items, backed by GORM/SQLite.
// Its zero value is usable: methods obtain their own handle via getDB.
type FeedStore struct {
	// NOTE(review): db is never assigned anywhere in this file — getDB
	// opens its own handle. Confirm whether this field is populated
	// elsewhere or is dead.
	db *gorm.DB
}
|
||||
|
||||
// Feed is a persisted subscription together with its fetched items.
type Feed struct {
	ID          string `gorm:"primaryKey"` // Base64 of FeedURL with "/" stripped (see RefreshFeed)
	Title       string
	Description string
	HomepageURL string // site link reported by the feed
	FeedURL     string // URL the feed is fetched from
	ImageURL    string // feed-level image URL; empty when the feed has none
	LastUpdated *time.Time // nil when the feed omits an updated timestamp
	Items       []Item `gorm:"foreignKey:FeedID"`
	// UnreadCount is recomputed from Items by the AfterFind hook on every
	// load. NOTE(review): it has no `gorm:"-"` tag, so GORM will also
	// create/persist a column for it — confirm that is intended.
	UnreadCount int
}
|
||||
|
||||
func (feed *Feed) AfterFind(tx *gorm.DB) (err error) {
|
||||
feed.UnreadCount = 0
|
||||
for _, item := range feed.Items {
|
||||
if !item.Read {
|
||||
feed.UnreadCount++
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Item is a single persisted feed entry.
type Item struct {
	ID          string `gorm:"primaryKey"` // Base64 of GUID with "/" stripped (see RefreshFeed)
	Title       string
	Description string
	Content     string // full entry body when the feed provides one
	URL         string // link to the entry on the source site
	ImageURL    string // copied from the parent feed's image (see RefreshFeed)
	LastUpdated *time.Time // nil when the feed omits an updated timestamp
	Created     *time.Time // published time; nil when absent from the feed
	GUID        string // original GUID as supplied by the feed
	FeedID      string // foreign key to Feed.ID
	Read        bool // set by FeedStore.MarkAsRead; deliberately excluded from SaveFeed's upsert columns
	// NOTE(review): Save is not referenced anywhere in this file — confirm
	// it is used elsewhere before relying on (or removing) it.
	Save bool
}
|
||||
|
||||
// ItemWithFeed is the read-model row returned by FeedStore.GetUnread: an
// Item joined with selected columns of its parent feed.
type ItemWithFeed struct {
	Item

	FeedTitle       string // selected as feeds.title in GetUnread
	FeedHomepageURL string // selected as feeds.homepage_url in GetUnread
}
|
||||
|
||||
// Opml models the subset of an OPML subscription list needed here: the
// version attribute and the flattened list of <body> outline entries.
type Opml struct {
	XMLName  xml.Name  `xml:"opml"`
	Version  string    `xml:"version,attr"`
	Outlines []Outline `xml:"body>outline"`
}
|
||||
|
||||
// Outline is a single OPML outline entry: a feed's title plus its XML
// (feed) URL.
type Outline struct {
	Title  string `xml:"title,attr"`
	XmlUrl string `xml:"xmlUrl,attr"`
}
|
Reference in New Issue
Block a user