// rssd is a small RSS/podcast downloader daemon: it periodically polls a list
// of feed URLs, records recent episodes in a TOML data file, and downloads
// their audio enclosures with a pool of workers.
package main

import (
	"fmt"
	"log"
	"net/url"
	"os"
	"path"
	"sort"
	"strconv"
	"sync"
	"time"

	"github.com/BurntSushi/toml"
	"github.com/cavaliercoder/grab"
	homedir "github.com/mitchellh/go-homedir"
	"github.com/mmcdole/gofeed"
)

var (
	confFile, dataFile, dstDir string // set in init() and main(); dstDir comes from the config's DestDir
)

func init() {
	os.Chdir("/") // go to the root directory
	homeDir, err := homedir.Dir()
	if err != nil {
		log.Fatal("Cannot locate user's home directory")
	}
	confDir := path.Join(homeDir, ".config", "rssd")
	confFile = path.Join(confDir, "rssd.conf")
	dataFile = path.Join(confDir, "podcasts.conf")
}
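
// rssd reads its configuration from ~/.config/rssd/rssd.conf and keeps its
// podcast state in ~/.config/rssd/podcasts.conf. Only rssd.conf has to exist
// before the first run; the data file is (re)written after every feed update
// (see the sample configuration below the Config type).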

// Config mirrors the TOML configuration file (rssd.conf).
type Config struct {
	Workers int      // number of concurrent download workers; main falls back to 3 when this is 0
	DestDir string   // directory downloads are written to; a leading "~" is expanded
	Urls    []string // feed URLs to poll
}
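
// A minimal rssd.conf sketch (the values are placeholders, not shipped
// defaults); BurntSushi/toml matches these keys to the Config fields:
//
//	Workers = 3
//	DestDir = "~/Podcasts"
//	Urls = [
//		"https://example.com/feed.xml",
//	]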

// Item is a single episode selected from a feed.
type Item struct {
	Title, Description, Url, Filename string
	Length                            int // enclosure length in bytes, when the feed provides one
	Published                         time.Time
	Podcast                           *Podcast
}

// Podcast is one feed plus the items selected from it.
type Podcast struct {
	Title, Description, Url string
	Items                   []Item
}

// pcList is the collection persisted to the data file (podcasts.conf).
type pcList struct {
	Podcasts []Podcast
}

// newpcList returns an empty list, optionally preloaded from the first TOML
// file passed in.
func newpcList(confs ...string) (ret *pcList) {
	ret = &pcList{}
	ret.Podcasts = make([]Podcast, 0)
	if len(confs) > 0 {
		if _, err := toml.DecodeFile(confs[0], ret); err != nil {
			log.Print("Error reading podcast list:", err)
		}
	}
	return
}
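
// The data file decoded here (and rewritten by Updater) is TOML of roughly
// this shape; titles, URLs and values are placeholders:
//
//	[[Podcasts]]
//	Title = "Some Show"
//	Description = "A show about things"
//	Url = "https://example.com/feed.xml"
//
//	[[Podcasts.Items]]
//	Title = "Episode 42"
//	Url = "https://example.com/ep42.mp3"
//	Filename = "Some Show/20190315--Episode 42"
//	Length = 12345678
//	Published = 2019-03-15T09:00:00Z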

func (p *pcList) Find(x *Podcast) (int, bool) {
	for i, y := range p.Podcasts {
		if y.Title == x.Title {
			return i, true
		}
	}
	return 0, false
}

// Add inserts a new podcast or merges its items into an existing one.
func (p *pcList) Add(x *Podcast) {
	if x == nil {
		return
	}
	if i, ok := p.Find(x); ok {
		log.Print(" Existing podcast")
		p.Podcasts[i].Merge(x)
	} else {
		log.Print(" New podcast")
		p.Podcasts = append(p.Podcasts, *x)
	}
}

// Merge appends items from x that p does not already have.
func (p *Podcast) Merge(x *Podcast) {
	for _, item := range x.Items {
		if !p.Has(item) {
			p.Items = append(p.Items, item)
		}
	}
}

// Has reports whether p already contains an item with the same title.
func (p *Podcast) Has(i Item) bool {
	for _, x := range p.Items {
		if x.Title == i.Title {
			return true
		}
	}
	return false
}

// Selector decides whether a feed item should be kept.
type Selector func(*gofeed.Item) bool

// AllSelectors combines selectors with logical AND.
func AllSelectors(ss ...Selector) Selector {
	return func(i *gofeed.Item) bool {
		for _, s := range ss {
			if !s(i) {
				return false
			}
		}
		return true
	}
}

// AnySelector combines selectors with logical OR.
func AnySelector(ss ...Selector) Selector {
	return func(i *gofeed.Item) bool {
		for _, s := range ss {
			if s(i) {
				return true
			}
		}
		return false
	}
}

// newerThan keeps items published after t. Items without a parsable
// publication date are skipped.
func newerThan(t time.Time) Selector {
	return func(i *gofeed.Item) bool {
		if i.PublishedParsed == nil {
			return false
		}
		return i.PublishedParsed.After(t)
	}
}

// daysAgo selects items published within the last x days (local midnight cutoff).
func daysAgo(x int) Selector {
	d := time.Now()
	return newerThan(time.Date(d.Year(), d.Month(), d.Day()-x, 0, 0, 0, 0, time.Local))
}
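
// Selectors compose. For example, one could keep only recent items that
// actually carry an enclosure (an illustration only; the daemon itself just
// uses daysAgo(60) in Update):
//
//	sel := AllSelectors(
//		daysAgo(30),
//		func(i *gofeed.Item) bool { return len(i.Enclosures) > 0 },
//	)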

// toPodcast converts a parsed feed into a Podcast, keeping only the items
// accepted by sel and recording the URL of each item's audio enclosure.
func toPodcast(sel Selector, u string, feed *gofeed.Feed) (ret *Podcast) {
	ret = &Podcast{
		Title:       feed.Title,
		Description: feed.Description,
		Url:         u,
		Items:       []Item{},
	}
	for _, i := range feed.Items {
		if i.PublishedParsed != nil && sel(i) {
			fn := i.PublishedParsed.Format("20060102--") + i.Title
			it := Item{
				Title:       i.Title,
				Description: i.Description,
				Filename:    path.Join(ret.Title, fn),
				Published:   *i.PublishedParsed,
			}
			for _, n := range i.Enclosures {
				if n.Type == "audio/mpeg" {
					// keep only scheme, host and path of the enclosure URL (drops query strings)
					if eu, err := url.Parse(n.URL); err != nil {
						it.Url = n.URL
					} else {
						it.Url = fmt.Sprintf("%s://%s%s", eu.Scheme, eu.Host, eu.Path)
					}
					if l, err := strconv.Atoi(n.Length); err == nil {
						it.Length = l
					}
				}
			}
			ret.Items = append(ret.Items, it)
		}
	}
	return
}
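
// Together with Downloader, this yields an on-disk layout of roughly
// <DestDir>/<feed title>/20060102--<item title><ext>, where the extension is
// taken from the enclosure URL's path, e.g.:
//
//	~/Podcasts/Some Show/20190315--Episode 42.mp3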

// readFeed fetches and parses one feed URL, returning nil on failure.
func readFeed(u string, sel Selector) *Podcast {
	fp := gofeed.NewParser()
	feed, err := fp.ParseURL(u)
	if err != nil {
		log.Print(err)
		return nil
	}
	return toPodcast(sel, u, feed)
}

// dlItem is a queue entry: the item plus whether a download request has
// already been issued for it.
type dlItem struct {
	Item        *Item
	downloading bool
}

// ByDate sorts queue entries oldest first.
type ByDate []*dlItem

func (a ByDate) Len() int           { return len(a) }
func (a ByDate) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a ByDate) Less(i, j int) bool { return a[j].Item.Published.After(a[i].Item.Published) }

// dlQueue tracks pending downloads and the channels shared with the grab
// worker goroutines. The embedded Mutex guards items.
type dlQueue struct {
	items  []*dlItem
	reqch  chan *grab.Request
	respch chan *grab.Response
	sync.Mutex
	wg   sync.WaitGroup
	wake chan struct{}
}
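
// How the pieces fit together (a reading of the code, not an API contract):
// QueueUpdater copies items from the podcast list into the queue, Downloader
// turns waiting entries into *grab.Request values and sends them on reqch,
// the workers started in StartDownloader hand those requests to grab and
// publish a *grab.Response for each on respch, and Monitor logs progress for
// every response. The wake channel lets QueueUpdater nudge Downloader as soon
// as new items arrive instead of waiting for its ticker.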

func (q *dlQueue) Sort() {
	sort.Sort(ByDate(q.items))
}

func (q *dlQueue) Find(x *Item) (int, bool) {
	for i, y := range q.items {
		if y.Item.Title == x.Title {
			return i, true
		}
	}
	return 0, false
}

// Waiting returns a new queue holding only the entries that have not been
// handed to a worker yet; it shares the request, response and wake channels
// with q.
func (q *dlQueue) Waiting() *dlQueue {
	ret := &dlQueue{
		items:  make([]*dlItem, 0),
		reqch:  q.reqch,
		respch: q.respch,
		wake:   q.wake,
	}
	for _, i := range q.items {
		if !i.downloading {
			ret.items = append(ret.items, i)
		}
	}
	return ret
}

// Add copies an item into the queue unless one with the same title is
// already there.
func (q *dlQueue) Add(i *Item) {
	if _, ok := q.Find(i); ok {
		return
	}
	di := &dlItem{Item: &Item{}}
	*di.Item = *i
	q.items = append(q.items, di)
}

// Daemon owns the configuration, the podcast list, the download queue and
// the grab client. The embedded Mutex guards the podcast list.
type Daemon struct {
	conf  Config
	g     *grab.Client
	pl    *pcList
	queue *dlQueue
	sync.Mutex
	workers int
	dlwake  chan struct{}
}

// NewDaemon wires up a daemon; the caller still sets workers and starts it.
func NewDaemon(conf Config, pl *pcList) *Daemon {
	return &Daemon{
		conf:   conf,
		g:      grab.NewClient(),
		pl:     pl,
		queue:  &dlQueue{wake: make(chan struct{})},
		dlwake: make(chan struct{}),
	}
}

// Update fetches every configured feed and merges the last 60 days of items
// into the podcast list.
func (d *Daemon) Update(urls []string) {
	sel := daysAgo(60)
	for _, url := range urls {
		log.Print(" -> ", url)
		f := readFeed(url, sel) // do not lock around IO
		d.Lock()
		d.pl.Add(f)
		d.Unlock()
	}
}

// Monitor logs progress for every response coming back from the workers,
// printing an update every five seconds until the transfer finishes.
func (d *Daemon) Monitor() {
	status := func(resp *grab.Response) {
		log.Printf(" %s: %v bytes (%.2f%%)\n",
			resp.Filename,
			resp.BytesComplete(),
			100*resp.Progress())
	}
	mon := func(resp *grab.Response) {
		t := time.NewTicker(5 * time.Second)
		defer t.Stop()
	Loop:
		for {
			select {
			case <-t.C:
				status(resp)
			case <-resp.Done:
				status(resp)
				break Loop
			}
		}
		if err := resp.Err(); err != nil {
			log.Printf("Download failed for %s (%s)\n", resp.Filename, err)
		}
	}
	for {
		mon(<-d.queue.respch)
	}
}

// StartDownloader creates the request/response channels, spawns the worker
// pool, and starts the monitor, queue updater and downloader goroutines.
func (d *Daemon) StartDownloader() {
	log.Print("Downloader(): starting")
	d.queue.reqch = make(chan *grab.Request)
	d.queue.respch = make(chan *grab.Response)
	log.Print("Downloader(): spawning workers")
	for i := 0; i < d.workers; i++ {
		d.queue.wg.Add(1)
		go func() {
			d.g.DoChannel(d.queue.reqch, d.queue.respch)
			d.queue.wg.Done()
		}()
	}
	log.Print("Downloader(): starting monitor")
	go d.Monitor()

	if d.queue.items == nil {
		d.queue.Lock()
		d.queue.items = make([]*dlItem, 0)
		d.queue.Unlock()
	}
	go d.QueueUpdater()
	go d.queue.Downloader()
}
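
// Note on the pool: grab's Client.DoChannel consumes requests from the
// request channel and emits one *grab.Response per request on the response
// channel, returning only when the request channel is closed. reqch is never
// closed here, so the workers (and the WaitGroup tracking them) live for the
// life of the process, which suits a long-running daemon.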

// QueueUpdater periodically copies every known item into the download queue
// and then wakes the downloader.
func (d *Daemon) QueueUpdater() {
	t := time.NewTicker(30 * time.Second)
	defer t.Stop()
	for {
		// lock the podcast list and the queue while copying items across
		log.Print("QueueUpdater(): Updating download queue")
		d.Lock()
		d.queue.Lock()
		for _, p := range d.pl.Podcasts {
			for _, i := range p.Items {
				d.queue.Add(&i)
			}
		}
		log.Print("QueueUpdater(): Done updating download queue")
		d.queue.Unlock()
		d.Unlock()
		d.queue.wake <- struct{}{}
		log.Print("QueueUpdater(): Sleeping")
		select {
		case <-t.C:
			continue
		case <-d.dlwake:
			continue
		}
	}
}

// Downloader drains the waiting queue, creating a grab request for each item
// and handing it to the workers; it then sleeps until the ticker fires or
// QueueUpdater wakes it.
func (q *dlQueue) Downloader() {
	t := time.NewTicker(30 * time.Second)
	defer t.Stop()
LOOP:
	for {
		// launch requests for files we are not yet downloading
		q.Lock()
		waiting := q.Waiting()
		q.Unlock()
		log.Print("Download queue length: ", len(waiting.items))
		for _, i := range waiting.items {
			if !i.downloading {
				dst := path.Join(dstDir, i.Item.Filename) + path.Ext(i.Item.Url)
				req, err := grab.NewRequest(dst, i.Item.Url)
				if err != nil {
					log.Print("Request error: ", err)
					continue
				}
				i.downloading = true
				start := time.Now()
				q.reqch <- req
				// if the send blocked for a while, the queue may have changed
				if time.Now().After(start.Add(5 * time.Second)) {
					continue LOOP // refresh list
				}
			}
		}
		log.Print("Downloader(): sleeping")
		select {
		case <-t.C:
			continue
		case <-q.wake:
			continue
		}
	}
}

// Updater starts the download machinery and then loops forever: refresh the
// feeds, persist the podcast list to the data file, wake the queue updater,
// and sleep for half an hour.
func (d *Daemon) Updater() {
	log.Print("Updater(): starting")
	d.StartDownloader()
	for {
		time.Sleep(1 * time.Second)
		d.Update(d.conf.Urls)
		of, err := os.Create(dataFile)
		if err != nil {
			log.Print("Updater(): Cannot open output file")
		} else {
			enc := toml.NewEncoder(of)
			log.Print("Updater(): writing output")
			d.Lock()
			if err := enc.Encode(d.pl); err != nil {
				log.Print("Updater(): encode error: ", err)
			}
			d.Unlock()
			of.Close()
		}
		d.dlwake <- struct{}{}
		time.Sleep(30 * time.Minute)
	}
}

func (d *Daemon) Start() {
	go d.Updater()
}

func main() {
	log.Print("rssd")
	log.Print("reading configuration")
	var conf Config
	var err error
	if _, err = toml.DecodeFile(confFile, &conf); err != nil {
		log.Fatal("Error reading config file:", err)
	}

	pl := newpcList(dataFile)
	d := NewDaemon(conf, pl)
	if d.conf.Workers != 0 {
		d.workers = d.conf.Workers
	} else {
		d.workers = 3 // default worker count
	}
	dstDir, err = homedir.Expand(conf.DestDir)
	if err != nil {
		log.Fatal("Error locating DestDir.")
	}
	d.Start()
	select {} // block forever; all work happens in background goroutines
}