bug fixes; convert outgoing feed to rss instead of json; generic libmpv options support; filter, sorting and limiting options

This commit is contained in:
caffeinelucy 2024-08-15 02:53:02 +02:00
parent b4a978043e
commit fd21d6d9eb
11 changed files with 251 additions and 16 deletions

52
build-rss.go Normal file
View file

@ -0,0 +1,52 @@
package main
import (
"github.com/gorilla/feeds"
"github.com/mmcdole/gofeed"
"time"
)
// BuildRSS converts a parsed gofeed.Feed into an RSS 2.0 XML document
// (string) via gorilla/feeds.
//
// gofeed leaves optional feed/item elements as nil pointers when the
// upstream feed omits them; the original dereferenced Author, Image,
// PublishedParsed/UpdatedParsed and Enclosures[0] unconditionally and
// panicked on any feed missing one of those. Every optional field is now
// guarded and simply left unset when absent.
func (ctx *ServerContext) BuildRSS(input *gofeed.Feed) (string, error) {
	output := &feeds.Feed{
		Title:       input.Title,
		Link:        &feeds.Link{Href: input.Link},
		Description: input.Description,
	}
	if input.Author != nil {
		output.Author = &feeds.Author{
			Name:  input.Author.Name,
			Email: input.Author.Email,
		}
	}
	if input.PublishedParsed != nil {
		output.Created = *input.PublishedParsed
	}
	if input.UpdatedParsed != nil {
		output.Updated = *input.UpdatedParsed
	}
	if input.Image != nil {
		output.Image = &feeds.Image{
			Url:   input.Image.URL,
			Title: input.Image.Title,
		}
	}
	for _, item := range input.Items {
		out := &feeds.Item{
			Title:       item.Title,
			Link:        &feeds.Link{Href: item.Link},
			Description: item.Description,
		}
		if item.Author != nil {
			out.Author = &feeds.Author{
				Name:  item.Author.Name,
				Email: item.Author.Email,
			}
		}
		if item.PublishedParsed != nil {
			out.Created = *item.PublishedParsed
		}
		if item.UpdatedParsed != nil {
			out.Updated = *item.UpdatedParsed
		}
		// Only the first enclosure is carried over (RSS 2.0 allows one
		// enclosure per item); items without enclosures get none.
		if len(item.Enclosures) > 0 {
			out.Enclosure = &feeds.Enclosure{
				Url:    item.Enclosures[0].URL,
				Length: item.Enclosures[0].Length,
				Type:   item.Enclosures[0].Type,
			}
		}
		output.Items = append(output.Items, out)
	}
	return output.ToRss()
}

View file

@ -12,6 +12,11 @@ import (
type GenericError struct { msg string }
func (e *GenericError) Error() string { return e.msg }
type MPVPair struct {
Option string `toml:"option"`
Value string `toml:"value"`
}
type PodcastConfig struct {
Name string `toml:"name"`
URL string `toml:"url"`
@ -19,6 +24,10 @@ type PodcastConfig struct {
User string `toml:"user"`
Pass string `toml:"pass"`
Convert string `toml:"convert"`
FilterType string `toml:"filter-type"`
FilterDirAsc bool `toml:"filter-direction-asc"`
FilterLimit uint `toml:"filter-limit"`
MPVOptions []MPVPair `toml:"mpv-options"`
}
type ServerConfig struct {

View file

@ -1,5 +1,6 @@
port = 3666
media-proxy-base-url = "http://localhost:3666"
# MUST match the PUBLICLY accessible URL!
file-root = "set path here..."
update-timeout = 30
@ -11,4 +12,18 @@ convert = "opus"
[[podcasts]]
name = "a_unique_name"
url = "https://myawesomepodcast"
convert = "opus"
convert = "opus" # the file format to convert to
#[[podcasts.mpv-options]]
#option = "option1"
#value = "value.."
#[[podcasts.mpv-options]]
#option = "option2"
#value = "blabla"
# mpv-options is an array of simple option=value tuples, passed to libmpv
filter-type = "published"
# the type of the filter to use. omit or leave empty to use no filtering
filter-direction-asc = false
# the order to sort before limiting. true means ascending order, false means descending.
filter-limit = 5
# take the first n elements AFTER sorting. to get the 5 newest items of the
# entire feed, use filter-direction-asc = false (newest first) and filter-limit = 5

44
filter-published.go Normal file
View file

@ -0,0 +1,44 @@
package main
import (
"github.com/mmcdole/gofeed"
)
// FeedFilter_published sorts a feed's items by publication date (top-down
// merge sort) and truncates the result to at most feed.FilterLimit items.
// It is dispatched by name via reflection from the cache updater when
// filter-type = "published". FilterDirAsc==true yields oldest-first order,
// false yields newest-first.
//
// Fixes over the original: a FilterLimit larger than the item count no
// longer panics with a slice-bounds error, FilterLimit==0 (option omitted,
// uint zero value) now means "no limit" instead of emptying the feed, and
// items with no parsed publish date no longer cause a nil dereference.
func (ctx *ServerContext) FeedFilter_published(feed PodcastConfig, doc gofeed.Feed) ([]*gofeed.Item, error) {
	// takeLeft reports whether the left candidate precedes the right one
	// in the configured direction. Undated items sort as oldest.
	takeLeft := func(l, r *gofeed.Item) bool {
		lp, rp := l.PublishedParsed, r.PublishedParsed
		switch {
		case lp == nil && rp == nil:
			return true
		case lp == nil:
			return feed.FilterDirAsc
		case rp == nil:
			return !feed.FilterDirAsc
		case feed.FilterDirAsc:
			return lp.Before(*rp)
		default:
			return lp.After(*rp)
		}
	}
	// merge is the standard merge step: interleave two ordered runs.
	merge := func(left, right []*gofeed.Item) []*gofeed.Item {
		buffer := make([]*gofeed.Item, 0, len(left)+len(right))
		idxl, idxr := 0, 0
		for idxl < len(left) && idxr < len(right) {
			if takeLeft(left[idxl], right[idxr]) {
				buffer = append(buffer, left[idxl])
				idxl++
			} else {
				buffer = append(buffer, right[idxr])
				idxr++
			}
		}
		buffer = append(buffer, left[idxl:]...)
		buffer = append(buffer, right[idxr:]...)
		return buffer
	}
	var sort func(items []*gofeed.Item) []*gofeed.Item
	sort = func(items []*gofeed.Item) []*gofeed.Item {
		if len(items) < 2 {
			return items
		}
		mid := len(items) / 2
		return merge(sort(items[:mid]), sort(items[mid:]))
	}
	sorted := sort(doc.Items)
	// Clamp the limit: 0 keeps everything, and a limit beyond the item
	// count must not slice past the end.
	limit := int(feed.FilterLimit)
	if limit == 0 || limit > len(sorted) {
		limit = len(sorted)
	}
	return sorted[:limit], nil
}

1
go.mod
View file

@ -5,6 +5,7 @@ go 1.22.5
require (
github.com/PuerkitoBio/goquery v1.8.0 // indirect
github.com/andybalholm/cascadia v1.3.1 // indirect
github.com/gorilla/feeds v1.2.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/mmcdole/gofeed v1.3.0 // indirect
github.com/mmcdole/goxpp v1.1.1-0.20240225020742-a0c311522b23 // indirect

2
go.sum
View file

@ -5,6 +5,8 @@ github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEq
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/gorilla/feeds v1.2.0 h1:O6pBiXJ5JHhPvqy53NsjKOThq+dNFm8+DFrxBEdzSCc=
github.com/gorilla/feeds v1.2.0/go.mod h1:WMib8uJP3BbY+X8Szd1rA5Pzhdfh+HCCAYT2z7Fza6Y=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/mmcdole/gofeed v1.3.0 h1:5yn+HeqlcvjMeAI4gu6T+crm7d0anY85+M+v6fIFNG4=

View file

@ -16,6 +16,7 @@ func (ctx *ServerContext) HandleFeed(w http.ResponseWriter, req *http.Request) e
ctx.FeedCache.Lock()
defer ctx.FeedCache.Unlock()
if e, has := ctx.FeedCache.Texts[name]; has {
w.Header().Set("Content-Type", "application/rss+json")
fmt.Fprintf(w, e)
} else {
http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)

View file

@ -2,11 +2,16 @@ package main
import (
"net/http"
"strings"
"log"
)
// HandleFile serves converted media files from Config.FileRoot. Only
// request paths under /file/ are served; any other path now gets an
// explicit 404 (the original fell through and wrote an empty 200 body).
func (ctx *ServerContext) HandleFile(w http.ResponseWriter, req *http.Request) error {
	ctx.WG.Add(1)
	defer ctx.WG.Done()
	log.Println("requested file ", req.URL.Path)
	rel, ok := strings.CutPrefix(req.URL.Path, "/file/")
	if !ok {
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return nil
	}
	// http.ServeFile itself rejects requests whose URL path contains a
	// ".." element, so this simple join is not traversable via the URL.
	http.ServeFile(w, req, ctx.Config.FileRoot+"/"+rel)
	return nil
}

View file

@ -37,7 +37,7 @@ func main(){
defer ctx.WG.Done()
for ctx.IsActive {
ctx.WG.Add(1)
ctx.UpdateCache()
ctx.UpdateCacheFiltered()
ctx.Sleep(60)
}
}()

View file

@ -14,14 +14,15 @@ import (
// mpv_handle* mpvinit(){
// return mpv_create();
// }
// int mpvbegin(mpv_handle* mpv, char* src, char* dst, char* ext) {
// int mpvbegin(mpv_handle* mpv, char* src, char* dst) {
// mpv_set_property_string(mpv, "o", dst);
// mpv_set_property_string(mpv, "of", ext);
// mpv_set_property_string(mpv, "oacopts", "b=96k");
// mpv_initialize(mpv);
// const char *args[3] = {"loadfile", src, NULL};
// return mpv_command(mpv, args);
// }
// void mpvset(mpv_handle* mpv, char* opt, char* val) {
// mpv_set_property_string(mpv, opt, val);
// }
// mpv_event* mpvwait(mpv_handle* mpv){
// return mpv_wait_event(mpv, 0.25);
// }
@ -30,22 +31,31 @@ import (
// }
import "C"
func (ctx* ServerContext) MPVConvert(source string, destination string, ext string) {
func (ctx* ServerContext) MPVConvert(source string, destination string, ext string, options []MPVPair) {
defer ctx.WG.Done()
dir := filepath.Dir(destination)
os.MkdirAll(dir, os.ModePerm)
mpv := C.mpvinit()
set := func (opt string, val string) {
copt := C.CString(opt)
defer C.free(unsafe.Pointer(copt))
cval := C.CString(val)
defer C.free(unsafe.Pointer(cval))
C.mpvset(mpv, copt, cval)
}
csrc := C.CString(source)
defer C.free(unsafe.Pointer(csrc))
cdst := C.CString(destination)
defer C.free(unsafe.Pointer(cdst))
cext := C.CString(ext)
oacopts := C.CString("oacopts")
defer C.free(unsafe.Pointer(oacopts))
r := C.mpvbegin(mpv, csrc, cdst, cext)
for _, p := range options {
set(p.Option, p.Value)
}
r := C.mpvbegin(mpv, csrc, cdst)
if r!=0 {
log.Println("mpv error (", source, ", ", destination, ", ", ext, "): ", r)
}

View file

@ -5,14 +5,110 @@ import (
"context"
"time"
"strings"
"strconv"
_"strconv"
"net/url"
"log"
"os"
"reflect"
"encoding/base64"
)
func (ctx* ServerContext) UpdateCache() error {
// UpdateCacheFiltered rebuilds the in-memory RSS cache. For every
// configured feed it: parses the upstream feed (with optional basic auth
// and a per-feed timeout), applies the configured filter (dispatched by
// name via reflection to a FeedFilter_<type> method), rewrites audio
// enclosure URLs to point at the local media proxy, schedules conversion
// of files not yet present on disk, renders the result as RSS, and finally
// swaps the whole cache in under the FeedCache lock.
//
// Fixes over the original: per-feed failures (config typo in filter-type,
// BuildRSS error) now log and skip the feed instead of log.Fatal-ing the
// entire server, and the per-feed timeout context is cancelled immediately
// after parsing instead of being deferred inside the loop (which leaked
// one context per feed until the function returned).
func (ctx *ServerContext) UpdateCacheFiltered() error {
	defer ctx.WG.Done()
	cache := make(map[string]string)
	parser := gofeed.NewParser()
	for _, feed := range ctx.Config.Feeds {
		if feed.UseAuth {
			parser.AuthConfig = &gofeed.Auth{Username: feed.User, Password: feed.Pass}
		} else {
			parser.AuthConfig = nil
		}
		pctx, cancel := context.WithTimeout(context.Background(), time.Duration(ctx.Config.UpdateTimeout)*time.Second)
		doc, err := parser.ParseURLWithContext(feed.URL, pctx)
		cancel() // release the timer now; do not defer inside the loop
		if err != nil {
			log.Println(err)
			continue
		}
		if feed.FilterType != "" {
			meth := reflect.ValueOf(ctx).MethodByName("FeedFilter_" + feed.FilterType)
			if !meth.IsValid() {
				// A bad filter-type in the config must not kill the
				// running server; skip this feed and keep going.
				log.Println("invalid filter type <", feed.FilterType, "> for feed <", feed.Name, ">")
				continue
			}
			meth_ret := meth.Call([]reflect.Value{
				reflect.ValueOf(feed),
				reflect.ValueOf(*doc),
			})
			if ferr, _ := meth_ret[1].Interface().(error); ferr != nil {
				log.Println(ferr)
				continue
			}
			doc.Items = meth_ret[0].Interface().([]*gofeed.Item)
		}
		log.Println("downloading ", len(doc.Items), " items")
		for _, item := range doc.Items {
			for _, enclosure := range item.Enclosures {
				if !ctx.IsActive {
					// Server is shutting down; abandon the update.
					return nil
				}
				if !strings.HasPrefix(enclosure.Type, "audio") {
					continue
				}
				log.Println("captured enclosure audio! @ " + enclosure.URL)
				u, err := url.Parse(enclosure.URL)
				if err != nil {
					log.Println(err)
					continue
				}
				// Stable local name: base64 of the upstream URL path plus
				// the target extension, namespaced by feed name.
				new_filename := feed.Name + "/" + base64.StdEncoding.EncodeToString([]byte(u.Path)) + "." + feed.Convert
				old_url := enclosure.URL
				// Point the published enclosure at the media proxy.
				enclosure.URL = ctx.Config.MediaProxyBaseURL + "/file/" + new_filename
				enclosure.Type = "audio/" + feed.Convert
				// Convert only if the target file does not exist yet.
				if _, err := os.Stat(ctx.Config.FileRoot + "/" + new_filename); os.IsNotExist(err) {
					ctx.WG.Add(1)
					ctx.MPVConvert(old_url, ctx.Config.FileRoot+"/"+new_filename, feed.Convert, feed.MPVOptions)
				}
			}
		}
		rss, err := ctx.BuildRSS(doc)
		if err != nil {
			// One feed failing to render must not abort the whole
			// process; keep serving the remaining feeds.
			log.Println(err)
			continue
		}
		cache[feed.Name] = rss
	}
	ctx.FeedCache.Lock()
	defer ctx.FeedCache.Unlock()
	clear(ctx.FeedCache.Texts)
	for k, v := range cache {
		log.Println(k)
		ctx.FeedCache.Texts[k] = v
	}
	log.Println("cache updated")
	return nil
}
/*func (ctx* ServerContext) UpdateCache() error {
defer ctx.WG.Done()
cache := make(map[string]string)
parser := gofeed.NewParser()
@ -46,7 +142,7 @@ func (ctx* ServerContext) UpdateCache() error {
enclosure.URL = ctx.Config.MediaProxyBaseURL + "/" + new_filename
enclosure.Type = "audio/" + feed.Convert
ctx.WG.Add(1)
ctx.MPVConvert(old_url, ctx.Config.FileRoot + "/" + new_filename, feed.Convert)
ctx.MPVConvert(old_url, ctx.Config.FileRoot + "/" + new_filename, feed.Convert, feed.MPVOptions)
}
}
}
@ -62,7 +158,7 @@ func (ctx* ServerContext) UpdateCache() error {
ext.Attrs["url"] = ctx.Config.MediaProxyBaseURL + "/" + new_filename
ext.Attrs["type"] = "audio/" + feed.Convert
ctx.WG.Add(1)
ctx.MPVConvert(source, ctx.Config.FileRoot + "/" + new_filename, feed.Convert)
ctx.MPVConvert(source, ctx.Config.FileRoot + "/" + new_filename, feed.Convert, feed.MPVOptions)
if fi, err := os.Stat(ctx.Config.FileRoot + "/" + new_filename); err==nil {
ext.Attrs["fileSize"] = strconv.FormatInt(fi.Size(), 10)
}
@ -86,4 +182,4 @@ func (ctx* ServerContext) UpdateCache() error {
}
log.Println("cache updated")
return nil
}
}*/