Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
158 changes: 79 additions & 79 deletions handlers.go

Large diffs are not rendered by default.

39 changes: 23 additions & 16 deletions internal/archiver/archiver.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,10 @@ import (
)

var (
HTTPUserAgent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36"
httpUserAgent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36"
)

type Job struct {
type job struct {
id string
source string
context *context.Context
Expand All @@ -35,41 +35,44 @@ type Job struct {
audiofile string
}

// NewArchiver returns a new Archiver
func NewArchiver(datadir string, concurrency int, logger *zap.SugaredLogger) *Archiver {
a := &Archiver{
datadir: datadir,
concurrency: concurrency,
active: make(map[string]*Job),
active: make(map[string]*job),
failed: make(map[string]error),
logger: logger,
}
go a.manager()
return a
}

// Archiver object
type Archiver struct {
mu sync.RWMutex
datadir string
concurrency int
queue []*Job
active map[string]*Job
queue []*job
active map[string]*job
failed map[string]error
logger *zap.SugaredLogger
debug bool
}

func (a *Archiver) SetConcurrency(n int) {
a.lock("Concurrency")
defer a.unlock("Concurrency")
func (a *Archiver) setConcurrency(n int) {
a.lock("concurrency")
defer a.unlock("concurrency")
a.concurrency = n
}

func (a *Archiver) Concurrency() int {
a.rlock("Concurrency")
defer a.runlock("Concurrency")
func (a *Archiver) getConcurrency() int {
a.rlock("concurrency")
defer a.runlock("concurrency")
return a.concurrency
}

// QueuedJobs return job queue list
func (a *Archiver) QueuedJobs() []string {
a.rlock("QueuedJobs")
defer a.runlock("QueuedJobID")
Expand All @@ -81,17 +84,19 @@ func (a *Archiver) QueuedJobs() []string {
return ids
}

// ActiveJobs return job active list
func (a *Archiver) ActiveJobs() []string {
a.rlock("ActiveJobs")
defer a.runlock("ActiveJobs")
var ids []string
for id, _ := range a.active {
for id := range a.active {
ids = append(ids, id)
}
sort.Strings(ids)
return ids
}

// InProgress return true if job id is in progress
func (a *Archiver) InProgress(id string) bool {
for _, job := range a.QueuedJobs() {
if job == id {
Expand All @@ -106,6 +111,7 @@ func (a *Archiver) InProgress(id string) bool {
return false
}

// Remove removes a job
func (a *Archiver) Remove(id string) {
a.lock("Remove")
defer a.unlock("Remove")
Expand All @@ -123,6 +129,7 @@ func (a *Archiver) Remove(id string) {
return
}

// Add adds a job
func (a *Archiver) Add(id string, source string) {
a.lock("Add")
defer a.unlock("Add")
Expand Down Expand Up @@ -165,9 +172,9 @@ func (a *Archiver) runlock(loc string) {
a.mu.RUnlock()
}

func (a *Archiver) newJob(id, source string) *Job {
func (a *Archiver) newJob(id, source string) *job {
ctx, cancel := context.WithCancel(context.Background())
return &Job{
return &job{
id: id,
source: source,
context: &ctx,
Expand Down Expand Up @@ -210,7 +217,7 @@ func (a *Archiver) manager() {
}
}

func (a *Archiver) archive(job *Job) {
func (a *Archiver) archive(job *job) {
var failed error

// Clean up on completion.
Expand Down Expand Up @@ -268,7 +275,7 @@ func (a *Archiver) download(ctx context.Context, rawurl, filename string) error
if err != nil {
return err
}
req.Header.Set("User-Agent", HTTPUserAgent)
req.Header.Set("User-Agent", httpUserAgent)
req = req.WithContext(ctx)

res, err := http.DefaultClient.Do(req)
Expand Down
5 changes: 5 additions & 0 deletions internal/logtailer/logtailer.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,14 @@ import (
"sync"
)

// Logtailer retains the most recent log output in a fixed-size circular
// buffer so it can be replayed on demand (see Lines). The embedded
// RWMutex serializes concurrent readers and writers of tail.
type Logtailer struct {
	sync.RWMutex

	tail *circbuf.Buffer
}

// NewLogtailer ...
func NewLogtailer(size int64) (*Logtailer, error) {
buf, err := circbuf.NewBuffer(size)
if err != nil {
Expand All @@ -20,6 +22,7 @@ func NewLogtailer(size int64) (*Logtailer, error) {
return &Logtailer{tail: buf}, nil
}

// Lines ...
func (l *Logtailer) Lines() []string {
l.RLock()
buf := l.tail.Bytes()
Expand All @@ -33,13 +36,15 @@ func (l *Logtailer) Lines() []string {
return strings.Split(s[start:], "\n")
}

// Write implements io.Writer: it appends buf to the circular tail
// buffer while holding the write lock, returning the count and error
// from the underlying buffer write.
func (l *Logtailer) Write(buf []byte) (int, error) {
	l.Lock()
	defer l.Unlock()
	return l.tail.Write(buf)
}

// Sync is a no-op. It exists so Logtailer satisfies a flush/sync
// interface expected by a logging sink — NOTE(review): presumably
// zap's WriteSyncer, given the zap usage elsewhere; confirm.
func (l *Logtailer) Sync() error {
	return nil
}
2 changes: 2 additions & 0 deletions internal/youtube/search.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,12 @@ import (
log "github.com/Sirupsen/logrus"
)

// SetDebug raises the logrus logging verbosity for this package to
// the Debug level.
func SetDebug() {
	log.SetLevel(log.DebugLevel)
}

// Search ...
func Search(query string) ([]Video, error) {
u, err := url.Parse("https://www.youtube.com/results")
if err != nil {
Expand Down
1 change: 1 addition & 0 deletions internal/youtube/stream.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import (
"fmt"
)

// Stream ...
type Stream struct {
URL string `json:"url"`
Extension string `json:"extension"`
Expand Down
8 changes: 8 additions & 0 deletions internal/youtube/video.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ var (
fixurlRegexp = regexp.MustCompile(`\,[^=]+=.*$`)
)

// Video ...
type Video struct {
ID string `json:"id"`
Title string `json:"title"`
Expand All @@ -37,18 +38,22 @@ type Video struct {
Streams []Stream
}

// Filename returns the on-disk path of the video's MP4 file inside
// dir, named after the video ID.
func (v Video) Filename(dir string) string {
	name := v.ID + ".mp4"
	return filepath.Join(dir, name)
}

// ThumbnailFilename returns the on-disk path of the video's JPEG
// thumbnail inside dir, named after the video ID.
func (v Video) ThumbnailFilename(dir string) string {
	name := v.ID + ".jpg"
	return filepath.Join(dir, name)
}

// jsonFilename returns the on-disk path of the video's JSON metadata
// file inside dir, named after the video ID.
func (v Video) jsonFilename(dir string) string {
	name := v.ID + ".json"
	return filepath.Join(dir, name)
}

// Transcode ...
func (v Video) Transcode(ctx context.Context, dir string) error {
filename := v.Filename(dir)
tmpname := filename + ".encoding"
Expand All @@ -71,6 +76,7 @@ func (v Video) Transcode(ctx context.Context, dir string) error {
return nil
}

// Download ...
func (v Video) Download(ctx context.Context, dir string) error {
if len(v.Streams) == 0 {
return fmt.Errorf("no streams")
Expand Down Expand Up @@ -141,6 +147,7 @@ func download(ctx context.Context, rawurl, filename string) error {
return err
}

// GetVideo ...
func GetVideo(rawid string) (Video, error) {
id := rawid
if strings.HasPrefix(rawid, "http") {
Expand Down Expand Up @@ -293,6 +300,7 @@ func GetVideo(rawid string) (Video, error) {
}, nil
}

// GET ...
func GET(ctx context.Context, rawurl string) (*http.Response, error) {
client := &http.Client{
Timeout: 15 * time.Second,
Expand Down
60 changes: 30 additions & 30 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -174,55 +174,55 @@ func main() {
r.HandleMethodNotAllowed = false

// Handlers
r.GET("/", Log(auth(index, "readonly")))
r.GET(Prefix("/logs"), Log(auth(logs, "admin")))
r.GET(Prefix("/"), Log(auth(home, "readonly")))
r.GET("/", log(auth(index, "readonly")))
r.GET(prefix("/logs"), log(auth(logs, "admin")))
r.GET(prefix("/"), log(auth(home, "readonly")))

// Library
r.GET(Prefix("/library"), Log(auth(library, "readonly")))
r.GET(prefix("/library"), log(auth(library, "readonly")))

// Media
r.GET(Prefix("/media/thumbnail/:media"), Log(auth(thumbnailMedia, "readonly")))
r.GET(Prefix("/media/view/:media"), Log(auth(viewMedia, "readonly")))
r.GET(Prefix("/media/delete/:media"), Log(auth(deleteMedia, "admin")))
r.GET(Prefix("/media/access/:filename"), auth(streamMedia, "readonly"))
r.GET(Prefix("/media/download/:filename"), auth(downloadMedia, "readonly"))
r.GET(prefix("/media/thumbnail/:media"), log(auth(thumbnailMedia, "readonly")))
r.GET(prefix("/media/view/:media"), log(auth(viewMedia, "readonly")))
r.GET(prefix("/media/delete/:media"), log(auth(deleteMedia, "admin")))
r.GET(prefix("/media/access/:filename"), auth(streamMedia, "readonly"))
r.GET(prefix("/media/download/:filename"), auth(downloadMedia, "readonly"))

// Publicly accessible streaming (using playlist id as "auth")
r.GET(Prefix("/stream/:list/:filename"), auth(streamMedia, "none"))
r.GET(prefix("/stream/:list/:filename"), auth(streamMedia, "none"))

// Import
r.GET(Prefix("/import"), Log(auth(importHandler, "admin")))
r.GET(prefix("/import"), log(auth(importHandler, "admin")))

// Archiver
r.GET(Prefix("/archiver/jobs"), auth(archiverJobs, "admin"))
r.POST(Prefix("/archiver/save/:id"), Log(auth(archiverSave, "admin")))
r.GET(Prefix("/archiver/cancel/:id"), Log(auth(archiverCancel, "admin")))
r.GET(prefix("/archiver/jobs"), auth(archiverJobs, "admin"))
r.POST(prefix("/archiver/save/:id"), log(auth(archiverSave, "admin")))
r.GET(prefix("/archiver/cancel/:id"), log(auth(archiverCancel, "admin")))

// List
r.GET(Prefix("/create"), Log(auth(createList, "admin")))
r.POST(Prefix("/create"), Log(auth(createList, "admin")))
r.POST(Prefix("/add/:list/:media"), Log(auth(addMediaList, "admin")))
r.POST(Prefix("/remove/:list/:media"), Log(auth(removeMediaList, "admin")))
r.GET(Prefix("/remove/:list/:media"), Log(auth(removeMediaList, "admin")))
r.GET(prefix("/create"), log(auth(createList, "admin")))
r.POST(prefix("/create"), log(auth(createList, "admin")))
r.POST(prefix("/add/:list/:media"), log(auth(addMediaList, "admin")))
r.POST(prefix("/remove/:list/:media"), log(auth(removeMediaList, "admin")))
r.GET(prefix("/remove/:list/:media"), log(auth(removeMediaList, "admin")))

r.GET(Prefix("/edit/:id"), Log(auth(editList, "admin")))
r.POST(Prefix("/edit/:id"), Log(auth(editList, "admin")))
r.GET(Prefix("/shuffle/:id"), Log(auth(shuffleList, "admin")))
r.GET(Prefix("/play/:id"), Log(auth(playList, "none")))
r.GET(Prefix("/m3u/:id"), Log(auth(m3uList, "none")))
r.GET(Prefix("/podcast/:id"), Log(auth(podcastList, "none")))
r.GET(prefix("/edit/:id"), log(auth(editList, "admin")))
r.POST(prefix("/edit/:id"), log(auth(editList, "admin")))
r.GET(prefix("/shuffle/:id"), log(auth(shuffleList, "admin")))
r.GET(prefix("/play/:id"), log(auth(playList, "none")))
r.GET(prefix("/m3u/:id"), log(auth(m3uList, "none")))
r.GET(prefix("/podcast/:id"), log(auth(podcastList, "none")))

r.POST(Prefix("/config"), Log(auth(configHandler, "admin")))
r.POST(prefix("/config"), log(auth(configHandler, "admin")))

r.GET(Prefix("/delete/:id"), Log(auth(deleteList, "admin")))
r.GET(prefix("/delete/:id"), log(auth(deleteList, "admin")))

// API
r.GET(Prefix("/v1/status"), Log(auth(v1status, "none")))
r.GET(prefix("/v1/status"), log(auth(v1status, "none")))

// Assets
r.GET(Prefix("/static/*path"), auth(staticAsset, "none"))
r.GET(Prefix("/logo.png"), Log(auth(logo, "none")))
r.GET(prefix("/static/*path"), auth(staticAsset, "none"))
r.GET(prefix("/logo.png"), log(auth(logo, "none")))

//
// Server
Expand Down
Loading