CLI: Improve "photoprism dl" to download multiple URLs with auth #5219
Signed-off-by: Michael Mayer <michael@photoprism.app>
@@ -25,7 +25,7 @@ var CopyCommand = &cli.Command{
		&cli.StringFlag{
			Name:    "dest",
			Aliases: []string{"d"},
			Usage:   "relative originals `PATH` to which the files should be imported",
			Usage:   "relative originals `PATH` in which new files should be imported",
		},
	},
	Action: copyAction,

@@ -1,10 +1,10 @@
package commands

import (
	"bufio"
	"context"
	"fmt"
	"io"
	"math"
	"net/url"
	"os"
	"path/filepath"
@@ -15,7 +15,6 @@ import (

	"github.com/photoprism/photoprism/internal/config"
	"github.com/photoprism/photoprism/internal/ffmpeg"
	"github.com/photoprism/photoprism/internal/ffmpeg/encode"
	"github.com/photoprism/photoprism/internal/photoprism"
	"github.com/photoprism/photoprism/internal/photoprism/dl"
	"github.com/photoprism/photoprism/internal/photoprism/get"
@@ -26,17 +25,52 @@ import (
	"github.com/photoprism/photoprism/pkg/service/http/scheme"
)

var downloadExamples = `
Usage examples:

  photoprism dl --cookies cookies.txt \
    --add-header 'Authorization: Bearer <token>' \
    --dl-method file --file-remux auto -- \
    https://example.com/a.mp4 https://example.com/b.jpg

  # Add two headers (repeatable flag)
  photoprism dl -a 'Authorization: Bearer <token>' \
    -a 'Accept: application/json' -- URL`

// DownloadCommand configures the command name, flags, and action.
var DownloadCommand = &cli.Command{
	Name:      "download",
	Aliases:   []string{"dl"},
	Usage:     "Imports media from a URL",
	ArgsUsage: "[url]",
	Name:        "download",
	Aliases:     []string{"dl"},
	Usage:       "Imports media from one or more URLs",
	Description: "Download and import media from one or more URLs.\n" + downloadExamples,
	ArgsUsage:   "[url]...",
	Flags: []cli.Flag{
		&cli.StringFlag{
			Name:    "dest",
			Aliases: []string{"d"},
			Usage:   "relative originals `PATH` to which the files should be imported",
			Usage:   "relative originals `PATH` in which new files should be imported",
		},
		&cli.StringFlag{
			Name:    "cookies",
			Aliases: []string{"c"},
			Usage:   "use Netscape-format cookies.txt `FILE` for HTTP authentication",
		},
		&cli.StringSliceFlag{
			Name:    "add-header",
			Aliases: []string{"a"},
			Usage:   "add HTTP request `HEADER` in the form 'Name: Value' (repeatable)",
		},
		&cli.StringFlag{
			Name:    "dl-method",
			Aliases: []string{"m"},
			Value:   "pipe",
			Usage:   "download `METHOD` when using external commands: pipe (stdio stream) or file (temporary files)",
		},
		&cli.StringFlag{
			Name:    "file-remux",
			Aliases: []string{"r"},
			Value:   "always",
			Usage:   "remux `POLICY` for videos when using --dl-method file: auto (skip if MP4), always, or skip",
		},
	},
	Action: downloadAction,
@@ -63,13 +97,32 @@ func downloadAction(ctx *cli.Context) error {
	conf.InitDb()
	defer conf.Shutdown()

	// Get URL from first argument.
	sourceUrl, sourceErr := url.Parse(strings.TrimSpace(ctx.Args().First()))
	// Collect URLs: args or STDIN when no args
	var inputURLs []string
	if ctx.Args().Len() > 0 {
		inputURLs = append(inputURLs, ctx.Args().Slice()...)
	} else {
		// If STDIN is a pipe, read URLs line by line (Phase 1: args take precedence; no --stdin merge)
		fi, _ := os.Stdin.Stat()
		if (fi.Mode() & os.ModeCharDevice) == 0 {
			scanner := bufio.NewScanner(os.Stdin)
			buf := make([]byte, 0, 64*1024)
			scanner.Buffer(buf, 1024*1024)
			for scanner.Scan() {
				line := strings.TrimSpace(scanner.Text())
				if line == "" || strings.HasPrefix(line, "#") {
					continue
				}
				inputURLs = append(inputURLs, line)
			}
			if err := scanner.Err(); err != nil {
				return err
			}
		}
	}

	if sourceErr != nil {
		return sourceErr
	} else if sourceUrl.Scheme != scheme.Http && sourceUrl.Scheme != scheme.Https {
		return fmt.Errorf("invalid download URL scheme %s", clean.Log(sourceUrl.Scheme))
	if len(inputURLs) == 0 {
		return fmt.Errorf("no download URLs provided")
	}

	var destFolder string
@@ -89,142 +142,178 @@ func downloadAction(ctx *cli.Context) error {

	defer os.RemoveAll(downloadPath)

	mediaType := media.FromName(sourceUrl.Path)
	mediaExt := fs.Ext(sourceUrl.Path)

	switch mediaType {
	case media.Image, media.Vector, media.Raw, media.Document, media.Audio:
		log.Infof("downloading %s from %s", mediaType, clean.Log(sourceUrl.String()))

		if dlName := clean.DlName(fs.BasePrefix(sourceUrl.Path, true)); dlName != "" {
			downloadFile = dlName + mediaExt
		} else {
			downloadFile = time.Now().Format("20060102_150405") + mediaExt
		}

		downloadFilePath := filepath.Join(downloadPath, downloadFile)

		if downloadErr := fs.Download(downloadFilePath, sourceUrl.String()); downloadErr != nil {
			return downloadErr
		}
	// Flags for yt-dlp auth and headers
	cookies := strings.TrimSpace(ctx.String("cookies"))
	// cookiesFromBrowser := strings.TrimSpace(ctx.String("cookies-from-browser"))
	addHeaders := ctx.StringSlice("add-header")
	method := strings.ToLower(strings.TrimSpace(ctx.String("dl-method")))
	if method == "" {
		method = "pipe"
	}
	if method != "pipe" && method != "file" {
		return fmt.Errorf("invalid --dl-method: %s (expected 'pipe' or 'file')", method)
	}
	fileRemux := strings.ToLower(strings.TrimSpace(ctx.String("file-remux")))
	if fileRemux == "" {
		fileRemux = "always"
	}
	switch fileRemux {
	case "always", "auto", "skip":
	default:
		mediaType = media.Video
		log.Infof("downloading %s from %s", mediaType, clean.Log(sourceUrl.String()))
		return fmt.Errorf("invalid --file-remux: %s (expected 'always', 'auto', or 'skip')", fileRemux)
	}

	opt := dl.Options{
		// The following flags currently seem to have no effect when piping the output to stdout;
		// however, that may change in a future version of the "yt-dlp" video downloader:
		MergeOutputFormat: fs.VideoMp4.String(),
		RemuxVideo:        fs.VideoMp4.String(),
		// Alternative codec sorting format to prioritize H264/AVC:
		// vcodec:h264>av01>h265>vp9.2>vp9>h263,acodec:m4a>mp4a>aac>mp3>mp3>ac3>dts
		SortingFormat: "lang,quality,res,fps,codec:avc:m4a,channels,size,br,asr,proto,ext,hasaud,source,id",
	// Process inputs sequentially (Phase 1)
	var failures int
	for _, raw := range inputURLs {
		u, perr := url.Parse(strings.TrimSpace(raw))
		if perr != nil {
			log.Errorf("invalid URL: %s", clean.Log(raw))
			failures++
			continue
		}
		if u.Scheme != scheme.Http && u.Scheme != scheme.Https {
			log.Errorf("invalid URL scheme %s: %s", clean.Log(u.Scheme), clean.Log(raw))
			failures++
			continue
		}

		result, err := dl.NewMetadata(context.Background(), sourceUrl.String(), opt)
		mt := media.FromName(u.Path)
		ext := fs.Ext(u.Path)

		if err != nil {
			return err
		}
		switch mt {
		case media.Image, media.Vector, media.Raw, media.Document, media.Audio:
			log.Infof("downloading %s from %s", mt, clean.Log(u.String()))
			if dlName := clean.DlName(fs.BasePrefix(u.Path, true)); dlName != "" {
				downloadFile = dlName + ext
			} else {
				downloadFile = time.Now().Format("20060102_150405") + ext
			}
			downloadFilePath := filepath.Join(downloadPath, downloadFile)
			if downloadErr := fs.Download(downloadFilePath, u.String()); downloadErr != nil {
				log.Errorf("download failed: %v", downloadErr)
				failures++
				continue
			}
		default:
			mt = media.Video
			log.Infof("downloading %s from %s", mt, clean.Log(u.String()))

			if dlName := clean.DlName(result.Info.Title); dlName != "" {
				downloadFile = dlName + fs.ExtMp4
			} else {
				downloadFile = time.Now().Format("20060102_150405") + fs.ExtMp4
			}
			opt := dl.Options{
				MergeOutputFormat: fs.VideoMp4.String(),
				RemuxVideo:        fs.VideoMp4.String(),
				SortingFormat:     "lang,quality,res,fps,codec:avc:m4a,channels,size,br,asr,proto,ext,hasaud,source,id",
				Cookies:           cookies,
				AddHeaders:        addHeaders,
			}

			// Compose download file path.
			downloadFilePath := filepath.Join(downloadPath, downloadFile)
			result, err := dl.NewMetadata(context.Background(), u.String(), opt)
			if err != nil {
				log.Errorf("metadata failed: %v", err)
				failures++
				continue
			}

			// Download the first video and embed its metadata,
			// see https://github.com/yt-dlp/yt-dlp?tab=readme-ov-file#format-selection-examples.
			downloadResult, err := result.DownloadWithOptions(context.Background(), dl.DownloadOptions{
				// TODO: While this may work with a future version of the "yt-dlp" video downloader,
				// it is currently not possible to properly download videos with separate video and
				// audio streams when piping the output to stdout. For now, the following Filter
				// will download the best combined video and audio content (see docs for details).
				Filter: "best",
				// Alternative filters for combining the best video and audio streams:
				// Filter: "bestvideo*+bestaudio/best",
				// Filter: "best/bestvideo+bestaudio",
				DownloadAudioOnly: false,
				EmbedMetadata:     true,
				EmbedSubs:         false,
				ForceOverwrites:   false,
				DisableCaching:    false,
				// Download the first video if multiple videos are available:
				PlaylistIndex: 1,
			})
			// Base filename for pipe method
			if dlName := clean.DlName(result.Info.Title); dlName != "" {
				downloadFile = dlName + fs.ExtMp4
			} else {
				downloadFile = time.Now().Format("20060102_150405") + fs.ExtMp4
			}
			downloadFilePath := filepath.Join(downloadPath, downloadFile)

			// Check if download was successful.
			if err != nil {
				return err
			}
			if method == "pipe" {
				// Stream to stdout
				downloadResult, err := result.DownloadWithOptions(context.Background(), dl.DownloadOptions{
					Filter:            "best",
					DownloadAudioOnly: false,
					EmbedMetadata:     true,
					EmbedSubs:         false,
					ForceOverwrites:   false,
					DisableCaching:    false,
					PlaylistIndex:     1,
				})
				if err != nil {
					log.Errorf("download failed: %v", err)
					failures++
					continue
				}
				func() {
					defer downloadResult.Close()
					f, ferr := os.Create(downloadFilePath)
					if ferr != nil {
						log.Errorf("create file failed: %v", ferr)
						failures++
						return
					}
					if _, cerr := io.Copy(f, downloadResult); cerr != nil {
						_ = f.Close()
						log.Errorf("write file failed: %v", cerr)
						failures++
						return
					}
					_ = f.Close()
				}()

		defer downloadResult.Close()

		file, err := os.Create(downloadFilePath)

		if err != nil {
			return err
		}

		if _, err = io.Copy(file, downloadResult); err != nil {
			file.Close()
			return err
		}

		file.Close()

		// TODO: The remux command flags currently don't seem to have an effect when piping the output to stdout,
		// so this command will manually remux the downloaded file with ffmpeg. This ensures that the file is a
		// valid MP4 that can be played. It also adds metadata in the same step.
		remuxOpt := encode.NewRemuxOptions(conf.FFmpegBin(), fs.VideoMp4, false)

		if title := clean.Name(result.Info.Title); title != "" {
			remuxOpt.Title = title
		} else if title = clean.Name(result.Info.AltTitle); title != "" {
			remuxOpt.Title = title
		}

		if desc := strings.TrimSpace(result.Info.Description); desc != "" {
			remuxOpt.Description = desc
		}

		if u := strings.TrimSpace(sourceUrl.String()); u != "" {
			remuxOpt.Comment = u
		}

		if author := clean.Name(result.Info.Artist); author != "" {
			remuxOpt.Author = author
		} else if author = clean.Name(result.Info.AlbumArtist); author != "" {
			remuxOpt.Author = author
		} else if author = clean.Name(result.Info.Creator); author != "" {
			remuxOpt.Author = author
		} else if author = clean.Name(result.Info.License); author != "" {
			remuxOpt.Author = author
		}

		if result.Info.Timestamp > 1 {
			sec, dec := math.Modf(result.Info.Timestamp)
			remuxOpt.Created = time.Unix(int64(sec), int64(dec*(1e9)))
		}

		if remuxErr := ffmpeg.RemuxFile(downloadFilePath, "", remuxOpt); remuxErr != nil {
			return remuxErr
				// Remux and embed metadata (pipe policy: always)
				remuxOpt := dl.RemuxOptionsFromInfo(conf.FFmpegBin(), fs.VideoMp4, result.Info, u.String())
				if remuxErr := ffmpeg.RemuxFile(downloadFilePath, "", remuxOpt); remuxErr != nil {
					log.Errorf("remux failed: %v", remuxErr)
					failures++
					continue
				}
			} else {
				// file method
				// Deterministic output template within the session temp dir
				outTpl := filepath.Join(downloadPath, "ppdl_%(id)s.%(ext)s")
				files, err := result.DownloadToFileWithOptions(context.Background(), dl.DownloadOptions{
					Filter:            "best",
					DownloadAudioOnly: false,
					EmbedMetadata:     true,
					EmbedSubs:         false,
					ForceOverwrites:   false,
					DisableCaching:    false,
					PlaylistIndex:     1,
					Output:            outTpl,
				})
				if err != nil {
					log.Errorf("download failed: %v", err)
					// even on error, any completed files returned will be imported
				}
				// Ensure container/metadata per remux policy for file method
				if fileRemux != "skip" {
					for _, fp := range files {
						if fileRemux == "auto" && strings.EqualFold(filepath.Ext(fp), fs.ExtMp4) {
							// Assume yt-dlp produced a valid MP4 and embedded metadata
							continue
						}
						remuxOpt := dl.RemuxOptionsFromInfo(conf.FFmpegBin(), fs.VideoMp4, result.Info, u.String())
						if remuxErr := ffmpeg.RemuxFile(fp, "", remuxOpt); remuxErr != nil {
							log.Errorf("remux failed: %v", remuxErr)
							failures++
							continue
						}
					}
				}
			}
		}
	}

	log.Infof("importing %s to %s", mediaType, clean.Log(filepath.Join(conf.OriginalsPath(), destFolder)))

	// Import results once
	log.Infof("importing downloads to %s", clean.Log(filepath.Join(conf.OriginalsPath(), destFolder)))
	w := get.Import()
	opt := photoprism.ImportOptionsMove(downloadPath, destFolder)

	w.Start(opt)

	elapsed := time.Since(start)
	if failures > 0 {
		log.Warnf("completed with %d error(s) in %s", failures, elapsed)
	} else {
		log.Infof("completed in %s", elapsed)
	}

	log.Infof("completed in %s", elapsed)

	if failures > 0 {
		return fmt.Errorf("some downloads failed: %d", failures)
	}
	return nil
}

internal/commands/download_e2e_test.go (new file, 101 lines)
@@ -0,0 +1,101 @@
package commands

import (
	"os"
	"path/filepath"
	"runtime"
	"strings"
	"testing"

	"github.com/photoprism/photoprism/internal/photoprism/dl"
	"github.com/photoprism/photoprism/internal/photoprism/get"
)

// createFakeYtDlp writes a small script that:
// - prints JSON when --dump-single-json (metadata)
// - parses --output TEMPLATE and on --print creates a dummy file at TEMPLATE
//   with %(id)s -> abc and %(ext)s -> mp4, then prints the path
func createFakeYtDlp(t *testing.T) string {
	t.Helper()
	dir := t.TempDir()
	path := filepath.Join(dir, "yt-dlp")
	if runtime.GOOS == "windows" {
		// Not needed in CI/dev container. Keep simple stub.
		content := "@echo off\r\n" +
			"for %%A in (%*) do (\r\n" +
			" if \"%%~A\"==\"--dump-single-json\" ( echo {\"id\":\"abc\",\"title\":\"Test\",\"url\":\"http://example.com\",\"_type\":\"video\"} & goto :eof )\r\n" +
			")\r\n"
		if err := os.WriteFile(path, []byte(content), 0o755); err != nil {
			t.Fatalf("failed to write fake yt-dlp: %v", err)
		}
		return path
	}
	var b strings.Builder
	b.WriteString("#!/usr/bin/env bash\n")
	b.WriteString("set -euo pipefail\n")
	b.WriteString("OUT_TPL=\"\"\n")
	b.WriteString("i=0; while [[ $i -lt $# ]]; do i=$((i+1)); arg=\"${!i}\"; if [[ \"$arg\" == \"--dump-single-json\" ]]; then echo '{\"id\":\"abc\",\"title\":\"Test\",\"url\":\"http://example.com\",\"_type\":\"video\"}'; exit 0; fi; if [[ \"$arg\" == \"--output\" ]]; then i=$((i+1)); OUT_TPL=\"${!i}\"; fi; done\n")
	b.WriteString("if [[ $* == *'--print '* ]]; then OUT=\"$OUT_TPL\"; OUT=${OUT//%(id)s/abc}; OUT=${OUT//%(ext)s/mp4}; mkdir -p \"$(dirname \"$OUT\")\"; echo 'dummy' > \"$OUT\"; echo \"$OUT\"; exit 0; fi\n")
	if err := os.WriteFile(path, []byte(b.String()), 0o755); err != nil {
		t.Fatalf("failed to write fake yt-dlp: %v", err)
	}
	return path
}

func TestDownloadImpl_FileMethod_AutoSkipsRemux(t *testing.T) {
	fake := createFakeYtDlp(t)
	orig := dl.YtDlpBin
	defer func() { dl.YtDlpBin = orig }()

	dest := "dl-e2e"
	// Force ffmpeg to an invalid path; with remux=auto the remux should be skipped for mp4
	if c := get.Config(); c != nil {
		c.Options().FFmpegBin = "/bin/false"
		// Disable convert (thumb generation) to avoid ffmpeg dependency in test
		s := c.Settings()
		s.Index.Convert = false
	}
	conf := get.Config()
	if conf == nil {
		t.Fatalf("missing test config")
	}
	// Ensure DB is initialized and registered (bypassing CLI InitConfig)
	_ = conf.Init()
	conf.RegisterDb()
	// Override yt-dlp after config init (config may set dl.YtDlpBin)
	dl.YtDlpBin = fake
	t.Logf("using yt-dlp binary: %s", dl.YtDlpBin)
	// Execute the implementation core directly
	err := runDownload(conf, DownloadOpts{
		Dest:      dest,
		Method:    "file",
		FileRemux: "auto",
	}, []string{"https://example.com/video"})
	if err != nil {
		t.Fatalf("runDownload failed: %v", err)
	}

	// Verify a file exists under Originals/dest with .mp4 extension
	c := get.Config()
	if c == nil {
		t.Fatalf("missing test config")
	}
	outDir := filepath.Join(c.OriginalsPath(), dest)
	found := false
	_ = filepath.WalkDir(outDir, func(path string, d os.DirEntry, err error) error {
		if err != nil || d == nil {
			return nil
		}
		if !d.IsDir() && strings.HasSuffix(strings.ToLower(d.Name()), ".mp4") {
			found = true
			return filepath.SkipDir
		}
		return nil
	})
	if !found {
		t.Fatalf("expected at least one mp4 in %s", outDir)
	}

	// Cleanup destination folder
	_ = os.RemoveAll(outDir)
}
internal/commands/download_help_test.go (new file, 28 lines)
@@ -0,0 +1,28 @@
package commands

import (
	"testing"
)

func TestDownloadCommand_HelpFlagsAndArgs(t *testing.T) {
	if DownloadCommand.ArgsUsage != "[url]..." {
		t.Fatalf("ArgsUsage mismatch: %q", DownloadCommand.ArgsUsage)
	}
	// Verify new flags are present by name
	want := map[string]bool{
		"cookies":    false,
		"add-header": false,
		"dl-method":  false,
	}
	for _, f := range DownloadCommand.Flags {
		name := f.Names()[0]
		if _, ok := want[name]; ok {
			want[name] = true
		}
	}
	for k, ok := range want {
		if !ok {
			t.Fatalf("missing flag: %s", k)
		}
	}
}
internal/commands/download_impl.go (new file, 221 lines)
@@ -0,0 +1,221 @@
package commands

import (
	"context"
	"fmt"
	"io"
	"net/url"
	"os"
	"path/filepath"
	"strings"
	"time"

	"github.com/photoprism/photoprism/internal/config"
	"github.com/photoprism/photoprism/internal/ffmpeg"
	"github.com/photoprism/photoprism/internal/photoprism"
	"github.com/photoprism/photoprism/internal/photoprism/dl"
	"github.com/photoprism/photoprism/internal/photoprism/get"
	"github.com/photoprism/photoprism/pkg/clean"
	"github.com/photoprism/photoprism/pkg/fs"
	"github.com/photoprism/photoprism/pkg/media"
	"github.com/photoprism/photoprism/pkg/rnd"
	"github.com/photoprism/photoprism/pkg/service/http/scheme"
)

// DownloadOpts contains the command options used by runDownload.
type DownloadOpts struct {
	Dest               string
	Cookies            string
	CookiesFromBrowser string
	AddHeaders         []string
	Method             string // pipe|file
	FileRemux          string // always|auto|skip
}

// runDownload executes the download/import flow for the given inputs and options.
// It is the testable core used by the CLI action.
func runDownload(conf *config.Config, opts DownloadOpts, inputURLs []string) error {
	start := time.Now()
	if conf == nil {
		return fmt.Errorf("nil config")
	}
	if conf.ReadOnly() {
		return config.ErrReadOnly
	}
	if len(inputURLs) == 0 {
		return fmt.Errorf("no download URLs provided")
	}

	// Resolve destination folder
	destFolder := opts.Dest
	if destFolder == "" {
		destFolder = conf.ImportDest()
	} else {
		destFolder = clean.UserPath(destFolder)
	}

	// Create session download directory
	downloadPath := filepath.Join(conf.TempPath(), fs.DownloadDir+"_"+rnd.Base36(12))
	if err := fs.MkdirAll(downloadPath); err != nil {
		return err
	}
	defer os.RemoveAll(downloadPath)

	// Normalize method/remux policy
	method := strings.ToLower(strings.TrimSpace(opts.Method))
	if method == "" {
		method = "pipe"
	}
	if method != "pipe" && method != "file" {
		return fmt.Errorf("invalid method: %s", method)
	}
	fileRemux := strings.ToLower(strings.TrimSpace(opts.FileRemux))
	if fileRemux == "" {
		fileRemux = "always"
	}
	switch fileRemux {
	case "always", "auto", "skip":
	default:
		return fmt.Errorf("invalid file remux policy: %s", fileRemux)
	}

	// Process inputs sequentially
	var failures int
	for _, raw := range inputURLs {
		u, perr := url.Parse(strings.TrimSpace(raw))
		if perr != nil {
			log.Errorf("invalid URL: %s", clean.Log(raw))
			failures++
			continue
		}
		if u.Scheme != scheme.Http && u.Scheme != scheme.Https {
			log.Errorf("invalid URL scheme %s: %s", clean.Log(u.Scheme), clean.Log(raw))
			failures++
			continue
		}

		mt := media.FromName(u.Path)
		ext := fs.Ext(u.Path)
		var downloadFile string

		switch mt {
		case media.Image, media.Vector, media.Raw, media.Document, media.Audio:
			log.Infof("downloading %s from %s", mt, clean.Log(u.String()))
			if dlName := clean.DlName(fs.BasePrefix(u.Path, true)); dlName != "" {
				downloadFile = dlName + ext
			} else {
				downloadFile = time.Now().Format("20060102_150405") + ext
			}
			downloadFilePath := filepath.Join(downloadPath, downloadFile)
			if downloadErr := fs.Download(downloadFilePath, u.String()); downloadErr != nil {
				log.Errorf("download failed: %v", downloadErr)
				failures++
				continue
			}
		default:
			mt = media.Video
			log.Infof("downloading %s from %s", mt, clean.Log(u.String()))
			opt := dl.Options{
				MergeOutputFormat:  fs.VideoMp4.String(),
				RemuxVideo:         fs.VideoMp4.String(),
				SortingFormat:      "lang,quality,res,fps,codec:avc:m4a,channels,size,br,asr,proto,ext,hasaud,source,id",
				Cookies:            opts.Cookies,
				CookiesFromBrowser: opts.CookiesFromBrowser,
				AddHeaders:         opts.AddHeaders,
			}
			result, err := dl.NewMetadata(context.Background(), u.String(), opt)
			if err != nil {
				log.Errorf("metadata failed: %v", err)
				failures++
				continue
			}
			if dlName := clean.DlName(result.Info.Title); dlName != "" {
				downloadFile = dlName + fs.ExtMp4
			} else {
				downloadFile = time.Now().Format("20060102_150405") + fs.ExtMp4
			}
			downloadFilePath := filepath.Join(downloadPath, downloadFile)

			if method == "pipe" {
				downloadResult, err := result.DownloadWithOptions(context.Background(), dl.DownloadOptions{
					Filter:            "best",
					DownloadAudioOnly: false,
					EmbedMetadata:     true,
					EmbedSubs:         false,
					ForceOverwrites:   false,
					DisableCaching:    false,
					PlaylistIndex:     1,
				})
				if err != nil {
					log.Errorf("download failed: %v", err)
					failures++
					continue
				}
				func() {
					defer downloadResult.Close()
					f, ferr := os.Create(downloadFilePath)
					if ferr != nil {
						log.Errorf("create file failed: %v", ferr)
						failures++
						return
					}
					if _, cerr := io.Copy(f, downloadResult); cerr != nil {
						_ = f.Close()
						log.Errorf("write file failed: %v", cerr)
						failures++
						return
					}
					_ = f.Close()
				}()

				remuxOpt := dl.RemuxOptionsFromInfo(conf.FFmpegBin(), fs.VideoMp4, result.Info, u.String())
				if remuxErr := ffmpeg.RemuxFile(downloadFilePath, "", remuxOpt); remuxErr != nil {
					log.Errorf("remux failed: %v", remuxErr)
					failures++
					continue
				}
			} else {
				outTpl := filepath.Join(downloadPath, "ppdl_%(id)s.%(ext)s")
				files, err := result.DownloadToFileWithOptions(context.Background(), dl.DownloadOptions{
					Filter:            "best",
					DownloadAudioOnly: false,
					EmbedMetadata:     true,
					EmbedSubs:         false,
					ForceOverwrites:   false,
					DisableCaching:    false,
					PlaylistIndex:     1,
					Output:            outTpl,
				})
				if err != nil {
					log.Errorf("download failed: %v", err)
				}
				if fileRemux != "skip" {
					for _, fp := range files {
						if fileRemux == "auto" && strings.EqualFold(filepath.Ext(fp), fs.ExtMp4) {
							continue
						}
						remuxOpt := dl.RemuxOptionsFromInfo(conf.FFmpegBin(), fs.VideoMp4, result.Info, u.String())
						if remuxErr := ffmpeg.RemuxFile(fp, "", remuxOpt); remuxErr != nil {
							log.Errorf("remux failed: %v", remuxErr)
							failures++
							continue
						}
					}
				}
			}
		}
	}

	log.Infof("importing downloads to %s", clean.Log(filepath.Join(conf.OriginalsPath(), destFolder)))
	w := get.Import()
	opt := photoprism.ImportOptionsMove(downloadPath, destFolder)
	w.Start(opt)

	elapsed := time.Since(start)
	if failures > 0 {
		log.Warnf("completed with %d error(s) in %s", failures, elapsed)
		return fmt.Errorf("some downloads failed: %d", failures)
	}
	log.Infof("completed in %s", elapsed)
	return nil
}
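For orientation, here is a minimal sketch of how a CLI action might map its flag values into `DownloadOpts` and delegate to `runDownload`; the wrapper name `downloadActionSketch` is hypothetical and not part of this commit, and the `urfave/cli/v2` import path is assumed from the `cli.Command` usage shown above.

```go
package commands

import (
	"github.com/photoprism/photoprism/internal/config"
	"github.com/urfave/cli/v2" // assumed CLI framework based on the command definitions in this diff
)

// downloadActionSketch is a hypothetical wrapper showing how the flags added by
// this commit map onto DownloadOpts before calling the testable core runDownload.
func downloadActionSketch(ctx *cli.Context, conf *config.Config) error {
	return runDownload(conf, DownloadOpts{
		Dest:       ctx.String("dest"),
		Cookies:    ctx.String("cookies"),
		AddHeaders: ctx.StringSlice("add-header"),
		Method:     ctx.String("dl-method"),  // "pipe" (default) or "file"
		FileRemux:  ctx.String("file-remux"), // "always", "auto", or "skip"
	}, ctx.Args().Slice())
}
```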
@@ -25,7 +25,7 @@ var ImportCommand = &cli.Command{
		&cli.StringFlag{
			Name:    "dest",
			Aliases: []string{"d"},
			Usage:   "relative originals `PATH` to which the files should be imported",
			Usage:   "relative originals `PATH` in which new files should be imported",
		},
	},
	Action: importAction,

@@ -20,7 +20,7 @@ var MigrationsStatusCommand = &cli.Command{
	Name:      "ls",
	Aliases:   []string{"status", "show"},
	Usage:     "Displays the status of schema migrations",
	ArgsUsage: "[migrations...]",
	ArgsUsage: "[migrations]...",
	Flags:     report.CliFlags,
	Action:    migrationsStatusAction,
}
@@ -29,7 +29,7 @@ var MigrationsRunCommand = &cli.Command{
	Name:      "run",
	Aliases:   []string{"execute", "migrate"},
	Usage:     "Executes database schema migrations",
	ArgsUsage: "[migrations...]",
	ArgsUsage: "[migrations]...",
	Flags: []cli.Flag{
		&cli.BoolFlag{
			Name: "failed",

@@ -16,7 +16,7 @@ import (
var VisionRunCommand = &cli.Command{
	Name:      "run",
	Usage:     "Runs one or more computer vision models on a set of pictures that match the specified search filters",
	ArgsUsage: "[filter...]",
	ArgsUsage: "[filter]...",
	Flags: []cli.Flag{
		&cli.StringFlag{
			Name: "models",

@@ -253,7 +253,7 @@ var Flags = CliFlags{
	}}, {
		Flag: &cli.PathFlag{
			Name:      "import-dest",
			Usage:     "relative originals `PATH` to which the files should be imported by default *optional*",
			Usage:     "relative originals `PATH` in which files should be imported by default *optional*",
			EnvVars:   EnvVars("IMPORT_DEST"),
			TakesFile: true,
		}}, {

internal/photoprism/dl/README.md (new file, 30 lines)
@@ -0,0 +1,30 @@
# PhotoPrism Download Helpers

This package provides thin wrappers around `yt-dlp`, which the `photoprism dl` command uses for metadata discovery and downloading.

It currently supports two invocation methods:
- Pipe: `yt-dlp` streams to stdout; PhotoPrism writes a file and remuxes it with ffmpeg to ensure MP4 + embedded metadata.
- File: `yt-dlp` writes files to disk using `--output`; PhotoPrism captures final paths via `--print after_move:filepath` and may remux when needed.

## Auth & Headers

- Supports `--cookies` and repeatable `--add-header` for both metadata and download flows.
- Secrets are never logged; header values are redacted in trace logs.

## Key APIs

- `NewMetadata(ctx, url, Options)` → discovers formats and info (via `--dump-single-json`).
- `Metadata.DownloadWithOptions(ctx, DownloadOptions)` → pipe method (`stdout`).
- `Metadata.DownloadToFileWithOptions(ctx, DownloadOptions)` → file method (`--output` + `--print`).
- `RemuxOptionsFromInfo(ffmpegBin, fs.VideoMp4, Info, sourceURL)` → builds ffmpeg options to embed title/description/author/comment/created.

## Testing

- Tests stub `yt-dlp` with a tiny shell script that echoes JSON or creates a dummy file and prints its path. This avoids external network calls and brittle extractor behavior.
- Logging redaction is covered; argument construction is verified for cookies/headers.

## Notes

- Prefer the file method for sources with separate audio/video streams; the pipe method cannot always merge in that case.
- When the CLI’s `--file-remux=auto` is used, the final ffmpeg remux is skipped for MP4 outputs that already include metadata.
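As a usage reference for the key APIs listed above, here is a minimal sketch of the recommended file method; the source URL, output template, and header value are hypothetical, and the calls follow the signatures introduced in this commit.

```go
// Sketch only: download one video with the file method and then normalize it with ffmpeg.
package main

import (
	"context"
	"log"

	"github.com/photoprism/photoprism/internal/ffmpeg"
	"github.com/photoprism/photoprism/internal/photoprism/dl"
	"github.com/photoprism/photoprism/pkg/fs"
)

func main() {
	src := "https://example.com/video" // hypothetical source URL

	opt := dl.Options{
		MergeOutputFormat: fs.VideoMp4.String(),
		RemuxVideo:        fs.VideoMp4.String(),
		Cookies:           "cookies.txt",                         // optional Netscape-format cookies file
		AddHeaders:        []string{"Authorization: Bearer ***"}, // repeatable request headers
	}

	// Discover formats and info first (yt-dlp --dump-single-json).
	meta, err := dl.NewMetadata(context.Background(), src, opt)
	if err != nil {
		log.Fatal(err)
	}

	// File method: yt-dlp writes files to disk and their final paths are returned.
	files, err := meta.DownloadToFileWithOptions(context.Background(), dl.DownloadOptions{
		Filter:        "best",
		EmbedMetadata: true,
		PlaylistIndex: 1,
		Output:        "/tmp/ppdl_%(id)s.%(ext)s", // hypothetical output template
	})
	if err != nil {
		log.Fatal(err)
	}

	// Optionally remux each result to normalize the container and embed metadata.
	for _, f := range files {
		remuxOpt := dl.RemuxOptionsFromInfo("ffmpeg", fs.VideoMp4, meta.Info, src)
		if err := ffmpeg.RemuxFile(f, "", remuxOpt); err != nil {
			log.Printf("remux failed: %v", err)
		}
	}
}
```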
@@ -1,5 +1,31 @@
/*
Package dl provides media download functionality.
Package dl provides helpers to discover media metadata and download content
from remote sources via yt-dlp. It underpins the `photoprism dl` CLI.

Two download methods are supported:

 1. Pipe method (stdout): yt-dlp streams media to stdout and PhotoPrism
    writes it to a temporary file. After writing, PhotoPrism remuxes the
    file with ffmpeg to ensure a valid MP4 container and to embed basic
    metadata such as title, description, author, source URL (as comment),
    and creation timestamp when available. This method is simple and works
    well for sources that provide a combined A/V stream. Some sites split
    audio and video; in those cases yt-dlp cannot always mux when piping.

 2. File method (on-disk): yt-dlp writes output files directly using
    `--output` templates and built-in post-processors (merge/remux/metadata).
    PhotoPrism captures the final file paths (via `--print after_move:filepath`)
    and then optionally runs a final ffmpeg remux to normalize the container
    and embed metadata if necessary. This method is recommended for sources
    that deliver separate audio/video streams or require post-processing.

Both methods accept the same authentication-related options and headers.
Cookies can be supplied via a file or a browser profile, and custom headers
(e.g. Authorization) are forwarded to yt-dlp for both metadata discovery and
downloading. Secrets are not logged; header values are redacted in traces.

The package exposes convenience constructors around yt-dlp invocation as
well as small utilities for safer logging and remux metadata preparation.

Copyright (c) 2018 - 2025 PhotoPrism UG. All rights reserved.

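A corresponding sketch of the pipe method described in the package comment above; the destination path and header value are hypothetical, and the calls follow the signatures added elsewhere in this diff.

```go
// Sketch only: stream media via the pipe method, write it to a file, then remux with ffmpeg.
package main

import (
	"context"
	"io"
	"log"
	"os"

	"github.com/photoprism/photoprism/internal/ffmpeg"
	"github.com/photoprism/photoprism/internal/photoprism/dl"
	"github.com/photoprism/photoprism/pkg/fs"
)

func main() {
	src := "https://example.com/video" // hypothetical source URL

	meta, err := dl.NewMetadata(context.Background(), src, dl.Options{
		AddHeaders: []string{"Authorization: Bearer ***"}, // optional auth header
	})
	if err != nil {
		log.Fatal(err)
	}

	// Pipe method: yt-dlp streams to stdout and the stream is copied into a local file.
	stream, err := meta.DownloadWithOptions(context.Background(), dl.DownloadOptions{
		Filter:        "best",
		EmbedMetadata: true,
		PlaylistIndex: 1,
	})
	if err != nil {
		log.Fatal(err)
	}
	defer stream.Close()

	dst := "/tmp/download.mp4" // hypothetical target file
	f, err := os.Create(dst)
	if err != nil {
		log.Fatal(err)
	}
	if _, err := io.Copy(f, stream); err != nil {
		f.Close()
		log.Fatal(err)
	}
	f.Close()

	// Final ffmpeg remux ensures a valid MP4 container and embeds basic metadata.
	remuxOpt := dl.RemuxOptionsFromInfo("ffmpeg", fs.VideoMp4, meta.Info, src)
	if err := ffmpeg.RemuxFile(dst, "", remuxOpt); err != nil {
		log.Fatal(err)
	}
}
```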
internal/photoprism/dl/file.go (new file, 201 lines)
@@ -0,0 +1,201 @@
package dl

import (
	"bufio"
	"bytes"
	"context"
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
	"strconv"
	"strings"
)

// DownloadToFileWithOptions downloads media using yt-dlp into files on disk (no piping).
// It returns the list of files produced by yt-dlp, as printed via --print after_move:filepath.
func (result Metadata) DownloadToFileWithOptions(
	ctx context.Context,
	options DownloadOptions,
) ([]string, error) {
	if !result.Options.noInfoDownload {
		if (result.Info.Type == "playlist" ||
			result.Info.Type == "multi_video" ||
			result.Info.Type == "channel") &&
			options.PlaylistIndex == 0 {
			return nil, fmt.Errorf(
				"can't download a playlist when the playlist index options is not set",
			)
		}
	}

	tempPath, tempErr := os.MkdirTemp("", "ydls")
	if tempErr != nil {
		return nil, tempErr
	}
	defer os.RemoveAll(tempPath)

	var jsonTempPath string
	if !result.Options.noInfoDownload {
		jsonTempPath = filepath.Join(tempPath, "info.json")
		if err := os.WriteFile(jsonTempPath, result.RawJSON, 0600); err != nil {
			os.RemoveAll(tempPath)
			return nil, err
		}
	}

	cmd := exec.CommandContext(
		ctx,
		FindYtDlpBin(),
		// see comment below about ignoring errors for playlists
		"--ignore-errors",
		// TODO: deprecated in yt-dlp?
		"--no-call-home",
		// use non-fancy progress bar
		"--newline",
		// safer filenames
		"--restrict-filenames",
	)

	// Output template: caller may provide one; otherwise use a deterministic fallback in CWD
	// Note: caller should set a template rooted in the session temp dir.
	if options.Output != "" {
		cmd.Args = append(cmd.Args, "--output", options.Output)
	}

	// Print the final file paths after move/processing; also print plain filepath as a fallback
	cmd.Args = append(cmd.Args, "--print", "after_move:filepath")
	cmd.Args = append(cmd.Args, "--print", "filepath")

	if result.Options.noInfoDownload {
		cmd.Args = append(cmd.Args, "--batch-file", "-")
		cmd.Stdin = bytes.NewBufferString(result.RawURL + "\n")

		if result.Options.Type == TypePlaylist {
			cmd.Args = append(cmd.Args, "--yes-playlist")

			if result.Options.PlaylistStart > 0 {
				cmd.Args = append(cmd.Args,
					"--playlist-start", strconv.Itoa(int(result.Options.PlaylistStart)),
				)
			}
			if result.Options.PlaylistEnd > 0 {
				cmd.Args = append(cmd.Args,
					"--playlist-end", strconv.Itoa(int(result.Options.PlaylistEnd)),
				)
			}
			if result.Options.FlatPlaylist {
				cmd.Args = append(cmd.Args, "--flat-playlist")
			}
		} else {
			cmd.Args = append(cmd.Args,
				"--no-playlist",
			)
		}
	} else {
		cmd.Args = append(cmd.Args, "--load-info", jsonTempPath)
	}

	// force IPV4 Usage
	if result.Options.UseIPV4 {
		cmd.Args = append(cmd.Args, "-4")
	}
	// filter and playlist index
	if !result.Info.Direct && options.Filter != "" {
		cmd.Args = append(cmd.Args, "-f", options.Filter)
	}
	if options.PlaylistIndex > 0 {
		cmd.Args = append(cmd.Args, "--playlist-items", fmt.Sprint(options.PlaylistIndex))
	}
	if options.DownloadAudioOnly {
		cmd.Args = append(cmd.Args, "-x")
	}
	if options.EmbedMetadata {
		cmd.Args = append(cmd.Args, "--embed-metadata")
	}
	if options.EmbedSubs {
		cmd.Args = append(cmd.Args, "--embed-subs")
	}
	if options.ForceOverwrites {
		cmd.Args = append(cmd.Args, "--force-overwrites")
	}
	if options.DisableCaching {
		cmd.Args = append(cmd.Args, "--no-cache-dir")
	}
	if options.AudioFormats != "" {
		cmd.Args = append(cmd.Args, "--audio-format", options.AudioFormats)
	}
	if result.Options.ProxyUrl != "" {
		cmd.Args = append(cmd.Args, "--proxy", result.Options.ProxyUrl)
	}
	if result.Options.Downloader != "" {
		cmd.Args = append(cmd.Args, "--downloader", result.Options.Downloader)
	}
	if result.Options.DownloadSections != "" {
		cmd.Args = append(cmd.Args, "--download-sections", result.Options.DownloadSections)
	}
	if result.Options.CookiesFromBrowser != "" {
		cmd.Args = append(cmd.Args, "--cookies-from-browser", result.Options.CookiesFromBrowser)
	}
	if result.Options.Cookies != "" {
		cmd.Args = append(cmd.Args, "--cookies", result.Options.Cookies)
	}
	if result.Options.MergeOutputFormat != "" {
		cmd.Args = append(cmd.Args, "--merge-output-format", result.Options.MergeOutputFormat)
	}
	if result.Options.RemuxVideo != "" {
		cmd.Args = append(cmd.Args, "--remux-video", result.Options.RemuxVideo)
	}
	if result.Options.RecodeVideo != "" {
		cmd.Args = append(cmd.Args, "--recode-video", result.Options.RecodeVideo)
	}
	if result.Options.Fixup != "" {
		cmd.Args = append(cmd.Args, "--fixup", result.Options.Fixup)
	}
	if result.Options.SortingFormat != "" {
		cmd.Args = append(cmd.Args, "--format-sort", result.Options.SortingFormat)
	}
	if len(result.Options.AddHeaders) > 0 {
		for _, h := range result.Options.AddHeaders {
			if strings.TrimSpace(h) == "" {
				continue
			}
			cmd.Args = append(cmd.Args, "--add-header", h)
		}
	}

	cmd.Dir = tempPath

	// Capture stdout/stderr for parsing results and errors
	stdoutBuf := &bytes.Buffer{}
	stderrBuf := &bytes.Buffer{}
	cmd.Stdout = stdoutBuf
	cmd.Stderr = stderrBuf

	log.Trace("cmd", " ", redactArgs(cmd.Args))
	err := cmd.Run()

	// Parse printed file paths from stdout
	var files []string
	scanner := bufio.NewScanner(bytes.NewReader(stdoutBuf.Bytes()))
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if line == "" {
			continue
		}
		// If relative, resolve against tempPath
		if !filepath.IsAbs(line) {
			line = filepath.Join(tempPath, line)
		}
		if _, statErr := os.Stat(line); statErr == nil {
			files = append(files, line)
		}
	}

	if err != nil {
		// Prefer returning the process error; callers can inspect stderr if needed
		return files, err
	}

	return files, nil
}
@@ -153,6 +153,15 @@ func infoFromURL(
		cmd.Args = append(cmd.Args, "--cookies-from-browser", options.CookiesFromBrowser)
	}

	if len(options.AddHeaders) > 0 {
		for _, h := range options.AddHeaders {
			if strings.TrimSpace(h) == "" {
				continue
			}
			cmd.Args = append(cmd.Args, "--add-header", h)
		}
	}

	switch options.Type {
	case TypePlaylist, TypeChannel:
		cmd.Args = append(cmd.Args, "--yes-playlist")
@@ -200,7 +209,7 @@ func infoFromURL(
	cmd.Stderr = io.MultiWriter(stderrBuf, stderrWriter)
	cmd.Stdin = bytes.NewBufferString(rawURL + "\n")

	log.Trace("cmd", " ", cmd.Args)
	log.Trace("cmd", " ", redactArgs(cmd.Args))
	cmdErr := cmd.Run()

	stderrLineScanner := bufio.NewScanner(stderrBuf)

internal/photoprism/dl/logging.go (new file, 23 lines)
@@ -0,0 +1,23 @@
package dl

import "strings"

// redactArgs returns a copy of args with sensitive header values masked.
// It looks for patterns: --add-header "Name: Value" and rewrites Value as ****.
func redactArgs(args []string) []string {
	out := make([]string, len(args))
	copy(out, args)
	for i := 0; i < len(out); i++ {
		if out[i] == "--add-header" && i+1 < len(out) {
			hv := out[i+1]
			if idx := strings.Index(hv, ":"); idx > 0 {
				name := strings.TrimSpace(hv[:idx])
				out[i+1] = name + ": ****"
			} else {
				out[i+1] = "****"
			}
			i++
		}
	}
	return out
}
internal/photoprism/dl/logging_test.go (new file, 17 lines)
@@ -0,0 +1,17 @@
package dl

import "testing"

func TestRedactArgs(t *testing.T) {
	in := []string{"--add-header", "Authorization: Bearer secret", "--add-header", "Origin: https://example.com", "--other", "v"}
	out := redactArgs(in)
	if out[1] != "Authorization: ****" {
		t.Fatalf("expected redaction for Authorization, got %q", out[1])
	}
	if out[3] != "Origin: ****" {
		t.Fatalf("expected redaction for Origin, got %q", out[3])
	}
	if in[1] == out[1] {
		t.Fatalf("redaction should not modify input slice in-place")
	}
}
internal/photoprism/dl/meta.go (new file, 49 lines)
@@ -0,0 +1,49 @@
package dl

import (
	"math"
	"strings"
	"time"

	"github.com/photoprism/photoprism/internal/ffmpeg/encode"
	"github.com/photoprism/photoprism/pkg/clean"
	"github.com/photoprism/photoprism/pkg/fs"
)

// RemuxOptionsFromInfo builds ffmpeg remux options (container + metadata)
// based on yt-dlp Info and the source URL. The returned options enforce the
// target container and set title, description, author, comment, and created
// timestamp when provided by the extractor.
func RemuxOptionsFromInfo(ffmpegBin string, container fs.Type, info Info, sourceURL string) encode.Options {
	opt := encode.NewRemuxOptions(ffmpegBin, container, false)

	if title := clean.Name(info.Title); title != "" {
		opt.Title = title
	} else if title = clean.Name(info.AltTitle); title != "" {
		opt.Title = title
	}

	if desc := strings.TrimSpace(info.Description); desc != "" {
		opt.Description = desc
	}
	if u := strings.TrimSpace(sourceURL); u != "" {
		opt.Comment = u
	}

	if author := clean.Name(info.Artist); author != "" {
		opt.Author = author
	} else if author = clean.Name(info.AlbumArtist); author != "" {
		opt.Author = author
	} else if author = clean.Name(info.Creator); author != "" {
		opt.Author = author
	} else if author = clean.Name(info.License); author != "" {
		opt.Author = author
	}

	if info.Timestamp > 1 {
		sec, dec := math.Modf(info.Timestamp)
		opt.Created = time.Unix(int64(sec), int64(dec*(1e9)))
	}

	return opt
}
internal/photoprism/dl/meta_test.go (new file, 34 lines)
@@ -0,0 +1,34 @@
package dl

import (
	"testing"
	"time"

	"github.com/photoprism/photoprism/pkg/fs"
)

func TestRemuxOptionsFromInfo(t *testing.T) {
	info := Info{
		Title:       " My Title ",
		AltTitle:    "Alt",
		Description: " Desc ",
		Artist:      "Artist Name",
		Timestamp:   float64(time.Date(2024, 12, 31, 23, 59, 58, 0, time.UTC).Unix()),
	}
	opt := RemuxOptionsFromInfo("ffmpeg", fs.VideoMp4, info, "https://example.com/v")
	if opt.Title != "My Title" {
		t.Fatalf("Title mismatch: %q", opt.Title)
	}
	if opt.Description != "Desc" {
		t.Fatalf("Description mismatch: %q", opt.Description)
	}
	if opt.Author != "Artist Name" {
		t.Fatalf("Author mismatch: %q", opt.Author)
	}
	if opt.Comment != "https://example.com/v" {
		t.Fatalf("Comment mismatch: %q", opt.Comment)
	}
	if opt.Created.IsZero() {
		t.Fatalf("Created timestamp should be set")
	}
}
@@ -30,6 +30,7 @@ type Options struct {
	UseIPV4            bool     // -4 Make all connections via IPv4
	Cookies            string   // --cookies FILE
	CookiesFromBrowser string   // --cookies-from-browser BROWSER[:FOLDER]
	AddHeaders         []string // --add-header "Name: Value" (repeatable)
	StderrFn           func(cmd *exec.Cmd) io.Writer // if not nil, function to get Writer for stderr
	HttpClient         *http.Client // Client for download thumbnail and subtitles (nil use http.DefaultClient)
	MergeOutputFormat  string // --merge-output-format
@@ -196,6 +197,19 @@ func (result Metadata) DownloadWithOptions(
		cmd.Args = append(cmd.Args, "--cookies-from-browser", result.Options.CookiesFromBrowser)
	}

	if result.Options.Cookies != "" {
		cmd.Args = append(cmd.Args, "--cookies", result.Options.Cookies)
	}

	if len(result.Options.AddHeaders) > 0 {
		for _, h := range result.Options.AddHeaders {
			if strings.TrimSpace(h) == "" {
				continue
			}
			cmd.Args = append(cmd.Args, "--add-header", h)
		}
	}

	if result.Options.MergeOutputFormat != "" {
		cmd.Args = append(cmd.Args,
			"--merge-output-format", result.Options.MergeOutputFormat,
@@ -239,7 +253,7 @@ func (result Metadata) DownloadWithOptions(
	cmd.Stdout = stdoutW
	cmd.Stderr = io.MultiWriter(optStderrWriter, stderrW)

	log.Trace("cmd", " ", cmd.Args)
	log.Trace("cmd", " ", redactArgs(cmd.Args))
	if err := cmd.Start(); err != nil {
		os.RemoveAll(tempPath)
		return nil, err

internal/photoprism/dl/options_headers_test.go (new file, 172 lines)
@@ -0,0 +1,172 @@
package dl

import (
	"bytes"
	"context"
	"os"
	"path/filepath"
	"runtime"
	"strings"
	"testing"
)

// createFakeYtDlp writes a small script that logs args to YTDLP_ARGS_LOG,
// optionally writes JSON when --dump-single-json is present, prints a file path
// when --print is present, emits a download prefix on stderr, and writes small
// data to stdout for pipe mode.
func createFakeYtDlp(t *testing.T) string {
	t.Helper()
	dir := t.TempDir()
	path := filepath.Join(dir, "yt-dlp")
	var script bytes.Buffer
	if runtime.GOOS == "windows" {
		// not needed in our CI/dev container; keep placeholder
		script.WriteString("@echo off\r\n")
		script.WriteString("setlocal enabledelayedexpansion\r\n")
		script.WriteString("echo %* >> %YTDLP_ARGS_LOG%\r\n")
		script.WriteString("echo {\"id\":\"abc\",\"title\":\"Test\",\"url\":\"http://example.com\",\"_type\":\"video\"} \r\n")
	} else {
		script.WriteString("#!/usr/bin/env bash\n")
		script.WriteString("set -euo pipefail\n")
		script.WriteString("ARGS_LOG=\"${YTDLP_ARGS_LOG:-}\"\n")
		script.WriteString("OUT_FILE=\"${YTDLP_OUTPUT_FILE:-}\"\n")
		script.WriteString("if [[ -n \"$ARGS_LOG\" ]]; then echo \"$*\" >> \"$ARGS_LOG\"; fi\n")
		// If metadata mode, print minimal JSON to stdout
		script.WriteString("for a in \"$@\"; do if [[ \"$a\" == \"--dump-single-json\" ]]; then echo '{\"id\":\"abc\",\"title\":\"Test\",\"url\":\"http://example.com\",\"_type\":\"video\"}'; exit 0; fi; done\n")
		// If print mode (file download), create file and print path(s)
		script.WriteString("for a in \"$@\"; do if [[ \"$a\" == \"--print\" ]]; then if [[ -n \"$OUT_FILE\" ]]; then mkdir -p \"$(dirname \"$OUT_FILE\")\"; echo 'dummy' > \"$OUT_FILE\"; echo \"$OUT_FILE\"; fi; exit 0; fi; done\n")
		// Pipe mode: emit download prefix on stderr then write some bytes to stdout
		script.WriteString("echo '[download]' 1>&2\n")
		script.WriteString("echo 'DATA'\n")
	}
	if err := os.WriteFile(path, script.Bytes(), 0o755); err != nil {
		t.Fatalf("failed to write fake yt-dlp: %v", err)
	}
	return path
}

func TestInfoFromURL_IncludesHeadersAndCookies(t *testing.T) {
	fake := createFakeYtDlp(t)
	orig := YtDlpBin
	YtDlpBin = fake
	defer func() { YtDlpBin = orig }()

	argsLog := filepath.Join(t.TempDir(), "args.log")
	t.Setenv("YTDLP_ARGS_LOG", argsLog)

	_, _, err := infoFromURL(context.Background(), "https://example.com/video", Options{
		Cookies:            "cookies.txt",
		CookiesFromBrowser: "chrome:Default",
		AddHeaders:         []string{"Authorization: Bearer X", "Origin: https://example.com"},
		Type:               TypeSingle,
	})
	if err != nil {
		t.Fatalf("infoFromURL error: %v", err)
	}

	data, err := os.ReadFile(argsLog)
	if err != nil {
		t.Fatalf("reading args log failed: %v", err)
	}
	s := string(data)
	for _, expect := range []string{"--cookies cookies.txt", "--cookies-from-browser chrome:Default", "--add-header Authorization: Bearer X", "--add-header Origin: https://example.com"} {
		if !strings.Contains(s, expect) {
			t.Fatalf("missing expected arg %q in %q", expect, s)
		}
	}
}

func TestDownloadWithOptions_IncludesHeadersAndCookies_Pipe(t *testing.T) {
	fake := createFakeYtDlp(t)
	orig := YtDlpBin
	YtDlpBin = fake
	defer func() { YtDlpBin = orig }()
	argsLog := filepath.Join(t.TempDir(), "args.log")
	t.Setenv("YTDLP_ARGS_LOG", argsLog)

	r := Metadata{
		RawURL: "https://example.com/v",
		Options: Options{
			noInfoDownload:     true,
			Cookies:            "cookies.txt",
			CookiesFromBrowser: "firefox:Profile",
			AddHeaders:         []string{"Authorization: Bearer Y"},
		},
	}
	dr, err := r.DownloadWithOptions(context.Background(), DownloadOptions{})
	if err != nil {
		t.Fatalf("DownloadWithOptions error: %v", err)
	}
	// Read a bit and close
	buf := make([]byte, 4)
	_, _ = dr.Read(buf)
	_ = dr.Close()

	data, err := os.ReadFile(argsLog)
	if err != nil {
		t.Fatalf("reading args log failed: %v", err)
	}
	s := string(data)
	for _, expect := range []string{"--cookies cookies.txt", "--cookies-from-browser firefox:Profile", "--add-header Authorization: Bearer Y"} {
		if !strings.Contains(s, expect) {
			t.Fatalf("missing expected arg %q in %q", expect, s)
		}
	}
}

func TestDownloadWithOptions_OmitsFilterWhenDirect(t *testing.T) {
	fake := createFakeYtDlp(t)
	orig := YtDlpBin
	YtDlpBin = fake
	defer func() { YtDlpBin = orig }()

	argsLog := filepath.Join(t.TempDir(), "args.log")
	t.Setenv("YTDLP_ARGS_LOG", argsLog)

	r := Metadata{
		RawURL:  "https://example.com/direct",
		Info:    Info{Direct: true},
		Options: Options{noInfoDownload: true},
	}
	_, err := r.DownloadWithOptions(context.Background(), DownloadOptions{Filter: "best"})
	if err != nil {
		t.Fatalf("DownloadWithOptions error: %v", err)
	}
	data, err := os.ReadFile(argsLog)
	if err != nil {
		t.Fatalf("reading args log failed: %v", err)
	}
	s := string(data)
	if strings.Contains(s, "-f best") {
		t.Fatalf("expected -f not to be present for direct downloads; args: %s", s)
	}
}

func TestDownloadToFileWithOptions_PrintsAndCreatesFiles(t *testing.T) {
	fake := createFakeYtDlp(t)
	orig := YtDlpBin
	YtDlpBin = fake
	defer func() { YtDlpBin = orig }()
	argsLog := filepath.Join(t.TempDir(), "args.log")
	t.Setenv("YTDLP_ARGS_LOG", argsLog)
	outDir := t.TempDir()
	outFile := filepath.Join(outDir, "ppdl_test.mp4")
	t.Setenv("YTDLP_OUTPUT_FILE", outFile)

	r := Metadata{
		RawURL: "https://example.com/v",
		Options: Options{
			noInfoDownload: true,
		},
	}
	files, err := r.DownloadToFileWithOptions(context.Background(), DownloadOptions{Output: filepath.Join(outDir, "ppdl_%(id)s.%(ext)s")})
	if err != nil {
		t.Fatalf("DownloadToFileWithOptions error: %v", err)
	}
	if len(files) == 0 {
		t.Fatalf("expected at least one file path returned")
	}
	if _, statErr := os.Stat(outFile); statErr != nil {
		t.Fatalf("expected file to exist: %v", statErr)
	}
}