Mirror of https://github.com/rclone/rclone.git
serve http: download folders as zip
Folders can now be downloaded as a zip archive. Use --disable-zip to disable this feature and hide the zip download links.
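As a rough client-side sketch of the new behaviour: assuming a server started with something like rclone serve http remote: --addr :8080 and a directory named three on the remote (both are illustrative, not part of this commit), any HTTP client can fetch a directory as a zip by appending ?download=zip to its URL.

package main

import (
    "io"
    "log"
    "net/http"
    "os"
)

func main() {
    // Request a directory listing URL with ?download=zip appended.
    // The address and path below are placeholders.
    resp, err := http.Get("http://localhost:8080/three/?download=zip")
    if err != nil {
        log.Fatal(err)
    }
    defer resp.Body.Close()
    if resp.StatusCode != http.StatusOK {
        log.Fatalf("unexpected status: %s", resp.Status)
    }

    // Save the streamed archive to disk.
    out, err := os.Create("three.zip")
    if err != nil {
        log.Fatal(err)
    }
    defer out.Close()
    if _, err := io.Copy(out, resp.Body); err != nil {
        log.Fatal(err)
    }
}

The handler added below sets Content-Type: application/zip and a Content-Disposition filename of "<directory>.zip" (or "root.zip" for the root directory), so browsers save the archive under a sensible name.
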
@@ -41,9 +41,10 @@ var OptionsInfo = fs.Options{}.
 // Options required for http server
 type Options struct {
-    Auth     libhttp.AuthConfig
-    HTTP     libhttp.Config
-    Template libhttp.TemplateConfig
+    Auth       libhttp.AuthConfig
+    HTTP       libhttp.Config
+    Template   libhttp.TemplateConfig
+    DisableZip bool
 }

 // DefaultOpt is the default values used for Options

@@ -69,6 +70,7 @@ func init() {
     flags.AddFlagsFromOptions(flagSet, "", OptionsInfo)
     vfsflags.AddFlags(flagSet)
     proxyflags.AddFlags(flagSet)
+    flagSet.BoolVar(&Opt.DisableZip, "disable-zip", false, "Disable zip download of directories")
     cmdserve.Command.AddCommand(Command)
     cmdserve.AddRc("http", func(ctx context.Context, f fs.Fs, in rc.Params) (cmdserve.Handle, error) {
         // Read VFS Opts

@@ -257,6 +259,24 @@ func (s *HTTP) serveDir(w http.ResponseWriter, r *http.Request, dirRemote string
         return
     }
     dir := node.(*vfs.Dir)
+
+    if r.URL.Query().Get("download") == "zip" && !s.opt.DisableZip {
+        fs.Infof(dirRemote, "%s: Zipping directory", r.RemoteAddr)
+        zipName := path.Base(dirRemote)
+        if dirRemote == "" {
+            zipName = "root"
+        }
+        w.Header().Set("Content-Disposition", "attachment; filename=\""+zipName+".zip\"")
+        w.Header().Set("Content-Type", "application/zip")
+        w.Header().Set("Last-Modified", time.Now().UTC().Format(http.TimeFormat))
+        err := vfs.CreateZip(ctx, dir, w)
+        if err != nil {
+            serve.Error(ctx, dirRemote, w, "Failed to create zip", err)
+            return
+        }
+        return
+    }
+
     dirEntries, err := dir.ReadDirAll()
     if err != nil {
         serve.Error(ctx, dirRemote, w, "Failed to list directory", err)

@@ -280,6 +300,8 @@ func (s *HTTP) serveDir(w http.ResponseWriter, r *http.Request, dirRemote string
     // Set the Last-Modified header to the timestamp
     w.Header().Set("Last-Modified", dir.ModTime().UTC().Format(http.TimeFormat))

+    directory.DisableZip = s.opt.DisableZip
+
     directory.Serve(w, r)
 }

@@ -4,6 +4,7 @@ import (
     "context"
     "flag"
     "io"
+    stdfs "io/fs"
     "net/http"
     "os"
     "path/filepath"

@@ -75,6 +76,16 @@ func start(ctx context.Context, t *testing.T, f fs.Fs) (s *HTTP, testURL string)
     return s, testURL
 }

+// setAllModTimes walks root and sets atime/mtime to t for every file & directory.
+func setAllModTimes(root string, t time.Time) error {
+    return filepath.WalkDir(root, func(path string, d stdfs.DirEntry, err error) error {
+        if err != nil {
+            return err
+        }
+        return os.Chtimes(path, t, t)
+    })
+}
+
 var (
     datedObject  = "two.txt"
     expectedTime = time.Date(2000, 1, 2, 3, 4, 5, 0, time.UTC)

@@ -123,6 +134,8 @@ func testGET(t *testing.T, useProxy bool) {

         f = nil
     } else {
+        // set all the mod times to expectedTime
+        require.NoError(t, setAllModTimes("testdata/files", expectedTime))
         // Create a test Fs
         var err error
         f, err = fs.NewFs(context.Background(), "testdata/files")

@@ -233,6 +246,16 @@ func testGET(t *testing.T, useProxy bool) {
             Range:  "bytes=3-",
             Golden: "testdata/golden/two3-.txt",
         },
+        {
+            URL:    "/?download=zip",
+            Status: http.StatusOK,
+            Golden: "testdata/golden/root.zip",
+        },
+        {
+            URL:    "/three/?download=zip",
+            Status: http.StatusOK,
+            Golden: "testdata/golden/three.zip",
+        },
     } {
         method := test.Method
         if method == "" {

cmd/serve/http/testdata/golden/root.zip   (new binary file, not shown)
cmd/serve/http/testdata/golden/three.zip  (new binary file, not shown)

@@ -21,6 +21,7 @@ import (
 type DirEntry struct {
     remote  string
     URL     string
+    ZipURL  string
     Leaf    string
     IsDir   bool
     Size    int64

@@ -32,6 +33,8 @@ type Directory struct {
     DirRemote    string
     Title        string
     Name         string
+    ZipURL       string
+    DisableZip   bool
     Entries      []DirEntry
     Query        string
     HTMLTemplate *template.Template

@@ -70,6 +73,7 @@ func NewDirectory(dirRemote string, htmlTemplate *template.Template) *Directory
         DirRemote:    dirRemote,
         Title:        fmt.Sprintf("Directory listing of /%s", dirRemote),
         Name:         fmt.Sprintf("/%s", dirRemote),
+        ZipURL:       "?download=zip",
         HTMLTemplate: htmlTemplate,
         Breadcrumb:   breadcrumb,
     }

@@ -99,11 +103,15 @@ func (d *Directory) AddHTMLEntry(remote string, isDir bool, size int64, modTime
     d.Entries = append(d.Entries, DirEntry{
         remote:  remote,
         URL:     rest.URLPathEscape(urlRemote) + d.Query,
+        ZipURL:  "",
         Leaf:    leaf,
         IsDir:   isDir,
         Size:    size,
         ModTime: modTime,
     })
+    if isDir {
+        d.Entries[len(d.Entries)-1].ZipURL = rest.URLPathEscape(urlRemote) + "?download=zip"
+    }
 }

 // AddEntry adds an entry to that directory

@@ -46,11 +46,11 @@ func TestAddHTMLEntry(t *testing.T) {
     d.AddHTMLEntry("a/b/c/colon:colon.txt", false, 64, modtime)
     d.AddHTMLEntry("\"quotes\".txt", false, 64, modtime)
     assert.Equal(t, []DirEntry{
-        {remote: "", URL: "/", Leaf: "/", IsDir: true, Size: 0, ModTime: modtime},
-        {remote: "dir", URL: "dir/", Leaf: "dir/", IsDir: true, Size: 0, ModTime: modtime},
-        {remote: "a/b/c/d.txt", URL: "d.txt", Leaf: "d.txt", IsDir: false, Size: 64, ModTime: modtime},
-        {remote: "a/b/c/colon:colon.txt", URL: "./colon:colon.txt", Leaf: "colon:colon.txt", IsDir: false, Size: 64, ModTime: modtime},
-        {remote: "\"quotes\".txt", URL: "%22quotes%22.txt", Leaf: "\"quotes\".txt", Size: 64, IsDir: false, ModTime: modtime},
+        {remote: "", URL: "/", ZipURL: "/?download=zip", Leaf: "/", IsDir: true, Size: 0, ModTime: modtime},
+        {remote: "dir", URL: "dir/", ZipURL: "dir/?download=zip", Leaf: "dir/", IsDir: true, Size: 0, ModTime: modtime},
+        {remote: "a/b/c/d.txt", URL: "d.txt", ZipURL: "", Leaf: "d.txt", IsDir: false, Size: 64, ModTime: modtime},
+        {remote: "a/b/c/colon:colon.txt", URL: "./colon:colon.txt", ZipURL: "", Leaf: "colon:colon.txt", IsDir: false, Size: 64, ModTime: modtime},
+        {remote: "\"quotes\".txt", URL: "%22quotes%22.txt", ZipURL: "", Leaf: "\"quotes\".txt", Size: 64, IsDir: false, ModTime: modtime},
     }, d.Entries)

     // Now test with a query parameter

@@ -58,8 +58,8 @@ func TestAddHTMLEntry(t *testing.T) {
     d.AddHTMLEntry("file", false, 64, modtime)
     d.AddHTMLEntry("dir", true, 0, modtime)
     assert.Equal(t, []DirEntry{
-        {remote: "file", URL: "file?potato=42", Leaf: "file", IsDir: false, Size: 64, ModTime: modtime},
-        {remote: "dir", URL: "dir/?potato=42", Leaf: "dir/", IsDir: true, Size: 0, ModTime: modtime},
+        {remote: "file", URL: "file?potato=42", ZipURL: "", Leaf: "file", IsDir: false, Size: 64, ModTime: modtime},
+        {remote: "dir", URL: "dir/?potato=42", ZipURL: "dir/?download=zip", Leaf: "dir/", IsDir: true, Size: 0, ModTime: modtime},
     }, d.Entries)
 }

@@ -59,6 +59,8 @@ inserts leading and trailing "/" on ` + "`--{{ .Prefix }}baseurl`" + `, so ` + "
 ` + "`--{{ .Prefix }}baseurl \"/rclone\"` and `--{{ .Prefix }}baseurl \"/rclone/\"`" + ` are all treated
 identically.

+` + "`--{{ .Prefix }}disable-zip`" + ` may be set to disable the zipping download option.
+
 #### TLS (SSL)

 By default this will serve over http. If you want you can serve over

@@ -21,7 +21,7 @@ Modifications: Adapted to rclone markup -->
 <meta charset="utf-8">
 <meta name="viewport" content="width=device-width, initial-scale=1.0">
 <meta name="google" content="notranslate">
 <style>
 * { padding: 0; margin: 0; }
 body {
     font-family: sans-serif;

@@ -187,6 +187,19 @@ footer {
         max-width: 100px;
     }
 }
+td .zip {
+    opacity: 0;
+    margin-left: 6px;
+    transition: opacity 0.15s ease-in-out;
+}
+tr.file:hover td .zip {
+    opacity: 1;
+}
+.zip-root {
+    margin-left: 8px;
+    vertical-align: middle;
+    opacity: 1;
+}
 </style>
 </head>
 <body onload='filter();toggle("order");changeSize()'>

@@ -206,6 +219,9 @@ footer {
 <path d="M126.154134,250.559184 C126.850974,251.883673 127.300549,253.006122 127.772602,254.106122 C128.469442,255.206122 128.919016,256.104082 129.638335,257.002041 C130.559962,258.326531 131.728855,259 133.100057,259 C134.493737,259 135.415364,258.55102 136.112204,257.67551 C136.809044,257.002041 137.258619,255.902041 137.258619,254.577551 C137.258619,253.904082 137.258619,252.804082 137.033832,251.457143 C136.786566,249.908163 136.561779,249.032653 136.561779,248.583673 C136.089726,242.814286 135.864939,237.920408 135.864939,233.273469 C135.864939,225.057143 136.786566,217.514286 138.180246,210.846939 C139.798713,204.202041 141.889234,198.634694 144.429328,193.763265 C147.216689,188.869388 150.678411,184.873469 154.836973,181.326531 C158.995535,177.779592 163.626149,174.883673 168.481552,172.661224 C173.336954,170.438776 179.113983,168.665306 185.587852,167.340816 C192.061722,166.218367 198.760378,165.342857 205.481514,164.669388 C212.18017,164.220408 219.598146,163.995918 228.162535,163.995918 L246.055591,163.995918 L246.055591,195.514286 C246.055591,197.736735 246.752431,199.510204 248.370899,201.059184 C250.214153,202.608163 252.079886,203.506122 254.372715,203.506122 C256.463236,203.506122 258.531277,202.608163 260.172223,201.059184 L326.102289,137.797959 C327.720757,136.24898 328.642384,134.47551 328.642384,132.253061 C328.642384,130.030612 327.720757,128.257143 326.102289,126.708163 L260.172223,63.4469388 C258.553756,61.8979592 256.463236,61 254.395194,61 C252.079886,61 250.236632,61.8979592 248.393377,63.4469388 C246.77491,64.9959184 246.07807,66.7693878 246.07807,68.9918367 L246.07807,100.510204 L228.162535,100.510204 C166.863084,100.510204 129.166282,117.167347 115.274437,150.459184 C110.666301,161.54898 108.350993,175.310204 108.350993,191.742857 C108.350993,205.279592 113.903236,223.912245 124.760454,247.438776 C125.00772,248.112245 125.457294,249.010204 126.154134,250.559184 Z" id="Shape" fill="#FFFFFF" transform="translate(218.496689, 160.000000) scale(-1, 1) translate(-218.496689, -160.000000) "></path>
 </g>
 </g>
+<g id="zip-folder">
+<path d="M640-480v-80h80v80h-80Zm0 80h-80v-80h80v80Zm0 80v-80h80v80h-80ZM447-640l-80-80H160v480h400v-80h80v80h160v-400H640v80h-80v-80H447ZM160-160q-33 0-56.5-23.5T80-240v-480q0-33 23.5-56.5T160-800h240l80 80h320q33 0 56.5 23.5T880-640v400q0 33-23.5 56.5T800-160H160Zm0-80v-480 480Z"/>
+</g>
 <!-- File -->
 <g id="file" stroke="#000" stroke-width="25" fill="#FFF" fill-rule="evenodd" stroke-linecap="round" stroke-linejoin="round">
 <path d="M13 24.12v274.76c0 6.16 5.87 11.12 13.17 11.12H239c7.3 0 13.17-4.96 13.17-11.12V136.15S132.6 13 128.37 13H26.17C18.87 13 13 17.96 13 24.12z"/>

@@ -233,6 +249,15 @@ footer {
 <header>
 <h1>
 {{range $i, $crumb := .Breadcrumb}}<a href="{{html $crumb.Link}}">{{html $crumb.Text}}</a>{{if ne $i 0}}/{{end}}{{end}}
+
+{{- if not .DisableZip}}
+<a class="zip-root" href="{{html .ZipURL}}" title="Download root as .zip">
+<svg width="1.5em" height="1.5em" viewBox="0 -960 960 960">
+<use xlink:href="#zip-folder"></use>
+</svg>
+</a>
+{{- end}}
+
 </h1>
 </header>
 <main>

@@ -283,6 +308,13 @@ footer {
 <svg width="1.5em" height="1em" version="1.1" viewBox="0 0 265 323"><use xlink:href="#file"></use></svg>
 {{- end}}
 <span class="name"><a href="{{html .URL}}">{{html .Leaf}}</a></span>
+{{- if and .IsDir (not $.DisableZip)}}
+<a class="zip" href="{{html .ZipURL}}" title="Download folder as .zip">
+<svg width="1.5em" height="1.5em" viewBox="0 -960 960 960">
+<use xlink:href="#zip-folder"></use>
+</svg>
+</a>
+{{- end}}
 </td>
 {{- if .IsDir}}
 <td data-order="-1">—</td>

vfs/zip.go (new file, 73 lines)
@@ -0,0 +1,73 @@
package vfs

import (
    "archive/zip"
    "context"
    "fmt"
    "io"
    "os"

    "github.com/rclone/rclone/fs"
)

// CreateZip creates a zip file from a vfs.Dir writing it to w
func CreateZip(ctx context.Context, dir *Dir, w io.Writer) (err error) {
    zipWriter := zip.NewWriter(w)
    defer fs.CheckClose(zipWriter, &err)
    var walk func(dir *Dir, root string) error
    walk = func(dir *Dir, root string) error {
        nodes, err := dir.ReadDirAll()
        if err != nil {
            return fmt.Errorf("create zip directory read: %w", err)
        }
        for _, node := range nodes {
            switch e := node.(type) {
            case *File:
                in, err := e.Open(os.O_RDONLY)
                if err != nil {
                    return fmt.Errorf("create zip open file: %w", err)
                }
                header := &zip.FileHeader{
                    Name:     root + e.Name(),
                    Method:   zip.Deflate,
                    Modified: e.ModTime(),
                }
                fileWriter, err := zipWriter.CreateHeader(header)
                if err != nil {
                    fs.CheckClose(in, &err)
                    return fmt.Errorf("create zip file header: %w", err)
                }
                _, err = io.Copy(fileWriter, in)
                if err != nil {
                    fs.CheckClose(in, &err)
                    return fmt.Errorf("create zip copy: %w", err)
                }
                fs.CheckClose(in, &err)
            case *Dir:
                name := root + e.Path()
                if name != "" && name[len(name)-1] != '/' {
                    name += "/"
                }
                header := &zip.FileHeader{
                    Name:     name,
                    Method:   zip.Store,
                    Modified: e.ModTime(),
                }
                _, err := zipWriter.CreateHeader(header)
                if err != nil {
                    return fmt.Errorf("create zip directory header: %w", err)
                }
                err = walk(e, name)
                if err != nil {
                    return err
                }
            }
        }
        return nil
    }
    err = walk(dir, "")
    if err != nil {
        return err
    }
    return nil
}

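A minimal standalone sketch of driving vfs.CreateZip outside the HTTP handler, for example to archive a local directory tree through the VFS layer. The source path, the output file name, and passing nil to vfs.New to fall back to the default options are assumptions for illustration, not part of this commit.

package main

import (
    "context"
    "log"
    "os"

    _ "github.com/rclone/rclone/backend/local" // register the local backend for plain paths
    "github.com/rclone/rclone/fs"
    "github.com/rclone/rclone/vfs"
)

func main() {
    ctx := context.Background()

    // fs.NewFs is used the same way in the http_test.go changes above.
    // "/tmp/source" is a placeholder local directory.
    f, err := fs.NewFs(ctx, "/tmp/source")
    if err != nil {
        log.Fatal(err)
    }

    // Assumption: nil options give the VFS defaults.
    v := vfs.New(f, nil)
    root, err := v.Root()
    if err != nil {
        log.Fatal(err)
    }

    out, err := os.Create("/tmp/source.zip")
    if err != nil {
        log.Fatal(err)
    }
    defer out.Close()

    // Stream the whole directory tree into the zip, as serveDir does above.
    if err := vfs.CreateZip(ctx, root, out); err != nil {
        log.Fatal(err)
    }
}
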
vfs/zip_test.go (new file, 156 lines)
@@ -0,0 +1,156 @@
package vfs

import (
    "archive/zip"
    "bytes"
    "context"
    "crypto/sha256"
    "fmt"
    "io"
    "strings"
    "testing"

    "github.com/rclone/rclone/fstest"
    "github.com/rclone/rclone/lib/random"
    "github.com/stretchr/testify/require"
)

func readZip(t *testing.T, buf *bytes.Buffer) *zip.Reader {
    t.Helper()
    r, err := zip.NewReader(bytes.NewReader(buf.Bytes()), int64(buf.Len()))
    require.NoError(t, err)
    return r
}

func mustCreateZip(t *testing.T, d *Dir) *bytes.Buffer {
    t.Helper()
    var buf bytes.Buffer
    require.NoError(t, CreateZip(context.Background(), d, &buf))
    return &buf
}

func zipReadFile(t *testing.T, zr *zip.Reader, match func(name string) bool) ([]byte, string) {
    t.Helper()
    for _, f := range zr.File {
        if strings.HasSuffix(f.Name, "/") {
            continue
        }
        if match(f.Name) {
            rc, err := f.Open()
            require.NoError(t, err)
            defer func() { require.NoError(t, rc.Close()) }()
            b, err := io.ReadAll(rc)
            require.NoError(t, err)
            return b, f.Name
        }
    }
    t.Fatalf("zip entry matching predicate not found")
    return nil, ""
}

func TestZipManyFiles(t *testing.T) {
    r, vfs := newTestVFS(t)

    const N = 5
    want := make(map[string]string, N)
    items := make([]fstest.Item, 0, N)

    for i := range N {
        name := fmt.Sprintf("flat/f%03d.txt", i)
        data := strings.Repeat(fmt.Sprintf("line-%d\n", i), (i%5)+1)
        it := r.WriteObject(context.Background(), name, data, t1)
        items = append(items, it)
        want[name[strings.LastIndex(name, "/")+1:]] = data
    }
    r.CheckRemoteItems(t, items...)

    node, err := vfs.Stat("flat")
    require.NoError(t, err)
    dir := node.(*Dir)

    buf := mustCreateZip(t, dir)
    zr := readZip(t, buf)

    // count only file entries (skip dir entries with trailing "/")
    files := 0
    for _, f := range zr.File {
        if !strings.HasSuffix(f.Name, "/") {
            files++
        }
    }
    require.Equal(t, N, files)

    // validate contents by base name
    for base, data := range want {
        got, _ := zipReadFile(t, zr, func(name string) bool { return name == base })
        require.Equal(t, data, string(got), "mismatch for %s", base)
    }
}

func TestZipManySubDirs(t *testing.T) {
    r, vfs := newTestVFS(t)

    r.WriteObject(context.Background(), "a/top.txt", "top", t1)
    r.WriteObject(context.Background(), "a/b/mid.txt", "mid", t1)
    r.WriteObject(context.Background(), "a/b/c/deep.txt", "deep", t1)

    node, err := vfs.Stat("a")
    require.NoError(t, err)
    dir := node.(*Dir)

    buf := mustCreateZip(t, dir)
    zr := readZip(t, buf)

    // paths may include directory prefixes; assert by suffix
    got, name := zipReadFile(t, zr, func(n string) bool { return strings.HasSuffix(n, "/top.txt") || n == "top.txt" })
    require.Equal(t, "top", string(got), "bad content for %s", name)

    got, name = zipReadFile(t, zr, func(n string) bool { return strings.HasSuffix(n, "/mid.txt") || n == "mid.txt" })
    require.Equal(t, "mid", string(got), "bad content for %s", name)

    got, name = zipReadFile(t, zr, func(n string) bool { return strings.HasSuffix(n, "/deep.txt") || n == "deep.txt" })
    require.Equal(t, "deep", string(got), "bad content for %s", name)
}

func TestZipLargeFiles(t *testing.T) {
    r, vfs := newTestVFS(t)

    data := random.String(5 * 1024 * 1024)
    sum := sha256.Sum256([]byte(data))

    r.WriteObject(context.Background(), "bigdir/big.bin", data, t1)

    node, err := vfs.Stat("bigdir")
    require.NoError(t, err)
    dir := node.(*Dir)

    buf := mustCreateZip(t, dir)
    zr := readZip(t, buf)

    got, _ := zipReadFile(t, zr, func(n string) bool { return n == "big.bin" || strings.HasSuffix(n, "/big.bin") })
    require.Equal(t, sum, sha256.Sum256(got))
}

func TestZipDirsInRoot(t *testing.T) {
    r, vfs := newTestVFS(t)

    r.WriteObject(context.Background(), "dir1/a.txt", "x", t1)
    r.WriteObject(context.Background(), "dir2/b.txt", "y", t1)
    r.WriteObject(context.Background(), "dir3/c.txt", "z", t1)

    root, err := vfs.Root()
    require.NoError(t, err)

    buf := mustCreateZip(t, root)
    zr := readZip(t, buf)

    // Check each file exists (ignore exact directory-entry names)
    gx, _ := zipReadFile(t, zr, func(n string) bool { return strings.HasSuffix(n, "/a.txt") })
    require.Equal(t, "x", string(gx))

    gy, _ := zipReadFile(t, zr, func(n string) bool { return strings.HasSuffix(n, "/b.txt") })
    require.Equal(t, "y", string(gy))

    gz, _ := zipReadFile(t, zr, func(n string) bool { return strings.HasSuffix(n, "/c.txt") })
    require.Equal(t, "z", string(gz))
}