Mirror of https://github.com/photoprism/photoprism.git (synced 2025-12-12 00:34:13 +01:00)
CI: Apply Go linter recommendations to remaining "pkg/..." code #5330
Signed-off-by: Michael Mayer <michael@photoprism.app>
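Most of the hunks below apply a few recurring linter fixes: errcheck wants ignored error returns to be discarded explicitly, gosec findings around file access and legacy SHA-1 use are annotated with //nolint directives, and if/else chains are rewritten as switch statements. As a minimal, standalone sketch of the errcheck pattern (illustrative code, not taken from this commit):

    package main

    import (
        "bytes"
        "io"
        "strings"
    )

    // drain copies everything from r into a buffer and returns it as a string.
    func drain(r io.Reader) string {
        var buf bytes.Buffer

        // errcheck flags silently dropped error returns; assigning them to the
        // blank identifier documents that the error is discarded on purpose.
        _, _ = io.Copy(&buf, r)

        return buf.String()
    }

    func main() {
        _ = drain(strings.NewReader("example"))
    }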
@@ -26,10 +26,10 @@ func Output(f func()) string {
     }()

     f()

-    w.Close()
+    _ = w.Close()

     var buf bytes.Buffer
-    io.Copy(&buf, r)
+    _, _ = io.Copy(&buf, r)

     return buf.String()
 }
@@ -20,10 +20,10 @@ func Stdout(f func()) string {
     }()

     f()

-    w.Close()
+    _ = w.Close()

     var buf bytes.Buffer
-    io.Copy(&buf, r)
+    _, _ = io.Copy(&buf, r)

     return buf.String()
 }
@@ -1,7 +1,6 @@
 package capture

 import (
-    "fmt"
     "testing"
     "time"

@@ -11,6 +10,6 @@ import (
 func TestTime(t *testing.T) {
     start := time.Now()
     time.Sleep(1 * time.Millisecond)
-    result := Time(start, fmt.Sprintf("%s", "Successful test"))
+    result := Time(start, "Successful test")
     assert.Contains(t, result, "Successful test [")
 }
@@ -1,8 +1,11 @@
 package checksum

 const (
+    // CharsetBase10 contains digits for base10 encoding.
     CharsetBase10 = "0123456789"
+    // CharsetBase36 contains lowercase alphanumerics for base36.
     CharsetBase36 = "abcdefghijklmnopqrstuvwxyz0123456789"
+    // CharsetBase62 contains mixed-case alphanumerics for base62.
     CharsetBase62 = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
 )

@@ -5,6 +5,7 @@ import (
     "hash/crc32"
 )

+// Crc32Castagnoli provides the Castagnoli polynomial table for CRC32.
 var Crc32Castagnoli = crc32.MakeTable(crc32.Castagnoli)

 // Crc32 returns the CRC-32 checksum of data using the crc32.IEEE polynomial.
@@ -2,7 +2,7 @@ package fs

 // Required file format decoders and encoders.
 import (
-    _ "image/gif"
-    _ "image/jpeg"
-    _ "image/png"
+    _ "image/gif"  // register GIF decoder
+    _ "image/jpeg" // register JPEG decoder
+    _ "image/png"  // register PNG decoder
 )
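For background, these blank imports register the GIF, JPEG, and PNG decoders with the standard image package so that image.Decode can recognize those formats; linters such as revive's blank-imports rule expect each blank import to carry an explanatory comment, which is what the hunk adds. A minimal sketch of how a registered decoder is used (the file name is hypothetical):

    package main

    import (
        "fmt"
        "image"
        _ "image/png" // blank import registers the PNG decoder with image.Decode
        "os"
    )

    func main() {
        f, err := os.Open("example.png") // hypothetical input file
        if err != nil {
            fmt.Println(err)
            return
        }
        defer f.Close()

        // image.Decode picks whichever decoder was registered via blank imports.
        _, format, err := image.Decode(f)
        if err != nil {
            fmt.Println(err)
            return
        }

        fmt.Println("decoded format:", format)
    }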
@@ -17,7 +17,7 @@ func ConfigFilePath(configPath, baseName, defaultExt string) string {
         return ""
     }

-    // Search file in current directory if configPath is emtpy.
+    // Search file in current directory if configPath is empty.
     if configPath == "" {
         if dir, err := os.Getwd(); err == nil && dir != "" {
             configPath = dir
@@ -55,7 +55,7 @@ func Copy(src, dest string, force bool) (err error) {
         return err
     }

-    thisFile, err := os.Open(src)
+    thisFile, err := os.Open(src) //nolint:gosec // src is validated by callers

     if err != nil {
         return err
@@ -64,7 +64,7 @@ func Copy(src, dest string, force bool) (err error) {
     defer thisFile.Close()

     // Open destination for write; create or truncate to avoid trailing bytes
-    destFile, err := os.OpenFile(dest, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, ModeFile)
+    destFile, err := os.OpenFile(dest, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, ModeFile) //nolint:gosec // dest is derived from validated input

     if err != nil {
         return err
@@ -136,9 +136,5 @@ func Move(src, dest string, force bool) (err error) {
         return err
     }

-    if err = os.Remove(src); err != nil {
-        return err
-    }
-
-    return nil
+    return os.Remove(src)
 }
@@ -17,7 +17,7 @@ func TestCopy_NewDestination_Succeeds(t *testing.T) {

     err := Copy(src, dst, false)
     assert.NoError(t, err)
-    b, _ := os.ReadFile(dst)
+    b, _ := os.ReadFile(dst) //nolint:gosec // test helper reads temp file
     assert.Equal(t, "hello", string(b))
 }

@@ -31,7 +31,7 @@ func TestCopy_ExistingNonEmpty_NoForce_Error(t *testing.T) {

     err := Copy(src, dst, false)
     assert.Error(t, err)
-    b, _ := os.ReadFile(dst)
+    b, _ := os.ReadFile(dst) //nolint:gosec // test helper reads temp file
     assert.Equal(t, "existing", string(b))
 }

@@ -46,7 +46,7 @@ func TestCopy_ExistingNonEmpty_Force_TruncatesAndOverwrites(t *testing.T) {

     err := Copy(src, dst, true)
     assert.NoError(t, err)
-    b, _ := os.ReadFile(dst)
+    b, _ := os.ReadFile(dst) //nolint:gosec // test helper reads temp file
     assert.Equal(t, "short", string(b))
 }

@@ -60,7 +60,7 @@ func TestCopy_ExistingEmpty_NoForce_AllowsReplace(t *testing.T) {

     err := Copy(src, dst, false)
     assert.NoError(t, err)
-    b, _ := os.ReadFile(dst)
+    b, _ := os.ReadFile(dst) //nolint:gosec // test helper reads temp file
     assert.Equal(t, "data", string(b))
 }

@@ -93,7 +93,7 @@ func TestMove_NewDestination_Succeeds(t *testing.T) {
     // Source is removed; dest contains data
     _, serr := os.Stat(src)
     assert.True(t, os.IsNotExist(serr))
-    b, _ := os.ReadFile(dst)
+    b, _ := os.ReadFile(dst) //nolint:gosec // test helper reads temp file
     assert.Equal(t, "hello", string(b))
 }

@@ -108,8 +108,8 @@ func TestMove_ExistingNonEmpty_NoForce_Error(t *testing.T) {
     err := Move(src, dst, false)
     assert.Error(t, err)
     // Verify both files unchanged
-    bsrc, _ := os.ReadFile(src)
-    bdst, _ := os.ReadFile(dst)
+    bsrc, _ := os.ReadFile(src) //nolint:gosec // test helper reads temp file
+    bdst, _ := os.ReadFile(dst) //nolint:gosec // test helper reads temp file
     assert.Equal(t, "src", string(bsrc))
     assert.Equal(t, "dst", string(bdst))
 }
@@ -126,7 +126,7 @@ func TestMove_ExistingEmpty_NoForce_AllowsReplace(t *testing.T) {
     assert.NoError(t, err)
     _, serr := os.Stat(src)
     assert.True(t, os.IsNotExist(serr))
-    bdst, _ := os.ReadFile(dst)
+    bdst, _ := os.ReadFile(dst) //nolint:gosec // test helper reads temp file
     assert.Equal(t, "src", string(bdst))
 }

@@ -142,7 +142,7 @@ func TestMove_ExistingNonEmpty_Force_Succeeds(t *testing.T) {
     assert.NoError(t, err)
     _, serr := os.Stat(src)
     assert.True(t, os.IsNotExist(serr))
-    bdst, _ := os.ReadFile(dst)
+    bdst, _ := os.ReadFile(dst) //nolint:gosec // test helper reads temp file
     assert.Equal(t, "AAA", string(bdst))
 }

@@ -10,6 +10,7 @@ import (
     "github.com/photoprism/photoprism/pkg/fs/fastwalk"
 )

+// OriginalPaths lists default Originals search paths.
 var OriginalPaths = []string{
     "/photoprism/storage/media/originals",
     "/photoprism/media/originals",
@@ -76,6 +77,7 @@ var OriginalPaths = []string{
     "/var/lib/photoprism/originals",
 }

+// ImportPaths lists default Import search paths.
 var ImportPaths = []string{
     "/photoprism/storage/media/import",
     "/photoprism/media/import",
@@ -110,6 +112,7 @@ var ImportPaths = []string{
     "/var/lib/photoprism/import",
 }

+// AssetPaths lists default asset paths.
 var AssetPaths = []string{
     "/opt/photoprism/assets",
     "/photoprism/assets",
@@ -120,6 +123,7 @@ var AssetPaths = []string{
     "/var/lib/photoprism/assets",
 }

+// ModelsPaths lists default model lookup paths.
 var ModelsPaths = []string{
     "/opt/photoprism/assets/models",
     "/photoprism/assets/models",
@@ -1,12 +1,16 @@
 package fs

+// Status indicates whether a path was seen or processed.
 type Status int8

 const (
+    // Found marks a path as seen.
     Found Status = 1
+    // Processed marks a path as fully handled.
     Processed Status = 2
 )

+// Done stores per-path processing state.
 type Done map[string]Status

 // Processed counts the number of processed files.
@@ -22,10 +26,12 @@ func (d Done) Processed() int {
     return count
 }

+// Exists reports whether any status is recorded.
 func (s Status) Exists() bool {
     return s > 0
 }

+// Processed returns true if the path was marked as processed.
 func (s Status) Processed() bool {
     return s >= Processed
 }
@@ -6,6 +6,7 @@ import (
     "strings"
 )

+// nolint:unused // kept for potential platform-specific filesystem filtering
 func findMounts(mounts []Mount, path string) ([]Mount, error) {
     var err error
     path, err = filepath.Abs(path)
@@ -4,7 +4,7 @@ package duf

 import "strings"

-//nolint:revive,deadcode
+//nolint:revive // constants kept for reference in filesystem detection
 const (
     // man statfs
     ADFS_SUPER_MAGIC = 0xadf5
@@ -10,8 +10,10 @@ var (
     onlyMp = ""
 )

+// FilterValues holds a set of filter strings.
 type FilterValues map[string]struct{}

+// NewFilterValues converts strings or comma-separated lists into a FilterValues set.
 func NewFilterValues(s ...string) FilterValues {
     if len(s) == 0 {
         return make(FilterValues)
@@ -5,11 +5,14 @@ import (
 )

 var (
+    // nolint:unused // kept for potential future grouping logic extensions
     groups = []string{LocalDevice, NetworkDevice, FuseDevice, SpecialDevice, LoopsDevice, BindsMount}
 )

+// GroupedMounts maps device types to their mounts.
 type GroupedMounts map[string][]Mount

+// GroupMounts groups mounts by device type, applying the given filters.
 func GroupMounts(m []Mount, filters FilterOptions) GroupedMounts {
     deviceMounts := make(GroupedMounts)
     hasOnlyDevices := len(filters.OnlyDevices) != 0
@@ -26,7 +26,7 @@ type Mount struct {
 }

 func readLines(filename string) ([]string, error) {
-    file, err := os.Open(filename)
+    file, err := os.Open(filename) //nolint:gosec // filename comes from platform mountinfo source
     if err != nil {
         return nil, err
     }
@@ -49,7 +49,7 @@ func unescapeFstab(path string) string {
     return escaped
 }

-//nolint:deadcode,unused // used on BSD
+//nolint:unused // used on BSD
 func byteToString(orig []byte) string {
     n := -1
     l := -1
@@ -73,7 +73,7 @@ func byteToString(orig []byte) string {
     return string(orig[l:n])
 }

-//nolint:deadcode,unused // used on OpenBSD
+//nolint:unused // used on OpenBSD
 func intToString(orig []int8) string {
     ret := make([]byte, len(orig))
     size := -1
@@ -18,13 +18,13 @@ const (
     // (0) (1) (2) (3) (4) (5) (6) (7) (8) (9) (10)
     //
     // (0) mount ID: unique identifier of the mount (may be reused after umount).
-    //mountinfoMountID = 0
+    // mountinfoMountID = 0
     // (1) parent ID: ID of parent (or of self for the top of the mount tree).
-    //mountinfoParentID = 1
+    // mountinfoParentID = 1
     // (2) major:minor: value of st_dev for files on filesystem.
-    //mountinfoMajorMinor = 2
+    // mountinfoMajorMinor = 2
     // (3) root: root of the mount within the filesystem.
-    //mountinfoRoot = 3
+    // mountinfoRoot = 3
     // (4) mount point: mount point relative to the process's root.
     mountinfoMountPoint = 4
     // (5) mount options: per mount options.
@@ -32,13 +32,13 @@ const (
     // (6) optional fields: zero or more fields terminated by "-".
     mountinfoOptionalFields = 6
     // (7) separator between optional fields.
-    //mountinfoSeparator = 7
+    // mountinfoSeparator = 7
     // (8) filesystem type: name of filesystem of the form.
     mountinfoFsType = 8
     // (9) mount source: filesystem specific information or "none".
     mountinfoMountSource = 9
     // (10) super options: per super block options.
-    //mountinfoSuperOptions = 10
+    // mountinfoSuperOptions = 10
 )

 // Stat returns the mountpoint's stat information.
@@ -70,6 +70,11 @@ func mounts() ([]Mount, []string, error) {
         }

         // blockDeviceID := fields[mountinfoMountID]
+        if len(fields) <= mountinfoMountSource {
+            warnings = append(warnings, fmt.Sprintf("incomplete mountinfo line: %s", line))
+            continue
+        }
+
         mountPoint := fields[mountinfoMountPoint]
         mountOpts := fields[mountinfoMountOpts]
         fstype := fields[mountinfoFsType]
@@ -93,14 +98,14 @@ func mounts() ([]Mount, []string, error) {
             Type: fsTypeMap[int64(stat.Type)], //nolint:unconvert
             Opts: mountOpts,
             Metadata: stat,
-            Total: (uint64(stat.Blocks) * uint64(stat.Bsize)), //nolint:unconvert
-            Free: (uint64(stat.Bavail) * uint64(stat.Bsize)), //nolint:unconvert
-            Used: (uint64(stat.Blocks) - uint64(stat.Bfree)) * uint64(stat.Bsize), //nolint:unconvert
+            Total: (uint64(stat.Blocks) * uint64(stat.Bsize)), //nolint:unconvert,gosec // stat values are kernel-provided
+            Free: (uint64(stat.Bavail) * uint64(stat.Bsize)), //nolint:unconvert,gosec
+            Used: (uint64(stat.Blocks) - uint64(stat.Bfree)) * uint64(stat.Bsize), //nolint:unconvert,gosec
             Inodes: stat.Files,
             InodesFree: stat.Ffree,
             InodesUsed: stat.Files - stat.Ffree,
             Blocks: uint64(stat.Blocks), //nolint:unconvert
-            BlockSize: uint64(stat.Bsize),
+            BlockSize: uint64(stat.Bsize), //nolint:gosec // kernel-provided value fits uint64
         }
         d.DeviceType = deviceType(d)

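The guard added in the mounts() hunk above prevents an out-of-range panic: the later field accesses assume at least mountinfoMountSource+1 fields, so truncated /proc/self/mountinfo lines are now skipped with a warning instead of crashing the walk. A standalone sketch of the same bounds-check idea (the index constant and sample line are illustrative only):

    package main

    import (
        "fmt"
        "strings"
    )

    const mountSourceIndex = 9 // illustrative index of the mount source field

    func main() {
        line := "36 35 98:0 /mnt1 /mnt2 rw,noatime master:1 - ext3" // truncated sample
        fields := strings.Fields(line)

        // Guard before indexing: malformed or truncated lines are skipped
        // instead of causing an out-of-range panic.
        if len(fields) <= mountSourceIndex {
            fmt.Printf("incomplete mountinfo line: %s\n", line)
            return
        }

        fmt.Println("mount source:", fields[mountSourceIndex])
    }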
@@ -25,6 +25,7 @@ func parseCommaSeparatedValues(values string) FilterValues {
 }

 // validateGroups validates the parsed group maps.
+// nolint:unused // reserved for future validation hooks
 func validateGroups(m FilterValues) error {
     for k := range m {
         found := slices.Contains(groups, k)
@@ -3,7 +3,6 @@
 // license that can be found in the LICENSE file.

 //go:build freebsd || openbsd || netbsd
-// +build freebsd openbsd netbsd

 package fastwalk

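The deleted lines are the legacy build-constraint syntax. Since Go 1.17 the //go:build form is authoritative and gofmt keeps it in sync, so once a module targets a modern toolchain the mirrored // +build lines can simply be removed, as this and the following hunks do. A minimal sketch of what a cleaned-up file header looks like:

    //go:build linux && !appengine

    // On Go 1.17 and later the //go:build line above is the only build
    // constraint needed; the legacy "// +build linux,!appengine" mirror line
    // that older toolchains required is redundant and can be dropped.
    package fastwalk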
@@ -3,8 +3,6 @@
 // license that can be found in the LICENSE file.

 //go:build (linux || darwin) && !appengine
-// +build linux darwin
-// +build !appengine

 package fastwalk

@@ -3,7 +3,6 @@
 // license that can be found in the LICENSE file.

 //go:build darwin || freebsd || openbsd || netbsd
-// +build darwin freebsd openbsd netbsd

 package fastwalk

@@ -3,7 +3,6 @@
 // license that can be found in the LICENSE file.

 //go:build linux && !appengine
-// +build linux,!appengine

 package fastwalk

@@ -15,7 +14,7 @@ import (

 func direntNamlen(dirent *syscall.Dirent) uint64 {
     const fixedHdr = uint16(unsafe.Offsetof(syscall.Dirent{}.Name))
-    nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0]))
+    nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0])) //nolint:gosec // bounded by Dirent name buffer size
     const nameBufLen = uint16(len(nameBuf))
     limit := dirent.Reclen - fixedHdr
     if limit > nameBufLen {
@@ -3,7 +3,6 @@
 // license that can be found in the LICENSE file.

 //go:build appengine || (!linux && !darwin && !freebsd && !openbsd && !netbsd)
-// +build appengine !linux,!darwin,!freebsd,!openbsd,!netbsd

 package fastwalk

@@ -8,6 +8,7 @@ import (
     "bytes"
     "flag"
     "fmt"
+    "go/build"
     "os"
     "path/filepath"
     "reflect"
@@ -43,14 +44,14 @@ func testFastWalk(t *testing.T, files map[string]string, callback func(path stri
     for path, contents := range files {
         file := filepath.Join(tempdir, "/src", path)

-        if err = os.MkdirAll(filepath.Dir(file), 0755); err != nil {
+        if err = os.MkdirAll(filepath.Dir(file), 0o750); err != nil {
             t.Fatal(err)
         }

         if strings.HasPrefix(contents, "LINK:") {
             err = os.Symlink(strings.TrimPrefix(contents, "LINK:"), file)
         } else {
-            err = os.WriteFile(file, []byte(contents), 0644)
+            err = os.WriteFile(file, []byte(contents), 0o600)
         }

         if err != nil {
@@ -229,7 +230,8 @@ func TestFastWalk_TraverseSymlink(t *testing.T) {
     })
 }

-var benchDir = flag.String("benchdir", runtime.GOROOT(), "The directory to scan for BenchmarkFastWalk")
+// Default to build.Default.GOROOT to avoid runtime.GOROOT deprecation.
+var benchDir = flag.String("benchdir", build.Default.GOROOT, "The directory to scan for BenchmarkFastWalk")

 func BenchmarkFastWalk(b *testing.B) {
     b.ReportAllocs()
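The permission changes in the test above switch to 0o-prefixed octal literals and tighter modes, which is the style gosec's file-permission checks (commonly G301 for directory creation and G306 for file writes) accept without findings. A small standalone sketch of the pattern (paths are hypothetical):

    package main

    import (
        "log"
        "os"
        "path/filepath"
    )

    func main() {
        dir := filepath.Join(os.TempDir(), "fastwalk-example") // hypothetical path

        // 0o750: owner rwx, group rx, no world access (directory creation).
        if err := os.MkdirAll(dir, 0o750); err != nil {
            log.Fatal(err)
        }

        // 0o600: owner read/write only (file write).
        if err := os.WriteFile(filepath.Join(dir, "f.txt"), []byte("contents"), 0o600); err != nil {
            log.Fatal(err)
        }
    }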
@@ -3,8 +3,6 @@
 // license that can be found in the LICENSE file.

 //go:build (linux || darwin || freebsd || openbsd || netbsd) && !appengine
-// +build linux darwin freebsd openbsd netbsd
-// +build !appengine

 package fastwalk

@@ -79,7 +77,7 @@ func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) e
 func parseDirEnt(buf []byte) (consumed int, name string, typ os.FileMode) {
     // golang.org/issue/37269
     dirent := &syscall.Dirent{}
-    copy((*[unsafe.Sizeof(syscall.Dirent{})]byte)(unsafe.Pointer(dirent))[:], buf)
+    copy((*[unsafe.Sizeof(syscall.Dirent{})]byte)(unsafe.Pointer(dirent))[:], buf) //nolint:gosec // unsafe needed for fast directory walk
     if v := unsafe.Offsetof(dirent.Reclen) + unsafe.Sizeof(dirent.Reclen); uintptr(len(buf)) < v {
         panic(fmt.Sprintf("buf size of %d smaller than dirent header size %d", len(buf), v))
     }
@@ -114,15 +112,16 @@ func parseDirEnt(buf []byte) (consumed int, name string, typ os.FileMode) {
         return
     }

-    nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0]))
+    nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0])) //nolint:gosec // bounded by dirent name buffer
     nameLen := direntNamlen(dirent)

     // Special cases for common things:
-    if nameLen == 1 && nameBuf[0] == '.' {
+    switch {
+    case nameLen == 1 && nameBuf[0] == '.':
         name = "."
-    } else if nameLen == 2 && nameBuf[0] == '.' && nameBuf[1] == '.' {
+    case nameLen == 2 && nameBuf[0] == '.' && nameBuf[1] == '.':
         name = ".."
-    } else {
+    default:
         name = string(nameBuf[:nameLen])
     }
     return
@@ -5,6 +5,7 @@ import (
     "strings"
 )

+// Common file extensions used throughout PhotoPrism.
 const (
     ExtNone = ""
     ExtLocal = ".local"
@@ -197,20 +197,12 @@ func (m FileExtensions) Types(noUppercase bool) TypesExt {

     if noUppercase {
         for ext, t := range m {
-            if _, ok := result[t]; ok {
-                result[t] = append(result[t], ext)
-            } else {
-                result[t] = []string{ext}
-            }
+            result[t] = append(result[t], ext)
         }
     } else {
         for ext, t := range m {
             extUpper := strings.ToUpper(ext)
-            if _, ok := result[t]; ok {
-                result[t] = append(result[t], ext, extUpper)
-            } else {
-                result[t] = []string{ext, extUpper}
-            }
+            result[t] = append(result[t], ext, extUpper)
         }
     }

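The simplification above works because indexing a map with a missing key yields the zero value, and for a slice value that zero is nil; append on a nil slice allocates a new one, so the explicit existence check was redundant (the kind of branch linters such as staticcheck and gocritic flag). A minimal illustration:

    package main

    import "fmt"

    func main() {
        result := make(map[string][]string)

        // result["jpg"] does not exist yet; the lookup returns a nil slice,
        // and append on nil allocates a fresh slice, so no pre-check is needed.
        result["jpg"] = append(result["jpg"], ".jpg")
        result["jpg"] = append(result["jpg"], ".jpeg")

        fmt.Println(result["jpg"]) // [.jpg .jpeg]
    }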
@@ -1,5 +1,6 @@
 package fs

+// TypeMap maps file types to a representative extension string.
 type TypeMap map[Type]string

 // TypeInfo contains human-readable descriptions for supported file formats
@@ -1,13 +1,13 @@
 package fs

 import (
-    _ "image/gif"
-    _ "image/jpeg"
-    _ "image/png"
+    _ "image/gif"  // register GIF decoder
+    _ "image/jpeg" // register JPEG decoder
+    _ "image/png"  // register PNG decoder

-    _ "golang.org/x/image/bmp"
-    _ "golang.org/x/image/tiff"
-    _ "golang.org/x/image/webp"
+    _ "golang.org/x/image/bmp"  // register BMP decoder
+    _ "golang.org/x/image/tiff" // register TIFF decoder
+    _ "golang.org/x/image/webp" // register WEBP decoder
 )

 // Supported archive file types:
@@ -1,13 +1,13 @@
 package fs

 import (
-    _ "image/gif"
-    _ "image/jpeg"
-    _ "image/png"
+    _ "image/gif"  // register GIF decoder
+    _ "image/jpeg" // register JPEG decoder
+    _ "image/png"  // register PNG decoder

-    _ "golang.org/x/image/bmp"
-    _ "golang.org/x/image/tiff"
-    _ "golang.org/x/image/webp"
+    _ "golang.org/x/image/bmp"  // register BMP decoder
+    _ "golang.org/x/image/tiff" // register TIFF decoder
+    _ "golang.org/x/image/webp" // register WEBP decoder
 )

 // TypesExt maps standard formats to file extensions.
@@ -15,4 +15,6 @@ type TypesExt map[Type][]string

 // FileTypes contains the default file type extensions.
 var FileTypes = Extensions.Types(ignoreCase)
+
+// FileTypesLower contains lowercase extensions for case-insensitive lookup.
 var FileTypesLower = Extensions.Types(true)
@@ -65,6 +65,7 @@ func WebFileInfo(file webdav.FileInfo, dir string) FileInfo {
     return result
 }

+// FileInfos is a slice helper for bulk file info operations.
 type FileInfos []FileInfo

 func (infos FileInfos) Len() int { return len(infos) }
@@ -72,6 +73,8 @@ func (infos FileInfos) Swap(i, j int) { infos[i], infos[j] = infos[j], infos[i]
 func (infos FileInfos) Less(i, j int) bool {
     return strings.Compare(infos[i].Abs, infos[j].Abs) == -1
 }
+
+// Abs returns absolute file paths for all file infos.
 func (infos FileInfos) Abs() (result []string) {
     for _, info := range infos {
         result = append(result, info.Abs)
@@ -80,6 +83,7 @@ func (infos FileInfos) Abs() (result []string) {
     return result
 }

+// NewFileInfos builds FileInfos from os.FileInfo with directory prefix.
 func NewFileInfos(infos []os.FileInfo, dir string) FileInfos {
     var result FileInfos

pkg/fs/fs.go (16 changes)
@@ -38,9 +38,12 @@ import (
 var ignoreCase bool

 const (
+    // PathSeparator is the filesystem path separator for the current OS.
     PathSeparator = string(filepath.Separator)
-    Home = "~"
-    HomePath = Home + PathSeparator
+    // Home represents the tilde shorthand for the user's home directory.
+    Home = "~"
+    // HomePath expands Home with a trailing separator.
+    HomePath = Home + PathSeparator
 )

 // Stat returns the os.FileInfo for the given file path, or an error if it does not exist.
@@ -214,7 +217,7 @@ func Download(fileName string, url string) error {

 // DirIsEmpty returns true if a directory is empty.
 func DirIsEmpty(path string) bool {
-    f, err := os.Open(path)
+    f, err := os.Open(path) //nolint:gosec // path provided by caller; intended to access filesystem

     if err != nil {
         return false
@@ -223,10 +226,5 @@ func DirIsEmpty(path string) bool {
     defer f.Close()

     _, err = f.Readdirnames(1)
-
-    if err == io.EOF {
-        return true
-    }
-
-    return false
+    return err == io.EOF
 }
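The DirIsEmpty rewrite above folds the if/return pair into one expression: Readdirnames(1) returns io.EOF exactly when the directory has no entries, so the comparison can be returned directly. A small self-contained sketch built on that assumption:

    package main

    import (
        "fmt"
        "io"
        "os"
    )

    // dirIsEmpty mirrors the simplified logic shown in the diff.
    func dirIsEmpty(path string) bool {
        f, err := os.Open(path)
        if err != nil {
            return false
        }
        defer f.Close()

        // Readdirnames(1) returns io.EOF when there is nothing to read,
        // i.e. the directory is empty.
        _, err = f.Readdirnames(1)
        return err == io.EOF
    }

    func main() {
        dir, _ := os.MkdirTemp("", "empty-example")
        defer os.RemoveAll(dir)

        fmt.Println(dirIsEmpty(dir)) // true for a freshly created directory
    }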
@@ -130,7 +130,7 @@ func TestDirIsEmpty(t *testing.T) {
         assert.Equal(t, false, DirIsEmpty("./xxx"))
     })
     t.Run("EmptyDir", func(t *testing.T) {
-        if err := os.Mkdir("./testdata/emptyDir", 0777); err != nil {
+        if err := os.Mkdir("./testdata/emptyDir", 0o750); err != nil {
             t.Fatal(err)
         }
         defer os.RemoveAll("./testdata/emptyDir")
@@ -168,12 +168,12 @@ func TestDownload_SuccessAndErrors(t *testing.T) {

     dir := t.TempDir()
     goodPath := filepath.Join(dir, "sub", "file.txt")
-    badPath := filepath.Join("file.txt") // invalid path according to Download
+    badPath := "file.txt" // invalid path according to Download

     // Success
     err := Download(goodPath, tsOK.URL)
     assert.NoError(t, err)
-    b, rerr := os.ReadFile(goodPath)
+    b, rerr := os.ReadFile(goodPath) //nolint:gosec // test helper reads temp file
     assert.NoError(t, rerr)
     assert.Equal(t, "hello world", string(b))

@@ -1,7 +1,7 @@
 package fs

 import (
-    "crypto/sha1"
+    "crypto/sha1" //nolint:gosec // SHA1 retained for legacy hash compatibility
     "encoding/hex"
     "hash/crc32"
     "io"
@@ -14,7 +14,7 @@ func Hash(fileName string) string {
 func Hash(fileName string) string {
     var result []byte

-    file, err := os.Open(fileName)
+    file, err := os.Open(fileName) //nolint:gosec // caller-controlled path; intended file read

     if err != nil {
         return ""
@@ -22,7 +22,7 @@ func Hash(fileName string) string {

     defer file.Close()

-    hash := sha1.New()
+    hash := sha1.New() //nolint:gosec // legacy SHA1 hashes retained for compatibility

     if _, err := io.Copy(hash, file); err != nil {
         return ""
@@ -35,7 +35,7 @@ func Checksum(fileName string) string {
 func Checksum(fileName string) string {
     var result []byte

-    file, err := os.Open(fileName)
+    file, err := os.Open(fileName) //nolint:gosec // caller-controlled path; intended file read

     if err != nil {
         return ""
pkg/fs/id.go (28 changes)
@@ -6,9 +6,14 @@ import (
     "github.com/photoprism/photoprism/pkg/rnd"
 )

-var DscNameRegexp = regexp.MustCompile("\\D{3}[\\d_]\\d{4,8}_?\\d{0,6}_?\\d{0,6}[\\.jpgJPGXx]{0,4}")
+// DscNameRegexp matches DSLR-like file names.
+var DscNameRegexp = regexp.MustCompile(`\D{3}[\d_]\d{4,8}_?\d{0,6}_?\d{0,6}[\.jpgJPGXx]{0,4}`)
+
+// UniqueNameRegexp matches generated unique names.
 var UniqueNameRegexp = regexp.MustCompile("[a-f0-9]{8,16}_[a-f0-9]{6,16}_[A-Za-z0-9]{1,20}_?[A-Za-z0-9]{0,4}") // Example: 8263987746_d0a6055c58_o
-var UUIDNameRegexp = regexp.MustCompile("[A-Fa-f0-9\\-]{16,36}_?[A-Za-z0-9_]{0,20}") // Example: 8263987746_d0a6055c58_o
+
+// UUIDNameRegexp matches names prefixed with UUIDs.
+var UUIDNameRegexp = regexp.MustCompile(`[A-Fa-f0-9\-]{16,36}_?[A-Za-z0-9_]{0,20}`) // Example: 8263987746_d0a6055c58_o

 // IsInt tests if the file base is an integer number.
 func IsInt(s string) bool {
@@ -76,21 +81,22 @@ func IsGenerated(fileName string) bool {

     base := BasePrefix(fileName, false)

-    if IsAsciiID(base) {
+    switch {
+    case IsAsciiID(base):
         return true
-    } else if IsHash(base) {
+    case IsHash(base):
         return true
-    } else if IsInt(base) {
+    case IsInt(base):
         return true
-    } else if IsDscName(base) {
+    case IsDscName(base):
         return true
-    } else if IsUniqueName(base) {
+    case IsUniqueName(base):
         return true
-    } else if rnd.IsUnique(base, 0) {
+    case rnd.IsUnique(base, 0):
         return true
-    } else if IsCanonical(base) {
+    case IsCanonical(base):
         return true
+    default:
+        return false
     }
-
-    return false
 }
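Several hunks in this commit, including IsGenerated above, replace if/else-if chains with a tagless switch; this is the rewrite suggested by gocritic's ifElseChain check, and it changes only the shape of the code, not its behavior. A minimal sketch of the form:

    package main

    import "fmt"

    // classify shows the tagless-switch form preferred over an if/else-if chain.
    func classify(n int) string {
        switch {
        case n < 0:
            return "negative"
        case n == 0:
            return "zero"
        default:
            return "positive"
        }
    }

    func main() {
        fmt.Println(classify(-3), classify(0), classify(7))
    }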
@@ -8,6 +8,7 @@ import (
     "sync"
 )

+// IgnoreLogFunc logs ignored file names.
 type IgnoreLogFunc func(fileName string)

 // IgnorePattern represents a name pattern to be ignored.
@@ -171,7 +172,7 @@ func (l *IgnoreList) Ignore(name string) bool {
     baseName := filepath.Base(name)

     // Change name to lowercase for case-insensitive comparison.
-    if l.caseSensitive == false {
+    if !l.caseSensitive {
         dir = strings.ToLower(dir)
         baseName = strings.ToLower(baseName)
     }
@@ -11,6 +11,7 @@ import (
 )

 const (
+    // MimeTypeUnknown represents an unknown mime type.
     MimeTypeUnknown = ""
 )

@@ -9,7 +9,7 @@ import (

 // ReadLines returns all lines in a text file as string slice.
 func ReadLines(fileName string) (lines []string, err error) {
-    file, err := os.Open(fileName)
+    file, err := os.Open(fileName) //nolint:gosec // caller-controlled path; intended file read

     if err != nil {
         return lines, err
@@ -19,7 +19,7 @@ func SymlinksSupported(storagePath string) (bool, error) {
     }(linkName, targetName)

     // Create empty test target file.
-    if targetFile, err := os.OpenFile(targetName, os.O_RDONLY|os.O_CREATE, ModeFile); err != nil {
+    if targetFile, err := os.OpenFile(targetName, os.O_RDONLY|os.O_CREATE, ModeFile); err != nil { //nolint:gosec // targetName is validated by caller
         return false, err
     } else if err = targetFile.Close(); err != nil {
         return false, err
@@ -9,7 +9,8 @@ import (
 func SkipWalk(name string, isDir, isSymlink bool, done Done, ignore *IgnoreList) (skip bool, result error) {
     isDone := done[name].Exists()

-    if isSymlink {
+    switch {
+    case isSymlink:
         // Check if symlink points to a directory.
         if link, err := os.Stat(name); err == nil && link.IsDir() {
             // Skip directories.
@@ -22,12 +23,13 @@ func SkipWalk(name string, isDir, isSymlink bool, done Done, ignore *IgnoreList)
         }

         // Skip symlinked directories that cannot be resolved or are ignored, hidden, or already done.
-        if ignore.Ignore(name) || evalErr != nil || isDone || done[resolved].Exists() {
+        switch {
+        case ignore.Ignore(name) || evalErr != nil || isDone || done[resolved].Exists():
             result = filepath.SkipDir
-        } else if FileExists(filepath.Join(resolved, PPStorageFilename)) {
+        case FileExists(filepath.Join(resolved, PPStorageFilename)):
             // Skip symlinked directories that contain a .ppstorage file.
             result = filepath.SkipDir
-        } else {
+        default:
             // Flag the symlink target as processed.
             done[resolved] = Found
         }
@@ -36,7 +38,7 @@ func SkipWalk(name string, isDir, isSymlink bool, done Done, ignore *IgnoreList)
             skip = true
             result = filepath.SkipDir
         }
-    } else if isDir {
+    case isDir:
         skip = true

         if _ = ignore.Path(name); ignore.Ignore(name) || isDone {
@@ -46,9 +48,11 @@ func SkipWalk(name string, isDir, isSymlink bool, done Done, ignore *IgnoreList)
             // Skip directories that contain a .ppstorage file.
             result = filepath.SkipDir
         }
-    } else if ignore.Ignore(name) || isDone {
-        // Skip files that are hidden or already done.
-        skip = true
+    default:
+        if ignore.Ignore(name) || isDone {
+            // Skip files that are hidden or already done.
+            skip = true
+        }
     }

     if skip {
@@ -29,7 +29,7 @@ func WriteFile(fileName string, data []byte, perm os.FileMode) error {
         }
     }

-    file, err := os.OpenFile(fileName, os.O_RDWR|os.O_CREATE|os.O_TRUNC, perm)
+    file, err := os.OpenFile(fileName, os.O_RDWR|os.O_CREATE|os.O_TRUNC, perm) //nolint:gosec // caller-controlled path; intended write

     if err != nil {
         return err
@@ -68,7 +68,7 @@ func WriteFileFromReader(fileName string, reader io.Reader) (err error) {

     var file *os.File

-    if file, err = os.OpenFile(fileName, os.O_RDWR|os.O_CREATE|os.O_TRUNC, ModeFile); err != nil {
+    if file, err = os.OpenFile(fileName, os.O_RDWR|os.O_CREATE|os.O_TRUNC, ModeFile); err != nil { //nolint:gosec // caller-controlled path; intended write
         return err
     }

@@ -129,7 +129,7 @@ func TestWriteFileFromReader(t *testing.T) {
     assert.NoError(t, writeErr)
     assert.True(t, unixTime >= time.Now().Unix())

-    fileReader, readerErr := os.Open(filePath1)
+    fileReader, readerErr := os.Open(filePath1) //nolint:gosec // test helper reads temp file
     assert.NoError(t, readerErr)

     fileErr := WriteFileFromReader(filePath2, fileReader)
@@ -172,7 +172,7 @@ func TestCacheFileFromReader(t *testing.T) {
     assert.NoError(t, writeErr)
     assert.True(t, unixTime >= time.Now().Unix())

-    fileReader, readerErr := os.Open(filePath1)
+    fileReader, readerErr := os.Open(filePath1) //nolint:gosec // test helper reads temp file
     assert.NoError(t, readerErr)

     cacheFile, cacheErr := CacheFileFromReader(filePath2, fileReader)
@@ -208,7 +208,7 @@ func TestWriteFile_Truncates(t *testing.T) {
     p := filepath.Join(dir, "f.txt")
     assert.NoError(t, os.WriteFile(p, []byte("LONGDATA"), ModeFile))
     assert.NoError(t, WriteFile(p, []byte("short"), ModeFile))
-    b, err := os.ReadFile(p)
+    b, err := os.ReadFile(p) //nolint:gosec // test helper reads temp file
     assert.NoError(t, err)
     assert.Equal(t, "short", string(b))
 }
@@ -24,7 +24,7 @@ func Zip(zipName string, files []string, compress bool) (err error) {

     var newZipFile *os.File

-    if newZipFile, err = os.Create(zipName); err != nil {
+    if newZipFile, err = os.Create(zipName); err != nil { //nolint:gosec // zipName provided by caller
         return err
     }

@@ -46,7 +46,7 @@ func Zip(zipName string, files []string, compress bool) (err error) {
 // ZipFile adds a file to a zip archive, optionally with an alias and compression.
 func ZipFile(zipWriter *zip.Writer, fileName, fileAlias string, compress bool) (err error) {
     // Open file.
-    fileToZip, err := os.Open(fileName)
+    fileToZip, err := os.Open(fileName) //nolint:gosec // fileName provided by caller

     if err != nil {
         return err
@@ -174,7 +174,7 @@ func unzipFileWithLimit(f *zip.File, dir string, fileSizeLimit int64) (fileName
         return fileName, err
     }

-    fd, err := os.OpenFile(fileName, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())
+    fd, err := os.OpenFile(fileName, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()) //nolint:gosec // destination derived from safeJoin
     if err != nil {
         return fileName, err
     }
@@ -14,7 +14,7 @@ import (

 func writeZip(t *testing.T, path string, entries map[string][]byte) {
     t.Helper()
-    f, err := os.Create(path)
+    f, err := os.Create(path) //nolint:gosec // test helper creates temp zip file
     if err != nil {
         t.Fatal(err)
     }
@@ -62,7 +62,7 @@ func TestUnzip_SkipRulesAndLimits(t *testing.T) {
     // ok2 (1 byte) allowed; total limit reduces to 2; nothing else left that fits
     assert.ElementsMatch(t, []string{filepath.Join(outDir, "ok2.txt")}, files)
     // Ensure file written
-    b, rerr := os.ReadFile(filepath.Join(outDir, "ok2.txt"))
+    b, rerr := os.ReadFile(filepath.Join(outDir, "ok2.txt")) //nolint:gosec // test helper reads temp file
     assert.NoError(t, rerr)
     assert.Equal(t, []byte("x"), b)
     // Skipped contains at least the three excluded entries
@@ -213,7 +213,7 @@ func writeZip64Stub(t *testing.T, path, name string, size uint64) {
     if len(filename) > math.MaxUint16 {
         t.Fatalf("filename too long")
     }
-    writeLE(uint16(len(filename)))
+    writeLE(uint16(len(filename))) //nolint:gosec // filename length checked above
     writeLE(localExtraLen)
     bw(filename)
     // zip64 extra
@@ -239,7 +239,7 @@ func writeZip64Stub(t *testing.T, path, name string, size uint64) {
     if len(filename) > math.MaxUint16 {
         t.Fatalf("filename too long")
     }
-    writeLE(uint16(len(filename)))
+    writeLE(uint16(len(filename))) //nolint:gosec // filename length checked above
     writeLE(centralExtraLen)
     writeLE(uint16(0)) // comment len
     writeLE(uint16(0)) // disk start
@@ -264,9 +264,9 @@ func writeZip64Stub(t *testing.T, path, name string, size uint64) {
     if centralLen > math.MaxUint32 || localLen > math.MaxUint32 {
         t.Fatalf("central or local length exceeds uint32")
     }
-    writeLE(uint32(centralLen))
-    writeLE(uint32(localLen))
+    writeLE(uint32(centralLen)) //nolint:gosec // lengths checked above
+    writeLE(uint32(localLen))   //nolint:gosec
     writeLE(uint16(0)) // comment length

     if err := os.WriteFile(path, buf, 0o600); err != nil {
         t.Fatal(err)
@@ -5,9 +5,12 @@ import (
 )

 const (
-    DistLimit float64 = 5000
+    // DistLimit is the maximum distance in km considered realistic.
+    DistLimit float64 = 5000
+    // ScopeDistLimit is the maximum distance in km for scope queries.
     ScopeDistLimit float64 = 50
-    DefaultDist float64 = 2
+    // DefaultDist is the default distance in km used when none is provided.
+    DefaultDist float64 = 2
 )

 // Deg returns the distance in decimal degrees based on the specified distance in meters and the latitude,
@@ -22,11 +25,12 @@ func Deg(lat, meter float64) (dLat, dLng float64) {

     // Do not calculate the exact longitude distance in
     // degrees if the latitude is zero or out of range.
-    if lat == 0.0 {
+    switch {
+    case lat == 0.0:
         return dLat, dLat
-    } else if lat < -89.9 {
+    case lat < -89.9:
         lat = -89.9
-    } else if lat > 89.9 {
+    case lat > 89.9:
         lat = 89.9
     }

@@ -25,8 +25,12 @@ Additional information can be found in our Developer Guide:
 package geo

 const (
-    AverageEarthRadiusKm = 6371.0 // Global-average earth radius in km
-    AverageEarthRadiusMeter = AverageEarthRadiusKm * 1000.0 // Global-average earth radius in m
-    WGS84EarthRadiusKm = 6378.137 // WGS84 earth radius in km
-    WGS84EarthRadiusMeter = WGS84EarthRadiusKm * 1000.0 // WGS84 earth radius in m
+    // AverageEarthRadiusKm is the global-average earth radius in km.
+    AverageEarthRadiusKm = 6371.0
+    // AverageEarthRadiusMeter is the global-average earth radius in meters.
+    AverageEarthRadiusMeter = AverageEarthRadiusKm * 1000.0
+    // WGS84EarthRadiusKm is the WGS84 equatorial earth radius in km.
+    WGS84EarthRadiusKm = 6378.137
+    // WGS84EarthRadiusMeter is the WGS84 equatorial earth radius in meters.
+    WGS84EarthRadiusMeter = WGS84EarthRadiusKm * 1000.0
 )
@@ -2,6 +2,7 @@ package latlng

 import "math"

+// RoundDecimals defines the precision used when rounding coordinates.
 var RoundDecimals = float64(10000000)

 // Round rounds the given coordinate to six decimal places.
@@ -140,32 +140,34 @@ func (m *Movement) Realistic() bool {

 // AverageAltitude returns the average altitude.
 func (m *Movement) AverageAltitude() float64 {
-    if m.Start.Altitude != 0 && m.End.Altitude == 0 {
+    switch {
+    case m.Start.Altitude != 0 && m.End.Altitude == 0:
         return m.Start.Altitude
-    } else if m.Start.Altitude == 0 && m.End.Altitude != 0 {
+    case m.Start.Altitude == 0 && m.End.Altitude != 0:
         return m.End.Altitude
-    } else if m.Start.Altitude != 0 && m.End.Altitude != 0 {
+    case m.Start.Altitude != 0 && m.End.Altitude != 0:
         return (m.Start.Altitude + m.End.Altitude) / 2
+    default:
+        return 0
     }
-
-    return 0
 }

 // EstimateAccuracy returns the position estimate accuracy in meter.
 func (m *Movement) EstimateAccuracy(t time.Time) int {
     var a float64

-    if !m.Realistic() {
+    switch {
+    case !m.Realistic():
         a = m.Meter() / 2
-    } else if t.Before(m.Start.Time) {
+    case t.Before(m.Start.Time):
         d := m.Start.Time.Sub(t).Hours() * 1000
         d = math.Copysign(math.Sqrt(math.Abs(d)), d)
         a = m.Speed() * d
-    } else if t.After(m.End.Time) {
+    case t.After(m.End.Time):
         d := t.Sub(m.End.Time).Hours() * 1000
         d = math.Copysign(math.Sqrt(math.Abs(d)), d)
         a = m.Speed() * d
-    } else {
+    default:
         a = m.Meter() / 20
     }

@@ -6,6 +6,7 @@ import (
 	"time"
 )
 
+// Meter represents one meter in decimal degrees at the equator.
 const Meter = 0.00001
 
 // Position represents a geo coordinate.
@@ -1,10 +1,19 @@
 package geo
 
 import (
-	"math/rand/v2"
+	"crypto/rand"
+	"math/big"
 )
 
 // Randomize adds a random offset to a value.
 func Randomize(value, diameter float64) float64 {
-	return value + (rand.Float64()-0.5)*diameter
+	// Use crypto/rand to avoid predictable offsets.
+	// randomFloat in [0,1)
+	n, err := rand.Int(rand.Reader, big.NewInt(1_000_000_000))
+	if err != nil {
+		return value
+	}
+
+	randomFloat := float64(n.Int64()) / 1_000_000_000.0
+	return value + (randomFloat-0.5)*diameter
 }
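The new implementation keeps the original Randomize signature, so callers only gain the crypto/rand entropy source. A minimal usage sketch, assuming the function lives in pkg/geo together with the Meter constant shown above (the import path is assumed and not part of this diff):

```go
package main

import (
	"fmt"

	"github.com/photoprism/photoprism/pkg/geo"
)

func main() {
	// Jitter a latitude by a diameter of 100 * geo.Meter, i.e. the offset is
	// drawn from (-0.5, 0.5) * 0.001 degrees (roughly +/-50 m at the equator).
	lat := geo.Randomize(52.520008, 100*geo.Meter)
	fmt.Printf("randomized latitude: %.7f\n", lat)
}
```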
@@ -4,6 +4,7 @@ import (
 	"strings"
 )
 
+// TokenPrefix is the optional prefix for S2 tokens.
 var TokenPrefix = "s2:"
 
 // NormalizeToken removes the prefix from a token and converts all characters to lower case.
@@ -1,7 +1,7 @@
 package header
 
 import (
-	"crypto/sha1"
+	"crypto/sha1" //nolint:gosec // SHA1 retained for legacy cache key hashing
 	"encoding/base64"
 	"fmt"
 	"net/http"

@@ -13,7 +13,7 @@ import (
 // Authentication header names.
 const (
 	Auth = "Authorization" // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Authorization
-	XAuthToken = "X-Auth-Token"
+	XAuthToken = "X-Auth-Token" //nolint:gosec // header name, not a secret
 	XSessionID = "X-Session-ID"
 )
 

@@ -98,7 +98,7 @@ func BasicAuth(c *gin.Context) (username, password, cacheKey string) {
 		return "", "", ""
 	}
 
-	cacheKey = fmt.Sprintf("%x", sha1.Sum([]byte(authToken)))
+	cacheKey = fmt.Sprintf("%x", sha1.Sum([]byte(authToken))) //nolint:gosec // cache key only
 
 	return credentials[0], credentials[1], cacheKey
 }
@@ -219,7 +219,7 @@ func TestAuthorization(t *testing.T) {
 		Header: make(http.Header),
 	}
 
-	token := "eyJhbGciOiJFZERTQSIsImtpZCI6IjEyMyJ9.eyJpc3MiOiJwb3J0YWw6dGVzdCIsImF1ZCI6Im5vZGU6YWJjIiwiZXhwIjoxNzAwMDAwMDB9.dGVzdC1zaWduYXR1cmUtYnl0ZXM"
+	token := "eyJhbGciOiJFZERTQSIsImtpZCI6IjEyMyJ9.eyJpc3MiOiJwb3J0YWw6dGVzdCIsImF1ZCI6Im5vZGU6YWJjIiwiZXhwIjoxNzAwMDAwMDB9.dGVzdC1zaWduYXR1cmUtYnl0ZXM" //nolint:gosec // static test token
 	c.Request.Header.Add(Auth, "Bearer "+token)
 
 	authType, authToken := Authorization(c)
@@ -7,8 +7,7 @@ import (
 )
 
 const (
-	// The CacheControl request and response header field contains directives (instructions)
-	// that control caching in browsers and shared caches (e.g. proxies, CDNs).
+	// CacheControl request and response header field contains directives for caching.
 	// See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control
 	CacheControl = "Cache-Control"
 
@@ -16,6 +16,7 @@ const (
 )
 
 var (
+	// CdnMethods lists HTTP methods allowed via CDN.
 	CdnMethods = []string{http.MethodGet, http.MethodHead, http.MethodOptions}
 )
 

@@ -36,13 +37,14 @@ func IsCdn(req *http.Request) bool {
 
 // AbortCdnRequest checks if the request should not be served through a CDN.
 func AbortCdnRequest(req *http.Request) bool {
-	if !IsCdn(req) {
+	switch {
+	case !IsCdn(req):
 		return false
-	} else if req.Header.Get(XAuthToken) != "" {
+	case req.Header.Get(XAuthToken) != "":
 		return true
-	} else if req.URL.Path == "/" {
+	case req.URL.Path == "/":
 		return true
+	default:
+		return list.Excludes(CdnMethods, req.Method)
 	}
-
-	return list.Excludes(CdnMethods, req.Method)
 }
@@ -1,7 +1,10 @@
 package header
 
 const (
-	CidrPodInternal = "10.0.0.0/8"
+	// CidrPodInternal covers internal pod traffic ranges.
+	CidrPodInternal = "10.0.0.0/8"
+	// CidrDockerInternal covers default Docker internal ranges.
 	CidrDockerInternal = "172.16.0.0/12"
+	// CidrCalicoInternal covers Calico internal ranges.
 	CidrCalicoInternal = "192.168.0.0/16"
 )
@@ -10,6 +10,7 @@ import (
 var IpRegExp = regexp.MustCompile(`[^a-zA-Z0-9:.]`)
 
 const (
+	// IPv6Length represents the maximum length of an IPv6 address string.
 	IPv6Length = 39
 )
 

@@ -29,7 +30,8 @@ func IP(s, defaultIp string) string {
 	fastOK := true
 	for i := 0; i < len(s); i++ {
 		b := s[i]
-		if !((b >= '0' && b <= '9') || (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == ':' || b == '.') {
+		isAlphaNum := (b >= '0' && b <= '9') || (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z')
+		if !isAlphaNum && b != ':' && b != '.' {
 			fastOK = false
 			break
 		}
@@ -1,7 +1,10 @@
 package header
 
 var (
-	ProtoHttp = "http"
+	// ProtoHttp is the HTTP scheme.
+	ProtoHttp = "http"
+	// ProtoHttps is the HTTPS scheme.
 	ProtoHttps = "https"
-	ProtoWss = "wss"
+	// ProtoWss is the secure WebSocket scheme.
+	ProtoWss = "wss"
 )
@@ -1,5 +1,6 @@
 package header
 
+// RobotsRule represents a robots.txt directive rule.
 type RobotsRule = string
 
 // RobotsTag controls how pages are indexed and crawled by search engines:
@@ -1,6 +1,8 @@
 package header
 
 const (
-	Any = "*"
+	// Any wildcard value for header lists.
+	Any = "*"
+	// Deny disallows embedding/access (used in frame/permission headers).
 	Deny = "DENY"
 )
@@ -1,6 +1,8 @@
 package header
 
 const (
+	// XFavorite marks favorite status in WebDAV headers.
 	XFavorite = "X-Favorite"
-	XModTime = "X-OC-MTime"
+	// XModTime conveys modification time in WebDAV headers.
+	XModTime = "X-OC-MTime"
 )
@@ -1,8 +1,12 @@
 package header
 
 const (
-	WebhookID string = "webhook-id"
-	WebhookSignature string = "webhook-signature"
-	WebhookTimestamp string = "webhook-timestamp"
+	// WebhookID is the request header containing a webhook identifier.
+	WebhookID string = "webhook-id"
+	// WebhookSignature carries the signature header.
+	WebhookSignature string = "webhook-signature"
+	// WebhookTimestamp carries the timestamp header.
+	WebhookTimestamp string = "webhook-timestamp"
+	// WebhookSecretPrefix prefixes stored webhook secrets.
 	WebhookSecretPrefix string = "whsec_"
 )
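The webhook-id/webhook-timestamp/webhook-signature trio and the whsec_ secret prefix follow the naming of the Standard Webhooks convention. The verification code itself is not part of this diff, so purely as a hedged sketch of how such headers are commonly checked (HMAC-SHA256 over "id.timestamp.body"), under the assumption that this convention applies here:

```go
package webhookexample

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
	"net/http"
	"strings"
)

// verifySignature checks an HMAC-SHA256 signature over "id.timestamp.body" as
// described by the Standard Webhooks convention. Whether PhotoPrism verifies
// webhooks exactly this way is an assumption, not shown in this commit.
func verifySignature(r *http.Request, body []byte, secret string) bool {
	id := r.Header.Get("webhook-id")
	ts := r.Header.Get("webhook-timestamp")
	got := r.Header.Get("webhook-signature")

	key, err := base64.StdEncoding.DecodeString(strings.TrimPrefix(secret, "whsec_"))
	if err != nil {
		return false
	}

	mac := hmac.New(sha256.New, key)
	fmt.Fprintf(mac, "%s.%s.%s", id, ts, body)
	want := "v1," + base64.StdEncoding.EncodeToString(mac.Sum(nil))

	// The signature header may carry several space-separated signatures.
	for _, sig := range strings.Fields(got) {
		if hmac.Equal([]byte(sig), []byte(want)) {
			return true
		}
	}

	return false
}
```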
@@ -153,12 +153,12 @@ func Download(destPath, rawURL string, opt *Options) error {
 	}
 
 	tmp := destPath + ".part"
-	f, err := os.OpenFile(tmp, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o600)
+	f, err := os.OpenFile(tmp, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o600) //nolint:gosec // destPath validated by caller; temp file
 	if err != nil {
 		return err
 	}
 	defer func() {
-		f.Close()
+		_ = f.Close()
 		if err != nil {
 			_ = os.Remove(tmp)
 		}

@@ -180,10 +180,8 @@ func Download(destPath, rawURL string, opt *Options) error {
 	if err = f.Close(); err != nil {
 		return err
 	}
-	if err = os.Rename(tmp, destPath); err != nil {
-		return err
-	}
-
-	return nil
+
+	return os.Rename(tmp, destPath)
 }
 
 func isPrivateOrDisallowedIP(ip net.IP) bool {
@@ -50,7 +50,7 @@ func TestDownload_AllowRedirectToPrivate(t *testing.T) {
 	if err := Download(dest, ts.URL, &Options{Timeout: 5 * time.Second, MaxSizeBytes: 1 << 20, AllowPrivate: true}); err != nil {
 		t.Fatalf("unexpected error: %v", err)
 	}
-	b, err := os.ReadFile(dest)
+	b, err := os.ReadFile(dest) //nolint:gosec // test reads temp file
 	if err != nil || string(b) != "ok" {
 		t.Fatalf("unexpected content: %v %q", err, string(b))
 	}

@@ -21,7 +21,7 @@ func TestSafeDownload_OK(t *testing.T) {
 	if err := Download(dest, ts.URL, &Options{Timeout: 5 * time.Second, MaxSizeBytes: 1024, AllowPrivate: true}); err != nil {
 		t.Fatal(err)
 	}
-	b, err := os.ReadFile(dest)
+	b, err := os.ReadFile(dest) //nolint:gosec // test reads temp file
 	if err != nil || string(b) != "hello" {
 		t.Fatalf("unexpected content: %v %q", err, string(b))
 	}
@@ -21,9 +21,12 @@ var (
 	defaultTimeout = 30 * time.Second
 	defaultMaxSize = int64(200 * 1024 * 1024) // 200 MiB
 
+	// ErrSchemeNotAllowed is returned when a URL scheme is not permitted.
 	ErrSchemeNotAllowed = errors.New("invalid scheme (only http/https allowed)")
-	ErrSizeExceeded = errors.New("response exceeds maximum allowed size")
-	ErrPrivateIP = errors.New("connection to private or loopback address not allowed")
+	// ErrSizeExceeded is returned when a response exceeds the configured limit.
+	ErrSizeExceeded = errors.New("response exceeds maximum allowed size")
+	// ErrPrivateIP is returned when the target resolves to a private or loopback address.
+	ErrPrivateIP = errors.New("connection to private or loopback address not allowed")
 )
 
 // envInt64 returns an int64 from env or -1 if unset/invalid.
@@ -4,19 +4,31 @@ package scheme
 type Type = string
 
 const (
-	File Type = "file"
-	Data Type = "data"
-	Base64 Type = "base64"
-	Http Type = "http"
-	Https Type = "https"
-	Websocket Type = "wss"
-	Unix Type = "unix"
-	HttpUnix Type = "http+unix"
-	Unixgram Type = "unixgram"
+	// File scheme.
+	File Type = "file"
+	// Data scheme.
+	Data Type = "data"
+	// Base64 scheme.
+	Base64 Type = "base64"
+	// Http scheme.
+	Http Type = "http"
+	// Https scheme.
+	Https Type = "https"
+	// Websocket scheme (secure).
+	Websocket Type = "wss"
+	// Unix scheme.
+	Unix Type = "unix"
+	// HttpUnix scheme.
+	HttpUnix Type = "http+unix"
+	// Unixgram scheme.
+	Unixgram Type = "unixgram"
+	// Unixpacket scheme.
 	Unixpacket Type = "unixpacket"
 )
 
 var (
+	// HttpsData lists allowed schemes (https, data).
 	HttpsData = []string{Https, Data}
+	// HttpsHttp lists allowed schemes (https, http).
 	HttpsHttp = []string{Https, Http}
 )
@@ -34,7 +34,10 @@ import (
 
 //go:generate xgettext --no-wrap --language=c --from-code=UTF-8 --output=../../assets/locales/messages.pot messages.go
 
+// Message represents a localized message identifier.
 type Message int
 
+// MessageMap maps message IDs to their localized strings.
 type MessageMap map[Message]string
 
 var noVars []interface{}
@@ -6,30 +6,45 @@ import (
 	"github.com/leonelquinteros/gotext"
 )
 
+// Locale represents a language/region tag (e.g., "en", "pt_BR").
 type Locale string
 
 const (
-	German Locale = "de"
-	English Locale = "en"
-	Spanish Locale = "es"
-	French Locale = "fr"
-	Dutch Locale = "nl"
-	Polish Locale = "pl"
-	Portuguese Locale = "pt"
+	// German locale.
+	German Locale = "de"
+	// English locale.
+	English Locale = "en"
+	// Spanish locale.
+	Spanish Locale = "es"
+	// French locale.
+	French Locale = "fr"
+	// Dutch locale.
+	Dutch Locale = "nl"
+	// Polish locale.
+	Polish Locale = "pl"
+	// Portuguese locale.
+	Portuguese Locale = "pt"
+	// BrazilianPortuguese locale.
 	BrazilianPortuguese Locale = "pt_BR"
-	Russian Locale = "ru"
-	ChineseSimplified Locale = "zh"
-	ChineseTraditional Locale = "zh_TW"
-	Default = English
+	// Russian locale.
+	Russian Locale = "ru"
+	// ChineseSimplified locale.
+	ChineseSimplified Locale = "zh"
+	// ChineseTraditional locale.
+	ChineseTraditional Locale = "zh_TW"
+	// Default locale used when none is supplied.
+	Default = English
 )
 
 var localeDir = "../../assets/locales"
 var locale = Default
 
+// SetDir sets the path to the locales directory.
 func SetDir(dir string) {
 	localeDir = dir
 }
 
+// SetLocale sets the current locale.
 func SetLocale(loc string) {
 	switch len(loc) {
 	case 2:

@@ -45,6 +60,7 @@ func SetLocale(loc string) {
 	gotext.Configure(localeDir, string(locale), "default")
 }
 
+// Locale returns the string value of the locale.
 func (l Locale) Locale() string {
 	return string(l)
 }
@@ -1,7 +1,10 @@
 package i18n
 
+// Message and Error identifiers.
 const (
+	// ErrUnexpected is returned for unexpected errors.
 	ErrUnexpected Message = iota + 1
+	// ErrBadRequest indicates malformed input.
 	ErrBadRequest
 	ErrSaveFailed
 	ErrDeleteFailed

@@ -99,6 +102,7 @@ const (
 	MsgActivated
 )
 
+// Messages holds default English message strings.
 var Messages = MessageMap{
 	// Error messages:
 	ErrUnexpected: gettext("Something went wrong, try again"),
@@ -2,6 +2,7 @@ package i18n
 
 import "strings"
 
+// Response represents an i18n-aware response payload.
 type Response struct {
 	Code int `json:"code"`
 	Err string `json:"error,omitempty"`

@@ -17,6 +18,7 @@ func (r Response) String() string {
 	}
 }
 
+// LowerString returns the lowercased message string.
 func (r Response) LowerString() string {
 	return strings.ToLower(r.String())
 }

@@ -25,10 +27,12 @@ func (r Response) Error() string {
 	return r.Err
 }
 
+// Success reports whether the response code indicates success (2xx).
 func (r Response) Success() bool {
 	return r.Err == "" && r.Code < 400
 }
 
+// NewResponse builds a Response with the given code, message ID, and optional parameters.
 func NewResponse(code int, id Message, params ...interface{}) Response {
 	if code < 400 {
 		return Response{Code: code, Msg: Msg(id, params...)}
@@ -2,13 +2,14 @@ package list
 
 // Add adds a string to the list if it does not exist yet.
 func Add(list []string, s string) []string {
-	if s == "" {
+	switch {
+	case s == "":
 		return list
-	} else if len(list) == 0 {
+	case len(list) == 0:
 		return []string{s}
-	} else if Contains(list, s) {
+	case Contains(list, s):
 		return list
+	default:
+		return append(list, s)
 	}
-
-	return append(list, s)
 }
@@ -45,9 +45,8 @@ func (list Attr) Strings() []string {
 
 		if s == "" {
 			continue
-		} else if i == 0 {
-			// Skip check.
-		} else if result[i-1] == s {
+		}
+		if i > 0 && result[i-1] == s {
 			continue
 		}
 

@@ -68,13 +67,14 @@
 // Sort sorts the attributes by key.
 func (list Attr) Sort() Attr {
 	sort.Slice(list, func(i, j int) bool {
-		if list[i].Key == list[j].Key {
+		switch {
+		case list[i].Key == list[j].Key:
 			return list[i].Value < list[j].Value
-		} else if list[i].Key == Any {
+		case list[i].Key == Any:
 			return false
-		} else if list[j].Key == Any {
+		case list[j].Key == Any:
 			return true
-		} else {
+		default:
 			return list[i].Key < list[j].Key
 		}
 	})

@@ -122,11 +122,12 @@ func (list Attr) Find(s string) (a KeyValue) {
 	} else {
 		for i := range list {
 			if strings.EqualFold(attr.Key, list[i].Key) {
-				if attr.Value == enum.True && list[i].Value == enum.False {
+				switch {
+				case attr.Value == enum.True && list[i].Value == enum.False:
 					return KeyValue{Key: "", Value: ""}
-				} else if attr.Value == list[i].Value {
+				case attr.Value == list[i].Value:
 					return *list[i]
-				} else if list[i].Value == Any {
+				case list[i].Value == Any:
 					a = *list[i]
 				}
 			} else if list[i].Key == Any && attr.Value != enum.False {
@@ -30,12 +30,8 @@ func ContainsAny(l, s []string) bool {
 		return false
 	}
 
-	// If second list contains All, it's a wildcard match.
-	if s[0] == Any {
-		return true
-	}
-	for j := 1; j < len(s); j++ {
-		if s[j] == Any {
+	for _, v := range s {
+		if v == Any {
 			return true
 		}
 	}
@@ -51,7 +51,7 @@ func (l *list[K, V]) Back() *Element[K, V] {
 	return l.root.prev
 }
 
-// Remove detaches e from the list while keeping the remaining neighbours
+// Remove detaches e from the list while keeping the remaining neighbors
 // correctly linked. After removal the element's next/prev references are
 // zeroed so the node can be safely re-used or left for GC without retaining
 // other elements.

@@ -130,7 +130,7 @@ func (m *Map[K, V]) AllFromBack() iter.Seq2[K, V] {
 }
 
 // Keys returns an iterator that yields all keys in insertion order. Use
-// slices.Collect(m.Keys()) if a materialised slice is required.
+// slices.Collect(m.Keys()) if a materialized slice is required.
 func (m *Map[K, V]) Keys() iter.Seq[K] {
 	return func(yield func(key K) bool) {
 		for el := m.Front(); el != nil; el = el.Next() {
@@ -1,5 +1,7 @@
 package ordered_test
 
+//revive:disable:var-naming // benchmark helpers follow Go benchmark naming with underscores
+
 import (
 	"slices"
 	"strconv"
@@ -493,7 +495,7 @@ func benchmarkOrderedMap_Len(multiplier int) func(b *testing.B) {
 			temp = m.Len()
 		}
 
-		// prevent compiler from optimising Len away.
+		// prevent compiler from optimizing Len away.
 		tempInt = temp
 	}
 }

@@ -800,7 +802,7 @@ func BenchmarkOrderedMapString_Has(b *testing.B) {
 }
 
 func nothing(v interface{}) {
-	v = false
+	_ = v
 }
 
 func benchmarkBigMap_Set() func(b *testing.B) {
@@ -2,7 +2,7 @@ package ordered
 
 import (
 	"fmt"
-	"math/rand"
+	"math/rand" //nolint:gosec // pseudo-random is sufficient for concurrency tests
 	"sync"
 	"testing"
 )
@@ -10,11 +10,15 @@
 func TestRaceCondition(t *testing.T) {
 	m := NewSyncMap[int, int]()
 	wg := &sync.WaitGroup{}
+	//nolint:gosec // pseudo-random is sufficient for race testing
+	randInt := func() int {
+		return rand.Intn(100)
+	}
 
 	var asyncGet = func() {
 		wg.Add(1)
 		go func() {
-			key := rand.Intn(100)
+			key := randInt()
 			m.Get(key)
 			wg.Done()
 		}()

@@ -23,8 +27,8 @@ func TestRaceCondition(t *testing.T) {
 	var asyncSet = func() {
 		wg.Add(1)
 		go func() {
-			key := rand.Intn(100)
-			value := rand.Intn(100)
+			key := randInt()
+			value := randInt()
 			m.Set(key, value)
 			wg.Done()
 		}()

@@ -33,7 +37,7 @@ func TestRaceCondition(t *testing.T) {
 	var asyncDelete = func() {
 		wg.Add(1)
 		go func() {
-			key := rand.Intn(100)
+			key := randInt()
 			m.Delete(key)
 			wg.Done()
 		}()

@@ -42,7 +46,7 @@ func TestRaceCondition(t *testing.T) {
 	var asyncHas = func() {
 		wg.Add(1)
 		go func() {
-			key := rand.Intn(100)
+			key := randInt()
 			m.Has(key)
 			wg.Done()
 		}()

@@ -51,8 +55,8 @@ func TestRaceCondition(t *testing.T) {
 	var asyncReplaceKEy = func() {
 		wg.Add(1)
 		go func() {
-			key := rand.Intn(100)
-			newKey := rand.Intn(100)
+			key := randInt()
+			newKey := randInt()
 			m.ReplaceKey(key, newKey)
 			wg.Done()
 		}()

@@ -61,8 +65,8 @@ func TestRaceCondition(t *testing.T) {
 	var asyncGetOrDefault = func() {
 		wg.Add(1)
 		go func() {
-			key := rand.Intn(100)
-			def := rand.Intn(100)
+			key := randInt()
+			def := randInt()
 			m.GetOrDefault(key, def)
 			wg.Done()
 		}()
@@ -1,5 +1,7 @@
 package dummy
 
+//revive:disable:exported
+
 import (
 	"context"
 	"io"
@@ -20,7 +20,7 @@ func TestError(t *testing.T) {
 		},
 		{
 			name: "SanitizeSpecialCharacters",
-			err: errors.New("permission denied { DROP TABLE users; }\n"),
+			err: errors.New("permission denied { DROP TABLE users; }"),
 		},
 		{
 			name: "WhitespaceOnly",
@@ -1,19 +1,20 @@
-Hybrid Photo/Video File Support
-===============================
+## PhotoPrism — Media Package
 
-## Apple iPhone and iPad
+**Last Updated:** November 22, 2025
+
+### Apple iPhone and iPad
 
 [iOS Live Photos](https://developer.apple.com/live-photos/) consist of a JPEG/HEIC image and a QuickTime AVC/HEVC video, which are both required for viewing.
 
 We recommend [using an app like PhotoSync](https://docs.photoprism.app/user-guide/sync/mobile-devices/#photosync) to upload Live Photos to PhotoPrism, since the iOS web upload usually only submits the HEIC image file without the video.
 
-## Android Devices
+### Android Devices
 
 Some Samsung and Google Android devices support taking "Motion Photos" with the included Camera app. Motion Photos are JPEG/HEIC image with a short MP4 video embedded after the image data.
 
 The image part of these files can be opened in any image viewer that supports JPEG/HEIC, but the video part cannot. However, since the MP4 video is simply appended at the end of the image file, it can be easily read by our software and streamed through the API as needed.
 
-## Introductory Tutorials
+### Introductory Tutorials
 
 | Title | Date | URL |
 |---------------------------------------------------------|----------|------------------------------------------------------------------------------------|

@@ -24,7 +25,7 @@ The image part of these files can be opened in any image viewer that supports JP
 | Working with Motion Photos | Jan 2019 | https://medium.com/android-news/working-with-motion-photos-da0aa49b50c |
 | Google: Behind the Motion Photos Technology in Pixel 2 | Mar 2018 | https://blog.research.google/2018/03/behind-motion-photos-technology-in.html |
 
-## Software Libraries and References
+### Software Libraries and References
 
 | Title | URL |
 |------------------------------------------------------|-------------------------------------------------------------------------|

@@ -37,19 +38,15 @@ The image part of these files can be opened in any image viewer that supports JP
 | How to use the io.Reader interface | https://yourbasic.org/golang/io-reader-interface-explained/ |
 | AV1 Codec ISO Media File Format | https://aomediacodec.github.io/av1-isobmff |
 
-## Related GitHub Issues
+### Related GitHub Issues
 
 - https://github.com/photoprism/photoprism/issues/439 (Samsung: Initial support for Motion Photos)
 - https://github.com/photoprism/photoprism/issues/1739 (Google: Initial support for Motion Photos)
 - https://github.com/photoprism/photoprism/issues/2788 (Metadata: Flag Samsung/Google Motion Photos as Live Photos)
 - https://github.com/cliveontoast/GoMoPho/issues/23 (Google Motion Photos Video Extractor: Add Android 12 Support)
 
-## Related Pull Requests
+### Related Pull Requests
 
 - https://github.com/photoprism/photoprism/pull/3709 (Google: Initial support for Motion Photos)
 - https://github.com/photoprism/photoprism/pull/3722 (Google: Add support for Motion Photos)
 - https://github.com/photoprism/photoprism/pull/3660 (Samsung: Improved support for Motion Photos)
-
-----
-
-*PhotoPrism® is a [registered trademark](https://www.photoprism.app/trademark). By using the software and services we provide, you agree to our [Terms of Service](https://www.photoprism.app/terms), [Privacy Policy](https://www.photoprism.app/privacy), and [Code of Conduct](https://www.photoprism.app/code-of-conduct). Docs are [available](https://link.photoprism.app/github-docs) under the [CC BY-NC-SA 4.0 License](https://creativecommons.org/licenses/by-nc-sa/4.0/); [additional terms](https://github.com/photoprism/photoprism/blob/develop/assets/README.md) may apply.*
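Since the Motion Photo video is appended directly after the image bytes, locating it is mostly a matter of finding where the MP4 container begins. The FileTypeOffset and Chunk.FileOffset helpers that appear later in this diff do this carefully with registered brand chunks; as a rough, hedged illustration of the same idea (not the actual extraction code), a naive scan for the `ftyp` box could look like:

```go
package motionexample

import (
	"bytes"
	"os"
)

// videoOffset returns the byte offset at which an embedded MP4 container is
// likely to start inside a Motion Photo, or -1 if no "ftyp" box is found.
// The 4-byte box size field precedes the "ftyp" signature.
func videoOffset(fileName string) (int, error) {
	data, err := os.ReadFile(fileName)
	if err != nil {
		return -1, err
	}

	i := bytes.Index(data, []byte("ftyp"))
	if i < 4 {
		return -1, nil // no MP4 header after the image data
	}

	return i - 4, nil
}
```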
@@ -23,7 +23,7 @@ func (c Chroma) Hex() string {
 
 // Uint returns the colourfulness in percent as unsigned integer.
 func (c Chroma) Uint() uint {
-	return uint(c.Percent())
+	return uint(c.Percent()) //nolint:gosec // Percent is bounded 0..100
 }
 
 // Int returns the colourfulness in percent as integer.
@@ -30,28 +30,48 @@ import (
 	"github.com/photoprism/photoprism/pkg/txt"
 )
 
+// Color represents a indexed color value.
 type Color int16
 
+// Colors is a slice of Color values.
 type Colors []Color
 
 const (
+	// Black color.
 	Black Color = iota
+	// Grey color.
 	Grey
+	// Brown color.
 	Brown
+	// Gold color.
 	Gold
+	// White color.
 	White
+	// Purple color.
 	Purple
+	// Blue color.
 	Blue
+	// Cyan color.
 	Cyan
+	// Teal color.
 	Teal
+	// Green color.
 	Green
+	// Lime color.
 	Lime
+	// Yellow color.
 	Yellow
+	// Magenta color.
 	Magenta
+	// Orange color.
 	Orange
+	// Red color.
 	Red
+	// Pink color.
 	Pink
 )
 
+// All lists all defined colors in display order.
 var All = Colors{
 	Purple,
 	Magenta,

@@ -71,6 +91,7 @@ var All = Colors{
 	Black,
 }
 
+// Names maps Color to their lowercase names.
 var Names = map[Color]string{
 	Black: "black", // 0
 	Grey: "grey", // 1

@@ -90,6 +111,7 @@ var Names = map[Color]string{
 	Pink: "pink", // F
 }
 
+// Weights assigns relative importance to colors.
 var Weights = map[Color]uint16{
 	Grey: 1,
 	Black: 2,

@@ -109,14 +131,17 @@ var Weights = map[Color]uint16{
 	Magenta: 5,
 }
 
+// Name returns the lowercase name for the color.
 func (c Color) Name() string {
 	return Names[c]
 }
 
+// ID returns the numeric identifier for the color.
 func (c Color) ID() int16 {
 	return int16(c)
 }
 
+// Hex returns the hex nibble for the color or "0" if out of range.
 func (c Color) Hex() string {
 	if c < 0 || c > 15 {
 		return "0"

@@ -125,6 +150,7 @@ func (c Color) Hex() string {
 	return fmt.Sprintf("%X", c)
 }
 
+// Hex returns the concatenated hex values for the slice.
 func (c Colors) Hex() (result string) {
 	for _, indexedColor := range c {
 		result += indexedColor.Hex()

@@ -133,6 +159,7 @@ func (c Colors) Hex() (result string) {
 	return result
 }
 
+// List returns a slice of maps with slug, display name, and example color.
 func (c Colors) List() []map[string]string {
 	result := make([]map[string]string, 0, len(c))
 
@@ -1,5 +1,6 @@
 package colors
 
+// ColorExamples contains representative hex values for each Color.
 var ColorExamples = map[Color]string{
 	Black: "#212121",
 	Grey: "#9E9E9E",
@@ -1,5 +1,6 @@
 package colors
 
+// LightMap stores luminance values for a palette.
 type LightMap []Luminance
 
 // Hex returns all luminance value as a hex encoded string.

@@ -69,7 +70,7 @@ func (m LightMap) Diff() (result int) {
 	result = 1
 
 	for _, val := range diffValues {
-		result = result << 1
+		result <<= 1
 
 		a := 0
 		b := 0

@@ -83,7 +84,7 @@ func (m LightMap) Diff() (result int) {
 		}
 
 		if a+4 > b {
-			result += 1
+			result++
 		}
 	}
 
@@ -63,16 +63,16 @@ func TestLightMap_Diff(t *testing.T) {
 	t.Run("Happy", func(t *testing.T) {
 		m1 := LightMap{8, 13, 7, 2, 2, 3, 6, 3, 4}
 		d1 := m1.Diff()
-		t.Log(strconv.FormatUint(uint64(d1), 2))
+		t.Log(strconv.FormatUint(uint64(uint16(d1)), 2)) //nolint:gosec // test logging
 		m2 := LightMap{8, 13, 7, 3, 1, 3, 5, 3, 4}
 		d2 := m2.Diff()
-		t.Log(strconv.FormatUint(uint64(d2), 2))
+		t.Log(strconv.FormatUint(uint64(uint16(d2)), 2)) //nolint:gosec // test logging
 		m3 := LightMap{9, 13, 7, 8, 2, 4, 5, 3, 4}
 		d3 := m3.Diff()
-		t.Log(strconv.FormatUint(uint64(d3), 2))
+		t.Log(strconv.FormatUint(uint64(uint16(d3)), 2)) //nolint:gosec // test logging
 		m4 := LightMap{9, 13, 7, 7, 2, 4, 6, 2, 3}
 		d4 := m4.Diff()
-		t.Log(strconv.FormatUint(uint64(d4), 2))
+		t.Log(strconv.FormatUint(uint64(uint16(d4)), 2)) //nolint:gosec // test logging
 
 		t.Logf("values: %d, %d, %d, %d", d1, d2, d3, d4)
 	})
@@ -2,8 +2,10 @@ package colors
 
 import "fmt"
 
+// Luminance represents a luminance value.
 type Luminance int16
 
+// Hex returns the hex string for the luminance value.
 func (l Luminance) Hex() string {
 	return fmt.Sprintf("%X", l)
 }
@@ -2,6 +2,7 @@ package colors
 
 import "image/color"
 
+// ColorMap maps RGBA values to Color enums.
 var ColorMap = map[color.RGBA]Color{
 	{0x00, 0x00, 0x00, 0xff}: Black,
 	{0xe0, 0xe0, 0xe0, 0xff}: Grey,
@@ -2,6 +2,7 @@ package colors
 
 import "strings"
 
+// Profile represents a color profile name.
 type Profile string
 
 // Supported color profiles.
@@ -2,7 +2,8 @@ package colors
 
 import (
 	"image"
-	_ "image/jpeg"
+	_ "image/jpeg" // register JPEG
+	_ "image/png" // register PNG (may appear in decoded sources)
 	"runtime"
 
 	"github.com/mandykoh/prism"
@@ -11,7 +11,7 @@ import (
 )
 
 func writeImage(path string, img image.Image) error {
-	imgFile, err := os.Create(path)
+	imgFile, err := os.Create(path) //nolint:gosec // test temp file
 
 	if err != nil {
 		return err

@@ -32,7 +32,7 @@ func TestToSRGB(t *testing.T) {
 
 	t.Logf("testfile: %s", testFile)
 
-	imgFile, err := os.Open(testFile)
+	imgFile, err := os.Open(testFile) //nolint:gosec // test temp file
 
 	if err != nil {
 		t.Fatal(err)
@@ -82,7 +82,7 @@ func ReadUrl(fileUrl string, schemes []string) (data []byte, err error) {
 	// Fetch the file data from the specified URL, depending on its scheme.
 	switch u.Scheme {
 	case scheme.Https, scheme.Http, scheme.Unix, scheme.HttpUnix:
-		resp, httpErr := http.Get(fileUrl)
+		resp, httpErr := http.Get(fileUrl) //nolint:gosec // URL already validated by caller; https/http only
 
 		if httpErr != nil {
 			return data, fmt.Errorf("invalid %s url (%s)", u.Scheme, httpErr)

@@ -100,7 +100,7 @@ func ReadUrl(fileUrl string, schemes []string) (data []byte, err error) {
 			return DecodeBase64String(binaryData)
 		}
 	case scheme.File:
-		if data, err = os.ReadFile(fileUrl); err != nil {
+		if data, err = os.ReadFile(fileUrl); err != nil { //nolint:gosec // fileUrl validated earlier
 			return data, fmt.Errorf("invalid %s url (%s)", u.Scheme, err)
 		}
 	default:
@@ -117,7 +117,8 @@ func TestDataUrl_WebpDetection(t *testing.T) {
 	// Minimal RIFF/WEBP container header
 	// RIFF <size=26> WEBP VP8 + padding
 	riff := []byte{'R', 'I', 'F', 'F', 26, 0, 0, 0, 'W', 'E', 'B', 'P', 'V', 'P', '8', ' '}
-	buf := append(riff, bytes.Repeat([]byte{0}, 32)...)
+	riff = append(riff, bytes.Repeat([]byte{0}, 32)...)
+	buf := riff
 	s := DataUrl(bytes.NewReader(buf))
 	assert.True(t, strings.HasPrefix(s, "data:image/webp;base64,"))
 }
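The test above relies on the WebP container layout: bytes 0-3 are "RIFF", bytes 4-7 hold the little-endian chunk size, and bytes 8-11 are the "WEBP" form type. A minimal sketch of that sniffing rule (illustrative only; the detection used by DataUrl is not shown in this hunk):

```go
package mediaexample

// isWebP reports whether data starts with a RIFF container whose form type is
// WEBP, which is the signature the WebP format uses.
func isWebP(data []byte) bool {
	if len(data) < 12 {
		return false
	}

	return string(data[0:4]) == "RIFF" && string(data[8:12]) == "WEBP"
}
```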
@@ -7,7 +7,9 @@ import "strings"
 type Orientation = string
 
 const (
-	KeepOrientation Orientation = "keep"
+	// KeepOrientation preserves existing orientation metadata.
+	KeepOrientation Orientation = "keep"
+	// ResetOrientation strips orientation metadata.
 	ResetOrientation Orientation = "reset"
 )
 
@@ -8,5 +8,8 @@ import (
 	"github.com/photoprism/photoprism/pkg/fs"
 )
 
+// PreviewFileTypes lists MIME types eligible for preview generation.
 var PreviewFileTypes = []string{fs.ImageJpeg.String(), fs.ImagePng.String()}
 
+// PreviewExpr is a SQL expression containing allowed preview MIME types.
 var PreviewExpr = gorm.Expr("'" + strings.Join(PreviewFileTypes, "','") + "'")
@@ -1,13 +1,20 @@
 package projection
 
 const (
-	Unknown Type = ""
-	Equirectangular Type = "equirectangular"
-	Cubestrip Type = "cubestrip"
-	Cylindrical Type = "cylindrical"
-	TransverseCylindrical Type = "transverse-cylindrical"
+	// Unknown projection.
+	Unknown Type = ""
+	// Equirectangular projection type.
+	Equirectangular Type = "equirectangular"
+	// Cubestrip projection type.
+	Cubestrip Type = "cubestrip"
+	// Cylindrical projection type.
+	Cylindrical Type = "cylindrical"
+	// TransverseCylindrical projection type.
+	TransverseCylindrical Type = "transverse-cylindrical"
+	// PseudocylindricalCompromise projection type.
 	PseudocylindricalCompromise Type = "pseudocylindrical-compromise"
-	Other Type = "other"
+	// Other projection type.
+	Other Type = "other"
 )
 
 // Types maps identifiers to known types.
@@ -1,9 +1,12 @@
 package media
 
+// Src identifies a media source.
 type Src = string
 
 // Data source types.
 const (
-	SrcLocal Src = "local"
+	// SrcLocal indicates the media originates from local storage.
+	SrcLocal Src = "local"
+	// SrcRemote indicates the media originates from a remote source.
 	SrcRemote Src = "remote"
 )
@@ -91,7 +91,7 @@ func FileTypeOffset(fileName string, brands Chunks) (int, error) {
 		return -1, errors.New("file not found")
 	}
 
-	file, err := os.Open(fileName)
+	file, err := os.Open(fileName) //nolint:gosec // fileName validated by caller
 
 	if err != nil {
 		return -1, err

@@ -52,7 +52,7 @@ func (c Chunk) FileOffset(fileName string) (int, error) {
 		return -1, errors.New("file not found")
 	}
 
-	file, err := os.Open(fileName)
+	file, err := os.Open(fileName) //nolint:gosec // fileName validated by caller
 
 	if err != nil {
 		return -1, err
@@ -5,9 +5,12 @@ package video
 type Profile = string
 
 const (
+	// ProfileBaseline indicates H.264 Baseline profile.
 	ProfileBaseline Profile = "Baseline"
-	ProfileMain Profile = "Main"
-	ProfileHigh Profile = "High"
+	// ProfileMain indicates H.264 Main profile.
+	ProfileMain Profile = "Main"
+	// ProfileHigh indicates H.264 High profile.
+	ProfileHigh Profile = "High"
 )
 
 // CodecProfile represents a codec subtype with its standardized ID,
Some files were not shown because too many files have changed in this diff.