mirror of
https://github.com/penpot/penpot.git
synced 2025-12-11 22:14:05 +01:00
♻️ Refactor file storage
Make it more scallable and make it easily extensible
This commit is contained in:
@@ -30,8 +30,8 @@
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.main :as main]
|
||||
[app.srepl.helpers :as srepl.helpers]
|
||||
[app.srepl.main :as srepl]
|
||||
[app.srepl.helpers :as h]
|
||||
[app.srepl.main :refer :all]
|
||||
[app.util.blob :as blob]
|
||||
[clj-async-profiler.core :as prof]
|
||||
[clojure.contrib.humanize :as hum]
|
||||
|
||||
80
backend/scripts/_env
Normal file
80
backend/scripts/_env
Normal file
@@ -0,0 +1,80 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
|
||||
export PENPOT_MANAGEMENT_API_SHARED_KEY=super-secret-management-api-key
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
export PENPOT_HOST=devenv
|
||||
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-login-with-ldap \
|
||||
enable-login-with-password
|
||||
enable-login-with-oidc \
|
||||
enable-login-with-google \
|
||||
enable-login-with-github \
|
||||
enable-login-with-gitlab \
|
||||
enable-backend-worker \
|
||||
enable-backend-asserts \
|
||||
disable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
enable-audit-log \
|
||||
enable-transit-readable-response \
|
||||
enable-demo-users \
|
||||
disable-secure-session-cookies \
|
||||
enable-smtp \
|
||||
enable-prepl-server \
|
||||
enable-urepl-server \
|
||||
enable-rpc-climit \
|
||||
enable-rpc-rlimit \
|
||||
enable-quotes \
|
||||
enable-soft-rpc-rlimit \
|
||||
enable-auto-file-snapshot \
|
||||
enable-webhooks \
|
||||
enable-access-tokens \
|
||||
disable-tiered-file-data-storage \
|
||||
enable-file-validation \
|
||||
enable-file-schema-validation \
|
||||
enable-subscriptions";
|
||||
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
|
||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||
|
||||
export JAVA_OPTS="\
|
||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-Djdk.attach.allowAttachSelf \
|
||||
-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-Djdk.tracePinnedThreads=full \
|
||||
-Dim4java.useV7=true \
|
||||
-XX:+UseShenandoahGC \
|
||||
-XX:+UseCompactObjectHeaders \
|
||||
-XX:+UnlockExperimentalVMOptions \
|
||||
-XX:ShenandoahGCMode=generational \
|
||||
-XX:-OmitStackTraceInFastThrow \
|
||||
--sun-misc-unsafe-memory-access=allow \
|
||||
--enable-preview \
|
||||
--enable-native-access=ALL-UNNAMED";
|
||||
|
||||
function setup_minio() {
|
||||
# Initialize MINIO config
|
||||
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
||||
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
||||
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
|
||||
if [ "$?" = "1" ]; then
|
||||
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
|
||||
fi
|
||||
mc mb penpot-s3/penpot -p -q
|
||||
}
|
||||
|
||||
|
||||
@@ -1,115 +1,13 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
export PENPOT_HOST=devenv
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-login-with-ldap \
|
||||
enable-login-with-password
|
||||
enable-login-with-oidc \
|
||||
enable-login-with-google \
|
||||
enable-login-with-github \
|
||||
enable-login-with-gitlab \
|
||||
enable-backend-worker \
|
||||
enable-backend-asserts \
|
||||
disable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
enable-audit-log \
|
||||
enable-transit-readable-response \
|
||||
enable-demo-users \
|
||||
disable-secure-session-cookies \
|
||||
enable-smtp \
|
||||
enable-prepl-server \
|
||||
enable-urepl-server \
|
||||
enable-rpc-climit \
|
||||
enable-rpc-rlimit \
|
||||
enable-quotes \
|
||||
enable-soft-rpc-rlimit \
|
||||
enable-auto-file-snapshot \
|
||||
enable-webhooks \
|
||||
enable-access-tokens \
|
||||
disable-tiered-file-data-storage \
|
||||
enable-file-validation \
|
||||
enable-file-schema-validation \
|
||||
enable-subscriptions";
|
||||
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
|
||||
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot"
|
||||
# export PENPOT_DATABASE_USERNAME="penpot"
|
||||
# export PENPOT_DATABASE_PASSWORD="penpot"
|
||||
# export PENPOT_DATABASE_READONLY=true
|
||||
|
||||
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot_pre"
|
||||
# export PENPOT_DATABASE_USERNAME="penpot_pre"
|
||||
# export PENPOT_DATABASE_PASSWORD="penpot_pre"
|
||||
|
||||
# export PENPOT_LOGGERS_LOKI_URI="http://172.17.0.1:3100/loki/api/v1/push"
|
||||
# export PENPOT_AUDIT_LOG_ARCHIVE_URI="http://localhost:6070/api/audit"
|
||||
SCRIPT_DIR=$(dirname $0);
|
||||
source $SCRIPT_DIR/_env;
|
||||
|
||||
# Initialize MINIO config
|
||||
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
||||
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
||||
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
|
||||
if [ "$?" = "1" ]; then
|
||||
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
|
||||
fi
|
||||
mc mb penpot-s3/penpot -p -q
|
||||
|
||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
|
||||
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||
export PENPOT_OBJECTS_STORAGE_FS_DIRECTORY="assets"
|
||||
|
||||
export JAVA_OPTS="\
|
||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-Djdk.attach.allowAttachSelf \
|
||||
-Dlog4j2.configurationFile=log4j2-devenv-repl.xml \
|
||||
-Djdk.tracePinnedThreads=full \
|
||||
-Dim4java.useV7=true \
|
||||
-XX:+UseShenandoahGC \
|
||||
-XX:+EnableDynamicAgentLoading \
|
||||
-XX:-OmitStackTraceInFastThrow \
|
||||
-XX:+UnlockExperimentalVMOptions \
|
||||
-XX:+UnlockDiagnosticVMOptions \
|
||||
-XX:+DebugNonSafepoints \
|
||||
-XX:ShenandoahGCMode=generational \
|
||||
-XX:+UseCompactObjectHeaders \
|
||||
--sun-misc-unsafe-memory-access=allow \
|
||||
--enable-preview \
|
||||
--enable-native-access=ALL-UNNAMED";
|
||||
setup_minio;
|
||||
|
||||
export JAVA_OPTS="$JAVA_OPTS -Dlog4j2.configurationFile=log4j2-devenv-repl.xml"
|
||||
export OPTIONS="-A:jmx-remote -A:dev"
|
||||
|
||||
# Setup HEAP
|
||||
# export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m"
|
||||
# export OPTIONS="$OPTIONS -J-Xms1100m -J-Xmx1100m -J-XX:+AlwaysPreTouch"
|
||||
|
||||
# Increase virtual thread pool size
|
||||
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
|
||||
|
||||
# Disable C2 Compiler
|
||||
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
|
||||
|
||||
# Disable all compilers
|
||||
# export OPTIONS="$OPTIONS -J-Xint"
|
||||
|
||||
# Setup GC
|
||||
# export OPTIONS="$OPTIONS -J-XX:+UseG1GC"
|
||||
|
||||
# Setup GC
|
||||
# export OPTIONS="$OPTIONS -J-XX:+UseZGC"
|
||||
|
||||
export OPTIONS_EVAL="nil"
|
||||
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
|
||||
|
||||
|
||||
@@ -1,48 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
source /home/penpot/environ
|
||||
export PENPOT_FLAGS="$PENPOT_FLAGS disable-backend-worker"
|
||||
|
||||
export OPTIONS="
|
||||
-A:jmx-remote -A:dev \
|
||||
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-J-Djdk.attach.allowAttachSelf \
|
||||
-J-Dlog4j2.configurationFile=log4j2-experiments.xml \
|
||||
-J-XX:-OmitStackTraceInFastThrow \
|
||||
-J-XX:+UnlockDiagnosticVMOptions \
|
||||
-J-XX:+DebugNonSafepoints \
|
||||
-J-Djdk.tracePinnedThreads=full \
|
||||
-J-XX:+UseTransparentHugePages \
|
||||
-J-XX:ReservedCodeCacheSize=1g \
|
||||
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
|
||||
-J--enable-preview";
|
||||
|
||||
# Setup HEAP
|
||||
export OPTIONS="$OPTIONS -J-Xms320g -J-Xmx320g -J-XX:+AlwaysPreTouch"
|
||||
|
||||
export PENPOT_HTTP_SERVER_IO_THREADS=2
|
||||
export PENPOT_HTTP_SERVER_WORKER_THREADS=2
|
||||
|
||||
# Increase virtual thread pool size
|
||||
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
|
||||
|
||||
# Disable C2 Compiler
|
||||
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
|
||||
|
||||
# Disable all compilers
|
||||
# export OPTIONS="$OPTIONS -J-Xint"
|
||||
|
||||
# Setup GC
|
||||
export OPTIONS="$OPTIONS -J-XX:+UseG1GC -J-Xlog:gc:logs/gc.log"
|
||||
|
||||
# Setup GC
|
||||
#export OPTIONS="$OPTIONS -J-XX:+UseZGC -J-XX:+ZGenerational -J-Xlog:gc:logs/gc.log"
|
||||
|
||||
# Enable ImageMagick v7.x support
|
||||
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
|
||||
|
||||
export OPTIONS_EVAL="nil"
|
||||
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
|
||||
|
||||
set -ex
|
||||
exec clojure $OPTIONS -M -e "$OPTIONS_EVAL" -m rebel-readline.main
|
||||
@@ -1,44 +1,13 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
export PENPOT_HOST=devenv
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-backend-asserts \
|
||||
enable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
enable-file-snapshot \
|
||||
enable-tiered-file-data-storage";
|
||||
SCRIPT_DIR=$(dirname $0);
|
||||
|
||||
export JAVA_OPTS="
|
||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-Djdk.attach.allowAttachSelf \
|
||||
-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-XX:+EnableDynamicAgentLoading \
|
||||
-XX:-OmitStackTraceInFastThrow \
|
||||
-XX:+UnlockDiagnosticVMOptions \
|
||||
-XX:+DebugNonSafepoints";
|
||||
|
||||
export CLOJURE_OPTIONS="-A:dev"
|
||||
|
||||
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
|
||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||
source $SCRIPT_DIR/_env;
|
||||
export OPTIONS="-A:dev"
|
||||
|
||||
entrypoint=${1:-app.main};
|
||||
|
||||
shift 1;
|
||||
set -ex
|
||||
|
||||
clojure $CLOJURE_OPTIONS -A:dev -M -m $entrypoint "$@";
|
||||
exec clojure $OPTIONS -A:dev -M -m $entrypoint "$@";
|
||||
|
||||
@@ -1,70 +1,11 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
export PENPOT_MANAGEMENT_API_SHARED_KEY=super-secret-management-api-key
|
||||
export PENPOT_HOST=devenv
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-prepl-server \
|
||||
enable-urepl-server \
|
||||
enable-nrepl-server \
|
||||
enable-webhooks \
|
||||
enable-backend-asserts \
|
||||
enable-audit-log \
|
||||
enable-login-with-ldap \
|
||||
enable-transit-readable-response \
|
||||
enable-demo-users \
|
||||
disable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
disable-secure-session-cookies \
|
||||
enable-rpc-climit \
|
||||
enable-smtp \
|
||||
enable-quotes \
|
||||
enable-file-snapshot \
|
||||
enable-access-tokens \
|
||||
disable-tiered-file-data-storage \
|
||||
enable-file-validation \
|
||||
enable-file-schema-validation \
|
||||
enable-subscriptions";
|
||||
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
SCRIPT_DIR=$(dirname $0);
|
||||
source $SCRIPT_DIR/_env;
|
||||
|
||||
# Initialize MINIO config
|
||||
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
||||
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
||||
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
|
||||
if [ "$?" = "1" ]; then
|
||||
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
|
||||
fi
|
||||
mc mb penpot-s3/penpot -p -q
|
||||
|
||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||
|
||||
entrypoint=${1:-app.main};
|
||||
|
||||
export JAVA_OPTS="\
|
||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-Djdk.attach.allowAttachSelf \
|
||||
-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-Djdk.tracePinnedThreads=full \
|
||||
-Dim4java.useV7=true \
|
||||
-XX:-OmitStackTraceInFastThrow \
|
||||
--sun-misc-unsafe-memory-access=allow \
|
||||
--enable-preview \
|
||||
--enable-native-access=ALL-UNNAMED";
|
||||
|
||||
export OPTIONS="-A:jmx-remote -A:dev"
|
||||
setup_minio;
|
||||
|
||||
shift 1;
|
||||
set -ex
|
||||
clojure $OPTIONS -M -m $entrypoint;
|
||||
exec clojure -A:jmx-remote -A:dev -M -m app.main "$@";
|
||||
|
||||
@@ -141,13 +141,11 @@
|
||||
([index coll attr]
|
||||
(reduce #(index-object %1 %2 attr) index coll)))
|
||||
|
||||
(defn decode-row
|
||||
[{:keys [data changes features] :as row}]
|
||||
(defn- decode-row-features
|
||||
[{:keys [features] :as row}]
|
||||
(when row
|
||||
(cond-> row
|
||||
features (assoc :features (db/decode-pgarray features #{}))
|
||||
changes (assoc :changes (blob/decode changes))
|
||||
data (assoc :data (blob/decode data)))))
|
||||
(db/pgarray? features) (assoc :features (db/decode-pgarray features #{})))))
|
||||
|
||||
(def sql:get-minimal-file
|
||||
"SELECT f.id,
|
||||
@@ -161,23 +159,124 @@
|
||||
[cfg id & {:as opts}]
|
||||
(db/get-with-sql cfg [sql:get-minimal-file id] opts))
|
||||
|
||||
(defn decode-file
|
||||
"A general purpose file decoding function that resolves all external
|
||||
pointers, run migrations and return plain vanilla file map"
|
||||
[cfg {:keys [id] :as file} & {:keys [migrate?] :or {migrate? true}}]
|
||||
(binding [pmap/*load-fn* (partial fdata/load-pointer cfg id)]
|
||||
(let [file (->> file
|
||||
(fmigr/resolve-applied-migrations cfg)
|
||||
(fdata/resolve-file-data cfg))
|
||||
libs (delay (get-resolved-file-libraries cfg file))]
|
||||
(def sql:get-file
|
||||
"SELECT f.id,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.deleted_at,
|
||||
f.name,
|
||||
f.is_shared,
|
||||
f.has_media_trimmed,
|
||||
f.revn,
|
||||
f.data AS legacy_data,
|
||||
f.ignore_sync_until,
|
||||
f.comment_thread_seqn,
|
||||
f.features,
|
||||
f.version,
|
||||
f.vern,
|
||||
p.team_id,
|
||||
coalesce(fd.backend, 'legacy-db') AS backend,
|
||||
fd.metadata AS metadata,
|
||||
fd.data AS data
|
||||
FROM file AS f
|
||||
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
|
||||
INNER JOIN project AS p ON (p.id = f.project_id)
|
||||
WHERE f.id = ?")
|
||||
|
||||
(-> file
|
||||
(update :features db/decode-pgarray #{})
|
||||
(update :data blob/decode)
|
||||
(update :data fdata/process-pointers deref)
|
||||
(update :data fdata/process-objects (partial into {}))
|
||||
(update :data assoc :id id)
|
||||
(cond-> migrate? (fmg/migrate-file libs))))))
|
||||
(defn- migrate-file
|
||||
[{:keys [::db/conn] :as cfg} {:keys [read-only?]} {:keys [id] :as file}]
|
||||
(binding [pmap/*load-fn* (partial fdata/load-pointer cfg id)
|
||||
pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [libs (delay (get-resolved-file-libraries cfg file))
|
||||
;; For avoid unnecesary overhead of creating multiple
|
||||
;; pointers and handly internally with objects map in their
|
||||
;; worst case (when probably all shapes and all pointers
|
||||
;; will be readed in any case), we just realize/resolve them
|
||||
;; before applying the migration to the file.
|
||||
file (-> (fdata/realize cfg file)
|
||||
(fmg/migrate-file libs))]
|
||||
|
||||
(if (or read-only? (db/read-only? conn))
|
||||
file
|
||||
(do ;; When file is migrated, we break the rule of no
|
||||
;; perform mutations on get operations and update the
|
||||
;; file with all migrations applied
|
||||
(update-file! cfg file)
|
||||
(fmigr/resolve-applied-migrations cfg file))))))
|
||||
|
||||
(defn- get-file*
|
||||
[{:keys [::db/conn] :as cfg} id
|
||||
{:keys [migrate?
|
||||
realize?
|
||||
decode?
|
||||
skip-locked?
|
||||
include-deleted?
|
||||
throw-if-not-exists?
|
||||
lock-for-update?
|
||||
lock-for-share?]
|
||||
:or {lock-for-update? false
|
||||
lock-for-share? false
|
||||
migrate? true
|
||||
decode? true
|
||||
include-deleted? false
|
||||
throw-if-not-exists? true
|
||||
realize? false}
|
||||
:as options}]
|
||||
|
||||
(assert (db/connection? conn) "expected cfg with valid connection")
|
||||
|
||||
(let [sql
|
||||
(cond
|
||||
lock-for-update?
|
||||
(str sql:get-file " FOR UPDATE of f")
|
||||
|
||||
lock-for-share?
|
||||
(str sql:get-file " FOR SHARE of f")
|
||||
|
||||
:else
|
||||
sql:get-file)
|
||||
|
||||
sql
|
||||
(if skip-locked?
|
||||
(str sql " SKIP LOCKED")
|
||||
sql)
|
||||
|
||||
file
|
||||
(db/get-with-sql conn [sql id]
|
||||
{::db/throw-if-not-exists false
|
||||
::db/remove-deleted (not include-deleted?)})
|
||||
|
||||
file
|
||||
(-> file
|
||||
(d/update-when :features db/decode-pgarray #{})
|
||||
(d/update-when :metadata fdata/decode-metadata))]
|
||||
|
||||
(if file
|
||||
(let [file
|
||||
(->> file
|
||||
(fmigr/resolve-applied-migrations cfg)
|
||||
(fdata/resolve-file-data cfg))
|
||||
|
||||
will-migrate?
|
||||
(and migrate? (fmg/need-migration? file))]
|
||||
|
||||
(if decode?
|
||||
(cond->> (fdata/decode-file-data cfg file)
|
||||
(and realize? (not will-migrate?))
|
||||
(fdata/realize cfg)
|
||||
|
||||
will-migrate?
|
||||
(migrate-file cfg options))
|
||||
|
||||
file))
|
||||
|
||||
(when-not (or skip-locked? (not throw-if-not-exists?))
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint "database object not found"
|
||||
:table :file
|
||||
:file-id id)))))
|
||||
|
||||
(defn get-file
|
||||
"Get file, resolve all features and apply migrations.
|
||||
@@ -186,10 +285,7 @@
|
||||
operations on file, because it removes the ovehead of lazy fetching
|
||||
and decoding."
|
||||
[cfg file-id & {:as opts}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(when-let [row (db/get* conn :file {:id file-id}
|
||||
(assoc opts ::db/remove-deleted false))]
|
||||
(decode-file cfg row opts)))))
|
||||
(db/run! cfg get-file* file-id opts))
|
||||
|
||||
(defn clean-file-features
|
||||
[file]
|
||||
@@ -213,12 +309,12 @@
|
||||
(let [conn (db/get-connection cfg)
|
||||
ids (db/create-array conn "uuid" ids)]
|
||||
(->> (db/exec! conn [sql:get-teams ids])
|
||||
(map decode-row))))
|
||||
(map decode-row-features))))
|
||||
|
||||
(defn get-team
|
||||
[cfg team-id]
|
||||
(-> (db/get cfg :team {:id team-id})
|
||||
(decode-row)))
|
||||
(decode-row-features)))
|
||||
|
||||
(defn get-fonts
|
||||
[cfg team-id]
|
||||
@@ -310,7 +406,6 @@
|
||||
(do
|
||||
(l/trc :hint "lookup index"
|
||||
:file-id (str file-id)
|
||||
:snap-id (str (:snapshot-id file))
|
||||
:id (str id)
|
||||
:result (str (get mobj :id)))
|
||||
(get mobj :id))
|
||||
@@ -327,7 +422,6 @@
|
||||
(doseq [[old-id item] missing-index]
|
||||
(l/dbg :hint "create missing references"
|
||||
:file-id (str file-id)
|
||||
:snap-id (str (:snapshot-id file))
|
||||
:old-id (str old-id)
|
||||
:id (str (:id item)))
|
||||
(db/insert! conn :file-media-object item
|
||||
@@ -338,12 +432,16 @@
|
||||
(def sql:get-file-media
|
||||
"SELECT * FROM file_media_object WHERE id = ANY(?)")
|
||||
|
||||
(defn get-file-media*
|
||||
[{:keys [::db/conn] :as cfg} {:keys [data id] :as file}]
|
||||
(let [used (cfh/collect-used-media data)
|
||||
used (db/create-array conn "uuid" used)]
|
||||
(->> (db/exec! conn [sql:get-file-media used])
|
||||
(mapv (fn [row] (assoc row :file-id id))))))
|
||||
|
||||
(defn get-file-media
|
||||
[cfg {:keys [data] :as file}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [used (cfh/collect-used-media data)
|
||||
used (db/create-array conn "uuid" used)]
|
||||
(db/exec! conn [sql:get-file-media used])))))
|
||||
[cfg file]
|
||||
(db/run! cfg get-file-media* file))
|
||||
|
||||
(def ^:private sql:get-team-files-ids
|
||||
"SELECT f.id FROM file AS f
|
||||
@@ -474,7 +572,7 @@
|
||||
;; all of them, not only the applied
|
||||
(vary-meta dissoc ::fmg/migrated))))
|
||||
|
||||
(defn encode-file
|
||||
(defn- encode-file
|
||||
[cfg {:keys [id features] :as file}]
|
||||
(let [file (if (and (contains? features "fdata/objects-map")
|
||||
(:data file))
|
||||
@@ -497,13 +595,28 @@
|
||||
(defn- file->params
|
||||
[file]
|
||||
(-> (select-keys file file-attrs)
|
||||
(assoc :data nil)
|
||||
(dissoc :team-id)
|
||||
(dissoc :migrations)))
|
||||
|
||||
(defn file->file-data-params
|
||||
[{:keys [id] :as file} & {:as opts}]
|
||||
(let [created-at (or (:created-at file) (ct/now))
|
||||
modified-at (or (:modified-at file) created-at)]
|
||||
(d/without-nils
|
||||
{:id id
|
||||
:type "main"
|
||||
:file-id id
|
||||
:data (:data file)
|
||||
:metadata (:metadata file)
|
||||
:created-at created-at
|
||||
:modified-at modified-at})))
|
||||
|
||||
(defn insert-file!
|
||||
"Insert a new file into the database table. Expectes a not-encoded file.
|
||||
Returns nil."
|
||||
[{:keys [::db/conn] :as cfg} file & {:as opts}]
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
|
||||
(when (:migrations file)
|
||||
(fmigr/upsert-migrations! conn file))
|
||||
@@ -511,35 +624,43 @@
|
||||
(let [file (encode-file cfg file)]
|
||||
(db/insert! conn :file
|
||||
(file->params file)
|
||||
{::db/return-keys false})
|
||||
(assoc opts ::db/return-keys false))
|
||||
|
||||
(->> (file->file-data-params file)
|
||||
(fdata/upsert! cfg))
|
||||
|
||||
nil))
|
||||
|
||||
(defn update-file!
|
||||
"Update an existing file on the database. Expects not encoded file."
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file} & {:as opts}]
|
||||
|
||||
(if (::reset-migrations opts false)
|
||||
(if (::reset-migrations? opts false)
|
||||
(fmigr/reset-migrations! conn file)
|
||||
(fmigr/upsert-migrations! conn file))
|
||||
|
||||
(let [file
|
||||
(encode-file cfg file)
|
||||
|
||||
params
|
||||
(file->params (dissoc file :id))]
|
||||
file-params
|
||||
(file->params (dissoc file :id))
|
||||
|
||||
(db/update! conn :file params
|
||||
file-data-params
|
||||
(file->file-data-params file)]
|
||||
|
||||
(db/update! conn :file file-params
|
||||
{:id id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(fdata/upsert! cfg file-data-params)
|
||||
nil))
|
||||
|
||||
(defn save-file!
|
||||
"Applies all the final validations and perist the file, binfile
|
||||
specific, should not be used outside of binfile domain.
|
||||
|
||||
Returns nil"
|
||||
[{:keys [::timestamp] :as cfg} file & {:as opts}]
|
||||
|
||||
(assert (ct/inst? timestamp) "expected valid timestamp")
|
||||
|
||||
(let [file (-> file
|
||||
@@ -564,7 +685,7 @@
|
||||
(l/error :hint "file schema validation error" :cause result))))
|
||||
|
||||
(if (::overwrite cfg)
|
||||
(update-file! cfg file (assoc opts ::reset-migrations true))
|
||||
(update-file! cfg file (assoc opts ::reset-migrations? true))
|
||||
(insert-file! cfg file opts))))
|
||||
|
||||
(def ^:private sql:get-file-libraries
|
||||
@@ -603,7 +724,7 @@
|
||||
;; FIXME: :is-indirect set to false to all rows looks
|
||||
;; completly useless
|
||||
(map #(assoc % :is-indirect false))
|
||||
(map decode-row))
|
||||
(map decode-row-features))
|
||||
(db/exec! conn [sql:get-file-libraries file-id])))
|
||||
|
||||
(defn get-resolved-file-libraries
|
||||
|
||||
@@ -346,7 +346,7 @@
|
||||
thumbnails (->> (bfc/get-file-object-thumbnails cfg file-id)
|
||||
(mapv #(dissoc % :file-id)))
|
||||
|
||||
file (cond-> (bfc/get-file cfg file-id)
|
||||
file (cond-> (bfc/get-file cfg file-id :realize? true)
|
||||
detach?
|
||||
(-> (ctf/detach-external-references file-id)
|
||||
(dissoc :libraries))
|
||||
|
||||
@@ -153,7 +153,7 @@
|
||||
|
||||
(defn- write-file!
|
||||
[cfg file-id]
|
||||
(let [file (bfc/get-file cfg file-id)
|
||||
(let [file (bfc/get-file cfg file-id :realize? true)
|
||||
thumbs (bfc/get-file-object-thumbnails cfg file-id)
|
||||
media (bfc/get-file-media cfg file)
|
||||
rels (bfc/get-files-rels cfg #{file-id})]
|
||||
|
||||
@@ -224,9 +224,11 @@
|
||||
(throw (IllegalArgumentException.
|
||||
"the `include-libraries` and `embed-assets` are mutally excluding options")))
|
||||
|
||||
(let [detach? (and (not embed-assets) (not include-libraries))]
|
||||
(let [detach? (and (not embed-assets) (not include-libraries))]
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(cond-> (bfc/get-file cfg file-id {::sql/for-update true})
|
||||
(cond-> (bfc/get-file cfg file-id
|
||||
{:realize? true
|
||||
:lock-for-update? true})
|
||||
detach?
|
||||
(-> (ctf/detach-external-references file-id)
|
||||
(dissoc :libraries))
|
||||
|
||||
@@ -52,6 +52,8 @@
|
||||
|
||||
:redis-uri "redis://redis/0"
|
||||
|
||||
:file-data-backend "legacy-db"
|
||||
|
||||
:objects-storage-backend "fs"
|
||||
:objects-storage-fs-directory "assets"
|
||||
|
||||
@@ -107,7 +109,8 @@
|
||||
[:auto-file-snapshot-timeout {:optional true} ::ct/duration]
|
||||
|
||||
[:media-max-file-size {:optional true} ::sm/int]
|
||||
[:deletion-delay {:optional true} ::ct/duration] ;; REVIEW
|
||||
[:deletion-delay {:optional true} ::ct/duration]
|
||||
[:file-clean-delay {:optional true} ::ct/duration]
|
||||
[:telemetry-enabled {:optional true} ::sm/boolean]
|
||||
[:default-blob-version {:optional true} ::sm/int]
|
||||
[:allow-demo-users {:optional true} ::sm/boolean]
|
||||
@@ -211,6 +214,8 @@
|
||||
[:prepl-host {:optional true} :string]
|
||||
[:prepl-port {:optional true} ::sm/int]
|
||||
|
||||
[:file-data-backend {:optional true} [:enum "db" "legacy-db" "storage"]]
|
||||
|
||||
[:media-directory {:optional true} :string] ;; REVIEW
|
||||
[:media-uri {:optional true} :string]
|
||||
[:assets-path {:optional true} :string]
|
||||
@@ -302,6 +307,11 @@
|
||||
(or (c/get config :deletion-delay)
|
||||
(ct/duration {:days 7})))
|
||||
|
||||
(defn get-file-clean-delay
|
||||
[]
|
||||
(or (c/get config :file-clean-delay)
|
||||
(ct/duration {:days 2})))
|
||||
|
||||
(defn get
|
||||
"A configuration getter. Helps code be more testable."
|
||||
([key]
|
||||
|
||||
@@ -298,7 +298,7 @@
|
||||
(defn insert!
|
||||
"A helper that builds an insert sql statement and executes it. By
|
||||
default returns the inserted row with all the field; you can delimit
|
||||
the returned columns with the `::columns` option."
|
||||
the returned columns with the `::sql/columns` option."
|
||||
[ds table params & {:as opts}]
|
||||
(let [conn (get-connectable ds)
|
||||
sql (sql/insert table params opts)
|
||||
@@ -406,15 +406,15 @@
|
||||
:hint "database object not found"))
|
||||
row))
|
||||
|
||||
|
||||
(defn get-with-sql
|
||||
[ds sql & {:as opts}]
|
||||
(let [rows (cond->> (exec! ds sql opts)
|
||||
(::remove-deleted opts true)
|
||||
(remove is-row-deleted?)
|
||||
(let [rows
|
||||
(cond->> (exec! ds sql opts)
|
||||
(::remove-deleted opts true)
|
||||
(remove is-row-deleted?)
|
||||
|
||||
:always
|
||||
(not-empty))]
|
||||
:always
|
||||
(not-empty))]
|
||||
|
||||
(when (and (not rows) (::throw-if-not-exists opts true))
|
||||
(ex/raise :type :not-found
|
||||
@@ -423,7 +423,6 @@
|
||||
|
||||
(first rows)))
|
||||
|
||||
|
||||
(def ^:private default-plan-opts
|
||||
(-> default-opts
|
||||
(assoc :fetch-size 1000)
|
||||
@@ -578,10 +577,10 @@
|
||||
[system f & params]
|
||||
(cond
|
||||
(connection? system)
|
||||
(run! {::conn system} f)
|
||||
(apply run! {::conn system} f params)
|
||||
|
||||
(pool? system)
|
||||
(run! {::pool system} f)
|
||||
(apply run! {::pool system} f params)
|
||||
|
||||
(::conn system)
|
||||
(apply f system params)
|
||||
|
||||
@@ -9,11 +9,11 @@
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.types.objects-map :as omap]
|
||||
[app.common.types.path :as path]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.storage :as sto]
|
||||
@@ -21,18 +21,34 @@
|
||||
[app.util.objects-map :as omap.legacy]
|
||||
[app.util.pointer-map :as pmap]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; OFFLOAD
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn offloaded?
|
||||
[file]
|
||||
(= "objects-storage" (:data-backend file)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; OBJECTS-MAP
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn process-objects
|
||||
"Apply a function to all objects-map on the file. Usualy used for convert
|
||||
the objects-map instances to plain maps"
|
||||
[fdata update-fn]
|
||||
(if (contains? fdata :pages-index)
|
||||
(update fdata :pages-index d/update-vals
|
||||
(fn [page]
|
||||
(update page :objects
|
||||
(fn [objects]
|
||||
(if (or (omap/objects-map? objects)
|
||||
(omap.legacy/objects-map? objects))
|
||||
(update-fn objects)
|
||||
objects)))))
|
||||
fdata))
|
||||
|
||||
|
||||
(defn realize-objects
|
||||
"Process a file and remove all instances of objects map realizing them
|
||||
to a plain data. Used in operation where is more efficient have the
|
||||
whole file loaded in memory or we going to persist it in an
|
||||
alterantive storage."
|
||||
[_cfg file]
|
||||
(update file :data process-objects (partial into {})))
|
||||
|
||||
(defn enable-objects-map
|
||||
[file & _opts]
|
||||
(let [update-page
|
||||
@@ -61,51 +77,186 @@
|
||||
(update :data update-data)
|
||||
(update :features disj "fdata/objects-map"))))
|
||||
|
||||
(defn process-objects
|
||||
"Apply a function to all objects-map on the file. Usualy used for convert
|
||||
the objects-map instances to plain maps"
|
||||
[fdata update-fn]
|
||||
(if (contains? fdata :pages-index)
|
||||
(update fdata :pages-index d/update-vals
|
||||
(fn [page]
|
||||
(update page :objects
|
||||
(fn [objects]
|
||||
(if (or (omap/objects-map? objects)
|
||||
(omap.legacy/objects-map? objects))
|
||||
(update-fn objects)
|
||||
objects)))))
|
||||
fdata))
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; STORAGE
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defmulti resolve-file-data
|
||||
(fn [_cfg file] (get file :backend "legacy-db")))
|
||||
|
||||
(defmethod resolve-file-data "legacy-db"
|
||||
[_cfg {:keys [legacy-data] :as file}]
|
||||
(-> file
|
||||
(assoc :data legacy-data)
|
||||
(dissoc :legacy-data)))
|
||||
|
||||
(defmethod resolve-file-data "db"
|
||||
[_cfg file]
|
||||
(dissoc file :legacy-data))
|
||||
|
||||
(defmethod resolve-file-data "storage"
|
||||
[cfg {:keys [metadata] :as file}]
|
||||
(let [storage (sto/resolve cfg ::db/reuse-conn true)
|
||||
ref-id (:storage-ref-id metadata)
|
||||
data (->> (sto/get-object storage ref-id)
|
||||
(sto/get-object-bytes storage))]
|
||||
(-> file
|
||||
(assoc :data data)
|
||||
(dissoc :legacy-data))))
|
||||
|
||||
(defn decode-file-data
|
||||
[_cfg {:keys [data] :as file}]
|
||||
(cond-> file
|
||||
(bytes? data)
|
||||
(assoc :data (blob/decode data))))
|
||||
|
||||
(def ^:private sql:insert-file-data
|
||||
"INSERT INTO file_data (file_id, id, created_at, modified_at, deleted_at,
|
||||
type, backend, metadata, data)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)")
|
||||
|
||||
(def ^:private sql:upsert-file-data
|
||||
(str sql:insert-file-data
|
||||
" ON CONFLICT (file_id, id)
|
||||
DO UPDATE SET modified_at=?,
|
||||
deleted_at=?,
|
||||
backend=?,
|
||||
metadata=?,
|
||||
data=?"))
|
||||
|
||||
(defn- upsert-in-database
|
||||
[cfg {:keys [id file-id created-at modified-at deleted-at type backend data metadata]}]
|
||||
(let [created-at (or created-at (ct/now))
|
||||
metadata (some-> metadata db/json)
|
||||
modified-at (or modified-at created-at)]
|
||||
|
||||
(db/exec-one! cfg [sql:upsert-file-data
|
||||
file-id id
|
||||
created-at
|
||||
modified-at
|
||||
deleted-at
|
||||
type
|
||||
backend
|
||||
metadata
|
||||
data
|
||||
modified-at
|
||||
deleted-at
|
||||
backend
|
||||
metadata
|
||||
data])))
|
||||
|
||||
(defn- handle-persistence
|
||||
[cfg {:keys [type backend id file-id data] :as params}]
|
||||
|
||||
(cond
|
||||
(= backend "storage")
|
||||
(let [storage (sto/resolve cfg)
|
||||
content (sto/content data)
|
||||
sobject (sto/put-object! storage
|
||||
{::sto/content content
|
||||
::sto/touch true
|
||||
:bucket "file-data"
|
||||
:content-type "application/octet-stream"
|
||||
:file-id file-id
|
||||
:id id})
|
||||
metadata {:storage-ref-id (:id sobject)}
|
||||
params (-> params
|
||||
(assoc :metadata metadata)
|
||||
(assoc :data nil))]
|
||||
(upsert-in-database cfg params))
|
||||
|
||||
(= backend "db")
|
||||
(->> (dissoc params :metadata)
|
||||
(upsert-in-database cfg))
|
||||
|
||||
(= backend "legacy-db")
|
||||
(cond
|
||||
(= type "main")
|
||||
(db/update! cfg :file
|
||||
{:data data}
|
||||
{:id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(= type "snapshot")
|
||||
(db/update! cfg :file-change
|
||||
{:data data}
|
||||
{:file-id file-id :id id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(= type "fragment")
|
||||
(upsert-in-database cfg
|
||||
(-> (dissoc params :metadata)
|
||||
(assoc :backend "db")))
|
||||
|
||||
:else
|
||||
(throw (RuntimeException. "not implemented")))
|
||||
|
||||
:else
|
||||
(throw (IllegalArgumentException.
|
||||
(str "backend '" backend "' not supported")))))
|
||||
|
||||
(defn process-metadata
|
||||
[cfg metadata]
|
||||
(when-let [storage-id (:storage-ref-id metadata)]
|
||||
(let [storage (sto/resolve cfg ::db/reuse-conn true)]
|
||||
(sto/touch-object! storage storage-id))))
|
||||
|
||||
(defn- default-backend
|
||||
[backend]
|
||||
(or backend (cf/get :file-data-backend)))
|
||||
|
||||
(def ^:private schema:metadata
|
||||
[:map {:title "Metadata"}
|
||||
[:storage-ref-id {:optional true} ::sm/uuid]])
|
||||
|
||||
(def decode-metadata-with-schema
|
||||
(sm/decoder schema:metadata sm/json-transformer))
|
||||
|
||||
(defn decode-metadata
|
||||
[metadata]
|
||||
(some-> metadata
|
||||
(db/decode-json-pgobject)
|
||||
(decode-metadata-with-schema)))
|
||||
|
||||
(def ^:private schema:update-params
|
||||
[:map {:closed true}
|
||||
[:id ::sm/uuid]
|
||||
[:type [:enum "main" "snapshot" "fragment"]]
|
||||
[:file-id ::sm/uuid]
|
||||
[:backend {:optional true} [:enum "db" "legacy-db" "storage"]]
|
||||
[:metadata {:optional true} [:maybe schema:metadata]]
|
||||
[:data {:optional true} bytes?]
|
||||
[:created-at {:optional true} ::ct/inst]
|
||||
[:modified-at {:optional true} [:maybe ::ct/inst]]
|
||||
[:deleted-at {:optional true} [:maybe ::ct/inst]]])
|
||||
|
||||
(def ^:private check-update-params
|
||||
(sm/check-fn schema:update-params :hint "invalid params received for update"))
|
||||
|
||||
(defn upsert!
|
||||
"Create or update file data"
|
||||
[cfg params & {:as opts}]
|
||||
(let [params (-> (check-update-params params)
|
||||
(update :backend default-backend))]
|
||||
|
||||
(some->> (:metadata params)
|
||||
(process-metadata cfg))
|
||||
|
||||
(-> (handle-persistence cfg params)
|
||||
(db/get-update-count)
|
||||
(pos?))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; POINTER-MAP
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn get-file-data
|
||||
"Get file data given a file instance."
|
||||
[system file]
|
||||
(if (offloaded? file)
|
||||
(let [storage (sto/resolve system ::db/reuse-conn true)]
|
||||
(->> (sto/get-object storage (:data-ref-id file))
|
||||
(sto/get-object-bytes storage)))
|
||||
(:data file)))
|
||||
|
||||
(defn resolve-file-data
|
||||
[system file]
|
||||
(let [data (get-file-data system file)]
|
||||
(assoc file :data data)))
|
||||
|
||||
(defn decode-file-data
|
||||
[_system {:keys [data] :as file}]
|
||||
(cond-> file
|
||||
(bytes? data)
|
||||
(assoc :data (blob/decode data))))
|
||||
|
||||
(defn load-pointer
|
||||
"A database loader pointer helper"
|
||||
[system file-id id]
|
||||
(let [fragment (db/get* system :file-data-fragment
|
||||
{:id id :file-id file-id}
|
||||
{::sql/columns [:data :data-backend :data-ref-id :id]})]
|
||||
[cfg file-id id]
|
||||
(let [fragment (some-> (db/get* cfg :file-data
|
||||
{:id id :file-id file-id :type "fragment"}
|
||||
{::sql/columns [:data :backend :id :metadata]})
|
||||
(update :metadata decode-metadata))]
|
||||
|
||||
(l/trc :hint "load pointer"
|
||||
:file-id (str file-id)
|
||||
@@ -119,22 +270,21 @@
|
||||
:file-id file-id
|
||||
:fragment-id id))
|
||||
|
||||
(let [data (get-file-data system fragment)]
|
||||
;; FIXME: conditional thread scheduling for decoding big objects
|
||||
(blob/decode data))))
|
||||
(-> (resolve-file-data cfg fragment)
|
||||
(get :data)
|
||||
(blob/decode))))
|
||||
|
||||
(defn persist-pointers!
|
||||
"Persist all currently tracked pointer objects"
|
||||
[system file-id]
|
||||
(let [conn (db/get-connection system)]
|
||||
(doseq [[id item] @pmap/*tracked*]
|
||||
(when (pmap/modified? item)
|
||||
(l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
|
||||
(let [content (-> item deref blob/encode)]
|
||||
(db/insert! conn :file-data-fragment
|
||||
{:id id
|
||||
:file-id file-id
|
||||
:data content}))))))
|
||||
[cfg file-id]
|
||||
(doseq [[id item] @pmap/*tracked*]
|
||||
(when (pmap/modified? item)
|
||||
(l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
|
||||
(let [content (-> item deref blob/encode)]
|
||||
(upsert! cfg {:id id
|
||||
:file-id file-id
|
||||
:type "fragment"
|
||||
:data content})))))
|
||||
|
||||
(defn process-pointers
|
||||
"Apply a function to all pointers on the file. Usuly used for
|
||||
@@ -148,6 +298,14 @@
|
||||
(d/update-vals update-fn')
|
||||
(update :pages-index d/update-vals update-fn'))))
|
||||
|
||||
(defn realize-pointers
|
||||
"Process a file and remove all instances of pointers realizing them to
|
||||
a plain data. Used in operation where is more efficient have the
|
||||
whole file loaded in memory."
|
||||
[cfg {:keys [id] :as file}]
|
||||
(binding [pmap/*load-fn* (partial load-pointer cfg id)]
|
||||
(update file :data process-pointers deref)))
|
||||
|
||||
(defn get-used-pointer-ids
|
||||
"Given a file, return all pointer ids used in the data."
|
||||
[fdata]
|
||||
@@ -167,47 +325,12 @@
|
||||
(update :features conj "fdata/pointer-map")))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PATH-DATA
|
||||
;; GENERAL PURPOSE HELPERS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn enable-path-data
|
||||
"Enable the fdata/path-data feature on the file."
|
||||
[file & _opts]
|
||||
(letfn [(update-object [object]
|
||||
(if (or (cfh/path-shape? object)
|
||||
(cfh/bool-shape? object))
|
||||
(update object :content path/content)
|
||||
object))
|
||||
|
||||
(update-container [container]
|
||||
(d/update-when container :objects d/update-vals update-object))]
|
||||
|
||||
(-> file
|
||||
(update :data (fn [data]
|
||||
(-> data
|
||||
(update :pages-index d/update-vals update-container)
|
||||
(d/update-when :components d/update-vals update-container))))
|
||||
(update :features conj "fdata/path-data"))))
|
||||
|
||||
(defn disable-path-data
|
||||
[file & _opts]
|
||||
(letfn [(update-object [object]
|
||||
(if (or (cfh/path-shape? object)
|
||||
(cfh/bool-shape? object))
|
||||
(update object :content vec)
|
||||
object))
|
||||
|
||||
(update-container [container]
|
||||
(d/update-when container :objects d/update-vals update-object))]
|
||||
|
||||
(when-let [conn db/*conn*]
|
||||
(db/delete! conn :file-migration {:file-id (:id file)
|
||||
:name "0003-convert-path-content"}))
|
||||
(-> file
|
||||
(update :data (fn [data]
|
||||
(-> data
|
||||
(update :pages-index d/update-vals update-container)
|
||||
(d/update-when :components d/update-vals update-container))))
|
||||
(update :features disj "fdata/path-data")
|
||||
(update :migrations disj "0003-convert-path-content")
|
||||
(vary-meta update ::fmg/migrated disj "0003-convert-path-content"))))
|
||||
(defn realize
|
||||
"A helper that combines realize-pointers and realize-objects"
|
||||
[cfg file]
|
||||
(->> file
|
||||
(realize-pointers cfg)
|
||||
(realize-objects cfg)))
|
||||
|
||||
444
backend/src/app/features/file_snapshots.clj
Normal file
444
backend/src/app/features/file_snapshots.clj
Normal file
@@ -0,0 +1,444 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.features.file-snapshots
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as-alias cfeat]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.storage :as sto]
|
||||
[app.util.blob :as blob]
|
||||
[app.worker :as wrk]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(def sql:snapshots
|
||||
"SELECT c.id,
|
||||
c.label,
|
||||
c.created_at,
|
||||
c.updated_at AS modified_at,
|
||||
c.deleted_at,
|
||||
c.profile_id,
|
||||
c.created_by,
|
||||
c.locked_by,
|
||||
c.revn,
|
||||
c.features,
|
||||
c.migrations,
|
||||
c.version,
|
||||
c.file_id,
|
||||
c.data AS legacy_data,
|
||||
fd.data AS data,
|
||||
coalesce(fd.backend, 'legacy-db') AS backend,
|
||||
fd.metadata AS metadata
|
||||
FROM file_change AS c
|
||||
LEFT JOIN file_data AS fd ON (fd.file_id = c.file_id
|
||||
AND fd.id = c.id
|
||||
AND fd.type = 'snapshot')
|
||||
WHERE c.label IS NOT NULL")
|
||||
|
||||
(defn- decode-snapshot
|
||||
[snapshot]
|
||||
(some-> snapshot
|
||||
(-> (d/update-when :metadata fdata/decode-metadata)
|
||||
(d/update-when :migrations db/decode-pgarray [])
|
||||
(d/update-when :features db/decode-pgarray #{}))))
|
||||
|
||||
(def ^:private sql:get-minimal-file
|
||||
"SELECT f.id,
|
||||
f.revn,
|
||||
f.modified_at,
|
||||
f.deleted_at,
|
||||
fd.backend AS backend,
|
||||
fd.metadata AS metadata
|
||||
FROM file AS f
|
||||
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
|
||||
WHERE f.id = ?")
|
||||
|
||||
(defn- get-minimal-file
|
||||
[cfg id & {:as opts}]
|
||||
(-> (db/get-with-sql cfg [sql:get-minimal-file id] opts)
|
||||
(d/update-when :metadata fdata/decode-metadata)))
|
||||
|
||||
(def ^:private sql:get-snapshot-without-data
|
||||
(str "WITH snapshots AS (" sql:snapshots ")"
|
||||
"SELECT c.id,
|
||||
c.label,
|
||||
c.revn,
|
||||
c.created_at,
|
||||
c.modified_at,
|
||||
c.deleted_at,
|
||||
c.profile_id,
|
||||
c.created_by,
|
||||
c.locked_by,
|
||||
c.features,
|
||||
c.metadata,
|
||||
c.migrations,
|
||||
c.version,
|
||||
c.file_id
|
||||
FROM snapshots AS c
|
||||
WHERE c.id = ?
|
||||
AND CASE WHEN c.created_by = 'user'
|
||||
THEN (c.deleted_at IS NULL)
|
||||
WHEN c.created_by = 'system'
|
||||
THEN (c.deleted_at IS NULL OR c.deleted_at >= ?::timestamptz)
|
||||
END"))
|
||||
|
||||
(defn get-minimal-snapshot
|
||||
[cfg snapshot-id]
|
||||
(let [now (ct/now)]
|
||||
(-> (db/get-with-sql cfg [sql:get-snapshot-without-data snapshot-id now])
|
||||
(decode-snapshot))))
|
||||
|
||||
(def ^:private sql:get-snapshot
|
||||
(str sql:snapshots
|
||||
" AND c.file_id = ?
|
||||
AND c.id = ?
|
||||
AND CASE WHEN c.created_by = 'user'
|
||||
THEN (c.deleted_at IS NULL)
|
||||
WHEN c.created_by = 'system'
|
||||
THEN (c.deleted_at IS NULL OR c.deleted_at >= ?::timestamptz)
|
||||
END"))
|
||||
|
||||
(defn- get-snapshot
|
||||
"Get snapshot with decoded data"
|
||||
[cfg file-id snapshot-id]
|
||||
(let [now (ct/now)]
|
||||
(->> (db/get-with-sql cfg [sql:get-snapshot file-id snapshot-id now])
|
||||
(decode-snapshot)
|
||||
(fdata/resolve-file-data cfg)
|
||||
(fdata/decode-file-data cfg))))
|
||||
|
||||
(def ^:private sql:get-visible-snapshots
|
||||
(str "WITH "
|
||||
"snapshots1 AS ( " sql:snapshots "),"
|
||||
"snapshots2 AS (
|
||||
SELECT c.id,
|
||||
c.label,
|
||||
c.revn,
|
||||
c.version,
|
||||
c.created_at,
|
||||
c.modified_at,
|
||||
c.created_by,
|
||||
c.locked_by,
|
||||
c.profile_id,
|
||||
c.deleted_at
|
||||
FROM snapshots1 AS c
|
||||
WHERE c.file_id = ?
|
||||
), snapshots3 AS (
|
||||
(SELECT * FROM snapshots2
|
||||
WHERE created_by = 'system'
|
||||
AND (deleted_at IS NULL OR
|
||||
deleted_at >= ?::timestamptz)
|
||||
LIMIT 500)
|
||||
UNION ALL
|
||||
(SELECT * FROM snapshots2
|
||||
WHERE created_by = 'user'
|
||||
AND deleted_at IS NULL
|
||||
LIMIT 500)
|
||||
)
|
||||
SELECT * FROM snapshots3
|
||||
ORDER BY created_at DESC"))
|
||||
|
||||
(defn get-visible-snapshots
|
||||
"Return a list of snapshots fecheable from the API, it has a limited
|
||||
set of fields and applies big but safe limits over all available
|
||||
snapshots. It return a ordered vector by the snapshot date of
|
||||
creation."
|
||||
[cfg file-id]
|
||||
(let [now (ct/now)]
|
||||
(->> (db/exec! cfg [sql:get-visible-snapshots file-id now])
|
||||
(mapv decode-snapshot))))
|
||||
|
||||
(def ^:private schema:decoded-file
|
||||
[:map {:title "DecodedFile"}
|
||||
[:id ::sm/uuid]
|
||||
[:revn :int]
|
||||
[:vern :int]
|
||||
[:data :map]
|
||||
[:version :int]
|
||||
[:features ::cfeat/features]
|
||||
[:migrations [::sm/set :string]]])
|
||||
|
||||
(def ^:private schema:snapshot
|
||||
[:map {:title "Snapshot"}
|
||||
[:id ::sm/uuid]
|
||||
[:revn [::sm/int {:min 0}]]
|
||||
[:version [::sm/int {:min 0}]]
|
||||
[:features ::cfeat/features]
|
||||
[:migrations [::sm/set ::sm/text]]
|
||||
[:profile-id {:optional true} ::sm/uuid]
|
||||
[:label ::sm/text]
|
||||
[:file-id ::sm/uuid]
|
||||
[:created-by [:enum "system" "user" "admin"]]
|
||||
[:deleted-at {:optional true} ::ct/inst]
|
||||
[:modified-at ::ct/inst]
|
||||
[:created-at ::ct/inst]])
|
||||
|
||||
(def ^:private check-snapshot
|
||||
(sm/check-fn schema:snapshot))
|
||||
|
||||
(def ^:private check-decoded-file
|
||||
(sm/check-fn schema:decoded-file))
|
||||
|
||||
(defn- generate-snapshot-label
|
||||
[]
|
||||
(let [ts (-> (ct/now)
|
||||
(ct/format-inst)
|
||||
(str/replace #"[T:\.]" "-")
|
||||
(str/rtrim "Z"))]
|
||||
(str "snapshot-" ts)))
|
||||
|
||||
(def ^:private schema:create-params
|
||||
[:map {:title "SnapshotCreateParams"}
|
||||
[:profile-id ::sm/uuid]
|
||||
[:created-by {:optional true} [:enum "user" "system"]]
|
||||
[:label {:optional true} ::sm/text]
|
||||
[:session-id {:optional true} ::sm/uuid]
|
||||
[:modified-at {:optional true} ::ct/inst]
|
||||
[:deleted-at {:optional true} ::ct/inst]])
|
||||
|
||||
(def ^:private check-create-params
|
||||
(sm/check-fn schema:create-params))
|
||||
|
||||
(defn create!
|
||||
"Create a file snapshot; expects a non-encoded file"
|
||||
[cfg file & {:as params}]
|
||||
(let [{:keys [label created-by deleted-at profile-id session-id]}
|
||||
(check-create-params params)
|
||||
|
||||
file
|
||||
(check-decoded-file file)
|
||||
|
||||
created-by
|
||||
(or created-by "system")
|
||||
|
||||
snapshot-id
|
||||
(uuid/next)
|
||||
|
||||
created-at
|
||||
(ct/now)
|
||||
|
||||
deleted-at
|
||||
(or deleted-at
|
||||
(if (= created-by "system")
|
||||
(ct/in-future (cf/get-deletion-delay))
|
||||
nil))
|
||||
|
||||
label
|
||||
(or label (generate-snapshot-label))
|
||||
|
||||
snapshot
|
||||
(cond-> {:id snapshot-id
|
||||
:revn (:revn file)
|
||||
:version (:version file)
|
||||
:file-id (:id file)
|
||||
:features (:features file)
|
||||
:migrations (:migrations file)
|
||||
:label label
|
||||
:created-at created-at
|
||||
:modified-at created-at
|
||||
:created-by created-by}
|
||||
|
||||
deleted-at
|
||||
(assoc :deleted-at deleted-at)
|
||||
|
||||
:always
|
||||
(check-snapshot))]
|
||||
|
||||
(db/insert! cfg :file-change
|
||||
(-> snapshot
|
||||
(update :features into-array)
|
||||
(update :migrations into-array)
|
||||
(assoc :updated-at created-at)
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :session-id session-id)
|
||||
(dissoc :modified-at))
|
||||
{::db/return-keys false})
|
||||
|
||||
(fdata/upsert! cfg
|
||||
{:id snapshot-id
|
||||
:file-id (:id file)
|
||||
:type "snapshot"
|
||||
:data (blob/encode (:data file))
|
||||
:created-at created-at
|
||||
:deleted-at deleted-at})
|
||||
|
||||
snapshot))
|
||||
|
||||
(def ^:private schema:update-params
|
||||
[:map {:title "SnapshotUpdateParams"}
|
||||
[:id ::sm/uuid]
|
||||
[:file-id ::sm/uuid]
|
||||
[:label ::sm/text]
|
||||
[:modified-at {:optional true} ::ct/inst]])
|
||||
|
||||
(def ^:private check-update-params
|
||||
(sm/check-fn schema:update-params))
|
||||
|
||||
(defn update!
|
||||
[cfg params]
|
||||
|
||||
(let [{:keys [id file-id label modified-at]}
|
||||
(check-update-params params)
|
||||
|
||||
modified-at
|
||||
(or modified-at (ct/now))]
|
||||
|
||||
(db/update! cfg :file-data
|
||||
{:deleted-at nil
|
||||
:modified-at modified-at}
|
||||
{:file-id file-id
|
||||
:id id
|
||||
:type "snapshot"}
|
||||
{::db/return-keys false})
|
||||
|
||||
(-> (db/update! cfg :file-change
|
||||
{:label label
|
||||
:created-by "user"
|
||||
:updated-at modified-at
|
||||
:deleted-at nil}
|
||||
{:file-id file-id
|
||||
:id id}
|
||||
{::db/return-keys false})
|
||||
(db/get-update-count)
|
||||
(pos?))))
|
||||
|
||||
(defn restore!
|
||||
[{:keys [::db/conn] :as cfg} file-id snapshot-id]
|
||||
(let [file (get-minimal-file conn file-id {::db/for-update true})
|
||||
vern (rand-int Integer/MAX_VALUE)
|
||||
|
||||
storage
|
||||
(sto/resolve cfg {::db/reuse-conn true})
|
||||
|
||||
snapshot
|
||||
(get-snapshot cfg file-id snapshot-id)]
|
||||
|
||||
(when-not snapshot
|
||||
(ex/raise :type :not-found
|
||||
:code :snapshot-not-found
|
||||
:hint "unable to find snapshot with the provided label"
|
||||
:snapshot-id snapshot-id
|
||||
:file-id file-id))
|
||||
|
||||
(when-not (:data snapshot)
|
||||
(ex/raise :type :internal
|
||||
:code :snapshot-without-data
|
||||
:hint "snapshot has no data"
|
||||
:label (:label snapshot)
|
||||
:file-id file-id))
|
||||
|
||||
(let [;; If the snapshot has applied migrations stored, we reuse
|
||||
;; them, if not, we take a safest set of migrations as
|
||||
;; starting point. This is because, at the time of
|
||||
;; implementing snapshots, migrations were not taken into
|
||||
;; account so we need to make this backward compatible in
|
||||
;; some way.
|
||||
migrations
|
||||
(or (:migrations snapshot)
|
||||
(fmg/generate-migrations-from-version 67))
|
||||
|
||||
file
|
||||
(-> file
|
||||
(update :revn inc)
|
||||
(assoc :migrations migrations)
|
||||
(assoc :data (:data snapshot))
|
||||
(assoc :vern vern)
|
||||
(assoc :version (:version snapshot))
|
||||
(assoc :has-media-trimmed false)
|
||||
(assoc :modified-at (:modified-at snapshot))
|
||||
(assoc :features (:features snapshot)))]
|
||||
|
||||
(l/dbg :hint "restoring snapshot"
|
||||
:file-id (str file-id)
|
||||
:label (:label snapshot)
|
||||
:snapshot-id (str (:id snapshot)))
|
||||
|
||||
;; In the same way, on reseting the file data, we need to restore
|
||||
;; the applied migrations on the moment of taking the snapshot
|
||||
(bfc/update-file! cfg file ::bfc/reset-migrations? true)
|
||||
|
||||
;; FIXME: this should be separated functions, we should not have
|
||||
;; inline sql here.
|
||||
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "update file_tagged_object_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
;; clean file thumbnails
|
||||
(let [sql (str "update file_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
vern)))
|
||||
|
||||
(defn delete!
|
||||
[cfg & {:keys [id file-id deleted-at]}]
|
||||
(assert (uuid? id) "missing id")
|
||||
(assert (uuid? file-id) "missing file-id")
|
||||
(assert (ct/inst? deleted-at) "missing deleted-at")
|
||||
|
||||
(wrk/submit! {::db/conn (db/get-connection cfg)
|
||||
::wrk/task :delete-object
|
||||
::wrk/params {:object :snapshot
|
||||
:deleted-at deleted-at
|
||||
:file-id file-id
|
||||
:id id}})
|
||||
(db/update! cfg :file-change
|
||||
{:deleted-at deleted-at}
|
||||
{:id id :file-id file-id}
|
||||
{::db/return-keys false})
|
||||
true)
|
||||
|
||||
(def ^:private sql:get-snapshots
|
||||
(str sql:snapshots " AND c.file_id = ?"))
|
||||
|
||||
(defn lock-by!
|
||||
[conn id profile-id]
|
||||
(-> (db/update! conn :file-change
|
||||
{:locked-by profile-id}
|
||||
{:id id}
|
||||
{::db/return-keys false})
|
||||
(db/get-update-count)
|
||||
(pos?)))
|
||||
|
||||
(defn unlock!
|
||||
[conn id]
|
||||
(-> (db/update! conn :file-change
|
||||
{:locked-by nil}
|
||||
{:id id}
|
||||
{::db/return-keys false})
|
||||
(db/get-update-count)
|
||||
(pos?)))
|
||||
|
||||
(defn reduce-snapshots
|
||||
"Process the file snapshots using efficient reduction; the file
|
||||
reduction comes with all snapshots, including maked as deleted"
|
||||
[cfg file-id xform f init]
|
||||
(let [conn (db/get-connection cfg)
|
||||
xform (comp
|
||||
(map (partial fdata/resolve-file-data cfg))
|
||||
(map (partial fdata/decode-file-data cfg))
|
||||
xform)]
|
||||
|
||||
(->> (db/plan conn [sql:get-snapshots file-id] {:fetch-size 1})
|
||||
(transduce xform f init))))
|
||||
@@ -397,33 +397,6 @@
                          ::yres/headers {"content-type" "text/plain"}
                          ::yres/body (str/ffmt "PROFILE '%' ACTIVATED" (:email profile))}))))))


(defn- reset-file-version
  [cfg {:keys [params] :as request}]
  (let [file-id (some-> params :file-id d/parse-uuid)
        version (some-> params :version d/parse-integer)]

    (when-not (contains? params :force)
      (ex/raise :type :validation
                :code :missing-force
                :hint "missing force checkbox"))

    (when (nil? file-id)
      (ex/raise :type :validation
                :code :invalid-file-id
                :hint "provided invalid file id"))

    (when (nil? version)
      (ex/raise :type :validation
                :code :invalid-version
                :hint "provided invalid version"))

    (db/tx-run! cfg srepl/process-file! file-id #(assoc % :version version))

    {::yres/status 200
     ::yres/headers {"content-type" "text/plain"}
     ::yres/body "OK"}))

(defn- handle-team-features
  [cfg {:keys [params] :as request}]
  (let [team-id (some-> params :team-id d/parse-uuid)
@@ -576,8 +549,6 @@
     {:handler (partial set-virtual-clock cfg)}]
    ["/resend-email-verification"
     {:handler (partial resend-email-notification cfg)}]
    ["/reset-file-version"
     {:handler (partial reset-file-version cfg)}]
    ["/handle-team-features"
     {:handler (partial handle-team-features cfg)}]
    ["/file-export" {:handler (partial export-handler cfg)}]

@@ -447,7 +447,10 @@
   :fn (mg/resource "app/migrations/sql/0140-add-locked-by-column-to-file-change-table.sql")}

  {:name "0141-add-idx-to-file-library-rel"
   :fn (mg/resource "app/migrations/sql/0141-add-idx-to-file-library-rel.sql")}])
   :fn (mg/resource "app/migrations/sql/0141-add-idx-to-file-library-rel.sql")}

  {:name "0141-add-file-data-table.sql"
   :fn (mg/resource "app/migrations/sql/0141-add-file-data-table.sql")}])

(defn apply-migrations!
  [pool name migrations]

@@ -10,8 +10,8 @@
   [app.common.exceptions :as ex]
   [app.common.logging :as l]
   [app.common.pprint]
   [app.srepl.fixes.media-refs :refer [process-file]]
   [app.srepl.main :as srepl]
   [app.srepl.procs.media-refs]
   [clojure.edn :as edn]))

(def ^:private required-services
@@ -20,7 +20,10 @@
   :app.storage/storage
   :app.metrics/metrics
   :app.db/pool
   :app.worker/executor])
   :app.worker/netty-io-executor])

(def default-options
  {:rollback? false})

(defn -main
  [& [options]]
@@ -28,22 +31,20 @@
  (let [config-var (requiring-resolve 'app.main/system-config)
        start-var  (requiring-resolve 'app.main/start-custom)
        stop-var   (requiring-resolve 'app.main/stop)
        config     (select-keys @config-var required-services)]

        config     (select-keys @config-var required-services)
        options    (if (string? options)
                     (ex/ignoring (edn/read-string options))
                     {})
        options    (-> (merge default-options options)
                       (assoc :proc-fn #'app.srepl.procs.media-refs/fix-media-refs))]

    (start-var config)

    (let [options (if (string? options)
                    (ex/ignoring (edn/read-string options))
                    {})]

      (l/inf :hint "executing media-refs migration" :options options)
      (srepl/process-files! process-file options))

    (l/inf :hint "executing media-refs migration" :options options)
    (srepl/process! options)
    (stop-var)
    (System/exit 0))
  (catch Throwable cause
    (ex/print-throwable cause)
    (flush)
    (System/exit -1))))

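The options argument is an EDN string taken from the command line; a minimal sketch of the parsing step above (the helper name parse-options is invented for illustration, while edn/read-string, ex/ignoring, and default-options come from the code itself):

;; Parses the optional EDN string; a value that fails to parse yields
;; nil, so the defaults win after the merge.
(defn- parse-options
  [options]
  (let [opts (if (string? options)
               (ex/ignoring (edn/read-string options))
               {})]
    (merge default-options opts)))

;; (parse-options "{:rollback? true}") => {:rollback? true}
;; (parse-options nil)                 => {:rollback? false}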
38
backend/src/app/migrations/sql/0141-add-file-data-table.sql
Normal file
@@ -0,0 +1,38 @@
CREATE TABLE file_data (
    file_id uuid NOT NULL REFERENCES file(id) DEFERRABLE,
    id uuid NOT NULL,

    created_at timestamptz NOT NULL DEFAULT now(),
    modified_at timestamptz NOT NULL DEFAULT now(),
    deleted_at timestamptz NULL,

    type text NOT NULL,
    backend text NULL,

    metadata jsonb NULL,
    data bytea NULL,

    PRIMARY KEY (file_id, id)

) PARTITION BY HASH (file_id);

CREATE TABLE file_data_00 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 0);
CREATE TABLE file_data_01 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 1);
CREATE TABLE file_data_02 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 2);
CREATE TABLE file_data_03 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 3);
CREATE TABLE file_data_04 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 4);
CREATE TABLE file_data_05 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 5);
CREATE TABLE file_data_06 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 6);
CREATE TABLE file_data_07 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 7);
CREATE TABLE file_data_08 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 8);
CREATE TABLE file_data_09 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 9);
CREATE TABLE file_data_10 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 10);
CREATE TABLE file_data_11 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 11);
CREATE TABLE file_data_12 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 12);
CREATE TABLE file_data_13 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 13);
CREATE TABLE file_data_14 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 14);
CREATE TABLE file_data_15 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 15);

CREATE INDEX file_data__deleted_at__idx
    ON file_data (deleted_at, file_id, id)
 WHERE deleted_at IS NOT NULL;
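Rows are routed to one of the sixteen partitions by hashing file_id, so every data object of a given file lands in the same partition. A quick way to inspect that routing from a REPL (a sketch only: it assumes a conn and file-id in scope and relies on PostgreSQL's tableoid pseudo-column; the result shown is made up):

;; Shows which physical partition currently holds the rows of a file.
(db/exec! conn
          ["SELECT tableoid::regclass AS partition, count(*) AS total
              FROM file_data
             WHERE file_id = ?
             GROUP BY 1"
           file-id])
;; => [{:partition "file_data_07", :total 3}]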
@@ -239,7 +239,6 @@
   'app.rpc.commands.files
   'app.rpc.commands.files-create
   'app.rpc.commands.files-share
   'app.rpc.commands.files-temp
   'app.rpc.commands.files-update
   'app.rpc.commands.files-snapshot
   'app.rpc.commands.files-thumbnails

@@ -6,6 +6,7 @@

(ns app.rpc.commands.comments
  (:require
   [app.binfile.common :as bfc]
   [app.common.data :as d]
   [app.common.data.macros :as dm]
   [app.common.exceptions :as ex]
@@ -163,34 +164,16 @@
(def xf-decode-row
  (map decode-row))

(def ^:private
  sql:get-file
  "SELECT f.id, f.modified_at, f.revn, f.features, f.name,
          f.project_id, p.team_id, f.data,
          f.data_ref_id, f.data_backend
     FROM file as f
    INNER JOIN project as p on (p.id = f.project_id)
    WHERE f.id = ?
      AND (f.deleted_at IS NULL OR f.deleted_at > now())")

(defn- get-file
  "A specialized version of get-file for comments module."
  [cfg file-id page-id]
  (let [file (db/exec-one! cfg [sql:get-file file-id])]
    (when-not file
      (ex/raise :type :not-found
                :code :object-not-found
                :hint "file not found"))

    (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
      (let [file (->> file
                      (files/decode-row)
                      (feat.fdata/resolve-file-data cfg))
            data (get file :data)]
        (-> file
            (assoc :page-name (dm/get-in data [:pages-index page-id :name]))
            (assoc :page-id page-id)
            (dissoc :data))))))
    (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
      (let [file (bfc/get-file cfg file-id)
            data (get file :data)]
        (-> file
            (assoc :page-name (dm/get-in data [:pages-index page-id :name]))
            (assoc :page-id page-id)
            (dissoc :data)))))

;; FIXME: rename
(defn- get-comment-thread

@@ -24,7 +24,6 @@
   [app.db :as db]
   [app.db.sql :as-alias sql]
   [app.features.fdata :as feat.fdata]
   [app.features.file-migrations :as feat.fmigr]
   [app.features.logical-deletion :as ldel]
   [app.loggers.audit :as-alias audit]
   [app.loggers.webhooks :as-alias webhooks]
@@ -54,12 +53,10 @@
  (ct/duration {:days 7}))

(defn decode-row
  [{:keys [data changes features] :as row}]
  [{:keys [features] :as row}]
  (when row
    (cond-> row
      features (assoc :features (db/decode-pgarray features #{}))
      changes  (assoc :changes (blob/decode changes))
      data     (assoc :data (blob/decode data)))))
      (db/pgarray? features) (assoc :features (db/decode-pgarray features #{})))))

(defn check-version!
  [file]

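One practical consequence of the new db/pgarray? guard above is that decode-row becomes safe to apply more than once; a small sketch (the row literal is hypothetical):

;; A row whose :features is already a Clojure set passes through
;; unchanged, so double-decoding is harmless.
(let [row {:features #{"components/v2" "fdata/objects-map"}}]
  (= row (decode-row row)))
;; => true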
@@ -209,85 +206,9 @@
   [:id ::sm/uuid]
   [:project-id {:optional true} ::sm/uuid]])

(defn- migrate-file
  [{:keys [::db/conn] :as cfg} {:keys [id] :as file} {:keys [read-only?]}]
  (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)
            pmap/*tracked* (pmap/create-tracked)]
    (let [libs (delay (bfc/get-resolved-file-libraries cfg file))

          ;; To avoid the unnecessary overhead of creating multiple
          ;; pointers and handling objects maps internally in their
          ;; worst case (when probably all shapes and all pointers
          ;; will be read anyway), we just realize/resolve them
          ;; before applying the migration to the file
          file (-> file
                   (update :data feat.fdata/process-pointers deref)
                   (update :data feat.fdata/process-objects (partial into {}))
                   (fmg/migrate-file libs))]

      (if (or read-only? (db/read-only? conn))
        file
        (let [;; When the file is migrated, we break the rule of not
              ;; performing mutations on get operations and update the
              ;; file with all migrations applied
              file (if (contains? (:features file) "fdata/objects-map")
                     (feat.fdata/enable-objects-map file)
                     file)
              file (if (contains? (:features file) "fdata/pointer-map")
                     (feat.fdata/enable-pointer-map file)
                     file)]

          (db/update! conn :file
                      {:data (blob/encode (:data file))
                       :version (:version file)
                       :features (db/create-array conn "text" (:features file))}
                      {:id id}
                      {::db/return-keys false})

          (when (contains? (:features file) "fdata/pointer-map")
            (feat.fdata/persist-pointers! cfg id))

          (feat.fmigr/upsert-migrations! conn file)
          (feat.fmigr/resolve-applied-migrations cfg file))))))

(defn get-file
  [{:keys [::db/conn] :as cfg} id
   & {:keys [project-id
             migrate?
             include-deleted?
             lock-for-update?
             preload-pointers?]
      :or {include-deleted? false
           lock-for-update? false
           migrate? true
           preload-pointers? false}
      :as options}]

  (assert (db/connection? conn) "expected cfg with valid connection")

  (let [params (merge {:id id}
                      (when (some? project-id)
                        {:project-id project-id}))
        file   (->> (db/get conn :file params
                            {::db/check-deleted (not include-deleted?)
                             ::db/remove-deleted (not include-deleted?)
                             ::sql/for-update lock-for-update?})
                    (feat.fmigr/resolve-applied-migrations cfg)
                    (feat.fdata/resolve-file-data cfg)
                    (decode-row))

        file   (if (and migrate? (fmg/need-migration? file))
                 (migrate-file cfg file options)
                 file)]

    (if preload-pointers?
      (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
        (update file :data feat.fdata/process-pointers deref))

      file)))

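For reference, the keyword options of the get-file shown above (the variant this patch removes in favor of bfc/get-file) compose like ordinary call-site keywords; the ids here are placeholders:

;; Typical call shapes; file-id and project-id are hypothetical values.
(get-file cfg file-id :project-id project-id :lock-for-update? true)
(get-file cfg file-id :migrate? false :include-deleted? true)
(get-file cfg file-id :preload-pointers? true)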
(defn get-minimal-file
  [cfg id & {:as opts}]
  (let [opts (assoc opts ::sql/columns [:id :modified-at :deleted-at :revn :vern :data-ref-id :data-backend])]
  (let [opts (assoc opts ::sql/columns [:id :modified-at :deleted-at :revn :vern])]
    (db/get cfg :file {:id id} opts)))

(defn- get-minimal-file-with-perms
@@ -327,9 +248,9 @@
|
||||
:project-id project-id
|
||||
:file-id id)
|
||||
|
||||
file (-> (get-file cfg id :project-id project-id)
|
||||
file (-> (bfc/get-file cfg id
|
||||
:project-id project-id)
|
||||
(assoc :permissions perms)
|
||||
(assoc :team-id (:id team))
|
||||
(check-version!))]
|
||||
|
||||
(-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
@@ -343,8 +264,7 @@
|
||||
;; return a complete file
|
||||
(if (and (contains? (:features file) "fdata/pointer-map")
|
||||
(not (contains? (:features params) "fdata/pointer-map")))
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(update file :data feat.fdata/process-pointers deref))
|
||||
(feat.fdata/realize-pointers cfg file)
|
||||
file)
|
||||
|
||||
;; This operation is needed for backward compatibility with
@@ -352,7 +272,7 @@
;; just converts all objects map instances to plain maps
|
||||
(if (and (contains? (:features file) "fdata/objects-map")
|
||||
(not (contains? (:features params) "fdata/objects-map")))
|
||||
(update file :data feat.fdata/process-objects (partial into {}))
|
||||
(feat.fdata/realize-objects cfg file)
|
||||
file)))))
|
||||
|
||||
;; --- COMMAND QUERY: get-file-fragment (by id)
|
||||
@@ -372,10 +292,8 @@
|
||||
|
||||
(defn- get-file-fragment
|
||||
[cfg file-id fragment-id]
|
||||
(let [resolve-file-data (partial feat.fdata/resolve-file-data cfg)]
|
||||
(some-> (db/get cfg :file-data-fragment {:file-id file-id :id fragment-id})
|
||||
(resolve-file-data)
|
||||
(update :data blob/decode))))
|
||||
(some-> (db/get cfg :file-data {:file-id file-id :id fragment-id :type "fragment"})
|
||||
(update :data blob/decode)))
|
||||
|
||||
(sv/defmethod ::get-file-fragment
|
||||
"Retrieve a file fragment by its ID. Only authenticated users."
|
||||
@@ -534,7 +452,7 @@
|
||||
|
||||
(let [perms (get-permissions conn profile-id file-id share-id)
|
||||
|
||||
file (get-file cfg file-id :read-only? true)
|
||||
file (bfc/get-file cfg file-id :read-only? true)
|
||||
|
||||
proj (db/get conn :project {:id (:project-id file)})
|
||||
|
||||
@@ -608,81 +526,68 @@
|
||||
{:components components
|
||||
:variant-ids variant-ids}))
|
||||
|
||||
;;coalesce(string_agg(flr.library_file_id::text, ','), '') as library_file_ids
|
||||
(def ^:private sql:team-shared-files
|
||||
"with file_library_agg as (
|
||||
select flr.file_id,
|
||||
coalesce(array_agg(flr.library_file_id) filter (where flr.library_file_id is not null), '{}') as library_file_ids
|
||||
from file_library_rel flr
|
||||
group by flr.file_id
|
||||
"WITH file_library_agg AS (
|
||||
SELECT flr.file_id,
|
||||
coalesce(array_agg(flr.library_file_id) filter (WHERE flr.library_file_id IS NOT NULL), '{}') AS library_file_ids
|
||||
FROM file_library_rel flr
|
||||
GROUP BY flr.file_id
|
||||
)
|
||||
|
||||
select f.id,
|
||||
f.revn,
|
||||
f.vern,
|
||||
f.data,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.data_backend,
|
||||
f.data_ref_id,
|
||||
f.name,
|
||||
f.version,
|
||||
f.is_shared,
|
||||
ft.media_id,
|
||||
p.team_id,
|
||||
fla.library_file_ids
|
||||
from file as f
|
||||
inner join project as p on (p.id = f.project_id)
|
||||
left join file_thumbnail as ft on (ft.file_id = f.id and ft.revn = f.revn and ft.deleted_at is null)
|
||||
left join file_library_agg as fla on fla.file_id = f.id
|
||||
where f.is_shared = true
|
||||
and f.deleted_at is null
|
||||
and p.deleted_at is null
|
||||
and p.team_id = ?
|
||||
order by f.modified_at desc")
|
||||
SELECT f.id,
|
||||
fla.library_file_ids,
|
||||
ft.media_id AS thumbnail_id
|
||||
FROM file AS f
|
||||
INNER JOIN project AS p ON (p.id = f.project_id)
|
||||
LEFT JOIN file_thumbnail AS ft ON (ft.file_id = f.id AND ft.revn = f.revn AND ft.deleted_at IS NULL)
|
||||
LEFT JOIN file_library_agg AS fla ON (fla.file_id = f.id)
|
||||
WHERE f.is_shared = true
|
||||
AND f.deleted_at is null
|
||||
AND p.deleted_at is null
|
||||
AND p.team_id = ?
|
||||
ORDER BY f.modified_at DESC")
|
||||
|
||||
(defn- get-library-summary
|
||||
[cfg {:keys [id data] :as file}]
|
||||
(letfn [(assets-sample [assets limit]
|
||||
(let [sorted-assets (->> (vals assets)
|
||||
(sort-by #(str/lower (:name %))))]
|
||||
{:count (count sorted-assets)
|
||||
:sample (into [] (take limit sorted-assets))}))]
|
||||
[{:keys [data] :as file}]
|
||||
(let [assets-sample
|
||||
(fn [assets limit]
|
||||
(let [sorted-assets (->> (vals assets)
|
||||
(sort-by #(str/lower (:name %))))]
|
||||
{:count (count sorted-assets)
|
||||
:sample (into [] (take limit sorted-assets))}))
|
||||
load-objects
|
||||
(fn [component]
|
||||
(ctf/load-component-objects data component))
|
||||
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(let [load-objects (fn [component]
|
||||
(ctf/load-component-objects data component))
|
||||
comps-and-variants (components-and-variants (ctkl/components-seq data))
|
||||
components (into {} (map (juxt :id identity) (:components comps-and-variants)))
|
||||
components-sample (-> (assets-sample components 4)
|
||||
(update :sample #(mapv load-objects %))
|
||||
(assoc :variants-count (-> comps-and-variants :variant-ids count)))]
|
||||
{:components components-sample
|
||||
:media (assets-sample (:media data) 3)
|
||||
:colors (assets-sample (:colors data) 3)
|
||||
:typographies (assets-sample (:typographies data) 3)}))))
|
||||
comps-and-variants
|
||||
(components-and-variants (ctkl/components-seq data))
|
||||
|
||||
components
|
||||
(into {} (map (juxt :id identity) (:components comps-and-variants)))
|
||||
|
||||
components-sample
|
||||
(-> (assets-sample components 4)
|
||||
(update :sample #(mapv load-objects %))
|
||||
(assoc :variants-count (-> comps-and-variants :variant-ids count)))]
|
||||
|
||||
{:components components-sample
|
||||
:media (assets-sample (:media data) 3)
|
||||
:colors (assets-sample (:colors data) 3)
|
||||
:typographies (assets-sample (:typographies data) 3)}))
|
||||
|
||||
(defn- get-team-shared-files
|
||||
[{:keys [::db/conn] :as cfg} {:keys [team-id profile-id]}]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(->> (db/exec! conn [sql:team-shared-files team-id])
|
||||
(into #{} (comp
|
||||
;; NOTE: this decode operation is a workaround for a
|
||||
;; fast fix, this should be approached with a more
|
||||
;; efficient implementation, for now it loads all
|
||||
;; the files in memory.
|
||||
(map (partial bfc/decode-file cfg))
|
||||
(map (fn [row]
|
||||
(if-let [media-id (:media-id row)]
|
||||
(-> row
|
||||
(dissoc :media-id)
|
||||
(assoc :thumbnail-id media-id))
|
||||
(dissoc row :media-id))))
|
||||
(map (fn [row]
|
||||
(update row :library-file-ids db/decode-pgarray #{})))
|
||||
(map #(assoc % :library-summary (get-library-summary cfg %)))
|
||||
(map #(dissoc % :data))))))
|
||||
(let [xform (map (fn [{:keys [id library-file-ids]}]
|
||||
(let [file (bfc/get-file cfg id :migrate? false)
|
||||
summ (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(get-library-summary file))]
|
||||
(-> file
|
||||
(dissoc :data)
|
||||
(assoc :library-file-ids (db/decode-pgarray library-file-ids #{}))
|
||||
(assoc :library-summary summ)))))]
|
||||
(->> (db/plan conn [sql:team-shared-files team-id] {:fetch-size 1})
|
||||
(transduce xform conj #{}))))
|
||||
|
||||
(def ^:private schema:get-team-shared-files
|
||||
[:map {:title "get-team-shared-files"}
|
||||
@@ -795,9 +700,9 @@
|
||||
:project-id project-id
|
||||
:file-id id)
|
||||
|
||||
file (get-file cfg id
|
||||
:project-id project-id
|
||||
:read-only? true)]
|
||||
file (bfc/get-file cfg id
|
||||
:project-id project-id
|
||||
:read-only? true)]
|
||||
|
||||
(-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params))
|
||||
@@ -887,7 +792,7 @@
|
||||
|
||||
;; --- MUTATION COMMAND: set-file-shared
|
||||
|
||||
(def sql:get-referenced-files
|
||||
(def ^:private sql:get-referenced-files
|
||||
"SELECT f.id
|
||||
FROM file_library_rel AS flr
|
||||
INNER JOIN file AS f ON (f.id = flr.file_id)
|
||||
@@ -898,56 +803,51 @@
|
||||
(defn- absorb-library-by-file!
|
||||
[cfg ldata file-id]
|
||||
|
||||
(dm/assert!
|
||||
"expected cfg with valid connection"
|
||||
(db/connection-map? cfg))
|
||||
(assert (db/connection-map? cfg)
|
||||
"expected cfg with valid connection")
|
||||
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)
|
||||
pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (-> (get-file cfg file-id
|
||||
:include-deleted? true
|
||||
:lock-for-update? true)
|
||||
(let [file (-> (bfc/get-file cfg file-id
|
||||
:include-deleted? true
|
||||
:lock-for-update? true)
|
||||
(update :data ctf/absorb-assets ldata))]
|
||||
|
||||
(l/trc :hint "library absorbed"
|
||||
:library-id (str (:id ldata))
|
||||
:file-id (str file-id))
|
||||
|
||||
(db/update! cfg :file
|
||||
{:revn (inc (:revn file))
|
||||
:data (blob/encode (:data file))
|
||||
:modified-at (ct/now)
|
||||
:has-media-trimmed false}
|
||||
{:id file-id})
|
||||
|
||||
(feat.fdata/persist-pointers! cfg file-id))))
|
||||
(bfc/update-file! cfg {:id file-id
|
||||
:migrations (:migrations file)
|
||||
:revn (inc (:revn file))
|
||||
:data (:data file)
|
||||
:modified-at (ct/now)
|
||||
:has-media-trimmed false}))))
|
||||
|
||||
(defn- absorb-library
|
||||
"Find all files using a shared library, and absorb all library assets
|
||||
into the file local libraries"
|
||||
[cfg {:keys [id] :as library}]
|
||||
[cfg {:keys [id data] :as library}]
|
||||
|
||||
(dm/assert!
|
||||
"expected cfg with valid connection"
|
||||
(db/connection-map? cfg))
|
||||
(assert (db/connection-map? cfg)
|
||||
"expected cfg with valid connection")
|
||||
|
||||
(let [ldata (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(-> library :data (feat.fdata/process-pointers deref)))
|
||||
ids (->> (db/exec! cfg [sql:get-referenced-files id])
|
||||
(map :id))]
|
||||
(let [ids (->> (db/exec! cfg [sql:get-referenced-files id])
|
||||
(sequence bfc/xf-map-id))]
|
||||
|
||||
(l/trc :hint "absorbing library"
|
||||
:library-id (str id)
|
||||
:files (str/join "," (map str ids)))
|
||||
|
||||
(run! (partial absorb-library-by-file! cfg ldata) ids)
|
||||
(run! (partial absorb-library-by-file! cfg data) ids)
|
||||
library))
|
||||
|
||||
(defn absorb-library!
|
||||
[{:keys [::db/conn] :as cfg} id]
|
||||
(let [file (-> (get-file cfg id
|
||||
:lock-for-update? true
|
||||
:include-deleted? true)
|
||||
(let [file (-> (bfc/get-file cfg id
|
||||
:realize? true
|
||||
:lock-for-update? true
|
||||
:include-deleted? true)
|
||||
(check-version!))
|
||||
|
||||
proj (db/get* conn :project {:id (:project-id file)}
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.types.file :as ctf]
|
||||
@@ -45,12 +46,14 @@
|
||||
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)
|
||||
cfeat/*current* features]
|
||||
|
||||
(let [file (ctf/make-file {:id id
|
||||
:project-id project-id
|
||||
:name name
|
||||
:revn revn
|
||||
:is-shared is-shared
|
||||
:features features
|
||||
:migrations fmg/available-migrations
|
||||
:ignore-sync-until ignore-sync-until
|
||||
:created-at modified-at
|
||||
:deleted-at deleted-at}
|
||||
@@ -66,7 +69,7 @@
|
||||
{:modified-at (ct/now)}
|
||||
{:id project-id})
|
||||
|
||||
file)))
|
||||
(bfc/get-file cfg (:id file)))))
|
||||
|
||||
(def ^:private schema:create-file
|
||||
[:map {:title "create-file"}
|
||||
|
||||
@@ -8,52 +8,20 @@
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.features.file-migrations :refer [reset-migrations!]]
|
||||
[app.features.file-snapshots :as fsnap]
|
||||
[app.features.logical-deletion :as ldel]
|
||||
[app.main :as-alias main]
|
||||
[app.msgbus :as mbus]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.storage :as sto]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.services :as sv]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(defn decode-row
|
||||
[{:keys [migrations] :as row}]
|
||||
(when row
|
||||
(cond-> row
|
||||
(some? migrations)
|
||||
(assoc :migrations (db/decode-pgarray migrations)))))
|
||||
|
||||
(def sql:get-file-snapshots
|
||||
"WITH changes AS (
|
||||
SELECT id, label, revn, created_at, created_by, profile_id, locked_by
|
||||
FROM file_change
|
||||
WHERE file_id = ?
|
||||
AND data IS NOT NULL
|
||||
AND (deleted_at IS NULL OR deleted_at > now())
|
||||
), versions AS (
|
||||
(SELECT * FROM changes WHERE created_by = 'system' LIMIT 1000)
|
||||
UNION ALL
|
||||
(SELECT * FROM changes WHERE created_by != 'system' LIMIT 1000)
|
||||
)
|
||||
SELECT * FROM versions
|
||||
ORDER BY created_at DESC;")
|
||||
|
||||
(defn get-file-snapshots
|
||||
[conn file-id]
|
||||
(db/exec! conn [sql:get-file-snapshots file-id]))
|
||||
[app.util.services :as sv]))
|
||||
|
||||
(def ^:private schema:get-file-snapshots
|
||||
[:map {:title "get-file-snapshots"}
|
||||
@@ -65,73 +33,7 @@
|
||||
[cfg {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(files/check-read-permissions! conn profile-id file-id)
|
||||
(get-file-snapshots conn file-id))))
|
||||
|
||||
(defn- generate-snapshot-label
|
||||
[]
|
||||
(let [ts (-> (ct/now)
|
||||
(ct/format-inst)
|
||||
(str/replace #"[T:\.]" "-")
|
||||
(str/rtrim "Z"))]
|
||||
(str "snapshot-" ts)))
|
||||
|
||||
(defn create-file-snapshot!
|
||||
[cfg file & {:keys [label created-by deleted-at profile-id]
|
||||
:or {deleted-at :default
|
||||
created-by :system}}]
|
||||
|
||||
(assert (#{:system :user :admin} created-by)
|
||||
"expected valid keyword for created-by")
|
||||
|
||||
(let [created-by
|
||||
(name created-by)
|
||||
|
||||
deleted-at
|
||||
(cond
|
||||
(= deleted-at :default)
|
||||
(ct/plus (ct/now) (cf/get-deletion-delay))
|
||||
|
||||
(ct/inst? deleted-at)
|
||||
deleted-at
|
||||
|
||||
:else
|
||||
nil)
|
||||
|
||||
label
|
||||
(or label (generate-snapshot-label))
|
||||
|
||||
snapshot-id
|
||||
(uuid/next)
|
||||
|
||||
data
|
||||
(blob/encode (:data file))
|
||||
|
||||
features
|
||||
(into-array (:features file))
|
||||
|
||||
migrations
|
||||
(into-array (:migrations file))]
|
||||
|
||||
(l/dbg :hint "creating file snapshot"
|
||||
:file-id (str (:id file))
|
||||
:id (str snapshot-id)
|
||||
:label label)
|
||||
|
||||
(db/insert! cfg :file-change
|
||||
{:id snapshot-id
|
||||
:revn (:revn file)
|
||||
:data data
|
||||
:version (:version file)
|
||||
:features features
|
||||
:migrations migrations
|
||||
:profile-id profile-id
|
||||
:file-id (:id file)
|
||||
:label label
|
||||
:deleted-at deleted-at
|
||||
:created-by created-by}
|
||||
{::db/return-keys false})
|
||||
|
||||
{:id snapshot-id :label label}))
|
||||
(fsnap/get-visible-snapshots conn file-id))))
|
||||
|
||||
(def ^:private schema:create-file-snapshot
|
||||
[:map
|
||||
@@ -144,7 +46,7 @@
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id file-id label]}]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(let [file (bfc/get-file cfg file-id)
|
||||
(let [file (bfc/get-file cfg file-id :realize? true)
|
||||
project (db/get-by-id cfg :project (:project-id file))]
|
||||
|
||||
(-> cfg
|
||||
@@ -155,96 +57,10 @@
|
||||
(quotes/check! {::quotes/id ::quotes/snapshots-per-file}
|
||||
{::quotes/id ::quotes/snapshots-per-team}))
|
||||
|
||||
(create-file-snapshot! cfg file
|
||||
{:label label
|
||||
:profile-id profile-id
|
||||
:created-by :user})))
|
||||
|
||||
(defn restore-file-snapshot!
|
||||
[{:keys [::db/conn ::mbus/msgbus] :as cfg} file-id snapshot-id]
|
||||
(let [storage (sto/resolve cfg {::db/reuse-conn true})
|
||||
file (files/get-minimal-file conn file-id {::db/for-update true})
|
||||
vern (rand-int Integer/MAX_VALUE)
|
||||
snapshot (some->> (db/get* conn :file-change
|
||||
{:file-id file-id
|
||||
:id snapshot-id}
|
||||
{::db/for-share true})
|
||||
(feat.fdata/resolve-file-data cfg)
|
||||
(decode-row))
|
||||
|
||||
;; If snapshot has tracked applied migrations, we reuse them,
|
||||
;; if not we take a safest set of migrations as starting
|
||||
;; point. This is because, at the time of implementing
|
||||
;; snapshots, migrations were not taken into account so we
|
||||
;; need to make this backward compatible in some way.
|
||||
file (assoc file :migrations
|
||||
(or (:migrations snapshot)
|
||||
(fmg/generate-migrations-from-version 67)))]
|
||||
|
||||
(when-not snapshot
|
||||
(ex/raise :type :not-found
|
||||
:code :snapshot-not-found
|
||||
:hint "unable to find snapshot with the provided label"
|
||||
:snapshot-id snapshot-id
|
||||
:file-id file-id))
|
||||
|
||||
(when-not (:data snapshot)
|
||||
(ex/raise :type :validation
|
||||
:code :snapshot-without-data
|
||||
:hint "snapshot has no data"
|
||||
:label (:label snapshot)
|
||||
:file-id file-id))
|
||||
|
||||
(l/dbg :hint "restoring snapshot"
|
||||
:file-id (str file-id)
|
||||
:label (:label snapshot)
|
||||
:snapshot-id (str (:id snapshot)))
|
||||
|
||||
;; If the file was already offloaded, on restoring the snapshot we
|
||||
;; are going to replace the file data, so we need to touch the old
|
||||
;; referenced storage object and avoid possible leaks
|
||||
(when (feat.fdata/offloaded? file)
|
||||
(sto/touch-object! storage (:data-ref-id file)))
|
||||
|
||||
;; In the same way, on reseting the file data, we need to restore
|
||||
;; the applied migrations on the moment of taking the snapshot
|
||||
(reset-migrations! conn file)
|
||||
|
||||
(db/update! conn :file
|
||||
{:data (:data snapshot)
|
||||
:revn (inc (:revn file))
|
||||
:vern vern
|
||||
:version (:version snapshot)
|
||||
:data-backend nil
|
||||
:data-ref-id nil
|
||||
:has-media-trimmed false
|
||||
:features (:features snapshot)}
|
||||
{:id file-id})
|
||||
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "update file_tagged_object_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
;; clean file thumbnails
|
||||
(let [sql (str "update file_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
;; Send to the clients a notification to reload the file
|
||||
(mbus/pub! msgbus
|
||||
:topic (:id file)
|
||||
:message {:type :file-restore
|
||||
:file-id (:id file)
|
||||
:vern vern})
|
||||
{:id (:id snapshot)
|
||||
:label (:label snapshot)}))
|
||||
(fsnap/create! cfg file
|
||||
{:label label
|
||||
:profile-id profile-id
|
||||
:created-by "user"})))
|
||||
|
||||
(def ^:private schema:restore-file-snapshot
|
||||
[:map {:title "restore-file-snapshot"}
|
||||
@@ -253,88 +69,76 @@
|
||||
|
||||
(sv/defmethod ::restore-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:restore-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id file-id id] :as params}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(let [file (bfc/get-file cfg file-id)]
|
||||
(create-file-snapshot! cfg file
|
||||
{:profile-id profile-id
|
||||
:created-by :system})
|
||||
(restore-file-snapshot! cfg file-id id)))))
|
||||
::sm/params schema:restore-file-snapshot
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn ::mbus/msgbus] :as cfg} {:keys [::rpc/profile-id file-id id] :as params}]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(let [file (bfc/get-file cfg file-id)
|
||||
team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:file-id file-id)
|
||||
delay (ldel/get-deletion-delay team)]
|
||||
|
||||
(fsnap/create! cfg file
|
||||
{:profile-id profile-id
|
||||
:deleted-at (ct/in-future delay)
|
||||
:created-by "system"})
|
||||
|
||||
(let [vern (fsnap/restore! cfg file-id id)]
|
||||
;; Send to the clients a notification to reload the file
|
||||
(mbus/pub! msgbus
|
||||
:topic (:id file)
|
||||
:message {:type :file-restore
|
||||
:file-id (:id file)
|
||||
:vern vern})
|
||||
nil)))
|
||||
|
||||
(def ^:private schema:update-file-snapshot
|
||||
[:map {:title "update-file-snapshot"}
|
||||
[:id ::sm/uuid]
|
||||
[:label ::sm/text]])
|
||||
|
||||
(defn- update-file-snapshot!
|
||||
[conn snapshot-id label]
|
||||
(-> (db/update! conn :file-change
|
||||
{:label label
|
||||
:created-by "user"
|
||||
:deleted-at nil}
|
||||
{:id snapshot-id}
|
||||
{::db/return-keys true})
|
||||
(dissoc :data :features :migrations)))
|
||||
|
||||
(defn- get-snapshot
|
||||
"Get a minimal snapshot from database and lock for update"
|
||||
[conn id]
|
||||
(db/get conn :file-change
|
||||
{:id id}
|
||||
{::sql/columns [:id :file-id :created-by :deleted-at :profile-id :locked-by]
|
||||
::db/for-update true}))
|
||||
|
||||
(sv/defmethod ::update-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:update-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id id label]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(let [snapshot (get-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
(update-file-snapshot! conn id label)))))
|
||||
::sm/params schema:update-file-snapshot
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn]} {:keys [::rpc/profile-id id label]}]
|
||||
(let [snapshot (fsnap/get-minimal-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
(fsnap/update! conn (assoc snapshot :label label))))
|
||||
|
||||
(def ^:private schema:remove-file-snapshot
|
||||
[:map {:title "remove-file-snapshot"}
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(defn- delete-file-snapshot!
|
||||
[conn snapshot-id]
|
||||
(db/update! conn :file-change
|
||||
{:deleted-at (ct/now)}
|
||||
{:id snapshot-id}
|
||||
{::db/return-keys false})
|
||||
nil)
|
||||
|
||||
(sv/defmethod ::delete-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:remove-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id id]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(let [snapshot (get-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
::sm/params schema:remove-file-snapshot
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn]} {:keys [::rpc/profile-id id]}]
|
||||
(let [snapshot (fsnap/get-minimal-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
|
||||
(when (not= (:created-by snapshot) "user")
|
||||
(ex/raise :type :validation
|
||||
:code :system-snapshots-cant-be-deleted
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
(when (not= (:created-by snapshot) "user")
|
||||
(ex/raise :type :validation
|
||||
:code :system-snapshots-cant-be-deleted
|
||||
:file-id (:file-id snapshot)
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
|
||||
;; Check if version is locked by someone else
|
||||
(when (and (:locked-by snapshot)
|
||||
(not= (:locked-by snapshot) profile-id))
|
||||
(ex/raise :type :validation
|
||||
:code :snapshot-is-locked
|
||||
:hint "Cannot delete a locked version"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id
|
||||
:locked-by (:locked-by snapshot)))
|
||||
(when (and (some? (:locked-by snapshot))
|
||||
(not= (:locked-by snapshot) profile-id))
|
||||
(ex/raise :type :validation
|
||||
:code :snapshot-is-locked
|
||||
:file-id (:file-id snapshot)
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
|
||||
(delete-file-snapshot! conn id)))))
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:file-id (:file-id snapshot))
|
||||
delay (ldel/get-deletion-delay team)]
|
||||
(fsnap/delete! conn (assoc snapshot :deleted-at (ct/in-future delay))))))
|
||||
|
||||
;;; Lock/unlock version endpoints
|
||||
|
||||
@@ -342,93 +146,75 @@
|
||||
[:map {:title "lock-file-snapshot"}
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(defn- lock-file-snapshot!
|
||||
[conn snapshot-id profile-id]
|
||||
(db/update! conn :file-change
|
||||
{:locked-by profile-id}
|
||||
{:id snapshot-id}
|
||||
{::db/return-keys false})
|
||||
nil)
|
||||
|
||||
(sv/defmethod ::lock-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:lock-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id id]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(let [snapshot (get-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
::sm/params schema:lock-file-snapshot
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn]} {:keys [::rpc/profile-id id]}]
|
||||
(let [snapshot (fsnap/get-minimal-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
|
||||
(when (not= (:created-by snapshot) "user")
|
||||
(ex/raise :type :validation
|
||||
:code :system-snapshots-cant-be-locked
|
||||
:hint "Only user-created versions can be locked"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
(when (not= (:created-by snapshot) "user")
|
||||
(ex/raise :type :validation
|
||||
:code :system-snapshots-cant-be-locked
|
||||
:hint "Only user-created versions can be locked"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
|
||||
;; Only the creator can lock their own version
|
||||
(when (not= (:profile-id snapshot) profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :only-creator-can-lock
|
||||
:hint "Only the version creator can lock it"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id
|
||||
:creator-id (:profile-id snapshot)))
|
||||
;; Only the creator can lock their own version
|
||||
(when (not= (:profile-id snapshot) profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :only-creator-can-lock
|
||||
:hint "Only the version creator can lock it"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id
|
||||
:creator-id (:profile-id snapshot)))
|
||||
|
||||
;; Check if already locked
|
||||
(when (:locked-by snapshot)
|
||||
(ex/raise :type :validation
|
||||
:code :snapshot-already-locked
|
||||
:hint "Version is already locked"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id
|
||||
:locked-by (:locked-by snapshot)))
|
||||
;; Check if already locked
|
||||
(when (:locked-by snapshot)
|
||||
(ex/raise :type :validation
|
||||
:code :snapshot-already-locked
|
||||
:hint "Version is already locked"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id
|
||||
:locked-by (:locked-by snapshot)))
|
||||
|
||||
(lock-file-snapshot! conn id profile-id)))))
|
||||
(fsnap/lock-by! conn id profile-id)))
|
||||
|
||||
(def ^:private schema:unlock-file-snapshot
|
||||
[:map {:title "unlock-file-snapshot"}
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(defn- unlock-file-snapshot!
|
||||
[conn snapshot-id]
|
||||
(db/update! conn :file-change
|
||||
{:locked-by nil}
|
||||
{:id snapshot-id}
|
||||
{::db/return-keys false})
|
||||
nil)
|
||||
|
||||
(sv/defmethod ::unlock-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:unlock-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id id]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(let [snapshot (get-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
::sm/params schema:unlock-file-snapshot
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn]} {:keys [::rpc/profile-id id]}]
|
||||
(let [snapshot (fsnap/get-minimal-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
|
||||
(when (not= (:created-by snapshot) "user")
|
||||
(ex/raise :type :validation
|
||||
:code :system-snapshots-cant-be-unlocked
|
||||
:hint "Only user-created versions can be unlocked"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
(when (not= (:created-by snapshot) "user")
|
||||
(ex/raise :type :validation
|
||||
:code :system-snapshots-cant-be-unlocked
|
||||
:hint "Only user-created versions can be unlocked"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
|
||||
;; Only the creator can unlock their own version
|
||||
(when (not= (:profile-id snapshot) profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :only-creator-can-unlock
|
||||
:hint "Only the version creator can unlock it"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id
|
||||
:creator-id (:profile-id snapshot)))
|
||||
;; Only the creator can unlock their own version
|
||||
(when (not= (:profile-id snapshot) profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :only-creator-can-unlock
|
||||
:hint "Only the version creator can unlock it"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id
|
||||
:creator-id (:profile-id snapshot)))
|
||||
|
||||
;; Check if not locked
|
||||
(when (not (:locked-by snapshot))
|
||||
(ex/raise :type :validation
|
||||
:code :snapshot-not-locked
|
||||
:hint "Version is not locked"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
;; Check if not locked
|
||||
(when (not (:locked-by snapshot))
|
||||
(ex/raise :type :validation
|
||||
:code :snapshot-not-locked
|
||||
:hint "Version is not locked"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
|
||||
(unlock-file-snapshot! conn id)))))
|
||||
(fsnap/unlock! conn id)))
|
||||
|
||||
@@ -1,160 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.rpc.commands.files-temp
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.changes :as cpc]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.files-create :as files.create]
|
||||
[app.rpc.commands.files-update :as-alias files.update]
|
||||
[app.rpc.commands.projects :as projects]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[clojure.set :as set]))
|
||||
|
||||
;; --- MUTATION COMMAND: create-temp-file
|
||||
|
||||
(def ^:private schema:create-temp-file
|
||||
[:map {:title "create-temp-file"}
|
||||
[:name [:string {:max 250}]]
|
||||
[:project-id ::sm/uuid]
|
||||
[:id {:optional true} ::sm/uuid]
|
||||
[:is-shared ::sm/boolean]
|
||||
[:features ::cfeat/features]
|
||||
[:create-page ::sm/boolean]])
|
||||
|
||||
(sv/defmethod ::create-temp-file
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files
|
||||
::sm/params schema:create-temp-file
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id project-id] :as params}]
|
||||
(projects/check-edition-permissions! conn profile-id project-id)
|
||||
(let [team (teams/get-team conn :profile-id profile-id :project-id project-id)
|
||||
;; When we create files, we only need to respect the team
|
||||
;; features, because some features can be enabled
|
||||
;; globally, but the team is still not migrated properly.
|
||||
input-features
|
||||
(:features params #{})
|
||||
|
||||
;; If the imported project doesn't contain v2 we need to remove it
|
||||
team-features
|
||||
(cond-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(not (contains? input-features "components/v2"))
|
||||
(disj "components/v2"))
|
||||
|
||||
;; We also include all no migration features declared by
|
||||
;; client; that enables the ability to enable a runtime
|
||||
;; feature on frontend and make it permanent on file
|
||||
features
|
||||
(-> input-features
|
||||
(set/intersection cfeat/no-migration-features)
|
||||
(set/union team-features))
|
||||
|
||||
params
|
||||
(-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :deleted-at (ct/in-future {:days 1}))
|
||||
(assoc :features features))]
|
||||
|
||||
(files.create/create-file cfg params)))
|
||||
|
||||
;; --- MUTATION COMMAND: update-temp-file
|
||||
|
||||
(def ^:private schema:update-temp-file
|
||||
[:map {:title "update-temp-file"}
|
||||
[:changes [:vector cpc/schema:change]]
|
||||
[:revn [::sm/int {:min 0}]]
|
||||
[:session-id ::sm/uuid]
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::update-temp-file
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files
|
||||
::sm/params schema:update-temp-file}
|
||||
[cfg {:keys [::rpc/profile-id session-id id revn changes] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(db/insert! conn :file-change
|
||||
{:id (uuid/next)
|
||||
:session-id session-id
|
||||
:profile-id profile-id
|
||||
:created-at (ct/now)
|
||||
:file-id id
|
||||
:revn revn
|
||||
:data nil
|
||||
:changes (blob/encode changes)})
|
||||
(rph/with-meta (rph/wrap nil)
|
||||
{::audit/replace-props {:file-id id
|
||||
:revn revn}}))))
|
||||
|
||||
;; --- MUTATION COMMAND: persist-temp-file
|
||||
|
||||
(defn persist-temp-file
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as params}]
|
||||
(let [file (files/get-file cfg id
|
||||
:migrate? false
|
||||
:lock-for-update? true)]
|
||||
|
||||
(when (nil? (:deleted-at file))
|
||||
(ex/raise :type :validation
|
||||
:code :cant-persist-already-persisted-file))
|
||||
|
||||
(let [changes (->> (db/cursor conn
|
||||
(sql/select :file-change {:file-id id}
|
||||
{:order-by [[:revn :asc]]})
|
||||
{:chunk-size 10})
|
||||
(sequence (mapcat (comp blob/decode :changes))))
|
||||
|
||||
file (update file :data cpc/process-changes changes)
|
||||
|
||||
file (if (contains? (:features file) "fdata/objects-map")
|
||||
(fdata/enable-objects-map file)
|
||||
file)
|
||||
|
||||
file (if (contains? (:features file) "fdata/pointer-map")
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (fdata/enable-pointer-map file)]
|
||||
(fdata/persist-pointers! cfg id)
|
||||
file))
|
||||
file)]
|
||||
|
||||
;; Delete changes from the changes history
|
||||
(db/delete! conn :file-change {:file-id id})
|
||||
|
||||
(db/update! conn :file
|
||||
{:deleted-at nil
|
||||
:revn 1
|
||||
:data (blob/encode (:data file))}
|
||||
{:id id})
|
||||
nil)))
|
||||
|
||||
(def ^:private schema:persist-temp-file
|
||||
[:map {:title "persist-temp-file"}
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::persist-temp-file
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files
|
||||
::sm/params schema:persist-temp-file}
|
||||
[cfg {:keys [::rpc/profile-id id] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(persist-temp-file cfg params))))
|
||||
@@ -6,6 +6,7 @@
|
||||
|
||||
(ns app.rpc.commands.files-thumbnails
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.features :as cfeat]
|
||||
@@ -202,9 +203,9 @@
|
||||
:profile-id profile-id
|
||||
:file-id file-id)
|
||||
|
||||
file (files/get-file cfg file-id
|
||||
:preload-pointers? true
|
||||
:read-only? true)]
|
||||
file (bfc/get-file cfg file-id
|
||||
:realize? true
|
||||
:read-only? true)]
|
||||
|
||||
(-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-file-features! (:features file)))
|
||||
@@ -339,6 +340,7 @@
|
||||
data (-> (sto/content path)
|
||||
(sto/wrap-with-hash hash))
|
||||
tnow (ct/now)
|
||||
|
||||
media (sto/put-object! storage
|
||||
{::sto/content data
|
||||
::sto/deduplicate? true
|
||||
|
||||
@@ -19,8 +19,8 @@
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.features.file-migrations :as feat.fmigr]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.features.file-snapshots :as fsnap]
|
||||
[app.features.logical-deletion :as ldel]
|
||||
[app.http.errors :as errors]
|
||||
[app.loggers.audit :as audit]
|
||||
@@ -33,7 +33,6 @@
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.storage :as sto]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
@@ -127,76 +126,78 @@
|
||||
::sm/params schema:update-file
|
||||
::sm/result schema:update-file-result
|
||||
::doc/module :files
|
||||
::doc/added "1.17"}
|
||||
[{:keys [::mtx/metrics] :as cfg}
|
||||
::doc/added "1.17"
|
||||
::db/transaction true}
|
||||
[{:keys [::mtx/metrics ::db/conn] :as cfg}
|
||||
{:keys [::rpc/profile-id id changes changes-with-metadata] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(db/xact-lock! conn id)
|
||||
|
||||
(let [file (get-file conn id)
|
||||
team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:team-id (:team-id file))
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(db/xact-lock! conn id)
|
||||
|
||||
features (-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params))
|
||||
(cfeat/check-file-features! (:features file)))
|
||||
(let [file (get-file cfg id)
|
||||
team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:team-id (:team-id file))
|
||||
|
||||
changes (if changes-with-metadata
|
||||
(->> changes-with-metadata (mapcat :changes) vec)
|
||||
(vec changes))
|
||||
features (-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params))
|
||||
(cfeat/check-file-features! (:features file)))
|
||||
|
||||
params (-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :features (set/difference features cfeat/frontend-only-features))
|
||||
(assoc :team team)
|
||||
(assoc :file file)
|
||||
(assoc :changes changes))
|
||||
changes (if changes-with-metadata
|
||||
(->> changes-with-metadata (mapcat :changes) vec)
|
||||
(vec changes))
|
||||
|
||||
cfg (assoc cfg ::timestamp (ct/now))
|
||||
params (-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :features (set/difference features cfeat/frontend-only-features))
|
||||
(assoc :team team)
|
||||
(assoc :file file)
|
||||
(assoc :changes changes))
|
||||
|
||||
tpoint (ct/tpoint)]
|
||||
cfg (assoc cfg ::timestamp (ct/now))
|
||||
|
||||
(when (not= (:vern params)
|
||||
(:vern file))
|
||||
(ex/raise :type :validation
|
||||
:code :vern-conflict
|
||||
:hint "A different version has been restored for the file."
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
tpoint (ct/tpoint)]
|
||||
|
||||
(when (> (:revn params)
|
||||
(:revn file))
|
||||
(ex/raise :type :validation
|
||||
:code :revn-conflict
|
||||
:hint "The incoming revision number is greater that stored version."
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
(when (not= (:vern params)
|
||||
(:vern file))
|
||||
(ex/raise :type :validation
|
||||
:code :vern-conflict
|
||||
:hint "A different version has been restored for the file."
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
|
||||
;; When newly computed features does not match exactly with
|
||||
;; the features defined on team row, we update it
|
||||
(when-let [features (-> features
|
||||
(set/difference (:features team))
|
||||
(set/difference cfeat/no-team-inheritable-features)
|
||||
(not-empty))]
|
||||
(let [features (-> features
|
||||
(set/union (:features team))
|
||||
(set/difference cfeat/no-team-inheritable-features)
|
||||
(into-array))]
|
||||
(db/update! conn :team
|
||||
{:features features}
|
||||
{:id (:id team)}
|
||||
{::db/return-keys false})))
|
||||
(when (> (:revn params)
|
||||
(:revn file))
|
||||
(ex/raise :type :validation
|
||||
:code :revn-conflict
|
||||
:hint "The incoming revision number is greater that stored version."
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
|
||||
(mtx/run! metrics {:id :update-file-changes :inc (count changes)})
|
||||
;; When newly computed features does not match exactly with the
|
||||
;; features defined on team row, we update it
|
||||
(when-let [features (-> features
|
||||
(set/difference (:features team))
|
||||
(set/difference cfeat/no-team-inheritable-features)
|
||||
(not-empty))]
|
||||
(let [features (-> features
|
||||
(set/union (:features team))
|
||||
(set/difference cfeat/no-team-inheritable-features)
|
||||
(into-array))]
|
||||
(db/update! conn :team
|
||||
{:features features}
|
||||
{:id (:id team)}
|
||||
{::db/return-keys false})))
|
||||
|
||||
(binding [l/*context* (some-> (meta params)
|
||||
(get :app.http/request)
|
||||
(errors/request->context))]
|
||||
(-> (update-file* cfg params)
|
||||
(rph/with-defer #(let [elapsed (tpoint)]
|
||||
(l/trace :hint "update-file" :time (ct/format-duration elapsed))))))))))
|
||||
|
||||
(mtx/run! metrics {:id :update-file-changes :inc (count changes)})
|
||||
|
||||
(binding [l/*context* (some-> (meta params)
|
||||
(get :app.http/request)
|
||||
(errors/request->context))]
|
||||
(-> (update-file* cfg params)
|
||||
(rph/with-defer #(let [elapsed (tpoint)]
|
||||
(l/trace :hint "update-file" :time (ct/format-duration elapsed))))))))
|
||||
|
||||
(defn- update-file*
|
||||
"Internal function, part of the update-file process, that encapsulates
|
||||
@@ -209,24 +210,38 @@
|
||||
[{:keys [::db/conn ::timestamp] :as cfg}
|
||||
{:keys [profile-id file team features changes session-id skip-validate] :as params}]
|
||||
|
||||
(let [;; Retrieve the file data
|
||||
file (feat.fmigr/resolve-applied-migrations cfg file)
|
||||
file (feat.fdata/resolve-file-data cfg file)
|
||||
file (assoc file :features
|
||||
(-> features
|
||||
(set/difference cfeat/frontend-only-features)
|
||||
(set/union (:features file))))]
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)
|
||||
pmap/*load-fn* (partial fdata/load-pointer cfg (:id file))]
|
||||
|
||||
;; We create a new lexical scope to clearly delimit the result of
|
||||
;; executing this update file operation and all its side effects
|
||||
(let [file (binding [cfeat/*current* features
|
||||
cfeat/*previous* (:features file)]
|
||||
(update-file-data! cfg file
|
||||
process-changes-and-validate
|
||||
changes skip-validate))]
|
||||
(let [file (assoc file :features
|
||||
(-> features
|
||||
(set/difference cfeat/frontend-only-features)
|
||||
(set/union (:features file))))
|
||||
|
||||
(feat.fmigr/upsert-migrations! conn file)
|
||||
(persist-file! cfg file)
|
||||
;; We need to preserve the original revn for the response
|
||||
revn
|
||||
(get file :revn)
|
||||
|
||||
file
|
||||
(binding [cfeat/*current* features
|
||||
cfeat/*previous* (:features file)]
|
||||
(update-file-data! cfg file
|
||||
process-changes-and-validate
|
||||
changes skip-validate))
|
||||
|
||||
deleted-at
|
||||
(ct/plus timestamp (ct/duration {:hours 1}))]
|
||||
|
||||
(when-let [file (::snapshot file)]
|
||||
(let [deleted-at (ct/plus timestamp (ldel/get-deletion-delay team))
|
||||
label (str "internal/snapshot/" revn)]
|
||||
|
||||
(fsnap/create! cfg file
|
||||
{:label label
|
||||
:created-by "system"
|
||||
:deleted-at deleted-at
|
||||
:profile-id profile-id
|
||||
:session-id session-id})))
|
||||
|
||||
;; Insert change (xlog) with deleted_at set to a future date to
;; make them automatically eligible for GC once they expire
|
||||
@@ -236,87 +251,62 @@
|
||||
:profile-id profile-id
|
||||
:created-at timestamp
|
||||
:updated-at timestamp
|
||||
:deleted-at (if (::snapshot-data file)
|
||||
(ct/plus timestamp (ldel/get-deletion-delay team))
|
||||
(ct/plus timestamp (ct/duration {:hours 1})))
|
||||
:deleted-at deleted-at
|
||||
:file-id (:id file)
|
||||
:revn (:revn file)
|
||||
:version (:version file)
|
||||
:features (:features file)
|
||||
:label (::snapshot-label file)
|
||||
:data (::snapshot-data file)
|
||||
:features (into-array (:features file))
|
||||
:changes (blob/encode changes)}
|
||||
{::db/return-keys false})
|
||||
|
||||
(persist-file! cfg file)
|
||||
|
||||
;; Send asynchronous notifications
|
||||
(send-notifications! cfg params file))
|
||||
(send-notifications! cfg params file)
|
||||
|
||||
(when (feat.fdata/offloaded? file)
|
||||
(let [storage (sto/resolve cfg ::db/reuse-conn true)]
|
||||
(some->> (:data-ref-id file) (sto/touch-object! storage))))
|
||||
|
||||
(let [response {:revn (:revn file)
|
||||
:lagged (get-lagged-changes conn params)}]
|
||||
(vary-meta response assoc ::audit/replace-props
|
||||
{:id (:id file)
|
||||
:name (:name file)
|
||||
:features (:features file)
|
||||
:project-id (:project-id file)
|
||||
:team-id (:team-id file)}))))
|
||||
|
||||
(defn update-file!
  "A public API that applies a transformation to a file with all the update context set up."
  [{:keys [::db/conn] :as cfg} file-id update-fn & args]
  (let [file (get-file cfg file-id)
        file (apply update-file-data! cfg file update-fn args)]
    (feat.fmigr/upsert-migrations! conn file)
    (persist-file! cfg file)))
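;; Illustrative sketch only (not part of this diff): the update-fn passed to
;; update-file! receives the cfg map and the decoded file and must return the
;; modified file; cfg, file-id and the keys touched here are placeholders.
(comment
  (update-file! cfg file-id
                (fn [_cfg file]
                  (update file :data assoc :example-key :example-value))))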
|
||||
|
||||
(def ^:private sql:get-file
|
||||
"SELECT f.*, p.team_id
|
||||
FROM file AS f
|
||||
JOIN project AS p ON (p.id = f.project_id)
|
||||
WHERE f.id = ?
|
||||
AND (f.deleted_at IS NULL OR
|
||||
f.deleted_at > now())
|
||||
FOR KEY SHARE")
|
||||
(with-meta {:revn revn :lagged (get-lagged-changes conn params)}
|
||||
{::audit/replace-props
|
||||
{:id (:id file)
|
||||
:name (:name file)
|
||||
:features (:features file)
|
||||
:project-id (:project-id file)
|
||||
:team-id (:team-id file)}}))))
|
||||
|
||||
(defn get-file
  "Get the not-decoded file; only the features set is decoded."
  [conn id]
  (let [file (db/exec-one! conn [sql:get-file id])]
    (when-not file
      (ex/raise :type :not-found
                :code :object-not-found
                :hint (format "file with id '%s' does not exist" id)))
    (update file :features db/decode-pgarray #{})))
  [cfg id]
  (bfc/get-file cfg id :decode? false :lock-for-share? true))
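;; Illustrative sketch of the bfc/get-file option style used across this
;; refactor (cfg and the uuid are placeholders): :decode? false returns the
;; raw row with only the features set decoded, :realize? true is used where
;; the full file data is needed, and the :lock-for-* options select the row
;; lock.
(comment
  (let [file-id #uuid "00000000-0000-0000-0000-000000000000"]
    (bfc/get-file cfg file-id :decode? false :lock-for-share? true)
    (bfc/get-file cfg file-id :realize? true :lock-for-update? true)))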
|
||||
|
||||
(defn persist-file!
|
||||
"Function responsible of persisting already encoded file. Should be
|
||||
used together with `get-file` and `update-file-data!`.
|
||||
|
||||
It also updates the project modified-at attr."
|
||||
[{:keys [::db/conn ::timestamp]} file]
|
||||
[{:keys [::db/conn ::timestamp] :as cfg} file]
|
||||
(let [;; The timestamp can be nil because this function is also
|
||||
;; intended to be used outside of this module
|
||||
modified-at (or timestamp (ct/now))]
|
||||
modified-at
|
||||
(or timestamp (ct/now))
|
||||
|
||||
file
|
||||
(-> file
|
||||
(dissoc ::snapshot)
|
||||
(assoc :modified-at modified-at)
|
||||
(assoc :has-media-trimmed false))]
|
||||
|
||||
(db/update! conn :project
|
||||
{:modified-at modified-at}
|
||||
{:id (:project-id file)}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file
|
||||
{:revn (:revn file)
|
||||
:data (:data file)
|
||||
:version (:version file)
|
||||
:features (:features file)
|
||||
:data-backend nil
|
||||
:data-ref-id nil
|
||||
:modified-at modified-at
|
||||
:has-media-trimmed false}
|
||||
{:id (:id file)}
|
||||
{::db/return-keys false})))
|
||||
(bfc/update-file! cfg file)))
|
||||
|
||||
(defn- attach-snapshot
|
||||
"Attach snapshot data to the file. This should be called before the
|
||||
upcoming file operations are applied to the file."
|
||||
[cfg migrated? file]
|
||||
(let [snapshot (if migrated? file (fdata/realize cfg file))]
|
||||
(assoc file ::snapshot snapshot)))
|
||||
|
||||
(defn- update-file-data!
|
||||
"Perform a file data transformation in with all update context setup.
|
||||
@@ -328,52 +318,35 @@
|
||||
fdata/pointer-map modified fragments."
|
||||
|
||||
[cfg {:keys [id] :as file} update-fn & args]
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)
|
||||
pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(let [file (update file :data (fn [data]
|
||||
(-> data
|
||||
(blob/decode)
|
||||
(assoc :id (:id file)))))
|
||||
libs (delay (bfc/get-resolved-file-libraries cfg file))
|
||||
(let [file (update file :data (fn [data]
|
||||
(-> data
|
||||
(blob/decode)
|
||||
(assoc :id id))))
|
||||
libs (delay (bfc/get-resolved-file-libraries cfg file))
|
||||
|
||||
;; To avoid the unnecessary overhead of creating multiple pointers
;; and dealing internally with the objects map in its worst
;; case (when probably all shapes and all pointers will be
;; read anyway), we just realize/resolve them before
;; applying the migration to the file
|
||||
file (if (fmg/need-migration? file)
|
||||
(-> file
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(fmg/migrate-file libs))
|
||||
file)
|
||||
need-migration?
|
||||
(fmg/need-migration? file)
|
||||
|
||||
file (apply update-fn cfg file args)
|
||||
take-snapshot?
|
||||
(take-snapshot? file)
|
||||
|
||||
;; TODO: reuse operations if file is migrated
|
||||
;; TODO: move encoding to a separated thread
|
||||
file (if (take-snapshot? file)
|
||||
(let [tpoint (ct/tpoint)
|
||||
snapshot (-> (:data file)
|
||||
(feat.fdata/process-pointers deref)
|
||||
(feat.fdata/process-objects (partial into {}))
|
||||
(blob/encode))
|
||||
elapsed (tpoint)
|
||||
label (str "internal/snapshot/" (:revn file))]
|
||||
;; To avoid the unnecessary overhead of creating multiple
;; pointers and dealing internally with the objects map in its
;; worst case (when probably all shapes and all pointers
;; will be read anyway), we just realize/resolve them
;; before applying the migration to the file
|
||||
file
|
||||
(cond-> file
|
||||
;; need-migration?
|
||||
;; (->> (fdata/realize cfg))
|
||||
|
||||
(l/trc :hint "take snapshot"
|
||||
:file-id (str (:id file))
|
||||
:revn (:revn file)
|
||||
:label label
|
||||
:elapsed (ct/format-duration elapsed))
|
||||
need-migration?
|
||||
(fmg/migrate-file libs)
|
||||
|
||||
(-> file
|
||||
(assoc ::snapshot-data snapshot)
|
||||
(assoc ::snapshot-label label)))
|
||||
file)]
|
||||
|
||||
(bfc/encode-file cfg file))))
|
||||
take-snapshot?
|
||||
(->> (attach-snapshot cfg need-migration?)))]
|
||||
|
||||
(apply update-fn cfg file args)))
|
||||
|
||||
(defn- soft-validate-file-schema!
|
||||
[file]
|
||||
@@ -462,8 +435,9 @@
|
||||
(defn- get-lagged-changes
  [conn {:keys [id revn] :as params}]
  (->> (db/exec! conn [sql:lagged-changes id revn])
       (map files/decode-row)
       (vec)))
       (filter :changes)
       (mapv (fn [row]
               (update row :changes blob/decode)))))
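;; Minimal round-trip sketch for the blob codec used on the changes column;
;; the change map itself is a hypothetical example.
(comment
  (-> [{:type :mod-obj :id #uuid "00000000-0000-0000-0000-000000000000"}]
      (blob/encode)
      (blob/decode)))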
|
||||
|
||||
(defn- send-notifications!
|
||||
[cfg {:keys [team changes session-id] :as params} file]
|
||||
|
||||
@@ -35,7 +35,7 @@
|
||||
(defn duplicate-file
|
||||
[{:keys [::db/conn ::bfc/timestamp] :as cfg} {:keys [profile-id file-id name reset-shared-flag] :as params}]
|
||||
(let [;; We don't touch the original file on duplication
|
||||
file (bfc/get-file cfg file-id)
|
||||
file (bfc/get-file cfg file-id :realize? true)
|
||||
project-id (:project-id file)
|
||||
file (-> file
|
||||
(update :id bfc/lookup-index)
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
|
||||
(ns app.rpc.commands.teams-invitations
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
@@ -21,7 +22,6 @@
|
||||
[app.loggers.audit :as audit]
|
||||
[app.main :as-alias main]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
@@ -550,7 +550,7 @@
|
||||
"A specific method for obtain a file with name and page-id used for
|
||||
team request access procediment"
|
||||
[cfg file-id]
|
||||
(let [file (files/get-file cfg file-id :migrate? false)]
|
||||
(let [file (bfc/get-file cfg file-id :migrate? false)]
|
||||
(-> file
|
||||
(dissoc :data)
|
||||
(dissoc :deleted-at)
|
||||
|
||||
@@ -51,7 +51,7 @@
|
||||
|
||||
(defn- get-view-only-bundle
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id file-id ::perms] :as params}]
|
||||
(let [file (files/get-file cfg file-id)
|
||||
(let [file (bfc/get-file cfg file-id)
|
||||
|
||||
project (db/get conn :project
|
||||
{:id (:project-id file)}
|
||||
@@ -81,7 +81,7 @@
|
||||
|
||||
libs (->> (bfc/get-file-libraries conn file-id)
|
||||
(mapv (fn [{:keys [id] :as lib}]
|
||||
(merge lib (files/get-file cfg id)))))
|
||||
(merge lib (bfc/get-file cfg id)))))
|
||||
|
||||
links (->> (db/query conn :share-link {:file-id file-id})
|
||||
(mapv (fn [row]
|
||||
|
||||
@@ -1,278 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.srepl.fixes
|
||||
"A misc of fix functions"
|
||||
(:refer-clojure :exclude [parse-uuid])
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.files.changes :as cpc]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.files.repair :as cfr]
|
||||
[app.common.files.validate :as cfv]
|
||||
[app.common.logging :as l]
|
||||
[app.common.types.component :as ctk]
|
||||
[app.common.types.container :as ctn]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.srepl.helpers :as h]))
|
||||
|
||||
(defn disable-fdata-features
|
||||
[{:keys [id features] :as file} _]
|
||||
(when (or (contains? features "fdata/pointer-map")
|
||||
(contains? features "fdata/objects-map"))
|
||||
(l/warn :hint "disable fdata features" :file-id (str id))
|
||||
(-> file
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(update :features disj "fdata/pointer-map" "fdata/objects-map"))))
|
||||
|
||||
(def sql:get-fdata-files
|
||||
"SELECT id FROM file
|
||||
WHERE deleted_at is NULL
|
||||
AND (features @> '{fdata/pointer-map}' OR
|
||||
features @> '{fdata/objects-map}')
|
||||
ORDER BY created_at DESC")
|
||||
|
||||
(defn find-fdata-pointers
|
||||
[{:keys [id features data] :as file} _]
|
||||
(when (contains? features "fdata/pointer-map")
|
||||
(let [pointers (feat.fdata/get-used-pointer-ids data)]
|
||||
(l/warn :hint "found pointers" :file-id (str id) :pointers pointers)
|
||||
nil)))
|
||||
|
||||
(defn repair-file-media
|
||||
"A helper intended to be used with `srepl.main/process-files!` that
|
||||
fixes all not propertly referenced file-media-object for a file"
|
||||
[{:keys [id data] :as file} & _]
|
||||
(let [conn (db/get-connection h/*system*)
|
||||
used (cfh/collect-used-media data)
|
||||
ids (db/create-array conn "uuid" used)
|
||||
sql "SELECT * FROM file_media_object WHERE id = ANY(?)"
|
||||
rows (db/exec! conn [sql ids])
|
||||
index (reduce (fn [index media]
|
||||
(if (not= (:file-id media) id)
|
||||
(let [media-id (uuid/next)]
|
||||
(l/wrn :hint "found not referenced media"
|
||||
:file-id (str id)
|
||||
:media-id (str (:id media)))
|
||||
|
||||
(db/insert! conn :file-media-object
|
||||
(-> media
|
||||
(assoc :file-id id)
|
||||
(assoc :id media-id)))
|
||||
(assoc index (:id media) media-id))
|
||||
index))
|
||||
{}
|
||||
rows)]
|
||||
|
||||
(when (seq index)
|
||||
(binding [bfc/*state* (atom {:index index})]
|
||||
(update file :data (fn [fdata]
|
||||
(-> fdata
|
||||
(update :pages-index #'bfc/relink-shapes)
|
||||
(update :components #'bfc/relink-shapes)
|
||||
(update :media #'bfc/relink-media)
|
||||
(d/without-nils))))))))
|
||||
|
||||
|
||||
(defn repair-file
|
||||
"Internal helper for validate and repair the file. The operation is
|
||||
applied multiple times untile file is fixed or max iteration counter
|
||||
is reached (default 10)"
|
||||
[file libs & {:keys [max-iterations] :or {max-iterations 10}}]
|
||||
|
||||
(let [validate-and-repair
|
||||
(fn [file libs iteration]
|
||||
(when-let [errors (not-empty (cfv/validate-file file libs))]
|
||||
(l/trc :hint "repairing file"
|
||||
:file-id (str (:id file))
|
||||
:iteration iteration
|
||||
:errors (count errors))
|
||||
(let [changes (cfr/repair-file file libs errors)]
|
||||
(-> file
|
||||
(update :revn inc)
|
||||
(update :data cpc/process-changes changes)))))
|
||||
|
||||
process-file
|
||||
(fn [file libs]
|
||||
(loop [file file
|
||||
iteration 0]
|
||||
(if (< iteration max-iterations)
|
||||
(if-let [file (validate-and-repair file libs iteration)]
|
||||
(recur file (inc iteration))
|
||||
file)
|
||||
(do
|
||||
(l/wrn :hint "max retry num reached on repairing file"
|
||||
:file-id (str (:id file))
|
||||
:iteration iteration)
|
||||
file))))
|
||||
|
||||
file'
|
||||
(process-file file libs)]
|
||||
|
||||
(when (not= (:revn file) (:revn file'))
|
||||
(l/trc :hint "file repaired" :file-id (str (:id file))))
|
||||
|
||||
file'))
|
||||
|
||||
(defn fix-touched-shapes-group
|
||||
[file _]
|
||||
;; Remove :shapes-group from the touched elements
|
||||
(letfn [(fix-fdata [data]
|
||||
(-> data
|
||||
(update :pages-index update-vals fix-container)))
|
||||
|
||||
(fix-container [container]
|
||||
(d/update-when container :objects update-vals fix-shape))
|
||||
|
||||
(fix-shape [shape]
|
||||
(d/update-when shape :touched
|
||||
(fn [touched]
|
||||
(disj touched :shapes-group))))]
|
||||
file (-> file
|
||||
(update :data fix-fdata))))
|
||||
|
||||
(defn add-swap-slots
|
||||
[file libs _opts]
|
||||
;; Detect swapped copies and try to generate a valid swap-slot.
|
||||
(letfn [(process-fdata [data]
|
||||
;; Walk through all containers in the file, both pages and deleted components.
|
||||
(reduce process-container data (ctf/object-containers-seq data)))
|
||||
|
||||
(process-container [data container]
|
||||
;; Walk through all shapes in depth-first tree order.
|
||||
(l/dbg :hint "Processing container" :type (:type container) :name (:name container))
|
||||
(let [root-shape (ctn/get-container-root container)]
|
||||
(ctf/update-container data
|
||||
container
|
||||
#(reduce process-shape % (ctn/get-direct-children container root-shape)))))
|
||||
|
||||
(process-shape [container shape]
|
||||
;; Look for head copies in the first level (either component roots or inside main components).
|
||||
;; Even if they have been swapped, we don't add slot to them because there is no way to know
|
||||
;; the original shape. Only children.
|
||||
(if (and (ctk/instance-head? shape)
|
||||
(ctk/in-component-copy? shape)
|
||||
(nil? (ctk/get-swap-slot shape)))
|
||||
(process-copy-head container shape)
|
||||
(reduce process-shape container (ctn/get-direct-children container shape))))
|
||||
|
||||
(process-copy-head [container head-shape]
|
||||
;; Process recursively all children, comparing each one with the corresponding child in the main
|
||||
;; component, looking by position. If the shape-ref does not point to the found child, then it has
|
||||
;; been swapped and need to set up a slot.
|
||||
(l/trc :hint "Processing copy-head" :id (:id head-shape) :name (:name head-shape))
|
||||
(let [component-shape (ctf/find-ref-shape file container libs head-shape :include-deleted? true :with-context? true)
|
||||
component-container (:container (meta component-shape))]
|
||||
(loop [container container
|
||||
children (map #(ctn/get-shape container %) (:shapes head-shape))
|
||||
component-children (map #(ctn/get-shape component-container %) (:shapes component-shape))]
|
||||
(let [child (first children)
|
||||
component-child (first component-children)]
|
||||
(if (or (nil? child) (nil? component-child))
|
||||
container
|
||||
(let [container (if (and (not (ctk/is-main-of? component-child child))
|
||||
(nil? (ctk/get-swap-slot child))
|
||||
(ctk/instance-head? child))
|
||||
(let [slot (guess-swap-slot component-child component-container)]
|
||||
(l/dbg :hint "child" :id (:id child) :name (:name child) :slot slot)
|
||||
(ctn/update-shape container (:id child) #(ctk/set-swap-slot % slot)))
|
||||
container)]
|
||||
(recur (process-copy-head container child)
|
||||
(rest children)
|
||||
(rest component-children))))))))
|
||||
|
||||
(guess-swap-slot [shape container]
|
||||
;; To guess the slot, we must follow the chain until we find the definitive main. But
|
||||
;; we cannot navigate by shape-ref, because main shapes may also have been swapped. So
|
||||
;; chain by position, too.
|
||||
(if-let [slot (ctk/get-swap-slot shape)]
|
||||
slot
|
||||
(if-not (ctk/in-component-copy? shape)
|
||||
(:id shape)
|
||||
(let [head-copy (ctn/get-component-shape (:objects container) shape)]
|
||||
(if (= (:id head-copy) (:id shape))
|
||||
(:id shape)
|
||||
(let [head-main (ctf/find-ref-shape file
|
||||
container
|
||||
libs
|
||||
head-copy
|
||||
:include-deleted? true
|
||||
:with-context? true)
|
||||
container-main (:container (meta head-main))
|
||||
shape-main (find-match-by-position shape
|
||||
head-copy
|
||||
container
|
||||
head-main
|
||||
container-main)]
|
||||
(guess-swap-slot shape-main container-main)))))))
|
||||
|
||||
(find-match-by-position [shape-copy head-copy container-copy head-main container-main]
|
||||
;; Find the shape in the main that has the same position under its parent than
|
||||
;; the copy under its one. To get the parent we must process recursively until
|
||||
;; the component head, because mains may also have been swapped.
|
||||
(let [parent-copy (ctn/get-shape container-copy (:parent-id shape-copy))
|
||||
parent-main (if (= (:id parent-copy) (:id head-copy))
|
||||
head-main
|
||||
(find-match-by-position parent-copy
|
||||
head-copy
|
||||
container-copy
|
||||
head-main
|
||||
container-main))
|
||||
index (cfh/get-position-on-parent (:objects container-copy)
|
||||
(:id shape-copy))
|
||||
shape-main-id (dm/get-in parent-main [:shapes index])]
|
||||
(ctn/get-shape container-main shape-main-id)))]
|
||||
|
||||
file (-> file
|
||||
(update :data process-fdata))))
|
||||
|
||||
|
||||
|
||||
(defn fix-find-duplicated-slots
|
||||
[file _]
|
||||
;; Find the shapes whose children have duplicated slots
|
||||
(let [check-duplicate-swap-slot
|
||||
(fn [shape page]
|
||||
(let [shapes (map #(get (:objects page) %) (:shapes shape))
|
||||
slots (->> (map #(ctk/get-swap-slot %) shapes)
|
||||
(remove nil?))
|
||||
counts (frequencies slots)]
|
||||
#_(when (some (fn [[_ count]] (> count 1)) counts)
|
||||
(l/trc :info "This shape has children with the same swap slot" :id (:id shape) :file-id (str (:id file))))
|
||||
(some (fn [[_ count]] (> count 1)) counts)))
|
||||
|
||||
count-slots-shape
|
||||
(fn [page shape]
|
||||
(if (ctk/instance-root? shape)
|
||||
(check-duplicate-swap-slot shape page)
|
||||
false))
|
||||
|
||||
count-slots-page
|
||||
(fn [page]
|
||||
(->> (:objects page)
|
||||
(vals)
|
||||
(mapv #(count-slots-shape page %))
|
||||
(filter true?)
|
||||
count))
|
||||
|
||||
count-slots-data
|
||||
(fn [data]
|
||||
(->> (:pages-index data)
|
||||
(vals)
|
||||
(mapv count-slots-page)
|
||||
(reduce +)))
|
||||
|
||||
num-missing-slots (count-slots-data (:data file))]
|
||||
|
||||
(when (pos? num-missing-slots)
|
||||
(l/trc :info (str "Shapes with children with the same swap slot: " num-missing-slots) :file-id (str (:id file))))
|
||||
file))
|
||||
@@ -1,88 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.srepl.fixes.lost-colors
|
||||
"A collection of adhoc fixes scripts."
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.logging :as l]
|
||||
[app.common.types.color :as types.color]
|
||||
[app.db :as db]
|
||||
[app.srepl.helpers :as h]))
|
||||
|
||||
(def sql:get-affected-files
|
||||
"SELECT fm.file_id AS id FROM file_migration AS fm WHERE fm.name = '0008-fix-library-colors-v2'")
|
||||
|
||||
(def sql:get-matching-snapshot
|
||||
"SELECT * FROM file_change
|
||||
WHERE file_id = ?
|
||||
AND created_at <= ?
|
||||
AND label IS NOT NULL
|
||||
AND data IS NOT NULL
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 2")
|
||||
|
||||
(defn get-affected-migration
|
||||
[conn file-id]
|
||||
(db/get* conn :file-migration
|
||||
{:name "0008-fix-library-colors-v2"
|
||||
:file-id file-id}))
|
||||
|
||||
(defn get-last-valid-snapshot
|
||||
[conn migration]
|
||||
(let [[snapshot] (db/exec! conn [sql:get-matching-snapshot
|
||||
(:file-id migration)
|
||||
(:created-at migration)])]
|
||||
(when snapshot
|
||||
(let [snapshot (assoc snapshot :id (:file-id snapshot))]
|
||||
(bfc/decode-file h/*system* snapshot)))))
|
||||
|
||||
(defn restore-color
|
||||
[{:keys [data] :as snapshot} color]
|
||||
(when-let [scolor (get-in data [:colors (:id color)])]
|
||||
(-> (select-keys scolor types.color/library-color-attrs)
|
||||
(types.color/check-library-color))))
|
||||
|
||||
(defn restore-missing-colors
|
||||
[{:keys [id] :as file} & _opts]
|
||||
(l/inf :hint "process file" :file-id (str id) :name (:name file) :has-colors (-> file :data :colors not-empty boolean))
|
||||
(if-let [colors (-> file :data :colors not-empty)]
|
||||
(let [migration (get-affected-migration h/*system* id)]
|
||||
(if-let [snapshot (get-last-valid-snapshot h/*system* migration)]
|
||||
(do
|
||||
(l/inf :hint "using snapshot" :snapshot (:label snapshot))
|
||||
(let [colors (reduce-kv (fn [colors color-id color]
|
||||
(if-let [result (restore-color snapshot color)]
|
||||
(do
|
||||
(l/inf :hint "restored color" :file-id (str id) :color-id (str color-id))
|
||||
(assoc colors color-id result))
|
||||
(do
|
||||
(l/wrn :hint "ignoring color" :file-id (str id) :color (pr-str color))
|
||||
colors)))
|
||||
colors
|
||||
colors)
|
||||
file (-> file
|
||||
(update :data assoc :colors colors)
|
||||
(update :migrations disj "0008-fix-library-colors-v2"))]
|
||||
|
||||
(db/delete! h/*system* :file-migration
|
||||
{:name "0008-fix-library-colors-v2"
|
||||
:file-id (:id file)})
|
||||
file))
|
||||
|
||||
(do
|
||||
(db/delete! h/*system* :file-migration
|
||||
{:name "0008-fix-library-colors-v2"
|
||||
:file-id (:id file)})
|
||||
nil)))
|
||||
|
||||
(do
|
||||
(db/delete! h/*system* :file-migration
|
||||
{:name "0008-fix-library-colors-v2"
|
||||
:file-id (:id file)})
|
||||
nil)))
|
||||
|
||||
|
||||
@@ -14,9 +14,8 @@
|
||||
[app.common.files.validate :as cfv]
|
||||
[app.common.time :as ct]
|
||||
[app.db :as db]
|
||||
[app.main :as main]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.files-snapshot :as fsnap]))
|
||||
[app.features.file-snapshots :as fsnap]
|
||||
[app.main :as main]))
|
||||
|
||||
(def ^:dynamic *system* nil)
|
||||
|
||||
@@ -48,7 +47,7 @@
|
||||
([system id]
|
||||
(db/run! system
|
||||
(fn [system]
|
||||
(files/get-file system id :migrate? false)))))
|
||||
(bfc/get-file system id :decode? false)))))
|
||||
|
||||
(defn update-team!
|
||||
[system {:keys [id] :as team}]
|
||||
@@ -118,10 +117,10 @@
|
||||
(let [conn (db/get-connection system)]
|
||||
(->> (get-and-lock-team-files conn team-id)
|
||||
(reduce (fn [result file-id]
|
||||
(let [file (fsnap/get-file-snapshots system file-id)]
|
||||
(fsnap/create-file-snapshot! system file
|
||||
{:label label
|
||||
:created-by :admin})
|
||||
(let [file (bfc/get-file system file-id :realize? true :lock-for-update? true)]
|
||||
(fsnap/create! system file
|
||||
{:label label
|
||||
:created-by "admin"})
|
||||
(inc result)))
|
||||
0))))
|
||||
|
||||
@@ -132,21 +131,34 @@
|
||||
(into #{}))
|
||||
|
||||
snap (search-file-snapshots conn ids label)
|
||||
|
||||
ids' (into #{} (map :file-id) snap)]
|
||||
|
||||
(when (not= ids ids')
|
||||
(throw (RuntimeException. "no uniform snapshot available")))
|
||||
|
||||
(reduce (fn [result {:keys [file-id id]}]
|
||||
(fsnap/restore-file-snapshot! system file-id id)
|
||||
(fsnap/restore! system file-id id)
|
||||
(inc result))
|
||||
0
|
||||
snap)))
|
||||
|
||||
(defn mark-migrated!
|
||||
"A helper that inserts an entry in the file migration table for make
|
||||
file migrated for the specified migration label."
|
||||
[system file-id label]
|
||||
(db/insert! system :file-migration
|
||||
{:file-id file-id
|
||||
:name label}
|
||||
{::db/return-keys false}))
|
||||
|
||||
(defn process-file!
|
||||
[system file-id update-fn & {:keys [label validate? with-libraries?] :or {validate? true} :as opts}]
|
||||
(let [file (bfc/get-file system file-id ::db/for-update true)
|
||||
[system file-id update-fn
|
||||
& {:keys [::snapshot-label ::validate? ::with-libraries?]
|
||||
:or {validate? true} :as opts}]
|
||||
(let [file (bfc/get-file system file-id
|
||||
:lock-for-update? true
|
||||
:realize? true)
|
||||
|
||||
libs (when with-libraries?
|
||||
(bfc/get-resolved-file-libraries system file))
|
||||
|
||||
@@ -162,12 +174,12 @@
|
||||
(when validate?
|
||||
(cfv/validate-file-schema! file'))
|
||||
|
||||
(when (string? label)
|
||||
(fsnap/create-file-snapshot! system file
|
||||
{:label label
|
||||
:deleted-at (ct/in-future {:days 30})
|
||||
:created-by :admin}))
|
||||
(when (string? snapshot-label)
|
||||
(fsnap/create! system file
|
||||
{:label snapshot-label
|
||||
:deleted-at (ct/in-future {:days 30})
|
||||
:created-by "admin"}))
|
||||
|
||||
(let [file' (update file' :revn inc)]
|
||||
(bfc/update-file! system file')
|
||||
(bfc/update-file! system file' opts)
|
||||
true))))
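;; Illustrative call of this helper with its new namespaced options, written
;; as it would appear from app.srepl.main (file-id and the snapshot label are
;; placeholders):
(comment
  (db/tx-run! main/system h/process-file! file-id
              procs.file-repair/repair-file
              {::h/with-libraries? true
               ::h/validate? true
               ::h/snapshot-label "pre-repair"}))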
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.srepl.main
|
||||
"A collection of adhoc fixes scripts."
|
||||
#_:clj-kondo/ignore
|
||||
(:require
|
||||
[app.auth :refer [derive-password]]
|
||||
@@ -24,19 +23,19 @@
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.features.file-snapshots :as fsnap]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.main :as main]
|
||||
[app.msgbus :as mbus]
|
||||
[app.rpc.commands.auth :as auth]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.files-snapshot :as fsnap]
|
||||
[app.rpc.commands.management :as mgmt]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.commands.projects :as projects]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.srepl.fixes :as fixes]
|
||||
[app.srepl.helpers :as h]
|
||||
[app.srepl.procs.file-repair :as procs.file-repair]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.worker :as wrk]
|
||||
@@ -48,6 +47,7 @@
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.fs :as fs]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.csp :as sp]
|
||||
[promesa.exec.semaphore :as ps]
|
||||
[promesa.util :as pu]))
|
||||
|
||||
@@ -147,25 +147,6 @@
|
||||
;; FEATURES
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(declare process-file!)
|
||||
|
||||
(defn enable-objects-map-feature-on-file!
|
||||
[file-id & {:as opts}]
|
||||
(process-file! file-id feat.fdata/enable-objects-map opts))
|
||||
|
||||
(defn enable-pointer-map-feature-on-file!
|
||||
[file-id & {:as opts}]
|
||||
(process-file! file-id feat.fdata/enable-pointer-map opts))
|
||||
|
||||
(defn enable-path-data-feature-on-file!
|
||||
[file-id & {:as opts}]
|
||||
(process-file! file-id feat.fdata/enable-path-data opts))
|
||||
|
||||
(defn enable-storage-features-on-file!
|
||||
[file-id & {:as opts}]
|
||||
(enable-objects-map-feature-on-file! file-id opts)
|
||||
(enable-pointer-map-feature-on-file! file-id opts))
|
||||
|
||||
(defn enable-team-feature!
|
||||
[team-id feature & {:keys [skip-check] :or {skip-check false}}]
|
||||
(when (and (not skip-check) (not (contains? cfeat/supported-features feature)))
|
||||
@@ -339,7 +320,10 @@
|
||||
collectable file-changes entry."
|
||||
[& {:keys [file-id label]}]
|
||||
(let [file-id (h/parse-uuid file-id)]
|
||||
(db/tx-run! main/system fsnap/create-file-snapshot! {:file-id file-id :label label})))
|
||||
(db/tx-run! main/system
|
||||
(fn [cfg]
|
||||
(let [file (bfc/get-file cfg file-id :realize? true)]
|
||||
(fsnap/create! cfg file {:label label :created-by "admin"}))))))
|
||||
|
||||
(defn restore-file-snapshot!
|
||||
[file-id & {:keys [label id]}]
|
||||
@@ -349,13 +333,13 @@
|
||||
(fn [{:keys [::db/conn] :as system}]
|
||||
(cond
|
||||
(uuid? snapshot-id)
|
||||
(fsnap/restore-file-snapshot! system file-id snapshot-id)
|
||||
(fsnap/restore! system file-id snapshot-id)
|
||||
|
||||
(string? label)
|
||||
(->> (h/search-file-snapshots conn #{file-id} label)
|
||||
(map :id)
|
||||
(first)
|
||||
(fsnap/restore-file-snapshot! system file-id))
|
||||
(fsnap/restore! system file-id))
|
||||
|
||||
:else
|
||||
(throw (ex-info "snapshot id or label should be provided" {})))))))
|
||||
@@ -364,9 +348,9 @@
|
||||
[file-id & {:as _}]
|
||||
(let [file-id (h/parse-uuid file-id)]
|
||||
(db/tx-run! main/system
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(->> (fsnap/get-file-snapshots conn file-id)
|
||||
(print-table [:label :id :revn :created-at]))))))
|
||||
(fn [cfg]
|
||||
(->> (fsnap/get-visible-snapshots cfg file-id)
|
||||
(print-table [:label :id :revn :created-at :created-by]))))))
|
||||
|
||||
(defn take-team-snapshot!
|
||||
[team-id & {:keys [label rollback?] :or {rollback? true}}]
|
||||
@@ -413,24 +397,19 @@
|
||||
(println (sm/humanize-explain explain))
|
||||
(ex/print-throwable cause))))))))
|
||||
|
||||
(defn repair-file!
|
||||
"Repair the list of errors detected by validation."
|
||||
[file-id & {:keys [rollback?] :or {rollback? true} :as opts}]
|
||||
(let [system (assoc main/system ::db/rollback rollback?)
|
||||
file-id (h/parse-uuid file-id)
|
||||
opts (assoc opts :with-libraries? true)]
|
||||
(db/tx-run! system h/process-file! file-id fixes/repair-file opts)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PROCESSING
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def sql:get-files
|
||||
"SELECT id FROM file
|
||||
WHERE deleted_at is NULL
|
||||
ORDER BY created_at DESC")
|
||||
(defn repair-file!
|
||||
"Repair the list of errors detected by validation."
|
||||
[file-id & {:keys [rollback?] :or {rollback? true} :as options}]
|
||||
(let [system (assoc main/system ::db/rollback rollback?)
|
||||
file-id (h/parse-uuid file-id)
|
||||
options (assoc options ::h/with-libraries? true)]
|
||||
(db/tx-run! system h/process-file! file-id procs.file-repair/repair-file options)))
|
||||
|
||||
(defn process-file!
|
||||
(defn update-file!
|
||||
"Apply a function to the file. Optionally save the changes or not.
|
||||
The function receives the decoded and migrated file data."
|
||||
[file-id update-fn & {:keys [rollback?] :or {rollback? true} :as opts}]
|
||||
@@ -441,114 +420,117 @@
|
||||
db/*conn* (db/get-connection system)]
|
||||
(h/process-file! system file-id update-fn opts))))))
|
||||
|
||||
(defn process-team-files!
|
||||
"Apply a function to each file of the specified team."
|
||||
[team-id update-fn & {:keys [rollback? label] :or {rollback? true} :as opts}]
|
||||
(let [team-id (h/parse-uuid team-id)
|
||||
opts (dissoc opts :label)]
|
||||
(db/tx-run! (assoc main/system ::db/rollback rollback?)
|
||||
(fn [{:keys [::db/conn] :as system}]
|
||||
(when (string? label)
|
||||
(h/take-team-snapshot! system team-id label))
|
||||
(defn process!
|
||||
[& {:keys [max-items
|
||||
max-jobs
|
||||
rollback?
|
||||
query
|
||||
proc-fn
|
||||
buffer]
|
||||
:or {max-items Long/MAX_VALUE
|
||||
rollback? true
|
||||
max-jobs 1
|
||||
buffer 128}
|
||||
:as opts}]
|
||||
|
||||
(binding [h/*system* system
|
||||
db/*conn* (db/get-connection system)]
|
||||
(->> (h/get-and-lock-team-files conn team-id)
|
||||
(reduce (fn [result file-id]
|
||||
(if (h/process-file! system file-id update-fn opts)
|
||||
(inc result)
|
||||
result))
|
||||
0)))))))
|
||||
|
||||
(defn process-files!
|
||||
"Apply a function to all files in the database"
|
||||
[update-fn & {:keys [max-items
|
||||
max-jobs
|
||||
rollback?
|
||||
query]
|
||||
:or {max-jobs 1
|
||||
max-items Long/MAX_VALUE
|
||||
rollback? true
|
||||
query sql:get-files}
|
||||
:as opts}]
|
||||
|
||||
(l/dbg :hint "process:start"
|
||||
(l/inf :hint "process start"
|
||||
:rollback rollback?
|
||||
:max-jobs max-jobs
|
||||
:max-items max-items)
|
||||
|
||||
(let [tpoint (ct/tpoint)
|
||||
factory (px/thread-factory :virtual false :prefix "penpot/file-process/")
|
||||
executor (px/cached-executor :factory factory)
|
||||
sjobs (ps/create :permits max-jobs)
|
||||
max-jobs (or max-jobs (px/get-available-processors))
|
||||
query (or query
|
||||
(:query (meta proc-fn))
|
||||
(throw (ex-info "missing query" {})))
|
||||
query (if (vector? query) query [query])
|
||||
|
||||
process-file
|
||||
(fn [file-id idx tpoint]
|
||||
(let [thread-id (px/get-thread-id)]
|
||||
(try
|
||||
(l/trc :hint "process:file:start"
|
||||
:tid thread-id
|
||||
:file-id (str file-id)
|
||||
:index idx)
|
||||
(let [system (assoc main/system ::db/rollback rollback?)]
|
||||
(db/tx-run! system (fn [system]
|
||||
(binding [h/*system* system
|
||||
db/*conn* (db/get-connection system)]
|
||||
(h/process-file! system file-id update-fn opts)))))
|
||||
proc-fn (if (var? proc-fn)
|
||||
(deref proc-fn)
|
||||
proc-fn)
|
||||
|
||||
(catch Throwable cause
|
||||
(l/wrn :hint "unexpected error on processing file (skiping)"
|
||||
:tid thread-id
|
||||
:file-id (str file-id)
|
||||
:index idx
|
||||
:cause cause))
|
||||
(finally
|
||||
(when-let [pause (:pause opts)]
|
||||
(Thread/sleep (int pause)))
|
||||
in-ch (sp/chan :buf buffer)
|
||||
|
||||
(ps/release! sjobs)
|
||||
(let [elapsed (ct/format-duration (tpoint))]
|
||||
(l/trc :hint "process:file:end"
|
||||
:tid thread-id
|
||||
:file-id (str file-id)
|
||||
:index idx
|
||||
:elapsed elapsed))))))
|
||||
worker-fn
|
||||
(fn [worker-id]
|
||||
(l/dbg :hint "worker started"
|
||||
:id worker-id)
|
||||
|
||||
process-file*
|
||||
(fn [idx file-id]
|
||||
(ps/acquire! sjobs)
|
||||
(px/run! executor (partial process-file file-id idx (ct/tpoint)))
|
||||
(inc idx))
|
||||
(loop []
|
||||
(when-let [[index item] (sp/<! in-ch)]
|
||||
(l/dbg :hint "process item" :worker-id worker-id :index index :item item)
|
||||
(try
|
||||
(-> main/system
|
||||
(assoc ::db/rollback rollback?)
|
||||
(db/tx-run! (fn [system]
|
||||
(binding [h/*system* system
|
||||
db/*conn* (db/get-connection system)]
|
||||
(proc-fn system item opts)))))
|
||||
|
||||
process-files
|
||||
(catch Throwable cause
|
||||
(l/wrn :hint "unexpected error on processing item (skiping)"
|
||||
:worker-id worker-id
|
||||
:item item
|
||||
:cause cause))
|
||||
(finally
|
||||
(when-let [pause (:pause opts)]
|
||||
(Thread/sleep (int pause)))))
|
||||
|
||||
(recur)))
|
||||
|
||||
(l/dbg :hint "worker stoped"
|
||||
:id worker-id))
|
||||
|
||||
enqueue-item
|
||||
(fn [index row]
|
||||
(sp/>! in-ch [index (into {} row)])
|
||||
(inc index))
|
||||
|
||||
process-items
|
||||
(fn [{:keys [::db/conn] :as system}]
|
||||
(db/exec! conn ["SET statement_timeout = 0"])
|
||||
(db/exec! conn ["SET idle_in_transaction_session_timeout = 0"])
|
||||
|
||||
(try
|
||||
(->> (db/plan conn [query])
|
||||
(transduce (comp
|
||||
(take max-items)
|
||||
(map :id))
|
||||
(completing process-file*)
|
||||
0))
|
||||
(finally
|
||||
;; Close and await tasks
|
||||
(pu/close! executor))))]
|
||||
(->> (db/plan conn query {:fetch-size (* max-jobs 3)})
|
||||
(transduce (take max-items)
|
||||
(completing enqueue-item)
|
||||
0))
|
||||
(sp/close! in-ch))
|
||||
|
||||
threads
|
||||
(->> (range max-jobs)
|
||||
(map (fn [idx]
|
||||
(px/fn->thread (partial worker-fn idx)
|
||||
:name (str "pentpot/process/" idx))))
|
||||
(doall))]
|
||||
|
||||
(try
|
||||
(db/tx-run! main/system process-files)
|
||||
(db/tx-run! main/system process-items)
|
||||
|
||||
;; Await threads termination
|
||||
(doseq [thread threads]
|
||||
(px/await! thread))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/dbg :hint "process:error" :cause cause))
|
||||
|
||||
(finally
|
||||
(let [elapsed (ct/format-duration (tpoint))]
|
||||
(l/dbg :hint "process:end"
|
||||
(l/inf :hint "process end"
|
||||
:rollback rollback?
|
||||
:elapsed elapsed))))))
|
||||
|
||||
|
||||
(defn process-file!
|
||||
"A specialized, file specific process! alternative"
|
||||
[& {:keys [id] :as opts}]
|
||||
(let [id (h/parse-uuid id)]
|
||||
(-> opts
|
||||
(assoc :query ["select id from file where id = ?" id])
|
||||
(assoc :max-items 1)
|
||||
(assoc :max-jobs 1)
|
||||
(process!))))
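;; Illustrative single-file run: process-file! narrows process! to a one-row
;; query; the proc var and uuid here are placeholders for illustration.
(comment
  (process-file! :id "00000000-0000-0000-0000-000000000000"
                 :proc-fn #'app.srepl.procs.media-refs/fix-media-refs
                 :rollback? false))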
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; DELETE/RESTORE OBJECTS (WITH CASCADE, SOFT)
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
@@ -579,25 +561,34 @@
|
||||
(db/update! conn :file
|
||||
{:deleted-at nil
|
||||
:has-media-trimmed false}
|
||||
{:id file-id})
|
||||
|
||||
;; Fragments are not handled here because they
|
||||
;; use the database cascade operation and they
|
||||
;; are not marked for deletion with objects-gc
|
||||
;; task
|
||||
{:id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file-media-object
|
||||
{:deleted-at nil}
|
||||
{:file-id file-id})
|
||||
{:file-id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file-change
|
||||
{:deleted-at nil}
|
||||
{:file-id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file-data
|
||||
{:deleted-at nil}
|
||||
{:file-id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
;; Mark thumbnails to be deleted
|
||||
(db/update! conn :file-thumbnail
|
||||
{:deleted-at nil}
|
||||
{:file-id file-id})
|
||||
{:file-id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file-tagged-object-thumbnail
|
||||
{:deleted-at nil}
|
||||
{:file-id file-id})
|
||||
{:file-id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
:restored)
|
||||
|
||||
@@ -607,11 +598,10 @@
|
||||
(let [file-id (h/parse-uuid file-id)]
|
||||
(db/tx-run! main/system
|
||||
(fn [system]
|
||||
(when-let [file (some-> (db/get* system :file
|
||||
{:id file-id}
|
||||
{::db/remove-deleted false
|
||||
::sql/columns [:id :name]})
|
||||
(files/decode-row))]
|
||||
(when-let [file (db/get* system :file
|
||||
{:id file-id}
|
||||
{::db/remove-deleted false
|
||||
::sql/columns [:id :name]})]
|
||||
(audit/insert! system
|
||||
{::audit/name "restore-file"
|
||||
::audit/type "action"
|
||||
|
||||
backend/src/app/srepl/procs/fdata_storage.clj (new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.srepl.procs.fdata-storage
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.db :as db]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; SNAPSHOTS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def sql:get-unmigrated-snapshots
|
||||
"SELECT fc.id, fc.file_id
|
||||
FROM file_change AS fc
|
||||
WHERE fc.data IS NOT NULL
|
||||
AND fc.label IS NOT NULL
|
||||
ORDER BY fc.id ASC")
|
||||
|
||||
(def sql:get-migrated-snapshots
|
||||
"SELECT f.id, f.file_id
|
||||
FROM file_data AS f
|
||||
WHERE f.data IS NOT NULL
|
||||
AND f.type = 'snapshot'
|
||||
AND f.id != f.file_id
|
||||
ORDER BY f.id ASC")
|
||||
|
||||
(defn migrate-snapshot-to-storage
|
||||
"Migrate the current existing files to store data in new storage
|
||||
tables."
|
||||
{:query sql:get-unmigrated-snapshots}
|
||||
[{:keys [::db/conn]} {:keys [id file-id]} & {:as options}]
|
||||
(let [{:keys [id file-id data created-at updated-at]}
|
||||
(db/get* conn :file-change {:id id :file-id file-id}
|
||||
::db/for-update true
|
||||
::db/remove-deleted false)]
|
||||
(when data
|
||||
(l/inf :hint "migrating snapshot" :file-id (str file-id) :id (str id))
|
||||
(db/update! conn :file-change
|
||||
{:data nil}
|
||||
{:id id :file-id file-id}
|
||||
{::db/return-keys false})
|
||||
(db/insert! conn :file-data
|
||||
{:backend "db"
|
||||
:metadata nil
|
||||
:type "snapshot"
|
||||
:data data
|
||||
:created-at created-at
|
||||
:modified-at updated-at
|
||||
:file-id file-id
|
||||
:id id}
|
||||
{::db/return-keys false}))))
|
||||
|
||||
(defn rollback-snapshot-from-storage
|
||||
"Migrate back to the file table storage."
|
||||
{:query sql:get-unmigrated-snapshots}
|
||||
[{:keys [::db/conn]} {:keys [id file-id]} & {:as opts}]
|
||||
(when-let [{:keys [id file-id data]}
|
||||
(db/get* conn :file-data {:id id :file-id file-id :type "snapshot"}
|
||||
::db/for-update true
|
||||
::db/remove-deleted false)]
|
||||
(l/inf :hint "rollback snapshot" :file-id (str file-id) :id (str id))
|
||||
(db/update! conn :file-change
|
||||
{:data data}
|
||||
{:id id :file-id file-id}
|
||||
{::db/return-keys false})
|
||||
(db/delete! conn :file-data
|
||||
{:id id :file-id file-id :type "snapshot"}
|
||||
{::db/return-keys false})))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; FILES
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def sql:get-unmigrated-files
|
||||
"SELECT f.id
|
||||
FROM file AS f
|
||||
WHERE f.data IS NOT NULL
|
||||
ORDER BY f.modified_at ASC")
|
||||
|
||||
(def sql:get-migrated-files
|
||||
"SELECT f.id, f.file_id
|
||||
FROM file_data AS f
|
||||
WHERE f.data IS NOT NULL
|
||||
AND f.id = f.file_id
|
||||
ORDER BY f.id ASC")
|
||||
|
||||
(defn migrate-file-to-storage
|
||||
"Migrate the current existing files to store data in new storage
|
||||
tables."
|
||||
{:query sql:get-unmigrated-files}
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id]} & {:as opts}]
|
||||
(let [{:keys [id data created-at modified-at]}
|
||||
(db/get* conn :file {:id id}
|
||||
::db/for-update true
|
||||
::db/remove-deleted false)]
|
||||
|
||||
(when data
|
||||
(l/inf :hint "migrating file" :file-id (str id))
|
||||
|
||||
(db/update! conn :file {:data nil} {:id id} ::db/return-keys false)
|
||||
(db/insert! conn :file-data
|
||||
{:backend "db"
|
||||
:metadata nil
|
||||
:type "main"
|
||||
:data data
|
||||
:created-at created-at
|
||||
:modified-at modified-at
|
||||
:file-id id
|
||||
:id id}
|
||||
{::db/return-keys false}))
|
||||
|
||||
(let [snapshots-sql
|
||||
(str "WITH snapshots AS (" sql:get-unmigrated-snapshots ") "
|
||||
"SELECT s.* FROM snapshots AS s WHERE s.file_id = ?")]
|
||||
(run! (fn [params]
|
||||
(migrate-snapshot-to-storage cfg params opts))
|
||||
(db/plan cfg [snapshots-sql id])))))
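;; Illustrative: driving this proc from the REPL through
;; app.srepl.main/process!; the default query is taken from the var metadata,
;; and rollback? false persists the migration.
(comment
  (app.srepl.main/process!
   :proc-fn #'app.srepl.procs.fdata-storage/migrate-file-to-storage
   :max-jobs 4
   :rollback? false))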
|
||||
|
||||
|
||||
(defn rollback-file-from-storage
|
||||
"Migrate back to the file table storage."
|
||||
{:query sql:get-migrated-files}
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id]} & {:as opts}]
|
||||
(when-let [{:keys [id data]}
|
||||
(db/get* conn :file-data {:id id :file-id id :type "main"}
|
||||
::db/for-update true
|
||||
::db/remove-deleted false)]
|
||||
(l/inf :hint "rollback file" :file-id (str id))
|
||||
(db/update! conn :file {:data data} {:id id} ::db/return-keys false)
|
||||
(db/delete! conn :file-data {:file-id id :id id :type "main"} ::db/return-keys false)
|
||||
|
||||
(let [snapshots-sql
|
||||
(str "WITH snapshots AS (" sql:get-migrated-snapshots ") "
|
||||
"SELECT s.* FROM snapshots AS s WHERE s.file_id = ?")]
|
||||
(run! (fn [params]
|
||||
(rollback-snapshot-from-storage cfg params opts))
|
||||
(db/plan cfg [snapshots-sql id])))))
|
||||
backend/src/app/srepl/procs/file_repair.clj (new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.srepl.procs.file-repair
|
||||
(:require
|
||||
[app.common.files.changes :as cfc]
|
||||
[app.common.files.repair :as cfr]
|
||||
[app.common.files.validate :as cfv]
|
||||
[app.common.logging :as l]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; GENERAL PURPOSE REPAIR
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn repair-file
|
||||
"Internal helper for validate and repair the file. The operation is
|
||||
applied multiple times untile file is fixed or max iteration counter
|
||||
is reached (default 10).
|
||||
|
||||
This function should not be used directly, it is used throught the
|
||||
app.srepl.main/repair-file! helper. In practical terms this function
|
||||
is private and implementation detail."
|
||||
[file libs & {:keys [max-iterations] :or {max-iterations 10}}]
|
||||
|
||||
(let [validate-and-repair
|
||||
(fn [file libs iteration]
|
||||
(when-let [errors (not-empty (cfv/validate-file file libs))]
|
||||
(l/trc :hint "repairing file"
|
||||
:file-id (str (:id file))
|
||||
:iteration iteration
|
||||
:errors (count errors))
|
||||
(let [changes (cfr/repair-file file libs errors)]
|
||||
(-> file
|
||||
(update :revn inc)
|
||||
(update :data cfc/process-changes changes)))))
|
||||
|
||||
process-file
|
||||
(fn [file libs]
|
||||
(loop [file file
|
||||
iteration 0]
|
||||
(if (< iteration max-iterations)
|
||||
(if-let [file (validate-and-repair file libs iteration)]
|
||||
(recur file (inc iteration))
|
||||
file)
|
||||
(do
|
||||
(l/wrn :hint "max retry num reached on repairing file"
|
||||
:file-id (str (:id file))
|
||||
:iteration iteration)
|
||||
file))))
|
||||
|
||||
file'
|
||||
(process-file file libs)]
|
||||
|
||||
(when (not= (:revn file) (:revn file'))
|
||||
(l/trc :hint "file repaired" :file-id (str (:id file))))
|
||||
|
||||
file'))
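;; Illustrative REPL usage, as the docstring above describes; the uuid is a
;; placeholder and rollback? false persists the repaired file:
(comment
  (app.srepl.main/repair-file! "00000000-0000-0000-0000-000000000000"
                               :rollback? false))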
|
||||
@@ -4,10 +4,11 @@
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.srepl.fixes.media-refs
|
||||
(ns app.srepl.procs.media-refs
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.logging :as l]
|
||||
[app.srepl.helpers :as h]))
|
||||
|
||||
(defn- collect-media-refs
|
||||
@@ -37,7 +38,22 @@
|
||||
(let [media-refs (collect-media-refs (:data file))]
|
||||
(bfc/update-media-references! cfg file media-refs)))
|
||||
|
||||
(defn process-file
|
||||
[file _opts]
|
||||
(let [system (h/get-current-system)]
|
||||
(update-all-media-references system file)))
|
||||
(def ^:private sql:get-files
|
||||
"SELECT f.id
|
||||
FROM file AS f
|
||||
LEFT JOIN file_migration AS fm ON (fm.file_id = f.id AND fm.name = 'internal/procs/media-refs')
|
||||
WHERE fm.name IS NULL
|
||||
ORDER BY f.project_id")
|
||||
|
||||
(defn fix-media-refs
|
||||
{:query sql:get-files}
|
||||
[cfg {:keys [id]} & {:as options}]
|
||||
(l/inf :hint "processing file" :id (str id))
|
||||
|
||||
(h/process-file! cfg id
|
||||
(fn [file _opts]
|
||||
(update-all-media-references cfg file))
|
||||
(assoc options
|
||||
::bfc/reset-migrations? true
|
||||
::h/validate? false))
|
||||
(h/mark-migrated! cfg id "internal/procs/media-refs"))
|
||||
backend/src/app/srepl/procs/path_data.clj (new file, 57 lines)
@@ -0,0 +1,57 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.srepl.procs.path-data
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.logging :as l]
|
||||
[app.srepl.helpers :as h]))
|
||||
|
||||
(def ^:private sql:get-files-with-path-data
|
||||
"SELECT id FROM file WHERE features @> '{fdata/path-data}'")
|
||||
|
||||
(defn disable
|
||||
"A script responsible for remove the path data type from file data and
|
||||
allow file to be open in older penpot versions.
|
||||
|
||||
Should be used only in cases when you want to downgrade to an older
|
||||
penpot version for some reason."
|
||||
{:query sql:get-files-with-path-data}
|
||||
[cfg {:keys [id]} & {:as options}]
|
||||
|
||||
(l/inf :hint "disabling path-data" :file-id (str id))
|
||||
|
||||
(let [update-object
|
||||
(fn [object]
|
||||
(if (or (cfh/path-shape? object)
|
||||
(cfh/bool-shape? object))
|
||||
(update object :content vec)
|
||||
object))
|
||||
|
||||
update-container
|
||||
(fn [container]
|
||||
(d/update-when container :objects d/update-vals update-object))
|
||||
|
||||
update-file
|
||||
(fn [file & _opts]
|
||||
(-> file
|
||||
(update :data (fn [data]
|
||||
(-> data
|
||||
(update :pages-index d/update-vals update-container)
|
||||
(d/update-when :components d/update-vals update-container))))
|
||||
(update :features disj "fdata/path-data")
|
||||
(update :migrations disj
|
||||
"0003-convert-path-content-v2"
|
||||
"0003-convert-path-content")))
|
||||
|
||||
options
|
||||
(-> options
|
||||
(assoc ::bfc/reset-migrations? true)
|
||||
(assoc ::h/validate? false))]
|
||||
|
||||
(h/process-file! cfg id update-file options)))
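;; Illustrative bulk run (values are placeholders): :pause adds a small sleep
;; between items to reduce database pressure.
(comment
  (app.srepl.main/process!
   :proc-fn #'app.srepl.procs.path-data/disable
   :rollback? false
   :pause 50))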
|
||||
@@ -115,13 +115,10 @@
|
||||
|
||||
(defn- create-database-object
|
||||
[{:keys [::backend ::db/connectable]} {:keys [::content ::expired-at ::touched-at ::touch] :as params}]
|
||||
(let [id (or (:id params) (uuid/random))
|
||||
(let [id (or (::id params) (uuid/random))
|
||||
mdata (cond-> (get-metadata params)
|
||||
(satisfies? impl/IContentHash content)
|
||||
(assoc :hash (impl/get-hash content))
|
||||
|
||||
:always
|
||||
(dissoc :id))
|
||||
(assoc :hash (impl/get-hash content)))
|
||||
|
||||
touched-at (if touch
|
||||
(or touched-at (ct/now))
|
||||
|
||||
@@ -34,7 +34,7 @@
|
||||
(SELECT EXISTS (SELECT 1 FROM team_font_variant WHERE ttf_file_id = ?))) AS has_refs")
|
||||
|
||||
(defn- has-team-font-variant-refs?
|
||||
[conn id]
|
||||
[conn {:keys [id]}]
|
||||
(-> (db/exec-one! conn [sql:has-team-font-variant-refs id id id id])
|
||||
(get :has-refs)))
|
||||
|
||||
@@ -44,7 +44,7 @@
|
||||
(SELECT EXISTS (SELECT 1 FROM file_media_object WHERE thumbnail_id = ?))) AS has_refs")
|
||||
|
||||
(defn- has-file-media-object-refs?
|
||||
[conn id]
|
||||
[conn {:keys [id]}]
|
||||
(-> (db/exec-one! conn [sql:has-file-media-object-refs id id])
|
||||
(get :has-refs)))
|
||||
|
||||
@@ -53,7 +53,7 @@
|
||||
(SELECT EXISTS (SELECT 1 FROM team WHERE photo_id = ?))) AS has_refs")
|
||||
|
||||
(defn- has-profile-refs?
|
||||
[conn id]
|
||||
[conn {:keys [id]}]
|
||||
(-> (db/exec-one! conn [sql:has-profile-refs id id])
|
||||
(get :has-refs)))
|
||||
|
||||
@@ -62,7 +62,7 @@
|
||||
"SELECT EXISTS (SELECT 1 FROM file_tagged_object_thumbnail WHERE media_id = ?) AS has_refs")
|
||||
|
||||
(defn- has-file-object-thumbnails-refs?
|
||||
[conn id]
|
||||
[conn {:keys [id]}]
|
||||
(-> (db/exec-one! conn [sql:has-file-object-thumbnail-refs id])
|
||||
(get :has-refs)))
|
||||
|
||||
@@ -71,36 +71,23 @@
|
||||
"SELECT EXISTS (SELECT 1 FROM file_thumbnail WHERE media_id = ?) AS has_refs")
|
||||
|
||||
(defn- has-file-thumbnails-refs?
|
||||
[conn id]
|
||||
[conn {:keys [id]}]
|
||||
(-> (db/exec-one! conn [sql:has-file-thumbnail-refs id])
|
||||
(get :has-refs)))
|
||||
|
||||
(def ^:private
|
||||
sql:has-file-data-refs
|
||||
"SELECT EXISTS (SELECT 1 FROM file WHERE data_ref_id = ?) AS has_refs")
|
||||
(def sql:exists-file-data-refs
|
||||
"SELECT EXISTS (
|
||||
SELECT 1 FROM file_data
|
||||
WHERE file_id = ?
|
||||
AND id = ?
|
||||
AND metadata->>'storage-ref-id' = ?::text
|
||||
) AS has_refs")
|
||||
|
||||
(defn- has-file-data-refs?
|
||||
[conn id]
|
||||
(-> (db/exec-one! conn [sql:has-file-data-refs id])
|
||||
(get :has-refs)))
|
||||
|
||||
(def ^:private
|
||||
sql:has-file-data-fragment-refs
|
||||
"SELECT EXISTS (SELECT 1 FROM file_data_fragment WHERE data_ref_id = ?) AS has_refs")
|
||||
|
||||
(defn- has-file-data-fragment-refs?
|
||||
[conn id]
|
||||
(-> (db/exec-one! conn [sql:has-file-data-fragment-refs id])
|
||||
(get :has-refs)))
|
||||
|
||||
(def ^:private
|
||||
sql:has-file-change-refs
|
||||
"SELECT EXISTS (SELECT 1 FROM file_change WHERE data_ref_id = ?) AS has_refs")
|
||||
|
||||
(defn- has-file-change-refs?
|
||||
[conn id]
|
||||
(-> (db/exec-one! conn [sql:has-file-change-refs id])
|
||||
(get :has-refs)))
|
||||
[conn sobject]
|
||||
(let [{:keys [file-id id]} (:metadata sobject)]
|
||||
(-> (db/exec-one! conn [sql:exists-file-data-refs file-id id (:id sobject)])
|
||||
(get :has-refs))))
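;; Sketch of the new call shape (values illustrative): the reference check now
;; receives the whole storage object and matches file_data.metadata against
;; the object id.
(comment
  (has-file-data-refs? conn
                       {:id #uuid "00000000-0000-0000-0000-000000000000"
                        :metadata {:file-id #uuid "00000000-0000-0000-0000-000000000001"
                                   :id #uuid "00000000-0000-0000-0000-000000000002"}}))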
|
||||
|
||||
(def ^:private sql:mark-freeze-in-bulk
|
||||
"UPDATE storage_object
|
||||
@@ -143,52 +130,48 @@
|
||||
"file-media-object"))
|
||||
|
||||
(defn- process-objects!
|
||||
[conn has-refs? ids bucket]
|
||||
[conn has-refs? bucket objects]
|
||||
(loop [to-freeze #{}
|
||||
to-delete #{}
|
||||
ids (seq ids)]
|
||||
(if-let [id (first ids)]
|
||||
(if (has-refs? conn id)
|
||||
objects (seq objects)]
|
||||
(if-let [{:keys [id] :as object} (first objects)]
|
||||
(if (has-refs? conn object)
|
||||
(do
|
||||
(l/debug :hint "processing object"
|
||||
:id (str id)
|
||||
(l/debug :id (str id)
|
||||
:status "freeze"
|
||||
:bucket bucket)
|
||||
(recur (conj to-freeze id) to-delete (rest ids)))
|
||||
(recur (conj to-freeze id) to-delete (rest objects)))
|
||||
(do
|
||||
(l/debug :hint "processing object"
|
||||
:id (str id)
|
||||
(l/debug :id (str id)
|
||||
:status "delete"
|
||||
:bucket bucket)
|
||||
(recur to-freeze (conj to-delete id) (rest ids))))
|
||||
(recur to-freeze (conj to-delete id) (rest objects))))
|
||||
(do
|
||||
(some->> (seq to-freeze) (mark-freeze-in-bulk! conn))
|
||||
(some->> (seq to-delete) (mark-delete-in-bulk! conn))
|
||||
[(count to-freeze) (count to-delete)]))))
|
||||
|
||||
(defn- process-bucket!
|
||||
[conn bucket ids]
|
||||
[conn bucket objects]
|
||||
(case bucket
|
||||
"file-media-object" (process-objects! conn has-file-media-object-refs? ids bucket)
|
||||
"team-font-variant" (process-objects! conn has-team-font-variant-refs? ids bucket)
|
||||
"file-object-thumbnail" (process-objects! conn has-file-object-thumbnails-refs? ids bucket)
|
||||
"file-thumbnail" (process-objects! conn has-file-thumbnails-refs? ids bucket)
|
||||
"profile" (process-objects! conn has-profile-refs? ids bucket)
|
||||
"file-data" (process-objects! conn has-file-data-refs? ids bucket)
|
||||
"file-data-fragment" (process-objects! conn has-file-data-fragment-refs? ids bucket)
|
||||
"file-change" (process-objects! conn has-file-change-refs? ids bucket)
|
||||
"file-media-object" (process-objects! conn has-file-media-object-refs? bucket objects)
|
||||
"team-font-variant" (process-objects! conn has-team-font-variant-refs? bucket objects)
|
||||
"file-object-thumbnail" (process-objects! conn has-file-object-thumbnails-refs? bucket objects)
|
||||
"file-thumbnail" (process-objects! conn has-file-thumbnails-refs? bucket objects)
|
||||
"profile" (process-objects! conn has-profile-refs? bucket objects)
|
||||
"file-data" (process-objects! conn has-file-data-refs? bucket objects)
|
||||
(ex/raise :type :internal
|
||||
:code :unexpected-unknown-reference
|
||||
:hint (dm/fmt "unknown reference '%'" bucket))))
|
||||
|
||||
(defn process-chunk!
|
||||
[{:keys [::db/conn]} chunk]
|
||||
(reduce-kv (fn [[nfo ndo] bucket ids]
|
||||
(let [[nfo' ndo'] (process-bucket! conn bucket ids)]
|
||||
(reduce-kv (fn [[nfo ndo] bucket objects]
|
||||
(let [[nfo' ndo'] (process-bucket! conn bucket objects)]
|
||||
[(+ nfo nfo')
|
||||
(+ ndo ndo')]))
|
||||
[0 0]
|
||||
(d/group-by lookup-bucket :id #{} chunk)))
|
||||
(d/group-by lookup-bucket identity #{} chunk)))
|
||||
|
||||
(def ^:private
|
||||
sql:get-touched-storage-objects
|
||||
@@ -214,12 +197,7 @@
|
||||
(let [[nfo ndo] (db/tx-run! cfg process-chunk! chunk)]
|
||||
(recur (long (+ freezed nfo))
|
||||
(long (+ deleted ndo))))
|
||||
(do
|
||||
(l/inf :hint "task finished"
|
||||
:to-freeze freezed
|
||||
:to-delete deleted)
|
||||
|
||||
{:freeze freezed :delete deleted}))))
|
||||
{:freeze freezed :delete deleted})))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HANDLER
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
[app.common.logging :as l]
|
||||
[app.common.time :as ct]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[integrant.core :as ig]))
|
||||
@@ -19,10 +20,28 @@
|
||||
(defmulti delete-object
|
||||
(fn [_ props] (:object props)))
|
||||
|
||||
(defmethod delete-object :snapshot
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id file-id deleted-at]}]
|
||||
(l/trc :obj "snapshot" :id (str id) :file-id (str file-id)
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
|
||||
(db/update! conn :file-change
|
||||
{:deleted-at deleted-at}
|
||||
{:id id :file-id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file-data
|
||||
{:deleted-at deleted-at}
|
||||
{:id id :file-id file-id :type "snapshot"}
|
||||
{::db/return-keys false}))
|
||||
|
||||
(defmethod delete-object :file
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id deleted-at]}]
|
||||
(when-let [file (db/get* conn :file {:id id} {::db/remove-deleted false})]
|
||||
(l/trc :hint "marking for deletion" :rel "file" :id (str id)
|
||||
(when-let [file (db/get* conn :file {:id id}
|
||||
{::db/remove-deleted false
|
||||
::sql/columns [:id :is-shared]})]
|
||||
|
||||
(l/trc :obj "file" :id (str id)
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
|
||||
(db/update! conn :file
|
||||
@@ -43,25 +62,35 @@
|
||||
;; Mark file change to be deleted
|
||||
(db/update! conn :file-change
|
||||
{:deleted-at deleted-at}
|
||||
{:file-id id})
|
||||
{:file-id id}
|
||||
{::db/return-keys false})
|
||||
|
||||
;; Mark file data fragment to be deleted
|
||||
(db/update! conn :file-data
|
||||
{:deleted-at deleted-at}
|
||||
{:file-id id}
|
||||
{::db/return-keys false})
|
||||
|
||||
;; Mark file media objects to be deleted
|
||||
(db/update! conn :file-media-object
|
||||
{:deleted-at deleted-at}
|
||||
{:file-id id})
|
||||
{:file-id id}
|
||||
{::db/return-keys false})
|
||||
|
||||
;; Mark thumbnails to be deleted
|
||||
(db/update! conn :file-thumbnail
|
||||
{:deleted-at deleted-at}
|
||||
{:file-id id})
|
||||
{:file-id id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file-tagged-object-thumbnail
|
||||
{:deleted-at deleted-at}
|
||||
{:file-id id})))
|
||||
{:file-id id}
|
||||
{::db/return-keys false})))
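A hedged sketch of the cascading soft-delete that delete-object :file performs above: the same deleted-at timestamp is stamped on the file row and on every dependent table. update-row! is a hypothetical stand-in for db/update!, not Penpot's API.

(defn soft-delete-file!
  [update-row! file-id deleted-at]
  ;; the file row itself is keyed by :id
  (update-row! :file {:deleted-at deleted-at} {:id file-id})
  ;; dependent rows are keyed by :file-id and share the same timestamp
  (doseq [table [:file-change :file-data :file-media-object
                 :file-thumbnail :file-tagged-object-thumbnail]]
    (update-row! table {:deleted-at deleted-at} {:file-id file-id})))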
|
||||
|
||||
(defmethod delete-object :project
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id deleted-at]}]
|
||||
(l/trc :hint "marking for deletion" :rel "project" :id (str id)
|
||||
(l/trc :obj "project" :id (str id)
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
|
||||
(db/update! conn :project
|
||||
@@ -78,7 +107,7 @@
|
||||
|
||||
(defmethod delete-object :team
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id deleted-at]}]
|
||||
(l/trc :hint "marking for deletion" :rel "team" :id (str id)
|
||||
(l/trc :obj "team" :id (str id)
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
(db/update! conn :team
|
||||
{:deleted-at deleted-at}
|
||||
@@ -100,7 +129,7 @@
|
||||
|
||||
(defmethod delete-object :profile
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id deleted-at]}]
|
||||
(l/trc :hint "marking for deletion" :rel "profile" :id (str id)
|
||||
(l/trc :obj "profile" :id (str id)
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
|
||||
(db/update! conn :profile
|
||||
@@ -115,7 +144,7 @@
|
||||
|
||||
(defmethod delete-object :default
|
||||
[_cfg props]
|
||||
(l/wrn :hint "not implementation found" :rel (:object props)))
|
||||
(l/wrn :obj (:object props) :hint "no implementation found"))
|
||||
|
||||
(defmethod ig/assert-key ::handler
|
||||
[_ params]
|
||||
|
||||
@@ -23,29 +23,16 @@
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.features.file-snapshots :as fsnap]
|
||||
[app.storage :as sto]
|
||||
[app.worker :as wrk]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(declare get-file)
|
||||
|
||||
(def sql:get-snapshots
|
||||
"SELECT fc.file_id AS id,
|
||||
fc.id AS snapshot_id,
|
||||
fc.data,
|
||||
fc.revn,
|
||||
fc.version,
|
||||
fc.features,
|
||||
fc.data_backend,
|
||||
fc.data_ref_id
|
||||
FROM file_change AS fc
|
||||
WHERE fc.file_id = ?
|
||||
AND fc.data IS NOT NULL
|
||||
ORDER BY fc.created_at ASC")
|
||||
|
||||
(def ^:private sql:mark-file-media-object-deleted
|
||||
"UPDATE file_media_object
|
||||
SET deleted_at = now()
|
||||
SET deleted_at = ?
|
||||
WHERE file_id = ? AND id != ALL(?::uuid[])
|
||||
RETURNING id")
|
||||
|
||||
@@ -56,37 +43,35 @@
|
||||
|
||||
(defn- clean-file-media!
|
||||
"Performs the garbage collection of file media objects."
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
|
||||
(let [xform (comp
|
||||
(map (partial bfc/decode-file cfg))
|
||||
xf:collect-used-media)
|
||||
[{:keys [::db/conn ::timestamp] :as cfg} {:keys [id] :as file}]
|
||||
(let [used-media
|
||||
(fsnap/reduce-snapshots cfg id xf:collect-used-media conj #{})
|
||||
|
||||
used (->> (db/plan conn [sql:get-snapshots id] {:fetch-size 1})
|
||||
(transduce xform conj #{}))
|
||||
used (into used xf:collect-used-media [file])
|
||||
used-media
|
||||
(into used-media xf:collect-used-media [file])
|
||||
|
||||
ids (db/create-array conn "uuid" used)
|
||||
unused (->> (db/exec! conn [sql:mark-file-media-object-deleted id ids])
|
||||
(into #{} (map :id)))]
|
||||
used-media
|
||||
(db/create-array conn "uuid" used-media)
|
||||
|
||||
(l/dbg :hint "clean" :rel "file-media-object" :file-id (str id) :total (count unused))
|
||||
unused-media
|
||||
(->> (db/exec! conn [sql:mark-file-media-object-deleted timestamp id used-media])
|
||||
(into #{} (map :id)))]
|
||||
|
||||
(doseq [id unused]
|
||||
(l/trc :hint "mark deleted"
|
||||
:rel "file-media-object"
|
||||
:id (str id)
|
||||
:file-id (str id)))
|
||||
(doseq [id unused-media]
|
||||
(l/trc :obj "media-object"
|
||||
:file-id (str id)
|
||||
:id (str id)))
|
||||
|
||||
file))
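An in-memory sketch of what clean-file-media! computes above: the set of media ids still referenced by the file and its snapshots, and the complement that becomes eligible for deletion. The :media-ids key is a hypothetical stand-in for xf:collect-used-media; the real code marks the complement directly in SQL.

(defn used-media-ids
  [file snapshots]
  ;; collect ids referenced by the current file and every snapshot
  (into #{} (mapcat :media-ids) (cons file snapshots)))

(defn unused-media-ids
  [all-ids file snapshots]
  ;; everything not in the used set is a candidate for soft deletion
  (remove (used-media-ids file snapshots) all-ids))

;; (unused-media-ids [1 2 3] {:media-ids [1]} [{:media-ids [2]}]) => (3)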
|
||||
|
||||
(def ^:private sql:mark-file-object-thumbnails-deleted
|
||||
"UPDATE file_tagged_object_thumbnail
|
||||
SET deleted_at = now()
|
||||
SET deleted_at = ?
|
||||
WHERE file_id = ? AND object_id != ALL(?::text[])
|
||||
RETURNING object_id")
|
||||
|
||||
(defn- clean-file-object-thumbnails!
|
||||
[{:keys [::db/conn]} {:keys [data] :as file}]
|
||||
[{:keys [::db/conn ::timestamp]} {:keys [data] :as file}]
|
||||
(let [file-id (:id file)
|
||||
using (->> (vals (:pages-index data))
|
||||
(into #{} (comp
|
||||
@@ -98,49 +83,37 @@
|
||||
(thc/fmt-object-id file-id page-id id "frame")
|
||||
(thc/fmt-object-id file-id page-id id "component")))))))
|
||||
|
||||
ids (db/create-array conn "text" using)
|
||||
unused (->> (db/exec! conn [sql:mark-file-object-thumbnails-deleted file-id ids])
|
||||
ids (into-array String using)
|
||||
unused (->> (db/exec! conn [sql:mark-file-object-thumbnails-deleted timestamp file-id ids])
|
||||
(into #{} (map :object-id)))]
|
||||
|
||||
(l/dbg :hint "clean" :rel "file-object-thumbnail" :file-id (str file-id) :total (count unused))
|
||||
|
||||
(doseq [object-id unused]
|
||||
(l/trc :hint "mark deleted"
|
||||
:rel "file-tagged-object-thumbnail"
|
||||
:object-id object-id
|
||||
:file-id (str file-id)))
|
||||
(l/trc :obj "object-thumbnail"
|
||||
:file-id (str file-id)
|
||||
:id object-id))
|
||||
|
||||
file))
|
||||
|
||||
(def ^:private sql:mark-file-thumbnails-deleted
|
||||
"UPDATE file_thumbnail
|
||||
SET deleted_at = now()
|
||||
SET deleted_at = ?
|
||||
WHERE file_id = ? AND revn < ?
|
||||
RETURNING revn")
|
||||
|
||||
(defn- clean-file-thumbnails!
|
||||
[{:keys [::db/conn]} {:keys [id revn] :as file}]
|
||||
(let [unused (->> (db/exec! conn [sql:mark-file-thumbnails-deleted id revn])
|
||||
[{:keys [::db/conn ::timestamp]} {:keys [id revn] :as file}]
|
||||
(let [unused (->> (db/exec! conn [sql:mark-file-thumbnails-deleted timestamp id revn])
|
||||
(into #{} (map :revn)))]
|
||||
|
||||
(l/dbg :hint "clean" :rel "file-thumbnail" :file-id (str id) :total (count unused))
|
||||
|
||||
(doseq [revn unused]
|
||||
(l/trc :hint "mark deleted"
|
||||
:rel "file-thumbnail"
|
||||
:revn revn
|
||||
:file-id (str id)))
|
||||
(l/trc :obj "thumbnail"
|
||||
:file-id (str id)
|
||||
:revn revn))
|
||||
|
||||
file))
|
||||
|
||||
(def ^:private sql:get-files-for-library
|
||||
"SELECT f.id,
|
||||
f.data,
|
||||
f.modified_at,
|
||||
f.features,
|
||||
f.version,
|
||||
f.data_backend,
|
||||
f.data_ref_id
|
||||
"SELECT f.id
|
||||
FROM file AS f
|
||||
LEFT JOIN file_library_rel AS fl ON (fl.file_id = f.id)
|
||||
WHERE fl.library_file_id = ?
|
||||
@@ -161,15 +134,21 @@
|
||||
deleted-components
|
||||
(ctkl/deleted-components-seq data)
|
||||
|
||||
xform
|
||||
file-xform
|
||||
(mapcat (partial get-used-components deleted-components file-id))
|
||||
|
||||
library-xform
|
||||
(comp
|
||||
(map :id)
|
||||
(map #(bfc/get-file cfg % :realize? true :read-only? true))
|
||||
file-xform)
|
||||
|
||||
used-remote
|
||||
(->> (db/plan conn [sql:get-files-for-library file-id] {:fetch-size 1})
|
||||
(transduce (comp (map (partial bfc/decode-file cfg)) xform) conj #{}))
|
||||
(transduce library-xform conj #{}))
|
||||
|
||||
used-local
|
||||
(into #{} xform [file])
|
||||
(into #{} file-xform [file])
|
||||
|
||||
unused
|
||||
(transduce bfc/xf-map-id disj
|
||||
@@ -180,21 +159,21 @@
|
||||
(update file :data
|
||||
(fn [data]
|
||||
(reduce (fn [data id]
|
||||
(l/trc :hint "delete component"
|
||||
:component-id (str id)
|
||||
:file-id (str file-id))
|
||||
(l/trc :obj "component"
|
||||
:file-id (str file-id)
|
||||
:id (str id))
|
||||
(ctkl/delete-component data id))
|
||||
data
|
||||
unused)))]
|
||||
|
||||
(l/dbg :hint "clean" :rel "components" :file-id (str file-id) :total (count unused))
|
||||
file))
|
||||
|
||||
(def ^:private sql:mark-deleted-data-fragments
|
||||
"UPDATE file_data_fragment
|
||||
SET deleted_at = now()
|
||||
"UPDATE file_data
|
||||
SET deleted_at = ?
|
||||
WHERE file_id = ?
|
||||
AND id != ALL(?::uuid[])
|
||||
AND type = 'fragment'
|
||||
AND deleted_at IS NULL
|
||||
RETURNING id")
|
||||
|
||||
@@ -203,19 +182,16 @@
|
||||
(mapcat feat.fdata/get-used-pointer-ids)))
|
||||
|
||||
(defn- clean-fragments!
|
||||
[{:keys [::db/conn]} {:keys [id] :as file}]
|
||||
[{:keys [::db/conn ::timestamp]} {:keys [id] :as file}]
|
||||
(let [used (into #{} xf:collect-pointers [file])
|
||||
|
||||
unused (->> (db/exec! conn [sql:mark-deleted-data-fragments id
|
||||
unused (->> (db/exec! conn [sql:mark-deleted-data-fragments timestamp id
|
||||
(db/create-array conn "uuid" used)])
|
||||
(into #{} bfc/xf-map-id))]
|
||||
|
||||
(l/dbg :hint "clean" :rel "file-data-fragment" :file-id (str id) :total (count unused))
|
||||
(doseq [id unused]
|
||||
(l/trc :hint "mark deleted"
|
||||
:rel "file-data-fragment"
|
||||
:id (str id)
|
||||
:file-id (str id)))
|
||||
(l/trc :obj "fragment"
|
||||
:file-id (str id)
|
||||
:id (str id)))
|
||||
|
||||
file))
|
||||
|
||||
@@ -229,36 +205,23 @@
|
||||
(cfv/validate-file-schema! file)
|
||||
file))
|
||||
|
||||
(def ^:private sql:get-file
|
||||
"SELECT f.id,
|
||||
f.data,
|
||||
f.revn,
|
||||
f.version,
|
||||
f.features,
|
||||
f.modified_at,
|
||||
f.data_backend,
|
||||
f.data_ref_id
|
||||
FROM file AS f
|
||||
WHERE f.has_media_trimmed IS false
|
||||
AND f.modified_at < now() - ?::interval
|
||||
AND f.deleted_at IS NULL
|
||||
AND f.id = ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn get-file
|
||||
[{:keys [::db/conn ::min-age]} file-id]
|
||||
(let [min-age (if min-age
|
||||
(db/interval min-age)
|
||||
(db/interval 0))]
|
||||
(->> (db/exec! conn [sql:get-file min-age file-id])
|
||||
(first))))
|
||||
[cfg {:keys [file-id revn]}]
|
||||
(let [file (bfc/get-file cfg file-id
|
||||
:realize? true
|
||||
:skip-locked? true
|
||||
:lock-for-update? true)]
|
||||
|
||||
;; We should ensure that the file has not changed between scheduling
|
||||
;; and processing; for this reason we compare the revn from props
|
||||
;; with the revn of the file retrieved from the database
|
||||
(when (or (nil? revn) (= revn (:revn file)))
|
||||
file)))
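A tiny sketch of the revision guard described in the comment above, written as a pure function: return the file only when the revn captured at schedule time still matches, or when no revn was recorded at all.

(defn file-when-unchanged
  [scheduled-revn file]
  (when (or (nil? scheduled-revn)
            (= scheduled-revn (:revn file)))
    file))

;; (file-when-unchanged 3 {:id 1 :revn 3}) => {:id 1 :revn 3}
;; (file-when-unchanged 3 {:id 1 :revn 4}) => nil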
|
||||
|
||||
(defn- process-file!
|
||||
[cfg file-id]
|
||||
(if-let [file (get-file cfg file-id)]
|
||||
[cfg {:keys [file-id] :as props}]
|
||||
(if-let [file (get-file cfg props)]
|
||||
(let [file (->> file
|
||||
(bfc/decode-file cfg)
|
||||
(bfl/clean-file)
|
||||
(clean-media! cfg)
|
||||
(clean-fragments! cfg))
|
||||
@@ -267,7 +230,7 @@
|
||||
true)
|
||||
|
||||
(do
|
||||
(l/dbg :hint "skip" :file-id (str file-id))
|
||||
(l/dbg :hint "skip cleaning, criteria does not match" :file-id (str file-id))
|
||||
false)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
@@ -282,26 +245,23 @@
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
(fn [{:keys [props] :as task}]
|
||||
(let [min-age (ct/duration (or (:min-age props)
|
||||
(cf/get-deletion-delay)))
|
||||
file-id (get props :file-id)
|
||||
cfg (-> cfg
|
||||
(assoc ::db/rollback (:rollback? props))
|
||||
(assoc ::min-age min-age))]
|
||||
(try
|
||||
(-> cfg
|
||||
(assoc ::db/rollback (:rollback? props))
|
||||
(db/tx-run! (fn [{:keys [::db/conn] :as cfg}]
|
||||
(let [cfg (-> cfg
|
||||
(update ::sto/storage sto/configure conn)
|
||||
(assoc ::timestamp (ct/now)))
|
||||
processed? (process-file! cfg props)]
|
||||
|
||||
(try
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(let [cfg (update cfg ::sto/storage sto/configure conn)
|
||||
processed? (process-file! cfg file-id)]
|
||||
(when (and processed? (contains? cf/flags :tiered-file-data-storage))
|
||||
(wrk/submit! (-> cfg
|
||||
(assoc ::wrk/task :offload-file-data)
|
||||
(assoc ::wrk/params props)
|
||||
(assoc ::wrk/priority 10)
|
||||
(assoc ::wrk/delay 1000))))
|
||||
processed?)))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/err :hint "error on cleaning file"
|
||||
:file-id (str (:file-id props))
|
||||
:cause cause))))))
|
||||
(when (and processed? (contains? cf/flags :tiered-file-data-storage))
|
||||
(wrk/submit! (-> cfg
|
||||
(assoc ::wrk/task :offload-file-data)
|
||||
(assoc ::wrk/params props)
|
||||
(assoc ::wrk/priority 10)
|
||||
(assoc ::wrk/delay 1000))))
|
||||
processed?))))
|
||||
(catch Throwable cause
|
||||
(l/err :hint "error on cleaning file"
|
||||
:file-id (str (:file-id props))
|
||||
:cause cause)))))
|
||||
|
||||
@@ -17,29 +17,29 @@
|
||||
(def ^:private
|
||||
sql:get-candidates
|
||||
"SELECT f.id,
|
||||
f.revn,
|
||||
f.modified_at
|
||||
FROM file AS f
|
||||
WHERE f.has_media_trimmed IS false
|
||||
AND f.modified_at < now() - ?::interval
|
||||
AND f.deleted_at IS NULL
|
||||
ORDER BY f.modified_at DESC
|
||||
FOR UPDATE
|
||||
FOR UPDATE OF f
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- get-candidates
|
||||
[{:keys [::db/conn ::min-age] :as cfg}]
|
||||
(let [min-age (db/interval min-age)]
|
||||
(db/cursor conn [sql:get-candidates min-age] {:chunk-size 10})))
|
||||
(db/plan conn [sql:get-candidates min-age] {:fetch-size 10})))
|
||||
|
||||
(defn- schedule!
|
||||
[{:keys [::min-age] :as cfg}]
|
||||
(let [total (reduce (fn [total {:keys [id]}]
|
||||
(let [params {:file-id id :min-age min-age}]
|
||||
[cfg]
|
||||
(let [total (reduce (fn [total {:keys [id modified-at revn]}]
|
||||
(let [params {:file-id id :modified-at modified-at :revn revn}]
|
||||
(wrk/submit! (assoc cfg ::wrk/params params))
|
||||
(inc total)))
|
||||
0
|
||||
(get-candidates cfg))]
|
||||
|
||||
{:processed total}))
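A sketch of the scheduling fold above: submit one worker task per candidate row and count the submissions. submit! is a hypothetical stand-in for wrk/submit!.

(defn schedule-candidates
  [submit! candidates]
  (reduce (fn [total {:keys [id modified-at revn]}]
            ;; each candidate becomes one file-gc task with the revn
            ;; captured at schedule time
            (submit! {:file-id id :modified-at modified-at :revn revn})
            (inc total))
          0
          candidates))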
|
||||
|
||||
(defmethod ig/assert-key ::handler
|
||||
@@ -48,7 +48,7 @@
|
||||
|
||||
(defmethod ig/expand-key ::handler
|
||||
[k v]
|
||||
{k (assoc v ::min-age (cf/get-deletion-delay))})
|
||||
{k (assoc v ::min-age (cf/get-file-clean-delay))})
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
[app.common.logging :as l]
|
||||
[app.common.time :as ct]
|
||||
[app.db :as db]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.storage :as sto]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
@@ -27,14 +28,14 @@
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-profiles deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id photo-id]}]
|
||||
(l/trc :hint "permanently delete" :rel "profile" :id (str id))
|
||||
(l/trc :obj "profile" :id (str id))
|
||||
|
||||
;; Mark as deleted the storage object
|
||||
(some->> photo-id (sto/touch-object! storage))
|
||||
|
||||
(db/delete! conn :profile {:id id})
|
||||
|
||||
(inc total))
|
||||
(let [affected (-> (db/delete! conn :profile {:id id})
|
||||
(db/get-update-count))]
|
||||
(+ total affected)))
|
||||
0)))
|
||||
|
||||
(def ^:private sql:get-teams
|
||||
@@ -50,8 +51,7 @@
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-teams deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id photo-id deleted-at]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "team"
|
||||
(l/trc :obj "team"
|
||||
:id (str id)
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
|
||||
@@ -59,9 +59,9 @@
|
||||
(some->> photo-id (sto/touch-object! storage))
|
||||
|
||||
;; And finally, permanently delete the team.
|
||||
(db/delete! conn :team {:id id})
|
||||
|
||||
(inc total))
|
||||
(let [affected (-> (db/delete! conn :team {:id id})
|
||||
(db/get-update-count))]
|
||||
(+ total affected)))
|
||||
0)))
|
||||
|
||||
(def ^:private sql:get-fonts
|
||||
@@ -78,8 +78,7 @@
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-fonts deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id team-id deleted-at] :as font}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "team-font-variant"
|
||||
(l/trc :obj "font-variant"
|
||||
:id (str id)
|
||||
:team-id (str team-id)
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
@@ -90,10 +89,9 @@
|
||||
(some->> (:otf-file-id font) (sto/touch-object! storage))
|
||||
(some->> (:ttf-file-id font) (sto/touch-object! storage))
|
||||
|
||||
;; And finally, permanently delete the team font variant
|
||||
(db/delete! conn :team-font-variant {:id id})
|
||||
|
||||
(inc total))
|
||||
(let [affected (-> (db/delete! conn :team-font-variant {:id id})
|
||||
(db/get-update-count))]
|
||||
(+ total affected)))
|
||||
0)))
|
||||
|
||||
(def ^:private sql:get-projects
|
||||
@@ -110,45 +108,40 @@
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-projects deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id team-id deleted-at]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "project"
|
||||
(l/trc :obj "project"
|
||||
:id (str id)
|
||||
:team-id (str team-id)
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
|
||||
;; And finally, permanently delete the project.
|
||||
(db/delete! conn :project {:id id})
|
||||
|
||||
(inc total))
|
||||
(let [affected (-> (db/delete! conn :project {:id id})
|
||||
(db/get-update-count))]
|
||||
(+ total affected)))
|
||||
0)))
|
||||
|
||||
(def ^:private sql:get-files
|
||||
"SELECT id, deleted_at, project_id, data_backend, data_ref_id
|
||||
FROM file
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() + ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
"SELECT f.id,
|
||||
f.deleted_at,
|
||||
f.project_id
|
||||
FROM file AS f
|
||||
WHERE f.deleted_at IS NOT NULL
|
||||
AND f.deleted_at < now() + ?::interval
|
||||
ORDER BY f.deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- delete-files!
|
||||
[{:keys [::db/conn ::sto/storage ::deletion-threshold ::chunk-size] :as cfg}]
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-files deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id deleted-at project-id] :as file}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file"
|
||||
(l/trc :obj "file"
|
||||
:id (str id)
|
||||
:project-id (str project-id)
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
|
||||
(when (= "objects-storage" (:data-backend file))
|
||||
(sto/touch-object! storage (:data-ref-id file)))
|
||||
|
||||
;; And finally, permanently delete the file.
|
||||
(db/delete! conn :file {:id id})
|
||||
|
||||
(inc total))
|
||||
(let [affected (-> (db/delete! conn :file {:id id})
|
||||
(db/get-update-count))]
|
||||
(+ total affected)))
|
||||
0)))
|
||||
|
||||
(def ^:private sql:get-file-thumbnails
|
||||
@@ -165,8 +158,7 @@
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-thumbnails deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [file-id revn media-id deleted-at]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-thumbnail"
|
||||
(l/trc :obj "file-thumbnail"
|
||||
:file-id (str file-id)
|
||||
:revn revn
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
@@ -174,10 +166,9 @@
|
||||
;; Mark as deleted the storage object
|
||||
(some->> media-id (sto/touch-object! storage))
|
||||
|
||||
;; And finally, permanently delete the object
|
||||
(db/delete! conn :file-thumbnail {:file-id file-id :revn revn})
|
||||
|
||||
(inc total))
|
||||
(let [affected (-> (db/delete! conn :file-thumbnail {:file-id file-id :revn revn})
|
||||
(db/get-update-count))]
|
||||
(+ total affected)))
|
||||
0)))
|
||||
|
||||
(def ^:private sql:get-file-object-thumbnails
|
||||
@@ -194,8 +185,7 @@
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-object-thumbnails deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [file-id object-id media-id deleted-at]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-tagged-object-thumbnail"
|
||||
(l/trc :obj "file-object-thumbnail"
|
||||
:file-id (str file-id)
|
||||
:object-id object-id
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
@@ -203,36 +193,10 @@
|
||||
;; Mark as deleted the storage object
|
||||
(some->> media-id (sto/touch-object! storage))
|
||||
|
||||
;; And finally, permanently delete the object
|
||||
(db/delete! conn :file-tagged-object-thumbnail {:file-id file-id :object-id object-id})
|
||||
|
||||
(inc total))
|
||||
0)))
|
||||
|
||||
(def ^:private sql:get-file-data-fragments
|
||||
"SELECT file_id, id, deleted_at, data_ref_id
|
||||
FROM file_data_fragment
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() + ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- delete-file-data-fragments!
|
||||
[{:keys [::db/conn ::sto/storage ::deletion-threshold ::chunk-size] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-data-fragments deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [file-id id deleted-at data-ref-id]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-data-fragment"
|
||||
:id (str id)
|
||||
:file-id (str file-id)
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
|
||||
(some->> data-ref-id (sto/touch-object! storage))
|
||||
(db/delete! conn :file-data-fragment {:file-id file-id :id id})
|
||||
|
||||
(inc total))
|
||||
(let [affected (-> (db/delete! conn :file-tagged-object-thumbnail
|
||||
{:file-id file-id :object-id object-id})
|
||||
(db/get-update-count))]
|
||||
(+ total affected)))
|
||||
0)))
|
||||
|
||||
(def ^:private sql:get-file-media-objects
|
||||
@@ -249,8 +213,7 @@
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-media-objects deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id file-id deleted-at] :as fmo}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-media-object"
|
||||
(l/trc :obj "file-media-object"
|
||||
:id (str id)
|
||||
:file-id (str file-id)
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
@@ -259,13 +222,48 @@
|
||||
(some->> (:media-id fmo) (sto/touch-object! storage))
|
||||
(some->> (:thumbnail-id fmo) (sto/touch-object! storage))
|
||||
|
||||
(db/delete! conn :file-media-object {:id id})
|
||||
(let [affected (-> (db/delete! conn :file-media-object {:id id})
|
||||
(db/get-update-count))]
|
||||
(+ total affected)))
|
||||
0)))
|
||||
|
||||
(inc total))
|
||||
(def ^:private sql:get-file-data
|
||||
"SELECT file_id, id, type, deleted_at, metadata, backend
|
||||
FROM file_data
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() + ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- delete-file-data!
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
|
||||
|
||||
(->> (db/plan conn [sql:get-file-data deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [file-id id type deleted-at metadata backend]}]
|
||||
|
||||
(some->> metadata
|
||||
(fdata/decode-metadata)
|
||||
(fdata/process-metadata cfg))
|
||||
|
||||
(l/trc :obj "file-data"
|
||||
:id (str id)
|
||||
:file-id (str file-id)
|
||||
:type type
|
||||
:backend backend
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
|
||||
(let [affected (-> (db/delete! conn :file-data
|
||||
{:file-id file-id
|
||||
:id id
|
||||
:type type})
|
||||
(db/get-update-count))]
|
||||
(+ total affected)))
|
||||
0)))
|
||||
|
||||
(def ^:private sql:get-file-change
|
||||
"SELECT id, file_id, deleted_at, data_backend, data_ref_id
|
||||
"SELECT id, file_id, deleted_at
|
||||
FROM file_change
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() + ?::interval
|
||||
@@ -275,29 +273,25 @@
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- delete-file-changes!
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-change deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id file-id deleted-at] :as xlog}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-change"
|
||||
(l/trc :obj "file-change"
|
||||
:id (str id)
|
||||
:file-id (str file-id)
|
||||
:deleted-at (ct/format-inst deleted-at))
|
||||
|
||||
(when (= "objects-storage" (:data-backend xlog))
|
||||
(sto/touch-object! storage (:data-ref-id xlog)))
|
||||
|
||||
(db/delete! conn :file-change {:id id})
|
||||
|
||||
(inc total))
|
||||
(let [affected (-> (db/delete! conn :file-change {:id id})
|
||||
(db/get-update-count))]
|
||||
(+ total affected)))
|
||||
0)))
|
||||
|
||||
(def ^:private deletion-proc-vars
|
||||
[#'delete-profiles!
|
||||
#'delete-file-media-objects!
|
||||
#'delete-file-data-fragments!
|
||||
#'delete-file-object-thumbnails!
|
||||
#'delete-file-thumbnails!
|
||||
#'delete-file-data!
|
||||
#'delete-file-changes!
|
||||
#'delete-files!
|
||||
#'delete-projects!
|
||||
@@ -309,9 +303,10 @@
|
||||
until 0 results are returned"
|
||||
[cfg proc-fn]
|
||||
(loop [total 0]
|
||||
(let [result (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(db/exec-one! conn ["SET LOCAL rules.deletion_protection TO off"])
|
||||
(proc-fn cfg)))]
|
||||
(let [result (db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(db/exec-one! conn ["SET LOCAL rules.deletion_protection TO off"])
|
||||
(proc-fn cfg)))]
|
||||
(if (pos? result)
|
||||
(recur (long (+ total result)))
|
||||
total))))
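A minimal sketch of the drain loop used by execute-proc! above: keep invoking a step function until it reports zero processed rows, summing the totals. run-step! stands in for running one proc-fn inside its own transaction.

(defn drain!
  [run-step!]
  (loop [total 0]
    (let [n (run-step!)]
      (if (pos? n)
        (recur (+ total n))
        total))))

;; Example with a fake step that processes 3, then 2, then 0 rows:
;; (drain! (let [batches (atom [3 2 0])]
;;           (fn []
;;             (let [[n & more] @batches]
;;               (reset! batches more)
;;               (or n 0)))))
;; => 5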
|
||||
@@ -336,6 +331,4 @@
|
||||
(let [result (execute-proc! cfg proc-fn)]
|
||||
(recur (rest procs)
|
||||
(long (+ total result))))
|
||||
(do
|
||||
(l/inf :hint "task finished" :deleted total)
|
||||
{:processed total}))))))
|
||||
{:processed total})))))
|
||||
|
||||
@@ -8,101 +8,25 @@
|
||||
"A maintenance task responsible of moving file data from hot
|
||||
storage (the database row) to cold storage (fs or s3)."
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.storage :as sto]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(defn- offload-file-data!
|
||||
[{:keys [::db/conn ::sto/storage ::file-id] :as cfg}]
|
||||
(let [file (db/get conn :file {:id file-id}
|
||||
{::sql/for-update true})]
|
||||
(when (nil? (:data file))
|
||||
(ex/raise :hint "file already offloaded"
|
||||
:type :internal
|
||||
:code :file-already-offloaded
|
||||
:file-id file-id))
|
||||
(def ^:private sql:get-file-data
|
||||
"SELECT fd.*
|
||||
FROM file_data AS fd
|
||||
WHERE fd.file_id = ?
|
||||
AND fd.backend = 'db'
|
||||
AND fd.deleted_at IS NULL")
|
||||
|
||||
(let [data (sto/content (:data file))
|
||||
sobj (sto/put-object! storage
|
||||
{::sto/content data
|
||||
::sto/touch true
|
||||
:bucket "file-data"
|
||||
:content-type "application/octet-stream"
|
||||
:file-id file-id})]
|
||||
|
||||
(l/trc :hint "offload file data"
|
||||
:file-id (str file-id)
|
||||
:storage-id (str (:id sobj)))
|
||||
|
||||
(db/update! conn :file
|
||||
{:data-backend "objects-storage"
|
||||
:data-ref-id (:id sobj)
|
||||
:data nil}
|
||||
{:id file-id}
|
||||
{::db/return-keys false}))))
|
||||
|
||||
(defn- offload-file-data-fragments!
|
||||
[{:keys [::db/conn ::sto/storage ::file-id] :as cfg}]
|
||||
(doseq [fragment (db/query conn :file-data-fragment
|
||||
{:file-id file-id
|
||||
:deleted-at nil
|
||||
:data-backend nil}
|
||||
{::db/for-update true})]
|
||||
(let [data (sto/content (:data fragment))
|
||||
sobj (sto/put-object! storage
|
||||
{::sto/content data
|
||||
::sto/touch true
|
||||
:bucket "file-data-fragment"
|
||||
:content-type "application/octet-stream"
|
||||
:file-id file-id
|
||||
:file-fragment-id (:id fragment)})]
|
||||
|
||||
(l/trc :hint "offload file data fragment"
|
||||
:file-id (str file-id)
|
||||
:file-fragment-id (str (:id fragment))
|
||||
:storage-id (str (:id sobj)))
|
||||
|
||||
(db/update! conn :file-data-fragment
|
||||
{:data-backend "objects-storage"
|
||||
:data-ref-id (:id sobj)
|
||||
:data nil}
|
||||
{:id (:id fragment)}
|
||||
{::db/return-keys false}))))
|
||||
|
||||
(def sql:get-snapshots
|
||||
"SELECT fc.*
|
||||
FROM file_change AS fc
|
||||
WHERE fc.file_id = ?
|
||||
AND fc.label IS NOT NULL
|
||||
AND fc.data IS NOT NULL
|
||||
AND fc.data_backend IS NULL")
|
||||
|
||||
(defn- offload-file-snapshots!
|
||||
[{:keys [::db/conn ::sto/storage ::file-id] :as cfg}]
|
||||
(doseq [snapshot (db/exec! conn [sql:get-snapshots file-id])]
|
||||
(let [data (sto/content (:data snapshot))
|
||||
sobj (sto/put-object! storage
|
||||
{::sto/content data
|
||||
::sto/touch true
|
||||
:bucket "file-change"
|
||||
:content-type "application/octet-stream"
|
||||
:file-id file-id
|
||||
:file-change-id (:id snapshot)})]
|
||||
|
||||
(l/trc :hint "offload file change"
|
||||
:file-id (str file-id)
|
||||
:file-change-id (str (:id snapshot))
|
||||
:storage-id (str (:id sobj)))
|
||||
|
||||
(db/update! conn :file-change
|
||||
{:data-backend "objects-storage"
|
||||
:data-ref-id (:id sobj)
|
||||
:data nil}
|
||||
{:id (:id snapshot)}
|
||||
{::db/return-keys false}))))
|
||||
(defn- offload-file-data
|
||||
[cfg {:keys [id file-id type] :as fdata}]
|
||||
(fdata/upsert! cfg (assoc fdata :backend "storage"))
|
||||
(l/trc :file-id (str file-id)
|
||||
:id (str id)
|
||||
:type type))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HANDLER
|
||||
@@ -116,10 +40,9 @@
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
(fn [{:keys [props] :as task}]
|
||||
(-> cfg
|
||||
(assoc ::db/rollback (:rollback? props))
|
||||
(assoc ::file-id (:file-id props))
|
||||
(db/tx-run! (fn [cfg]
|
||||
(offload-file-data! cfg)
|
||||
(offload-file-data-fragments! cfg)
|
||||
(offload-file-snapshots! cfg))))))
|
||||
(let [file-id (:file-id props)]
|
||||
(-> cfg
|
||||
(assoc ::db/rollback (:rollback? props))
|
||||
(db/tx-run! (fn [{:keys [::db/conn] :as cfg}]
|
||||
(run! (partial offload-file-data cfg)
|
||||
(db/plan conn [sql:get-file-data file-id]))))))))
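A hedged sketch of the hot-to-cold offload idea the task docstring describes: write the row's payload to the external store and keep only a reference on the row. put-object! and the row keys here are illustrative stand-ins, not Penpot's storage API.

(defn offload-row
  [put-object! {:keys [data] :as row}]
  (let [ref-id (put-object! data)]        ; id of the stored cold copy
    (-> row
        (assoc :backend "storage"
               :storage-ref-id ref-id)
        (dissoc :data))))

;; (offload-row (fn [_] "obj-123") {:id 1 :data (byte-array 10)})
;; => {:id 1, :backend "storage", :storage-ref-id "obj-123"}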
|
||||
|
||||
@@ -62,7 +62,8 @@
|
||||
(def default
|
||||
{:database-uri "postgresql://postgres/penpot_test"
|
||||
:redis-uri "redis://redis/1"
|
||||
:auto-file-snapshot-every 1})
|
||||
:auto-file-snapshot-every 1
|
||||
:file-data-backend "db"})
|
||||
|
||||
(def config
|
||||
(cf/read-config :prefix "penpot-test"
|
||||
@@ -74,9 +75,6 @@
|
||||
:enable-smtp
|
||||
:enable-quotes
|
||||
:enable-rpc-climit
|
||||
:enable-feature-fdata-pointer-map
|
||||
:enable-feature-fdata-objets-map
|
||||
:enable-feature-components-v2
|
||||
:enable-auto-file-snapshot
|
||||
:disable-file-validation])
|
||||
|
||||
|
||||
@@ -144,7 +144,6 @@
|
||||
(t/is (not= (:modified-at comment) (:modified-at comment')))
|
||||
(t/is (= (:content data) (:content comment'))))))
|
||||
|
||||
|
||||
(t/testing "retrieve threads"
|
||||
(let [data {::th/type :get-comment-threads
|
||||
::rpc/profile-id (:id profile-1)
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
true
|
||||
(catch Throwable _cause
|
||||
false)))
|
||||
{:num 30}))
|
||||
{:num 15}))
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -8,10 +8,10 @@
|
||||
(:require
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.thumbnails :as thc]
|
||||
[app.common.types.shape :as cts]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.http :as http]
|
||||
@@ -87,10 +87,7 @@
|
||||
|
||||
;; (th/print-result! out)
|
||||
(t/is (nil? (:error out)))
|
||||
|
||||
(let [result (:result out)]
|
||||
(t/is (= "label1" (:label result)))
|
||||
(t/is (uuid? (:id result)))))
|
||||
(t/is (nil? (:result out))))
|
||||
|
||||
(let [[row1 row2 :as rows]
|
||||
(th/db-query :file-change
|
||||
@@ -116,7 +113,7 @@
|
||||
|
||||
;; (th/print-result! out)
|
||||
(t/is (nil? (:error out)))
|
||||
(t/is (nil? (:result out)))))
|
||||
(t/is (true? (:result out)))))
|
||||
|
||||
(t/testing "delete system created snapshot"
|
||||
(let [params {::th/type :delete-file-snapshot
|
||||
@@ -130,7 +127,15 @@
|
||||
data (ex-data error)]
|
||||
(t/is (th/ex-info? error))
|
||||
(t/is (= (:type data) :validation))
|
||||
(t/is (= (:code data) :system-snapshots-cant-be-deleted)))))))))
|
||||
(t/is (= (:code data) :system-snapshots-cant-be-deleted)))))
|
||||
|
||||
;; this will run the pending task triggered by deleting the user snapshot
|
||||
(th/run-pending-tasks!)
|
||||
|
||||
;; run the objects-gc task, which should
|
||||
(let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
|
||||
;; delete 2 snapshots and 2 file data entries
|
||||
(t/is (= 4 (:processed res))))))))
|
||||
|
||||
(t/deftest snapshots-locking
|
||||
(let [profile-1 (th/create-profile* 1 {:is-active true})
|
||||
@@ -172,7 +177,7 @@
|
||||
out (th/command! params)]
|
||||
;; (th/print-result! out)
|
||||
(t/is (nil? (:error out)))
|
||||
(t/is (nil? (:result out)))
|
||||
(t/is (true? (:result out)))
|
||||
|
||||
(let [snapshot (th/db-get :file-change {:id (:id snapshot)})]
|
||||
(t/is (= (:id profile-1) (:locked-by snapshot))))))
|
||||
@@ -199,7 +204,7 @@
|
||||
out (th/command! params)]
|
||||
;; (th/print-result! out)
|
||||
(t/is (nil? (:error out)))
|
||||
(t/is (nil? (:result out)))
|
||||
(t/is (true? (:result out)))
|
||||
|
||||
(let [snapshot (th/db-get :file-change {:id (:id snapshot)})]
|
||||
(t/is (= nil (:locked-by snapshot))))))
|
||||
@@ -213,4 +218,4 @@
|
||||
|
||||
;; (th/print-result! out)
|
||||
(t/is (nil? (:error out)))
|
||||
(t/is (nil? (:result out)))))))
|
||||
(t/is (true? (:result out)))))))
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.http :as http]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
@@ -185,10 +186,10 @@
|
||||
shape-id (uuid/random)]
|
||||
|
||||
;; Preventive file-gc
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file) :revn (:revn file)})))
|
||||
|
||||
;; Check the number of fragments before adding the page
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)})]
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file) :type "fragment"})]
|
||||
(t/is (= 2 (count rows))))
|
||||
|
||||
;; Add page
|
||||
@@ -203,22 +204,23 @@
|
||||
:id page-id}])
|
||||
|
||||
;; Check the number of fragments before adding the page
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)})]
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file) :type "fragment"})]
|
||||
(t/is (= 3 (count rows))))
|
||||
|
||||
;; The file-gc should mark for remove unused fragments
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
;; Check the number of fragments
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)})]
|
||||
(t/is (= 5 (count rows))))
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file) :type "fragment"})]
|
||||
(t/is (= 5 (count rows)))
|
||||
(t/is (= 3 (count (filterv :deleted-at rows)))))
|
||||
|
||||
;; The objects-gc should remove unused fragments
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 3 (:processed res))))
|
||||
|
||||
;; Check the number of fragments
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)})]
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file) :type "fragment"})]
|
||||
(t/is (= 2 (count rows))))
|
||||
|
||||
;; Add shape to page that should add a new fragment
|
||||
@@ -242,44 +244,47 @@
|
||||
:type :rect})}])
|
||||
|
||||
;; Check the number of fragments
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)})]
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file) :type "fragment"})]
|
||||
(t/is (= 3 (count rows))))
|
||||
|
||||
;; The file-gc should mark for remove unused fragments
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
;; The objects-gc should remove unused fragments
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 3 (:processed res))))
|
||||
|
||||
;; Check the number of fragments;
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)
|
||||
:deleted-at nil})]
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file)
|
||||
:type "fragment"
|
||||
:deleted-at nil})]
|
||||
(t/is (= 2 (count rows))))
|
||||
|
||||
;; Lets proceed to delete all changes
|
||||
(th/db-delete! :file-change {:file-id (:id file)})
|
||||
(th/db-delete! :file-data {:file-id (:id file) :type "snapshot"})
|
||||
|
||||
(th/db-update! :file
|
||||
{:has-media-trimmed false}
|
||||
{:id (:id file)})
|
||||
|
||||
;; The file-gc should remove fragments related to the changes and
|
||||
;; snapshots previously deleted.
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
;; Check the number of fragments;
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)})]
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file) :type "fragment"})]
|
||||
;; (pp/pprint rows)
|
||||
(t/is (= 4 (count rows)))
|
||||
(t/is (= 2 (count (remove (comp some? :deleted-at) rows)))))
|
||||
(t/is (= 2 (count (remove :deleted-at rows)))))
|
||||
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 2 (:processed res))))
|
||||
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)})]
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file) :type "fragment"})]
|
||||
(t/is (= 2 (count rows)))))))
|
||||
|
||||
(t/deftest file-gc-task-with-thumbnails
|
||||
(t/deftest file-gc-with-thumbnails
|
||||
(letfn [(add-file-media-object [& {:keys [profile-id file-id]}]
|
||||
(let [mfile {:filename "sample.jpg"
|
||||
:path (th/tempfile "backend_tests/test_files/sample.jpg")
|
||||
@@ -347,7 +352,7 @@
|
||||
:fills [{:fill-opacity 1
|
||||
:fill-image {:id (:id fmo1) :width 100 :height 100 :mtype "image/jpeg"}}]})}])
|
||||
|
||||
;; Check that reference storage objects on filemediaobjects
|
||||
;; Check that the referenced storage objects on file_media_objects
|
||||
;; are the same because of the deduplication feature.
|
||||
(t/is (= (:media-id fmo1) (:media-id fmo2)))
|
||||
(t/is (= (:thumbnail-id fmo1) (:thumbnail-id fmo2)))
|
||||
@@ -360,32 +365,33 @@
|
||||
(t/is (= 2 (:freeze res)))
|
||||
(t/is (= 0 (:delete res))))
|
||||
|
||||
;; run the file-gc task immediately without forced min-age
|
||||
(t/is (false? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
;; run the task again
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
;; retrieve file and check trimmed attribute
|
||||
(let [row (th/db-get :file {:id (:id file)})]
|
||||
(t/is (true? (:has-media-trimmed row))))
|
||||
|
||||
;; check file media objects
|
||||
(let [rows (th/db-query :file-media-object {:file-id (:id file)})]
|
||||
(t/is (= 2 (count rows)))
|
||||
(t/is (= 1 (count (remove (comp some? :deleted-at) rows)))))
|
||||
(let [[row1 row2 :as rows]
|
||||
(th/db-query :file-media-object
|
||||
{:file-id (:id file)}
|
||||
{:order-by [:created-at]})]
|
||||
|
||||
(t/is (= (:id fmo1) (:id row1)))
|
||||
(t/is (= (:id fmo2) (:id row2)))
|
||||
(t/is (ct/inst? (:deleted-at row2))))
|
||||
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
;; delete 2 fragments and 1 media object
|
||||
(t/is (= 3 (:processed res))))
|
||||
|
||||
;; check file media objects
|
||||
(let [rows (th/db-query :file-media-object {:file-id (:id file)})]
|
||||
(t/is (= 1 (count rows)))
|
||||
(t/is (= 1 (count (remove (comp some? :deleted-at) rows)))))
|
||||
(t/is (= 1 (count (remove :deleted-at rows)))))
|
||||
|
||||
;; The underlying storage objects are still available.
|
||||
(t/is (some? (sto/get-object storage (:media-id fmo2))))
|
||||
(t/is (some? (sto/get-object storage (:thumbnail-id fmo2))))
|
||||
(t/is (some? (sto/get-object storage (:media-id fmo1))))
|
||||
(t/is (some? (sto/get-object storage (:thumbnail-id fmo1))))
|
||||
|
||||
@@ -402,34 +408,40 @@
|
||||
;; Now, we have deleted the usage of pointers to the
|
||||
;; file-media-objects; if we run file-gc again, they should be marked
|
||||
;; as deleted.
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
;; This only clears fragments; the file media objects are still referenced because
|
||||
;; snapshots are preserved
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 2 (:processed res))))
|
||||
|
||||
;; Mark all snapshots to be a non-snapshot file change
|
||||
(th/db-exec! ["update file_change set data = null where file_id = ?" (:id file)])
|
||||
;; Delete all snapshots
|
||||
(th/db-exec! ["update file_data set deleted_at = now() where file_id = ? and type = 'snapshot'" (:id file)])
|
||||
(th/db-exec! ["update file_change set deleted_at = now() where file_id = ? and label is not null" (:id file)])
|
||||
(th/db-exec! ["update file set has_media_trimmed = false where id = ?" (:id file)])
|
||||
|
||||
(let [res (th/run-task! :objects-gc {:deletion-threshold 0})]
|
||||
;; this will remove the file change and file data entries for two snapshots
|
||||
(t/is (= 4 (:processed res))))
|
||||
|
||||
;; Rerun the file-gc and objects-gc
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
(let [res (th/run-task! :objects-gc {:deletion-threshold 0})]
|
||||
;; this will remove the file media objects marked as deleted
|
||||
;; on prev file-gc
|
||||
(t/is (= 2 (:processed res))))
|
||||
|
||||
;; Now that file-gc has deleted the file-media-object usage,
|
||||
;; let's execute the touched-gc task; we should see that two of
|
||||
;; them are marked to be deleted.
|
||||
;; them are marked to be deleted
|
||||
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
|
||||
(t/is (= 0 (:freeze res)))
|
||||
(t/is (= 2 (:delete res))))
|
||||
|
||||
;; Finally, check that the objects that are marked as
|
||||
;; deleted can no longer be retrieved using the standard storage
|
||||
;; public api.
|
||||
(t/is (nil? (sto/get-object storage (:media-id fmo2))))
|
||||
(t/is (nil? (sto/get-object storage (:thumbnail-id fmo2))))
|
||||
;; public api
|
||||
(t/is (nil? (sto/get-object storage (:media-id fmo1))))
|
||||
(t/is (nil? (sto/get-object storage (:thumbnail-id fmo1)))))))
|
||||
|
||||
@@ -470,8 +482,9 @@
|
||||
page-id (first (get-in file [:data :pages]))]
|
||||
|
||||
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)
|
||||
:deleted-at nil})]
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file)
|
||||
:type "fragment"
|
||||
:deleted-at nil})]
|
||||
(t/is (= (count rows) 1)))
|
||||
|
||||
;; Update file inserting a new image object
|
||||
@@ -536,17 +549,15 @@
|
||||
:strokes [{:stroke-opacity 1 :stroke-image {:id (:id fmo5) :width 100 :height 100 :mtype "image/jpeg"}}]})}])
|
||||
|
||||
|
||||
;; run the file-gc task immediately without forced min-age
|
||||
(t/is (false? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
;; run the task again
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 2 (:processed res))))
|
||||
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)
|
||||
:deleted-at nil})]
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file)
|
||||
:type "fragment"
|
||||
:deleted-at nil})]
|
||||
(t/is (= (count rows) 1)))
|
||||
|
||||
;; retrieve file and check trimmed attribute
|
||||
@@ -583,7 +594,7 @@
|
||||
;; Now, we have deleted the usage of pointers to the
|
||||
;; file-media-objects; if we run file-gc again, they should be marked
|
||||
;; as deleted.
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
;; This only removes unused fragments, file media are still
|
||||
;; referenced on snapshots.
|
||||
@@ -592,16 +603,18 @@
|
||||
|
||||
;; Mark all snapshots to be a non-snapshot file change
|
||||
(th/db-exec! ["update file set has_media_trimmed = false where id = ?" (:id file)])
|
||||
(th/db-exec! ["update file_change set data = null where file_id = ?" (:id file)])
|
||||
(th/db-delete! :file-data {:file-id (:id file)
|
||||
:type "snapshot"})
|
||||
|
||||
;; Rerun file-gc and objects-gc task for the same file once all snapshots are
|
||||
;; "expired/deleted"
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 6 (:processed res))))
|
||||
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)
|
||||
:deleted-at nil})]
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file)
|
||||
:type "fragment"
|
||||
:deleted-at nil})]
|
||||
(t/is (= (count rows) 1)))
|
||||
|
||||
;; Now that file-gc has deleted the file-media-object usage,
|
||||
@@ -620,7 +633,7 @@
|
||||
(t/is (nil? (sto/get-object storage (:media-id fmo2))))
|
||||
(t/is (nil? (sto/get-object storage (:media-id fmo1)))))))
|
||||
|
||||
(t/deftest file-gc-task-with-object-thumbnails
|
||||
(t/deftest file-gc-with-object-thumbnails
|
||||
(letfn [(insert-file-object-thumbnail! [& {:keys [profile-id file-id page-id frame-id]}]
|
||||
(let [object-id (thc/fmt-object-id file-id page-id frame-id "frame")
|
||||
mfile {:filename "sample.jpg"
|
||||
@@ -704,11 +717,7 @@
|
||||
(t/is (= 1 (:freeze res)))
|
||||
(t/is (= 0 (:delete res))))
|
||||
|
||||
;; run the file-gc task immediately without forced min-age
|
||||
(t/is (false? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
;; run the task again
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
;; retrieve file and check trimmed attribute
|
||||
(let [row (th/db-get :file {:id (:id file)})]
|
||||
@@ -738,7 +747,7 @@
|
||||
:page-id page-id
|
||||
:id frame-id-2}])
|
||||
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
(let [rows (th/db-query :file-tagged-object-thumbnail {:file-id file-id})]
|
||||
(t/is (= 2 (count rows)))
|
||||
@@ -772,7 +781,7 @@
|
||||
:page-id page-id
|
||||
:id frame-id-1}])
|
||||
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
(let [rows (th/db-query :file-tagged-object-thumbnail {:file-id file-id})]
|
||||
(t/is (= 1 (count rows)))
|
||||
@@ -933,6 +942,8 @@
|
||||
out (th/command! params)]
|
||||
(t/is (nil? (:error out))))
|
||||
|
||||
(th/run-pending-tasks!)
|
||||
|
||||
;; query the list of files after soft deletion
|
||||
(let [data {::th/type :get-project-files
|
||||
::rpc/profile-id (:id profile1)
|
||||
@@ -943,11 +954,6 @@
|
||||
(let [result (:result out)]
|
||||
(t/is (= 0 (count result)))))
|
||||
|
||||
;; run permanent deletion (should be noop)
|
||||
(let [result (th/run-task! :objects-gc {})]
|
||||
(t/is (= 0 (:processed result))))
|
||||
|
||||
;; query the list of file libraries of a file after hard deletion
|
||||
(let [data {::th/type :get-file-libraries
|
||||
::rpc/profile-id (:id profile1)
|
||||
:file-id (:id file)}
|
||||
@@ -957,9 +963,13 @@
|
||||
(let [result (:result out)]
|
||||
(t/is (= 0 (count result)))))
|
||||
|
||||
;; run permanent deletion (should be noop)
|
||||
(let [result (th/run-task! :objects-gc {})]
|
||||
(t/is (= 0 (:processed result))))
|
||||
|
||||
;; run permanent deletion
|
||||
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
|
||||
(t/is (= 1 (:processed result))))
|
||||
(t/is (= 3 (:processed result))))
|
||||
|
||||
;; query the list of file libraries of a file after hard deletion
|
||||
(let [data {::th/type :get-file-libraries
|
||||
@@ -972,7 +982,6 @@
|
||||
(t/is (th/ex-info? error))
|
||||
(t/is (= (:type error-data) :not-found))))))
|
||||
|
||||
|
||||
(t/deftest object-thumbnails-ops
|
||||
(let [prof (th/create-profile* 1 {:is-active true})
|
||||
file (th/create-file* 1 {:profile-id (:id prof)
|
||||
@@ -1282,17 +1291,19 @@
|
||||
:is-shared false})
|
||||
|
||||
page-id (uuid/random)
|
||||
shape-id (uuid/random)]
|
||||
shape-id (uuid/random)
|
||||
sobject (volatile! nil)]
|
||||
|
||||
;; Preventive file-gc
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
;; Preventive file-gc
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
;; Preventive objects-gc
|
||||
;; Preventive objects-gc
|
||||
(let [result (th/run-task! :objects-gc {})]
|
||||
;; deletes the fragment created by file-gc
|
||||
(t/is (= 1 (:processed result))))
|
||||
|
||||
;; Check the number of fragments before adding the page
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)})]
|
||||
;; Check the number of fragments before adding the page
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file) :type "fragment"})]
|
||||
(t/is (= 1 (count rows)))
|
||||
(t/is (every? #(some? (:data %)) rows)))
|
||||
|
||||
@@ -1301,35 +1312,42 @@
|
||||
{:has-media-trimmed false}
|
||||
{:id (:id file)})
|
||||
|
||||
;; Run FileGC again, with tiered storage activated
|
||||
;; Run FileGC again, with tiered storage activated
|
||||
(with-redefs [app.config/flags (conj app.config/flags :tiered-file-data-storage)]
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)}))))
|
||||
|
||||
;; The FileGC task will schedule an inner task
|
||||
(th/run-pending-tasks!))
|
||||
;; The FileGC task will schedule an inner task
|
||||
(th/run-pending-tasks!)
|
||||
|
||||
;; Clean objects after file-gc
|
||||
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
|
||||
(t/is (= 2 (:freeze res)))
|
||||
(t/is (= 0 (:delete res))))
|
||||
|
||||
;; Clean objects after file-gc
|
||||
(let [result (th/run-task! :objects-gc {})]
|
||||
(t/is (= 1 (:processed result))))
|
||||
|
||||
;; Check the number of fragments before adding the page
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)})]
|
||||
;; Check the number of fragments before adding the page
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file) :type "fragment"})]
|
||||
;; (pp/pprint rows)
|
||||
(t/is (= 1 (count rows)))
|
||||
(t/is (every? #(nil? (:data %)) rows))
|
||||
(t/is (every? #(uuid? (:data-ref-id %)) rows))
|
||||
(t/is (every? #(= "objects-storage" (:data-backend %)) rows)))
|
||||
(t/is (every? #(= "storage" (:backend %)) rows)))
|
||||
|
||||
(let [file (th/db-get :file {:id (:id file)})
|
||||
(let [file (-> (th/db-get :file-data {:id (:id file) :type "main"})
|
||||
(update :metadata fdata/decode-metadata))
|
||||
storage (sto/resolve th/*system*)]
|
||||
(t/is (= "objects-storage" (:data-backend file)))
|
||||
;; (pp/pprint file)
|
||||
(t/is (= "storage" (:backend file)))
|
||||
(t/is (nil? (:data file)))
|
||||
(t/is (uuid? (:data-ref-id file)))
|
||||
|
||||
(let [sobj (sto/get-object storage (:data-ref-id file))]
|
||||
(let [sobj (sto/get-object storage (-> file :metadata :storage-ref-id))]
|
||||
(vreset! sobject sobj)
|
||||
;; (pp/pprint (meta sobj))
|
||||
(t/is (= "file-data" (:bucket (meta sobj))))
|
||||
(t/is (= (:id file) (:file-id (meta sobj))))))
|
||||
|
||||
;; Add shape to page that should load from cold storage again into the hot storage (db)
|
||||
;; Add shape to page that should load from cold storage again into the hot storage (db)
|
||||
(update-file!
|
||||
:file-id (:id file)
|
||||
:profile-id (:id profile)
|
||||
@@ -1340,36 +1358,68 @@
|
||||
:name "test"
|
||||
:id page-id}])
|
||||
|
||||
;; Check the number of fragments
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)})]
|
||||
(t/is (= 2 (count rows))))
|
||||
|
||||
;; Check the number of fragments
|
||||
;; Check the number of fragments
|
||||
(let [[row1 row2 :as rows]
|
||||
(th/db-query :file-data-fragment
|
||||
(th/db-query :file-data
|
||||
{:file-id (:id file)
|
||||
:deleted-at nil}
|
||||
:type "fragment"}
|
||||
{:order-by [:created-at]})]
|
||||
;; (pp/pprint rows)
|
||||
(t/is (= 2 (count rows)))
|
||||
(t/is (nil? (:data row1)))
|
||||
(t/is (= "objects-storage" (:data-backend row1)))
|
||||
(t/is (= "storage" (:backend row1)))
|
||||
(t/is (bytes? (:data row2)))
|
||||
(t/is (nil? (:data-backend row2))))
|
||||
(t/is (= "db" (:backend row2))))
|
||||
|
||||
;; The file-gc should mark for remove unused fragments
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
|
||||
;; The objects-gc should remove unused fragments
|
||||
;; The file-gc should mark for remove unused fragments
|
||||
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))
|
||||
|
||||
;; The file-gc task, recreates all fragments, so after it we have
|
||||
;; now the double of fragments, and the old ones are marked as
|
||||
;; deleted, and the new ones are on DB
|
||||
(let [[row1 row2 row3 row4 :as rows]
|
||||
(th/db-query :file-data
|
||||
{:file-id (:id file)
|
||||
:type "fragment"}
|
||||
{:order-by [:created-at]})]
|
||||
;; (pp/pprint rows)
|
||||
(t/is (= 4 (count rows)))
|
||||
|
||||
(t/is (nil? (:data row1)))
|
||||
(t/is (ct/inst? (:deleted-at row1)))
|
||||
(t/is (= "storage" (:backend row1)))
|
||||
|
||||
(t/is (bytes? (:data row2)))
|
||||
(t/is (= "db" (:backend row2)))
|
||||
(t/is (ct/inst? (:deleted-at row2)))
|
||||
|
||||
(t/is (bytes? (:data row3)))
|
||||
(t/is (= "db" (:backend row3)))
|
||||
(t/is (nil? (:deleted-at row3)))
|
||||
|
||||
(t/is (bytes? (:data row4)))
|
||||
(t/is (= "db" (:backend row4)))
|
||||
(t/is (nil? (:deleted-at row4))))
|
||||
|
||||
;; The objects-gc should remove the marked to delete fragments
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 2 (:processed res))))
|
||||
|
||||
;; Check the number of fragments before adding the page
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)})]
|
||||
(let [rows (th/db-query :file-data {:file-id (:id file) :type "fragment"})]
|
||||
(t/is (= 2 (count rows)))
|
||||
(t/is (every? #(bytes? (:data %)) rows))
|
||||
(t/is (every? #(nil? (:data-ref-id %)) rows))
|
||||
(t/is (every? #(nil? (:data-backend %)) rows)))))
|
||||
(t/is (every? #(= "db" (:backend %)) rows)))
|
||||
|
||||
;; we ensure that once object-gc is passed and marked two storage
|
||||
;; objects to delete
|
||||
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
|
||||
(t/is (= 0 (:freeze res)))
|
||||
(t/is (= 2 (:delete res))))
|
||||
|
||||
(let [storage (sto/resolve th/*system*)]
|
||||
(t/is (uuid? (:id @sobject)))
|
||||
(t/is (nil? (sto/get-object storage (:id @sobject)))))))
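
The assertions above rely on the garbage-collection tasks running in a specific order. A minimal sketch of that ordering, using only the helper calls already shown in this test (the empty option maps and the bare :min-age value are assumptions, not the exact arguments used at every call site):

(comment
  ;; file-gc rewrites the file fragments and marks the superseded rows as deleted
  (th/run-task! :file-gc {:file-id (:id file)})
  ;; objects-gc removes the rows previously marked as deleted
  (th/run-task! :objects-gc {})
  ;; storage-gc-touched reconciles storage objects that have lost their last reference
  (th/run-task! :storage-gc-touched {:min-age 0}))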

(t/deftest file-gc-with-components-1
(let [storage (:app.storage/storage th/*system*)
@@ -1384,8 +1434,9 @@

page-id (first (get-in file [:data :pages]))]

(let [rows (th/db-query :file-data-fragment {:file-id (:id file)
:deleted-at nil})]
(let [rows (th/db-query :file-data {:file-id (:id file)
:type "fragment"
:deleted-at nil})]
(t/is (= (count rows) 1)))

;; Update file inserting new component
@@ -1437,11 +1488,8 @@
:id c-id
:anotation nil}])

;; Run the file-gc task immediately without forced min-age
(t/is (false? (th/run-task! :file-gc {:file-id (:id file)})))

;; Run the task again
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
(t/is (true? (th/run-task! :file-gc {:file-id (:id file)})))

;; Retrieve file and check trimmed attribute
(let [row (th/db-get :file {:id (:id file)})]
@@ -1651,8 +1699,7 @@
(t/is (some? (not-empty (:objects component))))))

;; Re-run the file-gc task
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-1)})))
(t/is (false? (th/run-task! :file-gc {:min-age 0 :file-id (:id file-2)})))
(t/is (true? (th/run-task! :file-gc {:file-id (:id file-1)})))

;; Check that component is still there after file-gc task
(let [data {::th/type :get-file

@@ -39,8 +39,6 @@
(t/is (nil? (:error out)))
(:result out)))

;; TODO: migrate to commands

(t/deftest duplicate-file
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
@@ -233,15 +231,7 @@
;; check that both files are equivalent
(doseq [[fa fb] (map vector p1-files p2-files)]
(t/is (not= (:id fa) (:id fb)))
(t/is (= (:name fa) (:name fb)))

(when (= (:id fa) (:id file1))
(t/is (false? (b/equals? (:data fa)
(:data fb)))))

(when (= (:id fa) (:id file2))
(t/is (false? (b/equals? (:data fa)
(:data fb)))))))))))
(t/is (= (:name fa) (:name fb)))))))))

(t/deftest duplicate-project-with-deleted-files
(let [storage (-> (:app.storage/storage th/*system*)
@@ -297,15 +287,7 @@
;; check that both files are equivalent
(doseq [[fa fb] (map vector (rest p1-files) p2-files)]
(t/is (not= (:id fa) (:id fb)))
(t/is (= (:name fa) (:name fb)))

(when (= (:id fa) (:id file1))
(t/is (false? (b/equals? (:data fa)
(:data fb)))))

(when (= (:id fa) (:id file2))
(t/is (false? (b/equals? (:data fa)
(:data fb)))))))))))
(t/is (= (:name fa) (:name fb)))))))))

(t/deftest move-file-on-same-team
(let [profile (th/create-profile* 1 {:is-active true})

@@ -162,7 +162,7 @@

;; execute permanent deletion task
(let [result (th/run-task! :objects-gc {:min-age 0})]
(t/is (= 4 (:processed result))))
(t/is (= 6 (:processed result))))

(let [row (th/db-get :team
{:id (:default-team-id prof)}
@@ -324,7 +324,7 @@

;; execute permanent deletion task
(let [result (th/run-task! :objects-gc {:min-age 0})]
(t/is (= 4 (:processed result))))
(t/is (= 6 (:processed result))))

(let [row (th/db-get :team
{:id (:default-team-id prof1)}
@@ -363,7 +363,7 @@

;; execute permanent deletion task
(let [result (th/run-task! :objects-gc {:min-age 0})]
(t/is (= 8 (:processed result))))))
(t/is (= 10 (:processed result))))))


(t/deftest email-blacklist-1

@@ -582,7 +582,7 @@
(t/is (ct/inst? (:deleted-at (first rows)))))

(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
(t/is (= 5 (:processed result))))))
(t/is (= 7 (:processed result))))))

(t/deftest create-team-access-request
(with-mocks [mock {:target 'app.email/send! :return nil}]

@@ -105,13 +105,14 @@
(into frontend-only-features)
(into backend-only-features)))

(sm/register!
^{::sm/type ::features}
[:schema
{:title "FileFeatures"
::smdj/inline true
:gen/gen (smg/subseq supported-features)}
[::sm/set :string]])
(def schema:features
(sm/register!
^{::sm/type ::features}
[:schema
{:title "FileFeatures"
::smdj/inline true
:gen/gen (smg/subseq supported-features)}
[::sm/set :string]]))
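
Binding the registered schema to the schema:features var (instead of relying only on the ::features registry key) lets other namespaces reference it directly. A hypothetical usage sketch, assuming a malli-style valid? helper exists in the same sm namespace and using an illustrative feature string:

(comment
  ;; a set of feature strings should satisfy the schema
  (sm/valid? schema:features #{"fdata/objects-map"})
  ;; a vector is not a set, so it would be expected to fail validation
  (sm/valid? schema:features ["fdata/objects-map"]))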

(defn- flag->feature
"Translate a flag to a feature name"

@@ -914,6 +914,8 @@
:gen/gen (sg/uri)
:decode/string decode-uri
:decode/json decode-uri
:encode/json str
:encode/string str
::oapi/type "string"
::oapi/format "uri"}})


@@ -155,7 +155,7 @@

(defn make-file
[{:keys [id project-id name revn is-shared features migrations
ignore-sync-until created-at modified-at deleted-at]
metadata backend ignore-sync-until created-at modified-at deleted-at]
:as params}

& {:keys [create-page with-data page-id]
@@ -186,8 +186,9 @@
:data data
:features features
:migrations migrations
:metadata metadata
:backend backend
:ignore-sync-until ignore-sync-until
:has-media-trimmed false
:created-at created-at
:modified-at modified-at
:deleted-at deleted-at})]
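
With this change make-file accepts the new :metadata and :backend fields alongside the existing parameters. A hypothetical call sketch (the key names come from the destructuring above; the concrete values, the uuid/next helper, and the project-id binding are illustrative assumptions):

(comment
  (make-file {:id (uuid/next)
              :project-id project-id
              :name "sample file"
              :backend "db"     ;; file data kept inline in the database
              :metadata nil}    ;; no storage metadata for a db-backed file
             :create-page true))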

@@ -407,7 +407,8 @@
(watch [_ _ _]
(let [{:keys [on-error on-success]
:or {on-error rx/throw
on-success identity}} (meta data)]
on-success identity}}
(meta data)]

(->> (rp/cmd! :request-profile-recovery data)
(rx/tap on-success)
