mirror of https://github.com/penpot/penpot.git
synced 2025-12-11 22:14:05 +01:00
Merge pull request #7493 from penpot/niwinz-staging-hotfix-2
Some checks failed
Commit Message Check / Check Commit Message (push) Has been cancelled
🐛 Fix corner case on error report validation
@@ -226,14 +226,29 @@ jobs:
          keys:
            - v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}

      # Build frontend
      - run:
          name: "integration tests"
          name: "frontend build"
          working_directory: "./frontend"
          command: |
            yarn install
            yarn run build:app:assets
            yarn run build:app
            yarn run build:app:libs

      # Build the wasm bundle
      - run:
          name: "wasm build"
          working_directory: "./render-wasm"
          command: |
            EMSDK_QUIET=1 . /opt/emsdk/emsdk_env.sh
            ./build release

      # Run integration tests
      - run:
          name: "integration tests"
          working_directory: "./frontend"
          command: |
            yarn run playwright install chromium
            yarn run test:e2e -x --workers=4
1 .github/PULL_REQUEST_TEMPLATE.md vendored

@@ -13,6 +13,7 @@
- [ ] Add a detailed explanation of how to reproduce the issue and/or verify the fix, if applicable.
- [ ] Include screenshots or videos, if applicable.
- [ ] Add or modify existing integration tests in case of bugs or new features, if applicable.
- [ ] Refactor any modified SCSS files following the refactor guide.
- [ ] Check CI passes successfully.
- [ ] Update the `CHANGES.md` file, referencing the related GitHub issue, if applicable.
16 .github/workflows/build-bundle.yml vendored

@@ -1,11 +1,11 @@
name: Build and Upload Penpot Bundle
name: Bundles Builder

on:
  # Create bundle from manual action
  workflow_dispatch:
    inputs:
      gh_ref:
        description: 'Name of the branch'
        description: 'Name of the branch or ref'
        type: string
        required: true
        default: 'develop'

@@ -22,7 +22,7 @@ on:
  workflow_call:
    inputs:
      gh_ref:
        description: 'Name of the branch'
        description: 'Name of the branch or ref'
        type: string
        required: true
        default: 'develop'

@@ -56,10 +56,9 @@ jobs:
      - name: Extract some useful variables
        id: vars
        run: |
          echo "commit_hash=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
          echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT

      - name: Run manage.sh build-bundle from host
      - name: Build bundle
        env:
          BUILD_WASM: ${{ inputs.build_wasm }}
          BUILD_STORYBOOK: ${{ inputs.build_storybook }}

@@ -76,13 +75,6 @@ jobs:
          zip -r zips/penpot.zip penpot

      - name: Upload Penpot bundle to S3
        if: github.ref_type == 'branch'
        run: |
          aws s3 cp zips/penpot.zip s3://${{ secrets.S3_BUCKET }}/penpot-${{ steps.vars.outputs.gh_ref }}-latest.zip
          aws s3 cp zips/penpot.zip s3://${{ secrets.S3_BUCKET }}/penpot-${{ steps.vars.outputs.commit_hash }}.zip

      - name: Upload Penpot bundle to S3
        if: github.ref_type == 'tag'
        run: |
          aws s3 cp zips/penpot.zip s3://${{ secrets.S3_BUCKET }}/penpot-${{ steps.vars.outputs.gh_ref }}.zip
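As an aside (not part of the diff): the aws s3 cp lines above encode the bundle naming
scheme. A minimal shell sketch of the object keys they would write, using made-up values
for the bucket, branch, commit hash and tag:

    # Illustration only; the bucket and ref values below are hypothetical.
    S3_BUCKET=example-bucket
    GH_REF=develop
    COMMIT_HASH=1a2b3c4
    echo "s3://$S3_BUCKET/penpot-$GH_REF-latest.zip"    # branch build: rolling "latest" bundle
    echo "s3://$S3_BUCKET/penpot-$COMMIT_HASH.zip"      # branch build: per-commit bundle
    GH_REF=2.8.1
    echo "s3://$S3_BUCKET/penpot-$GH_REF.zip"           # tag build: versioned bundle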
11 .github/workflows/build-develop.yml vendored

@@ -1,14 +1,21 @@
name: DEVELOP - Build and Upload Penpot Bundle
name: _DEVELOP

on:
  schedule:
    - cron: '16 5-20 * * 1-5'

jobs:
  build-develop-bundle:
  build-bundle:
    uses: ./.github/workflows/build-bundle.yml
    secrets: inherit
    with:
      gh_ref: "develop"
      build_wasm: "yes"
      build_storybook: "yes"

  build-docker:
    needs: build-bundle
    uses: ./.github/workflows/build-docker.yml
    secrets: inherit
    with:
      gh_ref: "develop"
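For reference, a reading of the schedule above (comment sketch only, not part of the diff);
the staging workflow further down uses the same shape at minute 36:

    # cron fields: minute hour day-of-month month day-of-week
    # '16 5-20 * * 1-5' -> at HH:16 for every hour from 05:00 to 20:00 UTC, Monday to Friday,
    # so the develop bundle is rebuilt roughly once per hour on working days.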
36 .github/workflows/build-docker-devenv.yml vendored Normal file

@@ -0,0 +1,36 @@
name: DevEnv Docker Image Builder

on:
  workflow_dispatch:

jobs:
  build-and-push:
    name: Build and push DevEnv Docker image
    environment: release-admins
    runs-on: ubuntu-24.04

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Registry
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.PUB_DOCKER_USERNAME }}
          password: ${{ secrets.PUB_DOCKER_PASSWORD }}

      - name: Build and push DevEnv Docker image
        uses: docker/build-push-action@v6
        env:
          DOCKER_IMAGE: 'penpotapp/devenv'
        with:
          context: ./docker/devenv/
          file: ./docker/devenv/Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ env.DOCKER_IMAGE }}:latest
          cache-from: type=registry,ref=${{ env.DOCKER_IMAGE }}:buildcache
          cache-to: type=registry,ref=${{ env.DOCKER_IMAGE }}:buildcache,mode=max
101 .github/workflows/build-docker.yml vendored Normal file

@@ -0,0 +1,101 @@
name: Docker Images Builder

on:
  workflow_dispatch:
    inputs:
      gh_ref:
        description: 'Name of the branch or ref'
        type: string
        required: true
        default: 'develop'
  workflow_call:
    inputs:
      gh_ref:
        description: 'Name of the branch or ref'
        type: string
        required: true
        default: 'develop'

jobs:
  build-and-push:
    name: Build and Push Penpot Docker Images
    runs-on: ubuntu-24.04-arm

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.gh_ref }}

      - name: Extract some useful variables
        id: vars
        run: |
          echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT

      - name: Download Penpot Bundles
        env:
          FILE_NAME: penpot-${{ steps.vars.outputs.gh_ref }}.zip
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
        run: |
          pushd docker/images
          aws s3 cp s3://${{ secrets.S3_BUCKET }}/$FILE_NAME .
          unzip $FILE_NAME > /dev/null
          mv penpot/backend bundle-backend
          mv penpot/frontend bundle-frontend
          mv penpot/exporter bundle-exporter
          popd

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ secrets.DOCKER_REGISTRY }}
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Build and push Backend Docker image
        uses: docker/build-push-action@v6
        env:
          DOCKER_IMAGE: 'backend'
          BUNDLE_PATH: './bundle-backend'
        with:
          context: ./docker/images/
          file: ./docker/images/Dockerfile.backend
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
          cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
          cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max

      - name: Build and push Frontend Docker image
        uses: docker/build-push-action@v6
        env:
          DOCKER_IMAGE: 'frontend'
          BUNDLE_PATH: './bundle-frontend'
        with:
          context: ./docker/images/
          file: ./docker/images/Dockerfile.frontend
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
          cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
          cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max

      - name: Build and push Exporter Docker image
        uses: docker/build-push-action@v6
        env:
          DOCKER_IMAGE: 'exporter'
          BUNDLE_PATH: './bundle-exporter'
        with:
          context: ./docker/images/
          file: ./docker/images/Dockerfile.exporter
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
          cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
          cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max
11 .github/workflows/build-staging.yml vendored

@@ -1,14 +1,21 @@
name: STAGING - Build and Upload Penpot Bundle
name: _STAGING

on:
  schedule:
    - cron: '36 5-20 * * 1-5'

jobs:
  build-staging-bundle:
  build-bundle:
    uses: ./.github/workflows/build-bundle.yml
    secrets: inherit
    with:
      gh_ref: "staging"
      build_wasm: "yes"
      build_storybook: "yes"

  build-docker:
    needs: build-bundle
    uses: ./.github/workflows/build-docker.yml
    secrets: inherit
    with:
      gh_ref: "staging"
19 .github/workflows/build-tag.yml vendored

@@ -1,4 +1,4 @@
name: TAG - Build and Upload Penpot Bundle
name: _TAG

on:
  push:

@@ -6,10 +6,25 @@ on:
      - '*'

jobs:
  build-tag-bundle:
  build-bundle:
    uses: ./.github/workflows/build-bundle.yml
    secrets: inherit
    with:
      gh_ref: ${{ github.ref_name }}
      build_wasm: "no"
      build_storybook: "yes"

  build-docker:
    needs: build-bundle
    uses: ./.github/workflows/build-docker.yml
    secrets: inherit
    with:
      gh_ref: ${{ github.ref_name }}

  publish-final-tag:
    if: ${{ !contains(github.ref_name, '-RC') && !contains(github.ref_name, '-alpha') && !contains(github.ref_name, '-beta') && contains(github.ref_name, '.') }}
    needs: build-docker
    uses: ./.github/workflows/release.yml
    secrets: inherit
    with:
      gh_ref: ${{ github.ref_name }}
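The publish-final-tag gate above only fires for plain version tags. A rough shell rendering
of the same condition, for illustration only (the tag names below are made up and the gate
itself is evaluated by GitHub's expression syntax, not by this script):

    for TAG in 2.8.1 2.9.0-RC1 2.9.0-alpha1 2.9.0-beta2 develop; do
      if [[ "$TAG" == *.* && "$TAG" != *-RC* && "$TAG" != *-alpha* && "$TAG" != *-beta* ]]; then
        echo "$TAG -> would trigger the release workflow"
      else
        echo "$TAG -> skipped (pre-release or not a version tag)"
      fi
    done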
2 .github/workflows/commit-checker.yml vendored

@@ -26,7 +26,7 @@ jobs:
      - name: Check Commit Type
        uses: gsactions/commit-message-checker@v2
        with:
          pattern: '^(Merge|Revert|:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind):)\s[A-Z].*[^.]$'
          pattern: '^(Merge|Revert|:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind):)\s["A-Z].*[^.]$'
          flags: 'gm'
          error: 'Commit should match CONTRIBUTING.md guideline'
          checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request
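The pattern change above adds the double quote to the characters allowed right after the
type prefix. A quick way to see the effect, illustration only: the commit messages below
are made up, and this assumes GNU grep with PCRE support (-P).

    PATTERN='^(Merge|Revert|:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind):)\s["A-Z].*[^.]$'
    # Accepted before and after the change: the subject starts with an uppercase letter.
    echo ':bug: Fix corner case on error report validation' | grep -qP "$PATTERN" && echo accepted
    # Accepted only after the change: the subject starts with a double quote.
    echo ':sparkles: "Grid" layout improvements' | grep -qP "$PATTERN" && echo accepted
    # Still rejected: the subject ends with a period.
    echo ':bug: Fix corner case.' | grep -qP "$PATTERN" || echo rejected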
95 .github/workflows/release.yml vendored Normal file

@@ -0,0 +1,95 @@
name: Release Publisher

on:
  workflow_dispatch:
    inputs:
      gh_ref:
        description: 'Tag to release'
        type: string
        required: true
  workflow_call:
    inputs:
      gh_ref:
        description: 'Tag to release'
        type: string
        required: true

permissions:
  contents: write

jobs:
  release:
    environment: release-admins
    runs-on: ubuntu-24.04
    outputs:
      version: ${{ steps.vars.outputs.gh_ref }}
      release_notes: ${{ steps.extract_release_notes.outputs.release_notes }}
    steps:
      - name: Extract some useful variables
        id: vars
        run: |
          echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT

      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ steps.vars.outputs.gh_ref }}

      # --- Publicly release the docker images ---
      - name: Login to private registry
        uses: docker/login-action@v3
        with:
          registry: ${{ secrets.DOCKER_REGISTRY }}
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.PUB_DOCKER_USERNAME }}
          password: ${{ secrets.PUB_DOCKER_PASSWORD }}

      - name: Publish docker images to DockerHub
        env:
          TAG: ${{ steps.vars.outputs.gh_ref }}
          REGISTRY: ${{ secrets.DOCKER_REGISTRY }}
          HUB: ${{ secrets.PUB_DOCKER_HUB }}
        run: |
          IMAGES=("frontend" "backend" "exporter")
          EXTRA_TAGS=("main" "latest")

          for image in "${IMAGES[@]}"; do
            docker pull "$REGISTRY/penpotapp/$image:$TAG"
            docker tag "$REGISTRY/penpotapp/$image:$TAG" "penpotapp/$image:$TAG"
            docker push "penpotapp/$image:$TAG"

            for tag in "${EXTRA_TAGS[@]}"; do
              docker tag "$REGISTRY/penpotapp/$image:$TAG" "penpotapp/$image:$tag"
              docker push "penpotapp/$image:$tag"
            done
          done

      # --- Release notes extraction ---
      - name: Extract release notes from CHANGES.md
        id: extract_release_notes
        env:
          TAG: ${{ steps.vars.outputs.gh_ref }}
        run: |
          RELEASE_NOTES=$(awk "/^## $TAG$/{flag=1; next} /^## /{flag=0} flag" CHANGES.md | awk '{$1=$1};1')
          if [ -z "$RELEASE_NOTES" ]; then
            RELEASE_NOTES="No changes for $TAG according to CHANGES.md"
          fi
          echo "release_notes<<EOF" >> $GITHUB_OUTPUT
          echo "$RELEASE_NOTES" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT

      # --- Create GitHub release ---
      - name: Create GitHub release
        uses: softprops/action-gh-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ steps.vars.outputs.gh_ref }}
          name: ${{ steps.vars.outputs.gh_ref }}
          body: ${{ steps.extract_release_notes.outputs.release_notes }}
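For orientation, a minimal sketch of what the release-notes extraction above produces. The
first awk prints the lines between the "## <TAG>" heading and the next "## " heading, and
the second awk trims surrounding whitespace. The CHANGES.md content, file path and tag used
here are made up; this is not part of the diff.

    TAG=2.8.1
    cat > /tmp/CHANGES.md <<'EOF'
    ## 2.8.1
      - Fix corner case on error report validation
    ## 2.8.0
      - Older entry
    EOF
    awk "/^## $TAG$/{flag=1; next} /^## /{flag=0} flag" /tmp/CHANGES.md | awk '{$1=$1};1'
    # prints: - Fix corner case on error report validation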
@@ -61,8 +61,7 @@
          ::yres/body data}

        (binding [l/*context* (request->context request)]
          (l/err :hint "restriction error"
                 :cause err)
          (l/wrn :hint "restriction error" :cause err)
          {::yres/status 400
           ::yres/body data}))))
@@ -16,7 +16,7 @@
   [app.common.features :as cfeat]
   [app.common.files.validate :as cfv]
   [app.common.logging :as l]
   [app.common.pprint :as p]
   [app.common.pprint :as pp]
   [app.common.schema :as sm]
   [app.common.spec :as us]
   [app.common.time :as ct]

@@ -58,7 +58,7 @@
(defn print-tasks
  []
  (let [tasks (:app.worker/registry main/system)]
    (p/pprint (keys tasks) :level 200)))
    (pp/pprint (keys tasks) :level 200)))

(defn run-task!
  ([tname]

@@ -130,18 +130,18 @@
(defn reset-password!
  "Reset a password to a specific one for a concrete user or all users
  if email is `:all` keyword."
  [& {:keys [email password] :or {password "123123"} :as params}]
  (when-not email
    (throw (IllegalArgumentException. "email is mandatory")))
  [& {:keys [email password]}]
  (assert (string? email) "expected email")
  (assert (string? password) "expected password")

  (some-> main/system
          (db/tx-run!
           (fn [{:keys [::db/conn] :as system}]
             (let [password (derive-password password)]
               (if (= email :all)
                 (db/exec! conn ["update profile set password=?" password])
                 (let [email (str/lower email)]
                   (db/exec! conn ["update profile set password=? where email=?" password email]))))))))
             (let [password (derive-password password)
                   email (str/lower email)]
               (-> (db/exec-one! conn ["update profile set password=? where email=?" password email])
                   (db/get-update-count)
                   (pos?)))))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; FEATURES

@@ -549,6 +549,17 @@
        :elapsed elapsed))))))


(defn mark-file-as-trimmed
  [id]
  (let [id (h/parse-uuid id)]
    (db/tx-run! main/system (fn [cfg]
                              (-> (db/update! cfg :file
                                              {:has-media-trimmed true}
                                              {:id id}
                                              {::db/return-keys false})
                                  (db/get-update-count)
                                  (pos?))))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DELETE/RESTORE OBJECTS (WITH CASCADE, SOFT)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -231,7 +231,7 @@
            shape))

          (update-container [container]
            (update container :objects d/update-vals fix-line-paths))]
            (d/update-when container :objects d/update-vals fix-line-paths))]

    (-> data
        (update :pages-index d/update-vals update-container)

@@ -285,7 +285,9 @@
              (let [[deleted objects] (clean-objects objects)]
                (if (and (pos? deleted) (< n 1000))
                  (recur (inc n) objects)
                  (assoc container :objects objects)))))]
                  (-> container
                      (assoc :objects objects)
                      (d/without-nils))))))]

    (-> data
        (update :pages-index d/update-vals clean-container)

@@ -383,21 +385,20 @@
              (dissoc :fill-color :fill-opacity))))

          (update-container [container]
            (if (contains? container :objects)
              (loop [objects (:objects container)
                     shapes (->> (vals objects)
                                 (filter cfh/image-shape?))]
                (if-let [shape (first shapes)]
                  (let [{:keys [id frame-id] :as shape'} (process-shape shape)]
                    (if (identical? shape shape')
                      (recur objects (rest shapes))
                      (recur (-> objects
                                 (assoc id shape')
                                 (d/update-when frame-id dissoc :thumbnail))
                             (rest shapes))))
                  (assoc container :objects objects)))
              container))]

            (loop [objects (:objects container)
                   shapes (->> (vals objects)
                               (filter cfh/image-shape?))]
              (if-let [shape (first shapes)]
                (let [{:keys [id frame-id] :as shape'} (process-shape shape)]
                  (if (identical? shape shape')
                    (recur objects (rest shapes))
                    (recur (-> objects
                               (assoc id shape')
                               (d/update-when frame-id dissoc :thumbnail))
                           (rest shapes))))
                (-> container
                    (assoc :objects objects)
                    (d/without-nils)))))]

    (-> data
        (update :pages-index d/update-vals update-container)
        (d/update-when :components d/update-vals update-container))))

@@ -1607,6 +1608,14 @@
        (update :pages-index d/update-vals update-container)
        (d/update-when :components d/update-vals update-container))))

(defmethod migrate-data "0014-clear-components-nil-objects"
  [data _]
  ;; Because of a bug in migrations, several files have migrations
  ;; applied in an incorrect order and because of other bug on old
  ;; migrations, some files have components with `:objects` with `nil`
  ;; as value; this migration fixes it.
  (d/update-when data :components d/update-vals d/without-nils))

(def available-migrations
  (into (d/ordered-set)
        ["legacy-2"

@@ -1675,4 +1684,5 @@
         "0010-fix-swap-slots-pointing-non-existent-shapes"
         "0011-fix-invalid-text-touched-flags"
         "0012-fix-position-data"
         "0013-clear-invalid-strokes-and-fills"]))
         "0013-clear-invalid-strokes-and-fills"
         "0014-clear-components-nil-objects"]))
@@ -80,7 +80,7 @@
   [:file-id ::sm/uuid]
   [:page-id {:optional true} [:maybe ::sm/uuid]]])

(def check-error!
(def check-error
  (sm/check-fn schema:error))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

@@ -96,21 +96,17 @@

(defn- report-error
  [code hint shape file page & {:as args}]
  (let [error {:code code
               :hint hint
               :shape shape
               :file-id (:id file)
               :page-id (:id page)
               :shape-id (:id shape)
               :args args}]
  (let [error (d/without-nils
               {:code code
                :hint hint
                :shape shape
                :file-id (:id file)
                :page-id (:id page)
                :shape-id (:id shape)
                :args args})]

    (dm/assert!
     "expected a valid `*errors*` dynamic binding"
     (some? *errors*))

    (dm/assert!
     "expected valid error"
     (check-error! error))
    (assert (some? *errors*) "expected a valid `*errors*` dynamic binding")
    (assert (check-error error))

    (vswap! *errors* conj error)))
@@ -423,38 +423,41 @@
  (fn [{:keys [kind max min ordered] :as props} children _]
    (let [kind (or (last children) kind)

          pred
          child-pred
          (cond
            (fn? kind) kind
            (nil? kind) any?
            :else (validator kind))

          type-pred
          (if ordered
            d/ordered-set?
            set?)

          pred
          (cond
            (and max min)
            (fn [value]
              (let [size (count value)]
                (and (set? value)
                     (<= min size max)
                     (every? pred value))))
              (and (type-pred value)
                   (every? child-pred value)
                   (<= min (count value) max)))

            min
            (fn [value]
              (let [size (count value)]
                (and (set? value)
                     (<= min size)
                     (every? pred value))))
              (and (type-pred value)
                   (every? child-pred value)
                   (<= min (count value))))

            max
            (fn [value]
              (let [size (count value)]
                (and (set? value)
                     (<= size max)
                     (every? pred value))))
              (and (type-pred value)
                   (every? child-pred value)
                   (<= (count value) max)))

            :else
            (fn [value]
              (every? pred value)))
              (and (type-pred value)
                   (every? child-pred value))))

          empty-set
          (if ordered
@@ -6,6 +6,7 @@

(ns common-tests.schema-test
  (:require
   [app.common.data :as d]
   [app.common.schema :as sm]
   [app.common.schema.generators :as sg]
   [clojure.test :as t]))

@@ -35,6 +36,77 @@
    (t/is (true? (sm/validate schema #{})))
    (t/is (false? (sm/validate schema #{"a"})))))

  (t/testing "validate 2"
    (let [candidate-1 ["a@b.com" "a@c.net"]
          candidate-2 (into #{} candidate-1)
          candidate-3 (into (d/ordered-set) candidate-1)
          candidate-4 #{"a@b.com"}
          candidate-5 (d/ordered-set "a@b.com")
          schema-1 [::sm/set ::sm/email]
          schema-2 [::sm/set {:ordered true} ::sm/email]
          schema-3 [::sm/set {:ordered true :min 1} ::sm/email]
          schema-4 [::sm/set {:min 1} ::sm/email]
          schema-5 [::sm/set {:ordered true :max 1} ::sm/email]
          schema-6 [::sm/set {:ordered true :min 1 :max 2} ::sm/email]
          schema-7 [::sm/set {:min 1 :max 2} ::sm/email]]

      (t/is (false? (sm/validate schema-1 [])))
      (t/is (false? (sm/validate schema-1 candidate-1)))
      (t/is (true? (sm/validate schema-1 candidate-2)))
      (t/is (true? (sm/validate schema-1 candidate-3)))

      (t/is (false? (sm/validate schema-2 [])))
      (t/is (false? (sm/validate schema-2 candidate-1)))
      (t/is (false? (sm/validate schema-2 candidate-2)))
      (t/is (true? (sm/validate schema-2 candidate-3)))

      (t/is (false? (sm/validate schema-3 [])))
      (t/is (false? (sm/validate schema-3 candidate-1)))
      (t/is (false? (sm/validate schema-3 candidate-2)))
      (t/is (true? (sm/validate schema-3 candidate-3)))
      (t/is (false? (sm/validate schema-3 candidate-4)))
      (t/is (true? (sm/validate schema-3 candidate-5)))
      (t/is (false? (sm/validate schema-3 (d/ordered-set))))

      (t/is (false? (sm/validate schema-4 [])))
      (t/is (false? (sm/validate schema-4 candidate-1)))
      (t/is (true? (sm/validate schema-4 candidate-2)))
      (t/is (true? (sm/validate schema-4 candidate-3)))
      (t/is (true? (sm/validate schema-4 candidate-4)))
      (t/is (true? (sm/validate schema-4 candidate-5)))
      (t/is (false? (sm/validate schema-4 (d/ordered-set))))
      (t/is (false? (sm/validate schema-4 #{})))

      (t/is (false? (sm/validate schema-5 [])))
      (t/is (false? (sm/validate schema-5 candidate-1)))
      (t/is (false? (sm/validate schema-5 candidate-2)))
      (t/is (false? (sm/validate schema-5 candidate-3)))
      (t/is (false? (sm/validate schema-5 candidate-4)))
      (t/is (true? (sm/validate schema-5 candidate-5)))
      (t/is (true? (sm/validate schema-5 (d/ordered-set))))
      (t/is (false? (sm/validate schema-5 #{})))

      (t/is (false? (sm/validate schema-6 [])))
      (t/is (false? (sm/validate schema-6 candidate-1)))
      (t/is (false? (sm/validate schema-6 candidate-2)))
      (t/is (true? (sm/validate schema-6 candidate-3)))
      (t/is (false? (sm/validate schema-6 candidate-4)))
      (t/is (true? (sm/validate schema-6 candidate-5)))
      (t/is (false? (sm/validate schema-6 (d/ordered-set))))
      (t/is (false? (sm/validate schema-6 #{})))
      (t/is (false? (sm/validate schema-6 (conj candidate-3 "r@r.com"))))

      (t/is (false? (sm/validate schema-7 [])))
      (t/is (false? (sm/validate schema-7 candidate-1)))
      (t/is (true? (sm/validate schema-7 candidate-2)))
      (t/is (true? (sm/validate schema-7 candidate-3)))
      (t/is (true? (sm/validate schema-7 candidate-4)))
      (t/is (true? (sm/validate schema-7 candidate-5)))
      (t/is (false? (sm/validate schema-7 (d/ordered-set))))
      (t/is (false? (sm/validate schema-7 #{})))
      (t/is (false? (sm/validate schema-7 (conj candidate-2 "r@r.com"))))
      (t/is (false? (sm/validate schema-7 (conj candidate-3 "r@r.com"))))))

  (t/testing "generate"
    (let [schema [::sm/set ::sm/email]
          value (sg/generate schema)]