Mirror of https://github.com/penpot/penpot.git, synced 2025-12-11 22:14:05 +01:00
🎉 Add addTokensLib method to the library
@@ -485,6 +485,13 @@
       (commit-change change1)
       (commit-change change2))))
 
+(defn add-tokens-lib
+  [state tokens-lib]
+  (-> state
+      (commit-change
+       {:type :set-tokens-lib
+        :tokens-lib tokens-lib})))
+
 (defn delete-shape
   [file id]
   (commit-change
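For orientation, the new builder helper is exercised the same way the library bindings further down in this commit use it: build a TokensLib and swap it into the file-builder state. A minimal sketch, assuming a `state` atom holding the builder state and a `tokens-json` DTCG multi-set string (both names are illustrative):

    ;; Build a TokensLib from a DTCG multi-set string and commit it as a
    ;; :set-tokens-lib change; read-multi-set-dtcg is added later in this
    ;; same commit.
    (require '[app.common.files.builder :as fb]
             '[app.common.types.tokens-lib :refer [read-multi-set-dtcg]])

    (swap! state fb/add-tokens-lib (read-multi-set-dtcg tokens-json))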
@@ -371,7 +371,7 @@
    [:set-tokens-lib
     [:map {:title "SetTokensLib"}
      [:type [:= :set-tokens-lib]]
-     [:tokens-lib ::sm/any]]] ;; TODO: we should define a plain object schema for tokens-lib
+     [:tokens-lib ctob/schema:tokens-lib]]]
 
    [:set-token
     [:map {:title "SetTokenChange"}
@@ -7,10 +7,11 @@
 (ns app.common.types.tokens-lib
   (:require
    #?(:clj [app.common.fressian :as fres])
-   #?(:clj [clojure.data.json :as json])
+   #?(:clj [clojure.data.json :as c.json])
    [app.common.data :as d]
    [app.common.data.macros :as dm]
    [app.common.files.helpers :as cfh]
+   [app.common.json :as json]
    [app.common.path-names :as cpn]
    [app.common.schema :as sm]
    [app.common.schema.generators :as sg]
@@ -198,8 +199,8 @@
      :tokens tokens})
 
   #?@(:clj
-      [json/JSONWriter
-       (-write [this writter options] (json/-write (datafy this) writter options))])
+      [c.json/JSONWriter
+       (-write [this writter options] (c.json/-write (datafy this) writter options))])
 
   INamedItem
   (get-id [_]
@@ -912,6 +913,7 @@ Will return a value that matches this schema:
   (get-tokens [_ set-id] "return a map of tokens in the set, indexed by token-name"))
 
 (declare parse-multi-set-dtcg-json)
+(declare read-multi-set-dtcg)
 (declare export-dtcg-json)
 
 (deftype TokensLib [sets themes active-themes]
@@ -923,8 +925,8 @@ Will return a value that matches this schema:
      :active-themes active-themes})
 
   #?@(:clj
-      [json/JSONWriter
-       (-write [this writter options] (json/-write (export-dtcg-json this) writter options))])
+      [c.json/JSONWriter
+       (-write [this writter options] (c.json/-write (export-dtcg-json this) writter options))])
 
   ITokenSets
   ;; Naming conventions:
@@ -1409,7 +1411,11 @@ Will return a value that matches this schema:
     ;; function that is declared but not defined; so we need to pass
     ;; an anonymous function and delegate the resolution to runtime
     {:encode/json #(export-dtcg-json %)
-     :decode/json #(parse-multi-set-dtcg-json %)}}))
+     :decode/json #(read-multi-set-dtcg %)
+     ;; FIXME: add better, more reallistic generator
+     :gen/gen (->> (sg/small-int)
+                   (sg/fmap (fn [_]
+                              (make-tokens-lib))))}}))
 
 (defn duplicate-set
   "Make a new set with a unique name, copying data from the given set in the lib."
@@ -1453,8 +1459,7 @@ Will return a value that matches this schema:
      ["value" :map]
      ["type" :string]]]))
 
-(def ^:private dtcg-node?
-  (sm/validator
+(def ^:private schema:dtcg-node
   [:or
    [:map
     ["$value" :string]
@@ -1464,7 +1469,10 @@ Will return a value that matches this schema:
     ["$type" :string]]
    [:map
     ["$value" :map]
-    ["$type" :string]]]))
+    ["$type" :string]]])
+
+(def ^:private dtcg-node?
+  (sm/validator schema:dtcg-node))
 
 (defn- get-json-format
   "Searches through decoded token file and returns:
@@ -1651,6 +1659,43 @@ Will return a value that matches this schema:
   (assert (= (get-json-format decoded-json-tokens) :json-format/legacy) "expected a legacy format for `decoded-json-tokens`")
   (parse-single-set-dtcg-json set-name (legacy-json->dtcg-json decoded-json-tokens)))
 
+(def ^:private schema:multi-set-dtcg
+  "Schema for penpot multi-set dtcg json decoded data/
+
+  Mainly used for validate the structure of the incoming data before
+  proceed to parse it to our internal data structures."
+  [:schema {:registry
+            {::node
+             [:or
+              [:map-of :string [:ref ::node]]
+              schema:dtcg-node]}}
+   [:map
+    ["$themes" {:optional true}
+     [:vector
+      [:map {:title "Theme"}
+       ["id" {:optional true} :string]
+       ["name" :string]
+       ["description" :string]
+       ["isSource" :boolean]
+       ["selectedTokenSets"
+        [:map-of :string [:enum "enabled" "disabled"]]]]]]
+    ["$metadata" {:optional true}
+     [:map {:title "Metadata"}
+      ["tokenSetOrder" {:optional true} [:vector :string]]
+      ["activeThemes" {:optional true} [:vector :string]]
+      ["activeSets" {:optional true} [:vector :string]]]]
+
+    [:malli.core/default
+     [:map-of :string [:ref ::node]]]]])
+
+(def ^:private check-multi-set-dtcg-data
+  (sm/check-fn schema:multi-set-dtcg))
+
+(def ^:private decode-multi-set-dtcg-data
+  (sm/decoder schema:multi-set-dtcg
+              sm/json-transformer))
+
+;; FIXME: remove `-json` suffix
 (defn parse-multi-set-dtcg-json
   "Parse a decoded json file with multi sets in DTCG format into a TokensLib."
   [decoded-json]
@@ -1741,6 +1786,23 @@ Will return a value that matches this schema:
 
     library))
 
+(defn read-multi-set-dtcg
+  "Read penpot multi-set dctg tokens. Accepts string or JSON decoded
+  data (without any case transformation). Used as schema decoder and
+  in the SDK."
+  [data]
+  (let [data (if (string? data)
+               (json/decode data :key-fn identity)
+               data)
+        data #?(:cljs (if (object? data)
+                        (json/->clj data :key-fn identity)
+                        data)
+                :clj data)
+
+        data (decode-multi-set-dtcg-data data)]
+    (-> (check-multi-set-dtcg-data data)
+        (parse-multi-set-dtcg-json))))
+
 (defn- parse-multi-set-legacy-json
   "Parse a decoded json file with multi sets in legacy format into a TokensLib."
   [decoded-json]
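As a quick illustration of the new entry point: it accepts either a raw JSON string or already-decoded data, validates it against schema:multi-set-dtcg, and returns a TokensLib. A minimal sketch (the set and token names are made up; only the $value/$type shape matters):

    ;; One set "core" containing a single color token, in DTCG multi-set form.
    (def tokens-json
      "{\"core\": {\"primary\": {\"$value\": \"#ff0000\", \"$type\": \"color\", \"$description\": \"\"}}}")

    ;; Should yield a TokensLib; passing the already-decoded map instead of
    ;; the string gives an equivalent result, mirroring the JS tests added
    ;; at the end of this commit.
    (read-multi-set-dtcg tokens-json)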
@@ -1753,6 +1815,7 @@ Will return a value that matches this schema:
   (parse-multi-set-dtcg-json (merge other-data
                                     dtcg-sets-data))))
 
+;; FIXME: remove `-json` suffix
 (defn parse-decoded-json
   "Guess the format and content type of the decoded json file and parse it into a TokensLib.
   The `file-name` is used to determine the set name when the json file contains a single set."
@@ -1,5 +1,10 @@
 # CHANGELOG
 
+## 1.1.0-RC1
+
+- Add experimental addTokensLib method
+
+
 ## 1.0.11
 
 - Set correct path if it is not provided on addComponent
@@ -1,13 +1,13 @@
 {
   "name": "@penpot/library",
-  "version": "1.0.11",
+  "version": "1.1.0-RC1",
   "license": "MPL-2.0",
   "author": "Kaleidos INC",
   "packageManager": "yarn@4.10.3+sha512.c38cafb5c7bb273f3926d04e55e1d8c9dfa7d9c3ea1f36a4868fa028b9e5f72298f0b7f401ad5eb921749eb012eb1c3bb74bf7503df3ee43fd600d14a018266f",
   "type": "module",
   "repository": {
     "type": "git",
-    "url": "https://github.com/penpot/penpot"
+    "url": "git+https://github.com/penpot/penpot.git"
   },
   "resolutions": {
     "@zip.js/zip.js@npm:^2.7.44": "patch:@zip.js/zip.js@npm%3A2.7.60#~/.yarn/patches/@zip.js-zip.js-npm-2.7.60-b6b814410b.patch"
@@ -11,6 +11,7 @@
    [app.common.files.builder :as fb]
    [app.common.json :as json]
    [app.common.schema :as sm]
+   [app.common.types.tokens-lib :refer [read-multi-set-dtcg]]
    [app.common.uuid :as uuid]
    [app.util.object :as obj]))
 
@@ -263,6 +264,15 @@
            :mtype (get fmedia :mtype)}]
        (json/->js (d/without-nils image))))))
 
+   :addTokensLib
+   (fn [data]
+     (try
+       (let [tlib (read-multi-set-dtcg data)]
+         (swap! state fb/add-tokens-lib tlib)
+         nil)
+       (catch :default cause
+         (handle-exception cause))))
+
    :genId
    (fn []
      (dm/str (uuid/next)))
library/test/_tokens-1.json (new file, 62 lines)
@@ -0,0 +1,62 @@
{
  "a": {},
  "b": {
    "aaa": {
      "$value": "red",
      "$type": "color",
      "$description": ""
    },
    "bbb": {
      "$value": "blue",
      "$type": "color",
      "$description": ""
    },
    "ccc": {
      "eee": {
        "$value": "green",
        "$type": "color",
        "$description": ""
      }
    },
    "fff": {
      "ttt": {
        "$value": {
          "fontFamilies": [
            "Aboreto"
          ],
          "fontSizes": "12",
          "fontWeights": "300"
        },
        "$type": "typography",
        "$description": ""
      }
    }
  },
  "b/c": {},
  "$themes": [
    {
      "id": "48af6582-f247-8060-8006-ff4dd1d761a8",
      "name": "tes1",
      "description": "",
      "isSource": false,
      "selectedTokenSets": {
        "a": "enabled",
        "b": "enabled"
      }
    }
  ],
  "$metadata": {
    "tokenSetOrder": [
      "a",
      "b",
      "b/c"
    ],
    "activeThemes": [
      "/tes1"
    ],
    "activeSets": [
      "a",
      "b"
    ]
  }
}
@@ -1,8 +1,15 @@
 import assert from "node:assert/strict";
 import test from "node:test";
+import * as fs from "node:fs";
+import path from "node:path";
+import { fileURLToPath } from "node:url";
 
 import * as penpot from "#self";
 
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
 test("create empty context", () => {
   const context = penpot.createBuildContext();
   assert.ok(context);
@@ -118,3 +125,48 @@ test("create context with color", () => {
   assert.equal(color.opacity, params.opacity);
   assert.equal(color.name, params.name);
 });
+
+
+test("create context with tokens lib as json", () => {
+  const context = penpot.createBuildContext();
+
+  const fileId = context.addFile({name: "file 1"});
+  const pageId = context.addPage({name: "page 1"});
+
+
+  const tokensFilePath = path.join(__dirname, "_tokens-1.json");
+  const tokens = fs.readFileSync(tokensFilePath, "utf8");
+
+  context.addTokensLib(tokens);
+
+
+  const internalState = context.getInternalState();
+  const file = internalState.files[fileId];
+
+  assert.ok(file, "file should exist");
+
+  assert.ok(file.data);
+  assert.ok(file.data.tokensLib)
+});
+
+test("create context with tokens lib as obj", () => {
+  const context = penpot.createBuildContext();
+
+  const fileId = context.addFile({name: "file 1"});
+  const pageId = context.addPage({name: "page 1"});
+
+
+  const tokensFilePath = path.join(__dirname, "_tokens-1.json");
+  const tokens = fs.readFileSync(tokensFilePath, "utf8");
+
+  context.addTokensLib(JSON.parse(tokens))
+
+
+  const internalState = context.getInternalState();
+  const file = internalState.files[fileId];
+
+  assert.ok(file, "file should exist");
+
+  assert.ok(file.data);
+  assert.ok(file.data.tokensLib)
+});