Mirror of https://github.com/penpot/penpot.git (synced 2025-12-12 06:24:17 +01:00)
♻️ Add minor refactor for internal concurrency model
Replace the general use of virtual threads with platform threads, and keep virtual threads only for lightweight tasks such as websocket connections. The main reason is that virtual threads do not show up in thread dumps in an easy way, which makes debugging issues very difficult. Penpot's thread requirements for serving HTTP requests are modest, so this decision does not really affect resource usage.
Committed by: Alonso Torres
Parent: 9d907071aa
Commit: c1058c7fdb
Changed files shown: CHANGES.md (20 changed lines), plus the backend sources in the diff below.
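The backend manages its threads through promesa.exec (aliased as px in the sources below). As a minimal sketch of the policy described in the commit message — not code from this commit, and with hypothetical task functions — the split looks like this:

    (require '[promesa.exec :as px])

    ;; CPU-heavy or long-lived work: a named platform thread,
    ;; which shows up clearly in thread dumps.
    (px/thread {:name "penpot/example-worker" :virtual false}
      (run-heavy-task))               ;; hypothetical function

    ;; Lightweight, mostly-blocking work such as a websocket
    ;; connection: a virtual thread remains a good fit.
    (px/thread {:name "penpot/example-ws" :virtual true}
      (serve-websocket-connection))   ;; hypothetical function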
@@ -6,6 +6,26 @@
 
 ### :boom: Breaking changes & Deprecations
 
+- Remove the already deprecated configuration variables with the prefix
+  `PENPOT_ASSETS_*` (replaced by variables named with
+  `PENPOT_OBJECTS_STORAGE_*`).
+
+- Replace `PENPOT_OBJECTS_STORAGE_S3_IO_THREADS` with a more general
+  configuration, `PENPOT_NETTY_IO_THREADS`, used to configure the shared
+  netty resources across the different services that use netty
+  internally (redis connection, S3 SDK client). This configuration is
+  not commonly used, so no real impact is expected for users.
+
+- Add `PENPOT_NETTY_IO_THREADS` and `PENPOT_EXECUTOR_THREADS` variables
+  to control the concurrency of the shared resources used by netty.
+  Penpot uses the netty IO threads for the AWS S3 SDK and Redis/Valkey
+  communication, and the EXEC threads to perform tasks outside the HTTP
+  serving threads, such as cache invalidation, S3 response completion,
+  configuration reloading and many other auxiliary tasks. By default
+  both executors use half the number of available CPUs, with a minimum
+  of 2. You should not touch these variables unless you really know
+  what you are doing.
+
 ### :heart: Community contributions (Thank you!)
 
 ### :sparkles: New features & Enhancements
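As a reference for the new variables above, a minimal sketch of how the default is resolved (mirroring the app.worker.executor changes further down in this diff); the helper name is illustrative:

    (defn effective-thread-count
      "An explicit value wins; otherwise half of the available CPUs,
      never less than 2."
      [configured]
      (max 2 (or configured
                 (Math/round (/ (.availableProcessors (Runtime/getRuntime)) 2.0)))))

    ;; e.g. on an 8-core host with neither variable set:
    ;; (effective-thread-count nil) => 4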
@@ -28,8 +28,8 @@
 com.google.guava/guava {:mvn/version "33.4.8-jre"}
 
 funcool/yetti
-{:git/tag "v11.4"
-:git/sha "ce50d42"
+{:git/tag "v11.6"
+:git/sha "94dc017"
 :git/url "https://github.com/funcool/yetti.git"
 :exclusions [org.slf4j/slf4j-api]}
 
@@ -86,7 +86,9 @@
 
 :test
 {:main-opts ["-m" "kaocha.runner"]
-:jvm-opts ["-Dlog4j2.configurationFile=log4j2-devenv-repl.xml"]
+:jvm-opts ["-Dlog4j2.configurationFile=log4j2-devenv-repl.xml"
+"--sun-misc-unsafe-memory-access=allow"
+"--enable-native-access=ALL-UNNAMED"]
 :extra-deps {lambdaisland/kaocha {:mvn/version "1.91.1392"}}}
 
 :outdated
@@ -34,8 +34,7 @@
 [clojure.set :as set]
 [cuerdas.core :as str]
 [datoteka.fs :as fs]
-[datoteka.io :as io]
-[promesa.exec :as px]))
+[datoteka.io :as io]))
 
 (set! *warn-on-reflection* true)
 
@@ -476,7 +475,7 @@
 (vary-meta dissoc ::fmg/migrated))))
 
 (defn encode-file
-[{:keys [::wrk/executor] :as cfg} {:keys [id features] :as file}]
+[cfg {:keys [id features] :as file}]
 (let [file (if (and (contains? features "fdata/objects-map")
 (:data file))
 (fdata/enable-objects-map file)
@@ -493,7 +492,7 @@
 
 (-> file
 (d/update-when :features into-array)
-(d/update-when :data (fn [data] (px/invoke! executor #(blob/encode data)))))))
+(d/update-when :data blob/encode))))
 
 (defn- file->params
 [file]
@@ -96,7 +96,7 @@
 [:http-server-max-body-size {:optional true} ::sm/int]
 [:http-server-max-multipart-body-size {:optional true} ::sm/int]
 [:http-server-io-threads {:optional true} ::sm/int]
-[:http-server-worker-threads {:optional true} ::sm/int]
+[:http-server-max-worker-threads {:optional true} ::sm/int]
 
 [:telemetry-uri {:optional true} :string]
 [:telemetry-with-taiga {:optional true} ::sm/boolean] ;; DELETE
@@ -214,20 +214,14 @@
 [:media-uri {:optional true} :string]
 [:assets-path {:optional true} :string]
 
-;; Legacy, will be removed in 2.5
-[:assets-storage-backend {:optional true} :keyword]
-[:storage-assets-fs-directory {:optional true} :string]
-[:storage-assets-s3-bucket {:optional true} :string]
-[:storage-assets-s3-region {:optional true} :keyword]
-[:storage-assets-s3-endpoint {:optional true} ::sm/uri]
-[:storage-assets-s3-io-threads {:optional true} ::sm/int]
+[:netty-io-threads {:optional true} ::sm/int]
+[:executor-threads {:optional true} ::sm/int]
 
 [:objects-storage-backend {:optional true} :keyword]
 [:objects-storage-fs-directory {:optional true} :string]
 [:objects-storage-s3-bucket {:optional true} :string]
 [:objects-storage-s3-region {:optional true} :keyword]
-[:objects-storage-s3-endpoint {:optional true} ::sm/uri]
-[:objects-storage-s3-io-threads {:optional true} ::sm/int]]))
+[:objects-storage-s3-endpoint {:optional true} ::sm/uri]]))
 
 (defn- parse-flags
 [config]
@@ -18,9 +18,7 @@
 [app.storage :as sto]
 [app.util.blob :as blob]
 [app.util.objects-map :as omap]
-[app.util.pointer-map :as pmap]
-[app.worker :as wrk]
-[promesa.exec :as px]))
+[app.util.pointer-map :as pmap]))
 
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;; OFFLOAD
@@ -84,10 +82,10 @@
 (assoc file :data data)))
 
 (defn decode-file-data
-[{:keys [::wrk/executor]} {:keys [data] :as file}]
+[_system {:keys [data] :as file}]
 (cond-> file
 (bytes? data)
-(assoc :data (px/invoke! executor #(blob/decode data)))))
+(assoc :data (blob/decode data))))
 
 (defn load-pointer
 "A database loader pointer helper"
@@ -26,9 +26,7 @@
 [app.rpc :as-alias rpc]
 [app.rpc.doc :as-alias rpc.doc]
 [app.setup :as-alias setup]
-[app.worker :as wrk]
 [integrant.core :as ig]
-[promesa.exec :as px]
 [reitit.core :as r]
 [reitit.middleware :as rr]
 [yetti.adapter :as yt]
@@ -55,6 +53,8 @@
 [:map
 [::port ::sm/int]
 [::host ::sm/text]
+[::io-threads {:optional true} ::sm/int]
+[::max-worker-threads {:optional true} ::sm/int]
 [::max-body-size {:optional true} ::sm/int]
 [::max-multipart-body-size {:optional true} ::sm/int]
 [::router {:optional true} [:fn r/router?]]
@@ -65,31 +65,41 @@
 (assert (sm/check schema:server-params params)))
 
 (defmethod ig/init-key ::server
-[_ {:keys [::handler ::router ::host ::port ::wrk/executor] :as cfg}]
+[_ {:keys [::handler ::router ::host ::port ::mtx/metrics] :as cfg}]
 (l/info :hint "starting http server" :port port :host host)
-(let [options {:http/port port
-:http/host host
-:http/max-body-size (::max-body-size cfg)
-:http/max-multipart-body-size (::max-multipart-body-size cfg)
-:xnio/direct-buffers false
-:xnio/io-threads (or (::io-threads cfg)
-(max 3 (px/get-available-processors)))
-:xnio/dispatch executor
-:ring/compat :ring2
-:socket/backlog 4069}
+(let [on-dispatch
+(fn [_ start-at-ns]
+(let [timing (- (System/nanoTime) start-at-ns)
+timing (int (/ timing 1000000))]
+(mtx/run! metrics
+:id :http-server-dispatch-timing
+:val timing)))
 
-handler (cond
-(some? router)
-(router-handler router)
+options
+{:http/port port
+:http/host host
+:http/max-body-size (::max-body-size cfg)
+:http/max-multipart-body-size (::max-multipart-body-size cfg)
+:xnio/direct-buffers false
+:xnio/io-threads (::io-threads cfg)
+:xnio/max-worker-threads (::max-worker-threads cfg)
+:ring/compat :ring2
+:events/on-dispatch on-dispatch
+:socket/backlog 4069}
 
-(some? handler)
-handler
+handler
+(cond
+(some? router)
+(router-handler router)
 
-:else
-(throw (UnsupportedOperationException. "handler or router are required")))
+(some? handler)
+handler
 
-options (d/without-nils options)
-server (yt/server handler options)]
+:else
+(throw (UnsupportedOperationException. "handler or router are required")))
 
+server
+(yt/server handler (d/without-nils options))]
 
 (assoc cfg ::server (yt/start! server))))
 
@@ -17,11 +17,9 @@
 [app.main :as-alias main]
 [app.setup :as-alias setup]
 [app.tokens :as tokens]
-[app.worker :as-alias wrk]
 [clojure.data.json :as j]
 [cuerdas.core :as str]
 [integrant.core :as ig]
-[promesa.exec :as px]
 [yetti.request :as yreq]
 [yetti.response :as-alias yres]))
 
@@ -40,8 +38,8 @@
 [_ cfg]
 (letfn [(handler [request]
 (let [data (-> request yreq/body slurp)]
-(px/run! :vthread (partial handle-request cfg data)))
-{::yres/status 200})]
+(handle-request cfg data)
+{::yres/status 200}))]
 ["/sns" {:handler handler
 :allowed-methods #{:post}}]))
 
@@ -54,7 +54,7 @@
 ::yres/body (yres/stream-body
 (fn [_ output]
 (let [channel (sp/chan :buf buf :xf (keep encode))
-listener (events/start-listener
+listener (events/spawn-listener
 channel
 (partial write! output)
 (partial pu/close! output))]
@@ -42,6 +42,7 @@
 [app.svgo :as-alias svgo]
 [app.util.cron]
 [app.worker :as-alias wrk]
+[app.worker.executor]
 [clojure.test :as test]
 [clojure.tools.namespace.repl :as repl]
 [cuerdas.core :as str]
@@ -148,23 +149,11 @@
 ::mdef/labels []
 ::mdef/type :histogram}
 
-:executors-active-threads
-{::mdef/name "penpot_executors_active_threads"
-::mdef/help "Current number of threads available in the executor service."
-::mdef/labels ["name"]
-::mdef/type :gauge}
-
-:executors-completed-tasks
-{::mdef/name "penpot_executors_completed_tasks_total"
-::mdef/help "Approximate number of completed tasks by the executor."
-::mdef/labels ["name"]
-::mdef/type :counter}
-
-:executors-running-threads
-{::mdef/name "penpot_executors_running_threads"
-::mdef/help "Current number of threads with state RUNNING."
-::mdef/labels ["name"]
-::mdef/type :gauge}})
+:http-server-dispatch-timing
+{::mdef/name "penpot_http_server_dispatch_timing"
+::mdef/help "Histogram of dispatch handler"
+::mdef/labels []
+::mdef/type :histogram}})
 
 (def system-config
 {::db/pool
@@ -176,14 +165,12 @@
 ::db/max-size (cf/get :database-max-pool-size 60)
 ::mtx/metrics (ig/ref ::mtx/metrics)}
 
-;; Default thread pool for IO operations
-::wrk/executor
-{}
+;; Default netty IO pool (shared between several services)
+::wrk/netty-io-executor
+{:threads (cf/get :netty-io-threads)}
 
-::wrk/monitor
-{::mtx/metrics (ig/ref ::mtx/metrics)
-::wrk/executor (ig/ref ::wrk/executor)
-::wrk/name "default"}
+::wrk/netty-executor
+{:threads (cf/get :executor-threads)}
 
 :app.migrations/migrations
 {::db/pool (ig/ref ::db/pool)}
@@ -197,14 +184,19 @@
 ::rds/redis
 {::rds/uri (cf/get :redis-uri)
 ::mtx/metrics (ig/ref ::mtx/metrics)
-::wrk/executor (ig/ref ::wrk/executor)}
+::wrk/netty-executor
+(ig/ref ::wrk/netty-executor)
+
+::wrk/netty-io-executor
+(ig/ref ::wrk/netty-io-executor)}
 
 ::mbus/msgbus
-{::wrk/executor (ig/ref ::wrk/executor)
+{::wrk/executor (ig/ref ::wrk/netty-executor)
 ::rds/redis (ig/ref ::rds/redis)}
 
 :app.storage.tmp/cleaner
-{::wrk/executor (ig/ref ::wrk/executor)}
+{::wrk/executor (ig/ref ::wrk/netty-executor)}
 
 ::sto.gc-deleted/handler
 {::db/pool (ig/ref ::db/pool)
@@ -232,9 +224,10 @@
 ::http/host (cf/get :http-server-host)
 ::http/router (ig/ref ::http/router)
 ::http/io-threads (cf/get :http-server-io-threads)
+::http/max-worker-threads (cf/get :http-server-max-worker-threads)
 ::http/max-body-size (cf/get :http-server-max-body-size)
 ::http/max-multipart-body-size (cf/get :http-server-max-multipart-body-size)
-::wrk/executor (ig/ref ::wrk/executor)}
+::mtx/metrics (ig/ref ::mtx/metrics)}
 
 ::ldap/provider
 {:host (cf/get :ldap-host)
@@ -312,17 +305,17 @@
 
 ::rpc/climit
 {::mtx/metrics (ig/ref ::mtx/metrics)
-::wrk/executor (ig/ref ::wrk/executor)
+::wrk/executor (ig/ref ::wrk/netty-executor)
 ::climit/config (cf/get :rpc-climit-config)
 ::climit/enabled (contains? cf/flags :rpc-climit)}
 
 :app.rpc/rlimit
-{::wrk/executor (ig/ref ::wrk/executor)}
+{::wrk/executor (ig/ref ::wrk/netty-executor)}
 
 :app.rpc/methods
 {::http.client/client (ig/ref ::http.client/client)
 ::db/pool (ig/ref ::db/pool)
-::wrk/executor (ig/ref ::wrk/executor)
+::wrk/executor (ig/ref ::wrk/netty-executor)
 ::session/manager (ig/ref ::session/manager)
 ::ldap/provider (ig/ref ::ldap/provider)
 ::sto/storage (ig/ref ::sto/storage)
@@ -468,20 +461,15 @@
 :assets-fs (ig/ref :app.storage.fs/backend)}}
 
 :app.storage.s3/backend
-{::sto.s3/region (or (cf/get :storage-assets-s3-region)
-(cf/get :objects-storage-s3-region))
-::sto.s3/endpoint (or (cf/get :storage-assets-s3-endpoint)
-(cf/get :objects-storage-s3-endpoint))
-::sto.s3/bucket (or (cf/get :storage-assets-s3-bucket)
-(cf/get :objects-storage-s3-bucket))
-::sto.s3/io-threads (or (cf/get :storage-assets-s3-io-threads)
-(cf/get :objects-storage-s3-io-threads))
-::wrk/executor (ig/ref ::wrk/executor)}
+{::sto.s3/region (cf/get :objects-storage-s3-region)
+::sto.s3/endpoint (cf/get :objects-storage-s3-endpoint)
+::sto.s3/bucket (cf/get :objects-storage-s3-bucket)
+::wrk/netty-io-executor
+(ig/ref ::wrk/netty-io-executor)}
 
 :app.storage.fs/backend
-{::sto.fs/directory (or (cf/get :storage-assets-fs-directory)
-(cf/get :objects-storage-fs-directory))}})
+{::sto.fs/directory (cf/get :objects-storage-fs-directory)}})
 
 
 (def worker-config
 {::wrk/cron
@@ -216,8 +216,7 @@
 (rds/add-listener sconn (create-listener rcv-ch))
 
 (px/thread
-{:name "penpot/msgbus/io-loop"
-:virtual true}
+{:name "penpot/msgbus"}
 (try
 (loop []
 (let [timeout-ch (sp/timeout-chan 1000)
@@ -21,8 +21,7 @@
|
|||||||
[clojure.java.io :as io]
|
[clojure.java.io :as io]
|
||||||
[cuerdas.core :as str]
|
[cuerdas.core :as str]
|
||||||
[integrant.core :as ig]
|
[integrant.core :as ig]
|
||||||
[promesa.core :as p]
|
[promesa.core :as p])
|
||||||
[promesa.exec :as px])
|
|
||||||
(:import
|
(:import
|
||||||
clojure.lang.MapEntry
|
clojure.lang.MapEntry
|
||||||
io.lettuce.core.KeyValue
|
io.lettuce.core.KeyValue
|
||||||
@@ -45,8 +44,10 @@
|
|||||||
io.lettuce.core.pubsub.api.sync.RedisPubSubCommands
|
io.lettuce.core.pubsub.api.sync.RedisPubSubCommands
|
||||||
io.lettuce.core.resource.ClientResources
|
io.lettuce.core.resource.ClientResources
|
||||||
io.lettuce.core.resource.DefaultClientResources
|
io.lettuce.core.resource.DefaultClientResources
|
||||||
|
io.netty.channel.nio.NioEventLoopGroup
|
||||||
io.netty.util.HashedWheelTimer
|
io.netty.util.HashedWheelTimer
|
||||||
io.netty.util.Timer
|
io.netty.util.Timer
|
||||||
|
io.netty.util.concurrent.EventExecutorGroup
|
||||||
java.lang.AutoCloseable
|
java.lang.AutoCloseable
|
||||||
java.time.Duration))
|
java.time.Duration))
|
||||||
|
|
||||||
@@ -111,20 +112,15 @@
|
|||||||
|
|
||||||
(defmethod ig/expand-key ::redis
|
(defmethod ig/expand-key ::redis
|
||||||
[k v]
|
[k v]
|
||||||
(let [cpus (px/get-available-processors)
|
{k (-> (d/without-nils v)
|
||||||
threads (max 1 (int (* cpus 0.2)))]
|
(assoc ::timeout (ct/duration "10s")))})
|
||||||
{k (-> (d/without-nils v)
|
|
||||||
(assoc ::timeout (ct/duration "10s"))
|
|
||||||
(assoc ::io-threads (max 3 threads))
|
|
||||||
(assoc ::worker-threads (max 3 threads)))}))
|
|
||||||
|
|
||||||
(def ^:private schema:redis-params
|
(def ^:private schema:redis-params
|
||||||
[:map {:title "redis-params"}
|
[:map {:title "redis-params"}
|
||||||
::wrk/executor
|
::wrk/netty-io-executor
|
||||||
|
::wrk/netty-executor
|
||||||
::mtx/metrics
|
::mtx/metrics
|
||||||
[::uri ::sm/uri]
|
[::uri ::sm/uri]
|
||||||
[::worker-threads ::sm/int]
|
|
||||||
[::io-threads ::sm/int]
|
|
||||||
[::timeout ::ct/duration]])
|
[::timeout ::ct/duration]])
|
||||||
|
|
||||||
(defmethod ig/assert-key ::redis
|
(defmethod ig/assert-key ::redis
|
||||||
@@ -141,17 +137,30 @@
|
|||||||
|
|
||||||
(defn- initialize-resources
|
(defn- initialize-resources
|
||||||
"Initialize redis connection resources"
|
"Initialize redis connection resources"
|
||||||
[{:keys [::uri ::io-threads ::worker-threads ::wrk/executor ::mtx/metrics] :as params}]
|
[{:keys [::uri ::mtx/metrics ::wrk/netty-io-executor ::wrk/netty-executor] :as params}]
|
||||||
|
|
||||||
(l/inf :hint "initialize redis resources"
|
(l/inf :hint "initialize redis resources"
|
||||||
:uri (str uri)
|
:uri (str uri))
|
||||||
:io-threads io-threads
|
|
||||||
:worker-threads worker-threads)
|
|
||||||
|
|
||||||
(let [timer (HashedWheelTimer.)
|
(let [timer (HashedWheelTimer.)
|
||||||
resources (.. (DefaultClientResources/builder)
|
resources (.. (DefaultClientResources/builder)
|
||||||
(ioThreadPoolSize ^long io-threads)
|
(eventExecutorGroup ^EventExecutorGroup netty-executor)
|
||||||
(computationThreadPoolSize ^long worker-threads)
|
|
||||||
|
;; We provide lettuce with a shared event loop
|
||||||
|
;; group instance instead of letting lettuce to
|
||||||
|
;; create its own
|
||||||
|
(eventLoopGroupProvider
|
||||||
|
(reify io.lettuce.core.resource.EventLoopGroupProvider
|
||||||
|
(allocate [_ _] netty-io-executor)
|
||||||
|
(threadPoolSize [_]
|
||||||
|
(.executorCount ^NioEventLoopGroup netty-io-executor))
|
||||||
|
(release [_ _ _ _ _]
|
||||||
|
;; Do nothing
|
||||||
|
)
|
||||||
|
(shutdown [_ _ _ _]
|
||||||
|
;; Do nothing
|
||||||
|
)))
|
||||||
|
|
||||||
(timer ^Timer timer)
|
(timer ^Timer timer)
|
||||||
(build))
|
(build))
|
||||||
|
|
||||||
@@ -166,7 +175,7 @@
|
|||||||
(l/trace :hint "evict connection (cache)" :key key :reason cause)
|
(l/trace :hint "evict connection (cache)" :key key :reason cause)
|
||||||
(some-> val d/close!))
|
(some-> val d/close!))
|
||||||
|
|
||||||
cache (cache/create :executor executor
|
cache (cache/create :executor netty-executor
|
||||||
:on-remove on-remove
|
:on-remove on-remove
|
||||||
:keepalive "5m")]
|
:keepalive "5m")]
|
||||||
(reify
|
(reify
|
||||||
|
|||||||
@@ -21,7 +21,6 @@
|
|||||||
[clojure.set :as set]
|
[clojure.set :as set]
|
||||||
[datoteka.fs :as fs]
|
[datoteka.fs :as fs]
|
||||||
[integrant.core :as ig]
|
[integrant.core :as ig]
|
||||||
[promesa.exec :as px]
|
|
||||||
[promesa.exec.bulkhead :as pbh])
|
[promesa.exec.bulkhead :as pbh])
|
||||||
(:import
|
(:import
|
||||||
clojure.lang.ExceptionInfo
|
clojure.lang.ExceptionInfo
|
||||||
@@ -289,13 +288,9 @@
|
|||||||
(get-limits cfg)))
|
(get-limits cfg)))
|
||||||
|
|
||||||
(defn invoke!
|
(defn invoke!
|
||||||
"Run a function in context of climit.
|
"Run a function in context of climit."
|
||||||
Intended to be used in virtual threads."
|
[{:keys [::rpc/climit] :as cfg} f params]
|
||||||
[{:keys [::executor ::rpc/climit] :as cfg} f params]
|
|
||||||
(let [f (if climit
|
(let [f (if climit
|
||||||
(let [f (if (some? executor)
|
(build-exec-chain cfg f)
|
||||||
(fn [cfg params] (px/await! (px/submit! executor (fn [] (f cfg params)))))
|
|
||||||
f)]
|
|
||||||
(build-exec-chain cfg f))
|
|
||||||
f)]
|
f)]
|
||||||
(f cfg params)))
|
(f cfg params)))
|
||||||
|
|||||||
@@ -6,6 +6,7 @@
|
|||||||
|
|
||||||
(ns app.rpc.commands.auth
|
(ns app.rpc.commands.auth
|
||||||
(:require
|
(:require
|
||||||
|
[app.auth :as auth]
|
||||||
[app.common.data :as d]
|
[app.common.data :as d]
|
||||||
[app.common.data.macros :as dm]
|
[app.common.data.macros :as dm]
|
||||||
[app.common.exceptions :as ex]
|
[app.common.exceptions :as ex]
|
||||||
@@ -62,7 +63,7 @@
|
|||||||
(ex/raise :type :validation
|
(ex/raise :type :validation
|
||||||
:code :account-without-password
|
:code :account-without-password
|
||||||
:hint "the current account does not have password")
|
:hint "the current account does not have password")
|
||||||
(let [result (profile/verify-password cfg password (:password profile))]
|
(let [result (auth/verify-password password (:password profile))]
|
||||||
(when (:update result)
|
(when (:update result)
|
||||||
(l/trc :hint "updating profile password"
|
(l/trc :hint "updating profile password"
|
||||||
:id (str (:id profile))
|
:id (str (:id profile))
|
||||||
@@ -156,7 +157,7 @@
|
|||||||
(:profile-id tdata)))
|
(:profile-id tdata)))
|
||||||
|
|
||||||
(update-password [conn profile-id]
|
(update-password [conn profile-id]
|
||||||
(let [pwd (profile/derive-password cfg password)]
|
(let [pwd (auth/derive-password password)]
|
||||||
(db/update! conn :profile {:password pwd :is-active true} {:id profile-id})
|
(db/update! conn :profile {:password pwd :is-active true} {:id profile-id})
|
||||||
nil))]
|
nil))]
|
||||||
|
|
||||||
@@ -378,7 +379,7 @@
|
|||||||
(not (contains? cf/flags :email-verification)))
|
(not (contains? cf/flags :email-verification)))
|
||||||
params (-> params
|
params (-> params
|
||||||
(assoc :is-active is-active)
|
(assoc :is-active is-active)
|
||||||
(update :password #(profile/derive-password cfg %)))
|
(update :password auth/derive-password))
|
||||||
profile (->> (create-profile! conn params)
|
profile (->> (create-profile! conn params)
|
||||||
(create-profile-rels! conn))]
|
(create-profile-rels! conn))]
|
||||||
(vary-meta profile assoc :created true))))
|
(vary-meta profile assoc :created true))))
|
||||||
|
|||||||
@@ -28,7 +28,6 @@
|
|||||||
[app.tasks.file-gc]
|
[app.tasks.file-gc]
|
||||||
[app.util.services :as sv]
|
[app.util.services :as sv]
|
||||||
[app.worker :as-alias wrk]
|
[app.worker :as-alias wrk]
|
||||||
[promesa.exec :as px]
|
|
||||||
[yetti.response :as yres]))
|
[yetti.response :as yres]))
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
@@ -94,7 +93,7 @@
|
|||||||
;; --- Command: import-binfile
|
;; --- Command: import-binfile
|
||||||
|
|
||||||
(defn- import-binfile
|
(defn- import-binfile
|
||||||
[{:keys [::db/pool ::wrk/executor] :as cfg} {:keys [profile-id project-id version name file]}]
|
[{:keys [::db/pool] :as cfg} {:keys [profile-id project-id version name file]}]
|
||||||
(let [team (teams/get-team pool
|
(let [team (teams/get-team pool
|
||||||
:profile-id profile-id
|
:profile-id profile-id
|
||||||
:project-id project-id)
|
:project-id project-id)
|
||||||
@@ -105,13 +104,9 @@
|
|||||||
(assoc ::bfc/name name)
|
(assoc ::bfc/name name)
|
||||||
(assoc ::bfc/input (:path file)))
|
(assoc ::bfc/input (:path file)))
|
||||||
|
|
||||||
;; NOTE: the importation process performs some operations that are
|
|
||||||
;; not very friendly with virtual threads, and for avoid
|
|
||||||
;; unexpected blocking of other concurrent operations we dispatch
|
|
||||||
;; that operation to a dedicated executor.
|
|
||||||
result (case (int version)
|
result (case (int version)
|
||||||
1 (px/invoke! executor (partial bf.v1/import-files! cfg))
|
1 (bf.v1/import-files! cfg)
|
||||||
3 (px/invoke! executor (partial bf.v3/import-files! cfg)))]
|
3 (bf.v3/import-files! cfg))]
|
||||||
|
|
||||||
(db/update! pool :project
|
(db/update! pool :project
|
||||||
{:modified-at (ct/now)}
|
{:modified-at (ct/now)}
|
||||||
|
|||||||
@@ -7,6 +7,7 @@
|
|||||||
(ns app.rpc.commands.demo
|
(ns app.rpc.commands.demo
|
||||||
"A demo specific mutations."
|
"A demo specific mutations."
|
||||||
(:require
|
(:require
|
||||||
|
[app.auth :refer [derive-password]]
|
||||||
[app.common.exceptions :as ex]
|
[app.common.exceptions :as ex]
|
||||||
[app.common.time :as ct]
|
[app.common.time :as ct]
|
||||||
[app.config :as cf]
|
[app.config :as cf]
|
||||||
@@ -14,7 +15,6 @@
|
|||||||
[app.loggers.audit :as audit]
|
[app.loggers.audit :as audit]
|
||||||
[app.rpc :as-alias rpc]
|
[app.rpc :as-alias rpc]
|
||||||
[app.rpc.commands.auth :as auth]
|
[app.rpc.commands.auth :as auth]
|
||||||
[app.rpc.commands.profile :as profile]
|
|
||||||
[app.rpc.doc :as-alias doc]
|
[app.rpc.doc :as-alias doc]
|
||||||
[app.util.services :as sv]
|
[app.util.services :as sv]
|
||||||
[buddy.core.codecs :as bc]
|
[buddy.core.codecs :as bc]
|
||||||
@@ -46,7 +46,7 @@
|
|||||||
:fullname fullname
|
:fullname fullname
|
||||||
:is-active true
|
:is-active true
|
||||||
:deleted-at (ct/in-future (cf/get-deletion-delay))
|
:deleted-at (ct/in-future (cf/get-deletion-delay))
|
||||||
:password (profile/derive-password cfg password)
|
:password (derive-password password)
|
||||||
:props {}}
|
:props {}}
|
||||||
profile (db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
profile (db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||||
(->> (auth/create-profile! conn params)
|
(->> (auth/create-profile! conn params)
|
||||||
|
|||||||
@@ -39,8 +39,7 @@
|
|||||||
[app.util.pointer-map :as pmap]
|
[app.util.pointer-map :as pmap]
|
||||||
[app.util.services :as sv]
|
[app.util.services :as sv]
|
||||||
[app.worker :as wrk]
|
[app.worker :as wrk]
|
||||||
[cuerdas.core :as str]
|
[cuerdas.core :as str]))
|
||||||
[promesa.exec :as px]))
|
|
||||||
|
|
||||||
;; --- FEATURES
|
;; --- FEATURES
|
||||||
|
|
||||||
@@ -251,7 +250,7 @@
|
|||||||
(feat.fmigr/resolve-applied-migrations cfg file))))))
|
(feat.fmigr/resolve-applied-migrations cfg file))))))
|
||||||
|
|
||||||
(defn get-file
|
(defn get-file
|
||||||
[{:keys [::db/conn ::wrk/executor] :as cfg} id
|
[{:keys [::db/conn] :as cfg} id
|
||||||
& {:keys [project-id
|
& {:keys [project-id
|
||||||
migrate?
|
migrate?
|
||||||
include-deleted?
|
include-deleted?
|
||||||
@@ -273,13 +272,8 @@
|
|||||||
::db/remove-deleted (not include-deleted?)
|
::db/remove-deleted (not include-deleted?)
|
||||||
::sql/for-update lock-for-update?})
|
::sql/for-update lock-for-update?})
|
||||||
(feat.fmigr/resolve-applied-migrations cfg)
|
(feat.fmigr/resolve-applied-migrations cfg)
|
||||||
(feat.fdata/resolve-file-data cfg))
|
(feat.fdata/resolve-file-data cfg)
|
||||||
|
(decode-row))
|
||||||
;; NOTE: we perform the file decoding in a separate thread
|
|
||||||
;; because it has heavy and synchronous operations for
|
|
||||||
;; decoding file body that are not very friendly with virtual
|
|
||||||
;; threads.
|
|
||||||
file (px/invoke! executor #(decode-row file))
|
|
||||||
|
|
||||||
file (if (and migrate? (fmg/need-migration? file))
|
file (if (and migrate? (fmg/need-migration? file))
|
||||||
(migrate-file cfg file options)
|
(migrate-file cfg file options)
|
||||||
|
|||||||
@@ -37,9 +37,7 @@
|
|||||||
[app.util.blob :as blob]
|
[app.util.blob :as blob]
|
||||||
[app.util.pointer-map :as pmap]
|
[app.util.pointer-map :as pmap]
|
||||||
[app.util.services :as sv]
|
[app.util.services :as sv]
|
||||||
[app.worker :as wrk]
|
[clojure.set :as set]))
|
||||||
[clojure.set :as set]
|
|
||||||
[promesa.exec :as px]))
|
|
||||||
|
|
||||||
(declare ^:private get-lagged-changes)
|
(declare ^:private get-lagged-changes)
|
||||||
(declare ^:private send-notifications!)
|
(declare ^:private send-notifications!)
|
||||||
@@ -209,7 +207,7 @@
|
|||||||
Follow the inner implementation to `update-file-data!` function.
|
Follow the inner implementation to `update-file-data!` function.
|
||||||
|
|
||||||
Only intended for internal use on this module."
|
Only intended for internal use on this module."
|
||||||
[{:keys [::db/conn ::wrk/executor ::timestamp] :as cfg}
|
[{:keys [::db/conn ::timestamp] :as cfg}
|
||||||
{:keys [profile-id file team features changes session-id skip-validate] :as params}]
|
{:keys [profile-id file team features changes session-id skip-validate] :as params}]
|
||||||
|
|
||||||
(let [;; Retrieve the file data
|
(let [;; Retrieve the file data
|
||||||
@@ -222,15 +220,11 @@
|
|||||||
|
|
||||||
;; We create a new lexycal scope for clearly delimit the result of
|
;; We create a new lexycal scope for clearly delimit the result of
|
||||||
;; executing this update file operation and all its side effects
|
;; executing this update file operation and all its side effects
|
||||||
(let [file (px/invoke! executor
|
(let [file (binding [cfeat/*current* features
|
||||||
(fn []
|
cfeat/*previous* (:features file)]
|
||||||
;; Process the file data on separated thread for avoid to do
|
(update-file-data! cfg file
|
||||||
;; the CPU intensive operation on vthread.
|
process-changes-and-validate
|
||||||
(binding [cfeat/*current* features
|
changes skip-validate))]
|
||||||
cfeat/*previous* (:features file)]
|
|
||||||
(update-file-data! cfg file
|
|
||||||
process-changes-and-validate
|
|
||||||
changes skip-validate))))]
|
|
||||||
|
|
||||||
(feat.fmigr/upsert-migrations! conn file)
|
(feat.fmigr/upsert-migrations! conn file)
|
||||||
(persist-file! cfg file)
|
(persist-file! cfg file)
|
||||||
|
|||||||
@@ -26,9 +26,7 @@
|
|||||||
[app.rpc.helpers :as rph]
|
[app.rpc.helpers :as rph]
|
||||||
[app.rpc.quotes :as quotes]
|
[app.rpc.quotes :as quotes]
|
||||||
[app.storage :as sto]
|
[app.storage :as sto]
|
||||||
[app.util.services :as sv]
|
[app.util.services :as sv]))
|
||||||
[app.worker :as-alias wrk]
|
|
||||||
[promesa.exec :as px]))
|
|
||||||
|
|
||||||
(def valid-weight #{100 200 300 400 500 600 700 800 900 950})
|
(def valid-weight #{100 200 300 400 500 600 700 800 900 950})
|
||||||
(def valid-style #{"normal" "italic"})
|
(def valid-style #{"normal" "italic"})
|
||||||
@@ -105,7 +103,7 @@
|
|||||||
(create-font-variant cfg (assoc params :profile-id profile-id)))))
|
(create-font-variant cfg (assoc params :profile-id profile-id)))))
|
||||||
|
|
||||||
(defn create-font-variant
|
(defn create-font-variant
|
||||||
[{:keys [::sto/storage ::db/conn ::wrk/executor]} {:keys [data] :as params}]
|
[{:keys [::sto/storage ::db/conn]} {:keys [data] :as params}]
|
||||||
(letfn [(generate-missing! [data]
|
(letfn [(generate-missing! [data]
|
||||||
(let [data (media/run {:cmd :generate-fonts :input data})]
|
(let [data (media/run {:cmd :generate-fonts :input data})]
|
||||||
(when (and (not (contains? data "font/otf"))
|
(when (and (not (contains? data "font/otf"))
|
||||||
@@ -157,7 +155,7 @@
|
|||||||
:otf-file-id (:id otf)
|
:otf-file-id (:id otf)
|
||||||
:ttf-file-id (:id ttf)}))]
|
:ttf-file-id (:id ttf)}))]
|
||||||
|
|
||||||
(let [data (px/invoke! executor (partial generate-missing! data))
|
(let [data (generate-missing! data)
|
||||||
assets (persist-fonts-files! data)
|
assets (persist-fonts-files! data)
|
||||||
result (insert-font-variant! assets)]
|
result (insert-font-variant! assets)]
|
||||||
(vary-meta result assoc ::audit/replace-props (update params :data (comp vec keys))))))
|
(vary-meta result assoc ::audit/replace-props (update params :data (comp vec keys))))))
|
||||||
|
|||||||
@@ -28,9 +28,7 @@
|
|||||||
[app.setup :as-alias setup]
|
[app.setup :as-alias setup]
|
||||||
[app.setup.templates :as tmpl]
|
[app.setup.templates :as tmpl]
|
||||||
[app.storage.tmp :as tmp]
|
[app.storage.tmp :as tmp]
|
||||||
[app.util.services :as sv]
|
[app.util.services :as sv]))
|
||||||
[app.worker :as-alias wrk]
|
|
||||||
[promesa.exec :as px]))
|
|
||||||
|
|
||||||
;; --- COMMAND: Duplicate File
|
;; --- COMMAND: Duplicate File
|
||||||
|
|
||||||
@@ -313,15 +311,14 @@
|
|||||||
|
|
||||||
;; Update the modification date of the all affected projects
|
;; Update the modification date of the all affected projects
|
||||||
;; ensuring that the destination project is the most recent one.
|
;; ensuring that the destination project is the most recent one.
|
||||||
(doseq [project-id (into (list project-id) source)]
|
(loop [project-ids (into (list project-id) source)
|
||||||
|
modified-at (ct/now)]
|
||||||
;; NOTE: as this is executed on virtual thread, sleeping does
|
(when-let [project-id (first project-ids)]
|
||||||
;; not causes major issues, and allows an easy way to set a
|
(db/update! conn :project
|
||||||
;; trully different modification date to each file.
|
{:modified-at modified-at}
|
||||||
(px/sleep 10)
|
{:id project-id})
|
||||||
(db/update! conn :project
|
(recur (rest project-ids)
|
||||||
{:modified-at (ct/now)}
|
(ct/plus modified-at 10))))
|
||||||
{:id project-id}))
|
|
||||||
|
|
||||||
nil))
|
nil))
|
||||||
|
|
||||||
@@ -396,12 +393,7 @@
|
|||||||
;; --- COMMAND: Clone Template
|
;; --- COMMAND: Clone Template
|
||||||
|
|
||||||
(defn clone-template
|
(defn clone-template
|
||||||
[{:keys [::db/pool ::wrk/executor] :as cfg} {:keys [project-id profile-id] :as params} template]
|
[{:keys [::db/pool] :as cfg} {:keys [project-id profile-id] :as params} template]
|
||||||
|
|
||||||
;; NOTE: the importation process performs some operations
|
|
||||||
;; that are not very friendly with virtual threads, and for
|
|
||||||
;; avoid unexpected blocking of other concurrent operations
|
|
||||||
;; we dispatch that operation to a dedicated executor.
|
|
||||||
(let [template (tmp/tempfile-from template
|
(let [template (tmp/tempfile-from template
|
||||||
:prefix "penpot.template."
|
:prefix "penpot.template."
|
||||||
:suffix ""
|
:suffix ""
|
||||||
@@ -419,8 +411,8 @@
|
|||||||
(assoc ::bfc/features (cfeat/get-team-enabled-features cf/flags team)))
|
(assoc ::bfc/features (cfeat/get-team-enabled-features cf/flags team)))
|
||||||
|
|
||||||
result (if (= format :binfile-v3)
|
result (if (= format :binfile-v3)
|
||||||
(px/invoke! executor (partial bf.v3/import-files! cfg))
|
(bf.v3/import-files! cfg)
|
||||||
(px/invoke! executor (partial bf.v1/import-files! cfg)))]
|
(bf.v1/import-files! cfg))]
|
||||||
|
|
||||||
(db/tx-run! cfg
|
(db/tx-run! cfg
|
||||||
(fn [{:keys [::db/conn] :as cfg}]
|
(fn [{:keys [::db/conn] :as cfg}]
|
||||||
|
|||||||
@@ -24,10 +24,8 @@
|
|||||||
[app.storage :as sto]
|
[app.storage :as sto]
|
||||||
[app.storage.tmp :as tmp]
|
[app.storage.tmp :as tmp]
|
||||||
[app.util.services :as sv]
|
[app.util.services :as sv]
|
||||||
[app.worker :as-alias wrk]
|
|
||||||
[cuerdas.core :as str]
|
[cuerdas.core :as str]
|
||||||
[datoteka.io :as io]
|
[datoteka.io :as io]))
|
||||||
[promesa.exec :as px]))
|
|
||||||
|
|
||||||
(def default-max-file-size
|
(def default-max-file-size
|
||||||
(* 1024 1024 10)) ; 10 MiB
|
(* 1024 1024 10)) ; 10 MiB
|
||||||
@@ -153,9 +151,9 @@
|
|||||||
(assoc ::image (process-main-image info)))))
|
(assoc ::image (process-main-image info)))))
|
||||||
|
|
||||||
(defn- create-file-media-object
|
(defn- create-file-media-object
|
||||||
[{:keys [::sto/storage ::db/conn ::wrk/executor] :as cfg}
|
[{:keys [::sto/storage ::db/conn] :as cfg}
|
||||||
{:keys [id file-id is-local name content]}]
|
{:keys [id file-id is-local name content]}]
|
||||||
(let [result (px/invoke! executor (partial process-image content))
|
(let [result (process-image content)
|
||||||
image (sto/put-object! storage (::image result))
|
image (sto/put-object! storage (::image result))
|
||||||
thumb (when-let [params (::thumb result)]
|
thumb (when-let [params (::thumb result)]
|
||||||
(sto/put-object! storage params))]
|
(sto/put-object! storage params))]
|
||||||
|
|||||||
@@ -30,16 +30,13 @@
|
|||||||
[app.tokens :as tokens]
|
[app.tokens :as tokens]
|
||||||
[app.util.services :as sv]
|
[app.util.services :as sv]
|
||||||
[app.worker :as wrk]
|
[app.worker :as wrk]
|
||||||
[cuerdas.core :as str]
|
[cuerdas.core :as str]))
|
||||||
[promesa.exec :as px]))
|
|
||||||
|
|
||||||
(declare check-profile-existence!)
|
(declare check-profile-existence!)
|
||||||
(declare decode-row)
|
(declare decode-row)
|
||||||
(declare derive-password)
|
|
||||||
(declare filter-props)
|
(declare filter-props)
|
||||||
(declare get-profile)
|
(declare get-profile)
|
||||||
(declare strip-private-attrs)
|
(declare strip-private-attrs)
|
||||||
(declare verify-password)
|
|
||||||
|
|
||||||
(def schema:props-notifications
|
(def schema:props-notifications
|
||||||
[:map {:title "props-notifications"}
|
[:map {:title "props-notifications"}
|
||||||
@@ -192,7 +189,7 @@
|
|||||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id old-password] :as params}]
|
[{:keys [::db/conn] :as cfg} {:keys [profile-id old-password] :as params}]
|
||||||
(let [profile (db/get-by-id conn :profile profile-id ::sql/for-update true)]
|
(let [profile (db/get-by-id conn :profile profile-id ::sql/for-update true)]
|
||||||
(when (and (not= (:password profile) "!")
|
(when (and (not= (:password profile) "!")
|
||||||
(not (:valid (verify-password cfg old-password (:password profile)))))
|
(not (:valid (auth/verify-password old-password (:password profile)))))
|
||||||
(ex/raise :type :validation
|
(ex/raise :type :validation
|
||||||
:code :old-password-not-match))
|
:code :old-password-not-match))
|
||||||
profile))
|
profile))
|
||||||
@@ -201,7 +198,7 @@
|
|||||||
[{:keys [::db/conn] :as cfg} {:keys [id password] :as profile}]
|
[{:keys [::db/conn] :as cfg} {:keys [id password] :as profile}]
|
||||||
(when-not (db/read-only? conn)
|
(when-not (db/read-only? conn)
|
||||||
(db/update! conn :profile
|
(db/update! conn :profile
|
||||||
{:password (derive-password cfg password)}
|
{:password (auth/derive-password password)}
|
||||||
{:id id})
|
{:id id})
|
||||||
nil))
|
nil))
|
||||||
|
|
||||||
@@ -303,12 +300,11 @@
|
|||||||
:content-type (:mtype thumb)}))
|
:content-type (:mtype thumb)}))
|
||||||
|
|
||||||
(defn upload-photo
|
(defn upload-photo
|
||||||
[{:keys [::sto/storage ::wrk/executor] :as cfg} {:keys [file] :as params}]
|
[{:keys [::sto/storage] :as cfg} {:keys [file] :as params}]
|
||||||
(let [params (-> cfg
|
(let [params (-> cfg
|
||||||
(assoc ::climit/id [[:process-image/by-profile (:profile-id params)]
|
(assoc ::climit/id [[:process-image/by-profile (:profile-id params)]
|
||||||
[:process-image/global]])
|
[:process-image/global]])
|
||||||
(assoc ::climit/label "upload-photo")
|
(assoc ::climit/label "upload-photo")
|
||||||
(assoc ::climit/executor executor)
|
|
||||||
(climit/invoke! generate-thumbnail! file))]
|
(climit/invoke! generate-thumbnail! file))]
|
||||||
(sto/put-object! storage params)))
|
(sto/put-object! storage params)))
|
||||||
|
|
||||||
@@ -548,15 +544,6 @@
|
|||||||
[props]
|
[props]
|
||||||
(into {} (filter (fn [[k _]] (simple-ident? k))) props))
|
(into {} (filter (fn [[k _]] (simple-ident? k))) props))
|
||||||
|
|
||||||
(defn derive-password
|
|
||||||
[{:keys [::wrk/executor]} password]
|
|
||||||
(when password
|
|
||||||
(px/invoke! executor (partial auth/derive-password password))))
|
|
||||||
|
|
||||||
(defn verify-password
|
|
||||||
[{:keys [::wrk/executor]} password password-data]
|
|
||||||
(px/invoke! executor (partial auth/verify-password password password-data)))
|
|
||||||
|
|
||||||
(defn decode-row
|
(defn decode-row
|
||||||
[{:keys [props] :as row}]
|
[{:keys [props] :as row}]
|
||||||
(cond-> row
|
(cond-> row
|
||||||
|
|||||||
@@ -7,7 +7,7 @@
|
|||||||
(ns app.srepl.cli
|
(ns app.srepl.cli
|
||||||
"PREPL API for external usage (CLI or ADMIN)"
|
"PREPL API for external usage (CLI or ADMIN)"
|
||||||
(:require
|
(:require
|
||||||
[app.auth :as auth]
|
[app.auth :refer [derive-password]]
|
||||||
[app.common.exceptions :as ex]
|
[app.common.exceptions :as ex]
|
||||||
[app.common.schema :as sm]
|
[app.common.schema :as sm]
|
||||||
[app.common.schema.generators :as sg]
|
[app.common.schema.generators :as sg]
|
||||||
@@ -54,7 +54,7 @@
|
|||||||
(some-> (get-current-system)
|
(some-> (get-current-system)
|
||||||
(db/tx-run!
|
(db/tx-run!
|
||||||
(fn [{:keys [::db/conn] :as system}]
|
(fn [{:keys [::db/conn] :as system}]
|
||||||
(let [password (cmd.profile/derive-password system password)
|
(let [password (derive-password password)
|
||||||
params {:id (uuid/next)
|
params {:id (uuid/next)
|
||||||
:email email
|
:email email
|
||||||
:fullname fullname
|
:fullname fullname
|
||||||
@@ -74,7 +74,7 @@
|
|||||||
(assoc :fullname fullname)
|
(assoc :fullname fullname)
|
||||||
|
|
||||||
(some? password)
|
(some? password)
|
||||||
(assoc :password (auth/derive-password password))
|
(assoc :password (derive-password password))
|
||||||
|
|
||||||
(some? is-active)
|
(some? is-active)
|
||||||
(assoc :is-active is-active))]
|
(assoc :is-active is-active))]
|
||||||
@@ -124,7 +124,7 @@
|
|||||||
|
|
||||||
(defmethod exec-command "derive-password"
|
(defmethod exec-command "derive-password"
|
||||||
[{:keys [password]}]
|
[{:keys [password]}]
|
||||||
(auth/derive-password password))
|
(derive-password password))
|
||||||
|
|
||||||
(defmethod exec-command "authenticate"
|
(defmethod exec-command "authenticate"
|
||||||
[{:keys [token]}]
|
[{:keys [token]}]
|
||||||
|
|||||||
@@ -31,13 +31,13 @@
|
|||||||
java.time.Duration
|
java.time.Duration
|
||||||
java.util.Collection
|
java.util.Collection
|
||||||
java.util.Optional
|
java.util.Optional
|
||||||
|
java.util.concurrent.atomic.AtomicLong
|
||||||
org.reactivestreams.Subscriber
|
org.reactivestreams.Subscriber
|
||||||
software.amazon.awssdk.core.ResponseBytes
|
software.amazon.awssdk.core.ResponseBytes
|
||||||
software.amazon.awssdk.core.async.AsyncRequestBody
|
software.amazon.awssdk.core.async.AsyncRequestBody
|
||||||
software.amazon.awssdk.core.async.AsyncResponseTransformer
|
software.amazon.awssdk.core.async.AsyncResponseTransformer
|
||||||
software.amazon.awssdk.core.async.BlockingInputStreamAsyncRequestBody
|
software.amazon.awssdk.core.async.BlockingInputStreamAsyncRequestBody
|
||||||
software.amazon.awssdk.core.client.config.ClientAsyncConfiguration
|
software.amazon.awssdk.core.client.config.ClientAsyncConfiguration
|
||||||
software.amazon.awssdk.core.client.config.SdkAdvancedAsyncClientOption
|
|
||||||
software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient
|
software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient
|
||||||
software.amazon.awssdk.http.nio.netty.SdkEventLoopGroup
|
software.amazon.awssdk.http.nio.netty.SdkEventLoopGroup
|
||||||
software.amazon.awssdk.regions.Region
|
software.amazon.awssdk.regions.Region
|
||||||
@@ -87,12 +87,11 @@
|
|||||||
|
|
||||||
(def ^:private schema:config
|
(def ^:private schema:config
|
||||||
[:map {:title "s3-backend-config"}
|
[:map {:title "s3-backend-config"}
|
||||||
::wrk/executor
|
::wrk/netty-io-executor
|
||||||
[::region {:optional true} :keyword]
|
[::region {:optional true} :keyword]
|
||||||
[::bucket {:optional true} ::sm/text]
|
[::bucket {:optional true} ::sm/text]
|
||||||
[::prefix {:optional true} ::sm/text]
|
[::prefix {:optional true} ::sm/text]
|
||||||
[::endpoint {:optional true} ::sm/uri]
|
[::endpoint {:optional true} ::sm/uri]])
|
||||||
[::io-threads {:optional true} ::sm/int]])
|
|
||||||
|
|
||||||
(defmethod ig/expand-key ::backend
|
(defmethod ig/expand-key ::backend
|
||||||
[k v]
|
[k v]
|
||||||
@@ -110,6 +109,7 @@
|
|||||||
presigner (build-s3-presigner params)]
|
presigner (build-s3-presigner params)]
|
||||||
(assoc params
|
(assoc params
|
||||||
::sto/type :s3
|
::sto/type :s3
|
||||||
|
::counter (AtomicLong. 0)
|
||||||
::client @client
|
::client @client
|
||||||
::presigner presigner
|
::presigner presigner
|
||||||
::close-fn #(.close ^java.lang.AutoCloseable client)))))
|
::close-fn #(.close ^java.lang.AutoCloseable client)))))
|
||||||
@@ -121,7 +121,7 @@
|
|||||||
(defmethod ig/halt-key! ::backend
|
(defmethod ig/halt-key! ::backend
|
||||||
[_ {:keys [::close-fn]}]
|
[_ {:keys [::close-fn]}]
|
||||||
(when (fn? close-fn)
|
(when (fn? close-fn)
|
||||||
(px/run! close-fn)))
|
(close-fn)))
|
||||||
|
|
||||||
(def ^:private schema:backend
|
(def ^:private schema:backend
|
||||||
[:map {:title "s3-backend"}
|
[:map {:title "s3-backend"}
|
||||||
@@ -198,19 +198,16 @@
|
|||||||
(Region/of (name region)))
|
(Region/of (name region)))
|
||||||
|
|
||||||
(defn- build-s3-client
|
(defn- build-s3-client
|
||||||
[{:keys [::region ::endpoint ::io-threads ::wrk/executor]}]
|
[{:keys [::region ::endpoint ::wrk/netty-io-executor]}]
|
||||||
(let [aconfig (-> (ClientAsyncConfiguration/builder)
|
(let [aconfig (-> (ClientAsyncConfiguration/builder)
|
||||||
(.advancedOption SdkAdvancedAsyncClientOption/FUTURE_COMPLETION_EXECUTOR executor)
|
|
||||||
(.build))
|
(.build))
|
||||||
|
|
||||||
sconfig (-> (S3Configuration/builder)
|
sconfig (-> (S3Configuration/builder)
|
||||||
(cond-> (some? endpoint) (.pathStyleAccessEnabled true))
|
(cond-> (some? endpoint) (.pathStyleAccessEnabled true))
|
||||||
(.build))
|
(.build))
|
||||||
|
|
||||||
thr-num (or io-threads (min 16 (px/get-available-processors)))
|
|
||||||
hclient (-> (NettyNioAsyncHttpClient/builder)
|
hclient (-> (NettyNioAsyncHttpClient/builder)
|
||||||
(.eventLoopGroupBuilder (-> (SdkEventLoopGroup/builder)
|
(.eventLoopGroup (SdkEventLoopGroup/create netty-io-executor))
|
||||||
(.numberOfThreads (int thr-num))))
|
|
||||||
(.connectionAcquisitionTimeout default-timeout)
|
(.connectionAcquisitionTimeout default-timeout)
|
||||||
(.connectionTimeout default-timeout)
|
(.connectionTimeout default-timeout)
|
||||||
(.readTimeout default-timeout)
|
(.readTimeout default-timeout)
|
||||||
@@ -262,7 +259,7 @@
|
|||||||
(.close ^InputStream input))))
|
(.close ^InputStream input))))
|
||||||
|
|
||||||
(defn- make-request-body
|
(defn- make-request-body
|
||||||
[executor content]
|
[counter content]
|
||||||
(let [size (impl/get-size content)]
|
(let [size (impl/get-size content)]
|
||||||
(reify
|
(reify
|
||||||
AsyncRequestBody
|
AsyncRequestBody
|
||||||
@@ -272,16 +269,19 @@
|
|||||||
(^void subscribe [_ ^Subscriber subscriber]
|
(^void subscribe [_ ^Subscriber subscriber]
|
||||||
(let [delegate (AsyncRequestBody/forBlockingInputStream (long size))
|
(let [delegate (AsyncRequestBody/forBlockingInputStream (long size))
|
||||||
input (io/input-stream content)]
|
input (io/input-stream content)]
|
||||||
(px/run! executor (partial write-input-stream delegate input))
|
|
||||||
|
(px/thread-call (partial write-input-stream delegate input)
|
||||||
|
{:name (str "penpot/storage/" (.getAndIncrement ^AtomicLong counter))})
|
||||||
|
|
||||||
(.subscribe ^BlockingInputStreamAsyncRequestBody delegate
|
(.subscribe ^BlockingInputStreamAsyncRequestBody delegate
|
||||||
^Subscriber subscriber))))))
|
^Subscriber subscriber))))))
|
||||||
|
|
||||||
(defn- put-object
|
(defn- put-object
|
||||||
[{:keys [::client ::bucket ::prefix ::wrk/executor]} {:keys [id] :as object} content]
|
[{:keys [::client ::bucket ::prefix ::counter]} {:keys [id] :as object} content]
|
||||||
(let [path (dm/str prefix (impl/id->path id))
|
(let [path (dm/str prefix (impl/id->path id))
|
||||||
mdata (meta object)
|
mdata (meta object)
|
||||||
mtype (:content-type mdata "application/octet-stream")
|
mtype (:content-type mdata "application/octet-stream")
|
||||||
rbody (make-request-body executor content)
|
rbody (make-request-body counter content)
|
||||||
request (.. (PutObjectRequest/builder)
|
request (.. (PutObjectRequest/builder)
|
||||||
(bucket bucket)
|
(bucket bucket)
|
||||||
(contentType mtype)
|
(contentType mtype)
|
||||||
|
|||||||
@@ -44,7 +44,7 @@
|
|||||||
[_ cfg]
|
[_ cfg]
|
||||||
(fs/create-dir default-tmp-dir)
|
(fs/create-dir default-tmp-dir)
|
||||||
(px/fn->thread (partial io-loop cfg)
|
(px/fn->thread (partial io-loop cfg)
|
||||||
{:name "penpot/storage/tmp-cleaner" :virtual true}))
|
{:name "penpot/storage/tmp-cleaner"}))
|
||||||
|
|
||||||
(defmethod ig/halt-key! ::cleaner
|
(defmethod ig/halt-key! ::cleaner
|
||||||
[_ thread]
|
[_ thread]
|
||||||
|
|||||||
@@ -27,7 +27,7 @@
|
|||||||
(sp/put! channel [type data])
|
(sp/put! channel [type data])
|
||||||
nil)))
|
nil)))
|
||||||
|
|
||||||
(defn start-listener
|
(defn spawn-listener
|
||||||
[channel on-event on-close]
|
[channel on-event on-close]
|
||||||
(assert (sp/chan? channel) "expected active events channel")
|
(assert (sp/chan? channel) "expected active events channel")
|
||||||
|
|
||||||
@@ -51,7 +51,7 @@
|
|||||||
[f on-event]
|
[f on-event]
|
||||||
|
|
||||||
(binding [*channel* (sp/chan :buf 32)]
|
(binding [*channel* (sp/chan :buf 32)]
|
||||||
(let [listener (start-listener *channel* on-event (constantly nil))]
|
(let [listener (spawn-listener *channel* on-event (constantly nil))]
|
||||||
(try
|
(try
|
||||||
(f)
|
(f)
|
||||||
(finally
|
(finally
|
||||||
|
|||||||
@@ -112,7 +112,7 @@
|
|||||||
|
|
||||||
(if (db/read-only? pool)
|
(if (db/read-only? pool)
|
||||||
(l/wrn :hint "not started (db is read-only)")
|
(l/wrn :hint "not started (db is read-only)")
|
||||||
(px/fn->thread dispatcher :name "penpot/worker/dispatcher" :virtual false))))
|
(px/fn->thread dispatcher :name "penpot/worker-dispatcher"))))
|
||||||
|
|
||||||
(defmethod ig/halt-key! ::wrk/dispatcher
|
(defmethod ig/halt-key! ::wrk/dispatcher
|
||||||
[_ thread]
|
[_ thread]
|
||||||
|
|||||||
@@ -7,97 +7,79 @@
|
|||||||
(ns app.worker.executor
|
(ns app.worker.executor
|
||||||
"Async tasks abstraction (impl)."
|
"Async tasks abstraction (impl)."
|
||||||
(:require
|
(:require
|
||||||
[app.common.data :as d]
|
|
||||||
[app.common.logging :as l]
|
[app.common.logging :as l]
|
||||||
|
[app.common.math :as mth]
|
||||||
[app.common.schema :as sm]
|
[app.common.schema :as sm]
|
||||||
[app.common.time :as ct]
|
|
||||||
[app.metrics :as mtx]
|
|
||||||
[app.worker :as-alias wrk]
|
[app.worker :as-alias wrk]
|
||||||
[integrant.core :as ig]
|
[integrant.core :as ig]
|
||||||
[promesa.exec :as px])
|
[promesa.exec :as px])
|
||||||
(:import
|
(:import
|
||||||
java.util.concurrent.ThreadPoolExecutor))
|
io.netty.channel.nio.NioEventLoopGroup
|
||||||
|
io.netty.util.concurrent.DefaultEventExecutorGroup
|
||||||
|
java.util.concurrent.ExecutorService
|
||||||
|
java.util.concurrent.ThreadFactory))
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
(sm/register!
|
(sm/register!
|
||||||
{:type ::wrk/executor
|
{:type ::wrk/executor
|
||||||
:pred #(instance? ThreadPoolExecutor %)
|
:pred #(instance? ExecutorService %)
|
||||||
:type-properties
|
:type-properties
|
||||||
{:title "executor"
|
{:title "executor"
|
||||||
:description "Instance of ThreadPoolExecutor"}})
|
:description "Instance of ExecutorService"}})
|
||||||
|
|
||||||
|
(sm/register!
|
||||||
|
{:type ::wrk/netty-io-executor
|
||||||
|
:pred #(instance? NioEventLoopGroup %)
|
||||||
|
:type-properties
|
||||||
|
{:title "executor"
|
||||||
|
:description "Instance of NioEventLoopGroup"}})
|
||||||
|
|
||||||
|
(sm/register!
|
||||||
|
{:type ::wrk/netty-executor
|
||||||
|
:pred #(instance? DefaultEventExecutorGroup %)
|
||||||
|
:type-properties
|
||||||
|
{:title "executor"
|
||||||
|
:description "Instance of DefaultEventExecutorGroup"}})
|
||||||
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
;; EXECUTOR
|
;; IO Executor
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
|
|
||||||
(defmethod ig/init-key ::wrk/executor
|
(defmethod ig/assert-key ::wrk/netty-io-executor
|
||||||
[_ _]
|
[_ {:keys [threads]}]
|
||||||
(let [factory (px/thread-factory :prefix "penpot/default/")
|
(assert (or (nil? threads) (int? threads))
|
||||||
executor (px/cached-executor :factory factory :keepalive 60000)]
|
"expected valid threads value, revisit PENPOT_NETTY_IO_THREADS environment variable"))
|
||||||
(l/inf :hint "executor started")
|
|
||||||
executor))
|
|
||||||
|
|
||||||
(defmethod ig/halt-key! ::wrk/executor
|
(defmethod ig/init-key ::wrk/netty-io-executor
|
||||||
|
[_ {:keys [threads]}]
|
||||||
|
(let [factory (px/thread-factory :prefix "penpot/netty-io/")
|
||||||
|
nthreads (or threads (mth/round (/ (px/get-available-processors) 2)))
|
||||||
|
nthreads (max 2 nthreads)]
|
||||||
|
(l/inf :hint "start netty io executor" :threads nthreads)
|
||||||
|
(NioEventLoopGroup. (int nthreads) ^ThreadFactory factory)))
|
||||||
|
|
||||||
|
(defmethod ig/halt-key! ::wrk/netty-io-executor
|
||||||
[_ instance]
|
[_ instance]
|
||||||
(px/shutdown! instance))
|
(px/shutdown! instance))
|
||||||
|
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
;; MONITOR
|
;; IO Offload Executor
|
||||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||||
|
|
||||||
(defn- get-stats
|
(defmethod ig/assert-key ::wrk/netty-executor
|
||||||
[^ThreadPoolExecutor executor]
|
[_ {:keys [threads]}]
|
||||||
{:active (.getPoolSize ^ThreadPoolExecutor executor)
|
(assert (or (nil? threads) (int? threads))
|
||||||
:running (.getActiveCount ^ThreadPoolExecutor executor)
|
"expected valid threads value, revisit PENPOT_EXEC_THREADS environment variable"))
|
||||||
:completed (.getCompletedTaskCount ^ThreadPoolExecutor executor)})
|
|
||||||
|
|
||||||
(defmethod ig/expand-key ::wrk/monitor
|
(defmethod ig/init-key ::wrk/netty-executor
|
||||||
[k v]
|
[_ {:keys [threads]}]
|
||||||
{k (-> (d/without-nils v)
|
(let [factory (px/thread-factory :prefix "penpot/exec/")
|
||||||
(assoc ::interval (ct/duration "2s")))})
|
nthreads (or threads (mth/round (/ (px/get-available-processors) 2)))
|
||||||
|
nthreads (max 2 nthreads)]
|
||||||
|
(l/inf :hint "start default executor" :threads nthreads)
|
||||||
|
(DefaultEventExecutorGroup. (int nthreads) ^ThreadFactory factory)))
|
||||||
|
|
||||||
(defmethod ig/init-key ::wrk/monitor
|
(defmethod ig/halt-key! ::wrk/netty-executor
|
||||||
[_ {:keys [::wrk/executor ::mtx/metrics ::interval ::wrk/name]}]
|
[_ instance]
|
||||||
(letfn [(monitor! [executor prev-completed]
|
(px/shutdown! instance))
|
||||||
(let [labels (into-array String [(d/name name)])
|
|
||||||
stats (get-stats executor)
|
|
||||||
|
|
||||||
completed (:completed stats)
|
|
||||||
completed-inc (- completed prev-completed)
|
|
||||||
completed-inc (if (neg? completed-inc) 0 completed-inc)]
|
|
||||||
|
|
||||||
(mtx/run! metrics
|
|
||||||
:id :executor-active-threads
|
|
||||||
:labels labels
|
|
||||||
:val (:active stats))
|
|
||||||
|
|
||||||
(mtx/run! metrics
|
|
||||||
:id :executor-running-threads
|
|
||||||
:labels labels
|
|
||||||
:val (:running stats))
|
|
||||||
|
|
||||||
(mtx/run! metrics
|
|
||||||
:id :executors-completed-tasks
|
|
||||||
:labels labels
|
|
||||||
:inc completed-inc)
|
|
||||||
|
|
||||||
completed-inc))]
|
|
||||||
|
|
||||||
(px/thread
|
|
||||||
{:name "penpot/executors-monitor" :virtual true}
|
|
||||||
(l/inf :hint "monitor started" :name name)
|
|
||||||
(try
|
|
||||||
(loop [completed 0]
|
|
||||||
(px/sleep interval)
|
|
||||||
(recur (long (monitor! executor completed))))
|
|
||||||
(catch InterruptedException _cause
|
|
||||||
(l/trc :hint "monitor: interrupted" :name name))
|
|
||||||
(catch Throwable cause
|
|
||||||
(l/err :hint "monitor: unexpected error" :name name :cause cause))
|
|
||||||
(finally
|
|
||||||
(l/inf :hint "monitor: terminated" :name name))))))
|
|
||||||
|
|
||||||
(defmethod ig/halt-key! ::wrk/monitor
|
|
||||||
[_ thread]
|
|
||||||
(px/interrupt! thread))
|
|
||||||
|
|||||||
@@ -248,7 +248,7 @@
|
|||||||
(defn- start-thread!
|
(defn- start-thread!
|
||||||
[{:keys [::rds/redis ::id ::queue ::wrk/tenant] :as cfg}]
|
[{:keys [::rds/redis ::id ::queue ::wrk/tenant] :as cfg}]
|
||||||
(px/thread
|
(px/thread
|
||||||
{:name (format "penpot/worker/runner:%s" id)}
|
{:name (str "penpot/worker-runner/" id)}
|
||||||
(l/inf :hint "started" :id id :queue queue)
|
(l/inf :hint "started" :id id :queue queue)
|
||||||
(try
|
(try
|
||||||
(dm/with-open [rconn (rds/connect redis)]
|
(dm/with-open [rconn (rds/connect redis)]
|
||||||
@@ -303,7 +303,7 @@
|
|||||||
(l/wrn :hint "not started (db is read-only)" :queue queue :parallelism parallelism)
|
(l/wrn :hint "not started (db is read-only)" :queue queue :parallelism parallelism)
|
||||||
(doall
|
(doall
|
||||||
(->> (range parallelism)
|
(->> (range parallelism)
|
||||||
(map #(assoc cfg ::id %))
|
(map #(assoc cfg ::id (str queue "/" %)))
|
||||||
(map start-thread!))))))
|
(map start-thread!))))))
|
||||||
|
|
||||||
(defmethod ig/halt-key! ::wrk/runner
|
(defmethod ig/halt-key! ::wrk/runner
|
||||||
|
|||||||
@@ -113,7 +113,6 @@
|
|||||||
:app.auth.oidc.providers/generic
|
:app.auth.oidc.providers/generic
|
||||||
:app.setup/templates
|
:app.setup/templates
|
||||||
:app.auth.oidc/routes
|
:app.auth.oidc/routes
|
||||||
:app.worker/monitor
|
|
||||||
:app.http.oauth/handler
|
:app.http.oauth/handler
|
||||||
:app.notifications/handler
|
:app.notifications/handler
|
||||||
:app.loggers.mattermost/reporter
|
:app.loggers.mattermost/reporter
|
||||||
|
|||||||
@@ -130,7 +130,6 @@
|
|||||||
ms-or-obj
|
ms-or-obj
|
||||||
|
|
||||||
(integer? ms-or-obj)
|
(integer? ms-or-obj)
|
||||||
|
|
||||||
(Duration/ofMillis ms-or-obj)
|
(Duration/ofMillis ms-or-obj)
|
||||||
|
|
||||||
:else
|
:else
|
||||||
@@ -433,4 +432,4 @@
|
|||||||
#?(:cljs
|
#?(:cljs
|
||||||
(extend-protocol cljs.core/IEncodeJS
|
(extend-protocol cljs.core/IEncodeJS
|
||||||
js/Date
|
js/Date
|
||||||
(-clj->js [x] x)))
|
(-clj->js [x] x)))
|
||||||
|
|||||||