From c904fc347ead960ae531af0c6a40ac2872390168 Mon Sep 17 00:00:00 2001
From: Judith Massa <52293389+jsmassa@users.noreply.github.com>
Date: Wed, 12 Apr 2023 11:11:15 +0200
Subject: [PATCH] style: clj kondo fixes (#88)

---
 .circleci/config.yml                       |   1 +
 .clj-kondo/config.edn                      |   5 +
 .clj-kondo/hooks/go_try_.clj               |  11 ++
 benchmark/src/benchmark/common.clj         |   4 +-
 benchmark/src/benchmark/measure/create.clj |   6 +-
 bin/run-all                                |  11 ++
 bin/run-cljstests                          |   2 +
 bin/run-unittests                          |   2 +
 deps.edn                                   |  11 +-
 src/konserve/cache.cljc                    |   9 +-
 src/konserve/compliance_test.cljc          |  13 +-
 src/konserve/compressor.cljc               |   9 +-
 src/konserve/core.cljc                     |  10 +-
 src/konserve/filestore.clj                 |  78 +++------
 src/konserve/impl/defaults.cljc            |  12 +-
 src/konserve/impl/storage_layout.cljc      |   5 +-
 src/konserve/indexeddb.cljs                |  43 +++---
 src/konserve/memory.cljc                   |   2 +-
 src/konserve/nio_helpers.clj               |   6 +-
 src/konserve/node_filestore.cljs           |  88 +++++------
 src/konserve/serializers.cljc              |  11 +-
 template/pom.xml                           |   1 +
 test/konserve/cache_test.cljs              |   5 +-
 test/konserve/core_test.clj                |   2 +-
 test/konserve/encryptor_test.cljc          |   5 +-
 test/konserve/filestore_migration_test.clj |   5 +-
 test/konserve/filestore_test.clj           |   4 +-
 test/konserve/gc_test.clj                  |   4 +-
 test/konserve/gc_test.cljs                 |   4 +-
 test/konserve/indexeddb_test.cljs          |  10 +-
 test/konserve/node_filestore_test.cljs     |  11 +-
 test/konserve/old_filestore.clj            | 162 +++++++++------
 test/konserve/serializers_test.clj         |   5 +-
 test/konserve/serializers_test.cljs        |   8 +-
 34 files changed, 276 insertions(+), 289 deletions(-)
 create mode 100644 .clj-kondo/config.edn
 create mode 100644 .clj-kondo/hooks/go_try_.clj
 create mode 100755 bin/run-all

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 5da714a..b28bf75 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -35,6 +35,7 @@ workflows:
               only: main
           requires:
             - tools/unittest
+            - tools/cljstest
       - tools/release:
           context:
             - github-token
diff --git a/.clj-kondo/config.edn b/.clj-kondo/config.edn
new file mode 100644
index 0000000..8e30d15
--- /dev/null
+++ b/.clj-kondo/config.edn
@@ -0,0 +1,5 @@
+{:hooks {:analyze-call {superv.async/go-try- hooks.go-try-/go-try-}}
+ :lint-as {konserve.utils/with-promise clj-kondo.lint-as/def-catch-all}
+ :linters {:unresolved-symbol {:exclude [(konserve.utils/async+sync [do])]}
+           :unused-referred-var {:exclude {konserve.core [locked]}}
+           :clojure-lsp/unused-public-var {:exclude [build]}}}
diff --git a/.clj-kondo/hooks/go_try_.clj b/.clj-kondo/hooks/go_try_.clj
new file mode 100644
index 0000000..e1bbfcc
--- /dev/null
+++ b/.clj-kondo/hooks/go_try_.clj
@@ -0,0 +1,11 @@
+(ns hooks.go-try-
+  (:require [clj-kondo.hooks-api :as api]))
+
+(defn go-try- [{:keys [:node]}]
+  (let [new-node (api/list-node
+                  (apply list
+                         (concat
+                          [(api/token-node 'try)]
+                          (rest (:children node))
+                          [(api/list-node (list (api/token-node 'finally)))])))]
+    {:node new-node}))
diff --git a/benchmark/src/benchmark/common.clj b/benchmark/src/benchmark/common.clj
index 9dfa126..cc76dae 100644
--- a/benchmark/src/benchmark/common.clj
+++ b/benchmark/src/benchmark/common.clj
@@ -1,6 +1,6 @@
 (ns benchmark.common
   (:require [konserve.core :as k]
-            [konserve.filestore :refer [new-fs-store delete-store]]
+            [konserve.filestore :refer [connect-fs-store delete-store]]
             [konserve.memory :refer [new-mem-store]]
             [clojure.core.async :refer [> (for [store-type stores
diff --git a/bin/run-all b/bin/run-all
new file mode 100755
index 0000000..4c549c8
--- /dev/null
+++ b/bin/run-all
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+set -o errexit
+set -o pipefail
+
+./bin/run-cljstests
+
+echo
+echo
+
+./bin/run-unittests
diff --git a/bin/run-cljstests b/bin/run-cljstests
index b8224c6..dd3cc6e 100755
--- a/bin/run-cljstests
+++ b/bin/run-cljstests
@@ -10,6 +10,8 @@ clojure -M:run-cljs-tests
 #[ -d "node_modules/ws" ] || npm install ws
 #clojure -M:test:cljs -m kaocha.runner unit-node
 
+echo
+echo
 echo "Running tests for browser"
 
 # kaocha version:
diff --git a/bin/run-unittests b/bin/run-unittests
index f61a8c0..7978b84 100755
--- a/bin/run-unittests
+++ b/bin/run-unittests
@@ -1,3 +1,5 @@
 #!/usr/bin/env bash
 
+echo "Running unittests"
+
 TIMBRE_LEVEL=':warn' clojure -M:test -m kaocha.runner unit
diff --git a/deps.edn b/deps.edn
index 49359f4..c82abed 100644
--- a/deps.edn
+++ b/deps.edn
@@ -1,16 +1,17 @@
 {:paths ["src"]
  :deps {org.clojure/clojure {:mvn/version "1.11.1"}
         org.clojure/data.fressian {:mvn/version "1.0.0"} ;; for filestore
-        fress/fress {:mvn/version "0.4.0"}
         mvxcvi/clj-cbor {:mvn/version "1.1.1"}
         io.replikativ/incognito {:mvn/version "0.3.66"}
         io.replikativ/hasch {:mvn/version "0.3.94"}
         io.replikativ/superv.async {:mvn/version "0.3.46"}
         io.replikativ/geheimnis {:mvn/version "0.1.1"}
-        org.clojars.mmb90/cljs-cache {:mvn/version "0.1.4"}
-        com.github.pkpkpk/cljs-node-io {:mvn/version "2.0.332"}
         org.lz4/lz4-java {:mvn/version "1.8.0"}
-        com.taoensso/timbre {:mvn/version "6.0.1"}}
+        com.taoensso/timbre {:mvn/version "6.0.1"}
+        ;; cljs
+        com.github.pkpkpk/cljs-node-io {:mvn/version "2.0.332"}
+        fress/fress {:mvn/version "0.4.0"}
+        org.clojars.mmb90/cljs-cache {:mvn/version "0.1.4"}}
 :aliases {:cljs {:extra-deps {org.clojure/clojurescript {:mvn/version "1.11.60"}
                               thheller/shadow-cljs {:mvn/version "2.22.0"}
                               binaryage/devtools {:mvn/version "1.0.6"}}
@@ -45,5 +46,7 @@
            :main-opts ["-m" "cljfmt.main" "check"]}
    :ffix {:extra-deps {cljfmt/cljfmt {:mvn/version "0.9.2"}}
           :main-opts ["-m" "cljfmt.main" "fix"]}
+   :lint {:replace-deps {clj-kondo/clj-kondo {:mvn/version "2023.02.17"}}
+          :main-opts ["-m" "clj-kondo.main" "--lint" "src"]}
    :outdated {:extra-deps {com.github.liquidz/antq {:mvn/version "2.2.983"}}
               :main-opts ["-m" "antq.core"]}}}
diff --git a/src/konserve/cache.cljc b/src/konserve/cache.cljc
index d688f67..8fdaaa3 100644
--- a/src/konserve/cache.cljc
+++ b/src/konserve/cache.cljc
@@ -4,7 +4,7 @@
   accessing the store, otherwise you should implement your own caching strategy."
   (:refer-clojure :exclude [assoc-in assoc exists? dissoc get get-in keys update-in update])
-  (:require [konserve.protocols :refer [-exists? -get-in -assoc-in
+  (:require [konserve.protocols :refer [-get-in -assoc-in
                                         -update-in -dissoc]]
             #?(:clj [clojure.core.cache :as cache]
                :cljs [cljs.cache :as cache])
@@ -101,6 +101,7 @@
           (swap! cache cache/miss key new-val))
         [old-val new-val])))))
 
+#_{:clj-kondo/ignore [:clojure-lsp/unused-public-var]}
 (defn update
   "Updates a position described by key by applying up-fn
   and storing the result atomically. Returns a vector [old new] of the previous
@@ -123,7 +124,7 @@
   (go-locked
    store (first key-vec)
    (let [cache (:cache store)
         [old-val new-val :as res] (serializer]])
-            #?(:clj [konserve.utils :refer [async+sync]])
-            [konserve.memory :refer [new-mem-store]]))
+            #?(:cljs [cljs.test :refer [is]])
+            #?(:clj [clojure.test :refer [are is testing]])))
 
-(deftype UnknownType [])
+#_(deftype UnknownType [])
 
-#?(:clj (defn exception? [thing]
+#_(:clj (defn exception? [thing]
           (instance?
Throwable thing))) #?(:clj diff --git a/src/konserve/compressor.cljc b/src/konserve/compressor.cljc index 244c2e0..785b7c2 100644 --- a/src/konserve/compressor.cljc +++ b/src/konserve/compressor.cljc @@ -12,9 +12,9 @@ (defrecord UnsupportedLZ4Compressor [serializer] PStoreSerializer - (-deserialize [_ read-handlers bytes] + (-deserialize [_ _read-handlers bytes] (throw (ex-info "Unsupported LZ4 compressor." {:bytes bytes}))) - (-serialize [_ bytes write-handlers val] + (-serialize [_ bytes _write-handlers _val] (throw (ex-info "Unsupported LZ4 compressor." {:bytes bytes})))) #?(:clj @@ -41,8 +41,9 @@ #?(:clj (defmacro native-image-build? [] (try - (and (Class/forName "org.graalvm.nativeimage.ImageInfo") - #_(eval '(org.graalvm.nativeimage.ImageInfo/inImageBuildtimeCode))) + ;(and + (Class/forName "org.graalvm.nativeimage.ImageInfo") + ;(eval '(org.graalvm.nativeimage.ImageInfo/inImageBuildtimeCode))) ;; TODO: when will this be uncommented? (catch Exception _ false)))) diff --git a/src/konserve/core.cljc b/src/konserve/core.cljc index 41d2dc9..c7a1716 100644 --- a/src/konserve/core.cljc +++ b/src/konserve/core.cljc @@ -1,13 +1,13 @@ (ns konserve.core (:refer-clojure :exclude [get get-in update update-in assoc assoc-in exists? dissoc keys]) - (:require [clojure.core.async :refer [chan put! poll!]] + (:require [clojure.core.async :refer [chan put! #?(:clj poll!)]] [hasch.core :as hasch] [konserve.protocols :refer [-exists? -get-meta -get-in -assoc-in -update-in -dissoc -bget -bassoc -keys]] - [konserve.utils :refer [meta-update async+sync #?(:clj *default-sync-translation*)]] + [konserve.utils :refer [meta-update async+sync *default-sync-translation*]] [superv.async :refer [go-try- store-key store-key->uuid-key]] [konserve.impl.storage-layout :refer [PBackingStore - -keys - PBackingBlob -close -get-lock -sync - -read-header -read-meta -read-value -read-binary - -write-header -write-meta -write-value -write-binary - PBackingLock -release header-size]] + PBackingBlob -close + PBackingLock header-size]] [konserve.nio-helpers :refer [blob->channel]] - [konserve.protocols :refer [-deserialize]] + [konserve.protocols :as kp] [konserve.utils :refer [async+sync *default-sync-translation*]] [superv.async :refer [go-try- store-key key) data (when (= :edn format) (->> ( ch - (-read-meta [this meta-size _env] + (-read-meta [_this meta-size _env] (let [view (js/Uint8Array. buf) bytes (.slice view storage-layout/header-size (+ storage-layout/header-size meta-size))] (go bytes))) - (-read-value [this meta-size _env] + (-read-value [_this meta-size _env] (let [view (js/Uint8Array. buf) bytes (.slice view (+ storage-layout/header-size meta-size))] (go bytes))) @@ -169,12 +170,12 @@ (go (set! (.-buf this) nil) (set! (.-header this) header))) (-write-meta [this meta-arr _env] (go (set! (.-metadata this) meta-arr))) - (-write-value [this value-arr meta-size _env] + (-write-value [this value-arr _meta-size _env] (go (set! (.-value this) value-arr))) - (-write-binary [this meta-size blob env] + (-write-binary [this _meta-size blob _env] (go (set! (.-value this) blob))) Object - (force [this metadata?] (flush-blob this)) + (force [this _metadata?] (flush-blob this)) (close [this] (do (set! (.-db this) nil) @@ -191,10 +192,10 @@ ;; needed to unref conn before can cycle database construction (close [_] (go (when (some? db) (.close db)))) storage-layout/PBackingStore - (-create-blob [this store-key env] + (-create-blob [_this store-key env] (assert (not (:sync? 
env))) (go (open-backing-blob db store-key))) - (-delete-blob [this key env] + (-delete-blob [_this key env] (assert (not (:sync? env))) (let [req (.delete (.objectStore (.transaction db #js[""] "readwrite") "") key)] (with-promise out @@ -203,8 +204,8 @@ #(put! out (ex-info (str "error deleting blob at key '" key "'") {:cause % :caller 'konserve.indexeddb/-delete-blob})))))) - (-migratable [this key store-key env] (go false)) - (-blob-exists? [this key env] + (-migratable [_this _key _store-key _env] (go false)) + (-blob-exists? [_this key env] (assert (not (:sync? env))) (let [req (.getKey (.objectStore (.transaction db #js[""]) "") key)] (with-promise out @@ -213,7 +214,7 @@ #(put! out (ex-info (str "error getting key in objectStore '" key "'") {:cause % :caller 'konserve.indexeddb/-blob-exists?})))))) - (-keys [this env] + (-keys [_this env] (assert (not (:sync? env))) (let [req (.getAllKeys (.objectStore (.transaction db #js[""]) ""))] (with-promise out @@ -222,7 +223,7 @@ #(put! out (ex-info "error listing keys in objectStore" {:cause % :caller 'konserve.indexeddb/-keys})))))) - (-copy [this from to env] + (-copy [_this from to env] (assert (not (:sync? env))) (with-promise out (take! (read-blob db from) @@ -250,7 +251,7 @@ (do (set! (.-db this) res) (put! out this))))))) - (-delete-store [this env] + (-delete-store [_this env] (assert (not (:sync? env))) (with-promise out (.close db) @@ -261,10 +262,10 @@ {:cause ?err :caller 'konserve.indexeddb/-delete-store})) (close! out)))))) - (-store-exists? [this env] + (-store-exists? [_this env] (assert (not (:sync? env))) (db-exists? db-name)) - (-sync-store [this env] (when-not (:sync? env) (go)))) + (-sync-store [_this env] (when-not (:sync? env) (go)))) (defn connect-idb-store "Connect to a IndexedDB backed KV store with the given db name. diff --git a/src/konserve/memory.cljc b/src/konserve/memory.cljc index 876a68d..d31f577 100644 --- a/src/konserve/memory.cljc +++ b/src/konserve/memory.cljc @@ -75,7 +75,7 @@ (swap! state (fn [old] (update old key - (fn [[meta data]] + (fn [[meta _data]] [(meta-up-fn meta) {:input-stream input :size :unknown}])))) nil)))) diff --git a/src/konserve/nio_helpers.clj b/src/konserve/nio_helpers.clj index d024252..4ca0d5d 100644 --- a/src/konserve/nio_helpers.clj +++ b/src/konserve/nio_helpers.clj @@ -20,17 +20,17 @@ (extend-protocol BlobToChannel InputStream - (blob->channel [input buffer-size] + (blob->channel [input _buffer-size] [(Channels/newChannel input) (fn [bis buffer] (.read ^ReadableByteChannel bis ^ByteBuffer buffer))]) File - (blob->channel [input buffer-size] + (blob->channel [input _buffer-size] [(Channels/newChannel (FileInputStream. ^String input)) (fn [bis buffer] (.read ^ReadableByteChannel bis buffer))]) String - (blob->channel [input buffer-size] + (blob->channel [input _buffer-size] [(Channels/newChannel (ByteArrayInputStream. (.getBytes input))) (fn [bis buffer] (.read ^ReadableByteChannel bis buffer))]) diff --git a/src/konserve/node_filestore.cljs b/src/konserve/node_filestore.cljs index d76d7d4..bab7c4f 100644 --- a/src/konserve/node_filestore.cljs +++ b/src/konserve/node_filestore.cljs @@ -1,17 +1,17 @@ (ns konserve.node-filestore - (:require - [cljs.core.async :refer [go take! 
ch - [fd meta-size blob env] + [fd meta-size blob _env] (with-promise out (try (let [pos (+ storage-layout/header-size meta-size) @@ -230,13 +230,13 @@ (-sync [this _env] (.force this true)) (-close [this _env] (.close this)) (-get-lock [this _env] (.lock this)) - (-read-header [this _env] ;=> ch + (-read-header [_this _env] ;=> ch (let [buf (js/Buffer.alloc storage-layout/header-size)] (afread fd buf 0))) - (-read-meta [this meta-size _env] ;=> ch + (-read-meta [_this meta-size _env] ;=> ch (let [buf (js/Buffer.alloc meta-size)] (afread fd buf storage-layout/header-size))) - (-read-value [this meta-size _env] ;=> ch + (-read-value [_this meta-size _env] ;=> ch (go (let [[?err blob-size] ( ch + (-write-value [_this value-arr meta-size _env] ;=> ch (let [buffer (js/Buffer.from value-arr) pos (+ storage-layout/header-size meta-size)] (afwrite fd buffer pos))) - (-write-binary [this meta-size blob env] + (-write-binary [_this meta-size blob env] (afwrite-binary fd meta-size blob env)) ;=> ch Object (force [this metadata?] (_force-async this metadata?)) @@ -299,12 +299,12 @@ (defn- copy "Copy a blob from one key to another." - [base from to env] + [base from to _env] (fs.copyFileSync (path.join base from) (path.join base to))) (defn- atomic-move "Atomically move (rename) a blob." - [^string base from to env] + [^string base from to _env] ;; https://stackoverflow.com/questions/66780210/is-fs-renamesync-an-atomic-operation-that-is-resistant-to-corruption (let [current-path (path.join base from) next-path (path.join base to)] @@ -367,7 +367,7 @@ (defn- copy-async "Copy a blob from one key to another." - [^string base from to env] + [^string base from to _env] (with-promise out (fs.copyFile (path.join base from) (path.join base to) (fn [?err] @@ -377,7 +377,7 @@ (defn- atomic-move-async "Atomically move (rename) a blob." - [^string base from to env] + [^string base from to _env] ;; https://stackoverflow.com/questions/66780210/is-fs-renamesync-an-atomic-operation-that-is-resistant-to-corruption (with-promise out (take! (iofs/arename (path.join base from) (path.join base to)) @@ -443,7 +443,7 @@ (defn- _migratable "Check if blob exists elsewhere and return a migration key or nil." - [^NodejsBackingFilestore this key store-key _] + [^NodejsBackingFilestore this _key store-key _] (when-let [detected-old-blobs (.-detected-old-blobs this)] (let [uuid-key (defaults/store-key->uuid-key store-key)] (or (@detected-old-blobs (path.join (.-base this) "meta" uuid-key)) @@ -453,7 +453,7 @@ (defrecord NodejsBackingFilestore [base detected-old-blobs ephemeral?] storage-layout/PBackingStore - (-create-blob [this store-key env] + (-create-blob [_this store-key env] (let [store-path (path.join base store-key)] (if (:sync? env) (open-file-channel store-path) @@ -466,8 +466,8 @@ (iofs/aexists? store-path)))) (-migratable [this key store-key env] (cond-> (_migratable this key store-key env) (not (:sync? env)) go)) - (-migrate [this migration-key key-vec serializer read-handlers write-handlers env] (throw (js/Error "TODO"))) - (-handle-foreign-key [this migration-key serializer read-handlers write-handlers env] (throw (js/Error "TODO"))) + (-migrate [_this _migration-key _key-vec _serializer _read-handlers _write-handlers _env] (throw (js/Error "TODO"))) + (-handle-foreign-key [_this _migration-key _serializer _read-handlers _write-handlers _env] (throw (js/Error "TODO"))) (-keys [this env] (if (:sync? env) (list-files base (.-ephemeral? this)) @@ -476,28 +476,28 @@ (if (:sync? 
env) (copy this from to env) (copy-async this from to env))) - (-atomic-move [this from to env] + (-atomic-move [_this from to env] (if (:sync? env) (atomic-move base from to env) (atomic-move-async base from to env))) - (-create-store [this env] + (-create-store [_this env] (if (:sync? env) (check-and-create-backing-store base) (check-and-create-backing-store-async base))) - (-delete-store [this env] + (-delete-store [_this env] (if (:sync? env) (delete-store (:base env)) (delete-store-async (:base env)))) - (-store-exists? [this env] + (-store-exists? [_this env] (if (:sync? env) (store-exists? base) (store-exists?-async base))) - (-sync-store [this env] + (-sync-store [_this env] (if (:sync? env) (sync-base base) (sync-base-async base)))) -(defn detect-old-file-schema [& args] (throw (js/Error "TODO detect-old-file-schema"))) +(defn detect-old-file-schema [& _args] (throw (js/Error "TODO detect-old-file-schema"))) ;; get-file-channel ;; migration diff --git a/src/konserve/serializers.cljc b/src/konserve/serializers.cljc index 1b0b79c..0ba22cd 100644 --- a/src/konserve/serializers.cljc +++ b/src/konserve/serializers.cljc @@ -1,10 +1,9 @@ (ns konserve.serializers (:require #?(:clj [clj-cbor.core :as cbor]) #?(:clj [clojure.data.fressian :as fress] :cljs [fress.api :as fress]) - [konserve.protocols :refer [PStoreSerializer -serialize -deserialize]] + [konserve.protocols :refer [PStoreSerializer]] [incognito.fressian :refer [incognito-read-handlers incognito-write-handlers]] - [incognito.edn :refer [read-string-safe]]) - #?(:clj (:import [org.fressian.handlers WriteHandler ReadHandler]))) + [incognito.edn :refer [read-string-safe]])) #?(:clj (defrecord CBORSerializer [codec] @@ -39,7 +38,7 @@ (incognito-read-handlers read-handlers)) fress/associative-lookup))] (fress/read bytes :handlers handlers))) - (-serialize [_ bytes write-handlers val] + (-serialize [_ #?(:clj bytes :cljs _) write-handlers val] (let [handlers #?(:clj (-> (merge fress/clojure-write-handlers custom-write-handlers @@ -64,7 +63,7 @@ PStoreSerializer (-deserialize [_ read-handlers s] (read-string-safe @read-handlers s)) - (-serialize [_ output-stream _ val] + (-serialize [_ #?(:clj output-stream :cljs _) _ val] #?(:cljs (pr-str val) :clj (binding [clojure.core/*out* output-stream] (pr val))))) @@ -95,7 +94,7 @@ (construct->keys byte->serializer)) (defn construct->byte [m n] - (->> (map (fn [[k0 v0] [k1 v1]] [k0 k1]) m n) + (->> (map (fn [[k0 _v0] [k1 _v1]] [k0 k1]) m n) (into {}))) (def byte->key diff --git a/template/pom.xml b/template/pom.xml index 67eaf16..f974a94 100644 --- a/template/pom.xml +++ b/template/pom.xml @@ -1,5 +1,6 @@ + template 4.0.0 io.replikativ konserve diff --git a/test/konserve/cache_test.cljs b/test/konserve/cache_test.cljs index b943f7e..5019153 100644 --- a/test/konserve/cache_test.cljs +++ b/test/konserve/cache_test.cljs @@ -1,6 +1,7 @@ (ns konserve.cache-test - (:require [cljs.core.async :refer [! go promise-chan put!]] - [cljs.test :refer [deftest testing is are async]] + (:require-macros [cljs.core.async.macros :refer [go]]) + (:require [cljs.core.async :refer [! put! take! close!]] + (:require-macros [cljs.core.async.macros :refer [go]]) + (:require [cljs.core.async :refer [! put! take! close! chan timeout]] - [cljs.nodejs :as node] - [cljs-node-io.core :as io] + (:require-macros [cljs.core.async.macros :refer [go]]) + (:require [cljs.core.async :refer [! timeout chan alt! go go-loop close! 
put!]] + [konserve.protocols :refer [-serialize -deserialize]] + [clojure.core.async :as async :refer [! chan go close! put!]] [taoensso.timbre :refer [debug]]) (:import [java.io DataInputStream DataOutputStream @@ -25,19 +23,24 @@ ;; Protocols (defprotocol PEDNAsyncKeyValueStoreV1 "Allows to access a store similar to hash-map in EDN." + #_{:clj-kondo/ignore [:clojure-lsp/unused-public-var]} (-exists? [this key] "Checks whether value is in the store.") + #_{:clj-kondo/ignore [:clojure-lsp/unused-public-var]} (-get-in [this key-vec] "Returns the value stored described by key-vec or nil if the path is not resolvable.") (-update-in [this key-vec up-fn] [this key-vec up-fn up-fn-args] "Updates a position described by key-vec by applying up-fn and storing the result atomically. Returns a vector [old new] of the previous value and the result of applying up-fn (the newly stored value).") (-assoc-in [this key-vec val]) + #_{:clj-kondo/ignore [:clojure-lsp/unused-public-var]} (-dissoc [this key])) (defprotocol PBinaryAsyncKeyValueStoreV1 "Allows binary data byte array storage." + #_{:clj-kondo/ignore [:clojure-lsp/unused-public-var]} (-bget [this key locked-cb] "Calls locked-cb with a platform specific binary representation inside the lock, e.g. wrapped InputStream on the JVM and Blob in JavaScript. You need to properly close/dispose the object when you are done!") (-bassoc [this key val] "Copies given value (InputStream, Reader, File, byte[] or String on JVM, Blob in JavaScript) under key in the store.")) (defprotocol PKeyIterableV1 "Allows lazy iteration of keys in this store." + #_{:clj-kondo/ignore [:clojure-lsp/unused-public-var]} (-keys [this] "Return a channel that will continuously yield keys in this store.")) ;Filestore V1 < Konserve 0.4 @@ -78,6 +81,7 @@ (async/into #{}))] fns)) +#_{:clj-kondo/ignore [:clojure-lsp/unused-public-var]} (defn delete-store "Permanently deletes the folder of the store with all files." [folder] @@ -87,39 +91,12 @@ (.delete f) (try (sync-folder folder) - (catch Exception e + (catch Exception _e nil)))) -(defn filestore-schema-update - "Lists all keys in this binary store. This operation *does not block concurrent operations* and might return an outdated key set. Keys of binary blobs are not tracked atm." - [{:keys [folder serializer read-handlers] :as store}] - (let [fns (->> (io/file folder) - .list - seq - (filter #(re-matches #"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" - %)) - (map (fn [fn] - (go-locked - store fn - (let [f (io/file (str folder "/" fn))] - (when (.exists f) - (let [fis (DataInputStream. (FileInputStream. f))] - (try - (first (-deserialize serializer read-handlers fis)) - (catch Exception e - (ex-info "Could not read key." - {:type :read-error - :key fn - :exception e})) - (finally - (.close fis))))))))) - async/merge - (async/into #{}))] - fns)) - (defrecord FileSystemStoreV1 [folder serializer read-handlers write-handlers locks config] PEDNAsyncKeyValueStoreV1 - (-exists? [this key] + (-exists? 
[_this key] (let [fn (uuid key) bfn (str "B_" (uuid key)) f (io/file (str folder "/" fn)) @@ -130,7 +107,7 @@ res)) ;; non-blocking async version - (-get-in [this key-vec] + (-get-in [_this key-vec] (let [[fkey & rkey] key-vec fn (uuid fkey) f (io/file (str folder "/" fn)) @@ -177,56 +154,56 @@ res-ch)) (-update-in [this key-vec up-fn] (-update-in this key-vec up-fn [])) - (-update-in [this key-vec up-fn up-fn-args] + (-update-in [_this key-vec up-fn up-fn-args] (async/thread (let [[fkey & rkey] key-vec fn (uuid fkey) f (io/file (str folder "/" fn)) - new-file (io/file (str folder "/" fn ".new"))] - (let [old (when (.exists f) - (let [fis (DataInputStream. (FileInputStream. f))] - (try - (second (-deserialize serializer read-handlers fis)) - (catch Exception e - (ex-info "Could not read key." - {:type :read-error - :key fkey - :exception e})) - (finally - (.close fis))))) - fos (FileOutputStream. new-file) - dos (DataOutputStream. fos) - fd (.getFD fos) - new (if-not (empty? rkey) - (apply update-in old rkey up-fn up-fn-args) - (apply up-fn old up-fn-args))] - (if (instance? Throwable old) - old ;; return read error - (try - (-serialize serializer dos write-handlers [key-vec new]) - (.flush dos) - (when (:fsync config) - (.sync fd)) - (.close dos) - (Files/move (.toPath new-file) (.toPath f) - (into-array [StandardCopyOption/ATOMIC_MOVE])) - (when (:fsync config) - (sync-folder folder)) - [(get-in old rkey) - (get-in new rkey)] - (catch Exception e - (.delete new-file) + new-file (io/file (str folder "/" fn ".new")) + old (when (.exists f) + (let [fis (DataInputStream. (FileInputStream. f))] + (try + (second (-deserialize serializer read-handlers fis)) + (catch Exception e + (ex-info "Could not read key." + {:type :read-error + :key fkey + :exception e})) + (finally + (.close fis))))) + fos (FileOutputStream. new-file) + dos (DataOutputStream. fos) + fd (.getFD fos) + new (if-not (empty? rkey) + (apply update-in old rkey up-fn up-fn-args) + (apply up-fn old up-fn-args))] + (if (instance? Throwable old) + old ;; return read error + (try + (-serialize serializer dos write-handlers [key-vec new]) + (.flush dos) + (when (:fsync config) + (.sync fd)) + (.close dos) + (Files/move (.toPath new-file) (.toPath f) + (into-array [StandardCopyOption/ATOMIC_MOVE])) + (when (:fsync config) + (sync-folder folder)) + [(get-in old rkey) + (get-in new rkey)] + (catch Exception e + (.delete new-file) ;; TODO maybe need fsync new-file here? - (ex-info "Could not write key." - {:type :write-error - :key fkey - :exception e})) - (finally - (.close dos)))))))) + (ex-info "Could not write key." + {:type :write-error + :key fkey + :exception e})) + (finally + (.close dos))))))) (-assoc-in [this key-vec val] (-update-in this key-vec (fn [_] val))) - (-dissoc [this key] + (-dissoc [_this key] (async/thread (let [fn (uuid key) f (io/file (str folder "/" fn))] @@ -236,7 +213,7 @@ nil))) PBinaryAsyncKeyValueStoreV1 - (-bget [this key locked-cb] + (-bget [_this key locked-cb] (let [fn (str "B_" (uuid key)) f (io/file (str folder "/" fn)) res-ch (chan)] @@ -282,7 +259,7 @@ :exception e}))))) res-ch)) - (-bassoc [this key input] + (-bassoc [_this key input] (let [fn (uuid key) f (io/file (str folder "/B_" fn)) new-file (io/file (str folder "/B_" fn ".new"))] @@ -370,6 +347,7 @@ (async/into #{}))] fns)) +#_{:clj-kondo/ignore [:clojure-lsp/unused-public-var]} (defn delete-store-v2 "Permanently deletes the folder of the store with all files." 
[folder] @@ -381,12 +359,12 @@ (.delete f) (try (sync-folder parent-folder) - (catch Exception e + (catch Exception _e nil)))) (defn- read-key "help function for -update-in" - [folder ^File f fkey serializer read-handlers] + [_folder ^File f fkey serializer read-handlers] (when (.exists f) (let [fis (DataInputStream. (FileInputStream. f))] (try @@ -439,7 +417,7 @@ (defn- write-edn-key "help function for -update-in" - [serializer write-handlers read-handlers folder fn {:keys [key] :as key-meta} config] + [serializer write-handlers _read-handlers folder fn {:keys [key] :as key-meta} config] (let [f (io/file (str folder fn)) new-file (io/file (str folder fn ".new")) fos (FileOutputStream. new-file) @@ -634,7 +612,7 @@ (defrecord FileSystemStoreV2 [folder serializer read-handlers write-handlers locks config stale-binaries?] PEDNAsyncKeyValueStoreV1 - (-exists? [this key] + (-exists? [_this key] (let [fn (uuid key) f (io/file (str folder "/data/" fn)) res (chan)] @@ -642,7 +620,7 @@ (close! res) res)) ;; non-blocking async version - (-get-in [this key-vec] + (-get-in [_this key-vec] (let [[fkey & rkey] key-vec fn (uuid fkey) f (io/file (str folder "/data/" fn)) @@ -650,7 +628,7 @@ (read-edn f res-ch folder fn fkey rkey serializer read-handlers) res-ch)) (-update-in [this key-vec up-fn] (-update-in this key-vec up-fn [])) - (-update-in [this key-vec up-fn args] + (-update-in [_this key-vec up-fn args] (async/thread (try (let [file-name (uuid (first key-vec)) @@ -663,7 +641,7 @@ (-assoc-in [this key-vec val] (-update-in this key-vec (fn [_] val))) - (-dissoc [this key] + (-dissoc [_this key] (async/thread (let [fn (uuid key) key-folder (str folder "/meta") @@ -672,7 +650,7 @@ (delete-entry fn data-folder config)))) PBinaryAsyncKeyValueStoreV1 - (-bget [this key locked-cb] + (-bget [_this key locked-cb] (let [fn (str (uuid key)) f (io/file (str folder "/data/" fn)) res-ch (chan)] @@ -687,7 +665,7 @@ (read-binary f res-ch folder fn key locked-cb)) res-ch)) - (-bassoc [this key input] + (-bassoc [_this key input] (let [file-name (uuid key) key-folder (str folder "/meta/")] (async/thread @@ -700,7 +678,7 @@ (async/take! (list-keys this) (fn [ks] - (async/onto-chan ch (map :key ks)))) + (async/onto-chan! ch (map :key ks)))) ch))) (defmethod print-method FileSystemStoreV2 @@ -781,9 +759,7 @@ :locks (atom {}) :config config})] (if-not stale-edn? - (do - (go - (!]] - [cljs.test :refer-macros [deftest is testing async use-fixtures]] + (:require [cljs.core.async :as async :refer [