From a79b04eb2b798403ff37d54e5dca33e987be3e41 Mon Sep 17 00:00:00 2001
From: Timothy Pratley
Date: Wed, 7 Jul 2021 12:22:04 -0700
Subject: [PATCH] many minor adjustments for ClojureScript

This patch doesn't solve everything. It does get significantly further in
compilation (builds with warnings), but more work is required to make it
actually work correctly. Suggestions and collaboration are welcome on
achieving full compatibility.
---
 deps.edn                                |   3 +-
 src/datahike/api.cljc                   | 886 ++++++++++++------------
 src/datahike/array.cljc                 |   2 +-
 src/datahike/config.cljc                |  69 +-
 src/datahike/connector.cljc             |  60 +-
 src/datahike/core.cljc                  |  27 +-
 src/datahike/db.cljc                    |  41 +-
 src/datahike/impl/entity.cljc           |   2 +-
 src/datahike/index.cljc                 |  13 +-
 src/datahike/index/hitchhiker_tree.cljc |  27 +-
 src/datahike/pull_api.cljc              |   2 +-
 src/datahike/query.cljc                 |  19 +-
 src/datahike/schema.cljc                |   2 +-
 src/datahike/store.cljc                 |   2 +-
 src/datahike/transactor.cljc            |   6 +-
 15 files changed, 596 insertions(+), 565 deletions(-)

diff --git a/deps.edn b/deps.edn
index c73c14a43..96eb7791e 100644
--- a/deps.edn
+++ b/deps.edn
@@ -1,5 +1,5 @@
 {:deps {org.clojure/clojure {:mvn/version "1.10.1"}
-        org.clojure/clojurescript {:mvn/version "1.10.597"}
+        org.clojure/clojurescript {:mvn/version "1.10.773"}
         io.replikativ/hitchhiker-tree {:mvn/version "0.1.11"}
         persistent-sorted-set/persistent-sorted-set {:mvn/version "0.1.2"}
         org.clojure/tools.reader {:mvn/version "1.3.3"}
@@ -8,6 +8,7 @@
         io.replikativ/superv.async {:mvn/version "0.2.11"}
         io.lambdaforge/datalog-parser {:mvn/version "0.1.8"}
         io.replikativ/zufall {:mvn/version "0.1.0"}
+        org.clojars.mmb90/cljs-cache {:mvn/version "0.1.4"}
         junit/junit {:mvn/version "4.13.1"}}

 :paths ["src" "target/classes"]

diff --git a/src/datahike/api.cljc b/src/datahike/api.cljc
index d4b19650f..ecfa6bdb1 100644
--- a/src/datahike/api.cljc
+++ b/src/datahike/api.cljc
@@ -4,258 +4,264 @@
    [datahike.core :as dcore]
    [datahike.pull-api :as dp]
    [datahike.query :as dq]
-   [datahike.db :as db #?@(:cljs [:refer [CurrentDB]])]
+   [datahike.db :as db #?@(:cljs [:refer [HistoricalDB AsOfDB SinceDB FilteredDB]])]
    [datahike.impl.entity :as de])
   #?(:clj
      (:import [datahike.db HistoricalDB AsOfDB SinceDB FilteredDB]
               [datahike.impl.entity Entity]
-             [java.util Date])))
+             [java.util Date]
+             [clojure.lang PersistentArrayMap Keyword])))

-(def
-  ^{:arglists '([] [config])
-    :doc "Connects to a datahike database via configuration map. For more information on the configuration refer to the [docs](https://github.com/replikativ/datahike/blob/master/doc/config.md).
+(defn connect
+  "Connects to a datahike database via configuration map. For more information on the configuration refer to the [docs](https://github.com/replikativ/datahike/blob/master/doc/config.md).

-  The configuration for a connection is a subset of the Datahike configuration with only the store necessary: `:store`.
+  The configuration for a connection is a subset of the Datahike configuration with only the store necessary: `:store`.

-  `:store` defines the backend configuration as hash-map with mandatory key: `:backend` and store dependent keys.
+  `:store` defines the backend configuration as hash-map with mandatory key: `:backend` and store dependent keys.

-  Per default Datahike ships with `:mem` and `:file` backend.
+  Per default Datahike ships with `:mem` and `:file` backend.
- The default configuration: - `{:store {:backend :mem :id \"default\"}}` + The default configuration: + `{:store {:backend :mem :id \"default\"}}` - Usage: + Usage: - Connect to default in-memory configuration: - `(connect)` + Connect to default in-memory configuration: + `(connect)` - Connect to a database with persistent store: - `(connect {:store {:backend :file :path \"/tmp/example\"}})`"} + Connect to a database with persistent store: + `(connect {:store {:backend :file :path \"/tmp/example\"}})`" + ([] (dc/connect)) + ([config] (dc/connect config))) - connect dc/connect) +(defn database-exists? + "Checks if a database exists via configuration map. -(def - ^{:arglists '([config]) - :doc "Checks if a database exists via configuration map. - Usage: + Usage: - (database-exists? {:store {:backend :mem :id \"example\"}})"} - database-exists? dc/database-exists?) + (database-exists? {:store {:backend :mem :id \"example\"}})" + [config] + (dc/database-exists? config)) -(def - ^{:arglists '([] [config & deprecated-opts]) - :doc "Creates a database via configuration map. For more information on the configuration refer to the [docs](https://github.com/replikativ/datahike/blob/master/doc/config.md). +(defn create-database + "Creates a database via configuration map. For more information on the configuration refer to the [docs](https://github.com/replikativ/datahike/blob/master/doc/config.md). - The configuration is a hash-map with keys: `:store`, `:initial-tx`, `:keep-history?`, `:schema-flexibility`, `:index` + The configuration is a hash-map with keys: `:store`, `:initial-tx`, `:keep-history?`, `:schema-flexibility`, `:index` - - `:store` defines the backend configuration as hash-map with mandatory key: `:backend` and store dependent keys. - Per default Datahike ships with `:mem` and `:file` backend. - - `:initial-tx` defines the first transaction into the database, often setting default data like the schema. - - `:keep-history?` is a boolean that toggles whether Datahike keeps historical data. - - `:schema-flexibility` can be set to either `:read` or `:write` setting the validation method for the data. - - `:read` validates the data when your read data from the database, `:write` validates the data when you transact new data. - - `:index` defines the data type of the index. Available are `:datahike.index/hitchhiker-tree`, `:datahike.index/persistent-set` (only available with in-memory storage) - - `:name` defines your database name optionally, if not set, a random name is created - - `:transactor` optionally configures a transactor as a hash map. If not set, the default local transactor is used. + - `:store` defines the backend configuration as hash-map with mandatory key: `:backend` and store dependent keys. + Per default Datahike ships with `:mem` and `:file` backend. + - `:initial-tx` defines the first transaction into the database, often setting default data like the schema. + - `:keep-history?` is a boolean that toggles whether Datahike keeps historical data. + - `:schema-flexibility` can be set to either `:read` or `:write` setting the validation method for the data. + - `:read` validates the data when your read data from the database, `:write` validates the data when you transact new data. + - `:index` defines the data type of the index. 
Available are `:datahike.index/hitchhiker-tree`, `:datahike.index/persistent-set` (only available with in-memory storage)
+  - `:name` defines your database name optionally, if not set, a random name is created
+  - `:transactor` optionally configures a transactor as a hash map. If not set, the default local transactor is used.

-  Default configuration has in-memory store, keeps history with write schema flexibility, and has no initial transaction:
-  {:store {:backend :mem :id \"default\"} :keep-history? true :schema-flexibility :write}
+  Default configuration has in-memory store, keeps history with write schema flexibility, and has no initial transaction:
+  {:store {:backend :mem :id \"default\"} :keep-history? true :schema-flexibility :write}

-  Usage:
+  Usage:

-  ;; create an empty database:
-  (create-database {:store {:backend :mem :id \"example\"} :name \"my-favourite-database\"})
+  ;; create an empty database:
+  (create-database {:store {:backend :mem :id \"example\"} :name \"my-favourite-database\"})

-  ;; Datahike has a strict schema validation (schema-flexibility `:write`) policy by default, that only allows transaction of data that has been pre-defined by a schema.
-  ;; You may influence this behaviour using the `:schema-flexibility` attribute:
-  (create-database {:store {:backend :mem :id \"example\"} :schema-flexibility :read})
+  ;; Datahike has a strict schema validation (schema-flexibility `:write`) policy by default, that only allows transaction of data that has been pre-defined by a schema.
+  ;; You may influence this behaviour using the `:schema-flexibility` attribute:
+  (create-database {:store {:backend :mem :id \"example\"} :schema-flexibility :read})

-  ;; By storing historical data in a separate index, datahike has the capability of querying data from any point in time.
-  ;; You may control this feature using the `:keep-history?` attribute:
-  (create-database {:store {:backend :mem :id \"example\"} :keep-history? false})
+  ;; By storing historical data in a separate index, datahike has the capability of querying data from any point in time.
+  ;; You may control this feature using the `:keep-history?` attribute:
+  (create-database {:store {:backend :mem :id \"example\"} :keep-history? false})

-  ;; Initial data after creation may be added using the `:initial-tx` attribute, which in this example adds a schema:
-  (create-database {:store {:backend :mem :id \"example\"} :initial-tx [{:db/ident :name :db/valueType :db.type/string :db.cardinality/one}]})"}
+  ;; Initial data after creation may be added using the `:initial-tx` attribute, which in this example adds a schema:
+  (create-database {:store {:backend :mem :id \"example\"} :initial-tx [{:db/ident :name :db/valueType :db.type/string :db/cardinality :db.cardinality/one}]})"
+  ([] (dc/create-database))
+  ([config & deprecated-opts] (apply dc/create-database config deprecated-opts)))

-  create-database
-  dc/create-database)
+(defn delete-database
+  "Deletes a database given via configuration map. Storage configuration `:store` is mandatory.
+  For more information refer to the [docs](https://github.com/replikativ/datahike/blob/master/doc/config.md)"
+  [config]
+  (dc/delete-database config))

-(def ^{:arglists '([config])
-      :doc "Deletes a database given via configuration map. Storage configuration `:store` is mandatory.
- For more information refer to the [docs](https://github.com/replikativ/datahike/blob/master/doc/config.md)"} - delete-database - dc/delete-database) +(defn transact + "Applies transaction to the underlying database value and atomically updates the connection reference to point to the result of that transaction, the new db value. -(def ^{:arglists '([conn arg-map]) - :doc "Applies transaction to the underlying database value and atomically updates the connection reference to point to the result of that transaction, the new db value. + Accepts the connection and a map or a vector as argument, specifying the transaction data. - Accepts the connection and a map or a vector as argument, specifying the transaction data. + Returns transaction report, a map: - Returns transaction report, a map: + {:db-before ... ; db value before transaction + :db-after ... ; db value after transaction + :tx-data [...] ; plain datoms that were added/retracted from db-before + :tempids {...} ; map of tempid from tx-data => assigned entid in db-after + :tx-meta tx-meta } ; the exact value you passed as `tx-meta` - {:db-before ... ; db value before transaction - :db-after ... ; db value after transaction - :tx-data [...] ; plain datoms that were added/retracted from db-before - :tempids {...} ; map of tempid from tx-data => assigned entid in db-after - :tx-meta tx-meta } ; the exact value you passed as `tx-meta` + Note! `conn` will be updated in-place and is not returned from [[transact]]. - Note! `conn` will be updated in-place and is not returned from [[transact]]. + Usage: - Usage: + ;; add a single datom to an existing entity (1) + (transact conn [[:db/add 1 :name \"Ivan\"]]) - ;; add a single datom to an existing entity (1) - (transact conn [[:db/add 1 :name \"Ivan\"]]) + ;; retract a single datom + (transact conn [[:db/retract 1 :name \"Ivan\"]]) - ;; retract a single datom - (transact conn [[:db/retract 1 :name \"Ivan\"]]) + ;; retract single entity attribute + (transact conn [[:db.fn/retractAttribute 1 :name]]) - ;; retract single entity attribute - (transact conn [[:db.fn/retractAttribute 1 :name]]) + ;; retract all entity attributes (effectively deletes entity) + (transact conn [[:db.fn/retractEntity 1]]) - ;; retract all entity attributes (effectively deletes entity) - (transact conn [[:db.fn/retractEntity 1]]) + ;; create a new entity (`-1`, as any other negative value, is a tempid + ;; that will be replaced by Datahike with the next unused eid) + (transact conn [[:db/add -1 :name \"Ivan\"]]) - ;; create a new entity (`-1`, as any other negative value, is a tempid - ;; that will be replaced by Datahike with the next unused eid) - (transact conn [[:db/add -1 :name \"Ivan\"]]) + ;; check assigned id (here `*1` is a result returned from previous `transact` call) + (def report *1) + (:tempids report) ; => {-1 296, :db/current-tx 536870913} + + ;; check actual datoms inserted + (:tx-data report) ; => [#datahike/Datom [296 :name \"Ivan\" 536870913]] + + ;; tempid can also be a string + (transact conn [[:db/add \"ivan\" :name \"Ivan\"]]) + (:tempids *1) ; => {\"ivan\" 5, :db/current-tx 536870920} + + ;; reference another entity (must exist) + (transact conn [[:db/add -1 :friend 296]]) + + ;; create an entity and set multiple attributes (in a single transaction + ;; equal tempids will be replaced with the same unused yet entid) + (transact conn [[:db/add -1 :name \"Ivan\"] + [:db/add -1 :likes \"fries\"] + [:db/add -1 :likes \"pizza\"] + [:db/add -1 :friend 296]]) + + ;; create an entity and set multiple 
attributes (alternative map form)
+  (transact conn [{:db/id -1
+                   :name \"Ivan\"
+                   :likes [\"fries\" \"pizza\"]
+                   :friend 296}])
+
+  ;; update an entity (alternative map form). Can’t retract attributes in
+  ;; map form. For cardinality many attrs, value (fish in this example)
+  ;; will be added to the list of existing values
+  (transact conn [{:db/id 296
+                   :name \"Oleg\"
+                   :likes [\"fish\"]}])
+
+  ;; ref attributes can be specified as nested map, that will create a nested entity as well
+  (transact conn [{:db/id -1
+                   :name \"Oleg\"
+                   :friend {:db/id -2
+                            :name \"Sergey\"}}])
+
+  ;; schema is needed for using a reverse attribute
+  (is (transact conn [{:db/valueType :db.type/ref
+                       :db/cardinality :db.cardinality/one
+                       :db/ident :friend}]))
+
+  ;; reverse attribute name can be used if you want a created entity to become
+  ;; a value in another entity reference
+  (transact conn [{:db/id -1
+                   :name \"Oleg\"
+                   :_friend 296}])
+  ;; equivalent to
+  (transact conn [[:db/add -1 :name \"Oleg\"]
+                  [:db/add 296 :friend -1]])"
+  [conn arg-map]
+  (dc/transact conn arg-map))
+
+(defn ^:no-doc transact!
+  [conn tx-data]
+  (dc/transact! conn tx-data))
+
+(defn load-entities
+  "Load entities directly"
+  [conn tx-data]
+  (dc/load-entities conn tx-data))
+
+(defn release
+  "Releases a database connection"
+  [conn]
+  (dc/release conn))
+
+(defn pull
+  "Fetches data from database using recursive declarative description. See [docs.datomic.com/on-prem/pull.html](https://docs.datomic.com/on-prem/pull.html).
+
+  Unlike [[entity]], returns plain Clojure map (not lazy).

-  ;; check assigned id (here `*1` is a result returned from previous `transact` call)
+  Usage:

-  (def report *1)
-  (:tempids report) ; => {-1 296, :db/current-tx 536870913}
+  (pull db [:db/id, :name, :likes, {:friends [:db/id :name]}] 1) ; => {:db/id 1,
+                                                                       :name \"Ivan\"
+                                                                       :likes [:pizza]
+                                                                       :friends [{:db/id 2, :name \"Oleg\"}]}

-  ;; check actual datoms inserted
-  (:tx-data report) ; => [#datahike/Datom [296 :name \"Ivan\" 536870913]]
+  The arity-2 version takes :selector and :eid in arg-map."
+  ([db selector eid]
+   (dp/pull db selector eid))
+  ([db arg-map]
+   (dp/pull db arg-map)))

-  ;; tempid can also be a string
-  (transact conn [[:db/add \"ivan\" :name \"Ivan\"]])
-  (:tempids *1) ; => {\"ivan\" 5, :db/current-tx 536870920}
+(defn pull-many
+  "Same as [[pull]], but accepts sequence of ids and returns sequence of maps.

-  ;; reference another entity (must exist)
-  (transact conn [[:db/add -1 :friend 296]])
+  Usage:

-  ;; create an entity and set multiple attributes (in a single transaction
-  ;; equal tempids will be replaced with the same unused yet entid)
-  (transact conn [[:db/add -1 :name \"Ivan\"]
-                  [:db/add -1 :likes \"fries\"]
-                  [:db/add -1 :likes \"pizza\"]
-                  [:db/add -1 :friend 296]])
+  (pull-many db [:db/id :name] [1 2]) ; => [{:db/id 1, :name \"Ivan\"}
+                                            {:db/id 2, :name \"Oleg\"}]"
+  [db selector eids]
+  (dp/pull-many db selector eids))

-  ;; create an entity and set multiple attributes (alternative map form)
-  (transact conn [{:db/id -1
-                   :name \"Ivan\"
-                   :likes [\"fries\" \"pizza\"]
-                   :friend 296}])
+(defn q
+  "Executes a datalog query. See [docs.datomic.com/on-prem/query.html](https://docs.datomic.com/on-prem/query.html).
- ;; ref attributes can be specified as nested map, that will create a nested entity as well - (transact conn [{:db/id -1 - :name \"Oleg\" - :friend {:db/id -2 - :name \"Sergey\"}}]) + Usage: - ;; schema is needed for using a reverse attribute - (is (transact conn [{:db/valueType :db.type/ref - :db/cardinality :db.cardinality/one - :db/ident :friend}])) + Query as parameter with additional args: - ;; reverse attribute name can be used if you want a created entity to become - ;; a value in another entity reference - (transact conn [{:db/id -1 - :name \"Oleg\" - :_friend 296}]) - ;; equivalent to - (transact conn [[:db/add -1 :name \"Oleg\"] - {:db/add 296 :friend -1]])"} - transact - dc/transact) + (q '[:find ?value + :where [_ :likes ?value]] + #{[1 :likes \"fries\"] + [2 :likes \"candy\"] + [3 :likes \"pie\"] + [4 :likes \"pizza\"]}) ; => #{[\"fries\"] [\"candy\"] [\"pie\"] [\"pizza\"]} -(def ^{:arglists '([conn tx-data tx-meta]) - :no-doc true} - transact! - dc/transact!) + Or query passed in arg-map: -(def ^{:arglists '([conn tx-data]) - :doc "Load entities directly"} - load-entities - dc/load-entities) + (q {:query '[:find ?value + :where [_ :likes ?value]] + :offset 2 + :limit 1 + :args [#{[1 :likes \"fries\"] + [2 :likes \"candy\"] + [3 :likes \"pie\"] + [4 :likes \"pizza\"]}]}) ; => #{[\"fries\"] [\"candy\"] [\"pie\"] [\"pizza\"]} -(def ^{:arglists '([conn]) - :doc "Releases a database connection"} - release dc/release) + Or query passed as map of vectors: -(def ^{:arglists '([db selector eid] [db arg-map]) - :doc "Fetches data from database using recursive declarative description. See [docs.datomic.com/on-prem/pull.html](https://docs.datomic.com/on-prem/pull.html). + (q '{:find [?value] :where [[_ :likes ?value]]} + #{[1 :likes \"fries\"] + [2 :likes \"candy\"] + [3 :likes \"pie\"] + [4 :likes \"pizza\"]}) ; => #{[\"fries\"] [\"candy\"] [\"pie\"] [\"pizza\"]} - Unlike [[entity]], returns plain Clojure map (not lazy). - - Usage: + Or query passed as string: - (pull db [:db/id, :name, :likes, {:friends [:db/id :name]}] 1) ; => {:db/id 1, - :name \"Ivan\" - :likes [:pizza] - :friends [{:db/id 2, :name \"Oleg\"}]} + (q {:query \"[:find ?value :where [_ :likes ?value]]\" + :args [#{[1 :likes \"fries\"] + [2 :likes \"candy\"] + [3 :likes \"pie\"] + [4 :likes \"pizza\"]}]}) - The arity-2 version takes :selector and :eid in arg-map."} - pull dp/pull) - -(def ^{:arglists '([db selector eids]) - :doc "Same as [[pull]], but accepts sequence of ids and returns sequence of maps. - - Usage: - - (pull-many db [:db/id :name] [1 2]) ; => [{:db/id 1, :name \"Ivan\"} - {:db/id 2, :name \"Oleg\"}]"} - pull-many dp/pull-many) - -(def ^{:arglists '([query & args] [arg-map]) - :doc "Executes a datalog query. See [docs.datomic.com/on-prem/query.html](https://docs.datomic.com/on-prem/query.html). 
- - Usage: - - Query as parameter with additional args: - - (q '[:find ?value - :where [_ :likes ?value]] - #{[1 :likes \"fries\"] - [2 :likes \"candy\"] - [3 :likes \"pie\"] - [4 :likes \"pizza\"]}) ; => #{[\"fries\"] [\"candy\"] [\"pie\"] [\"pizza\"]} - - Or query passed in arg-map: - - (q {:query '[:find ?value - :where [_ :likes ?value]] - :offset 2 - :limit 1 - :args [#{[1 :likes \"fries\"] - [2 :likes \"candy\"] - [3 :likes \"pie\"] - [4 :likes \"pizza\"]}]}) ; => #{[\"fries\"] [\"candy\"] [\"pie\"] [\"pizza\"]} - - Or query passed as map of vectors: - - (q '{:find [?value] :where [[_ :likes ?value]]} - #{[1 :likes \"fries\"] - [2 :likes \"candy\"] - [3 :likes \"pie\"] - [4 :likes \"pizza\"]}) ; => #{[\"fries\"] [\"candy\"] [\"pie\"] [\"pizza\"]} - - Or query passed as string: - - (q {:query \"[:find ?value :where [_ :likes ?value]]\" - :args [#{[1 :likes \"fries\"] - [2 :likes \"candy\"] - [3 :likes \"pie\"] - [4 :likes \"pizza\"]}]}) - - Query passed as map needs vectors as values. Query can not be passed as list. The 1-arity function takes a map with the arguments :query and :args and optionally the additional keys :offset and :limit."} - q dq/q) + Query passed as map needs vectors as values. Query can not be passed as list. The 1-arity function takes a map with the arguments :query and :args and optionally the additional keys :offset and :limit." + ([query & args] + (apply dq/q query args)) + ([arg-map] + (dq/q arg-map))) (defmulti datoms {:arglists '([db arg-map] [db index & components]) :doc "Index lookup. Returns a sequence of datoms (lazy iterator over actual DB index) which components @@ -387,12 +393,12 @@ ([db index & components] (type index)))) -(defmethod datoms clojure.lang.PersistentArrayMap +(defmethod datoms PersistentArrayMap [db {:keys [index components]}] {:pre [(db/db? db)]} (db/-datoms db index components)) -(defmethod datoms clojure.lang.Keyword +(defmethod datoms Keyword [db index & components] {:pre [(db/db? db) (keyword? index)]} @@ -438,12 +444,12 @@ ([db index & components] (type index)))) -(defmethod seek-datoms clojure.lang.PersistentArrayMap +(defmethod seek-datoms PersistentArrayMap [db {:keys [index components]}] {:pre [(db/db? db)]} (db/-seek-datoms db index components)) -(defmethod seek-datoms clojure.lang.Keyword +(defmethod seek-datoms Keyword [db index & components] {:pre [(db/db? db) (keyword? index)]} @@ -453,63 +459,66 @@ (def ^:private last-tempid (atom -1000000)) -(def ^{:arglists '([part] [part x]) - :doc "Allocates and returns a unique temporary id (a negative integer). Ignores `part`. Returns `x` if it is specified. +(defn tempid + "Allocates and returns a unique temporary id (a negative integer). Ignores `part`. Returns `x` if it is specified. - Exists for Datomic API compatibility. Prefer using negative integers directly if possible."} - tempid - dcore/tempid) + Exists for Datomic API compatibility. Prefer using negative integers directly if possible." + ([part] + (dcore/tempid part)) + ([part x] + (dcore/tempid part x))) -(def ^{:arglists '([db eid]) - :doc "Retrieves an entity by its id from database. Entities are lazy map-like structures to navigate Datahike database content. +(defn entity + "Retrieves an entity by its id from database. Entities are lazy map-like structures to navigate Datahike database content. 
- For `eid` pass entity id or lookup attr: + For `eid` pass entity id or lookup attr: - (entity db 1) - (entity db [:unique-attr :value]) + (entity db 1) + (entity db [:unique-attr :value]) - If entity does not exist, `nil` is returned: + If entity does not exist, `nil` is returned: - (entity db -1) ; => nil + (entity db -1) ; => nil - Creating an entity by id is very cheap, almost no-op, as attr access is on-demand: + Creating an entity by id is very cheap, almost no-op, as attr access is on-demand: - (entity db 1) ; => {:db/id 1} + (entity db 1) ; => {:db/id 1} - Entity attributes can be lazily accessed through key lookups: + Entity attributes can be lazily accessed through key lookups: - (:attr (entity db 1)) ; => :value - (get (entity db 1) :attr) ; => :value + (:attr (entity db 1)) ; => :value + (get (entity db 1) :attr) ; => :value - Cardinality many attributes are returned sequences: + Cardinality many attributes are returned sequences: - (:attrs (entity db 1)) ; => [:v1 :v2 :v3] + (:attrs (entity db 1)) ; => [:v1 :v2 :v3] - Reference attributes are returned as another entities: + Reference attributes are returned as another entities: - (:ref (entity db 1)) ; => {:db/id 2} - (:ns/ref (entity db 1)) ; => {:db/id 2} + (:ref (entity db 1)) ; => {:db/id 2} + (:ns/ref (entity db 1)) ; => {:db/id 2} - References can be walked backwards by prepending `_` to name part of an attribute: + References can be walked backwards by prepending `_` to name part of an attribute: - (:_ref (entity db 2)) ; => [{:db/id 1}] - (:ns/_ref (entity db 2)) ; => [{:db/id 1}] + (:_ref (entity db 2)) ; => [{:db/id 1}] + (:ns/_ref (entity db 2)) ; => [{:db/id 1}] - Reverse reference lookup returns sequence of entities unless attribute is marked as `:db/component`: + Reverse reference lookup returns sequence of entities unless attribute is marked as `:db/component`: - (:_component-ref (entity db 2)) ; => {:db/id 1} + (:_component-ref (entity db 2)) ; => {:db/id 1} - Entity gotchas: + Entity gotchas: - - Entities print as map, but are not exactly maps (they have compatible get interface though). - - Entities are effectively immutable “views” into a particular version of a database. - - Entities retain reference to the whole database. - - You can’t change database through entities, only read. - - Creating an entity by id is very cheap, almost no-op (attributes are looked up on demand). - - Comparing entities just compares their ids. Be careful when comparing entities taken from differenct dbs or from different versions of the same db. - - Accessed entity attributes are cached on entity itself (except backward references). - - When printing, only cached attributes (the ones you have accessed before) are printed. See [[touch]]."} - entity de/entity) + - Entities print as map, but are not exactly maps (they have compatible get interface though). + - Entities are effectively immutable “views” into a particular version of a database. + - Entities retain reference to the whole database. + - You can’t change database through entities, only read. + - Creating an entity by id is very cheap, almost no-op (attributes are looked up on demand). + - Comparing entities just compares their ids. Be careful when comparing entities taken from differenct dbs or from different versions of the same db. + - Accessed entity attributes are cached on entity itself (except backward references). + - When printing, only cached attributes (the ones you have accessed before) are printed. See [[touch]]." 
+ ([db eid] + (de/entity db eid))) (defn entity-db "Returns a db that entity was created from." @@ -520,58 +529,55 @@ (defn is-filtered "Returns `true` if this database was filtered using [[filter]], `false` otherwise." [x] - (instance? FilteredDB x)) + (instance? datahike.db.FilteredDB x)) -(def ^{:arglists '([db pred]) - :doc "Returns a view over database that has same interface but only includes datoms for which the `(pred db datom)` is true. Can be applied multiple times. +(defn filter + "Returns a view over database that has same interface but only includes datoms for which the `(pred db datom)` is true. Can be applied multiple times. - Filtered DB gotchas: + Filtered DB gotchas: - - All operations on filtered database are proxied to original DB, then filter pred is applied. - - Not cached. You pay filter penalty every time. - - Supports entities, pull, queries, index access. - - Does not support hashing of DB. - - Does not support [[with]] and [[db-with]]."} - filter - dcore/filter) + - All operations on filtered database are proxied to original DB, then filter pred is applied. + - Not cached. You pay filter penalty every time. + - Supports entities, pull, queries, index access. + - Does not support hashing of DB. + - Does not support [[with]] and [[db-with]]." + [db pred] + (dcore/filter db pred)) (defn- is-temporal? [x] (or (instance? HistoricalDB x) (instance? AsOfDB x) (instance? SinceDB x))) -(def ^{:arglists '([db arg-map]) - :doc "Same as [[transact]]`, but applies to an immutable database value. Returns transaction report (see [[transact]]). - - Accepts tx-data and tx-meta as a map. - - (with @conn {:tx-data [[:db/add 1 :name \"Ivan\"]]}) ; => {:db-before #datahike/DB {:max-tx 536870912 :max-eid 0}, - ; :db-after #datahike/DB {:max-tx 536870913 :max-eid 1}, - ; :tx-data [#datahike/Datom [1 :name \"Ivan\" 536870913]], - ; :tempids #:db{:current-tx 536870913}, - ; :tx-meta nil} - - (with @conn {:tx-data [[:db/add 1 :name \"Ivan\"]] - :tx-meta {:foo :bar}}) ; => {:db-before #datahike/DB {:max-tx 536870912 :max-eid 0}, - ; :db-after #datahike/DB {:max-tx 536870913 :max-eid 1}, - ; :tx-data [#datahike/Datom [1 :name \"Ivan\" 536870913]], - ; :tempids #:db{:current-tx 536870913}, - ; :tx-meta {:foo :bar}}"} - with - (fn - ([db arg-map] - (let [tx-data (if (:tx-data arg-map) (:tx-data arg-map) arg-map) - tx-meta (if (:tx-meta arg-map) (:tx-meta arg-map) nil)] - (with db tx-data tx-meta))) - ([db tx-data tx-meta] - (dcore/with db tx-data tx-meta)))) - -(def ^{:arglists '([db tx-data]) - :doc "Applies transaction to an immutable db value, returning new immutable db value. Same as `(:db-after (with db tx-data))`."} - db-with - (fn [db tx-data] - {:pre [(db/db? db)]} - (:db-after (with db tx-data)))) +(defn with + "Same as [[transact]]`, but applies to an immutable database value. Returns transaction report (see [[transact]]). + + Accepts tx-data and tx-meta as a map. 
+ + (with @conn {:tx-data [[:db/add 1 :name \"Ivan\"]]}) ; => {:db-before #datahike/DB {:max-tx 536870912 :max-eid 0}, + ; :db-after #datahike/DB {:max-tx 536870913 :max-eid 1}, + ; :tx-data [#datahike/Datom [1 :name \"Ivan\" 536870913]], + ; :tempids #:db{:current-tx 536870913}, + ; :tx-meta nil} + + (with @conn {:tx-data [[:db/add 1 :name \"Ivan\"]] + :tx-meta {:foo :bar}}) ; => {:db-before #datahike/DB {:max-tx 536870912 :max-eid 0}, + ; :db-after #datahike/DB {:max-tx 536870913 :max-eid 1}, + ; :tx-data [#datahike/Datom [1 :name \"Ivan\" 536870913]], + ; :tempids #:db{:current-tx 536870913}, + ; :tx-meta {:foo :bar}}" + ([db arg-map] + (let [tx-data (if (:tx-data arg-map) (:tx-data arg-map) arg-map) + tx-meta (if (:tx-meta arg-map) (:tx-meta arg-map) nil)] + (with db tx-data tx-meta))) + ([db tx-data tx-meta] + (dcore/with db tx-data tx-meta))) + +(defn db-with + "Applies transaction to an immutable db value, returning new immutable db value. Same as `(:db-after (with db tx-data))`." + [db tx-data] + {:pre [(db/db? db)]} + (:db-after (with db tx-data))) (defn db "Returns the underlying immutable database value from a connection. @@ -584,168 +590,166 @@ #?(:cljs (instance? js/Date d) :clj (instance? Date d))) -(def ^{:arglists '([db time-point]) - :doc "Returns the database state since a given point in time (you may use either java.util.Date or a transaction ID as long). - Be aware: the database contains only the datoms that were added since the date. +(defn since + "Returns the database state since a given point in time (you may use either java.util.Date or a transaction ID as long). + Be aware: the database contains only the datoms that were added since the date. - (transact conn {:tx-data [{:db/ident :name - :db/valueType :db.type/string - :db/unique :db.unique/identity - :db/index true - :db/cardinality :db.cardinality/one} - {:db/ident :age - :db/valueType :db.type/long - :db/cardinality :db.cardinality/one}]}) + (transact conn {:tx-data [{:db/ident :name + :db/valueType :db.type/string + :db/unique :db.unique/identity + :db/index true + :db/cardinality :db.cardinality/one} + {:db/ident :age + :db/valueType :db.type/long + :db/cardinality :db.cardinality/one}]}) + + (transact conn {:tx-data [{:name \"Alice\" :age 25} {:name \"Bob\" :age 30}]}) + + (def date (java.util.Date.)) + + (transact conn [{:db/id [:name \"Alice\"] :age 30}]) + + (q '[:find ?n ?a + :in $ $since + :where + [$ ?e :name ?n] + [$since ?e :age ?a]] + @conn + (since @conn date)) ; => #{[\"Alice\" 30]} + + (q {:query '[:find ?n ?a :where [?e :name ?n] [?e :age ?a]] + :args [@conn]}) ; => #{[\"Alice\" 30] [\"Bob\" 30]}" + [db time-point] + {:pre [(or (int? time-point) (date? time-point))]} + (if (db/-temporal-index? db) + (SinceDB. db time-point) + (throw (ex-info "since is only allowed on temporal indexed databases." {:config (db/-config db)})))) + +(defn as-of + "Returns the database state at given point in time (you may use either java.util.Date or transaction ID as long). 
+ + + (transact conn {:tx-data [{:db/ident :name + :db/valueType :db.type/string + :db/unique :db.unique/identity + :db/index true + :db/cardinality :db.cardinality/one} + {:db/ident :age + :db/valueType :db.type/long + :db/cardinality :db.cardinality/one}]}) + + (transact conn {:tx-data [{:name \"Alice\" :age 25} {:name \"Bob\" :age 30}]}) + + (def date (java.util.Date.)) + + (transact conn {:tx-data [{:db/id [:name \"Alice\"] :age 35}]}) + + (q {:query '[:find ?n ?a :where [?e :name ?n] [?e :age ?a]] + :args [(as-of @conn date)]}) ; => #{[\"Alice\" 25] [\"Bob\" 30]} + + (q {:query '[:find ?n ?a :where [?e :name ?n] [?e :age ?a]] + :args [@conn]}) ; => #{[\"Alice\" 35] [\"Bob\" 30]}" + [db time-point] + {:pre [(or (int? time-point) (date? time-point))]} + (if (db/-temporal-index? db) + (AsOfDB. db time-point) + (throw (ex-info "as-of is only allowed on temporal indexed databases." {:config (db/-config db)})))) + +(defn history + "Returns the full historical state of the database you may interact with. + + + (transact conn {:tx-data [{:db/ident :name + :db/valueType :db.type/string + :db/unique :db.unique/identity + :db/index true + :db/cardinality :db.cardinality/one} + {:db/ident :age + :db/valueType :db.type/long + :db/cardinality :db.cardinality/one}]}) + + (transact conn {:tx-data [{:name \"Alice\" :age 25} {:name \"Bob\" :age 30}]}) + + (q {:query '[:find ?n ?a :where [?e :name ?n] [?e :age ?a]] + :args [(history @conn)]}) ; => #{[\"Alice\" 25] [\"Bob\" 30]} + + (transact conn {:tx-data [{:db/id [:name \"Alice\"] :age 35}]}) + + (q {:query '[:find ?n ?a :where [?e :name ?n] [?e :age ?a]] + :args [@conn]}) ; => #{[\"Alice\" 35] [\"Bob\" 30]} + + (q {:query '[:find ?n ?a :where [?e :name ?n] [?e :age ?a]] + :args [(history @conn)]}) ; => #{[\"Alice\" 25] [\"Bob\" 30]}" + [db] + (if (db/-temporal-index? db) + (HistoricalDB. db) + (throw (ex-info "history is only allowed on temporal indexed databases." {:config (db/-config db)})))) + +(defn index-range + "Returns part of `:avet` index between `[_ attr start]` and `[_ attr end]` in AVET sort order. + + Same properties as [[datoms]]. + + `attr` must be a reference, unique attribute or marked as `:db/index true`. + + Usage: + + + (transact db {:tx-data [{:db/ident :name + :db/type :db.type/string + :db/cardinality :db.cardinality/one} + {:db/ident :likes + :db/index true + :db/type :db.type/string + :db/cardinality :db.cardinality/many} + {:db/ident :age + :db/unique :db.unique/identity + :db/type :db.type/ref + :db/cardinality :db.cardinality/many}]}) + + (transact db {:tx-data [{:name \"Ivan\"} + {:age 19} + {:likes \"fries\"} + {:likes \"pizza\"} + {:likes \"candy\"} + {:likes \"pie\"} + {:likes \"pizza\"}]}) + + (index-range db {:attrid :likes + :start \"a\" + :end \"zzzzzzzzz\"}) ; => '(#datahike/Datom [2 :likes \"candy\"] + ; #datahike/Datom [1 :likes \"fries\"] + ; #datahike/Datom [2 :likes \"pie\"] + ; #datahike/Datom [1 :likes \"pizza\"] + ; #datahike/Datom [2 :likes \"pizza\"]) + + (index-range db {:attrid :likes + :start \"egg\" + :end \"pineapple\"}) ; => '(#datahike/Datom [1 :likes \"fries\"] + ; #datahike/Datom [2 :likes \"pie\"]) + + Useful patterns: + + ; find all entities with age in a specific range (inclusive) + (->> (index-range db {:attrid :age :start 18 :end 60}) (map :e))" + [db {:keys [attrid start end]}] + {:pre [(db/db? db)]} + (db/-index-range db attrid start end)) - (transact conn {:tx-data [{:name \"Alice\" :age 25} {:name \"Bob\" :age 30}]}) +(defn listen + "Listen for changes on the given connection. 
Whenever a transaction is applied to the database via + [[transact!]], the callback is called with the transaction report. `key` is any opaque unique value. - (def date (java.util.Date.)) + Idempotent. Calling [[listen]] with the same twice will override old callback with the new value. - (transact conn [{:db/id [:name \"Alice\"] :age 30}]) + Returns the key under which this listener is registered. See also [[unlisten]]." + ([conn callback] + (dcore/listen! conn callback)) + ([conn key callback] + (dcore/listen! conn key callback))) - (q '[:find ?n ?a - :in $ $since - :where - [$ ?e :name ?n] - [$since ?e :age ?a]] - @conn - (since @conn date)) ; => #{[\"Alice\" 30]} - - (q {:query '[:find ?n ?a :where [?e :name ?n] [?e :age ?a]] - :args [@conn]}) ; => #{[\"Alice\" 30] [\"Bob\" 30]}"} - since - (fn [db time-point] - {:pre [(or (int? time-point) (date? time-point))]} - (if (db/-temporal-index? db) - (SinceDB. db time-point) - (throw (ex-info "since is only allowed on temporal indexed databases." {:config (db/-config db)}))))) - -(def ^{:arglists '([db time-point]) - :doc "Returns the database state at given point in time (you may use either java.util.Date or transaction ID as long). - - - (transact conn {:tx-data [{:db/ident :name - :db/valueType :db.type/string - :db/unique :db.unique/identity - :db/index true - :db/cardinality :db.cardinality/one} - {:db/ident :age - :db/valueType :db.type/long - :db/cardinality :db.cardinality/one}]}) - - (transact conn {:tx-data [{:name \"Alice\" :age 25} {:name \"Bob\" :age 30}]}) - - (def date (java.util.Date.)) - - (transact conn {:tx-data [{:db/id [:name \"Alice\"] :age 35}]}) - - (q {:query '[:find ?n ?a :where [?e :name ?n] [?e :age ?a]] - :args [(as-of @conn date)]}) ; => #{[\"Alice\" 25] [\"Bob\" 30]} - - (q {:query '[:find ?n ?a :where [?e :name ?n] [?e :age ?a]] - :args [@conn]}) ; => #{[\"Alice\" 35] [\"Bob\" 30]}"} - as-of - (fn [db time-point] - {:pre [(or (int? time-point) (date? time-point))]} - (if (db/-temporal-index? db) - (AsOfDB. db time-point) - (throw (ex-info "as-of is only allowed on temporal indexed databases." {:config (db/-config db)}))))) - -(def ^{:arglists '([db]) - :doc "Returns the full historical state of the database you may interact with. - - - (transact conn {:tx-data [{:db/ident :name - :db/valueType :db.type/string - :db/unique :db.unique/identity - :db/index true - :db/cardinality :db.cardinality/one} - {:db/ident :age - :db/valueType :db.type/long - :db/cardinality :db.cardinality/one}]}) - - (transact conn {:tx-data [{:name \"Alice\" :age 25} {:name \"Bob\" :age 30}]}) - - (q {:query '[:find ?n ?a :where [?e :name ?n] [?e :age ?a]] - :args [(history @conn)]}) ; => #{[\"Alice\" 25] [\"Bob\" 30]} - - (transact conn {:tx-data [{:db/id [:name \"Alice\"] :age 35}]}) - - (q {:query '[:find ?n ?a :where [?e :name ?n] [?e :age ?a]] - :args [@conn]}) ; => #{[\"Alice\" 35] [\"Bob\" 30]} - - (q {:query '[:find ?n ?a :where [?e :name ?n] [?e :age ?a]] - :args [(history @conn)]}) ; => #{[\"Alice\" 25] [\"Bob\" 30]}"} - history - (fn [db] - (if (db/-temporal-index? db) - (HistoricalDB. db) - (throw (ex-info "history is only allowed on temporal indexed databases." {:config (db/-config db)}))))) - -(def ^{:arglists '([db arg-map]) - :doc "Returns part of `:avet` index between `[_ attr start]` and `[_ attr end]` in AVET sort order. - - Same properties as [[datoms]]. - - `attr` must be a reference, unique attribute or marked as `:db/index true`. 
- - Usage: - - - (transact db {:tx-data [{:db/ident :name - :db/type :db.type/string - :db/cardinality :db.cardinality/one} - {:db/ident :likes - :db/index true - :db/type :db.type/string - :db/cardinality :db.cardinality/many} - {:db/ident :age - :db/unique :db.unique/identity - :db/type :db.type/ref - :db/cardinality :db.cardinality/many}]}) - - (transact db {:tx-data [{:name \"Ivan\"} - {:age 19} - {:likes \"fries\"} - {:likes \"pizza\"} - {:likes \"candy\"} - {:likes \"pie\"} - {:likes \"pizza\"}]}) - - (index-range db {:attrid :likes - :start \"a\" - :end \"zzzzzzzzz\"}) ; => '(#datahike/Datom [2 :likes \"candy\"] - ; #datahike/Datom [1 :likes \"fries\"] - ; #datahike/Datom [2 :likes \"pie\"] - ; #datahike/Datom [1 :likes \"pizza\"] - ; #datahike/Datom [2 :likes \"pizza\"]) - - (index-range db {:attrid :likes - :start \"egg\" - :end \"pineapple\"}) ; => '(#datahike/Datom [1 :likes \"fries\"] - ; #datahike/Datom [2 :likes \"pie\"]) - - Useful patterns: - - ; find all entities with age in a specific range (inclusive) - (->> (index-range db {:attrid :age :start 18 :end 60}) (map :e))"} - index-range - (fn [db {:keys [attrid start end]}] - {:pre [(db/db? db)]} - (db/-index-range db attrid start end))) - -(def ^{:arglists '([conn callback] [conn key callback]) - :doc "Listen for changes on the given connection. Whenever a transaction is applied to the database via - [[transact!]], the callback is called with the transaction report. `key` is any opaque unique value. - - Idempotent. Calling [[listen]] with the same twice will override old callback with the new value. - - Returns the key under which this listener is registered. See also [[unlisten]]."} - listen - dcore/listen!) - -(def ^{:arglists '([conn key]) - :doc "Removes registered listener from connection. See also [[listen]]."} - unlisten - dcore/unlisten!) +(defn unlisten + "Removes registered listener from connection. See also [[listen]]." + [conn key] + (dcore/unlisten! conn key)) diff --git a/src/datahike/array.cljc b/src/datahike/array.cljc index c417a7cb5..2d16b7efd 100644 --- a/src/datahike/array.cljc +++ b/src/datahike/array.cljc @@ -1,6 +1,6 @@ (ns datahike.array (:require [hitchhiker.tree.node :as n]) - (:import [java.util Arrays])) + #?(:clj (:import [java.util Arrays]))) #?(:clj (defn java8? [] diff --git a/src/datahike/config.cljc b/src/datahike/config.cljc index 54b930830..18a2b91d9 100644 --- a/src/datahike/config.cljc +++ b/src/datahike/config.cljc @@ -2,10 +2,15 @@ (:require [clojure.edn :as edn] [clojure.spec.alpha :as s] [zufall.core :as z] - [environ.core :refer [env]] + #?(:clj [environ.core :refer [env]]) [taoensso.timbre :as log] [datahike.store :as ds]) - (:import [java.net URI])) + #?(:clj (:import [java.net URI]))) + +#?(:cljs + (do + (def Exception js/Error) + (def env {}))) (s/def ::index #{:datahike.index/hitchhiker-tree :datahike.index/persistent-set}) (s/def ::keep-history? boolean?) 
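;; Note on the :cljs shim above: aliasing `Exception` to js/Error lets the
;; existing `(catch Exception _ ...)` forms compile unchanged, and binding
;; `env` to {} makes every *-from-env helper fall through to its default on
;; ClojureScript. One caveat for the helpers in the next hunk: js/parseInt
;; never throws on malformed input, it returns NaN, so the try/catch in
;; int-from-env cannot restore the default there. A minimal sketch of a
;; NaN-safe variant (the name int-from-env* is illustrative, not part of
;; this patch; it assumes the `env` shim above is in scope):
(defn int-from-env*
  [key default]
  (try
    #?(:clj  (Integer/parseInt (get env key (str default)))
       :cljs (let [n (js/parseInt (get env key (str default)) 10)]
               ;; guard: parseInt returns NaN instead of throwing
               (if (js/isNaN n) default n)))
    (catch #?(:clj Exception :cljs :default) _
      default)))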
@@ -45,7 +50,7 @@ :path path :host host :port port - :id (str (java.util.UUID/randomUUID))} + :id (str #?(:clj (java.util.UUID/randomUUID) :cljs (random-uuid)))} :level {:path path} :file {:path path})) :index index @@ -57,13 +62,16 @@ (defn int-from-env [key default] (try - (Integer/parseInt (get env key (str default))) + (#?(:clj Integer/parseInt :cljs js/parseInt) (get env key (str default))) (catch Exception _ default))) (defn bool-from-env [key default] (try - (Boolean/parseBoolean (get env key default)) + #?(:clj + (Boolean/parseBoolean (get env key default)) + :cljs + (= "true" (get env key default))) (catch Exception _ default))) (defn map-from-env [key default] @@ -141,7 +149,7 @@ (when (and attribute-refs? (= :read schema-flexibility)) (throw (ex-info "Attribute references cannot be used with schema-flexibility ':read'." config))) (if (string? initial-tx) - (update merged-config :initial-tx (fn [path] (-> path slurp read-string))) + (update merged-config :initial-tx (fn [path] #?(:clj (-> path slurp edn/read-string)))) merged-config)))) ;; deprecation begin @@ -157,30 +165,31 @@ :opt-un [::username ::password ::path ::host ::port])) (defn uri->config [uri] - (let [base-uri (URI. uri) - _ (when-not (= (.getScheme base-uri) "datahike") - (throw (ex-info "URI scheme is not datahike conform." {:uri uri}))) - sub-uri (URI. (.getSchemeSpecificPart base-uri)) - backend (keyword (.getScheme sub-uri)) - [username password] (when-let [user-info (.getUserInfo sub-uri)] - (clojure.string/split user-info #":")) - credentials (when-not (and (nil? username) (nil? password)) - {:username username - :password password}) - port (.getPort sub-uri) - path (.getPath sub-uri) - host (.getHost sub-uri) - config (merge - {:backend backend - :uri uri} - credentials - (when host - {:host host}) - (when-not (empty? path) - {:path path}) - (when (<= 0 port) - {:port port}))] - config)) + #?(:clj + (let [base-uri (URI. uri) + _ (when-not (= (.getScheme base-uri) "datahike") + (throw (ex-info "URI scheme is not datahike conform." {:uri uri}))) + sub-uri (URI. (.getSchemeSpecificPart base-uri)) + backend (keyword (.getScheme sub-uri)) + [username password] (when-let [user-info (.getUserInfo sub-uri)] + (clojure.string/split user-info #":")) + credentials (when-not (and (nil? username) (nil? password)) + {:username username + :password password}) + port (.getPort sub-uri) + path (.getPath sub-uri) + host (.getHost sub-uri) + config (merge + {:backend backend + :uri uri} + credentials + (when host + {:host host}) + (when-not (empty? path) + {:path path}) + (when (<= 0 port) + {:port port}))] + config))) (defn validate-config-depr [config] (when-not (s/valid? :datahike/config-depr config) diff --git a/src/datahike/connector.cljc b/src/datahike/connector.cljc index 575e2d0e1..cfee60cb7 100644 --- a/src/datahike/connector.cljc +++ b/src/datahike/connector.cljc @@ -6,18 +6,22 @@ [datahike.config :as dc] [datahike.tools :as dt :refer [throwable-promise]] [datahike.index.hitchhiker-tree.upsert :as ups] - [datahike.transactor :as t] + #?(:clj [datahike.transactor :as t]) [hitchhiker.tree.bootstrap.konserve :as kons] [konserve.core :as k] [konserve.cache :as kc] - [superv.async :refer [config uri))) - (-create-database [uri & opts] - (apply -create-database (dc/uri->config uri) opts)) + (-create-database [uri opts] + (-create-database (dc/uri->config uri) opts)) (-delete-database [uri] (-delete-database (dc/uri->config uri))) @@ -111,7 +117,7 @@ (-database-exists? [uri] (-database-exists? 
(dc/uri->config uri))) - clojure.lang.IPersistentMap + #?(:clj clojure.lang.IPersistentMap :cljs PersistentArrayMap) (-database-exists? [config] (let [config (dc/load-config config) store-config (:store config) @@ -122,7 +128,7 @@ (kc/ensure-cache raw-store (atom (cache/lru-cache-factory {} :threshold 1000))))) - stored-db (KonserveBackend store)] - ( (assoc-in db [:schema e] (dissoc (schema v-ident) a-ident)) (update-in [:schema] #(dissoc % v-ident)) (update-in [:ident-ref-map] #(dissoc % v-ident)) @@ -1410,8 +1403,7 @@ (update-in db [:schema e] #(dissoc % a-ident v-ident))) (let [err-msg (str "Schema with entity id " e " does not exist") err-map {:error :retract/schema :entity-id e :attribute a :value e}] - (throw #?(:clj (ex-info err-msg err-map) - :cljs (error err-msg err-map)))))))) + (throw (ex-info err-msg err-map))))))) ;; In context of `with-datom` we can use faster comparators which @@ -1725,12 +1717,15 @@ (transact-tx-data report' es)))) (defn assert-preds [db [_ e _ preds]] - (reduce - (fn [coll pred] - (if ((resolve pred) db e) - coll - (conj coll pred))) - #{} preds)) + ;; Functions can only be resolved at compile time in ClojureScript + ;; https://stackoverflow.com/questions/54227193/clojure-clojurescript-argument-to-resolve-must-be-a-quoted-symbol + #?(:cljs #{} + :clj (reduce + (fn [coll pred] + (if ((resolve pred) db e) + coll + (conj coll pred))) + #{} preds))) (def builtin-fn? #{:db.fn/call diff --git a/src/datahike/impl/entity.cljc b/src/datahike/impl/entity.cljc index 82ca1d822..1891b2886 100644 --- a/src/datahike/impl/entity.cljc +++ b/src/datahike/impl/entity.cljc @@ -2,7 +2,7 @@ (:refer-clojure :exclude [keys get]) (:require [#?(:cljs cljs.core :clj clojure.core) :as c] [datahike.db :as db]) - (:import [datahike.java IEntity])) + #?(:clj (:import [datahike.java IEntity]))) (declare entity ->Entity equiv-entity lookup-entity touch) diff --git a/src/datahike/index.cljc b/src/datahike/index.cljc index 47b7d2fb9..44edde4e1 100644 --- a/src/datahike/index.cljc +++ b/src/datahike/index.cljc @@ -1,8 +1,9 @@ (ns ^:no-doc datahike.index + #?(:cljs (:refer-clojure :exclude [-seq -count -remove -flush -transient -persistent!])) (:require [datahike.index.hitchhiker-tree :as dih] - [datahike.index.persistent-set :as dip]) - #?(:clj (:import [hitchhiker.tree DataNode IndexNode] - [me.tonsky.persistent_sorted_set PersistentSortedSet]))) + [datahike.index.persistent-set :as dip] + [hitchhiker.tree] + [me.tonsky.persistent-sorted-set])) ;; TODO add doc to each function (defprotocol IIndex @@ -18,7 +19,7 @@ (-transient [index]) (-persistent! [index])) -(extend-type DataNode +(extend-type hitchhiker.tree.DataNode IIndex (-all [eavt-tree] (dih/-all eavt-tree :eavt)) @@ -43,7 +44,7 @@ (-persistent! [tree] (dih/-persistent! tree))) -(extend-type IndexNode +(extend-type hitchhiker.tree.IndexNode IIndex (-all [eavt-tree] (dih/-all eavt-tree :eavt)) @@ -68,7 +69,7 @@ (-persistent! [tree] (dih/-persistent! tree))) -(extend-type PersistentSortedSet +(extend-type #?(:clj me.tonsky.persistent_sorted_set.PersistentSortedSet :cljs me.tonsky.persistent-sorted-set.BTSet) IIndex (-all [eavt-set] (dip/-all eavt-set)) diff --git a/src/datahike/index/hitchhiker_tree.cljc b/src/datahike/index/hitchhiker_tree.cljc index 4c89bae6d..04c782418 100644 --- a/src/datahike/index/hitchhiker_tree.cljc +++ b/src/datahike/index/hitchhiker_tree.cljc @@ -1,20 +1,26 @@ (ns ^:no-doc datahike.index.hitchhiker-tree + #?(:cljs (:refer-clojure :exclude [-count -persistent! 
-flush -seq])) (:require [datahike.index.hitchhiker-tree.upsert :as ups] [hitchhiker.tree.utils.async :as async] [hitchhiker.tree.messaging :as hmsg] [hitchhiker.tree.key-compare :as kc] [hitchhiker.tree :as tree] [datahike.array :refer [compare-arrays]] - [datahike.datom :as dd] + [datahike.datom :as dd #?@(:cljs [:refer-macros [combine-cmp]])] [datahike.constants :refer [e0 tx0 emax txmax]] [hasch.core :as h]) - #?(:clj (:import [clojure.lang AMapEntry] + #?(:clj (:import [clojure.lang AMapEntry PersistentVector Keyword] [datahike.datom Datom]))) +#?(:cljs + (do + (def IllegalArgumentException js/Error) + (def UnsupportedOperationException js/Error))) + (extend-protocol kc/IKeyCompare - clojure.lang.PersistentVector + PersistentVector (-compare [key1 key2] - (if-not (= (class key2) clojure.lang.PersistentVector) + (if-not (= (type key2) PersistentVector) (if (nil? key2) +1 ;; Case for tuples. E.g. (compare [100 200] nil) -1) ;; HACK for nil @@ -25,10 +31,10 @@ (kc/-compare b f) (kc/-compare c g) (kc/-compare d h))))) - java.lang.String + #?(:clj String :cljs string) (-compare [key1 key2] (compare key1 key2)) - clojure.lang.Keyword + Keyword (-compare [key1 key2] (compare key1 key2)) nil @@ -36,10 +42,11 @@ (if (nil? key2) 0 -1))) -(extend-protocol kc/IKeyCompare - (Class/forName "[B") - (-compare [key1 key2] - (compare-arrays key1 key2))) +#?(:clj + (extend-protocol kc/IKeyCompare + (Class/forName "[B") + (-compare [key1 key2] + (compare-arrays key1 key2)))) (def ^:const br 300) ;; TODO name better, total node size; maybe(!) make configurable (def ^:const br-sqrt (long (Math/sqrt br))) ;; branching factor diff --git a/src/datahike/pull_api.cljc b/src/datahike/pull_api.cljc index e8f14c0b2..319d12fb1 100644 --- a/src/datahike/pull_api.cljc +++ b/src/datahike/pull_api.cljc @@ -1,7 +1,7 @@ (ns ^:no-doc datahike.pull-api (:require [datahike.db :as db] - #?@(:cljs [datalog.parser.pull :refer [PullSpec]]) + #?(:cljs [datalog.parser.pull :refer [PullSpec]]) [datalog.parser.pull :as dpp]) #?(:clj (:import diff --git a/src/datahike/query.cljc b/src/datahike/query.cljc index 3fb741c91..7beebf5dc 100644 --- a/src/datahike/query.cljc +++ b/src/datahike/query.cljc @@ -9,15 +9,15 @@ [me.tonsky.persistent-sorted-set.arrays :as da] [datahike.lru] [datahike.impl.entity :as de] - #?@(:cljs [datalog.parser.type :refer [BindColl BindIgnore BindScalar BindTuple Constant - FindColl FindRel FindScalar FindTuple PlainSymbol - RulesVar SrcVar Variable]]) + #?(:cljs [datalog.parser.type :refer [Aggregate BindColl BindIgnore BindScalar BindTuple Constant + FindColl FindRel FindScalar FindTuple PlainSymbol Pull + RulesVar SrcVar Variable]]) [datalog.parser.impl :as dpi] [datalog.parser.impl.proto :as dpip] [datahike.pull-api :as dpa] [datalog.parser :refer [parse]] [datalog.parser.pull :as dpp]) - #?(:clj (:import [clojure.lang Reflector] + #?(:clj (:import [clojure.lang Reflector PersistentVector PersistentArrayMap LazySeq] [datalog.parser.type Aggregate BindColl BindIgnore BindScalar BindTuple Constant FindColl FindRel FindScalar FindTuple PlainSymbol Pull RulesVar SrcVar Variable] @@ -209,14 +209,13 @@ (-increasing? [x more])) (extend-protocol CollectionOrder - - Number + #?(:clj Number :cljs number) (-strictly-decreasing? [x more] (apply < x more)) (-decreasing? [x more] (apply <= x more)) (-strictly-increasing? [x more] (apply > x more)) (-increasing? [x more] (apply >= x more)) - java.util.Date + #?(:clj java.util.Date :cljs js/Date) (-strictly-decreasing? 
[x more] #?(:clj (reduce (fn [res [d1 d2]] (if (.before ^Date d1 ^Date d2) res (reduced false))) @@ -1069,13 +1068,13 @@ (defmulti q (fn [query & args] (type query))) -(defmethod q clojure.lang.LazySeq [query & args] +(defmethod q LazySeq [query & args] (q {:query query :args args})) -(defmethod q clojure.lang.PersistentVector [query & args] +(defmethod q PersistentVector [query & args] (q {:query query :args args})) -(defmethod q clojure.lang.PersistentArrayMap [query-map & inputs] +(defmethod q PersistentArrayMap [query-map & inputs] (let [query (if (contains? query-map :query) (:query query-map) query-map) query (if (string? query) (edn/read-string query) query) args (if (contains? query-map :args) (:args query-map) inputs) diff --git a/src/datahike/schema.cljc b/src/datahike/schema.cljc index bae5567d4..7cff557d9 100644 --- a/src/datahike/schema.cljc +++ b/src/datahike/schema.cljc @@ -1,6 +1,6 @@ (ns ^:no-doc datahike.schema (:require [clojure.spec.alpha :as s]) - (:import [datahike.datom Datom])) + #?(:clj (:import [datahike.datom Datom]))) (s/def :db.type/id #(or (= (class %) java.lang.Long) string?)) diff --git a/src/datahike/store.cljc b/src/datahike/store.cljc index f03ee3b84..7a5647569 100644 --- a/src/datahike/store.cljc +++ b/src/datahike/store.cljc @@ -4,7 +4,7 @@ [konserve.filestore :as fs] [konserve.memory :as mem] [superv.async :refer [!! chan close! promise-chan put!]])) + [clojure.core.async :refer [#?(:clj >!! :cljs >!) chan close! promise-chan put!]])) (defprotocol PTransactor ; Send a transaction. Returns a channel that resolves when the transaction finalizes. @@ -15,7 +15,7 @@ PTransactor (send-transaction! [_ tx-data tx-fn] (let [p (promise-chan)] - (>!! rx-queue {:tx-data tx-data :callback p :tx-fn tx-fn}) + (#?(:clj >!! :cljs >!) rx-queue {:tx-data tx-data :callback p :tx-fn tx-fn}) p)) (shutdown [_] @@ -51,4 +51,4 @@ rx-thread (create-rx-thread connection rx-queue update-and-flush-db)] (map->LocalTransactor {:rx-queue rx-queue - :rx-thread rx-thread}))) \ No newline at end of file + :rx-thread rx-thread})))
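;; A portability note on send-transaction! above: in ClojureScript,
;; core.async's >! is only valid inside a (go ...) block and throws at
;; runtime when used outside one, so the #?(:clj >!! :cljs >!) form would
;; still fail on CLJS. put! enqueues asynchronously on both platforms and
;; avoids the problem. A minimal self-contained sketch under that
;; assumption (the namespace and the names in the usage comment are
;; illustrative, not part of this patch):
(ns datahike.transactor-sketch
  (:require [clojure.core.async :refer [chan promise-chan put!]]))

(defprotocol PTransactor
  ;; Send a transaction. Returns a channel that resolves when the
  ;; transaction finalizes.
  (send-transaction! [t tx-data tx-fn]))

(defrecord LocalTransactor [rx-queue]
  PTransactor
  (send-transaction! [_ tx-data tx-fn]
    (let [p (promise-chan)]
      ;; put! never parks or blocks; with a buffered rx-queue it simply
      ;; enqueues, which matches the JVM >!! semantics closely enough here.
      (put! rx-queue {:tx-data tx-data :callback p :tx-fn tx-fn})
      p)))

;; Usage sketch; the consuming side (create-rx-thread on the JVM) would
;; become a go-loop over rx-queue on ClojureScript.
(comment
  (def tx (->LocalTransactor (chan 1000)))
  (send-transaction! tx [[:db/add -1 :name "Ivan"]] 'some.ns/update-and-flush-db))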