entities
listlengths
1
44.6k
max_stars_repo_path
stringlengths
6
160
max_stars_repo_name
stringlengths
6
66
max_stars_count
int64
0
47.9k
content
stringlengths
18
1.04M
id
stringlengths
1
6
new_content
stringlengths
18
1.04M
modified
bool
1 class
references
stringlengths
32
1.52M
[ { "context": "sword-input (find-element {:xpath \"//input[@type='password']\"})]\n (input-text email-input email)\n (inp", "end": 4440, "score": 0.6730898022651672, "start": 4432, "tag": "PASSWORD", "value": "password" } ]
test/dashboard/functional/test/selenium.clj
Purple-Services/dashboard-service
3
(ns dashboard.functional.test.selenium (:require [clj-webdriver.taxi :refer :all] [common.db :as db] [dashboard.handler] [dashboard.test.db-tools :refer [setup-ebdb-test-pool! clear-test-database setup-ebdb-test-for-conn-fixture clear-and-populate-test-database clear-and-populate-test-database-fixture]] [environ.core :refer [env]] [ring.adapter.jetty :refer [run-jetty]])) ;; note: you will need a bit of elisp in order be able to use load the file ;; without having the vars named improperly ;; here is an example of what you will need below ;; ex: ~/emacs/cider/cider-config.el ;; ;; (defun dashboard-clj-reset () ;; (when (string= (buffer-name) "selenium.clj") ;; (cider-interactive-eval ;; "(dashboard.functional.test.selenium/reset-vars!)"))) ;; (add-hook 'cider-mode-hook ;; (lambda () ;; (add-hook 'cider-file-loaded-hook 'dashboard-clj-reset))) ;; for manual testing: ;; (startup-test-env!) ; make sure profiles.clj was loaded with ;; ; :base-url "http:localhost:5746/" ;; -- run tests -- ;; (reset-db!) ; note: most tests will need this run between them anyhow ;; -- run more tests ;; (stop-server server) ;; (stop-browser) ;; normally, the test server runs on port 3000. If you would like to manually ;; run tests, you can set this to (def test-port 3000) in the repl ;; just reload this file (C-c C-l in cider) when running (def test-port 5747) (def test-base-url (str "http://localhost:" test-port "/")) (def base-url test-base-url) (defn start-server [port] (let [_ (setup-ebdb-test-pool!) ;; !!!BIG WARNING: with-redefs is very tricky in terms of being ;; recognized across threads (which jetty uses!) ;; that is why alter-var-root is used here _ (alter-var-root #'common.sendgrid/send-template-email (fn [send-template-email] (fn [to subject message & {:keys [from template-id substitutions]}] (println "No reset password email was actually sent")))) server (run-jetty #'dashboard.handler/handler {:port port :join? 
false})] server)) (defn stop-server [server] (do (clear-test-database) ;; close out the db connection (.close (:datasource (db/conn))) (.stop server))) (defn with-server [t] (let [server (start-server test-port)] (t) (stop-server server))) (defn start-browser [] (set-driver! {:browser :chrome})) (defn stop-browser [] (quit)) (defn with-browser [t] (start-browser) (t) (stop-browser)) (defn with-redefs-fixture [t] (with-redefs [common.config/base-url test-base-url] (t))) ;; beging fns for testing at the repl (defn reset-vars! [] (def base-url (env :base-url)) ;; obviously means that :base-url will use port 5746 (def test-port 5746)) (defn set-server! [] (def server (start-server test-port)) (setup-ebdb-test-pool!)) (defn startup-test-env! [] (reset-vars!) (set-server!) (start-browser)) (defn shutdown-test-env! [] (stop-server server) (stop-browser)) (defn reset-db! [] (clear-and-populate-test-database)) ;; end fns for testing at the repl ;; common elements (def login-email-input {:xpath "//input[@type='text' and @id='email']"}) (def login-button {:xpath "//button[@id='login']"}) ;; common elements (def logout-lg-xpath {:xpath "//ul/li[contains(@class,'hidden-lg')]//a[text()='Logout']"}) (def logout-sm-xpath {:xpath "//ul[contains(@class,'hidden-xs')]/li//a[text()='Logout']"}) ;; fns for controlling the browser (defn get-table-body-cell-text "Given a table, get the text in the table body at row r and column c" [table r c] (let [table-xpath (or (:xpath table) table)] (text {:xpath (str table-xpath "/tbody/tr[position()= " r "]" "/td[position()=" c "]")}))) (defn go-to-uri "Given an uri, go to it" [uri] (to (str base-url uri))) (defn login-dashboard "Login with the client using email and password as credentials" [email password] (go-to-uri "login") (let [email-input (find-element login-email-input) password-input (find-element {:xpath "//input[@type='password']"})] (input-text email-input email) (input-text password-input password) (click (find-element login-button)))) 
(defn logout-dashboard "Logout, assuming the portal has already been logged into" [] (click (if (visible? (find-element logout-lg-xpath)) (find-element logout-lg-xpath) (find-element logout-sm-xpath)))) (defn get-error-alert "Wait for an error alert to appear and test that it says msg" [] (let [alert-danger {:xpath "//div[contains(@class,'alert-danger')]"}] (wait-until #(exists? alert-danger)) (text (find-element alert-danger)))) (defn get-success-alert "Wait for an error alert to appear and test that it says msg" [] (let [alert-danger {:xpath "//div[contains(@class,'alert-success')]"}] (wait-until #(exists? alert-danger)) (text (find-element alert-danger))))
48906
(ns dashboard.functional.test.selenium (:require [clj-webdriver.taxi :refer :all] [common.db :as db] [dashboard.handler] [dashboard.test.db-tools :refer [setup-ebdb-test-pool! clear-test-database setup-ebdb-test-for-conn-fixture clear-and-populate-test-database clear-and-populate-test-database-fixture]] [environ.core :refer [env]] [ring.adapter.jetty :refer [run-jetty]])) ;; note: you will need a bit of elisp in order be able to use load the file ;; without having the vars named improperly ;; here is an example of what you will need below ;; ex: ~/emacs/cider/cider-config.el ;; ;; (defun dashboard-clj-reset () ;; (when (string= (buffer-name) "selenium.clj") ;; (cider-interactive-eval ;; "(dashboard.functional.test.selenium/reset-vars!)"))) ;; (add-hook 'cider-mode-hook ;; (lambda () ;; (add-hook 'cider-file-loaded-hook 'dashboard-clj-reset))) ;; for manual testing: ;; (startup-test-env!) ; make sure profiles.clj was loaded with ;; ; :base-url "http:localhost:5746/" ;; -- run tests -- ;; (reset-db!) ; note: most tests will need this run between them anyhow ;; -- run more tests ;; (stop-server server) ;; (stop-browser) ;; normally, the test server runs on port 3000. If you would like to manually ;; run tests, you can set this to (def test-port 3000) in the repl ;; just reload this file (C-c C-l in cider) when running (def test-port 5747) (def test-base-url (str "http://localhost:" test-port "/")) (def base-url test-base-url) (defn start-server [port] (let [_ (setup-ebdb-test-pool!) ;; !!!BIG WARNING: with-redefs is very tricky in terms of being ;; recognized across threads (which jetty uses!) ;; that is why alter-var-root is used here _ (alter-var-root #'common.sendgrid/send-template-email (fn [send-template-email] (fn [to subject message & {:keys [from template-id substitutions]}] (println "No reset password email was actually sent")))) server (run-jetty #'dashboard.handler/handler {:port port :join? 
false})] server)) (defn stop-server [server] (do (clear-test-database) ;; close out the db connection (.close (:datasource (db/conn))) (.stop server))) (defn with-server [t] (let [server (start-server test-port)] (t) (stop-server server))) (defn start-browser [] (set-driver! {:browser :chrome})) (defn stop-browser [] (quit)) (defn with-browser [t] (start-browser) (t) (stop-browser)) (defn with-redefs-fixture [t] (with-redefs [common.config/base-url test-base-url] (t))) ;; beging fns for testing at the repl (defn reset-vars! [] (def base-url (env :base-url)) ;; obviously means that :base-url will use port 5746 (def test-port 5746)) (defn set-server! [] (def server (start-server test-port)) (setup-ebdb-test-pool!)) (defn startup-test-env! [] (reset-vars!) (set-server!) (start-browser)) (defn shutdown-test-env! [] (stop-server server) (stop-browser)) (defn reset-db! [] (clear-and-populate-test-database)) ;; end fns for testing at the repl ;; common elements (def login-email-input {:xpath "//input[@type='text' and @id='email']"}) (def login-button {:xpath "//button[@id='login']"}) ;; common elements (def logout-lg-xpath {:xpath "//ul/li[contains(@class,'hidden-lg')]//a[text()='Logout']"}) (def logout-sm-xpath {:xpath "//ul[contains(@class,'hidden-xs')]/li//a[text()='Logout']"}) ;; fns for controlling the browser (defn get-table-body-cell-text "Given a table, get the text in the table body at row r and column c" [table r c] (let [table-xpath (or (:xpath table) table)] (text {:xpath (str table-xpath "/tbody/tr[position()= " r "]" "/td[position()=" c "]")}))) (defn go-to-uri "Given an uri, go to it" [uri] (to (str base-url uri))) (defn login-dashboard "Login with the client using email and password as credentials" [email password] (go-to-uri "login") (let [email-input (find-element login-email-input) password-input (find-element {:xpath "//input[@type='<PASSWORD>']"})] (input-text email-input email) (input-text password-input password) (click (find-element 
login-button)))) (defn logout-dashboard "Logout, assuming the portal has already been logged into" [] (click (if (visible? (find-element logout-lg-xpath)) (find-element logout-lg-xpath) (find-element logout-sm-xpath)))) (defn get-error-alert "Wait for an error alert to appear and test that it says msg" [] (let [alert-danger {:xpath "//div[contains(@class,'alert-danger')]"}] (wait-until #(exists? alert-danger)) (text (find-element alert-danger)))) (defn get-success-alert "Wait for an error alert to appear and test that it says msg" [] (let [alert-danger {:xpath "//div[contains(@class,'alert-success')]"}] (wait-until #(exists? alert-danger)) (text (find-element alert-danger))))
true
(ns dashboard.functional.test.selenium (:require [clj-webdriver.taxi :refer :all] [common.db :as db] [dashboard.handler] [dashboard.test.db-tools :refer [setup-ebdb-test-pool! clear-test-database setup-ebdb-test-for-conn-fixture clear-and-populate-test-database clear-and-populate-test-database-fixture]] [environ.core :refer [env]] [ring.adapter.jetty :refer [run-jetty]])) ;; note: you will need a bit of elisp in order be able to use load the file ;; without having the vars named improperly ;; here is an example of what you will need below ;; ex: ~/emacs/cider/cider-config.el ;; ;; (defun dashboard-clj-reset () ;; (when (string= (buffer-name) "selenium.clj") ;; (cider-interactive-eval ;; "(dashboard.functional.test.selenium/reset-vars!)"))) ;; (add-hook 'cider-mode-hook ;; (lambda () ;; (add-hook 'cider-file-loaded-hook 'dashboard-clj-reset))) ;; for manual testing: ;; (startup-test-env!) ; make sure profiles.clj was loaded with ;; ; :base-url "http:localhost:5746/" ;; -- run tests -- ;; (reset-db!) ; note: most tests will need this run between them anyhow ;; -- run more tests ;; (stop-server server) ;; (stop-browser) ;; normally, the test server runs on port 3000. If you would like to manually ;; run tests, you can set this to (def test-port 3000) in the repl ;; just reload this file (C-c C-l in cider) when running (def test-port 5747) (def test-base-url (str "http://localhost:" test-port "/")) (def base-url test-base-url) (defn start-server [port] (let [_ (setup-ebdb-test-pool!) ;; !!!BIG WARNING: with-redefs is very tricky in terms of being ;; recognized across threads (which jetty uses!) ;; that is why alter-var-root is used here _ (alter-var-root #'common.sendgrid/send-template-email (fn [send-template-email] (fn [to subject message & {:keys [from template-id substitutions]}] (println "No reset password email was actually sent")))) server (run-jetty #'dashboard.handler/handler {:port port :join? 
false})] server)) (defn stop-server [server] (do (clear-test-database) ;; close out the db connection (.close (:datasource (db/conn))) (.stop server))) (defn with-server [t] (let [server (start-server test-port)] (t) (stop-server server))) (defn start-browser [] (set-driver! {:browser :chrome})) (defn stop-browser [] (quit)) (defn with-browser [t] (start-browser) (t) (stop-browser)) (defn with-redefs-fixture [t] (with-redefs [common.config/base-url test-base-url] (t))) ;; beging fns for testing at the repl (defn reset-vars! [] (def base-url (env :base-url)) ;; obviously means that :base-url will use port 5746 (def test-port 5746)) (defn set-server! [] (def server (start-server test-port)) (setup-ebdb-test-pool!)) (defn startup-test-env! [] (reset-vars!) (set-server!) (start-browser)) (defn shutdown-test-env! [] (stop-server server) (stop-browser)) (defn reset-db! [] (clear-and-populate-test-database)) ;; end fns for testing at the repl ;; common elements (def login-email-input {:xpath "//input[@type='text' and @id='email']"}) (def login-button {:xpath "//button[@id='login']"}) ;; common elements (def logout-lg-xpath {:xpath "//ul/li[contains(@class,'hidden-lg')]//a[text()='Logout']"}) (def logout-sm-xpath {:xpath "//ul[contains(@class,'hidden-xs')]/li//a[text()='Logout']"}) ;; fns for controlling the browser (defn get-table-body-cell-text "Given a table, get the text in the table body at row r and column c" [table r c] (let [table-xpath (or (:xpath table) table)] (text {:xpath (str table-xpath "/tbody/tr[position()= " r "]" "/td[position()=" c "]")}))) (defn go-to-uri "Given an uri, go to it" [uri] (to (str base-url uri))) (defn login-dashboard "Login with the client using email and password as credentials" [email password] (go-to-uri "login") (let [email-input (find-element login-email-input) password-input (find-element {:xpath "//input[@type='PI:PASSWORD:<PASSWORD>END_PI']"})] (input-text email-input email) (input-text password-input password) (click 
(find-element login-button)))) (defn logout-dashboard "Logout, assuming the portal has already been logged into" [] (click (if (visible? (find-element logout-lg-xpath)) (find-element logout-lg-xpath) (find-element logout-sm-xpath)))) (defn get-error-alert "Wait for an error alert to appear and test that it says msg" [] (let [alert-danger {:xpath "//div[contains(@class,'alert-danger')]"}] (wait-until #(exists? alert-danger)) (text (find-element alert-danger)))) (defn get-success-alert "Wait for an error alert to appear and test that it says msg" [] (let [alert-danger {:xpath "//div[contains(@class,'alert-success')]"}] (wait-until #(exists? alert-danger)) (text (find-element alert-danger))))
[ { "context": "(ns ^{:author \"Dan Stone <wotbrew@gmail.com>\"}\n com.wotbrew.relic\n \"Func", "end": 24, "score": 0.9998786449432373, "start": 15, "tag": "NAME", "value": "Dan Stone" }, { "context": "(ns ^{:author \"Dan Stone <wotbrew@gmail.com>\"}\n com.wotbrew.relic\n \"Functional relational p", "end": 43, "score": 0.9999270439147949, "start": 26, "tag": "EMAIL", "value": "wotbrew@gmail.com" }, { "context": "`watch` and `unwatch`.\n\n I like to alias :as rel. @wotbrew\"\n (:require [com.wotbrew.relic.impl.dataflow :as", "end": 411, "score": 0.9788396954536438, "start": 403, "tag": "USERNAME", "value": "@wotbrew" }, { "context": "like:\n\n [[:from :Customer]\n [:where [= :name \\\"alice\\\"]]]\n\n Operators quick guide:\n\n [:where & expr]", "end": 5659, "score": 0.6881454586982727, "start": 5654, "tag": "NAME", "value": "alice" } ]
src/com/wotbrew/relic.cljc
wotbrew/tarpit
0
(ns ^{:author "Dan Stone <wotbrew@gmail.com>"} com.wotbrew.relic "Functional relational programming for clojure. Quick hints: a relic database is a map. - put data in with `transact`, see also `what-if`. - get data out with `q`, see also `index`. - go faster maybe with `mat`, see also `demat` - track changes with `track-transact`, `watch` and `unwatch`. I like to alias :as rel. @wotbrew" (:require [com.wotbrew.relic.impl.dataflow :as dataflow] [com.wotbrew.relic.impl.expr :as e])) (defn transact "Return a new relic database, with the transaction applied. Will throw if any constraints are violated at the end of the transaction. Accepts transactional commands as args (tx) You modify relic databases by submitting commands to tables, e.g [:insert :Customer row1, row2]. Commands: Insert with :insert vectors [:insert table row1, row2 ..] Insert or replace (insert or on :unique conflict update by deleting colliding rows and inserting the new ones). [:insert-or-replace table row1, row2 ..] Insert or merge (insert or on :unique conflict update by merging the new row with the old). ;; merge all keys from new row [:insert-or-merge :Customer :* customer1, :customer2] ;; merge a subset [:insert-or-merge :Customer [:firstname, :lastname] customer1, customer2] Insert or update (insert of on :unique conflict update by updating colliding rows using an update fn-or-map). ;; SQL style updates {col expr} [:insert-or-update :Customer {:ts now, :updates inc} customer1, customer2] ;; like :update, you can use a function of a row as an update [:insert-or-update :Customer update-fn customer1, customer2] Insert or ignore on :unique conflict [:insert-ignore :Customer customer1, customer2 ...] Delete rows (exact match) (faster) [:delete-exact table rows] Delete by predicates with :delete vectors [:delete table expr1 expr2 ..] e.g [:delete Customer [< :age 42]] Update rows with :update vectors. [:update table fn-or-map expr1 expr2 .. 
] e.g [:update Customer {:age inc} [< :age 42]] You can use a map as terser multi table insert form: {table [row1, row2 ...], ...} --- Note: As relic stores its state and dataflow graph in metadata, all modifications to the database must be made using relic transact/track-transact - all bets are off otherwise. -- See also track-transact, what-if." [db & tx] (dataflow/transact db tx)) ;; -- ;; query is based of index lookup ;; in certain cases, e.g for :hash or :btree ;; it might be useful for library users to have raw index access. (defn index "Returns the raw index storing rows for the query. ONLY RETURNS IF THE QUERY IS MATERIALIZED. Supported operators: :set will yield a set of rows. :hash will yield nested maps (path being the expressions in the hash e.g [:hash :a :b :c]) will yield an index {(a ?row) {(b ?row) {(:c ?row) #{?row}}} :btree is the same as hash but gives you a sorted map (sorted at each level) instead to enable range queries. :unique will give you an index where the keys map to exactly one row, so [:unique :a :b :c] will yield an index {(a ?row) {(b ?row) {(:c ?row) ?row}}} For all other operators, returns nil." [db query] (dataflow/index db query)) (defn mat "Causes relic to maintain the given queries incrementally as the database changes. This will make queries effectively free at the cost of decreased write performance. Additionally, useful to start maintaining constraints by using queries that throw if invariants are broken. e.g (materialize db [[:from Customer] [:unique :email]]) Constraint quick reference: [:req & cols] Required key checks [:check & check-pred] Throws if rows do not meet the predicates. Accepts predicates of the row (relic expressions / functions) or a map containing :pred and :error, both being relic expressions. e.g: [:check [< :age 32]] would require all :age values be under 32. 
The same predicate with a custom error: [:check {:pred [< :age 32], :error [str \"invalid age, got\" :age ]}] [:fk query|table clause opts] Foreign key, e.g ensure a row exists in the target relation given a join clause. e.g [:fk Customer {:id :id} {:cascade true}] Can use the option :cascade to specify cascading deletes, e.g {:cascade :delete}. [:unique & exprs] Unique constraint, ensures that only one row exists for some combination of relic expressions (e.g columns) e.g [:unique :id] would make sure only one row exists for a given :id value. Allows the use of :insert-or-replace in transact calls. [:constrain & constraint-operations] Lets you combine multiple constraints in one operation. e.g [[:from Customer] [:constrain [:req :id :firstname] [:unique :id]]" [db & queries] (reduce dataflow/materialize db queries)) (defn demat "Dematerializes the query, increasing write performance at the cost of reduced query performance. Can also be used to remove constraints (e.g queries that throw). Note: queries that are being watched with (watch) will continue to be materialized until (unwatch) is called." [db & queries] (reduce dataflow/dematerialize db queries)) (defn q "Queries the db, returns a seq of rows by default. Takes a RQL query vector, or a map form [1]. Queries are relational expressions, they describe some data that you might want. RQL queries are represented as vectors of operations. 
e.g [op1, op2, op3] Each operation is also a vector, a complete query would look like: [[:from :Customer] [:where [= :name \"alice\"]]] Operators quick guide: [:where & expr] [:extend & [col|[& col] expr]] [:expand & [col expr]] [:agg [& group-col] & [col agg-expr]] [:join query {left-col right-col, ...}] [:left-join query {left-col right-col, ...}] [:from query] [:without & col] [:select & col|[col|[& col] expr]] [:difference query] [:union query] [:intersection query] [:qualify namespace-string] [:rename {existing-col new-col, ...}] [:const collection-of-rows] --- Transducing: If you want a different collection back, you can apply a transducer to the relations rows with :xf e.g :xf (map :a) will instead of returning you a collection of rows, will return a collection of (:a row) --- Sorting: Sort with :sort / :rsort Pass either a relic expr (e.g a column keyword or function, or relic vector), or coll of expressions to sort by those expressions. e.g :sort [:a] == sort by :a :sort [:a :b] == sort by :a then :b :sort [[inc a]] == sort by (inc (:a row)) Note: indexes are not used yet for ad-hoc sorts, but you can use rel/index and :btree for that if you are brave. --- [1] map forms can be used to issue multiple queries at once, this allows relic to share indexes and intermediate structures and can be more efficient. {key query|{:q query, :rsort ...}}" ([db query-or-binds] (if (map? query-or-binds) (let [queries (keep (fn [q] (if (map? q) (:q q) q)) (vals query-or-binds)) missing (remove #(dataflow/materialized? db %) queries) db (reduce dataflow/materialize db missing)] (reduce-kv (fn [m k qr] (if (map? 
q) (assoc m k (q db (:q qr) qr)) (assoc m k (q db qr)))) {} query-or-binds)) (dataflow/q db query-or-binds))) ([db query opts] (let [{:keys [sort rsort xf] into-coll :into} opts sort* (or sort rsort) sort-fns (mapv e/row-fn sort*) rs (dataflow/qraw db query) sort-fn (when (seq sort-fns) (if (= 1 (count sort-fns)) (first sort-fns) (apply juxt sort-fns))) rs (cond sort (sort-by sort-fn rs) rsort (sort-by sort-fn (fn [a b] (compare b a)) rs) :else rs) rs (cond into-coll (if xf (into into-coll xf rs) (into into-coll rs)) xf (sequence xf rs) :else (seq rs))] rs))) (defn what-if "Returns the relation for query if you were to apply the transactions with transact. Because databases are immutable, it's not hard to do this anyway with q & transact. This is just sugar." [db query & tx] (q (dataflow/transact db tx) query)) ;; -- ;; change tracking api (defn watch "Establishes watches on the queries, watched queries are change tracked for subsequent transactions such that track-transact will return changes to those queries in its results. Returns a new database. See track-transact. Remove watches with unwatch." [db & queries] (reduce dataflow/watch db queries)) (defn unwatch "Removes a watched query, changes for that query will no longer be tracked. Potentially dematerializes the query if it was only materialized to maintain the watch. See track-transact." [db & queries] (reduce dataflow/unwatch db queries)) (defn track-transact "Like transact, but instead of returning you a database, returns a map of :result the result of (apply transact db tx) :changes a map of {query {:added [row1, row2 ...], :deleted [row1, row2, ..]}, ..} The :changes allow you to react to additions/removals from derived queries, and build reactive systems." [db & tx] (dataflow/track-transact db tx)) ;; -- ;; query analysis (defn dependencies "Returns the (table name) dependencies of the query, e.g what tables it could be affected by." 
[query] (dataflow/dependencies query)) ;; -- ;; min/max (defn max-by "A relic agg function that returns the greatest row by some function. e.g [rel/max-by :a] will return the row for which :a is biggest." [expr] (dataflow/max-by expr)) (defn min-by "A relic agg function that returns the smallest row by some function. e.g [rel/min-by :a] will return the row for which :a is smallest." [expr] (dataflow/min-by expr)) ;; -- ;; sum (def ^:private sum-add-fn #?(:clj +' :cljs +)) (defn sum "A relic agg function that returns the sum of the expressions across each row. e.g [rel/sum :a] will return the sum of (:a row) applied to each row in the aggregation." [& exprs] (case (count exprs) 0 {:combiner (constantly 0) :reducer (constantly 0)} 1 (let [expr (first exprs) f (e/row-fn expr) xf (keep f)] {:combiner sum-add-fn :reducer #(transduce xf sum-add-fn %)}) (let [fns (map e/row-fn exprs) nums (apply juxt fns) xf (comp (mapcat nums) (remove nil?))] {:combiner sum-add-fn :reducer #(transduce xf sum-add-fn %)}))) ;; -- ;; set-concat (defn set-concat [expr] (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding identity)]))})) ;; -- ;; count-distinct (defn count-distinct [& exprs] (let [expr (if (= 1 (count exprs)) (first exprs) (into [vector] exprs)) f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding count)]))})) ;; -- ;; avg (defn avg "An aggregation that returns the statistical average, uses `/` so return type depends on operands, You might get a Ratio, like clojure." [expr] {:custom-node (fn [left cols [binding]] (conj left [:agg cols [:s [sum expr]] [:n count]] (into [:select-unsafe [binding [/ :s [:if [pos? :n] :n 1]]]] cols)))}) ;; -- ;; any, like some but this one is called any. 
(defn any "An aggregate function that binds true if any row has a truthy value of expr, false if not." [expr] (let [f (e/row-fn expr)] {:reducer #(some f %) :combiner #(and %1 %2) :complete boolean})) (defn not-any "An aggregate function that binds false if any row has a truthy value of expr, true if not." [expr] (let [f (e/row-fn expr)] {:reducer #(not-any? f %) :combiner #(and %1 %2) :complete boolean})) ;; -- ;; top / bottom (defn top-by "An aggregate function that binds the n rows with the highest values for expr across the group. Materialization can be slow if the n parameter is large, use for small summaries." [n expr] (assert (nat-int? n) "top requires a 0 or positive integer arg first") (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/sorted-group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding #(into [] (comp (mapcat val) (take n)) (rseq %)))]))})) (defn bottom-by "An aggregate function that binds the n rows with the lowest values for expr across the group. Materialization can be slow if the n parameter is large, use for small summaries." [n expr] (assert (nat-int? n) "bottom requires a 0 or positive integer arg first") (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/sorted-group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding #(into [] (comp (mapcat val) (take n)) (seq %)))]))})) (defn top "An aggregate function that binds the highest n values for the expr across the group. Materialization can be slow if the n parameter is large, use for small summaries." [n expr] (assert (nat-int? n) "top requires a 0 or positive integer arg first") (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/sorted-group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding #(into [] (comp (map key) (take n)) (rseq %)))]))})) (defn bottom "An aggregate function that binds the lowest n values for the expr across the group. 
Materialization can be slow if the n parameter is large, use for small summaries." [n expr] (assert (nat-int? n) "bottom requires a 0 or positive integer arg first") (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/sorted-group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding #(into [] (comp (map key) (take n)) (seq %)))]))})) ;; -- ;; env api (defn get-env [db] (first (q db ::e/Env))) (defn set-env-tx [env] [:replace-all ::e/Env {::e/env env}]) (defn with-env [db env] (transact db (set-env-tx env))) (defn update-env [db f & args] (with-env db (apply f (get-env db) args))) ;; -- ;; functions for going back and forth between 'normal maps' and relic (defn strip-meta "Given a relic database map, removes any relic meta data." [db] (vary-meta db (comp not-empty dissoc) ::dataflow/graph)) ;; -- ;; relic expr sentinels ;; these sentinels can be used for unsafe query expansions such as ;; sub-select and env ;; the reason these use sentinels rather than say keywords is that ;; it would be possible to exfiltrate data using untrusted edn if we allowed ;; keywords. (def sel1 "sub-select-first relic expr function. You can use this in the prefix position to do a sub select and bind the first row. e.g [rel/sel1 :OrderItem {:o/order-id :i/order-id}] See also: sel" e/sub-select-first) (def sel "sub-select relic expr function. You can use this in the prefix position to do a sub select and bind all rows as a set. e.g [rel/sel :OrderItem {:o/order-id :i/order-id}] See also: sel1" e/sub-select) (def env "env get relic expr function. You can use this in the prefix position (of relic expressions) to bind the value of an environment param. e.g [rel/env :now]" e/env)
3699
(ns ^{:author "<NAME> <<EMAIL>>"} com.wotbrew.relic "Functional relational programming for clojure. Quick hints: a relic database is a map. - put data in with `transact`, see also `what-if`. - get data out with `q`, see also `index`. - go faster maybe with `mat`, see also `demat` - track changes with `track-transact`, `watch` and `unwatch`. I like to alias :as rel. @wotbrew" (:require [com.wotbrew.relic.impl.dataflow :as dataflow] [com.wotbrew.relic.impl.expr :as e])) (defn transact "Return a new relic database, with the transaction applied. Will throw if any constraints are violated at the end of the transaction. Accepts transactional commands as args (tx) You modify relic databases by submitting commands to tables, e.g [:insert :Customer row1, row2]. Commands: Insert with :insert vectors [:insert table row1, row2 ..] Insert or replace (insert or on :unique conflict update by deleting colliding rows and inserting the new ones). [:insert-or-replace table row1, row2 ..] Insert or merge (insert or on :unique conflict update by merging the new row with the old). ;; merge all keys from new row [:insert-or-merge :Customer :* customer1, :customer2] ;; merge a subset [:insert-or-merge :Customer [:firstname, :lastname] customer1, customer2] Insert or update (insert of on :unique conflict update by updating colliding rows using an update fn-or-map). ;; SQL style updates {col expr} [:insert-or-update :Customer {:ts now, :updates inc} customer1, customer2] ;; like :update, you can use a function of a row as an update [:insert-or-update :Customer update-fn customer1, customer2] Insert or ignore on :unique conflict [:insert-ignore :Customer customer1, customer2 ...] Delete rows (exact match) (faster) [:delete-exact table rows] Delete by predicates with :delete vectors [:delete table expr1 expr2 ..] e.g [:delete Customer [< :age 42]] Update rows with :update vectors. [:update table fn-or-map expr1 expr2 .. 
] e.g [:update Customer {:age inc} [< :age 42]] You can use a map as terser multi table insert form: {table [row1, row2 ...], ...} --- Note: As relic stores its state and dataflow graph in metadata, all modifications to the database must be made using relic transact/track-transact - all bets are off otherwise. -- See also track-transact, what-if." [db & tx] (dataflow/transact db tx)) ;; -- ;; query is based of index lookup ;; in certain cases, e.g for :hash or :btree ;; it might be useful for library users to have raw index access. (defn index "Returns the raw index storing rows for the query. ONLY RETURNS IF THE QUERY IS MATERIALIZED. Supported operators: :set will yield a set of rows. :hash will yield nested maps (path being the expressions in the hash e.g [:hash :a :b :c]) will yield an index {(a ?row) {(b ?row) {(:c ?row) #{?row}}} :btree is the same as hash but gives you a sorted map (sorted at each level) instead to enable range queries. :unique will give you an index where the keys map to exactly one row, so [:unique :a :b :c] will yield an index {(a ?row) {(b ?row) {(:c ?row) ?row}}} For all other operators, returns nil." [db query] (dataflow/index db query)) (defn mat "Causes relic to maintain the given queries incrementally as the database changes. This will make queries effectively free at the cost of decreased write performance. Additionally, useful to start maintaining constraints by using queries that throw if invariants are broken. e.g (materialize db [[:from Customer] [:unique :email]]) Constraint quick reference: [:req & cols] Required key checks [:check & check-pred] Throws if rows do not meet the predicates. Accepts predicates of the row (relic expressions / functions) or a map containing :pred and :error, both being relic expressions. e.g: [:check [< :age 32]] would require all :age values be under 32. 
The same predicate with a custom error: [:check {:pred [< :age 32], :error [str \"invalid age, got\" :age ]}] [:fk query|table clause opts] Foreign key, e.g ensure a row exists in the target relation given a join clause. e.g [:fk Customer {:id :id} {:cascade true}] Can use the option :cascade to specify cascading deletes, e.g {:cascade :delete}. [:unique & exprs] Unique constraint, ensures that only one row exists for some combination of relic expressions (e.g columns) e.g [:unique :id] would make sure only one row exists for a given :id value. Allows the use of :insert-or-replace in transact calls. [:constrain & constraint-operations] Lets you combine multiple constraints in one operation. e.g [[:from Customer] [:constrain [:req :id :firstname] [:unique :id]]" [db & queries] (reduce dataflow/materialize db queries)) (defn demat "Dematerializes the query, increasing write performance at the cost of reduced query performance. Can also be used to remove constraints (e.g queries that throw). Note: queries that are being watched with (watch) will continue to be materialized until (unwatch) is called." [db & queries] (reduce dataflow/dematerialize db queries)) (defn q "Queries the db, returns a seq of rows by default. Takes a RQL query vector, or a map form [1]. Queries are relational expressions, they describe some data that you might want. RQL queries are represented as vectors of operations. 
e.g [op1, op2, op3] Each operation is also a vector, a complete query would look like: [[:from :Customer] [:where [= :name \"<NAME>\"]]] Operators quick guide: [:where & expr] [:extend & [col|[& col] expr]] [:expand & [col expr]] [:agg [& group-col] & [col agg-expr]] [:join query {left-col right-col, ...}] [:left-join query {left-col right-col, ...}] [:from query] [:without & col] [:select & col|[col|[& col] expr]] [:difference query] [:union query] [:intersection query] [:qualify namespace-string] [:rename {existing-col new-col, ...}] [:const collection-of-rows] --- Transducing: If you want a different collection back, you can apply a transducer to the relations rows with :xf e.g :xf (map :a) will instead of returning you a collection of rows, will return a collection of (:a row) --- Sorting: Sort with :sort / :rsort Pass either a relic expr (e.g a column keyword or function, or relic vector), or coll of expressions to sort by those expressions. e.g :sort [:a] == sort by :a :sort [:a :b] == sort by :a then :b :sort [[inc a]] == sort by (inc (:a row)) Note: indexes are not used yet for ad-hoc sorts, but you can use rel/index and :btree for that if you are brave. --- [1] map forms can be used to issue multiple queries at once, this allows relic to share indexes and intermediate structures and can be more efficient. {key query|{:q query, :rsort ...}}" ([db query-or-binds] (if (map? query-or-binds) (let [queries (keep (fn [q] (if (map? q) (:q q) q)) (vals query-or-binds)) missing (remove #(dataflow/materialized? db %) queries) db (reduce dataflow/materialize db missing)] (reduce-kv (fn [m k qr] (if (map? 
q) (assoc m k (q db (:q qr) qr)) (assoc m k (q db qr)))) {} query-or-binds)) (dataflow/q db query-or-binds))) ([db query opts] (let [{:keys [sort rsort xf] into-coll :into} opts sort* (or sort rsort) sort-fns (mapv e/row-fn sort*) rs (dataflow/qraw db query) sort-fn (when (seq sort-fns) (if (= 1 (count sort-fns)) (first sort-fns) (apply juxt sort-fns))) rs (cond sort (sort-by sort-fn rs) rsort (sort-by sort-fn (fn [a b] (compare b a)) rs) :else rs) rs (cond into-coll (if xf (into into-coll xf rs) (into into-coll rs)) xf (sequence xf rs) :else (seq rs))] rs))) (defn what-if "Returns the relation for query if you were to apply the transactions with transact. Because databases are immutable, it's not hard to do this anyway with q & transact. This is just sugar." [db query & tx] (q (dataflow/transact db tx) query)) ;; -- ;; change tracking api (defn watch "Establishes watches on the queries, watched queries are change tracked for subsequent transactions such that track-transact will return changes to those queries in its results. Returns a new database. See track-transact. Remove watches with unwatch." [db & queries] (reduce dataflow/watch db queries)) (defn unwatch "Removes a watched query, changes for that query will no longer be tracked. Potentially dematerializes the query if it was only materialized to maintain the watch. See track-transact." [db & queries] (reduce dataflow/unwatch db queries)) (defn track-transact "Like transact, but instead of returning you a database, returns a map of :result the result of (apply transact db tx) :changes a map of {query {:added [row1, row2 ...], :deleted [row1, row2, ..]}, ..} The :changes allow you to react to additions/removals from derived queries, and build reactive systems." [db & tx] (dataflow/track-transact db tx)) ;; -- ;; query analysis (defn dependencies "Returns the (table name) dependencies of the query, e.g what tables it could be affected by." 
[query] (dataflow/dependencies query)) ;; -- ;; min/max (defn max-by "A relic agg function that returns the greatest row by some function. e.g [rel/max-by :a] will return the row for which :a is biggest." [expr] (dataflow/max-by expr)) (defn min-by "A relic agg function that returns the smallest row by some function. e.g [rel/min-by :a] will return the row for which :a is smallest." [expr] (dataflow/min-by expr)) ;; -- ;; sum (def ^:private sum-add-fn #?(:clj +' :cljs +)) (defn sum "A relic agg function that returns the sum of the expressions across each row. e.g [rel/sum :a] will return the sum of (:a row) applied to each row in the aggregation." [& exprs] (case (count exprs) 0 {:combiner (constantly 0) :reducer (constantly 0)} 1 (let [expr (first exprs) f (e/row-fn expr) xf (keep f)] {:combiner sum-add-fn :reducer #(transduce xf sum-add-fn %)}) (let [fns (map e/row-fn exprs) nums (apply juxt fns) xf (comp (mapcat nums) (remove nil?))] {:combiner sum-add-fn :reducer #(transduce xf sum-add-fn %)}))) ;; -- ;; set-concat (defn set-concat [expr] (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding identity)]))})) ;; -- ;; count-distinct (defn count-distinct [& exprs] (let [expr (if (= 1 (count exprs)) (first exprs) (into [vector] exprs)) f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding count)]))})) ;; -- ;; avg (defn avg "An aggregation that returns the statistical average, uses `/` so return type depends on operands, You might get a Ratio, like clojure." [expr] {:custom-node (fn [left cols [binding]] (conj left [:agg cols [:s [sum expr]] [:n count]] (into [:select-unsafe [binding [/ :s [:if [pos? :n] :n 1]]]] cols)))}) ;; -- ;; any, like some but this one is called any. 
(defn any "An aggregate function that binds true if any row has a truthy value of expr, false if not." [expr] (let [f (e/row-fn expr)] {:reducer #(some f %) :combiner #(and %1 %2) :complete boolean})) (defn not-any "An aggregate function that binds false if any row has a truthy value of expr, true if not." [expr] (let [f (e/row-fn expr)] {:reducer #(not-any? f %) :combiner #(and %1 %2) :complete boolean})) ;; -- ;; top / bottom (defn top-by "An aggregate function that binds the n rows with the highest values for expr across the group. Materialization can be slow if the n parameter is large, use for small summaries." [n expr] (assert (nat-int? n) "top requires a 0 or positive integer arg first") (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/sorted-group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding #(into [] (comp (mapcat val) (take n)) (rseq %)))]))})) (defn bottom-by "An aggregate function that binds the n rows with the lowest values for expr across the group. Materialization can be slow if the n parameter is large, use for small summaries." [n expr] (assert (nat-int? n) "bottom requires a 0 or positive integer arg first") (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/sorted-group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding #(into [] (comp (mapcat val) (take n)) (seq %)))]))})) (defn top "An aggregate function that binds the highest n values for the expr across the group. Materialization can be slow if the n parameter is large, use for small summaries." [n expr] (assert (nat-int? n) "top requires a 0 or positive integer arg first") (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/sorted-group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding #(into [] (comp (map key) (take n)) (rseq %)))]))})) (defn bottom "An aggregate function that binds the lowest n values for the expr across the group. 
Materialization can be slow if the n parameter is large, use for small summaries." [n expr] (assert (nat-int? n) "bottom requires a 0 or positive integer arg first") (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/sorted-group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding #(into [] (comp (map key) (take n)) (seq %)))]))})) ;; -- ;; env api (defn get-env [db] (first (q db ::e/Env))) (defn set-env-tx [env] [:replace-all ::e/Env {::e/env env}]) (defn with-env [db env] (transact db (set-env-tx env))) (defn update-env [db f & args] (with-env db (apply f (get-env db) args))) ;; -- ;; functions for going back and forth between 'normal maps' and relic (defn strip-meta "Given a relic database map, removes any relic meta data." [db] (vary-meta db (comp not-empty dissoc) ::dataflow/graph)) ;; -- ;; relic expr sentinels ;; these sentinels can be used for unsafe query expansions such as ;; sub-select and env ;; the reason these use sentinels rather than say keywords is that ;; it would be possible to exfiltrate data using untrusted edn if we allowed ;; keywords. (def sel1 "sub-select-first relic expr function. You can use this in the prefix position to do a sub select and bind the first row. e.g [rel/sel1 :OrderItem {:o/order-id :i/order-id}] See also: sel" e/sub-select-first) (def sel "sub-select relic expr function. You can use this in the prefix position to do a sub select and bind all rows as a set. e.g [rel/sel :OrderItem {:o/order-id :i/order-id}] See also: sel1" e/sub-select) (def env "env get relic expr function. You can use this in the prefix position (of relic expressions) to bind the value of an environment param. e.g [rel/env :now]" e/env)
true
(ns ^{:author "PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>"} com.wotbrew.relic "Functional relational programming for clojure. Quick hints: a relic database is a map. - put data in with `transact`, see also `what-if`. - get data out with `q`, see also `index`. - go faster maybe with `mat`, see also `demat` - track changes with `track-transact`, `watch` and `unwatch`. I like to alias :as rel. @wotbrew" (:require [com.wotbrew.relic.impl.dataflow :as dataflow] [com.wotbrew.relic.impl.expr :as e])) (defn transact "Return a new relic database, with the transaction applied. Will throw if any constraints are violated at the end of the transaction. Accepts transactional commands as args (tx) You modify relic databases by submitting commands to tables, e.g [:insert :Customer row1, row2]. Commands: Insert with :insert vectors [:insert table row1, row2 ..] Insert or replace (insert or on :unique conflict update by deleting colliding rows and inserting the new ones). [:insert-or-replace table row1, row2 ..] Insert or merge (insert or on :unique conflict update by merging the new row with the old). ;; merge all keys from new row [:insert-or-merge :Customer :* customer1, :customer2] ;; merge a subset [:insert-or-merge :Customer [:firstname, :lastname] customer1, customer2] Insert or update (insert of on :unique conflict update by updating colliding rows using an update fn-or-map). ;; SQL style updates {col expr} [:insert-or-update :Customer {:ts now, :updates inc} customer1, customer2] ;; like :update, you can use a function of a row as an update [:insert-or-update :Customer update-fn customer1, customer2] Insert or ignore on :unique conflict [:insert-ignore :Customer customer1, customer2 ...] Delete rows (exact match) (faster) [:delete-exact table rows] Delete by predicates with :delete vectors [:delete table expr1 expr2 ..] e.g [:delete Customer [< :age 42]] Update rows with :update vectors. [:update table fn-or-map expr1 expr2 .. 
] e.g [:update Customer {:age inc} [< :age 42]] You can use a map as terser multi table insert form: {table [row1, row2 ...], ...} --- Note: As relic stores its state and dataflow graph in metadata, all modifications to the database must be made using relic transact/track-transact - all bets are off otherwise. -- See also track-transact, what-if." [db & tx] (dataflow/transact db tx)) ;; -- ;; query is based of index lookup ;; in certain cases, e.g for :hash or :btree ;; it might be useful for library users to have raw index access. (defn index "Returns the raw index storing rows for the query. ONLY RETURNS IF THE QUERY IS MATERIALIZED. Supported operators: :set will yield a set of rows. :hash will yield nested maps (path being the expressions in the hash e.g [:hash :a :b :c]) will yield an index {(a ?row) {(b ?row) {(:c ?row) #{?row}}} :btree is the same as hash but gives you a sorted map (sorted at each level) instead to enable range queries. :unique will give you an index where the keys map to exactly one row, so [:unique :a :b :c] will yield an index {(a ?row) {(b ?row) {(:c ?row) ?row}}} For all other operators, returns nil." [db query] (dataflow/index db query)) (defn mat "Causes relic to maintain the given queries incrementally as the database changes. This will make queries effectively free at the cost of decreased write performance. Additionally, useful to start maintaining constraints by using queries that throw if invariants are broken. e.g (materialize db [[:from Customer] [:unique :email]]) Constraint quick reference: [:req & cols] Required key checks [:check & check-pred] Throws if rows do not meet the predicates. Accepts predicates of the row (relic expressions / functions) or a map containing :pred and :error, both being relic expressions. e.g: [:check [< :age 32]] would require all :age values be under 32. 
The same predicate with a custom error: [:check {:pred [< :age 32], :error [str \"invalid age, got\" :age ]}] [:fk query|table clause opts] Foreign key, e.g ensure a row exists in the target relation given a join clause. e.g [:fk Customer {:id :id} {:cascade true}] Can use the option :cascade to specify cascading deletes, e.g {:cascade :delete}. [:unique & exprs] Unique constraint, ensures that only one row exists for some combination of relic expressions (e.g columns) e.g [:unique :id] would make sure only one row exists for a given :id value. Allows the use of :insert-or-replace in transact calls. [:constrain & constraint-operations] Lets you combine multiple constraints in one operation. e.g [[:from Customer] [:constrain [:req :id :firstname] [:unique :id]]" [db & queries] (reduce dataflow/materialize db queries)) (defn demat "Dematerializes the query, increasing write performance at the cost of reduced query performance. Can also be used to remove constraints (e.g queries that throw). Note: queries that are being watched with (watch) will continue to be materialized until (unwatch) is called." [db & queries] (reduce dataflow/dematerialize db queries)) (defn q "Queries the db, returns a seq of rows by default. Takes a RQL query vector, or a map form [1]. Queries are relational expressions, they describe some data that you might want. RQL queries are represented as vectors of operations. 
e.g [op1, op2, op3] Each operation is also a vector, a complete query would look like: [[:from :Customer] [:where [= :name \"PI:NAME:<NAME>END_PI\"]]] Operators quick guide: [:where & expr] [:extend & [col|[& col] expr]] [:expand & [col expr]] [:agg [& group-col] & [col agg-expr]] [:join query {left-col right-col, ...}] [:left-join query {left-col right-col, ...}] [:from query] [:without & col] [:select & col|[col|[& col] expr]] [:difference query] [:union query] [:intersection query] [:qualify namespace-string] [:rename {existing-col new-col, ...}] [:const collection-of-rows] --- Transducing: If you want a different collection back, you can apply a transducer to the relations rows with :xf e.g :xf (map :a) will instead of returning you a collection of rows, will return a collection of (:a row) --- Sorting: Sort with :sort / :rsort Pass either a relic expr (e.g a column keyword or function, or relic vector), or coll of expressions to sort by those expressions. e.g :sort [:a] == sort by :a :sort [:a :b] == sort by :a then :b :sort [[inc a]] == sort by (inc (:a row)) Note: indexes are not used yet for ad-hoc sorts, but you can use rel/index and :btree for that if you are brave. --- [1] map forms can be used to issue multiple queries at once, this allows relic to share indexes and intermediate structures and can be more efficient. {key query|{:q query, :rsort ...}}" ([db query-or-binds] (if (map? query-or-binds) (let [queries (keep (fn [q] (if (map? q) (:q q) q)) (vals query-or-binds)) missing (remove #(dataflow/materialized? db %) queries) db (reduce dataflow/materialize db missing)] (reduce-kv (fn [m k qr] (if (map? 
q) (assoc m k (q db (:q qr) qr)) (assoc m k (q db qr)))) {} query-or-binds)) (dataflow/q db query-or-binds))) ([db query opts] (let [{:keys [sort rsort xf] into-coll :into} opts sort* (or sort rsort) sort-fns (mapv e/row-fn sort*) rs (dataflow/qraw db query) sort-fn (when (seq sort-fns) (if (= 1 (count sort-fns)) (first sort-fns) (apply juxt sort-fns))) rs (cond sort (sort-by sort-fn rs) rsort (sort-by sort-fn (fn [a b] (compare b a)) rs) :else rs) rs (cond into-coll (if xf (into into-coll xf rs) (into into-coll rs)) xf (sequence xf rs) :else (seq rs))] rs))) (defn what-if "Returns the relation for query if you were to apply the transactions with transact. Because databases are immutable, it's not hard to do this anyway with q & transact. This is just sugar." [db query & tx] (q (dataflow/transact db tx) query)) ;; -- ;; change tracking api (defn watch "Establishes watches on the queries, watched queries are change tracked for subsequent transactions such that track-transact will return changes to those queries in its results. Returns a new database. See track-transact. Remove watches with unwatch." [db & queries] (reduce dataflow/watch db queries)) (defn unwatch "Removes a watched query, changes for that query will no longer be tracked. Potentially dematerializes the query if it was only materialized to maintain the watch. See track-transact." [db & queries] (reduce dataflow/unwatch db queries)) (defn track-transact "Like transact, but instead of returning you a database, returns a map of :result the result of (apply transact db tx) :changes a map of {query {:added [row1, row2 ...], :deleted [row1, row2, ..]}, ..} The :changes allow you to react to additions/removals from derived queries, and build reactive systems." [db & tx] (dataflow/track-transact db tx)) ;; -- ;; query analysis (defn dependencies "Returns the (table name) dependencies of the query, e.g what tables it could be affected by." 
[query] (dataflow/dependencies query)) ;; -- ;; min/max (defn max-by "A relic agg function that returns the greatest row by some function. e.g [rel/max-by :a] will return the row for which :a is biggest." [expr] (dataflow/max-by expr)) (defn min-by "A relic agg function that returns the smallest row by some function. e.g [rel/min-by :a] will return the row for which :a is smallest." [expr] (dataflow/min-by expr)) ;; -- ;; sum (def ^:private sum-add-fn #?(:clj +' :cljs +)) (defn sum "A relic agg function that returns the sum of the expressions across each row. e.g [rel/sum :a] will return the sum of (:a row) applied to each row in the aggregation." [& exprs] (case (count exprs) 0 {:combiner (constantly 0) :reducer (constantly 0)} 1 (let [expr (first exprs) f (e/row-fn expr) xf (keep f)] {:combiner sum-add-fn :reducer #(transduce xf sum-add-fn %)}) (let [fns (map e/row-fn exprs) nums (apply juxt fns) xf (comp (mapcat nums) (remove nil?))] {:combiner sum-add-fn :reducer #(transduce xf sum-add-fn %)}))) ;; -- ;; set-concat (defn set-concat [expr] (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding identity)]))})) ;; -- ;; count-distinct (defn count-distinct [& exprs] (let [expr (if (= 1 (count exprs)) (first exprs) (into [vector] exprs)) f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding count)]))})) ;; -- ;; avg (defn avg "An aggregation that returns the statistical average, uses `/` so return type depends on operands, You might get a Ratio, like clojure." [expr] {:custom-node (fn [left cols [binding]] (conj left [:agg cols [:s [sum expr]] [:n count]] (into [:select-unsafe [binding [/ :s [:if [pos? :n] :n 1]]]] cols)))}) ;; -- ;; any, like some but this one is called any. 
(defn any "An aggregate function that binds true if any row has a truthy value of expr, false if not." [expr] (let [f (e/row-fn expr)] {:reducer #(some f %) :combiner #(and %1 %2) :complete boolean})) (defn not-any "An aggregate function that binds false if any row has a truthy value of expr, true if not." [expr] (let [f (e/row-fn expr)] {:reducer #(not-any? f %) :combiner #(and %1 %2) :complete boolean})) ;; -- ;; top / bottom (defn top-by "An aggregate function that binds the n rows with the highest values for expr across the group. Materialization can be slow if the n parameter is large, use for small summaries." [n expr] (assert (nat-int? n) "top requires a 0 or positive integer arg first") (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/sorted-group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding #(into [] (comp (mapcat val) (take n)) (rseq %)))]))})) (defn bottom-by "An aggregate function that binds the n rows with the lowest values for expr across the group. Materialization can be slow if the n parameter is large, use for small summaries." [n expr] (assert (nat-int? n) "bottom requires a 0 or positive integer arg first") (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/sorted-group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding #(into [] (comp (mapcat val) (take n)) (seq %)))]))})) (defn top "An aggregate function that binds the highest n values for the expr across the group. Materialization can be slow if the n parameter is large, use for small summaries." [n expr] (assert (nat-int? n) "top requires a 0 or positive integer arg first") (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/sorted-group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding #(into [] (comp (map key) (take n)) (rseq %)))]))})) (defn bottom "An aggregate function that binds the lowest n values for the expr across the group. 
Materialization can be slow if the n parameter is large, use for small summaries." [n expr] (assert (nat-int? n) "bottom requires a 0 or positive integer arg first") (let [f (e/row-fn expr)] {:custom-node (fn [left cols [binding]] (conj left [dataflow/sorted-group cols f] [dataflow/transform-unsafe (dataflow/bind-group binding #(into [] (comp (map key) (take n)) (seq %)))]))})) ;; -- ;; env api (defn get-env [db] (first (q db ::e/Env))) (defn set-env-tx [env] [:replace-all ::e/Env {::e/env env}]) (defn with-env [db env] (transact db (set-env-tx env))) (defn update-env [db f & args] (with-env db (apply f (get-env db) args))) ;; -- ;; functions for going back and forth between 'normal maps' and relic (defn strip-meta "Given a relic database map, removes any relic meta data." [db] (vary-meta db (comp not-empty dissoc) ::dataflow/graph)) ;; -- ;; relic expr sentinels ;; these sentinels can be used for unsafe query expansions such as ;; sub-select and env ;; the reason these use sentinels rather than say keywords is that ;; it would be possible to exfiltrate data using untrusted edn if we allowed ;; keywords. (def sel1 "sub-select-first relic expr function. You can use this in the prefix position to do a sub select and bind the first row. e.g [rel/sel1 :OrderItem {:o/order-id :i/order-id}] See also: sel" e/sub-select-first) (def sel "sub-select relic expr function. You can use this in the prefix position to do a sub select and bind all rows as a set. e.g [rel/sel :OrderItem {:o/order-id :i/order-id}] See also: sel1" e/sub-select) (def env "env get relic expr function. You can use this in the prefix position (of relic expressions) to bind the value of an environment param. e.g [rel/env :now]" e/env)
[ { "context": "uid\"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb\", :name \"Björn\"}\n {:id #uuid\"001e7a76-9c45-49a9-865c", "end": 716, "score": 0.9995867609977722, "start": 711, "tag": "NAME", "value": "Björn" }, { "context": "uid\"001e7a76-9c45-49a9-865c-a631641544dd\", :name \"Martin\"}\n {:id #uuid\"0012d971-6b09-4be3-a5ac", "end": 795, "score": 0.9994612336158752, "start": 789, "tag": "NAME", "value": "Martin" }, { "context": "uid\"0012d971-6b09-4be3-a5ac-f02f2352a235\", :name \"Christian\"}\n {:id #uuid\"001e7a7e-3eb2-4226-b9ab", "end": 877, "score": 0.9993107914924622, "start": 868, "tag": "NAME", "value": "Christian" }, { "context": "uid\"001e7a7e-3eb2-4226-b9ab-36dddcf64106\", :name \"Markus\"}\n {:id #uuid\"0000004a-e4fd-420c-ba19", "end": 956, "score": 0.9994907379150391, "start": 950, "tag": "NAME", "value": "Markus" }, { "context": "uid\"0000004a-e4fd-420c-ba19-6de5b59c702d\", :name \"Jan\"}\n {:id #uuid\"000aa0e2-e4d6-463d-ae7c", "end": 1032, "score": 0.9998610019683838, "start": 1029, "tag": "NAME", "value": "Jan" }, { "context": "uid\"000aa0e2-e4d6-463d-ae7c-46765e13a31b\", :name \"Alexander\"}\n {:id #uuid\"00000956-b2e2-4285-ac73", "end": 1114, "score": 0.9991700649261475, "start": 1105, "tag": "NAME", "value": "Alexander" }, { "context": "uid\"00000956-b2e2-4285-ac73-1414ec692b0c\", :name \"Marc\"}]]\n (assoc (user/tx-map {::user/id id\n ", "end": 1191, "score": 0.9998067617416382, "start": 1187, "tag": "NAME", "value": "Marc" }, { "context": "sion ist private\"\n :moderators [\"Björn\"]\n :participants [\"Markus\"]\n ", "end": 1591, "score": 0.9998419284820557, "start": 1586, "tag": "NAME", "value": "Björn" }, { "context": "ators [\"Björn\"]\n :participants [\"Markus\"]\n :proposals\n ", "end": 1634, "score": 0.9998055696487427, "start": 1628, "tag": "NAME", "value": "Markus" }, { "context": "ocess/latest-id 5,\n ::process/moderators [\"Björn\"]\n #_#_::process/end-time #inst\"2021-02-01", "end": 3438, "score": 0.9998739361763, "start": 3433, "tag": 
"NAME", "value": "Björn" }, { "context": "/nice-id 1,\n ::proposal/original-author \"Christian\",\n ::proposal/title \"Wir sollten einen W", "end": 3828, "score": 0.9998332262039185, "start": 3819, "tag": "NAME", "value": "Christian" }, { "context": "/nice-id 2,\n ::proposal/original-author \"Martin\",\n ::proposal/parents [\"Wasserspender\"],", "end": 4267, "score": 0.9998388290405273, "start": 4261, "tag": "NAME", "value": "Martin" }, { "context": "e-id 3\n :original-author \"Jan\"}\n proposal/tx-map (assoc :db/id \"3d-dr", "end": 4799, "score": 0.9997478127479553, "start": 4796, "tag": "NAME", "value": "Jan" }, { "context": "00:00\"\n :original-author \"Marc\"}\n proposal/tx-map (assoc :db/id \"hacke", "end": 5334, "score": 0.9997799396514893, "start": 5330, "tag": "NAME", "value": "Marc" }, { "context": "pace\"]\n :original-author \"Markus\"}\n proposal/tx-map (assoc :db/id \"hacke", "end": 6010, "score": 0.9997488856315613, "start": 6004, "tag": "NAME", "value": "Markus" }, { "context": "users-who-agree]\n {\"Wasserspender\" [\"Christian\" \"Martin\" \"Alexander\" \"Jan\"]\n \"gold", "end": 6179, "score": 0.9998396635055542, "start": 6170, "tag": "NAME", "value": "Christian" }, { "context": "ree]\n {\"Wasserspender\" [\"Christian\" \"Martin\" \"Alexander\" \"Jan\"]\n \"goldener Wass", "end": 6188, "score": 0.9998180270195007, "start": 6182, "tag": "NAME", "value": "Martin" }, { "context": " {\"Wasserspender\" [\"Christian\" \"Martin\" \"Alexander\" \"Jan\"]\n \"goldener Wasserspender\" [", "end": 6200, "score": 0.9998248815536499, "start": 6191, "tag": "NAME", "value": "Alexander" }, { "context": "Wasserspender\" [\"Christian\" \"Martin\" \"Alexander\" \"Jan\"]\n \"goldener Wasserspender\" [\"Marti", "end": 6206, "score": 0.9997333884239197, "start": 6203, "tag": "NAME", "value": "Jan" }, { "context": "artin\" \"Alexander\" \"Jan\"]\n \"goldener Wasserspender\" [\"Martin\" \"Alexander\"]\n \"3d-drucke", "end": 6247, "score": 0.832119345664978, "start": 6234, 
"tag": "NAME", "value": "Wasserspender" }, { "context": " \"Jan\"]\n \"goldener Wasserspender\" [\"Martin\" \"Alexander\"]\n \"3d-drucker\" [\"Marti", "end": 6257, "score": 0.9998524785041809, "start": 6251, "tag": "NAME", "value": "Martin" }, { "context": " \"goldener Wasserspender\" [\"Martin\" \"Alexander\"]\n \"3d-drucker\" [\"Martin\" \"Jan\" \"Ma", "end": 6269, "score": 0.9998325705528259, "start": 6260, "tag": "NAME", "value": "Alexander" }, { "context": "artin\" \"Alexander\"]\n \"3d-drucker\" [\"Martin\" \"Jan\" \"Marc\" \"Christian\"]\n \"hacker", "end": 6308, "score": 0.9998513460159302, "start": 6302, "tag": "NAME", "value": "Martin" }, { "context": "lexander\"]\n \"3d-drucker\" [\"Martin\" \"Jan\" \"Marc\" \"Christian\"]\n \"hackerspace\"", "end": 6314, "score": 0.9998214840888977, "start": 6311, "tag": "NAME", "value": "Jan" }, { "context": "er\"]\n \"3d-drucker\" [\"Martin\" \"Jan\" \"Marc\" \"Christian\"]\n \"hackerspace\" [\"Björ", "end": 6321, "score": 0.9997823238372803, "start": 6317, "tag": "NAME", "value": "Marc" }, { "context": " \"3d-drucker\" [\"Martin\" \"Jan\" \"Marc\" \"Christian\"]\n \"hackerspace\" [\"Björn\" \"Jan\" \"Ma", "end": 6333, "score": 0.9997283220291138, "start": 6324, "tag": "NAME", "value": "Christian" }, { "context": "Marc\" \"Christian\"]\n \"hackerspace\" [\"Björn\" \"Jan\" \"Marc\" \"Christian\" \"Markus\"]\n ", "end": 6372, "score": 0.9998311400413513, "start": 6367, "tag": "NAME", "value": "Björn" }, { "context": "hristian\"]\n \"hackerspace\" [\"Björn\" \"Jan\" \"Marc\" \"Christian\" \"Markus\"]\n \"hac", "end": 6378, "score": 0.9996515512466431, "start": 6375, "tag": "NAME", "value": "Jan" }, { "context": "an\"]\n \"hackerspace\" [\"Björn\" \"Jan\" \"Marc\" \"Christian\" \"Markus\"]\n \"hackerspac", "end": 6385, "score": 0.9997227191925049, "start": 6381, "tag": "NAME", "value": "Marc" }, { "context": " \"hackerspace\" [\"Björn\" \"Jan\" \"Marc\" \"Christian\" \"Markus\"]\n \"hackerspace+3d\" [\"Björ", "end": 6397, 
"score": 0.9997254610061646, "start": 6388, "tag": "NAME", "value": "Christian" }, { "context": " \"hackerspace\" [\"Björn\" \"Jan\" \"Marc\" \"Christian\" \"Markus\"]\n \"hackerspace+3d\" [\"Björn\" \"Jan\" ", "end": 6406, "score": 0.9997484683990479, "start": 6400, "tag": "NAME", "value": "Markus" }, { "context": "tian\" \"Markus\"]\n \"hackerspace+3d\" [\"Björn\" \"Jan\" \"Marc\" \"Christian\" \"Markus\"]}]\n (", "end": 6448, "score": 0.9998250603675842, "start": 6443, "tag": "NAME", "value": "Björn" }, { "context": "arkus\"]\n \"hackerspace+3d\" [\"Björn\" \"Jan\" \"Marc\" \"Christian\" \"Markus\"]}]\n (apply ", "end": 6454, "score": 0.9995193481445312, "start": 6451, "tag": "NAME", "value": "Jan" }, { "context": "]\n \"hackerspace+3d\" [\"Björn\" \"Jan\" \"Marc\" \"Christian\" \"Markus\"]}]\n (apply concat\n", "end": 6461, "score": 0.9997389316558838, "start": 6457, "tag": "NAME", "value": "Marc" }, { "context": " \"hackerspace+3d\" [\"Björn\" \"Jan\" \"Marc\" \"Christian\" \"Markus\"]}]\n (apply concat\n ", "end": 6473, "score": 0.9997408390045166, "start": 6464, "tag": "NAME", "value": "Christian" }, { "context": "ackerspace+3d\" [\"Björn\" \"Jan\" \"Marc\" \"Christian\" \"Markus\"]}]\n (apply concat\n (for [use", "end": 6482, "score": 0.9997497797012329, "start": 6476, "tag": "NAME", "value": "Markus" } ]
src/main/decide/server_components/database.clj
hhucn/decide3
4
(ns decide.server-components.database (:require [clojure.string :as str] [datahike.api :as d] [decide.models.opinion :as opinion] [decide.models.process :as process] [decide.models.proposal :as proposal] [decide.models.user :as user] [decide.server-components.config :refer [config]] [com.fulcrologic.guardrails.core :refer [>defn =>]] [mount.core :refer [defstate args]] [taoensso.timbre :as log] [datahike.core :as d.core] [decide.server-components.db.migrate :as migrate] [decide.server-components.db.schema :as schema])) (def dev-db (vec (concat (for [{:keys [id name]} [{:id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb", :name "Björn"} {:id #uuid"001e7a76-9c45-49a9-865c-a631641544dd", :name "Martin"} {:id #uuid"0012d971-6b09-4be3-a5ac-f02f2352a235", :name "Christian"} {:id #uuid"001e7a7e-3eb2-4226-b9ab-36dddcf64106", :name "Markus"} {:id #uuid"0000004a-e4fd-420c-ba19-6de5b59c702d", :name "Jan"} {:id #uuid"000aa0e2-e4d6-463d-ae7c-46765e13a31b", :name "Alexander"} {:id #uuid"00000956-b2e2-4285-ac73-1414ec692b0c", :name "Marc"}]] (assoc (user/tx-map {::user/id id ::user/email name ::user/password name}) :db/id name)) [#::process{:slug "private-decision" :type ::process/type.private :title "Private decision" :description "This decision ist private" :moderators ["Björn"] :participants ["Markus"] :proposals [#::proposal{:db/id "A" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000a" :title "A" :opinions (repeat 2 #::opinion{:value +1})} #::proposal{:db/id "B" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000b" :title "B" :opinions (repeat 3 #::opinion{:value +1})} #::proposal{:db/id "C" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000c" :title "C" :parents ["A"] :opinions (repeat 5 #::opinion{:value +1})} #::proposal{:db/id "D" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000d" :title "D" :parents ["A" "B"] :opinions (repeat 3 #::opinion{:value +1})} #::proposal{:title "E" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000e" :parents ["C"] :opinions (repeat 1 #::opinion{:value +1})} 
#::proposal{:title "F" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000f" :parents ["C" "D"] :opinions (repeat 4 #::opinion{:value +1})}]} {::process/slug "test-decision" ::process/title "Meine Test-Entscheidung" ::process/description "Wir müssen irgendwas für die Uni entscheiden." ::process/latest-id 5, ::process/moderators ["Björn"] #_#_::process/end-time #inst"2021-02-01" ::process/proposals [{:db/id "Wasserspender" ::proposal/body "Wasser ist gesund für Studenten.", ::proposal/created #inst"2020-12-18T09:58:15.232-00:00", ::proposal/id #uuid"5fdc7d37-107b-4484-ab85-2911be84c39e", ::proposal/nice-id 1, ::proposal/original-author "Christian", ::proposal/title "Wir sollten einen Wasserspender aufstellen"} {:db/id "goldener Wasserspender" ::proposal/body "Wasser ist gesund für Studenten, aber wir sollten auch auf \"Qualität\" achten.", ::proposal/created #inst"2020-12-18T10:10:28.182-00:00", ::proposal/id #uuid"5fdc8014-bd58-43f6-990f-713741c81d9f", ::proposal/nice-id 2, ::proposal/original-author "Martin", ::proposal/parents ["Wasserspender"], ::proposal/title "Wir sollten einen goldenen Wasserspender aufstellen"} (-> #::proposal{:title "Ein 3-D Drucker für DIY Projekte" :body "Viele DIY Projekte lassen sich heutzutage gut mithilfe von Prototypen aus dem 3-D Drucker bewerkstelligen. Z.B. ein Gehäuse für den Raspberry PI." :created #inst"2020-12-19T10:10:28.182-00:00" :nice-id 3 :original-author "Jan"} proposal/tx-map (assoc :db/id "3d-drucker")) (-> #::proposal{:title "Man könnte einen Hackerspace im ZIM bauen." :body "Möchte man hardware-nahe Projekt entwickeln, dann benötigt man häufig einiges an Arbeitsmaterial, wie z.B. eine Lötstation oder Multimeter. Es könnte im ZIM ein Raum geschaffen werden, wo so etwas vorhanden ist." :nice-id 4 :created #inst"2020-12-19T15:10:28.182-00:00" :original-author "Marc"} proposal/tx-map (assoc :db/id "hackerspace")) (-> #::proposal{:title "Man könnte einen Hackerspace inkl. 3-D Drucker einrichten." 
:body "Möchte man hardware-nahe Projekt entwickeln, dann benötigt man häufig einiges an Arbeitsmaterial, wie z.B. eine Lötstation oder Multimeter. Es könnte im ZIM ein Raum geschaffen werden, wo so etwas vorhanden ist. \n\n Hier könnte auch ein 3-D Drucker aufgestellt werden. " :nice-id 5 :created #inst"2020-12-20T15:10:28.182-00:00" :parents ["3d-drucker" "hackerspace"] :original-author "Markus"} proposal/tx-map (assoc :db/id "hackerspace+3d"))]}] (apply concat (for [[proposal users-who-agree] {"Wasserspender" ["Christian" "Martin" "Alexander" "Jan"] "goldener Wasserspender" ["Martin" "Alexander"] "3d-drucker" ["Martin" "Jan" "Marc" "Christian"] "hackerspace" ["Björn" "Jan" "Marc" "Christian" "Markus"] "hackerspace+3d" ["Björn" "Jan" "Marc" "Christian" "Markus"]}] (apply concat (for [user users-who-agree] [[:db/add proposal ::proposal/opinions (str proposal "+" user)] [:db/add user ::user/opinions (str proposal "+" user)] {:db/id (str proposal "+" user) ::opinion/value 1}]))))))) (defn transact-as [conn user-or-id arg-map] [d.core/conn? any? map?] (let [user-id (if (uuid? user-or-id) user-or-id (:decide.models.user/id user-or-id))] (d/transact conn (update arg-map :tx-data conj [:db/add "datomic.tx" :tx/by [::user/id user-id]])))) (defn- empty-or-nil-field? [[_ v]] (or (nil? v) (and (string? v) (str/blank? v)))) (defn- retract-statement [eid-or-ident v] [:db/retract eid-or-ident v]) (defn retract-empty?-tx [eid-or-ident m] (->> m (filter empty-or-nil-field?) keys (mapv #(retract-statement eid-or-ident %)))) (>defn transact-schema! [conn] [d.core/conn? => map?] (d/transact conn schema/schema)) (defn ensure-database! [db-config] (when-not (d/database-exists? db-config) (log/info "Database does not exist! Creating...") (d/create-database db-config))) (defn test-database [initial-db] (d/create-database) (let [conn (d/connect)] (transact-schema! conn) (d/transact conn initial-db) conn)) (defstate conn :start (let [db-config (:db config) reset? (:db/reset? 
db-config)] (when reset? (log/info "Reset Database") (d/delete-database db-config)) (ensure-database! db-config) (log/info "Database exists. Connecting...") (let [conn (d/connect db-config)] (try (migrate/upsert! conn schema/schema) (migrate/migrate-data! conn schema/migrations) (when reset? (d/transact conn dev-db)) (catch Exception e (println e))) conn)) :stop (d/release conn))
115870
(ns decide.server-components.database (:require [clojure.string :as str] [datahike.api :as d] [decide.models.opinion :as opinion] [decide.models.process :as process] [decide.models.proposal :as proposal] [decide.models.user :as user] [decide.server-components.config :refer [config]] [com.fulcrologic.guardrails.core :refer [>defn =>]] [mount.core :refer [defstate args]] [taoensso.timbre :as log] [datahike.core :as d.core] [decide.server-components.db.migrate :as migrate] [decide.server-components.db.schema :as schema])) (def dev-db (vec (concat (for [{:keys [id name]} [{:id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb", :name "<NAME>"} {:id #uuid"001e7a76-9c45-49a9-865c-a631641544dd", :name "<NAME>"} {:id #uuid"0012d971-6b09-4be3-a5ac-f02f2352a235", :name "<NAME>"} {:id #uuid"001e7a7e-3eb2-4226-b9ab-36dddcf64106", :name "<NAME>"} {:id #uuid"0000004a-e4fd-420c-ba19-6de5b59c702d", :name "<NAME>"} {:id #uuid"000aa0e2-e4d6-463d-ae7c-46765e13a31b", :name "<NAME>"} {:id #uuid"00000956-b2e2-4285-ac73-1414ec692b0c", :name "<NAME>"}]] (assoc (user/tx-map {::user/id id ::user/email name ::user/password name}) :db/id name)) [#::process{:slug "private-decision" :type ::process/type.private :title "Private decision" :description "This decision ist private" :moderators ["<NAME>"] :participants ["<NAME>"] :proposals [#::proposal{:db/id "A" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000a" :title "A" :opinions (repeat 2 #::opinion{:value +1})} #::proposal{:db/id "B" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000b" :title "B" :opinions (repeat 3 #::opinion{:value +1})} #::proposal{:db/id "C" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000c" :title "C" :parents ["A"] :opinions (repeat 5 #::opinion{:value +1})} #::proposal{:db/id "D" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000d" :title "D" :parents ["A" "B"] :opinions (repeat 3 #::opinion{:value +1})} #::proposal{:title "E" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000e" :parents ["C"] :opinions (repeat 1 #::opinion{:value +1})} 
#::proposal{:title "F" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000f" :parents ["C" "D"] :opinions (repeat 4 #::opinion{:value +1})}]} {::process/slug "test-decision" ::process/title "Meine Test-Entscheidung" ::process/description "Wir müssen irgendwas für die Uni entscheiden." ::process/latest-id 5, ::process/moderators ["<NAME>"] #_#_::process/end-time #inst"2021-02-01" ::process/proposals [{:db/id "Wasserspender" ::proposal/body "Wasser ist gesund für Studenten.", ::proposal/created #inst"2020-12-18T09:58:15.232-00:00", ::proposal/id #uuid"5fdc7d37-107b-4484-ab85-2911be84c39e", ::proposal/nice-id 1, ::proposal/original-author "<NAME>", ::proposal/title "Wir sollten einen Wasserspender aufstellen"} {:db/id "goldener Wasserspender" ::proposal/body "Wasser ist gesund für Studenten, aber wir sollten auch auf \"Qualität\" achten.", ::proposal/created #inst"2020-12-18T10:10:28.182-00:00", ::proposal/id #uuid"5fdc8014-bd58-43f6-990f-713741c81d9f", ::proposal/nice-id 2, ::proposal/original-author "<NAME>", ::proposal/parents ["Wasserspender"], ::proposal/title "Wir sollten einen goldenen Wasserspender aufstellen"} (-> #::proposal{:title "Ein 3-D Drucker für DIY Projekte" :body "Viele DIY Projekte lassen sich heutzutage gut mithilfe von Prototypen aus dem 3-D Drucker bewerkstelligen. Z.B. ein Gehäuse für den Raspberry PI." :created #inst"2020-12-19T10:10:28.182-00:00" :nice-id 3 :original-author "<NAME>"} proposal/tx-map (assoc :db/id "3d-drucker")) (-> #::proposal{:title "Man könnte einen Hackerspace im ZIM bauen." :body "Möchte man hardware-nahe Projekt entwickeln, dann benötigt man häufig einiges an Arbeitsmaterial, wie z.B. eine Lötstation oder Multimeter. Es könnte im ZIM ein Raum geschaffen werden, wo so etwas vorhanden ist." :nice-id 4 :created #inst"2020-12-19T15:10:28.182-00:00" :original-author "<NAME>"} proposal/tx-map (assoc :db/id "hackerspace")) (-> #::proposal{:title "Man könnte einen Hackerspace inkl. 3-D Drucker einrichten." 
:body "Möchte man hardware-nahe Projekt entwickeln, dann benötigt man häufig einiges an Arbeitsmaterial, wie z.B. eine Lötstation oder Multimeter. Es könnte im ZIM ein Raum geschaffen werden, wo so etwas vorhanden ist. \n\n Hier könnte auch ein 3-D Drucker aufgestellt werden. " :nice-id 5 :created #inst"2020-12-20T15:10:28.182-00:00" :parents ["3d-drucker" "hackerspace"] :original-author "<NAME>"} proposal/tx-map (assoc :db/id "hackerspace+3d"))]}] (apply concat (for [[proposal users-who-agree] {"Wasserspender" ["<NAME>" "<NAME>" "<NAME>" "<NAME>"] "goldener <NAME>" ["<NAME>" "<NAME>"] "3d-drucker" ["<NAME>" "<NAME>" "<NAME>" "<NAME>"] "hackerspace" ["<NAME>" "<NAME>" "<NAME>" "<NAME>" "<NAME>"] "hackerspace+3d" ["<NAME>" "<NAME>" "<NAME>" "<NAME>" "<NAME>"]}] (apply concat (for [user users-who-agree] [[:db/add proposal ::proposal/opinions (str proposal "+" user)] [:db/add user ::user/opinions (str proposal "+" user)] {:db/id (str proposal "+" user) ::opinion/value 1}]))))))) (defn transact-as [conn user-or-id arg-map] [d.core/conn? any? map?] (let [user-id (if (uuid? user-or-id) user-or-id (:decide.models.user/id user-or-id))] (d/transact conn (update arg-map :tx-data conj [:db/add "datomic.tx" :tx/by [::user/id user-id]])))) (defn- empty-or-nil-field? [[_ v]] (or (nil? v) (and (string? v) (str/blank? v)))) (defn- retract-statement [eid-or-ident v] [:db/retract eid-or-ident v]) (defn retract-empty?-tx [eid-or-ident m] (->> m (filter empty-or-nil-field?) keys (mapv #(retract-statement eid-or-ident %)))) (>defn transact-schema! [conn] [d.core/conn? => map?] (d/transact conn schema/schema)) (defn ensure-database! [db-config] (when-not (d/database-exists? db-config) (log/info "Database does not exist! Creating...") (d/create-database db-config))) (defn test-database [initial-db] (d/create-database) (let [conn (d/connect)] (transact-schema! conn) (d/transact conn initial-db) conn)) (defstate conn :start (let [db-config (:db config) reset? (:db/reset? 
db-config)] (when reset? (log/info "Reset Database") (d/delete-database db-config)) (ensure-database! db-config) (log/info "Database exists. Connecting...") (let [conn (d/connect db-config)] (try (migrate/upsert! conn schema/schema) (migrate/migrate-data! conn schema/migrations) (when reset? (d/transact conn dev-db)) (catch Exception e (println e))) conn)) :stop (d/release conn))
true
(ns decide.server-components.database (:require [clojure.string :as str] [datahike.api :as d] [decide.models.opinion :as opinion] [decide.models.process :as process] [decide.models.proposal :as proposal] [decide.models.user :as user] [decide.server-components.config :refer [config]] [com.fulcrologic.guardrails.core :refer [>defn =>]] [mount.core :refer [defstate args]] [taoensso.timbre :as log] [datahike.core :as d.core] [decide.server-components.db.migrate :as migrate] [decide.server-components.db.schema :as schema])) (def dev-db (vec (concat (for [{:keys [id name]} [{:id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb", :name "PI:NAME:<NAME>END_PI"} {:id #uuid"001e7a76-9c45-49a9-865c-a631641544dd", :name "PI:NAME:<NAME>END_PI"} {:id #uuid"0012d971-6b09-4be3-a5ac-f02f2352a235", :name "PI:NAME:<NAME>END_PI"} {:id #uuid"001e7a7e-3eb2-4226-b9ab-36dddcf64106", :name "PI:NAME:<NAME>END_PI"} {:id #uuid"0000004a-e4fd-420c-ba19-6de5b59c702d", :name "PI:NAME:<NAME>END_PI"} {:id #uuid"000aa0e2-e4d6-463d-ae7c-46765e13a31b", :name "PI:NAME:<NAME>END_PI"} {:id #uuid"00000956-b2e2-4285-ac73-1414ec692b0c", :name "PI:NAME:<NAME>END_PI"}]] (assoc (user/tx-map {::user/id id ::user/email name ::user/password name}) :db/id name)) [#::process{:slug "private-decision" :type ::process/type.private :title "Private decision" :description "This decision ist private" :moderators ["PI:NAME:<NAME>END_PI"] :participants ["PI:NAME:<NAME>END_PI"] :proposals [#::proposal{:db/id "A" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000a" :title "A" :opinions (repeat 2 #::opinion{:value +1})} #::proposal{:db/id "B" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000b" :title "B" :opinions (repeat 3 #::opinion{:value +1})} #::proposal{:db/id "C" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000c" :title "C" :parents ["A"] :opinions (repeat 5 #::opinion{:value +1})} #::proposal{:db/id "D" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000d" :title "D" :parents ["A" "B"] :opinions (repeat 3 #::opinion{:value +1})} 
#::proposal{:title "E" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000e" :parents ["C"] :opinions (repeat 1 #::opinion{:value +1})} #::proposal{:title "F" :id #uuid"5fdc8014-bd58-43f6-990f-00000000000f" :parents ["C" "D"] :opinions (repeat 4 #::opinion{:value +1})}]} {::process/slug "test-decision" ::process/title "Meine Test-Entscheidung" ::process/description "Wir müssen irgendwas für die Uni entscheiden." ::process/latest-id 5, ::process/moderators ["PI:NAME:<NAME>END_PI"] #_#_::process/end-time #inst"2021-02-01" ::process/proposals [{:db/id "Wasserspender" ::proposal/body "Wasser ist gesund für Studenten.", ::proposal/created #inst"2020-12-18T09:58:15.232-00:00", ::proposal/id #uuid"5fdc7d37-107b-4484-ab85-2911be84c39e", ::proposal/nice-id 1, ::proposal/original-author "PI:NAME:<NAME>END_PI", ::proposal/title "Wir sollten einen Wasserspender aufstellen"} {:db/id "goldener Wasserspender" ::proposal/body "Wasser ist gesund für Studenten, aber wir sollten auch auf \"Qualität\" achten.", ::proposal/created #inst"2020-12-18T10:10:28.182-00:00", ::proposal/id #uuid"5fdc8014-bd58-43f6-990f-713741c81d9f", ::proposal/nice-id 2, ::proposal/original-author "PI:NAME:<NAME>END_PI", ::proposal/parents ["Wasserspender"], ::proposal/title "Wir sollten einen goldenen Wasserspender aufstellen"} (-> #::proposal{:title "Ein 3-D Drucker für DIY Projekte" :body "Viele DIY Projekte lassen sich heutzutage gut mithilfe von Prototypen aus dem 3-D Drucker bewerkstelligen. Z.B. ein Gehäuse für den Raspberry PI." :created #inst"2020-12-19T10:10:28.182-00:00" :nice-id 3 :original-author "PI:NAME:<NAME>END_PI"} proposal/tx-map (assoc :db/id "3d-drucker")) (-> #::proposal{:title "Man könnte einen Hackerspace im ZIM bauen." :body "Möchte man hardware-nahe Projekt entwickeln, dann benötigt man häufig einiges an Arbeitsmaterial, wie z.B. eine Lötstation oder Multimeter. Es könnte im ZIM ein Raum geschaffen werden, wo so etwas vorhanden ist." 
:nice-id 4 :created #inst"2020-12-19T15:10:28.182-00:00" :original-author "PI:NAME:<NAME>END_PI"} proposal/tx-map (assoc :db/id "hackerspace")) (-> #::proposal{:title "Man könnte einen Hackerspace inkl. 3-D Drucker einrichten." :body "Möchte man hardware-nahe Projekt entwickeln, dann benötigt man häufig einiges an Arbeitsmaterial, wie z.B. eine Lötstation oder Multimeter. Es könnte im ZIM ein Raum geschaffen werden, wo so etwas vorhanden ist. \n\n Hier könnte auch ein 3-D Drucker aufgestellt werden. " :nice-id 5 :created #inst"2020-12-20T15:10:28.182-00:00" :parents ["3d-drucker" "hackerspace"] :original-author "PI:NAME:<NAME>END_PI"} proposal/tx-map (assoc :db/id "hackerspace+3d"))]}] (apply concat (for [[proposal users-who-agree] {"Wasserspender" ["PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI"] "goldener PI:NAME:<NAME>END_PI" ["PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI"] "3d-drucker" ["PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI"] "hackerspace" ["PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI"] "hackerspace+3d" ["PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI"]}] (apply concat (for [user users-who-agree] [[:db/add proposal ::proposal/opinions (str proposal "+" user)] [:db/add user ::user/opinions (str proposal "+" user)] {:db/id (str proposal "+" user) ::opinion/value 1}]))))))) (defn transact-as [conn user-or-id arg-map] [d.core/conn? any? map?] (let [user-id (if (uuid? user-or-id) user-or-id (:decide.models.user/id user-or-id))] (d/transact conn (update arg-map :tx-data conj [:db/add "datomic.tx" :tx/by [::user/id user-id]])))) (defn- empty-or-nil-field? [[_ v]] (or (nil? v) (and (string? v) (str/blank? 
v)))) (defn- retract-statement [eid-or-ident v] [:db/retract eid-or-ident v]) (defn retract-empty?-tx [eid-or-ident m] (->> m (filter empty-or-nil-field?) keys (mapv #(retract-statement eid-or-ident %)))) (>defn transact-schema! [conn] [d.core/conn? => map?] (d/transact conn schema/schema)) (defn ensure-database! [db-config] (when-not (d/database-exists? db-config) (log/info "Database does not exist! Creating...") (d/create-database db-config))) (defn test-database [initial-db] (d/create-database) (let [conn (d/connect)] (transact-schema! conn) (d/transact conn initial-db) conn)) (defstate conn :start (let [db-config (:db config) reset? (:db/reset? db-config)] (when reset? (log/info "Reset Database") (d/delete-database db-config)) (ensure-database! db-config) (log/info "Database exists. Connecting...") (let [conn (d/connect db-config)] (try (migrate/upsert! conn schema/schema) (migrate/migrate-data! conn schema/migrations) (when reset? (d/transact conn dev-db)) (catch Exception e (println e))) conn)) :stop (d/release conn))
[ { "context": ";; Copyright (c) 2021 Thomas J. Otterson\n;; \n;; This software is released under the MIT Li", "end": 40, "score": 0.9996538758277893, "start": 22, "tag": "NAME", "value": "Thomas J. Otterson" } ]
src/barandis/euler/p12.clj
Barandis/euler-clojure
0
;; Copyright (c) 2021 Thomas J. Otterson ;; ;; This software is released under the MIT License. ;; https://opensource.org/licenses/MIT ;; Solves Project Euler problem 12: ;; ;; The sequence of triangle numbers is generated by adding the natural numbers. ;; So the 7th triangle number would be 1 + 2 + 3 + 4 + 5 + 6 + 7 = 28. The first ;; ten terms would be: ;; ;; 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, ... ;; ;; Let us list the factors of the first seven triangle numbers: ;; ;; 1: 1 ;; 3: 1,3 ;; 6: 1,2,3,6 ;; 10: 1,2,5,10 ;; 15: 1,3,5,15 ;; 21: 1,3,7,21 ;; 28: 1,2,4,7,14,28 ;; ;; We can see that 28 is the first triangle number to have over five divisors. ;; ;; What is the value of the first triangle number to have over five hundred ;; divisors? ;; The solution is straightforward. I am not one to miss an opportunity to ;; create an infinite sequence, so an infinite sequence of triangle numbers is ;; created. These are each factorized until the first one is found whose number ;; of factors exceeds the target. ;; ;; This solution can be run using `clojure -X:p12`. It will default to the 500 ;; target described in the problem. To run with another target, use `clojure ;; -X:p12 :factors 5` or similar. (ns barandis.euler.p12) (def ^:private triangles "Infinite, lazy sequence of triangle numbers." (map #(/ (* % (inc %)) 2) (iterate inc 1))) (defn- factorize "Returns a sorted set of all of the numbers that divide evenly into `n`." [n] (into (sorted-set) (->> (range 1 (inc (Math/sqrt n))) (filter #(zero? (rem n %))) (mapcat #(vector % (/ n %)))))) (defn- first-triangle "Calculates the first triangle number that has at least `n` factors." [n] (first (filter #(<= n (count (factorize %))) triangles))) (defn solve "Displays the first triangle number that has at least (:factors data) factors. This number defaults to 500, which makes the displayed value the solution to Project Euler problem 12." ([] (solve {})) ([data] (-> (get data :factors 500) first-triangle println time)))
45900
;; Copyright (c) 2021 <NAME> ;; ;; This software is released under the MIT License. ;; https://opensource.org/licenses/MIT ;; Solves Project Euler problem 12: ;; ;; The sequence of triangle numbers is generated by adding the natural numbers. ;; So the 7th triangle number would be 1 + 2 + 3 + 4 + 5 + 6 + 7 = 28. The first ;; ten terms would be: ;; ;; 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, ... ;; ;; Let us list the factors of the first seven triangle numbers: ;; ;; 1: 1 ;; 3: 1,3 ;; 6: 1,2,3,6 ;; 10: 1,2,5,10 ;; 15: 1,3,5,15 ;; 21: 1,3,7,21 ;; 28: 1,2,4,7,14,28 ;; ;; We can see that 28 is the first triangle number to have over five divisors. ;; ;; What is the value of the first triangle number to have over five hundred ;; divisors? ;; The solution is straightforward. I am not one to miss an opportunity to ;; create an infinite sequence, so an infinite sequence of triangle numbers is ;; created. These are each factorized until the first one is found whose number ;; of factors exceeds the target. ;; ;; This solution can be run using `clojure -X:p12`. It will default to the 500 ;; target described in the problem. To run with another target, use `clojure ;; -X:p12 :factors 5` or similar. (ns barandis.euler.p12) (def ^:private triangles "Infinite, lazy sequence of triangle numbers." (map #(/ (* % (inc %)) 2) (iterate inc 1))) (defn- factorize "Returns a sorted set of all of the numbers that divide evenly into `n`." [n] (into (sorted-set) (->> (range 1 (inc (Math/sqrt n))) (filter #(zero? (rem n %))) (mapcat #(vector % (/ n %)))))) (defn- first-triangle "Calculates the first triangle number that has at least `n` factors." [n] (first (filter #(<= n (count (factorize %))) triangles))) (defn solve "Displays the first triangle number that has at least (:factors data) factors. This number defaults to 500, which makes the displayed value the solution to Project Euler problem 12." ([] (solve {})) ([data] (-> (get data :factors 500) first-triangle println time)))
true
;; Copyright (c) 2021 PI:NAME:<NAME>END_PI ;; ;; This software is released under the MIT License. ;; https://opensource.org/licenses/MIT ;; Solves Project Euler problem 12: ;; ;; The sequence of triangle numbers is generated by adding the natural numbers. ;; So the 7th triangle number would be 1 + 2 + 3 + 4 + 5 + 6 + 7 = 28. The first ;; ten terms would be: ;; ;; 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, ... ;; ;; Let us list the factors of the first seven triangle numbers: ;; ;; 1: 1 ;; 3: 1,3 ;; 6: 1,2,3,6 ;; 10: 1,2,5,10 ;; 15: 1,3,5,15 ;; 21: 1,3,7,21 ;; 28: 1,2,4,7,14,28 ;; ;; We can see that 28 is the first triangle number to have over five divisors. ;; ;; What is the value of the first triangle number to have over five hundred ;; divisors? ;; The solution is straightforward. I am not one to miss an opportunity to ;; create an infinite sequence, so an infinite sequence of triangle numbers is ;; created. These are each factorized until the first one is found whose number ;; of factors exceeds the target. ;; ;; This solution can be run using `clojure -X:p12`. It will default to the 500 ;; target described in the problem. To run with another target, use `clojure ;; -X:p12 :factors 5` or similar. (ns barandis.euler.p12) (def ^:private triangles "Infinite, lazy sequence of triangle numbers." (map #(/ (* % (inc %)) 2) (iterate inc 1))) (defn- factorize "Returns a sorted set of all of the numbers that divide evenly into `n`." [n] (into (sorted-set) (->> (range 1 (inc (Math/sqrt n))) (filter #(zero? (rem n %))) (mapcat #(vector % (/ n %)))))) (defn- first-triangle "Calculates the first triangle number that has at least `n` factors." [n] (first (filter #(<= n (count (factorize %))) triangles))) (defn solve "Displays the first triangle number that has at least (:factors data) factors. This number defaults to 500, which makes the displayed value the solution to Project Euler problem 12." ([] (solve {})) ([data] (-> (get data :factors 500) first-triangle println time)))
[ { "context": "a kakskümmend üks\"\n (number 31256721)\n\n \"4.\"\n \"neljas\"\n (ordinal 4)\n)\n\n", "end": 1037, "score": 0.9942981004714966, "start": 1031, "tag": "NAME", "value": "neljas" } ]
resources/languages/et/corpus/numbers.clj
irvingflores/duckling
922
( ; Context map {} "0" "null" (number 0) "1" "üks" (number 1) "33" "kolmkümmend kolm" ; "kolmkend kolm" (number 33) "14" "neliteist" (number 14) "16" "kuusteist" (number 16) "17" "seitseteist" (number 17) "18" "kaheksateist" (number 18) "1.1" "1.10" "01.10" (number 1.1) "0.77" ".77" (number 0.77) "100,000" "100000" "100K" "100k" "100 000" (number 100000) "3M" "3000K" "3000000" "3,000,000" "3 000 000" (number 3000000) "1,200,000" "1200000" "1.2M" "1200K" ".0012G" "1 200 000" (number 1200000) "- 1,200,000" "-1200000" "miinus 1,200,000" "-1.2M" "-1200K" "-.0012G" (number -1200000) "viis tuhat" (number 5000) "kakssada tuhat" (number 200000) "kakskümmend üks tuhat üksteist" (number 21011) "seitsesada kakskümmend üks tuhat kaksteist" (number 721012) "kolmkümmend üks miljonit kakssada viiskümmend kuus tuhat seitsesada kakskümmend üks" (number 31256721) "4." "neljas" (ordinal 4) )
101822
( ; Context map {} "0" "null" (number 0) "1" "üks" (number 1) "33" "kolmkümmend kolm" ; "kolmkend kolm" (number 33) "14" "neliteist" (number 14) "16" "kuusteist" (number 16) "17" "seitseteist" (number 17) "18" "kaheksateist" (number 18) "1.1" "1.10" "01.10" (number 1.1) "0.77" ".77" (number 0.77) "100,000" "100000" "100K" "100k" "100 000" (number 100000) "3M" "3000K" "3000000" "3,000,000" "3 000 000" (number 3000000) "1,200,000" "1200000" "1.2M" "1200K" ".0012G" "1 200 000" (number 1200000) "- 1,200,000" "-1200000" "miinus 1,200,000" "-1.2M" "-1200K" "-.0012G" (number -1200000) "viis tuhat" (number 5000) "kakssada tuhat" (number 200000) "kakskümmend üks tuhat üksteist" (number 21011) "seitsesada kakskümmend üks tuhat kaksteist" (number 721012) "kolmkümmend üks miljonit kakssada viiskümmend kuus tuhat seitsesada kakskümmend üks" (number 31256721) "4." "<NAME>" (ordinal 4) )
true
( ; Context map {} "0" "null" (number 0) "1" "üks" (number 1) "33" "kolmkümmend kolm" ; "kolmkend kolm" (number 33) "14" "neliteist" (number 14) "16" "kuusteist" (number 16) "17" "seitseteist" (number 17) "18" "kaheksateist" (number 18) "1.1" "1.10" "01.10" (number 1.1) "0.77" ".77" (number 0.77) "100,000" "100000" "100K" "100k" "100 000" (number 100000) "3M" "3000K" "3000000" "3,000,000" "3 000 000" (number 3000000) "1,200,000" "1200000" "1.2M" "1200K" ".0012G" "1 200 000" (number 1200000) "- 1,200,000" "-1200000" "miinus 1,200,000" "-1.2M" "-1200K" "-.0012G" (number -1200000) "viis tuhat" (number 5000) "kakssada tuhat" (number 200000) "kakskümmend üks tuhat üksteist" (number 21011) "seitsesada kakskümmend üks tuhat kaksteist" (number 721012) "kolmkümmend üks miljonit kakssada viiskümmend kuus tuhat seitsesada kakskümmend üks" (number 31256721) "4." "PI:NAME:<NAME>END_PI" (ordinal 4) )
[ { "context": " {:uid \"02\"\n :name \"authorship\"\n :documentPlan {:segments\n ", "end": 1682, "score": 0.8110073804855347, "start": 1672, "tag": "NAME", "value": "authorship" }, { "context": " :dictionaryItem {:name \"author\" :type \"Dictionary-item\" :srcId \"jirq-V{WgCd+u[sd", "end": 2019, "score": 0.9408203363418579, "start": 2013, "tag": "NAME", "value": "author" } ]
api/test/data/document_plan_test.clj
Site-Command/accelerated-text
0
(ns data.document-plan-test (:require [api.db-fixtures :as fixtures] [clojure.test :refer [deftest is use-fixtures testing]] [data.entities.document-plan :as document-plan] [data.entities.user-group :as user-group])) (use-fixtures :each fixtures/clean-db) (deftest ^:integration io-document-plan (testing "title document plan storing" (let [dp {:uid "01" :name "title-only" :documentPlan {:segments [{:children [{:name "title" :type "Cell" :srcId "isiyTw"}] :type "Segment" :srcId "%!Y"}] :type "Document-plan" :srcId "G=Rh"}} {id :id :as resp} (document-plan/add-document-plan dp user-group/DUMMY-USER-GROUP-ID)] (is (= #{:updatedAt :uid :name :createdAt :id :documentPlan :updateCount} (set (keys resp)))) (is (string? id)) (let [{doc-plan :documentPlan} (document-plan/get-document-plan id)] (is (= {:segments [{:children [{:name "title" :type "Cell" :srcId "isiyTw"}] :type "Segment" :srcId "%!Y"}] :type "Document-plan" :srcId "G=Rh"} doc-plan)))))) (deftest ^:integration authorship (testing "authorship document plan" (let [dp {:uid "02" :name "authorship" :documentPlan {:segments [{:children [{:type "AMR" :srcId "W}lSg%-S(rQ*nmVp3fFV" :conceptId "author" :dictionaryItem {:name "author" :type "Dictionary-item" :srcId "jirq-V{WgCd+u[sdVWpd" :itemId "VB-author"} :roles [{:name "agent" :children [{:name "authors" :type "Cell" :srcId "uakxT`=!W@8/xV#^orHk"}]} {:name "co-agent" :children [{:name "title" :type "Cell" :srcId "X_CwogT+.Z)N@;Mhz.j/"}]} {:name "theme" :children [nil]}]}] :type "Segment" :srcId "}0Ci`hF%i?izegwAT[@J"}] :type "Document-plan" :srcId "eoPNHZ1PSV{MJBwehL^Z"}} {id :id :as resp} (document-plan/add-document-plan dp user-group/DUMMY-USER-GROUP-ID)] (is (= #{:updatedAt :uid :name :createdAt :id :documentPlan :updateCount} (set (keys resp)))) (is (string? 
id)) (let [{doc-plan :documentPlan} (document-plan/get-document-plan id)] (is (= {:segments [{:children [{:conceptId "author" :dictionaryItem {:itemId "VB-author" :name "author" :srcId "jirq-V{WgCd+u[sdVWpd" :type "Dictionary-item"} :roles [{:children [{:name "authors" :srcId "uakxT`=!W@8/xV#^orHk" :type "Cell"}] :name "agent"} {:children [{:name "title" :srcId "X_CwogT+.Z)N@;Mhz.j/" :type "Cell"}] :name "co-agent"} {:children [nil] :name "theme"}] :srcId "W}lSg%-S(rQ*nmVp3fFV" :type "AMR"}] :srcId "}0Ci`hF%i?izegwAT[@J" :type "Segment"}] :srcId "eoPNHZ1PSV{MJBwehL^Z" :type "Document-plan"} doc-plan)))))) (deftest ^:integration adjective (let [dp {:uid "03" :name "adjective-phrase" :documentPlan {:segments [{:children [{:child {:name "title" :type "Cell" :srcId "k1*3(#7IWxHal=%)AdyQ"} :name "good" :type "Dictionary-item-modifier" :srcId "hy-Io!DlnURxCO!v3`^[" :itemId "NN-good"}] :type "Segment" :srcId "ujW*X(khAvxZNh!jF8c8"}] :type "Document-plan" :srcId "xlp%{tSm4kq9Y?|jz(7e"}} {id :id :as resp} (document-plan/add-document-plan dp user-group/DUMMY-USER-GROUP-ID)] (is (= #{:updatedAt :uid :name :createdAt :id :documentPlan :updateCount} (set (keys resp)))) (is (string? id)) (let [{doc-plan :documentPlan} (document-plan/get-document-plan id)] (is (= {:segments [{:children [{:child {:name "title" :srcId "k1*3(#7IWxHal=%)AdyQ" :type "Cell"} :itemId "NN-good" :name "good" :srcId "hy-Io!DlnURxCO!v3`^[" :type "Dictionary-item-modifier"}] :srcId "ujW*X(khAvxZNh!jF8c8" :type "Segment"}] :srcId "xlp%{tSm4kq9Y?|jz(7e" :type "Document-plan"} doc-plan)))))
110739
(ns data.document-plan-test (:require [api.db-fixtures :as fixtures] [clojure.test :refer [deftest is use-fixtures testing]] [data.entities.document-plan :as document-plan] [data.entities.user-group :as user-group])) (use-fixtures :each fixtures/clean-db) (deftest ^:integration io-document-plan (testing "title document plan storing" (let [dp {:uid "01" :name "title-only" :documentPlan {:segments [{:children [{:name "title" :type "Cell" :srcId "isiyTw"}] :type "Segment" :srcId "%!Y"}] :type "Document-plan" :srcId "G=Rh"}} {id :id :as resp} (document-plan/add-document-plan dp user-group/DUMMY-USER-GROUP-ID)] (is (= #{:updatedAt :uid :name :createdAt :id :documentPlan :updateCount} (set (keys resp)))) (is (string? id)) (let [{doc-plan :documentPlan} (document-plan/get-document-plan id)] (is (= {:segments [{:children [{:name "title" :type "Cell" :srcId "isiyTw"}] :type "Segment" :srcId "%!Y"}] :type "Document-plan" :srcId "G=Rh"} doc-plan)))))) (deftest ^:integration authorship (testing "authorship document plan" (let [dp {:uid "02" :name "<NAME>" :documentPlan {:segments [{:children [{:type "AMR" :srcId "W}lSg%-S(rQ*nmVp3fFV" :conceptId "author" :dictionaryItem {:name "<NAME>" :type "Dictionary-item" :srcId "jirq-V{WgCd+u[sdVWpd" :itemId "VB-author"} :roles [{:name "agent" :children [{:name "authors" :type "Cell" :srcId "uakxT`=!W@8/xV#^orHk"}]} {:name "co-agent" :children [{:name "title" :type "Cell" :srcId "X_CwogT+.Z)N@;Mhz.j/"}]} {:name "theme" :children [nil]}]}] :type "Segment" :srcId "}0Ci`hF%i?izegwAT[@J"}] :type "Document-plan" :srcId "eoPNHZ1PSV{MJBwehL^Z"}} {id :id :as resp} (document-plan/add-document-plan dp user-group/DUMMY-USER-GROUP-ID)] (is (= #{:updatedAt :uid :name :createdAt :id :documentPlan :updateCount} (set (keys resp)))) (is (string? 
id)) (let [{doc-plan :documentPlan} (document-plan/get-document-plan id)] (is (= {:segments [{:children [{:conceptId "author" :dictionaryItem {:itemId "VB-author" :name "author" :srcId "jirq-V{WgCd+u[sdVWpd" :type "Dictionary-item"} :roles [{:children [{:name "authors" :srcId "uakxT`=!W@8/xV#^orHk" :type "Cell"}] :name "agent"} {:children [{:name "title" :srcId "X_CwogT+.Z)N@;Mhz.j/" :type "Cell"}] :name "co-agent"} {:children [nil] :name "theme"}] :srcId "W}lSg%-S(rQ*nmVp3fFV" :type "AMR"}] :srcId "}0Ci`hF%i?izegwAT[@J" :type "Segment"}] :srcId "eoPNHZ1PSV{MJBwehL^Z" :type "Document-plan"} doc-plan)))))) (deftest ^:integration adjective (let [dp {:uid "03" :name "adjective-phrase" :documentPlan {:segments [{:children [{:child {:name "title" :type "Cell" :srcId "k1*3(#7IWxHal=%)AdyQ"} :name "good" :type "Dictionary-item-modifier" :srcId "hy-Io!DlnURxCO!v3`^[" :itemId "NN-good"}] :type "Segment" :srcId "ujW*X(khAvxZNh!jF8c8"}] :type "Document-plan" :srcId "xlp%{tSm4kq9Y?|jz(7e"}} {id :id :as resp} (document-plan/add-document-plan dp user-group/DUMMY-USER-GROUP-ID)] (is (= #{:updatedAt :uid :name :createdAt :id :documentPlan :updateCount} (set (keys resp)))) (is (string? id)) (let [{doc-plan :documentPlan} (document-plan/get-document-plan id)] (is (= {:segments [{:children [{:child {:name "title" :srcId "k1*3(#7IWxHal=%)AdyQ" :type "Cell"} :itemId "NN-good" :name "good" :srcId "hy-Io!DlnURxCO!v3`^[" :type "Dictionary-item-modifier"}] :srcId "ujW*X(khAvxZNh!jF8c8" :type "Segment"}] :srcId "xlp%{tSm4kq9Y?|jz(7e" :type "Document-plan"} doc-plan)))))
true
(ns data.document-plan-test (:require [api.db-fixtures :as fixtures] [clojure.test :refer [deftest is use-fixtures testing]] [data.entities.document-plan :as document-plan] [data.entities.user-group :as user-group])) (use-fixtures :each fixtures/clean-db) (deftest ^:integration io-document-plan (testing "title document plan storing" (let [dp {:uid "01" :name "title-only" :documentPlan {:segments [{:children [{:name "title" :type "Cell" :srcId "isiyTw"}] :type "Segment" :srcId "%!Y"}] :type "Document-plan" :srcId "G=Rh"}} {id :id :as resp} (document-plan/add-document-plan dp user-group/DUMMY-USER-GROUP-ID)] (is (= #{:updatedAt :uid :name :createdAt :id :documentPlan :updateCount} (set (keys resp)))) (is (string? id)) (let [{doc-plan :documentPlan} (document-plan/get-document-plan id)] (is (= {:segments [{:children [{:name "title" :type "Cell" :srcId "isiyTw"}] :type "Segment" :srcId "%!Y"}] :type "Document-plan" :srcId "G=Rh"} doc-plan)))))) (deftest ^:integration authorship (testing "authorship document plan" (let [dp {:uid "02" :name "PI:NAME:<NAME>END_PI" :documentPlan {:segments [{:children [{:type "AMR" :srcId "W}lSg%-S(rQ*nmVp3fFV" :conceptId "author" :dictionaryItem {:name "PI:NAME:<NAME>END_PI" :type "Dictionary-item" :srcId "jirq-V{WgCd+u[sdVWpd" :itemId "VB-author"} :roles [{:name "agent" :children [{:name "authors" :type "Cell" :srcId "uakxT`=!W@8/xV#^orHk"}]} {:name "co-agent" :children [{:name "title" :type "Cell" :srcId "X_CwogT+.Z)N@;Mhz.j/"}]} {:name "theme" :children [nil]}]}] :type "Segment" :srcId "}0Ci`hF%i?izegwAT[@J"}] :type "Document-plan" :srcId "eoPNHZ1PSV{MJBwehL^Z"}} {id :id :as resp} (document-plan/add-document-plan dp user-group/DUMMY-USER-GROUP-ID)] (is (= #{:updatedAt :uid :name :createdAt :id :documentPlan :updateCount} (set (keys resp)))) (is (string? 
id)) (let [{doc-plan :documentPlan} (document-plan/get-document-plan id)] (is (= {:segments [{:children [{:conceptId "author" :dictionaryItem {:itemId "VB-author" :name "author" :srcId "jirq-V{WgCd+u[sdVWpd" :type "Dictionary-item"} :roles [{:children [{:name "authors" :srcId "uakxT`=!W@8/xV#^orHk" :type "Cell"}] :name "agent"} {:children [{:name "title" :srcId "X_CwogT+.Z)N@;Mhz.j/" :type "Cell"}] :name "co-agent"} {:children [nil] :name "theme"}] :srcId "W}lSg%-S(rQ*nmVp3fFV" :type "AMR"}] :srcId "}0Ci`hF%i?izegwAT[@J" :type "Segment"}] :srcId "eoPNHZ1PSV{MJBwehL^Z" :type "Document-plan"} doc-plan)))))) (deftest ^:integration adjective (let [dp {:uid "03" :name "adjective-phrase" :documentPlan {:segments [{:children [{:child {:name "title" :type "Cell" :srcId "k1*3(#7IWxHal=%)AdyQ"} :name "good" :type "Dictionary-item-modifier" :srcId "hy-Io!DlnURxCO!v3`^[" :itemId "NN-good"}] :type "Segment" :srcId "ujW*X(khAvxZNh!jF8c8"}] :type "Document-plan" :srcId "xlp%{tSm4kq9Y?|jz(7e"}} {id :id :as resp} (document-plan/add-document-plan dp user-group/DUMMY-USER-GROUP-ID)] (is (= #{:updatedAt :uid :name :createdAt :id :documentPlan :updateCount} (set (keys resp)))) (is (string? id)) (let [{doc-plan :documentPlan} (document-plan/get-document-plan id)] (is (= {:segments [{:children [{:child {:name "title" :srcId "k1*3(#7IWxHal=%)AdyQ" :type "Cell"} :itemId "NN-good" :name "good" :srcId "hy-Io!DlnURxCO!v3`^[" :type "Dictionary-item-modifier"}] :srcId "ujW*X(khAvxZNh!jF8c8" :type "Segment"}] :srcId "xlp%{tSm4kq9Y?|jz(7e" :type "Document-plan"} doc-plan)))))
[ { "context": "ver client-get]]))\n\n(def auth-params {:secret \"test secret\"\n :providers {:google {:client-i", "end": 481, "score": 0.9348921775817871, "start": 470, "tag": "KEY", "value": "test secret" }, { "context": " :client-secret \"6789\"}\n :dev {:automat", "end": 604, "score": 0.9977418184280396, "start": 600, "tag": "KEY", "value": "6789" }, { "context": " (let [{:keys [status body]} (client-get \"http://127.0.0.1:8981/open\")]\n (is (= status 200))\n ", "end": 2072, "score": 0.9576765894889832, "start": 2063, "tag": "IP_ADDRESS", "value": "127.0.0.1" }, { "context": " (let [{:keys [status body]} (client-get \"http://127.0.0.1:8981/private\")]\n (is (= status 401))\n ", "end": 2227, "score": 0.7281031012535095, "start": 2218, "tag": "IP_ADDRESS", "value": "127.0.0.1" }, { "context": "tatus headers body] :as resp} (client-get \"http://127.0.0.1:8981/auth/dev/login\")]\n (is (= status 20", "end": 2406, "score": 0.8214595317840576, "start": 2397, "tag": "IP_ADDRESS", "value": "127.0.0.1" }, { "context": "_40bd001563085fc35165329ea1ff5c5ecbdbbeef\" :name \"dev-user\" :picture \"http://127.0.0.1:8084/avatar?user=dev-", "end": 2549, "score": 0.9976778030395508, "start": 2541, "tag": "USERNAME", "value": "dev-user" }, { "context": "ff5c5ecbdbbeef\" :name \"dev-user\" :picture \"http://127.0.0.1:8084/avatar?user=dev-user\"}))\n (is (= 2 ", "end": 2577, "score": 0.9532431364059448, "start": 2568, "tag": "IP_ADDRESS", "value": "127.0.0.1" }, { "context": "user\" :picture \"http://127.0.0.1:8084/avatar?user=dev-user\"}))\n (is (= 2 (count (get headers \"set-c", "end": 2603, "score": 0.975270688533783, "start": 2595, "tag": "USERNAME", "value": "dev-user" }, { "context": "eef\"\n :name \"dev-user\"\n :picture \"htt", "end": 3154, "score": 0.9983277320861816, "start": 3146, "tag": "USERNAME", "value": "dev-user" }, { "context": " :picture \"http://127.0.0.1:8084/avatar?user=dev-user\"\n :custom-param \"cus", "end": 3243, "score": 0.9365741610527039, "start": 3235, 
"tag": "USERNAME", "value": "dev-user" }, { "context": "_40bd001563085fc35165329ea1ff5c5ecbdbbeef\" :name \"dev-user\" :picture \"http://127.0.0.1:8084/avatar?user=dev-", "end": 4092, "score": 0.8008736968040466, "start": 4084, "tag": "USERNAME", "value": "dev-user" }, { "context": "user\" :picture \"http://127.0.0.1:8084/avatar?user=dev-user\"})))\n\n (let [{:keys [status body]} (client", "end": 4146, "score": 0.8391867280006409, "start": 4138, "tag": "USERNAME", "value": "dev-user" }, { "context": "_40bd001563085fc35165329ea1ff5c5ecbdbbeef\" :name \"dev-user\" :picture \"http://127.0.0.1:8084/avatar?user=dev-", "end": 4356, "score": 0.7318710684776306, "start": 4348, "tag": "USERNAME", "value": "dev-user" }, { "context": "user\" :picture \"http://127.0.0.1:8084/avatar?user=dev-user\"})))))))\n\n(deftest ^:integration logout-test\n (w", "end": 4410, "score": 0.880520761013031, "start": 4402, "tag": "USERNAME", "value": "dev-user" }, { "context": "_40bd001563085fc35165329ea1ff5c5ecbdbbeef\" :name \"dev-user\" :picture \"http://127.0.0.1:8084/avatar?user=dev-", "end": 4748, "score": 0.7909998893737793, "start": 4740, "tag": "USERNAME", "value": "dev-user" }, { "context": "user\" :picture \"http://127.0.0.1:8084/avatar?user=dev-user\"})))\n\n (let [{:keys [status body]} (client", "end": 4802, "score": 0.969487726688385, "start": 4794, "tag": "USERNAME", "value": "dev-user" }, { "context": " (let [{:keys [status body]} (client-get \"http://127.0.0.1:8981/auth/prov/login\")]\n (is (= status 4", "end": 5310, "score": 0.99543696641922, "start": 5301, "tag": "IP_ADDRESS", "value": "127.0.0.1" }, { "context": " (let [{:keys [status body]} (client-get \"http://127.0.0.1:8981/auth/test/test\")]\n (is (= status 40", "end": 5660, "score": 0.9680520296096802, "start": 5651, "tag": "IP_ADDRESS", "value": "127.0.0.1" }, { "context": " (let [{:keys [status body]} (client-get \"http://127.0.0.1:8981/auth/logout\")]\n (is (= status 400", "end": 6084, "score": 0.9874338507652283, 
"start": 6075, "tag": "IP_ADDRESS", "value": "127.0.0.1" } ]
test/clj_pkg/auth_test.clj
clj-pkg/auth
1
(ns clj-pkg.auth-test (:require [clojure.test :refer [deftest is]] [clj-pkg.auth :as auth] [ring.middleware.cookies :as cookies] [ring.middleware.json :as json-mw] [clojure.string :as string] [reitit.ring :as ring] [reitit.ring.middleware.parameters :as parameters] [clj-pkg.utils :refer [with-http-client with-api-server with-dev-server client-get]])) (def auth-params {:secret "test secret" :providers {:google {:client-id "12345" :client-secret "6789"} :dev {:automatic? true}}}) (def cookies-middleware {:name ::cookies :wrap cookies/wrap-cookies}) (def json-response-middleware {:name ::json-response :wrap json-mw/wrap-json-response}) (def route-middleware {:data {:middleware [parameters/parameters-middleware json-response-middleware cookies-middleware]}}) (def handler (ring/ring-handler (ring/router [["/auth/*" (auth/handlers auth-params)] ["/open" {:handler (fn [_] {:status 200 :body {:data "open data"}})}] ["/private" {:middleware [(auth/middleware auth-params) (auth/update-user-middleware (fn [user] (assoc user :custom-param "custom-param-value")))] :handler (fn [req] {:status 200 :body {:data "private data" :user (:user req)}})}]] route-middleware))) (deftest ^:integration protected-test (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/open")] (is (= status 200)) (is (= body {:data "open data"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/private")] (is (= status 401)) (is (= body {:error "Unauthorized"}))) (let [{:keys [status headers body] :as resp} (client-get "http://127.0.0.1:8981/auth/dev/login")] (is (= status 200)) (is (= body {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user"})) (is (= 2 (count (get headers "set-cookie")))) (is (true? (-> (get headers "set-cookie") first (string/starts-with? "JWT=")))) (is (true? (-> (get headers "set-cookie") second (string/starts-with? 
"XSRF-TOKEN="))))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/private")] (is (= status 200)) (is (= body {:data "private data" :user {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user" :custom-param "custom-param-value"}}))))))) (deftest ^:integration list-test (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/list")] (is (= status 200)) (is (= body ["google" "dev"]))))))) (deftest ^:integration user-info-test (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/user")] (is (= status 401)) (is (= body {:error "Unauthorized"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/dev/login")] (is (= status 200)) (is (= body {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/user")] (is (= status 200)) (is (= body {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user"}))))))) (deftest ^:integration logout-test (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/dev/login")] (is (= status 200)) (is (= body {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/logout")] (is (= status 200)) (is (= body nil))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/private")] (is (= status 401)) (is (= body {:error "Unauthorized"}))))))) (deftest ^:integration bad-request-test (with-http-client (with-dev-server auth-params (with-api-server handler (let 
[{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/prov/login")] (is (= status 400)) (is (= body {:error "provider not supported"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/")] (is (= status 400)) (is (= body {:error "provider not supported"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/test/test")] (is (= status 400)) (is (= body {:error "provider not supported"}))))))) (deftest ^:integration logout-no-providers-test (let [handler (ring/ring-handler (ring/router [["/auth/*" (auth/handlers {})]] route-middleware))] (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/logout")] (is (= status 400)) (is (= body {:error "providers not defined"}))))))))
35749
(ns clj-pkg.auth-test (:require [clojure.test :refer [deftest is]] [clj-pkg.auth :as auth] [ring.middleware.cookies :as cookies] [ring.middleware.json :as json-mw] [clojure.string :as string] [reitit.ring :as ring] [reitit.ring.middleware.parameters :as parameters] [clj-pkg.utils :refer [with-http-client with-api-server with-dev-server client-get]])) (def auth-params {:secret "<KEY>" :providers {:google {:client-id "12345" :client-secret "<KEY>"} :dev {:automatic? true}}}) (def cookies-middleware {:name ::cookies :wrap cookies/wrap-cookies}) (def json-response-middleware {:name ::json-response :wrap json-mw/wrap-json-response}) (def route-middleware {:data {:middleware [parameters/parameters-middleware json-response-middleware cookies-middleware]}}) (def handler (ring/ring-handler (ring/router [["/auth/*" (auth/handlers auth-params)] ["/open" {:handler (fn [_] {:status 200 :body {:data "open data"}})}] ["/private" {:middleware [(auth/middleware auth-params) (auth/update-user-middleware (fn [user] (assoc user :custom-param "custom-param-value")))] :handler (fn [req] {:status 200 :body {:data "private data" :user (:user req)}})}]] route-middleware))) (deftest ^:integration protected-test (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/open")] (is (= status 200)) (is (= body {:data "open data"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/private")] (is (= status 401)) (is (= body {:error "Unauthorized"}))) (let [{:keys [status headers body] :as resp} (client-get "http://127.0.0.1:8981/auth/dev/login")] (is (= status 200)) (is (= body {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user"})) (is (= 2 (count (get headers "set-cookie")))) (is (true? (-> (get headers "set-cookie") first (string/starts-with? "JWT=")))) (is (true? (-> (get headers "set-cookie") second (string/starts-with? 
"XSRF-TOKEN="))))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/private")] (is (= status 200)) (is (= body {:data "private data" :user {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user" :custom-param "custom-param-value"}}))))))) (deftest ^:integration list-test (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/list")] (is (= status 200)) (is (= body ["google" "dev"]))))))) (deftest ^:integration user-info-test (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/user")] (is (= status 401)) (is (= body {:error "Unauthorized"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/dev/login")] (is (= status 200)) (is (= body {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/user")] (is (= status 200)) (is (= body {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user"}))))))) (deftest ^:integration logout-test (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/dev/login")] (is (= status 200)) (is (= body {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/logout")] (is (= status 200)) (is (= body nil))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/private")] (is (= status 401)) (is (= body {:error "Unauthorized"}))))))) (deftest ^:integration bad-request-test (with-http-client (with-dev-server auth-params (with-api-server handler (let 
[{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/prov/login")] (is (= status 400)) (is (= body {:error "provider not supported"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/")] (is (= status 400)) (is (= body {:error "provider not supported"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/test/test")] (is (= status 400)) (is (= body {:error "provider not supported"}))))))) (deftest ^:integration logout-no-providers-test (let [handler (ring/ring-handler (ring/router [["/auth/*" (auth/handlers {})]] route-middleware))] (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/logout")] (is (= status 400)) (is (= body {:error "providers not defined"}))))))))
true
(ns clj-pkg.auth-test (:require [clojure.test :refer [deftest is]] [clj-pkg.auth :as auth] [ring.middleware.cookies :as cookies] [ring.middleware.json :as json-mw] [clojure.string :as string] [reitit.ring :as ring] [reitit.ring.middleware.parameters :as parameters] [clj-pkg.utils :refer [with-http-client with-api-server with-dev-server client-get]])) (def auth-params {:secret "PI:KEY:<KEY>END_PI" :providers {:google {:client-id "12345" :client-secret "PI:KEY:<KEY>END_PI"} :dev {:automatic? true}}}) (def cookies-middleware {:name ::cookies :wrap cookies/wrap-cookies}) (def json-response-middleware {:name ::json-response :wrap json-mw/wrap-json-response}) (def route-middleware {:data {:middleware [parameters/parameters-middleware json-response-middleware cookies-middleware]}}) (def handler (ring/ring-handler (ring/router [["/auth/*" (auth/handlers auth-params)] ["/open" {:handler (fn [_] {:status 200 :body {:data "open data"}})}] ["/private" {:middleware [(auth/middleware auth-params) (auth/update-user-middleware (fn [user] (assoc user :custom-param "custom-param-value")))] :handler (fn [req] {:status 200 :body {:data "private data" :user (:user req)}})}]] route-middleware))) (deftest ^:integration protected-test (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/open")] (is (= status 200)) (is (= body {:data "open data"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/private")] (is (= status 401)) (is (= body {:error "Unauthorized"}))) (let [{:keys [status headers body] :as resp} (client-get "http://127.0.0.1:8981/auth/dev/login")] (is (= status 200)) (is (= body {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user"})) (is (= 2 (count (get headers "set-cookie")))) (is (true? (-> (get headers "set-cookie") first (string/starts-with? "JWT=")))) (is (true? 
(-> (get headers "set-cookie") second (string/starts-with? "XSRF-TOKEN="))))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/private")] (is (= status 200)) (is (= body {:data "private data" :user {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user" :custom-param "custom-param-value"}}))))))) (deftest ^:integration list-test (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/list")] (is (= status 200)) (is (= body ["google" "dev"]))))))) (deftest ^:integration user-info-test (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/user")] (is (= status 401)) (is (= body {:error "Unauthorized"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/dev/login")] (is (= status 200)) (is (= body {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/user")] (is (= status 200)) (is (= body {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user"}))))))) (deftest ^:integration logout-test (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/dev/login")] (is (= status 200)) (is (= body {:id "dev_40bd001563085fc35165329ea1ff5c5ecbdbbeef" :name "dev-user" :picture "http://127.0.0.1:8084/avatar?user=dev-user"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/logout")] (is (= status 200)) (is (= body nil))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/private")] (is (= status 401)) (is (= body {:error "Unauthorized"}))))))) (deftest ^:integration bad-request-test (with-http-client 
(with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/prov/login")] (is (= status 400)) (is (= body {:error "provider not supported"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/")] (is (= status 400)) (is (= body {:error "provider not supported"}))) (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/test/test")] (is (= status 400)) (is (= body {:error "provider not supported"}))))))) (deftest ^:integration logout-no-providers-test (let [handler (ring/ring-handler (ring/router [["/auth/*" (auth/handlers {})]] route-middleware))] (with-http-client (with-dev-server auth-params (with-api-server handler (let [{:keys [status body]} (client-get "http://127.0.0.1:8981/auth/logout")] (is (= status 400)) (is (= body {:error "providers not defined"}))))))))
[ { "context": "enerate HTML documentation for Clojure libs\n\n;; by Craig Andera, http://pluralsight.com/craig, candera@wangdera.c", "end": 87, "score": 0.999885082244873, "start": 75, "tag": "NAME", "value": "Craig Andera" }, { "context": " libs\n\n;; by Craig Andera, http://pluralsight.com/craig, candera@wangdera.com\n;; February 13th, 2009\n\n;; ", "end": 117, "score": 0.999073326587677, "start": 112, "tag": "USERNAME", "value": "craig" }, { "context": "\n;; by Craig Andera, http://pluralsight.com/craig, candera@wangdera.com\n;; February 13th, 2009\n\n;; Copyright (c) Craig An", "end": 139, "score": 0.9999284744262695, "start": 119, "tag": "EMAIL", "value": "candera@wangdera.com" }, { "context": "gdera.com\n;; February 13th, 2009\n\n;; Copyright (c) Craig Andera, 2009. All rights reserved. The use\n;; and distr", "end": 193, "score": 0.9998842477798462, "start": 181, "tag": "NAME", "value": "Craig Andera" }, { "context": "Remove the whojure dependency\n\n(ns \n #^{:author \"Craig Andera\",\n :doc \"Generates a single HTML page that co", "end": 1851, "score": 0.9998779296875, "start": 1839, "tag": "NAME", "value": "Craig Andera" } ]
ThirdParty/clojure-contrib-1.1.0/src/clojure/contrib/gen_html_docs.clj
allertonm/Couverjure
3
;;; gen-html-docs.clj: Generate HTML documentation for Clojure libs ;; by Craig Andera, http://pluralsight.com/craig, candera@wangdera.com ;; February 13th, 2009 ;; Copyright (c) Craig Andera, 2009. All rights reserved. The use ;; and distribution terms for this software are covered by the Eclipse ;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) ;; which can be found in the file epl-v10.html at the root of this ;; distribution. By using this software in any fashion, you are ;; agreeing to be bound by the terms of this license. You must not ;; remove this notice, or any other, from this software. ;; Generates a single HTML page that contains the documentation for ;; one or more Clojure libraries. See the comments section at the end ;; of this file for usage. ;; TODO ;; ;; * Make symbols in the source hyperlinks to the appropriate section ;; of the documentation. ;; * Investigate issue with miglayout mentioned here: ;; http://groups.google.com/group/clojure/browse_thread/thread/5a0c4395e44f5a79/3ae483100366bd3d?lnk=gst&q=documentation+browser#3ae483100366bd3d ;; ;; DONE ;; ;; * Move to clojure.contrib ;; * Change namespace ;; * Change license as appropriate ;; * Double-check doc strings ;; * Remove doc strings from source code ;; * Add collapse/expand functionality for all namespaces ;; * Add collapse/expand functionality for each namespace ;; * See if converting to use clojure.contrib.prxml is possible ;; * Figure out why the source doesn't show up for most things ;; * Add collapsible source ;; * Add links at the top to jump to each namespace ;; * Add object type (var, function, whatever) ;; * Add argument lists for functions ;; * Add links at the top of each namespace to jump to members ;; * Add license statement ;; * Remove the whojure dependency (ns #^{:author "Craig Andera", :doc "Generates a single HTML page that contains the documentation for one or more Clojure libraries."} clojure.contrib.gen-html-docs (:require 
[clojure.contrib.duck-streams :as duck-streams]) (:use [clojure.contrib seq-utils str-utils repl-utils def prxml]) (:import [java.lang Exception] [java.util.regex Pattern])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Doc generation constants ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (def *script* " // <![CDATA[ function getElem(id) { if( document.getElementById ) { return document.getElementById( id ) } else if ( document.all ) { return eval( 'document.all.' + id ) } else return false; } function setDisplayStyle(id,displayStyle) { var elem = getElem (id) if (elem) { elem.style.display = displayStyle } } function setLinkToggleText (id, text) { var elem = getElem (id) if (elem) { elem.innerHTML = text } } function collapse(id) { setDisplayStyle (id, 'none') } function expand (id) { setDisplayStyle (id, 'block') } function toggleSource( id ) { toggle(id, 'linkto-' + id, 'Hide Source', 'Show Source') } function toggle(targetid, linkid, textWhenOpen, textWhenClosed) { var elem = getElem (targetid) var link = getElem (linkid) if (elem && link) { var isOpen = false if (elem.style.display == '') { isOpen = link.innerHTML == textWhenOpen } else if( elem.style.display == 'block' ) { isOpen = true } if (isOpen) { elem.style.display = 'none' link.innerHTML = textWhenClosed } else { elem.style.display = 'block' link.innerHTML = textWhenOpen } } } //]]> ") (def *style* " .library { padding: 0.5em 0 0 0 } .all-libs-toggle,.library-contents-toggle { font-size: small; } .all-libs-toggle a,.library-contents-toggle a { color: white } .library-member-doc-whitespace { white-space: pre } .library-member-source-toggle { font-size: small; margin-top: 0.5em } .library-member-source { display: none; border-left: solid lightblue } .library-member-docs { font-family:monospace } .library-member-arglists { font-family: monospace } .library-member-type { font-weight: bold; font-size: small; font-style: italic; color: darkred } .lib-links { margin: 0 0 1em 0 } .lib-link-header { color: white; background: 
darkgreen; width: 100% } .library-name { color: white; background: darkblue; width: 100% } .missing-library { color: darkred; margin: 0 0 1em 0 } .library-members { list-style: none } .library-member-name { font-weight: bold; font-size: 105% }") (defn- extract-documentation "Pulls the documentation for a var v out and turns it into HTML" [v] (if-let [docs (:doc (meta v))] (map (fn [l] [:div {:class "library-member-doc-line"} (if (= 0 (count l)) [:span {:class "library-member-doc-whitespace"} " "] ; We need something here to make the blank line show up l)]) (re-split #"\n" docs)) "")) (defn- member-type "Figures out for a var x whether it's a macro, function, var or multifunction" [x] (try (let [dx (deref x)] (cond (:macro (meta x)) :macro (fn? dx) :fn (= clojure.lang.MultiFn (:tag (meta x))) :multi true :var)) (catch Exception e :unknown))) (defn- anchor-for-member "Returns a suitable HTML anchor name given a library id and a member id" [libid memberid] (str "member-" libid "-" memberid)) (defn- id-for-member-source "Returns a suitable HTML id for a source listing given a library and a member" [libid memberid] (str "membersource-" libid "-" memberid)) (defn- id-for-member-source-link "Returns a suitable HTML id for a link to a source listing given a library and a member" [libid memberid] (str "linkto-membersource-" libid "-" memberid)) (defn- symbol-for "Given a namespace object ns and a namespaceless symbol memberid naming a member of that namespace, returns a namespaced symbol that identifies that member." [ns memberid] (symbol (name (ns-name ns)) (name memberid))) (defn- elide-to-one-line "Elides a string down to one line." [s] (re-sub #"(\n.*)+" "..." s)) (defn- elide-string "Returns a string that is at most the first limit characters of s" [s limit] (if (< (- limit 3) (count s)) (str (subs s 0 (- limit 3)) "...") s)) (defn- doc-elided-src "Returns the src with the docs elided." 
[docs src] (re-sub (re-pattern (str "\"" (Pattern/quote docs) "\"")) (str "\"" (elide-to-one-line docs) ;; (elide-string docs 10) ;; "..." "\"") src)) (defn- format-source [libid memberid v] (try (let [docs (:doc (meta v)) src (if-let [ns (find-ns libid)] (get-source (symbol-for ns memberid)))] (if (and src docs) (doc-elided-src docs src) src)) (catch Exception ex nil))) (defn- generate-lib-member [libid [n v]] [:li {:class "library-member"} [:a {:name (anchor-for-member libid n)}] [:dl {:class "library-member-table"} [:dt {:class "library-member-name"} (str n)] [:dd [:div {:class "library-member-info"} [:span {:class "library-member-type"} (name (member-type v))] " " [:span {:class "library-member-arglists"} (str (:arglists (meta v)))]] (into [:div {:class "library-member-docs"}] (extract-documentation v)) (let [member-source-id (id-for-member-source libid n) member-source-link-id (id-for-member-source-link libid n)] (if-let [member-source (format-source libid n v)] [:div {:class "library-member-source-section"} [:div {:class "library-member-source-toggle"} "[ " [:a {:href (format "javascript:toggleSource('%s')" member-source-id) :id member-source-link-id} "Show Source"] " ]"] [:div {:class "library-member-source" :id member-source-id} [:pre member-source]]]))]]]) (defn- anchor-for-library "Given a symbol id identifying a namespace, returns an identifier suitable for use as the name attribute of an HTML anchor tag." [id] (str "library-" id)) (defn- generate-lib-member-link "Emits a hyperlink to a member of a namespace given libid (a symbol identifying the namespace) and the vector [n v], where n is the symbol naming the member in question and v is the var pointing to the member." [libid [n v]] [:a {:class "lib-member-link" :href (str "#" (anchor-for-member libid n))} (name n)]) (defn- anchor-for-library-contents "Returns an HTML ID that identifies the element that holds the documentation contents for the specified library." 
[lib] (str "library-contents-" lib)) (defn- anchor-for-library-contents-toggle "Returns an HTML ID that identifies the element that toggles the visibility of the library contents." [lib] (str "library-contents-toggle-" lib)) (defn- generate-lib-doc "Emits the HTML that documents the namespace identified by the symbol lib." [lib] [:div {:class "library"} [:a {:name (anchor-for-library lib)}] [:div {:class "library-name"} [:span {:class "library-contents-toggle"} "[ " [:a {:id (anchor-for-library-contents-toggle lib) :href (format "javascript:toggle('%s', '%s', '-', '+')" (anchor-for-library-contents lib) (anchor-for-library-contents-toggle lib))} "-"] " ] "] (name lib)] (let [ns (find-ns lib)] (if ns (let [lib-members (sort (ns-publics ns))] [:a {:name (anchor-for-library lib)}] [:div {:class "library-contents" :id (anchor-for-library-contents lib)} (into [:div {:class "library-member-links"}] (interpose " " (map #(generate-lib-member-link lib %) lib-members))) (into [:ol {:class "library-members"}] (map #(generate-lib-member lib %) lib-members))]) [:div {:class "missing-library library-contents" :id (anchor-for-library-contents lib)} "Could not load library"]))]) (defn- load-lib "Calls require on the library identified by lib, eating any exceptions." [lib] (try (require lib) (catch java.lang.Exception x nil))) (defn- generate-lib-link "Generates a hyperlink to the documentation for a namespace given lib, a symbol identifying that namespace." [lib] (let [ns (find-ns lib)] (if ns [:a {:class "lib-link" :href (str "#" (anchor-for-library lib))} (str (ns-name ns))]))) (defn- generate-lib-links "Generates the list of hyperlinks to each namespace, given libs, a vector of symbols naming namespaces." 
[libs] (into [:div {:class "lib-links"} [:div {:class "lib-link-header"} "Namespaces" [:span {:class "all-libs-toggle"} " [ " [:a {:href "javascript:expandAllNamespaces()"} "Expand All"] " ] [ " [:a {:href "javascript:collapseAllNamespaces()"} "Collapse All"] " ]"]]] (interpose " " (map generate-lib-link libs)))) (defn generate-toggle-namespace-script [action toggle-text lib] (str (format "%s('%s');\n" action (anchor-for-library-contents lib)) (format "setLinkToggleText('%s', '%s');\n" (anchor-for-library-contents-toggle lib) toggle-text))) (defn generate-all-namespaces-action-script [action toggle-text libs] (str (format "function %sAllNamespaces()" action) \newline "{" \newline (reduce str (map #(generate-toggle-namespace-script action toggle-text %) libs)) \newline "}")) (defn generate-documentation "Returns a string which is the HTML documentation for the libraries named by libs. Libs is a vector of symbols identifying Clojure libraries." [libs] (dorun (map load-lib libs)) (let [writer (new java.io.StringWriter)] (binding [*out* writer] (prxml [:html {:xmlns "http://www.w3.org/1999/xhtml"} [:head [:title "Clojure documentation browser"] [:style *style*] [:script {:language "JavaScript" :type "text/javascript"} [:raw! *script*]] [:script {:language "JavaScript" :type "text/javascript"} [:raw! "// <![CDATA[!" \newline] (generate-all-namespaces-action-script "expand" "-" libs) (generate-all-namespaces-action-script "collapse" "+" libs) [:raw! \newline "// ]]>"]]] (let [lib-vec (sort libs)] (into [:body (generate-lib-links lib-vec)] (map generate-lib-doc lib-vec)))])) (.toString writer))) (defn generate-documentation-to-file "Calls generate-documentation on the libraries named by libs and emits the generated HTML to the path named by path." 
[path libs] (duck-streams/spit path (generate-documentation libs))) (comment (generate-documentation-to-file "C:/TEMP/CLJ-DOCS.HTML" ['clojure.contrib.accumulators]) (defn gen-all-docs [] (generate-documentation-to-file "C:/temp/clj-libs.html" [ 'clojure.set 'clojure.main 'clojure.core 'clojure.zip 'clojure.xml 'clojure.contrib.accumulators 'clojure.contrib.apply-macro 'clojure.contrib.auto-agent 'clojure.contrib.combinatorics 'clojure.contrib.command-line 'clojure.contrib.complex-numbers 'clojure.contrib.cond 'clojure.contrib.def 'clojure.contrib.duck-streams 'clojure.contrib.enum 'clojure.contrib.error-kit 'clojure.contrib.except 'clojure.contrib.fcase 'clojure.contrib.generic 'clojure.contrib.generic.arithmetic 'clojure.contrib.generic.collection 'clojure.contrib.generic.comparison 'clojure.contrib.generic.functor 'clojure.contrib.generic.math-functions 'clojure.contrib.import-static 'clojure.contrib.javadoc 'clojure.contrib.javalog 'clojure.contrib.lazy-seqs 'clojure.contrib.lazy-xml 'clojure.contrib.macro-utils 'clojure.contrib.macros 'clojure.contrib.math 'clojure.contrib.miglayout 'clojure.contrib.mmap 'clojure.contrib.monads 'clojure.contrib.ns-utils 'clojure.contrib.prxml 'clojure.contrib.repl-ln 'clojure.contrib.repl-utils 'clojure.contrib.seq-utils 'clojure.contrib.server-socket 'clojure.contrib.shell-out 'clojure.contrib.sql 'clojure.contrib.stream-utils 'clojure.contrib.str-utils 'clojure.contrib.test-contrib 'clojure.contrib.trace 'clojure.contrib.types 'clojure.contrib.zip-filter 'clojure.contrib.javadoc.browse 'clojure.contrib.json.read 'clojure.contrib.json.write 'clojure.contrib.lazy-xml.with-pull 'clojure.contrib.miglayout.internal 'clojure.contrib.probabilities.finite-distributions 'clojure.contrib.probabilities.monte-carlo 'clojure.contrib.probabilities.random-numbers 'clojure.contrib.sql.internal 'clojure.contrib.test-clojure.evaluation 'clojure.contrib.test-clojure.for 'clojure.contrib.test-clojure.numbers 
'clojure.contrib.test-clojure.printer 'clojure.contrib.test-clojure.reader 'clojure.contrib.test-clojure.sequences 'clojure.contrib.test-contrib.shell-out 'clojure.contrib.test-contrib.str-utils 'clojure.contrib.zip-filter.xml ])) )
24663
;;; gen-html-docs.clj: Generate HTML documentation for Clojure libs ;; by <NAME>, http://pluralsight.com/craig, <EMAIL> ;; February 13th, 2009 ;; Copyright (c) <NAME>, 2009. All rights reserved. The use ;; and distribution terms for this software are covered by the Eclipse ;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) ;; which can be found in the file epl-v10.html at the root of this ;; distribution. By using this software in any fashion, you are ;; agreeing to be bound by the terms of this license. You must not ;; remove this notice, or any other, from this software. ;; Generates a single HTML page that contains the documentation for ;; one or more Clojure libraries. See the comments section at the end ;; of this file for usage. ;; TODO ;; ;; * Make symbols in the source hyperlinks to the appropriate section ;; of the documentation. ;; * Investigate issue with miglayout mentioned here: ;; http://groups.google.com/group/clojure/browse_thread/thread/5a0c4395e44f5a79/3ae483100366bd3d?lnk=gst&q=documentation+browser#3ae483100366bd3d ;; ;; DONE ;; ;; * Move to clojure.contrib ;; * Change namespace ;; * Change license as appropriate ;; * Double-check doc strings ;; * Remove doc strings from source code ;; * Add collapse/expand functionality for all namespaces ;; * Add collapse/expand functionality for each namespace ;; * See if converting to use clojure.contrib.prxml is possible ;; * Figure out why the source doesn't show up for most things ;; * Add collapsible source ;; * Add links at the top to jump to each namespace ;; * Add object type (var, function, whatever) ;; * Add argument lists for functions ;; * Add links at the top of each namespace to jump to members ;; * Add license statement ;; * Remove the whojure dependency (ns #^{:author "<NAME>", :doc "Generates a single HTML page that contains the documentation for one or more Clojure libraries."} clojure.contrib.gen-html-docs (:require [clojure.contrib.duck-streams :as duck-streams]) (:use 
[clojure.contrib seq-utils str-utils repl-utils def prxml]) (:import [java.lang Exception] [java.util.regex Pattern])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Doc generation constants ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (def *script* " // <![CDATA[ function getElem(id) { if( document.getElementById ) { return document.getElementById( id ) } else if ( document.all ) { return eval( 'document.all.' + id ) } else return false; } function setDisplayStyle(id,displayStyle) { var elem = getElem (id) if (elem) { elem.style.display = displayStyle } } function setLinkToggleText (id, text) { var elem = getElem (id) if (elem) { elem.innerHTML = text } } function collapse(id) { setDisplayStyle (id, 'none') } function expand (id) { setDisplayStyle (id, 'block') } function toggleSource( id ) { toggle(id, 'linkto-' + id, 'Hide Source', 'Show Source') } function toggle(targetid, linkid, textWhenOpen, textWhenClosed) { var elem = getElem (targetid) var link = getElem (linkid) if (elem && link) { var isOpen = false if (elem.style.display == '') { isOpen = link.innerHTML == textWhenOpen } else if( elem.style.display == 'block' ) { isOpen = true } if (isOpen) { elem.style.display = 'none' link.innerHTML = textWhenClosed } else { elem.style.display = 'block' link.innerHTML = textWhenOpen } } } //]]> ") (def *style* " .library { padding: 0.5em 0 0 0 } .all-libs-toggle,.library-contents-toggle { font-size: small; } .all-libs-toggle a,.library-contents-toggle a { color: white } .library-member-doc-whitespace { white-space: pre } .library-member-source-toggle { font-size: small; margin-top: 0.5em } .library-member-source { display: none; border-left: solid lightblue } .library-member-docs { font-family:monospace } .library-member-arglists { font-family: monospace } .library-member-type { font-weight: bold; font-size: small; font-style: italic; color: darkred } .lib-links { margin: 0 0 1em 0 } .lib-link-header { color: white; background: darkgreen; width: 100% } .library-name { color: white; 
background: darkblue; width: 100% } .missing-library { color: darkred; margin: 0 0 1em 0 } .library-members { list-style: none } .library-member-name { font-weight: bold; font-size: 105% }") (defn- extract-documentation "Pulls the documentation for a var v out and turns it into HTML" [v] (if-let [docs (:doc (meta v))] (map (fn [l] [:div {:class "library-member-doc-line"} (if (= 0 (count l)) [:span {:class "library-member-doc-whitespace"} " "] ; We need something here to make the blank line show up l)]) (re-split #"\n" docs)) "")) (defn- member-type "Figures out for a var x whether it's a macro, function, var or multifunction" [x] (try (let [dx (deref x)] (cond (:macro (meta x)) :macro (fn? dx) :fn (= clojure.lang.MultiFn (:tag (meta x))) :multi true :var)) (catch Exception e :unknown))) (defn- anchor-for-member "Returns a suitable HTML anchor name given a library id and a member id" [libid memberid] (str "member-" libid "-" memberid)) (defn- id-for-member-source "Returns a suitable HTML id for a source listing given a library and a member" [libid memberid] (str "membersource-" libid "-" memberid)) (defn- id-for-member-source-link "Returns a suitable HTML id for a link to a source listing given a library and a member" [libid memberid] (str "linkto-membersource-" libid "-" memberid)) (defn- symbol-for "Given a namespace object ns and a namespaceless symbol memberid naming a member of that namespace, returns a namespaced symbol that identifies that member." [ns memberid] (symbol (name (ns-name ns)) (name memberid))) (defn- elide-to-one-line "Elides a string down to one line." [s] (re-sub #"(\n.*)+" "..." s)) (defn- elide-string "Returns a string that is at most the first limit characters of s" [s limit] (if (< (- limit 3) (count s)) (str (subs s 0 (- limit 3)) "...") s)) (defn- doc-elided-src "Returns the src with the docs elided." [docs src] (re-sub (re-pattern (str "\"" (Pattern/quote docs) "\"")) (str "\"" (elide-to-one-line docs) ;; (elide-string docs 10) ;; "..." 
"\"") src)) (defn- format-source [libid memberid v] (try (let [docs (:doc (meta v)) src (if-let [ns (find-ns libid)] (get-source (symbol-for ns memberid)))] (if (and src docs) (doc-elided-src docs src) src)) (catch Exception ex nil))) (defn- generate-lib-member [libid [n v]] [:li {:class "library-member"} [:a {:name (anchor-for-member libid n)}] [:dl {:class "library-member-table"} [:dt {:class "library-member-name"} (str n)] [:dd [:div {:class "library-member-info"} [:span {:class "library-member-type"} (name (member-type v))] " " [:span {:class "library-member-arglists"} (str (:arglists (meta v)))]] (into [:div {:class "library-member-docs"}] (extract-documentation v)) (let [member-source-id (id-for-member-source libid n) member-source-link-id (id-for-member-source-link libid n)] (if-let [member-source (format-source libid n v)] [:div {:class "library-member-source-section"} [:div {:class "library-member-source-toggle"} "[ " [:a {:href (format "javascript:toggleSource('%s')" member-source-id) :id member-source-link-id} "Show Source"] " ]"] [:div {:class "library-member-source" :id member-source-id} [:pre member-source]]]))]]]) (defn- anchor-for-library "Given a symbol id identifying a namespace, returns an identifier suitable for use as the name attribute of an HTML anchor tag." [id] (str "library-" id)) (defn- generate-lib-member-link "Emits a hyperlink to a member of a namespace given libid (a symbol identifying the namespace) and the vector [n v], where n is the symbol naming the member in question and v is the var pointing to the member." [libid [n v]] [:a {:class "lib-member-link" :href (str "#" (anchor-for-member libid n))} (name n)]) (defn- anchor-for-library-contents "Returns an HTML ID that identifies the element that holds the documentation contents for the specified library." 
[lib] (str "library-contents-" lib)) (defn- anchor-for-library-contents-toggle "Returns an HTML ID that identifies the element that toggles the visibility of the library contents." [lib] (str "library-contents-toggle-" lib)) (defn- generate-lib-doc "Emits the HTML that documents the namespace identified by the symbol lib." [lib] [:div {:class "library"} [:a {:name (anchor-for-library lib)}] [:div {:class "library-name"} [:span {:class "library-contents-toggle"} "[ " [:a {:id (anchor-for-library-contents-toggle lib) :href (format "javascript:toggle('%s', '%s', '-', '+')" (anchor-for-library-contents lib) (anchor-for-library-contents-toggle lib))} "-"] " ] "] (name lib)] (let [ns (find-ns lib)] (if ns (let [lib-members (sort (ns-publics ns))] [:a {:name (anchor-for-library lib)}] [:div {:class "library-contents" :id (anchor-for-library-contents lib)} (into [:div {:class "library-member-links"}] (interpose " " (map #(generate-lib-member-link lib %) lib-members))) (into [:ol {:class "library-members"}] (map #(generate-lib-member lib %) lib-members))]) [:div {:class "missing-library library-contents" :id (anchor-for-library-contents lib)} "Could not load library"]))]) (defn- load-lib "Calls require on the library identified by lib, eating any exceptions." [lib] (try (require lib) (catch java.lang.Exception x nil))) (defn- generate-lib-link "Generates a hyperlink to the documentation for a namespace given lib, a symbol identifying that namespace." [lib] (let [ns (find-ns lib)] (if ns [:a {:class "lib-link" :href (str "#" (anchor-for-library lib))} (str (ns-name ns))]))) (defn- generate-lib-links "Generates the list of hyperlinks to each namespace, given libs, a vector of symbols naming namespaces." 
[libs] (into [:div {:class "lib-links"} [:div {:class "lib-link-header"} "Namespaces" [:span {:class "all-libs-toggle"} " [ " [:a {:href "javascript:expandAllNamespaces()"} "Expand All"] " ] [ " [:a {:href "javascript:collapseAllNamespaces()"} "Collapse All"] " ]"]]] (interpose " " (map generate-lib-link libs)))) (defn generate-toggle-namespace-script [action toggle-text lib] (str (format "%s('%s');\n" action (anchor-for-library-contents lib)) (format "setLinkToggleText('%s', '%s');\n" (anchor-for-library-contents-toggle lib) toggle-text))) (defn generate-all-namespaces-action-script [action toggle-text libs] (str (format "function %sAllNamespaces()" action) \newline "{" \newline (reduce str (map #(generate-toggle-namespace-script action toggle-text %) libs)) \newline "}")) (defn generate-documentation "Returns a string which is the HTML documentation for the libraries named by libs. Libs is a vector of symbols identifying Clojure libraries." [libs] (dorun (map load-lib libs)) (let [writer (new java.io.StringWriter)] (binding [*out* writer] (prxml [:html {:xmlns "http://www.w3.org/1999/xhtml"} [:head [:title "Clojure documentation browser"] [:style *style*] [:script {:language "JavaScript" :type "text/javascript"} [:raw! *script*]] [:script {:language "JavaScript" :type "text/javascript"} [:raw! "// <![CDATA[!" \newline] (generate-all-namespaces-action-script "expand" "-" libs) (generate-all-namespaces-action-script "collapse" "+" libs) [:raw! \newline "// ]]>"]]] (let [lib-vec (sort libs)] (into [:body (generate-lib-links lib-vec)] (map generate-lib-doc lib-vec)))])) (.toString writer))) (defn generate-documentation-to-file "Calls generate-documentation on the libraries named by libs and emits the generated HTML to the path named by path." 
[path libs] (duck-streams/spit path (generate-documentation libs))) (comment (generate-documentation-to-file "C:/TEMP/CLJ-DOCS.HTML" ['clojure.contrib.accumulators]) (defn gen-all-docs [] (generate-documentation-to-file "C:/temp/clj-libs.html" [ 'clojure.set 'clojure.main 'clojure.core 'clojure.zip 'clojure.xml 'clojure.contrib.accumulators 'clojure.contrib.apply-macro 'clojure.contrib.auto-agent 'clojure.contrib.combinatorics 'clojure.contrib.command-line 'clojure.contrib.complex-numbers 'clojure.contrib.cond 'clojure.contrib.def 'clojure.contrib.duck-streams 'clojure.contrib.enum 'clojure.contrib.error-kit 'clojure.contrib.except 'clojure.contrib.fcase 'clojure.contrib.generic 'clojure.contrib.generic.arithmetic 'clojure.contrib.generic.collection 'clojure.contrib.generic.comparison 'clojure.contrib.generic.functor 'clojure.contrib.generic.math-functions 'clojure.contrib.import-static 'clojure.contrib.javadoc 'clojure.contrib.javalog 'clojure.contrib.lazy-seqs 'clojure.contrib.lazy-xml 'clojure.contrib.macro-utils 'clojure.contrib.macros 'clojure.contrib.math 'clojure.contrib.miglayout 'clojure.contrib.mmap 'clojure.contrib.monads 'clojure.contrib.ns-utils 'clojure.contrib.prxml 'clojure.contrib.repl-ln 'clojure.contrib.repl-utils 'clojure.contrib.seq-utils 'clojure.contrib.server-socket 'clojure.contrib.shell-out 'clojure.contrib.sql 'clojure.contrib.stream-utils 'clojure.contrib.str-utils 'clojure.contrib.test-contrib 'clojure.contrib.trace 'clojure.contrib.types 'clojure.contrib.zip-filter 'clojure.contrib.javadoc.browse 'clojure.contrib.json.read 'clojure.contrib.json.write 'clojure.contrib.lazy-xml.with-pull 'clojure.contrib.miglayout.internal 'clojure.contrib.probabilities.finite-distributions 'clojure.contrib.probabilities.monte-carlo 'clojure.contrib.probabilities.random-numbers 'clojure.contrib.sql.internal 'clojure.contrib.test-clojure.evaluation 'clojure.contrib.test-clojure.for 'clojure.contrib.test-clojure.numbers 
'clojure.contrib.test-clojure.printer 'clojure.contrib.test-clojure.reader 'clojure.contrib.test-clojure.sequences 'clojure.contrib.test-contrib.shell-out 'clojure.contrib.test-contrib.str-utils 'clojure.contrib.zip-filter.xml ])) )
true
;;; gen-html-docs.clj: Generate HTML documentation for Clojure libs ;; by PI:NAME:<NAME>END_PI, http://pluralsight.com/craig, PI:EMAIL:<EMAIL>END_PI ;; February 13th, 2009 ;; Copyright (c) PI:NAME:<NAME>END_PI, 2009. All rights reserved. The use ;; and distribution terms for this software are covered by the Eclipse ;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) ;; which can be found in the file epl-v10.html at the root of this ;; distribution. By using this software in any fashion, you are ;; agreeing to be bound by the terms of this license. You must not ;; remove this notice, or any other, from this software. ;; Generates a single HTML page that contains the documentation for ;; one or more Clojure libraries. See the comments section at the end ;; of this file for usage. ;; TODO ;; ;; * Make symbols in the source hyperlinks to the appropriate section ;; of the documentation. ;; * Investigate issue with miglayout mentioned here: ;; http://groups.google.com/group/clojure/browse_thread/thread/5a0c4395e44f5a79/3ae483100366bd3d?lnk=gst&q=documentation+browser#3ae483100366bd3d ;; ;; DONE ;; ;; * Move to clojure.contrib ;; * Change namespace ;; * Change license as appropriate ;; * Double-check doc strings ;; * Remove doc strings from source code ;; * Add collapse/expand functionality for all namespaces ;; * Add collapse/expand functionality for each namespace ;; * See if converting to use clojure.contrib.prxml is possible ;; * Figure out why the source doesn't show up for most things ;; * Add collapsible source ;; * Add links at the top to jump to each namespace ;; * Add object type (var, function, whatever) ;; * Add argument lists for functions ;; * Add links at the top of each namespace to jump to members ;; * Add license statement ;; * Remove the whojure dependency (ns #^{:author "PI:NAME:<NAME>END_PI", :doc "Generates a single HTML page that contains the documentation for one or more Clojure libraries."} clojure.contrib.gen-html-docs 
(:require [clojure.contrib.duck-streams :as duck-streams]) (:use [clojure.contrib seq-utils str-utils repl-utils def prxml]) (:import [java.lang Exception] [java.util.regex Pattern])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Doc generation constants ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (def *script* " // <![CDATA[ function getElem(id) { if( document.getElementById ) { return document.getElementById( id ) } else if ( document.all ) { return eval( 'document.all.' + id ) } else return false; } function setDisplayStyle(id,displayStyle) { var elem = getElem (id) if (elem) { elem.style.display = displayStyle } } function setLinkToggleText (id, text) { var elem = getElem (id) if (elem) { elem.innerHTML = text } } function collapse(id) { setDisplayStyle (id, 'none') } function expand (id) { setDisplayStyle (id, 'block') } function toggleSource( id ) { toggle(id, 'linkto-' + id, 'Hide Source', 'Show Source') } function toggle(targetid, linkid, textWhenOpen, textWhenClosed) { var elem = getElem (targetid) var link = getElem (linkid) if (elem && link) { var isOpen = false if (elem.style.display == '') { isOpen = link.innerHTML == textWhenOpen } else if( elem.style.display == 'block' ) { isOpen = true } if (isOpen) { elem.style.display = 'none' link.innerHTML = textWhenClosed } else { elem.style.display = 'block' link.innerHTML = textWhenOpen } } } //]]> ") (def *style* " .library { padding: 0.5em 0 0 0 } .all-libs-toggle,.library-contents-toggle { font-size: small; } .all-libs-toggle a,.library-contents-toggle a { color: white } .library-member-doc-whitespace { white-space: pre } .library-member-source-toggle { font-size: small; margin-top: 0.5em } .library-member-source { display: none; border-left: solid lightblue } .library-member-docs { font-family:monospace } .library-member-arglists { font-family: monospace } .library-member-type { font-weight: bold; font-size: small; font-style: italic; color: darkred } .lib-links { margin: 0 0 1em 0 } .lib-link-header { color: white; background: 
darkgreen; width: 100% } .library-name { color: white; background: darkblue; width: 100% } .missing-library { color: darkred; margin: 0 0 1em 0 } .library-members { list-style: none } .library-member-name { font-weight: bold; font-size: 105% }") (defn- extract-documentation "Pulls the documentation for a var v out and turns it into HTML" [v] (if-let [docs (:doc (meta v))] (map (fn [l] [:div {:class "library-member-doc-line"} (if (= 0 (count l)) [:span {:class "library-member-doc-whitespace"} " "] ; We need something here to make the blank line show up l)]) (re-split #"\n" docs)) "")) (defn- member-type "Figures out for a var x whether it's a macro, function, var or multifunction" [x] (try (let [dx (deref x)] (cond (:macro (meta x)) :macro (fn? dx) :fn (= clojure.lang.MultiFn (:tag (meta x))) :multi true :var)) (catch Exception e :unknown))) (defn- anchor-for-member "Returns a suitable HTML anchor name given a library id and a member id" [libid memberid] (str "member-" libid "-" memberid)) (defn- id-for-member-source "Returns a suitable HTML id for a source listing given a library and a member" [libid memberid] (str "membersource-" libid "-" memberid)) (defn- id-for-member-source-link "Returns a suitable HTML id for a link to a source listing given a library and a member" [libid memberid] (str "linkto-membersource-" libid "-" memberid)) (defn- symbol-for "Given a namespace object ns and a namespaceless symbol memberid naming a member of that namespace, returns a namespaced symbol that identifies that member." [ns memberid] (symbol (name (ns-name ns)) (name memberid))) (defn- elide-to-one-line "Elides a string down to one line." [s] (re-sub #"(\n.*)+" "..." s)) (defn- elide-string "Returns a string that is at most the first limit characters of s" [s limit] (if (< (- limit 3) (count s)) (str (subs s 0 (- limit 3)) "...") s)) (defn- doc-elided-src "Returns the src with the docs elided." 
[docs src] (re-sub (re-pattern (str "\"" (Pattern/quote docs) "\"")) (str "\"" (elide-to-one-line docs) ;; (elide-string docs 10) ;; "..." "\"") src)) (defn- format-source [libid memberid v] (try (let [docs (:doc (meta v)) src (if-let [ns (find-ns libid)] (get-source (symbol-for ns memberid)))] (if (and src docs) (doc-elided-src docs src) src)) (catch Exception ex nil))) (defn- generate-lib-member [libid [n v]] [:li {:class "library-member"} [:a {:name (anchor-for-member libid n)}] [:dl {:class "library-member-table"} [:dt {:class "library-member-name"} (str n)] [:dd [:div {:class "library-member-info"} [:span {:class "library-member-type"} (name (member-type v))] " " [:span {:class "library-member-arglists"} (str (:arglists (meta v)))]] (into [:div {:class "library-member-docs"}] (extract-documentation v)) (let [member-source-id (id-for-member-source libid n) member-source-link-id (id-for-member-source-link libid n)] (if-let [member-source (format-source libid n v)] [:div {:class "library-member-source-section"} [:div {:class "library-member-source-toggle"} "[ " [:a {:href (format "javascript:toggleSource('%s')" member-source-id) :id member-source-link-id} "Show Source"] " ]"] [:div {:class "library-member-source" :id member-source-id} [:pre member-source]]]))]]]) (defn- anchor-for-library "Given a symbol id identifying a namespace, returns an identifier suitable for use as the name attribute of an HTML anchor tag." [id] (str "library-" id)) (defn- generate-lib-member-link "Emits a hyperlink to a member of a namespace given libid (a symbol identifying the namespace) and the vector [n v], where n is the symbol naming the member in question and v is the var pointing to the member." [libid [n v]] [:a {:class "lib-member-link" :href (str "#" (anchor-for-member libid n))} (name n)]) (defn- anchor-for-library-contents "Returns an HTML ID that identifies the element that holds the documentation contents for the specified library." 
[lib] (str "library-contents-" lib)) (defn- anchor-for-library-contents-toggle "Returns an HTML ID that identifies the element that toggles the visibility of the library contents." [lib] (str "library-contents-toggle-" lib)) (defn- generate-lib-doc "Emits the HTML that documents the namespace identified by the symbol lib." [lib] [:div {:class "library"} [:a {:name (anchor-for-library lib)}] [:div {:class "library-name"} [:span {:class "library-contents-toggle"} "[ " [:a {:id (anchor-for-library-contents-toggle lib) :href (format "javascript:toggle('%s', '%s', '-', '+')" (anchor-for-library-contents lib) (anchor-for-library-contents-toggle lib))} "-"] " ] "] (name lib)] (let [ns (find-ns lib)] (if ns (let [lib-members (sort (ns-publics ns))] [:a {:name (anchor-for-library lib)}] [:div {:class "library-contents" :id (anchor-for-library-contents lib)} (into [:div {:class "library-member-links"}] (interpose " " (map #(generate-lib-member-link lib %) lib-members))) (into [:ol {:class "library-members"}] (map #(generate-lib-member lib %) lib-members))]) [:div {:class "missing-library library-contents" :id (anchor-for-library-contents lib)} "Could not load library"]))]) (defn- load-lib "Calls require on the library identified by lib, eating any exceptions." [lib] (try (require lib) (catch java.lang.Exception x nil))) (defn- generate-lib-link "Generates a hyperlink to the documentation for a namespace given lib, a symbol identifying that namespace." [lib] (let [ns (find-ns lib)] (if ns [:a {:class "lib-link" :href (str "#" (anchor-for-library lib))} (str (ns-name ns))]))) (defn- generate-lib-links "Generates the list of hyperlinks to each namespace, given libs, a vector of symbols naming namespaces." 
[libs] (into [:div {:class "lib-links"} [:div {:class "lib-link-header"} "Namespaces" [:span {:class "all-libs-toggle"} " [ " [:a {:href "javascript:expandAllNamespaces()"} "Expand All"] " ] [ " [:a {:href "javascript:collapseAllNamespaces()"} "Collapse All"] " ]"]]] (interpose " " (map generate-lib-link libs)))) (defn generate-toggle-namespace-script [action toggle-text lib] (str (format "%s('%s');\n" action (anchor-for-library-contents lib)) (format "setLinkToggleText('%s', '%s');\n" (anchor-for-library-contents-toggle lib) toggle-text))) (defn generate-all-namespaces-action-script [action toggle-text libs] (str (format "function %sAllNamespaces()" action) \newline "{" \newline (reduce str (map #(generate-toggle-namespace-script action toggle-text %) libs)) \newline "}")) (defn generate-documentation "Returns a string which is the HTML documentation for the libraries named by libs. Libs is a vector of symbols identifying Clojure libraries." [libs] (dorun (map load-lib libs)) (let [writer (new java.io.StringWriter)] (binding [*out* writer] (prxml [:html {:xmlns "http://www.w3.org/1999/xhtml"} [:head [:title "Clojure documentation browser"] [:style *style*] [:script {:language "JavaScript" :type "text/javascript"} [:raw! *script*]] [:script {:language "JavaScript" :type "text/javascript"} [:raw! "// <![CDATA[!" \newline] (generate-all-namespaces-action-script "expand" "-" libs) (generate-all-namespaces-action-script "collapse" "+" libs) [:raw! \newline "// ]]>"]]] (let [lib-vec (sort libs)] (into [:body (generate-lib-links lib-vec)] (map generate-lib-doc lib-vec)))])) (.toString writer))) (defn generate-documentation-to-file "Calls generate-documentation on the libraries named by libs and emits the generated HTML to the path named by path." 
[path libs] (duck-streams/spit path (generate-documentation libs))) (comment (generate-documentation-to-file "C:/TEMP/CLJ-DOCS.HTML" ['clojure.contrib.accumulators]) (defn gen-all-docs [] (generate-documentation-to-file "C:/temp/clj-libs.html" [ 'clojure.set 'clojure.main 'clojure.core 'clojure.zip 'clojure.xml 'clojure.contrib.accumulators 'clojure.contrib.apply-macro 'clojure.contrib.auto-agent 'clojure.contrib.combinatorics 'clojure.contrib.command-line 'clojure.contrib.complex-numbers 'clojure.contrib.cond 'clojure.contrib.def 'clojure.contrib.duck-streams 'clojure.contrib.enum 'clojure.contrib.error-kit 'clojure.contrib.except 'clojure.contrib.fcase 'clojure.contrib.generic 'clojure.contrib.generic.arithmetic 'clojure.contrib.generic.collection 'clojure.contrib.generic.comparison 'clojure.contrib.generic.functor 'clojure.contrib.generic.math-functions 'clojure.contrib.import-static 'clojure.contrib.javadoc 'clojure.contrib.javalog 'clojure.contrib.lazy-seqs 'clojure.contrib.lazy-xml 'clojure.contrib.macro-utils 'clojure.contrib.macros 'clojure.contrib.math 'clojure.contrib.miglayout 'clojure.contrib.mmap 'clojure.contrib.monads 'clojure.contrib.ns-utils 'clojure.contrib.prxml 'clojure.contrib.repl-ln 'clojure.contrib.repl-utils 'clojure.contrib.seq-utils 'clojure.contrib.server-socket 'clojure.contrib.shell-out 'clojure.contrib.sql 'clojure.contrib.stream-utils 'clojure.contrib.str-utils 'clojure.contrib.test-contrib 'clojure.contrib.trace 'clojure.contrib.types 'clojure.contrib.zip-filter 'clojure.contrib.javadoc.browse 'clojure.contrib.json.read 'clojure.contrib.json.write 'clojure.contrib.lazy-xml.with-pull 'clojure.contrib.miglayout.internal 'clojure.contrib.probabilities.finite-distributions 'clojure.contrib.probabilities.monte-carlo 'clojure.contrib.probabilities.random-numbers 'clojure.contrib.sql.internal 'clojure.contrib.test-clojure.evaluation 'clojure.contrib.test-clojure.for 'clojure.contrib.test-clojure.numbers 
'clojure.contrib.test-clojure.printer 'clojure.contrib.test-clojure.reader 'clojure.contrib.test-clojure.sequences 'clojure.contrib.test-contrib.shell-out 'clojure.contrib.test-contrib.str-utils 'clojure.contrib.zip-filter.xml ])) )
[ { "context": "s echo-handler))\n ;:cookies {\"username\" {:value \"alice\"}} ,, {\"secret\" {:value \"foobar\", :secure true, :", "end": 3519, "score": 0.9994301795959473, "start": 3514, "tag": "USERNAME", "value": "alice" }, { "context": "username\" {:value \"alice\"}} ,, {\"secret\" {:value \"foobar\", :secure true, :max-age 3600}}\n (GET \"/wrap-coo", "end": 3551, "score": 0.8537889719009399, "start": 3545, "tag": "KEY", "value": "foobar" } ]
test/clojure/nginx/clojure/ring_handlers_for_test.clj
vibhutisawant/nginx-clojure
698
(ns nginx.clojure.ring-handlers-for-test (:use [ring.util.response] [ring.middleware.session] [ring.middleware.cookies] [ring.middleware.params] [ring.middleware.content-type] [ring.middleware.session.memory] [ring.middleware.session.cookie] [ring.middleware.multipart-params] [compojure.core] [nginx.clojure.core] [clojure.tools.trace] ) (:require [compojure.route :as route] [clj-http.client :as client]) (:require [ring.util.codec :as codec]) (:import [ring.middleware.session.memory.MemoryStore] [nginx.clojure NginxRequest] [nginx.clojure.logger LoggerService] [nginx.clojure NginxClojureRT])) (def ^LoggerService logger (NginxClojureRT/getLog)) (defn- log[fmt & ss] (.info logger (apply (partial format fmt) ss))) (def my-session-store (cookie-store {:key "my-secrect-key!!"})) (defn- echo-handler [r] {:status 200 :headers {"rmap" (pr-str (dissoc r :body))} :body "ok"}) (defn hello-ring [req] {:status 200, :headers {"content-type" "text/html"}, :body "Hello, Ring handler!"}) (defn session-handler [req] (let [session (:session req) user ((:params req) "user") user (or user (:user session "guest")) ;_ (println session) ;_ (println my-session-store) session (assoc session :user user)] (-> (response (str "Welcome " user "!")) (content-type "text/html") (assoc :session session)))) (defn decode [encoded] (String. (codec/base64-decode encoded))) (defn get-username-from-authorization-header [header-value] (if (not-empty header-value) (let [user-pass (second (clojure.string/split header-value #"\s+"))] (first (clojure.string/split (decode user-pass) #":"))) "")) (defn check-authorisation [context] (let [authorised? (= "nginx-clojure" (get-username-from-authorization-header (get-in context [:request :headers "authorization"])))] (do (println (format "request_authorised=%s" authorised?)) authorised?))) (def sse-subscribers (atom {})) (def long-polling-subscribers (atom {})) (def init-topics (delay (def sse-topic (build-topic! "sse-topic")) (def long-polling-topic (build-topic! 
"long-polling-topic")) (sub! sse-topic nil (fn [m _] (doseq [[ch _] @sse-subscribers] (send! ch (str "data: " m "\r\n\r\n") true (= "finish!" m) )))) (sub! long-polling-topic nil (fn [m _] (doseq [[ch _] @long-polling-subscribers] (send-response! ch {:status 200, :headers {"content-type" "text/plain"}, :body m})))))) (defroutes ring-compojure-test-handler (GET "/hello2" [] {:status 200, :headers {"content-type" "text/plain"}, :body "Hello World"}) (GET "/hello" [] (-> (response "Hello World") (content-type "text/plain"))) (GET "/redirect" [] (redirect "http://example.com")) (GET "/file-response" [] (file-response "small.html" {:root "testfiles"})) (GET "/resource-response" [] (resource-response "small.html" {:root "public"})) (GET "/wrap-content-type.html" [] (wrap-content-type (fn [req] (response "Hello World")) {:mime-types {"html" "text/x-foo"}})) ;http://example.com/demo?x=hello&x=world, {:params {"x" ["hello", "world"]} (GET "/wrap-params" [] (wrap-params echo-handler)) ;test form post (POST "/wrap-params" [] (wrap-params echo-handler)) ;:cookies {"username" {:value "alice"}} ,, {"secret" {:value "foobar", :secure true, :max-age 3600}} (GET "/wrap-cookies" [] (wrap-cookies echo-handler)) (GET "/authorized-service" [] (fn [req] (if (check-authorisation {:request req}) {:status 200, :headers {"content-type" "text/plain"}, :body "OK, you have authorized to see this message!"} {:status 401, :headers {"www-authenticate" "Basic realm=\"Secure Area\"" :body "<HTML><BODY><H1>401 Unauthorized.</H1></BODY></HTML>"}}))) (PATCH "/json-patch" [] (fn [req] {:status 200, :headers {"content-type" "text/plain"}, :body (str "Your patch succeeded! 
length=" (-> req :body slurp count))})) ;:session (GET "/wrap-session" [] (-> session-handler wrap-params (wrap-session {:store my-session-store}) )) (POST "/ring-upload" [] (wrap-multipart-params (wrap-params (fn [{params :params}] (let [{:keys [tempfile filename]} (params "myfile")] {:status 200, :headers {"rmap" (pr-str (dissoc params "myfile")), "content-type" "text/plain"} :body (java.io.File. filename)}))))) (GET "/not-found" [] (route/not-found "<h1>Page not found</h1>")) (GET "/exception" [] (throw (Exception. "my exception"))) ;;server sent events publisher (GET "/sse-pub" [] (fn [req] @init-topics (pub! sse-topic (:query-string req)) {:body "OK"})) ;;server sent events subscriber (GET "/sse-sub" [] (fn [^NginxRequest req] @init-topics (let [ch (hijack! req true)] (on-close! ch ch (fn [ch] (log "channel closed. id=%d" (.nativeRequest req)) (log "#%s sse-sub: onclose arg:%s, sse-subscribers=%s" process-id ch (pr-str @sse-subscribers)) (swap! sse-subscribers dissoc ch))) (swap! sse-subscribers assoc ch req) (send-header! ch 200 {"Content-Type", "text/event-stream"} false false) (send! ch "retry: 4500\r\n" true false)))) (GET "/pub" [] (fn [req] @init-topics (pub! long-polling-topic (:query-string req)) {:body "OK"})) (GET "/sub" [] (fn [^NginxRequest req] @init-topics (let [ch (hijack! req true)] (on-close! ch ch (fn [ch] (log "#%s channel closed. id=%d" process-id (.nativeRequest req)) (swap! long-polling-subscribers dissoc ch))) (swap! long-polling-subscribers assoc ch req)))) (GET "/ws-echo" [] (fn [^NginxRequest req] (let [ch (hijack! req true)] (if (websocket-upgrade! ch true) (add-listener! ch { :on-open (fn [ch] (log "uri:%s, on-open!" (:uri req))) :on-message (fn [ch msg rem?] (send! ch msg (not rem?) false)) :on-close (fn [ch reason] (log "uri:%s, on-close:%s" (:uri req) reason)) :on-error (fn [ch error] (log "uri:%s, on-error:%s" (:uri req) error)) }) {})))) (GET "/ws-remote" [] (fn [^NginxRequest req] (-> req (hijack! true) (add-listener! 
{ :on-open (fn [ch] (log "uri:%s, on-open!" (:uri req))) :on-message (fn [ch msg rem?] (send! ch (:body (client/get msg {:socket-timeout 50000})) true false) ; (send! ch (clojure.string/join (for [i (range 4708)] 'a')) true false) ) :on-close (fn [ch reason] (log "uri:%s, on-close:%s" (:uri req) reason)) :on-error (fn [ch error] (log "uri:%s, on-error:%s" (:uri req) error)) }))))) ;(trace-ns compojure.core) ;(trace-ns compojure.route) ;(trace-ns ring.util.response) ;(trace-ns ring.middleware.session) ;(trace-ns ring.middleware.cookies) ;(trace-ns ring.middleware.params) ;(trace-ns ring.middleware.content-type) ;(trace-ns ring.middleware.session.memory) ;(trace-ns ring.middleware.session.cookie) ;(trace-ns ring.middleware.multipart-params) ;(trace-ns compojure.core) ;(trace-vars nginx.clojure.ring-handlers-for-test/ring-compojure-test-handler)
1692
(ns nginx.clojure.ring-handlers-for-test (:use [ring.util.response] [ring.middleware.session] [ring.middleware.cookies] [ring.middleware.params] [ring.middleware.content-type] [ring.middleware.session.memory] [ring.middleware.session.cookie] [ring.middleware.multipart-params] [compojure.core] [nginx.clojure.core] [clojure.tools.trace] ) (:require [compojure.route :as route] [clj-http.client :as client]) (:require [ring.util.codec :as codec]) (:import [ring.middleware.session.memory.MemoryStore] [nginx.clojure NginxRequest] [nginx.clojure.logger LoggerService] [nginx.clojure NginxClojureRT])) (def ^LoggerService logger (NginxClojureRT/getLog)) (defn- log[fmt & ss] (.info logger (apply (partial format fmt) ss))) (def my-session-store (cookie-store {:key "my-secrect-key!!"})) (defn- echo-handler [r] {:status 200 :headers {"rmap" (pr-str (dissoc r :body))} :body "ok"}) (defn hello-ring [req] {:status 200, :headers {"content-type" "text/html"}, :body "Hello, Ring handler!"}) (defn session-handler [req] (let [session (:session req) user ((:params req) "user") user (or user (:user session "guest")) ;_ (println session) ;_ (println my-session-store) session (assoc session :user user)] (-> (response (str "Welcome " user "!")) (content-type "text/html") (assoc :session session)))) (defn decode [encoded] (String. (codec/base64-decode encoded))) (defn get-username-from-authorization-header [header-value] (if (not-empty header-value) (let [user-pass (second (clojure.string/split header-value #"\s+"))] (first (clojure.string/split (decode user-pass) #":"))) "")) (defn check-authorisation [context] (let [authorised? (= "nginx-clojure" (get-username-from-authorization-header (get-in context [:request :headers "authorization"])))] (do (println (format "request_authorised=%s" authorised?)) authorised?))) (def sse-subscribers (atom {})) (def long-polling-subscribers (atom {})) (def init-topics (delay (def sse-topic (build-topic! "sse-topic")) (def long-polling-topic (build-topic! 
"long-polling-topic")) (sub! sse-topic nil (fn [m _] (doseq [[ch _] @sse-subscribers] (send! ch (str "data: " m "\r\n\r\n") true (= "finish!" m) )))) (sub! long-polling-topic nil (fn [m _] (doseq [[ch _] @long-polling-subscribers] (send-response! ch {:status 200, :headers {"content-type" "text/plain"}, :body m})))))) (defroutes ring-compojure-test-handler (GET "/hello2" [] {:status 200, :headers {"content-type" "text/plain"}, :body "Hello World"}) (GET "/hello" [] (-> (response "Hello World") (content-type "text/plain"))) (GET "/redirect" [] (redirect "http://example.com")) (GET "/file-response" [] (file-response "small.html" {:root "testfiles"})) (GET "/resource-response" [] (resource-response "small.html" {:root "public"})) (GET "/wrap-content-type.html" [] (wrap-content-type (fn [req] (response "Hello World")) {:mime-types {"html" "text/x-foo"}})) ;http://example.com/demo?x=hello&x=world, {:params {"x" ["hello", "world"]} (GET "/wrap-params" [] (wrap-params echo-handler)) ;test form post (POST "/wrap-params" [] (wrap-params echo-handler)) ;:cookies {"username" {:value "alice"}} ,, {"secret" {:value "<KEY>", :secure true, :max-age 3600}} (GET "/wrap-cookies" [] (wrap-cookies echo-handler)) (GET "/authorized-service" [] (fn [req] (if (check-authorisation {:request req}) {:status 200, :headers {"content-type" "text/plain"}, :body "OK, you have authorized to see this message!"} {:status 401, :headers {"www-authenticate" "Basic realm=\"Secure Area\"" :body "<HTML><BODY><H1>401 Unauthorized.</H1></BODY></HTML>"}}))) (PATCH "/json-patch" [] (fn [req] {:status 200, :headers {"content-type" "text/plain"}, :body (str "Your patch succeeded! 
length=" (-> req :body slurp count))})) ;:session (GET "/wrap-session" [] (-> session-handler wrap-params (wrap-session {:store my-session-store}) )) (POST "/ring-upload" [] (wrap-multipart-params (wrap-params (fn [{params :params}] (let [{:keys [tempfile filename]} (params "myfile")] {:status 200, :headers {"rmap" (pr-str (dissoc params "myfile")), "content-type" "text/plain"} :body (java.io.File. filename)}))))) (GET "/not-found" [] (route/not-found "<h1>Page not found</h1>")) (GET "/exception" [] (throw (Exception. "my exception"))) ;;server sent events publisher (GET "/sse-pub" [] (fn [req] @init-topics (pub! sse-topic (:query-string req)) {:body "OK"})) ;;server sent events subscriber (GET "/sse-sub" [] (fn [^NginxRequest req] @init-topics (let [ch (hijack! req true)] (on-close! ch ch (fn [ch] (log "channel closed. id=%d" (.nativeRequest req)) (log "#%s sse-sub: onclose arg:%s, sse-subscribers=%s" process-id ch (pr-str @sse-subscribers)) (swap! sse-subscribers dissoc ch))) (swap! sse-subscribers assoc ch req) (send-header! ch 200 {"Content-Type", "text/event-stream"} false false) (send! ch "retry: 4500\r\n" true false)))) (GET "/pub" [] (fn [req] @init-topics (pub! long-polling-topic (:query-string req)) {:body "OK"})) (GET "/sub" [] (fn [^NginxRequest req] @init-topics (let [ch (hijack! req true)] (on-close! ch ch (fn [ch] (log "#%s channel closed. id=%d" process-id (.nativeRequest req)) (swap! long-polling-subscribers dissoc ch))) (swap! long-polling-subscribers assoc ch req)))) (GET "/ws-echo" [] (fn [^NginxRequest req] (let [ch (hijack! req true)] (if (websocket-upgrade! ch true) (add-listener! ch { :on-open (fn [ch] (log "uri:%s, on-open!" (:uri req))) :on-message (fn [ch msg rem?] (send! ch msg (not rem?) false)) :on-close (fn [ch reason] (log "uri:%s, on-close:%s" (:uri req) reason)) :on-error (fn [ch error] (log "uri:%s, on-error:%s" (:uri req) error)) }) {})))) (GET "/ws-remote" [] (fn [^NginxRequest req] (-> req (hijack! true) (add-listener! 
{ :on-open (fn [ch] (log "uri:%s, on-open!" (:uri req))) :on-message (fn [ch msg rem?] (send! ch (:body (client/get msg {:socket-timeout 50000})) true false) ; (send! ch (clojure.string/join (for [i (range 4708)] 'a')) true false) ) :on-close (fn [ch reason] (log "uri:%s, on-close:%s" (:uri req) reason)) :on-error (fn [ch error] (log "uri:%s, on-error:%s" (:uri req) error)) }))))) ;(trace-ns compojure.core) ;(trace-ns compojure.route) ;(trace-ns ring.util.response) ;(trace-ns ring.middleware.session) ;(trace-ns ring.middleware.cookies) ;(trace-ns ring.middleware.params) ;(trace-ns ring.middleware.content-type) ;(trace-ns ring.middleware.session.memory) ;(trace-ns ring.middleware.session.cookie) ;(trace-ns ring.middleware.multipart-params) ;(trace-ns compojure.core) ;(trace-vars nginx.clojure.ring-handlers-for-test/ring-compojure-test-handler)
true
(ns nginx.clojure.ring-handlers-for-test (:use [ring.util.response] [ring.middleware.session] [ring.middleware.cookies] [ring.middleware.params] [ring.middleware.content-type] [ring.middleware.session.memory] [ring.middleware.session.cookie] [ring.middleware.multipart-params] [compojure.core] [nginx.clojure.core] [clojure.tools.trace] ) (:require [compojure.route :as route] [clj-http.client :as client]) (:require [ring.util.codec :as codec]) (:import [ring.middleware.session.memory.MemoryStore] [nginx.clojure NginxRequest] [nginx.clojure.logger LoggerService] [nginx.clojure NginxClojureRT])) (def ^LoggerService logger (NginxClojureRT/getLog)) (defn- log[fmt & ss] (.info logger (apply (partial format fmt) ss))) (def my-session-store (cookie-store {:key "my-secrect-key!!"})) (defn- echo-handler [r] {:status 200 :headers {"rmap" (pr-str (dissoc r :body))} :body "ok"}) (defn hello-ring [req] {:status 200, :headers {"content-type" "text/html"}, :body "Hello, Ring handler!"}) (defn session-handler [req] (let [session (:session req) user ((:params req) "user") user (or user (:user session "guest")) ;_ (println session) ;_ (println my-session-store) session (assoc session :user user)] (-> (response (str "Welcome " user "!")) (content-type "text/html") (assoc :session session)))) (defn decode [encoded] (String. (codec/base64-decode encoded))) (defn get-username-from-authorization-header [header-value] (if (not-empty header-value) (let [user-pass (second (clojure.string/split header-value #"\s+"))] (first (clojure.string/split (decode user-pass) #":"))) "")) (defn check-authorisation [context] (let [authorised? (= "nginx-clojure" (get-username-from-authorization-header (get-in context [:request :headers "authorization"])))] (do (println (format "request_authorised=%s" authorised?)) authorised?))) (def sse-subscribers (atom {})) (def long-polling-subscribers (atom {})) (def init-topics (delay (def sse-topic (build-topic! "sse-topic")) (def long-polling-topic (build-topic! 
"long-polling-topic")) (sub! sse-topic nil (fn [m _] (doseq [[ch _] @sse-subscribers] (send! ch (str "data: " m "\r\n\r\n") true (= "finish!" m) )))) (sub! long-polling-topic nil (fn [m _] (doseq [[ch _] @long-polling-subscribers] (send-response! ch {:status 200, :headers {"content-type" "text/plain"}, :body m})))))) (defroutes ring-compojure-test-handler (GET "/hello2" [] {:status 200, :headers {"content-type" "text/plain"}, :body "Hello World"}) (GET "/hello" [] (-> (response "Hello World") (content-type "text/plain"))) (GET "/redirect" [] (redirect "http://example.com")) (GET "/file-response" [] (file-response "small.html" {:root "testfiles"})) (GET "/resource-response" [] (resource-response "small.html" {:root "public"})) (GET "/wrap-content-type.html" [] (wrap-content-type (fn [req] (response "Hello World")) {:mime-types {"html" "text/x-foo"}})) ;http://example.com/demo?x=hello&x=world, {:params {"x" ["hello", "world"]} (GET "/wrap-params" [] (wrap-params echo-handler)) ;test form post (POST "/wrap-params" [] (wrap-params echo-handler)) ;:cookies {"username" {:value "alice"}} ,, {"secret" {:value "PI:KEY:<KEY>END_PI", :secure true, :max-age 3600}} (GET "/wrap-cookies" [] (wrap-cookies echo-handler)) (GET "/authorized-service" [] (fn [req] (if (check-authorisation {:request req}) {:status 200, :headers {"content-type" "text/plain"}, :body "OK, you have authorized to see this message!"} {:status 401, :headers {"www-authenticate" "Basic realm=\"Secure Area\"" :body "<HTML><BODY><H1>401 Unauthorized.</H1></BODY></HTML>"}}))) (PATCH "/json-patch" [] (fn [req] {:status 200, :headers {"content-type" "text/plain"}, :body (str "Your patch succeeded! 
length=" (-> req :body slurp count))})) ;:session (GET "/wrap-session" [] (-> session-handler wrap-params (wrap-session {:store my-session-store}) )) (POST "/ring-upload" [] (wrap-multipart-params (wrap-params (fn [{params :params}] (let [{:keys [tempfile filename]} (params "myfile")] {:status 200, :headers {"rmap" (pr-str (dissoc params "myfile")), "content-type" "text/plain"} :body (java.io.File. filename)}))))) (GET "/not-found" [] (route/not-found "<h1>Page not found</h1>")) (GET "/exception" [] (throw (Exception. "my exception"))) ;;server sent events publisher (GET "/sse-pub" [] (fn [req] @init-topics (pub! sse-topic (:query-string req)) {:body "OK"})) ;;server sent events subscriber (GET "/sse-sub" [] (fn [^NginxRequest req] @init-topics (let [ch (hijack! req true)] (on-close! ch ch (fn [ch] (log "channel closed. id=%d" (.nativeRequest req)) (log "#%s sse-sub: onclose arg:%s, sse-subscribers=%s" process-id ch (pr-str @sse-subscribers)) (swap! sse-subscribers dissoc ch))) (swap! sse-subscribers assoc ch req) (send-header! ch 200 {"Content-Type", "text/event-stream"} false false) (send! ch "retry: 4500\r\n" true false)))) (GET "/pub" [] (fn [req] @init-topics (pub! long-polling-topic (:query-string req)) {:body "OK"})) (GET "/sub" [] (fn [^NginxRequest req] @init-topics (let [ch (hijack! req true)] (on-close! ch ch (fn [ch] (log "#%s channel closed. id=%d" process-id (.nativeRequest req)) (swap! long-polling-subscribers dissoc ch))) (swap! long-polling-subscribers assoc ch req)))) (GET "/ws-echo" [] (fn [^NginxRequest req] (let [ch (hijack! req true)] (if (websocket-upgrade! ch true) (add-listener! ch { :on-open (fn [ch] (log "uri:%s, on-open!" (:uri req))) :on-message (fn [ch msg rem?] (send! ch msg (not rem?) false)) :on-close (fn [ch reason] (log "uri:%s, on-close:%s" (:uri req) reason)) :on-error (fn [ch error] (log "uri:%s, on-error:%s" (:uri req) error)) }) {})))) (GET "/ws-remote" [] (fn [^NginxRequest req] (-> req (hijack! true) (add-listener! 
{ :on-open (fn [ch] (log "uri:%s, on-open!" (:uri req))) :on-message (fn [ch msg rem?] (send! ch (:body (client/get msg {:socket-timeout 50000})) true false) ; (send! ch (clojure.string/join (for [i (range 4708)] 'a')) true false) ) :on-close (fn [ch reason] (log "uri:%s, on-close:%s" (:uri req) reason)) :on-error (fn [ch error] (log "uri:%s, on-error:%s" (:uri req) error)) }))))) ;(trace-ns compojure.core) ;(trace-ns compojure.route) ;(trace-ns ring.util.response) ;(trace-ns ring.middleware.session) ;(trace-ns ring.middleware.cookies) ;(trace-ns ring.middleware.params) ;(trace-ns ring.middleware.content-type) ;(trace-ns ring.middleware.session.memory) ;(trace-ns ring.middleware.session.cookie) ;(trace-ns ring.middleware.multipart-params) ;(trace-ns compojure.core) ;(trace-vars nginx.clojure.ring-handlers-for-test/ring-compojure-test-handler)
[ { "context": "s://api.crossref.org/v1/works/\" safe-doi \"?mailto=eventdata@crossref.org\")\n :datacite (str \"https://api.d", "end": 3979, "score": 0.9998522400856018, "start": 3957, "tag": "EMAIL", "value": "eventdata@crossref.org" } ]
src/event_data_query/work_cache.clj
CrossRef/event-data-query
6
(ns event-data-query.work-cache "Work metadata cache, retrieved using Content Negotiation and stored in ElasticSearch. The cache is case-sensitive, which means that if the same DOI is represented with different case, two entries will be made. This is a trade-off between the likelihood of this happening (low) with the desire to avoid changing the representation of a DOI during the pipeline." (:require [crossref.util.doi :as cr-doi] [crossref.util.string :as cr-str] [qbits.spandex :as s] [clj-http.client :as client] [clojure.data.json :as json] [clojure.tools.logging :as log] [robert.bruce :refer [try-try-again]] [config.core :refer [env]] [clojure.string :as string]) (:import [java.net URLEncoder] [org.elasticsearch.client ResponseException])) (def work-type-name :work) (def mapping {:doi {:type "keyword"} :ra {:type "keyword"} :content-type {:type "keyword"}}) (def index-id (delay (-> env :query-deployment (str "work_cache")))) (def connection (delay (s/client {:hosts [(:query-elastic-uri env)] :max-retry-timeout 60000 :request {:connect-timeout 60000 :socket-timeout 60000}}))) (defn doi->id [doi] (some-> doi cr-doi/normalise-doi cr-str/md5)) (defn get-work "Get the given URL's metadata from the cache." [doi] (let [id (doi->id doi)] (try (-> (s/request @connection {:url [@index-id work-type-name id] :method :get}) :body :_source) ; Expect to get 404 sometimes. (catch Exception ex nil)))) (defn ensure-index "Set up Index." [] (try (s/request @connection {:url [@index-id] :method :head}) (catch Exception ex (log/info "Need to create Work index") (try (s/request @connection {:url [@index-id] :method :put :body {:settings {"number_of_shards" 8 "number_of_replicas" 1} :mappings {work-type-name {:properties mapping}}}}) (catch Exception ex2 (log/error "Failed to create Work index!" ex2)))))) (defn set-refresh-interval! [] (s/request @connection {:url (str @index-id "/_settings") :method :put :body {:index {"refresh_interval" "60s"}}})) (defn boot! 
[] (ensure-index) (set-refresh-interval!)) (defn insert-work "Insert a work's metadata, replacing already exists." [doi data] (let [id (doi->id doi)] (try-try-again {:sleep 30000 :tries 5} #(s/request @connection {:url [@index-id work-type-name id] :method :put :body data})))) (defn get-ra-api "Get the Registration Agency from the DOI RA API. Return :crossref :datacite or nil." [non-url-normalized-doi] (when non-url-normalized-doi (try (-> non-url-normalized-doi (URLEncoder/encode "UTF-8") (#(str "https://doi.org/doiRA/" %)) client/get :body (json/read-str :key-fn keyword) first :RA (or "") string/lower-case {"datacite" :datacite "crossref" :crossref}) ; Not found, or invalid data, return nil. (catch Exception ex (do (log/error ex) nil))))) (defn get-work-api "Get the work metadata from the Crossref or DataCite API." [non-url-normalized-doi] ; We might get nils. (when non-url-normalized-doi (try (let [ra (get-ra-api non-url-normalized-doi) safe-doi (URLEncoder/encode non-url-normalized-doi "UTF-8") url (condp = ra :crossref (str "https://api.crossref.org/v1/works/" safe-doi "?mailto=eventdata@crossref.org") :datacite (str "https://api.datacite.org/works/" safe-doi "?include=resource-type") nil) response (try-try-again {:sleep 10000 :tries 2} ; Only retry on genuine exceptions. 404 etc won't be fixed by retrying. #(when url (client/get url {:throw-exceptions false}))) body (when (= 200 (:status response)) (-> response :body (json/read-str :key-fn keyword))) work-type (condp = ra :crossref (-> body :message :type) :datacite (-> body :data :attributes :resource-type-id) nil)] ; If we couldn't discover the RA, then this isn't a real DOI. ; Return nil so this doens't get cached (could produce a false-negative in future). 
(when (and ra work-type) {:content-type work-type :ra ra :doi non-url-normalized-doi})) (catch Exception ex (do (log/error "Failed to retrieve metadata for DOI" non-url-normalized-doi "error:" (str ex)) nil))))) (defn get-for-dois "For a sequence of DOIs, perform cached lookups and return in a hash-map. When an input isn't a valid DOI, the response is nil." [dois] ; Map of inputs to normalized DOI (or nil if it isn't valid). (let [inputs-normalized (map (fn [input] [input (when (cr-doi/well-formed input) (cr-doi/non-url-doi input))]) dois) ; Look up each entry into triple. from-cache (map (fn [[input-doi normalized-doi]] [input-doi normalized-doi (when normalized-doi (get-work normalized-doi))]) inputs-normalized) missing-entries (filter (fn [[input-doi normalized-doi result]] (nil? result)) from-cache) ; Look up missing ones into triples. from-api (map (fn [[input-doi normalized-doi _]] [input-doi normalized-doi (when normalized-doi (get-work-api normalized-doi))]) missing-entries)] (doseq [[_ normalized-doi result] from-api] ; Don't save in cache if it wasn't a DOI (or was nil). ; Don't save if we couldn't retrieve any data from the API (DOI-like doesn't exist). (when (and normalized-doi result) (insert-work normalized-doi result))) (merge (into {} (map (fn [[input-doi _ result]] [input-doi result]) from-cache)) (into {} (map (fn [[input-doi _ result]] [input-doi result]) from-api)))))
68634
(ns event-data-query.work-cache "Work metadata cache, retrieved using Content Negotiation and stored in ElasticSearch. The cache is case-sensitive, which means that if the same DOI is represented with different case, two entries will be made. This is a trade-off between the likelihood of this happening (low) with the desire to avoid changing the representation of a DOI during the pipeline." (:require [crossref.util.doi :as cr-doi] [crossref.util.string :as cr-str] [qbits.spandex :as s] [clj-http.client :as client] [clojure.data.json :as json] [clojure.tools.logging :as log] [robert.bruce :refer [try-try-again]] [config.core :refer [env]] [clojure.string :as string]) (:import [java.net URLEncoder] [org.elasticsearch.client ResponseException])) (def work-type-name :work) (def mapping {:doi {:type "keyword"} :ra {:type "keyword"} :content-type {:type "keyword"}}) (def index-id (delay (-> env :query-deployment (str "work_cache")))) (def connection (delay (s/client {:hosts [(:query-elastic-uri env)] :max-retry-timeout 60000 :request {:connect-timeout 60000 :socket-timeout 60000}}))) (defn doi->id [doi] (some-> doi cr-doi/normalise-doi cr-str/md5)) (defn get-work "Get the given URL's metadata from the cache." [doi] (let [id (doi->id doi)] (try (-> (s/request @connection {:url [@index-id work-type-name id] :method :get}) :body :_source) ; Expect to get 404 sometimes. (catch Exception ex nil)))) (defn ensure-index "Set up Index." [] (try (s/request @connection {:url [@index-id] :method :head}) (catch Exception ex (log/info "Need to create Work index") (try (s/request @connection {:url [@index-id] :method :put :body {:settings {"number_of_shards" 8 "number_of_replicas" 1} :mappings {work-type-name {:properties mapping}}}}) (catch Exception ex2 (log/error "Failed to create Work index!" ex2)))))) (defn set-refresh-interval! [] (s/request @connection {:url (str @index-id "/_settings") :method :put :body {:index {"refresh_interval" "60s"}}})) (defn boot! 
[] (ensure-index) (set-refresh-interval!)) (defn insert-work "Insert a work's metadata, replacing already exists." [doi data] (let [id (doi->id doi)] (try-try-again {:sleep 30000 :tries 5} #(s/request @connection {:url [@index-id work-type-name id] :method :put :body data})))) (defn get-ra-api "Get the Registration Agency from the DOI RA API. Return :crossref :datacite or nil." [non-url-normalized-doi] (when non-url-normalized-doi (try (-> non-url-normalized-doi (URLEncoder/encode "UTF-8") (#(str "https://doi.org/doiRA/" %)) client/get :body (json/read-str :key-fn keyword) first :RA (or "") string/lower-case {"datacite" :datacite "crossref" :crossref}) ; Not found, or invalid data, return nil. (catch Exception ex (do (log/error ex) nil))))) (defn get-work-api "Get the work metadata from the Crossref or DataCite API." [non-url-normalized-doi] ; We might get nils. (when non-url-normalized-doi (try (let [ra (get-ra-api non-url-normalized-doi) safe-doi (URLEncoder/encode non-url-normalized-doi "UTF-8") url (condp = ra :crossref (str "https://api.crossref.org/v1/works/" safe-doi "?mailto=<EMAIL>") :datacite (str "https://api.datacite.org/works/" safe-doi "?include=resource-type") nil) response (try-try-again {:sleep 10000 :tries 2} ; Only retry on genuine exceptions. 404 etc won't be fixed by retrying. #(when url (client/get url {:throw-exceptions false}))) body (when (= 200 (:status response)) (-> response :body (json/read-str :key-fn keyword))) work-type (condp = ra :crossref (-> body :message :type) :datacite (-> body :data :attributes :resource-type-id) nil)] ; If we couldn't discover the RA, then this isn't a real DOI. ; Return nil so this doens't get cached (could produce a false-negative in future). 
(when (and ra work-type) {:content-type work-type :ra ra :doi non-url-normalized-doi})) (catch Exception ex (do (log/error "Failed to retrieve metadata for DOI" non-url-normalized-doi "error:" (str ex)) nil))))) (defn get-for-dois "For a sequence of DOIs, perform cached lookups and return in a hash-map. When an input isn't a valid DOI, the response is nil." [dois] ; Map of inputs to normalized DOI (or nil if it isn't valid). (let [inputs-normalized (map (fn [input] [input (when (cr-doi/well-formed input) (cr-doi/non-url-doi input))]) dois) ; Look up each entry into triple. from-cache (map (fn [[input-doi normalized-doi]] [input-doi normalized-doi (when normalized-doi (get-work normalized-doi))]) inputs-normalized) missing-entries (filter (fn [[input-doi normalized-doi result]] (nil? result)) from-cache) ; Look up missing ones into triples. from-api (map (fn [[input-doi normalized-doi _]] [input-doi normalized-doi (when normalized-doi (get-work-api normalized-doi))]) missing-entries)] (doseq [[_ normalized-doi result] from-api] ; Don't save in cache if it wasn't a DOI (or was nil). ; Don't save if we couldn't retrieve any data from the API (DOI-like doesn't exist). (when (and normalized-doi result) (insert-work normalized-doi result))) (merge (into {} (map (fn [[input-doi _ result]] [input-doi result]) from-cache)) (into {} (map (fn [[input-doi _ result]] [input-doi result]) from-api)))))
true
(ns event-data-query.work-cache "Work metadata cache, retrieved using Content Negotiation and stored in ElasticSearch. The cache is case-sensitive, which means that if the same DOI is represented with different case, two entries will be made. This is a trade-off between the likelihood of this happening (low) with the desire to avoid changing the representation of a DOI during the pipeline." (:require [crossref.util.doi :as cr-doi] [crossref.util.string :as cr-str] [qbits.spandex :as s] [clj-http.client :as client] [clojure.data.json :as json] [clojure.tools.logging :as log] [robert.bruce :refer [try-try-again]] [config.core :refer [env]] [clojure.string :as string]) (:import [java.net URLEncoder] [org.elasticsearch.client ResponseException])) (def work-type-name :work) (def mapping {:doi {:type "keyword"} :ra {:type "keyword"} :content-type {:type "keyword"}}) (def index-id (delay (-> env :query-deployment (str "work_cache")))) (def connection (delay (s/client {:hosts [(:query-elastic-uri env)] :max-retry-timeout 60000 :request {:connect-timeout 60000 :socket-timeout 60000}}))) (defn doi->id [doi] (some-> doi cr-doi/normalise-doi cr-str/md5)) (defn get-work "Get the given URL's metadata from the cache." [doi] (let [id (doi->id doi)] (try (-> (s/request @connection {:url [@index-id work-type-name id] :method :get}) :body :_source) ; Expect to get 404 sometimes. (catch Exception ex nil)))) (defn ensure-index "Set up Index." [] (try (s/request @connection {:url [@index-id] :method :head}) (catch Exception ex (log/info "Need to create Work index") (try (s/request @connection {:url [@index-id] :method :put :body {:settings {"number_of_shards" 8 "number_of_replicas" 1} :mappings {work-type-name {:properties mapping}}}}) (catch Exception ex2 (log/error "Failed to create Work index!" ex2)))))) (defn set-refresh-interval! [] (s/request @connection {:url (str @index-id "/_settings") :method :put :body {:index {"refresh_interval" "60s"}}})) (defn boot! 
[] (ensure-index) (set-refresh-interval!)) (defn insert-work "Insert a work's metadata, replacing already exists." [doi data] (let [id (doi->id doi)] (try-try-again {:sleep 30000 :tries 5} #(s/request @connection {:url [@index-id work-type-name id] :method :put :body data})))) (defn get-ra-api "Get the Registration Agency from the DOI RA API. Return :crossref :datacite or nil." [non-url-normalized-doi] (when non-url-normalized-doi (try (-> non-url-normalized-doi (URLEncoder/encode "UTF-8") (#(str "https://doi.org/doiRA/" %)) client/get :body (json/read-str :key-fn keyword) first :RA (or "") string/lower-case {"datacite" :datacite "crossref" :crossref}) ; Not found, or invalid data, return nil. (catch Exception ex (do (log/error ex) nil))))) (defn get-work-api "Get the work metadata from the Crossref or DataCite API." [non-url-normalized-doi] ; We might get nils. (when non-url-normalized-doi (try (let [ra (get-ra-api non-url-normalized-doi) safe-doi (URLEncoder/encode non-url-normalized-doi "UTF-8") url (condp = ra :crossref (str "https://api.crossref.org/v1/works/" safe-doi "?mailto=PI:EMAIL:<EMAIL>END_PI") :datacite (str "https://api.datacite.org/works/" safe-doi "?include=resource-type") nil) response (try-try-again {:sleep 10000 :tries 2} ; Only retry on genuine exceptions. 404 etc won't be fixed by retrying. #(when url (client/get url {:throw-exceptions false}))) body (when (= 200 (:status response)) (-> response :body (json/read-str :key-fn keyword))) work-type (condp = ra :crossref (-> body :message :type) :datacite (-> body :data :attributes :resource-type-id) nil)] ; If we couldn't discover the RA, then this isn't a real DOI. ; Return nil so this doens't get cached (could produce a false-negative in future). 
(when (and ra work-type) {:content-type work-type :ra ra :doi non-url-normalized-doi})) (catch Exception ex (do (log/error "Failed to retrieve metadata for DOI" non-url-normalized-doi "error:" (str ex)) nil))))) (defn get-for-dois "For a sequence of DOIs, perform cached lookups and return in a hash-map. When an input isn't a valid DOI, the response is nil." [dois] ; Map of inputs to normalized DOI (or nil if it isn't valid). (let [inputs-normalized (map (fn [input] [input (when (cr-doi/well-formed input) (cr-doi/non-url-doi input))]) dois) ; Look up each entry into triple. from-cache (map (fn [[input-doi normalized-doi]] [input-doi normalized-doi (when normalized-doi (get-work normalized-doi))]) inputs-normalized) missing-entries (filter (fn [[input-doi normalized-doi result]] (nil? result)) from-cache) ; Look up missing ones into triples. from-api (map (fn [[input-doi normalized-doi _]] [input-doi normalized-doi (when normalized-doi (get-work-api normalized-doi))]) missing-entries)] (doseq [[_ normalized-doi result] from-api] ; Don't save in cache if it wasn't a DOI (or was nil). ; Don't save if we couldn't retrieve any data from the API (DOI-like doesn't exist). (when (and normalized-doi result) (insert-work normalized-doi result))) (merge (into {} (map (fn [[input-doi _ result]] [input-doi result]) from-cache)) (into {} (map (fn [[input-doi _ result]] [input-doi result]) from-api)))))
[ { "context": "[{:id 0\n :name \"Thor Amorim\"\n :postedAt \"01", "end": 240, "score": 0.9998461604118347, "start": 229, "tag": "NAME", "value": "Thor Amorim" }, { "context": "1/2016\"\n :post \"Olar Mundor\"\n :likes 100}\n ", "end": 353, "score": 0.9987323880195618, "start": 342, "tag": "NAME", "value": "Olar Mundor" }, { "context": " {:id 1\n :name \"Thor Amorim\"\n :postedAt \"02", "end": 496, "score": 0.9998449087142944, "start": 485, "tag": "NAME", "value": "Thor Amorim" }, { "context": "1/2016\"\n :post \"Olar\"\n :likes 50}\n ", "end": 602, "score": 0.997471034526825, "start": 598, "tag": "NAME", "value": "Olar" }, { "context": " {:id 2\n :name \"Thor Amorim\"\n :postedAt \"03", "end": 744, "score": 0.9998510479927063, "start": 733, "tag": "NAME", "value": "Thor Amorim" }, { "context": "1/2016\"\n :post \"Mundor\"\n :likes 25}]})", "end": 852, "score": 0.9529182314872742, "start": 846, "tag": "NAME", "value": "Mundor" }, { "context": "e))\n :name \"Thor Amorim\"\n :postedAt", "end": 1158, "score": 0.9998289942741394, "start": 1147, "tag": "NAME", "value": "Thor Amorim" } ]
src/reagent_test_app/core.cljs
tamorim/reagent-test-app
0
(ns reagent-test-app.core (:require [reagent.core :as reagent :refer [atom]])) (enable-console-print!) (defonce app-state (atom {:new-post "" :posts [{:id 0 :name "Thor Amorim" :postedAt "01/01/2016" :post "Olar Mundor" :likes 100} {:id 1 :name "Thor Amorim" :postedAt "02/01/2016" :post "Olar" :likes 50} {:id 2 :name "Thor Amorim" :postedAt "03/01/2016" :post "Mundor" :likes 25}]})) (defn handle-click [id] (swap! app-state update-in [:posts id :likes] inc)) (defn handle-submit [e] (.preventDefault e) (swap! app-state update :posts conj {:id (count (:posts @app-state)) :name "Thor Amorim" :postedAt "04/01/2016" :post (:new-post @app-state) :likes 0}) (swap! app-state assoc :new-post "")) (defn handle-change [e] (swap! app-state assoc :new-post (.-target.value e))) (defn test-header [] [:div.row [:div.col-lg-12.text-center {:style {:background "black" :color "white"}} [:h2 {:style {:margin "10px 0"}} "Header"]]]) (defn test-input [new-post] [:div.row {:style {:margin-top "24px"}} [:div.col-lg-8.col-lg-offset-2 [:form {:on-submit handle-submit} [:input.form-control {:type "text" :value new-post :auto-focus true :on-change handle-change}]]]]) (defn test-post [post] [:div [:div.row [:div.col-lg-8.col-lg-offset-2 [:hr]]] [:div.row [:div.col-lg-4.col-lg-offset-2 [:h2 (:name post)]] [:div.col-lg-4 [:span.pull-right {:style {:color "grey" :margin-top "20px" :font-size "16px"}} (:postedAt post)]]] [:div.row [:div.col-lg-8.col-lg-offset-2 [:br] [:p {:style {:font-size "18px"}} (:post post)]]] [:div.row [:div.col-lg-8.col-lg-offset-2 [:a.pull-right {:on-click #(handle-click (:id post)) :style {:font-size "16px" :cursor "pointer"}} (:likes post)]]]]) (defn test-post-list [posts] [:div {:style {:margin-top "12px"}} (for [post posts] [:div {:key (:id post)} [test-post post]])]) (defn test-app [] [:div.container-fluid [test-header] [test-input (:new-post @app-state)] [test-post-list (reverse (:posts @app-state))]]) (reagent/render-component [test-app] (. 
js/document (getElementById "app"))) (defn on-js-reload [] ;; optionally touch your app-state to force rerendering depending on ;; your application ;; (swap! app-state update-in [:__figwheel_counter] inc) )
40043
(ns reagent-test-app.core (:require [reagent.core :as reagent :refer [atom]])) (enable-console-print!) (defonce app-state (atom {:new-post "" :posts [{:id 0 :name "<NAME>" :postedAt "01/01/2016" :post "<NAME>" :likes 100} {:id 1 :name "<NAME>" :postedAt "02/01/2016" :post "<NAME>" :likes 50} {:id 2 :name "<NAME>" :postedAt "03/01/2016" :post "<NAME>" :likes 25}]})) (defn handle-click [id] (swap! app-state update-in [:posts id :likes] inc)) (defn handle-submit [e] (.preventDefault e) (swap! app-state update :posts conj {:id (count (:posts @app-state)) :name "<NAME>" :postedAt "04/01/2016" :post (:new-post @app-state) :likes 0}) (swap! app-state assoc :new-post "")) (defn handle-change [e] (swap! app-state assoc :new-post (.-target.value e))) (defn test-header [] [:div.row [:div.col-lg-12.text-center {:style {:background "black" :color "white"}} [:h2 {:style {:margin "10px 0"}} "Header"]]]) (defn test-input [new-post] [:div.row {:style {:margin-top "24px"}} [:div.col-lg-8.col-lg-offset-2 [:form {:on-submit handle-submit} [:input.form-control {:type "text" :value new-post :auto-focus true :on-change handle-change}]]]]) (defn test-post [post] [:div [:div.row [:div.col-lg-8.col-lg-offset-2 [:hr]]] [:div.row [:div.col-lg-4.col-lg-offset-2 [:h2 (:name post)]] [:div.col-lg-4 [:span.pull-right {:style {:color "grey" :margin-top "20px" :font-size "16px"}} (:postedAt post)]]] [:div.row [:div.col-lg-8.col-lg-offset-2 [:br] [:p {:style {:font-size "18px"}} (:post post)]]] [:div.row [:div.col-lg-8.col-lg-offset-2 [:a.pull-right {:on-click #(handle-click (:id post)) :style {:font-size "16px" :cursor "pointer"}} (:likes post)]]]]) (defn test-post-list [posts] [:div {:style {:margin-top "12px"}} (for [post posts] [:div {:key (:id post)} [test-post post]])]) (defn test-app [] [:div.container-fluid [test-header] [test-input (:new-post @app-state)] [test-post-list (reverse (:posts @app-state))]]) (reagent/render-component [test-app] (. 
js/document (getElementById "app"))) (defn on-js-reload [] ;; optionally touch your app-state to force rerendering depending on ;; your application ;; (swap! app-state update-in [:__figwheel_counter] inc) )
true
(ns reagent-test-app.core (:require [reagent.core :as reagent :refer [atom]])) (enable-console-print!) (defonce app-state (atom {:new-post "" :posts [{:id 0 :name "PI:NAME:<NAME>END_PI" :postedAt "01/01/2016" :post "PI:NAME:<NAME>END_PI" :likes 100} {:id 1 :name "PI:NAME:<NAME>END_PI" :postedAt "02/01/2016" :post "PI:NAME:<NAME>END_PI" :likes 50} {:id 2 :name "PI:NAME:<NAME>END_PI" :postedAt "03/01/2016" :post "PI:NAME:<NAME>END_PI" :likes 25}]})) (defn handle-click [id] (swap! app-state update-in [:posts id :likes] inc)) (defn handle-submit [e] (.preventDefault e) (swap! app-state update :posts conj {:id (count (:posts @app-state)) :name "PI:NAME:<NAME>END_PI" :postedAt "04/01/2016" :post (:new-post @app-state) :likes 0}) (swap! app-state assoc :new-post "")) (defn handle-change [e] (swap! app-state assoc :new-post (.-target.value e))) (defn test-header [] [:div.row [:div.col-lg-12.text-center {:style {:background "black" :color "white"}} [:h2 {:style {:margin "10px 0"}} "Header"]]]) (defn test-input [new-post] [:div.row {:style {:margin-top "24px"}} [:div.col-lg-8.col-lg-offset-2 [:form {:on-submit handle-submit} [:input.form-control {:type "text" :value new-post :auto-focus true :on-change handle-change}]]]]) (defn test-post [post] [:div [:div.row [:div.col-lg-8.col-lg-offset-2 [:hr]]] [:div.row [:div.col-lg-4.col-lg-offset-2 [:h2 (:name post)]] [:div.col-lg-4 [:span.pull-right {:style {:color "grey" :margin-top "20px" :font-size "16px"}} (:postedAt post)]]] [:div.row [:div.col-lg-8.col-lg-offset-2 [:br] [:p {:style {:font-size "18px"}} (:post post)]]] [:div.row [:div.col-lg-8.col-lg-offset-2 [:a.pull-right {:on-click #(handle-click (:id post)) :style {:font-size "16px" :cursor "pointer"}} (:likes post)]]]]) (defn test-post-list [posts] [:div {:style {:margin-top "12px"}} (for [post posts] [:div {:key (:id post)} [test-post post]])]) (defn test-app [] [:div.container-fluid [test-header] [test-input (:new-post @app-state)] [test-post-list (reverse (:posts 
@app-state))]]) (reagent/render-component [test-app] (. js/document (getElementById "app"))) (defn on-js-reload [] ;; optionally touch your app-state to force rerendering depending on ;; your application ;; (swap! app-state update-in [:__figwheel_counter] inc) )
[ { "context": ";-\n; Copyright 2009 (c) Meikel Brandmeyer.\n; All rights reserved.\n;\n; Permission is hereby ", "end": 41, "score": 0.9998773336410522, "start": 24, "tag": "NAME", "value": "Meikel Brandmeyer" } ]
src/main/clojure/vimclojure/util.clj
Mario-Kart-Felix/vimclojure
1
;- ; Copyright 2009 (c) Meikel Brandmeyer. ; All rights reserved. ; ; Permission is hereby granted, free of charge, to any person obtaining a copy ; of this software and associated documentation files (the "Software"), to deal ; in the Software without restriction, including without limitation the rights ; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ; copies of the Software, and to permit persons to whom the Software is ; furnished to do so, subject to the following conditions: ; ; The above copyright notice and this permission notice shall be included in ; all copies or substantial portions of the Software. ; ; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, ; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN ; THE SOFTWARE. (ns vimclojure.util) ; Common helpers (defn str-cut "Cut n characters of the end of the string s." [string n] (.substring string 0 (- (.length string) n))) (defn str-wrap "Wrap the given string into the given separators." ([string sep] (str-wrap string sep sep)) ([string before after] (str before string after))) (defn str-cat "Concatenate the given collection to a string, separating the collection's items with the given separator." [coll sep] (apply str (interpose sep coll))) (defn splitted-match "Splits pattern and candidate at the given delimiters and matches the parts of the pattern with the parts of the candidate. Match means „startsWith“ here." 
[pattern candidate delimiters] (if-let [delimiters (seq delimiters)] (let [delim (first delimiters) pattern-split (.split pattern delim) candidate-split (.split candidate delim)] (and (<= (count pattern-split) (count candidate-split)) (reduce #(and %1 %2) (map #(splitted-match %1 %2 (rest delimiters)) pattern-split candidate-split)))) (.startsWith candidate pattern))) ; Command-line handling (defn print-usage "Print usage information for the given option spec." [description specs] (println description) (newline) (println "Options:") (doseq [spec (filter vector? specs)] (let [loption (name (first spec)) spec (rest spec) soption (when (symbol? (first spec)) (name (first spec))) spec (if soption (rest spec) spec) descr (first spec) default (first (rest spec))] (print (format " --%-10s " loption)) (when soption (print (format "-%-3s " soption))) (print descr) (when default (newline) (print (format " The default is '%s'." default)))) (newline)) (flush)) (defn with-command-line* "Parse the command line arguments according to the given specifications. A specification consists of a vector of an option name, an optional short option name, a description and an optional default value. An option name ending in ? designates a boolean flag. The last entry in the list of specifications might be a symbol which is bound to the rest of the command line arguments when -- or the first non-option argument is encountered. -h, --help or -? stop the parsing and trigger the printing of the usage message and thunk is not called. thunk is called with a map of option-value pairs found on the command line." [args description specs thunk] (let [[options soptions] (reduce (fn [[opts sopts] spec] (let [lopt (name (first spec)) sopt (second spec) sopt (if (symbol? sopt) (name sopt) nil) [lopt sopt type] (if (.endsWith lopt "?") [(str-cut lopt 1) sopt :flag] [lopt sopt :option])] (vector (assoc opts lopt type) (assoc sopts sopt lopt)))) [{} {}] (filter vector? specs)) rest-arg (when (symbol? 
(last specs)) (name (last specs)))] (loop [args (seq args) argmap (hash-map)] (let [arg (first args)] (cond (empty? args) (if-not rest-arg (thunk argmap) (throw (Exception. "Missing command line arguments"))) (some #{arg} ["-h" "--help" "-?"]) (print-usage description specs) (= arg "--") (if rest-arg (thunk (assoc argmap rest-arg (rest args))) (throw (Exception. "Unexpected command line arguments"))) (.startsWith arg "--") (let [option (.substring arg 2)] (condp = (options option) :flag (recur (rest args) (assoc argmap option true)) :option (if-let [value (second args)] (recur (nthnext args 2) (assoc argmap option value)) (throw (Exception. (str "Missing value for option: " arg)))) nil (throw (Exception. (str "Unknown option: " option))))) (.startsWith arg "-") (let [option (.substring arg 1)] (if-let [loption (soptions option)] (recur (cons (str "--" loption) (rest args)) argmap) (throw (Exception. (str "Unknown option: " option))))) :else (if rest-arg (thunk (assoc argmap rest-arg args)) (throw (Exception. "Unexpected command line arguments")))))))) (defmacro with-command-line "Parses the command line arguments given according to the specifications. A specification consists of a vector of an option name, an optional short option name, a description and an optional default value. An option name ending in ? designates a boolean flag. The last entry in the list of specifications might be a symbol which is bound to the rest of the command line arguments when -- or the first non-option argument is encountered. -h, --help or -? stop the parsing and trigger the printing of the usage message and body is not executed. The body is executed with the long option names bound to the value found on the command line or the default value if the option was not given. Flags default to nil, ie. logical false." [args description specs & body] (let [defaults (map (fn [spec] (cond (not (vector? 
spec)) [spec nil] (-> spec first name (.endsWith "?")) (vector (-> spec first name (str-cut 1) symbol) false) (-> spec second symbol?) (vector (first spec) (when (= (count spec) 4) (nth spec 3))) :else (vector (first spec) (when (= (count spec) 3) (nth spec 2))))) specs)] `(with-command-line* ~args ~description (quote ~specs) (fn [{:strs ~(vec (map first defaults)) :or ~(into {} defaults)}] ~@body)))) ; Vim Interface: (defmulti #^{:arglists '([thing]) :doc "Convert the Clojure thing into a Vim thing."} clj->vim class) (defmethod clj->vim :default [thing] (str-wrap thing \")) (derive clojure.lang.ISeq ::ToVimList) (derive clojure.lang.IPersistentSet ::ToVimList) (derive clojure.lang.IPersistentVector ::ToVimList) (defmethod clj->vim ::ToVimList [thing] (str-wrap (str-cat (map clj->vim thing) ", ") \[ \])) (derive clojure.lang.IPersistentMap ::ToVimDict) (defmethod clj->vim ::ToVimDict [thing] (str-wrap (str-cat (map (fn [[kei value]] (str (clj->vim kei) " : " (clj->vim value))) thing) ", ") \{ \})) (defmethod clj->vim String [thing] (pr-str thing)) (defmethod clj->vim clojure.lang.Named [thing] (if-let [prefix (namespace thing)] (str-wrap (str prefix "/" (name thing)) \") (str-wrap (name thing) \"))) (defmethod clj->vim Number [thing] (str thing)) (defn safe-var-get [the-var] (when (.isBound the-var) (var-get the-var))) (defn decide-completion-in [nspace prefix base] (let [nom (name prefix)] (if (pos? (count nom)) (cond (or (contains? (set (map ns-name (all-ns))) prefix) (contains? (ns-aliases nspace) prefix)) [:local-var] (or (Character/isUpperCase (char (first nom))) (try (instance? Class (ns-resolve nspace prefix)) (catch ClassNotFoundException _ false))) [:static-field] :else (throw (Exception. "Cannot determine type of prefix"))) (cond (Character/isUpperCase (char (first base))) [:import] (< -1 (.indexOf base (int \.))) [:namespace] :else [:full-var :alias :namespace])))) (defn- type-of-completion [thing] (cond (instance? 
clojure.lang.Namespace thing) "n" (instance? java.lang.reflect.Field thing) "S" (instance? java.lang.reflect.Method thing) "M" (class? thing) "c" (coll? thing) (recur (first thing)) (:macro (meta thing)) "m" :else (let [value (safe-var-get thing)] (cond (instance? clojure.lang.MultiFn value) "f" (fn? value) "f" :else "v")))) (defmulti make-completion-item "Create a completion item for Vim's popup-menu." (fn [_ the-thing] (type-of-completion the-thing))) (defmethod make-completion-item "n" [the-name the-space] (let [docs (-> the-space meta :doc) info (str " " the-name \newline (when docs (str \newline docs)))] (hash-map "word" the-name "kind" "n" "menu" "" "info" info))) (defmethod make-completion-item "c" [the-name _] (hash-map "word" the-name "kind" "c" "menu" "" "info" "")) (defmethod make-completion-item "M" [the-name the-methods] (let [nam (name (read-string the-name)) rtypes (map #(-> % .getReturnType .getSimpleName) the-methods) arglists (map (fn [m] (let [types (.getParameterTypes m)] (vec (map #(.getSimpleName %) types)))) the-methods) info (apply str " " the-name \newline \newline (map #(str " " %1 " " nam (str-wrap (str-cat %2 ", ") \( \)) \; \newline) rtypes arglists))] (hash-map "word" the-name "kind" "M" "menu" (print-str arglists) "info" info))) (defmethod make-completion-item "S" [the-name [the-field]] (let [nam (name (read-string the-name)) menu (-> the-field .getType .getSimpleName) info (str " " the-name \newline \newline " " menu " " the-name \newline)] (hash-map "word" the-name "kind" "S" "menu" menu "info" info))) (defmethod make-completion-item "v" [the-name the-var] (let [info (str " " the-name \newline) info (if-let [docstring (-> the-var meta :doc)] (str info \newline " " docstring) info)] (hash-map "word" the-name "kind" "v" "menu" (pr-str (try (type @the-var) (catch IllegalStateException _ "<UNBOUND>"))) "info" info))) (defn- make-completion-item-fm [the-name the-fn typ] (let [info (str " " the-name \newline) metadata (meta the-fn) 
arglists (:arglists metadata) info (if arglists (reduce #(str %1 " " (prn-str (cons (symbol the-name) %2))) (str info \newline) arglists) info) info (if-let [docstring (:doc metadata)] (str info \newline " " docstring) info)] (hash-map "word" the-name "kind" typ "menu" (pr-str arglists) "info" info))) (defmethod make-completion-item "f" [the-name the-fn] (make-completion-item-fm the-name the-fn "f")) (defmethod make-completion-item "m" [the-name the-fn] (make-completion-item-fm the-name the-fn "m")) ; Namespace helpers (defn resolve-and-load-namespace "Loads and returns the namespace named by the given string or symbol." [namespace] (let [namespace (if (symbol? namespace) namespace (symbol namespace))] (try (the-ns namespace) (catch Exception _ (require namespace) (the-ns namespace))))) (defn stream->seq "Turns a given stream into a seq of Clojure forms read from the stream." [stream] (let [eof (Object.) rdr (fn [] (read stream false eof))] (take-while #(not= % eof) (repeatedly rdr)))) ; Pretty printing. (defn pretty-print "Print the given form in a pretty way. If Tom Faulhaber's pretty printer is not installed simply defaults prn." [form] (prn form)) (defn pretty-print-code "Print the given form in a pretty way. If Tom Faulhaber's pretty printer is not installed simply defaults prn. Uses the *code-dispatch* formatting." [form] (prn form)) ; Load optional libraries (defmacro defoptional [sym args & body] `(let [docstring# (:doc (meta (var ~sym)))] (defn ~sym ~args ~@body) (alter-meta! (var ~sym) assoc :doc docstring#))) (try (load "optional/prettyprint") (catch Exception exc (when-not (re-find #"Could not locate clojure/contrib/pprint__init.class or clojure/contrib/pprint.clj on classpath" (str exc)) (throw exc))))
124105
;- ; Copyright 2009 (c) <NAME>. ; All rights reserved. ; ; Permission is hereby granted, free of charge, to any person obtaining a copy ; of this software and associated documentation files (the "Software"), to deal ; in the Software without restriction, including without limitation the rights ; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ; copies of the Software, and to permit persons to whom the Software is ; furnished to do so, subject to the following conditions: ; ; The above copyright notice and this permission notice shall be included in ; all copies or substantial portions of the Software. ; ; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, ; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN ; THE SOFTWARE. (ns vimclojure.util) ; Common helpers (defn str-cut "Cut n characters of the end of the string s." [string n] (.substring string 0 (- (.length string) n))) (defn str-wrap "Wrap the given string into the given separators." ([string sep] (str-wrap string sep sep)) ([string before after] (str before string after))) (defn str-cat "Concatenate the given collection to a string, separating the collection's items with the given separator." [coll sep] (apply str (interpose sep coll))) (defn splitted-match "Splits pattern and candidate at the given delimiters and matches the parts of the pattern with the parts of the candidate. Match means „startsWith“ here." 
[pattern candidate delimiters] (if-let [delimiters (seq delimiters)] (let [delim (first delimiters) pattern-split (.split pattern delim) candidate-split (.split candidate delim)] (and (<= (count pattern-split) (count candidate-split)) (reduce #(and %1 %2) (map #(splitted-match %1 %2 (rest delimiters)) pattern-split candidate-split)))) (.startsWith candidate pattern))) ; Command-line handling (defn print-usage "Print usage information for the given option spec." [description specs] (println description) (newline) (println "Options:") (doseq [spec (filter vector? specs)] (let [loption (name (first spec)) spec (rest spec) soption (when (symbol? (first spec)) (name (first spec))) spec (if soption (rest spec) spec) descr (first spec) default (first (rest spec))] (print (format " --%-10s " loption)) (when soption (print (format "-%-3s " soption))) (print descr) (when default (newline) (print (format " The default is '%s'." default)))) (newline)) (flush)) (defn with-command-line* "Parse the command line arguments according to the given specifications. A specification consists of a vector of an option name, an optional short option name, a description and an optional default value. An option name ending in ? designates a boolean flag. The last entry in the list of specifications might be a symbol which is bound to the rest of the command line arguments when -- or the first non-option argument is encountered. -h, --help or -? stop the parsing and trigger the printing of the usage message and thunk is not called. thunk is called with a map of option-value pairs found on the command line." [args description specs thunk] (let [[options soptions] (reduce (fn [[opts sopts] spec] (let [lopt (name (first spec)) sopt (second spec) sopt (if (symbol? sopt) (name sopt) nil) [lopt sopt type] (if (.endsWith lopt "?") [(str-cut lopt 1) sopt :flag] [lopt sopt :option])] (vector (assoc opts lopt type) (assoc sopts sopt lopt)))) [{} {}] (filter vector? specs)) rest-arg (when (symbol? 
(last specs)) (name (last specs)))] (loop [args (seq args) argmap (hash-map)] (let [arg (first args)] (cond (empty? args) (if-not rest-arg (thunk argmap) (throw (Exception. "Missing command line arguments"))) (some #{arg} ["-h" "--help" "-?"]) (print-usage description specs) (= arg "--") (if rest-arg (thunk (assoc argmap rest-arg (rest args))) (throw (Exception. "Unexpected command line arguments"))) (.startsWith arg "--") (let [option (.substring arg 2)] (condp = (options option) :flag (recur (rest args) (assoc argmap option true)) :option (if-let [value (second args)] (recur (nthnext args 2) (assoc argmap option value)) (throw (Exception. (str "Missing value for option: " arg)))) nil (throw (Exception. (str "Unknown option: " option))))) (.startsWith arg "-") (let [option (.substring arg 1)] (if-let [loption (soptions option)] (recur (cons (str "--" loption) (rest args)) argmap) (throw (Exception. (str "Unknown option: " option))))) :else (if rest-arg (thunk (assoc argmap rest-arg args)) (throw (Exception. "Unexpected command line arguments")))))))) (defmacro with-command-line "Parses the command line arguments given according to the specifications. A specification consists of a vector of an option name, an optional short option name, a description and an optional default value. An option name ending in ? designates a boolean flag. The last entry in the list of specifications might be a symbol which is bound to the rest of the command line arguments when -- or the first non-option argument is encountered. -h, --help or -? stop the parsing and trigger the printing of the usage message and body is not executed. The body is executed with the long option names bound to the value found on the command line or the default value if the option was not given. Flags default to nil, ie. logical false." [args description specs & body] (let [defaults (map (fn [spec] (cond (not (vector? 
spec)) [spec nil] (-> spec first name (.endsWith "?")) (vector (-> spec first name (str-cut 1) symbol) false) (-> spec second symbol?) (vector (first spec) (when (= (count spec) 4) (nth spec 3))) :else (vector (first spec) (when (= (count spec) 3) (nth spec 2))))) specs)] `(with-command-line* ~args ~description (quote ~specs) (fn [{:strs ~(vec (map first defaults)) :or ~(into {} defaults)}] ~@body)))) ; Vim Interface: (defmulti #^{:arglists '([thing]) :doc "Convert the Clojure thing into a Vim thing."} clj->vim class) (defmethod clj->vim :default [thing] (str-wrap thing \")) (derive clojure.lang.ISeq ::ToVimList) (derive clojure.lang.IPersistentSet ::ToVimList) (derive clojure.lang.IPersistentVector ::ToVimList) (defmethod clj->vim ::ToVimList [thing] (str-wrap (str-cat (map clj->vim thing) ", ") \[ \])) (derive clojure.lang.IPersistentMap ::ToVimDict) (defmethod clj->vim ::ToVimDict [thing] (str-wrap (str-cat (map (fn [[kei value]] (str (clj->vim kei) " : " (clj->vim value))) thing) ", ") \{ \})) (defmethod clj->vim String [thing] (pr-str thing)) (defmethod clj->vim clojure.lang.Named [thing] (if-let [prefix (namespace thing)] (str-wrap (str prefix "/" (name thing)) \") (str-wrap (name thing) \"))) (defmethod clj->vim Number [thing] (str thing)) (defn safe-var-get [the-var] (when (.isBound the-var) (var-get the-var))) (defn decide-completion-in [nspace prefix base] (let [nom (name prefix)] (if (pos? (count nom)) (cond (or (contains? (set (map ns-name (all-ns))) prefix) (contains? (ns-aliases nspace) prefix)) [:local-var] (or (Character/isUpperCase (char (first nom))) (try (instance? Class (ns-resolve nspace prefix)) (catch ClassNotFoundException _ false))) [:static-field] :else (throw (Exception. "Cannot determine type of prefix"))) (cond (Character/isUpperCase (char (first base))) [:import] (< -1 (.indexOf base (int \.))) [:namespace] :else [:full-var :alias :namespace])))) (defn- type-of-completion [thing] (cond (instance? 
clojure.lang.Namespace thing) "n" (instance? java.lang.reflect.Field thing) "S" (instance? java.lang.reflect.Method thing) "M" (class? thing) "c" (coll? thing) (recur (first thing)) (:macro (meta thing)) "m" :else (let [value (safe-var-get thing)] (cond (instance? clojure.lang.MultiFn value) "f" (fn? value) "f" :else "v")))) (defmulti make-completion-item "Create a completion item for Vim's popup-menu." (fn [_ the-thing] (type-of-completion the-thing))) (defmethod make-completion-item "n" [the-name the-space] (let [docs (-> the-space meta :doc) info (str " " the-name \newline (when docs (str \newline docs)))] (hash-map "word" the-name "kind" "n" "menu" "" "info" info))) (defmethod make-completion-item "c" [the-name _] (hash-map "word" the-name "kind" "c" "menu" "" "info" "")) (defmethod make-completion-item "M" [the-name the-methods] (let [nam (name (read-string the-name)) rtypes (map #(-> % .getReturnType .getSimpleName) the-methods) arglists (map (fn [m] (let [types (.getParameterTypes m)] (vec (map #(.getSimpleName %) types)))) the-methods) info (apply str " " the-name \newline \newline (map #(str " " %1 " " nam (str-wrap (str-cat %2 ", ") \( \)) \; \newline) rtypes arglists))] (hash-map "word" the-name "kind" "M" "menu" (print-str arglists) "info" info))) (defmethod make-completion-item "S" [the-name [the-field]] (let [nam (name (read-string the-name)) menu (-> the-field .getType .getSimpleName) info (str " " the-name \newline \newline " " menu " " the-name \newline)] (hash-map "word" the-name "kind" "S" "menu" menu "info" info))) (defmethod make-completion-item "v" [the-name the-var] (let [info (str " " the-name \newline) info (if-let [docstring (-> the-var meta :doc)] (str info \newline " " docstring) info)] (hash-map "word" the-name "kind" "v" "menu" (pr-str (try (type @the-var) (catch IllegalStateException _ "<UNBOUND>"))) "info" info))) (defn- make-completion-item-fm [the-name the-fn typ] (let [info (str " " the-name \newline) metadata (meta the-fn) 
arglists (:arglists metadata) info (if arglists (reduce #(str %1 " " (prn-str (cons (symbol the-name) %2))) (str info \newline) arglists) info) info (if-let [docstring (:doc metadata)] (str info \newline " " docstring) info)] (hash-map "word" the-name "kind" typ "menu" (pr-str arglists) "info" info))) (defmethod make-completion-item "f" [the-name the-fn] (make-completion-item-fm the-name the-fn "f")) (defmethod make-completion-item "m" [the-name the-fn] (make-completion-item-fm the-name the-fn "m")) ; Namespace helpers (defn resolve-and-load-namespace "Loads and returns the namespace named by the given string or symbol." [namespace] (let [namespace (if (symbol? namespace) namespace (symbol namespace))] (try (the-ns namespace) (catch Exception _ (require namespace) (the-ns namespace))))) (defn stream->seq "Turns a given stream into a seq of Clojure forms read from the stream." [stream] (let [eof (Object.) rdr (fn [] (read stream false eof))] (take-while #(not= % eof) (repeatedly rdr)))) ; Pretty printing. (defn pretty-print "Print the given form in a pretty way. If Tom Faulhaber's pretty printer is not installed simply defaults prn." [form] (prn form)) (defn pretty-print-code "Print the given form in a pretty way. If Tom Faulhaber's pretty printer is not installed simply defaults prn. Uses the *code-dispatch* formatting." [form] (prn form)) ; Load optional libraries (defmacro defoptional [sym args & body] `(let [docstring# (:doc (meta (var ~sym)))] (defn ~sym ~args ~@body) (alter-meta! (var ~sym) assoc :doc docstring#))) (try (load "optional/prettyprint") (catch Exception exc (when-not (re-find #"Could not locate clojure/contrib/pprint__init.class or clojure/contrib/pprint.clj on classpath" (str exc)) (throw exc))))
true
;- ; Copyright 2009 (c) PI:NAME:<NAME>END_PI. ; All rights reserved. ; ; Permission is hereby granted, free of charge, to any person obtaining a copy ; of this software and associated documentation files (the "Software"), to deal ; in the Software without restriction, including without limitation the rights ; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ; copies of the Software, and to permit persons to whom the Software is ; furnished to do so, subject to the following conditions: ; ; The above copyright notice and this permission notice shall be included in ; all copies or substantial portions of the Software. ; ; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, ; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN ; THE SOFTWARE. (ns vimclojure.util) ; Common helpers (defn str-cut "Cut n characters of the end of the string s." [string n] (.substring string 0 (- (.length string) n))) (defn str-wrap "Wrap the given string into the given separators." ([string sep] (str-wrap string sep sep)) ([string before after] (str before string after))) (defn str-cat "Concatenate the given collection to a string, separating the collection's items with the given separator." [coll sep] (apply str (interpose sep coll))) (defn splitted-match "Splits pattern and candidate at the given delimiters and matches the parts of the pattern with the parts of the candidate. Match means „startsWith“ here." 
[pattern candidate delimiters] (if-let [delimiters (seq delimiters)] (let [delim (first delimiters) pattern-split (.split pattern delim) candidate-split (.split candidate delim)] (and (<= (count pattern-split) (count candidate-split)) (reduce #(and %1 %2) (map #(splitted-match %1 %2 (rest delimiters)) pattern-split candidate-split)))) (.startsWith candidate pattern))) ; Command-line handling (defn print-usage "Print usage information for the given option spec." [description specs] (println description) (newline) (println "Options:") (doseq [spec (filter vector? specs)] (let [loption (name (first spec)) spec (rest spec) soption (when (symbol? (first spec)) (name (first spec))) spec (if soption (rest spec) spec) descr (first spec) default (first (rest spec))] (print (format " --%-10s " loption)) (when soption (print (format "-%-3s " soption))) (print descr) (when default (newline) (print (format " The default is '%s'." default)))) (newline)) (flush)) (defn with-command-line* "Parse the command line arguments according to the given specifications. A specification consists of a vector of an option name, an optional short option name, a description and an optional default value. An option name ending in ? designates a boolean flag. The last entry in the list of specifications might be a symbol which is bound to the rest of the command line arguments when -- or the first non-option argument is encountered. -h, --help or -? stop the parsing and trigger the printing of the usage message and thunk is not called. thunk is called with a map of option-value pairs found on the command line." [args description specs thunk] (let [[options soptions] (reduce (fn [[opts sopts] spec] (let [lopt (name (first spec)) sopt (second spec) sopt (if (symbol? sopt) (name sopt) nil) [lopt sopt type] (if (.endsWith lopt "?") [(str-cut lopt 1) sopt :flag] [lopt sopt :option])] (vector (assoc opts lopt type) (assoc sopts sopt lopt)))) [{} {}] (filter vector? specs)) rest-arg (when (symbol? 
(last specs)) (name (last specs)))] (loop [args (seq args) argmap (hash-map)] (let [arg (first args)] (cond (empty? args) (if-not rest-arg (thunk argmap) (throw (Exception. "Missing command line arguments"))) (some #{arg} ["-h" "--help" "-?"]) (print-usage description specs) (= arg "--") (if rest-arg (thunk (assoc argmap rest-arg (rest args))) (throw (Exception. "Unexpected command line arguments"))) (.startsWith arg "--") (let [option (.substring arg 2)] (condp = (options option) :flag (recur (rest args) (assoc argmap option true)) :option (if-let [value (second args)] (recur (nthnext args 2) (assoc argmap option value)) (throw (Exception. (str "Missing value for option: " arg)))) nil (throw (Exception. (str "Unknown option: " option))))) (.startsWith arg "-") (let [option (.substring arg 1)] (if-let [loption (soptions option)] (recur (cons (str "--" loption) (rest args)) argmap) (throw (Exception. (str "Unknown option: " option))))) :else (if rest-arg (thunk (assoc argmap rest-arg args)) (throw (Exception. "Unexpected command line arguments")))))))) (defmacro with-command-line "Parses the command line arguments given according to the specifications. A specification consists of a vector of an option name, an optional short option name, a description and an optional default value. An option name ending in ? designates a boolean flag. The last entry in the list of specifications might be a symbol which is bound to the rest of the command line arguments when -- or the first non-option argument is encountered. -h, --help or -? stop the parsing and trigger the printing of the usage message and body is not executed. The body is executed with the long option names bound to the value found on the command line or the default value if the option was not given. Flags default to nil, ie. logical false." [args description specs & body] (let [defaults (map (fn [spec] (cond (not (vector? 
spec)) [spec nil] (-> spec first name (.endsWith "?")) (vector (-> spec first name (str-cut 1) symbol) false) (-> spec second symbol?) (vector (first spec) (when (= (count spec) 4) (nth spec 3))) :else (vector (first spec) (when (= (count spec) 3) (nth spec 2))))) specs)] `(with-command-line* ~args ~description (quote ~specs) (fn [{:strs ~(vec (map first defaults)) :or ~(into {} defaults)}] ~@body)))) ; Vim Interface: (defmulti #^{:arglists '([thing]) :doc "Convert the Clojure thing into a Vim thing."} clj->vim class) (defmethod clj->vim :default [thing] (str-wrap thing \")) (derive clojure.lang.ISeq ::ToVimList) (derive clojure.lang.IPersistentSet ::ToVimList) (derive clojure.lang.IPersistentVector ::ToVimList) (defmethod clj->vim ::ToVimList [thing] (str-wrap (str-cat (map clj->vim thing) ", ") \[ \])) (derive clojure.lang.IPersistentMap ::ToVimDict) (defmethod clj->vim ::ToVimDict [thing] (str-wrap (str-cat (map (fn [[kei value]] (str (clj->vim kei) " : " (clj->vim value))) thing) ", ") \{ \})) (defmethod clj->vim String [thing] (pr-str thing)) (defmethod clj->vim clojure.lang.Named [thing] (if-let [prefix (namespace thing)] (str-wrap (str prefix "/" (name thing)) \") (str-wrap (name thing) \"))) (defmethod clj->vim Number [thing] (str thing)) (defn safe-var-get [the-var] (when (.isBound the-var) (var-get the-var))) (defn decide-completion-in [nspace prefix base] (let [nom (name prefix)] (if (pos? (count nom)) (cond (or (contains? (set (map ns-name (all-ns))) prefix) (contains? (ns-aliases nspace) prefix)) [:local-var] (or (Character/isUpperCase (char (first nom))) (try (instance? Class (ns-resolve nspace prefix)) (catch ClassNotFoundException _ false))) [:static-field] :else (throw (Exception. "Cannot determine type of prefix"))) (cond (Character/isUpperCase (char (first base))) [:import] (< -1 (.indexOf base (int \.))) [:namespace] :else [:full-var :alias :namespace])))) (defn- type-of-completion [thing] (cond (instance? 
clojure.lang.Namespace thing) "n" (instance? java.lang.reflect.Field thing) "S" (instance? java.lang.reflect.Method thing) "M" (class? thing) "c" (coll? thing) (recur (first thing)) (:macro (meta thing)) "m" :else (let [value (safe-var-get thing)] (cond (instance? clojure.lang.MultiFn value) "f" (fn? value) "f" :else "v")))) (defmulti make-completion-item "Create a completion item for Vim's popup-menu." (fn [_ the-thing] (type-of-completion the-thing))) (defmethod make-completion-item "n" [the-name the-space] (let [docs (-> the-space meta :doc) info (str " " the-name \newline (when docs (str \newline docs)))] (hash-map "word" the-name "kind" "n" "menu" "" "info" info))) (defmethod make-completion-item "c" [the-name _] (hash-map "word" the-name "kind" "c" "menu" "" "info" "")) (defmethod make-completion-item "M" [the-name the-methods] (let [nam (name (read-string the-name)) rtypes (map #(-> % .getReturnType .getSimpleName) the-methods) arglists (map (fn [m] (let [types (.getParameterTypes m)] (vec (map #(.getSimpleName %) types)))) the-methods) info (apply str " " the-name \newline \newline (map #(str " " %1 " " nam (str-wrap (str-cat %2 ", ") \( \)) \; \newline) rtypes arglists))] (hash-map "word" the-name "kind" "M" "menu" (print-str arglists) "info" info))) (defmethod make-completion-item "S" [the-name [the-field]] (let [nam (name (read-string the-name)) menu (-> the-field .getType .getSimpleName) info (str " " the-name \newline \newline " " menu " " the-name \newline)] (hash-map "word" the-name "kind" "S" "menu" menu "info" info))) (defmethod make-completion-item "v" [the-name the-var] (let [info (str " " the-name \newline) info (if-let [docstring (-> the-var meta :doc)] (str info \newline " " docstring) info)] (hash-map "word" the-name "kind" "v" "menu" (pr-str (try (type @the-var) (catch IllegalStateException _ "<UNBOUND>"))) "info" info))) (defn- make-completion-item-fm [the-name the-fn typ] (let [info (str " " the-name \newline) metadata (meta the-fn) 
arglists (:arglists metadata) info (if arglists (reduce #(str %1 " " (prn-str (cons (symbol the-name) %2))) (str info \newline) arglists) info) info (if-let [docstring (:doc metadata)] (str info \newline " " docstring) info)] (hash-map "word" the-name "kind" typ "menu" (pr-str arglists) "info" info))) (defmethod make-completion-item "f" [the-name the-fn] (make-completion-item-fm the-name the-fn "f")) (defmethod make-completion-item "m" [the-name the-fn] (make-completion-item-fm the-name the-fn "m")) ; Namespace helpers (defn resolve-and-load-namespace "Loads and returns the namespace named by the given string or symbol." [namespace] (let [namespace (if (symbol? namespace) namespace (symbol namespace))] (try (the-ns namespace) (catch Exception _ (require namespace) (the-ns namespace))))) (defn stream->seq "Turns a given stream into a seq of Clojure forms read from the stream." [stream] (let [eof (Object.) rdr (fn [] (read stream false eof))] (take-while #(not= % eof) (repeatedly rdr)))) ; Pretty printing. (defn pretty-print "Print the given form in a pretty way. If Tom Faulhaber's pretty printer is not installed simply defaults prn." [form] (prn form)) (defn pretty-print-code "Print the given form in a pretty way. If Tom Faulhaber's pretty printer is not installed simply defaults prn. Uses the *code-dispatch* formatting." [form] (prn form)) ; Load optional libraries (defmacro defoptional [sym args & body] `(let [docstring# (:doc (meta (var ~sym)))] (defn ~sym ~args ~@body) (alter-meta! (var ~sym) assoc :doc docstring#))) (try (load "optional/prettyprint") (catch Exception exc (when-not (re-find #"Could not locate clojure/contrib/pprint__init.class or clojure/contrib/pprint.clj on classpath" (str exc)) (throw exc))))
[ { "context": "O cycle creds and keep in env\n {:access-key-id \"AKIAIJN3D74NBHJIAARQ\"\n :secret-access-key \"13xmDv33Eya2z0Rbk+UaSznf", "end": 1549, "score": 0.9996356964111328, "start": 1529, "tag": "KEY", "value": "AKIAIJN3D74NBHJIAARQ" }, { "context": "id \"AKIAIJN3D74NBHJIAARQ\"\n :secret-access-key \"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\"}))\n\n(def aws-creds-provider-s3-writer\n (aws-cre", "end": 1615, "score": 0.999182939529419, "start": 1575, "tag": "KEY", "value": "13xmDv33Eya2z0Rbk+UaSznfPQWB+bC0xOH5Boop" }, { "context": "O cycle creds and keep in env\n {:access-key-id \"AKIAJPZBKHS4C6DB2YOQ\"\n :secret-access-key \"SOkMz2LX+LMtwIDz4T0Ot783", "end": 1774, "score": 0.9996964335441589, "start": 1754, "tag": "KEY", "value": "AKIAJPZBKHS4C6DB2YOQ" }, { "context": "id \"AKIAJPZBKHS4C6DB2YOQ\"\n :secret-access-key \"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\"}))\n\n\n(def sns-client (aws/client {:api :sns\n ", "end": 1840, "score": 0.9997614026069641, "start": 1800, "tag": "KEY", "value": "SOkMz2LX+LMtwIDz4T0Ot7839USU55Zh36vS/n2c" }, { "context": "ta-set \"app-prod\"\n :write-key \"fadb42b152f679a1575055e9678ac49a\"}))\n (-> v\n (walk-dissoc [:pwd :ses", "end": 4071, "score": 0.9997360706329346, "start": 4039, "tag": "KEY", "value": "fadb42b152f679a1575055e9678ac49a" } ]
src/clj/com/vetd/app/common.clj
jaydeesimon/vetd-app
98
(ns com.vetd.app.common "This is a grab bag until we come up with a reason that it shouldn't be. Before adding a function here, consider whether it would be more appropriate in util. " (:require [com.vetd.app.util :as ut] [expound.alpha :as expound] [clojure.spec.alpha :as spec] [clojure.core.async :as a] [cognitect.aws.client.api :as aws] [cognitect.aws.credentials :as aws-creds] [taoensso.timbre :as log] [buddy.hashers :as bhsh] [clj-honeycomb.core :as hnyc] clojure.pprint)) (def ^:dynamic *session-id* nil) (def ^:dynamic *user-id* nil) ;; This tells all the various worker threads that it's time to die (defonce shutdown-signal (atom false)) (defonce supress-sns? (atom false)) (def ws-on-close-fns& (atom {})) (defn md5-hex [s] (-> s buddy.core.hash/md5 buddy.core.codecs/bytes->hex)) #_ (def handle-ws-inbound nil) (defmulti handle-ws-inbound (fn [{:keys [cmd]} ws-id subscription-fn] cmd)) (defmethod handle-ws-inbound nil [_ _ _] :NOT-IMPLEMENTED) (defn setup-env [prod?] (if prod? (do (log/merge-config! {:level :info :output-fn (partial log/default-output-fn {:stacktrace-fonts {}})})) (do (alter-var-root #'spec/*explain-out* (constantly expound/printer)) (reset! supress-sns? 
true)))) ;;;; AWS API (def aws-creds-provider (aws-creds/basic-credentials-provider ;; TODO cycle creds and keep in env {:access-key-id "AKIAIJN3D74NBHJIAARQ" :secret-access-key "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"})) (def aws-creds-provider-s3-writer (aws-creds/basic-credentials-provider ;; TODO cycle creds and keep in env {:access-key-id "AKIAJPZBKHS4C6DB2YOQ" :secret-access-key "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"})) (def sns-client (aws/client {:api :sns :region "us-east-1" :credentials-provider aws-creds-provider})) (def s3-client (aws/client {:api :s3 :region "us-east-1" :credentials-provider aws-creds-provider-s3-writer})) (def topic->arn {:ui-misc "arn:aws:sns:us-east-1:744151627940:ui-misc" :ui-req-new-prod-cat "arn:aws:sns:us-east-1:744151627940:ui-req-new-prod-cat" :ui-start-round "arn:aws:sns:us-east-1:744151627940:ui-start-round" :customer-success "arn:aws:sns:us-east-1:744151627940:customer-success"}) ;; config (def org-ids-suppress-sns "org ids to suppress SNS, as strings" #{"610687260958" ;; Vetd Test 1 ;; "2208512249632" ;; Vetd }) (defn org-id-suppress-sns? "Should suppress SNS message for this org id?" [org-id] (boolean (org-ids-suppress-sns (str org-id)))) ;; TODO consider using a future (to speed up some frontend reactions) (defn sns-publish "Publishes new notification to AWS SNS. topic - keyword abbrev. for topic's ARN subject - notification Subject message - notification message body" [topic subject message & [{:keys [org-id]}]] (let [arg {:op :Publish :request {:TopicArn (topic->arn topic) :Subject subject :Message message}}] (if-not (or @supress-sns? (org-id-suppress-sns? 
org-id)) (aws/invoke sns-client arg) ;; TODO make separate sns for dev (do (println "SNS PUBLISH SUPPRESSED") (clojure.pprint/pprint arg) {} ;; to ensure ws return gets triggered )))) (defn s3-put [bucket-name file-name data] (aws/invoke s3-client {:op :PutObject :request {:Bucket bucket-name :Key file-name :Body data}})) (defn walk-dissoc [frm ks] (clojure.walk/prewalk (fn [v] (if (map? v) (apply dissoc v ks) v)) frm)) (defn hc-send [v] ;; TODO don't use supress-sns (when-not @supress-sns? (try (when-not (hnyc/initialized?) (hnyc/init {:data-set "app-prod" :write-key "fadb42b152f679a1575055e9678ac49a"})) (-> v (walk-dissoc [:pwd :session-token]) hnyc/send) (catch Throwable e (log/error e))))) (defn log-error [e & [arg]] (hc-send {:type "error" :ex e :arg arg}) (if arg (log/error e arg) (log/error e))) (defn reg-ws-on-close-fn' [ws-on-close-fns ws-id k f] (assoc-in ws-on-close-fns [ws-id k] f)) (defn reg-ws-on-close-fn [ws-id k f] (swap! ws-on-close-fns& reg-ws-on-close-fn' ws-id k f)) (defn unreg-ws-on-close-fn' [ws-on-close-fns ws-id k] (or (try (update ws-on-close-fns ws-id dissoc k) (catch Exception e (log-error e))) ws-on-close-fns)) (defn unreg-ws-on-close-fn [ws-id k] (swap! ws-on-close-fns& unreg-ws-on-close-fn' ws-id k)) (defn force-all-ws-on-close-fns [] (try (let [ws-on-close-fns @ws-on-close-fns&] (log/info (format "Forcing close on %d ws-on-close-fns" (count @ws-on-close-fns&))) (some->> ws-on-close-fns vals (mapcat vals) (pmap #(%)) doall) (log/info "DONE Forcing close on ws-on-close-fns")) (catch Throwable e (log-error e))))
13441
(ns com.vetd.app.common "This is a grab bag until we come up with a reason that it shouldn't be. Before adding a function here, consider whether it would be more appropriate in util. " (:require [com.vetd.app.util :as ut] [expound.alpha :as expound] [clojure.spec.alpha :as spec] [clojure.core.async :as a] [cognitect.aws.client.api :as aws] [cognitect.aws.credentials :as aws-creds] [taoensso.timbre :as log] [buddy.hashers :as bhsh] [clj-honeycomb.core :as hnyc] clojure.pprint)) (def ^:dynamic *session-id* nil) (def ^:dynamic *user-id* nil) ;; This tells all the various worker threads that it's time to die (defonce shutdown-signal (atom false)) (defonce supress-sns? (atom false)) (def ws-on-close-fns& (atom {})) (defn md5-hex [s] (-> s buddy.core.hash/md5 buddy.core.codecs/bytes->hex)) #_ (def handle-ws-inbound nil) (defmulti handle-ws-inbound (fn [{:keys [cmd]} ws-id subscription-fn] cmd)) (defmethod handle-ws-inbound nil [_ _ _] :NOT-IMPLEMENTED) (defn setup-env [prod?] (if prod? (do (log/merge-config! {:level :info :output-fn (partial log/default-output-fn {:stacktrace-fonts {}})})) (do (alter-var-root #'spec/*explain-out* (constantly expound/printer)) (reset! supress-sns? 
true)))) ;;;; AWS API (def aws-creds-provider (aws-creds/basic-credentials-provider ;; TODO cycle creds and keep in env {:access-key-id "<KEY>" :secret-access-key "<KEY>"})) (def aws-creds-provider-s3-writer (aws-creds/basic-credentials-provider ;; TODO cycle creds and keep in env {:access-key-id "<KEY>" :secret-access-key "<KEY>"})) (def sns-client (aws/client {:api :sns :region "us-east-1" :credentials-provider aws-creds-provider})) (def s3-client (aws/client {:api :s3 :region "us-east-1" :credentials-provider aws-creds-provider-s3-writer})) (def topic->arn {:ui-misc "arn:aws:sns:us-east-1:744151627940:ui-misc" :ui-req-new-prod-cat "arn:aws:sns:us-east-1:744151627940:ui-req-new-prod-cat" :ui-start-round "arn:aws:sns:us-east-1:744151627940:ui-start-round" :customer-success "arn:aws:sns:us-east-1:744151627940:customer-success"}) ;; config (def org-ids-suppress-sns "org ids to suppress SNS, as strings" #{"610687260958" ;; Vetd Test 1 ;; "2208512249632" ;; Vetd }) (defn org-id-suppress-sns? "Should suppress SNS message for this org id?" [org-id] (boolean (org-ids-suppress-sns (str org-id)))) ;; TODO consider using a future (to speed up some frontend reactions) (defn sns-publish "Publishes new notification to AWS SNS. topic - keyword abbrev. for topic's ARN subject - notification Subject message - notification message body" [topic subject message & [{:keys [org-id]}]] (let [arg {:op :Publish :request {:TopicArn (topic->arn topic) :Subject subject :Message message}}] (if-not (or @supress-sns? (org-id-suppress-sns? org-id)) (aws/invoke sns-client arg) ;; TODO make separate sns for dev (do (println "SNS PUBLISH SUPPRESSED") (clojure.pprint/pprint arg) {} ;; to ensure ws return gets triggered )))) (defn s3-put [bucket-name file-name data] (aws/invoke s3-client {:op :PutObject :request {:Bucket bucket-name :Key file-name :Body data}})) (defn walk-dissoc [frm ks] (clojure.walk/prewalk (fn [v] (if (map? 
v) (apply dissoc v ks) v)) frm)) (defn hc-send [v] ;; TODO don't use supress-sns (when-not @supress-sns? (try (when-not (hnyc/initialized?) (hnyc/init {:data-set "app-prod" :write-key "<KEY>"})) (-> v (walk-dissoc [:pwd :session-token]) hnyc/send) (catch Throwable e (log/error e))))) (defn log-error [e & [arg]] (hc-send {:type "error" :ex e :arg arg}) (if arg (log/error e arg) (log/error e))) (defn reg-ws-on-close-fn' [ws-on-close-fns ws-id k f] (assoc-in ws-on-close-fns [ws-id k] f)) (defn reg-ws-on-close-fn [ws-id k f] (swap! ws-on-close-fns& reg-ws-on-close-fn' ws-id k f)) (defn unreg-ws-on-close-fn' [ws-on-close-fns ws-id k] (or (try (update ws-on-close-fns ws-id dissoc k) (catch Exception e (log-error e))) ws-on-close-fns)) (defn unreg-ws-on-close-fn [ws-id k] (swap! ws-on-close-fns& unreg-ws-on-close-fn' ws-id k)) (defn force-all-ws-on-close-fns [] (try (let [ws-on-close-fns @ws-on-close-fns&] (log/info (format "Forcing close on %d ws-on-close-fns" (count @ws-on-close-fns&))) (some->> ws-on-close-fns vals (mapcat vals) (pmap #(%)) doall) (log/info "DONE Forcing close on ws-on-close-fns")) (catch Throwable e (log-error e))))
true
(ns com.vetd.app.common "This is a grab bag until we come up with a reason that it shouldn't be. Before adding a function here, consider whether it would be more appropriate in util. " (:require [com.vetd.app.util :as ut] [expound.alpha :as expound] [clojure.spec.alpha :as spec] [clojure.core.async :as a] [cognitect.aws.client.api :as aws] [cognitect.aws.credentials :as aws-creds] [taoensso.timbre :as log] [buddy.hashers :as bhsh] [clj-honeycomb.core :as hnyc] clojure.pprint)) (def ^:dynamic *session-id* nil) (def ^:dynamic *user-id* nil) ;; This tells all the various worker threads that it's time to die (defonce shutdown-signal (atom false)) (defonce supress-sns? (atom false)) (def ws-on-close-fns& (atom {})) (defn md5-hex [s] (-> s buddy.core.hash/md5 buddy.core.codecs/bytes->hex)) #_ (def handle-ws-inbound nil) (defmulti handle-ws-inbound (fn [{:keys [cmd]} ws-id subscription-fn] cmd)) (defmethod handle-ws-inbound nil [_ _ _] :NOT-IMPLEMENTED) (defn setup-env [prod?] (if prod? (do (log/merge-config! {:level :info :output-fn (partial log/default-output-fn {:stacktrace-fonts {}})})) (do (alter-var-root #'spec/*explain-out* (constantly expound/printer)) (reset! supress-sns? 
true)))) ;;;; AWS API (def aws-creds-provider (aws-creds/basic-credentials-provider ;; TODO cycle creds and keep in env {:access-key-id "PI:KEY:<KEY>END_PI" :secret-access-key "PI:KEY:<KEY>END_PI"})) (def aws-creds-provider-s3-writer (aws-creds/basic-credentials-provider ;; TODO cycle creds and keep in env {:access-key-id "PI:KEY:<KEY>END_PI" :secret-access-key "PI:KEY:<KEY>END_PI"})) (def sns-client (aws/client {:api :sns :region "us-east-1" :credentials-provider aws-creds-provider})) (def s3-client (aws/client {:api :s3 :region "us-east-1" :credentials-provider aws-creds-provider-s3-writer})) (def topic->arn {:ui-misc "arn:aws:sns:us-east-1:744151627940:ui-misc" :ui-req-new-prod-cat "arn:aws:sns:us-east-1:744151627940:ui-req-new-prod-cat" :ui-start-round "arn:aws:sns:us-east-1:744151627940:ui-start-round" :customer-success "arn:aws:sns:us-east-1:744151627940:customer-success"}) ;; config (def org-ids-suppress-sns "org ids to suppress SNS, as strings" #{"610687260958" ;; Vetd Test 1 ;; "2208512249632" ;; Vetd }) (defn org-id-suppress-sns? "Should suppress SNS message for this org id?" [org-id] (boolean (org-ids-suppress-sns (str org-id)))) ;; TODO consider using a future (to speed up some frontend reactions) (defn sns-publish "Publishes new notification to AWS SNS. topic - keyword abbrev. for topic's ARN subject - notification Subject message - notification message body" [topic subject message & [{:keys [org-id]}]] (let [arg {:op :Publish :request {:TopicArn (topic->arn topic) :Subject subject :Message message}}] (if-not (or @supress-sns? (org-id-suppress-sns? org-id)) (aws/invoke sns-client arg) ;; TODO make separate sns for dev (do (println "SNS PUBLISH SUPPRESSED") (clojure.pprint/pprint arg) {} ;; to ensure ws return gets triggered )))) (defn s3-put [bucket-name file-name data] (aws/invoke s3-client {:op :PutObject :request {:Bucket bucket-name :Key file-name :Body data}})) (defn walk-dissoc [frm ks] (clojure.walk/prewalk (fn [v] (if (map? 
v) (apply dissoc v ks) v)) frm)) (defn hc-send [v] ;; TODO don't use supress-sns (when-not @supress-sns? (try (when-not (hnyc/initialized?) (hnyc/init {:data-set "app-prod" :write-key "PI:KEY:<KEY>END_PI"})) (-> v (walk-dissoc [:pwd :session-token]) hnyc/send) (catch Throwable e (log/error e))))) (defn log-error [e & [arg]] (hc-send {:type "error" :ex e :arg arg}) (if arg (log/error e arg) (log/error e))) (defn reg-ws-on-close-fn' [ws-on-close-fns ws-id k f] (assoc-in ws-on-close-fns [ws-id k] f)) (defn reg-ws-on-close-fn [ws-id k f] (swap! ws-on-close-fns& reg-ws-on-close-fn' ws-id k f)) (defn unreg-ws-on-close-fn' [ws-on-close-fns ws-id k] (or (try (update ws-on-close-fns ws-id dissoc k) (catch Exception e (log-error e))) ws-on-close-fns)) (defn unreg-ws-on-close-fn [ws-id k] (swap! ws-on-close-fns& unreg-ws-on-close-fn' ws-id k)) (defn force-all-ws-on-close-fns [] (try (let [ws-on-close-fns @ws-on-close-fns&] (log/info (format "Forcing close on %d ws-on-close-fns" (count @ws-on-close-fns&))) (some->> ws-on-close-fns vals (mapcat vals) (pmap #(%)) doall) (log/info "DONE Forcing close on ws-on-close-fns")) (catch Throwable e (log-error e))))
[ { "context": "Listener OnFailureListener)))\n\n(def config-keys [:firebase-auth-private-key-id\n :firebase-auth-private-key\n ", "end": 448, "score": 0.8651599287986755, "start": 420, "tag": "KEY", "value": "firebase-auth-private-key-id" }, { "context": "[:firebase-auth-private-key-id\n :firebase-auth-private-key\n :firebase-auth-client-email\n ", "end": 493, "score": 0.8507572412490845, "start": 468, "tag": "KEY", "value": "firebase-auth-private-key" }, { "context": "onfig.json\")\n(def auth-config-key-prefix \"firebase-auth-\")\n\n(defn- get-var-as-map\n [key]\n {(.replace (.", "end": 737, "score": 0.7476462125778198, "start": 733, "tag": "KEY", "value": "auth" } ]
src/forces_assemble/auth.clj
oasalonen/forces-assemble
1
(ns forces-assemble.auth (:require [forces-assemble.config :as config] [clojure.java.io :as io] [environ.core :refer [env]] [cheshire.core :as che]) (:import (com.google.firebase FirebaseApp FirebaseOptions FirebaseOptions$Builder) (com.google.firebase.auth FirebaseAuth) (com.google.firebase.tasks OnSuccessListener OnFailureListener))) (def config-keys [:firebase-auth-private-key-id :firebase-auth-private-key :firebase-auth-client-email :firebase-auth-client-id]) (def configuration-ok? (config/configuration-ok? config-keys *ns*)) (def auth-config-path "auth-config.json") (def auth-config-key-prefix "firebase-auth-") (defn- get-var-as-map [key] {(.replace (.replace (name key) auth-config-key-prefix "") "-" "_") (env key)}) (defn- get-auth-secrets [] (reduce merge (map get-var-as-map config-keys))) (defn- auth-config [] (let [partial-auth-config (che/parse-stream (io/reader (io/resource auth-config-path)))] (merge partial-auth-config (get-auth-secrets)))) (defn- create-firebase-options [] (.build (doto (FirebaseOptions$Builder.) (.setServiceAccount (->> (auth-config) (che/generate-string) (.getBytes) (io/input-stream)))))) (defonce fbapp (FirebaseApp/initializeApp (create-firebase-options))) (defn- get-authenticated-user [token-string fbtoken] {:token token-string :user-id (.getUid fbtoken) :name (.getName fbtoken)}) (defn authenticate-token [token] (if-not token (throw (Exception. "Authorization header missing or no authorization token provided"))) (let [p (promise)] (future (doto (.verifyIdToken (FirebaseAuth/getInstance fbapp) token) (.addOnFailureListener (reify OnFailureListener (onFailure [_ exception] (deliver p exception)))) (.addOnSuccessListener (reify OnSuccessListener (onSuccess [_ result] (deliver p result)))))) (let [result (deref p 10000 :timeout)] (if (= :timeout result) (throw (Exception. "Authentication service timeout. Please try again later.")) (if (instance? Exception result) (throw result) (get-authenticated-user token result))))))
50113
(ns forces-assemble.auth (:require [forces-assemble.config :as config] [clojure.java.io :as io] [environ.core :refer [env]] [cheshire.core :as che]) (:import (com.google.firebase FirebaseApp FirebaseOptions FirebaseOptions$Builder) (com.google.firebase.auth FirebaseAuth) (com.google.firebase.tasks OnSuccessListener OnFailureListener))) (def config-keys [:<KEY> :<KEY> :firebase-auth-client-email :firebase-auth-client-id]) (def configuration-ok? (config/configuration-ok? config-keys *ns*)) (def auth-config-path "auth-config.json") (def auth-config-key-prefix "firebase-<KEY>-") (defn- get-var-as-map [key] {(.replace (.replace (name key) auth-config-key-prefix "") "-" "_") (env key)}) (defn- get-auth-secrets [] (reduce merge (map get-var-as-map config-keys))) (defn- auth-config [] (let [partial-auth-config (che/parse-stream (io/reader (io/resource auth-config-path)))] (merge partial-auth-config (get-auth-secrets)))) (defn- create-firebase-options [] (.build (doto (FirebaseOptions$Builder.) (.setServiceAccount (->> (auth-config) (che/generate-string) (.getBytes) (io/input-stream)))))) (defonce fbapp (FirebaseApp/initializeApp (create-firebase-options))) (defn- get-authenticated-user [token-string fbtoken] {:token token-string :user-id (.getUid fbtoken) :name (.getName fbtoken)}) (defn authenticate-token [token] (if-not token (throw (Exception. "Authorization header missing or no authorization token provided"))) (let [p (promise)] (future (doto (.verifyIdToken (FirebaseAuth/getInstance fbapp) token) (.addOnFailureListener (reify OnFailureListener (onFailure [_ exception] (deliver p exception)))) (.addOnSuccessListener (reify OnSuccessListener (onSuccess [_ result] (deliver p result)))))) (let [result (deref p 10000 :timeout)] (if (= :timeout result) (throw (Exception. "Authentication service timeout. Please try again later.")) (if (instance? Exception result) (throw result) (get-authenticated-user token result))))))
true
(ns forces-assemble.auth (:require [forces-assemble.config :as config] [clojure.java.io :as io] [environ.core :refer [env]] [cheshire.core :as che]) (:import (com.google.firebase FirebaseApp FirebaseOptions FirebaseOptions$Builder) (com.google.firebase.auth FirebaseAuth) (com.google.firebase.tasks OnSuccessListener OnFailureListener))) (def config-keys [:PI:KEY:<KEY>END_PI :PI:KEY:<KEY>END_PI :firebase-auth-client-email :firebase-auth-client-id]) (def configuration-ok? (config/configuration-ok? config-keys *ns*)) (def auth-config-path "auth-config.json") (def auth-config-key-prefix "firebase-PI:KEY:<KEY>END_PI-") (defn- get-var-as-map [key] {(.replace (.replace (name key) auth-config-key-prefix "") "-" "_") (env key)}) (defn- get-auth-secrets [] (reduce merge (map get-var-as-map config-keys))) (defn- auth-config [] (let [partial-auth-config (che/parse-stream (io/reader (io/resource auth-config-path)))] (merge partial-auth-config (get-auth-secrets)))) (defn- create-firebase-options [] (.build (doto (FirebaseOptions$Builder.) (.setServiceAccount (->> (auth-config) (che/generate-string) (.getBytes) (io/input-stream)))))) (defonce fbapp (FirebaseApp/initializeApp (create-firebase-options))) (defn- get-authenticated-user [token-string fbtoken] {:token token-string :user-id (.getUid fbtoken) :name (.getName fbtoken)}) (defn authenticate-token [token] (if-not token (throw (Exception. "Authorization header missing or no authorization token provided"))) (let [p (promise)] (future (doto (.verifyIdToken (FirebaseAuth/getInstance fbapp) token) (.addOnFailureListener (reify OnFailureListener (onFailure [_ exception] (deliver p exception)))) (.addOnSuccessListener (reify OnSuccessListener (onSuccess [_ result] (deliver p result)))))) (let [result (deref p 10000 :timeout)] (if (= :timeout result) (throw (Exception. "Authentication service timeout. Please try again later.")) (if (instance? Exception result) (throw result) (get-authenticated-user token result))))))
[ { "context": "(def key \"ckczppom\")\n\n(defn md5\n ; http://rosettacode.org/wiki/MD5#", "end": 18, "score": 0.9993659257888794, "start": 10, "tag": "KEY", "value": "ckczppom" } ]
solutions/day04/solution.clj
dbjohnson/advent-of-code
0
(def key "ckczppom") (defn md5 ; http://rosettacode.org/wiki/MD5#Clojure [input] (apply str (map (partial format "%02x") (.digest (doto (java.security.MessageDigest/getInstance "MD5") .reset (.update (.getBytes input))))))) (defn hashit [d] (md5 (format "%s%s" key (str d)))) (defn winner? [d num-zeros] (every? #{\0} (take num-zeros (hashit d)))) (defn find-first-with-n-zeros [num-zeros] (loop [i 0] (if (winner? i num-zeros) i (recur (inc i))))) (println "part 1:" (find-first-with-n-zeros 5)) (println "part 2:" (find-first-with-n-zeros 6))
111569
(def key "<KEY>") (defn md5 ; http://rosettacode.org/wiki/MD5#Clojure [input] (apply str (map (partial format "%02x") (.digest (doto (java.security.MessageDigest/getInstance "MD5") .reset (.update (.getBytes input))))))) (defn hashit [d] (md5 (format "%s%s" key (str d)))) (defn winner? [d num-zeros] (every? #{\0} (take num-zeros (hashit d)))) (defn find-first-with-n-zeros [num-zeros] (loop [i 0] (if (winner? i num-zeros) i (recur (inc i))))) (println "part 1:" (find-first-with-n-zeros 5)) (println "part 2:" (find-first-with-n-zeros 6))
true
(def key "PI:KEY:<KEY>END_PI") (defn md5 ; http://rosettacode.org/wiki/MD5#Clojure [input] (apply str (map (partial format "%02x") (.digest (doto (java.security.MessageDigest/getInstance "MD5") .reset (.update (.getBytes input))))))) (defn hashit [d] (md5 (format "%s%s" key (str d)))) (defn winner? [d num-zeros] (every? #{\0} (take num-zeros (hashit d)))) (defn find-first-with-n-zeros [num-zeros] (loop [i 0] (if (winner? i num-zeros) i (recur (inc i))))) (println "part 1:" (find-first-with-n-zeros 5)) (println "part 2:" (find-first-with-n-zeros 6))
[ { "context": " ]))))\n\n;;TODO: Add this?\n\n;;'Craig add 10 May 2016 . We have some units changing pol", "end": 106264, "score": 0.893337607383728, "start": 106259, "tag": "NAME", "value": "Craig" } ]
src/marathon/ces/behavior.clj
see-eh-eh-effess/m4
0
;;A namespace for defining and composing entity behaviors. ;;We'll define core behaviors here, leveraging the ;;behavior tree approach defined by spork.ai.behavior . (ns marathon.ces.behavior (:require [spork.ai.core :as ai :refer [deref! fget fassoc push-message- debug ->msg]] [spork.ai.behavior :refer [beval success? success run fail behave ->seq ->elapse ->not ->do ->alter ->elapse-until ->leaf ->wait-until ->if ->and ->and! ->pred ->or ->bnode ->while ->reduce always-succeed always-fail bind! bind!! merge! merge!! push! return! val! befn ] :as b] [spork.ai.behaviorcontext :as base :refer :all] [spork.ai [machine :as fsm]] [marathon.data [protocols :as protocols] ] [marathon.ces [core :as core] [unit :as u] [supply :as supply] [demand :as d] ] [spork.cljgraph.core :as graph] [spork.util.general :as gen] [spork.data.priorityq :as pq] [clojure.core.reducers :as r] [spork.entitysystem.store :as store :refer :all :exclude [default]] [spork.sim.simcontext :as sim] [clojure.core.reducers :as r] [spork.util.general :as general]) (:import [spork.ai.behaviorcontext behaviorenv])) ;;Overview ;;======== ;;The general idea behind how we motivate entities to do things is to ;;use composeable behaviors - as defined in spork.ai.behavior - ;;composed into behavior "trees". These trees simplify the typical ;;state-transition model we find in finite-state machines. Where the ;;FSM has zero or more edges - or transitions - between states, ;;behavior trees focus on a small set of composition operations - ;;called internal or intermediate nodes - that define how to traverse ;;the tree. So, rather than evaluating the next state to transition ;;to - along with the pre, executing, and post conditions for the ;;state - we walk a tree of behaviors, where nodes along the path ;;dictate consistent idiomatic ways to evaluate child nodes. 
;;Besides composition, the other core concept is that behaviors may ;;return success, failure, or (in other implementations) run to ;;indicate that a behavior node has not yet finished evaluating. This ;;implementation - focused on unit entity behaviors - takes a fairly ;;naive view and ignores the run evaluation. Rather, we always ;;succeed or fail. ;;Evaluation in the Behavior Environment ;;===================================== ;;Unlike traditional entity "update" or "step" functions, we maintain ;;an explicit context in which the behavior is evaluated - the ;;behavior environment (marathon.ces.basebehavior). This context ;;provides a consistent accumulation of state through which we can ;;view evaluation of the behavior tree as a reduction, with the ;;behavior environment being the accumulated result. Thus, we ;;traverse the tree with an initial behavior environment [reified as a ;;map with useful keys referencing the simulation context/entity ;;store, the entity being processed, the simulated time of the ;;evaluation, and any additional keys useful to evaluation]. Taken as ;;a lexical environment, the keys of the behavior environment form a ;;working set of "variables" or properties that we can either query, ;;update, redefine, add to, or otherwise use to guide behavior ;;evaluation. ;;When evaluating a behavior tree, we start from the root behavior and ;;use its evaluation rules to proceed with the reduction (i.e. ;;compute a resulting behavior environment). The reduced behavior ;;context is then - typically - processed by merging the entity ;;reference into the simulation context reference, returning the ;;simulation context. The function that encapsulates this functional ;;form of entity behavior processing is ;;spork.ai.behaviorcontext/step-entity . ;;Behavior evaluation occurs using the spork.ai.behavior/beval ;;function, which operates similarly to eval but in the domain of ;;behavior trees. 
The evaluation rules are fairly simple: ;;If the item is a vector pair that matches [:success|fail|run ctx], ;;the vector is returned as the output for beval. ;;If the item to be evaluated is a function, then it is applied to the ;;current accumulated context to determine the next behavior to beval. ;;This means that functions may return a final result ala ;;[:success|:fail|:run ctx] or they may return another behavior ;;(function or node) which will continue to be evaluated against the ;;context. ;;If the item to be evaluated is a behavior node - anything ;;implemented the spork.ai.IBehaviorTree protocol - then it is beval'd ;;with the current accumulated context (delegating to the behave ;;function of the IBehaviorTree). ;;The current implementation assumes that the highest-level of ;;evaluation - as in spork.ai.behaviorcontext/step-entity! will ;;always be successful. Anything else is an error (even returning ;;[:fail ...]. ;;Behavior Functions ;;================= ;;Callers may define functions that operate on the behavior ;;environment directly; in some cases this is a useful - if low level ;;- approach to defining behaviors. Arbitrary functions that map a ;;context to a [:success ...] or a [:fail ...] may be used as ;;behaviors, and will operate correctly under beval. ;;For convenience, and to focus on behavior tree traversal as an ;;"evaluation", the spork.ai.behavior/befn macro provides a convenient ;;way to define aforementioned behavior functions with convenient ;;destructuring and behavior result packing built in. Using the befn ;;macro - to define behavior functions - is similar to the standard ;;clojure.core/defn form, with a change the context: The function ;;arguments correspond to a map-destructing of the behavior ;;environment, and where specified by a type hint, will compile to ;;fast field-based accessors for the specific behavior environment. ;;To elide repetitive use of (success ...) 
and the like, and to align ;;with clojure's idiom of using nil for failure, nil results are ;;automatically converted to (failure ...) evaluations. Otherwise, ;;behavior evaluation continues as per beval - the caller can ;;immediately return from the behavior using (success ctx) or yield ;;another behavior as a return value - which will effectively continue ;;evaluation using the new behavior. ;;Additional operations available in a behavior function include: ;;(bind!! {:a 1 :b 2}) => (success (merge benv {:a 1 :b 2})) ;;(return! ^MapEntry [:success x]) => x ;;(return! ^MapEntry [:fail x]) => (Throw (Exeption. ...)) ;;Behavior Nodes ;;============== ;;Aside from encoding custom functionality with raw functions, ;;pre-existing behavior nodes provide an expressive domain specific ;;language for defining behavioral "flow control" in a composeable ;;manner. They effectively define custom behavior functions - again ;;returning [:success|:fail|:run ctx] behind a unified protocol. The ;;magic lies in how a behavior node executes and interprets the ;;traversal of its children. For example, the ->or behavior ;;corresponds to a logical or of all child nodes (or clauses). Upon ;;evaluation, ->or will reduce its children - in order - returning on ;;the first [:success ctx] it finds, else [:fail ctx]. This is ;;similar to the 'or macro in clojure. Similarly, the ->and will ;;return at the first sign of a failed child node, else return ;;[:success ctx] as its behavior reduction. In-order, ;;i.e. left-to-right node traversal is a common idiom (although not a ;;constraint) in behavior trees, and allows one to follow the behavior ;;"logic" in a simple, consistent manner by following the traversal. ;;These nodes provide a simple way to compose behaviors and to ;;communicate success/failure throughout the traversal. These trees ;;may be embedded as children of like nodes, creating sophisticatd ;;behaviors with a declarative specification. 
Callers are advised to ;;use the canonical behavior nodes where possible to utilize their ;;expressive power, readability, and re-use. ;;Updating Units by Sending Messages ;;================================== ;;Technically, a unit entity update is any application of ;;spork.ai.behaviorcontext/step-entity!, in which the entity, the ;;simulation context, and a behavior - either a unique behavior ;;associated with the entity's :behavior component, or a default ;;global behavior defined in ;;spork.ai.behaviorcontext/default-behavior - are munged into a ;;spork.ai.behaviorcontext/behaviorenv. ;;Thus, stepping entities requires the simulation context/entity ;;store, the entity to update, and a message to send it. The result ;;will be a simulation context / entity store reflecting any committed ;;changes in response to how the entity "behaved" in response to the ;;message. ;;We use messages - as defined in marathon.ces.core/->msg, as an ;;entry-point to initiate behavior and provide initial bindings for ;;the behavior environemnt. For instance, the convenience function ;;marathon.ces.core/handle-message! merely wraps step-entity!, while ;;marathon.ces.core/send!! provides a simple API for defining messages ;;to send to the entity in addition to computing the result of a send ;;/ behavior. ;;When are Messages Sent, or When do Updates Happen? ;;======================= ;;Currently, entities send themselves messages typically in reponse to ;;"organic" events such as following a rotational policy. Once the ;;entity is initialized, it will likely request an update at a later ;;time, the span of which is predicated based on the amount of time ;;the unit is supposed to wait in a particular state according to its ;;rotational policy. Absent any "outside" interference, this message ;;will be propogated to the entity at the scheduled time, with the ;;entity living in eventless stasis (retaining the state from its last ;;known update) until the message is delivered. 
For unit entities, ;;message delivery is dispatched during invocation of ;;marathon.ces.supply/manage-supply, at which point any units ;;scheduled for updating are notified. ;;Inorganic messages occur when external forces exert unexpected ;;control over the unit entity. These typically manifest in events ;;like filling demand, sending units home, changing policies, or any ;;number of things that are unexplained by the unit's rotational ;;policy - yet necessary for simulation. ;;How Are Messages Processed? ;;=========================== ;;Messages may occur out-of-sync with the unit's current status. That ;;is, on the timeline the unit follows, the entity is not guaranteed ;;to have been "updated" at the same time step as the new message is ;;received. ;;Consequently, we need to synchronize, or roll the unit forward in ;;time to account for any pending updates and to bring the entity into ;;a synchronized state at the time of the message. Unit entity ;;behavior is defined to account for an elapsed time, represented by ;;deltat in the behavior environment, which allows us to accomplish ;;rolling forward. For instance, if a unit arrives at a dwelling ;;state, and needs to wait there for 365 days until the next update, ;;with the implication that the dwelling behavior merely adds 1 unit ;;of dwell to a dwell statistic for every elapsed day, the entity will ;;have an update scheduled 365 days later - at which point the deltat ;;will indicate the need to roll forward 365 days and thus add 365 ;;days to the dwell stat. ;;If an update or similar message arrives earlier than the next ;;scheduled update, such as from an inorganic message - say a ;;deployment 18 days later, then the unit must be "aged" or rolled ;;forward 18 days to account for the elapsed time. From that ;;synchronization point, the unit may process the pending message and ;;accomplish its deployment, initiating another scheduled update. 
;;Message processing always occurs after synchronizing the unit with ;;the time frame that the message was sent. In terms of behavior ;;trees, message processing and "rolling forward" are merely behavior ;;functions that can be composed like any other. This opens up a raft ;;of flexible options for "communicating" with entities, as well as ;;offering the possibility for either centralizing and synchronously ;;updating entity state for all entities, or using Erlang-style ;;message-passing concurrency (or other asynchronous communication and ;;state management like clojure's software transactional memory or ;;channels) to perform asychronous updates, possibly in parallel. ;;Currently, the default implementation is synchronous and ;;centralized. ;;__utils__ (def ^:constant +inf+ Long/MAX_VALUE) (def ^:constant +twenty-years+ 7300) (defmacro ensure-pos! "Ensures n is a positive, non-zero value, else throws an exception." [n] `(if (pos? ~n) ~n (throw (Exception. (str [:non-positive-value ~n]))))) (defmacro non-neg! "Ensures n is a positive or zero value, else throws an exception." ([lbl x] `(if (not (neg? ~x)) ~x (throw (Exception. (str [~lbl :negative-value ~x]))))) ([x] `(if (not (neg? ~x)) ~x (throw (Exception. (str [:negative-value ~x])))))) #_(defn non-neg! ([lbl x] (if (not (neg? x)) x (throw (Exception. (str lbl " " x " cannot be negative!"))))) ([x] (non-neg! "" x))) (defmacro try-get [m k & else] `(if-let [res# (get ~m ~k)] res# ~@else)) (defn rconcat ([& colls] (reify clojure.core.protocols/CollReduce (coll-reduce [this f1] (let [c1 (first colls) init (reduce (fn [acc x] (reduced x)) (r/take 1 c1)) a0 (reduce f1 init (r/drop 1 c1))] (if (reduced? 
a0) @a0 (reduce (fn [acc coll] (reduce (fn [acc x] (f1 acc x)) acc coll)) a0 (r/drop 1 colls))))) (coll-reduce [this f init] (reduce (fn [acc coll] (reduce (fn [acc x] (f acc x)) acc coll)) init colls)) clojure.lang.ISeq (seq [this] (seq (into [] (r/mapcat identity colls) ))) ))) (defn pass [msg ctx] (->> (success ctx) (core/debug-print [:passing msg]))) (def ^:dynamic *interact* false) (defmacro if-y [expr & else] `(if ~'*interact* (if (and (= (clojure.string/upper-case (read)) "Y")) ~expr ~@else) ~expr)) (defmacro log! [msg ctx] `(do (debug ~msg) ~ctx)) ;;migrate.,.. (defn echo [msg] (fn [ctx] (do (debug msg) (success ctx)))) (defmacro deref!! [v] (let [v (with-meta v {:tag 'clojure.lang.IDeref})] `(.deref ~v))) (defmacro val-at "Synonimous with clojure.core/get, except it uses interop to directly inject the method call and avoid function invocation. Intended to optimize hotspots where clojure.core/get adds unwanted overhead." [m & args] (let [m (with-meta m {:tag 'clojure.lang.ILookup})] `(.valAt ~m ~@args))) ;;let's see if we can memoize get-next-position for big gainz yo... (defn memo-2 [f & {:keys [xkey ykey] :or {xkey identity ykey identity}}] (let [xs (java.util.HashMap.)] (fn [x1 y1] (let [x (xkey x1) y (ykey y1)] (if-let [^java.util.HashMap ys (.get xs x)] (if-let [res (.get ys y)] res (let [res (f x1 y1)] (do (.put ys y res) res))) (let [res (f x1 y1) ys (doto (java.util.HashMap.) (.put y res)) _ (.put xs x ys)] res)))))) ;;slightly faster for memoizing policy name. ;;This should be a concurent hashmap... (defn memo2-policy [f] (let [xs (java.util.HashMap.)] (fn [^clojure.lang.ILookup x1 y] (let [x (marathon.data.protocols/atomic-name x1) #_(.valAt x1 :name)] (if-let [^java.util.HashMap ys (.get xs x)] (if-let [res (.get ys y)] res (let [res (f x1 y)] (do (.put ys y res) res))) (let [res (f x1 y) ys (java.util.HashMap.) 
_ (.put ys y res) _ (.put xs x ys)] res)))))) (defn memo1-policy [f] (let [xs (java.util.HashMap.)] (fn [^clojure.lang.ILookup x1] (let [x (marathon.data.protocols/atomic-name x1) #_(.valAt x1 :name)] (if-let [res (.get xs x)] res (let [res (f x1)] (do (.put xs x res) res))))))) ;;an alternative idea here... ;;use a closure to do all this stuff, and reify to give us implementations ;;for the object. We can also just us a mutable hashmap behind the ;;scene if we want to...at some point, it's probably better to have ;;the shared-nothing approach and just leave entities in their ;;own mutable cells, isolated from other state. We can ;;still maintain persistent history. Everything becomes a lookup though; ;;we have to find the current value of the entity at time t; ;;More to think of here.. ;;New ;;Environment for evaluating entity behaviors, adapted for use with the simcontext. ;;If we provide an address, the entity is pushed there. So, we can have nested ;;updates inside associative structures. ;;__Utility functions__ ;;Entity step operations... (defn progress-cycle [x width] (if (>= x width) 0 (unchecked-inc x))) ;;testing function... (defn deployment-over? [y] (or (>= y (* 15 31)) (and (>= y 30) (<= (rand) 0.01)))) ;;testing function... (defn should-deploy? [t tmax] (and (>= t 365) (<= (rand) (* 0.005 (/ (double t) tmax))))) (defn deployed? [e] (identical? (:state e) :deploying)) (defn should-reset? [t tmax] (>= t tmax)) (defn spawning? [^spork.ai.machine.statedata statedata] (identical? (.curstate statedata) :spawning)) ;;aux functions will most likely be plentiful. We specifically ;;have a host of helper functions for unit-specific entity behaviors. ;;Most of them deal with how the units read their policies and stuff. 
;;__Aux Functions__ ;;#TODO See if we can encode or derive a more meaningful semantics ;;from the indices currently associated with the states...for ;;instance, :deployable randomly came back with 7 as a state, we ;;either don't want this or we want to have it mean something. ;;Note: these are specific to unit, so could probably go into the unit ;;namespace; save on real estate. ;;Performance: inlined to alleviate minor hotspot....marginal gains. ;;Lol inlining hurts us a bit here, better not to inline... (defn get-state [unit position] (case position :abrupt-withdraw :abrupt-withdraw :recovery :recovery (let [s (protocols/get-state (val-at unit :policy) position)] (if (number? s) :dwelling s) ;;wierd... ))) ;; TOM Hack 24 July 2012 -> again, to facilitate implicit recovery. In the case of explicit recovery policy, ;; we defer to the unit's policy to determine how long to wait. In the case of implicit recovery, we use ;; a global parameter for all units, to determine wait time if they are in a recovery state. ;; Similarly, we account for units with policies that do not have an explicit recovered state. ;; In this case, we inject the equivalent of a fake state, with 0 wait time, to allow for recovery ;; processing to occur. ;;original non-memoized function. #_(defn get-next-position [policy position] (case position :recovery :recovered :recovered :re-entry (if-let [res (protocols/next-position policy position)] res (throw (Exception. (str [:dont-know-following-position position :in (:name policy)])))))) ;;memoized to alleviate hotspot, marginal gains. ;;NOTE: this causes a problem with composite policies... ;;We need to memoize based on a finer criteria, based on the ;;active policy name... ;;Added another default for :modernizing-deployable, indicat (def get-next-position (memo2-policy (fn get-next-position [policy position] (case position :recovery :recovered :recovered :re-entry (if-let [res (protocols/next-position policy position)] res (throw (Exception. 
(str [:dont-know-following-position position :in (:name policy)])))) )))) ;;We're getting too far ahead of ourselves during policy change calcs. ;;Jumping the position we're "in"...for max/nearmax policies, this leaves ;;us with. ;;Patched to allow specified recovery times. (defn policy-wait-time ([policy statedata position deltat recovery-time] (cond (identical? position :recovery) recovery-time ;;this is a weak default. We'll either fix the policies or wrap the behavior later. (identical? position :recovered) 0 :else (let [frompos (get-next-position policy position) topos (get-next-position policy frompos)] (if-let [t (protocols/transfer-time policy frompos topos)] (- t (- deltat (fsm/remaining statedata))) (throw (Exception. (str [:undefined-transfer :from frompos :to topos :in [(protocols/policy-name policy) (protocols/atomic-name policy)]]))) ;if it's not defined in policy...instant? )))) ([policy statedata position deltat] (policy-wait-time policy statedata position deltat 0)) ;;weak, I just copied this down. Ugh. ([policy position] (cond (identical? position :recovery) 0 ;;this is a weak default. We'll either fix the policies or wrap the behavior later. (identical? position :recovered) 0 :else (let [frompos (get-next-position policy position) topos (get-next-position policy frompos) ] (if-let [t (protocols/transfer-time policy frompos topos)] t (throw (Exception. (str [:undefined-transfer :from frompos :to topos :in [(protocols/policy-name policy) (protocols/atomic-name policy)] ])))))))) ;;aux function to help with policy transfers. (defn immediate-policy-wait-time [policy frompos] (protocols/transfer-time policy frompos (get-next-position policy frompos))) ;;Pulled out to address concerns in get-wait-time. ;;Computes the wait time - i.e. transfer time - between ;;frompos and topos relative to a unit's policy and statedata. 
(defn immediate-wait-time ([unit frompos topos deltat statedata] (let [wt (protocols/transfer-time (:policy unit) frompos topos) remaining (fsm/remaining statedata) deltat (or deltat remaining) ;allow the ctx to override us... ] (- wt (- deltat remaining)))) ([unit frompos {:keys [deltat statedata] :as benv}] (immediate-wait-time unit frompos (get-next-position (:policy unit) frompos) deltat statedata))) ;;Could be a cleaner way to unpack our data, but this is it for now... ;;need to fix this...let's see where we use it. ;;Note: this depends on policy-wait-time, which is great, but the ;;use-case is intended for a future, planned wait. In other words, ;;this fails us when we want to compute the wait time from a current ;;policy position - ala during a policy change. (defn get-wait-time ;;WARNING: we define an inconsistency here in the 4-arity version. ;;If we specifcy the from,to positions, the wait-time is computed using ;;frompos as the the starting position. The other arities compute ;;using policy-wait-time, which uses the successor wait time of the ;;current position - i.e. how long will I have to wait in the next position. ;;Current usage appears correct - namely the 3-arity version, but that ;;could throw us off - as it did for initial policy-change implementation! ([unit position {:keys [deltat statedata ctx] :as benv}] ;;uses position after current... (policy-wait-time (:policy unit) statedata position (or deltat 0) (or (:default-recovery unit) 0))) ([position {:keys [entity] :as benv}] (get-wait-time @entity position benv)) ([{:keys [wait-time] :as benv}] wait-time)) ;;Basic API ;;========= ;;The rest of the simulation still relies on our pre-existing API, ;;namely that we have "change-state", and "update" ;;note that change-state already exists in marathon.sim.unit/change-state, ;;we're merely providing an interface to the unit's behavior for it. 
;;Also note that change-state is only called (currently) from ;;marathon.sim.demand (for abrupt withdraws), and marathon.sim.supply ;;(for deployments). ;;might ditch these.... (declare change-state-beh update-state-beh update-state roll-forward-beh lite-update-state-beh check-overlap check-deployable check-deployable-state finish-cycle spawning-beh ;; age-unit moving-beh process-messages-beh ;;re-entry behaviors abrupt-withdraw-beh re-entry-beh recovery-beh ;;policy change fwd declarations apply-policy-change defer-policy-change policy-change-state try-deferred-policy-change ;; auxillary behavior definitions. location-based-beh wait-based-beh ) ;;API ;;=== ;;These are the entry points that will be called from the outside. ;;Under the legacy implementation, they delegated to a hard coded ;;finite state machine that interpreted rotational policy to infer ;;state transitions. The general mechanism is to augment the ;;simulation context. We may want to define a single function ;;load-context and unload-context the clears up any augmented ;;contextual items we put in. That, or manage the simulation ;;context separate from the behavior context. For now, managing ;;the simcontext along with the behavior context (treating it ;;as a huge blackboard) seems like the simplest thing to do. ;;__update-entity!__ ;;Similarly, we'll have update take the context last. ;;update will depend on change-state-beh, but not change-state. ;;change-state is a higher-level api for changing things. ;;Note: this is covered by step-entity! We need to ;;include the roll-forward-beh though, to ensure we're ;;consistent. ;;we can wrap these up and just pass a generic message for the ;;behavior to interpret. ;;change-state becomes ;;load-entity ;;add-message {:to-state to-state :deltat deltat :ctx ctx} ;;Move this out to marathon.ces.unit? ;;auxillary function that helps us wrap updates to the unit. ;;added check to prevent recording traversals to save time and ;;memory. Does not affect debugging. 
;;Record a positional traversal for unit u at time t, and update its
;;:positionpolicy.  Traversal history is only recorded when *debug* is on,
;;to save time and memory.
(defn traverse-unit [u t from to]
  (-> (if marathon.ces.core/*debug*
        (u/add-traversal u t from to)
        u)
      (assoc :positionpolicy to)))

;;this is kinda weak, we used to use it to determine when not to
;;perform updates via the global state, but it's probably less
;;important now...we can actually codify this structurally
;;in the behavior tree now...
;;special states just diverted the fsm update function to
;;a different path (bypassing the global state, i.e. not
;;aging/advancing).  Where we had direct method calls to
;;other state handler functions, we can now just directly
;;encode the transition in the tree...
(definline special-state? [s]
  `(#{:spawning :abrupt-withdraw :recovered :waiting #_:recovery} ~s))

(defn just-spawned?
  "Determines if the entity recently spawned, indicated by a default
   negative spawn time or a spawntime in the present."
  [{:keys [entity ctx] :as benv}]
  (identical? (:state @entity) :spawning))

;;These accessors help us ensure that we're not
;;getting stuck in invalid transitions, or spawning
;;with funky null errors.
;;Map a policy position to its state, throwing if the position is unknown.
(defn position->state [policy positionpolicy]
  (if-let [res (protocols/get-state policy positionpolicy)]
    res
    (throw (Exception. (str {:unknown-position positionpolicy
                             :policy (:name policy)})))))

;;We can make this processing more sophisticated...
;;Since we
;;Map a policy position to its cycle time, throwing if not in the cycle.
(defn position->time [policy positionpolicy]
  (if-let [res (protocols/get-cycle-time policy positionpolicy)]
    res
    (throw (Exception. (str {:position-not-in-cycle positionpolicy
                             :policy (:name policy)})))))

; (let [st (:spawntime @entity)]
;   (or (neg? st)
;       (== st (core/get-time @ctx))))

;;True when the remaining time in the current state is <= the pending deltat.
(defn state-expired? [{:keys [deltat statedata] :as benv}]
  (let [r  (fsm/remaining statedata)
        dt (or deltat 0)]
    (<= r dt)))

;;debatable utility...
;;Not sure where we're using these guys....
(defn to-position?   [to   benv] (identical? (:next-position benv) to))
(defn from-position? [from benv] (identical? (:from-position benv) from))

;;Capturing change information in a structure, rather than passing it
;;around willy-nilly in the environment.  If we have a pending
;;change, there will be changeinfo.  This only applies for instantaneous
;;changes....That way, we can communicate our state updates serially
;;by adding (and removing) changeinfo.
(comment
  (defrecord changeinfo [newstate duration followingstate]))

;;Behaviors
;;=========
;;this is a primitive action masked as a behavior.
;;Each arity merges movement intent (destination/location/wait-time)
;;into the behavior environment and delegates to moving-beh.
(defn move!
  ([location deltat destination wait-time]
   (->and [(->alter (fn [benv] (merge benv {:deltat        deltat
                                            :next-position destination
                                            :next-location location
                                            :wait-time     wait-time})))
           moving-beh]))
  ([deltat destination wait-time]
   (->and [(->alter (fn [benv] (merge benv {:deltat        deltat
                                            :next-position destination
                                            :wait-time     wait-time})))
           moving-beh]))
  ([destination wait-time]
   (->and [(->alter (fn [benv] (merge benv {:next-position destination
                                            :wait-time     wait-time})))
           moving-beh]))
  ([destination]
   (->and [(->alter (fn [benv] (merge benv {:next-position destination})))
           moving-beh])))

;;A lot of these behaviors operate on the concept of a blackboard.
;;The behavior environment, defined in spork.ai.behaviorcontext,
;;is a map of lexical bindings that we use to evaluate the consequences
;;of a unit's behavior.  Certain behaviors may place or remove things
;;from the blackboard to communicate information with other behaviors
;;"down the line".  We can couple behaviors directly using the behavior
;;tree, or allow them to be indirectly coupled using the blackboard
;;as a form of simple event communication.  Many behaviors, like
;;update-after, and roll-forward-beh, will actually "consume"
;;items in the environment, like time.  It will be common to see
;;an ephemeral, or a transactional semantics with the behaviors.
(befn +nothing-state+ [entity deltat ctx]
      (->do (fn [_] (log! (str (:name @entity) " is doing nothing for " deltat) ctx))))

;;Determines if our entities are going to wait beyond the feasible
;;time horizon.  It's not that much of a stretch to consider anything longer
;;than a decent human lifetime effectively infinite...
(defn effectively-infinite? [^long x]
  (or (== x +inf+)
      (>= x (* 365 100))))

(defn compute-proportion
  "Given a current cycletime, a cyclelength we're projecting from, and a
   cyclelength we're projecting to, computes the proportion of the
   normalized projected cycle length - the cycle proportion.
   When dealing with effectively infinite policies, we avoid projecting
   onto finite policies with ~0 proportion for everything by computing the
   cycle proportion based on the remainder of the current cycletime
   relative to the target cyclelength.  Otherwise, we compute a simple
   coordinate based on the proportion of ct : clfrom."
  [ct clfrom clto]
  (let [finf (effectively-infinite? clfrom)
        tinf (effectively-infinite? clto)]
    (cond (or (and (not finf) (not tinf))
              (and finf tinf))                ;policy relative
          (core/float-trunc (/ ct clfrom) 6)
          tinf                                ;relative to infinite policy...
          (core/float-trunc (/ ct clto) 6)
          :else
          (-> (rem ct clto)     ;chop
              (/ clto)          ;normalize
              (core/float-trunc 6)))))

;;note-we have a wait time in the context, under :wait-time
;;updates an entity after a specified duration, relative to the
;;current simulation time + duration.
;;Note: Added the invariant that we cannot have negative wait-times.
;;ensure-pos! throws an exception if we encounter negative wait times.
(befn update-after ^behaviorenv [entity wait-time tupdate ctx]
      (when wait-time
        (->alter
         #(if (effectively-infinite? wait-time)
            (do (debug [(:name @entity) :waiting :infinitely])
                ;skip requesting update.
                (dissoc % :wait-time))
            (let [tfut (+ tupdate (ensure-pos! wait-time))
                  e    (:name @entity)
                  _    (debug [e :requesting-update :at tfut])]
              (swap! ctx (fn [ctx] (core/request-update tfut e :supply-update ctx)))
              (dissoc % :wait-time) ;remove the wait-time from further consideration...
              )))))

;;NOTE(review): mid-file require; presumably left from interactive development.
(require '[clojure.pprint :as pprint])

;;our idioms for defining behaviors will be to unpack
;;vars we're expecting from the context.  typically we'll
;;just be passing around the simulation context, perhaps
;;with some supplementary keys.
;;Let's think about what it means to change state....
;;Are we in fact changing the root of the behavior?
;;This is where the transition from FSM to behavior tree
;;comes in....
(befn change-state-beh! {:keys [entity ctx statedata state-change deltat]
                         :or   {deltat 0}
                         :as   benv}
      (when state-change
        (let [_ (echo [:state-change (:name @entity)])
              {:keys [newstate duration followingstate timeinstate]
               :or   {timeinstate 0}} state-change
              _ (when (not duration)
                  (throw (Exception.
                          (str "nil value for duration in state change behavior!"))))
              followingstate (or followingstate newstate)
              ;;we change statedata here...
              wt (- duration timeinstate)
              _  (when (neg? wt)
                   (throw (Exception. (str [:negative-wait-time]))))
              _  (debug [:changing-state state-change :wait-time wt])
              newdata (assoc (fsm/change-statedata statedata newstate duration
                                                   followingstate
                                                   marathon.ces.core/*debug*)
                             :timeinstate timeinstate)
              benv    (merge (dissoc benv :state-change)
                             {:statedata   newdata
                              :duration    duration
                              :timeinstate timeinstate
                              :wait-time   wt})
              _ (reset! ctx (supply/log-state! (:tupdate benv) @entity
                                               (:state @entity) newstate @ctx))
              ;;update the entity state, currently redundant.
              _ (swap! entity #(assoc % :state newstate :statedata newdata))
              ;_ (debug [:statedata statedata :newdata newdata :newstate newstate])
              ]
          (beval update-state-beh benv))))

(def change-state-beh
  (->seq [(echo :<change-state-beh>)
          change-state-beh!]))

;;Aux function to compute our state change during spawn.
;;Setting up initial conditions is a PITA, particularly
;;since it's possible that some of the input data is
;;intentionally empty or zeroed out.
;;This helps set up
;;the bread-n-butter wait time as a function of the
;;spawning information, if any, the entity's policy, and
;;the proposed position for the entity.
#_(defn compute-state-stats [entity cycletime policy positionpolicy]
    (let [duration (:duration (:spawn-info @entity))
          ;;duration may be 0.
          ;;if so, we check policy to see if we should be waiting more than 0.
          derive?  (or (not duration) (zero? duration))
          duration (if derive?
                     (let [pw (policy-wait-time policy positionpolicy)
                           _  (debug [:derived-duration (:name @entity)
                                      positionpolicy pw])]
                       pw) ;derive from policy.
                     duration)
          ;;If the position is not in the policy, then we need to
          ;;find a way to compute the duration.
          ;;If we have spawn-info, then we have duration...
          position-time (if derive? #_(pos? duration) ;prescribed.
                          (try (position->time policy positionpolicy)
                               (catch Exception e
                                 (if (protocols/leads-to-start? policy positionpolicy)
                                   0
                                   (throw (Exception.
                                           (str [positionpolicy :isolated-from-cycle]))))))
                          0)
          ;;We're running into problems here....the positionpolicy
          cycletime (if (< cycletime position-time)
                      position-time
                      cycletime)]
      ;;timeinstate also subject to spawn-info....
      {:cycletime     cycletime
       :position-time position-time
       :timeinstate   (if duration
                        0
                        (non-neg! "timeinstate" (- cycletime position-time)))
       ;;timeremaining is subject to spawn info.
       :timeremaining (or duration ;this should keep us from bombing out...
                          (protocols/transfer-time policy positionpolicy
                                                   (protocols/next-position policy positionpolicy)))}))

;;Compute the initial state statistics (cycletime, time-in-state, time
;;remaining) for a spawning entity from its spawn-info, policy, and position.
(defn compute-state-stats [entity cycletime policy positionpolicy]
  (let [duration (:duration (:spawn-info @entity))
        ;;duration may be 0.
        ;;if so, we check policy to see if we should be waiting more than 0.
        derive?  (or (not duration) (zero? duration))
        ;;NOTE(review): unlike the discarded draft above, duration is only
        ;;re-derived here when it exists AND is zero (not when nil).
        duration (if #_derive? (and duration (zero? duration))
                   (let [pw (policy-wait-time policy positionpolicy)
                         _  (debug [:derived-duration (:name @entity)
                                    positionpolicy pw])]
                     pw) ;derive from policy.
                   duration)
        ;;If the position is not in the policy, then we need to
        ;;find a way to compute the duration.
        ;;If we have spawn-info, then we have duration...
        position-time (if derive? ;prescribed.
                        (try (position->time policy positionpolicy)
                             (catch Exception e
                               (if (protocols/leads-to-start? policy positionpolicy)
                                 0
                                 (throw (Exception.
                                         (str [positionpolicy :isolated-from-cycle]))))))
                        0)
        ;;We're running into problems here....the positionpolicy
        cycletime (if (< cycletime position-time)
                    position-time
                    cycletime)]
    ;;timeinstate also subject to spawn-info....
    {:cycletime     cycletime
     :position-time position-time
     :timeinstate   (if duration
                      0
                      (non-neg! "timeinstate" (- cycletime position-time)))
     ;;timeremaining is subject to spawn info.
     :timeremaining (or duration ;this should keep us from bombing out...
                        (protocols/transfer-time policy positionpolicy
                                                 (protocols/next-position policy positionpolicy)))}))

;;Adding a lame function to detect for and enable pre-fill.
;;Right now, we just assume it's enabled, but we'll check the context.
(defn prefill? [ctx]
  (-> ctx core/get-parameters :PreFill))

;;prefill is modeled as the unit's available time impacting its bog budget. So
;;the simplest scheme is to just reduce the bog budget proportional to the
;;unit's available time. Maybe decrement according to its stop-deployable time?
;;Addendum: we want to ensure we never have overlap occurring on day 1.
;;We also want to ensure deployments are somehow optimally ordered in the past.
;;That is, we don't have units deploying (and leaving) at the same time,
;;creating clumps or resonant unfilled demand.  So to avoid this, we
;;create a prefill deployment schedule where units are evenly spaced
;;as if they had deployed in the past.  The trick is to ensure that
;;we account for overlap in this spacing.
;;We also want this to be
;;deterministic and "ideal" akin to how our cycletime spacing is
;;an ideal representation. So to have an ideal prefill deployment
;;schedule is to minimize the effects of overlap and ensure no
;;clumping in addition to the constraint that no prefilling unit
;;will overlap on day 1...
;;The projection scheme ends up being identical to just offsetting
;;unit cycletimes by - (overlap + 1). The only caveat is that we now
;;have the possibility of units near cycletime of (start-deployable + overlap)
;;becoming ineligible to deploy since the offset pushes their cycletime
;;before start deployable. So we allow them to have max prefill by
;;flooring the cycletime at start-deployable. We thus allow units close
;;to start-deployable to have maximum prefill bog budget if they are
;;selected for prefill deployments. This should be unlikely.
;;Edit: we have a problem for policies where tf - ts is > BogBudget,
;;since we end up with negative numbers. This ends up increasing BOG
;;for prefill. First manifested with an unexpected MaxUtilization and
;;infinite policy cyclelength. Working out a scheme to either warn
;;or correct.
;;For any policy where the span of the deployable window is
;;greater than the bogbudget, we have a situation where it's
;;possible to get a negative number.  We apply a correction
;;iff we have eligible deployers (within the interval [ts tf])
;;where we project the monotonically decreasing prefill (negative
;;numbers the further away you get from tf - bogbudget) onto
;;a rotating sequence of prefills similar to the values we
;;have for the units that had positive numbers.  To do this,
;;we transform the negative prefills by computing their
;;abs value's modulus relative to the bogbudget (bound),
;;so they are projected onto positive numbers within the span
;;of [0 bound].  Then to get the ordering correct so we
;;have a decreasing order of positive numbers, we
;;subtract the result from the bound.
;;This gives
;;us a nice repeating spread that's determined by
;;cycletime, bogbudget, overlap, and the deployable
;;window and "should" work with any policy.
;;Project x onto [0 bound): non-negative values pass through unchanged;
;;negative values are wrapped via modulus and flipped so ordering decreases.
(defn inverse-clamp [bound x]
  (if (> x -1)
    x
    (let [x (mod (- x) bound)
          y (- bound x)]
      y)))

;;Compute the prefill bog-budget reduction for a unit inside its policy's
;;deployable window; returns nil when the unit is not in the window.
;;Throws when the computed prefill falls outside [0 .. bogbudget - (overlap+1)].
(defn compute-prefill [ent policy cycletime]
  (let [ts        (protocols/start-deployable policy)
        tf        (protocols/stop-deployable policy)
        bogbudget (protocols/max-bog policy)
        ;;addresses infinite cycle stuff, incorporates
        ;;expected-dwell (assigned to max-dwell).
        {:keys [max-bog max-dwell cycle-length max-mob]} (u/cycle-stats policy)
        tf (min tf max-dwell cycle-length)]
    (when (and (>= cycletime ts) ;;deployable
               (< cycletime tf))
      (let [overlap     (inc (protocols/overlap policy))
            ctprojected (max (- cycletime overlap) ts)
            res         (long (- bogbudget (- tf ctprojected)))
            bound       (- bogbudget overlap)
            clamped     (inverse-clamp bound res)]
        (or (and (>= clamped 0)
                 (<= clamped bound)
                 clamped)
            (throw (ex-info "prefill not in [0 .. bogbudget - (overlap + 1]"
                            {:prefill          clamped
                             :policy-name      (protocols/policy-name policy)
                             :start-deployable ts
                             :stop-deployable  tf
                             :overlap+1        overlap})))))))

;;if we detect a prefill condition, we reduce the unit's
;;bog budget accordingly to space out deployments.
(defn set-prefill [ent policy cycletime ctx]
  (if-let [pf (and (prefill? ctx)
                   (compute-prefill ent policy cycletime))]
    (assoc ent :prefill pf)
    ent))

;;Our default spawning behavior is to use cycle to indicate.
;;There will be times we alter the methods in which a unit
;;spawns (initializes itself in the simulation).  It'd be nice
;;to break this out at some point, for now, we just let it go.
;;we can break the spawning behavior up into smaller tasks...
;;Find out where we're supposed to be.  Do we have initial conditions?
;;Initial conditions are currently derived from cycletime and policy.
;;For instance, we narrowly assume that every unit exists somewhere in
;;on a cycle at t=0, rather than setting them in arbitrary deployments
;;to begin with.
;;This is limiting, we should be able to define
;;all kinds of initial conditions to perform upon spawn (like set
;;location, cycletime, etc.)  For now, we just replicate the
;;cycletime-derived automated initial conditions logic.

;;Given a cycletime, where should we be according to policy?
;;Behavior to control how a unit acts when it spawns.
;;We're trying to account for the unit's initial state...
;;We move from spawning to the initial location.
;;We account for having been at the initial location for
;;timeinstate days (currently tied to cycletime - timetoposition).
;;So, what we really want to do is update the unit initially, possibly
;;with a negative time, and advance it forward to time 0 via the
;;deltat being the timeinstate.
(befn spawning-beh ^behaviorenv {:keys [to-position cycletime tupdate
                                        statedata entity ctx]
                                 :as benv}
      (when (spawning? statedata)
        (let [ent @entity
              ;;we're now tracking default recovery in our context.
              {:keys [positionpolicy policy]} ent
              {:keys [curstate prevstate nextstate timeinstate
                      timeinstateprior duration durationprior
                      statestart statehistory]} statedata
              cycletime (or cycletime (:cycletime ent) 0)
              topos     (if (not (or to-position positionpolicy))
                          (protocols/get-position (u/get-policy ent) cycletime)
                          positionpolicy)
              nextstate (position->state policy positionpolicy)
              {:keys [timeinstate timeremaining cycletime position-time]
               :as stats} (compute-state-stats entity cycletime policy positionpolicy)
              _ (debug [:unit (:name ent) stats])
              spawned-unit (-> ent
                               (assoc :cycletime cycletime
                                      :default-recovery (core/default-recovery @ctx))
                               (u/initCycles tupdate)
                               (u/add-dwell cycletime)
                               (set-prefill policy cycletime @ctx) ;;added for optional prefill to space out deps.
                               (assoc :last-update tupdate)
                               (dissoc :spawn-info) ;eliminate spawning data.
                               )
              ;;may not want to do this..
              _ (reset! entity spawned-unit)
              state-change {:newstate       nextstate
                            :duration       timeremaining
                            :followingstate nil
                            :timeinstate    timeinstate}
              _ (debug [:nextstate nextstate :state-change state-change
                        :current-state (:state ent)])]
          (->> (assoc benv
                      :state-change state-change
                      :location-change {:from-location "Spawning"
                                        :to-location (or (:location (:spawn-info ent))
                                                         topos)}
                      :next-position topos ;queue up a move...
                      )
               (log! (core/msg "Spawning unit "
                               (select-keys (u/summary spawned-unit)
                                            [:name :positionstate
                                             :positionpolicy :cycletime])))
               (beval (->seq [(echo :change-state)
                              change-state-beh
                              #_(fn [benv]
                                  (do (reset! ctx
                                              (supply/log-move! tupdate :spawning
                                                                (:positionpolicy @entity)
                                                                @entity @ctx))
                                      (success benv)))]))))))

;;While we're rolling, we want to suspend message processing.
;;We can do this by, at the outer level, dissocing the messages...
;;or, associng a directive to disable message processing...
;;we want to update the unit to its current point in time.  Basically,
;;we are folding over the behavior tree, updating along the way by
;;modifying the context.  One of the bits of context we're modifying
;;is the current deltat; assumably, some behaviors are predicated on
;;having a positive deltat, others are instantaneous and thus expect
;;deltat = 0 in the context.  Note, this is predicated on the
;;assumption that we can eventually pass time in some behavior....
(befn roll-forward-beh {:keys [entity deltat statedata] :as benv}
      (do (debug [:<<<<<<<<begin-roll-forward (:name @entity)
                  :last-update (:last-update @entity)])
          (cond (spawning? statedata)
                (->seq [spawning-beh
                        roll-forward-beh])
                (pos? deltat)
                ;;iteratively consume deltat in state-sized steps until current.
                (loop [dt   deltat
                       benv benv]
                  (let [sd       (:statedata benv)
                        timeleft (fsm/remaining sd)
                        _ (debug [:sd sd])
                        _ (debug [:rolling :dt dt :remaining timeleft])]
                    (if-y
                     (if (<= dt timeleft)
                       (do (debug [:dt<=timeleft :updating-for dt])
                           ;;this is intended to be the last update...
                           ;;as if we're send the unit an update message
                           ;;for the last amount of time...
                           (beval (->seq [update-state-beh
                                          process-messages-beh]) ;we suspend message processing until we're current.
                                  (assoc benv :deltat dt)))
                       (let [residual (max (- dt timeleft) 0)
                             res      (beval update-state-beh
                                             (assoc benv :deltat timeleft))]
                         (if (success? res)
                           (recur residual ;advance time be decreasing delta
                                  (val! res))
                           res)))
                     nil)))
                :else
                (->seq [update-state-beh
                        process-messages-beh]))))

;;So, at the high level, we have a simple behavior that checks to see
;;if it can move, finds where to move to, starts the process of
;;moving (maybe instantaneous), and waits...
;;We should consider move if our time in state has expired, or
;;if we have a next-location planned.
(befn should-move? ^behaviorenv {:keys [next-position statedata] :as benv}
      (do (debug [:should? {:next-position next-position
                            :remaining (fsm/remaining statedata)
                            :spawning? (spawning? statedata)
                            :wait-time (:wait-time benv)}])
          (when (or next-position
                    (zero? (fsm/remaining statedata)) ;;time is up...
                    (spawning? statedata))
            (success benv))))

;;States (string and keyword forms) whose position doubles as a location.
(def locstates #{"Dwelling" "DeMobilizing" "Recovering"
                 :dwelling :demobilizing :recovering :recovery})

(defn position=location? [newstate]
  (if (not (set? newstate))
    (locstates newstate)
    (gen/some-member newstate locstates)))

;;memoize this...
(alter-var-root #'position=location? gen/memo-1)

;;after updating the unit bound to :entity in our context,
;;we commit it into the supplystore.  This is probably
;;slow....we may want to define a mutable version,
;;or detect if mutation is allowed for a faster update
;;path.  For instance, on first encountering the unit,
;;we establish a mutable cell to its location and use that
;;during the update process.

;;Given that we have the context for a move in place,
;;we want to move as directed by the context.  If there
;;is a wait time associated with the place we're moving
;;to, we will add the wait-time to the context.  That way,
;;downstream behaviors can pick up on the wait-time, and
;;apply it.
;;Note: there's a potential problem where our assumptions about
;;deployability may be violated:  If a policy change occurs, and
;;the old policy position ends up being the new policy position,
;;we bypass the position-change behavior to save time.  If the
;;state-change happens, we still do it, but we miss - by virtue
;;of assuming position changes -> deployable changes -
;;the possibility that while the position may nominally
;;be the same between two policies, the state is not...
;;Case in point: ReqAnalysis_MaxUtilization_FullSurge_AC ->
;;TAA19-23_AC_1:2; for at least one case, we have a transition
;;from #{:deployable :c2 :dwelling} to #{:c2 :dwelling},
;;while the position is still Ready...the fine difference is
;;that the preceding policy had [Reset :deployable] transition
;;to Ready, where the new policy is not deployable until
;;later in cycle.  We end up not updating the deployability
;;of the unit, and it gets selected for a fill that - upon
;;deployment checks - is illegal under the new policy.
;;Solution: strengthen our definition of "no position change"
;;to include no state change....Positions are equal iff
;;they have the same state...the presence (or absence) of
;;:deployable is the key currently...
(befn move->statechange ^behaviorenv {:keys [entity next-position location-change
                                             tupdate statedata state-change ctx]
                                      :as benv}
      (when-let [nextpos next-position] ;we must have a position computed, else we fail.
        (let [t       tupdate
              u       @entity
              frompos (get u :positionpolicy) ;;look up where we're coming from.
              wt      (or (:wait-time benv)
                          (get-wait-time u nextpos benv)) ;;how long will we be waiting?
              location-based? (:location-behavior u)]
          (if (= frompos nextpos) ;;if we're already there...
            (do (debug [:no-movement frompos nextpos {:wt wt :state-change state-change}])
                (if state-change
                  (->seq [(->alter #(assoc % :wait-time nil :next-position nil))
                          check-deployable-state])
                  (success (dissoc benv :next-position))) ;do nothing, no move has taken place.  No change in position.
                #_(success (if state-change
                             (assoc benv :wait-time nil :next-position nil)
                             (dissoc benv :next-position)))) ;do nothing, no move has taken place.  No change in position.
            (let [_        (debug [:moving frompos nextpos])
                  newstate (or (get-state u nextpos) nextpos) ;;need to account for prescribed moves.
                  newstate (if location-based?
                             (into (-> (-> statedata :curstate) #_(disj nextpos))
                                   newstate)
                             newstate)
                  _ (when (nil? newstate)
                      (throw (Exception.
                              (str [:undefined-transition newstate u frompos nextpos wt]))))
                  state-change {:newstate       newstate
                                :duration       wt
                                :followingstate nil
                                :timeinstate    0}
                  _ (reset! entity (-> (if location-based?
                                         (dissoc u :location-behavior)
                                         u)
                                       (traverse-unit t frompos nextpos))) ;update the entity atom
                  ;;if we already have a location change set, then we should respect it.
                  from-loc (:locationname u)
                  to-loc   (if-let [newloc (:next-location benv)]
                             (do (debug [:preset-location newloc :From from-loc])
                                 newloc)
                             (if (position=location? newstate)
                               nextpos
                               from-loc))
                  ;_ (println [from-loc to-loc])
                  ]
              (bind!! ;update the context with information derived
                      ;from moving
               {:position-change {:from-position frompos ;record information
                                  :to-position   nextpos}
                :state-change    state-change
                :location-change (or location-change
                                     (when (not (identical? from-loc to-loc))
                                       {:from-location from-loc
                                        :to-location   to-loc}))
                :wait-time     nil
                :next-position nil
                :next-location nil}))))))

;;The keys a prescribed move may contribute to the behavior environment.
(def movekeys #{:position-change :state-change :location-change})

;;Transfer a pending prescribed move's non-nil entries into the behavior
;;environment, consuming the :prescribed-move key.
(befn prescribed-move->statechange {:keys [prescribed-move tupdate] :as benv}
      (when prescribed-move
        (success
         (reduce-kv (fn [acc k v]
                      (if v (assoc acc k v) acc))
                    (assoc benv :prescribed-move nil)
                    prescribed-move))))

;;True when the entity has a prescribed move scheduled for exactly tupdate.
(defn prescribed? [e tupdate]
  (when-let [pm (val-at @e :prescribed-move)]
    (== (val-at pm :t) tupdate)))

;;PERFORMANCE NOTE: <HOTSPOT> - eliding debug info here saves time...
;;This hooks us up with a next-position and a wait-time
;;going forward.  We also now allow prescribed moves to
;;be set, for things like location-specific policies..
(befn find-move ^behaviorenv {:keys [entity next-position wait-time tupdate] :as benv}
      (if (prescribed? entity tupdate) ;;we have a move set up..
        (let [pm (:prescribed-move @entity)
              _  (debug [:found-prescribed-move pm])]
          (do (swap! entity dissoc :prescribed-move)
              (bind!! {:prescribed-move pm})))
        ;;let's derive a move...
        (let [e          @entity
              currentpos (:positionpolicy e)
              ;_ (when (= currentpos :re-entry) (println (:tupdate benv)))
              p  (or next-position
                     (do (debug [:computing-position currentpos]) ;;performance 1
                         (get-next-position (:policy e) currentpos)))
              wt (if (and next-position wait-time)
                   wait-time
                   (do (debug [:computing-wait (:positionpolicy e)]) ;;performance 2
                       ;;WARNING: This may be using the following wait time...is that what we mean?
                       ;;Given the current position, it's determining how long to wait in the next position.
                       ;;I think we're good...should rename get-wait-time to something more appropriate.
                       ;;get-next-wait-time?
                       (get-wait-time @entity (:positionpolicy e) benv)))
              _ (debug [:found-move {:current-position currentpos
                                     :next-position p
                                     :wait-time wt}])]
          (bind!! {:next-position p
                   :wait-time     wt} ;;have a move scheduled...
                  ))))

;;We know how to wait.  If there is an established wait-time, we
;;request an update after the time has elapsed using update-after.
(befn wait ^behaviorenv {:keys [wait-time] :as benv}
      (when-let [wt wait-time] ;;if we have an established wait time...
        (do #_(debug [:sdb (:statedata benv) :sde (:statedata @(:entity benv))])
            (if (zero? wt)
              ;;skip the wait, instantaneous.  No need to request an
              ;;update.
              (do (debug [:instantly-updating])
                  update-state-beh)
              (do (debug [:waiting wt])
                  (update-after benv))))))

;;Note: start-cycle looks somewhat weak.  Can we fold this into
;;another behavior?
;;Units starting cycles will go through a series of procedures.
;;Possibly log this as an event?
(befn start-cycle {:keys [entity deltat tupdate] :as benv}
      (do (swap! entity #(merge % {:cycletime 0
                                   :date-to-reset tupdate}))
          (success benv)))

;;legacy implemenation.  no longer using policystack.
;; (befn start-cycle {:keys [entity deltat tupdate] :as benv}
;;   (let [unit   @entity
;;         pstack (:policystack unit)]
;;     (do (swap! entity #(merge % {:cycletime 0
;;                                  :date-to-reset tupdate}))
;;         (if (pos? (count pstack))
;;           (bind!! {:policy-change {:next-policy (first pstack)}})
;;           (success benv)))))

;;We may not care about cycles....
;;Should be able to specify this in our collections logic, go faster...
;;Units ending cycles will record their last cycle locally.  We broadcast
;;the change...Maybe we should just queue this as a message instead..
(befn end-cycle {:keys [entity ctx tupdate] :as benv}
      (let [cyc (assoc (:currentcycle @entity) :tfinal tupdate)
            _   (swap! entity (fn [unit]
                                (-> unit
                                    (assoc :currentcycle cyc)
                                    (u/recordcycle tupdate))))
            ;;notify interested parties of the event...
            _   (swap! ctx (fn [ctx]
                             (sim/trigger-event :CycleCompleted
                                                (:name @entity)
                                                :SupplyStore
                                                (str (:name @entity) " Completed A Cycle")
                                                nil ctx)))]
        (success benv)))

;;dunno, just making this up at the moment until I can find a
;;definition of new-cycle.  This might change since we have local
;;demand effects that can cause units to stop cycling.
;;Wow...just got burned on this..strings are no good for identity
;;checks....since some are interned and some ore instances.  wow....
(defn new-cycle? [unit frompos topos]
  (and (not= frompos :recovered) ;;additional criteria to cover nonbog reentry.
       (= (protocols/start-state (:policy unit)) topos)))

;;We check to see if there was a position change, and if so, if that
;;change caused us to finish a policy cycle.  Note: this only applies
;;in cyclical policies.
;;Note: We want to preclude finishing cycles if we are applying a
;;policy change.  We handle that in another state.  This keeps us
;;from entering into a policy change that sends us to reset, and
;;automagically terminates the current cycle stats.  Consistent with M3.
;;On a position change that completes a policy cycle (and absent a policy
;;change), start a fresh cycle, record the old one, and try any deferred
;;policy change.
(befn finish-cycle ^behaviorenv {:keys [entity position-change changed-policy
                                        policy-change] :as benv}
      (when position-change
        (let [{:keys [from-position to-position]} position-change
              no-spawn? (not (just-spawned? benv))
              new-cyc?  (new-cycle? @entity from-position to-position)
              ;_ (println [:check-cycle no-spawn? new-cyc? (not policy-change) (:tupdate benv)])
              ]
          (when (and no-spawn?
                     new-cyc?
                     (not changed-policy)) ;;If we changed-policy already, preclude...
            (do (debug [:finishing-cycle (:name @entity) from-position])
                (->seq [start-cycle
                        end-cycle
                        try-deferred-policy-change]))))))

;;Now that we have prescribed moves, the entities are going into
;;an overlapping state, but it's a state set..
(defn overlapping? [x]
  (or (identical? x protocols/Overlapping)
      (identical? x :overlapping)))

;;this is really a behavior, modified from the old state.  called from
;;overlapping_state.  used to be called check-overlap.
(befn disengage {:keys [entity ctx overlapping-position tupdate] :as benv}
      (when-let [opvec overlapping-position]
        (let [[lname op] opvec
              _   (debug [:overlapping-prescribed op])
              _   (debug [:disengaging (:name @entity) (:locationname @entity)])
              res (identical? op :to)
              _   (swap! ctx ;;update the context...
                         #(d/disengage (core/get-demandstore %)
                                       (assoc @entity :last-update tupdate)
                                       lname
                                       %
                                       res #_true))]
          (success (assoc benv :overlapping-position nil)))))

;; (when overlap-detected
;;   (when (not (identical? res :none)) ;ugh?
;;     (do (debug [:disengaging (:name @entity) (:locationname @entity)])
;;         (swap! ctx ;;update the context...
;;                #(d/disengage (core/get-demandstore %)
;;                              @entity (:locationname @entity) % res))
;;         (success benv)))))))

;;used to be called check-overlap;
(def check-overlap disengage)

;;Note: This behavior ASSUMES position changes within the
;;same policy.  We can't have changed policies and assume
;;this works.  Need an invariant to cover that.
;;Performance: We have a mild hotspot when we eagerly update
;;deployability via supply/update-deploy-status.  Might be possible to
;;update deployability lazily, save a little bit.  We're typically
;;"not" deploying...
#_(befn check-deployable ^behaviorenv {:keys [entity position-change ctx] :as benv}
        (when position-change
          (let [{:keys [from-position to-position]} position-change
                u @entity
                p (:policy u)
                _ (debug [:checking-deployable-position :from from-position :to to-position])]
            (when (or (not= (protocols/deployable-at? p from-position)
                            (protocols/deployable-at? p to-position))
                      #_(unit/can-non-bog? u))
              (do (debug [:deployable-changed! from-position to-position])
                  (swap! ctx #(supply/update-deploy-status u nil nil %))
                  (success benv))))))

;;Push the unit's deployability status into the supply store.  The 2-arity
;;returns a behavior node; the 1-arity derives u/ctx from the environment.
(defn update-deploy-status
  ([u ctx] (->alter (fn [benv]
                      (do (swap! ctx #(supply/update-deploy-status u nil nil %))
                          benv))))
  ([benv] (update-deploy-status @(:entity benv) (:ctx benv))))

;;Begavior note:
;;When units change policy, they may come from (as in RA) a finite policy
;;with a larger bog budget than the target policy, and have bogged (longer
;;than the bog of the new policy), and end up in a position/state that
;;is identical to the old policy.  So, on the surface, we have no
;;state change; no position change, no indicator of deployability change,
;;yet the unit is not technically deployable, since it has no bogbudget.
;;M3 addressed this by adding an automatic update deployability
;;check at the end of policy change, regardless.
;;M3 also added an additional check, where bogbudget exists,
;;but the deployable time is less than the new policy's overlap,
;;causing a negative cycle time error to occur.  This second
;;conditions sends the unit to reset if the modified bogbudget
;;< newpolicy.overlap.
;;Deployability checks and position/location/fill changes
;;=======================================================

;;If a pending position change alters whether the unit can deploy (and no
;;policy change just happened), record the new deployable status in the
;;context.  When a policy change did occur, fall through to the state-based
;;check instead, since nominal position alone is unreliable in that case.
;;NOTE(review): check-deployable-state is defined after this form; assumes a
;;forward declaration exists earlier in the file - confirm.
(befn check-deployable ^behaviorenv {:keys [entity position-change changed-policy ctx] :as benv}
      (when position-change
        (if-not changed-policy
          (let [{:keys [from-position to-position]} position-change
                u @entity
                p (:policy u)
                _ (debug [:checking-deployable-position :from from-position :to to-position])]
            (when (or (not= (protocols/deployable-at? p from-position)
                            (protocols/deployable-at? p to-position))
                      #_(unit/can-non-bog? u))
              (do (debug [:deployable-changed! from-position to-position])
                  ;;returns the update-deploy-status behavior for evaluation.
                  (update-deploy-status u ctx)
                  #_(swap! ctx #(supply/update-deploy-status u nil nil %))
                  #_(success benv))))
          check-deployable-state)))

;;Suggestion: to deal with the fact that deployability may change without
;;nominal position changes (but with state changes), we add the ability to
;;check for a state-change fallback.
;;Fires an update-deploy-status behavior when the pending state change flips
;;the unit between deployable and non-deployable states.
(befn check-deployable-state ^behaviorenv {:keys [entity state-change ctx] :as benv}
      (when state-change
        (let [u @entity
              from-state (marathon.ces.unit/unit-state u)
              to-state (:newstate state-change)
              _ (debug [:checking-deployable-state :from from-state :to to-state])]
          (when (not= (protocols/deployable-state? from-state)
                      (protocols/deployable-state? to-state))
            (do (debug [:deployable-changed! from-state to-state])
                (update-deploy-status u ctx)
                #_(swap! ctx #(supply/update-deploy-status u nil nil %))
                #_(success benv))))))

;;Legacy version, retained for reference:
;; (befn check-deployable ^behaviorenv {:keys [entity position-change state-change ctx] :as benv}
;;   (when position-change
;;     (let [{:keys [from-position to-position]} position-change
;;           u @entity
;;           p (:policy u)
;;           _ (debug [:checking-deployable :from from-position :to to-position])]
;;       (when (or (not= (protocols/deployable-at? p from-position)
;;                       (protocols/deployable-at? p to-position))
;;                 #_(unit/can-non-bog? u))
;;         (do (debug [:deployable-changed! from-position to-position])
;;             (swap! ctx #(supply/update-deploy-status u nil nil %))
;;             (success benv))))))

;;Succeeds with :overlapping-position bound in the behavior environment when
;;either end of the position change is an overlapping position; fails (returns
;;nil) otherwise.
;;NOTE(review): overlapping was not triggering because there is only one
;;definition of overlapping per the keyword; a string version also shows up.
(befn mark-overlap {:keys [entity position-change] :as benv}
      (when-let [change position-change]
        (let [{:keys [to-position from-position]} position-change
              res (cond (overlapping? to-position) :to ;true
                        (overlapping? from-position) :from ;false
                        :else :none)]
          (when (not (identical? res :none))
            (do (debug [:marking-overlap res])
                (success (assoc benv :overlapping-position [(:locationname @entity) res])))))))

;;When there's a change in position, we want to do all these things:
;;log the move, update the entity's :positionpolicy, re-check deployability,
;;possibly finish a cycle, mark overlap, and clear the pending change.
(befn change-position [entity position-change tupdate ctx]
      (when position-change
        (let [{:keys [from-position to-position]} position-change]
          (do (debug [:changed-position from-position to-position])
              ;;ugly - fires off a move event via the supply system.
              (reset! ctx (supply/log-position! tupdate from-position to-position @entity @ctx))
              (reset! entity (assoc @entity :positionpolicy to-position))
              (->seq [check-deployable ;;now being checked a bit more universally...
                      finish-cycle
                      mark-overlap
                      (->alter #(assoc % :position-change nil :next-position nil))])))))

;;Performance: mild hotspot.  Dissocing costs us here; changed to assoc and
;;check.  If there's a location change queued, we see it in the env: push the
;;entity to the new location, log the move, and clear the pending change.
(befn change-location {:keys [entity location-change tupdate ctx] :as benv}
      (when location-change
        (let [;#_{:keys [from-location to-location]} #_location-change ;minor improvement..
              ;;val-at used for speed; note historical typo here (was "loction").
              from-location (val-at location-change :from-location)
              to-location (val-at location-change :to-location)
              _ (debug [:location-change location-change])
              _ (reset! entity (u/push-location @entity to-location))
              _ (reset! ctx (supply/log-move! tupdate from-location to-location @entity nil @ctx))]
          ;;we need to trigger a location change on the unit...
          (success (assoc benv :location-change nil)))))

;;This is a weak predicate, but it should work for now: an entity is treated
;;as a demand iff it carries a :source-first key.
(defn demand? [e] (not (nil? (:source-first e))))

;;Is the movement causing a change in fill?  If we are leaving a demand, remove
;;the unit from the demand's assignment and update the demand's fill status.
(befn change-fill {:keys [entity location-change ctx] :as benv}
      (when location-change
        (let [{:keys [from-location]} location-change]
          (when (demand? (store/get-entity @ctx from-location))
            (swap! ctx ;;update the context...
                   #(d/remove-unit-from-demand (core/get-demandstore %) @entity from-location %))
            (success benv)))))

;;With a wait-time and a next-position secured, we can now move.  Movement may
;;compute a state change in the process.  Runs the full pipeline: derive the
;;state change, change position, change fill, change location, change state,
;;check overlap, then wait.
(def execute-move
  (->seq [(echo :<move->statechange>)
          (->or [prescribed-move->statechange move->statechange])
          (echo :<change-position>)
          change-position
          (echo :<change-fill>) ;;newly added...
          change-fill
          (echo :<change-location>)
          change-location
          change-state-beh
          (echo :<check-overlap>) ;moved before change-position
          check-overlap ;;Added, I think I missed this earlier...
          (echo :waiting)
          wait]))

;;Movement is pretty straightforward: find a place to go, determine any changes
;;necessary to "get" there, apply the changes, wait at the location until a
;;specified time.
(def moving-beh
  (->and [(echo :moving-beh)
          should-move? ;if there is a next position or our time in state expired.
          find-move ;determine the wait-time, and possibly the next-position to wait at.
          (echo :execute-move)
          execute-move]))

;;PERFORMANCE NOTE: minor hotspot - changed to == instead of zero? due to
;;minor perf issues.
;;State handler for generic updates that occur regardless of the state.
;;These are specific to the unit data structure, not any particular state.
;;Should we keep a timestamp with the unit?  That way we could keep track of
;;how fresh it is.
;;Advances the unit's clock by the elapsed deltat: adds duration to non-waiting
;;units and to the fsm statedata, then zeroes :deltat in the env so downstream
;;behaviors don't double-count it.
(befn age-unit ^behaviorenv {:keys [deltat statedata entity ctx] :as benv}
      (let [^long dt (or deltat 0)]
        (if (== dt 0)
          (success benv) ;done aging.
          (let [e @entity
                ;_ (println [:currentcycle (:currentcycle e)])
                _ (when-not (u/waiting? e)
                    (swap! entity #(u/add-duration % dt)))
                #_(debug [:skipping :add-duration (:name entity)])
                ;;update the entity atom
                _ (debug [:aging-unit deltat :cycletime (:cycletime @entity)])]
            (bind!! {:deltat 0 ;is this the sole consumer of time?
                     ;;NOTE(review): binds :last-update to (unchecked-inc deltat)
                     ;;rather than a simulation time - confirm intent.
                     :last-update (unchecked-inc deltat)
                     :statedata (fsm/add-duration statedata dt)})))))

;;Dwelling just increments statistics: credit deltat as dwell time.
(befn dwelling-beh ^behaviorenv {:keys [entity deltat] :as benv}
      (when (pos? deltat)
        (do (debug [:dwelling deltat])
            (swap! entity #(u/add-dwell % deltat))
            (success benv))))

;;Bogging just increments statistics: credit deltat as bog time.
(befn bogging-beh ^behaviorenv {:keys [entity deltat] :as benv}
      (when (pos? deltat)
        (do (debug [:bogging deltat])
            (swap! entity #(u/add-bog % deltat))
            (success benv))))

;;When the remaining time-in-state equals the elapsed deltat, decrement the
;;unit's :mod level by one and log the modernization.
(befn modernizing-beh ^behaviorenv {:keys [entity statedata deltat] :as benv}
      (when (and (pos? deltat)
                 (= (spork.ai.machine/remaining statedata) deltat))
        (let [unit @entity
              uname (:name unit)
              from (:mod unit)
              to (dec from)
              _ (swap! entity assoc :mod to)]
          (->> benv
               (log! (core/msg "Modernized unit " (:name unit) " from " from " to " to))
               success))))

;;This is a little weak; we're loosely hard-coding these behaviors.  It's not
;;terrible though.  Dispatches on the unit's current :state to a special-case
;;behavior; any other state fails so the normal state machinery runs.
(befn special-state {:keys [entity statedata] :as benv}
      (case (:state (deref!! entity) #_@entity)
        :spawning spawning-beh
        :abrupt-withdraw (do (debug [:<special-state-abw>])
                             abrupt-withdraw-beh)
        :recovery recovery-beh ;moving-beh ;;setup the move to recovered.
        :recovered (->and [(echo :recovered-beh)
                           (->seq [re-entry-beh
                                   ;;TODO: Optimize.  We can skip the re-entry and go to
                                   ;;policy-change directly.
                                   (->if (fn [{:keys [entity]}]
                                           (zero? (u/get-bog @entity)))
                                         try-deferred-policy-change)])
                           ;reset-beh
                           ])
        ;; I think we need to implement these.
        ;; :modernizing modernizing-beh
        ;; :modernized modernized-beh
        ;:waiting (success benv) ;up-to-date
        (fail benv)))

;;rest-beh is kind of what we want to do.  We'd like to compute the unit's new
;;position in its old policy.  What about pending policy changes?  (How did
;;Marathon handle them in VBA?  We deferred until reset, actually.)
;;Follow-on state is an absorbing state where the unit waits until a
;;changestate sends it elsewhere.  The only feasible state transfers are to a
;;re-entry state - where the unit re-enters the ARFORGEN pool at a dynamically
;;determined position - or to another demand compatible with the followon code.
;;Registers the unit as a follow-on candidate in the supply store, re-reads the
;;entity from the context (see note below), and then waits indefinitely in the
;;:followon position.
(befn followon-beh {:keys [entity ctx] :as benv}
      (let [fc (u/followon-code @entity)
            _ (debug [:trying-followon (:name @entity) fc])]
        (when fc ;if the unit has a followon code
          (do ;register the unit as a possible followOn
              ;(println [(:name @entity) :added-followon :for [fc]])
              ;;Note: we have a problem here, since add-followon ends up getting
              ;;our entity out-of-sync with the entity reference stored in the
              ;;context.  We add a bunch of components to the entity, like
              ;;:followon, which may end up getting ditched when we merge the
              ;;entity atom in at the end of the transaction, hence the re-read.
              (swap! ctx #(supply/add-followon (core/get-supplystore %) @entity %))
              (reset! entity (-> (store/get-entity @ctx (:name @entity))
                                 (merge {:state :followon})))
              ;age-unit
              (debug [:waiting-in-followon-status fc])
              (->seq [(->alter (fn [b] (merge b {:wait-time +inf+
                                                 :next-position :followon ;(:positionpolicy @entity) ;:followon
                                                 :next-state :followon ;:abruptwithdraw
                                                 })))
                      moving-beh]) ;?
              ))))

;;Way to get the unit back to reset: we set up a move to the policy's start
;;state and rip off the followon code.  Added a formal reset policy for reset
;;evaluation associated with policy changes.
;;Sends the unit back to its policy's start state, restoring a full bog budget
;;and clearing any followon code.  When a reset-policy is supplied (by a policy
;;change), wait time is computed from that policy's transfer time instead of
;;the unit's remaining time-in-state.
(befn reset-beh {:keys [entity reset-policy] :as benv}
      (let [pos (protocols/start-state (or reset-policy (:policy @entity)))
            wt (if-not reset-policy
                 (immediate-wait-time @entity pos benv)
                 ;;supplied reset policy implies a move to reset with note
                 ;;added transfer time; time remaining in state is ignored.
                 (protocols/transfer-time reset-policy pos (get-next-position reset-policy pos)))
            _ (debug [:immediate-reset :from (:positionpolicy @entity) :to pos :wait-time wt])
            newbogbudget (u/max-bog @entity)
            _ (swap! entity #(-> %
                                 (assoc :followoncode nil)
                                 (assoc-in [:currentcycle :bogbudget] newbogbudget)))]
        (beval moving-beh (assoc benv :next-position pos :wait-time wt))))

;;A state to handle re-entry into the available pool.
;;Positions a unit may never re-enter from (the set doubles as a predicate).
(def invalid? #{"Deployed" "Overlapping"})

;;Note: attempting to match M3 behavior exactly.  Units re-entering with 0 bog
;;and a pending policy change should go ahead and change policies vs. going
;;through re-entry in the current cycle's policy.
;;Kind of like reset, except it's not guaranteed we go to reset: the unit is
;;re-inserted into its policy at the position implied by its cycletime, with
;;wait time = remaining transfer time less time already spent in that state.
(befn re-entry-beh {:keys [entity ctx tupdate] :as benv}
      (let [unit @entity
            p (:policy unit)
            current-pos (:positionpolicy unit)
            ct (:cycletime unit)
            _ (when (< ct 0) (throw (Exception. (str "Cycle Time should not be negative!"))))
            _ (when (invalid? current-pos) (throw (Exception. "Cannot handle during deployment or overlap")))
            is-deployable (protocols/deployable-by? p ct)
            positionA current-pos
            positionB (protocols/get-position p ct)
            _ (when (invalid? positionB)
                (throw (Exception. (str "Cannot handle during deployment or overlap: " positionB))))
            timeremaining (protocols/transfer-time p positionB (protocols/next-position p positionB))
            timeinstate (- ct (protocols/get-cycle-time p positionB)) ;;this ends up being 0.
            wt (max (- timeremaining timeinstate) 0)
            _ (debug [:re-entry {:cycletime ct
                                 :current-pos current-pos
                                 :next-pos positionB
                                 :timeinstate timeinstate
                                 :timeremaining timeremaining
                                 :wt wt}])
            state-change {:newstate (get-state unit positionB)
                          :duration timeremaining
                          :followingstate nil
                          :timeinstate timeinstate}
            _ (reset! ctx (->> @ctx
                               ;; (supply/log-position! tupdate positionA positionB unit)
                               (supply/supply-update! {:name "SupplyStore"} unit
                                                      (core/msg "Unit " (:name unit) " ReEntering at " positionB
                                                                " with " (:bogbudget (:currentcycle unit)) " BOGBudget."))))
            _ (reset! entity (assoc unit :followoncode nil))]
        (beval change-state-beh
               (assoc benv :state-change state-change
                      ;; :position-change {:from-position positionA
                      ;;                   :to-position positionB}
                      :wait-time wt
                      :next-position positionB))))

;;Function to handle the occurrence of an early withdraw from a deployment.
;;When a demand deactivates, what happens to the unit?  The behavior will be
;;guided by (the unit's) policy.  The default behavior is that a unit will
;;check its policy to see if it CAN deploy.  If policy says it's okay, the unit
;;will return to the point in time of its current lifecycle.  We can
;;parameterize the penalty it takes to get back into lifecycle from deployment;
;;a usual penalty is a move to "90 days of recovery".  Note: we can also
;;specify if the unit is instantly available to local demands.
;;Recovery should now be an option by default, not specifically dictated by
;;policy.
;;1) Consult policy to determine if entry back into the available/ready pool is
;;feasible.  (TOM note 18 July 2012: checking overlap here was erroneous -
;;that's not the definition of a unit's capacity to re-enter the available
;;pool.)
;;Hack: policies that may never recover (needed to ship; revisit).
(def non-recoverable #{"SRMAC" "SRMRC" "SRMRC13"})

;;We no longer use the default +recovery-time+ shim; now we consult policy or
;;fall back to the :DefaultRecoveryTime parameter.
;;Memoized (per policy) recovery time: SRM policies carry an explicit
;;:recovery field; otherwise derive it from the :recovery -> :recovered
;;transfer time in the policy graph.
(def policy-recovery-time
  (memo1-policy
   (fn policy-rec [p]
     (or (:recovery p) ;;srm policies have a :recovery field.
         (marathon.data.protocols/transfer-time p :recovery :recovered)))))

;;Recovery time for a unit: the active policy's recovery time, falling back to
;;the unit's :default-recovery parameter.  Unary arity uses the unit's own
;;policy.
(defn recovery-time
  ([unit p] (or (policy-recovery-time (-> p marathon.data.protocols/get-active-policy))
                (:default-recovery unit)))
  ([unit] (recovery-time unit (:policy unit))))

;;Returns the recovery time when the unit may recover, else nil.  A unit may
;;recover when its policy is not in the non-recoverable set, it has positive
;;bog budget, and recovering still fits inside the expected cycle duration.
;;(We need this to prevent any SRM units from recovering.)
(defn can-recover? [unit]
  (let [cyc (:currentcycle unit)
        p (:policy unit)
        rt (recovery-time unit p)]
    (when (and (not (non-recoverable (protocols/policy-name p)))
               (pos? (:bogbudget cyc))
               (< (+ (:cycletime unit) rt) (:duration-expected cyc)))
      rt)))

;;If the unit can recover, move it to :recovered for the policy-determined
;;recovery time; otherwise announce the skip, zero its bog budget, and reset.
(befn recovery-beh {:keys [entity deltat ctx] :as benv}
      (let [unit @entity]
        (if-let [t (can-recover? unit)]
          (do (debug [:unit-can-recover (:name unit)])
              (move! :recovered t)) ;;recovery is now determined by policy or parameters.
          (let [cyc (:currentcycle unit)
                ct (:cycletime unit)
                dur (:duration-expected cyc)]
            (swap! ctx #(sim/trigger-event :supplyUpdate (:name unit) (:name unit)
                                           (core/msg "Unit " (:name unit) " Skipping Recovery with "
                                                     (:bogbudget (:currentcycle unit)) " BOGBudget "
                                                     ct "/" dur " CycleTime ")
                                           nil %))
            (reset! entity (assoc-in unit [:currentcycle :bogbudget] 0))
            #_moving-beh
            reset-beh))))

;;On second thought, this is sound.  If the unit is already in overlap, it's in
;;a terminal state: for followon eligibility, another unit would immediately be
;;overlapping this one anyway, and the demand would not be considered filled.
;;It does nothing to alleviate the demand pressure, which is the intent of
;;followon deployments.  Conversely, if overlap is 0, as in typical surge
;;periods, then units will always follow on.  This is accurate.
;;Note: we need to ensure this behavior fails if called from incompatible
;;circumstances - we can only call this on units that are actually
;;deployed/bogging.
;;Handles a unit pulled early off a deployment: credit any elapsed bog, then
;;either send the unit home (no usable bog left after overlap) or try a
;;follow-on deployment, falling back to recovery.
(befn abrupt-withdraw-beh {:keys [entity deltat] :as benv}
      (let [_ (when (pos? deltat)
                (swap! entity #(u/add-bog % deltat)))
            unit @entity
            ;1) bog remaining after reserving the policy's overlap.
            bogremaining (- (:bogbudget (:currentcycle unit))
                            (protocols/overlap (:policy unit))) ;;note: this overlap assumption may not hold...
            _ (debug [:abw-beh {:deltat deltat
                                :bogremaining bogremaining
                                :unt (:name unit)
                                :fc (:followoncode unit)
                                ;:unit (dissoc unit :policy)
                                }])]
        (if (not (pos? bogremaining))
          ;;makes no sense for the unit to continue BOGGING; send it home.
          ;(->and [(echo [:abw->reset {:bogremaining bogremaining}])
          reset-beh
          ;])
          ;;unit has some feasible bogtime left; we can possibly have it follow
          ;;on or extend its bog.  A follow-on is when a unit can immediately
          ;;move to fill an unfilled demand from the same group of demands - in
          ;;other words, it's able to locally fill in.  This allows us to refer
          ;;to forcelists as discrete chunks of data, group them together, and
          ;;allow forces to flow from one to the next naturally.
          (->or [followon-beh recovery-beh]))))

;;Policy Changes
;;==============
;;Changing policies in legacy MARATHON involves something called the "policy
;;stack" and a subscriber model where units "subscribe" to a parent policy
;;(typically a composite policy defined over multiple simulation periods).
;;Changes in the period cause changes in policy, which propagate to changes in
;;subscribers' policy.  Policy changes are typically limited to "non-deployed"
;;or dwelling states - units may not permissively change the structure of
;;their policy while "in use" by a demand.
;;In this case, the policy change is tracked by keeping the policy-change stack
;;non-empty.  When the unit cycles through a state in which policy changes can
;;occur, it finds a pending change and converts to the new atomic policy.

;;Positions from which a policy change may never be applied immediately.
(def infeasible-policy-change? #{"Deployed" "Overlapping" "DeMobilization"})

;;A policy change is feasible when the unit's normalized cycle proportion maps
;;into the new policy (<= 1) and it is not in a terminal position.
(defn can-change-policy? [cycle-proportion from-pos]
  (and (<= cycle-proportion 1)
       (not (infeasible-policy-change? from-pos))))

;; 'TOM Change 13 Jul 2011
;; 'Needed to implement the transition from one policy to another.  I chose to add a state to handle just this.
;; 'Visual analysis showed that PolicyChange looks a lot like Spawn, in that when a unit changes policies, it must change
;; 'a lot of its internal state to follow the new policy.  The result of the policy change is:
;; ' 1: The unit's cycle time is normalized, and then transformed into the relevant cycletime in the new policy.
;; ' 2: The unit's position "may" change to reflect its position in the new policy.
;; ' 3: The unit's location "may" change to reflect its location in the new policy.
;; 'TOM Change 20 April:
;; ' 4: The unit's BOGBudget "may" change to reflect either increased, or decreased, BOGBudget.
;; 'TOM Change 24 April:
;; ' 5: The unit's BOGBudget and MAXBOG may only change (increase) as the result of a policy change.
;; ' 6: Policy changes can NOT happen during terminal states:
;; '    [Deployed {Bogging, Overlapping}, Demobilizing]
;; 'If NOT deployed (bogging, overlapping) or in a terminal state (demobilizing), then entities can change policy
;; 'immediately.  Otherwise, units change policy upon next reset (change is deferred).
;; 'Assumes that the new policy is already set for the unit (i.e. the unitdata is pointing toward the new policy).
;; 'Ideally, an outside agent will have modified the unit's policy, and subsequently told it to changestates to a
;; 'policy-change state.
;; 'Net effect is that policy changes to the same policy are idempotent.
;; 'State to control how a unit acts when it changes policy.
;; 'Note -> we extract the next policy from the unitdata's policy stack.
;; 'TOM note -> figure out how to change this for the deployed population... they have negative cycle times.
;; 'Note -> this assumes we have compatible policies, or at least policies that have a cyclical
;; 'rotational lifecycle.
;; Function PolicyChange_State(unit As TimeStep_UnitData, deltat As Single) As TimeStep_UnitData
;;WIP Nov 2016
;;Entry point for a requested policy change carried in :policy-change.  Waiting
;;units always defer; otherwise the unit's cycle position is normalized into
;;the new policy and either applied immediately (when feasible) or deferred
;;until reset.
(befn policy-change-state ^behaviorenv {:keys [entity wait-time tupdate policy-change ctx] :as benv}
      (when policy-change ;;we have a change.
        (if (u/waiting? @entity)
          (do (debug [:deferring-policy-change-while-waiting])
              defer-policy-change) ;;units in waiting must defer policy changes!
          (let [next-policy (:next-policy policy-change)
                unit @entity
                tnow tupdate
                _ (assert (pos? (protocols/bog-budget next-policy)) "No bog budget!")
                current-policy (:policy unit)
                ;;'TOM Change 20 April -> We need to separate the unit's experienced
                ;;'cycle length vs the NOMINAL cycle duration, which exists in
                ;;'POLICY SPACE.  In composite rotational policies, the NOMINAL cycle
                ;;'duration changes when atomic policies change.  Specifically, we map
                ;;'the unit's position or coordinates in the current atomic policy to
                ;;'coordinates in the new policy.  The unit's actual experienced
                ;;'lifecycle, i.e. its cycletime property, is not an accurate mapping
                ;;'between policies.  The implicit assumption is that when mapping from
                ;;'one policy to another, if the policies have differing cycle lengths
                ;;'then there is a discount or exchange rate between the policies, such
                ;;'that time spent in one policy is NOT equal to time spent in another.
                ;;'However, our unit's cyclelength property is not subject to this,
                ;;'since it technically exists OUTSIDE of the policy view of time.  The
                ;;'cyclelength property reflects the actual time a unit has spent,
                ;;'under ANY policy, until it has reset or started a new cycle.
                ;;'Prior to 19 April 2012, the unit's ability to deploy, via the
                ;;'CanDeploy method, depended on its position in the current policy as
                ;;'a function of the cyclelength property.  We should prefer the
                ;;'duration of the current cycle record, which is an accurate
                ;;'reflection of the relative time in the unit's current policy.
                ;;'TOM Change 20 April 2012
                cycletimeA (:cycletime unit)
                PositionA (:positionpolicy unit)
                ;; _ (println [:name (:name unit) :cycletimeA cycletimeA
                ;;             :positionA PositionA (assoc benv :ctx nil)])
                _ (assert (not (neg? cycletimeA))
                          (str {:msg "Cycletime should not be negative!"
                                :cycletime cycletimeA
                                :unit (:name unit)
                                :t tupdate}))
                ;;We run into a problem here: when changing from an infinite policy to
                ;;a finite policy, despite units having a substantial amount of
                ;;cycletime - exceeding the cyclelength of the new policy in fact - our
                ;;proportion is computed as a function of the time in the current
                ;;cycle.  It works out that any unit transitioning will get shucked
                ;;into a 0.0 truncated cycle proportion coordinate.  The net effect is
                ;;that, regardless of how much supply we get, this artificially
                ;;"resets" our surplus supply by shoving them all back to the start of
                ;;the next cycle - typically reset and unavailable status.  For certain
                ;;inputs, we can never effectively grow supply, which wrecks
                ;;requirements analysis.
                ;;The solution is to detect the edge case where we have an effectively
                ;;infinite policy and change the proportion computation: take the
                ;;current cycle time and quot it by the cycle length of the target
                ;;policy; that becomes the input for our cycle-proportion calculation.
                ;;We should still get a useful distribution of cycletimes in the new
                ;;policy without resorting to randomness, while crediting the units
                ;;that have a longer time in cycle.
                CycleProportionA #_(core/float-trunc (/ cycletimeA (protocols/cycle-length current-policy)) 6)
                (compute-proportion cycletimeA
                                    (protocols/cycle-length current-policy)
                                    (protocols/cycle-length next-policy))
                ;;'TOM change 23 April 2012 -> No longer allow units that are
                ;;'de-mobilizing to enter into the available pool.
                ]
            (->or [(->and [(->pred (fn [_] (can-change-policy? CycleProportionA PositionA)))
                           (->alter #(assoc % :policy-change
                                            {:cycletime cycletimeA
                                             :current-policy current-policy
                                             :next-policy next-policy
                                             :proportion CycleProportionA
                                             :current-position PositionA}))
                           apply-policy-change])
                   defer-policy-change])))))

;;Policy-change-specific reset behaviors, due to transforms between policies:
;;if the unit's bog budget does not exceed the prescribed overlap, we go to
;;reset early.
(befn infeasible-bog-reset ^behaviorenv {:keys [entity ctx] :as benv}
      (->if (fn [_] (not (pos? (u/boggable-time @entity))))
            reset-beh))

;;Note: in retrospect, it looks like we could just use the unit/can-deploy?
;;predicate, which performs the same checks (and more!) that check-deployable
;;and check-deployable-state perform.  That ends up being the standard by which
;;the unit is judged when selected for fill.  We postpone deployable-status
;;updates until the end and do a blanket check.
(befn policy-change-deployability-check ^behaviorenv {:keys [entity ctx] :as benv}
      (->seq [infeasible-bog-reset
              update-deploy-status]))

;;Assuming we have a change, let's apply it!
;;How long will the unit have been in this state?  Since it's a policy
;;change... do we zero it out?  Or do we assume that the unit has been in the
;;state the exact amount of time required?
;;We assume that the unit has been in the state the exact amount of time
;;required.  We also assume that the unit is not entering another cycle, merely
;;extending or truncating: its current cycle is modified; it does not get a
;;cycle completion out of it.
;;#WIP Nov 2016
;;Policy change => Movement => [state-change location-change]
;;So, we can use policy-change to set the stage for movement, then pipeline the
;;normal movement behavior...
(befn apply-policy-change [ctx tupdate entity policy-change] (let [unit @entity uname (:name unit) {:keys [cycletime current-policy next-policy proportion current-position]} policy-change cycletimeA cycletime policynameA (protocols/atomic-name current-policy) ;active atomic policy policynameB (protocols/atomic-name next-policy) ;new atomic policy cyclelengthB (protocols/cycle-length next-policy) cycletimeB (if (> cyclelengthB +twenty-years+) ;;effectively infinite... cycletimeA ;;use current cycletime, do NOT project. (long (* proportion cyclelengthB))) ;coerce to a long cyclelength. _ (assert (>= cycletimeB 0) "Negative cycle times are not handled...") _ (assert (<= cycletimeB cyclelengthB) "Cyclelength is too long!") wasDeployable (protocols/deployable-by? (:policy unit) cycletimeA) ;;can maybe do this faster just checking state. isDeployable (protocols/deployable-by? next-policy cycletimeB) positionA current-position positionB (if (u/deployed? unit) ;;REVIEW - Shouldn't matter, should already be non-deployed (:positionpolicy unit) ;deployed units remain deployed. (protocols/get-position next-policy cycletimeB)) timeremaining (immediate-policy-wait-time next-policy positionB) timeinstate (- cycletimeB (protocols/get-cycle-time next-policy positionB)) oldstate (protocols/get-state current-policy positionB) unit (reset! entity (-> unit ;;we change positionpolicy here....bad move? (merge {;:positionpolicy positionB ;;policy-change supercedes old deferred policy changes. :deferred-policy-change nil :policy next-policy :cycletime cycletimeB}) (u/change-cycle tupdate) (u/modify-cycle next-policy))) newduration (- timeremaining timeinstate) ;;added... 
newstate (protocols/get-state next-policy positionB) _ (debug [:preparing-apply-policy-change {:cycletimeA cycletimeA :policynameA policynameA :positionA positionA :policynameB policynameB :cycletimeB cycletimeB :positionB positionB :timeremaining timeremaining :timeinstate timeinstate :newduration newduration :oldstate oldstate :newstate newstate }]) ] ;;We have a move. ;;Setup the movement and let the behavior execute. ;(if (not= positionA positionB) ;;setup the move and use existing behavior to execute (vs. legacy method that folded stuff in here). (do (swap! ctx #(->> (assoc % :policy-change nil) (core/trigger-event :UnitChangedPolicy uname policynameA (core/msg "Unit " uname " changed policies: " policynameA ":" cycletimeA "->" policynameB ":" cycletimeB) nil))) (->and [(->alter (fn [benv] (assoc benv :state-change {:newstate newstate :duration newduration :timeinstate 0} :changed-policy true :policy-change nil ;;we add a formal reset policy ;;to allow reset-beh to accurately ;;compute reset wait time. :reset-policy next-policy))) ;;for some reason, move! was swallowing up our behavior ;;for specific units, and not following through withh ;;a policy-change-deployability check. This left us ;;with units that should have reset and gained new ;;bog budget not doing so, leading to a runtime ;;invalid deployer error. ->seq should force both ;;behaviors to evaluate. (->seq [(move! positionB newduration) ;;movement behavior policy-change-deployability-check]) ])))) ;;TODO: Add this? ;;'Craig add 10 May 2016 . We have some units changing policies from a longer BOG budget ;;'to a shorter BOG budget. 
We are running into issues when a unit's new BOG budget is < Overlap ;;'and then this unit got deployed and ended up with a negative BOG budget throwing negativeErr ;;If .CurrentCycle.bogbudget <= .policy.overlap Then 'this matches our check in AbruptWithdraw_State ;; Set unit = Reset_State(unit, deltat) ;; .parent.UpdateDeployStatus unit ;;End If ;;This automatically gets checked during move!... ;; MarathonOpSupply.UpdateDeployStatus simstate.supplystore, unit, , , simstate.context ;; 'Adopt Policy B. ;; 'Policy A -> ;; ' Find relative CT = ct/CLengthA ;; 'Policy B -> ;; ' Find relative positionB = pos(RelativeCT * CLengthB) ;; 'Movingstate from PositionA to relative PositionB. ;; 'Update with delta0. ;; 'TOM Change 2 Sep -> moved this north so that we can use the policy stack as a flag in unit's ;; 'ChangeCycle logic. Check for sideeffects ;; .policyStack.Remove 1 ;; SimLib.triggerEvent UnitChangedPolicy, .name, .policy.AtomicName, "Unit " & .name & " changed policies: " & _ ;; policynameA & ":" & cycletimeA & "->" & policynameB & ":" & CycleTimeB, , simstate.context ;;SET UP A STATECHANGE ;; SimLib.triggerEvent supplyUpdate, .name, .name, "Policy Change Caused Supply Update for unit " & .name, , simstate.context ;; Set PolicyChange_State = ChangeState(unit, nextstate, 0, newduration) ;; 'NOTE -> I may need to consider changing location here..... ;;The unit's cycle cannot project onto another cycle. We need to defer policy change until reset. ;;leave the policy on the stack. Catch it during reset. ;;TOM change 2 Sep 2011 -> we modify the cyclerecord to reflect changes in expectations... ;;This is not a replacement... ;;WIP Nov 2016 (befn defer-policy-change {:keys [entity ctx tupdate policy-change] :as benv} (when policy-change (let [_ (debug [:deferring-policy-change]) {:keys [next-policy]} policy-change unit @entity uname (:name unit) _ (swap! 
ctx #(core/trigger-event :AwaitingPolicyChange uname (marathon.data.protocols/atomic-name (:policy unit)) (core/msg "Unit " uname " in position " (:positionpolicy unit) " is waiting until reset to change policies") nil %)) ;;marked the deferred policy change. _ (swap! entity #(assoc % :deferred-policy-change (select-keys policy-change [:next-policy])))] (->alter (fn [benv] (assoc benv :policy-change nil)))))) (befn try-deferred-policy-change {:keys [entity ctx tupdate] :as benv} (when-let [pc (:deferred-policy-change @entity)] (let [_ (debug [:applying-deferred-policy-change]) _ (swap! entity assoc :deferred-policy-change nil)] (->seq [(->alter (fn [benv] (assoc benv :policy-change pc))) policy-change-state])))) ;; SimLib.triggerEvent AwaitingPolicyChange, .name, .policy.AtomicName, "Unit " & _ ;; .name & " in position " & .PositionPolicy & " is waiting until reset to change policies", , simstate.context ;; Set unit = RevertState(unit) ;; 'We updated the unit in the process ;; SimLib.triggerEvent supplyUpdate, .name, .name, "Policy Change Attempt Caused Supply Update for unit " & .name, , simstate.context ;;Basic Unit Behaviors (or "States....") ;;===================================== ;;entities have actions that can be taken in a state... (def default-statemap {:reset reset-beh ; :global :abrupt-withdraw abrupt-withdraw-beh :recovery recovery-beh :followon age-unit ; :recovered (echo :recovered-beh) ;:end-cycle ; :spawning spawning-beh :demobilizing dwelling-beh "DeMobilizing" dwelling-beh protocols/demobilization dwelling-beh :bogging bogging-beh protocols/Bogging bogging-beh ;;Added for legacy compatibility... :non-bogging dwelling-beh :recovering (echo :recovering-beh) "Recovering" (echo :recovering-beh) :dwelling dwelling-beh protocols/Dwelling dwelling-beh ;;Need to make sure we don't add bogg if we're already bogging... 
:overlapping bogging-beh protocols/Overlapping bogging-beh :waiting (echo :waiting-state) #_(->seq [(echo :waiting-state) defer-policy-change]) :modernizing modernizing-beh }) ;;PERFORMANCE NOTE: HotSpot - used val-at macro to inline method calls. ;;lookup what effects or actions should be taken relative to ;;the current state we're in. This is kind of blending fsm ;;and behaviortree. (befn do-current-state {:keys [entity statedata] :as benv} (let [;state (:state @entity) state (:state (deref!! entity)) ;;slightly faster using keyword as function call. state-map (or (:statemap entity) default-statemap)] (if (set? state) ;entity has multiple effects... ;;MEGA-HACK:This a serious hack to prevent double-counting of bog when we have ;;state-sets. Alone, either overlapping or bogging confers collecting bog time, ;;and in legacy policies are mutually exclusive. However, for SRM policies, ;;we have the possibility of bogging/non-bogging, as well as being in an ;;overlap state. This leaves us with a conundrum relative to our default ;;legacy meanings of bog and overlap. What we can do is ensure that if ;;bogging is present, we just skip overlapping if we ever encounter a ;;state-state. This is practical, but somewhat brittle....probabtately ;;a better idea to encode the meaning of states better - like [:bogging :overlapping] (let [stats (r/filter identity (r/map (fn [s] (val-at state-map s)) (disj state :overlapping))) ] (->seq stats)) (get state-map state)))) ;;the entity will see if a message has been sent ;;externally, and then compare this with its current internal ;;knowledge of messages that are happening concurrently. (befn check-messages ^behaviorenv {:keys [entity current-messages ctx] :as benv} (if-let [old-msgs (fget (deref! entity) :messages)] ;we have messages (when-let [msgs (pq/chunk-peek! old-msgs)] (let [new-msgs (rconcat (r/map val msgs) current-messages) _ (b/swap!! entity (fn [^clojure.lang.Associative m] (.assoc m :messages (pq/chunk-pop! 
old-msgs msgs) )))] (bind!! {:current-messages new-msgs}))) (when current-messages (success benv)))) ;;this is a dumb static message handler. ;;It's a simple little interpreter that ;;dispatches based on the message information. ;;Should result in something that's beval compatible. ;;we can probably override this easily enough. ;;#Optimize: We're bottlnecking here, creating lots of ;;maps.... ;;Where does this live? ;;From an OOP perspective, every actor has a mailbox and a message handler. ;; ;;so now we can handle changing state and friends. ;;we can define a response-map, ala compojure and friends. ;;type sig:: msg -> benv/Associative -> benv/Associative ;;this gets called a lot. (defn message-handler [msg ^behaviorenv benv] (let [entity (.entity benv) current-messages (.current-messages benv) ctx (.ctx benv)] (do (ai/debug (str [(:name (deref! entity)) :handling msg])) (beval (case (:msg msg) :move (let [move-info (:data msg) {:keys [wait-time next-location next-position deltat] :or {wait-time 0 deltat 0}} move-info _ (debug [:executing-move move-info msg (:positionpolicy @entity)])] (beval (move! next-location deltat next-position wait-time) benv)) ;;allow the entity to invoke a state-change-behavior ;;We can always vary this by modifying the message-handler :change-state ;;generic update function. Temporally dependent. ;;we're already stepping the entity. Can we just invoke the change-state behavior? (let [state-change (:data msg) _ (debug [:state-change-message state-change msg])] (beval change-state-beh (assoc benv :state-change state-change :next-position (or (:next-position state-change) (:newstate state-change))))) :change-policy ;;Note: this is allowing us to change policy bypassing our wait state... ;;We need to put a break in here to defer policy changes. ;;Policy-changes are handled by updating the unit, then ;;executing the change-policy behavior. 
           ;;Note: we could tie in change-policy at a lower echelon....so we check for
           ;;policy changes after updates.
           (beval policy-change-state
                  (assoc benv :policy-change (:data msg)))
           ;; roll the entity forward in time; a no-op when already current.
           :update
           (if (== (get (deref! entity) :last-update -1) (.tupdate benv))
             (success benv) ;entity is current
             (->and [(echo :update)
                     ;roll-forward-beh ;;See if we can replace this with update-state...
                     update-state-beh
                     ]))
           ;; initialize a freshly-created entity via the spawning behavior.
           :spawn
           (->and [(echo :spawn)
                   (push! entity :state :spawning)
                   spawning-beh]
                  )
           ;;Allow the entity to apply location-based information to its movement, specifically
           ;;altering behavior due to demands.
           :location-based-move
           (beval location-based-beh
                  (assoc benv :location-based-info (:data msg)))
           ;;Like a location-based move, except with a simple wait time guarantee, with a
           ;;reversion to the original state upon completion of the wait.
           :wait-based-move
           (beval wait-based-beh
                  (assoc benv :wait-based-info (:data msg)))
           ;;allow the entity to change its behavior.
           :become (push! entity :behavior (:data msg))
           :do     (->do (:data msg))
           :echo   (->do (fn [_] (println (:data msg))))
           ;; default: unknown message types are republished as simulation
           ;; events rather than raising an error.
           (do ;(println (str [:ignoring :unknown-message-type (:msg msg) :in msg]))
               (sim/trigger-event msg @ctx) ;toss it over the fence
               ;(throw (Exception. (str [:unknown-message-type (:msg msg) :in msg])))
               (success benv)
               ))
         benv))))

;;we'd probably like to encapsulate this in a component that can be seen as a "mini system"
;;basically, it'd be a simple record, or a function, that exposes a message-handling
;;interface (could even be a generic fn that eats packets).  For now, we'll work
;;inside the behavior context.  Note, the entity is a form of continuation....at
;;least the message-handling portion of it is.

;;message handling is currently baked into the behavior.
;;We should parameterize it.

;;handle the current batch of messages that are pending for the
;;entity.  We currently define a default behavior.
;; Fold the pending :current-messages through message-handler, threading the
;; accumulated behavior environment; :current-messages is cleared up front so
;; handlers see a clean slate.
(befn handle-messages ^behaviorenv {:keys [entity current-messages ctx] :as benv}
      (when current-messages
        (reduce (fn [acc msg]
                  (message-handler msg (val! acc)))
                (success (assoc benv :current-messages nil))
                current-messages)))

;;The global sequence of behaviors that we'll hit every update.
;;These are effectively shared behaviors across most updates.
(def global-state
  (->seq [(echo :aging)
          age-unit
          (echo :aged)
          moving-beh]))

;; Record the entity as updated through tupdate and log its salient stats.
(befn up-to-date {:keys [entity tupdate] :as benv}
      (let [e (reset! entity (assoc @entity :last-update tupdate))]
        (echo [:up-to-date (:name e) :cycletime (:cycletime e)
               :last-update (:last-update e) :tupdate tupdate
               :positionpolicy (:positionpolicy e)])))

;; Check and handle pending messages; succeeds with a log either way.
(def process-messages-beh
  (->or [(->and [(echo :check-messages)
                 check-messages
                 handle-messages])
         (echo :no-messages)]))

;;The root behavior for updating the entity.
(def update-state-beh
  (->seq [(echo :<update-state-beh>)
          ; process-messages-beh
          (->or [special-state
                 (->seq [(echo :<do-current-state>)
                         do-current-state
                         (echo :global-state)
                         (fn [ctx] (if-y global-state
                                         (fail ctx)))])
                 up-to-date])]))

;;if we have a message, and the message indicates
;;a time delta, we should wait the amount of time
;;the delta indicates.  Waiting induces a change in the
;;remaining wait time, as well as a change
(befn wait-in-state ^behaviorenv [entity current-message ctx]
      (let [;_ (println [:wait-in-state entity msg])
            msg   current-message
            t     (fget msg :t)
            delta (- t (fget (deref! entity) :t))]
        (when-let [duration (fget (deref! entity) :wait-time)]
          (if (<= delta duration)
            ;time remains or is zero.
            ;(println [:entity-waited duration :remaining (- duration delta)])
            (merge!! entity {:wait-time (- duration delta)
                             :tupdate t}) ;;update the time.
            (do ;can't wait out entire time in this state.
                (merge!! entity {:wait-time 0
                                 :tupdate (- t duration)}) ;;still not up-to-date
                ;;have we handled the message?
                ;;what if time remains?  this is akin to roll-over behavior.
                ;;we'll register that time is left over.
We can determine what
;;to do in the next evaluation.  For now, we defer it.
                (bind!! {:current-message
                         (.assoc ^clojure.lang.Associative msg :delta
                                 (- delta duration))} )
                )))))

;; True when the entity's last-known update time matches the context's.
(defn up-to-date? [e ctx] (== (:tupdate e) (:tupdate ctx)))

;;This will become an API call...
;;instead of associng, we can invoke the protocol.
;; Schedule the entity's next self-update at tnow + its current :wait-time,
;; by pushing an :update message to itself through the behavior environment.
(befn schedule-update ^behaviorenv {:keys [entity ctx new-messages] :as benv}
      (let [st       (deref! entity)
            nm       (:name st)
            duration (:wait-time st)
            tnow     (:tupdate (deref! ctx))
            tfut     (+ tnow duration)
            _        (debug 4 [:entity nm :scheduled :update tfut])
            ;_ (when new-messages (println [:existing :new-messages new-messages]))
            ]
        (success (push-message- benv nm nm (->msg nm nm tfut :update)))))

;;wire in functionality here for a unit to invoke its own
;;deployment order...
;;From here on, the system will append a deployment order to
;;the unit, and send the unit a message to update.
;;The unit will handle the message by appending a
;;deployment order to its state and invoking an update.
;;This way, we handle messages first, which preps the
;;behavior environment to respond to stimulii (like
;;the presence of a deploy order)
(defn deploy-to [o benv] ;;stub
  (success benv))

;; If the entity carries a :deploy-order, execute it.
;; FIX: deploy-to is strictly 2-arity [o benv]; the original call
;; (deploy-to o) would throw an ArityException whenever a deploy-order
;; was actually present.  Pass the behavior environment through.
(befn try-deploy ^behaviorenv {:keys [entity] :as benv}
      (when-let [o (:deploy-order @entity)]
        (deploy-to o benv)))

;;This is kind of weak, but I don't have a better solution at the moment...
(do (println [:setting-defaults])
    (reset! base/default-behavior roll-forward-beh))

;;aux function to help us add a breadcrumb for
;;the location-based behavior updates.
;;Some locations have overlap.  If so, we look for this
;;to see if the move is prescribed.  We store this as a
;;component in the entity.
(defn prescribe-overlap! [benv t overlap state locname]
  (if (and overlap (pos? overlap))
    (let [entity (:entity benv)]
      (do (debug [:prescribing-overlap (:name @entity) overlap t])
          (swap!
                 entity assoc :prescribed-move
                 {:state-change {:newstate       state
                                 :duration       overlap
                                 :followingstate nil
                                 :timeinstate    0}
                  :overlapping-position [locname :to] ;true
                  :t t}
                 )
          benv))
    benv))

;;SRM bs...
;;SRM takes a different view of unit behavior.
;;Most importantly, for AC units (and deploying RC units),
;;the behavior looks at demand to determine position
;;changes, state-changes, duration, etc., rather than look
;;at the policy.
;;When not in a mission state, the default behavior does
;;provide a cyclical routing, even for AC (At the moment,
;;but that crap will probably change like everything else).

;;We should be able to inject a supply of units that
;;follow the baseline SRM policy, with no demand, and
;;Just have them spawn and run through policy changes.

;;The SRM behavior only really varies upon deployment...
;;so we can create special SRM-specific behaviors that
;;read information about the demand and use it
;;to schedule changes.  For now, there is no
;;notion of recovery...

;;These differences mean we need to handle
;;local-demand effects if deployed....

;;For any movement, we need to check to see if
;;there are effects or guidance associated with the
;;place we're moving to.  Some places tell us what
;;to do, outside of our policy.

;;The only way we can get here is if there is a location-policy
;;in the environment.  How does it get there?
;;TODO_Have the location push behaviors onto some kind of
;;stack.  This could be very powerful (and common), in that
;;the behavior would evaluate its top-most behavior first
;;(i.e. do-current-state), and pop the behavior once
;;the time expired.

;; Normalize the unit's state at a policy position into a set of state
;; keywords (wraps scalar states in a singleton set).
(defn location-based-state [u state]
  (let [s (get-state u state)
        s (if (set? s) s #{s})]
    s))

;; Apply demand/location-supplied guidance (mission length, BOG flag,
;; start/end states, overlap) to drive the entity's next
;; state/location/position change.
(befn location-based-beh {:keys [entity location-based-info ctx] :as benv}
      (when location-based-info
        (let [{:keys [name MissionLength BOG StartState EndState overlap timeinstate]} location-based-info
              ;;StartState is really a policy position....
start-state (location-based-state @entity StartState) newstate (if BOG (conj start-state :bogging) start-state) ;;we need to schedule a state change. ;;and a location-change... _ (swap! entity assoc :location-behavior true) followingstate (if (pos? overlap) (conj newstate :overlapping) (location-based-state @entity EndState)) state-change {:newstate newstate :duration (- MissionLength overlap) :followingstate followingstate :timeinstate (or timeinstate 0)} location-change {:from-location (:locationname @entity) :to-location name} position-change {:from-position (:positionpolicy @entity) :to-position StartState} ;;add the ability to check for prescribed moves... ;;if the demand prescribes one, then we go ahead and schedule it with ;;the entity... wt (- MissionLength overlap) _ (debug [:location-based {:name (:name @entity) :state-change state-change :location-change location-change :wait-time wt :next-position StartState}]) ] (beval change-state-beh (-> benv (prescribe-overlap! (+ (:tupdate benv) wt) overlap followingstate name) (assoc :state-change state-change :location-change location-change :position-change position-change ;new :wait-time wt :next-position StartState)))))) ;;Another potential garbage leak! (def wbm (atom nil)) (defn compute-wait-position [unit] (let [p (:policy unit) current-pos (:positionpolicy unit) ct (:cycletime unit)] (protocols/get-position p ct))) (befn wait-based-beh {:keys [entity statedata wait-based-info ctx] :as benv} (when wait-based-info (let [{:keys [demand wait-time wait-state]} wait-based-info name (:name demand) state-change {:newstate wait-state :duration wait-time :followingstate (:state @entity) :timeinstate 0} location-change {:from-location (:locationname @entity) :to-location name} position (:positionpolicy @entity) position-change (when (= position :followon) ;;we need to compute a position change to ;;make sure the unit reverts to its ;;former position, not stay in followon AND wait. ;;this will cause problems. 
                                {:from-position position
                                 :to-position   (compute-wait-position @entity)})
              _ (debug [:wait-based {:name (:name @entity)
                                     :state-change state-change
                                     :location-change location-change
                                     :position-change position-change
                                     :wait-time wait-time}])
              ;; _ (throw (Exception. (str [:about-to-wait {:name (:name @entity)
              ;;                                            :state-change state-change
              ;;                                            :location-change location-change
              ;;                                            :wait-time wait-time}])))
              ]
          (->seq [(->alter #(assoc %
                                   :state-change    state-change
                                   :location-change location-change
                                   :position-change position-change
                                   ;; very large sentinel waits (>= 999999) are treated as "no wait".
                                   :wait-time (when (and wait-time (< wait-time 999999))
                                                wait-time)))
                  change-location
                  change-position
                  change-state-beh
                  (->alter (fn [benv]
                             (let [u (deref (:entity benv))
                                   _ (debug [:deployable-changed! :waiting
                                             :deployment-index (:deployment-index u)])
                                   _ (swap! (:ctx benv)
                                            #(supply/update-deploy-status u nil nil %))
                                   ;_ (reset! wbm u)
                                   _ :ballz
                                   #_(throw (Exception. (str [:ballz])))]
                               benv)))
                  wait]))))

;;All our behavior does right now is spawn...
;;The only other changes we need to make are to alter how we deploy entities...
;;We can actually handle that outside of the unit's deployment....
;;Possibly include it as a message type...
;;Have a special message handler for it...
;;[genius]

;;If we have an location-based-policy to apply, we can
;;tell the unit via messaging...
;;We typically tell the unit form outside, after we've
;;set it up and everything...

;;SRM behavior overrides some functionality for the base behavior.
(befn srm-beh []
      spawning-beh
      ;(throw (Exception. (str "SRM Behavior doesn't do anything!")))
      )

;; Register the SRM behavior under the "SRM" key (currently identical to
;; the default roll-forward behavior).
(do (println [:setting-srm])
    (swap! base/behaviors assoc "SRM"
           roll-forward-beh ;same thing.
           ;srm-beh
           ))

(comment ;old version
 (befn do-current-state {:keys [entity statedata] :as benv}
       (let [;state (:state @entity)
             state     (:state (deref!! entity) ) ;;slightly faster using keyword as function call.
             state-map (or (:statemap entity) default-statemap)]
         (if (set? state) ;entity has multiple effects...
(let [stats (r/filter identity (r/map (fn [s] (get state-map s)) state))] (->seq stats)) (get state-map state)))) ) (comment ;OBE (defn update-unit "Computes a new simulation context given a specific unit to update, an elapsed time, and an optional time of update. tupdate is inferred to be the current simulation time if none is supplied." ([unit deltat ctx] (update-unit unit deltat (sim/current-time ctx) ctx)) ([unit deltat tupdate ctx] (->> ctx (load-entity! unit deltat tupdate) (roll-forward-beh) ;update the unit according to the change in ;time. (error-on-fail) ;unit updates should never fail. (second ;result is in terms of [:success|:fail ctx], pull out ;the ctx ) (commit-entity!) ; (clear-bb) ))) ;;We'll replace these; for now the units will automatically ;;try to update themselves if possible. ;;Debatable...just invokes roll-forward-beh; I think we can ensure that ;;roll-forward is always invoked first... ;;Re-evaluate the need for this....can we synchronize from outside? ;;ideally, we just keep track of the unit's last update.... (defn sync "Utility function. Synchronize the unit to the current simulation time. If the last update occured before the current time, we roll the unit forward by the delta between the last update and the current time." [unit ctx] (let [tprev (or (sim/last-update (:name unit) ctx) 0) tnow (sim/current-time ctx)] (if (= tprev tnow) (log! (str "unit " (:name unit) "is up to date") ctx) (log! (str "Synchronizing unit " (:name unit) " from " tprev " to " tnow) (update-unit unit (- tnow tprev) tprev ctx))))) ;;Synchronizes the unit to the current time, then applies a time ;;delta, then processes/records the unit's time of update. (defn update "Entry point for computing behavior-based unit updates. Fundamental API function for processing unit entities. Synchronizes the unit to the current simulation time, then computes the new simulation context resulting from the entity behavior over an elapsed deltat (from current simulation time)." 
[unit deltat ctx] (let [nm (get unit :name)] (->> (sync unit ctx) (update-unit unit deltat) (u/unit-update! nm (core/msg "Updated " nm))))) )
61436
;;A namespace for defining and composing entity behaviors.
;;We'll define core behaviors here, leveraging the
;;behavior tree approach defined by spork.ai.behavior .
(ns marathon.ces.behavior
  (:require [spork.ai.core :as ai
             :refer [deref! fget fassoc push-message- debug ->msg]]
            [spork.ai.behavior
             :refer [beval success? success run fail behave ->seq ->elapse
                     ->not ->do ->alter ->elapse-until ->leaf ->wait-until
                     ->if ->and ->and! ->pred ->or ->bnode ->while ->reduce
                     always-succeed always-fail bind! bind!! merge! merge!!
                     push! return! val! befn ]
             :as b]
            [spork.ai.behaviorcontext :as base :refer :all]
            [spork.ai [machine :as fsm]]
            [marathon.data [protocols :as protocols] ]
            [marathon.ces [core   :as core]
                          [unit   :as u]
                          [supply :as supply]
                          [demand :as d] ]
            [spork.cljgraph.core :as graph]
            ;; NOTE: spork.util.general is intentionally aliased twice
            ;; (as gen and as general, below); keep both aliases.
            [spork.util.general :as gen]
            [spork.data.priorityq :as pq]
            ;; FIX: clojure.core.reducers was required twice with the same
            ;; alias; the redundant second entry has been removed.
            [clojure.core.reducers :as r]
            [spork.entitysystem.store :as store :refer :all :exclude [default]]
            [spork.sim.simcontext :as sim]
            [spork.util.general :as general])
  (:import [spork.ai.behaviorcontext behaviorenv]))

;;Overview
;;========

;;The general idea behind how we motivate entities to do things is to
;;use composeable behaviors - as defined in spork.ai.behavior -
;;composed into behavior "trees".  These trees simplify the typical
;;state-transition model we find in finite-state machines.  Where the
;;FSM has zero or more edges - or transitions - between states,
;;behavior trees focus on a small set of composition operations -
;;called internal or intermediate nodes - that define how to traverse
;;the tree.  So, rather than evaluating the next state to transition
;;to - along with the pre, executing, and post conditions for the
;;state - we walk a tree of behaviors, where nodes along the path
;;dictate consistent idiomatic ways to evaluate child nodes.
;;Besides composition, the other core concept is that behaviors may ;;return success, failure, or (in other implementations) run to ;;indicate that a behavior node has not yet finished evaluating. This ;;implementation - focused on unit entity behaviors - takes a fairly ;;naive view and ignores the run evaluation. Rather, we always ;;succeed or fail. ;;Evaluation in the Behavior Environment ;;===================================== ;;Unlike traditional entity "update" or "step" functions, we maintain ;;an explicit context in which the behavior is evaluated - the ;;behavior environment (marathon.ces.basebehavior). This context ;;provides a consistent accumulation of state through which we can ;;view evaluation of the behavior tree as a reduction, with the ;;behavior environment being the accumulated result. Thus, we ;;traverse the tree with an initial behavior environment [reified as a ;;map with useful keys referencing the simulation context/entity ;;store, the entity being processed, the simulated time of the ;;evaluation, and any additional keys useful to evaluation]. Taken as ;;a lexical environment, the keys of the behavior environment form a ;;working set of "variables" or properties that we can either query, ;;update, redefine, add to, or otherwise use to guide behavior ;;evaluation. ;;When evaluating a behavior tree, we start from the root behavior and ;;use its evaluation rules to proceed with the reduction (i.e. ;;compute a resulting behavior environment). The reduced behavior ;;context is then - typically - processed by merging the entity ;;reference into the simulation context reference, returning the ;;simulation context. The function that encapsulates this functional ;;form of entity behavior processing is ;;spork.ai.behaviorcontext/step-entity . ;;Behavior evaluation occurs using the spork.ai.behavior/beval ;;function, which operates similarly to eval but in the domain of ;;behavior trees. 
The evaluation rules are fairly simple: ;;If the item is a vector pair that matches [:success|fail|run ctx], ;;the vector is returned as the output for beval. ;;If the item to be evaluated is a function, then it is applied to the ;;current accumulated context to determine the next behavior to beval. ;;This means that functions may return a final result ala ;;[:success|:fail|:run ctx] or they may return another behavior ;;(function or node) which will continue to be evaluated against the ;;context. ;;If the item to be evaluated is a behavior node - anything ;;implemented the spork.ai.IBehaviorTree protocol - then it is beval'd ;;with the current accumulated context (delegating to the behave ;;function of the IBehaviorTree). ;;The current implementation assumes that the highest-level of ;;evaluation - as in spork.ai.behaviorcontext/step-entity! will ;;always be successful. Anything else is an error (even returning ;;[:fail ...]. ;;Behavior Functions ;;================= ;;Callers may define functions that operate on the behavior ;;environment directly; in some cases this is a useful - if low level ;;- approach to defining behaviors. Arbitrary functions that map a ;;context to a [:success ...] or a [:fail ...] may be used as ;;behaviors, and will operate correctly under beval. ;;For convenience, and to focus on behavior tree traversal as an ;;"evaluation", the spork.ai.behavior/befn macro provides a convenient ;;way to define aforementioned behavior functions with convenient ;;destructuring and behavior result packing built in. Using the befn ;;macro - to define behavior functions - is similar to the standard ;;clojure.core/defn form, with a change the context: The function ;;arguments correspond to a map-destructing of the behavior ;;environment, and where specified by a type hint, will compile to ;;fast field-based accessors for the specific behavior environment. ;;To elide repetitive use of (success ...) 
and the like, and to align ;;with clojure's idiom of using nil for failure, nil results are ;;automatically converted to (failure ...) evaluations. Otherwise, ;;behavior evaluation continues as per beval - the caller can ;;immediately return from the behavior using (success ctx) or yield ;;another behavior as a return value - which will effectively continue ;;evaluation using the new behavior. ;;Additional operations available in a behavior function include: ;;(bind!! {:a 1 :b 2}) => (success (merge benv {:a 1 :b 2})) ;;(return! ^MapEntry [:success x]) => x ;;(return! ^MapEntry [:fail x]) => (Throw (Exeption. ...)) ;;Behavior Nodes ;;============== ;;Aside from encoding custom functionality with raw functions, ;;pre-existing behavior nodes provide an expressive domain specific ;;language for defining behavioral "flow control" in a composeable ;;manner. They effectively define custom behavior functions - again ;;returning [:success|:fail|:run ctx] behind a unified protocol. The ;;magic lies in how a behavior node executes and interprets the ;;traversal of its children. For example, the ->or behavior ;;corresponds to a logical or of all child nodes (or clauses). Upon ;;evaluation, ->or will reduce its children - in order - returning on ;;the first [:success ctx] it finds, else [:fail ctx]. This is ;;similar to the 'or macro in clojure. Similarly, the ->and will ;;return at the first sign of a failed child node, else return ;;[:success ctx] as its behavior reduction. In-order, ;;i.e. left-to-right node traversal is a common idiom (although not a ;;constraint) in behavior trees, and allows one to follow the behavior ;;"logic" in a simple, consistent manner by following the traversal. ;;These nodes provide a simple way to compose behaviors and to ;;communicate success/failure throughout the traversal. These trees ;;may be embedded as children of like nodes, creating sophisticatd ;;behaviors with a declarative specification. 
Callers are advised to ;;use the canonical behavior nodes where possible to utilize their ;;expressive power, readability, and re-use. ;;Updating Units by Sending Messages ;;================================== ;;Technically, a unit entity update is any application of ;;spork.ai.behaviorcontext/step-entity!, in which the entity, the ;;simulation context, and a behavior - either a unique behavior ;;associated with the entity's :behavior component, or a default ;;global behavior defined in ;;spork.ai.behaviorcontext/default-behavior - are munged into a ;;spork.ai.behaviorcontext/behaviorenv. ;;Thus, stepping entities requires the simulation context/entity ;;store, the entity to update, and a message to send it. The result ;;will be a simulation context / entity store reflecting any committed ;;changes in response to how the entity "behaved" in response to the ;;message. ;;We use messages - as defined in marathon.ces.core/->msg, as an ;;entry-point to initiate behavior and provide initial bindings for ;;the behavior environemnt. For instance, the convenience function ;;marathon.ces.core/handle-message! merely wraps step-entity!, while ;;marathon.ces.core/send!! provides a simple API for defining messages ;;to send to the entity in addition to computing the result of a send ;;/ behavior. ;;When are Messages Sent, or When do Updates Happen? ;;======================= ;;Currently, entities send themselves messages typically in reponse to ;;"organic" events such as following a rotational policy. Once the ;;entity is initialized, it will likely request an update at a later ;;time, the span of which is predicated based on the amount of time ;;the unit is supposed to wait in a particular state according to its ;;rotational policy. Absent any "outside" interference, this message ;;will be propogated to the entity at the scheduled time, with the ;;entity living in eventless stasis (retaining the state from its last ;;known update) until the message is delivered. 
For unit entities, ;;message delivery is dispatched during invocation of ;;marathon.ces.supply/manage-supply, at which point any units ;;scheduled for updating are notified. ;;Inorganic messages occur when external forces exert unexpected ;;control over the unit entity. These typically manifest in events ;;like filling demand, sending units home, changing policies, or any ;;number of things that are unexplained by the unit's rotational ;;policy - yet necessary for simulation. ;;How Are Messages Processed? ;;=========================== ;;Messages may occur out-of-sync with the unit's current status. That ;;is, on the timeline the unit follows, the entity is not guaranteed ;;to have been "updated" at the same time step as the new message is ;;received. ;;Consequently, we need to synchronize, or roll the unit forward in ;;time to account for any pending updates and to bring the entity into ;;a synchronized state at the time of the message. Unit entity ;;behavior is defined to account for an elapsed time, represented by ;;deltat in the behavior environment, which allows us to accomplish ;;rolling forward. For instance, if a unit arrives at a dwelling ;;state, and needs to wait there for 365 days until the next update, ;;with the implication that the dwelling behavior merely adds 1 unit ;;of dwell to a dwell statistic for every elapsed day, the entity will ;;have an update scheduled 365 days later - at which point the deltat ;;will indicate the need to roll forward 365 days and thus add 365 ;;days to the dwell stat. ;;If an update or similar message arrives earlier than the next ;;scheduled update, such as from an inorganic message - say a ;;deployment 18 days later, then the unit must be "aged" or rolled ;;forward 18 days to account for the elapsed time. From that ;;synchronization point, the unit may process the pending message and ;;accomplish its deployment, initiating another scheduled update. 
;;Message processing always occurs after synchronizing the unit with
;;the time frame that the message was sent.  In terms of behavior
;;trees, message processing and "rolling forward" are merely behavior
;;functions that can be composed like any other.  This opens up a raft
;;of flexible options for "communicating" with entities, as well as
;;offering the possibility for either centralizing and synchronously
;;updating entity state for all entities, or using Erlang-style
;;message-passing concurrency (or other asynchronous communication and
;;state management like clojure's software transactional memory or
;;channels) to perform asynchronous updates, possibly in parallel.
;;Currently, the default implementation is synchronous and
;;centralized.

;;__utils__
;; FIX: the metadata key the Clojure compiler honors for inlined
;; compile-time constants is :const; ^:constant was inert metadata
;; with no effect.
(def ^:const +inf+ Long/MAX_VALUE)
(def ^:const +twenty-years+ 7300)

(defmacro ensure-pos!
  "Ensures n is a positive, non-zero value, else throws an exception."
  [n]
  `(if (pos? ~n) ~n
       (throw (Exception. (str [:non-positive-value ~n])))))

(defmacro non-neg!
  "Ensures n is a positive or zero value, else throws an exception."
  ([lbl x]
   `(if (not (neg? ~x)) ~x
        (throw (Exception. (str [~lbl :negative-value ~x])))))
  ([x]
   `(if (not (neg? ~x)) ~x
        (throw (Exception. (str [:negative-value ~x]))))))

#_(defn non-neg!
    ([lbl x] (if (not (neg? x)) x
                 (throw (Exception. (str lbl " " x " cannot be negative!")))))
    ([x] (non-neg! "" x)))

;; Like (get m k), but evaluates else only when the lookup misses or is nil.
(defmacro try-get [m k & else]
  `(if-let [res# (get ~m ~k)]
     res#
     ~@else))

;; Reducible (and seqable) concatenation of multiple collections that
;; avoids realizing intermediate sequences under reduce.
(defn rconcat
  ([& colls]
   (reify clojure.core.protocols/CollReduce
     (coll-reduce [this f1]
       (let [c1   (first colls)
             init (reduce (fn [acc x] (reduced x)) (r/take 1 c1))
             a0   (reduce f1 init (r/drop 1 c1))]
         (if (reduced?
              a0)
           @a0
           (reduce (fn [acc coll]
                     (reduce (fn [acc x]
                               (f1 acc x)) acc coll)) a0 (r/drop 1 colls)))))
     (coll-reduce [this f init]
       (reduce (fn [acc coll]
                 (reduce (fn [acc x]
                           (f acc x)) acc coll)) init colls))
     clojure.lang.ISeq
     (seq [this] (seq (into [] (r/mapcat identity colls) )))
     )))

;; Succeed with ctx, printing msg along the way.
(defn pass [msg ctx]
  (->> (success ctx)
       (core/debug-print [:passing msg])))

;; When true, if-y prompts on *in* for interactive confirmation.
(def ^:dynamic *interact* false)

;; Interactive guard: when *interact* is truthy at the expansion site,
;; evaluate expr only after reading a "Y" from input, else fall through
;; to else; with *interact* false, always evaluates expr.
(defmacro if-y [expr & else]
  `(if ~'*interact*
     (if (and (= (clojure.string/upper-case (read)) "Y"))
       ~expr
       ~@else)
     ~expr))

;; Log msg and yield ctx unchanged.
(defmacro log! [msg ctx]
  `(do (debug ~msg)
       ~ctx))

;;migrate.,..
;; Behavior constructor: logs msg and succeeds with the given context.
(defn echo [msg]
  (fn [ctx] (do (debug msg)
                (success ctx))))

;; Hinted deref: compiles to a direct IDeref.deref interop call.
(defmacro deref!! [v]
  (let [v (with-meta v {:tag 'clojure.lang.IDeref})]
    `(.deref ~v)))

(defmacro val-at
  "Synonymous with clojure.core/get, except it uses interop to directly
   inject the method call and avoid function invocation.  Intended to
   optimize hotspots where clojure.core/get adds unwanted overhead."
  [m & args]
  (let [m (with-meta m {:tag 'clojure.lang.ILookup})]
    `(.valAt ~m ~@args)))

;;let's see if we can memoize get-next-position for big gainz yo...
;; Two-argument memoizer backed by nested mutable HashMaps; xkey/ykey
;; project the cache keys from the raw arguments.
;; NOTE(review): not thread-safe — plain java.util.HashMap.
(defn memo-2 [f & {:keys [xkey ykey]
                   :or   {xkey identity
                          ykey identity}}]
  (let [xs (java.util.HashMap.)]
    (fn [x1 y1]
      (let [x (xkey x1)
            y (ykey y1)]
        (if-let [^java.util.HashMap ys (.get xs x)]
          (if-let [res (.get ys y)]
            res
            (let [res (f x1 y1)]
              (do (.put ys y res)
                  res)))
          (let [res (f x1 y1)
                ys  (doto (java.util.HashMap.)
                      (.put y res))
                _   (.put xs x ys)]
            res))))))

;;slightly faster for memoizing policy name.
;;This should be a concurrent hashmap...
;; Two-argument memoizer specialized for policies: the first cache key is
;; the policy's atomic name rather than the policy value itself.
(defn memo2-policy [f]
  (let [xs (java.util.HashMap.)]
    (fn [^clojure.lang.ILookup x1 y]
      (let [x (marathon.data.protocols/atomic-name x1) #_(.valAt x1 :name)]
        (if-let [^java.util.HashMap ys (.get xs x)]
          (if-let [res (.get ys y)]
            res
            (let [res (f x1 y)]
              (do (.put ys y res)
                  res)))
          (let [res (f x1 y)
                ys  (java.util.HashMap.)
                _   (.put ys y res)
                _   (.put xs x ys)]
            res))))))

;; One-argument variant of memo2-policy: caches f keyed on the policy's
;; atomic name.  NOTE(review): mutable HashMap backing; not thread-safe.
(defn memo1-policy [f]
  (let [xs (java.util.HashMap.)]
    (fn [^clojure.lang.ILookup x1]
      (let [x (marathon.data.protocols/atomic-name x1) #_(.valAt x1 :name)]
        (if-let [res (.get xs x)]
          res
          (let [res (f x1)]
            (do (.put xs x res)
                res)))))))

;;an alternative idea here...
;;use a closure to do all this stuff, and reify to give us implementations
;;for the object.  We can also just use a mutable hashmap behind the
;;scenes if we want to...at some point, it's probably better to have
;;the shared-nothing approach and just leave entities in their
;;own mutable cells, isolated from other state.  We can
;;still maintain persistent history.  Everything becomes a lookup though;
;;we have to find the current value of the entity at time t;
;;More to think of here..

;;New
;;Environment for evaluating entity behaviors, adapted for use with the simcontext.
;;If we provide an address, the entity is pushed there.  So, we can have nested
;;updates inside associative structures.

;;__Utility functions__
;;Entity step operations...

;; Advance a cyclic counter: wraps to 0 once x reaches width.
(defn progress-cycle [x width]
  (if (>= x width)
    0
    (unchecked-inc x)))

;;testing function...
;; Stochastic check: a deployment definitely ends after 465 (= 15*31) days,
;; or with 1% chance per check after 30 days.
(defn deployment-over? [y]
  (or (>= y (* 15 31))
      (and (>= y 30)
           (<= (rand) 0.01))))

;;testing function...
;; Stochastic check: eligible after 365 days of dwell, with probability
;; scaled by dwell time relative to tmax.
(defn should-deploy? [t tmax]
  (and (>= t 365)
       (<= (rand) (* 0.005 (/ (double t) tmax)))))

;; True when the entity is in the :deploying state.
(defn deployed? [e]
  (identical? (:state e) :deploying))

;; True when time t has reached the cycle maximum tmax.
(defn should-reset? [t tmax]
  (>= t tmax))

;; True when the fsm statedata's current state is :spawning.
(defn spawning? [^spork.ai.machine.statedata statedata]
  (identical? (.curstate statedata) :spawning))

;;aux functions will most likely be plentiful.  We specifically
;;have a host of helper functions for unit-specific entity behaviors.
;;Most of them deal with how the units read their policies and stuff.
;;__Aux Functions__

;;#TODO See if we can encode or derive a more meaningful semantics
;;from the indices currently associated with the states...for
;;instance, :deployable randomly came back with 7 as a state, we
;;either don't want this or we want to have it mean something.

;;Note: these are specific to unit, so could probably go into the unit
;;namespace; save on real estate.

;;Performance: inlined to alleviate minor hotspot....marginal gains.
;;Lol inlining hurts us a bit here, better not to inline...
;; Map a policy position to the unit's state keyword; special-cases
;; :abrupt-withdraw and :recovery, and coerces numeric policy states
;; to :dwelling.
(defn get-state [unit position]
  (case position
    :abrupt-withdraw :abrupt-withdraw
    :recovery        :recovery
    (let [s (protocols/get-state (val-at unit :policy) position)]
      (if (number? s) :dwelling s) ;;weird...
      )))

;; TOM Hack 24 July 2012 -> again, to facilitate implicit recovery.  In the case of explicit recovery policy,
;; we defer to the unit's policy to determine how long to wait.  In the case of implicit recovery, we use
;; a global parameter for all units, to determine wait time if they are in a recovery state.
;; Similarly, we account for units with policies that do not have an explicit recovered state.
;; In this case, we inject the equivalent of a fake state, with 0 wait time, to allow for recovery
;; processing to occur.

;;original non-memoized function.
#_(defn get-next-position [policy position]
    (case position
      :recovery  :recovered
      :recovered :re-entry
      (if-let [res (protocols/next-position policy position)]
        res
        (throw (Exception. (str [:dont-know-following-position position
                                 :in (:name policy)]))))))

;;memoized to alleviate hotspot, marginal gains.
;;NOTE: this causes a problem with composite policies...
;;We need to memoize based on a finer criteria, based on the
;;active policy name...
;;Added another default for :modernizing-deployable.
;; Memoized (per atomic policy name) successor-position lookup; throws
;; when the policy defines no successor for the position.
(def get-next-position
  (memo2-policy
   (fn get-next-position [policy position]
     (case position
       :recovery  :recovered
       :recovered :re-entry
       (if-let [res (protocols/next-position policy position)]
         res
         (throw (Exception.
(str [:dont-know-following-position position :in (:name policy)])))) ))))

;;We're getting too far ahead of ourselves during policy change calcs.
;;Jumping the position we're "in"...for max/nearmax policies, this leaves
;;us with.

;;Patched to allow specified recovery times.
(defn policy-wait-time
  "Computes how long a unit should wait, per its policy, relative to a
   position.  :recovery waits recovery-time (0 in the lower arities);
   :recovered waits 0 - both weak defaults, see comments.  Otherwise the
   wait is the transfer time between the NEXT position and the one after
   it; the 4/5-arity versions additionally subtract any already-elapsed
   time (deltat beyond the FSM's remaining time).  Throws when the policy
   defines no transfer time for the computed transition."
  ([policy statedata position deltat recovery-time]
   (cond (identical? position :recovery)  recovery-time
         ;;this is a weak default.  We'll either fix the policies or wrap the behavior later.
         (identical? position :recovered) 0
         :else
         (let [frompos (get-next-position policy position)
               topos   (get-next-position policy frompos)]
           (if-let [t (protocols/transfer-time policy frompos topos)]
             (- t (- deltat (fsm/remaining statedata)))
             (throw (Exception.
                     (str [:undefined-transfer :from frompos :to topos
                           :in [(protocols/policy-name policy)
                                (protocols/atomic-name policy)]]))) ;if it's not defined in policy...instant?
             ))))
  ([policy statedata position deltat]
   (policy-wait-time policy statedata position deltat 0))
  ;;weak, I just copied this down.  Ugh.
  ;;NOTE(review): the 2-arity duplicates the cond above without the
  ;;elapsed-time adjustment - intended for planned (future) waits only.
  ([policy position]
   (cond (identical? position :recovery)  0
         ;;this is a weak default.  We'll either fix the policies or wrap the behavior later.
         (identical? position :recovered) 0
         :else
         (let [frompos (get-next-position policy position)
               topos   (get-next-position policy frompos)]
           (if-let [t (protocols/transfer-time policy frompos topos)]
             t
             (throw (Exception.
                     (str [:undefined-transfer :from frompos :to topos
                           :in [(protocols/policy-name policy)
                                (protocols/atomic-name policy)]]))))))))

;;aux function to help with policy transfers.
;;Transfer time from frompos directly to its policy-defined successor.
(defn immediate-policy-wait-time [policy frompos]
  (protocols/transfer-time policy frompos (get-next-position policy frompos)))

;;Pulled out to address concerns in get-wait-time.
;;Computes the wait time - i.e. transfer time - between
;;frompos and topos relative to a unit's policy and statedata.
(defn immediate-wait-time
  "Wait time for moving from frompos to topos under the unit's policy,
   adjusted by time already elapsed (deltat beyond the FSM's remaining
   time).  The 3-arity variant derives topos from the policy's successor
   of frompos and unpacks deltat/statedata from the behavior environment."
  ([unit frompos topos deltat statedata]
   (let [wt        (protocols/transfer-time (:policy unit) frompos topos)
         remaining (fsm/remaining statedata)
         deltat    (or deltat remaining) ;allow the ctx to override us...
         ]
     (- wt (- deltat remaining))))
  ([unit frompos {:keys [deltat statedata] :as benv}]
   (immediate-wait-time unit frompos
                        (get-next-position (:policy unit) frompos)
                        deltat statedata)))

;;Could be a cleaner way to unpack our data, but this is it for now...
;;need to fix this...let's see where we use it.
;;Note: this depends on policy-wait-time, which is great, but the
;;use-case is intended for a future, planned wait.  In other words,
;;this fails us when we want to compute the wait time from a current
;;policy position - ala during a policy change.
(defn get-wait-time
  "Computes the wait time for a unit at a position from the behavior
   environment.  3-arity: policy-derived wait (successor-of-successor
   semantics via policy-wait-time), honoring the unit's :default-recovery.
   2-arity: derefs the entity from the environment first.  1-arity: simply
   returns any :wait-time already present in the environment."
  ;;WARNING: we define an inconsistency here in the 4-arity version.
  ;;If we specify the from,to positions, the wait-time is computed using
  ;;frompos as the starting position.  The other arities compute
  ;;using policy-wait-time, which uses the successor wait time of the
  ;;current position - i.e. how long will I have to wait in the next position.
  ;;Current usage appears correct - namely the 3-arity version, but that
  ;;could throw us off - as it did for initial policy-change implementation!
  ([unit position {:keys [deltat statedata ctx] :as benv}]
   ;;uses position after current...
   (policy-wait-time (:policy unit) statedata position (or deltat 0)
                     (or (:default-recovery unit) 0)))
  ([position {:keys [entity] :as benv}]
   (get-wait-time @entity position benv))
  ([{:keys [wait-time] :as benv}] wait-time))

;;Basic API
;;=========
;;The rest of the simulation still relies on our pre-existing API,
;;namely that we have "change-state", and "update"
;;note that change-state already exists in marathon.sim.unit/change-state,
;;we're merely providing an interface to the unit's behavior for it.
;;Also note that change-state is only called (currently) from ;;marathon.sim.demand (for abrupt withdraws), and marathon.sim.supply ;;(for deployments). ;;might ditch these.... (declare change-state-beh update-state-beh update-state roll-forward-beh lite-update-state-beh check-overlap check-deployable check-deployable-state finish-cycle spawning-beh ;; age-unit moving-beh process-messages-beh ;;re-entry behaviors abrupt-withdraw-beh re-entry-beh recovery-beh ;;policy change fwd declarations apply-policy-change defer-policy-change policy-change-state try-deferred-policy-change ;; auxillary behavior definitions. location-based-beh wait-based-beh ) ;;API ;;=== ;;These are the entry points that will be called from the outside. ;;Under the legacy implementation, they delegated to a hard coded ;;finite state machine that interpreted rotational policy to infer ;;state transitions. The general mechanism is to augment the ;;simulation context. We may want to define a single function ;;load-context and unload-context the clears up any augmented ;;contextual items we put in. That, or manage the simulation ;;context separate from the behavior context. For now, managing ;;the simcontext along with the behavior context (treating it ;;as a huge blackboard) seems like the simplest thing to do. ;;__update-entity!__ ;;Similarly, we'll have update take the context last. ;;update will depend on change-state-beh, but not change-state. ;;change-state is a higher-level api for changing things. ;;Note: this is covered by step-entity! We need to ;;include the roll-forward-beh though, to ensure we're ;;consistent. ;;we can wrap these up and just pass a generic message for the ;;behavior to interpret. ;;change-state becomes ;;load-entity ;;add-message {:to-state to-state :deltat deltat :ctx ctx} ;;Move this out to marathon.ces.unit? ;;auxillary function that helps us wrap updates to the unit. ;;added check to prevent recording traversals to save time and ;;memory. Does not affect debugging. 
(defn traverse-unit
  "Moves unit u from position `from` to position `to` at time t, setting
   :positionpolicy to the destination.  The traversal itself is only
   recorded (via u/add-traversal) when *debug* is on, to save time and
   memory; debugging output is unaffected."
  [u t from to]
  (-> (if marathon.ces.core/*debug*
        (u/add-traversal u t from to)
        u)
      (assoc :positionpolicy to)))

;;this is kinda weak, we used to use it to determine when not to
;;perform updates via the global state, but it's probably less
;;important now...we can actually codify this structurally
;;in the behavior tree now...
;;special states just diverted the fsm update function to
;;a different path (bypassing the global state, i.e. not
;;aging/advancing).  Where we had direct method calls to
;;other state handler functions, we can now just directly
;;encode the transition in the tree...
;;Inlined membership test for states that bypass the normal aging path.
(definline special-state? [s]
  `(#{:spawning :abrupt-withdraw :recovered :waiting #_:recovery} ~s))

(defn just-spawned?
  "Determines if the entity recently spawned, indicated by a default negative spawn time or a spawntime in the present."
  [{:keys [entity ctx] :as benv}]
  (identical? (:state @entity) :spawning))

;;These accessors help us ensure that we're not
;;getting stuck in invalid transitions, or spawning
;;with funky null errors.

;;Resolves a policy position to its state, throwing (rather than returning
;;nil) on unknown positions so bad transitions fail loudly.
(defn position->state [policy positionpolicy]
  (if-let [res (protocols/get-state policy positionpolicy)]
    res
    (throw (Exception. (str {:unknown-position positionpolicy
                             :policy (:name policy)})))))

;;We can make this processing more sophisticated...
;;Since we

;;Resolves a policy position to its cycle time, throwing on positions
;;that are not part of the cycle.
(defn position->time [policy positionpolicy]
  (if-let [res (protocols/get-cycle-time policy positionpolicy)]
    res
    (throw (Exception. (str {:position-not-in-cycle positionpolicy
                             :policy (:name policy)})))))

; (let [st (:spawntime @entity)]
;   (or (neg? st)
;       (== st (core/get-time @ctx))))

;;True when the FSM's remaining time is exhausted by the pending deltat
;;(a missing deltat counts as 0).
(defn state-expired? [{:keys [deltat statedata] :as benv}]
  (let [r  (fsm/remaining statedata)
        dt (or deltat 0)]
    (<= r dt)))

;;debatable utility...
;;Not sure where we're using these guys....
;;True when the behavior environment's planned :next-position is `to`.
(defn to-position? [to benv]
  (identical? (:next-position benv) to))
(defn from-position? [from benv] (identical?
(:from-position benv) from)) ;;Capturing change information in a structure, rather than passing it ;;around willy-nilly in the environment. If we have a pending ;;change, there will be changeinfo. This only applies for instantaneous ;;changes....That way, we can communicate our state updates serially ;;by adding (and removing) changeinfo. (comment (defrecord changeinfo [newstate duration followingstate]) ) ;;Behaviors ;;========= ;;this is a primitive action masked as a behavior. (defn move! ([location deltat destination wait-time] (->and [(->alter (fn [benv] (merge benv {:deltat deltat :next-position destination :next-location location :wait-time wait-time}))) moving-beh])) ([deltat destination wait-time] (->and [(->alter (fn [benv] (merge benv {:deltat deltat :next-position destination :wait-time wait-time}))) moving-beh])) ([destination wait-time] (->and [(->alter (fn [benv] (merge benv {:next-position destination :wait-time wait-time}))) moving-beh])) ([destination] (->and [(->alter (fn [benv] (merge benv {:next-position destination }))) moving-beh]))) ;;A lot of these behaviors operate on the concept of a blackboard. ;;The behavior environment, defined in spork.ai.behaviorcontext, ;;is a map of lexical bindings that we use to evaluate the consequences ;;of a unit's behavior. Certain behaviors may place or remove things ;;from the blackboard to communicate information with other behaviors ;;"down the line". We can couple behaviors directly using the behavior ;;tree, or allow them to be indirectly coupled using the blackboard ;;as a form of simple event communication. Many behaviors, like ;;update-after, and roll-forward-beh, will actually "consume" ;;items in the environment, like time. It will be common to see ;;an ephemeral, or a transactional semantics with the behaviors. (befn +nothing-state+ [entity deltat ctx] (->do (fn [_] (log! 
(str (:name @entity) " is doing nothing for " deltat) ctx) ))) ;;Determines if our entities are going to wait beyond the feasible ;;time horizon. It's not that much of a stretch to consider anything longer ;;than a decent human lifetime effectively infinite... (defn effectively-infinite? [^long x] (or (== x +inf+ ) (>= x (* 365 100)))) (defn compute-proportion "Given a current cycletime, a cyclelength we're projecting from, and a cyclelength we're projecting to, computes the proportion of the normalized projected cycle length - the cycle propotion. When dealing with effectively infinite policies, we avoid projecting onto finite policies with ~0 propotion for everything by computing the cycle proportion based on the remainder of the current cycletime relative to the target cyclelength. Otherwise, we compute a simple coordinate based on the proportion of ct : clfrom." [ct clfrom clto] (let [finf (effectively-infinite? clfrom) tinf (effectively-infinite? clto)] (cond (or (and (not finf) (not tinf)) (and finf tinf)) ;policy relative (core/float-trunc (/ ct clfrom) 6) tinf ;relative to infinite policy... (core/float-trunc (/ ct clto) 6) :else (-> (rem ct clto) ;chop (/ clto) ;normalize (core/float-trunc 6))))) ;;note-we have a wait time in the context, under :wait-time ;;updates an entity after a specified duration, relative to the ;;current simulation time + duration. ;;Note: Added the invariant that we cannot have negative wait-times. ;;ensure-pos! throws an exception if we encounter negative wait times. (befn update-after ^behaviorenv [entity wait-time tupdate ctx] (when wait-time (->alter #(if (effectively-infinite? wait-time) (do (debug [(:name @entity) :waiting :infinitely]) ;skip requesting update. (dissoc % :wait-time) ) (let [tfut (+ tupdate (ensure-pos! wait-time)) e (:name @entity) _ (debug [e :requesting-update :at tfut])] (swap! 
ctx (fn [ctx] (core/request-update tfut e :supply-update ctx))) (dissoc % :wait-time) ;remove the wait-time from further consideration... ))))) (require '[clojure.pprint :as pprint]) ;;our idioms for defining behaviors will be to unpack ;;vars we're expecting from the context. typically we'll ;;just be passing around the simulation context, perhaps ;;with some supplementary keys. ;;Let's think about what it means to change state.... ;;Are we in fact changing the root of the behavior? ;;This is where the transition from FSM to behavior tree ;;comes in.... (befn change-state-beh! {:keys [entity ctx statedata state-change deltat] :or {deltat 0} :as benv} (when state-change (let [_ (echo [:state-change (:name @entity)]) {:keys [newstate duration followingstate timeinstate] :or {timeinstate 0}} state-change _ (when (not duration) (throw (Exception. (str "nil value for duration in state change behavior!")))) followingstate (or followingstate newstate) ;;we change statedata here... wt (- duration timeinstate) _ (when (neg? wt) (throw (Exception. (str [:negative-wait-time])))) _ (debug [:changing-state state-change :wait-time wt]) newdata (assoc (fsm/change-statedata statedata newstate duration followingstate marathon.ces.core/*debug*) :timeinstate timeinstate) benv (merge (dissoc benv :state-change) {:statedata newdata :duration duration :timeinstate timeinstate :wait-time wt}) _ (reset! ctx (supply/log-state! (:tupdate benv) @entity (:state @entity) newstate @ctx)) _ (swap! entity #(assoc % :state newstate :statedata newdata)) ;;update the entity state, currently redundant. ;_ (debug [:statedata statedata :newdata newdata :newstate newstate]) ] (beval update-state-beh benv)))) (def change-state-beh (->seq [(echo :<change-state-beh>) change-state-beh!])) ;;Aux function to compute our state change during spawn. ;;Setting up initial conditions is a PITA, particularly ;;since it's possible that some of the input data is ;;intentionally empty or zeroed out. 
This helps set up ;;the bread-n-butter wait time as a function of the ;;spawning information, if any, the entity's policy, and ;;the proposed position for the entity. #_(defn compute-state-stats [entity cycletime policy positionpolicy] (let [duration (:duration (:spawn-info @entity)) ;;duration may be 0. ;;if so, we check policy to see if we should be waiting more than 0. derive? (or (not duration) (zero? duration)) duration (if derive? (let [pw (policy-wait-time policy positionpolicy) _ (debug [:derived-duration (:name @entity) positionpolicy pw])] pw) ;derive from policy. duration) ;;If the position is not in the policy, then we need to ;;find a way to compute the duration. ;;If we have spawn-info, then we have duration... position-time (if derive? #_(pos? duration) ;prescribed. (try (position->time policy positionpolicy) (catch Exception e (if (protocols/leads-to-start? policy positionpolicy) 0 (throw (Exception. (str [positionpolicy :isolated-from-cycle])))))) 0) ;;We're running into problems here....the positionpolicy cycletime (if (< cycletime position-time) position-time cycletime)] ;;timeinstate also subject to spawn-info.... {:cycletime cycletime :position-time position-time :timeinstate (if duration 0 (non-neg! "timeinstate" (- cycletime position-time))) ;;timeremaining is subject to spawn info. :timeremaining (or duration ;this should keep us from bombing out... (protocols/transfer-time policy positionpolicy (protocols/next-position policy positionpolicy)))})) (defn compute-state-stats [entity cycletime policy positionpolicy] (let [duration (:duration (:spawn-info @entity)) ;;duration may be 0. ;;if so, we check policy to see if we should be waiting more than 0. derive? (or (not duration) (zero? duration)) ;;if so, we check policy to see if we should be waiting more than 0. duration (if #_derive? (and duration (zero? 
duration)) (let [pw (policy-wait-time policy positionpolicy) _ (debug [:derived-duration (:name @entity) positionpolicy pw])] pw) ;derive from policy. duration) ;;If the position is not in the policy, then we need to ;;find a way to compute the duration. ;;If we have spawn-info, then we have duration... position-time (if derive? ;prescribed. (try (position->time policy positionpolicy) (catch Exception e (if (protocols/leads-to-start? policy positionpolicy) 0 (throw (Exception. (str [positionpolicy :isolated-from-cycle])))))) 0) ;;We're running into problems here....the positionpolicy cycletime (if (< cycletime position-time) position-time cycletime) ] ;;timeinstate also subject to spawn-info.... {:cycletime cycletime :position-time position-time :timeinstate (if duration 0 (non-neg! "timeinstate" (- cycletime position-time))) ;;timeremaining is subject to spawn info. :timeremaining (or duration ;this should keep us from bombing out... (protocols/transfer-time policy positionpolicy (protocols/next-position policy positionpolicy)))})) ;;Adding a lame function to detect for and enable pre-fill. ;;Right now, we just assume it's enabled, but we'll check the context. (defn prefill? [ctx] (-> ctx core/get-parameters :PreFill)) ;;prefill is modeled as the unit's available time impacting its bog budget. So ;;the simplest scheme is to just reduce the bog budget proportional to the ;;unit's available time. Maybe decrement according to its stop-deployable time? ;;Addendum: we want to ensure we never have overlop occuring on day 1. ;;We also want to ensure deployments are somehow optimally ordered in the past. ;;That is, we don't have units deploying (and leaving) at the same time, ;;creating clumps or resonant unfilled demand. So to avoid this, we ;;create a prefill deployemnt schedudle where units are evenly spaced ;;as if they had deployed in the past. The trick is to ensure that ;;we account for overlap in this spacing. 
We also want this to be ;;determinstic and "ideal" akin to how our cycletime spacing is ;;an ideal representation. So to have an ideal prefill deployment ;;schedule is to minimize the effects of overlap and ensure no ;;clumping in addition to the constraint that no prefilling unit ;;will overlap on day 1... ;;The projection scheme ends up being identical to just offsetting ;;unit cycletimes by - (overlap + 1). The only caveat is that we now ;;have the possibility of units near cycletime of (start-deployable + overlap) ;;becoming ineligible to deploy since the offset pushes their cycletime ;;before start deployble. So we allow them to have max prefill by ;;flooring the cycletime at start-deployable. We thus allow units close ;;to start-deployable to have maximum prefill bog budget if they are ;;selected for prefill deployments. This should be unlikely. ;;Edit: we have a problem for policies where tf - ts is > BogBudget, ;;since we end up with negative numbers. This ends up increasing BOG ;;for prefill. First manifested with an unexpected MaxUtilization and ;;infinite policy cyclelength. Working out a scheme to either warn ;;or correct. ;;For any policy where the span of the deployable window is ;;greater than the bogbudget, we have a situation where it's ;;possible to get a negative number. We apply a correction ;;iff we have eligible deployers (within the interval [ts tf]) ;;where we project the monotonically decreasing prefill (negative ;;numbers the further away you get from tf - bogbudget) onto ;;a rotating sequence of prefills similar to the values we ;;have for the units that had positive numbers. To do this, ;;we transform the negative prefills by computing their ;;abs value's modulus relative to the bogbudget (bound), ;;so they projected onto positive numbers within the span ;;of [0 bound]. Then to get the ordering correct so we ;;have a decreasing order of positive numbers, we ;;subtract the result from the bound. 
This gives
;;us a nice repeating spread that's determined by
;;cycletime, bogbudget, overlap, and the deployable
;;window and "should" work with any policy.

;;Maps a possibly-negative prefill value x onto [0 bound].  Values > -1
;;pass through unchanged; negative values are reflected via modular
;;arithmetic so increasingly-negative inputs project onto a decreasing,
;;repeating spread of positive prefill values (see discussion above).
(defn inverse-clamp [bound x]
  (if (> x -1)
    x
    (let [x (mod (- x) bound)
          y (- bound x)]
      y)))

;;Computes the prefill bog budget for an entity: the amount of BOG the
;;unit is assumed to have already consumed had it deployed in the past.
;;Returns nil when cycletime falls outside the policy's deployable window
;;[ts tf); otherwise returns a value in [0 .. bogbudget - (overlap + 1)],
;;projecting the cycletime back by overlap+1 days (floored at ts) and
;;clamping negative results via inverse-clamp.  Throws ex-info if the
;;invariant is violated.
(defn compute-prefill [ent policy cycletime]
  (let [ts        (protocols/start-deployable policy)
        tf        (protocols/stop-deployable  policy)
        bogbudget (protocols/max-bog policy)
        ;;addresses infinite cycle stuff, incorporates
        ;;expected-dwell (assigned to max-dwell).
        {:keys [max-bog max-dwell cycle-length max-mob]} (u/cycle-stats policy)
        tf (min tf max-dwell cycle-length)]
    (when (and (>= cycletime ts) ;;deployable
               (<  cycletime tf))
      (let [overlap     (inc (protocols/overlap policy))
            ctprojected (max (- cycletime overlap) ts)
            res         (long (- bogbudget (- tf ctprojected)))
            bound       (- bogbudget overlap)
            clamped     (inverse-clamp bound res)]
        (or (and (>= clamped 0) (<= clamped bound) clamped)
            ;;FIX: error message previously read "(overlap + 1]" - unbalanced
            ;;paren/bracket; the structured data in ex-data is unchanged.
            (throw (ex-info "prefill not in [0 .. bogbudget - (overlap + 1)]"
                            {:prefill          clamped
                             :policy-name      (protocols/policy-name policy)
                             :start-deployable ts
                             :stop-deployable  tf
                             :overlap+1        overlap})))))))

;;if we detect a prefill condition, we reduce the unit's
;;bog budget accordingly to space out deployments.
;;Attaches :prefill to the entity when prefill is enabled in the context
;;and a prefill value applies; otherwise returns the entity unchanged.
(defn set-prefill [ent policy cycletime ctx]
  (if-let [pf (and (prefill? ctx)
                   (compute-prefill ent policy cycletime))]
    (assoc ent :prefill pf)
    ent))

;;Our default spawning behavior is to use cycle to indicate.
;;There will be times we alter the methods in which a unit
;;spawns (initializes itself in the simulation).  It'd be nice
;;to break this out at some point, for now, we just let it go.

;;we can break the spawning behavior up into smaller tasks...
;;Find out where we're supposed to be. Do we have initial conditions?
;;Initial conditions are currently derived from cycletime and policy.
;;For instance, we narrowly assume that every unit exists somewhere in
;;on a cycle at t=0, rather than setting them in arbitrary deployments
;;to begin with.
This is limiting, we should be able to define ;;all kinds of initial conditions to perform upon spawn (like set ;;location, cycletime, etc.) For now, we just replicate the ;;cycletime-derived automated initial conditions logic. ;;Given a cycletime, where should we be according to policy? ;;Behavior to control how a unit acts when it spawns. ;;We're trying to account for the unit's initial state... ;;We move from spawning to the initial location. ;;We account for having been at the initial location for ;;timeinstate days (currently tied to cycletime - timetoposition). ;;So, what we really want to do is update the unit initially, possibly ;;with a negative time, and advance it forward to time 0 via the ;;deltat being the timeinstate. (befn spawning-beh ^behaviorenv {:keys [to-position cycletime tupdate statedata entity ctx] :as benv} (when (spawning? statedata) (let [ent @entity ;;we're now tracking default recovery in our context. {:keys [positionpolicy policy]} ent {:keys [curstate prevstate nextstate timeinstate timeinstateprior duration durationprior statestart statehistory]} statedata cycletime (or cycletime (:cycletime ent) 0) topos (if (not (or to-position positionpolicy)) (protocols/get-position (u/get-policy ent) cycletime) positionpolicy) nextstate (position->state policy positionpolicy) {:keys [timeinstate timeremaining cycletime position-time] :as stats} (compute-state-stats entity cycletime policy positionpolicy) _ (debug [:unit (:name ent) stats]) spawned-unit (-> ent (assoc :cycletime cycletime :default-recovery (core/default-recovery @ctx)) (u/initCycles tupdate) (u/add-dwell cycletime) (set-prefill policy cycletime @ctx) ;;added for optional prefill to space out deps. (assoc :last-update tupdate) (dissoc :spawn-info) ;eliminate spawning data. ) ;;may not want to do this.. _ (reset! 
entity spawned-unit) state-change {:newstate nextstate :duration timeremaining :followingstate nil :timeinstate timeinstate } _ (debug [:nextstate nextstate :state-change state-change :current-state (:state ent)]) ] (->> (assoc benv :state-change state-change :location-change {:from-location "Spawning" :to-location (or (:location (:spawn-info ent)) topos)} :next-position topos ;queue up a move... ) (log! (core/msg "Spawning unit " (select-keys (u/summary spawned-unit) [:name :positionstate :positionpolicy :cycletime]))) (beval (->seq [(echo :change-state) change-state-beh #_(fn [benv] (do (reset! ctx (supply/log-move! tupdate :spawning (:positionpolicy @entity) @entity @ctx)) (success benv)))] )))))) ;;While we're rolling, we want to suspend message processing. ;;We can do this by, at the outer level, dissocing the messages... ;;or, associng a directive to disable message processing... ;;we want to update the unit to its current point in time. Basically, ;;we are folding over the behavior tree, updating along the way by ;;modifying the context. One of the bits of context we're modifying ;;is the current deltat; assumably, some behaviors are predicated on ;;having a positive deltat, others are instantaneous and thus expect ;;deltat = 0 in the context. Note, this is predicated on the ;;assumption that we can eventually pass time in some behavior.... (befn roll-forward-beh {:keys [entity deltat statedata] :as benv} (do (debug [:<<<<<<<<begin-roll-forward (:name @entity) :last-update (:last-update @entity)]) (cond (spawning? statedata) (->seq [spawning-beh roll-forward-beh]) (pos? deltat) (loop [dt deltat benv benv] (let [sd (:statedata benv) timeleft (fsm/remaining sd) _ (debug [:sd sd]) _ (debug [:rolling :dt dt :remaining timeleft]) ] (if-y (if (<= dt timeleft) (do (debug [:dt<=timeleft :updating-for dt]) ;;this is intended to be the last update... ;;as if we're send the unit an update message ;;for the last amount of time... 
(beval (->seq [update-state-beh process-messages-beh]) ;we suspend message processing until we're current. (assoc benv :deltat dt))) (let [residual (max (- dt timeleft) 0) res (beval update-state-beh (assoc benv :deltat timeleft))] (if (success? res) (recur residual ;advance time be decreasing delta (val! res)) res))) nil))) :else (->seq [update-state-beh process-messages-beh])))) ;;So, at the high level, we have a simple behavior that checks to see ;;if it can move, finds where to move to, starts the process of ;;moving (maybe instantaneous), and waits... ;;We should consider move if our time in state has expired, or ;;if we have a next-location planned. (befn should-move? ^behaviorenv {:keys [next-position statedata] :as benv} (do (debug [:should? {:next-position next-position :remaining (fsm/remaining statedata) :spawning? (spawning? statedata) :wait-time (:wait-time benv)}]) (when (or next-position (zero? (fsm/remaining statedata)) ;;time is up... (spawning? statedata)) (success benv)))) (def locstates #{"Dwelling" "DeMobilizing" "Recovering" :dwelling :demobilizing :recovering :recovery}) (defn position=location? [newstate] (if (not (set? newstate)) (locstates newstate) (gen/some-member newstate locstates) )) ;;memoize this... (alter-var-root #'position=location? gen/memo-1) ;;after updating the unit bound to :entity in our context, ;;we commit it into the supplystore. This is probably ;;slow....we may want to define a mutable version, ;;or detect if mutation is allowed for a faster update ;;path. For instance, on first encountering the unit, ;;we establish a mutable cell to its location and use that ;;during the update process. ;;Given that we have the context for a move in place, ;;we want to move as directed by the context. If there ;;is a wait time associated with the place we're moving ;;to, we will add the wait-time to the context. That way, ;;downstream behaviors can pick up on the wait-time, and ;;apply it. 
;;Note: there's a potential problem where our assumptions about ;;deployability may be violated: If a policy change occurs, and ;;the old policy position ends up being the new policy position, ;;we bypass the position-change behavior to save time. If the ;;state-change happens, we still do it, but we miss - by virtue ;;of assuming position changes -> deployable changes - ;;the possibility that while the position may nominally ;;be the same between two policies, the state is not... ;;Case in point: ReqAnalysis_MaxUtilization_FullSurge_AC -> ;;TAA19-23_AC_1:2; for at least one case, we have a transition ;;from #{:deployable :c2 :dwelling} to #{:c2 :dwelling}, ;;while the position is still Ready...the fine difference is ;;that the preceding policy had [Reset :deployable] transition ;;to Ready, where the new policy is not deployable until ;;later in cycle. We end up not updating the deployability ;;of the unit, and it gets selected for a fill that - upon ;;deployment checks - is illegal under the new policy. ;;Solution: strengthen our definition of "no position change" ;;to include no state change....Positions are equal iff ;;they have the same state...the presence (or absence) of ;;:deployable is the key currently... (befn move->statechange ^behaviorenv {:keys [entity next-position location-change tupdate statedata state-change ctx] :as benv} (when-let [nextpos next-position] ;we must have a position computed, else we fail. (let [t tupdate u @entity frompos (get u :positionpolicy) ;;look up where we're coming from. wt (or (:wait-time benv) (get-wait-time u nextpos benv)) ;;how long will we be waiting? location-based? (:location-behavior u) ] (if (= frompos nextpos) ;;if we're already there... (do (debug [:no-movement frompos nextpos {:wt wt :state-change state-change}]) (if state-change (->seq [(->alter #(assoc % :wait-time nil :next-position nil)) check-deployable-state]) (success (dissoc benv :next-position))) ;do nothing, no move has taken place. 
No change in position. #_(success (if state-change (assoc benv :wait-time nil :next-position nil) (dissoc benv :next-position)))) ;do nothing, no move has taken place. No change in position. (let [_ (debug [:moving frompos nextpos]) newstate (or (get-state u nextpos) nextpos) ;;need to account for prescribed moves. newstate (if location-based? (into (-> (-> statedata :curstate) #_(disj nextpos)) newstate) newstate) _ (when (nil? newstate) (throw (Exception. (str [:undefined-transition newstate u frompos nextpos wt])))) state-change {:newstate newstate :duration wt :followingstate nil :timeinstate 0 } _ (reset! entity (-> (if location-based? (dissoc u :location-behavior) u) (traverse-unit t frompos nextpos) )) ;update the entity atom ;;if we already have a location change set, then we should respect it. from-loc (:locationname u) to-loc (if-let [newloc (:next-location benv)] (do (debug [:preset-location newloc :From from-loc]) newloc) (if (position=location? newstate) nextpos from-loc)) ;_ (println [from-loc to-loc]) ] (bind!! ;update the context with information derived ;from moving {:position-change {:from-position frompos ;record information :to-position nextpos} :state-change state-change :location-change (or location-change (when (not (identical? from-loc to-loc)) {:from-location from-loc :to-location to-loc})) :wait-time nil :next-position nil :next-location nil} )) )))) (def movekeys #{:position-change :state-change :location-change}) (befn prescribed-move->statechange {:keys [prescribed-move tupdate] :as benv} (when prescribed-move (success (reduce-kv (fn [acc k v] (if v (assoc acc k v) acc)) (assoc benv :prescribed-move nil) prescribed-move) ))) (defn prescribed? [e tupdate] (when-let [pm (val-at @e :prescribed-move)] (== (val-at pm :t) tupdate))) ;;PERFORMANCE NOTE: <HOTSPOT> - eliding debug info here saves time... ;;This hooks us up with a next-position and a wait-time ;;going forward. 
;;We also now allow prescribed moves to
;;be set, for things like location-specific policies..
;;find-move: determine the entity's next move.  If a prescribed move was
;;staged for exactly this update time (per `prescribed?`), consume it and
;;bind it to :prescribed-move.  Otherwise derive the next position from the
;;entity's policy (honoring a pre-bound :next-position) and compute the
;;wait time at that position, binding :next-position and :wait-time.
(befn find-move ^behaviorenv {:keys [entity next-position wait-time tupdate] :as benv}
  (if (prescribed? entity tupdate) ;;we have a move set up..
    (let [pm (:prescribed-move @entity)
          _  (debug [:found-prescribed-move pm])]
      ;;clear the staged move from the entity so it cannot fire twice.
      (do (swap! entity dissoc :prescribed-move)
          (bind!! {:prescribed-move pm})))
    ;;let's derive a move...
    (let [e          @entity
          currentpos (:positionpolicy e)
          ;_ (when (= currentpos :re-entry) (println (:tupdate benv)))
          p  (or next-position
                 (do (debug [:computing-position currentpos]) ;;performance 1
                     (get-next-position (:policy e) currentpos)))
          wt (if (and next-position wait-time)
               wait-time
               (do (debug [:computing-wait (:positionpolicy e)]) ;;performance 2
                   ;;WARNING: This may be using the following wait time...is that what we mean?
                   ;;Given the current position, it's determining how long to wait in the next position.
                   ;;I think we're good...should rename get-wait-time to something more appropriate.
                   ;;get-next-wait-time?
                   (get-wait-time @entity (:positionpolicy e) benv)))
          _  (debug [:found-move {:current-position currentpos :next-position p :wait-time wt}])]
      (bind!! {:next-position p :wait-time wt } ;;have a move scheduled...
              ))))

;;We know how to wait.  If there is an established wait-time, we
;;request an update after the time has elapsed using update-after.
;;A zero wait skips the update request and goes straight back through
;;update-state-beh.
(befn wait ^behaviorenv {:keys [wait-time] :as benv}
  (when-let [wt wait-time] ;;if we have an established wait time...
    (do #_(debug [:sdb (:statedata benv) :sde (:statedata @(:entity benv))])
        (if (zero? wt)
          ;;skip the wait, instantaneous.  No need to request an
          ;;update.
          (do (debug [:instantly-updating])
              update-state-beh)
          (do (debug [:waiting wt])
              (update-after benv))))))

;;Note: start-cycle looks somewhat weak.  Can we fold this into
;;another behavior?
;;Units starting cycles will go through a series of procedures.
;;Possibly log this as an event?
;;start-cycle: zero the entity's :cycletime and stamp :date-to-reset with
;;the current update time; always succeeds.
(befn start-cycle {:keys [entity deltat tupdate] :as benv}
  (do (swap! entity #(merge % {:cycletime 0
                               :date-to-reset tupdate}))
      (success benv)))

;;legacy implemenation. no longer using policystack.
;; (befn start-cycle {:keys [entity deltat tupdate] :as benv}
;;   (let [unit   @entity
;;         pstack (:policystack unit)]
;;     (do (swap! entity #(merge % {:cycletime 0
;;                                  :date-to-reset tupdate}))
;;         (if (pos? (count pstack))
;;           (bind!! {:policy-change {:next-policy (first pstack)}})
;;           (success benv)))))

;;We may not care about cycles....
;;Should be able to specify this in our collections logic, go faster...
;;Units ending cycles will record their last cycle locally.  We broadcast
;;the change...Maybe we should just queue this as a message instead..
;;end-cycle: close out the entity's current cycle at tupdate, record the
;;completed cycle on the unit, and fire a :CycleCompleted event.
(befn end-cycle {:keys [entity ctx tupdate] :as benv}
  (let [cyc (assoc (:currentcycle @entity) :tfinal tupdate)
        _   (swap! entity (fn [unit]
                            (-> unit
                                (assoc :currentcycle cyc)
                                (u/recordcycle tupdate))))
        ;;notify interested parties of the event...
        _   (swap! ctx (fn [ctx]
                         (sim/trigger-event :CycleCompleted
                                            (:name @entity)
                                            :SupplyStore
                                            (str (:name @entity) " Completed A Cycle")
                                            nil ctx)))]
    (success benv)))

;;dunno, just making this up at the moment until I can find a
;;definition of new-cycle.  This might change since we have local
;;demand effects that can cause units to stop cycling.
;;Wow...just got burned on this..strings are no good for identity
;;checks....since some are interned and some ore instances. wow....
;;new-cycle?: true when moving to `topos` begins a new policy cycle, i.e.
;;the target is the policy's start-state and we are not arriving from
;;:recovered (which covers nonbog re-entry).
(defn new-cycle? [unit frompos topos]
  (and (not= frompos :recovered) ;;additional criteria to cover nonbog reentry.
       (= (protocols/start-state (:policy unit)) topos)))

;;We check to see if there was a position change, and if so, if that
;;change caused us to finish a policy cycle.  Note: this only applies
;;in cyclical policies.
;;Note: We want to preclude finishing cycles if we are applying a
;;policy change.  We handle that in another state.  This keeps us
;;from entering into a policy change that sends us to reset, and
;;automagically terminates the current cycle stats.  Consistent with M3.
;;finish-cycle: on a position change, determine whether the move completed
;;a policy cycle (per new-cycle?).  Skipped for freshly-spawned units and
;;when a policy change already occurred this step; otherwise runs
;;start-cycle, end-cycle, and any deferred policy change in sequence.
(befn finish-cycle ^behaviorenv {:keys [entity position-change changed-policy policy-change] :as benv}
  (when position-change
    (let [{:keys [from-position to-position]} position-change
          no-spawn? (not (just-spawned? benv))
          new-cyc?  (new-cycle? @entity from-position to-position)
          ;_ (println [:check-cycle no-spawn? new-cyc? (not policy-change) (:tupdate benv)])
          ]
      (when (and no-spawn? new-cyc? (not changed-policy)) ;;If we changed-policy already, preclude...
        (do (debug [:finishing-cycle (:name @entity) from-position])
            (->seq [start-cycle
                    end-cycle
                    try-deferred-policy-change]))))))

;;Now that we have prescribed moves, the entities are going into
;;an overlapping state, but it's a state set..
;;overlapping?: true when x denotes the overlapping position/state, either
;;as the protocol constant or the :overlapping keyword (identity checks).
(defn overlapping? [x]
  (or (identical? x protocols/Overlapping)
      (identical? x :overlapping)))

;;this is really a behavior, modified from the old state. called from overlapping_state.
;;used to be called check-overlap.
;;disengage: if an :overlapping-position [location op] was marked earlier
;;(see mark-overlap), notify the demand store that the unit is disengaging
;;at that location, then clear the mark.  `res` is true when the unit is
;;moving *to* overlap.
(befn disengage {:keys [entity ctx overlapping-position tupdate] :as benv}
  (when-let [opvec overlapping-position]
    (let [[lname op] opvec
          _   (debug [:overlapping-prescribed op])
          _   (debug [:disengaging (:name @entity) (:locationname @entity)])
          res (identical? op :to)
          _   (swap! ctx ;;update the context...
                     #(d/disengage (core/get-demandstore %)
                                   (assoc @entity :last-update tupdate)
                                   lname
                                   %
                                   res #_true))]
      (success (assoc benv :overlapping-position nil)))))

;; (when overlap-detected
;;   (when (not (identical? res :none)) ;ugh?
;;     (do (debug [:disengaging (:name @entity) (:locationname @entity)])
;;         (swap! ctx ;;update the context...
;;                #(d/disengage (core/get-demandstore %)
;;                              @entity (:locationname @entity) % res))
;;         (success benv)))))))

;;used to be called check-overlap;
;;Alias preserved for legacy callers.
(def check-overlap disengage)

;;Note: This behavior ASSUMES position changes within the
;;same policy.  We can't have changed policies and assume
;;this works.  Need an invariant to cover that.
;;Performance: We have a mild hotspot when we eagerly update
;;deployability via supply/update-deploy-status.  Might be possible to
;;update deployability lazily, save a little bit.  We're typically
;;"not" deploying...
#_(befn check-deployable ^behaviorenv {:keys [entity position-change ctx] :as benv}
    (when position-change
      (let [{:keys [from-position to-position]} position-change
            u @entity
            p (:policy u)
            _ (debug [:checking-deployable-position :from from-position :to to-position])]
        (when (or (not= (protocols/deployable-at? p from-position)
                        (protocols/deployable-at? p to-position))
                  #_(unit/can-non-bog? u))
          (do (debug [:deployable-changed! from-position to-position])
              (swap! ctx #(supply/update-deploy-status u nil nil %))
              (success benv)))) ))

;;update-deploy-status: push a deployability refresh through the supply
;;system.  Arity [u ctx] returns a behavior (via ->alter) that swaps the
;;refresh into ctx when evaluated; arity [benv] pulls the entity and ctx
;;out of the behavior environment for convenience.
(defn update-deploy-status
  ([u ctx] (->alter (fn [benv]
                      (do (swap! ctx
                                 #(supply/update-deploy-status u nil nil %))
                          benv))))
  ([benv]  (update-deploy-status @(:entity benv) (:ctx benv))))

;;Begavior note:
;;When units change policy, they may come from (as in RA) a finite policy
;;with a larger bog budget than the target policy, and have bogged (longer
;;than the bog of the new policy), and end up in a position/state that
;;is identical to the old policy.  So, on the surface, we have no
;;state change; no position change, no indicator of deployability change,
;;yet the unit is not technically deployable, since it has no bogbudget.
;;M3 addressed this by adding an automatic update deployability
;;check at the end of policy change, regardless.
;;M3 also added an additional check, where bogbudget exists,
;;but the deployable time is less than the new policy's overlap,
;;causing a negative cycle time error to occur.  This second
;;conditions sends the unit to reset if the modified bogbudget
;;< newpolicy.overlap.
;;check-deployable: when a position change occurred (and no policy change
;;this step), compare policy deployability at the from/to positions and
;;refresh deploy status if it flipped.  When a policy change DID occur,
;;fall through to check-deployable-state instead.
(befn check-deployable ^behaviorenv {:keys [entity position-change changed-policy ctx] :as benv}
  (when position-change
    (if-not changed-policy
      (let [{:keys [from-position to-position]} position-change
            u @entity
            p (:policy u)
            _ (debug [:checking-deployable-position :from from-position :to to-position])]
        (when (or (not= (protocols/deployable-at? p from-position)
                        (protocols/deployable-at? p to-position))
                  #_(unit/can-non-bog? u))
          (do (debug [:deployable-changed! from-position to-position])
              (update-deploy-status u ctx)
              #_(swap! ctx #(supply/update-deploy-status u nil nil %))
              #_(success benv))))
      check-deployable-state) ))

;;Suggestion: To deal with the fact that deployability may change
;;without nominal position changes (but state changes), we should
;;add in the ability to check for a state-change fallback.
;;check-deployable-state: like check-deployable, but keyed off a pending
;;:state-change -- refreshes deploy status when deployability differs
;;between the unit's current state and the new state.
(befn check-deployable-state ^behaviorenv {:keys [entity state-change ctx] :as benv}
  (when state-change
    (let [u          @entity
          from-state (marathon.ces.unit/unit-state u)
          to-state   (:newstate state-change)
          _ (debug [:checking-deployable-state :from from-state :to to-state])]
      (when (not= (protocols/deployable-state? from-state)
                  (protocols/deployable-state? to-state))
        (do (debug [:deployable-changed! from-state to-state])
            (update-deploy-status u ctx)
            #_(swap! ctx #(supply/update-deploy-status u nil nil %))
            #_(success benv))))))

;; (befn check-deployable ^behaviorenv {:keys [entity position-change state-change ctx] :as benv}
;;   (when position-change
;;     (let [{:keys [from-position to-position]} position-change
;;           u @entity
;;           p (:policy u)
;;           _ (debug [:checking-deployable :from from-position :to to-position])]
;;       (when (or (not= (protocols/deployable-at? p from-position)
;;                       (protocols/deployable-at? p to-position))
;;                 #_(unit/can-non-bog? u))
;;         (do (debug [:deployable-changed! from-position to-position])
;;             (swap! ctx #(supply/update-deploy-status u nil nil %))
;;             (success benv))))))

;;mark-overlap: on a position change into or out of an overlapping
;;position, record [locationname :to|:from] as :overlapping-position in
;;the benv for disengage/check-overlap to consume later.
(befn mark-overlap {:keys [entity position-change] :as benv}
  (when-let [change position-change]
    (let [{:keys [to-position from-position]} position-change
          ;;overlapping is not triggering because we only hae on definition of
          ;;overlapping per the keyword.  There's a string version that shows
          ;;up.
          res (cond (overlapping? to-position)   :to   ;true
                    (overlapping? from-position) :from ;false
                    :else :none) ]
      (when (not (identical? res :none))
        (do (debug [:marking-overlap res])
            (success (assoc benv :overlapping-position [(:locationname @entity) res])))))))

;;When there's a change in position, we want to do all these things.
;;change-position: log the position change, update the entity's
;;:positionpolicy, then run deployability/cycle/overlap bookkeeping and
;;clear the :position-change / :next-position keys from the benv.
(befn change-position [entity position-change tupdate ctx]
  (when position-change
    (let [{:keys [from-position to-position]} position-change]
      (do (debug [:changed-position from-position to-position])
          (reset! ctx (supply/log-position! tupdate from-position to-position @entity @ctx)) ;ugly, fire off a move event.check-overlap
          (reset! entity (assoc @entity :positionpolicy to-position))
          (->seq [check-deployable ;;now being checked a bit more universally...
                  finish-cycle
                  mark-overlap
                  (->alter #(assoc % :position-change nil
                                     :next-position   nil))])))))

;;Performance: Mild hotspot.  Dissocing costs us here.  Change to assoc and
;;check.
;;if there's a location change queued, we see it in the env.
;;change-location: push the new location onto the unit and log the move,
;;then clear :location-change from the benv.
(befn change-location {:keys [entity location-change tupdate ctx] :as benv}
  (when location-change
    (let [;#_{:keys [from-location to-location]} #_location-change ;minor improvement..
          from-location (val-at location-change :from-location) ;;OMG, typo on location...was loction!!!
          to-location   (val-at location-change :to-location)
          _ (debug [:location-change location-change])
          _ (reset! entity (u/push-location @entity to-location))
          _ (reset! ctx (supply/log-move! tupdate from-location to-location @entity nil @ctx)) ]
      ;;we need to trigger a location change on the unit...
      (success (assoc benv :location-change nil)))))

;;this is a weak predicate..but it should work for now.
;;demand?: treats any entity with a :source-first component as a demand.
(defn demand? [e]
  (not (nil? (:source-first e))))

;;we can do this like a scalpel..
;;All that matters is that the demand fill changes.
;;We ensure we remove the unit from the demand's
;;assignment, and then remove the unit from the demand,
;;and update the fill status of the demand.
;;If we leave a demand, we need to update its information
;;and change fill status.
;;is the movement causing a change in fill?
;;change-fill: when leaving a location that is a demand, remove the unit
;;from that demand's assignment (updating the demand's fill status).
(befn change-fill {:keys [entity location-change ctx] :as benv}
  (when location-change
    (let [{:keys [from-location]} location-change]
      (when (demand? (store/get-entity @ctx from-location))
        (swap! ctx ;;update the context...
               #(d/remove-unit-from-demand (core/get-demandstore %)
                                           @entity from-location %))
        (success benv)))))

;;with a wait-time and a next-position secured,
;;we can now move.  Movement may compute a statechange
;;in the process.
;;execute-move: the full movement pipeline -- derive the state change
;;(prescribed or computed), then apply position/fill/location/state
;;changes, handle overlap disengagement, and finally wait.
(def execute-move
  (->seq [(echo :<move->statechange>)
          (->or [prescribed-move->statechange
                 move->statechange])
          (echo :<change-position>)
          change-position
          (echo :<change-fill>)
          change-fill ;;newly added...
          (echo :<change-location>)
          change-location
          change-state-beh
          (echo :<check-overlap>) ;moved before change-position
          check-overlap ;;Added, I think I missed this earlier...
          (echo :waiting)
          wait
          ]))

;;Movement is pretty straightforward: find a place to go, determine
;;any changes necessary to "get" there, apply the changes, wait
;;at the location until a specified time.
(def moving-beh
  (->and [(echo :moving-beh)
          should-move? ;if there is a next position or our time in state expired.
          find-move    ;determine the wait-time, and possibly the next-position to wait at.
          (echo :execute-move)
          execute-move
          ]))

;;PERFORMANCE NOTE: Minor HotSpot
;;Changed to == instead of zero? due to minor perf issues.
;;State handler for generic updates that occur regardless of the state.
;;These are specific to the unit data structure, not any particular state.
;;Should we keep a timestamp with the unit?  That way we can keep track
;;of how fresh it is.
;;age-unit: generic per-update aging applied regardless of state.  With no
;;elapsed time it succeeds immediately; otherwise adds the elapsed
;;duration to the unit (unless it is waiting), zeroes :deltat in the benv,
;;and ages the fsm statedata by dt.
;;NOTE(review): :last-update is bound to (unchecked-inc deltat), not the
;;current simulation time -- looks suspicious, confirm intent.
(befn age-unit ^behaviorenv {:keys [deltat statedata entity ctx] :as benv}
  (let [^long dt (or deltat 0)]
    (if (== dt 0)
      (success benv) ;done aging.
      (let [e @entity
            ;_ (println [:currentcycle (:currentcycle e)])
            _ (when-not (u/waiting? e)
                (swap! entity #(u/add-duration % dt)))
            #_(debug [:skipping :add-duration (:name entity)])
            ;;update the entity atom
            _ (debug [:aging-unit deltat :cycletime (:cycletime @entity)]) ]
        (bind!! {:deltat 0 ;is this the sole consumer of time?
                 :last-update (unchecked-inc deltat)
                 :statedata (fsm/add-duration statedata dt)})))))

;;Dwelling just increments statistics..
(befn dwelling-beh ^behaviorenv {:keys [entity deltat] :as benv}
  (when (pos? deltat)
    (do (debug [:dwelling deltat])
        (swap! entity #(u/add-dwell % deltat))
        (success benv))))

;;Bogging just increments stastistics..
(befn bogging-beh ^behaviorenv {:keys [entity deltat] :as benv}
  (when (pos? deltat)
    (do (debug [:bogging deltat])
        (swap! entity #(u/add-bog % deltat))
        (success benv))))

;;modernizing-beh: fires only when the remaining time-in-state equals the
;;elapsed deltat (i.e. the modernization period just completed); decrements
;;the unit's :mod level by one and logs the transition.
(befn modernizing-beh ^behaviorenv {:keys [entity statedata deltat] :as benv}
  (when (and (pos? deltat)
             (= (spork.ai.machine/remaining statedata) deltat))
    (let [unit  @entity
          uname (:name unit)
          from  (:mod unit)
          to    (dec from)
          _     (swap! entity assoc :mod to)]
      (->> benv
           (log! (core/msg "Modernized unit " (:name unit)
                           " from " from " to " to) )
           success))))

;;This is a little weak; we're loosely hard coding
;;these behaviors.  It's not terrible though.
;;special-state: dispatch from the entity's current :state to the behavior
;;that handles it; any state not listed falls through to (fail benv).
(befn special-state {:keys [entity statedata] :as benv}
  (case (:state (deref!! entity) #_@entity)
    :spawning        spawning-beh
    :abrupt-withdraw (do (debug [:<special-state-abw>])
                         abrupt-withdraw-beh)
    :recovery        recovery-beh ;moving-beh
    ;;setup the move to recovered.
    :recovered       (->and [(echo :recovered-beh)
                             (->seq [re-entry-beh
                                     ;;TODO: Optimize.  We can skip the re-entry,
                                     ;;go to policy-change directly.
                                     (->if (fn [{:keys [entity]}]
                                             (zero? (u/get-bog @entity)))
                                           try-deferred-policy-change)])
                             ;reset-beh
                             ])
    ;; I think we need to implement these.
    ;; :modernizing modernizing-beh
    ;; :modernized modernized-beh
    ;:waiting (success benv) ;up-to-date
    (fail benv)))

;;rest-beh is kind of what we want to do.  We'd like to
;;compute the unit's now position in its old policy.
;;What about pending policy changes? [how'd marathon handle them in vba?]
;;I think we deferred until reset actually.
;;Follow-on state is an absorbing state, where the unit waits until a changestate sends it elsewhere.
;;The only feasible state transfers are to a reentry state, where the unit re-enters the arforgen pool
;;in a dynamically determined position, or the unit goes to another demand compatible with the
;;followon code.
;;followon-beh: when the unit carries a followon code, register it as a
;;possible follow-on with the supply store, re-sync the entity atom with
;;the stored entity (see note below), and park it in :followon with an
;;infinite wait via moving-beh.
(befn followon-beh {:keys [entity ctx] :as benv}
  (let [fc (u/followon-code @entity)
        _  (debug [:trying-followon (:name @entity) fc])]
    (when fc ;if the unit has a followon code
      (do ;register the unit as a possible followOn
        ;(println [(:name @entity) :added-followon :for [fc]])
        ;;Note: we have a problem here, since add-followon ends up getting our entity
        ;;out-of-sync with the entity reference stored in the context...
        ;;We add a bunch of components to the entity, like :followon, which may
        ;;end up getting ditched when we merge the entity atom in at the end
        ;;of the transaction...
        (swap! ctx #(supply/add-followon (core/get-supplystore %) @entity %))
        (reset! entity (-> (store/get-entity @ctx (:name @entity))
                           (merge {:state :followon})))
        ;age-unit
        (debug [:waiting-in-followon-status fc])
        (->seq [(->alter (fn [b] (merge b {:wait-time +inf+
                                           :next-position :followon ;(:positionpolicy @entity) ;:followon
                                           :next-state :followon;:abruptwithdraw
                                           })))
                moving-beh]) ;?
        ))))

;;way to get the unit back to reset.  We set up a move to the policy's start state,
;;and rip off the followon code.  Added a formal reset policy for
;;reset evaluation associated with policy changes.
;;reset-beh: send the unit to the start-state of its (or a supplied reset)
;;policy.  Restores a fresh bog budget, strips any followon code, and
;;executes the move via moving-beh with the computed wait time.
(befn reset-beh {:keys [entity reset-policy] :as benv}
  (let [pos (protocols/start-state (or reset-policy (:policy @entity)))
        wt  (if-not reset-policy
              (immediate-wait-time @entity pos benv)
              ;;supplied reset policy implies a move to reset with note
              ;;added transfer time; time remaining in state is ignored.
              (protocols/transfer-time reset-policy pos
                                       (get-next-position reset-policy pos)))
        _   (debug [:immediate-reset :from (:positionpolicy @entity) :to pos :wait-time wt])
        newbogbudget (u/max-bog @entity)
        _   (swap! entity #(-> %
                               (assoc :followoncode nil)
                               (assoc-in [:currentcycle :bogbudget] newbogbudget))) ]
    (beval moving-beh
           (assoc benv :next-position pos :wait-time wt))))

;; 'A state to handle reentry into the available pool....
;;Positions in which re-entry cannot be handled.
(def invalid? #{"Deployed" "Overlapping"})

;;Note:
;;Attempting to match m3 behavior exactly.  Units re-entering
;;with 0 bog and a pending policy change should go ahead
;;and change policies vs. going through re-entry in the
;;current cycle's policy.
;;Kind of like reset, except it's not guaranteed we go to reset.
;;re-entry-beh: place the unit back into its policy at the position
;;implied by its accumulated cycletime, compute the remaining wait there,
;;announce the re-entry via a supply update, and drive the state change.
;;Throws when cycletime is negative or the unit is deployed/overlapping.
(befn re-entry-beh {:keys [entity ctx tupdate] :as benv}
  (let [unit        @entity
        p           (:policy unit)
        current-pos (:positionpolicy unit)
        ct          (:cycletime unit)
        _ (when (< ct 0) (throw (Exception. (str "Cycle Time should not be negative!"))))
        _ (when (invalid? current-pos)
            (throw (Exception. "Cannot handle during deployment or overlap")))
        is-deployable (protocols/deployable-by? p ct)
        positionA     current-pos
        positionB     (protocols/get-position p ct)
        _ (when (invalid? positionB)
            (throw (Exception. (str "Cannot handle during deployment or overlap: " positionB))))
        timeremaining (protocols/transfer-time p positionB
                                               (protocols/next-position p positionB))
        timeinstate   (- ct (protocols/get-cycle-time p positionB)) ;;this ends up being 0.
        wt            (max (- timeremaining timeinstate) 0)
        _ (debug [:re-entry {:cycletime ct
                             :current-pos current-pos
                             :next-pos positionB
                             :timeinstate timeinstate
                             :timeremaining timeremaining
                             :wt wt}])
        state-change {:newstate       (get-state unit positionB)
                      :duration       timeremaining
                      :followingstate nil
                      :timeinstate    timeinstate }
        _ (reset! ctx (->> @ctx
                           ;; (supply/log-position! tupdate positionA positionB unit)
                           (supply/supply-update! {:name "SupplyStore"} unit
                                                  (core/msg "Unit " (:name unit) " ReEntering at " positionB
                                                            " with " (:bogbudget (:currentcycle unit)) " BOGBudget."))))
        _ (reset! entity (assoc unit :followoncode nil))]
    (beval change-state-beh
           (assoc benv :state-change state-change
                  ;; :position-change {:from-position positionA
                  ;;                   :to-position positionB}
                  :wait-time wt
                  :next-position positionB))))

;;Function to handle the occurence of an early withdraw from a deployment.
;;when a demand deactivates, what happens to the unit?
;;The behavior will be guided by (the unit's) policy.
;;The default behavior is that a unit will check its policy to see if it CAN deploy.
;;If policy says it's okay, the unit will return to the point time of its current lifecycle.
;;We can parameterize the penalty it takes to get back into lifecycle from deployment.
;; A usual penalty is a move to "90 days of recovery"
;;Note, we can also specify if the unit is instantly available to local demands.
;;Recovery should now be an option by default, not specifically dictated by
;;policy.
;;1)Consult policy to determine if entry back into available / ready pool is feasible.
;;TOM note 18 july 2012 -> this is erroneous.  We were check overlap....that's not the definition of
;;a unit's capacity to re-enter the available pool.
;;uuuuuuuge hack....gotta get this out the door though.
;;Policy names for which recovery is disallowed.
(def non-recoverable #{"SRMAC" "SRMRC" "SRMRC13"})

;;we no longer use the default +recovery-time+ shim,
;;now we consult policy or fallback to the :DefaultRecoveryTime
;;parameter.
(def policy-recovery-time (memo1-policy (fn policy-rec [p] (or (:recovery p) ;;srm policies have a :recovery field. (marathon.data.protocols/transfer-time p :recovery :recovered))))) (defn recovery-time ([unit p] (or (policy-recovery-time (-> p marathon.data.protocols/get-active-policy)) (:default-recovery unit))) ([unit] (recovery-time unit (:policy unit)))) ;;We need to modify this to prevent any srm units from recovering. (defn can-recover? [unit] (let [cyc (:currentcycle unit) p (:policy unit) rt (recovery-time unit p)] (when (and (not (non-recoverable (protocols/policy-name p))) (pos? (:bogbudget cyc)) (< (+ (:cycletime unit) rt) (:duration-expected cyc))) rt))) (befn recovery-beh {:keys [entity deltat ctx] :as benv} (let [unit @entity] (if-let [t (can-recover? unit)] (do (debug [:unit-can-recover (:name unit)]) (move! :recovered t)) ;;recovery is now determined by policy or parameters. (let [cyc (:currentcycle unit) ct (:cycletime unit) dur (:duration-expected cyc)] (swap! ctx #(sim/trigger-event :supplyUpdate (:name unit) (:name unit) (core/msg "Unit " (:name unit) " Skipping Recovery with " (:bogbudget (:currentcycle unit)) " BOGBudget " ct "/" dur " CycleTime " ) nil %)) (reset! entity (assoc-in unit [:currentcycle :bogbudget] 0)) #_moving-beh reset-beh)))) ;;On second thought, this is sound. If the unit is already in overlap, it's in a terminal state.. ;;For followon eligibility, it means another unit would immediately be overlapping this one anyway, ;;and the demand would not be considered filled....It does nothing to alleviate the demand pressure, ;;which is the intent of followon deployments. Conversely, if overlap is 0, as in typical surge ;;periods, then units will always followon. I take back my earlier assessment, this is accurate. ;;Note: We need to ensure this behavior fails if called from incompatible circumstances... ;;We can only call this on units that are actually deployed/bogging. 
;;abrupt-withdraw-beh: handle an early withdrawal from a deployment.
;;Accrues any elapsed bog, then: with no boggable time left (bog budget
;;minus policy overlap), send the unit home via reset; otherwise try a
;;follow-on deployment first, falling back to recovery.
(befn abrupt-withdraw-beh {:keys [entity deltat] :as benv}
  (let [_    (when (pos? deltat)
               (swap! entity #(u/add-bog % deltat)))
        unit @entity
        ;1)
        bogremaining (- (:bogbudget (:currentcycle unit))
                        (protocols/overlap (:policy unit)) ;;note: this overlap assumption may not hold...
                        )
        _ (debug [:abw-beh {:deltat deltat
                            :bogremaining bogremaining
                            :unt (:name unit)
                            :fc  (:followoncode unit)
                            ;:unit (dissoc unit :policy)
                            }])]
    (if (not (pos? bogremaining))
      ;makes no sense for the unit to continue BOGGING, send it home.
      ; (->and [(echo [:abw->reset {:bogremaining bogremaining}])
      reset-beh
      ;])
      (->or ;unit has some feasible bogtime left, we can possibly have it followon or extend its bog...
            ;A follow-on is when a unit can immediately move to fill an unfilled demand from the same
            ;group of demands.  In otherwords, its able to locally fill in.
            ;This allows us to refer to forcelists as discrete chunks of data, group them together,
            ;and allow forces to flow from one to the next naturally.
       [followon-beh
        recovery-beh]))))

;;Policy Changes
;;==============
;;Changing policies in legacy MARATHON involves something called the "policy stack"
;;and a subscriber model where unit's "subscribe" to a parent policy (typically
;;a composite policy defined over multiple simulation periods).  Changes in the
;;period cause changes in policy, which propogate to changes in subscribers'
;;policy.  Policy changes are typically limited to "non-deployed" states or
;;dwelling states.  That is, units may not permissively change the structure
;;of their policy while "in-use" by a demand.
;;In this case, the policy change is tracked by keeping the policy change
;;stack non-empty.  When the unit cycles through a state in which policy
;;changes can occur, it finds a pending change and converts to the new
;;atomic policy.
;;Positions from which an immediate policy change is infeasible.
(def infeasible-policy-change? #{"Deployed" "Overlapping" "DeMobilization"})

;;can-change-policy?: a policy change is feasible when the unit's cycle
;;proportion projects into the new policy (<= 1) and it is not in one of
;;the terminal positions above.
(defn can-change-policy? [cycle-proportion from-pos]
  (and (<= cycle-proportion 1)
       (not (infeasible-policy-change? from-pos))))

;; 'TOM Change 13 Jul 2011
;; 'Needed to implement the transition from one policy to another.  I chose to add a state to handle just this.
;; 'Visual analysis showed that PolicyChange looks a lot like Spawn, in that when a unit changes policies, it must change
;; 'a lot of its internal state to follow the new policy.  The result of the policy change is:
;; ' 1: The unit's cycle time is normalized, and then transformed into the relevant cycletime in the new policy.
;; ' 2: The unit's position "may" change to reflect its position in the new policy.
;; ' 3: The unit's location "may" change to reflect its location in the new policy.
;; 'TOM Change 20 April:
;; ' 4: The unit's BOGBudget "may" change to reflect either increased, or decreased, BOGBudget.
;; 'TOM Change 24 April:
;; ' 5: The unit's BOGBudget and MAXBOG may only change (increase) as the result of a policy change.
;; ' 6: Policy changes can NOT happen during terminal states:
;; ' [Deployed {Bogging, Overlapping}, Demobilizing]
;; 'If NOT deployed (bogging, overlapping) or in a terminal state (demobilizing), then entities can change policy immediately.
;; 'Otherwise, units change policy upon next reset (change is deferred).
;; 'Assumes that the new policy is already set for the unit (i.e. the unitdata is pointing toward the new policy).
;; 'Ideally, an outside agent will have modified the unit's policy, and subsequently told it to changestates to a policy-change
;; 'state.
;; 'Net effect is that policy changes to the same policy are idempotent.
;; 'State to control how a unit acts when it changes policy.
;; 'Note -> we extract the next policy from the unitdata's policy stack.
;; 'TOM note -> figure out how to change this for the deployed population...they have negative cycle
;; 'times.
;; 'Note -> this assumes we have compatible policies, or at least policies that have a cyclical
;; 'rotational lifecycle.
;; Function PolicyChange_State(unit As TimeStep_UnitData, deltat As Single) As TimeStep_UnitData
;;WIP Nov 2016
;;policy-change-state: entry point for a pending :policy-change.  Waiting
;;units always defer.  Otherwise compute the unit's cycle proportion in
;;its current policy and either stage the change for apply-policy-change
;;(when can-change-policy? allows) or defer it until reset.
(befn policy-change-state ^behaviorenv {:keys [entity wait-time tupdate policy-change ctx] :as benv}
  (when policy-change ;;we have a change.
    (if (u/waiting? @entity)
      (do (debug [:deferring-policy-change-while-waiting])
          defer-policy-change) ;;units in waiting must defer policy changes!
      (let [next-policy (:next-policy policy-change)
            unit        @entity
            tnow        tupdate
            _ (assert (pos? (protocols/bog-budget next-policy)) "No bog budget!")
            current-policy (:policy unit)
            ;;'TOM Change 20 April -> We need to separate the unit's experienced
            ;;'cycle length vs the NOMINAL cycle duration, which exists in
            ;;'POLICY SPACE.  In composite rotational policies, the NOMINAL cycle duration
            ;;'changes when Atomic policies change.  Specificallly, we map the unit's position
            ;;'or coordinates in the current atomic policy to coordinates in the new policy.
            ;;'The unit's actual experienced lifecycle, i.e. its cycletime property, is not
            ;;'an accurate mapping between policies.  The implicit assumption is that when
            ;;'mapping from one policy to another, if the policies have differing cycle lengths
            ;;'then there is a discount or exchange rate between the policies, such that time
            ;;'spent in one policy is NOT equal to time spent in another.  However, our
            ;;'unit's cyclelength property is not subject to this, since it technically
            ;;'exists OUTSIDE of the policy view of time.  The cyclelength property reflects the
            ;;'actual time a unit has spent, under ANY policy, until it has reset or started a
            ;;'new cycle.
            ;;'Prior to 19 April 2012, The unit's ability to deploy, via the CanDeploy method,
            ;;'depended on it's position in the current policy as a function of the cyclelength property.
            ;;'We should prefer the duration of the current cycle record, which is an accurate reflection
            ;;'of the relative time in the unit's current policy.
            ;;'TOM Change 20 April 2012
            cycletimeA (:cycletime unit)
            PositionA  (:positionpolicy unit)
            ;; _ (println [:name (:name unit) :cycletimeA cycletimeA
            ;;             :positionA PositionA (assoc benv :ctx nil)])
            _ (assert (not (neg? cycletimeA))
                      (str {:msg "Cycletime should not be negative!"
                            :cycletime cycletimeA
                            :unit (:name unit)
                            :t tupdate}))
            ;;We run into a problem here: when changing from an infinite policy to
            ;;a finite policy, despite units having a substational amount of cycletime - exceeding
            ;;the cyclelength of the new policy in fact - our proportion is computed as a function
            ;;of the time in the current cycle.  It works out that any unit transitioning
            ;;will get shucked into a 0.0 truncated cycle proportion coordinate....
            ;;The net effect is that, regardless of how much supply we get, this artificially
            ;;"resets" our surplus supply by shoving them all back to the start of the next cycle..
            ;;typically reset and unavailable status.  For certain inputs, we can never effectively
            ;;grow supply, which wrecks requirements analysis.
            ;;The solution is to detect the edge-case where we have an effectively infinite policy,
            ;;and change the proportion computation.  A fair proposal is to take the current cycle
            ;;time, and quot it by the cycle length of the target policy.  that becomes the input
            ;;for our cycleproportion calculation....We should still get a useful distribution
            ;;of cycletimes in the new policy without resorting to randomness, while crediting the
            ;;units that have a longer time in cycle...
            CycleProportionA
              #_(core/float-trunc (/ cycletimeA (protocols/cycle-length current-policy)) 6)
              (compute-proportion cycletimeA
                                  (protocols/cycle-length current-policy)
                                  (protocols/cycle-length next-policy))
            ;;'TOM change 23 April 2012 -> No longer allow units that are De-mobilizing to enter into available pool.
            ]
        (->or [(->and [(->pred (fn [_] (can-change-policy? CycleProportionA PositionA)))
                       (->alter #(assoc % :policy-change
                                        {:cycletime        cycletimeA
                                         :current-policy   current-policy
                                         :next-policy      next-policy
                                         :proportion       CycleProportionA
                                         :current-position PositionA}))
                       apply-policy-change])
               defer-policy-change])))))

;;policy-change specific reset behaviors, due to transforms
;;between policies:
;;if the unit's bog budget does not exceed the prescribed overlap,
;;we go to reset early.
(befn infeasible-bog-reset ^behaviorenv {:keys [entity ctx] :as benv}
  (->if (fn [_] (not (pos? (u/boggable-time @entity))))
        reset-beh))

;;Note: in retrospect, it looks like we can just use the unit/can-deploy?
;;predicate, which performs the same checks (and more!) that check-deployable
;;and check-deployable-state perform.  This ends up being the standard
;;by which the unit is judged when selected for fill...so...
;;We just postpone deployable status updates until the end, and do a blanket
;;check using
;;policy-change-deployability-check: post-policy-change fixups -- reset
;;early when bog is infeasible, then refresh deploy status.
(befn policy-change-deployability-check ^behaviorenv {:keys [entity ctx] :as benv}
  (->seq [infeasible-bog-reset
          update-deploy-status]))

;;Assuming we have a change, let's apply it!
;;How long will the unit have been in this state?
;; Since it's a policy change....do we zero it out?
;; Or do we assume that the unit has been in the state the exact amount of time required?
;;We assume that the unit has been in the state the exact amount of time required.
;;We also assume that the unit is not entering another cycle, merely extending or truncating.
;; Its current cycle is modified.
;; Does not get a cycle completion out of it.
;;#WIP Nov 2016
;;Policy change => Movement => [state-change location-change]
;;So, we can use policy-change to set the stage for movement, then pipeline the normal
;;movement behavior...
;;apply-policy-change: project the unit's coordinates from its current
;;policy into the next policy (position, cycletime, remaining duration),
;;mutate the entity to adopt the new policy/cycle, announce the change,
;;then stage a state change and drive the move plus a final
;;deployability check.
(befn apply-policy-change [ctx tupdate entity policy-change]
  (let [unit  @entity
        uname (:name unit)
        {:keys [cycletime current-policy next-policy proportion current-position]} policy-change
        cycletimeA   cycletime
        policynameA  (protocols/atomic-name current-policy) ;active atomic policy
        policynameB  (protocols/atomic-name next-policy)    ;new atomic policy
        cyclelengthB (protocols/cycle-length next-policy)
        cycletimeB   (if (> cyclelengthB +twenty-years+) ;;effectively infinite...
                       cycletimeA ;;use current cycletime, do NOT project.
                       (long (* proportion cyclelengthB))) ;coerce to a long cyclelength.
        _ (assert (>= cycletimeB 0) "Negative cycle times are not handled...")
        _ (assert (<= cycletimeB cyclelengthB) "Cyclelength is too long!")
        wasDeployable (protocols/deployable-by? (:policy unit) cycletimeA) ;;can maybe do this faster just checking state.
        isDeployable  (protocols/deployable-by? next-policy cycletimeB)
        positionA     current-position
        positionB     (if (u/deployed? unit) ;;REVIEW - Shouldn't matter, should already be non-deployed
                        (:positionpolicy unit) ;deployed units remain deployed.
                        (protocols/get-position next-policy cycletimeB))
        timeremaining (immediate-policy-wait-time next-policy positionB)
        timeinstate   (- cycletimeB (protocols/get-cycle-time next-policy positionB))
        oldstate      (protocols/get-state current-policy positionB)
        unit (reset! entity
                     (-> unit ;;we change positionpolicy here....bad move?
                         (merge {;:positionpolicy positionB
                                 ;;policy-change supercedes old deferred policy changes.
                                 :deferred-policy-change nil
                                 :policy    next-policy
                                 :cycletime cycletimeB})
                         (u/change-cycle tupdate)
                         (u/modify-cycle next-policy)))
        newduration (- timeremaining timeinstate) ;;added...
        newstate    (protocols/get-state next-policy positionB)
        _ (debug [:preparing-apply-policy-change
                  {:cycletimeA    cycletimeA
                   :policynameA   policynameA
                   :positionA     positionA
                   :policynameB   policynameB
                   :cycletimeB    cycletimeB
                   :positionB     positionB
                   :timeremaining timeremaining
                   :timeinstate   timeinstate
                   :newduration   newduration
                   :oldstate      oldstate
                   :newstate      newstate
                   }]) ]
    ;;We have a move.
    ;;Setup the movement and let the behavior execute.
    ;(if (not= positionA positionB)
    ;;setup the move and use existing behavior to execute (vs. legacy method that folded stuff in here).
    (do (swap! ctx #(->> (assoc % :policy-change nil)
                         (core/trigger-event :UnitChangedPolicy
                                             uname
                                             policynameA
                                             (core/msg "Unit " uname " changed policies: "
                                                       policynameA ":" cycletimeA "->"
                                                       policynameB ":" cycletimeB)
                                             nil)))
        (->and [(->alter (fn [benv]
                           (assoc benv :state-change {:newstate    newstate
                                                      :duration    newduration
                                                      :timeinstate 0}
                                       :changed-policy true
                                       :policy-change  nil
                                       ;;we add a formal reset policy
                                       ;;to allow reset-beh to accurately
                                       ;;compute reset wait time.
                                       :reset-policy next-policy)))
                ;;for some reason, move! was swallowing up our behavior
                ;;for specific units, and not following through withh
                ;;a policy-change-deployability check.  This left us
                ;;with units that should have reset and gained new
                ;;bog budget not doing so, leading to a runtime
                ;;invalid deployer error.  ->seq should force both
                ;;behaviors to evaluate.
                (->seq [(move! positionB newduration) ;;movement behavior
                        policy-change-deployability-check])
                ]))))

;;TODO: Add this?
;;'<NAME> add 10 May 2016 . We have some units changing policies from a longer BOG budget
;;'to a shorter BOG budget.
;;'We are running into issues when a unit's new BOG budget is < Overlap
;;'and then this unit got deployed and ended up with a negative BOG budget throwing negativeErr
;;If .CurrentCycle.bogbudget <= .policy.overlap Then 'this matches our check in AbruptWithdraw_State
;;    Set unit = Reset_State(unit, deltat)
;;    .parent.UpdateDeployStatus unit
;;End If
;;This automatically gets checked during move!...
;; MarathonOpSupply.UpdateDeployStatus simstate.supplystore, unit, , , simstate.context
;; 'Adopt Policy B.
;; 'Policy A ->
;; '  Find relative CT = ct/CLengthA
;; 'Policy B ->
;; '  Find relative positionB = pos(RelativeCT * CLengthB)
;; 'Movingstate from PositionA to relative PositionB.
;; 'Update with delta0.
;; 'TOM Change 2 Sep -> moved this north so that we can use the policy stack as a flag in unit's
;; 'ChangeCycle logic.  Check for sideeffects
;; .policyStack.Remove 1
;; SimLib.triggerEvent UnitChangedPolicy, .name, .policy.AtomicName, "Unit " & .name & " changed policies: " & _
;;    policynameA & ":" & cycletimeA & "->" & policynameB & ":" & CycleTimeB, , simstate.context
;;SET UP A STATECHANGE
;; SimLib.triggerEvent supplyUpdate, .name, .name, "Policy Change Caused Supply Update for unit " & .name, , simstate.context
;; Set PolicyChange_State = ChangeState(unit, nextstate, 0, newduration)
;; 'NOTE -> I may need to consider changing location here.....
;;The unit's cycle cannot project onto another cycle.  We need to defer policy change until reset.
;;leave the policy on the stack.  Catch it during reset.
;;TOM change 2 Sep 2011 -> we modify the cyclerecord to reflect changes in expectations...
;;This is not a replacement...
;;WIP Nov 2016
;;Records a pending policy change on the entity (:deferred-policy-change) to be
;;applied later (at reset), fires an :AwaitingPolicyChange event, and clears the
;;:policy-change key from the behavior environment.
(befn defer-policy-change {:keys [entity ctx tupdate policy-change] :as benv}
  (when policy-change
    (let [_ (debug [:deferring-policy-change])
          {:keys [next-policy]} policy-change
          unit  @entity
          uname (:name unit)
          _ (swap! ctx
                   #(core/trigger-event :AwaitingPolicyChange uname
                        (marathon.data.protocols/atomic-name (:policy unit))
                        (core/msg "Unit " uname " in position " (:positionpolicy unit)
                                  " is waiting until reset to change policies")
                        nil %))
          ;;marked the deferred policy change.
          _ (swap! entity #(assoc % :deferred-policy-change
                                  (select-keys policy-change [:next-policy])))]
      (->alter (fn [benv] (assoc benv :policy-change nil))))))

;;If the entity carries a deferred policy change, consume it (clearing the
;;component) and run the policy-change behavior with it bound in the benv.
(befn try-deferred-policy-change {:keys [entity ctx tupdate] :as benv}
  (when-let [pc (:deferred-policy-change @entity)]
    (let [_ (debug [:applying-deferred-policy-change])
          _ (swap! entity assoc :deferred-policy-change nil)]
      (->seq [(->alter (fn [benv] (assoc benv :policy-change pc)))
              policy-change-state]))))

;; SimLib.triggerEvent AwaitingPolicyChange, .name, .policy.AtomicName, "Unit " & _
;;    .name & " in position " & .PositionPolicy & " is waiting until reset to change policies", , simstate.context
;; Set unit = RevertState(unit)
;; 'We updated the unit in the process
;; SimLib.triggerEvent supplyUpdate, .name, .name, "Policy Change Attempt Caused Supply Update for unit " & .name, , simstate.context

;;Basic Unit Behaviors (or "States....")
;;=====================================
;;entities have actions that can be taken in a state...
;;Maps state keys (keywords, strings, and policy-protocol constants) to the
;;behavior evaluated while the entity is in that state.  Looked up by
;;do-current-state below; entities may override via a :statemap component.
(def default-statemap
  {:reset            reset-beh
   ;                 :global
   :abrupt-withdraw  abrupt-withdraw-beh
   :recovery         recovery-beh
   :followon         age-unit
   ;                 :recovered (echo :recovered-beh)
   ;:end-cycle
   ;                 :spawning spawning-beh
   :demobilizing     dwelling-beh
   "DeMobilizing"    dwelling-beh
   protocols/demobilization dwelling-beh
   :bogging          bogging-beh
   protocols/Bogging bogging-beh
   ;;Added for legacy compatibility...
   :non-bogging      dwelling-beh
   :recovering       (echo :recovering-beh)
   "Recovering"      (echo :recovering-beh)
   :dwelling         dwelling-beh
   protocols/Dwelling dwelling-beh
   ;;Need to make sure we don't add bogg if we're already bogging...
   :overlapping      bogging-beh
   protocols/Overlapping bogging-beh
   :waiting          (echo :waiting-state)
                     #_(->seq [(echo :waiting-state)
                               defer-policy-change])
   :modernizing      modernizing-beh})

;;PERFORMANCE NOTE: HotSpot - used val-at macro to inline method calls.
;;lookup what effects or actions should be taken relative to
;;the current state we're in.  This is kind of blending fsm
;;and behaviortree.
(befn do-current-state {:keys [entity statedata] :as benv}
      (let [;state (:state @entity)
            state     (:state (deref!! entity)) ;;slightly faster using keyword as function call.
            state-map (or (:statemap entity) default-statemap)]
        (if (set? state) ;entity has multiple effects...
          ;;MEGA-HACK:This a serious hack to prevent double-counting of bog when we have
          ;;state-sets.  Alone, either overlapping or bogging confers collecting bog time,
          ;;and in legacy policies are mutually exclusive.  However, for SRM policies,
          ;;we have the possibility of bogging/non-bogging, as well as being in an
          ;;overlap state.  This leaves us with a conundrum relative to our default
          ;;legacy meanings of bog and overlap.  What we can do is ensure that if
          ;;bogging is present, we just skip overlapping if we ever encounter a
          ;;state-set.  This is practical, but somewhat brittle....probably
          ;;a better idea to encode the meaning of states better - like [:bogging :overlapping]
          (let [stats (r/filter identity
                                (r/map (fn [s] (val-at state-map s))
                                       (disj state :overlapping)))]
            (->seq stats))
          (get state-map state))))

;;the entity will see if a message has been sent
;;externally, and then compare this with its current internal
;;knowledge of messages that are happening concurrently.
;;Drains the entity's pending message queue (a chunk at a time) into
;;:current-messages in the benv; succeeds if any messages are available.
(befn check-messages ^behaviorenv {:keys [entity current-messages ctx] :as benv}
      (if-let [old-msgs (fget (deref! entity) :messages)] ;we have messages
        (when-let [msgs (pq/chunk-peek! old-msgs)]
          (let [new-msgs (rconcat (r/map val msgs) current-messages)
                _ (b/swap!! entity
                            (fn [^clojure.lang.Associative m]
                              (.assoc m :messages
                                      (pq/chunk-pop! old-msgs msgs))))]
            (bind!! {:current-messages new-msgs})))
        (when current-messages
          (success benv))))

;;this is a dumb static message handler.
;;It's a simple little interpreter that
;;dispatches based on the message information.
;;Should result in something that's beval compatible.
;;we can probably override this easily enough.
;;#Optimize: We're bottlenecking here, creating lots of
;;maps....
;;Where does this live?
;;From an OOP perspective, every actor has a mailbox and a message handler.
;;
;;so now we can handle changing state and friends.
;;we can define a response-map, ala compojure and friends.
;;type sig:: msg -> benv/Associative -> benv/Associative
;;this gets called a lot.
;;Dispatches on (:msg msg): known message types invoke the matching behavior
;;against the benv; unknown messages are re-broadcast as simulation events and
;;treated as success.
(defn message-handler [msg ^behaviorenv benv]
  (let [entity           (.entity benv)
        current-messages (.current-messages benv)
        ctx              (.ctx benv)]
    (do (ai/debug (str [(:name (deref! entity)) :handling msg]))
        (beval
         (case (:msg msg)
           :move
           (let [move-info (:data msg)
                 {:keys [wait-time next-location next-position deltat]
                  :or   {wait-time 0 deltat 0}} move-info
                 _ (debug [:executing-move move-info msg (:positionpolicy @entity)])]
             (beval (move! next-location deltat next-position wait-time) benv))
           ;;allow the entity to invoke a state-change-behavior
           ;;We can always vary this by modifying the message-handler
           :change-state
           ;;generic update function.  Temporally dependent.
           ;;we're already stepping the entity.  Can we just invoke the change-state behavior?
           (let [state-change (:data msg)
                 _ (debug [:state-change-message state-change msg])]
             (beval change-state-beh
                    (assoc benv
                           :state-change state-change
                           :next-position (or (:next-position state-change)
                                              (:newstate state-change)))))
           :change-policy
           ;;Note: this is allowing us to change policy bypassing our wait state...
           ;;We need to put a break in here to defer policy changes.
           ;;Policy-changes are handled by updating the unit, then
           ;;executing the change-policy behavior.
           ;;Note: we could tie in change-policy at a lower echelon....so we check for
           ;;policy changes after updates.
           (beval policy-change-state (assoc benv :policy-change (:data msg)))
           :update
           (if (== (get (deref! entity) :last-update -1) (.tupdate benv))
             (success benv) ;entity is current
             (->and [(echo :update)
                     ;roll-forward-beh
                     ;;See if we can replace this with update-state...
                     update-state-beh]))
           :spawn
           (->and [(echo :spawn)
                   (push! entity :state :spawning)
                   spawning-beh])
           ;;Allow the entity to apply location-based information to its movement, specifically
           ;;altering behavior due to demands.
           :location-based-move
           (beval location-based-beh (assoc benv :location-based-info (:data msg)))
           ;;Like a location-based move, except with a simple wait time guarantee, with a
           ;;reversion to the original state upon completion of the wait.
           :wait-based-move
           (beval wait-based-beh (assoc benv :wait-based-info (:data msg)))
           ;;allow the entity to change its behavior.
           :become (push! entity :behavior (:data msg))
           :do     (->do (:data msg))
           :echo   (->do (fn [_] (println (:data msg))))
           (do ;(println (str [:ignoring :unknown-message-type (:msg msg) :in msg]))
               (sim/trigger-event msg @ctx) ;toss it over the fence
               ;(throw (Exception. (str [:unknown-message-type (:msg msg) :in msg])))
               (success benv)))
         benv))))

;;we'd probably like to encapsulate this in a component that can be seen as a "mini system"
;;basically, it'd be a simple record, or a function, that exposes a message-handling
;;interface (could even be a generic fn that eats packets).  For now, we'll work
;;inside the behavior context.  Note, the entity is a form of continuation....at
;;least the message-handling portion of it is.
;;message handling is currently baked into the behavior.
;;We should parameterize it.
;;handle the current batch of messages that are pending for the
;;entity.  We currently define a default behavior.
;;Folds message-handler over the batch of :current-messages, threading the
;;behavior environment through each handled message.
(befn handle-messages ^behaviorenv {:keys [entity current-messages ctx] :as benv}
      (when current-messages
        (reduce (fn [acc msg]
                  (message-handler msg (val! acc)))
                (success (assoc benv :current-messages nil))
                current-messages)))

;;The global sequence of behaviors that we'll hit every update.
;;These are effectively shared behaviors across most updates.
(def global-state
  (->seq [(echo :aging)
          age-unit
          (echo :aged)
          moving-beh]))

;;Stamps the entity's :last-update with the current tupdate and echoes a
;;debug summary of the entity's temporal bookkeeping.
(befn up-to-date {:keys [entity tupdate] :as benv}
      (let [e (reset! entity (assoc @entity :last-update tupdate))]
        (echo [:up-to-date (:name e)
               :cycletime (:cycletime e)
               :last-update (:last-update e)
               :tupdate tupdate
               :positionpolicy (:positionpolicy e)])))

;;Succeeds after processing pending messages, or falls through to a no-op echo.
(def process-messages-beh
  (->or [(->and [(echo :check-messages)
                 check-messages
                 handle-messages])
         (echo :no-messages)]))

;;The root behavior for updating the entity.
(def update-state-beh
  (->seq [(echo :<update-state-beh>)
          ; process-messages-beh
          (->or [special-state
                 (->seq [(echo :<do-current-state>)
                         do-current-state
                         (echo :global-state)
                         (fn [ctx] (if-y global-state (fail ctx)))])
                 up-to-date])]))

;;if we have a message, and the message indicates
;;a time delta, we should wait the amount of time
;;the delta indicates.  Waiting induces a change in the
;;remaining wait time, as well as a change.
(befn wait-in-state ^behaviorenv [entity current-message ctx]
      (let [;_ (println [:wait-in-state entity msg])
            msg   current-message
            t     (fget msg :t)
            delta (- t (fget (deref! entity) :t))]
        (when-let [duration (fget (deref! entity) :wait-time)]
          (if (<= delta duration) ;time remains or is zero.
            ;(println [:entity-waited duration :remaining (- duration delta)])
            (merge!! entity {:wait-time (- duration delta)
                             :tupdate t}) ;;update the time.
            (do ;can't wait out entire time in this state.
              (merge!! entity {:wait-time 0
                               :tupdate (- t duration)}) ;;still not up-to-date
              ;;have we handled the message?
              ;;what if time remains?  this is akin to roll-over behavior.
              ;;we'll register that time is left over.  We can determine what
              ;;to do in the next evaluation.  For now, we defer it.
              (bind!! {:current-message
                       (.assoc ^clojure.lang.Associative msg :delta
                               (- delta duration))}))))))

;;True when the entity's :tupdate matches the context's :tupdate.
(defn up-to-date? [e ctx] (== (:tupdate e) (:tupdate ctx)))

;;This will become an API call...
;;instead of associng, we can invoke the protocol.
;;Schedules a future :update message for the entity at tnow + its :wait-time.
(befn schedule-update ^behaviorenv {:keys [entity ctx new-messages] :as benv}
      (let [st       (deref! entity)
            nm       (:name st)
            duration (:wait-time st)
            tnow     (:tupdate (deref! ctx))
            tfut     (+ tnow duration)
            _ (debug 4 [:entity nm :scheduled :update tfut])
            ;_ (when new-messages (println [:existing :new-messages new-messages]))
            ]
        (success (push-message- benv nm nm (->msg nm nm tfut :update)))))

;;wire in functionality here for a unit to invoke its own
;;deployment order...
;;From here on, the system will append a deployment order to
;;the unit, and send the unit a message to update.
;;The unit will handle the message by appending a
;;deployment order to its state and invoking an update.
;;This way, we handle messages first, which preps the
;;behavior environment to respond to stimulii (like
;;the presence of a deploy order)
(defn deploy-to [o benv] ;;stub
  (success benv))

(befn try-deploy ^behaviorenv {:keys [entity] :as benv}
      (when-let [o (:deploy-order @entity)]
        (deploy-to o)))

;;This is kind of weak, but I don't have a better solution at the moment...
(do (println [:setting-defaults])
    (reset! base/default-behavior roll-forward-beh))

;;aux function to help us add a breadcrumb for
;;the location-based behavior updates.
;;Some locations have overlap.  If so, we look for this
;;to see if the move is prescribed.  We store this as a
;;component in the entity.
;;Side effect: when overlap is positive, attaches a :prescribed-move component
;;to the entity (consumed elsewhere); always returns benv unchanged.
(defn prescribe-overlap! [benv t overlap state locname]
  (if (and overlap (pos? overlap))
    (let [entity (:entity benv)]
      (do (debug [:prescribing-overlap (:name @entity) overlap t])
          (swap! entity assoc :prescribed-move
                 {:state-change {:newstate state
                                 :duration overlap
                                 :followingstate nil
                                 :timeinstate 0}
                  :overlapping-position [locname :to] ;true
                  :t t})
          benv))
    benv))

;;SRM bs...
;;SRM takes a different view of unit behavior.
;;Most importantly, for AC units (and deploying RC units),
;;the behavior looks at demand to determine position
;;changes, state-changes, duration, etc., rather than look
;;at the policy.
;;When not in a mission state, the default behavior does
;;provide a cyclical routing, even for AC (At the moment,
;;but that crap will probably change like everything else).
;;We should be able to inject a supply of units that
;;follow the baseline SRM policy, with no demand, and
;;Just have them spawn and run through policy changes.
;;The SRM behavior only really varies upon deployment...
;;so we can create special SRM-specific behaviors that
;;read information about the demand and use it
;;to schedule changes.  For now, there is no
;;notion of recovery...
;;These differences mean we need to handle
;;local-demand effects if deployed....
;;For any movement, we need to check to see if
;;there are effects or guidance associated with the
;;place we're moving to.  Some places tell us what
;;to do, outside of our policy.
;;The only way we can get here is if there is a location-policy
;;in the environment.  How does it get there?
;;TODO_Have the location push behaviors onto some kind of
;;stack.  This could be very powerful (and common), in that
;;the behavior would evaluate its top-most behavior first
;;(i.e. do-current-state), and pop the behavior once
;;the time expired.
;;Normalizes a unit's state lookup to a set (single states are wrapped).
(defn location-based-state [u state]
  (let [s (get-state u state)
        s (if (set? s) s #{s})]
    s))

;;Driven by :location-based-info (from a :location-based-move message):
;;derives state/location/position changes from the demand's mission data and
;;hands them to change-state-beh.
(befn location-based-beh {:keys [entity location-based-info ctx] :as benv}
      (when location-based-info
        (let [{:keys [name MissionLength BOG StartState EndState overlap timeinstate]} location-based-info
              ;;StartState is really a policy position....
              start-state (location-based-state @entity StartState)
              newstate    (if BOG (conj start-state :bogging) start-state)
              ;;we need to schedule a state change.
              ;;and a location-change...
              _ (swap! entity assoc :location-behavior true)
              followingstate (if (pos? overlap)
                               (conj newstate :overlapping)
                               (location-based-state @entity EndState))
              state-change {:newstate newstate
                            :duration (- MissionLength overlap)
                            :followingstate followingstate
                            :timeinstate (or timeinstate 0)}
              location-change {:from-location (:locationname @entity)
                               :to-location name}
              position-change {:from-position (:positionpolicy @entity)
                               :to-position StartState}
              ;;add the ability to check for prescribed moves...
              ;;if the demand prescribes one, then we go ahead and schedule it with
              ;;the entity...
              wt (- MissionLength overlap)
              _ (debug [:location-based {:name (:name @entity)
                                         :state-change state-change
                                         :location-change location-change
                                         :wait-time wt
                                         :next-position StartState}])]
          (beval change-state-beh
                 (-> benv
                     (prescribe-overlap! (+ (:tupdate benv) wt) overlap followingstate name)
                     (assoc :state-change state-change
                            :location-change location-change
                            :position-change position-change ;new
                            :wait-time wt
                            :next-position StartState))))))

;;Another potential garbage leak!
(def wbm (atom nil))

;;Projects the unit's current cycletime onto its policy to recover the
;;position it should revert to after waiting.
(defn compute-wait-position [unit]
  (let [p           (:policy unit)
        current-pos (:positionpolicy unit)
        ct          (:cycletime unit)]
    (protocols/get-position p ct)))

;;Driven by :wait-based-info (from a :wait-based-move message): puts the unit
;;into wait-state at the demand's location for wait-time, reverting to its
;;prior state afterward; also refreshes deployable status.
(befn wait-based-beh {:keys [entity statedata wait-based-info ctx] :as benv}
      (when wait-based-info
        (let [{:keys [demand wait-time wait-state]} wait-based-info
              name (:name demand)
              state-change {:newstate wait-state
                            :duration wait-time
                            :followingstate (:state @entity)
                            :timeinstate 0}
              location-change {:from-location (:locationname @entity)
                               :to-location name}
              position (:positionpolicy @entity)
              position-change (when (= position :followon)
                                ;;we need to compute a position change to
                                ;;make sure the unit reverts to its
                                ;;former position, not stay in followon AND wait.
                                ;;this will cause problems.
                                {:from-position position
                                 :to-position (compute-wait-position @entity)})
              _ (debug [:wait-based {:name (:name @entity)
                                     :state-change state-change
                                     :location-change location-change
                                     :position-change position-change
                                     :wait-time wait-time}])
              ;; _ (throw (Exception. (str [:about-to-wait {:name (:name @entity)
              ;;                                            :state-change state-change
              ;;                                            :location-change location-change
              ;;                                            :wait-time wait-time}])))
              ]
          (->seq [(->alter #(assoc %
                                   :state-change state-change
                                   :location-change location-change
                                   :position-change position-change
                                   ;;NOTE: wait-times >= 999999 are treated as unbounded and dropped.
                                   :wait-time (when (and wait-time (< wait-time 999999)) wait-time)))
                  change-location
                  change-position
                  change-state-beh
                  (->alter (fn [benv]
                             (let [u (deref (:entity benv))
                                   _ (debug [:deployable-changed! :waiting
                                             :deployment-index (:deployment-index u)])
                                   _ (swap! (:ctx benv) #(supply/update-deploy-status u nil nil %))
                                   ;_ (reset! wbm u)
                                   _ :ballz #_(throw (Exception. (str [:ballz])))]
                               benv)))
                  wait]))))

;;All our behavior does right now is spawn...
;;The only other changes we need to make are to alter how we deploy entities...
;;We can actually handle that outside of the unit's deployment....
;;Possibly include it as a message type...
;;Have a special message handler for it...
;;[genius]
;;If we have an location-based-policy to apply, we can
;;tell the unit via messaging...
;;We typically tell the unit from outside, after we've
;;set it up and everything...
;;SRM behavior overrides some functionality for the base behavior.
(befn srm-beh []
      spawning-beh
      ;(throw (Exception. (str "SRM Behavior doesn't do anything!")))
      )

(do (println [:setting-srm])
    (swap! base/behaviors assoc "SRM"
           roll-forward-beh ;same thing.
           ;srm-beh
           ))

(comment ;old version
  (befn do-current-state {:keys [entity statedata] :as benv}
        (let [;state (:state @entity)
              state     (:state (deref!! entity)) ;;slightly faster using keyword as function call.
              state-map (or (:statemap entity) default-statemap)]
          (if (set? state) ;entity has multiple effects...
            (let [stats (r/filter identity
                                  (r/map (fn [s] (get state-map s))
                                         state))]
              (->seq stats))
            (get state-map state))))
  )

(comment ;OBE
  (defn update-unit
    "Computes a new simulation context given a specific unit to update,
an elapsed time, and an optional time of update.  tupdate is inferred
to be the current simulation time if none is supplied."
    ([unit deltat ctx]
     (update-unit unit deltat (sim/current-time ctx) ctx))
    ([unit deltat tupdate ctx]
     (->> ctx
          (load-entity! unit deltat tupdate)
          (roll-forward-beh) ;update the unit according to the change in
                             ;time.
          (error-on-fail)    ;unit updates should never fail.
          (second            ;result is in terms of [:success|:fail ctx], pull out
                             ;the ctx
           )
          (commit-entity!)
          ;          (clear-bb)
          )))

  ;;We'll replace these; for now the units will automatically
  ;;try to update themselves if possible.
  ;;Debatable...just invokes roll-forward-beh; I think we can ensure that
  ;;roll-forward is always invoked first...
  ;;Re-evaluate the need for this....can we synchronize from outside?
  ;;ideally, we just keep track of the unit's last update....
  (defn sync
    "Utility function.  Synchronize the unit to the current simulation time.
If the last update occured before the current time, we roll the unit forward
by the delta between the last update and the current time."
    [unit ctx]
    (let [tprev (or (sim/last-update (:name unit) ctx) 0)
          tnow  (sim/current-time ctx)]
      (if (= tprev tnow)
        (log! (str "unit " (:name unit) "is up to date") ctx)
        (log! (str "Synchronizing unit " (:name unit) " from " tprev
                   " to " tnow)
              (update-unit unit (- tnow tprev) tprev ctx)))))

  ;;Synchronizes the unit to the current time, then applies a time
  ;;delta, then processes/records the unit's time of update.
  (defn update
    "Entry point for computing behavior-based unit updates.  Fundamental
API function for processing unit entities.  Synchronizes the unit to
the current simulation time, then computes the new simulation context
resulting from the entity behavior over an elapsed deltat (from current
simulation time)."
    [unit deltat ctx]
    (let [nm (get unit :name)]
      (->> (sync unit ctx)
           (update-unit unit deltat)
           (u/unit-update! nm (core/msg "Updated " nm)))))
  )
;;NOTE(review): stray top-level `true` below; a no-op, likely a REPL/load artifact.
true
;;A namespace for defining and composing entity behaviors.
;;We'll define core behaviors here, leveraging the
;;behavior tree approach defined by spork.ai.behavior .
(ns marathon.ces.behavior
  (:require [spork.ai.core :as ai
             :refer [deref! fget fassoc push-message- debug ->msg]]
            [spork.ai.behavior
             :refer [beval success? success run fail behave ->seq ->elapse
                     ->not ->do ->alter ->elapse-until ->leaf ->wait-until
                     ->if ->and ->and! ->pred ->or ->bnode ->while ->reduce
                     always-succeed always-fail bind! bind!! merge! merge!!
                     push! return! val! befn]
             :as b]
            [spork.ai.behaviorcontext :as base :refer :all]
            [spork.ai [machine :as fsm]]
            [marathon.data [protocols :as protocols]]
            [marathon.ces [core :as core]
                          [unit :as u]
                          [supply :as supply]
                          [demand :as d]]
            [spork.cljgraph.core :as graph]
            [spork.util.general :as gen]
            [spork.data.priorityq :as pq]
            ;;FIX: clojure.core.reducers was previously required twice with the
            ;;same alias; the redundant duplicate has been removed.
            [clojure.core.reducers :as r]
            [spork.entitysystem.store :as store :refer :all :exclude [default]]
            [spork.sim.simcontext :as sim]
            ;;NOTE(review): spork.util.general is intentionally aliased twice
            ;;(as gen and as general); both aliases may be used in this file.
            [spork.util.general :as general])
  (:import [spork.ai.behaviorcontext behaviorenv]))

;;Overview
;;========
;;The general idea behind how we motivate entities to do things is to
;;use composeable behaviors - as defined in spork.ai.behavior -
;;composed into behavior "trees".  These trees simplify the typical
;;state-transition model we find in finite-state machines.  Where the
;;FSM has zero or more edges - or transitions - between states,
;;behavior trees focus on a small set of composition operations -
;;called internal or intermediate nodes - that define how to traverse
;;the tree.  So, rather than evaluating the next state to transition
;;to - along with the pre, executing, and post conditions for the
;;state - we walk a tree of behaviors, where nodes along the path
;;dictate consistent idiomatic ways to evaluate child nodes.
;;Besides composition, the other core concept is that behaviors may ;;return success, failure, or (in other implementations) run to ;;indicate that a behavior node has not yet finished evaluating. This ;;implementation - focused on unit entity behaviors - takes a fairly ;;naive view and ignores the run evaluation. Rather, we always ;;succeed or fail. ;;Evaluation in the Behavior Environment ;;===================================== ;;Unlike traditional entity "update" or "step" functions, we maintain ;;an explicit context in which the behavior is evaluated - the ;;behavior environment (marathon.ces.basebehavior). This context ;;provides a consistent accumulation of state through which we can ;;view evaluation of the behavior tree as a reduction, with the ;;behavior environment being the accumulated result. Thus, we ;;traverse the tree with an initial behavior environment [reified as a ;;map with useful keys referencing the simulation context/entity ;;store, the entity being processed, the simulated time of the ;;evaluation, and any additional keys useful to evaluation]. Taken as ;;a lexical environment, the keys of the behavior environment form a ;;working set of "variables" or properties that we can either query, ;;update, redefine, add to, or otherwise use to guide behavior ;;evaluation. ;;When evaluating a behavior tree, we start from the root behavior and ;;use its evaluation rules to proceed with the reduction (i.e. ;;compute a resulting behavior environment). The reduced behavior ;;context is then - typically - processed by merging the entity ;;reference into the simulation context reference, returning the ;;simulation context. The function that encapsulates this functional ;;form of entity behavior processing is ;;spork.ai.behaviorcontext/step-entity . ;;Behavior evaluation occurs using the spork.ai.behavior/beval ;;function, which operates similarly to eval but in the domain of ;;behavior trees. 
The evaluation rules are fairly simple: ;;If the item is a vector pair that matches [:success|fail|run ctx], ;;the vector is returned as the output for beval. ;;If the item to be evaluated is a function, then it is applied to the ;;current accumulated context to determine the next behavior to beval. ;;This means that functions may return a final result ala ;;[:success|:fail|:run ctx] or they may return another behavior ;;(function or node) which will continue to be evaluated against the ;;context. ;;If the item to be evaluated is a behavior node - anything ;;implemented the spork.ai.IBehaviorTree protocol - then it is beval'd ;;with the current accumulated context (delegating to the behave ;;function of the IBehaviorTree). ;;The current implementation assumes that the highest-level of ;;evaluation - as in spork.ai.behaviorcontext/step-entity! will ;;always be successful. Anything else is an error (even returning ;;[:fail ...]. ;;Behavior Functions ;;================= ;;Callers may define functions that operate on the behavior ;;environment directly; in some cases this is a useful - if low level ;;- approach to defining behaviors. Arbitrary functions that map a ;;context to a [:success ...] or a [:fail ...] may be used as ;;behaviors, and will operate correctly under beval. ;;For convenience, and to focus on behavior tree traversal as an ;;"evaluation", the spork.ai.behavior/befn macro provides a convenient ;;way to define aforementioned behavior functions with convenient ;;destructuring and behavior result packing built in. Using the befn ;;macro - to define behavior functions - is similar to the standard ;;clojure.core/defn form, with a change the context: The function ;;arguments correspond to a map-destructing of the behavior ;;environment, and where specified by a type hint, will compile to ;;fast field-based accessors for the specific behavior environment. ;;To elide repetitive use of (success ...) 
;;and the like, and to align
;;with clojure's idiom of using nil for failure, nil results are
;;automatically converted to (failure ...) evaluations.  Otherwise,
;;behavior evaluation continues as per beval - the caller can
;;immediately return from the behavior using (success ctx) or yield
;;another behavior as a return value - which will effectively continue
;;evaluation using the new behavior.

;;Additional operations available in a behavior function include:
;;(bind!! {:a 1 :b 2}) =>  (success (merge benv {:a 1 :b 2}))
;;(return! ^MapEntry [:success x]) => x
;;(return! ^MapEntry [:fail x]) => (Throw (Exeption. ...))

;;Behavior Nodes
;;==============
;;Aside from encoding custom functionality with raw functions,
;;pre-existing behavior nodes provide an expressive domain specific
;;language for defining behavioral "flow control" in a composeable
;;manner.  They effectively define custom behavior functions - again
;;returning [:success|:fail|:run ctx] behind a unified protocol.  The
;;magic lies in how a behavior node executes and interprets the
;;traversal of its children.  For example, the ->or behavior
;;corresponds to a logical or of all child nodes (or clauses).  Upon
;;evaluation, ->or will reduce its children - in order - returning on
;;the first [:success ctx] it finds, else [:fail ctx].  This is
;;similar to the 'or macro in clojure.  Similarly, the ->and will
;;return at the first sign of a failed child node, else return
;;[:success ctx] as its behavior reduction.  In-order,
;;i.e. left-to-right node traversal is a common idiom (although not a
;;constraint) in behavior trees, and allows one to follow the behavior
;;"logic" in a simple, consistent manner by following the traversal.

;;These nodes provide a simple way to compose behaviors and to
;;communicate success/failure throughout the traversal.  These trees
;;may be embedded as children of like nodes, creating sophisticated
;;behaviors with a declarative specification.
;;Callers are advised to
;;use the canonical behavior nodes where possible to utilize their
;;expressive power, readability, and re-use.

;;Updating Units by Sending Messages
;;==================================
;;Technically, a unit entity update is any application of
;;spork.ai.behaviorcontext/step-entity!, in which the entity, the
;;simulation context, and a behavior - either a unique behavior
;;associated with the entity's :behavior component, or a default
;;global behavior defined in
;;spork.ai.behaviorcontext/default-behavior - are munged into a
;;spork.ai.behaviorcontext/behaviorenv.

;;Thus, stepping entities requires the simulation context/entity
;;store, the entity to update, and a message to send it.  The result
;;will be a simulation context / entity store reflecting any committed
;;changes in response to how the entity "behaved" in response to the
;;message.

;;We use messages - as defined in marathon.ces.core/->msg, as an
;;entry-point to initiate behavior and provide initial bindings for
;;the behavior environemnt.  For instance, the convenience function
;;marathon.ces.core/handle-message! merely wraps step-entity!, while
;;marathon.ces.core/send!! provides a simple API for defining messages
;;to send to the entity in addition to computing the result of a send
;;/ behavior.

;;When are Messages Sent, or When do Updates Happen?
;;=======================
;;Currently, entities send themselves messages typically in response to
;;"organic" events such as following a rotational policy.  Once the
;;entity is initialized, it will likely request an update at a later
;;time, the span of which is predicated based on the amount of time
;;the unit is supposed to wait in a particular state according to its
;;rotational policy.  Absent any "outside" interference, this message
;;will be propagated to the entity at the scheduled time, with the
;;entity living in eventless stasis (retaining the state from its last
;;known update) until the message is delivered.
For unit entities, ;;message delivery is dispatched during invocation of ;;marathon.ces.supply/manage-supply, at which point any units ;;scheduled for updating are notified. ;;Inorganic messages occur when external forces exert unexpected ;;control over the unit entity. These typically manifest in events ;;like filling demand, sending units home, changing policies, or any ;;number of things that are unexplained by the unit's rotational ;;policy - yet necessary for simulation. ;;How Are Messages Processed? ;;=========================== ;;Messages may occur out-of-sync with the unit's current status. That ;;is, on the timeline the unit follows, the entity is not guaranteed ;;to have been "updated" at the same time step as the new message is ;;received. ;;Consequently, we need to synchronize, or roll the unit forward in ;;time to account for any pending updates and to bring the entity into ;;a synchronized state at the time of the message. Unit entity ;;behavior is defined to account for an elapsed time, represented by ;;deltat in the behavior environment, which allows us to accomplish ;;rolling forward. For instance, if a unit arrives at a dwelling ;;state, and needs to wait there for 365 days until the next update, ;;with the implication that the dwelling behavior merely adds 1 unit ;;of dwell to a dwell statistic for every elapsed day, the entity will ;;have an update scheduled 365 days later - at which point the deltat ;;will indicate the need to roll forward 365 days and thus add 365 ;;days to the dwell stat. ;;If an update or similar message arrives earlier than the next ;;scheduled update, such as from an inorganic message - say a ;;deployment 18 days later, then the unit must be "aged" or rolled ;;forward 18 days to account for the elapsed time. From that ;;synchronization point, the unit may process the pending message and ;;accomplish its deployment, initiating another scheduled update. 
;;Message processing always occurs after synchronizing the unit with
;;the time frame in which the message was sent.  In behavior-tree
;;terms, message processing and "rolling forward" are just behavior
;;functions, composable like any other.  That opens flexible options
;;for communicating with entities: centralized synchronous updates, or
;;Erlang-style message passing (or STM / channels) for asynchronous,
;;possibly parallel updates.  The current default implementation is
;;synchronous and centralized.

;;__utils__

;;Sentinel for "wait forever"; used to detect effectively-infinite waits.
(def ^:constant +inf+ Long/MAX_VALUE)
;;Twenty years in days; a proxy for "longer than any simulation run".
(def ^:constant +twenty-years+ 7300)

(defmacro ensure-pos!
  "Ensures n is a positive, non-zero value, else throws an exception."
  [n]
  `(if (pos? ~n)
     ~n
     (throw (Exception. (str [:non-positive-value ~n])))))

(defmacro non-neg!
  "Ensures x is a positive or zero value, else throws an exception.
   The 2-arity version tags the error with lbl for easier debugging."
  ([lbl x]
   `(if (not (neg? ~x))
      ~x
      (throw (Exception. (str [~lbl :negative-value ~x])))))
  ([x]
   `(if (not (neg? ~x))
      ~x
      (throw (Exception. (str [:negative-value ~x]))))))

(defmacro try-get
  "Like clojure.core/get, except else is evaluated when the lookup
   misses (or maps to nil/false)."
  [m k & else]
  `(if-let [res# (get ~m ~k)]
     res#
     ~@else))

(defn rconcat
  "Reducible concatenation of colls.  Implements CollReduce so callers
   can fold across multiple collections without building an
   intermediate seq; also implements ISeq for convenience (realizing
   via reducers)."
  ([& colls]
   (reify
     clojure.core.protocols/CollReduce
     (coll-reduce [this f1]
       (let [c1   (first colls)
             init (reduce (fn [acc x] (reduced x)) (r/take 1 c1))
             a0   (reduce f1 init (r/drop 1 c1))]
         (if (reduced? a0)
           @a0
           (reduce (fn [acc coll]
                     (reduce (fn [acc x] (f1 acc x)) acc coll))
                   a0
                   (r/drop 1 colls)))))
     (coll-reduce [this f init]
       (reduce (fn [acc coll]
                 (reduce (fn [acc x] (f acc x)) acc coll))
               init
               colls))
     clojure.lang.ISeq
     (seq [this]
       (seq (into [] (r/mapcat identity colls)))))))

(defn pass
  "Succeeds with ctx, emitting a debug trace tagged with msg."
  [msg ctx]
  (->> (success ctx)
       (core/debug-print [:passing msg])))

;;When bound truthily, if-y prompts interactively before evaluating.
(def ^:dynamic *interact* false)

(defmacro if-y
  "Interactive guard: when *interact* is bound, reads from *in* and
   only evaluates expr on a Y/y response, else evaluates else.  When
   not interacting, simply evaluates expr."
  [expr & else]
  `(if ~'*interact*
     (if (and (= (clojure.string/upper-case (read)) "Y"))
       ~expr
       ~@else)
     ~expr))

(defmacro log!
  "Emits msg via debug, then evaluates to ctx."
  [msg ctx]
  `(do (debug ~msg)
       ~ctx))

;;migrate...
(defn echo
  "Returns a behavior that debugs msg and succeeds with its context."
  [msg]
  (fn [ctx]
    (do (debug msg)
        (success ctx))))

(defmacro deref!!
  "Type-hinted deref; avoids reflective dispatch on IDeref."
  [v]
  (let [v (with-meta v {:tag 'clojure.lang.IDeref})]
    `(.deref ~v)))

(defmacro val-at
  "Synonymous with clojure.core/get, except it uses interop to
   directly inject the method call and avoid function invocation.
   Intended to optimize hotspots where clojure.core/get adds unwanted
   overhead."
  [m & args]
  (let [m (with-meta m {:tag 'clojure.lang.ILookup})]
    `(.valAt ~m ~@args)))

;;let's see if we can memoize get-next-position for big gainz yo...
(defn memo-2
  "Two-argument memoizer backed by a mutable HashMap-of-HashMaps.
   xkey/ykey project the arguments into cache keys (default identity)."
  [f & {:keys [xkey ykey] :or {xkey identity ykey identity}}]
  (let [xs (java.util.HashMap.)]
    (fn [x1 y1]
      (let [x (xkey x1)
            y (ykey y1)]
        (if-let [^java.util.HashMap ys (.get xs x)]
          (if-let [res (.get ys y)]
            res
            (let [res (f x1 y1)]
              (.put ys y res)
              res))
          (let [res (f x1 y1)
                ys  (doto (java.util.HashMap.) (.put y res))]
            (.put xs x ys)
            res))))))

;;Slightly faster memoizer keyed on the policy's atomic name.
;;NOTE(review): backing map is a plain HashMap; should become a
;;ConcurrentHashMap if updates ever go parallel.
(defn memo2-policy
  "Memoizes (f policy y), caching on the policy's atomic-name and y."
  [f]
  (let [xs (java.util.HashMap.)]
    (fn [^clojure.lang.ILookup x1 y]
      (let [x (marathon.data.protocols/atomic-name x1)]
        (if-let [^java.util.HashMap ys (.get xs x)]
          (if-let [res (.get ys y)]
            res
            (let [res (f x1 y)]
              (.put ys y res)
              res))
          (let [res (f x1 y)
                ys  (java.util.HashMap.)]
            (.put ys y res)
            (.put xs x ys)
            res))))))

(defn memo1-policy
  "Memoizes (f policy), caching on the policy's atomic-name."
  [f]
  (let [xs (java.util.HashMap.)]
    (fn [^clojure.lang.ILookup x1]
      (let [x (marathon.data.protocols/atomic-name x1)]
        (if-let [res (.get xs x)]
          res
          (let [res (f x1)]
            (.put xs x res)
            res))))))

;;An alternative idea: close over all of this and reify the object,
;;possibly with a mutable cell per entity (shared-nothing, isolated
;;state) while still maintaining persistent history.  Everything
;;becomes a lookup of the entity's value at time t in that scheme;
;;more to think on here.

;;New
;;Environment for evaluating entity behaviors, adapted for use with
;;the simcontext.  If we provide an address, the entity is pushed
;;there, so we can have nested updates inside associative structures.

;;__Utility functions__
;;Entity step operations...

(defn progress-cycle
  "Advances x by one, wrapping to 0 once width is reached."
  [x width]
  (if (>= x width)
    0
    (unchecked-inc x)))

;;testing function...
(defn deployment-over?
  "Stochastic test stub: deployment ends after 15*31 days, or randomly
   (1% daily) after 30 days."
  [y]
  (or (>= y (* 15 31))
      (and (>= y 30) (<= (rand) 0.01))))

;;testing function...
(defn should-deploy?
  "Stochastic test stub: eligible after a year of dwell, with odds
   growing in proportion to t/tmax."
  [t tmax]
  (and (>= t 365)
       (<= (rand) (* 0.005 (/ (double t) tmax)))))

(defn deployed?
  "True when the entity's :state is :deploying."
  [e]
  (identical? (:state e) :deploying))

(defn should-reset?
  "True once t meets or exceeds tmax."
  [t tmax]
  (>= t tmax))

(defn spawning?
  "True when the fsm statedata's current state is :spawning."
  [^spork.ai.machine.statedata statedata]
  (identical? (.curstate statedata) :spawning))

;;Aux functions are plentiful here; most are helpers for unit-specific
;;entity behaviors, chiefly around how units read their policies.
;;__Aux Functions__

;;#TODO See if we can encode or derive more meaningful semantics from
;;the indices currently associated with states; e.g. :deployable once
;;randomly came back as 7 - we either don't want that, or we want it
;;to mean something.

;;Note: these are unit-specific, so could migrate to the unit
;;namespace to save on real estate here.

;;Performance: previously inlined to alleviate a minor hotspot;
;;inlining actually hurt slightly, so this remains a plain fn.
(defn get-state
  "Maps a policy position to a unit state.  :abrupt-withdraw and
   :recovery pass through unchanged; otherwise we consult the unit's
   policy, coercing numeric states to :dwelling."
  [unit position]
  (case position
    :abrupt-withdraw :abrupt-withdraw
    :recovery        :recovery
    (let [s (protocols/get-state (val-at unit :policy) position)]
      (if (number? s) :dwelling s)))) ;;weird...

;;TOM Hack 24 July 2012 -> to facilitate implicit recovery.  With an
;;explicit recovery policy we defer to the unit's policy for wait
;;time; with implicit recovery we use a global parameter for all
;;units.  Units whose policies lack an explicit recovered state get
;;the equivalent of a fake state with 0 wait time so that recovery
;;processing can still occur.

;;Memoized to alleviate a hotspot (marginal gains).  NOTE: naive
;;memoization broke composite policies; we must memoize on the finer
;;criterion of the *active* atomic policy name, hence memo2-policy.
(def get-next-position
  (memo2-policy
   (fn get-next-position [policy position]
     (case position
       :recovery  :recovered
       :recovered :re-entry
       (if-let [res (protocols/next-position policy position)]
         res
         (throw (Exception.
                 (str [:dont-know-following-position position
                       :in (:name policy)]))))))))

;;We're getting too far ahead of ourselves during policy change
;;calcs, jumping the position we're "in" for max/nearmax policies.
;;Patched to allow specified recovery times.
(defn policy-wait-time
  "Computes how long a unit should wait per policy.  :recovery uses
   recovery-time (default 0); :recovered waits 0 (a weak default -
   either fix the policies or wrap the behavior later).  Otherwise the
   wait is the transfer time between the next two positions, less any
   already-elapsed time (deltat beyond the statedata's remaining
   time).  Throws when the transfer is undefined in the policy."
  ([policy statedata position deltat recovery-time]
   (cond (identical? position :recovery)  recovery-time
         (identical? position :recovered) 0
         :else
         (let [frompos (get-next-position policy position)
               topos   (get-next-position policy frompos)]
           (if-let [t (protocols/transfer-time policy frompos topos)]
             (- t (- deltat (fsm/remaining statedata)))
             ;;if it's not defined in policy...instant?
             (throw (Exception.
                     (str [:undefined-transfer :from frompos :to topos
                           :in [(protocols/policy-name policy)
                                (protocols/atomic-name policy)]])))))))
  ([policy statedata position deltat]
   (policy-wait-time policy statedata position deltat 0))
  ;;weak - duplicated logic sans statedata/deltat.  Ugh.
  ([policy position]
   (cond (identical? position :recovery)  0
         (identical? position :recovered) 0
         :else
         (let [frompos (get-next-position policy position)
               topos   (get-next-position policy frompos)]
           (if-let [t (protocols/transfer-time policy frompos topos)]
             t
             (throw (Exception.
                     (str [:undefined-transfer :from frompos :to topos
                           :in [(protocols/policy-name policy)
                                (protocols/atomic-name policy)]]))))))))

;;Aux function to help with policy transfers.
(defn immediate-policy-wait-time
  "Transfer time from frompos to its successor position under policy."
  [policy frompos]
  (protocols/transfer-time policy frompos (get-next-position policy frompos)))

;;Pulled out to address concerns in get-wait-time.  Computes the wait
;;time - i.e. transfer time - between frompos and topos relative to a
;;unit's policy and statedata.
(defn immediate-wait-time
  "Wait time between frompos and topos per the unit's policy, adjusted
   for time already elapsed beyond the statedata's remaining time.
   deltat may be nil, in which case the remaining time is used (i.e.
   the ctx may override us)."
  ([unit frompos topos deltat statedata]
   (let [wt        (protocols/transfer-time (:policy unit) frompos topos)
         remaining (fsm/remaining statedata)
         deltat    (or deltat remaining)]
     (- wt (- deltat remaining))))
  ([unit frompos {:keys [deltat statedata] :as benv}]
   (immediate-wait-time unit frompos
                        (get-next-position (:policy unit) frompos)
                        deltat
                        statedata)))

;;Could be a cleaner way to unpack our data, but this is it for now.
;;Note: this leans on policy-wait-time, which targets a future,
;;planned wait - it fails when we want the wait time *from* a current
;;policy position, e.g. during a policy change.
(defn get-wait-time
  ;;WARNING: the (removed) 4-arity version defined an inconsistency:
  ;;with explicit from/to positions, the wait was computed starting at
  ;;frompos, while these arities use policy-wait-time, i.e. the
  ;;successor wait of the current position (how long we'll wait in the
  ;;*next* position).  Current usage - the 3-arity form - appears
  ;;correct, but this bit the initial policy-change implementation.
  ([unit position {:keys [deltat statedata ctx] :as benv}]
   ;;uses position after current...
   (policy-wait-time (:policy unit)
                     statedata
                     position
                     (or deltat 0)
                     (or (:default-recovery unit) 0)))
  ([position {:keys [entity] :as benv}]
   (get-wait-time @entity position benv))
  ([{:keys [wait-time] :as benv}]
   wait-time))

;;Basic API
;;=========
;;The rest of the simulation still relies on our pre-existing API -
;;namely "change-state" and "update".  change-state already exists in
;;marathon.sim.unit/change-state; here we merely provide an interface
;;to the unit's behavior for it.
;;Also note that change-state is only called (currently) from
;;marathon.sim.demand (for abrupt withdraws) and marathon.sim.supply
;;(for deployments).

;;might ditch these....
(declare change-state-beh update-state-beh update-state
         roll-forward-beh lite-update-state-beh
         check-overlap
         check-deployable
         check-deployable-state
         finish-cycle
         spawning-beh
         ;; age-unit
         moving-beh
         process-messages-beh
         ;;re-entry behaviors
         abrupt-withdraw-beh
         re-entry-beh
         recovery-beh
         ;;policy change fwd declarations
         apply-policy-change
         defer-policy-change
         policy-change-state
         try-deferred-policy-change
         ;;auxillary behavior definitions.
         location-based-beh
         wait-based-beh)

;;API
;;===
;;These are the entry points called from the outside.  The legacy
;;implementation delegated to a hard-coded finite state machine that
;;interpreted rotational policy to infer state transitions.  The
;;general mechanism is to augment the simulation context.  We may
;;eventually want a load-context/unload-context pair that clears any
;;augmented contextual items, or to manage the simulation context
;;separately from the behavior context.  For now, managing the
;;simcontext along with the behavior context (treating the whole as a
;;huge blackboard) is the simplest thing to do.

;;__update-entity!__
;;update takes the context last and depends on change-state-beh, not
;;change-state (the higher-level api for changing things).  Note: this
;;is covered by step-entity!, but we include roll-forward-beh to stay
;;consistent.  We can wrap these up and pass a generic message for the
;;behavior to interpret; change-state becomes:
;;  load-entity
;;  add-message {:to-state to-state :deltat deltat :ctx ctx}

;;Move this out to marathon.ces.unit?
;;Auxillary function that helps us wrap updates to the unit.  Added a
;;check to prevent recording traversals, saving time and memory; does
;;not affect debugging.
(defn traverse-unit
  "Records (only when *debug*) a positional traversal at time t, and
   leaves the unit at position to."
  [u t from to]
  (-> (if marathon.ces.core/*debug*
        (u/add-traversal u t from to)
        u)
      (assoc :positionpolicy to)))

;;Kinda weak: formerly used to decide when not to update via the
;;global state (bypassing aging/advancing).  Special states diverted
;;the fsm update down a different path; with behavior trees we can
;;encode such transitions structurally instead of via direct method
;;calls to other state handlers.
(definline special-state? [s]
  `(#{:spawning :abrupt-withdraw :recovered :waiting #_:recovery} ~s))

(defn just-spawned?
  "Determines if the entity recently spawned, indicated by a default
   negative spawn time or a spawntime in the present."
  [{:keys [entity ctx] :as benv}]
  (identical? (:state @entity) :spawning))
;;former implementation sketch:
;;  (let [st (:spawntime @entity)]
;;    (or (neg? st)
;;        (== st (core/get-time @ctx))))

;;These accessors help ensure we're not getting stuck in invalid
;;transitions, or spawning with funky null errors.
(defn position->state
  "State associated with positionpolicy; throws on unknown positions."
  [policy positionpolicy]
  (if-let [res (protocols/get-state policy positionpolicy)]
    res
    (throw (Exception. (str {:unknown-position positionpolicy
                             :policy (:name policy)})))))

;;We can make this processing more sophisticated...
(defn position->time
  "Cycle time of positionpolicy; throws when the position is not in
   the policy's cycle."
  [policy positionpolicy]
  (if-let [res (protocols/get-cycle-time policy positionpolicy)]
    res
    (throw (Exception. (str {:position-not-in-cycle positionpolicy
                             :policy (:name policy)})))))

(defn state-expired?
  "True when the remaining time in state is covered by deltat."
  [{:keys [deltat statedata] :as benv}]
  (let [r  (fsm/remaining statedata)
        dt (or deltat 0)]
    (<= r dt)))

;;Debatable utility... not sure where these are used.
(defn to-position?   [to   benv] (identical? (:next-position benv) to))
(defn from-position? [from benv] (identical? (:from-position benv) from))

;;Change information for instantaneous changes travels as a structure
;;(under :state-change) rather than willy-nilly loose keys, so state
;;updates can be communicated serially by adding/removing it.
(comment
  (defrecord changeinfo [newstate duration followingstate]))

;;Behaviors
;;=========

;;A primitive action masked as a behavior: stages move parameters into
;;the behavior environment, then runs moving-beh.
(defn move!
  ([location deltat destination wait-time]
   (->and [(->alter (fn [benv] (merge benv {:deltat        deltat
                                            :next-position destination
                                            :next-location location
                                            :wait-time     wait-time})))
           moving-beh]))
  ([deltat destination wait-time]
   (->and [(->alter (fn [benv] (merge benv {:deltat        deltat
                                            :next-position destination
                                            :wait-time     wait-time})))
           moving-beh]))
  ([destination wait-time]
   (->and [(->alter (fn [benv] (merge benv {:next-position destination
                                            :wait-time     wait-time})))
           moving-beh]))
  ([destination]
   (->and [(->alter (fn [benv] (merge benv {:next-position destination})))
           moving-beh])))

;;Many behaviors operate on a blackboard: the behavior environment
;;(spork.ai.behaviorcontext) is a map of lexical bindings used to
;;evaluate the consequences of a unit's behavior.  Behaviors may place
;;or remove items to communicate with behaviors "down the line" -
;;direct coupling via the tree, or indirect coupling via the
;;blackboard as simple event communication.  Some behaviors, like
;;update-after and roll-forward-beh, actually "consume" items such as
;;time, giving an ephemeral or transactional semantics.
(befn +nothing-state+ [entity deltat ctx]
  (->do (fn [_]
          (log! (str (:name @entity) " is doing nothing for " deltat)
                ctx))))

(defn effectively-infinite?
  "True for waits beyond a feasible horizon - +inf+, or anything past
   ~100 years (longer than a decent human lifetime)."
  [^long x]
  (or (== x +inf+)
      (>= x (* 365 100))))

(defn compute-proportion
  "Given a current cycletime, a cyclelength we're projecting from, and
   a cyclelength we're projecting to, computes the proportion of the
   normalized projected cycle length - the cycle proportion.  When
   dealing with effectively infinite policies, we avoid projecting
   onto finite policies with ~0 proportion for everything by computing
   the cycle proportion based on the remainder of the current
   cycletime relative to the target cyclelength.  Otherwise, we
   compute a simple coordinate based on the proportion of ct : clfrom."
  [ct clfrom clto]
  (let [finf (effectively-infinite? clfrom)
        tinf (effectively-infinite? clto)]
    (cond (or (and (not finf) (not tinf))
              (and finf tinf))
          ;;policy relative
          (core/float-trunc (/ ct clfrom) 6)

          tinf ;;relative to infinite policy...
          (core/float-trunc (/ ct clto) 6)

          :else (-> (rem ct clto) ;chop
                    (/ clto)      ;normalize
                    (core/float-trunc 6)))))

;;Updates an entity after a specified duration, relative to the
;;current simulation time + duration; the wait time lives under
;;:wait-time in the context.  Invariant: wait-times cannot be
;;negative - ensure-pos! throws on violation.
(befn update-after ^behaviorenv [entity wait-time tupdate ctx]
  (when wait-time
    (->alter
     #(if (effectively-infinite? wait-time)
        (do (debug [(:name @entity) :waiting :infinitely])
            ;;skip requesting update.
            (dissoc % :wait-time))
        (let [tfut (+ tupdate (ensure-pos! wait-time))
              e    (:name @entity)
              _    (debug [e :requesting-update :at tfut])]
          (swap! ctx (fn [ctx] (core/request-update tfut e :supply-update ctx)))
          ;;remove the wait-time from further consideration...
          (dissoc % :wait-time))))))

(require '[clojure.pprint :as pprint])

;;Our idiom: unpack expected vars from the context - typically the
;;simulation context plus supplementary keys.  Changing state is where
;;the transition from FSM to behavior tree comes in: are we in fact
;;changing the root of the behavior?
(befn change-state-beh!
  {:keys [entity ctx statedata state-change deltat]
   :or   {deltat 0}
   :as   benv}
  (when state-change
    (let [_ (echo [:state-change (:name @entity)])
          {:keys [newstate duration followingstate timeinstate]
           :or   {timeinstate 0}} state-change
          _ (when (not duration)
              (throw (Exception.
                      (str "nil value for duration in state change behavior!"))))
          followingstate (or followingstate newstate)
          ;;we change statedata here...
          wt (- duration timeinstate)
          _  (when (neg? wt)
               (throw (Exception. (str [:negative-wait-time]))))
          _  (debug [:changing-state state-change :wait-time wt])
          newdata (assoc (fsm/change-statedata statedata
                                               newstate
                                               duration
                                               followingstate
                                               marathon.ces.core/*debug*)
                         :timeinstate timeinstate)
          benv (merge (dissoc benv :state-change)
                      {:statedata   newdata
                       :duration    duration
                       :timeinstate timeinstate
                       :wait-time   wt})
          _ (reset! ctx (supply/log-state! (:tupdate benv)
                                           @entity
                                           (:state @entity)
                                           newstate
                                           @ctx))
          ;;update the entity state, currently redundant.
          _ (swap! entity #(assoc % :state newstate :statedata newdata))]
      (beval update-state-beh benv))))

(def change-state-beh
  (->seq [(echo :<change-state-beh>)
          change-state-beh!]))

;;Aux function to compute our state change during spawn follows.
;;Setting up initial conditions is a PITA, particularly
;;since it's possible that some of the input data is
;;intentionally empty or zeroed out.
;;This helps set up the bread-n-butter wait time as a function of the
;;spawning information (if any), the entity's policy, and the proposed
;;position for the entity.
;;NOTE(review): a superseded, reader-discarded (#_) draft of this
;;function - which also re-derived nil durations from policy - was
;;removed as dead code; consult version control for the old
;;derive?-based duration logic.
(defn compute-state-stats
  "Computes initial-condition statistics for a spawning entity at
   positionpolicy: the (possibly floored) :cycletime, the position's
   :position-time in the cycle, :timeinstate already spent, and
   :timeremaining.  Spawn-info may prescribe a :duration; an explicit
   0 duration is re-derived from policy-wait-time, while a nil
   duration leaves :timeremaining to be derived from the policy's
   transfer time.  Positions isolated from the cycle but leading to
   its start are treated as position-time 0; otherwise we throw."
  [entity cycletime policy positionpolicy]
  (let [duration (:duration (:spawn-info @entity))
        ;;duration may be nil or 0; derive? marks both cases so
        ;;position-time gets derived below.
        derive?  (or (not duration) (zero? duration))
        ;;only an explicit 0 duration is replaced by the policy wait;
        ;;a nil duration must flow through to the :timeremaining
        ;;fallback - do NOT collapse this condition into derive?.
        duration (if (and duration (zero? duration))
                   (let [pw (policy-wait-time policy positionpolicy)
                         _  (debug [:derived-duration (:name @entity)
                                    positionpolicy pw])]
                     pw) ;derive from policy.
                   duration)
        ;;If the position is not in the policy, we still need a way to
        ;;compute the duration; positions that lead back to the
        ;;cycle's start count as time 0.
        position-time (if derive? ;prescribed.
                        (try (position->time policy positionpolicy)
                             (catch Exception e
                               (if (protocols/leads-to-start? policy positionpolicy)
                                 0
                                 (throw (Exception.
                                         (str [positionpolicy :isolated-from-cycle]))))))
                        0)
        ;;floor cycletime at the position's cycle time.
        cycletime (if (< cycletime position-time) position-time cycletime)]
    {:cycletime     cycletime
     :position-time position-time
     ;;timeinstate is also subject to spawn-info....
     :timeinstate   (if duration
                      0
                      (non-neg! "timeinstate" (- cycletime position-time)))
     ;;timeremaining is subject to spawn-info; the transfer-time
     ;;fallback keeps us from bombing out on nil durations.
     :timeremaining (or duration
                        (protocols/transfer-time
                         policy
                         positionpolicy
                         (protocols/next-position policy positionpolicy)))}))

;;Lame detector to enable pre-fill; right now we just consult the
;;context's :PreFill parameter.
(defn prefill?
  "Truthy when pre-fill is enabled in the context's parameters."
  [ctx]
  (-> ctx core/get-parameters :PreFill))

;;prefill is modeled as the unit's available time impacting its bog
;;budget: the simplest scheme reduces the bog budget proportional to
;;the unit's available time (maybe decrement by its stop-deployable
;;time?).  Addendum: we must ensure overlap never occurs on day 1, and
;;that prefill deployments are optimally ordered in the past - evenly
;;spaced as if deployed historically, with no clumps or resonant
;;unfilled demand - while we account for overlap in this spacing.
;;We also want prefill to be deterministic and "ideal", akin to how
;;our cycletime spacing is an ideal representation: minimize the
;;effects of overlap and ensure no clumping, with the constraint that
;;no prefilling unit overlaps on day 1.  The projection scheme ends up
;;identical to offsetting unit cycletimes by -(overlap + 1).  The only
;;caveat: units near cycletime (start-deployable + overlap) could
;;become ineligible to deploy since the offset pushes their cycletime
;;before start-deployable - so we floor the cycletime at
;;start-deployable, allowing units close to start-deployable maximum
;;prefill bog budget if selected for prefill deployment (unlikely).
;;
;;Edit: policies where tf - ts > BogBudget yield negative numbers,
;;which *increases* BOG for prefill (first manifested with an
;;unexpected MaxUtilization and infinite policy cyclelength).  For
;;eligible deployers in [ts tf] we project the monotonically
;;decreasing (negative) prefills onto positives: take the abs value's
;;modulus relative to the bogbudget-derived bound so they land in
;;[0 bound], then subtract from the bound to restore decreasing order.
;;This yields a nice repeating spread determined by cycletime,
;;bogbudget, overlap, and the deployable window, and "should" work
;;with any policy.
(defn inverse-clamp
  "Identity for x > -1; otherwise projects negative x onto [0 bound]
   via (- bound (mod (- x) bound)), preserving decreasing order."
  [bound x]
  (if (> x -1)
    x
    (let [m (mod (- x) bound)]
      (- bound m))))

(defn compute-prefill
  "When cycletime falls in the policy's deployable window [ts tf),
   returns the prefill bog budget in [0 .. bogbudget - (overlap+1)];
   otherwise nil.  Throws ex-info when the clamped result escapes that
   range."
  [ent policy cycletime]
  (let [ts        (protocols/start-deployable policy)
        tf        (protocols/stop-deployable policy)
        bogbudget (protocols/max-bog policy)
        ;;addresses infinite cycle stuff; incorporates expected-dwell
        ;;(assigned to max-dwell).
        {:keys [max-bog max-dwell cycle-length max-mob]} (u/cycle-stats policy)
        tf (min tf max-dwell cycle-length)]
    (when (and (>= cycletime ts) ;;deployable
               (< cycletime tf))
      (let [overlap     (inc (protocols/overlap policy))
            ctprojected (max (- cycletime overlap) ts)
            res         (long (- bogbudget (- tf ctprojected)))
            bound       (- bogbudget overlap)
            clamped     (inverse-clamp bound res)]
        (or (and (>= clamped 0) (<= clamped bound) clamped)
            (throw (ex-info "prefill not in [0 .. bogbudget - (overlap + 1]"
                            {:prefill          clamped
                             :policy-name      (protocols/policy-name policy)
                             :start-deployable ts
                             :stop-deployable  tf
                             :overlap+1        overlap})))))))

;;if we detect a prefill condition, we reduce the unit's bog budget
;;accordingly to space out deployments.
(defn set-prefill
  "When prefill is enabled and applies, associates the computed
   :prefill onto ent; otherwise returns ent untouched."
  [ent policy cycletime ctx]
  (if-let [pf (and (prefill? ctx)
                   (compute-prefill ent policy cycletime))]
    (assoc ent :prefill pf)
    ent))

;;Our default spawning behavior uses cycletime to derive initial
;;conditions.  There will be times we alter how a unit spawns
;;(initializes itself in the simulation); it'd be nice to break this
;;out at some point.  We narrowly assume every unit exists somewhere
;;on a cycle at t=0, rather than setting them in arbitrary deployments
;;to begin with.
;;This is limiting, we should be able to define
;;all kinds of initial conditions to perform upon spawn (like set
;;location, cycletime, etc.)  For now, we just replicate the
;;cycletime-derived automated initial conditions logic.

;;Given a cycletime, where should we be according to policy?
;;Behavior to control how a unit acts when it spawns.
;;We're trying to account for the unit's initial state...
;;We move from spawning to the initial location.
;;We account for having been at the initial location for
;;timeinstate days (currently tied to cycletime - timetoposition).
;;So, what we really want to do is update the unit initially, possibly
;;with a negative time, and advance it forward to time 0 via the
;;deltat being the timeinstate.

;;Initializes a freshly-spawned unit: derives its position/state/cycle
;;statistics from its policy and cycletime, commits the initialized unit
;;into the entity atom, and queues up a state-change + move from the
;;"Spawning" pseudo-location to its initial position.
(befn spawning-beh ^behaviorenv {:keys [to-position cycletime tupdate statedata entity ctx]
                                 :as benv}
      (when (spawning? statedata)
        (let [ent @entity ;;we're now tracking default recovery in our context.
              {:keys [positionpolicy policy]} ent
              {:keys [curstate prevstate nextstate timeinstate
                      timeinstateprior duration durationprior
                      statestart statehistory]} statedata
              cycletime (or cycletime (:cycletime ent) 0)
              ;;if no position was supplied (argument or component), derive it
              ;;from cycletime via the unit's policy.
              topos     (if (not (or to-position positionpolicy))
                          (protocols/get-position (u/get-policy ent) cycletime)
                          positionpolicy)
              nextstate (position->state policy positionpolicy)
              {:keys [timeinstate timeremaining cycletime position-time] :as stats}
                (compute-state-stats entity cycletime policy positionpolicy)
              _ (debug [:unit (:name ent) stats])
              spawned-unit (-> ent
                               (assoc :cycletime cycletime
                                      :default-recovery (core/default-recovery @ctx))
                               (u/initCycles tupdate)
                               (u/add-dwell cycletime)
                               (set-prefill policy cycletime @ctx) ;;added for optional prefill to space out deps.
                               (assoc :last-update tupdate)
                               (dissoc :spawn-info) ;eliminate spawning data.
                               ) ;;may not want to do this..
              _ (reset! entity spawned-unit)
              state-change {:newstate       nextstate
                            :duration       timeremaining
                            :followingstate nil
                            :timeinstate    timeinstate}
              _ (debug [:nextstate nextstate :state-change state-change
                        :current-state (:state ent)])]
          (->> (assoc benv :state-change state-change
                      :location-change {:from-location "Spawning"
                                        :to-location   (or (:location (:spawn-info ent)) topos)}
                      :next-position topos ;queue up a move...
                      )
               (log! (core/msg "Spawning unit "
                               (select-keys (u/summary spawned-unit)
                                            [:name :positionstate :positionpolicy :cycletime])))
               (beval (->seq [(echo :change-state)
                              change-state-beh
                              #_(fn [benv] (do (reset! ctx (supply/log-move! tupdate :spawning (:positionpolicy @entity) @entity @ctx)) (success benv)))]))))))

;;While we're rolling, we want to suspend message processing.
;;We can do this by, at the outer level, dissocing the messages...
;;or, associng a directive to disable message processing...

;;we want to update the unit to its current point in time.  Basically,
;;we are folding over the behavior tree, updating along the way by
;;modifying the context.  One of the bits of context we're modifying
;;is the current deltat; assumably, some behaviors are predicated on
;;having a positive deltat, others are instantaneous and thus expect
;;deltat = 0 in the context.  Note, this is predicated on the
;;assumption that we can eventually pass time in some behavior....

;;Advances the unit through (possibly multiple) state transitions until
;;it is current with the simulation clock, chunking deltat by the time
;;remaining in each state; message processing is deferred until the
;;final (smallest) update.
(befn roll-forward-beh {:keys [entity deltat statedata] :as benv}
      (do (debug [:<<<<<<<<begin-roll-forward (:name @entity)
                  :last-update (:last-update @entity)])
          (cond (spawning? statedata)
                  (->seq [spawning-beh
                          roll-forward-beh])
                (pos? deltat)
                  (loop [dt   deltat
                         benv benv]
                    (let [sd       (:statedata benv)
                          timeleft (fsm/remaining sd)
                          _ (debug [:sd sd])
                          _ (debug [:rolling :dt dt :remaining timeleft])]
                      (if-y (if (<= dt timeleft)
                              (do (debug [:dt<=timeleft :updating-for dt])
                                  ;;this is intended to be the last update...
                                  ;;as if we're send the unit an update message
                                  ;;for the last amount of time...
                                  (beval (->seq [update-state-beh
                                                 process-messages-beh]) ;we suspend message processing until we're current.
                                         (assoc benv :deltat dt)))
                              (let [residual (max (- dt timeleft) 0)
                                    res      (beval update-state-beh
                                                    (assoc benv :deltat timeleft))]
                                (if (success? res)
                                  (recur residual ;advance time be decreasing delta
                                         (val! res))
                                  res)))
                            nil)))
                :else (->seq [update-state-beh
                              process-messages-beh]))))

;;So, at the high level, we have a simple behavior that checks to see
;;if it can move, finds where to move to, starts the process of
;;moving (maybe instantaneous), and waits...

;;We should consider move if our time in state has expired, or
;;if we have a next-location planned.
;;Succeeds when a move is warranted: an explicit next-position,
;;expired time-in-state, or a spawning unit.
(befn should-move? ^behaviorenv {:keys [next-position statedata] :as benv}
      (do (debug [:should? {:next-position next-position
                            :remaining (fsm/remaining statedata)
                            :spawning? (spawning? statedata)
                            :wait-time (:wait-time benv)}])
          (when (or next-position
                    (zero? (fsm/remaining statedata)) ;;time is up...
                    (spawning? statedata))
            (success benv))))

;;States/positions for which a unit's position name doubles as its
;;location name (both legacy string and keyword spellings).
(def locstates #{"Dwelling" "DeMobilizing" "Recovering"
                 :dwelling :demobilizing :recovering :recovery})

;;True when the (possibly compound/set-valued) state implies the
;;position is also the unit's location.
(defn position=location? [newstate]
  (if (not (set? newstate))
    (locstates newstate)
    (gen/some-member newstate locstates)))

;;memoize this...
(alter-var-root #'position=location? gen/memo-1)

;;after updating the unit bound to :entity in our context,
;;we commit it into the supplystore.  This is probably
;;slow....we may want to define a mutable version,
;;or detect if mutation is allowed for a faster update
;;path.  For instance, on first encountering the unit,
;;we establish a mutable cell to its location and use that
;;during the update process.

;;Given that we have the context for a move in place,
;;we want to move as directed by the context.  If there
;;is a wait time associated with the place we're moving
;;to, we will add the wait-time to the context.  That way,
;;downstream behaviors can pick up on the wait-time, and
;;apply it.
;;Note: there's a potential problem where our assumptions about
;;deployability may be violated: If a policy change occurs, and
;;the old policy position ends up being the new policy position,
;;we bypass the position-change behavior to save time.  If the
;;state-change happens, we still do it, but we miss - by virtue
;;of assuming position changes -> deployable changes -
;;the possibility that while the position may nominally
;;be the same between two policies, the state is not...
;;Case in point: ReqAnalysis_MaxUtilization_FullSurge_AC ->
;;TAA19-23_AC_1:2; for at least one case, we have a transition
;;from #{:deployable :c2 :dwelling} to #{:c2 :dwelling},
;;while the position is still Ready...the fine difference is
;;that the preceding policy had [Reset :deployable] transition
;;to Ready, where the new policy is not deployable until
;;later in cycle.  We end up not updating the deployability
;;of the unit, and it gets selected for a fill that - upon
;;deployment checks - is illegal under the new policy.
;;Solution: strengthen our definition of "no position change"
;;to include no state change....Positions are equal iff
;;they have the same state...the presence (or absence) of
;;:deployable is the key currently...

;;Derives the state-change, position-change, and location-change
;;implied by moving to :next-position, committing the traversal into
;;the entity atom and binding the derived changes into the behavior
;;environment for downstream behaviors.  When the source and target
;;positions coincide, no move occurs (but a pending state-change still
;;triggers a deployable-state check - see note above).
(befn move->statechange ^behaviorenv {:keys [entity next-position location-change
                                             tupdate statedata state-change ctx]
                                      :as benv}
      (when-let [nextpos next-position] ;we must have a position computed, else we fail.
        (let [t               tupdate
              u               @entity
              frompos         (get u :positionpolicy) ;;look up where we're coming from.
              wt              (or (:wait-time benv) (get-wait-time u nextpos benv)) ;;how long will we be waiting?
              location-based? (:location-behavior u)]
          (if (= frompos nextpos) ;;if we're already there...
            (do (debug [:no-movement frompos nextpos {:wt wt :state-change state-change}])
                (if state-change
                  (->seq [(->alter #(assoc % :wait-time nil :next-position nil))
                          check-deployable-state])
                  (success (dissoc benv :next-position))) ;do nothing, no move has taken place.  No change in position.
                #_(success (if state-change
                             (assoc benv :wait-time nil :next-position nil)
                             (dissoc benv :next-position)))) ;do nothing, no move has taken place.  No change in position.
            (let [_        (debug [:moving frompos nextpos])
                  newstate (or (get-state u nextpos) nextpos) ;;need to account for prescribed moves.
                  ;;location-behavior merges the new state into the current
                  ;;compound state rather than replacing it.
                  newstate (if location-based?
                             (into (-> (-> statedata :curstate)
                                       #_(disj nextpos))
                                   newstate)
                             newstate)
                  _ (when (nil? newstate)
                      (throw (Exception. (str [:undefined-transition newstate u frompos nextpos wt]))))
                  state-change {:newstate       newstate
                                :duration       wt
                                :followingstate nil
                                :timeinstate    0}
                  _ (reset! entity (-> (if location-based?
                                         (dissoc u :location-behavior)
                                         u)
                                       (traverse-unit t frompos nextpos))) ;update the entity atom
                  ;;if we already have a location change set, then we should respect it.
                  from-loc (:locationname u)
                  to-loc   (if-let [newloc (:next-location benv)]
                             (do (debug [:preset-location newloc :From from-loc])
                                 newloc)
                             (if (position=location? newstate)
                               nextpos
                               from-loc))
                  ;_ (println [from-loc to-loc])
                  ]
              (bind!! ;update the context with information derived
                      ;from moving
               {:position-change {:from-position frompos ;record information
                                  :to-position   nextpos}
                :state-change    state-change
                :location-change (or location-change
                                     (when (not (identical? from-loc to-loc))
                                       {:from-location from-loc
                                        :to-location   to-loc}))
                :wait-time       nil
                :next-position   nil
                :next-location   nil}))))))

;;The keys a prescribed move may contribute to the behavior environment.
(def movekeys #{:position-change :state-change :location-change})

;;If a :prescribed-move is pending, splices its non-nil entries
;;(position/state/location changes) directly into the behavior
;;environment, clearing the prescription.
(befn prescribed-move->statechange {:keys [prescribed-move tupdate] :as benv}
      (when prescribed-move
        (success
         (reduce-kv (fn [acc k v]
                      (if v (assoc acc k v) acc))
                    (assoc benv :prescribed-move nil)
                    prescribed-move))))

;;True when entity e has a prescribed move scheduled for exactly tupdate.
(defn prescribed? [e tupdate]
  (when-let [pm (val-at @e :prescribed-move)]
    (== (val-at pm :t) tupdate)))

;;PERFORMANCE NOTE: <HOTSPOT> - eliding debug info here saves time...
;;This hooks us up with a next-position and a wait-time
;;going forward.
;;We also now allow prescribed moves to
;;be set, for things like location-specific policies..

;;Determines the unit's next move: either consumes a prescribed move
;;scheduled for the current time, or derives the next position and wait
;;time from the unit's policy, binding them into the environment.
(befn find-move ^behaviorenv {:keys [entity next-position wait-time tupdate] :as benv}
      (if (prescribed? entity tupdate) ;;we have a move set up..
        (let [pm (:prescribed-move @entity)
              _  (debug [:found-prescribed-move pm])]
          (do (swap! entity dissoc :prescribed-move)
              (bind!! {:prescribed-move pm})))
        ;;let's derive a move...
        (let [e          @entity
              currentpos (:positionpolicy e)
              ;_ (when (= currentpos :re-entry) (println (:tupdate benv)))
              p  (or next-position
                     (do (debug [:computing-position currentpos]) ;;performance 1
                         (get-next-position (:policy e) currentpos)))
              wt (if (and next-position wait-time)
                   wait-time
                   (do (debug [:computing-wait (:positionpolicy e)]) ;;performance 2
                       ;;WARNING: This may be using the following wait time...is that what we mean?
                       ;;Given the current position, it's determining how long to wait in the next position.
                       ;;I think we're good...should rename get-wait-time to something more appropriate.
                       ;;get-next-wait-time?
                       (get-wait-time @entity (:positionpolicy e) benv)))
              _ (debug [:found-move {:current-position currentpos
                                     :next-position    p
                                     :wait-time        wt}])]
          (bind!! {:next-position p
                   :wait-time     wt} ;;have a move scheduled...
                  ))))

;;We know how to wait.  If there is an established wait-time, we
;;request an update after the time has elapsed using update-after.
;;A zero wait is instantaneous and falls straight through to
;;update-state-beh without scheduling anything.
(befn wait ^behaviorenv {:keys [wait-time] :as benv}
      (when-let [wt wait-time] ;;if we have an established wait time...
        (do #_(debug [:sdb (:statedata benv) :sde (:statedata @(:entity benv))])
            (if (zero? wt)
              ;;skip the wait, instantaneous.  No need to request an
              ;;update.
              (do (debug [:instantly-updating])
                  update-state-beh)
              (do (debug [:waiting wt])
                  (update-after benv))))))

;;Note: start-cycle looks somewhat weak.  Can we fold this into
;;another behavior?
;;Units starting cycles will go through a series of procedures.
;;Possibly log this as an event?
;;Zeroes the unit's cycletime and stamps the reset date.
(befn start-cycle {:keys [entity deltat tupdate] :as benv}
      (do (swap! entity #(merge % {:cycletime     0
                                   :date-to-reset tupdate}))
          (success benv)))

;;legacy implemenation.  no longer using policystack.
;; (befn start-cycle {:keys [entity deltat tupdate] :as benv}
;;   (let [unit   @entity
;;         pstack (:policystack unit)]
;;     (do (swap! entity #(merge % {:cycletime 0
;;                                  :date-to-reset tupdate}))
;;         (if (pos? (count pstack))
;;           (bind!! {:policy-change {:next-policy (first pstack)}})
;;           (success benv)))))

;;We may not care about cycles....
;;Should be able to specify this in our collections logic, go faster...
;;Units ending cycles will record their last cycle locally.  We broadcast
;;the change...Maybe we should just queue this as a message instead..
;;Finalizes the unit's current cycle record at tupdate and fires a
;;:CycleCompleted event through the simulation context.
(befn end-cycle {:keys [entity ctx tupdate] :as benv}
      (let [cyc (assoc (:currentcycle @entity) :tfinal tupdate)
            _   (swap! entity (fn [unit]
                                (-> unit
                                    (assoc :currentcycle cyc)
                                    (u/recordcycle tupdate))))
            ;;notify interested parties of the event...
            _   (swap! ctx (fn [ctx]
                             (sim/trigger-event :CycleCompleted
                                                (:name @entity)
                                                :SupplyStore
                                                (str (:name @entity) " Completed A Cycle")
                                                nil ctx)))]
        (success benv)))

;;dunno, just making this up at the moment until I can find a
;;definition of new-cycle.  This might change since we have local
;;demand effects that can cause units to stop cycling.
;;Wow...just got burned on this..strings are no good for identity
;;checks....since some are interned and some ore instances.  wow....
;;True when moving from frompos to topos constitutes starting a new
;;policy cycle (i.e. arriving at the policy's start state, excluding
;;:recovered re-entry).
(defn new-cycle? [unit frompos topos]
  (and (not= frompos :recovered) ;;additional criteria to cover nonbog reentry.
       (= (protocols/start-state (:policy unit)) topos)))

;;We check to see if there was a position change, and if so, if that
;;change caused us to finish a policy cycle.  Note: this only applies
;;in cyclical policies.
;;Note: We want to preclude finishing cycles if we are applying a
;;policy change.  We handle that in another state.  This keeps us
;;from entering into a policy change that sends us to reset, and
;;automagically terminates the current cycle stats.  Consistent with M3.
;;When a position change completes a policy cycle (and we are not mid
;;policy-change or freshly spawned), close out the old cycle, start a
;;new one, and attempt any deferred policy change.
(befn finish-cycle ^behaviorenv {:keys [entity position-change changed-policy
                                        policy-change] :as benv}
      (when position-change
        (let [{:keys [from-position to-position]} position-change
              no-spawn? (not (just-spawned? benv))
              new-cyc?  (new-cycle? @entity from-position to-position)
              ;_ (println [:check-cycle no-spawn? new-cyc? (not policy-change) (:tupdate benv)])
              ]
          (when (and no-spawn? new-cyc?
                     (not changed-policy)) ;;If we changed-policy already, preclude...
            (do (debug [:finishing-cycle (:name @entity) from-position])
                (->seq [start-cycle
                        end-cycle
                        try-deferred-policy-change]))))))

;;Now that we have prescribed moves, the entities are going into
;;an overlapping state, but it's a state set..
;;True when x is the Overlapping position/state (protocol constant or
;;keyword form).
(defn overlapping? [x]
  (or (identical? x protocols/Overlapping)
      (identical? x :overlapping)))

;;this is really a behavior, modified from the old state.  called from
;;overlapping_state.  used to be called check-overlap.
;;Consumes a pending [locationname :to|:from] overlapping-position
;;marker, notifying the demand store that the unit is disengaging at
;;that demand (res is true when entering overlap, false when leaving).
(befn disengage {:keys [entity ctx overlapping-position tupdate] :as benv}
      (when-let [opvec overlapping-position]
        (let [[lname op] opvec
              _   (debug [:overlapping-prescribed op])
              _   (debug [:disengaging (:name @entity) (:locationname @entity)])
              res (identical? op :to)
              _   (swap! ctx ;;update the context...
                         #(d/disengage (core/get-demandstore %)
                                       (assoc @entity :last-update tupdate)
                                       lname % res #_true))]
          (success (assoc benv :overlapping-position nil)))))

;; (when overlap-detected
;;   (when (not (identical? res :none)) ;ugh?
;;     (do (debug [:disengaging (:name @entity) (:locationname @entity)])
;;         (swap! ctx ;;update the context...
;;                #(d/disengage (core/get-demandstore %)
;;                              @entity (:locationname @entity) % res))
;;         (success benv)))))))

;;used to be called check-overlap;
(def check-overlap disengage)

;;Note: This behavior ASSUMES position changes within the
;;same policy.  We can't have changed policies and assume
;;this works.  Need an invariant to cover that.
;;Performance: We have a mild hotspot when we eagerly update
;;deployability via supply/update-deploy-status.
;;Might be possible to
;;update deployability lazily, save a little bit.  We're typically
;;"not" deploying...
#_(befn check-deployable ^behaviorenv {:keys [entity position-change ctx] :as benv}
        (when position-change
          (let [{:keys [from-position to-position]} position-change
                u @entity
                p (:policy u)
                _ (debug [:checking-deployable-position :from from-position :to to-position])]
            (when (or (not= (protocols/deployable-at? p from-position)
                            (protocols/deployable-at? p to-position))
                      #_(unit/can-non-bog? u))
              (do (debug [:deployable-changed! from-position to-position])
                  (swap! ctx #(supply/update-deploy-status u nil nil %))
                  (success benv))))))

;;Pushes a supply/update-deploy-status for unit u into the simulation
;;context.  The 2-arity returns a behavior (via ->alter) that performs
;;the update as a side effect; the 1-arity pulls u and ctx out of a
;;behavior environment.
(defn update-deploy-status
  ([u ctx] (->alter (fn [benv]
                      (do (swap! ctx #(supply/update-deploy-status u nil nil %))
                          benv))))
  ([benv]  (update-deploy-status @(:entity benv) (:ctx benv))))

;;Behavior note:
;;When units change policy, they may come from (as in RA) a finite policy
;;with a larger bog budget than the target policy, and have bogged (longer
;;than the bog of the new policy), and end up in a position/state that
;;is identical to the old policy.  So, on the surface, we have no
;;state change; no position change, no indicator of deployability change,
;;yet the unit is not technically deployable, since it has no bogbudget.
;;M3 addressed this by adding an automatic update deployability
;;check at the end of policy change, regardless.
;;M3 also added an additional check, where bogbudget exists,
;;but the deployable time is less than the new policy's overlap,
;;causing a negative cycle time error to occur.  This second
;;conditions sends the unit to reset if the modified bogbudget
;;< newpolicy.overlap.
;;If a position change alters deployability under the current policy,
;;push a deploy-status update into the context.  When a policy change
;;just occurred, defer to check-deployable-state instead (see the
;;behavior note above - position equality does not imply state
;;equality across policies).
(befn check-deployable ^behaviorenv {:keys [entity position-change changed-policy ctx]
                                     :as benv}
      (when position-change
        (if-not changed-policy
          (let [{:keys [from-position to-position]} position-change
                u @entity
                p (:policy u)
                _ (debug [:checking-deployable-position :from from-position :to to-position])]
            (when (or (not= (protocols/deployable-at? p from-position)
                            (protocols/deployable-at? p to-position))
                      #_(unit/can-non-bog? u))
              (do (debug [:deployable-changed! from-position to-position])
                  (update-deploy-status u ctx)
                  #_(swap! ctx #(supply/update-deploy-status u nil nil %))
                  #_(success benv))))
          check-deployable-state)))

;;Suggestion: To deal with the fact that deployability may change
;;without nominal position changes (but state changes), we should
;;add in the ability to check for a state-change fallback.
;;Fallback deployability check keyed off the pending state-change
;;rather than the position-change.
(befn check-deployable-state ^behaviorenv {:keys [entity state-change ctx] :as benv}
      (when state-change
        (let [u          @entity
              from-state (marathon.ces.unit/unit-state u)
              to-state   (:newstate state-change)
              _ (debug [:checking-deployable-state :from from-state :to to-state])]
          (when (not= (protocols/deployable-state? from-state)
                      (protocols/deployable-state? to-state))
            (do (debug [:deployable-changed! from-state to-state])
                (update-deploy-status u ctx)
                #_(swap! ctx #(supply/update-deploy-status u nil nil %))
                #_(success benv))))))

;; (befn check-deployable ^behaviorenv {:keys [entity position-change state-change ctx] :as benv}
;;   (when position-change
;;     (let [{:keys [from-position to-position]} position-change
;;           u @entity
;;           p (:policy u)
;;           _ (debug [:checking-deployable :from from-position :to to-position])]
;;       (when (or (not= (protocols/deployable-at? p from-position)
;;                       (protocols/deployable-at? p to-position))
;;                 #_(unit/can-non-bog? u))
;;         (do (debug [:deployable-changed! from-position to-position])
;;             (swap!
;;              ctx #(supply/update-deploy-status u nil nil %))
;;             (success benv))))))

;;If the pending position change enters or exits an Overlapping
;;position, records an [locationname :to|:from] marker in the
;;environment for check-overlap/disengage to consume.
(befn mark-overlap {:keys [entity position-change] :as benv}
      (when-let [change position-change]
        (let [{:keys [to-position from-position]} position-change
              ;;overlapping is not triggering because we only hae on definition of
              ;;overlapping per the keyword.  There's a string version that shows
              ;;up.
              res (cond (overlapping? to-position)   :to   ;true
                        (overlapping? from-position) :from ;false
                        :else :none)]
          (when (not (identical? res :none))
            (do (debug [:marking-overlap res])
                (success (assoc benv :overlapping-position
                                [(:locationname @entity) res])))))))

;;When there's a change in position, we want to do all these things.
;;Logs the position change, commits the new positionpolicy to the
;;entity, then runs the deployability check / cycle bookkeeping /
;;overlap marking sequence and clears the consumed change keys.
(befn change-position [entity position-change tupdate ctx]
      (when position-change
        (let [{:keys [from-position to-position]} position-change]
          (do (debug [:changed-position from-position to-position])
              (reset! ctx (supply/log-position! tupdate from-position to-position @entity @ctx)) ;ugly, fire off a move event.check-overlap
              (reset! entity (assoc @entity :positionpolicy to-position))
              (->seq [check-deployable ;;now being checked a bit more universally...
                      finish-cycle
                      mark-overlap
                      (->alter #(assoc % :position-change nil
                                       :next-position    nil))])))))

;;Performance: Mild hotspot.  Dissocing costs us here.  Change to assoc and
;;check.
;;if there's a location change queued, we see it in the env.
;;Applies a queued location change: pushes the new location onto the
;;unit and logs the move through the supply system.
(befn change-location {:keys [entity location-change tupdate ctx] :as benv}
      (when location-change
        (let [;#_{:keys [from-location to-location]} #_location-change ;minor improvement..
              from-location (val-at location-change :from-location) ;;OMG, typo on location...was loction!!!
              to-location   (val-at location-change :to-location)
              _ (debug [:location-change location-change])
              _ (reset! entity (u/push-location @entity to-location))
              _ (reset! ctx (supply/log-move! tupdate from-location to-location @entity nil @ctx))]
          ;;we need to trigger a location change on the unit...
          (success (assoc benv :location-change nil)))))

;;this is a weak predicate..but it should work for now.
;;An entity is treated as a demand iff it has a :source-first component.
(defn demand? [e] (not (nil? (:source-first e))))

;;we can do this like a scalpel..
;;All that matters is that the demand fill changes.
;;We ensure we remove the unit from the demand's
;;assignment, and then remove the unit from the demand,
;;and update the fill status of the demand.
;;If we leave a demand, we need to update its information
;;and change fill status.
;;is the movement causing a change in fill?
(befn change-fill {:keys [entity location-change ctx] :as benv}
      (when location-change
        (let [{:keys [from-location]} location-change]
          (when (demand? (store/get-entity @ctx from-location))
            (swap! ctx ;;update the context...
                   #(d/remove-unit-from-demand (core/get-demandstore %)
                                               @entity from-location %))
            (success benv)))))

;;with a wait-time and a next-position secured,
;;we can now move.  Movement may compute a statechange
;;in the process.
;;Composite behavior: derive the state/position/location changes
;;(prescribed or policy-driven), apply them in order, then wait.
(def execute-move
  (->seq [(echo :<move->statechange>)
          (->or [prescribed-move->statechange
                 move->statechange])
          (echo :<change-position>)
          change-position
          (echo :<change-fill>)
          change-fill ;;newly added...
          (echo :<change-location>)
          change-location
          change-state-beh
          (echo :<check-overlap>) ;moved before change-position
          check-overlap ;;Added, I think I missed this earlier...
          (echo :waiting)
          wait]))

;;Movement is pretty straightforward: find a place to go, determine
;;any changes necessary to "get" there, apply the changes, wait
;;at the location until a specified time.
(def moving-beh
  (->and [(echo :moving-beh)
          should-move? ;if there is a next position or our time in state expired.
          find-move    ;determine the wait-time, and possibly the next-position to wait at.
          (echo :execute-move)
          execute-move]))

;;PERFORMANCE NOTE: Minor HotSpot
;;Changed to == instead of zero? due to minor perf issues.
;;State handler for generic updates that occur regardless of the state.
;;These are specific to the unit data structure, not any particular state.
;;Should we keep a timestamp with the unit?  That way we can keep track
;;of how fresh it is.
;;Consumes the pending deltat: adds duration to the unit (unless it is
;;waiting), advances the fsm statedata, and zeroes :deltat in the env.
(befn age-unit ^behaviorenv {:keys [deltat statedata entity ctx] :as benv}
      (let [^long dt (or deltat 0)]
        (if (== dt 0)
          (success benv) ;done aging.
          (let [e @entity
                ;_ (println [:currentcycle (:currentcycle e)])
                _ (when-not (u/waiting? e)
                    (swap! entity #(u/add-duration % dt)))
                #_(debug [:skipping :add-duration (:name entity)])
                ;;update the entity atom
                _ (debug [:aging-unit deltat :cycletime (:cycletime @entity)])]
            (bind!! {:deltat 0 ;is this the sole consumer of time?
                     ;;NOTE(review): :last-update is bound to (inc deltat),
                     ;;not tupdate - looks suspicious; confirm intended.
                     :last-update (unchecked-inc deltat)
                     :statedata   (fsm/add-duration statedata dt)})))))

;;Dwelling just increments statistics..
(befn dwelling-beh ^behaviorenv {:keys [entity deltat] :as benv}
      (when (pos? deltat)
        (do (debug [:dwelling deltat])
            (swap! entity #(u/add-dwell % deltat))
            (success benv))))

;;Bogging just increments stastistics..
(befn bogging-beh ^behaviorenv {:keys [entity deltat] :as benv}
      (when (pos? deltat)
        (do (debug [:bogging deltat])
            (swap! entity #(u/add-bog % deltat))
            (success benv))))

;;On the final update of the modernization wait (remaining == deltat),
;;decrements the unit's :mod level by one and logs the change.
(befn modernizing-beh ^behaviorenv {:keys [entity statedata deltat] :as benv}
      (when (and (pos? deltat)
                 (= (spork.ai.machine/remaining statedata) deltat))
        (let [unit  @entity
              uname (:name unit)
              from  (:mod unit)
              to    (dec from)
              _     (swap! entity assoc :mod to)]
          (->> benv
               (log! (core/msg "Modernized unit " (:name unit) " from " from " to " to))
               success))))

;;This is a little weak; we're loosely hard coding
;;these behaviors.  It's not terrible though.
;;Dispatches on the unit's :state for the handful of special states
;;that bypass normal policy-driven behavior; fails for everything else.
(befn special-state {:keys [entity statedata] :as benv}
      (case (:state (deref!! entity) #_@entity)
        :spawning        spawning-beh
        :abrupt-withdraw (do (debug [:<special-state-abw>])
                             abrupt-withdraw-beh)
        :recovery        recovery-beh ;moving-beh ;;setup the move to recovered.
        :recovered       (->and [(echo :recovered-beh)
                                 (->seq [re-entry-beh
                                         ;;TODO: Optimize.  We can skip the re-entry,
                                         ;;go to policy-change directly.
                                         (->if (fn [{:keys [entity]}]
                                                 (zero? (u/get-bog @entity)))
                                               try-deferred-policy-change)])
                                 ;reset-beh
                                 ])
        ;; I think we need to implement these.
        ;; :modernizing modernizing-beh
        ;; :modernized  modernized-beh
        ;:waiting (success benv) ;up-to-date
        (fail benv)))

;;rest-beh is kind of what we want to do.  We'd like to
;;compute the unit's now position in its old policy.
;;What about pending policy changes?  [how'd marathon handle them in vba?]
;;I think we deferred until reset actually.

;;Follow-on state is an absorbing state, where the unit waits until a
;;changestate sends it elsewhere.  The only feasible state transfers are to
;;a reentry state, where the unit re-enters the arforgen pool in a
;;dynamically determined position, or the unit goes to another demand
;;compatible with the followon code.
(befn followon-beh {:keys [entity ctx] :as benv}
      (let [fc (u/followon-code @entity)
            _  (debug [:trying-followon (:name @entity) fc])]
        (when fc ;if the unit has a followon code
          (do ;register the unit as a possible followOn
              ;(println [(:name @entity) :added-followon :for [fc]])
              ;;Note: we have a problem here, since add-followon ends up getting our entity
              ;;out-of-sync with the entity reference stored in the context...
              ;;We add a bunch of components to the entity, like :followon, which may
              ;;end up getting ditched when we merge the entity atom in at the end
              ;;of the transaction...
              (swap! ctx #(supply/add-followon (core/get-supplystore %) @entity %))
              (reset! entity (-> (store/get-entity @ctx (:name @entity))
                                 (merge {:state :followon})))
              ;age-unit
              (debug [:waiting-in-followon-status fc])
              (->seq [(->alter (fn [b] (merge b {:wait-time +inf+
                                                 :next-position :followon ;(:positionpolicy @entity) ;:followon
                                                 :next-state :followon ;:abruptwithdraw
                                                 })))
                      moving-beh]) ;?
              ))))

;;way to get the unit back to reset.  We set up a move to the policy's start state,
;;and rip off the followon code.  Added a formal reset policy for
;;reset evaluation associated with policy changes.
;;Sends the unit back to its policy's start state, restoring a full
;;bog budget and clearing any followon code.  When a :reset-policy is
;;supplied in the env, the transfer time comes from that policy
;;instead of the immediate wait-time computation.
(befn reset-beh {:keys [entity reset-policy] :as benv}
      (let [pos (protocols/start-state (or reset-policy (:policy @entity)))
            wt  (if-not reset-policy
                  (immediate-wait-time @entity pos benv)
                  ;;supplied reset policy implies a move to reset with note
                  ;;added transfer time; time remaining in state is ignored.
                  (protocols/transfer-time reset-policy pos
                                           (get-next-position reset-policy pos)))
            _ (debug [:immediate-reset :from (:positionpolicy @entity)
                      :to pos :wait-time wt])
            newbogbudget (u/max-bog @entity)
            _ (swap! entity #(-> %
                                 (assoc :followoncode nil)
                                 (assoc-in [:currentcycle :bogbudget] newbogbudget)))]
        (beval moving-beh
               (assoc benv :next-position pos :wait-time wt))))

;; 'A state to handle reentry into the available pool....
;;Positions from which re-entry cannot be handled.
(def invalid? #{"Deployed" "Overlapping"})

;;Note:
;;Attempting to match m3 behavior exactly.  Units re-entering
;;with 0 bog and a pending policy change should go ahead
;;and change policies vs. going through re-entry in the
;;current cycle's policy.

;;Kind of like reset, except it's not guaranteed we go to reset.
;;Re-inserts the unit into its policy cycle at the position implied by
;;its current cycletime, computing the residual time-in-state, logging
;;a supply update, and kicking off the corresponding state change.
;;Throws if cycletime is negative or the unit is deployed/overlapping.
(befn re-entry-beh {:keys [entity ctx tupdate] :as benv}
      (let [unit        @entity
            p           (:policy unit)
            current-pos (:positionpolicy unit)
            ct          (:cycletime unit)
            _ (when (< ct 0)
                (throw (Exception. (str "Cycle Time should not be negative!"))))
            _ (when (invalid? current-pos)
                (throw (Exception. "Cannot handle during deployment or overlap")))
            is-deployable (protocols/deployable-by? p ct)
            positionA     current-pos
            positionB     (protocols/get-position p ct)
            _ (when (invalid? positionB)
                (throw (Exception. (str "Cannot handle during deployment or overlap: " positionB))))
            timeremaining (protocols/transfer-time p positionB
                                                   (protocols/next-position p positionB))
            timeinstate   (- ct (protocols/get-cycle-time p positionB)) ;;this ends up being 0.
            wt            (max (- timeremaining timeinstate) 0)
            _ (debug [:re-entry {:cycletime     ct
                                 :current-pos   current-pos
                                 :next-pos      positionB
                                 :timeinstate   timeinstate
                                 :timeremaining timeremaining
                                 :wt            wt}])
            state-change {:newstate       (get-state unit positionB)
                          :duration       timeremaining
                          :followingstate nil
                          :timeinstate    timeinstate}
            _ (reset! ctx (->> @ctx
                               ;; (supply/log-position! tupdate positionA positionB unit)
                               (supply/supply-update! {:name "SupplyStore"} unit
                                 (core/msg "Unit " (:name unit) " ReEntering at " positionB
                                           " with " (:bogbudget (:currentcycle unit)) " BOGBudget."))))
            _ (reset! entity (assoc unit :followoncode nil))]
        (beval change-state-beh
               (assoc benv :state-change state-change
                      ;; :position-change {:from-position positionA
                      ;;                   :to-position positionB}
                      :wait-time     wt
                      :next-position positionB))))

;;Function to handle the occurence of an early withdraw from a deployment.
;;when a demand deactivates, what happens to the unit?
;;The behavior will be guided by (the unit's) policy.
;;The default behavior is that a unit will check its policy to see if it CAN deploy.
;;If policy says it's okay, the unit will return to the point time of its current lifecycle.
;;We can parameterize the penalty it takes to get back into lifecycle from deployment.
;; A usual penalty is a move to "90 days of recovery"
;;Note, we can also specify if the unit is instantly available to local demands.
;;Recovery should now be an option by default, not specifically dictated by
;;policy.
;;1)Consult policy to determine if entry back into available / ready pool is feasible.
;;TOM note 18 july 2012 -> this is erroneous.  We were check overlap....that's not the definition of
;;a unit's capacity to re-enter the available pool.

;;uuuuuuuge hack....gotta get this out the door though.
;;Policy names whose units may never recover.
(def non-recoverable #{"SRMAC" "SRMRC" "SRMRC13"})

;;we no longer use the default +recovery-time+ shim,
;;now we consult policy or fallback to the :DefaultRecoveryTime
;;parameter.
;;Memoized lookup of a policy's recovery time: prefers an explicit
;;:recovery field (srm policies), falling back to the policy's
;;:recovery -> :recovered transfer time.
(def policy-recovery-time
  (memo1-policy
   (fn policy-rec [p]
     (or (:recovery p) ;;srm policies have a :recovery field.
         (marathon.data.protocols/transfer-time p :recovery :recovered)))))

;;Recovery time for a unit: policy-derived when available, otherwise
;;the unit's :default-recovery parameter.
(defn recovery-time
  ([unit p] (or (policy-recovery-time (-> p marathon.data.protocols/get-active-policy))
                (:default-recovery unit)))
  ([unit]   (recovery-time unit (:policy unit))))

;;We need to modify this to prevent any srm units from recovering.
;;Returns the recovery time when the unit may recover: its policy is
;;not in non-recoverable, it has bog budget remaining, and recovery
;;fits inside the expected cycle duration.  nil otherwise.
(defn can-recover? [unit]
  (let [cyc (:currentcycle unit)
        p   (:policy unit)
        rt  (recovery-time unit p)]
    (when (and (not (non-recoverable (protocols/policy-name p)))
               (pos? (:bogbudget cyc))
               (< (+ (:cycletime unit) rt)
                  (:duration-expected cyc)))
      rt)))

;;If the unit can recover, schedule a move to :recovered after the
;;recovery time; otherwise log the skipped recovery, zero the bog
;;budget, and send the unit to reset.
(befn recovery-beh {:keys [entity deltat ctx] :as benv}
      (let [unit @entity]
        (if-let [t (can-recover? unit)]
          (do (debug [:unit-can-recover (:name unit)])
              (move! :recovered t)) ;;recovery is now determined by policy or parameters.
          (let [cyc (:currentcycle unit)
                ct  (:cycletime unit)
                dur (:duration-expected cyc)]
            (swap! ctx #(sim/trigger-event :supplyUpdate
                                           (:name unit)
                                           (:name unit)
                                           (core/msg "Unit " (:name unit) " Skipping Recovery with "
                                                     (:bogbudget (:currentcycle unit)) " BOGBudget "
                                                     ct "/" dur " CycleTime ")
                                           nil %))
            (reset! entity (assoc-in unit [:currentcycle :bogbudget] 0))
            #_moving-beh
            reset-beh))))

;;On second thought, this is sound.  If the unit is already in overlap, it's in a terminal state..
;;For followon eligibility, it means another unit would immediately be overlapping this one anyway,
;;and the demand would not be considered filled....It does nothing to alleviate the demand pressure,
;;which is the intent of followon deployments.  Conversely, if overlap is 0, as in typical surge
;;periods, then units will always followon.  I take back my earlier assessment, this is accurate.
;;Note: We need to ensure this behavior fails if called from incompatible circumstances...
;;We can only call this on units that are actually deployed/bogging.
;;Handles early withdrawal from a deployment: accrues any pending bog,
;;then either sends the unit home (reset) when no feasible bog remains
;;beyond overlap, or tries followon and then recovery.
(befn abrupt-withdraw-beh {:keys [entity deltat] :as benv}
      (let [_ (when (pos? deltat)
                (swap! entity #(u/add-bog % deltat)))
            unit @entity
            ;1)
            bogremaining (- (:bogbudget (:currentcycle unit))
                            (protocols/overlap (:policy unit)) ;;note: this overlap assumption may not hold...
                            )
            _ (debug [:abw-beh {:deltat       deltat
                                :bogremaining bogremaining
                                :unt          (:name unit)
                                :fc           (:followoncode unit)
                                ;:unit (dissoc unit :policy)
                                }])]
        (if (not (pos? bogremaining))
          ;makes no sense for the unit to continue BOGGING, send it home.
          ; (->and [(echo [:abw->reset {:bogremaining bogremaining}])
          reset-beh
          ;])
          (->or ;unit has some feasible bogtime left, we can possibly have it followon or extend its bog...
                ;A follow-on is when a unit can immediately move to fill an unfilled demand from the same
                ;group of demands.  In otherwords, its able to locally fill in.
                ;This allows us to refer to forcelists as discrete chunks of data, group them together,
                ;and allow forces to flow from one to the next naturally.
                [followon-beh
                 recovery-beh]))))

;;Policy Changes
;;==============
;;Changing policies in legacy MARATHON involves something called the "policy stack"
;;and a subscriber model where unit's "subscribe" to a parent policy (typically
;;a composite policy defined over multiple simulation periods).  Changes in the
;;period cause changes in policy, which propogate to changes in subscribers'
;;policy.  Policy changes are typically limited to "non-deployed" states or
;;dwelling states.  That is, units may not permissively change the structure
;;of their policy while "in-use" by a demand.
;;In this case, the policy change is tracked by keeping the policy change
;;stack non-empty.  When the unit cycles through a state in which policy
;;changes can occur, it finds a pending change and converts to the new
;;atomic policy.

;;Positions from which a policy change may never occur.
(def infeasible-policy-change? #{"Deployed" "Overlapping" "DeMobilization"})

;;A unit may change policy when its normalized cycle proportion is <= 1
;;and it is not in an infeasible (terminal/deployed) position.
(defn can-change-policy? [cycle-proportion from-pos]
  (and (<= cycle-proportion 1)
       (not (infeasible-policy-change? from-pos))))

;; 'TOM Change 13 Jul 2011
;; 'Needed to implement the transition from one policy to another.  I chose to add a state to handle just this.
;; 'Visual analysis showed that PolicyChange looks a lot like Spawn, in that when a unit changes policies, it must change
;; 'a lot of its internal state to follow the new policy.  The result of the policy change is:
;; ' 1: The unit's cycle time is normalized, and then transformed into the relevant cycletime in the new policy.
;; ' 2: The unit's position "may" change to reflect its position in the new policy.
;; ' 3: The unit's location "may" change to reflect its location in the new policy.
;; 'TOM Change 20 April:
;; ' 4: The unit's BOGBudget "may" change to reflect either increased, or decreased, BOGBudget.
;; 'TOM Change 24 April:
;; ' 5: The unit's BOGBudget and MAXBOG may only change (increase) as the result of a policy change.
;; ' 6: Policy changes can NOT happen during terminal states:
;; '    [Deployed {Bogging, Overlapping}, Demobilizing]
;; 'If NOT deployed (bogging, overlapping) or in a terminal state (demobilizing), then entities can change policy immediately.
;; 'Otherwise, units change policy upon next reset (change is deferred).
;; 'Assumes that the new policy is already set for the unit (i.e. the unitdata is pointing toward the new policy).
;; 'Ideally, an outside agent will have modified the unit's policy, and subsequently told it to changestates to a policy-change
;; 'state.
;; 'Net effect is that policy changes to the same policy are idempotent.
;; 'State to control how a unit acts when it changes policy.
;; 'Note -> we extract the next policy from the unitdata's policy stack.
;; 'TOM note -> figure out how to change this for the deployed population...they have negative cycle
;; 'times.
;; 'Note -> this assumes we have compatible policies, or at least policies that have a cyclical
;; 'rotational lifecycle.
;; Function PolicyChange_State(unit As TimeStep_UnitData, deltat As Single) As TimeStep_UnitData
;;WIP Nov 2016
;;Behavior entry point for a pending :policy-change in the behavior env.
;;Waiting units always defer; otherwise we compute the unit's normalized
;;cycle proportion and either apply the change now or defer it to reset.
(befn policy-change-state ^behaviorenv {:keys [entity wait-time tupdate policy-change ctx] :as benv}
  (when policy-change ;;we have a change.
    (if (u/waiting? @entity)
      (do (debug [:deferring-policy-change-while-waiting])
          defer-policy-change) ;;units in waiting must defer policy changes!
      (let [next-policy (:next-policy policy-change)
            unit @entity
            tnow tupdate
            ;;a target policy without bog budget cannot be adopted.
            _ (assert (pos? (protocols/bog-budget next-policy)) "No bog budget!")
            current-policy (:policy unit)
            ;;'TOM Change 20 April -> We need to separate the unit's experienced
            ;;'cycle length vs the NOMINAL cycle duration, which exists in
            ;;'POLICY SPACE. In composite rotational policies, the NOMINAL cycle duration
            ;;'changes when Atomic policies change. Specificallly, we map the unit's position
            ;;'or coordinates in the current atomic policy to coordinates in the new policy.
            ;;'The unit's actual experienced lifecycle, i.e. its cycletime property, is not
            ;;'an accurate mapping between policies. The implicit assumption is that when
            ;;'mapping from one policy to another, if the policies have differing cycle lengths
            ;;'then there is a discount or exchange rate between the policies, such that time
            ;;'spent in one policy is NOT equal to time spent in another. However, our
            ;;'unit's cyclelength property is not subject to this, since it technically
            ;;'exists OUTSIDE of the policy view of time. The cyclelength property reflects the
            ;;'actual time a unit has spent, under ANY policy, until it has reset or started a
            ;;'new cycle.
            ;;'Prior to 19 April 2012, The unit's ability to deploy, via the CanDeploy method,
            ;;'depended on it's position in the current policy as a function of the cyclelength property.
            ;;'We should prefer the duration of the current cycle record, which is an accurate reflection
            ;;'of the relative time in the unit's current policy.
            ;;'TOM Change 20 April 2012
            cycletimeA (:cycletime unit)
            PositionA  (:positionpolicy unit)
            ;; _ (println [:name (:name unit) :cycletimeA cycletimeA
            ;;             :positionA PositionA (assoc benv :ctx nil)])
            _ (assert (not (neg? cycletimeA))
                      (str {:msg "Cycletime should not be negative!"
                            :cycletime cycletimeA
                            :unit (:name unit)
                            :t tupdate}))
            ;;We run into a problem here: when changing from an infinite policy to
            ;;a finite policy, despite units having a substational amount of cycletime - exceeding
            ;;the cyclelength of the new policy in fact - our proportion is computed as a function
            ;;of the time in the current cycle. It works out that any unit transitioning
            ;;will get shucked into a 0.0 truncated cycle proportion coordinate....
            ;;The net effect is that, regardless of how much supply we get, this artificially
            ;;"resets" our surplus supply by shoving them all back to the start of the next cycle..
            ;;typically reset and unavailable status. For certain inputs, we can never effectively
            ;;grow supply, which wrecks requirements analysis.
            ;;The solution is to detect the edge-case where we have an effectively infinite policy,
            ;;and change the proportion computation. A fair proposal is to take the current cycle
            ;;time, and quot it by the cycle length of the target policy. that becomes the input
            ;;for our cycleproportion calculation....We should still get a useful distribution
            ;;of cycletimes in the new policy without resorting to randomness, while crediting the
            ;;units that have a longer time in cycle...
            CycleProportionA #_(core/float-trunc (/ cycletimeA (protocols/cycle-length current-policy)) 6)
                             (compute-proportion cycletimeA
                                                 (protocols/cycle-length current-policy)
                                                 (protocols/cycle-length next-policy))
            ;;'TOM change 23 April 2012 -> No longer allow units that are De-mobilizing to enter into available pool.
            ]
        ;;apply now when feasible, otherwise leave the change deferred.
        (->or [(->and [(->pred (fn [_] (can-change-policy? CycleProportionA PositionA)))
                       (->alter #(assoc % :policy-change {:cycletime cycletimeA
                                                          :current-policy current-policy
                                                          :next-policy next-policy
                                                          :proportion CycleProportionA
                                                          :current-position PositionA}))
                       apply-policy-change])
               defer-policy-change])))))

;;policy-change specific reset behaviors, due to transforms
;;between policies:
;;if the unit's bog budget does not exceed the prescribed overlap,
;;we go to reset early.
(befn infeasible-bog-reset ^behaviorenv {:keys [entity ctx] :as benv}
  (->if (fn [_] (not (pos? (u/boggable-time @entity))))
        reset-beh))

;;Note: in retrospect, it looks like we can just use the unit/can-deploy?
;;predicate, which performs the same checks (and more!) that check-deployable
;;and check-deployable-state perform. This ends up being the standard
;;by which the unit is judged when selected for fill...so...
;;We just postpone deployable status updates until the end, and do a blanket
;;check using
;;Post-policy-change bookkeeping: reset if bog is infeasible, then refresh
;;the unit's deployable status.
(befn policy-change-deployability-check ^behaviorenv {:keys [entity ctx] :as benv}
  (->seq [infeasible-bog-reset
          update-deploy-status]))

;;Assuming we have a change, let's apply it!
;;How long will the unit have been in this state?
;; Since it's a policy change....do we zero it out?
;; Or do we assume that the unit has been in the state the exact amount of time required?
;;We assume that the unit has been in the state the exact amount of time required.
;;We also assume that the unit is not entering another cycle, merely extending or truncating.
;; Its current cycle is modified.
;; Does not get a cycle completion out of it.
;;#WIP Nov 2016
;;Policy change => Movement => [state-change location-change]
;;So, we can use policy-change to set the stage for movement, then pipeline the normal
;;movement behavior...
;;Applies a feasible policy change: maps the unit's cycle coordinate into the
;;new policy, rewrites the unit's policy/cycletime, emits a change event, and
;;sets up a movement + deployability check to finish the transition.
(befn apply-policy-change [ctx tupdate entity policy-change]
  (let [unit  @entity
        uname (:name unit)
        {:keys [cycletime current-policy next-policy proportion current-position]} policy-change
        cycletimeA   cycletime
        policynameA  (protocols/atomic-name current-policy) ;active atomic policy
        policynameB  (protocols/atomic-name next-policy) ;new atomic policy
        cyclelengthB (protocols/cycle-length next-policy)
        ;;project the unit's coordinate into the new policy's cycle space.
        cycletimeB   (if (> cyclelengthB +twenty-years+) ;;effectively infinite...
                       cycletimeA ;;use current cycletime, do NOT project.
                       (long (* proportion cyclelengthB))) ;coerce to a long cyclelength.
        _ (assert (>= cycletimeB 0) "Negative cycle times are not handled...")
        _ (assert (<= cycletimeB cyclelengthB) "Cyclelength is too long!")
        wasDeployable (protocols/deployable-by? (:policy unit) cycletimeA) ;;can maybe do this faster just checking state.
        isDeployable  (protocols/deployable-by? next-policy cycletimeB)
        positionA     current-position
        positionB     (if (u/deployed? unit) ;;REVIEW - Shouldn't matter, should already be non-deployed
                        (:positionpolicy unit) ;deployed units remain deployed.
                        (protocols/get-position next-policy cycletimeB))
        timeremaining (immediate-policy-wait-time next-policy positionB)
        timeinstate   (- cycletimeB (protocols/get-cycle-time next-policy positionB))
        oldstate      (protocols/get-state current-policy positionB)
        ;;commit the new policy + cycle coordinates to the entity.
        unit (reset! entity
                     (-> unit
                         ;;we change positionpolicy here....bad move?
                         (merge {;:positionpolicy positionB
                                 ;;policy-change supercedes old deferred policy changes.
                                 :deferred-policy-change nil
                                 :policy next-policy
                                 :cycletime cycletimeB})
                         (u/change-cycle tupdate)
                         (u/modify-cycle next-policy)))
        newduration (- timeremaining timeinstate)
        ;;added...
        newstate (protocols/get-state next-policy positionB)
        _ (debug [:preparing-apply-policy-change
                  {:cycletimeA cycletimeA :policynameA policynameA :positionA positionA
                   :policynameB policynameB :cycletimeB cycletimeB :positionB positionB
                   :timeremaining timeremaining :timeinstate timeinstate
                   :newduration newduration :oldstate oldstate :newstate newstate
                   }])
        ]
    ;;We have a move.
    ;;Setup the movement and let the behavior execute.
    ;(if (not= positionA positionB) ;;setup the move and use existing behavior to execute (vs. legacy method that folded stuff in here).
    (do (swap! ctx #(->> (assoc % :policy-change nil)
                         (core/trigger-event :UnitChangedPolicy uname policynameA
                           (core/msg "Unit " uname " changed policies: " policynameA ":" cycletimeA
                                     "->" policynameB ":" cycletimeB) nil)))
        (->and [(->alter (fn [benv] (assoc benv
                                      :state-change {:newstate newstate
                                                     :duration newduration
                                                     :timeinstate 0}
                                      :changed-policy true
                                      :policy-change nil
                                      ;;we add a formal reset policy
                                      ;;to allow reset-beh to accurately
                                      ;;compute reset wait time.
                                      :reset-policy next-policy)))
                ;;for some reason, move! was swallowing up our behavior
                ;;for specific units, and not following through withh
                ;;a policy-change-deployability check. This left us
                ;;with units that should have reset and gained new
                ;;bog budget not doing so, leading to a runtime
                ;;invalid deployer error. ->seq should force both
                ;;behaviors to evaluate.
                (->seq [(move! positionB newduration) ;;movement behavior
                        policy-change-deployability-check])
                ]))))

;;TODO: Add this?
;;'PI:NAME:<NAME>END_PI add 10 May 2016 . We have some units changing policies from a longer BOG budget
;;'to a shorter BOG budget.
;;'We are running into issues when a unit's new BOG budget is < Overlap
;;'and then this unit got deployed and ended up with a negative BOG budget throwing negativeErr
;;If .CurrentCycle.bogbudget <= .policy.overlap Then 'this matches our check in AbruptWithdraw_State
;; Set unit = Reset_State(unit, deltat)
;; .parent.UpdateDeployStatus unit
;;End If
;;This automatically gets checked during move!...
;; MarathonOpSupply.UpdateDeployStatus simstate.supplystore, unit, , , simstate.context
;; 'Adopt Policy B.
;; 'Policy A ->
;; ' Find relative CT = ct/CLengthA
;; 'Policy B ->
;; ' Find relative positionB = pos(RelativeCT * CLengthB)
;; 'Movingstate from PositionA to relative PositionB.
;; 'Update with delta0.
;; 'TOM Change 2 Sep -> moved this north so that we can use the policy stack as a flag in unit's
;; 'ChangeCycle logic. Check for sideeffects
;; .policyStack.Remove 1
;; SimLib.triggerEvent UnitChangedPolicy, .name, .policy.AtomicName, "Unit " & .name & " changed policies: " & _
;; policynameA & ":" & cycletimeA & "->" & policynameB & ":" & CycleTimeB, , simstate.context
;;SET UP A STATECHANGE
;; SimLib.triggerEvent supplyUpdate, .name, .name, "Policy Change Caused Supply Update for unit " & .name, , simstate.context
;; Set PolicyChange_State = ChangeState(unit, nextstate, 0, newduration)
;; 'NOTE -> I may need to consider changing location here.....
;;The unit's cycle cannot project onto another cycle. We need to defer policy change until reset.
;;leave the policy on the stack. Catch it during reset.
;;TOM change 2 Sep 2011 -> we modify the cyclerecord to reflect changes in expectations...
;;This is not a replacement...
;;WIP Nov 2016
;;Records a pending policy change on the entity (:deferred-policy-change),
;;announces it on the event stream, and clears :policy-change from the env.
(befn defer-policy-change {:keys [entity ctx tupdate policy-change] :as benv}
  (when policy-change
    (let [_ (debug [:deferring-policy-change])
          {:keys [next-policy]} policy-change
          unit  @entity
          uname (:name unit)
          _ (swap! ctx
                   #(core/trigger-event :AwaitingPolicyChange uname
                      (marathon.data.protocols/atomic-name (:policy unit))
                      (core/msg "Unit " uname " in position " (:positionpolicy unit)
                                " is waiting until reset to change policies")
                      nil %))
          ;;marked the deferred policy change.
          _ (swap! entity #(assoc % :deferred-policy-change
                                  (select-keys policy-change [:next-policy])))]
      (->alter (fn [benv] (assoc benv :policy-change nil))))))

;;Picks up a previously deferred change (typically at reset) and re-enters
;;policy-change-state with it.
(befn try-deferred-policy-change {:keys [entity ctx tupdate] :as benv}
  (when-let [pc (:deferred-policy-change @entity)]
    (let [_ (debug [:applying-deferred-policy-change])
          _ (swap! entity assoc :deferred-policy-change nil)]
      (->seq [(->alter (fn [benv] (assoc benv :policy-change pc)))
              policy-change-state]))))

;; SimLib.triggerEvent AwaitingPolicyChange, .name, .policy.AtomicName, "Unit " & _
;; .name & " in position " & .PositionPolicy & " is waiting until reset to change policies", , simstate.context
;; Set unit = RevertState(unit)
;; 'We updated the unit in the process
;; SimLib.triggerEvent supplyUpdate, .name, .name, "Policy Change Attempt Caused Supply Update for unit " & .name, , simstate.context

;;Basic Unit Behaviors (or "States....")
;;=====================================
;;entities have actions that can be taken in a state...
;;Maps a unit's state (keyword or policy-defined name) to the behavior that
;;implements it; consulted every update by do-current-state.
(def default-statemap
  {:reset                   reset-beh
   ; :global
   :abrupt-withdraw         abrupt-withdraw-beh
   :recovery                recovery-beh
   :followon                age-unit
   ; :recovered (echo :recovered-beh)
   ;:end-cycle
   ; :spawning spawning-beh
   :demobilizing            dwelling-beh
   "DeMobilizing"           dwelling-beh
   protocols/demobilization dwelling-beh
   :bogging                 bogging-beh
   protocols/Bogging        bogging-beh
   ;;Added for legacy compatibility...
   :non-bogging             dwelling-beh
   :recovering              (echo :recovering-beh)
   "Recovering"             (echo :recovering-beh)
   :dwelling                dwelling-beh
   protocols/Dwelling       dwelling-beh
   ;;Need to make sure we don't add bogg if we're already bogging...
   :overlapping             bogging-beh
   protocols/Overlapping    bogging-beh
   :waiting                 (echo :waiting-state)
   #_(->seq [(echo :waiting-state) defer-policy-change])
   :modernizing             modernizing-beh
   })

;;PERFORMANCE NOTE: HotSpot - used val-at macro to inline method calls.
;;lookup what effects or actions should be taken relative to
;;the current state we're in. This is kind of blending fsm
;;and behaviortree.
(befn do-current-state {:keys [entity statedata] :as benv}
  (let [;state (:state @entity)
        state (:state (deref!! entity))
        ;;slightly faster using keyword as function call.
        state-map (or (:statemap entity) default-statemap)]
    (if (set? state) ;entity has multiple effects...
      ;;MEGA-HACK:This a serious hack to prevent double-counting of bog when we have
      ;;state-sets. Alone, either overlapping or bogging confers collecting bog time,
      ;;and in legacy policies are mutually exclusive. However, for SRM policies,
      ;;we have the possibility of bogging/non-bogging, as well as being in an
      ;;overlap state. This leaves us with a conundrum relative to our default
      ;;legacy meanings of bog and overlap. What we can do is ensure that if
      ;;bogging is present, we just skip overlapping if we ever encounter a
      ;;state-state. This is practical, but somewhat brittle....probabtately
      ;;a better idea to encode the meaning of states better - like [:bogging :overlapping]
      (let [stats (r/filter identity
                            (r/map (fn [s] (val-at state-map s))
                                   (disj state :overlapping)))
            ]
        (->seq stats))
      (get state-map state))))

;;the entity will see if a message has been sent
;;externally, and then compare this with its current internal
;;knowledge of messages that are happening concurrently.
;;Drains due messages from the entity's internal priority queue into the
;;env's :current-messages; succeeds with existing messages otherwise.
(befn check-messages ^behaviorenv {:keys [entity current-messages ctx] :as benv}
  (if-let [old-msgs (fget (deref! entity) :messages)] ;we have messages
    (when-let [msgs (pq/chunk-peek! old-msgs)]
      (let [new-msgs (rconcat (r/map val msgs) current-messages)
            _ (b/swap!! entity
                        (fn [^clojure.lang.Associative m]
                          (.assoc m :messages
                                  (pq/chunk-pop! old-msgs msgs) )))]
        (bind!! {:current-messages new-msgs})))
    (when current-messages
      (success benv))))

;;this is a dumb static message handler.
;;It's a simple little interpreter that
;;dispatches based on the message information.
;;Should result in something that's beval compatible.
;;we can probably override this easily enough.
;;#Optimize: We're bottlnecking here, creating lots of
;;maps....
;;Where does this live?
;;From an OOP perspective, every actor has a mailbox and a message handler.
;;
;;so now we can handle changing state and friends.
;;we can define a response-map, ala compojure and friends.
;;type sig:: msg -> benv/Associative -> benv/Associative
;;this gets called a lot.
(defn message-handler
  "Interprets one inbound message against the behavior environment,
   dispatching on (:msg msg); unknown messages are forwarded to the
   simulation event stream rather than raising."
  [msg ^behaviorenv benv]
  (let [entity (.entity benv)
        current-messages (.current-messages benv)
        ctx (.ctx benv)]
    (do (ai/debug (str [(:name (deref! entity)) :handling msg]))
        (beval
         (case (:msg msg)
           :move
           (let [move-info (:data msg)
                 {:keys [wait-time next-location next-position deltat]
                  :or {wait-time 0 deltat 0}} move-info
                 _ (debug [:executing-move move-info msg (:positionpolicy @entity)])]
             (beval (move! next-location deltat next-position wait-time) benv))
           ;;allow the entity to invoke a state-change-behavior
           ;;We can always vary this by modifying the message-handler
           :change-state
           ;;generic update function. Temporally dependent.
           ;;we're already stepping the entity. Can we just invoke the change-state behavior?
           (let [state-change (:data msg)
                 _ (debug [:state-change-message state-change msg])]
             (beval change-state-beh
                    (assoc benv :state-change state-change
                           :next-position (or (:next-position state-change)
                                              (:newstate state-change)))))
           :change-policy
           ;;Note: this is allowing us to change policy bypassing our wait state...
           ;;We need to put a break in here to defer policy changes.
           ;;Policy-changes are handled by updating the unit, then
           ;;executing the change-policy behavior.
           ;;Note: we could tie in change-policy at a lower echelon....so we check for
           ;;policy changes after updates.
           (beval policy-change-state
                  (assoc benv :policy-change (:data msg)))
           :update
           (if (== (get (deref! entity) :last-update -1) (.tupdate benv))
             (success benv) ;entity is current
             (->and [(echo :update)
                     ;roll-forward-beh ;;See if we can replace this with update-state...
                     update-state-beh
                     ]))
           :spawn
           (->and [(echo :spawn)
                   (push! entity :state :spawning)
                   spawning-beh]
                  )
           ;;Allow the entity to apply location-based information to its movement, specifically
           ;;altering behavior due to demands.
           :location-based-move
           (beval location-based-beh
                  (assoc benv :location-based-info (:data msg)))
           ;;Like a location-based move, except with a simple wait time guarantee, with a
           ;;reversion to the original state upon completion of the wait.
           :wait-based-move
           (beval wait-based-beh
                  (assoc benv :wait-based-info (:data msg)))
           ;;allow the entity to change its behavior.
           :become (push! entity :behavior (:data msg))
           :do     (->do (:data msg))
           :echo   (->do (fn [_] (println (:data msg))))
           (do ;(println (str [:ignoring :unknown-message-type (:msg msg) :in msg]))
               (sim/trigger-event msg @ctx) ;toss it over the fence
               ;(throw (Exception. (str [:unknown-message-type (:msg msg) :in msg])))
               (success benv)
               ))
         benv))))

;;we'd probably like to encapsulate this in a component that can be seen as a "mini system"
;;basically, it'd be a simple record, or a function, that exposes a message-handling
;;interface (could even be a generic fn that eats packets). For now, we'll work
;;inside the behavior context. Note, the entity is a form of continuation....at
;;least the message-handling portion of it is.
;;message handling is currently baked into the behavior.
;;We should parameterize it.
;;handle the current batch of messages that are pending for the
;;entity. We currently define a default behavior.
;;Folds message-handler over :current-messages, threading the behavior env
;;through each handled message; clears the batch up front.
(befn handle-messages ^behaviorenv {:keys [entity current-messages ctx] :as benv}
  (when current-messages
    (reduce (fn [acc msg]
              (message-handler msg (val! acc)))
            (success (assoc benv :current-messages nil))
            current-messages)))

;;The global sequence of behaviors that we'll hit every update.
;;These are effectively shared behaviors across most updates.
(def global-state
  (->seq [(echo :aging)
          age-unit
          (echo :aged)
          moving-beh]))

;;Marks the entity's :last-update as the current tupdate and echoes a trace.
(befn up-to-date {:keys [entity tupdate] :as benv}
  (let [e (reset! entity (assoc @entity :last-update tupdate))]
    (echo [:up-to-date (:name e) :cycletime (:cycletime e)
           :last-update (:last-update e)
           :tupdate tupdate
           :positionpolicy (:positionpolicy e)])))

(def process-messages-beh
  (->or [(->and [(echo :check-messages)
                 check-messages
                 handle-messages])
         (echo :no-messages)]))

;;The root behavior for updating the entity.
(def update-state-beh
  (->seq [(echo :<update-state-beh>)
          ; process-messages-beh
          (->or [special-state
                 (->seq [(echo :<do-current-state>)
                         do-current-state
                         (echo :global-state)
                         (fn [ctx] (if-y global-state (fail ctx)))])
                 up-to-date])]))

;;if we have a message, and the message indicates
;;a time delta, we should wait the amount of time
;;the delta indicates. Waiting induces a change in the
;;remaining wait time, as well as a chang
(befn wait-in-state ^behaviorenv [entity current-message ctx]
  (let [;_ (println [:wait-in-state entity msg])
        msg   current-message
        t     (fget msg :t)
        delta (- t (fget (deref! entity) :t))]
    (when-let [duration (fget (deref! entity) :wait-time)]
      (if (<= delta duration) ;time remains or is zero.
        ;(println [:entity-waited duration :remaining (- duration delta)])
        (merge!! entity {:wait-time (- duration delta)
                         :tupdate t}) ;;update the time.
        (do ;can't wait out entire time in this state.
            (merge!! entity {:wait-time 0
                             :tupdate (- t duration)}) ;;still not up-to-date
            ;;have we handled the message?
            ;;what if time remains? this is akin to roll-over behavior.
            ;;we'll register that time is left over. We can determine what
            ;;to do in the next evaluation. For now, we defer it.
            (bind!! {:current-message (.assoc ^clojure.lang.Associative msg :delta (- delta duration))}
                    )
            )))))

;;True when the entity's :tupdate matches the context's :tupdate.
(defn up-to-date? [e ctx] (== (:tupdate e) (:tupdate ctx)))

;;This will become an API call...
;;instead of associng, we can invoke the protocol.
;;Schedules a self-:update message wait-time ticks in the future.
(befn schedule-update ^behaviorenv {:keys [entity ctx new-messages] :as benv}
  (let [st       (deref! entity)
        nm       (:name st)
        duration (:wait-time st)
        tnow     (:tupdate (deref! ctx))
        tfut     (+ tnow duration)
        _ (debug 4 [:entity nm :scheduled :update tfut])
        ;_ (when new-messages (println [:existing :new-messages new-messages]))
        ]
    (success (push-message- benv nm nm (->msg nm nm tfut :update)))))

;;wire in functionality here for a unit to invoke its own
;;deployment order...
;;From here on, the system will append a deployment order to
;;the unit, and send the unit a message to update.
;;The unit will handle the message by appending a
;;deployment order to its state and invoking an update.
;;This way, we handle messages first, which preps the
;;behavior environment to respond to stimulii (like
;;the presence of a deploy order)
(defn deploy-to [o benv] ;;stub
  (success benv))

(befn try-deploy ^behaviorenv {:keys [entity] :as benv}
  (when-let [o (:deploy-order @entity)]
    (deploy-to o)))

;;This is kind of weak, but I don't have a better solution at the moment...
;;Side effect at load time: installs roll-forward-beh as the default behavior.
(do (println [:setting-defaults])
    (reset! base/default-behavior roll-forward-beh))

;;aux function to help us add a breadcrumb for
;;the location-based behavior updates.
;;Some locations have overlap. If so, we look for this
;;to see if the move is prescribed. We store this as a
;;component in the entity.
(defn prescribe-overlap!
  "When overlap is positive, stores a :prescribed-move component on the
   env's entity describing the overlap state-change at time t; always
   returns benv."
  [benv t overlap state locname]
  (if (and overlap (pos? overlap))
    (let [entity (:entity benv)]
      (do (debug [:prescribing-overlap (:name @entity) overlap t])
          (swap! entity assoc :prescribed-move
                 {:state-change {:newstate state
                                 :duration overlap
                                 :followingstate nil
                                 :timeinstate 0}
                  :overlapping-position [locname :to] ;true
                  :t t}
                 )
          benv))
    benv))

;;SRM bs...
;;SRM takes a different view of unit behavior.
;;Most importantly, for AC units (and deploying RC units),
;;the behavior looks at demand to determine position
;;changes, state-changes, duration, etc., rather than look
;;at the policy.
;;When not in a mission state, the default behavior does
;;provide a cyclical routing, even for AC (At the moment,
;;but that crap will probably change like everything else).
;;We should be able to inject a supply of units that
;;follow the baseline SRM policy, with no demand, and
;;Just have them spawn and run through policy changes.
;;The SRM behavior only really varies upon deployment...
;;so we can create special SRM-specific behaviors that
;;read information about the demand and use it
;;to schedule changes. For now, there is no
;;notion of recovery...
;;These differences mean we need to handle
;;local-demand effects if deployed....
;;For any movement, we need to check to see if
;;there are effects or guidance associated with the
;;place we're moving to. Some places tell us what
;;to do, outside of our policy.
;;The only way we can get here is if there is a location-policy
;;in the environment. How does it get there?
;;TODO_Have the location push behaviors onto some kind of
;;stack. This could be very powerful (and common), in that
;;the behavior would evaluate its top-most behavior first
;;(i.e. do-current-state), and pop the behavior once
;;the time expired.
;;Normalizes the unit's state for a policy position into a set.
(defn location-based-state [u state]
  (let [s (get-state u state)
        s (if (set? s) s #{s})]
    s))

;;Translates demand-supplied :location-based-info into a state-change,
;;location-change, and position-change, then runs change-state-beh.
(befn location-based-beh {:keys [entity location-based-info ctx] :as benv}
  (when location-based-info
    (let [{:keys [name MissionLength BOG StartState EndState overlap timeinstate]} location-based-info
          ;;StartState is really a policy position....
          start-state (location-based-state @entity StartState)
          newstate    (if BOG (conj start-state :bogging) start-state)
          ;;we need to schedule a state change.
          ;;and a location-change...
          _ (swap! entity assoc :location-behavior true)
          followingstate (if (pos? overlap)
                           (conj newstate :overlapping)
                           (location-based-state @entity EndState))
          state-change {:newstate newstate
                        :duration (- MissionLength overlap)
                        :followingstate followingstate
                        :timeinstate (or timeinstate 0)}
          location-change {:from-location (:locationname @entity)
                           :to-location name}
          position-change {:from-position (:positionpolicy @entity)
                           :to-position StartState}
          ;;add the ability to check for prescribed moves...
          ;;if the demand prescribes one, then we go ahead and schedule it with
          ;;the entity...
          wt (- MissionLength overlap)
          _ (debug [:location-based {:name (:name @entity)
                                     :state-change state-change
                                     :location-change location-change
                                     :wait-time wt
                                     :next-position StartState}])
          ]
      (beval change-state-beh
             (-> benv
                 (prescribe-overlap! (+ (:tupdate benv) wt) overlap followingstate name)
                 (assoc :state-change state-change
                        :location-change location-change
                        :position-change position-change ;new
                        :wait-time wt
                        :next-position StartState))))))

;;Another potential garbage leak!
(def wbm (atom nil))

;;Position implied by the unit's policy at its current cycletime.
(defn compute-wait-position [unit]
  (let [p (:policy unit)
        current-pos (:positionpolicy unit)
        ct (:cycletime unit)]
    (protocols/get-position p ct)))

;;Like location-based-beh, but with a bounded wait and reversion to the
;;prior state when the wait completes; also refreshes deploy status.
(befn wait-based-beh {:keys [entity statedata wait-based-info ctx] :as benv}
  (when wait-based-info
    (let [{:keys [demand wait-time wait-state]} wait-based-info
          name (:name demand)
          state-change {:newstate wait-state
                        :duration wait-time
                        :followingstate (:state @entity)
                        :timeinstate 0}
          location-change {:from-location (:locationname @entity)
                           :to-location name}
          position (:positionpolicy @entity)
          position-change (when (= position :followon)
                            ;;we need to compute a position change to
                            ;;make sure the unit reverts to its
                            ;;former position, not stay in followon AND wait.
                            ;;this will cause problems.
                            {:from-position position
                             :to-position (compute-wait-position @entity)})
          _ (debug [:wait-based {:name (:name @entity)
                                 :state-change state-change
                                 :location-change location-change
                                 :position-change position-change
                                 :wait-time wait-time}])
          ;; _ (throw (Exception. (str [:about-to-wait {:name (:name @entity)
          ;;                                            :state-change state-change
          ;;                                            :location-change location-change
          ;;                                            :wait-time wait-time}])))
          ]
      (->seq [(->alter #(assoc % :state-change state-change
                               :location-change location-change
                               :position-change position-change
                               :wait-time (when (and wait-time (< wait-time 999999)) wait-time)))
              change-location
              change-position
              change-state-beh
              (->alter (fn [benv]
                         (let [u (deref (:entity benv))
                               _ (debug [:deployable-changed! :waiting :deployment-index (:deployment-index u)])
                               _ (swap! (:ctx benv) #(supply/update-deploy-status u nil nil %))
                               ;_ (reset! wbm u)
                               _ :ballz #_(throw (Exception. (str [:ballz])))]
                           benv)))
              wait]))))

;;All our behavior does right now is spawn...
;;The only other changes we need to make are to alter how we deploy entities...
;;We can actually handle that outside of the unit's deployment....
;;Possibly include it as a message type...
;;Have a special message handler for it...
;;[genius]
;;If we have an location-based-policy to apply, we can
;;tell the unit via messaging...
;;We typically tell the unit form outside, after we've
;;set it up and everything...
;;SRM behavior overrides some functionality for the base behavior.
(befn srm-beh []
  spawning-beh
  ;(throw (Exception. (str "SRM Behavior doesn't do anything!")))
  )

;;Side effect at load time: registers the "SRM" behavior (currently just
;;roll-forward-beh).
(do (println [:setting-srm])
    (swap! base/behaviors assoc "SRM"
           roll-forward-beh ;same thing.
           ;srm-beh
           ))

(comment ;old version
(befn do-current-state {:keys [entity statedata] :as benv}
  (let [;state (:state @entity)
        state (:state (deref!! entity) )
        ;;slightly faster using keyword as function call.
        state-map (or (:statemap entity) default-statemap)]
    (if (set? state) ;entity has multiple effects...
      (let [stats (r/filter identity
                            (r/map (fn [s] (get state-map s))
                                   state))]
        (->seq stats))
      (get state-map state))))
)

(comment ;OBE
(defn update-unit
  "Computes a new simulation context given a specific unit to update, an elapsed time, and an optional time of update. tupdate is inferred to be the current simulation time if none is supplied."
  ([unit deltat ctx]
   (update-unit unit deltat (sim/current-time ctx) ctx))
  ([unit deltat tupdate ctx]
   (->> ctx
        (load-entity! unit deltat tupdate)
        (roll-forward-beh) ;update the unit according to the change in
                           ;time.
        (error-on-fail) ;unit updates should never fail.
        (second ;result is in terms of [:success|:fail ctx], pull out
                ;the ctx
                )
        (commit-entity!)
        ; (clear-bb)
        )))

;;We'll replace these; for now the units will automatically
;;try to update themselves if possible.
;;Debatable...just invokes roll-forward-beh; I think we can ensure that
;;roll-forward is always invoked first...
;;Re-evaluate the need for this....can we synchronize from outside?
;;ideally, we just keep track of the unit's last update....
(defn sync
  "Utility function. Synchronize the unit to the current simulation time. If the last update occured before the current time, we roll the unit forward by the delta between the last update and the current time."
  [unit ctx]
  (let [tprev (or (sim/last-update (:name unit) ctx) 0)
        tnow  (sim/current-time ctx)]
    (if (= tprev tnow)
      (log! (str "unit " (:name unit) "is up to date") ctx)
      (log! (str "Synchronizing unit " (:name unit) " from " tprev " to " tnow)
            (update-unit unit (- tnow tprev) tprev ctx)))))

;;Synchronizes the unit to the current time, then applies a time
;;delta, then processes/records the unit's time of update.
(defn update
  "Entry point for computing behavior-based unit updates. Fundamental API function for processing unit entities. Synchronizes the unit to the current simulation time, then computes the new simulation context resulting from the entity behavior over an elapsed deltat (from current simulation time)."
  [unit deltat ctx]
  (let [nm (get unit :name)]
    (->> (sync unit ctx)
         (update-unit unit deltat)
         (u/unit-update! nm (core/msg "Updated " nm)))))
)
[ { "context": "(ns #^{:author \"Mikael Reponen\"\n :doc \" General functions\"}\n cutter.gener", "end": 30, "score": 0.9998886585235596, "start": 16, "tag": "NAME", "value": "Mikael Reponen" } ]
src/cutter/general.clj
Viritystila/cutter
0
(ns #^{:author "Mikael Reponen" :doc " General functions"} cutter.general (:require ;[clojure.tools.namespace.repl :refer [refresh]] ;[watchtower.core :as watcher] [clojure.java.io :as io] ;clojure.string ) (:import ;(java.nio IntBuffer ByteBuffer FloatBuffer ByteOrder) ;(org.lwjgl BufferUtils) (java.io File FileInputStream))) (defn resource-to-temp [res] (let [res-cont (slurp (clojure.java.io/resource res)) split-res (clojure.string/split res #"[.]") temp-file (java.io.File/createTempFile (first split-res) (str "." (last split-res))) temp-path (.getPath temp-file)] (spit temp-path res-cont) temp-path)) (def not-nil? (complement nil?)) (defn sleepTime [startTime endTime fps] (let [ dtns (- endTime startTime) dtms (* dtns 1e-6) fpdel (/ 1 fps) fpdelms (* 1e3 fpdel) dt (- fpdelms dtms) dtout (if (< dt 0) 0 dt)] dtout)) (defn files-exist "Check to see that the filenames actually exist." [filenames] (let [full-filenames (flatten filenames)] (reduce #(and %1 %2) ; kibit keep (for [fn full-filenames] (if (or (nil? fn) (.exists (File. ^String fn))) fn (do (println "ERROR:" fn "does not exist.") nil)))))) (defn sane-user-inputs [shader-filename shader-str] (and (files-exist (flatten [shader-filename])) (not (and (nil? shader-filename) (nil? shader-str))))) (defn limit-max [input max] (into [] (subvec input 0 (min max (count input))))) (defn remove-inexistent [filenames max] (limit-max (vec (remove nil? (map (fn [x] (files-exist [x])) filenames))) max)) (defn keyname [key] (str namespace key) "/" (name key))
39414
(ns #^{:author "<NAME>" :doc " General functions"} cutter.general (:require ;[clojure.tools.namespace.repl :refer [refresh]] ;[watchtower.core :as watcher] [clojure.java.io :as io] ;clojure.string ) (:import ;(java.nio IntBuffer ByteBuffer FloatBuffer ByteOrder) ;(org.lwjgl BufferUtils) (java.io File FileInputStream))) (defn resource-to-temp [res] (let [res-cont (slurp (clojure.java.io/resource res)) split-res (clojure.string/split res #"[.]") temp-file (java.io.File/createTempFile (first split-res) (str "." (last split-res))) temp-path (.getPath temp-file)] (spit temp-path res-cont) temp-path)) (def not-nil? (complement nil?)) (defn sleepTime [startTime endTime fps] (let [ dtns (- endTime startTime) dtms (* dtns 1e-6) fpdel (/ 1 fps) fpdelms (* 1e3 fpdel) dt (- fpdelms dtms) dtout (if (< dt 0) 0 dt)] dtout)) (defn files-exist "Check to see that the filenames actually exist." [filenames] (let [full-filenames (flatten filenames)] (reduce #(and %1 %2) ; kibit keep (for [fn full-filenames] (if (or (nil? fn) (.exists (File. ^String fn))) fn (do (println "ERROR:" fn "does not exist.") nil)))))) (defn sane-user-inputs [shader-filename shader-str] (and (files-exist (flatten [shader-filename])) (not (and (nil? shader-filename) (nil? shader-str))))) (defn limit-max [input max] (into [] (subvec input 0 (min max (count input))))) (defn remove-inexistent [filenames max] (limit-max (vec (remove nil? (map (fn [x] (files-exist [x])) filenames))) max)) (defn keyname [key] (str namespace key) "/" (name key))
true
(ns #^{:author "PI:NAME:<NAME>END_PI" :doc " General functions"} cutter.general (:require ;[clojure.tools.namespace.repl :refer [refresh]] ;[watchtower.core :as watcher] [clojure.java.io :as io] ;clojure.string ) (:import ;(java.nio IntBuffer ByteBuffer FloatBuffer ByteOrder) ;(org.lwjgl BufferUtils) (java.io File FileInputStream))) (defn resource-to-temp [res] (let [res-cont (slurp (clojure.java.io/resource res)) split-res (clojure.string/split res #"[.]") temp-file (java.io.File/createTempFile (first split-res) (str "." (last split-res))) temp-path (.getPath temp-file)] (spit temp-path res-cont) temp-path)) (def not-nil? (complement nil?)) (defn sleepTime [startTime endTime fps] (let [ dtns (- endTime startTime) dtms (* dtns 1e-6) fpdel (/ 1 fps) fpdelms (* 1e3 fpdel) dt (- fpdelms dtms) dtout (if (< dt 0) 0 dt)] dtout)) (defn files-exist "Check to see that the filenames actually exist." [filenames] (let [full-filenames (flatten filenames)] (reduce #(and %1 %2) ; kibit keep (for [fn full-filenames] (if (or (nil? fn) (.exists (File. ^String fn))) fn (do (println "ERROR:" fn "does not exist.") nil)))))) (defn sane-user-inputs [shader-filename shader-str] (and (files-exist (flatten [shader-filename])) (not (and (nil? shader-filename) (nil? shader-str))))) (defn limit-max [input max] (into [] (subvec input 0 (min max (count input))))) (defn remove-inexistent [filenames max] (limit-max (vec (remove nil? (map (fn [x] (files-exist [x])) filenames))) max)) (defn keyname [key] (str namespace key) "/" (name key))
[ { "context": " {:name \"Floatboth\"\n :url \"floatboth@com\"}]})\n (def val-data\n {:name \"v\"\n :sites [", "end": 664, "score": 0.8766022324562073, "start": 656, "tag": "EMAIL", "value": "both@com" }, { "context": "cts \"about email-field\"\n ((:pred (email-field)) \"me@myfreeweb.ru\") => true\n ((:pred (email-field)) \"not.an.email\"", "end": 3638, "score": 0.9998825192451477, "start": 3623, "tag": "EMAIL", "value": "me@myfreeweb.ru" }, { "context": "facts \"about ipv4-field\"\n ((:pred (ipv4-field)) \"127.0.0.1\") => true\n ((:pred (ipv4-field)) \"255.255.", "end": 3902, "score": 0.9926524758338928, "start": 3893, "tag": "IP_ADDRESS", "value": "127.0.0.1" }, { "context": "27.0.0.1\") => true\n ((:pred (ipv4-field)) \"255.255.255.255\") => true\n ((:pred (ipv4-field)) \"256.0.0.0\") ", "end": 3959, "score": 0.9939170479774475, "start": 3944, "tag": "IP_ADDRESS", "value": "255.255.255.255" }, { "context": "55.255.255.255\") => true\n ((:pred (ipv4-field)) \"256.0.0.0\") => false\n ((:pred (ipv4-field)) \"127.0.l", "end": 4004, "score": 0.9885463714599609, "start": 3995, "tag": "IP_ADDRESS", "value": "256.0.0.0" }, { "context": "6.0.0.0\") => false\n ((:pred (ipv4-field)) \"127.0.lolwut\") => false)\n\n(facts \"about color-field", "end": 4050, "score": 0.6394183039665222, "start": 4047, "tag": "IP_ADDRESS", "value": "127" } ]
data/clojure/f912f3890c09342da9db915ce3ad6eee_test.clj
maxim5/code-inspector
5
(ns formfinger.test (:refer-clojure :exclude [extend]) (:use (formfinger fields core), midje.sweet, clj-time.core)) (facts "about the form API" (def frm {:name [(f (required) "This is required") (f (alphanumeric) "Must be alphanumeric")] :addr {:city [(f (maxlength 1024) "WTF")] :street [(f (maxlength 2048) "LOL")]} :sites (many {:name [(f (minlength 5) "wtf")] :url [(f (url-field) "Not an URL")]})}) (def inv-data {:addr {:city "Moscow"} :sites [{:name "Ringfinger" :url "http://ringfinger.floatboth.com"} {:name "Floatboth" :url "floatboth@com"}]}) (def val-data {:name "v" :sites [{:name "CSSPrefixer" :url "http://cssprefixer.appspot.com"}]}) (-> ((make-data-pre-hook {:date [(f (date-field) "")]}) {:date "2010-01-10"}) :date class) => org.joda.time.DateTime (get-required-fields frm) => {:name true} (get-defaults {:bool [(f (checkbox) "")]}) => {:bool ""} (validate frm (make-fake frm)) => nil (validate frm inv-data) => {:name ["This is required" "Must be alphanumeric"] :sites [nil {:url ["Not an URL"]}]} (validate frm val-data) => nil (render {:style :label} frm) => "<input name=\"csrftoken\" type=\"hidden\" /><div><label for=\"name\">Name</label><input id=\"name\" name=\"name\" pattern=\"[0-9a-zA-Z]+\" required=\"required\" /></div><fieldset id=\"addr\"><h2>Addr</h2><div><label for=\"city\">City</label><input id=\"city\" maxlength=\"1024\" name=\"city\" /></div><div><label for=\"street\">Street</label><input id=\"street\" maxlength=\"2048\" name=\"street\" /></div></fieldset><fieldset id=\"sites\"><h2>Sites</h2><div><label for=\"name\">Name</label><input id=\"name\" name=\"name\" pattern=\".{5,}\" /></div><div><label for=\"url\">Url</label><input id=\"url\" name=\"url\" type=\"url\" /></div></fieldset>" (render {:style :placeholder} frm inv-data (validate frm inv-data)) => "<input name=\"csrftoken\" type=\"hidden\" /><div><input id=\"name\" name=\"name\" pattern=\"[0-9a-zA-Z]+\" placeholder=\"Name\" required=\"required\" /><div class=\"error\">This is 
required</div><div class=\"error\">Must be alphanumeric</div></div><fieldset id=\"addr\"><h2>Addr</h2><div><input id=\"city\" maxlength=\"1024\" name=\"city\" placeholder=\"City\" value=\"Moscow\" /></div><div><input id=\"street\" maxlength=\"2048\" name=\"street\" placeholder=\"Street\" /></div></fieldset><fieldset id=\"sites\"><h2>Sites</h2><div><input id=\"name\" name=\"name\" pattern=\".{5,}\" placeholder=\"Name\" /></div><div><input id=\"url\" name=\"url\" placeholder=\"Url\" type=\"url\" /></div></fieldset>") ; Fields (facts "about required" ((:pred (required)) "s") => true ((:pred (required)) "") => false ((:pred (required)) nil) => false) (let [low (:pred (pattern #"[a-z]"))] (facts "about pattern" (low "a") => true (low "A") => false)) (facts "about alphanumeric" ((:pred (alphanumeric)) "aB0") => true ((:pred (alphanumeric)) ":-)") => false) (facts "about non-confusing" ((:pred (non-confusing)) "amazing") => true ((:pred (non-confusing)) "аmazing") => false) ; Cyrillic а != Latin a (facts "about not-in" ((:pred (not-in ["about"])) "myUser") => true ((:pred (not-in ["about"])) "about") => false) (facts "about maxlength" ((:pred (maxlength 3)) "123") => true ((:pred (maxlength 3)) "1234") => false) (facts "about minlength" ((:pred (minlength 3)) "123") => true ((:pred (minlength 3)) "12") => false) (facts "about email-field" ((:pred (email-field)) "me@myfreeweb.ru") => true ((:pred (email-field)) "not.an.email") => false) (facts "about url-field" ((:pred (url-field)) "http://floatboth.com") => true ((:pred (url-field)) "not@an.address!!") => false) (facts "about ipv4-field" ((:pred (ipv4-field)) "127.0.0.1") => true ((:pred (ipv4-field)) "255.255.255.255") => true ((:pred (ipv4-field)) "256.0.0.0") => false ((:pred (ipv4-field)) "127.0.lolwut") => false) (facts "about color-field" ((:pred (color-field)) "1f4") => true ((:pred (color-field)) "#00fF00") => true ((:pred (color-field)) "#FFail") => false) (facts "about date-field" ((:pred (date-field)) 
"2011-11-02") => true ((:pred (date-field)) "not-a-date") => false (year ((:pre-hook (date-field)) "2011-11-02")) => 2011 ((:view (date-field)) ((:post-hook (date-field)) (date-time 2001 10 02))) => "2001-10-02") (facts "about time-field" ((:pred (time-field)) "10:01") => true ((:pred (time-field)) "lolwtf") => false (hour ((:pre-hook (time-field)) "14:10")) => 14 ((:view (time-field)) ((:post-hook (time-field)) (date-time 2001 10 02 00 42))) => "00:42") (facts "about date-date-time-field" ((:pred (date-time-field)) "2001-10-02T10:01Z") => true ((:pred (date-time-field)) "lolwtf") => false (hour ((:pre-hook (date-time-field)) "2010-01-02T14:10Z")) => 14 ((:view (date-time-field)) ((:post-hook (date-time-field)) (date-time 2001 10 02 00 42))) => "2001-10-02T00:42Z") (facts "about number-field" ((:pred (number-field)) "1234") => true ((:pred (number-field)) "-123") => true ((:pred (number-field)) "word") => false) (let [gte-ten (:pred (nmin 10))] (facts "about nmin" (gte-ten "10") => true (gte-ten "9") => false)) (let [lte-ten (:pred (nmax 10))] (facts "about nmax" (lte-ten "10") => true (lte-ten "11") => false)) (let [btw (:pred (nbetween 10 15))] (facts "about nbetween" (btw "10") => true (btw "15") => true (btw "9") => false (btw "16") => false))
118657
(ns formfinger.test (:refer-clojure :exclude [extend]) (:use (formfinger fields core), midje.sweet, clj-time.core)) (facts "about the form API" (def frm {:name [(f (required) "This is required") (f (alphanumeric) "Must be alphanumeric")] :addr {:city [(f (maxlength 1024) "WTF")] :street [(f (maxlength 2048) "LOL")]} :sites (many {:name [(f (minlength 5) "wtf")] :url [(f (url-field) "Not an URL")]})}) (def inv-data {:addr {:city "Moscow"} :sites [{:name "Ringfinger" :url "http://ringfinger.floatboth.com"} {:name "Floatboth" :url "float<EMAIL>"}]}) (def val-data {:name "v" :sites [{:name "CSSPrefixer" :url "http://cssprefixer.appspot.com"}]}) (-> ((make-data-pre-hook {:date [(f (date-field) "")]}) {:date "2010-01-10"}) :date class) => org.joda.time.DateTime (get-required-fields frm) => {:name true} (get-defaults {:bool [(f (checkbox) "")]}) => {:bool ""} (validate frm (make-fake frm)) => nil (validate frm inv-data) => {:name ["This is required" "Must be alphanumeric"] :sites [nil {:url ["Not an URL"]}]} (validate frm val-data) => nil (render {:style :label} frm) => "<input name=\"csrftoken\" type=\"hidden\" /><div><label for=\"name\">Name</label><input id=\"name\" name=\"name\" pattern=\"[0-9a-zA-Z]+\" required=\"required\" /></div><fieldset id=\"addr\"><h2>Addr</h2><div><label for=\"city\">City</label><input id=\"city\" maxlength=\"1024\" name=\"city\" /></div><div><label for=\"street\">Street</label><input id=\"street\" maxlength=\"2048\" name=\"street\" /></div></fieldset><fieldset id=\"sites\"><h2>Sites</h2><div><label for=\"name\">Name</label><input id=\"name\" name=\"name\" pattern=\".{5,}\" /></div><div><label for=\"url\">Url</label><input id=\"url\" name=\"url\" type=\"url\" /></div></fieldset>" (render {:style :placeholder} frm inv-data (validate frm inv-data)) => "<input name=\"csrftoken\" type=\"hidden\" /><div><input id=\"name\" name=\"name\" pattern=\"[0-9a-zA-Z]+\" placeholder=\"Name\" required=\"required\" /><div class=\"error\">This is 
required</div><div class=\"error\">Must be alphanumeric</div></div><fieldset id=\"addr\"><h2>Addr</h2><div><input id=\"city\" maxlength=\"1024\" name=\"city\" placeholder=\"City\" value=\"Moscow\" /></div><div><input id=\"street\" maxlength=\"2048\" name=\"street\" placeholder=\"Street\" /></div></fieldset><fieldset id=\"sites\"><h2>Sites</h2><div><input id=\"name\" name=\"name\" pattern=\".{5,}\" placeholder=\"Name\" /></div><div><input id=\"url\" name=\"url\" placeholder=\"Url\" type=\"url\" /></div></fieldset>") ; Fields (facts "about required" ((:pred (required)) "s") => true ((:pred (required)) "") => false ((:pred (required)) nil) => false) (let [low (:pred (pattern #"[a-z]"))] (facts "about pattern" (low "a") => true (low "A") => false)) (facts "about alphanumeric" ((:pred (alphanumeric)) "aB0") => true ((:pred (alphanumeric)) ":-)") => false) (facts "about non-confusing" ((:pred (non-confusing)) "amazing") => true ((:pred (non-confusing)) "аmazing") => false) ; Cyrillic а != Latin a (facts "about not-in" ((:pred (not-in ["about"])) "myUser") => true ((:pred (not-in ["about"])) "about") => false) (facts "about maxlength" ((:pred (maxlength 3)) "123") => true ((:pred (maxlength 3)) "1234") => false) (facts "about minlength" ((:pred (minlength 3)) "123") => true ((:pred (minlength 3)) "12") => false) (facts "about email-field" ((:pred (email-field)) "<EMAIL>") => true ((:pred (email-field)) "not.an.email") => false) (facts "about url-field" ((:pred (url-field)) "http://floatboth.com") => true ((:pred (url-field)) "not@an.address!!") => false) (facts "about ipv4-field" ((:pred (ipv4-field)) "127.0.0.1") => true ((:pred (ipv4-field)) "255.255.255.255") => true ((:pred (ipv4-field)) "256.0.0.0") => false ((:pred (ipv4-field)) "127.0.lolwut") => false) (facts "about color-field" ((:pred (color-field)) "1f4") => true ((:pred (color-field)) "#00fF00") => true ((:pred (color-field)) "#FFail") => false) (facts "about date-field" ((:pred (date-field)) "2011-11-02") => 
true ((:pred (date-field)) "not-a-date") => false (year ((:pre-hook (date-field)) "2011-11-02")) => 2011 ((:view (date-field)) ((:post-hook (date-field)) (date-time 2001 10 02))) => "2001-10-02") (facts "about time-field" ((:pred (time-field)) "10:01") => true ((:pred (time-field)) "lolwtf") => false (hour ((:pre-hook (time-field)) "14:10")) => 14 ((:view (time-field)) ((:post-hook (time-field)) (date-time 2001 10 02 00 42))) => "00:42") (facts "about date-date-time-field" ((:pred (date-time-field)) "2001-10-02T10:01Z") => true ((:pred (date-time-field)) "lolwtf") => false (hour ((:pre-hook (date-time-field)) "2010-01-02T14:10Z")) => 14 ((:view (date-time-field)) ((:post-hook (date-time-field)) (date-time 2001 10 02 00 42))) => "2001-10-02T00:42Z") (facts "about number-field" ((:pred (number-field)) "1234") => true ((:pred (number-field)) "-123") => true ((:pred (number-field)) "word") => false) (let [gte-ten (:pred (nmin 10))] (facts "about nmin" (gte-ten "10") => true (gte-ten "9") => false)) (let [lte-ten (:pred (nmax 10))] (facts "about nmax" (lte-ten "10") => true (lte-ten "11") => false)) (let [btw (:pred (nbetween 10 15))] (facts "about nbetween" (btw "10") => true (btw "15") => true (btw "9") => false (btw "16") => false))
true
(ns formfinger.test (:refer-clojure :exclude [extend]) (:use (formfinger fields core), midje.sweet, clj-time.core)) (facts "about the form API" (def frm {:name [(f (required) "This is required") (f (alphanumeric) "Must be alphanumeric")] :addr {:city [(f (maxlength 1024) "WTF")] :street [(f (maxlength 2048) "LOL")]} :sites (many {:name [(f (minlength 5) "wtf")] :url [(f (url-field) "Not an URL")]})}) (def inv-data {:addr {:city "Moscow"} :sites [{:name "Ringfinger" :url "http://ringfinger.floatboth.com"} {:name "Floatboth" :url "floatPI:EMAIL:<EMAIL>END_PI"}]}) (def val-data {:name "v" :sites [{:name "CSSPrefixer" :url "http://cssprefixer.appspot.com"}]}) (-> ((make-data-pre-hook {:date [(f (date-field) "")]}) {:date "2010-01-10"}) :date class) => org.joda.time.DateTime (get-required-fields frm) => {:name true} (get-defaults {:bool [(f (checkbox) "")]}) => {:bool ""} (validate frm (make-fake frm)) => nil (validate frm inv-data) => {:name ["This is required" "Must be alphanumeric"] :sites [nil {:url ["Not an URL"]}]} (validate frm val-data) => nil (render {:style :label} frm) => "<input name=\"csrftoken\" type=\"hidden\" /><div><label for=\"name\">Name</label><input id=\"name\" name=\"name\" pattern=\"[0-9a-zA-Z]+\" required=\"required\" /></div><fieldset id=\"addr\"><h2>Addr</h2><div><label for=\"city\">City</label><input id=\"city\" maxlength=\"1024\" name=\"city\" /></div><div><label for=\"street\">Street</label><input id=\"street\" maxlength=\"2048\" name=\"street\" /></div></fieldset><fieldset id=\"sites\"><h2>Sites</h2><div><label for=\"name\">Name</label><input id=\"name\" name=\"name\" pattern=\".{5,}\" /></div><div><label for=\"url\">Url</label><input id=\"url\" name=\"url\" type=\"url\" /></div></fieldset>" (render {:style :placeholder} frm inv-data (validate frm inv-data)) => "<input name=\"csrftoken\" type=\"hidden\" /><div><input id=\"name\" name=\"name\" pattern=\"[0-9a-zA-Z]+\" placeholder=\"Name\" required=\"required\" /><div class=\"error\">This is 
required</div><div class=\"error\">Must be alphanumeric</div></div><fieldset id=\"addr\"><h2>Addr</h2><div><input id=\"city\" maxlength=\"1024\" name=\"city\" placeholder=\"City\" value=\"Moscow\" /></div><div><input id=\"street\" maxlength=\"2048\" name=\"street\" placeholder=\"Street\" /></div></fieldset><fieldset id=\"sites\"><h2>Sites</h2><div><input id=\"name\" name=\"name\" pattern=\".{5,}\" placeholder=\"Name\" /></div><div><input id=\"url\" name=\"url\" placeholder=\"Url\" type=\"url\" /></div></fieldset>") ; Fields (facts "about required" ((:pred (required)) "s") => true ((:pred (required)) "") => false ((:pred (required)) nil) => false) (let [low (:pred (pattern #"[a-z]"))] (facts "about pattern" (low "a") => true (low "A") => false)) (facts "about alphanumeric" ((:pred (alphanumeric)) "aB0") => true ((:pred (alphanumeric)) ":-)") => false) (facts "about non-confusing" ((:pred (non-confusing)) "amazing") => true ((:pred (non-confusing)) "аmazing") => false) ; Cyrillic а != Latin a (facts "about not-in" ((:pred (not-in ["about"])) "myUser") => true ((:pred (not-in ["about"])) "about") => false) (facts "about maxlength" ((:pred (maxlength 3)) "123") => true ((:pred (maxlength 3)) "1234") => false) (facts "about minlength" ((:pred (minlength 3)) "123") => true ((:pred (minlength 3)) "12") => false) (facts "about email-field" ((:pred (email-field)) "PI:EMAIL:<EMAIL>END_PI") => true ((:pred (email-field)) "not.an.email") => false) (facts "about url-field" ((:pred (url-field)) "http://floatboth.com") => true ((:pred (url-field)) "not@an.address!!") => false) (facts "about ipv4-field" ((:pred (ipv4-field)) "127.0.0.1") => true ((:pred (ipv4-field)) "255.255.255.255") => true ((:pred (ipv4-field)) "256.0.0.0") => false ((:pred (ipv4-field)) "127.0.lolwut") => false) (facts "about color-field" ((:pred (color-field)) "1f4") => true ((:pred (color-field)) "#00fF00") => true ((:pred (color-field)) "#FFail") => false) (facts "about date-field" ((:pred (date-field)) 
"2011-11-02") => true ((:pred (date-field)) "not-a-date") => false (year ((:pre-hook (date-field)) "2011-11-02")) => 2011 ((:view (date-field)) ((:post-hook (date-field)) (date-time 2001 10 02))) => "2001-10-02") (facts "about time-field" ((:pred (time-field)) "10:01") => true ((:pred (time-field)) "lolwtf") => false (hour ((:pre-hook (time-field)) "14:10")) => 14 ((:view (time-field)) ((:post-hook (time-field)) (date-time 2001 10 02 00 42))) => "00:42") (facts "about date-date-time-field" ((:pred (date-time-field)) "2001-10-02T10:01Z") => true ((:pred (date-time-field)) "lolwtf") => false (hour ((:pre-hook (date-time-field)) "2010-01-02T14:10Z")) => 14 ((:view (date-time-field)) ((:post-hook (date-time-field)) (date-time 2001 10 02 00 42))) => "2001-10-02T00:42Z") (facts "about number-field" ((:pred (number-field)) "1234") => true ((:pred (number-field)) "-123") => true ((:pred (number-field)) "word") => false) (let [gte-ten (:pred (nmin 10))] (facts "about nmin" (gte-ten "10") => true (gte-ten "9") => false)) (let [lte-ten (:pred (nmax 10))] (facts "about nmax" (lte-ten "10") => true (lte-ten "11") => false)) (let [btw (:pred (nbetween 10 15))] (facts "about nbetween" (btw "10") => true (btw "15") => true (btw "9") => false (btw "16") => false))
[ { "context": "\\o \\g \\l \\e] ; vectors\n{:name \"Kim\" :age 25 :location \"here\"} ; maps\n#{2 3 5 7 11 13", "end": 912, "score": 0.9975649118423462, "start": 909, "tag": "NAME", "value": "Kim" }, { "context": " ; lists\n(1 2 3 4 5)\n\n{:name \"Mike Tyson\", :skills [\"punching\", \"standing\", \"acting\", \"pig", "end": 1071, "score": 0.9996244311332703, "start": 1061, "tag": "NAME", "value": "Mike Tyson" } ]
presentation/1-syntax.clj
brunokim/learning-clojure
1
; Copyright 2014 Google Inc. All rights reserved. ; ; Licensed under the Apache License, Version 2.0 (the License); ; you may not use this file except in compliance with the License. ; You may obtain a copy of the License at ; ; http://www.apache.org/licenses/LICENSE-2.0 ; ; Unless required by applicable law or agreed to in writing, software ; distributed under the License is distributed on an AS IS BASIS, ; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ; See the License for the specific language governing permissions and ; limitations under the License. (ns learning.syntax) (+ 2 2) (println "Hello World!") (and true false) (/ (+ 7 1) (- (* 2 3) 2)) (+ 1 2 3 4 5 6 7 8 9 10) (- 4 1 1 5 3) (print \c \newline \u2603) "strings are enclosed in double quotes" :keyword 10 10.0 10.02M 1000000000000000N 10/3 [\g \o \o \g \l \e] ; vectors {:name "Kim" :age 25 :location "here"} ; maps #{2 3 5 7 11 13 17 19 23 29} ; sets '(1 2 3 4 5) ; lists (1 2 3 4 5) {:name "Mike Tyson", :skills ["punching", "standing", "acting", "pigeon racing"]} (if (neg? 10) "10 is negative" "10 is not negative")
25403
; Copyright 2014 Google Inc. All rights reserved. ; ; Licensed under the Apache License, Version 2.0 (the License); ; you may not use this file except in compliance with the License. ; You may obtain a copy of the License at ; ; http://www.apache.org/licenses/LICENSE-2.0 ; ; Unless required by applicable law or agreed to in writing, software ; distributed under the License is distributed on an AS IS BASIS, ; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ; See the License for the specific language governing permissions and ; limitations under the License. (ns learning.syntax) (+ 2 2) (println "Hello World!") (and true false) (/ (+ 7 1) (- (* 2 3) 2)) (+ 1 2 3 4 5 6 7 8 9 10) (- 4 1 1 5 3) (print \c \newline \u2603) "strings are enclosed in double quotes" :keyword 10 10.0 10.02M 1000000000000000N 10/3 [\g \o \o \g \l \e] ; vectors {:name "<NAME>" :age 25 :location "here"} ; maps #{2 3 5 7 11 13 17 19 23 29} ; sets '(1 2 3 4 5) ; lists (1 2 3 4 5) {:name "<NAME>", :skills ["punching", "standing", "acting", "pigeon racing"]} (if (neg? 10) "10 is negative" "10 is not negative")
true
; Copyright 2014 Google Inc. All rights reserved. ; ; Licensed under the Apache License, Version 2.0 (the License); ; you may not use this file except in compliance with the License. ; You may obtain a copy of the License at ; ; http://www.apache.org/licenses/LICENSE-2.0 ; ; Unless required by applicable law or agreed to in writing, software ; distributed under the License is distributed on an AS IS BASIS, ; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ; See the License for the specific language governing permissions and ; limitations under the License. (ns learning.syntax) (+ 2 2) (println "Hello World!") (and true false) (/ (+ 7 1) (- (* 2 3) 2)) (+ 1 2 3 4 5 6 7 8 9 10) (- 4 1 1 5 3) (print \c \newline \u2603) "strings are enclosed in double quotes" :keyword 10 10.0 10.02M 1000000000000000N 10/3 [\g \o \o \g \l \e] ; vectors {:name "PI:NAME:<NAME>END_PI" :age 25 :location "here"} ; maps #{2 3 5 7 11 13 17 19 23 29} ; sets '(1 2 3 4 5) ; lists (1 2 3 4 5) {:name "PI:NAME:<NAME>END_PI", :skills ["punching", "standing", "acting", "pigeon racing"]} (if (neg? 10) "10 is negative" "10 is not negative")
[ { "context": " :foaf/name \"Luke\"\n :foaf/know", "end": 538, "score": 0.9993910789489746, "start": 534, "tag": "NAME", "value": "Luke" }, { "context": " :foaf/name \"Stuart\"}})]\n\n (is (= 3 (count (graph/triples g))))\n ", "end": 699, "score": 0.999125599861145, "start": 693, "tag": "NAME", "value": "Stuart" } ]
test/arachne/aristotle/reification_test.clj
caleb/aristotle
87
(ns arachne.aristotle.reification-test (:require [clojure.test :refer :all] [arachne.aristotle.registry :as reg] [arachne.aristotle.graph :as graph] [arachne.aristotle.query :as q] [arachne.aristotle :as aa] [clojure.java.io :as io] [clojure.walk :as w])) (reg/prefix 'foaf "http://xmlns.com/foaf/0.1/") (deftest reification-test (let [g (aa/add (aa/graph :simple) {:rdf/about "<http://example.com/#luke>" :foaf/name "Luke" :foaf/knows {:rdf/about "<http://example.com/#Stu>" :foaf/name "Stuart"}})] (is (= 3 (count (graph/triples g)))) (let [g (graph/reify g "<http://example.com/graph>" "<http://example.com/graph1>")] (is (= 18 (count (graph/triples g))))))) (comment ;; Reification Benchmarking (import '[java.util UUID]) (def entities (vec (repeatedly 5000 (fn [] (str "<http://example.com/" (UUID/randomUUID) ">") )))) (def properties (vec (repeatedly 500 (fn [] (str "<http://example.com/p/" (UUID/randomUUID) ">") )))) (defn rand-triple [] [(rand-nth entities) (rand-nth properties) (case (rand-int 3) 0 (rand-nth entities) 1 (rand) 2 (str (UUID/randomUUID)))]) (def n 100000) (time (let [g (aa/add (aa/graph :jena-mini) (repeatedly n rand-triple)) g (graph/reify g "<http://example.com/graph>" (str (UUID/randomUUID)))] (def the-g g) )) (def the-g nil) (time (count (graph/triples the-g))) ;; Results: 100k triples (before reification) in a :jena-mini graph ;; cost about 1.1G of heap. ;; :simple is much cheaper, can fit about 1M ;; triples (before reification) in a 2GB data structure. )
56560
(ns arachne.aristotle.reification-test (:require [clojure.test :refer :all] [arachne.aristotle.registry :as reg] [arachne.aristotle.graph :as graph] [arachne.aristotle.query :as q] [arachne.aristotle :as aa] [clojure.java.io :as io] [clojure.walk :as w])) (reg/prefix 'foaf "http://xmlns.com/foaf/0.1/") (deftest reification-test (let [g (aa/add (aa/graph :simple) {:rdf/about "<http://example.com/#luke>" :foaf/name "<NAME>" :foaf/knows {:rdf/about "<http://example.com/#Stu>" :foaf/name "<NAME>"}})] (is (= 3 (count (graph/triples g)))) (let [g (graph/reify g "<http://example.com/graph>" "<http://example.com/graph1>")] (is (= 18 (count (graph/triples g))))))) (comment ;; Reification Benchmarking (import '[java.util UUID]) (def entities (vec (repeatedly 5000 (fn [] (str "<http://example.com/" (UUID/randomUUID) ">") )))) (def properties (vec (repeatedly 500 (fn [] (str "<http://example.com/p/" (UUID/randomUUID) ">") )))) (defn rand-triple [] [(rand-nth entities) (rand-nth properties) (case (rand-int 3) 0 (rand-nth entities) 1 (rand) 2 (str (UUID/randomUUID)))]) (def n 100000) (time (let [g (aa/add (aa/graph :jena-mini) (repeatedly n rand-triple)) g (graph/reify g "<http://example.com/graph>" (str (UUID/randomUUID)))] (def the-g g) )) (def the-g nil) (time (count (graph/triples the-g))) ;; Results: 100k triples (before reification) in a :jena-mini graph ;; cost about 1.1G of heap. ;; :simple is much cheaper, can fit about 1M ;; triples (before reification) in a 2GB data structure. )
true
(ns arachne.aristotle.reification-test (:require [clojure.test :refer :all] [arachne.aristotle.registry :as reg] [arachne.aristotle.graph :as graph] [arachne.aristotle.query :as q] [arachne.aristotle :as aa] [clojure.java.io :as io] [clojure.walk :as w])) (reg/prefix 'foaf "http://xmlns.com/foaf/0.1/") (deftest reification-test (let [g (aa/add (aa/graph :simple) {:rdf/about "<http://example.com/#luke>" :foaf/name "PI:NAME:<NAME>END_PI" :foaf/knows {:rdf/about "<http://example.com/#Stu>" :foaf/name "PI:NAME:<NAME>END_PI"}})] (is (= 3 (count (graph/triples g)))) (let [g (graph/reify g "<http://example.com/graph>" "<http://example.com/graph1>")] (is (= 18 (count (graph/triples g))))))) (comment ;; Reification Benchmarking (import '[java.util UUID]) (def entities (vec (repeatedly 5000 (fn [] (str "<http://example.com/" (UUID/randomUUID) ">") )))) (def properties (vec (repeatedly 500 (fn [] (str "<http://example.com/p/" (UUID/randomUUID) ">") )))) (defn rand-triple [] [(rand-nth entities) (rand-nth properties) (case (rand-int 3) 0 (rand-nth entities) 1 (rand) 2 (str (UUID/randomUUID)))]) (def n 100000) (time (let [g (aa/add (aa/graph :jena-mini) (repeatedly n rand-triple)) g (graph/reify g "<http://example.com/graph>" (str (UUID/randomUUID)))] (def the-g g) )) (def the-g nil) (time (count (graph/triples the-g))) ;; Results: 100k triples (before reification) in a :jena-mini graph ;; cost about 1.1G of heap. ;; :simple is much cheaper, can fit about 1M ;; triples (before reification) in a 2GB data structure. )
[ { "context": "rvice \"service\"\n :body body\n :access \"AKIDEXAMPLE\"\n :secret \"wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXA", "end": 651, "score": 0.774273693561554, "start": 640, "tag": "KEY", "value": "AKIDEXAMPLE" }, { "context": " body\n :access \"AKIDEXAMPLE\"\n :secret \"wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY\"}))\n\n(doseq [[k data] (test-data \"test/suite-v4\")", "end": 708, "score": 0.9997677803039551, "start": 668, "tag": "KEY", "value": "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY" }, { "context": "data :req req->map v4/sign-req)\n keys [:creq :sts :authz]\n success (= (select-keys data", "end": 834, "score": 0.9262099266052246, "start": 830, "tag": "KEY", "value": "creq" }, { "context": "req req->map v4/sign-req)\n keys [:creq :sts :authz]\n success (= (select-keys data keys", "end": 839, "score": 0.7326129078865051, "start": 836, "tag": "KEY", "value": "sts" }, { "context": "eq->map v4/sign-req)\n keys [:creq :sts :authz]\n success (= (select-keys data keys) (sele", "end": 846, "score": 0.8494353890419006, "start": 841, "tag": "KEY", "value": "authz" } ]
test/mrmcc3/aws/sig_v4_test.cljs
mrmcc3/cljs-aws-signature
3
(ns mrmcc3.aws.sig-v4-test (:require-macros [mrmcc3.aws.sig-v4-test :refer [test-data]]) (:require [mrmcc3.aws.sig-v4 :as v4] [clojure.string :as str] [clojure.pprint :refer [pprint]]) (:import (goog Uri) (goog.date UtcDateTime))) (defn query-map [uri] (let [qd (.getQueryData (Uri. uri))] (map vector (.getKeys qd) (.getValues qd)))) (defn req->map [{:keys [method uri headers body]}] (let [[path] (str/split uri #"\?" 2)] {:method method :path path :query (query-map uri) :headers headers :region "us-east-1" :service "service" :body body :access "AKIDEXAMPLE" :secret "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY"})) (doseq [[k data] (test-data "test/suite-v4")] (let [result (-> data :req req->map v4/sign-req) keys [:creq :sts :authz] success (= (select-keys data keys) (select-keys result keys))] (when success (println "testing" k "... OK")) (when-not success (println "testing" k "... FAILED") (println (:creq data)) (println (:creq result)))))
81119
(ns mrmcc3.aws.sig-v4-test (:require-macros [mrmcc3.aws.sig-v4-test :refer [test-data]]) (:require [mrmcc3.aws.sig-v4 :as v4] [clojure.string :as str] [clojure.pprint :refer [pprint]]) (:import (goog Uri) (goog.date UtcDateTime))) (defn query-map [uri] (let [qd (.getQueryData (Uri. uri))] (map vector (.getKeys qd) (.getValues qd)))) (defn req->map [{:keys [method uri headers body]}] (let [[path] (str/split uri #"\?" 2)] {:method method :path path :query (query-map uri) :headers headers :region "us-east-1" :service "service" :body body :access "<KEY>" :secret "<KEY>"})) (doseq [[k data] (test-data "test/suite-v4")] (let [result (-> data :req req->map v4/sign-req) keys [:<KEY> :<KEY> :<KEY>] success (= (select-keys data keys) (select-keys result keys))] (when success (println "testing" k "... OK")) (when-not success (println "testing" k "... FAILED") (println (:creq data)) (println (:creq result)))))
true
(ns mrmcc3.aws.sig-v4-test (:require-macros [mrmcc3.aws.sig-v4-test :refer [test-data]]) (:require [mrmcc3.aws.sig-v4 :as v4] [clojure.string :as str] [clojure.pprint :refer [pprint]]) (:import (goog Uri) (goog.date UtcDateTime))) (defn query-map [uri] (let [qd (.getQueryData (Uri. uri))] (map vector (.getKeys qd) (.getValues qd)))) (defn req->map [{:keys [method uri headers body]}] (let [[path] (str/split uri #"\?" 2)] {:method method :path path :query (query-map uri) :headers headers :region "us-east-1" :service "service" :body body :access "PI:KEY:<KEY>END_PI" :secret "PI:KEY:<KEY>END_PI"})) (doseq [[k data] (test-data "test/suite-v4")] (let [result (-> data :req req->map v4/sign-req) keys [:PI:KEY:<KEY>END_PI :PI:KEY:<KEY>END_PI :PI:KEY:<KEY>END_PI] success (= (select-keys data keys) (select-keys result keys))] (when success (println "testing" k "... OK")) (when-not success (println "testing" k "... FAILED") (println (:creq data)) (println (:creq result)))))
[ { "context": ";; Copyright © 2015 Atamert Ölçgen\n;;\n;; Distributed under the The MIT License\n;; ht", "end": 34, "score": 0.9998835325241089, "start": 20, "tag": "NAME", "value": "Atamert Ölçgen" } ]
src/ring/middleware/transit.clj
muhuk/ring-transit-middleware
3
;; Copyright © 2015 Atamert Ölçgen ;; ;; Distributed under the The MIT License ;; http://opensource.org/licenses/MIT (ns ring.middleware.transit "Ring middleware for Transit requests & responses. For basic usage you just need to add [[wrap-transit]] middleware. #### Usage Example: (require '[ring.middleware.transit :refer [wrap-transit]]) (defn handler [request] (let [username (get-in request [:params :username])] {:hello username})) (defn app (-> handler (wrap-transit))) [[encode]] & [[decode]] are provided for reusing of options to [[wrap-transit]] when encoding/decoding Transit outside of HTTP requests/responses. When using WebSockets or communicating with other services. If you want to write a custom middleware based on this code take a look at `-decode-request` & `-encode-response` functions. They are not documented. " (:require [cognitect.transit :as t] [ring.util.response :refer [content-type]]) (:import [java.io ByteArrayInputStream ByteArrayOutputStream])) (def ^:private aleph-available? (try (require '[aleph.http]) true (catch java.io.FileNotFoundException _ false))) (def ^:private chain (if aleph-available? (find-var 'manifold.deferred/chain) (fn [v f] (f v)))) (declare -decode-request -encode-response parse-transit-content-type ring-response-body?) (def ^:private re-ct #"^application/transit\+(json|json-verbose|msgpack)(;\s*charset=([-\w]+))?") (defn decode "Decode string Transit data. #### Parameters: s : Transit data. Note that unlike `cognitect.transit/reader` this function takes a `String`. options : Optional parameter. This is a map that will be passed to `cognitect.transit/reader` as its third argument. Additionally `decode`'s `options` map can contain `:encoding`: :encoding : Transit reader's encoding. Default is `:json`. Passed to `reader` as its second argument. An example options map: {:encoding :json :handlers {Foo (FooHandler.)} :default-handler (DefaultHandler.)} `:encoding` key stripped from `options` before calling `reader`. 
" ([s] (decode s nil)) ([^String s options] {:pre [(string? s)]} (let [encoding (:encoding options :json) options (dissoc options :encoding)] (-> (.getBytes s) (ByteArrayInputStream.) (t/reader encoding options) (t/read))))) (defn encode "Encode some value into string Transit data. #### Parameters: v : Value to be encoded. options : Optional parameter. This is a map that will be passed to `cognitect.transit/writer` as its third argument. Additionally there are two more keys accepted: :buffer-size : Size of the buffer of the output stream, in bytes. Default is `1024`. :encoding : Transit writer's encoding. Default is `:json`. Passed to `reader` as its second argument. An example options map: {:buffer-size 4096 :encoding :json-verbose :handlers {Foo (FooHandler.)}} `:buffer-size` & `:encoding` keys are stripped from `options` before calling `writer`. " ([v] (encode v nil)) ([v options] {:post [(string? %)]} (let [buffer-size (get options :buffer-size 1024) encoding (get options :encoding :json) options (dissoc options :buffer-size :encoding) out (ByteArrayOutputStream. buffer-size) _ (-> out (t/writer encoding options) (t/write v)) result (.toString out)] (.reset out) result))) (defn wrap-transit "Decodes Transit requests and encodes Transit responses. #### Parameters: handler : Ring handler to wrap. options : Optional parameter. A map of options that can contain a `:reader` and a `:writer` keys which correspond to options to be passed to [[decode]] and [[encode]] respectively. `[:reader :encoding]` will always be overwritten using the `Content-Type` header of the request. #### Transit Requests Decoded Transit messages can be accessed through request's `:transit-params` key. If the decoded object is a map, it will be also be merged with request's `:params`. For Transit requests, `:body` is read into a string and is available to downstream. 
#### Transit Responses If there is no `Content-Type` header, anything but the types ring accepts as valid response bodies are encoded. If `Content-Type` is present it overrides the type of `:body`. | Content Type Header | Response Type | Encoded? | |---------------------|-------------------------------------------|:--------:| | Not present. | `String`, `InputStream`, `File` or `ISeq` | No. | | Not present. | Anything else. | Yes. | | application/transit | Anything. | Yes. | | Other content type | Anything. | No. | #### Aleph Support If you have [Aleph](http://aleph.io/) in your classpath deferred responses will be handled properly using `manifold.deferred/chain`. " ([handler] (wrap-transit handler nil)) ([handler options] (let [req-opts (select-keys options [:reader]) res-opts (select-keys options [:writer])] (fn [request] (-> request (-decode-request req-opts) (handler) (chain #(-encode-response % res-opts))))))) (defn ^:no-doc -decode-request [request options] (let [{:keys [transit? encoding charset]} (parse-transit-content-type request)] (if-not transit? request (let [options (assoc options :encoding (or encoding (:encoding options))) body (slurp (:body request) :encoding charset) transit-params (decode body options) request' (assoc request :body body :transit-params transit-params)] (assert (contains? #{:json :msgpack nil} encoding)) (if (map? transit-params) (update-in request' [:params] merge transit-params) request'))))) (defn ^:no-doc -encode-response [response options] (let [{:keys [transit? encoding charset] :as ct} (parse-transit-content-type response)] (if (or (false? transit?) (and (nil? transit?) (ring-response-body? response))) response (-> response (cond-> (nil? 
(:content-type ct)) (content-type (format "application/transit+%s; charset=%s" (name (or encoding (get :encoding options :json))) charset))) (update-in [:body] encode options))))) (defn- parse-transit-content-type [r] (let [ct (or (get-in r [:headers "content-type"]) (get-in r [:headers "Content-Type"]))] (if (nil? ct) {:charset "utf-8"} (let [[ct-transit encoding _ charset] (re-find re-ct ct) charset (or charset "utf-8")] (cond-> {:content-type ct :transit? false} (some? ct-transit) (assoc :transit? true :encoding (keyword encoding) :charset charset)))))) (defn- ring-response-body? [response] (let [body (:body response)] (or (string? body) (instance? java.io.InputStream body) (instance? java.io.File body) (instance? clojure.lang.ISeq body))))
97640
;; Copyright © 2015 <NAME> ;; ;; Distributed under the The MIT License ;; http://opensource.org/licenses/MIT (ns ring.middleware.transit "Ring middleware for Transit requests & responses. For basic usage you just need to add [[wrap-transit]] middleware. #### Usage Example: (require '[ring.middleware.transit :refer [wrap-transit]]) (defn handler [request] (let [username (get-in request [:params :username])] {:hello username})) (defn app (-> handler (wrap-transit))) [[encode]] & [[decode]] are provided for reusing of options to [[wrap-transit]] when encoding/decoding Transit outside of HTTP requests/responses. When using WebSockets or communicating with other services. If you want to write a custom middleware based on this code take a look at `-decode-request` & `-encode-response` functions. They are not documented. " (:require [cognitect.transit :as t] [ring.util.response :refer [content-type]]) (:import [java.io ByteArrayInputStream ByteArrayOutputStream])) (def ^:private aleph-available? (try (require '[aleph.http]) true (catch java.io.FileNotFoundException _ false))) (def ^:private chain (if aleph-available? (find-var 'manifold.deferred/chain) (fn [v f] (f v)))) (declare -decode-request -encode-response parse-transit-content-type ring-response-body?) (def ^:private re-ct #"^application/transit\+(json|json-verbose|msgpack)(;\s*charset=([-\w]+))?") (defn decode "Decode string Transit data. #### Parameters: s : Transit data. Note that unlike `cognitect.transit/reader` this function takes a `String`. options : Optional parameter. This is a map that will be passed to `cognitect.transit/reader` as its third argument. Additionally `decode`'s `options` map can contain `:encoding`: :encoding : Transit reader's encoding. Default is `:json`. Passed to `reader` as its second argument. An example options map: {:encoding :json :handlers {Foo (FooHandler.)} :default-handler (DefaultHandler.)} `:encoding` key stripped from `options` before calling `reader`. 
" ([s] (decode s nil)) ([^String s options] {:pre [(string? s)]} (let [encoding (:encoding options :json) options (dissoc options :encoding)] (-> (.getBytes s) (ByteArrayInputStream.) (t/reader encoding options) (t/read))))) (defn encode "Encode some value into string Transit data. #### Parameters: v : Value to be encoded. options : Optional parameter. This is a map that will be passed to `cognitect.transit/writer` as its third argument. Additionally there are two more keys accepted: :buffer-size : Size of the buffer of the output stream, in bytes. Default is `1024`. :encoding : Transit writer's encoding. Default is `:json`. Passed to `reader` as its second argument. An example options map: {:buffer-size 4096 :encoding :json-verbose :handlers {Foo (FooHandler.)}} `:buffer-size` & `:encoding` keys are stripped from `options` before calling `writer`. " ([v] (encode v nil)) ([v options] {:post [(string? %)]} (let [buffer-size (get options :buffer-size 1024) encoding (get options :encoding :json) options (dissoc options :buffer-size :encoding) out (ByteArrayOutputStream. buffer-size) _ (-> out (t/writer encoding options) (t/write v)) result (.toString out)] (.reset out) result))) (defn wrap-transit "Decodes Transit requests and encodes Transit responses. #### Parameters: handler : Ring handler to wrap. options : Optional parameter. A map of options that can contain a `:reader` and a `:writer` keys which correspond to options to be passed to [[decode]] and [[encode]] respectively. `[:reader :encoding]` will always be overwritten using the `Content-Type` header of the request. #### Transit Requests Decoded Transit messages can be accessed through request's `:transit-params` key. If the decoded object is a map, it will be also be merged with request's `:params`. For Transit requests, `:body` is read into a string and is available to downstream. 
#### Transit Responses If there is no `Content-Type` header, anything but the types ring accepts as valid response bodies are encoded. If `Content-Type` is present it overrides the type of `:body`. | Content Type Header | Response Type | Encoded? | |---------------------|-------------------------------------------|:--------:| | Not present. | `String`, `InputStream`, `File` or `ISeq` | No. | | Not present. | Anything else. | Yes. | | application/transit | Anything. | Yes. | | Other content type | Anything. | No. | #### Aleph Support If you have [Aleph](http://aleph.io/) in your classpath deferred responses will be handled properly using `manifold.deferred/chain`. " ([handler] (wrap-transit handler nil)) ([handler options] (let [req-opts (select-keys options [:reader]) res-opts (select-keys options [:writer])] (fn [request] (-> request (-decode-request req-opts) (handler) (chain #(-encode-response % res-opts))))))) (defn ^:no-doc -decode-request [request options] (let [{:keys [transit? encoding charset]} (parse-transit-content-type request)] (if-not transit? request (let [options (assoc options :encoding (or encoding (:encoding options))) body (slurp (:body request) :encoding charset) transit-params (decode body options) request' (assoc request :body body :transit-params transit-params)] (assert (contains? #{:json :msgpack nil} encoding)) (if (map? transit-params) (update-in request' [:params] merge transit-params) request'))))) (defn ^:no-doc -encode-response [response options] (let [{:keys [transit? encoding charset] :as ct} (parse-transit-content-type response)] (if (or (false? transit?) (and (nil? transit?) (ring-response-body? response))) response (-> response (cond-> (nil? 
(:content-type ct)) (content-type (format "application/transit+%s; charset=%s" (name (or encoding (get :encoding options :json))) charset))) (update-in [:body] encode options))))) (defn- parse-transit-content-type [r] (let [ct (or (get-in r [:headers "content-type"]) (get-in r [:headers "Content-Type"]))] (if (nil? ct) {:charset "utf-8"} (let [[ct-transit encoding _ charset] (re-find re-ct ct) charset (or charset "utf-8")] (cond-> {:content-type ct :transit? false} (some? ct-transit) (assoc :transit? true :encoding (keyword encoding) :charset charset)))))) (defn- ring-response-body? [response] (let [body (:body response)] (or (string? body) (instance? java.io.InputStream body) (instance? java.io.File body) (instance? clojure.lang.ISeq body))))
true
;; Copyright © 2015 PI:NAME:<NAME>END_PI ;; ;; Distributed under the The MIT License ;; http://opensource.org/licenses/MIT (ns ring.middleware.transit "Ring middleware for Transit requests & responses. For basic usage you just need to add [[wrap-transit]] middleware. #### Usage Example: (require '[ring.middleware.transit :refer [wrap-transit]]) (defn handler [request] (let [username (get-in request [:params :username])] {:hello username})) (defn app (-> handler (wrap-transit))) [[encode]] & [[decode]] are provided for reusing of options to [[wrap-transit]] when encoding/decoding Transit outside of HTTP requests/responses. When using WebSockets or communicating with other services. If you want to write a custom middleware based on this code take a look at `-decode-request` & `-encode-response` functions. They are not documented. " (:require [cognitect.transit :as t] [ring.util.response :refer [content-type]]) (:import [java.io ByteArrayInputStream ByteArrayOutputStream])) (def ^:private aleph-available? (try (require '[aleph.http]) true (catch java.io.FileNotFoundException _ false))) (def ^:private chain (if aleph-available? (find-var 'manifold.deferred/chain) (fn [v f] (f v)))) (declare -decode-request -encode-response parse-transit-content-type ring-response-body?) (def ^:private re-ct #"^application/transit\+(json|json-verbose|msgpack)(;\s*charset=([-\w]+))?") (defn decode "Decode string Transit data. #### Parameters: s : Transit data. Note that unlike `cognitect.transit/reader` this function takes a `String`. options : Optional parameter. This is a map that will be passed to `cognitect.transit/reader` as its third argument. Additionally `decode`'s `options` map can contain `:encoding`: :encoding : Transit reader's encoding. Default is `:json`. Passed to `reader` as its second argument. An example options map: {:encoding :json :handlers {Foo (FooHandler.)} :default-handler (DefaultHandler.)} `:encoding` key stripped from `options` before calling `reader`. 
" ([s] (decode s nil)) ([^String s options] {:pre [(string? s)]} (let [encoding (:encoding options :json) options (dissoc options :encoding)] (-> (.getBytes s) (ByteArrayInputStream.) (t/reader encoding options) (t/read))))) (defn encode "Encode some value into string Transit data. #### Parameters: v : Value to be encoded. options : Optional parameter. This is a map that will be passed to `cognitect.transit/writer` as its third argument. Additionally there are two more keys accepted: :buffer-size : Size of the buffer of the output stream, in bytes. Default is `1024`. :encoding : Transit writer's encoding. Default is `:json`. Passed to `reader` as its second argument. An example options map: {:buffer-size 4096 :encoding :json-verbose :handlers {Foo (FooHandler.)}} `:buffer-size` & `:encoding` keys are stripped from `options` before calling `writer`. " ([v] (encode v nil)) ([v options] {:post [(string? %)]} (let [buffer-size (get options :buffer-size 1024) encoding (get options :encoding :json) options (dissoc options :buffer-size :encoding) out (ByteArrayOutputStream. buffer-size) _ (-> out (t/writer encoding options) (t/write v)) result (.toString out)] (.reset out) result))) (defn wrap-transit "Decodes Transit requests and encodes Transit responses. #### Parameters: handler : Ring handler to wrap. options : Optional parameter. A map of options that can contain a `:reader` and a `:writer` keys which correspond to options to be passed to [[decode]] and [[encode]] respectively. `[:reader :encoding]` will always be overwritten using the `Content-Type` header of the request. #### Transit Requests Decoded Transit messages can be accessed through request's `:transit-params` key. If the decoded object is a map, it will be also be merged with request's `:params`. For Transit requests, `:body` is read into a string and is available to downstream. 
#### Transit Responses If there is no `Content-Type` header, anything but the types ring accepts as valid response bodies are encoded. If `Content-Type` is present it overrides the type of `:body`. | Content Type Header | Response Type | Encoded? | |---------------------|-------------------------------------------|:--------:| | Not present. | `String`, `InputStream`, `File` or `ISeq` | No. | | Not present. | Anything else. | Yes. | | application/transit | Anything. | Yes. | | Other content type | Anything. | No. | #### Aleph Support If you have [Aleph](http://aleph.io/) in your classpath deferred responses will be handled properly using `manifold.deferred/chain`. " ([handler] (wrap-transit handler nil)) ([handler options] (let [req-opts (select-keys options [:reader]) res-opts (select-keys options [:writer])] (fn [request] (-> request (-decode-request req-opts) (handler) (chain #(-encode-response % res-opts))))))) (defn ^:no-doc -decode-request [request options] (let [{:keys [transit? encoding charset]} (parse-transit-content-type request)] (if-not transit? request (let [options (assoc options :encoding (or encoding (:encoding options))) body (slurp (:body request) :encoding charset) transit-params (decode body options) request' (assoc request :body body :transit-params transit-params)] (assert (contains? #{:json :msgpack nil} encoding)) (if (map? transit-params) (update-in request' [:params] merge transit-params) request'))))) (defn ^:no-doc -encode-response [response options] (let [{:keys [transit? encoding charset] :as ct} (parse-transit-content-type response)] (if (or (false? transit?) (and (nil? transit?) (ring-response-body? response))) response (-> response (cond-> (nil? 
(:content-type ct)) (content-type (format "application/transit+%s; charset=%s" (name (or encoding (get :encoding options :json))) charset))) (update-in [:body] encode options))))) (defn- parse-transit-content-type [r] (let [ct (or (get-in r [:headers "content-type"]) (get-in r [:headers "Content-Type"]))] (if (nil? ct) {:charset "utf-8"} (let [[ct-transit encoding _ charset] (re-find re-ct ct) charset (or charset "utf-8")] (cond-> {:content-type ct :transit? false} (some? ct-transit) (assoc :transit? true :encoding (keyword encoding) :charset charset)))))) (defn- ring-response-body? [response] (let [body (:body response)] (or (string? body) (instance? java.io.InputStream body) (instance? java.io.File body) (instance? clojure.lang.ISeq body))))
[ { "context": "r? (sut/execute (with-cmd :user/get {:user/email \"andrew@doe.com\"}))))\n (is (r/success? (sut/execute (with-cmd :u", "end": 908, "score": 0.9999082088470459, "start": 894, "tag": "EMAIL", "value": "andrew@doe.com" }, { "context": "(sut/execute (with-cmd :user/create {:user/email \"andrew@doe.com\"}))))\n (is (= 3 (count (r/get-data (sut/execute ", "end": 997, "score": 0.9999108910560608, "start": 983, "tag": "EMAIL", "value": "andrew@doe.com" }, { "context": "(sut/execute (with-cmd :user/delete {:user/email \"andrew@doe.com\"}))))\n (is (r/error? (sut/execute (with-cmd :use", "end": 1160, "score": 0.9999141693115234, "start": 1146, "tag": "EMAIL", "value": "andrew@doe.com" }, { "context": "r? (sut/execute (with-cmd :user/get {:user/email \"andrew@doe.com\"}))))\n (is (= 2 (count (r/get-data (sut/execute ", "end": 1244, "score": 0.9999107718467712, "start": 1230, "tag": "EMAIL", "value": "andrew@doe.com" } ]
test/src/example/api_test.cljc
just-sultanov/clj-unifier
27
(ns example.api-test (:require #?(:clj [clojure.test :refer [deftest is]] :cljs [cljs.test :refer-macros [deftest is]]) [unifier.response :as r] [example.data :as data] [example.api :as sut])) (defn- with-cmd ([name] (with-cmd name nil)) ([name context] (with-cmd name :v1 context)) ([name version context] {:cmd/name name :cmd/version version :cmd/context context})) (deftest api-test (is (r/error? (sut/execute (with-cmd ::unknown)))) (is (= 2 (count (r/get-data (sut/execute (with-cmd :users/get-all)))))) (is (r/success? (sut/execute (with-cmd :user/get data/user1)))) (is (r/success? (sut/execute (with-cmd :user/get data/user2)))) (is (r/error? (sut/execute (with-cmd :user/create data/user1)))) (is (r/error? (sut/execute (with-cmd :user/create data/user2)))) (is (r/error? (sut/execute (with-cmd :user/get {:user/email "andrew@doe.com"})))) (is (r/success? (sut/execute (with-cmd :user/create {:user/email "andrew@doe.com"})))) (is (= 3 (count (r/get-data (sut/execute (with-cmd :users/get-all)))))) (is (r/success? (sut/execute (with-cmd :user/delete {:user/email "andrew@doe.com"})))) (is (r/error? (sut/execute (with-cmd :user/get {:user/email "andrew@doe.com"})))) (is (= 2 (count (r/get-data (sut/execute (with-cmd :users/get-all)))))))
89713
(ns example.api-test (:require #?(:clj [clojure.test :refer [deftest is]] :cljs [cljs.test :refer-macros [deftest is]]) [unifier.response :as r] [example.data :as data] [example.api :as sut])) (defn- with-cmd ([name] (with-cmd name nil)) ([name context] (with-cmd name :v1 context)) ([name version context] {:cmd/name name :cmd/version version :cmd/context context})) (deftest api-test (is (r/error? (sut/execute (with-cmd ::unknown)))) (is (= 2 (count (r/get-data (sut/execute (with-cmd :users/get-all)))))) (is (r/success? (sut/execute (with-cmd :user/get data/user1)))) (is (r/success? (sut/execute (with-cmd :user/get data/user2)))) (is (r/error? (sut/execute (with-cmd :user/create data/user1)))) (is (r/error? (sut/execute (with-cmd :user/create data/user2)))) (is (r/error? (sut/execute (with-cmd :user/get {:user/email "<EMAIL>"})))) (is (r/success? (sut/execute (with-cmd :user/create {:user/email "<EMAIL>"})))) (is (= 3 (count (r/get-data (sut/execute (with-cmd :users/get-all)))))) (is (r/success? (sut/execute (with-cmd :user/delete {:user/email "<EMAIL>"})))) (is (r/error? (sut/execute (with-cmd :user/get {:user/email "<EMAIL>"})))) (is (= 2 (count (r/get-data (sut/execute (with-cmd :users/get-all)))))))
true
(ns example.api-test (:require #?(:clj [clojure.test :refer [deftest is]] :cljs [cljs.test :refer-macros [deftest is]]) [unifier.response :as r] [example.data :as data] [example.api :as sut])) (defn- with-cmd ([name] (with-cmd name nil)) ([name context] (with-cmd name :v1 context)) ([name version context] {:cmd/name name :cmd/version version :cmd/context context})) (deftest api-test (is (r/error? (sut/execute (with-cmd ::unknown)))) (is (= 2 (count (r/get-data (sut/execute (with-cmd :users/get-all)))))) (is (r/success? (sut/execute (with-cmd :user/get data/user1)))) (is (r/success? (sut/execute (with-cmd :user/get data/user2)))) (is (r/error? (sut/execute (with-cmd :user/create data/user1)))) (is (r/error? (sut/execute (with-cmd :user/create data/user2)))) (is (r/error? (sut/execute (with-cmd :user/get {:user/email "PI:EMAIL:<EMAIL>END_PI"})))) (is (r/success? (sut/execute (with-cmd :user/create {:user/email "PI:EMAIL:<EMAIL>END_PI"})))) (is (= 3 (count (r/get-data (sut/execute (with-cmd :users/get-all)))))) (is (r/success? (sut/execute (with-cmd :user/delete {:user/email "PI:EMAIL:<EMAIL>END_PI"})))) (is (r/error? (sut/execute (with-cmd :user/get {:user/email "PI:EMAIL:<EMAIL>END_PI"})))) (is (= 2 (count (r/get-data (sut/execute (with-cmd :users/get-all)))))))
[ { "context": "ng an auth token\"\n (with-redefs [env {:secret \"swiss cheese\"}\n tc/to-long (fn [_] 1507501300", "end": 750, "score": 0.9746477603912354, "start": 738, "tag": "KEY", "value": "swiss cheese" } ]
test/artstor_metadata_service_os/util_test.clj
ithaka/artstor-metadata-service-os
0
(ns artstor-metadata-service-os.util-test (:require [artstor-metadata-service-os.util :as util] [clojure.test :refer :all] [clojure.test.check.clojure-test :refer [defspec]] [clojure.test.check.properties :as prop] [clojure.test.check.generators :as gen] [environ.core :refer [env]] [clj-time.coerce :as tc])) (defspec test-crappy-encrypt-functions 100 (prop/for-all [plain-str gen/string-alpha-numeric] (let [round-trip (util/decrypt (util/encrypt plain-str))] (is (= round-trip plain-str))))) (deftest test-generate-auth-token (testing "test generating an auth token" (with-redefs [env {:secret "swiss cheese"} tc/to-long (fn [_] 1507501300112)] (is (= "sslps/c35269/2639599.fpx/8Sw7XoQxXC9awrXDs_DGlw/1507504900/" (util/generate-auth-token "sslps/c35269/2639599.fpx"))))))
47172
(ns artstor-metadata-service-os.util-test (:require [artstor-metadata-service-os.util :as util] [clojure.test :refer :all] [clojure.test.check.clojure-test :refer [defspec]] [clojure.test.check.properties :as prop] [clojure.test.check.generators :as gen] [environ.core :refer [env]] [clj-time.coerce :as tc])) (defspec test-crappy-encrypt-functions 100 (prop/for-all [plain-str gen/string-alpha-numeric] (let [round-trip (util/decrypt (util/encrypt plain-str))] (is (= round-trip plain-str))))) (deftest test-generate-auth-token (testing "test generating an auth token" (with-redefs [env {:secret "<KEY>"} tc/to-long (fn [_] 1507501300112)] (is (= "sslps/c35269/2639599.fpx/8Sw7XoQxXC9awrXDs_DGlw/1507504900/" (util/generate-auth-token "sslps/c35269/2639599.fpx"))))))
true
(ns artstor-metadata-service-os.util-test (:require [artstor-metadata-service-os.util :as util] [clojure.test :refer :all] [clojure.test.check.clojure-test :refer [defspec]] [clojure.test.check.properties :as prop] [clojure.test.check.generators :as gen] [environ.core :refer [env]] [clj-time.coerce :as tc])) (defspec test-crappy-encrypt-functions 100 (prop/for-all [plain-str gen/string-alpha-numeric] (let [round-trip (util/decrypt (util/encrypt plain-str))] (is (= round-trip plain-str))))) (deftest test-generate-auth-token (testing "test generating an auth token" (with-redefs [env {:secret "PI:KEY:<KEY>END_PI"} tc/to-long (fn [_] 1507501300112)] (is (= "sslps/c35269/2639599.fpx/8Sw7XoQxXC9awrXDs_DGlw/1507504900/" (util/generate-auth-token "sslps/c35269/2639599.fpx"))))))
[ { "context": " (useForm #js {:email \"johndoe@example.com\"\n ", "end": 486, "score": 0.9999257922172546, "start": 467, "tag": "EMAIL", "value": "johndoe@example.com" }, { "context": " :password \"secret\"\n ", "end": 574, "score": 0.9993765950202942, "start": 568, "tag": "PASSWORD", "value": "secret" } ]
src/cljs/pingcrm/pages/login.cljs
mgl-team/pingcrm-clojure-template
50
(ns pingcrm.pages.login (:require ["@inertiajs/inertia-react" :refer [useForm Head]] [applied-science.js-interop :as j] [pingcrm.shared.buttons :refer [loading-button]] [pingcrm.shared.form-input :refer [text-input]] [pingcrm.shared.logo :refer [logo]])) (defn login-form [] (let [{:keys [data setData errors post processing]} (j/lookup (useForm #js {:email "johndoe@example.com" :password "secret" :remember false})) on-submit #(do (.preventDefault %) (post (js/route "login.store")))] [:<> [:> Head {:title "Login"}] [:div {:class "p-6 bg-indigo-800 min-h-screen flex justify-center items-center"} [:div {:class "w-full max-w-md"} [logo {:class "block mx-auto w-full max-w-xs fill-white" :height "50"}] [:form {:on-submit on-submit :class "mt-8 overflow-hidden bg-white rounded-lg shadow-xl"} [:div {:class "px-10 py-12"} [:h1 {:class "text-3xl font-bold text-center"} "Welcome Back!"] [:div {:class "w-24 mx-auto mt-6 border-b-2"}] [text-input {:class "mt-10" :label "Email" :name "email" :errors (.-email errors) :value (.-email data) :on-change #(setData "email" (.. % -target -value))}] [text-input {:class "mt-6" :label "Password" :name "password" :type "password" :errors (.-password errors) :value (.-password data) :on-change #(setData "password" (.. % -target -value))}] [:label {:class "flex items-center mt-6 select-none" :html-for "remember"} [:input#remember {:name "remember" :class "mr-1" :type "checkbox" :checked (.-remember data) :on-change #(setData "remember" (.. % -target -checked))}] [:span {:class "text-sm"} "Remember Me"]]] [:div {:class "px-10 py-4 bg-gray-100 border-t border-gray-100 flex"} [loading-button {:type "submit" :loading processing :class "btn-indigo ml-auto"} "Login"]]]]]])) (defn login [] [:f> login-form])
76690
(ns pingcrm.pages.login (:require ["@inertiajs/inertia-react" :refer [useForm Head]] [applied-science.js-interop :as j] [pingcrm.shared.buttons :refer [loading-button]] [pingcrm.shared.form-input :refer [text-input]] [pingcrm.shared.logo :refer [logo]])) (defn login-form [] (let [{:keys [data setData errors post processing]} (j/lookup (useForm #js {:email "<EMAIL>" :password "<PASSWORD>" :remember false})) on-submit #(do (.preventDefault %) (post (js/route "login.store")))] [:<> [:> Head {:title "Login"}] [:div {:class "p-6 bg-indigo-800 min-h-screen flex justify-center items-center"} [:div {:class "w-full max-w-md"} [logo {:class "block mx-auto w-full max-w-xs fill-white" :height "50"}] [:form {:on-submit on-submit :class "mt-8 overflow-hidden bg-white rounded-lg shadow-xl"} [:div {:class "px-10 py-12"} [:h1 {:class "text-3xl font-bold text-center"} "Welcome Back!"] [:div {:class "w-24 mx-auto mt-6 border-b-2"}] [text-input {:class "mt-10" :label "Email" :name "email" :errors (.-email errors) :value (.-email data) :on-change #(setData "email" (.. % -target -value))}] [text-input {:class "mt-6" :label "Password" :name "password" :type "password" :errors (.-password errors) :value (.-password data) :on-change #(setData "password" (.. % -target -value))}] [:label {:class "flex items-center mt-6 select-none" :html-for "remember"} [:input#remember {:name "remember" :class "mr-1" :type "checkbox" :checked (.-remember data) :on-change #(setData "remember" (.. % -target -checked))}] [:span {:class "text-sm"} "Remember Me"]]] [:div {:class "px-10 py-4 bg-gray-100 border-t border-gray-100 flex"} [loading-button {:type "submit" :loading processing :class "btn-indigo ml-auto"} "Login"]]]]]])) (defn login [] [:f> login-form])
true
(ns pingcrm.pages.login (:require ["@inertiajs/inertia-react" :refer [useForm Head]] [applied-science.js-interop :as j] [pingcrm.shared.buttons :refer [loading-button]] [pingcrm.shared.form-input :refer [text-input]] [pingcrm.shared.logo :refer [logo]])) (defn login-form [] (let [{:keys [data setData errors post processing]} (j/lookup (useForm #js {:email "PI:EMAIL:<EMAIL>END_PI" :password "PI:PASSWORD:<PASSWORD>END_PI" :remember false})) on-submit #(do (.preventDefault %) (post (js/route "login.store")))] [:<> [:> Head {:title "Login"}] [:div {:class "p-6 bg-indigo-800 min-h-screen flex justify-center items-center"} [:div {:class "w-full max-w-md"} [logo {:class "block mx-auto w-full max-w-xs fill-white" :height "50"}] [:form {:on-submit on-submit :class "mt-8 overflow-hidden bg-white rounded-lg shadow-xl"} [:div {:class "px-10 py-12"} [:h1 {:class "text-3xl font-bold text-center"} "Welcome Back!"] [:div {:class "w-24 mx-auto mt-6 border-b-2"}] [text-input {:class "mt-10" :label "Email" :name "email" :errors (.-email errors) :value (.-email data) :on-change #(setData "email" (.. % -target -value))}] [text-input {:class "mt-6" :label "Password" :name "password" :type "password" :errors (.-password errors) :value (.-password data) :on-change #(setData "password" (.. % -target -value))}] [:label {:class "flex items-center mt-6 select-none" :html-for "remember"} [:input#remember {:name "remember" :class "mr-1" :type "checkbox" :checked (.-remember data) :on-change #(setData "remember" (.. % -target -checked))}] [:span {:class "text-sm"} "Remember Me"]]] [:div {:class "px-10 py-4 bg-gray-100 border-t border-gray-100 flex"} [loading-button {:type "submit" :loading processing :class "btn-indigo ml-auto"} "Login"]]]]]])) (defn login [] [:f> login-form])
[ { "context": " :bind-dn user :password password)\n search-root (getx-in env [:ldap :searc", "end": 853, "score": 0.9981800317764282, "start": 845, "tag": "PASSWORD", "value": "password" }, { "context": "text\" :placeholder (text :t.ldap/username) :name \"username\" :required true}]\n [:input.form-control {:type", "end": 1971, "score": 0.865416944026947, "start": 1963, "tag": "USERNAME", "value": "username" }, { "context": " username (getx-in req [:form-params \"username\"])\n password (getx-in req [:form-par", "end": 2815, "score": 0.5141604542732239, "start": 2807, "tag": "USERNAME", "value": "username" } ]
src/clj/rems/auth/ldap.clj
JerryTraskelin/rems
0
(ns rems.auth.ldap (:require [clj-ldap.client :as ldap] [clojure.tools.logging :as log] [compojure.core :refer [GET POST defroutes]] [rems.anti-forgery :refer [anti-forgery-field]] [rems.config :refer [env]] [rems.layout :as layout] [rems.text :refer [text]] [rems.util :refer [getx getx-in]] [ring.util.response :refer [redirect]])) ;; Do these need to be configurable? (def ^:private +ldap-search-attributes+ [:userPrincipalName :displayName :company :mail]) (def ^:private +ldap-search-query+ "(userPrincipalName=%s)") (defn- get-ldap-user "Returns nil if login fails, map of properties if succeeds." [user password] (try (let [connection (assoc (getx-in env [:ldap :connection]) :bind-dn user :password password) search-root (getx-in env [:ldap :search-root]) conn (ldap/connect connection) users (ldap/search conn search-root {:filter (format +ldap-search-query+ user) :attributes +ldap-search-attributes+})] (if (= 1 (count users)) (first users) (do (log/errorf "Found %s hits for user %s" (count users) user) nil))) (catch com.unboundid.ldap.sdk.LDAPBindException e (log/errorf "Bind failed for user %s" user) nil))) ;; TODO: should stop using "eppn" and instead convert both shibboleth ;; and ldap users to a common format. (defn- convert-ldap-user "Converts user fetched from LDAP to a Shibboleth-like format." 
[user] {"eppn" (getx user :userPrincipalName) "commonName" (getx user :displayName) "mail" (getx user :mail) "dn" (getx user :dn)}) (defn login-component [] [:div.m-auto.jumbotron [:h2 (text :t.ldap/title)] [:form {:action "/ldap/login" :method "post"} [:input.form-control {:type "text" :placeholder (text :t.ldap/username) :name "username" :required true}] [:input.form-control {:type "password" :placeholder (text :t.ldap/password) :name "password" :required true}] (anti-forgery-field) [:button.btn.btn-lg.btn-primary.btn-block {:type "submit"} (text :t.ldap/login)]]]) (defn login-url [] "/ldap/login") (defn logout-url [] "/ldap/logout") (defn- login-page [] (layout/render "login" (login-component))) (defn- login-failed [] (assoc (redirect "/ldap/login") :flash [{:status :failure :contents (text :t.ldap/failed)}])) (defroutes routes (GET "/ldap/logout" req (let [session (get req :session)] (assoc (redirect "/") :session (dissoc session :identity)))) (GET "/ldap/login" [] (login-page)) (POST "/ldap/login" req (let [session (get req :session) username (getx-in req [:form-params "username"]) password (getx-in req [:form-params "password"]) user (get-ldap-user username password)] (if user (assoc (redirect "/") :session (assoc session :identity (convert-ldap-user user))) (login-failed)))))
56415
(ns rems.auth.ldap (:require [clj-ldap.client :as ldap] [clojure.tools.logging :as log] [compojure.core :refer [GET POST defroutes]] [rems.anti-forgery :refer [anti-forgery-field]] [rems.config :refer [env]] [rems.layout :as layout] [rems.text :refer [text]] [rems.util :refer [getx getx-in]] [ring.util.response :refer [redirect]])) ;; Do these need to be configurable? (def ^:private +ldap-search-attributes+ [:userPrincipalName :displayName :company :mail]) (def ^:private +ldap-search-query+ "(userPrincipalName=%s)") (defn- get-ldap-user "Returns nil if login fails, map of properties if succeeds." [user password] (try (let [connection (assoc (getx-in env [:ldap :connection]) :bind-dn user :password <PASSWORD>) search-root (getx-in env [:ldap :search-root]) conn (ldap/connect connection) users (ldap/search conn search-root {:filter (format +ldap-search-query+ user) :attributes +ldap-search-attributes+})] (if (= 1 (count users)) (first users) (do (log/errorf "Found %s hits for user %s" (count users) user) nil))) (catch com.unboundid.ldap.sdk.LDAPBindException e (log/errorf "Bind failed for user %s" user) nil))) ;; TODO: should stop using "eppn" and instead convert both shibboleth ;; and ldap users to a common format. (defn- convert-ldap-user "Converts user fetched from LDAP to a Shibboleth-like format." 
[user] {"eppn" (getx user :userPrincipalName) "commonName" (getx user :displayName) "mail" (getx user :mail) "dn" (getx user :dn)}) (defn login-component [] [:div.m-auto.jumbotron [:h2 (text :t.ldap/title)] [:form {:action "/ldap/login" :method "post"} [:input.form-control {:type "text" :placeholder (text :t.ldap/username) :name "username" :required true}] [:input.form-control {:type "password" :placeholder (text :t.ldap/password) :name "password" :required true}] (anti-forgery-field) [:button.btn.btn-lg.btn-primary.btn-block {:type "submit"} (text :t.ldap/login)]]]) (defn login-url [] "/ldap/login") (defn logout-url [] "/ldap/logout") (defn- login-page [] (layout/render "login" (login-component))) (defn- login-failed [] (assoc (redirect "/ldap/login") :flash [{:status :failure :contents (text :t.ldap/failed)}])) (defroutes routes (GET "/ldap/logout" req (let [session (get req :session)] (assoc (redirect "/") :session (dissoc session :identity)))) (GET "/ldap/login" [] (login-page)) (POST "/ldap/login" req (let [session (get req :session) username (getx-in req [:form-params "username"]) password (getx-in req [:form-params "password"]) user (get-ldap-user username password)] (if user (assoc (redirect "/") :session (assoc session :identity (convert-ldap-user user))) (login-failed)))))
true
(ns rems.auth.ldap (:require [clj-ldap.client :as ldap] [clojure.tools.logging :as log] [compojure.core :refer [GET POST defroutes]] [rems.anti-forgery :refer [anti-forgery-field]] [rems.config :refer [env]] [rems.layout :as layout] [rems.text :refer [text]] [rems.util :refer [getx getx-in]] [ring.util.response :refer [redirect]])) ;; Do these need to be configurable? (def ^:private +ldap-search-attributes+ [:userPrincipalName :displayName :company :mail]) (def ^:private +ldap-search-query+ "(userPrincipalName=%s)") (defn- get-ldap-user "Returns nil if login fails, map of properties if succeeds." [user password] (try (let [connection (assoc (getx-in env [:ldap :connection]) :bind-dn user :password PI:PASSWORD:<PASSWORD>END_PI) search-root (getx-in env [:ldap :search-root]) conn (ldap/connect connection) users (ldap/search conn search-root {:filter (format +ldap-search-query+ user) :attributes +ldap-search-attributes+})] (if (= 1 (count users)) (first users) (do (log/errorf "Found %s hits for user %s" (count users) user) nil))) (catch com.unboundid.ldap.sdk.LDAPBindException e (log/errorf "Bind failed for user %s" user) nil))) ;; TODO: should stop using "eppn" and instead convert both shibboleth ;; and ldap users to a common format. (defn- convert-ldap-user "Converts user fetched from LDAP to a Shibboleth-like format." 
[user] {"eppn" (getx user :userPrincipalName) "commonName" (getx user :displayName) "mail" (getx user :mail) "dn" (getx user :dn)}) (defn login-component [] [:div.m-auto.jumbotron [:h2 (text :t.ldap/title)] [:form {:action "/ldap/login" :method "post"} [:input.form-control {:type "text" :placeholder (text :t.ldap/username) :name "username" :required true}] [:input.form-control {:type "password" :placeholder (text :t.ldap/password) :name "password" :required true}] (anti-forgery-field) [:button.btn.btn-lg.btn-primary.btn-block {:type "submit"} (text :t.ldap/login)]]]) (defn login-url [] "/ldap/login") (defn logout-url [] "/ldap/logout") (defn- login-page [] (layout/render "login" (login-component))) (defn- login-failed [] (assoc (redirect "/ldap/login") :flash [{:status :failure :contents (text :t.ldap/failed)}])) (defroutes routes (GET "/ldap/logout" req (let [session (get req :session)] (assoc (redirect "/") :session (dissoc session :identity)))) (GET "/ldap/login" [] (login-page)) (POST "/ldap/login" req (let [session (get req :session) username (getx-in req [:form-params "username"]) password (getx-in req [:form-params "password"]) user (get-ldap-user username password)] (if user (assoc (redirect "/") :session (assoc session :identity (convert-ldap-user user))) (login-failed)))))
[ { "context": ";; Test routines for complex-numbers.clj\n\n;; by Konrad Hinsen\n;; last updated April 2, 2009\n\n;; Copyright (c) K", "end": 61, "score": 0.9998551607131958, "start": 48, "tag": "NAME", "value": "Konrad Hinsen" }, { "context": "en\n;; last updated April 2, 2009\n\n;; Copyright (c) Konrad Hinsen, 2008. All rights reserved. The use\n;; and distr", "end": 123, "score": 0.9998440742492676, "start": 110, "tag": "NAME", "value": "Konrad Hinsen" } ]
ThirdParty/clojure-contrib-1.1.0/src/clojure/contrib/test_contrib/complex_numbers.clj
allertonm/Couverjure
3
;; Test routines for complex-numbers.clj ;; by Konrad Hinsen ;; last updated April 2, 2009 ;; Copyright (c) Konrad Hinsen, 2008. All rights reserved. The use ;; and distribution terms for this software are covered by the Eclipse ;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) ;; which can be found in the file epl-v10.html at the root of this ;; distribution. By using this software in any fashion, you are ;; agreeing to be bound by the terms of this license. You must not ;; remove this notice, or any other, from this software. (ns clojure.contrib.test-contrib.complex-numbers (:refer-clojure :exclude [+ - * / = < > <= >=]) (:use [clojure.test :only (deftest is are run-tests)] [clojure.contrib.generic.arithmetic :only (+ - * /)] [clojure.contrib.generic.comparison :only (= < > <= >=)] [clojure.contrib.generic.math-functions :only (abs approx= conjugate exp sqr sqrt)] [clojure.contrib.complex-numbers :only (complex imaginary real imag)])) (deftest complex-addition (is (= (+ (complex 1 2) (complex 1 2)) (complex 2 4))) (is (= (+ (complex 1 2) (complex -3 -7)) (complex -2 -5))) (is (= (+ (complex -3 -7) (complex 1 2)) (complex -2 -5))) (is (= (+ (complex 1 2) 3) (complex 4 2))) (is (= (+ 3 (complex 1 2)) (complex 4 2))) (is (= (+ (complex 1 2) -1) (imaginary 2))) (is (= (+ -1 (complex 1 2)) (imaginary 2))) (is (= (+ (complex 1 2) (imaginary -2)) 1)) (is (= (+ (imaginary -2) (complex 1 2)) 1)) (is (= (+ (complex 1 2) (imaginary 5)) (complex 1 7))) (is (= (+ (imaginary 5) (complex 1 2)) (complex 1 7))) (is (= (+ (complex -3 -7) (complex 1 2)) (complex -2 -5))) (is (= (+ (complex 1 2) (complex -3 -7)) (complex -2 -5))) (is (= (+ (complex -3 -7) (complex -3 -7)) (complex -6 -14))) (is (= (+ (complex -3 -7) 3) (imaginary -7))) (is (= (+ 3 (complex -3 -7)) (imaginary -7))) (is (= (+ (complex -3 -7) -1) (complex -4 -7))) (is (= (+ -1 (complex -3 -7)) (complex -4 -7))) (is (= (+ (complex -3 -7) (imaginary -2)) (complex -3 -9))) (is (= (+ (imaginary -2) 
(complex -3 -7)) (complex -3 -9))) (is (= (+ (complex -3 -7) (imaginary 5)) (complex -3 -2))) (is (= (+ (imaginary 5) (complex -3 -7)) (complex -3 -2))) (is (= (+ 3 (complex 1 2)) (complex 4 2))) (is (= (+ (complex 1 2) 3) (complex 4 2))) (is (= (+ 3 (complex -3 -7)) (imaginary -7))) (is (= (+ (complex -3 -7) 3) (imaginary -7))) (is (= (+ 3 (imaginary -2)) (complex 3 -2))) (is (= (+ (imaginary -2) 3) (complex 3 -2))) (is (= (+ 3 (imaginary 5)) (complex 3 5))) (is (= (+ (imaginary 5) 3) (complex 3 5))) (is (= (+ -1 (complex 1 2)) (imaginary 2))) (is (= (+ (complex 1 2) -1) (imaginary 2))) (is (= (+ -1 (complex -3 -7)) (complex -4 -7))) (is (= (+ (complex -3 -7) -1) (complex -4 -7))) (is (= (+ -1 (imaginary -2)) (complex -1 -2))) (is (= (+ (imaginary -2) -1) (complex -1 -2))) (is (= (+ -1 (imaginary 5)) (complex -1 5))) (is (= (+ (imaginary 5) -1) (complex -1 5))) (is (= (+ (imaginary -2) (complex 1 2)) 1)) (is (= (+ (complex 1 2) (imaginary -2)) 1)) (is (= (+ (imaginary -2) (complex -3 -7)) (complex -3 -9))) (is (= (+ (complex -3 -7) (imaginary -2)) (complex -3 -9))) (is (= (+ (imaginary -2) 3) (complex 3 -2))) (is (= (+ 3 (imaginary -2)) (complex 3 -2))) (is (= (+ (imaginary -2) -1) (complex -1 -2))) (is (= (+ -1 (imaginary -2)) (complex -1 -2))) (is (= (+ (imaginary -2) (imaginary -2)) (imaginary -4))) (is (= (+ (imaginary -2) (imaginary 5)) (imaginary 3))) (is (= (+ (imaginary 5) (imaginary -2)) (imaginary 3))) (is (= (+ (imaginary 5) (complex 1 2)) (complex 1 7))) (is (= (+ (complex 1 2) (imaginary 5)) (complex 1 7))) (is (= (+ (imaginary 5) (complex -3 -7)) (complex -3 -2))) (is (= (+ (complex -3 -7) (imaginary 5)) (complex -3 -2))) (is (= (+ (imaginary 5) 3) (complex 3 5))) (is (= (+ 3 (imaginary 5)) (complex 3 5))) (is (= (+ (imaginary 5) -1) (complex -1 5))) (is (= (+ -1 (imaginary 5)) (complex -1 5))) (is (= (+ (imaginary 5) (imaginary -2)) (imaginary 3))) (is (= (+ (imaginary -2) (imaginary 5)) (imaginary 3))) (is (= (+ (imaginary 5) (imaginary 5)) 
(imaginary 10)))) (deftest complex-subtraction (is (= (- (complex 1 2) (complex 1 2)) 0)) (is (= (- (complex 1 2) (complex -3 -7)) (complex 4 9))) (is (= (- (complex -3 -7) (complex 1 2)) (complex -4 -9))) (is (= (- (complex 1 2) 3) (complex -2 2))) (is (= (- 3 (complex 1 2)) (complex 2 -2))) (is (= (- (complex 1 2) -1) (complex 2 2))) (is (= (- -1 (complex 1 2)) (complex -2 -2))) (is (= (- (complex 1 2) (imaginary -2)) (complex 1 4))) (is (= (- (imaginary -2) (complex 1 2)) (complex -1 -4))) (is (= (- (complex 1 2) (imaginary 5)) (complex 1 -3))) (is (= (- (imaginary 5) (complex 1 2)) (complex -1 3))) (is (= (- (complex -3 -7) (complex 1 2)) (complex -4 -9))) (is (= (- (complex 1 2) (complex -3 -7)) (complex 4 9))) (is (= (- (complex -3 -7) (complex -3 -7)) 0)) (is (= (- (complex -3 -7) 3) (complex -6 -7))) (is (= (- 3 (complex -3 -7)) (complex 6 7))) (is (= (- (complex -3 -7) -1) (complex -2 -7))) (is (= (- -1 (complex -3 -7)) (complex 2 7))) (is (= (- (complex -3 -7) (imaginary -2)) (complex -3 -5))) (is (= (- (imaginary -2) (complex -3 -7)) (complex 3 5))) (is (= (- (complex -3 -7) (imaginary 5)) (complex -3 -12))) (is (= (- (imaginary 5) (complex -3 -7)) (complex 3 12))) (is (= (- 3 (complex 1 2)) (complex 2 -2))) (is (= (- (complex 1 2) 3) (complex -2 2))) (is (= (- 3 (complex -3 -7)) (complex 6 7))) (is (= (- (complex -3 -7) 3) (complex -6 -7))) (is (= (- 3 (imaginary -2)) (complex 3 2))) (is (= (- (imaginary -2) 3) (complex -3 -2))) (is (= (- 3 (imaginary 5)) (complex 3 -5))) (is (= (- (imaginary 5) 3) (complex -3 5))) (is (= (- -1 (complex 1 2)) (complex -2 -2))) (is (= (- (complex 1 2) -1) (complex 2 2))) (is (= (- -1 (complex -3 -7)) (complex 2 7))) (is (= (- (complex -3 -7) -1) (complex -2 -7))) (is (= (- -1 (imaginary -2)) (complex -1 2))) (is (= (- (imaginary -2) -1) (complex 1 -2))) (is (= (- -1 (imaginary 5)) (complex -1 -5))) (is (= (- (imaginary 5) -1) (complex 1 5))) (is (= (- (imaginary -2) (complex 1 2)) (complex -1 -4))) (is (= (- (complex 1 
2) (imaginary -2)) (complex 1 4))) (is (= (- (imaginary -2) (complex -3 -7)) (complex 3 5))) (is (= (- (complex -3 -7) (imaginary -2)) (complex -3 -5))) (is (= (- (imaginary -2) 3) (complex -3 -2))) (is (= (- 3 (imaginary -2)) (complex 3 2))) (is (= (- (imaginary -2) -1) (complex 1 -2))) (is (= (- -1 (imaginary -2)) (complex -1 2))) (is (= (- (imaginary -2) (imaginary -2)) 0)) (is (= (- (imaginary -2) (imaginary 5)) (imaginary -7))) (is (= (- (imaginary 5) (imaginary -2)) (imaginary 7))) (is (= (- (imaginary 5) (complex 1 2)) (complex -1 3))) (is (= (- (complex 1 2) (imaginary 5)) (complex 1 -3))) (is (= (- (imaginary 5) (complex -3 -7)) (complex 3 12))) (is (= (- (complex -3 -7) (imaginary 5)) (complex -3 -12))) (is (= (- (imaginary 5) 3) (complex -3 5))) (is (= (- 3 (imaginary 5)) (complex 3 -5))) (is (= (- (imaginary 5) -1) (complex 1 5))) (is (= (- -1 (imaginary 5)) (complex -1 -5))) (is (= (- (imaginary 5) (imaginary -2)) (imaginary 7))) (is (= (- (imaginary -2) (imaginary 5)) (imaginary -7))) (is (= (- (imaginary 5) (imaginary 5)) 0))) (deftest complex-multiplication (is (= (* (complex 1 2) (complex 1 2)) (complex -3 4))) (is (= (* (complex 1 2) (complex -3 -7)) (complex 11 -13))) (is (= (* (complex -3 -7) (complex 1 2)) (complex 11 -13))) (is (= (* (complex 1 2) 3) (complex 3 6))) (is (= (* 3 (complex 1 2)) (complex 3 6))) (is (= (* (complex 1 2) -1) (complex -1 -2))) (is (= (* -1 (complex 1 2)) (complex -1 -2))) (is (= (* (complex 1 2) (imaginary -2)) (complex 4 -2))) (is (= (* (imaginary -2) (complex 1 2)) (complex 4 -2))) (is (= (* (complex 1 2) (imaginary 5)) (complex -10 5))) (is (= (* (imaginary 5) (complex 1 2)) (complex -10 5))) (is (= (* (complex -3 -7) (complex 1 2)) (complex 11 -13))) (is (= (* (complex 1 2) (complex -3 -7)) (complex 11 -13))) (is (= (* (complex -3 -7) (complex -3 -7)) (complex -40 42))) (is (= (* (complex -3 -7) 3) (complex -9 -21))) (is (= (* 3 (complex -3 -7)) (complex -9 -21))) (is (= (* (complex -3 -7) -1) (complex 3 7))) (is 
(= (* -1 (complex -3 -7)) (complex 3 7))) (is (= (* (complex -3 -7) (imaginary -2)) (complex -14 6))) (is (= (* (imaginary -2) (complex -3 -7)) (complex -14 6))) (is (= (* (complex -3 -7) (imaginary 5)) (complex 35 -15))) (is (= (* (imaginary 5) (complex -3 -7)) (complex 35 -15))) (is (= (* 3 (complex 1 2)) (complex 3 6))) (is (= (* (complex 1 2) 3) (complex 3 6))) (is (= (* 3 (complex -3 -7)) (complex -9 -21))) (is (= (* (complex -3 -7) 3) (complex -9 -21))) (is (= (* 3 (imaginary -2)) (imaginary -6))) (is (= (* (imaginary -2) 3) (imaginary -6))) (is (= (* 3 (imaginary 5)) (imaginary 15))) (is (= (* (imaginary 5) 3) (imaginary 15))) (is (= (* -1 (complex 1 2)) (complex -1 -2))) (is (= (* (complex 1 2) -1) (complex -1 -2))) (is (= (* -1 (complex -3 -7)) (complex 3 7))) (is (= (* (complex -3 -7) -1) (complex 3 7))) (is (= (* -1 (imaginary -2)) (imaginary 2))) (is (= (* (imaginary -2) -1) (imaginary 2))) (is (= (* -1 (imaginary 5)) (imaginary -5))) (is (= (* (imaginary 5) -1) (imaginary -5))) (is (= (* (imaginary -2) (complex 1 2)) (complex 4 -2))) (is (= (* (complex 1 2) (imaginary -2)) (complex 4 -2))) (is (= (* (imaginary -2) (complex -3 -7)) (complex -14 6))) (is (= (* (complex -3 -7) (imaginary -2)) (complex -14 6))) (is (= (* (imaginary -2) 3) (imaginary -6))) (is (= (* 3 (imaginary -2)) (imaginary -6))) (is (= (* (imaginary -2) -1) (imaginary 2))) (is (= (* -1 (imaginary -2)) (imaginary 2))) (is (= (* (imaginary -2) (imaginary -2)) -4)) (is (= (* (imaginary -2) (imaginary 5)) 10)) (is (= (* (imaginary 5) (imaginary -2)) 10)) (is (= (* (imaginary 5) (complex 1 2)) (complex -10 5))) (is (= (* (complex 1 2) (imaginary 5)) (complex -10 5))) (is (= (* (imaginary 5) (complex -3 -7)) (complex 35 -15))) (is (= (* (complex -3 -7) (imaginary 5)) (complex 35 -15))) (is (= (* (imaginary 5) 3) (imaginary 15))) (is (= (* 3 (imaginary 5)) (imaginary 15))) (is (= (* (imaginary 5) -1) (imaginary -5))) (is (= (* -1 (imaginary 5)) (imaginary -5))) (is (= (* (imaginary 5) 
(imaginary -2)) 10)) (is (= (* (imaginary -2) (imaginary 5)) 10)) (is (= (* (imaginary 5) (imaginary 5)) -25))) (deftest complex-division (is (= (/ (complex 1 2) (complex 1 2)) 1)) (is (= (/ (complex 1 2) (complex -3 -7)) (complex -17/58 1/58))) (is (= (/ (complex -3 -7) (complex 1 2)) (complex -17/5 -1/5))) (is (= (/ (complex 1 2) 3) (complex 1/3 2/3))) (is (= (/ 3 (complex 1 2)) (complex 3/5 -6/5))) (is (= (/ (complex 1 2) -1) (complex -1 -2))) (is (= (/ -1 (complex 1 2)) (complex -1/5 2/5))) (is (= (/ (complex 1 2) (imaginary -2)) (complex -1 1/2))) (is (= (/ (imaginary -2) (complex 1 2)) (complex -4/5 -2/5))) (is (= (/ (complex 1 2) (imaginary 5)) (complex 2/5 -1/5))) (is (= (/ (imaginary 5) (complex 1 2)) (complex 2 1))) (is (= (/ (complex -3 -7) (complex 1 2)) (complex -17/5 -1/5))) (is (= (/ (complex 1 2) (complex -3 -7)) (complex -17/58 1/58))) (is (= (/ (complex -3 -7) (complex -3 -7)) 1)) (is (= (/ (complex -3 -7) 3) (complex -1 -7/3))) (is (= (/ 3 (complex -3 -7)) (complex -9/58 21/58))) (is (= (/ (complex -3 -7) -1) (complex 3 7))) (is (= (/ -1 (complex -3 -7)) (complex 3/58 -7/58))) (is (= (/ (complex -3 -7) (imaginary -2)) (complex 7/2 -3/2))) (is (= (/ (imaginary -2) (complex -3 -7)) (complex 7/29 3/29))) (is (= (/ (complex -3 -7) (imaginary 5)) (complex -7/5 3/5))) (is (= (/ (imaginary 5) (complex -3 -7)) (complex -35/58 -15/58))) (is (= (/ 3 (complex 1 2)) (complex 3/5 -6/5))) (is (= (/ (complex 1 2) 3) (complex 1/3 2/3))) (is (= (/ 3 (complex -3 -7)) (complex -9/58 21/58))) (is (= (/ (complex -3 -7) 3) (complex -1 -7/3))) (is (= (/ 3 (imaginary -2)) (imaginary 1.5))) (is (= (/ (imaginary -2) 3) (imaginary -2/3))) (is (= (/ 3 (imaginary 5)) (imaginary -3/5))) (is (= (/ (imaginary 5) 3) (imaginary 5/3))) (is (= (/ -1 (complex 1 2)) (complex -1/5 2/5))) (is (= (/ (complex 1 2) -1) (complex -1 -2))) (is (= (/ -1 (complex -3 -7)) (complex 3/58 -7/58))) (is (= (/ (complex -3 -7) -1) (complex 3 7))) (is (= (/ -1 (imaginary -2)) (imaginary -1/2))) (is (= 
(/ (imaginary -2) -1) (imaginary 2))) (is (= (/ -1 (imaginary 5)) (imaginary 1/5))) (is (= (/ (imaginary 5) -1) (imaginary -5))) (is (= (/ (imaginary -2) (complex 1 2)) (complex -4/5 -2/5))) (is (= (/ (complex 1 2) (imaginary -2)) (complex -1 1/2))) (is (= (/ (imaginary -2) (complex -3 -7)) (complex 7/29 3/29))) (is (= (/ (complex -3 -7) (imaginary -2)) (complex 7/2 -3/2))) (is (= (/ (imaginary -2) 3) (imaginary -2/3))) (is (= (/ 3 (imaginary -2)) (imaginary 3/2))) (is (= (/ (imaginary -2) -1) (imaginary 2))) (is (= (/ -1 (imaginary -2)) (imaginary -1/2))) (is (= (/ (imaginary -2) (imaginary -2)) 1)) (is (= (/ (imaginary -2) (imaginary 5)) -2/5)) (is (= (/ (imaginary 5) (imaginary -2)) -5/2)) (is (= (/ (imaginary 5) (complex 1 2)) (complex 2 1))) (is (= (/ (complex 1 2) (imaginary 5)) (complex 2/5 -1/5))) (is (= (/ (imaginary 5) (complex -3 -7)) (complex -35/58 -15/58))) (is (= (/ (complex -3 -7) (imaginary 5)) (complex -7/5 3/5))) (is (= (/ (imaginary 5) 3) (imaginary 5/3))) (is (= (/ 3 (imaginary 5)) (imaginary -3/5))) (is (= (/ (imaginary 5) -1) (imaginary -5))) (is (= (/ -1 (imaginary 5)) (imaginary 1/5))) (is (= (/ (imaginary 5) (imaginary -2)) -5/2)) (is (= (/ (imaginary -2) (imaginary 5)) -2/5)) (is (= (/ (imaginary 5) (imaginary 5)) 1))) (deftest complex-conjugate (is (= (conjugate (complex 1 2)) (complex 1 -2))) (is (= (conjugate (complex -3 -7)) (complex -3 7))) (is (= (conjugate (imaginary -2)) (imaginary 2))) (is (= (conjugate (imaginary 5)) (imaginary -5)))) (deftest complex-abs (doseq [c [(complex 1 2) (complex -2 3) (complex 4 -2) (complex -3 -7) (imaginary -2) (imaginary 5)]] (is (approx= (* c (conjugate c)) (sqr (abs c)) 1e-14)))) (deftest complex-sqrt (doseq [c [(complex 1 2) (complex -2 3) (complex 4 -2) (complex -3 -7) (imaginary -2) (imaginary 5)]] (let [r (sqrt c)] (is (approx= c (sqr r) 1e-14)) (is (>= (real r) 0))))) (deftest complex-exp (is (approx= (exp (complex 1 2)) (complex -1.1312043837568135 2.4717266720048188) 1e-14)) (is (approx= 
(exp (complex 2 3)) (complex -7.3151100949011028 1.0427436562359045) 1e-14)) (is (approx= (exp (complex 4 -2)) (complex -22.720847417619233 -49.645957334580565) 1e-14)) (is (approx= (exp (complex 3 -7)) (complex 15.142531566086868 -13.195928586605717) 1e-14)) (is (approx= (exp (imaginary -2)) (complex -0.41614683654714241 -0.90929742682568171) 1e-14)) (is (approx= (exp (imaginary 5)) (complex 0.2836621854632263 -0.95892427466313845) 1e-14)))
79298
;; Test routines for complex-numbers.clj ;; by <NAME> ;; last updated April 2, 2009 ;; Copyright (c) <NAME>, 2008. All rights reserved. The use ;; and distribution terms for this software are covered by the Eclipse ;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) ;; which can be found in the file epl-v10.html at the root of this ;; distribution. By using this software in any fashion, you are ;; agreeing to be bound by the terms of this license. You must not ;; remove this notice, or any other, from this software. (ns clojure.contrib.test-contrib.complex-numbers (:refer-clojure :exclude [+ - * / = < > <= >=]) (:use [clojure.test :only (deftest is are run-tests)] [clojure.contrib.generic.arithmetic :only (+ - * /)] [clojure.contrib.generic.comparison :only (= < > <= >=)] [clojure.contrib.generic.math-functions :only (abs approx= conjugate exp sqr sqrt)] [clojure.contrib.complex-numbers :only (complex imaginary real imag)])) (deftest complex-addition (is (= (+ (complex 1 2) (complex 1 2)) (complex 2 4))) (is (= (+ (complex 1 2) (complex -3 -7)) (complex -2 -5))) (is (= (+ (complex -3 -7) (complex 1 2)) (complex -2 -5))) (is (= (+ (complex 1 2) 3) (complex 4 2))) (is (= (+ 3 (complex 1 2)) (complex 4 2))) (is (= (+ (complex 1 2) -1) (imaginary 2))) (is (= (+ -1 (complex 1 2)) (imaginary 2))) (is (= (+ (complex 1 2) (imaginary -2)) 1)) (is (= (+ (imaginary -2) (complex 1 2)) 1)) (is (= (+ (complex 1 2) (imaginary 5)) (complex 1 7))) (is (= (+ (imaginary 5) (complex 1 2)) (complex 1 7))) (is (= (+ (complex -3 -7) (complex 1 2)) (complex -2 -5))) (is (= (+ (complex 1 2) (complex -3 -7)) (complex -2 -5))) (is (= (+ (complex -3 -7) (complex -3 -7)) (complex -6 -14))) (is (= (+ (complex -3 -7) 3) (imaginary -7))) (is (= (+ 3 (complex -3 -7)) (imaginary -7))) (is (= (+ (complex -3 -7) -1) (complex -4 -7))) (is (= (+ -1 (complex -3 -7)) (complex -4 -7))) (is (= (+ (complex -3 -7) (imaginary -2)) (complex -3 -9))) (is (= (+ (imaginary -2) (complex -3 -7)) 
(complex -3 -9))) (is (= (+ (complex -3 -7) (imaginary 5)) (complex -3 -2))) (is (= (+ (imaginary 5) (complex -3 -7)) (complex -3 -2))) (is (= (+ 3 (complex 1 2)) (complex 4 2))) (is (= (+ (complex 1 2) 3) (complex 4 2))) (is (= (+ 3 (complex -3 -7)) (imaginary -7))) (is (= (+ (complex -3 -7) 3) (imaginary -7))) (is (= (+ 3 (imaginary -2)) (complex 3 -2))) (is (= (+ (imaginary -2) 3) (complex 3 -2))) (is (= (+ 3 (imaginary 5)) (complex 3 5))) (is (= (+ (imaginary 5) 3) (complex 3 5))) (is (= (+ -1 (complex 1 2)) (imaginary 2))) (is (= (+ (complex 1 2) -1) (imaginary 2))) (is (= (+ -1 (complex -3 -7)) (complex -4 -7))) (is (= (+ (complex -3 -7) -1) (complex -4 -7))) (is (= (+ -1 (imaginary -2)) (complex -1 -2))) (is (= (+ (imaginary -2) -1) (complex -1 -2))) (is (= (+ -1 (imaginary 5)) (complex -1 5))) (is (= (+ (imaginary 5) -1) (complex -1 5))) (is (= (+ (imaginary -2) (complex 1 2)) 1)) (is (= (+ (complex 1 2) (imaginary -2)) 1)) (is (= (+ (imaginary -2) (complex -3 -7)) (complex -3 -9))) (is (= (+ (complex -3 -7) (imaginary -2)) (complex -3 -9))) (is (= (+ (imaginary -2) 3) (complex 3 -2))) (is (= (+ 3 (imaginary -2)) (complex 3 -2))) (is (= (+ (imaginary -2) -1) (complex -1 -2))) (is (= (+ -1 (imaginary -2)) (complex -1 -2))) (is (= (+ (imaginary -2) (imaginary -2)) (imaginary -4))) (is (= (+ (imaginary -2) (imaginary 5)) (imaginary 3))) (is (= (+ (imaginary 5) (imaginary -2)) (imaginary 3))) (is (= (+ (imaginary 5) (complex 1 2)) (complex 1 7))) (is (= (+ (complex 1 2) (imaginary 5)) (complex 1 7))) (is (= (+ (imaginary 5) (complex -3 -7)) (complex -3 -2))) (is (= (+ (complex -3 -7) (imaginary 5)) (complex -3 -2))) (is (= (+ (imaginary 5) 3) (complex 3 5))) (is (= (+ 3 (imaginary 5)) (complex 3 5))) (is (= (+ (imaginary 5) -1) (complex -1 5))) (is (= (+ -1 (imaginary 5)) (complex -1 5))) (is (= (+ (imaginary 5) (imaginary -2)) (imaginary 3))) (is (= (+ (imaginary -2) (imaginary 5)) (imaginary 3))) (is (= (+ (imaginary 5) (imaginary 5)) (imaginary 10)))) 
(deftest complex-subtraction (is (= (- (complex 1 2) (complex 1 2)) 0)) (is (= (- (complex 1 2) (complex -3 -7)) (complex 4 9))) (is (= (- (complex -3 -7) (complex 1 2)) (complex -4 -9))) (is (= (- (complex 1 2) 3) (complex -2 2))) (is (= (- 3 (complex 1 2)) (complex 2 -2))) (is (= (- (complex 1 2) -1) (complex 2 2))) (is (= (- -1 (complex 1 2)) (complex -2 -2))) (is (= (- (complex 1 2) (imaginary -2)) (complex 1 4))) (is (= (- (imaginary -2) (complex 1 2)) (complex -1 -4))) (is (= (- (complex 1 2) (imaginary 5)) (complex 1 -3))) (is (= (- (imaginary 5) (complex 1 2)) (complex -1 3))) (is (= (- (complex -3 -7) (complex 1 2)) (complex -4 -9))) (is (= (- (complex 1 2) (complex -3 -7)) (complex 4 9))) (is (= (- (complex -3 -7) (complex -3 -7)) 0)) (is (= (- (complex -3 -7) 3) (complex -6 -7))) (is (= (- 3 (complex -3 -7)) (complex 6 7))) (is (= (- (complex -3 -7) -1) (complex -2 -7))) (is (= (- -1 (complex -3 -7)) (complex 2 7))) (is (= (- (complex -3 -7) (imaginary -2)) (complex -3 -5))) (is (= (- (imaginary -2) (complex -3 -7)) (complex 3 5))) (is (= (- (complex -3 -7) (imaginary 5)) (complex -3 -12))) (is (= (- (imaginary 5) (complex -3 -7)) (complex 3 12))) (is (= (- 3 (complex 1 2)) (complex 2 -2))) (is (= (- (complex 1 2) 3) (complex -2 2))) (is (= (- 3 (complex -3 -7)) (complex 6 7))) (is (= (- (complex -3 -7) 3) (complex -6 -7))) (is (= (- 3 (imaginary -2)) (complex 3 2))) (is (= (- (imaginary -2) 3) (complex -3 -2))) (is (= (- 3 (imaginary 5)) (complex 3 -5))) (is (= (- (imaginary 5) 3) (complex -3 5))) (is (= (- -1 (complex 1 2)) (complex -2 -2))) (is (= (- (complex 1 2) -1) (complex 2 2))) (is (= (- -1 (complex -3 -7)) (complex 2 7))) (is (= (- (complex -3 -7) -1) (complex -2 -7))) (is (= (- -1 (imaginary -2)) (complex -1 2))) (is (= (- (imaginary -2) -1) (complex 1 -2))) (is (= (- -1 (imaginary 5)) (complex -1 -5))) (is (= (- (imaginary 5) -1) (complex 1 5))) (is (= (- (imaginary -2) (complex 1 2)) (complex -1 -4))) (is (= (- (complex 1 2) (imaginary -2)) 
(complex 1 4))) (is (= (- (imaginary -2) (complex -3 -7)) (complex 3 5))) (is (= (- (complex -3 -7) (imaginary -2)) (complex -3 -5))) (is (= (- (imaginary -2) 3) (complex -3 -2))) (is (= (- 3 (imaginary -2)) (complex 3 2))) (is (= (- (imaginary -2) -1) (complex 1 -2))) (is (= (- -1 (imaginary -2)) (complex -1 2))) (is (= (- (imaginary -2) (imaginary -2)) 0)) (is (= (- (imaginary -2) (imaginary 5)) (imaginary -7))) (is (= (- (imaginary 5) (imaginary -2)) (imaginary 7))) (is (= (- (imaginary 5) (complex 1 2)) (complex -1 3))) (is (= (- (complex 1 2) (imaginary 5)) (complex 1 -3))) (is (= (- (imaginary 5) (complex -3 -7)) (complex 3 12))) (is (= (- (complex -3 -7) (imaginary 5)) (complex -3 -12))) (is (= (- (imaginary 5) 3) (complex -3 5))) (is (= (- 3 (imaginary 5)) (complex 3 -5))) (is (= (- (imaginary 5) -1) (complex 1 5))) (is (= (- -1 (imaginary 5)) (complex -1 -5))) (is (= (- (imaginary 5) (imaginary -2)) (imaginary 7))) (is (= (- (imaginary -2) (imaginary 5)) (imaginary -7))) (is (= (- (imaginary 5) (imaginary 5)) 0))) (deftest complex-multiplication (is (= (* (complex 1 2) (complex 1 2)) (complex -3 4))) (is (= (* (complex 1 2) (complex -3 -7)) (complex 11 -13))) (is (= (* (complex -3 -7) (complex 1 2)) (complex 11 -13))) (is (= (* (complex 1 2) 3) (complex 3 6))) (is (= (* 3 (complex 1 2)) (complex 3 6))) (is (= (* (complex 1 2) -1) (complex -1 -2))) (is (= (* -1 (complex 1 2)) (complex -1 -2))) (is (= (* (complex 1 2) (imaginary -2)) (complex 4 -2))) (is (= (* (imaginary -2) (complex 1 2)) (complex 4 -2))) (is (= (* (complex 1 2) (imaginary 5)) (complex -10 5))) (is (= (* (imaginary 5) (complex 1 2)) (complex -10 5))) (is (= (* (complex -3 -7) (complex 1 2)) (complex 11 -13))) (is (= (* (complex 1 2) (complex -3 -7)) (complex 11 -13))) (is (= (* (complex -3 -7) (complex -3 -7)) (complex -40 42))) (is (= (* (complex -3 -7) 3) (complex -9 -21))) (is (= (* 3 (complex -3 -7)) (complex -9 -21))) (is (= (* (complex -3 -7) -1) (complex 3 7))) (is (= (* -1 (complex 
-3 -7)) (complex 3 7))) (is (= (* (complex -3 -7) (imaginary -2)) (complex -14 6))) (is (= (* (imaginary -2) (complex -3 -7)) (complex -14 6))) (is (= (* (complex -3 -7) (imaginary 5)) (complex 35 -15))) (is (= (* (imaginary 5) (complex -3 -7)) (complex 35 -15))) (is (= (* 3 (complex 1 2)) (complex 3 6))) (is (= (* (complex 1 2) 3) (complex 3 6))) (is (= (* 3 (complex -3 -7)) (complex -9 -21))) (is (= (* (complex -3 -7) 3) (complex -9 -21))) (is (= (* 3 (imaginary -2)) (imaginary -6))) (is (= (* (imaginary -2) 3) (imaginary -6))) (is (= (* 3 (imaginary 5)) (imaginary 15))) (is (= (* (imaginary 5) 3) (imaginary 15))) (is (= (* -1 (complex 1 2)) (complex -1 -2))) (is (= (* (complex 1 2) -1) (complex -1 -2))) (is (= (* -1 (complex -3 -7)) (complex 3 7))) (is (= (* (complex -3 -7) -1) (complex 3 7))) (is (= (* -1 (imaginary -2)) (imaginary 2))) (is (= (* (imaginary -2) -1) (imaginary 2))) (is (= (* -1 (imaginary 5)) (imaginary -5))) (is (= (* (imaginary 5) -1) (imaginary -5))) (is (= (* (imaginary -2) (complex 1 2)) (complex 4 -2))) (is (= (* (complex 1 2) (imaginary -2)) (complex 4 -2))) (is (= (* (imaginary -2) (complex -3 -7)) (complex -14 6))) (is (= (* (complex -3 -7) (imaginary -2)) (complex -14 6))) (is (= (* (imaginary -2) 3) (imaginary -6))) (is (= (* 3 (imaginary -2)) (imaginary -6))) (is (= (* (imaginary -2) -1) (imaginary 2))) (is (= (* -1 (imaginary -2)) (imaginary 2))) (is (= (* (imaginary -2) (imaginary -2)) -4)) (is (= (* (imaginary -2) (imaginary 5)) 10)) (is (= (* (imaginary 5) (imaginary -2)) 10)) (is (= (* (imaginary 5) (complex 1 2)) (complex -10 5))) (is (= (* (complex 1 2) (imaginary 5)) (complex -10 5))) (is (= (* (imaginary 5) (complex -3 -7)) (complex 35 -15))) (is (= (* (complex -3 -7) (imaginary 5)) (complex 35 -15))) (is (= (* (imaginary 5) 3) (imaginary 15))) (is (= (* 3 (imaginary 5)) (imaginary 15))) (is (= (* (imaginary 5) -1) (imaginary -5))) (is (= (* -1 (imaginary 5)) (imaginary -5))) (is (= (* (imaginary 5) (imaginary -2)) 10)) (is 
(= (* (imaginary -2) (imaginary 5)) 10)) (is (= (* (imaginary 5) (imaginary 5)) -25))) (deftest complex-division (is (= (/ (complex 1 2) (complex 1 2)) 1)) (is (= (/ (complex 1 2) (complex -3 -7)) (complex -17/58 1/58))) (is (= (/ (complex -3 -7) (complex 1 2)) (complex -17/5 -1/5))) (is (= (/ (complex 1 2) 3) (complex 1/3 2/3))) (is (= (/ 3 (complex 1 2)) (complex 3/5 -6/5))) (is (= (/ (complex 1 2) -1) (complex -1 -2))) (is (= (/ -1 (complex 1 2)) (complex -1/5 2/5))) (is (= (/ (complex 1 2) (imaginary -2)) (complex -1 1/2))) (is (= (/ (imaginary -2) (complex 1 2)) (complex -4/5 -2/5))) (is (= (/ (complex 1 2) (imaginary 5)) (complex 2/5 -1/5))) (is (= (/ (imaginary 5) (complex 1 2)) (complex 2 1))) (is (= (/ (complex -3 -7) (complex 1 2)) (complex -17/5 -1/5))) (is (= (/ (complex 1 2) (complex -3 -7)) (complex -17/58 1/58))) (is (= (/ (complex -3 -7) (complex -3 -7)) 1)) (is (= (/ (complex -3 -7) 3) (complex -1 -7/3))) (is (= (/ 3 (complex -3 -7)) (complex -9/58 21/58))) (is (= (/ (complex -3 -7) -1) (complex 3 7))) (is (= (/ -1 (complex -3 -7)) (complex 3/58 -7/58))) (is (= (/ (complex -3 -7) (imaginary -2)) (complex 7/2 -3/2))) (is (= (/ (imaginary -2) (complex -3 -7)) (complex 7/29 3/29))) (is (= (/ (complex -3 -7) (imaginary 5)) (complex -7/5 3/5))) (is (= (/ (imaginary 5) (complex -3 -7)) (complex -35/58 -15/58))) (is (= (/ 3 (complex 1 2)) (complex 3/5 -6/5))) (is (= (/ (complex 1 2) 3) (complex 1/3 2/3))) (is (= (/ 3 (complex -3 -7)) (complex -9/58 21/58))) (is (= (/ (complex -3 -7) 3) (complex -1 -7/3))) (is (= (/ 3 (imaginary -2)) (imaginary 1.5))) (is (= (/ (imaginary -2) 3) (imaginary -2/3))) (is (= (/ 3 (imaginary 5)) (imaginary -3/5))) (is (= (/ (imaginary 5) 3) (imaginary 5/3))) (is (= (/ -1 (complex 1 2)) (complex -1/5 2/5))) (is (= (/ (complex 1 2) -1) (complex -1 -2))) (is (= (/ -1 (complex -3 -7)) (complex 3/58 -7/58))) (is (= (/ (complex -3 -7) -1) (complex 3 7))) (is (= (/ -1 (imaginary -2)) (imaginary -1/2))) (is (= (/ (imaginary -2) -1) 
(imaginary 2))) (is (= (/ -1 (imaginary 5)) (imaginary 1/5))) (is (= (/ (imaginary 5) -1) (imaginary -5))) (is (= (/ (imaginary -2) (complex 1 2)) (complex -4/5 -2/5))) (is (= (/ (complex 1 2) (imaginary -2)) (complex -1 1/2))) (is (= (/ (imaginary -2) (complex -3 -7)) (complex 7/29 3/29))) (is (= (/ (complex -3 -7) (imaginary -2)) (complex 7/2 -3/2))) (is (= (/ (imaginary -2) 3) (imaginary -2/3))) (is (= (/ 3 (imaginary -2)) (imaginary 3/2))) (is (= (/ (imaginary -2) -1) (imaginary 2))) (is (= (/ -1 (imaginary -2)) (imaginary -1/2))) (is (= (/ (imaginary -2) (imaginary -2)) 1)) (is (= (/ (imaginary -2) (imaginary 5)) -2/5)) (is (= (/ (imaginary 5) (imaginary -2)) -5/2)) (is (= (/ (imaginary 5) (complex 1 2)) (complex 2 1))) (is (= (/ (complex 1 2) (imaginary 5)) (complex 2/5 -1/5))) (is (= (/ (imaginary 5) (complex -3 -7)) (complex -35/58 -15/58))) (is (= (/ (complex -3 -7) (imaginary 5)) (complex -7/5 3/5))) (is (= (/ (imaginary 5) 3) (imaginary 5/3))) (is (= (/ 3 (imaginary 5)) (imaginary -3/5))) (is (= (/ (imaginary 5) -1) (imaginary -5))) (is (= (/ -1 (imaginary 5)) (imaginary 1/5))) (is (= (/ (imaginary 5) (imaginary -2)) -5/2)) (is (= (/ (imaginary -2) (imaginary 5)) -2/5)) (is (= (/ (imaginary 5) (imaginary 5)) 1))) (deftest complex-conjugate (is (= (conjugate (complex 1 2)) (complex 1 -2))) (is (= (conjugate (complex -3 -7)) (complex -3 7))) (is (= (conjugate (imaginary -2)) (imaginary 2))) (is (= (conjugate (imaginary 5)) (imaginary -5)))) (deftest complex-abs (doseq [c [(complex 1 2) (complex -2 3) (complex 4 -2) (complex -3 -7) (imaginary -2) (imaginary 5)]] (is (approx= (* c (conjugate c)) (sqr (abs c)) 1e-14)))) (deftest complex-sqrt (doseq [c [(complex 1 2) (complex -2 3) (complex 4 -2) (complex -3 -7) (imaginary -2) (imaginary 5)]] (let [r (sqrt c)] (is (approx= c (sqr r) 1e-14)) (is (>= (real r) 0))))) (deftest complex-exp (is (approx= (exp (complex 1 2)) (complex -1.1312043837568135 2.4717266720048188) 1e-14)) (is (approx= (exp (complex 2 3)) 
(complex -7.3151100949011028 1.0427436562359045) 1e-14)) (is (approx= (exp (complex 4 -2)) (complex -22.720847417619233 -49.645957334580565) 1e-14)) (is (approx= (exp (complex 3 -7)) (complex 15.142531566086868 -13.195928586605717) 1e-14)) (is (approx= (exp (imaginary -2)) (complex -0.41614683654714241 -0.90929742682568171) 1e-14)) (is (approx= (exp (imaginary 5)) (complex 0.2836621854632263 -0.95892427466313845) 1e-14)))
true
;; Test routines for complex-numbers.clj ;; by PI:NAME:<NAME>END_PI ;; last updated April 2, 2009 ;; Copyright (c) PI:NAME:<NAME>END_PI, 2008. All rights reserved. The use ;; and distribution terms for this software are covered by the Eclipse ;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) ;; which can be found in the file epl-v10.html at the root of this ;; distribution. By using this software in any fashion, you are ;; agreeing to be bound by the terms of this license. You must not ;; remove this notice, or any other, from this software. (ns clojure.contrib.test-contrib.complex-numbers (:refer-clojure :exclude [+ - * / = < > <= >=]) (:use [clojure.test :only (deftest is are run-tests)] [clojure.contrib.generic.arithmetic :only (+ - * /)] [clojure.contrib.generic.comparison :only (= < > <= >=)] [clojure.contrib.generic.math-functions :only (abs approx= conjugate exp sqr sqrt)] [clojure.contrib.complex-numbers :only (complex imaginary real imag)])) (deftest complex-addition (is (= (+ (complex 1 2) (complex 1 2)) (complex 2 4))) (is (= (+ (complex 1 2) (complex -3 -7)) (complex -2 -5))) (is (= (+ (complex -3 -7) (complex 1 2)) (complex -2 -5))) (is (= (+ (complex 1 2) 3) (complex 4 2))) (is (= (+ 3 (complex 1 2)) (complex 4 2))) (is (= (+ (complex 1 2) -1) (imaginary 2))) (is (= (+ -1 (complex 1 2)) (imaginary 2))) (is (= (+ (complex 1 2) (imaginary -2)) 1)) (is (= (+ (imaginary -2) (complex 1 2)) 1)) (is (= (+ (complex 1 2) (imaginary 5)) (complex 1 7))) (is (= (+ (imaginary 5) (complex 1 2)) (complex 1 7))) (is (= (+ (complex -3 -7) (complex 1 2)) (complex -2 -5))) (is (= (+ (complex 1 2) (complex -3 -7)) (complex -2 -5))) (is (= (+ (complex -3 -7) (complex -3 -7)) (complex -6 -14))) (is (= (+ (complex -3 -7) 3) (imaginary -7))) (is (= (+ 3 (complex -3 -7)) (imaginary -7))) (is (= (+ (complex -3 -7) -1) (complex -4 -7))) (is (= (+ -1 (complex -3 -7)) (complex -4 -7))) (is (= (+ (complex -3 -7) (imaginary -2)) (complex -3 -9))) (is (= (+ 
(imaginary -2) (complex -3 -7)) (complex -3 -9))) (is (= (+ (complex -3 -7) (imaginary 5)) (complex -3 -2))) (is (= (+ (imaginary 5) (complex -3 -7)) (complex -3 -2))) (is (= (+ 3 (complex 1 2)) (complex 4 2))) (is (= (+ (complex 1 2) 3) (complex 4 2))) (is (= (+ 3 (complex -3 -7)) (imaginary -7))) (is (= (+ (complex -3 -7) 3) (imaginary -7))) (is (= (+ 3 (imaginary -2)) (complex 3 -2))) (is (= (+ (imaginary -2) 3) (complex 3 -2))) (is (= (+ 3 (imaginary 5)) (complex 3 5))) (is (= (+ (imaginary 5) 3) (complex 3 5))) (is (= (+ -1 (complex 1 2)) (imaginary 2))) (is (= (+ (complex 1 2) -1) (imaginary 2))) (is (= (+ -1 (complex -3 -7)) (complex -4 -7))) (is (= (+ (complex -3 -7) -1) (complex -4 -7))) (is (= (+ -1 (imaginary -2)) (complex -1 -2))) (is (= (+ (imaginary -2) -1) (complex -1 -2))) (is (= (+ -1 (imaginary 5)) (complex -1 5))) (is (= (+ (imaginary 5) -1) (complex -1 5))) (is (= (+ (imaginary -2) (complex 1 2)) 1)) (is (= (+ (complex 1 2) (imaginary -2)) 1)) (is (= (+ (imaginary -2) (complex -3 -7)) (complex -3 -9))) (is (= (+ (complex -3 -7) (imaginary -2)) (complex -3 -9))) (is (= (+ (imaginary -2) 3) (complex 3 -2))) (is (= (+ 3 (imaginary -2)) (complex 3 -2))) (is (= (+ (imaginary -2) -1) (complex -1 -2))) (is (= (+ -1 (imaginary -2)) (complex -1 -2))) (is (= (+ (imaginary -2) (imaginary -2)) (imaginary -4))) (is (= (+ (imaginary -2) (imaginary 5)) (imaginary 3))) (is (= (+ (imaginary 5) (imaginary -2)) (imaginary 3))) (is (= (+ (imaginary 5) (complex 1 2)) (complex 1 7))) (is (= (+ (complex 1 2) (imaginary 5)) (complex 1 7))) (is (= (+ (imaginary 5) (complex -3 -7)) (complex -3 -2))) (is (= (+ (complex -3 -7) (imaginary 5)) (complex -3 -2))) (is (= (+ (imaginary 5) 3) (complex 3 5))) (is (= (+ 3 (imaginary 5)) (complex 3 5))) (is (= (+ (imaginary 5) -1) (complex -1 5))) (is (= (+ -1 (imaginary 5)) (complex -1 5))) (is (= (+ (imaginary 5) (imaginary -2)) (imaginary 3))) (is (= (+ (imaginary -2) (imaginary 5)) (imaginary 3))) (is (= (+ (imaginary 5) 
(imaginary 5)) (imaginary 10)))) (deftest complex-subtraction (is (= (- (complex 1 2) (complex 1 2)) 0)) (is (= (- (complex 1 2) (complex -3 -7)) (complex 4 9))) (is (= (- (complex -3 -7) (complex 1 2)) (complex -4 -9))) (is (= (- (complex 1 2) 3) (complex -2 2))) (is (= (- 3 (complex 1 2)) (complex 2 -2))) (is (= (- (complex 1 2) -1) (complex 2 2))) (is (= (- -1 (complex 1 2)) (complex -2 -2))) (is (= (- (complex 1 2) (imaginary -2)) (complex 1 4))) (is (= (- (imaginary -2) (complex 1 2)) (complex -1 -4))) (is (= (- (complex 1 2) (imaginary 5)) (complex 1 -3))) (is (= (- (imaginary 5) (complex 1 2)) (complex -1 3))) (is (= (- (complex -3 -7) (complex 1 2)) (complex -4 -9))) (is (= (- (complex 1 2) (complex -3 -7)) (complex 4 9))) (is (= (- (complex -3 -7) (complex -3 -7)) 0)) (is (= (- (complex -3 -7) 3) (complex -6 -7))) (is (= (- 3 (complex -3 -7)) (complex 6 7))) (is (= (- (complex -3 -7) -1) (complex -2 -7))) (is (= (- -1 (complex -3 -7)) (complex 2 7))) (is (= (- (complex -3 -7) (imaginary -2)) (complex -3 -5))) (is (= (- (imaginary -2) (complex -3 -7)) (complex 3 5))) (is (= (- (complex -3 -7) (imaginary 5)) (complex -3 -12))) (is (= (- (imaginary 5) (complex -3 -7)) (complex 3 12))) (is (= (- 3 (complex 1 2)) (complex 2 -2))) (is (= (- (complex 1 2) 3) (complex -2 2))) (is (= (- 3 (complex -3 -7)) (complex 6 7))) (is (= (- (complex -3 -7) 3) (complex -6 -7))) (is (= (- 3 (imaginary -2)) (complex 3 2))) (is (= (- (imaginary -2) 3) (complex -3 -2))) (is (= (- 3 (imaginary 5)) (complex 3 -5))) (is (= (- (imaginary 5) 3) (complex -3 5))) (is (= (- -1 (complex 1 2)) (complex -2 -2))) (is (= (- (complex 1 2) -1) (complex 2 2))) (is (= (- -1 (complex -3 -7)) (complex 2 7))) (is (= (- (complex -3 -7) -1) (complex -2 -7))) (is (= (- -1 (imaginary -2)) (complex -1 2))) (is (= (- (imaginary -2) -1) (complex 1 -2))) (is (= (- -1 (imaginary 5)) (complex -1 -5))) (is (= (- (imaginary 5) -1) (complex 1 5))) (is (= (- (imaginary -2) (complex 1 2)) (complex -1 -4))) (is (= 
(- (complex 1 2) (imaginary -2)) (complex 1 4))) (is (= (- (imaginary -2) (complex -3 -7)) (complex 3 5))) (is (= (- (complex -3 -7) (imaginary -2)) (complex -3 -5))) (is (= (- (imaginary -2) 3) (complex -3 -2))) (is (= (- 3 (imaginary -2)) (complex 3 2))) (is (= (- (imaginary -2) -1) (complex 1 -2))) (is (= (- -1 (imaginary -2)) (complex -1 2))) (is (= (- (imaginary -2) (imaginary -2)) 0)) (is (= (- (imaginary -2) (imaginary 5)) (imaginary -7))) (is (= (- (imaginary 5) (imaginary -2)) (imaginary 7))) (is (= (- (imaginary 5) (complex 1 2)) (complex -1 3))) (is (= (- (complex 1 2) (imaginary 5)) (complex 1 -3))) (is (= (- (imaginary 5) (complex -3 -7)) (complex 3 12))) (is (= (- (complex -3 -7) (imaginary 5)) (complex -3 -12))) (is (= (- (imaginary 5) 3) (complex -3 5))) (is (= (- 3 (imaginary 5)) (complex 3 -5))) (is (= (- (imaginary 5) -1) (complex 1 5))) (is (= (- -1 (imaginary 5)) (complex -1 -5))) (is (= (- (imaginary 5) (imaginary -2)) (imaginary 7))) (is (= (- (imaginary -2) (imaginary 5)) (imaginary -7))) (is (= (- (imaginary 5) (imaginary 5)) 0))) (deftest complex-multiplication (is (= (* (complex 1 2) (complex 1 2)) (complex -3 4))) (is (= (* (complex 1 2) (complex -3 -7)) (complex 11 -13))) (is (= (* (complex -3 -7) (complex 1 2)) (complex 11 -13))) (is (= (* (complex 1 2) 3) (complex 3 6))) (is (= (* 3 (complex 1 2)) (complex 3 6))) (is (= (* (complex 1 2) -1) (complex -1 -2))) (is (= (* -1 (complex 1 2)) (complex -1 -2))) (is (= (* (complex 1 2) (imaginary -2)) (complex 4 -2))) (is (= (* (imaginary -2) (complex 1 2)) (complex 4 -2))) (is (= (* (complex 1 2) (imaginary 5)) (complex -10 5))) (is (= (* (imaginary 5) (complex 1 2)) (complex -10 5))) (is (= (* (complex -3 -7) (complex 1 2)) (complex 11 -13))) (is (= (* (complex 1 2) (complex -3 -7)) (complex 11 -13))) (is (= (* (complex -3 -7) (complex -3 -7)) (complex -40 42))) (is (= (* (complex -3 -7) 3) (complex -9 -21))) (is (= (* 3 (complex -3 -7)) (complex -9 -21))) (is (= (* (complex -3 -7) -1) 
(complex 3 7))) (is (= (* -1 (complex -3 -7)) (complex 3 7))) (is (= (* (complex -3 -7) (imaginary -2)) (complex -14 6))) (is (= (* (imaginary -2) (complex -3 -7)) (complex -14 6))) (is (= (* (complex -3 -7) (imaginary 5)) (complex 35 -15))) (is (= (* (imaginary 5) (complex -3 -7)) (complex 35 -15))) (is (= (* 3 (complex 1 2)) (complex 3 6))) (is (= (* (complex 1 2) 3) (complex 3 6))) (is (= (* 3 (complex -3 -7)) (complex -9 -21))) (is (= (* (complex -3 -7) 3) (complex -9 -21))) (is (= (* 3 (imaginary -2)) (imaginary -6))) (is (= (* (imaginary -2) 3) (imaginary -6))) (is (= (* 3 (imaginary 5)) (imaginary 15))) (is (= (* (imaginary 5) 3) (imaginary 15))) (is (= (* -1 (complex 1 2)) (complex -1 -2))) (is (= (* (complex 1 2) -1) (complex -1 -2))) (is (= (* -1 (complex -3 -7)) (complex 3 7))) (is (= (* (complex -3 -7) -1) (complex 3 7))) (is (= (* -1 (imaginary -2)) (imaginary 2))) (is (= (* (imaginary -2) -1) (imaginary 2))) (is (= (* -1 (imaginary 5)) (imaginary -5))) (is (= (* (imaginary 5) -1) (imaginary -5))) (is (= (* (imaginary -2) (complex 1 2)) (complex 4 -2))) (is (= (* (complex 1 2) (imaginary -2)) (complex 4 -2))) (is (= (* (imaginary -2) (complex -3 -7)) (complex -14 6))) (is (= (* (complex -3 -7) (imaginary -2)) (complex -14 6))) (is (= (* (imaginary -2) 3) (imaginary -6))) (is (= (* 3 (imaginary -2)) (imaginary -6))) (is (= (* (imaginary -2) -1) (imaginary 2))) (is (= (* -1 (imaginary -2)) (imaginary 2))) (is (= (* (imaginary -2) (imaginary -2)) -4)) (is (= (* (imaginary -2) (imaginary 5)) 10)) (is (= (* (imaginary 5) (imaginary -2)) 10)) (is (= (* (imaginary 5) (complex 1 2)) (complex -10 5))) (is (= (* (complex 1 2) (imaginary 5)) (complex -10 5))) (is (= (* (imaginary 5) (complex -3 -7)) (complex 35 -15))) (is (= (* (complex -3 -7) (imaginary 5)) (complex 35 -15))) (is (= (* (imaginary 5) 3) (imaginary 15))) (is (= (* 3 (imaginary 5)) (imaginary 15))) (is (= (* (imaginary 5) -1) (imaginary -5))) (is (= (* -1 (imaginary 5)) (imaginary -5))) (is (= (* 
(imaginary 5) (imaginary -2)) 10)) (is (= (* (imaginary -2) (imaginary 5)) 10)) (is (= (* (imaginary 5) (imaginary 5)) -25))) (deftest complex-division (is (= (/ (complex 1 2) (complex 1 2)) 1)) (is (= (/ (complex 1 2) (complex -3 -7)) (complex -17/58 1/58))) (is (= (/ (complex -3 -7) (complex 1 2)) (complex -17/5 -1/5))) (is (= (/ (complex 1 2) 3) (complex 1/3 2/3))) (is (= (/ 3 (complex 1 2)) (complex 3/5 -6/5))) (is (= (/ (complex 1 2) -1) (complex -1 -2))) (is (= (/ -1 (complex 1 2)) (complex -1/5 2/5))) (is (= (/ (complex 1 2) (imaginary -2)) (complex -1 1/2))) (is (= (/ (imaginary -2) (complex 1 2)) (complex -4/5 -2/5))) (is (= (/ (complex 1 2) (imaginary 5)) (complex 2/5 -1/5))) (is (= (/ (imaginary 5) (complex 1 2)) (complex 2 1))) (is (= (/ (complex -3 -7) (complex 1 2)) (complex -17/5 -1/5))) (is (= (/ (complex 1 2) (complex -3 -7)) (complex -17/58 1/58))) (is (= (/ (complex -3 -7) (complex -3 -7)) 1)) (is (= (/ (complex -3 -7) 3) (complex -1 -7/3))) (is (= (/ 3 (complex -3 -7)) (complex -9/58 21/58))) (is (= (/ (complex -3 -7) -1) (complex 3 7))) (is (= (/ -1 (complex -3 -7)) (complex 3/58 -7/58))) (is (= (/ (complex -3 -7) (imaginary -2)) (complex 7/2 -3/2))) (is (= (/ (imaginary -2) (complex -3 -7)) (complex 7/29 3/29))) (is (= (/ (complex -3 -7) (imaginary 5)) (complex -7/5 3/5))) (is (= (/ (imaginary 5) (complex -3 -7)) (complex -35/58 -15/58))) (is (= (/ 3 (complex 1 2)) (complex 3/5 -6/5))) (is (= (/ (complex 1 2) 3) (complex 1/3 2/3))) (is (= (/ 3 (complex -3 -7)) (complex -9/58 21/58))) (is (= (/ (complex -3 -7) 3) (complex -1 -7/3))) (is (= (/ 3 (imaginary -2)) (imaginary 1.5))) (is (= (/ (imaginary -2) 3) (imaginary -2/3))) (is (= (/ 3 (imaginary 5)) (imaginary -3/5))) (is (= (/ (imaginary 5) 3) (imaginary 5/3))) (is (= (/ -1 (complex 1 2)) (complex -1/5 2/5))) (is (= (/ (complex 1 2) -1) (complex -1 -2))) (is (= (/ -1 (complex -3 -7)) (complex 3/58 -7/58))) (is (= (/ (complex -3 -7) -1) (complex 3 7))) (is (= (/ -1 (imaginary -2)) (imaginary 
-1/2))) (is (= (/ (imaginary -2) -1) (imaginary 2))) (is (= (/ -1 (imaginary 5)) (imaginary 1/5))) (is (= (/ (imaginary 5) -1) (imaginary -5))) (is (= (/ (imaginary -2) (complex 1 2)) (complex -4/5 -2/5))) (is (= (/ (complex 1 2) (imaginary -2)) (complex -1 1/2))) (is (= (/ (imaginary -2) (complex -3 -7)) (complex 7/29 3/29))) (is (= (/ (complex -3 -7) (imaginary -2)) (complex 7/2 -3/2))) (is (= (/ (imaginary -2) 3) (imaginary -2/3))) (is (= (/ 3 (imaginary -2)) (imaginary 3/2))) (is (= (/ (imaginary -2) -1) (imaginary 2))) (is (= (/ -1 (imaginary -2)) (imaginary -1/2))) (is (= (/ (imaginary -2) (imaginary -2)) 1)) (is (= (/ (imaginary -2) (imaginary 5)) -2/5)) (is (= (/ (imaginary 5) (imaginary -2)) -5/2)) (is (= (/ (imaginary 5) (complex 1 2)) (complex 2 1))) (is (= (/ (complex 1 2) (imaginary 5)) (complex 2/5 -1/5))) (is (= (/ (imaginary 5) (complex -3 -7)) (complex -35/58 -15/58))) (is (= (/ (complex -3 -7) (imaginary 5)) (complex -7/5 3/5))) (is (= (/ (imaginary 5) 3) (imaginary 5/3))) (is (= (/ 3 (imaginary 5)) (imaginary -3/5))) (is (= (/ (imaginary 5) -1) (imaginary -5))) (is (= (/ -1 (imaginary 5)) (imaginary 1/5))) (is (= (/ (imaginary 5) (imaginary -2)) -5/2)) (is (= (/ (imaginary -2) (imaginary 5)) -2/5)) (is (= (/ (imaginary 5) (imaginary 5)) 1))) (deftest complex-conjugate (is (= (conjugate (complex 1 2)) (complex 1 -2))) (is (= (conjugate (complex -3 -7)) (complex -3 7))) (is (= (conjugate (imaginary -2)) (imaginary 2))) (is (= (conjugate (imaginary 5)) (imaginary -5)))) (deftest complex-abs (doseq [c [(complex 1 2) (complex -2 3) (complex 4 -2) (complex -3 -7) (imaginary -2) (imaginary 5)]] (is (approx= (* c (conjugate c)) (sqr (abs c)) 1e-14)))) (deftest complex-sqrt (doseq [c [(complex 1 2) (complex -2 3) (complex 4 -2) (complex -3 -7) (imaginary -2) (imaginary 5)]] (let [r (sqrt c)] (is (approx= c (sqr r) 1e-14)) (is (>= (real r) 0))))) (deftest complex-exp (is (approx= (exp (complex 1 2)) (complex -1.1312043837568135 2.4717266720048188) 1e-14)) 
(is (approx= (exp (complex 2 3)) (complex -7.3151100949011028 1.0427436562359045) 1e-14)) (is (approx= (exp (complex 4 -2)) (complex -22.720847417619233 -49.645957334580565) 1e-14)) (is (approx= (exp (complex 3 -7)) (complex 15.142531566086868 -13.195928586605717) 1e-14)) (is (approx= (exp (imaginary -2)) (complex -0.41614683654714241 -0.90929742682568171) 1e-14)) (is (approx= (exp (imaginary 5)) (complex 0.2836621854632263 -0.95892427466313845) 1e-14)))
[ { "context": "(ns fp)\n\n; Code from: Programming Clojure, Alex Miller\n\n; p01 Simple Recursion\n\n(defn stack-consuming-fi", "end": 54, "score": 0.999758780002594, "start": 43, "tag": "NAME", "value": "Alex Miller" } ]
clj/ex/study_clojure/ex06/src/fp.clj
mertnuhoglu/study
1
(ns fp) ; Code from: Programming Clojure, Alex Miller ; p01 Simple Recursion (defn stack-consuming-fibo [n] (cond (= n 0) 0 ; basis (= n 1) 1 ; basis :else (+ (stack-consuming-fibo (- n 1)) ; induction (stack-consuming-fibo (- n 2))))) (stack-consuming-fibo 9) ;;=> 34 ; p02 Tail Recursion id=g12018 (defn tail-fibo [n] (letfn [(fib ; <label id="code.tail-fibo.letfn"/> [current next n] ; <label id="code.tail-fibo.args"/> (if (zero? n) current ; <label id="code.tail-fibo.terminate"/> (fib next (+ current next) (dec n))))] ; <label id="code.tail-fibo.recur"/> (fib 0N 1N n))) ; <label id="code.tail-fibo.call"/> (comment (+ 1 2) (defn f [] (+ 1 2)) (tail-fibo 3) ;;=> 2N ,) ; p03 Explicit self-recursion with recur id=g12019 (defn recur-fibo [n] (letfn [(fib [current next n] (if (zero? n) current (recur next (+ current next) (dec n))))] ; <label id="code.recur-fibo.recur"/> (fib 0N 1N n))) (comment (recur-fibo 3) ;;=> 2N ,) ; p04 Lazy Seq Recursion 01 (defn lazy-seq-fibo ([] (concat [0 1] (lazy-seq-fibo 0N 1N))) ([a b] (let [n (+ a b)] (lazy-seq (cons n (lazy-seq-fibo b n)))))) (comment (take 10 (lazy-seq-fibo)) ;;=> (0 1 1N 2N 3N 5N 8N 13N 21N 34N) ,) ; p05 Lazy Seq Recursion Using iterate id=g12020 (comment (take 5 (iterate (fn [[a b]] [b (+ a b)]) [0 1]))) ;;=> ([0 1] [1 1] [1 2] [2 3] [3 5]) (defn fibo [] (map first (iterate (fn [[a b]] [b (+ a b)]) [0N 1N]))) (comment (take 5 (fibo)) ;;=> (0N 1N 1N 2N 3N) ,) ; p06 Lazier than Lazy id=g12022 (defn count-heads-pairs [coll] (loop [cnt 0 coll coll] (if (empty? 
coll) cnt (recur (if (= :h (first coll) (second coll)) (inc cnt) cnt) (rest coll))))) (comment (count-heads-pairs [:h :t :h :h :h]) ;;=> 2 ,) ; p07 Transforming the Input Sequence id=g12021 (defn by-pairs [coll] (let [take-pair (fn [c] (when (next c) (take 2 c)))] (lazy-seq (when-let [pair (seq (take-pair coll))] (cons pair (by-pairs (rest coll))))))) (comment (by-pairs [:h :t :h :h :h]) ;;=> ((:h :t) (:t :h) (:h :h) (:h :h)) ,) (defn count-heads-pairs [coll] (count (filter (fn [pair] (every? #(= :h %) pair))) (by-pairs coll))) (comment (count-heads-pairs [:h :h]) ;;=> 1 (count-heads-pairs [:h :t]) ;;=> 0 (count-heads-pairs [:h :h :h]) ;;=> 2 ,) ; p07.02 Use partition instead of by-pairs id=g12023 ; (partition size step? coll) (comment (partition 2 [:h :h :h]) ;;=> ((:h :h)) (partition 2 [:h :t :h]) ;;=> ((:h :t)) (partition 2 [:h :t :t :h :h :h]) ;;=> ((:h :t) (:t :h) (:h :h)) ; use step argument (partition 2 1 [:h :t :h]) ;;=> ((:h :t) (:t :h)) (partition 2 1 [:h :t :t :h :h :h]) ;;=> ((:h :t) (:t :t) (:t :h) (:h :h) (:h :h)) ,) ; (comp f & fs) (def ^{:doc "Count items matching a filter"} count-if (comp count filter)) (comment (count-if odd? [1 2 3 4 5]) ;;=> 3 ,) (defn count-runs "Count runs of length n where pred is true in coll." [n pred coll] (count-if #(every? pred %) (partition n 1 coll))) (comment (count-runs 2 #(= % :h) [:h :t :t :h :h :h]) ;;=> 2 (count-runs 3 #(= % :h) [:h :t :t :h :h :h]) ;;=> 1 ,) ; p08 Currying and Partial Application ; (partial f & partial-args) (def ^{:doc "Count runs of length two that are both heads"} count-heads-pairs (partial count-runs 2 #(= % :h))) (comment (count-heads-pairs [:h :t :t :h :h :h]) ;;=> 2 ,)
25791
(ns fp) ; Code from: Programming Clojure, <NAME> ; p01 Simple Recursion (defn stack-consuming-fibo [n] (cond (= n 0) 0 ; basis (= n 1) 1 ; basis :else (+ (stack-consuming-fibo (- n 1)) ; induction (stack-consuming-fibo (- n 2))))) (stack-consuming-fibo 9) ;;=> 34 ; p02 Tail Recursion id=g12018 (defn tail-fibo [n] (letfn [(fib ; <label id="code.tail-fibo.letfn"/> [current next n] ; <label id="code.tail-fibo.args"/> (if (zero? n) current ; <label id="code.tail-fibo.terminate"/> (fib next (+ current next) (dec n))))] ; <label id="code.tail-fibo.recur"/> (fib 0N 1N n))) ; <label id="code.tail-fibo.call"/> (comment (+ 1 2) (defn f [] (+ 1 2)) (tail-fibo 3) ;;=> 2N ,) ; p03 Explicit self-recursion with recur id=g12019 (defn recur-fibo [n] (letfn [(fib [current next n] (if (zero? n) current (recur next (+ current next) (dec n))))] ; <label id="code.recur-fibo.recur"/> (fib 0N 1N n))) (comment (recur-fibo 3) ;;=> 2N ,) ; p04 Lazy Seq Recursion 01 (defn lazy-seq-fibo ([] (concat [0 1] (lazy-seq-fibo 0N 1N))) ([a b] (let [n (+ a b)] (lazy-seq (cons n (lazy-seq-fibo b n)))))) (comment (take 10 (lazy-seq-fibo)) ;;=> (0 1 1N 2N 3N 5N 8N 13N 21N 34N) ,) ; p05 Lazy Seq Recursion Using iterate id=g12020 (comment (take 5 (iterate (fn [[a b]] [b (+ a b)]) [0 1]))) ;;=> ([0 1] [1 1] [1 2] [2 3] [3 5]) (defn fibo [] (map first (iterate (fn [[a b]] [b (+ a b)]) [0N 1N]))) (comment (take 5 (fibo)) ;;=> (0N 1N 1N 2N 3N) ,) ; p06 Lazier than Lazy id=g12022 (defn count-heads-pairs [coll] (loop [cnt 0 coll coll] (if (empty? 
coll) cnt (recur (if (= :h (first coll) (second coll)) (inc cnt) cnt) (rest coll))))) (comment (count-heads-pairs [:h :t :h :h :h]) ;;=> 2 ,) ; p07 Transforming the Input Sequence id=g12021 (defn by-pairs [coll] (let [take-pair (fn [c] (when (next c) (take 2 c)))] (lazy-seq (when-let [pair (seq (take-pair coll))] (cons pair (by-pairs (rest coll))))))) (comment (by-pairs [:h :t :h :h :h]) ;;=> ((:h :t) (:t :h) (:h :h) (:h :h)) ,) (defn count-heads-pairs [coll] (count (filter (fn [pair] (every? #(= :h %) pair))) (by-pairs coll))) (comment (count-heads-pairs [:h :h]) ;;=> 1 (count-heads-pairs [:h :t]) ;;=> 0 (count-heads-pairs [:h :h :h]) ;;=> 2 ,) ; p07.02 Use partition instead of by-pairs id=g12023 ; (partition size step? coll) (comment (partition 2 [:h :h :h]) ;;=> ((:h :h)) (partition 2 [:h :t :h]) ;;=> ((:h :t)) (partition 2 [:h :t :t :h :h :h]) ;;=> ((:h :t) (:t :h) (:h :h)) ; use step argument (partition 2 1 [:h :t :h]) ;;=> ((:h :t) (:t :h)) (partition 2 1 [:h :t :t :h :h :h]) ;;=> ((:h :t) (:t :t) (:t :h) (:h :h) (:h :h)) ,) ; (comp f & fs) (def ^{:doc "Count items matching a filter"} count-if (comp count filter)) (comment (count-if odd? [1 2 3 4 5]) ;;=> 3 ,) (defn count-runs "Count runs of length n where pred is true in coll." [n pred coll] (count-if #(every? pred %) (partition n 1 coll))) (comment (count-runs 2 #(= % :h) [:h :t :t :h :h :h]) ;;=> 2 (count-runs 3 #(= % :h) [:h :t :t :h :h :h]) ;;=> 1 ,) ; p08 Currying and Partial Application ; (partial f & partial-args) (def ^{:doc "Count runs of length two that are both heads"} count-heads-pairs (partial count-runs 2 #(= % :h))) (comment (count-heads-pairs [:h :t :t :h :h :h]) ;;=> 2 ,)
true
(ns fp) ; Code from: Programming Clojure, PI:NAME:<NAME>END_PI ; p01 Simple Recursion (defn stack-consuming-fibo [n] (cond (= n 0) 0 ; basis (= n 1) 1 ; basis :else (+ (stack-consuming-fibo (- n 1)) ; induction (stack-consuming-fibo (- n 2))))) (stack-consuming-fibo 9) ;;=> 34 ; p02 Tail Recursion id=g12018 (defn tail-fibo [n] (letfn [(fib ; <label id="code.tail-fibo.letfn"/> [current next n] ; <label id="code.tail-fibo.args"/> (if (zero? n) current ; <label id="code.tail-fibo.terminate"/> (fib next (+ current next) (dec n))))] ; <label id="code.tail-fibo.recur"/> (fib 0N 1N n))) ; <label id="code.tail-fibo.call"/> (comment (+ 1 2) (defn f [] (+ 1 2)) (tail-fibo 3) ;;=> 2N ,) ; p03 Explicit self-recursion with recur id=g12019 (defn recur-fibo [n] (letfn [(fib [current next n] (if (zero? n) current (recur next (+ current next) (dec n))))] ; <label id="code.recur-fibo.recur"/> (fib 0N 1N n))) (comment (recur-fibo 3) ;;=> 2N ,) ; p04 Lazy Seq Recursion 01 (defn lazy-seq-fibo ([] (concat [0 1] (lazy-seq-fibo 0N 1N))) ([a b] (let [n (+ a b)] (lazy-seq (cons n (lazy-seq-fibo b n)))))) (comment (take 10 (lazy-seq-fibo)) ;;=> (0 1 1N 2N 3N 5N 8N 13N 21N 34N) ,) ; p05 Lazy Seq Recursion Using iterate id=g12020 (comment (take 5 (iterate (fn [[a b]] [b (+ a b)]) [0 1]))) ;;=> ([0 1] [1 1] [1 2] [2 3] [3 5]) (defn fibo [] (map first (iterate (fn [[a b]] [b (+ a b)]) [0N 1N]))) (comment (take 5 (fibo)) ;;=> (0N 1N 1N 2N 3N) ,) ; p06 Lazier than Lazy id=g12022 (defn count-heads-pairs [coll] (loop [cnt 0 coll coll] (if (empty? 
coll) cnt (recur (if (= :h (first coll) (second coll)) (inc cnt) cnt) (rest coll))))) (comment (count-heads-pairs [:h :t :h :h :h]) ;;=> 2 ,) ; p07 Transforming the Input Sequence id=g12021 (defn by-pairs [coll] (let [take-pair (fn [c] (when (next c) (take 2 c)))] (lazy-seq (when-let [pair (seq (take-pair coll))] (cons pair (by-pairs (rest coll))))))) (comment (by-pairs [:h :t :h :h :h]) ;;=> ((:h :t) (:t :h) (:h :h) (:h :h)) ,) (defn count-heads-pairs [coll] (count (filter (fn [pair] (every? #(= :h %) pair))) (by-pairs coll))) (comment (count-heads-pairs [:h :h]) ;;=> 1 (count-heads-pairs [:h :t]) ;;=> 0 (count-heads-pairs [:h :h :h]) ;;=> 2 ,) ; p07.02 Use partition instead of by-pairs id=g12023 ; (partition size step? coll) (comment (partition 2 [:h :h :h]) ;;=> ((:h :h)) (partition 2 [:h :t :h]) ;;=> ((:h :t)) (partition 2 [:h :t :t :h :h :h]) ;;=> ((:h :t) (:t :h) (:h :h)) ; use step argument (partition 2 1 [:h :t :h]) ;;=> ((:h :t) (:t :h)) (partition 2 1 [:h :t :t :h :h :h]) ;;=> ((:h :t) (:t :t) (:t :h) (:h :h) (:h :h)) ,) ; (comp f & fs) (def ^{:doc "Count items matching a filter"} count-if (comp count filter)) (comment (count-if odd? [1 2 3 4 5]) ;;=> 3 ,) (defn count-runs "Count runs of length n where pred is true in coll." [n pred coll] (count-if #(every? pred %) (partition n 1 coll))) (comment (count-runs 2 #(= % :h) [:h :t :t :h :h :h]) ;;=> 2 (count-runs 3 #(= % :h) [:h :t :t :h :h :h]) ;;=> 1 ,) ; p08 Currying and Partial Application ; (partial f & partial-args) (def ^{:doc "Count runs of length two that are both heads"} count-heads-pairs (partial count-runs 2 #(= % :h))) (comment (count-heads-pairs [:h :t :t :h :h :h]) ;;=> 2 ,)
[ { "context": "; Copyright (c) 2021-present Walmart, Inc.\n;\n; Licensed under the Apache License, Vers", "end": 36, "score": 0.9244232773780823, "start": 29, "tag": "NAME", "value": "Walmart" } ]
test/com/walmartlabs/lacinia/resolve_bindings_test.clj
hagenek/lacinia
1,762
; Copyright (c) 2021-present Walmart, Inc. ; ; Licensed under the Apache License, Version 2.0 (the "License") ; you may not use this file except in compliance with the License. ; You may obtain a copy of the License at ; ; http://www.apache.org/licenses/LICENSE-2.0 ; ; Unless required by applicable law or agreed to in writing, software ; distributed under the License is distributed on an "AS IS" BASIS, ; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ; See the License for the specific language governing permissions and ; limitations under the License. (ns com.walmartlabs.lacinia.resolve-bindings-test (:require [clojure.test :refer [deftest is use-fixtures]] [clojure.core.async :refer [chan go close! <!!]] [com.walmartlabs.lacinia.resolve :as resolve]) (:import (java.util.concurrent ThreadPoolExecutor TimeUnit ArrayBlockingQueue ExecutorService))) (def ^:private ^:dynamic *bound* :default) (def ^:private *states (atom [])) (defn ^:private reset-*states [f] (try (f) (finally (reset! *states [])))) (defn ^:private add-state "Capture the state of *bound*." [tag] (swap! *states conj [tag *bound*])) (use-fixtures :each reset-*states) (defn ^:private ^ExecutorService new-executor [] (ThreadPoolExecutor. 1 5 1 TimeUnit/SECONDS (ArrayBlockingQueue. 1))) ;; First, normal case where there is no executor for the callback (deftest bindings-conveyed-normally (let [resolved (chan) promise (resolve/resolve-promise)] (add-state "before") (binding [*bound* :override] (add-state "during") (resolve/on-deliver! promise (fn [_] (add-state "on-deliver!") (close! resolved))) ;; We use the go macro as it is known to properly convey bindings (go (add-state "in go block") (resolve/deliver! promise true))) (<!! resolved) (add-state "after") (is (= [["before" :default] ["during" :override] ["in go block" :override] ["on-deliver!" 
:override] ["after" :default]] @*states)))) (deftest bindings-conveyed-through-executor (let [resolved (chan) executor (new-executor)] (try (add-state "before") (binding [*bound* :override resolve/*callback-executor* executor] (add-state "during") ;; bindings are captured at the time the promise is created (let [promise (resolve/resolve-promise)] (resolve/on-deliver! promise (fn [_] (add-state "on-deliver!") (close! resolved))) (go (add-state "in go block") (resolve/deliver! promise true))) (<!! resolved)) (add-state "after") (is (= [["before" :default] ["during" :override] ["in go block" :override] ;; The following is where it would go wrong without the fix: ["on-deliver!" :override] ["after" :default]] @*states)) (finally (.shutdown executor)))))
22240
; Copyright (c) 2021-present <NAME>, Inc. ; ; Licensed under the Apache License, Version 2.0 (the "License") ; you may not use this file except in compliance with the License. ; You may obtain a copy of the License at ; ; http://www.apache.org/licenses/LICENSE-2.0 ; ; Unless required by applicable law or agreed to in writing, software ; distributed under the License is distributed on an "AS IS" BASIS, ; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ; See the License for the specific language governing permissions and ; limitations under the License. (ns com.walmartlabs.lacinia.resolve-bindings-test (:require [clojure.test :refer [deftest is use-fixtures]] [clojure.core.async :refer [chan go close! <!!]] [com.walmartlabs.lacinia.resolve :as resolve]) (:import (java.util.concurrent ThreadPoolExecutor TimeUnit ArrayBlockingQueue ExecutorService))) (def ^:private ^:dynamic *bound* :default) (def ^:private *states (atom [])) (defn ^:private reset-*states [f] (try (f) (finally (reset! *states [])))) (defn ^:private add-state "Capture the state of *bound*." [tag] (swap! *states conj [tag *bound*])) (use-fixtures :each reset-*states) (defn ^:private ^ExecutorService new-executor [] (ThreadPoolExecutor. 1 5 1 TimeUnit/SECONDS (ArrayBlockingQueue. 1))) ;; First, normal case where there is no executor for the callback (deftest bindings-conveyed-normally (let [resolved (chan) promise (resolve/resolve-promise)] (add-state "before") (binding [*bound* :override] (add-state "during") (resolve/on-deliver! promise (fn [_] (add-state "on-deliver!") (close! resolved))) ;; We use the go macro as it is known to properly convey bindings (go (add-state "in go block") (resolve/deliver! promise true))) (<!! resolved) (add-state "after") (is (= [["before" :default] ["during" :override] ["in go block" :override] ["on-deliver!" 
:override] ["after" :default]] @*states)))) (deftest bindings-conveyed-through-executor (let [resolved (chan) executor (new-executor)] (try (add-state "before") (binding [*bound* :override resolve/*callback-executor* executor] (add-state "during") ;; bindings are captured at the time the promise is created (let [promise (resolve/resolve-promise)] (resolve/on-deliver! promise (fn [_] (add-state "on-deliver!") (close! resolved))) (go (add-state "in go block") (resolve/deliver! promise true))) (<!! resolved)) (add-state "after") (is (= [["before" :default] ["during" :override] ["in go block" :override] ;; The following is where it would go wrong without the fix: ["on-deliver!" :override] ["after" :default]] @*states)) (finally (.shutdown executor)))))
true
; Copyright (c) 2021-present PI:NAME:<NAME>END_PI, Inc. ; ; Licensed under the Apache License, Version 2.0 (the "License") ; you may not use this file except in compliance with the License. ; You may obtain a copy of the License at ; ; http://www.apache.org/licenses/LICENSE-2.0 ; ; Unless required by applicable law or agreed to in writing, software ; distributed under the License is distributed on an "AS IS" BASIS, ; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ; See the License for the specific language governing permissions and ; limitations under the License. (ns com.walmartlabs.lacinia.resolve-bindings-test (:require [clojure.test :refer [deftest is use-fixtures]] [clojure.core.async :refer [chan go close! <!!]] [com.walmartlabs.lacinia.resolve :as resolve]) (:import (java.util.concurrent ThreadPoolExecutor TimeUnit ArrayBlockingQueue ExecutorService))) (def ^:private ^:dynamic *bound* :default) (def ^:private *states (atom [])) (defn ^:private reset-*states [f] (try (f) (finally (reset! *states [])))) (defn ^:private add-state "Capture the state of *bound*." [tag] (swap! *states conj [tag *bound*])) (use-fixtures :each reset-*states) (defn ^:private ^ExecutorService new-executor [] (ThreadPoolExecutor. 1 5 1 TimeUnit/SECONDS (ArrayBlockingQueue. 1))) ;; First, normal case where there is no executor for the callback (deftest bindings-conveyed-normally (let [resolved (chan) promise (resolve/resolve-promise)] (add-state "before") (binding [*bound* :override] (add-state "during") (resolve/on-deliver! promise (fn [_] (add-state "on-deliver!") (close! resolved))) ;; We use the go macro as it is known to properly convey bindings (go (add-state "in go block") (resolve/deliver! promise true))) (<!! resolved) (add-state "after") (is (= [["before" :default] ["during" :override] ["in go block" :override] ["on-deliver!" 
:override] ["after" :default]] @*states)))) (deftest bindings-conveyed-through-executor (let [resolved (chan) executor (new-executor)] (try (add-state "before") (binding [*bound* :override resolve/*callback-executor* executor] (add-state "during") ;; bindings are captured at the time the promise is created (let [promise (resolve/resolve-promise)] (resolve/on-deliver! promise (fn [_] (add-state "on-deliver!") (close! resolved))) (go (add-state "in go block") (resolve/deliver! promise true))) (<!! resolved)) (add-state "after") (is (= [["before" :default] ["during" :override] ["in go block" :override] ;; The following is where it would go wrong without the fix: ["on-deliver!" :override] ["after" :default]] @*states)) (finally (.shutdown executor)))))
[ { "context": "(str \"https://maps.googleapis.com/maps/api/js?key=AIzaSyD4UK2xtTJozwVOZ7GyZrDjaQlCRTa3VpI\")]})\n\n(def youtube\n {:csp [{:script-src [\"https:", "end": 3861, "score": 0.9997425079345703, "start": 3822, "tag": "KEY", "value": "AIzaSyD4UK2xtTJozwVOZ7GyZrDjaQlCRTa3VpI" }, { "context": "a8fdf5453853dc174\" {:value nil}},\n :remote-addr \"::1\",\n :secure? nil,\n :params nil,\n :stale? ", "end": 9003, "score": 0.9922027587890625, "start": 8999, "tag": "IP_ADDRESS", "value": "\"::1" }, { "context": "74349; amplitude_id_43316fd1a98a364a996cba88fcd94d7c=eyJkZXZpY2VJZCI6IjMyY2JkMzYyLTg5NGYtNDhjMy1iNmYyLTJhMGE5MmU4NTVlOFIiLCJ1c2VySWQiOm51bGwsIm9wdE91dCI6ZmFsc2UsInNlc3Npb25JZCI6MTUyMjc0NTA5ODc4OSwibGFzdEV2ZW50VGltZSI6MTUyMjc0NTUwNjA5MywiZXZlbnRJZCI6NSwiaWRlbnRpZnlJZCI6MCwic2VxdWVuY2VOdW1iZXIiOjV9; amplitude_id_9cfb537503cbe43a8fdf5453853dc174=eyJkZXZpY2VJZCI6IjJjMjQzMDljLTMyOTktNGM3Ni1hMGIxLTU3NTFmZjQzNjkzM1IiLCJ1c2VySWQiOm51bGwsIm9wdE91dCI6ZmFsc2UsInNlc3Npb25JZCI6MTUyNzMyNzQwMzIyNCwibGFzdEV2ZW50VGltZSI6MTUyNzMyOTI5NTYzNiwiZXZlbnRJZCI6MTcyMSwiaWRlbnRpZnlJZCI6MCwic2VxdWVuY2VOdW1iZXIiOjE3MjF9\",\n \"content-length\" \"48\",\n \"referer\" \"htt", "end": 10043, "score": 0.9993544816970825, "start": 9496, "tag": "KEY", "value": "7c=eyJkZXZpY2VJZCI6IjMyY2JkMzYyLTg5NGYtNDhjMy1iNmYyLTJhMGE5MmU4NTVlOFIiLCJ1c2VySWQiOm51bGwsIm9wdE91dCI6ZmFsc2UsInNlc3Npb25JZCI6MTUyMjc0NTA5ODc4OSwibGFzdEV2ZW50VGltZSI6MTUyMjc0NTUwNjA5MywiZXZlbnRJZCI6NSwiaWRlbnRpZnlJZCI6MCwic2VxdWVuY2VOdW1iZXIiOjV9; amplitude_id_9cfb537503cbe43a8fdf5453853dc174=eyJkZXZpY2VJZCI6IjJjMjQzMDljLTMyOTktNGM3Ni1hMGIxLTU3NTFmZjQzNjkzM1IiLCJ1c2VySWQiOm51bGwsIm9wdE91dCI6ZmFsc2UsInNlc3Npb25JZCI6MTUyNzMyNzQwMzIyNCwibGFzdEV2ZW50VGltZSI6MTUyNzMyOTI5NTYzNiwiZXZlbnRJZCI6MTcyMSwiaWRlbnRpZnlJZCI6MCwic2VxdWVuY2VOdW1iZXIiOjE3MjF9" }, { "context": " :uri \"/api/gi\",\n :fresh? 
nil,\n :server-name \"::1\",\n :query-string nil,\n :body [[:dynamodb \"g", "end": 10453, "score": 0.9930127859115601, "start": 10449, "tag": "IP_ADDRESS", "value": "\"::1" } ]
src/cljs-node/rx/node/entry.cljs
zk/rx-lib
0
(ns rx.node.entry (:require [rx.kitchen-sink :as ks] [rx.http :as http] [rx.css :as rcss] #_[macchiato.middleware.resource :refer [wrap-resource]] #_[macchiato.middleware.content-type :refer [wrap-content-type]] #_[hiccups.runtime :as rt] #_[macchiato.fs.path :as path] #_[macchiato.fs :as fs] [clojure.string :as str] #_[nsfw.gi :as gi] [httpurr.client :as hc] [httpurr.client.node :refer [client]] [rx.node.awsclient :as ac] [rx.node.mongodb :as mdb] [rx.node.devbus :as devbus] [cljs-node-io.core :as io] [cljs.core.async :as async :refer [<! >! chan close! put! take! timeout] :refer-macros [go go-loop]])) (def FS (js/require "fs")) (defn read-file-sync [path & [opts]] (.readFileSync FS path opts)) (defn <http-get [url & [opts]] (let [ch (chan)] (.catch (.then (hc/send! client (merge {:method :get :url url} opts)) (fn [resp] (put! ch [resp]))) (fn [err] (put! ch [nil err]))) ch)) (def ga-tracking-id "foo") (def google-analytics (let [src (str "(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','https://www.google-analytics.com/analytics.js','ga'); ga('create', '" ga-tracking-id "', 'auto'); ga('send', 'pageview');")] {:head-js [[:script {:type "text/javascript"} src]] :csp [{:script-src ["https://www.google-analytics.com"]}]})) #_["var CLOSURE_UNCOMPILED_DEFINES = {};" "var CLOSURE_NO_DEPS = true;" "if(typeof goog == \"undefined\") document.write('<script src=\"cljs/goog/base.js\"></script>');" "document.write('<script src=\"cljs/goog/deps.js\"></script>');" "document.write('<script src=\"cljs/cljs_deps.js\"></script>');" "document.write('<script>if (typeof goog == \"undefined\") console.warn(\"ClojureScript could not load :main, did you forget to specify :asset-path?\");</script>');" "document.write('<script>goog.require(\"figwheel.connect\");</script>');" 
"document.write('<script>goog.require(\"process.env\");</script>');" "document.write('<script>goog.require(\"main.browser\");</script>');" "document.write(\"<script>figwheel.connect.start();</script>\");"] (def figwheel {:csp [{:script-src (vec (concat ["'unsafe-eval'"] (->> ["if (typeof goog == \"undefined\") console.warn(\"ClojureScript could not load :main, did you forget to specify :asset-path?\");" "goog.require(\"figwheel.connect\");" "goog.require(\"process.env\");" "figwheel.connect.start();" "goog.require(\"main.browserfigwheel\");"] (map (fn [s] (str "'sha512-" (ks/to-base64-str (ks/sha512-bytes s)) "'")))) ["ws://localhost:3449" "'sha256-RnIrup63MTO631n1+K9Ycir54hhC/4FwOnKAV2wNu0M='"])) :frame-ancestors ["'none'"]}]}) (def mapbox {:csp [{:child-src ["blob: " "data: "] :img-src ["'self'" "blob: " "data: "] :worker-src ["'self'" "blob:"] :connect-src ["https://*.tiles.mapbox.com" "https://api.mapbox.com"] :script-src ["'unsafe-eval'"]}] :css ["https://api.tiles.mapbox.com/mapbox-gl-js/v0.44.2/mapbox-gl.css"]}) (def google-maps {:csp [{:script-src ["https://maps.googleapis.com"]}] :head-js [(str "https://maps.googleapis.com/maps/api/js?key=AIzaSyD4UK2xtTJozwVOZ7GyZrDjaQlCRTa3VpI")]}) (def youtube {:csp [{:script-src ["https://www.youtube.com" "https://s.ytimg.com"] :frame-src ["https://www.youtube.com"]}]}) (def SIGS {}) (def app-assets {:body-js [(str "/cljs/app.js?" (SIGS :appjs))] :head-css ["https://code.ionicframework.com/ionicons/2.0.1/css/ionicons.min.css" (str "/css/app.css?" 
(SIGS :appcss))] :meta-names [{:viewport "width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=0"}] :head (->> rcss/font-hrefs (map (fn [href] [:link {:href href :rel :stylesheet}])))}) (def favicons ;; https://www.favicon-generator.org/ {:head [[:link {:rel "apple-touch-icon" :sizes "57x57" :href "/apple-icon-57x57.png"}] [:link {:rel "apple-touch-icon" :sizes "60x60" :href "/apple-icon-60x60.png"}] [:link {:rel "apple-touch-icon" :sizes "72x72" :href "/apple-icon-72x72.png"}] [:link {:rel "apple-touch-icon" :sizes "76x76" :href "/apple-icon-76x76.png"}] [:link {:rel "apple-touch-icon" :sizes "114x114" :href "/apple-icon-114x114.png"}] [:link {:rel "apple-touch-icon" :sizes "120x120" :href "/apple-icon-120x120.png"}] [:link {:rel "apple-touch-icon" :sizes "144x144" :href "/apple-icon-144x144.png"}] [:link {:rel "apple-touch-icon" :sizes "152x152" :href "/apple-icon-152x152.png"}] [:link {:rel "apple-touch-icon" :sizes "180x180" :href "/apple-icon-180x180.png"}] [:link {:rel "icon" :type "image/png" :sizes "192x192" :href "/android-icon-192x192.png"}] [:link {:rel "icon" :type "image/png" :sizes "32x32" :href "/android-icon-32x32.png"}] [:link {:rel "icon" :type "image/png" :sizes "96x96" :href "/android-icon-96x96.png"}] [:link {:rel "icon" :type "image/png" :sizes "16x16" :href "/android-icon-16x16.png"}] [:meta {:name "msapplication-TileColor" :content "#ffffff"}] [:meta {:name "msapplication-TileImage" :content "/ms-icon-144x144.png"}] [:meta {:name "theme-color" :content "#ffffff"}]]}) (defn root-handler [req respond raise] ((-> (fn [req respond raise] (respond (http/render-specs [figwheel google-analytics google-maps app-assets favicons {:body [[:div#cljs-entry]]}]))) http/wrap-html-response) req respond raise)) (def cache-mime-types {"text/javascript" "public, max-age=31536000" "text/css" "public, max-age=31536000"}) (defn handle-dynamodb [fn-name payload] (go (let [[res err](<! 
(ac/<dyndoc fn-name payload))] (if err [nil (pr-str err)] [res])))) (defn handle-mongodb-coll-op [& args] (go (let [[res err] (<! (mdb/<coll-op {:url (aget js/process.env "MONGO_URL") :db (aget js/process.env "MONGO_DATABASE")} args))] (if err [nil (pr-str err)] [res])))) (defn handle-mongodb-find [coll-name query & [opts]] (go (let [[res err] (<! (mdb/<find {:url (aget js/process.env "MONGO_URL") :db (aget js/process.env "MONGO_DATABASE")} coll-name query opts))] (when err (println err)) (if err [nil (pr-str err)] [res])))) (def gi-handlers (merge {:dynamodb handle-dynamodb :mongodb-coll-op handle-mongodb-coll-op :mongodb-find handle-mongodb-find} (devbus/gi-handlers {:s3-bucket "nalopastures" :s3-prefix "debug-app-state"}))) (defn map-async [<f coll] (go-loop [coll coll out []] (if (empty? coll) out (let [res (<! (<f (first coll)))] (recur (rest coll) (conj out res)))))) (defn chan? [x] (instance? cljs.core.async.impl.channels.ManyToManyChannel x)) (defn gi-adapter [handlers] (fn [req respond raise] (go (let [command-ress nil #_ (gi/handle-commands gi-handlers req) realized (loop [crs command-ress out []] (if (empty? crs) out (let [next-res (first crs) next-res (if (chan? next-res) (<! next-res) next-res)] (recur (rest crs) (concat out [next-res])))))] (respond (merge #_(gi/format-response req realized) {:status 200 :headers {"Access-Control-Allow-Origin" "*"}})))))) #_((gi-adapter gi-handlers) {:ssl-client-cert #js {}, :protocol "HTTPS/1.1", :subdomains nil, :cookies {"_ga" {:value nil}, "amplitude_id_43316fd1a98a364a996cba88fcd94d7c" {:value nil}, "amplitude_id_9cfb537503cbe43a8fdf5453853dc174" {:value nil}}, :remote-addr "::1", :secure? nil, :params nil, :stale? nil, :hostname "localhost", :xhr? 
nil, :route-params nil, :headers {"origin" "https://localhost:5000", "host" "localhost:5000", "user-agent" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36", "content-type" "application/transit+json", "cookie" "_ga=GA1.1.633402034.1457074349; amplitude_id_43316fd1a98a364a996cba88fcd94d7c=eyJkZXZpY2VJZCI6IjMyY2JkMzYyLTg5NGYtNDhjMy1iNmYyLTJhMGE5MmU4NTVlOFIiLCJ1c2VySWQiOm51bGwsIm9wdE91dCI6ZmFsc2UsInNlc3Npb25JZCI6MTUyMjc0NTA5ODc4OSwibGFzdEV2ZW50VGltZSI6MTUyMjc0NTUwNjA5MywiZXZlbnRJZCI6NSwiaWRlbnRpZnlJZCI6MCwic2VxdWVuY2VOdW1iZXIiOjV9; amplitude_id_9cfb537503cbe43a8fdf5453853dc174=eyJkZXZpY2VJZCI6IjJjMjQzMDljLTMyOTktNGM3Ni1hMGIxLTU3NTFmZjQzNjkzM1IiLCJ1c2VySWQiOm51bGwsIm9wdE91dCI6ZmFsc2UsInNlc3Npb25JZCI6MTUyNzMyNzQwMzIyNCwibGFzdEV2ZW50VGltZSI6MTUyNzMyOTI5NTYzNiwiZXZlbnRJZCI6MTcyMSwiaWRlbnRpZnlJZCI6MCwic2VxdWVuY2VOdW1iZXIiOjE3MjF9", "content-length" "48", "referer" "https://localhost:5000/", "connection" "keep-alive", "accept" "*/*", "accept-language" "en-US,en;q=0.9", "accept-encoding" "gzip, deflate, br"}, :server-port 5000, :content-length "48", :signed-cookies nil, :url "/api/gi", :content-type "application/transit+json", :uri "/api/gi", :fresh? 
nil, :server-name "::1", :query-string nil, :body [[:dynamodb "get" {:Table "foobar"}]], :scheme :https, :request-method :post,} (fn [resp] (prn "RESP vvvvvvvvvvv") (ks/pp resp))) (defn wrap-log-request [h] (fn [req respond raise] (println "Request" (:request-method req) (:uri req)) (h req respond raise))) (defn wrap-access-control [h] (fn [req respond raise] (h req (fn [resp] (respond (assoc-in resp [:headers "Access-Control-Allow-Origin"] "*"))) raise))) (def handler (-> (http/gen-handler ["" [["/" :root] ["/api/gi" :gi]]] {:root root-handler :gi (-> (gi-adapter gi-handlers) http/wrap-transit-request (http/wrap-transit-response {:handlers (merge mdb/to-transit-handlers)}))}) http/wrap-html-response #_(wrap-resource "resources/public") #_wrap-content-type (http/wrap-cache-control cache-mime-types) wrap-access-control))
105190
(ns rx.node.entry (:require [rx.kitchen-sink :as ks] [rx.http :as http] [rx.css :as rcss] #_[macchiato.middleware.resource :refer [wrap-resource]] #_[macchiato.middleware.content-type :refer [wrap-content-type]] #_[hiccups.runtime :as rt] #_[macchiato.fs.path :as path] #_[macchiato.fs :as fs] [clojure.string :as str] #_[nsfw.gi :as gi] [httpurr.client :as hc] [httpurr.client.node :refer [client]] [rx.node.awsclient :as ac] [rx.node.mongodb :as mdb] [rx.node.devbus :as devbus] [cljs-node-io.core :as io] [cljs.core.async :as async :refer [<! >! chan close! put! take! timeout] :refer-macros [go go-loop]])) (def FS (js/require "fs")) (defn read-file-sync [path & [opts]] (.readFileSync FS path opts)) (defn <http-get [url & [opts]] (let [ch (chan)] (.catch (.then (hc/send! client (merge {:method :get :url url} opts)) (fn [resp] (put! ch [resp]))) (fn [err] (put! ch [nil err]))) ch)) (def ga-tracking-id "foo") (def google-analytics (let [src (str "(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','https://www.google-analytics.com/analytics.js','ga'); ga('create', '" ga-tracking-id "', 'auto'); ga('send', 'pageview');")] {:head-js [[:script {:type "text/javascript"} src]] :csp [{:script-src ["https://www.google-analytics.com"]}]})) #_["var CLOSURE_UNCOMPILED_DEFINES = {};" "var CLOSURE_NO_DEPS = true;" "if(typeof goog == \"undefined\") document.write('<script src=\"cljs/goog/base.js\"></script>');" "document.write('<script src=\"cljs/goog/deps.js\"></script>');" "document.write('<script src=\"cljs/cljs_deps.js\"></script>');" "document.write('<script>if (typeof goog == \"undefined\") console.warn(\"ClojureScript could not load :main, did you forget to specify :asset-path?\");</script>');" "document.write('<script>goog.require(\"figwheel.connect\");</script>');" 
"document.write('<script>goog.require(\"process.env\");</script>');" "document.write('<script>goog.require(\"main.browser\");</script>');" "document.write(\"<script>figwheel.connect.start();</script>\");"] (def figwheel {:csp [{:script-src (vec (concat ["'unsafe-eval'"] (->> ["if (typeof goog == \"undefined\") console.warn(\"ClojureScript could not load :main, did you forget to specify :asset-path?\");" "goog.require(\"figwheel.connect\");" "goog.require(\"process.env\");" "figwheel.connect.start();" "goog.require(\"main.browserfigwheel\");"] (map (fn [s] (str "'sha512-" (ks/to-base64-str (ks/sha512-bytes s)) "'")))) ["ws://localhost:3449" "'sha256-RnIrup63MTO631n1+K9Ycir54hhC/4FwOnKAV2wNu0M='"])) :frame-ancestors ["'none'"]}]}) (def mapbox {:csp [{:child-src ["blob: " "data: "] :img-src ["'self'" "blob: " "data: "] :worker-src ["'self'" "blob:"] :connect-src ["https://*.tiles.mapbox.com" "https://api.mapbox.com"] :script-src ["'unsafe-eval'"]}] :css ["https://api.tiles.mapbox.com/mapbox-gl-js/v0.44.2/mapbox-gl.css"]}) (def google-maps {:csp [{:script-src ["https://maps.googleapis.com"]}] :head-js [(str "https://maps.googleapis.com/maps/api/js?key=<KEY>")]}) (def youtube {:csp [{:script-src ["https://www.youtube.com" "https://s.ytimg.com"] :frame-src ["https://www.youtube.com"]}]}) (def SIGS {}) (def app-assets {:body-js [(str "/cljs/app.js?" (SIGS :appjs))] :head-css ["https://code.ionicframework.com/ionicons/2.0.1/css/ionicons.min.css" (str "/css/app.css?" 
(SIGS :appcss))] :meta-names [{:viewport "width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=0"}] :head (->> rcss/font-hrefs (map (fn [href] [:link {:href href :rel :stylesheet}])))}) (def favicons ;; https://www.favicon-generator.org/ {:head [[:link {:rel "apple-touch-icon" :sizes "57x57" :href "/apple-icon-57x57.png"}] [:link {:rel "apple-touch-icon" :sizes "60x60" :href "/apple-icon-60x60.png"}] [:link {:rel "apple-touch-icon" :sizes "72x72" :href "/apple-icon-72x72.png"}] [:link {:rel "apple-touch-icon" :sizes "76x76" :href "/apple-icon-76x76.png"}] [:link {:rel "apple-touch-icon" :sizes "114x114" :href "/apple-icon-114x114.png"}] [:link {:rel "apple-touch-icon" :sizes "120x120" :href "/apple-icon-120x120.png"}] [:link {:rel "apple-touch-icon" :sizes "144x144" :href "/apple-icon-144x144.png"}] [:link {:rel "apple-touch-icon" :sizes "152x152" :href "/apple-icon-152x152.png"}] [:link {:rel "apple-touch-icon" :sizes "180x180" :href "/apple-icon-180x180.png"}] [:link {:rel "icon" :type "image/png" :sizes "192x192" :href "/android-icon-192x192.png"}] [:link {:rel "icon" :type "image/png" :sizes "32x32" :href "/android-icon-32x32.png"}] [:link {:rel "icon" :type "image/png" :sizes "96x96" :href "/android-icon-96x96.png"}] [:link {:rel "icon" :type "image/png" :sizes "16x16" :href "/android-icon-16x16.png"}] [:meta {:name "msapplication-TileColor" :content "#ffffff"}] [:meta {:name "msapplication-TileImage" :content "/ms-icon-144x144.png"}] [:meta {:name "theme-color" :content "#ffffff"}]]}) (defn root-handler [req respond raise] ((-> (fn [req respond raise] (respond (http/render-specs [figwheel google-analytics google-maps app-assets favicons {:body [[:div#cljs-entry]]}]))) http/wrap-html-response) req respond raise)) (def cache-mime-types {"text/javascript" "public, max-age=31536000" "text/css" "public, max-age=31536000"}) (defn handle-dynamodb [fn-name payload] (go (let [[res err](<! 
(ac/<dyndoc fn-name payload))] (if err [nil (pr-str err)] [res])))) (defn handle-mongodb-coll-op [& args] (go (let [[res err] (<! (mdb/<coll-op {:url (aget js/process.env "MONGO_URL") :db (aget js/process.env "MONGO_DATABASE")} args))] (if err [nil (pr-str err)] [res])))) (defn handle-mongodb-find [coll-name query & [opts]] (go (let [[res err] (<! (mdb/<find {:url (aget js/process.env "MONGO_URL") :db (aget js/process.env "MONGO_DATABASE")} coll-name query opts))] (when err (println err)) (if err [nil (pr-str err)] [res])))) (def gi-handlers (merge {:dynamodb handle-dynamodb :mongodb-coll-op handle-mongodb-coll-op :mongodb-find handle-mongodb-find} (devbus/gi-handlers {:s3-bucket "nalopastures" :s3-prefix "debug-app-state"}))) (defn map-async [<f coll] (go-loop [coll coll out []] (if (empty? coll) out (let [res (<! (<f (first coll)))] (recur (rest coll) (conj out res)))))) (defn chan? [x] (instance? cljs.core.async.impl.channels.ManyToManyChannel x)) (defn gi-adapter [handlers] (fn [req respond raise] (go (let [command-ress nil #_ (gi/handle-commands gi-handlers req) realized (loop [crs command-ress out []] (if (empty? crs) out (let [next-res (first crs) next-res (if (chan? next-res) (<! next-res) next-res)] (recur (rest crs) (concat out [next-res])))))] (respond (merge #_(gi/format-response req realized) {:status 200 :headers {"Access-Control-Allow-Origin" "*"}})))))) #_((gi-adapter gi-handlers) {:ssl-client-cert #js {}, :protocol "HTTPS/1.1", :subdomains nil, :cookies {"_ga" {:value nil}, "amplitude_id_43316fd1a98a364a996cba88fcd94d7c" {:value nil}, "amplitude_id_9cfb537503cbe43a8fdf5453853dc174" {:value nil}}, :remote-addr "::1", :secure? nil, :params nil, :stale? nil, :hostname "localhost", :xhr? 
nil, :route-params nil, :headers {"origin" "https://localhost:5000", "host" "localhost:5000", "user-agent" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36", "content-type" "application/transit+json", "cookie" "_ga=GA1.1.633402034.1457074349; amplitude_id_43316fd1a98a364a996cba88fcd94d<KEY>", "content-length" "48", "referer" "https://localhost:5000/", "connection" "keep-alive", "accept" "*/*", "accept-language" "en-US,en;q=0.9", "accept-encoding" "gzip, deflate, br"}, :server-port 5000, :content-length "48", :signed-cookies nil, :url "/api/gi", :content-type "application/transit+json", :uri "/api/gi", :fresh? nil, :server-name "::1", :query-string nil, :body [[:dynamodb "get" {:Table "foobar"}]], :scheme :https, :request-method :post,} (fn [resp] (prn "RESP vvvvvvvvvvv") (ks/pp resp))) (defn wrap-log-request [h] (fn [req respond raise] (println "Request" (:request-method req) (:uri req)) (h req respond raise))) (defn wrap-access-control [h] (fn [req respond raise] (h req (fn [resp] (respond (assoc-in resp [:headers "Access-Control-Allow-Origin"] "*"))) raise))) (def handler (-> (http/gen-handler ["" [["/" :root] ["/api/gi" :gi]]] {:root root-handler :gi (-> (gi-adapter gi-handlers) http/wrap-transit-request (http/wrap-transit-response {:handlers (merge mdb/to-transit-handlers)}))}) http/wrap-html-response #_(wrap-resource "resources/public") #_wrap-content-type (http/wrap-cache-control cache-mime-types) wrap-access-control))
true
(ns rx.node.entry (:require [rx.kitchen-sink :as ks] [rx.http :as http] [rx.css :as rcss] #_[macchiato.middleware.resource :refer [wrap-resource]] #_[macchiato.middleware.content-type :refer [wrap-content-type]] #_[hiccups.runtime :as rt] #_[macchiato.fs.path :as path] #_[macchiato.fs :as fs] [clojure.string :as str] #_[nsfw.gi :as gi] [httpurr.client :as hc] [httpurr.client.node :refer [client]] [rx.node.awsclient :as ac] [rx.node.mongodb :as mdb] [rx.node.devbus :as devbus] [cljs-node-io.core :as io] [cljs.core.async :as async :refer [<! >! chan close! put! take! timeout] :refer-macros [go go-loop]])) (def FS (js/require "fs")) (defn read-file-sync [path & [opts]] (.readFileSync FS path opts)) (defn <http-get [url & [opts]] (let [ch (chan)] (.catch (.then (hc/send! client (merge {:method :get :url url} opts)) (fn [resp] (put! ch [resp]))) (fn [err] (put! ch [nil err]))) ch)) (def ga-tracking-id "foo") (def google-analytics (let [src (str "(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','https://www.google-analytics.com/analytics.js','ga'); ga('create', '" ga-tracking-id "', 'auto'); ga('send', 'pageview');")] {:head-js [[:script {:type "text/javascript"} src]] :csp [{:script-src ["https://www.google-analytics.com"]}]})) #_["var CLOSURE_UNCOMPILED_DEFINES = {};" "var CLOSURE_NO_DEPS = true;" "if(typeof goog == \"undefined\") document.write('<script src=\"cljs/goog/base.js\"></script>');" "document.write('<script src=\"cljs/goog/deps.js\"></script>');" "document.write('<script src=\"cljs/cljs_deps.js\"></script>');" "document.write('<script>if (typeof goog == \"undefined\") console.warn(\"ClojureScript could not load :main, did you forget to specify :asset-path?\");</script>');" "document.write('<script>goog.require(\"figwheel.connect\");</script>');" 
"document.write('<script>goog.require(\"process.env\");</script>');" "document.write('<script>goog.require(\"main.browser\");</script>');" "document.write(\"<script>figwheel.connect.start();</script>\");"] (def figwheel {:csp [{:script-src (vec (concat ["'unsafe-eval'"] (->> ["if (typeof goog == \"undefined\") console.warn(\"ClojureScript could not load :main, did you forget to specify :asset-path?\");" "goog.require(\"figwheel.connect\");" "goog.require(\"process.env\");" "figwheel.connect.start();" "goog.require(\"main.browserfigwheel\");"] (map (fn [s] (str "'sha512-" (ks/to-base64-str (ks/sha512-bytes s)) "'")))) ["ws://localhost:3449" "'sha256-RnIrup63MTO631n1+K9Ycir54hhC/4FwOnKAV2wNu0M='"])) :frame-ancestors ["'none'"]}]}) (def mapbox {:csp [{:child-src ["blob: " "data: "] :img-src ["'self'" "blob: " "data: "] :worker-src ["'self'" "blob:"] :connect-src ["https://*.tiles.mapbox.com" "https://api.mapbox.com"] :script-src ["'unsafe-eval'"]}] :css ["https://api.tiles.mapbox.com/mapbox-gl-js/v0.44.2/mapbox-gl.css"]}) (def google-maps {:csp [{:script-src ["https://maps.googleapis.com"]}] :head-js [(str "https://maps.googleapis.com/maps/api/js?key=PI:KEY:<KEY>END_PI")]}) (def youtube {:csp [{:script-src ["https://www.youtube.com" "https://s.ytimg.com"] :frame-src ["https://www.youtube.com"]}]}) (def SIGS {}) (def app-assets {:body-js [(str "/cljs/app.js?" (SIGS :appjs))] :head-css ["https://code.ionicframework.com/ionicons/2.0.1/css/ionicons.min.css" (str "/css/app.css?" 
(SIGS :appcss))] :meta-names [{:viewport "width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=0"}] :head (->> rcss/font-hrefs (map (fn [href] [:link {:href href :rel :stylesheet}])))}) (def favicons ;; https://www.favicon-generator.org/ {:head [[:link {:rel "apple-touch-icon" :sizes "57x57" :href "/apple-icon-57x57.png"}] [:link {:rel "apple-touch-icon" :sizes "60x60" :href "/apple-icon-60x60.png"}] [:link {:rel "apple-touch-icon" :sizes "72x72" :href "/apple-icon-72x72.png"}] [:link {:rel "apple-touch-icon" :sizes "76x76" :href "/apple-icon-76x76.png"}] [:link {:rel "apple-touch-icon" :sizes "114x114" :href "/apple-icon-114x114.png"}] [:link {:rel "apple-touch-icon" :sizes "120x120" :href "/apple-icon-120x120.png"}] [:link {:rel "apple-touch-icon" :sizes "144x144" :href "/apple-icon-144x144.png"}] [:link {:rel "apple-touch-icon" :sizes "152x152" :href "/apple-icon-152x152.png"}] [:link {:rel "apple-touch-icon" :sizes "180x180" :href "/apple-icon-180x180.png"}] [:link {:rel "icon" :type "image/png" :sizes "192x192" :href "/android-icon-192x192.png"}] [:link {:rel "icon" :type "image/png" :sizes "32x32" :href "/android-icon-32x32.png"}] [:link {:rel "icon" :type "image/png" :sizes "96x96" :href "/android-icon-96x96.png"}] [:link {:rel "icon" :type "image/png" :sizes "16x16" :href "/android-icon-16x16.png"}] [:meta {:name "msapplication-TileColor" :content "#ffffff"}] [:meta {:name "msapplication-TileImage" :content "/ms-icon-144x144.png"}] [:meta {:name "theme-color" :content "#ffffff"}]]}) (defn root-handler [req respond raise] ((-> (fn [req respond raise] (respond (http/render-specs [figwheel google-analytics google-maps app-assets favicons {:body [[:div#cljs-entry]]}]))) http/wrap-html-response) req respond raise)) (def cache-mime-types {"text/javascript" "public, max-age=31536000" "text/css" "public, max-age=31536000"}) (defn handle-dynamodb [fn-name payload] (go (let [[res err](<! 
(ac/<dyndoc fn-name payload))] (if err [nil (pr-str err)] [res])))) (defn handle-mongodb-coll-op [& args] (go (let [[res err] (<! (mdb/<coll-op {:url (aget js/process.env "MONGO_URL") :db (aget js/process.env "MONGO_DATABASE")} args))] (if err [nil (pr-str err)] [res])))) (defn handle-mongodb-find [coll-name query & [opts]] (go (let [[res err] (<! (mdb/<find {:url (aget js/process.env "MONGO_URL") :db (aget js/process.env "MONGO_DATABASE")} coll-name query opts))] (when err (println err)) (if err [nil (pr-str err)] [res])))) (def gi-handlers (merge {:dynamodb handle-dynamodb :mongodb-coll-op handle-mongodb-coll-op :mongodb-find handle-mongodb-find} (devbus/gi-handlers {:s3-bucket "nalopastures" :s3-prefix "debug-app-state"}))) (defn map-async [<f coll] (go-loop [coll coll out []] (if (empty? coll) out (let [res (<! (<f (first coll)))] (recur (rest coll) (conj out res)))))) (defn chan? [x] (instance? cljs.core.async.impl.channels.ManyToManyChannel x)) (defn gi-adapter [handlers] (fn [req respond raise] (go (let [command-ress nil #_ (gi/handle-commands gi-handlers req) realized (loop [crs command-ress out []] (if (empty? crs) out (let [next-res (first crs) next-res (if (chan? next-res) (<! next-res) next-res)] (recur (rest crs) (concat out [next-res])))))] (respond (merge #_(gi/format-response req realized) {:status 200 :headers {"Access-Control-Allow-Origin" "*"}})))))) #_((gi-adapter gi-handlers) {:ssl-client-cert #js {}, :protocol "HTTPS/1.1", :subdomains nil, :cookies {"_ga" {:value nil}, "amplitude_id_43316fd1a98a364a996cba88fcd94d7c" {:value nil}, "amplitude_id_9cfb537503cbe43a8fdf5453853dc174" {:value nil}}, :remote-addr "::1", :secure? nil, :params nil, :stale? nil, :hostname "localhost", :xhr? 
nil, :route-params nil, :headers {"origin" "https://localhost:5000", "host" "localhost:5000", "user-agent" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36", "content-type" "application/transit+json", "cookie" "_ga=GA1.1.633402034.1457074349; amplitude_id_43316fd1a98a364a996cba88fcd94dPI:KEY:<KEY>END_PI", "content-length" "48", "referer" "https://localhost:5000/", "connection" "keep-alive", "accept" "*/*", "accept-language" "en-US,en;q=0.9", "accept-encoding" "gzip, deflate, br"}, :server-port 5000, :content-length "48", :signed-cookies nil, :url "/api/gi", :content-type "application/transit+json", :uri "/api/gi", :fresh? nil, :server-name "::1", :query-string nil, :body [[:dynamodb "get" {:Table "foobar"}]], :scheme :https, :request-method :post,} (fn [resp] (prn "RESP vvvvvvvvvvv") (ks/pp resp))) (defn wrap-log-request [h] (fn [req respond raise] (println "Request" (:request-method req) (:uri req)) (h req respond raise))) (defn wrap-access-control [h] (fn [req respond raise] (h req (fn [resp] (respond (assoc-in resp [:headers "Access-Control-Allow-Origin"] "*"))) raise))) (def handler (-> (http/gen-handler ["" [["/" :root] ["/api/gi" :gi]]] {:root root-handler :gi (-> (gi-adapter gi-handlers) http/wrap-transit-request (http/wrap-transit-response {:handlers (merge mdb/to-transit-handlers)}))}) http/wrap-html-response #_(wrap-resource "resources/public") #_wrap-content-type (http/wrap-cache-control cache-mime-types) wrap-access-control))
[ { "context": "ssage-tracker confirmations]\n (let [routing-key (random-string)\n body (-> confirmations\n ", "end": 548, "score": 0.910722017288208, "start": 535, "tag": "KEY", "value": "random-string" } ]
test/kithara/test/property.clj
xsc/kithara
21
(ns kithara.test.property (:require [clojure.test.check [generators :as gen] [properties :as prop]] [kithara.test [confirmations :as confirmations] [fixtures :as fix] [handler :as handler]] [kithara.utils :refer [random-string]] [clojure.pprint :as pprint] [clojure.tools.logging :as log] [peripheral.core :as p])) ;; ## Publish Logic (defn- publish! [message-tracker confirmations] (let [routing-key (random-string) body (-> confirmations (handler/make-message) (pr-str) (.getBytes "UTF-8"))] (handler/track! message-tracker routing-key confirmations) (fix/publish! routing-key body))) ;; ## Property (defn- log-stack! [forms] (binding [pprint/*print-pprint-dispatch* pprint/code-dispatch] (->> forms (concat '[-> message-handler]) (pprint/pprint) ^String (with-out-str) (.trim) (log/debugf "[stack under test]%n%s")))) (defn consumer-property ([stack-gen] (consumer-property {} stack-gen)) ([{:keys [message-count wait-ms] :or {message-count 5 wait-ms 2000}} stack-gen] {:pre [(pos? message-count) (pos? wait-ms)]} (prop/for-all [confirmations (gen/vector confirmations/gen 1 message-count) {:keys [forms verifiers build-fn]} stack-gen] (log-stack! forms) (let [message-tracker (handler/make-tracker) message-handler (handler/make message-tracker verifiers) stack (build-fn message-handler {:connection (fix/connection-config) :queue (random-string) :exchange (fix/exchange-name)})] (p/with-start [_ stack] (doseq [sq confirmations] (publish! message-tracker sq)) (and (handler/wait-for-messages! message-tracker wait-ms) (handler/verify-expectations! message-tracker)))))))
8309
(ns kithara.test.property (:require [clojure.test.check [generators :as gen] [properties :as prop]] [kithara.test [confirmations :as confirmations] [fixtures :as fix] [handler :as handler]] [kithara.utils :refer [random-string]] [clojure.pprint :as pprint] [clojure.tools.logging :as log] [peripheral.core :as p])) ;; ## Publish Logic (defn- publish! [message-tracker confirmations] (let [routing-key (<KEY>) body (-> confirmations (handler/make-message) (pr-str) (.getBytes "UTF-8"))] (handler/track! message-tracker routing-key confirmations) (fix/publish! routing-key body))) ;; ## Property (defn- log-stack! [forms] (binding [pprint/*print-pprint-dispatch* pprint/code-dispatch] (->> forms (concat '[-> message-handler]) (pprint/pprint) ^String (with-out-str) (.trim) (log/debugf "[stack under test]%n%s")))) (defn consumer-property ([stack-gen] (consumer-property {} stack-gen)) ([{:keys [message-count wait-ms] :or {message-count 5 wait-ms 2000}} stack-gen] {:pre [(pos? message-count) (pos? wait-ms)]} (prop/for-all [confirmations (gen/vector confirmations/gen 1 message-count) {:keys [forms verifiers build-fn]} stack-gen] (log-stack! forms) (let [message-tracker (handler/make-tracker) message-handler (handler/make message-tracker verifiers) stack (build-fn message-handler {:connection (fix/connection-config) :queue (random-string) :exchange (fix/exchange-name)})] (p/with-start [_ stack] (doseq [sq confirmations] (publish! message-tracker sq)) (and (handler/wait-for-messages! message-tracker wait-ms) (handler/verify-expectations! message-tracker)))))))
true
(ns kithara.test.property (:require [clojure.test.check [generators :as gen] [properties :as prop]] [kithara.test [confirmations :as confirmations] [fixtures :as fix] [handler :as handler]] [kithara.utils :refer [random-string]] [clojure.pprint :as pprint] [clojure.tools.logging :as log] [peripheral.core :as p])) ;; ## Publish Logic (defn- publish! [message-tracker confirmations] (let [routing-key (PI:KEY:<KEY>END_PI) body (-> confirmations (handler/make-message) (pr-str) (.getBytes "UTF-8"))] (handler/track! message-tracker routing-key confirmations) (fix/publish! routing-key body))) ;; ## Property (defn- log-stack! [forms] (binding [pprint/*print-pprint-dispatch* pprint/code-dispatch] (->> forms (concat '[-> message-handler]) (pprint/pprint) ^String (with-out-str) (.trim) (log/debugf "[stack under test]%n%s")))) (defn consumer-property ([stack-gen] (consumer-property {} stack-gen)) ([{:keys [message-count wait-ms] :or {message-count 5 wait-ms 2000}} stack-gen] {:pre [(pos? message-count) (pos? wait-ms)]} (prop/for-all [confirmations (gen/vector confirmations/gen 1 message-count) {:keys [forms verifiers build-fn]} stack-gen] (log-stack! forms) (let [message-tracker (handler/make-tracker) message-handler (handler/make message-tracker verifiers) stack (build-fn message-handler {:connection (fix/connection-config) :queue (random-string) :exchange (fix/exchange-name)})] (p/with-start [_ stack] (doseq [sq confirmations] (publish! message-tracker sq)) (and (handler/wait-for-messages! message-tracker wait-ms) (handler/verify-expectations! message-tracker)))))))
[ { "context": "-------------------------------\n;;\n;;\n;; Author: PLIQUE Guillaume (Yomguithereal)\n;; Version: 0.1\n;;\n(ns clj-fuzz", "end": 215, "score": 0.9998235702514648, "start": 199, "tag": "NAME", "value": "PLIQUE Guillaume" }, { "context": "------------\n;;\n;;\n;; Author: PLIQUE Guillaume (Yomguithereal)\n;; Version: 0.1\n;;\n(ns clj-fuzzy.lancaster-tes", "end": 230, "score": 0.9984562397003174, "start": 217, "tag": "USERNAME", "value": "Yomguithereal" } ]
test/clj_fuzzy/lancaster_test.clj
sooheon/clj-fuzzy
222
;; ------------------------------------------------------------------- ;; clj-fuzzy Lancaster Stemming Tests ;; ------------------------------------------------------------------- ;; ;; ;; Author: PLIQUE Guillaume (Yomguithereal) ;; Version: 0.1 ;; (ns clj-fuzzy.lancaster-test (:require [clojure.test :refer :all] [clj-fuzzy.lancaster :refer :all])) (def tests ['("worker" "work") '("marks" "mark") '("MARKS" "mark") '("living" "liv") '("thing" "thing") '("ear" "ear") '("string" "string") '("triplicate" "triply") '("classified" "class") '("maximum" "maxim") '("presumably" "presum") '("exceed" "excess") '("anguish" "anct") '("affluxion" "affluct") '("discept" "disceiv")]) (deftest stem-test (doseq [pair tests] (let [original-word (first pair) stemmed-word (second pair)] (is (= stemmed-word (stem original-word))))))
90337
;; ------------------------------------------------------------------- ;; clj-fuzzy Lancaster Stemming Tests ;; ------------------------------------------------------------------- ;; ;; ;; Author: <NAME> (Yomguithereal) ;; Version: 0.1 ;; (ns clj-fuzzy.lancaster-test (:require [clojure.test :refer :all] [clj-fuzzy.lancaster :refer :all])) (def tests ['("worker" "work") '("marks" "mark") '("MARKS" "mark") '("living" "liv") '("thing" "thing") '("ear" "ear") '("string" "string") '("triplicate" "triply") '("classified" "class") '("maximum" "maxim") '("presumably" "presum") '("exceed" "excess") '("anguish" "anct") '("affluxion" "affluct") '("discept" "disceiv")]) (deftest stem-test (doseq [pair tests] (let [original-word (first pair) stemmed-word (second pair)] (is (= stemmed-word (stem original-word))))))
true
;; ------------------------------------------------------------------- ;; clj-fuzzy Lancaster Stemming Tests ;; ------------------------------------------------------------------- ;; ;; ;; Author: PI:NAME:<NAME>END_PI (Yomguithereal) ;; Version: 0.1 ;; (ns clj-fuzzy.lancaster-test (:require [clojure.test :refer :all] [clj-fuzzy.lancaster :refer :all])) (def tests ['("worker" "work") '("marks" "mark") '("MARKS" "mark") '("living" "liv") '("thing" "thing") '("ear" "ear") '("string" "string") '("triplicate" "triply") '("classified" "class") '("maximum" "maxim") '("presumably" "presum") '("exceed" "excess") '("anguish" "anct") '("affluxion" "affluct") '("discept" "disceiv")]) (deftest stem-test (doseq [pair tests] (let [original-word (first pair) stemmed-word (second pair)] (is (= stemmed-word (stem original-word))))))
[ { "context": "on/generate-string (assoc bulk-update-body :name \"TEST NAME 2\"))]\n\n (testing \"Bulk update response\")\n (le", "end": 1960, "score": 0.797566294670105, "start": 1949, "tag": "NAME", "value": "TEST NAME 2" }, { "context": " (assoc bulk-update-body :name \"TEST NAME 1\")\n {:accept-format :", "end": 2199, "score": 0.9150573015213013, "start": 2190, "tag": "NAME", "value": "TEST NAME" }, { "context": " (assoc bulk-update-body :name \"TEST NAME 2\")\n {:accept-format :x", "end": 2460, "score": 0.8515602946281433, "start": 2451, "tag": "NAME", "value": "TEST NAME" }, { "context": "sk-id task-id-1,\n :name \"TEST NAME 1\"\n :status-message \"All c", "end": 3261, "score": 0.89534991979599, "start": 3250, "tag": "NAME", "value": "TEST NAME 1" }, { "context": "sk-id task-id-2,\n :name \"TEST NAME 2\"\n :status-message \"All c", "end": 3548, "score": 0.9524432420730591, "start": 3537, "tag": "NAME", "value": "TEST NAME 2" }, { "context": " :status 200,\n :name \"TEST NAME 1\"\n :task-status \"COMPLETE\",\n ", "end": 4231, "score": 0.9952564239501953, "start": 4220, "tag": "NAME", "value": "TEST NAME 1" }, { "context": "1\" \"C111-PROV2\"]\n :name \"TEST NAME\"\n :update-type \"ADD_TO_E", "end": 5180, "score": 0.9974576830863953, "start": 5171, "tag": "NAME", "value": "TEST NAME" }, { "context": "s).\",\n :status 200,\n :name \"TEST NAME\"\n :request-json-body json-body\n ", "end": 6067, "score": 0.9975379705429077, "start": 6058, "tag": "NAME", "value": "TEST NAME" } ]
system-int-test/test/cmr/system_int_test/ingest/bulk_update/bulk_update_flow_test.clj
daniel-zamora/Common-Metadata-Repository
1
(ns cmr.system-int-test.ingest.bulk-update.bulk-update-flow-test "CMR bulk update queueing flow integration tests. Endpoint validation is handled" (:require [cheshire.core :as json] [clojure.test :refer :all] [cmr.common.util :as util :refer [are3]] [cmr.message-queue.test.queue-broker-side-api :as qb-side-api] [cmr.mock-echo.client.echo-util :as e] [cmr.system-int-test.data2.umm-spec-collection :as data-umm-c] [cmr.system-int-test.system :as s] [cmr.system-int-test.utils.index-util :as index] [cmr.system-int-test.utils.ingest-util :as ingest])) (use-fixtures :each (ingest/reset-fixture {"provguid1" "PROV1" "provguid2" "PROV2"})) (defn- generate-concept-id [index provider] (format "C120000000%s-%s" index provider)) (deftest bulk-update-success (let [concept-ids (doall (for [x (range 3)] (:concept-id (ingest/ingest-concept (assoc (data-umm-c/collection-concept (data-umm-c/collection x {})) :concept-id (generate-concept-id x "PROV1")))))) _ (index/wait-until-indexed) bulk-update-body {:concept-ids concept-ids :update-type "ADD_TO_EXISTING" :update-field "SCIENCE_KEYWORDS" :update-value {:Category "EARTH SCIENCE" :Topic "HUMAN DIMENSIONS" :Term "ENVIRONMENTAL IMPACTS" :VariableLevel1 "HEAVY METALS CONCENTRATION"}} json-body1 (json/generate-string (assoc bulk-update-body :name "TEST NAME 1")) json-body2 (json/generate-string (assoc bulk-update-body :name "TEST NAME 2"))] (testing "Bulk update response") (let [response-json (ingest/parse-bulk-update-body :json (ingest/bulk-update-collections "PROV1" (assoc bulk-update-body :name "TEST NAME 1") {:accept-format :json :raw? true})) response-xml (ingest/parse-bulk-update-body :xml (ingest/bulk-update-collections "PROV1" (assoc bulk-update-body :name "TEST NAME 2") {:accept-format :xml :raw? 
true})) task-id-1 (str (:task-id response-json)) task-id-2 (:task-id response-xml)] (is (= 200 (:status response-json))) (is (= 200 (:status response-xml))) (testing "Provider status response" ;; Create another bulk update event with PROV2 to make sure we're just ;; getting PROV1 statuses (ingest/bulk-update-collections "PROV2" bulk-update-body) (qb-side-api/wait-for-terminal-states) (are3 [accept-format] (let [response (ingest/bulk-update-provider-status "PROV1" {:accept-format accept-format})] (is (= (set [{:task-id task-id-1, :name "TEST NAME 1" :status-message "All collection updates completed successfully.", :status "COMPLETE", :request-json-body json-body1} {:task-id task-id-2, :name "TEST NAME 2" :status-message "All collection updates completed successfully.", :status "COMPLETE", :request-json-body json-body2}]) (set (map #(dissoc % :created-at) (:tasks response)))))) "JSON" :json "XML" :xml)) (testing "Provider task status response" (are3 [accept-format] (let [response (ingest/bulk-update-task-status "PROV1" task-id-1 {:accept-format accept-format})] (is (= {:status-message "All collection updates completed successfully.", :status 200, :name "TEST NAME 1" :task-status "COMPLETE", :request-json-body json-body1 :collection-statuses [{:status-message nil, :status "UPDATED", :concept-id "C1200000000-PROV1"} {:status-message nil, :status "UPDATED", :concept-id "C1200000001-PROV1"} {:status-message nil, :status "UPDATED", :concept-id "C1200000002-PROV1"}]} (dissoc response :created-at)))) "JSON" :json "XML" :xml))))) (deftest bulk-update-invalid-concept-id (let [bulk-update-body {:concept-ids ["C1200000100-PROV1" "C111-PROV2"] :name "TEST NAME" :update-type "ADD_TO_EXISTING" :update-field "SCIENCE_KEYWORDS" :update-value {:Category "EARTH SCIENCE" :Topic "HUMAN DIMENSIONS" :Term "ENVIRONMENTAL IMPACTS" :VariableLevel1 "HEAVY METALS CONCENTRATION"}} json-body (json/generate-string bulk-update-body) {:keys [task-id]} (ingest/bulk-update-collections "PROV1" 
bulk-update-body) _ (qb-side-api/wait-for-terminal-states) status-response (ingest/bulk-update-task-status "PROV1" task-id) status-response (dissoc status-response :created-at)] (is (= {:status-message "Task completed with 2 FAILED out of 2 total collection update(s).", :status 200, :name "TEST NAME" :request-json-body json-body :task-status "COMPLETE", :collection-statuses (set [{:status-message "Concept-id [C1200000100-PROV1] does not exist.", :status "FAILED", :concept-id "C1200000100-PROV1"} {:status-message (str "Concept-id [C111-PROV2] is not associated " "with provider-id [PROV1]."), :status "FAILED", :concept-id "C111-PROV2"}])} (update status-response :collection-statuses set)))))
5245
(ns cmr.system-int-test.ingest.bulk-update.bulk-update-flow-test "CMR bulk update queueing flow integration tests. Endpoint validation is handled" (:require [cheshire.core :as json] [clojure.test :refer :all] [cmr.common.util :as util :refer [are3]] [cmr.message-queue.test.queue-broker-side-api :as qb-side-api] [cmr.mock-echo.client.echo-util :as e] [cmr.system-int-test.data2.umm-spec-collection :as data-umm-c] [cmr.system-int-test.system :as s] [cmr.system-int-test.utils.index-util :as index] [cmr.system-int-test.utils.ingest-util :as ingest])) (use-fixtures :each (ingest/reset-fixture {"provguid1" "PROV1" "provguid2" "PROV2"})) (defn- generate-concept-id [index provider] (format "C120000000%s-%s" index provider)) (deftest bulk-update-success (let [concept-ids (doall (for [x (range 3)] (:concept-id (ingest/ingest-concept (assoc (data-umm-c/collection-concept (data-umm-c/collection x {})) :concept-id (generate-concept-id x "PROV1")))))) _ (index/wait-until-indexed) bulk-update-body {:concept-ids concept-ids :update-type "ADD_TO_EXISTING" :update-field "SCIENCE_KEYWORDS" :update-value {:Category "EARTH SCIENCE" :Topic "HUMAN DIMENSIONS" :Term "ENVIRONMENTAL IMPACTS" :VariableLevel1 "HEAVY METALS CONCENTRATION"}} json-body1 (json/generate-string (assoc bulk-update-body :name "TEST NAME 1")) json-body2 (json/generate-string (assoc bulk-update-body :name "<NAME>"))] (testing "Bulk update response") (let [response-json (ingest/parse-bulk-update-body :json (ingest/bulk-update-collections "PROV1" (assoc bulk-update-body :name "<NAME> 1") {:accept-format :json :raw? true})) response-xml (ingest/parse-bulk-update-body :xml (ingest/bulk-update-collections "PROV1" (assoc bulk-update-body :name "<NAME> 2") {:accept-format :xml :raw? 
true})) task-id-1 (str (:task-id response-json)) task-id-2 (:task-id response-xml)] (is (= 200 (:status response-json))) (is (= 200 (:status response-xml))) (testing "Provider status response" ;; Create another bulk update event with PROV2 to make sure we're just ;; getting PROV1 statuses (ingest/bulk-update-collections "PROV2" bulk-update-body) (qb-side-api/wait-for-terminal-states) (are3 [accept-format] (let [response (ingest/bulk-update-provider-status "PROV1" {:accept-format accept-format})] (is (= (set [{:task-id task-id-1, :name "<NAME>" :status-message "All collection updates completed successfully.", :status "COMPLETE", :request-json-body json-body1} {:task-id task-id-2, :name "<NAME>" :status-message "All collection updates completed successfully.", :status "COMPLETE", :request-json-body json-body2}]) (set (map #(dissoc % :created-at) (:tasks response)))))) "JSON" :json "XML" :xml)) (testing "Provider task status response" (are3 [accept-format] (let [response (ingest/bulk-update-task-status "PROV1" task-id-1 {:accept-format accept-format})] (is (= {:status-message "All collection updates completed successfully.", :status 200, :name "<NAME>" :task-status "COMPLETE", :request-json-body json-body1 :collection-statuses [{:status-message nil, :status "UPDATED", :concept-id "C1200000000-PROV1"} {:status-message nil, :status "UPDATED", :concept-id "C1200000001-PROV1"} {:status-message nil, :status "UPDATED", :concept-id "C1200000002-PROV1"}]} (dissoc response :created-at)))) "JSON" :json "XML" :xml))))) (deftest bulk-update-invalid-concept-id (let [bulk-update-body {:concept-ids ["C1200000100-PROV1" "C111-PROV2"] :name "<NAME>" :update-type "ADD_TO_EXISTING" :update-field "SCIENCE_KEYWORDS" :update-value {:Category "EARTH SCIENCE" :Topic "HUMAN DIMENSIONS" :Term "ENVIRONMENTAL IMPACTS" :VariableLevel1 "HEAVY METALS CONCENTRATION"}} json-body (json/generate-string bulk-update-body) {:keys [task-id]} (ingest/bulk-update-collections "PROV1" bulk-update-body) _ 
(qb-side-api/wait-for-terminal-states) status-response (ingest/bulk-update-task-status "PROV1" task-id) status-response (dissoc status-response :created-at)] (is (= {:status-message "Task completed with 2 FAILED out of 2 total collection update(s).", :status 200, :name "<NAME>" :request-json-body json-body :task-status "COMPLETE", :collection-statuses (set [{:status-message "Concept-id [C1200000100-PROV1] does not exist.", :status "FAILED", :concept-id "C1200000100-PROV1"} {:status-message (str "Concept-id [C111-PROV2] is not associated " "with provider-id [PROV1]."), :status "FAILED", :concept-id "C111-PROV2"}])} (update status-response :collection-statuses set)))))
true
(ns cmr.system-int-test.ingest.bulk-update.bulk-update-flow-test "CMR bulk update queueing flow integration tests. Endpoint validation is handled" (:require [cheshire.core :as json] [clojure.test :refer :all] [cmr.common.util :as util :refer [are3]] [cmr.message-queue.test.queue-broker-side-api :as qb-side-api] [cmr.mock-echo.client.echo-util :as e] [cmr.system-int-test.data2.umm-spec-collection :as data-umm-c] [cmr.system-int-test.system :as s] [cmr.system-int-test.utils.index-util :as index] [cmr.system-int-test.utils.ingest-util :as ingest])) (use-fixtures :each (ingest/reset-fixture {"provguid1" "PROV1" "provguid2" "PROV2"})) (defn- generate-concept-id [index provider] (format "C120000000%s-%s" index provider)) (deftest bulk-update-success (let [concept-ids (doall (for [x (range 3)] (:concept-id (ingest/ingest-concept (assoc (data-umm-c/collection-concept (data-umm-c/collection x {})) :concept-id (generate-concept-id x "PROV1")))))) _ (index/wait-until-indexed) bulk-update-body {:concept-ids concept-ids :update-type "ADD_TO_EXISTING" :update-field "SCIENCE_KEYWORDS" :update-value {:Category "EARTH SCIENCE" :Topic "HUMAN DIMENSIONS" :Term "ENVIRONMENTAL IMPACTS" :VariableLevel1 "HEAVY METALS CONCENTRATION"}} json-body1 (json/generate-string (assoc bulk-update-body :name "TEST NAME 1")) json-body2 (json/generate-string (assoc bulk-update-body :name "PI:NAME:<NAME>END_PI"))] (testing "Bulk update response") (let [response-json (ingest/parse-bulk-update-body :json (ingest/bulk-update-collections "PROV1" (assoc bulk-update-body :name "PI:NAME:<NAME>END_PI 1") {:accept-format :json :raw? true})) response-xml (ingest/parse-bulk-update-body :xml (ingest/bulk-update-collections "PROV1" (assoc bulk-update-body :name "PI:NAME:<NAME>END_PI 2") {:accept-format :xml :raw? 
true})) task-id-1 (str (:task-id response-json)) task-id-2 (:task-id response-xml)] (is (= 200 (:status response-json))) (is (= 200 (:status response-xml))) (testing "Provider status response" ;; Create another bulk update event with PROV2 to make sure we're just ;; getting PROV1 statuses (ingest/bulk-update-collections "PROV2" bulk-update-body) (qb-side-api/wait-for-terminal-states) (are3 [accept-format] (let [response (ingest/bulk-update-provider-status "PROV1" {:accept-format accept-format})] (is (= (set [{:task-id task-id-1, :name "PI:NAME:<NAME>END_PI" :status-message "All collection updates completed successfully.", :status "COMPLETE", :request-json-body json-body1} {:task-id task-id-2, :name "PI:NAME:<NAME>END_PI" :status-message "All collection updates completed successfully.", :status "COMPLETE", :request-json-body json-body2}]) (set (map #(dissoc % :created-at) (:tasks response)))))) "JSON" :json "XML" :xml)) (testing "Provider task status response" (are3 [accept-format] (let [response (ingest/bulk-update-task-status "PROV1" task-id-1 {:accept-format accept-format})] (is (= {:status-message "All collection updates completed successfully.", :status 200, :name "PI:NAME:<NAME>END_PI" :task-status "COMPLETE", :request-json-body json-body1 :collection-statuses [{:status-message nil, :status "UPDATED", :concept-id "C1200000000-PROV1"} {:status-message nil, :status "UPDATED", :concept-id "C1200000001-PROV1"} {:status-message nil, :status "UPDATED", :concept-id "C1200000002-PROV1"}]} (dissoc response :created-at)))) "JSON" :json "XML" :xml))))) (deftest bulk-update-invalid-concept-id (let [bulk-update-body {:concept-ids ["C1200000100-PROV1" "C111-PROV2"] :name "PI:NAME:<NAME>END_PI" :update-type "ADD_TO_EXISTING" :update-field "SCIENCE_KEYWORDS" :update-value {:Category "EARTH SCIENCE" :Topic "HUMAN DIMENSIONS" :Term "ENVIRONMENTAL IMPACTS" :VariableLevel1 "HEAVY METALS CONCENTRATION"}} json-body (json/generate-string bulk-update-body) {:keys [task-id]} 
(ingest/bulk-update-collections "PROV1" bulk-update-body) _ (qb-side-api/wait-for-terminal-states) status-response (ingest/bulk-update-task-status "PROV1" task-id) status-response (dissoc status-response :created-at)] (is (= {:status-message "Task completed with 2 FAILED out of 2 total collection update(s).", :status 200, :name "PI:NAME:<NAME>END_PI" :request-json-body json-body :task-status "COMPLETE", :collection-statuses (set [{:status-message "Concept-id [C1200000100-PROV1] does not exist.", :status "FAILED", :concept-id "C1200000100-PROV1"} {:status-message (str "Concept-id [C111-PROV2] is not associated " "with provider-id [PROV1]."), :status "FAILED", :concept-id "C111-PROV2"}])} (update status-response :collection-statuses set)))))
[ { "context": "rtial ring or dome.\n\n VBAP was created by Ville Pulkki. For more information on\n VBAP see http:", "end": 2027, "score": 0.9991544485092163, "start": 2015, "tag": "NAME", "value": "Ville Pulkki" } ]
src/overtone/sc/machinery/ugen/metadata/extras/vbap.clj
ABaldwinHunter/overtone
3,870
(ns overtone.sc.machinery.ugen.metadata.extras.vbap (:use [overtone.sc.machinery.ugen common check])) (def specs [ {:name "VBAP" :summary "Vector Based Amplitude Panner" :args [{:name "num-chans" :default 1 :mode :num-outs :doc "The number of output channels. This must be a fixed integer and not a signal or a control proxy. The architecture of the synth design cannot change after it is compiled." } {:name "in" :default :none :doc "The signal to be panned." :rates #{:ar}} {:name "bufnum" :default 10 :doc "The index of the buffer containing data calculated by the function vbap-speaker-array. Its number of channels must correspond to numChans above." } {:name "azimuth" :default 0 :doc "+/- 180 degrees from the median plane (i.e. straight ahead)" } {:name "elevation" :default 0 :doc "+/- 90 degrees from azimuth plane" } {:name "spread" :default 0 :doc "A value from 0-100. When 0, if the signal is panned exactly to a speaker location the signal is only on that speaker. At values higher than 0, the signal will always be on more than one speaker. This can smooth the panning effect by making localisation blur more constant." }] :rates #{:ar :kr} :check (nth-input-stream? 1) :doc "An implementation of Vector Base Amplitude Panning. This allows for equal power panning of a source over an array of speakers on arbitrary positions on a circle (2D) or sphere (3D) around the listener. Normally this would be a ring, a dome, or a partial ring or dome. VBAP was created by Ville Pulkki. For more information on VBAP see http://www.acoustics.hut.fi/research/cat/vbap/ Examples: ;;; ------------------------------------------------------------------------------------------ ;;; 2-D: (do ;;; define Loudspeaker Positions (def vbap-data (vbap-speaker-array [-45 0 45 90 135 180 -135 -90])) ;;; init buffer on server and store the loudspeaker data matrices in it (def b (buffer (count vbap-data))) (buffer-write! 
b vbap-data) ;;; define a simple synth with pink noise as source (defsynth vbaptest [buf 0 azi 0 ele 0 spread 0] (out 0 (vbap 8 (pink-noise) buf azi ele spread)))) ;;; start the synth (def vbapsynth (vbaptest b 0 0 0)) ;;; change the azimuth (map-indexed #(at (+ (now) (* 1000 %1)) (ctl vbapsynth :azi %2)) '(-45 0 45 90 135 180 -135 -90 -45)) ;;; change spread and repeat the above command: ;;; more than one speaker (ctl vbapsynth :spread 40) ;;; only one speaker (ctl vbapsynth :spread 0) ;;; stop the synth (kill vbapsynth) ;;; ------------------------------------------------------------------------------------------ ;;; 3-D: (do ;;; define Loudspeaker Positions in a zig-zag around the Listener (def vbap-data (vbap-speaker-array [[-45 0] [0 45] [45 0] [90 45] [135 0] [180 45] [-135 0] [-90 45]])) ;;; init buffer on server and store the loudspeaker data matrices in it (def b (buffer (count vbap-data))) (buffer-write! b vbap-data) ;;; define a simple synth with pink noise as source (defsynth vbaptest [buf 0 azi 0 ele 0 spread 0] (out 0 (vbap 8 (pink-noise) buf azi ele spread)))) ;;; start the synth (def vbapsynth (vbaptest b 0 0 0)) ;;; traverse all speakers in a zig-zag motion (map #(at (+ (now) (* 1000 %1)) (ctl vbapsynth :azi %2 :ele %3)) (range) '(-45 0 45 90 135 180 -135 -90 -45) '(0 45 0 45 0 45 0 45 0)) ;;; change spread and repeat the above command: ;;; more than one speaker involved (ctl vbapsynth :spread 40) ;;; only one speaker at a time (ctl vbapsynth :spread 0) ;;; stop the synth (kill vbapsynth) "}])
25646
(ns overtone.sc.machinery.ugen.metadata.extras.vbap (:use [overtone.sc.machinery.ugen common check])) (def specs [ {:name "VBAP" :summary "Vector Based Amplitude Panner" :args [{:name "num-chans" :default 1 :mode :num-outs :doc "The number of output channels. This must be a fixed integer and not a signal or a control proxy. The architecture of the synth design cannot change after it is compiled." } {:name "in" :default :none :doc "The signal to be panned." :rates #{:ar}} {:name "bufnum" :default 10 :doc "The index of the buffer containing data calculated by the function vbap-speaker-array. Its number of channels must correspond to numChans above." } {:name "azimuth" :default 0 :doc "+/- 180 degrees from the median plane (i.e. straight ahead)" } {:name "elevation" :default 0 :doc "+/- 90 degrees from azimuth plane" } {:name "spread" :default 0 :doc "A value from 0-100. When 0, if the signal is panned exactly to a speaker location the signal is only on that speaker. At values higher than 0, the signal will always be on more than one speaker. This can smooth the panning effect by making localisation blur more constant." }] :rates #{:ar :kr} :check (nth-input-stream? 1) :doc "An implementation of Vector Base Amplitude Panning. This allows for equal power panning of a source over an array of speakers on arbitrary positions on a circle (2D) or sphere (3D) around the listener. Normally this would be a ring, a dome, or a partial ring or dome. VBAP was created by <NAME>. For more information on VBAP see http://www.acoustics.hut.fi/research/cat/vbap/ Examples: ;;; ------------------------------------------------------------------------------------------ ;;; 2-D: (do ;;; define Loudspeaker Positions (def vbap-data (vbap-speaker-array [-45 0 45 90 135 180 -135 -90])) ;;; init buffer on server and store the loudspeaker data matrices in it (def b (buffer (count vbap-data))) (buffer-write! 
b vbap-data) ;;; define a simple synth with pink noise as source (defsynth vbaptest [buf 0 azi 0 ele 0 spread 0] (out 0 (vbap 8 (pink-noise) buf azi ele spread)))) ;;; start the synth (def vbapsynth (vbaptest b 0 0 0)) ;;; change the azimuth (map-indexed #(at (+ (now) (* 1000 %1)) (ctl vbapsynth :azi %2)) '(-45 0 45 90 135 180 -135 -90 -45)) ;;; change spread and repeat the above command: ;;; more than one speaker (ctl vbapsynth :spread 40) ;;; only one speaker (ctl vbapsynth :spread 0) ;;; stop the synth (kill vbapsynth) ;;; ------------------------------------------------------------------------------------------ ;;; 3-D: (do ;;; define Loudspeaker Positions in a zig-zag around the Listener (def vbap-data (vbap-speaker-array [[-45 0] [0 45] [45 0] [90 45] [135 0] [180 45] [-135 0] [-90 45]])) ;;; init buffer on server and store the loudspeaker data matrices in it (def b (buffer (count vbap-data))) (buffer-write! b vbap-data) ;;; define a simple synth with pink noise as source (defsynth vbaptest [buf 0 azi 0 ele 0 spread 0] (out 0 (vbap 8 (pink-noise) buf azi ele spread)))) ;;; start the synth (def vbapsynth (vbaptest b 0 0 0)) ;;; traverse all speakers in a zig-zag motion (map #(at (+ (now) (* 1000 %1)) (ctl vbapsynth :azi %2 :ele %3)) (range) '(-45 0 45 90 135 180 -135 -90 -45) '(0 45 0 45 0 45 0 45 0)) ;;; change spread and repeat the above command: ;;; more than one speaker involved (ctl vbapsynth :spread 40) ;;; only one speaker at a time (ctl vbapsynth :spread 0) ;;; stop the synth (kill vbapsynth) "}])
true
(ns overtone.sc.machinery.ugen.metadata.extras.vbap (:use [overtone.sc.machinery.ugen common check])) (def specs [ {:name "VBAP" :summary "Vector Based Amplitude Panner" :args [{:name "num-chans" :default 1 :mode :num-outs :doc "The number of output channels. This must be a fixed integer and not a signal or a control proxy. The architecture of the synth design cannot change after it is compiled." } {:name "in" :default :none :doc "The signal to be panned." :rates #{:ar}} {:name "bufnum" :default 10 :doc "The index of the buffer containing data calculated by the function vbap-speaker-array. Its number of channels must correspond to numChans above." } {:name "azimuth" :default 0 :doc "+/- 180 degrees from the median plane (i.e. straight ahead)" } {:name "elevation" :default 0 :doc "+/- 90 degrees from azimuth plane" } {:name "spread" :default 0 :doc "A value from 0-100. When 0, if the signal is panned exactly to a speaker location the signal is only on that speaker. At values higher than 0, the signal will always be on more than one speaker. This can smooth the panning effect by making localisation blur more constant." }] :rates #{:ar :kr} :check (nth-input-stream? 1) :doc "An implementation of Vector Base Amplitude Panning. This allows for equal power panning of a source over an array of speakers on arbitrary positions on a circle (2D) or sphere (3D) around the listener. Normally this would be a ring, a dome, or a partial ring or dome. VBAP was created by PI:NAME:<NAME>END_PI. For more information on VBAP see http://www.acoustics.hut.fi/research/cat/vbap/ Examples: ;;; ------------------------------------------------------------------------------------------ ;;; 2-D: (do ;;; define Loudspeaker Positions (def vbap-data (vbap-speaker-array [-45 0 45 90 135 180 -135 -90])) ;;; init buffer on server and store the loudspeaker data matrices in it (def b (buffer (count vbap-data))) (buffer-write! 
b vbap-data) ;;; define a simple synth with pink noise as source (defsynth vbaptest [buf 0 azi 0 ele 0 spread 0] (out 0 (vbap 8 (pink-noise) buf azi ele spread)))) ;;; start the synth (def vbapsynth (vbaptest b 0 0 0)) ;;; change the azimuth (map-indexed #(at (+ (now) (* 1000 %1)) (ctl vbapsynth :azi %2)) '(-45 0 45 90 135 180 -135 -90 -45)) ;;; change spread and repeat the above command: ;;; more than one speaker (ctl vbapsynth :spread 40) ;;; only one speaker (ctl vbapsynth :spread 0) ;;; stop the synth (kill vbapsynth) ;;; ------------------------------------------------------------------------------------------ ;;; 3-D: (do ;;; define Loudspeaker Positions in a zig-zag around the Listener (def vbap-data (vbap-speaker-array [[-45 0] [0 45] [45 0] [90 45] [135 0] [180 45] [-135 0] [-90 45]])) ;;; init buffer on server and store the loudspeaker data matrices in it (def b (buffer (count vbap-data))) (buffer-write! b vbap-data) ;;; define a simple synth with pink noise as source (defsynth vbaptest [buf 0 azi 0 ele 0 spread 0] (out 0 (vbap 8 (pink-noise) buf azi ele spread)))) ;;; start the synth (def vbapsynth (vbaptest b 0 0 0)) ;;; traverse all speakers in a zig-zag motion (map #(at (+ (now) (* 1000 %1)) (ctl vbapsynth :azi %2 :ele %3)) (range) '(-45 0 45 90 135 180 -135 -90 -45) '(0 45 0 45 0 45 0 45 0)) ;;; change spread and repeat the above command: ;;; more than one speaker involved (ctl vbapsynth :spread 40) ;;; only one speaker at a time (ctl vbapsynth :spread 0) ;;; stop the synth (kill vbapsynth) "}])
[ { "context": ";; Copyright 2014-2020 King\n;; Copyright 2009-2014 Ragnar Svensson, Christian Murray\n;; Licensed under the Defold Li", "end": 111, "score": 0.9998056888580322, "start": 96, "tag": "NAME", "value": "Ragnar Svensson" }, { "context": "-2020 King\n;; Copyright 2009-2014 Ragnar Svensson, Christian Murray\n;; Licensed under the Defold License version 1.0 ", "end": 129, "score": 0.9998232126235962, "start": 113, "tag": "NAME", "value": "Christian Murray" } ]
editor/src/clj/editor/image_util.clj
cmarincia/defold
0
;; Copyright 2020-2022 The Defold Foundation ;; Copyright 2014-2020 King ;; Copyright 2009-2014 Ragnar Svensson, Christian Murray ;; Licensed under the Defold License version 1.0 (the "License"); you may not use ;; this file except in compliance with the License. ;; ;; You may obtain a copy of the License, together with FAQs at ;; https://www.defold.com/license ;; ;; Unless required by applicable law or agreed to in writing, software distributed ;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR ;; CONDITIONS OF ANY KIND, either express or implied. See the License for the ;; specific language governing permissions and limitations under the License. (ns editor.image-util (:require [clojure.java.io :as io] [dynamo.graph :as g] [schema.core :as s] [editor.geom :refer [clamper]] [editor.types :as types] [editor.pipeline.tex-gen :as tex-gen] [service.log :as log]) (:import [editor.types Rect Image] [java.awt Color] [java.awt.image BufferedImage] [javax.imageio ImageIO])) (set! *warn-on-reflection* true) (defn- convert-to-abgr [^BufferedImage image] (let [type (.getType image)] (if (= type BufferedImage/TYPE_4BYTE_ABGR) image (let [abgr-image (BufferedImage. 
(.getWidth image) (.getHeight image) BufferedImage/TYPE_4BYTE_ABGR)] (doto (.createGraphics abgr-image) (.drawImage image 0 0 nil) (.dispose)) abgr-image)))) (defn read-image ^BufferedImage [source] (with-open [source-stream (io/input-stream source)] (convert-to-abgr (ImageIO/read source-stream)))) (defn read-size [source] (with-open [source-stream (io/input-stream source) image-stream (ImageIO/createImageInputStream source-stream)] (let [readers (ImageIO/getImageReaders image-stream)] (if (.hasNext readers) (let [^javax.imageio.ImageReader reader (.next readers)] (try (.setInput reader image-stream true true) {:width (.getWidth reader 0) :height (.getHeight reader 0)} (finally (.dispose reader)))) (throw (ex-info "No matching ImageReader" {})))))) (defmacro with-graphics [binding & body] (let [rsym (gensym)] `(let ~(into [] (concat binding [rsym `(do ~@body)])) (.dispose ~(first binding)) ~rsym))) (s/defn make-color :- java.awt.Color "creates a color using rgb values (optional a). Color values between 0 and 1.0" ([r :- Float g :- Float b :- Float] (java.awt.Color. r g b)) ([r :- Float g :- Float b :- Float a :- Float] (java.awt.Color. r g b a))) (s/defn make-image :- Image [nm :- s/Any contents :- BufferedImage] (Image. nm contents (.getWidth contents) (.getHeight contents) :sprite-trim-mode-off)) (s/defn blank-image :- BufferedImage ([space :- Rect] (blank-image (.width space) (.height space))) ([width :- s/Int height :- s/Int] (blank-image width height BufferedImage/TYPE_4BYTE_ABGR)) ([width :- s/Int height :- s/Int t :- s/Int] (BufferedImage. width height t))) (s/defn flood :- BufferedImage "Floods the image with the specified color (r g b <a>). Color values between 0 and 1.0." 
[^BufferedImage img :- BufferedImage r :- Float g :- Float b :- Float] (let [gfx (.createGraphics img) color (make-color r g b)] (.setColor gfx color) (.fillRect gfx 0 0 (.getWidth img) (.getHeight img)) (.dispose gfx) img)) (defn load-image [src reference] (make-image reference (read-image src))) ;; Use "Hollywood Cerise" for the placeholder image color. (def placeholder-image (make-image "placeholder" (flood (blank-image 64 64) 0.9568 0.0 0.6313))) (s/defn image-color-components :- long [src :- BufferedImage] (.. src (getColorModel) (getNumComponents))) (s/defn image-infer-type :- long [src :- BufferedImage] (if (not= 0 (.getType src)) (.getType src) (case (image-color-components src) 4 BufferedImage/TYPE_4BYTE_ABGR 3 BufferedImage/TYPE_3BYTE_BGR 1 BufferedImage/TYPE_BYTE_GRAY))) (s/defn image-type :- g/Int [src :- BufferedImage] (let [t (.getType src)] (if (not= 0 t) t (image-infer-type src)))) (s/defn image-convert-type :- BufferedImage [original :- BufferedImage new-type :- g/Int] (if (= new-type (image-type original)) original (let [new (blank-image (.getWidth original) (.getHeight original) new-type)] (with-graphics [g2d (.createGraphics new)] (.drawImage g2d original 0 0 nil)) new))) (s/defn image-bounds :- Rect [source :- Image] (types/rect (.path source) 0 0 (.width source) (.height source))) (s/defn image-pixels :- ints [src :- BufferedImage] (let [w (.getWidth src) h (.getHeight src) pixels (int-array (* w h (image-color-components src)))] (.. src (getRaster) (getPixels 0 0 w h pixels)) pixels)) (s/defn image-from-pixels :- BufferedImage [^long width :- g/Int ^long height :- g/Int t :- g/Int pixels :- ints] (doto (blank-image width height t) (.. (getRaster) (setPixels 0 0 width height pixels)))) (defmacro pixel-index [x y step stride] `(* ~step (+ ~x (* ~y ~stride)))) (s/defn extrude-borders :- Image "Return a new pixel array, larger than the original by `extrusion` with the orig-pixels centered in it. 
The source pixels on the edges will bleed into the surrounding empty space. The pixels in the border region will be identical to the nearest pixel of the source image." [extrusion :- g/Int src :- Image] (if-not (< 0 extrusion) src (let [src-img (types/contents src) src-img (image-convert-type src-img BufferedImage/TYPE_4BYTE_ABGR) orig-width (.width src) orig-height (.height src) new-width (+ orig-width (* 2 extrusion)) new-height (+ orig-height (* 2 extrusion)) src-pixels (image-pixels src-img) num-components (image-color-components src-img) clampx (clamper 0 (dec orig-width)) clampy (clamper 0 (dec orig-height)) new-pixels (int-array (* new-width new-height num-components))] (doseq [y (range new-height) x (range new-width)] (let [sx (clampx (- x extrusion)) sy (clampy (- y extrusion)) src-idx (pixel-index sx sy num-components orig-width) tgt-idx (pixel-index x y num-components new-width)] (doseq [i (range num-components)] (aset-int new-pixels (+ i tgt-idx) (aget src-pixels (+ i src-idx)))))) (make-image (.path src) (image-from-pixels new-width new-height (.getType src-img) new-pixels))))) (defn- map-by [p coll] (zipmap (map p coll) coll)) (s/defn composite :- BufferedImage [onto :- BufferedImage placements :- [Rect] sources :- [Image]] (let [src-by-path (map-by :path sources)] (with-graphics [graphics (.getGraphics onto)] (doseq [^Rect rect placements] (.drawImage graphics (:contents (get src-by-path (.path rect))) (int (.x rect)) (int (.y rect)) nil))) onto))
59896
;; Copyright 2020-2022 The Defold Foundation ;; Copyright 2014-2020 King ;; Copyright 2009-2014 <NAME>, <NAME> ;; Licensed under the Defold License version 1.0 (the "License"); you may not use ;; this file except in compliance with the License. ;; ;; You may obtain a copy of the License, together with FAQs at ;; https://www.defold.com/license ;; ;; Unless required by applicable law or agreed to in writing, software distributed ;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR ;; CONDITIONS OF ANY KIND, either express or implied. See the License for the ;; specific language governing permissions and limitations under the License. (ns editor.image-util (:require [clojure.java.io :as io] [dynamo.graph :as g] [schema.core :as s] [editor.geom :refer [clamper]] [editor.types :as types] [editor.pipeline.tex-gen :as tex-gen] [service.log :as log]) (:import [editor.types Rect Image] [java.awt Color] [java.awt.image BufferedImage] [javax.imageio ImageIO])) (set! *warn-on-reflection* true) (defn- convert-to-abgr [^BufferedImage image] (let [type (.getType image)] (if (= type BufferedImage/TYPE_4BYTE_ABGR) image (let [abgr-image (BufferedImage. 
(.getWidth image) (.getHeight image) BufferedImage/TYPE_4BYTE_ABGR)] (doto (.createGraphics abgr-image) (.drawImage image 0 0 nil) (.dispose)) abgr-image)))) (defn read-image ^BufferedImage [source] (with-open [source-stream (io/input-stream source)] (convert-to-abgr (ImageIO/read source-stream)))) (defn read-size [source] (with-open [source-stream (io/input-stream source) image-stream (ImageIO/createImageInputStream source-stream)] (let [readers (ImageIO/getImageReaders image-stream)] (if (.hasNext readers) (let [^javax.imageio.ImageReader reader (.next readers)] (try (.setInput reader image-stream true true) {:width (.getWidth reader 0) :height (.getHeight reader 0)} (finally (.dispose reader)))) (throw (ex-info "No matching ImageReader" {})))))) (defmacro with-graphics [binding & body] (let [rsym (gensym)] `(let ~(into [] (concat binding [rsym `(do ~@body)])) (.dispose ~(first binding)) ~rsym))) (s/defn make-color :- java.awt.Color "creates a color using rgb values (optional a). Color values between 0 and 1.0" ([r :- Float g :- Float b :- Float] (java.awt.Color. r g b)) ([r :- Float g :- Float b :- Float a :- Float] (java.awt.Color. r g b a))) (s/defn make-image :- Image [nm :- s/Any contents :- BufferedImage] (Image. nm contents (.getWidth contents) (.getHeight contents) :sprite-trim-mode-off)) (s/defn blank-image :- BufferedImage ([space :- Rect] (blank-image (.width space) (.height space))) ([width :- s/Int height :- s/Int] (blank-image width height BufferedImage/TYPE_4BYTE_ABGR)) ([width :- s/Int height :- s/Int t :- s/Int] (BufferedImage. width height t))) (s/defn flood :- BufferedImage "Floods the image with the specified color (r g b <a>). Color values between 0 and 1.0." 
[^BufferedImage img :- BufferedImage r :- Float g :- Float b :- Float] (let [gfx (.createGraphics img) color (make-color r g b)] (.setColor gfx color) (.fillRect gfx 0 0 (.getWidth img) (.getHeight img)) (.dispose gfx) img)) (defn load-image [src reference] (make-image reference (read-image src))) ;; Use "Hollywood Cerise" for the placeholder image color. (def placeholder-image (make-image "placeholder" (flood (blank-image 64 64) 0.9568 0.0 0.6313))) (s/defn image-color-components :- long [src :- BufferedImage] (.. src (getColorModel) (getNumComponents))) (s/defn image-infer-type :- long [src :- BufferedImage] (if (not= 0 (.getType src)) (.getType src) (case (image-color-components src) 4 BufferedImage/TYPE_4BYTE_ABGR 3 BufferedImage/TYPE_3BYTE_BGR 1 BufferedImage/TYPE_BYTE_GRAY))) (s/defn image-type :- g/Int [src :- BufferedImage] (let [t (.getType src)] (if (not= 0 t) t (image-infer-type src)))) (s/defn image-convert-type :- BufferedImage [original :- BufferedImage new-type :- g/Int] (if (= new-type (image-type original)) original (let [new (blank-image (.getWidth original) (.getHeight original) new-type)] (with-graphics [g2d (.createGraphics new)] (.drawImage g2d original 0 0 nil)) new))) (s/defn image-bounds :- Rect [source :- Image] (types/rect (.path source) 0 0 (.width source) (.height source))) (s/defn image-pixels :- ints [src :- BufferedImage] (let [w (.getWidth src) h (.getHeight src) pixels (int-array (* w h (image-color-components src)))] (.. src (getRaster) (getPixels 0 0 w h pixels)) pixels)) (s/defn image-from-pixels :- BufferedImage [^long width :- g/Int ^long height :- g/Int t :- g/Int pixels :- ints] (doto (blank-image width height t) (.. (getRaster) (setPixels 0 0 width height pixels)))) (defmacro pixel-index [x y step stride] `(* ~step (+ ~x (* ~y ~stride)))) (s/defn extrude-borders :- Image "Return a new pixel array, larger than the original by `extrusion` with the orig-pixels centered in it. 
The source pixels on the edges will bleed into the surrounding empty space. The pixels in the border region will be identical to the nearest pixel of the source image." [extrusion :- g/Int src :- Image] (if-not (< 0 extrusion) src (let [src-img (types/contents src) src-img (image-convert-type src-img BufferedImage/TYPE_4BYTE_ABGR) orig-width (.width src) orig-height (.height src) new-width (+ orig-width (* 2 extrusion)) new-height (+ orig-height (* 2 extrusion)) src-pixels (image-pixels src-img) num-components (image-color-components src-img) clampx (clamper 0 (dec orig-width)) clampy (clamper 0 (dec orig-height)) new-pixels (int-array (* new-width new-height num-components))] (doseq [y (range new-height) x (range new-width)] (let [sx (clampx (- x extrusion)) sy (clampy (- y extrusion)) src-idx (pixel-index sx sy num-components orig-width) tgt-idx (pixel-index x y num-components new-width)] (doseq [i (range num-components)] (aset-int new-pixels (+ i tgt-idx) (aget src-pixels (+ i src-idx)))))) (make-image (.path src) (image-from-pixels new-width new-height (.getType src-img) new-pixels))))) (defn- map-by [p coll] (zipmap (map p coll) coll)) (s/defn composite :- BufferedImage [onto :- BufferedImage placements :- [Rect] sources :- [Image]] (let [src-by-path (map-by :path sources)] (with-graphics [graphics (.getGraphics onto)] (doseq [^Rect rect placements] (.drawImage graphics (:contents (get src-by-path (.path rect))) (int (.x rect)) (int (.y rect)) nil))) onto))
true
;; Copyright 2020-2022 The Defold Foundation ;; Copyright 2014-2020 King ;; Copyright 2009-2014 PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI ;; Licensed under the Defold License version 1.0 (the "License"); you may not use ;; this file except in compliance with the License. ;; ;; You may obtain a copy of the License, together with FAQs at ;; https://www.defold.com/license ;; ;; Unless required by applicable law or agreed to in writing, software distributed ;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR ;; CONDITIONS OF ANY KIND, either express or implied. See the License for the ;; specific language governing permissions and limitations under the License. (ns editor.image-util (:require [clojure.java.io :as io] [dynamo.graph :as g] [schema.core :as s] [editor.geom :refer [clamper]] [editor.types :as types] [editor.pipeline.tex-gen :as tex-gen] [service.log :as log]) (:import [editor.types Rect Image] [java.awt Color] [java.awt.image BufferedImage] [javax.imageio ImageIO])) (set! *warn-on-reflection* true) (defn- convert-to-abgr [^BufferedImage image] (let [type (.getType image)] (if (= type BufferedImage/TYPE_4BYTE_ABGR) image (let [abgr-image (BufferedImage. 
(.getWidth image) (.getHeight image) BufferedImage/TYPE_4BYTE_ABGR)] (doto (.createGraphics abgr-image) (.drawImage image 0 0 nil) (.dispose)) abgr-image)))) (defn read-image ^BufferedImage [source] (with-open [source-stream (io/input-stream source)] (convert-to-abgr (ImageIO/read source-stream)))) (defn read-size [source] (with-open [source-stream (io/input-stream source) image-stream (ImageIO/createImageInputStream source-stream)] (let [readers (ImageIO/getImageReaders image-stream)] (if (.hasNext readers) (let [^javax.imageio.ImageReader reader (.next readers)] (try (.setInput reader image-stream true true) {:width (.getWidth reader 0) :height (.getHeight reader 0)} (finally (.dispose reader)))) (throw (ex-info "No matching ImageReader" {})))))) (defmacro with-graphics [binding & body] (let [rsym (gensym)] `(let ~(into [] (concat binding [rsym `(do ~@body)])) (.dispose ~(first binding)) ~rsym))) (s/defn make-color :- java.awt.Color "creates a color using rgb values (optional a). Color values between 0 and 1.0" ([r :- Float g :- Float b :- Float] (java.awt.Color. r g b)) ([r :- Float g :- Float b :- Float a :- Float] (java.awt.Color. r g b a))) (s/defn make-image :- Image [nm :- s/Any contents :- BufferedImage] (Image. nm contents (.getWidth contents) (.getHeight contents) :sprite-trim-mode-off)) (s/defn blank-image :- BufferedImage ([space :- Rect] (blank-image (.width space) (.height space))) ([width :- s/Int height :- s/Int] (blank-image width height BufferedImage/TYPE_4BYTE_ABGR)) ([width :- s/Int height :- s/Int t :- s/Int] (BufferedImage. width height t))) (s/defn flood :- BufferedImage "Floods the image with the specified color (r g b <a>). Color values between 0 and 1.0." 
[^BufferedImage img :- BufferedImage r :- Float g :- Float b :- Float] (let [gfx (.createGraphics img) color (make-color r g b)] (.setColor gfx color) (.fillRect gfx 0 0 (.getWidth img) (.getHeight img)) (.dispose gfx) img)) (defn load-image [src reference] (make-image reference (read-image src))) ;; Use "Hollywood Cerise" for the placeholder image color. (def placeholder-image (make-image "placeholder" (flood (blank-image 64 64) 0.9568 0.0 0.6313))) (s/defn image-color-components :- long [src :- BufferedImage] (.. src (getColorModel) (getNumComponents))) (s/defn image-infer-type :- long [src :- BufferedImage] (if (not= 0 (.getType src)) (.getType src) (case (image-color-components src) 4 BufferedImage/TYPE_4BYTE_ABGR 3 BufferedImage/TYPE_3BYTE_BGR 1 BufferedImage/TYPE_BYTE_GRAY))) (s/defn image-type :- g/Int [src :- BufferedImage] (let [t (.getType src)] (if (not= 0 t) t (image-infer-type src)))) (s/defn image-convert-type :- BufferedImage [original :- BufferedImage new-type :- g/Int] (if (= new-type (image-type original)) original (let [new (blank-image (.getWidth original) (.getHeight original) new-type)] (with-graphics [g2d (.createGraphics new)] (.drawImage g2d original 0 0 nil)) new))) (s/defn image-bounds :- Rect [source :- Image] (types/rect (.path source) 0 0 (.width source) (.height source))) (s/defn image-pixels :- ints [src :- BufferedImage] (let [w (.getWidth src) h (.getHeight src) pixels (int-array (* w h (image-color-components src)))] (.. src (getRaster) (getPixels 0 0 w h pixels)) pixels)) (s/defn image-from-pixels :- BufferedImage [^long width :- g/Int ^long height :- g/Int t :- g/Int pixels :- ints] (doto (blank-image width height t) (.. (getRaster) (setPixels 0 0 width height pixels)))) (defmacro pixel-index [x y step stride] `(* ~step (+ ~x (* ~y ~stride)))) (s/defn extrude-borders :- Image "Return a new pixel array, larger than the original by `extrusion` with the orig-pixels centered in it. 
The source pixels on the edges will bleed into the surrounding empty space. The pixels in the border region will be identical to the nearest pixel of the source image." [extrusion :- g/Int src :- Image] (if-not (< 0 extrusion) src (let [src-img (types/contents src) src-img (image-convert-type src-img BufferedImage/TYPE_4BYTE_ABGR) orig-width (.width src) orig-height (.height src) new-width (+ orig-width (* 2 extrusion)) new-height (+ orig-height (* 2 extrusion)) src-pixels (image-pixels src-img) num-components (image-color-components src-img) clampx (clamper 0 (dec orig-width)) clampy (clamper 0 (dec orig-height)) new-pixels (int-array (* new-width new-height num-components))] (doseq [y (range new-height) x (range new-width)] (let [sx (clampx (- x extrusion)) sy (clampy (- y extrusion)) src-idx (pixel-index sx sy num-components orig-width) tgt-idx (pixel-index x y num-components new-width)] (doseq [i (range num-components)] (aset-int new-pixels (+ i tgt-idx) (aget src-pixels (+ i src-idx)))))) (make-image (.path src) (image-from-pixels new-width new-height (.getType src-img) new-pixels))))) (defn- map-by [p coll] (zipmap (map p coll) coll)) (s/defn composite :- BufferedImage [onto :- BufferedImage placements :- [Rect] sources :- [Image]] (let [src-by-path (map-by :path sources)] (with-graphics [graphics (.getGraphics onto)] (doseq [^Rect rect placements] (.drawImage graphics (:contents (get src-by-path (.path rect))) (int (.x rect)) (int (.y rect)) nil))) onto))
[ { "context": " :english \"Hello World\"\n :spanish \"Hola Mundo\"\n :german \"Hallo Welt\"\n :fr", "end": 432, "score": 0.9996756315231323, "start": 422, "tag": "NAME", "value": "Hola Mundo" }, { "context": " :spanish \"Hola Mundo\"\n :german \"Hallo Welt\"\n :french \"Bonjour tout le monde\")))", "end": 466, "score": 0.9997084736824036, "start": 456, "tag": "NAME", "value": "Hallo Welt" } ]
examples/hello_world/test/hello_world/core_test.cljs
BirdseyeSoftware/buster-cljs
1
(ns hello-world.core-test (:require [hello-world.core :as hello]) (:require-macros [buster-cljs.macros :refer [initialize-buster deftest describe it is are]])) ;; NOTE: This call is important if you run your test-suite against node (initialize-buster) (deftest test-say-hello (it "say-hello works correctly" (are [in out] (= (hello/say-hello in) out) :english "Hello World" :spanish "Hola Mundo" :german "Hallo Welt" :french "Bonjour tout le monde")))
62877
(ns hello-world.core-test (:require [hello-world.core :as hello]) (:require-macros [buster-cljs.macros :refer [initialize-buster deftest describe it is are]])) ;; NOTE: This call is important if you run your test-suite against node (initialize-buster) (deftest test-say-hello (it "say-hello works correctly" (are [in out] (= (hello/say-hello in) out) :english "Hello World" :spanish "<NAME>" :german "<NAME>" :french "Bonjour tout le monde")))
true
(ns hello-world.core-test (:require [hello-world.core :as hello]) (:require-macros [buster-cljs.macros :refer [initialize-buster deftest describe it is are]])) ;; NOTE: This call is important if you run your test-suite against node (initialize-buster) (deftest test-say-hello (it "say-hello works correctly" (are [in out] (= (hello/say-hello in) out) :english "Hello World" :spanish "PI:NAME:<NAME>END_PI" :german "PI:NAME:<NAME>END_PI" :french "Bonjour tout le monde")))
[ { "context": "\n:special-snowflake\n\n; Map (Hash in Ruby)\n{:name \"Greenhouse\" :city \"New York\"}\n\n; Set\n#{\"fred\" \"ethel\"}\n\n; Co", "end": 267, "score": 0.8939305543899536, "start": 257, "tag": "NAME", "value": "Greenhouse" }, { "context": ")\n{:name \"Greenhouse\" :city \"New York\"}\n\n; Set\n#{\"fred\" \"ethel\"}\n\n; Composable (vector of maps)\n[{:name ", "end": 301, "score": 0.9848243594169617, "start": 297, "tag": "NAME", "value": "fred" }, { "context": "e \"Greenhouse\" :city \"New York\"}\n\n; Set\n#{\"fred\" \"ethel\"}\n\n; Composable (vector of maps)\n[{:name \"Greenho", "end": 309, "score": 0.8432551622390747, "start": 304, "tag": "NAME", "value": "ethel" }, { "context": " \"ethel\"}\n\n; Composable (vector of maps)\n[{:name \"Greenhouse\" :city \"New York\"}\n {:name \"Dropbox\" :city \"San F", "end": 362, "score": 0.9339647889137268, "start": 352, "tag": "NAME", "value": "Greenhouse" }, { "context": ")\n[{:name \"Greenhouse\" :city \"New York\"}\n {:name \"Dropbox\" :city \"San Francisco\"}]\n\n\n\n\n\n\n\n\n\n\n;;; EXAMPLES\n\n", "end": 398, "score": 0.9144544005393982, "start": 391, "tag": "NAME", "value": "Dropbox" }, { "context": "# IMMUTABLE DATA STRUCTURES\n\n(def company {:name \"Greenhouse\"})\n\n(assoc company :city \"New York\")\n\n(dissoc com", "end": 1516, "score": 0.9995505213737488, "start": 1506, "tag": "NAME", "value": "Greenhouse" }, { "context": "LOJURE.SPEC\n\n(s/def ::sourcing_strategy_key\n #{ \"CANDIDATE_SEARCH\"\n \"COMPANY_MARKETING\"\n \"AGENCIES\"})\n\n(s/d", "end": 2938, "score": 0.9934375286102295, "start": 2922, "tag": "KEY", "value": "CANDIDATE_SEARCH" }, { "context": "urcing_strategy_key\n #{ \"CANDIDATE_SEARCH\"\n \"COMPANY_MARKETING\"\n \"AGENCIES\"})\n\n(s/def ::strategies (s/coll-o", "end": 2963, "score": 0.9863964915275574, "start": 2946, "tag": "KEY", "value": "COMPANY_MARKETING" }, { "context": "\"CANDIDATE_SEARCH\"\n \"COMPANY_MARKETING\"\n \"AGENCIES\"})\n\n(s/def ::strategies (s/coll-of 
::sourcing_str", "end": 2979, "score": 0.9841020703315735, "start": 2971, "tag": "KEY", "value": "AGENCIES" }, { "context": "lah\"])\n (s/explain-data ::sourcing_strategy_key \"blahblah\")\n (s/conform ::strategies [\"CANDIDATE_SEARCH\" \"", "end": 3209, "score": 0.731285035610199, "start": 3201, "tag": "KEY", "value": "blahblah" } ]
data/test/clojure/171fefcd8079ca1d9008ad905283a88c573ec856core.clj
harshp8l/deep-learning-lang-detection
84
(ns clojure-lunch-and-learn.core (:require [rhizome.viz :as viz] [clojure.spec :as s]) (:gen-class)) ;;; DATA ; Vector ["a" "b" "c"] ; List '(1 2 3) ; Keyword (similar to symbols in Ruby) :special-snowflake ; Map (Hash in Ruby) {:name "Greenhouse" :city "New York"} ; Set #{"fred" "ethel"} ; Composable (vector of maps) [{:name "Greenhouse" :city "New York"} {:name "Dropbox" :city "San Francisco"}] ;;; EXAMPLES ; HTTP Headers {:user-agent "Mozilla/5.0 (Macintosh)", :cache-control "max-age=0", :host "localhost:3000", :accept-encoding "gzip, deflate, sdch, br", :connection "keep-alive", :accept-language "en-US,en;q=0.8", :accept "text/html"} ; SQL (if that's your thing) {:select [:a :b :c] :from [:foo] :where [:= :f.a "baz"]} ; HTML (Hiccup) ; <div id="hello" class="content"><p>Hello world!</p></div> [:div {:id "hello" :class "content"} [:p "Hello world!"]] ; Onyx Directed Acyclic Graph (DAG) (def graph {:in [:split-by-spaces] :split-by-spaces [:mixed-case] :mixed-case [:loud :question] :loud [:loud-output] :question [:question-output]}) (comment (viz/view-graph (keys graph) graph :node->descriptor (fn [n] {:label (name n)}))) ;; FUNCTIONS ; Invoking functions (+ 1 1) (str "aa" "bb" "cc" "dd") (+ (* 2 2) (* 3 2)) (defn welcome [name] (str "Hi " name "!")) (comment (welcome "everyone!")) ;; # IMMUTABLE DATA STRUCTURES (def company {:name "Greenhouse"}) (assoc company :city "New York") (dissoc company :name) ;; How does this work? ;; - Structural sharing ;; - implemented efficiently ;; Why would you want to do this? ;; - knowing that your data structure ;; is not going to change means ;; it is shareable ;; - eliminates an entire class of bugs ;; Other examples of immutability in the wild ;; - Git ;; - Logging ;; SO HOW DO YOU MANAGE CHANGE? (def greenhouse-initial-state {:employees 200}) (def greenhouse (atom greenhouse-initial-state)) (comment (swap! greenhouse update :employees inc)) ;; ~90% your code ends up being pure ;; functions that don't have side-effects. 
;; And then there's a small part of your code ;; that deals with changing the world and ;; it is isolated ;; MACROS (defmacro unless [pred then else] `(if (not ~pred) ~then ~else)) ;; Macros are expanded at compile time (comment (unless false "Return me" "Not me") (macroexpand '(unless false "Return me" "Not me"))) ;; One of the benefits of LISP. ;; Because of this property, new ;; language features can be added ;; horizontally through libraries. ; ;; Core features of the language are ;; implemented this way ;; - core.async (golang-like async processing) ;; - core.match (pattern-matching) ;; - clojure.spec (data specification) ;; BONUS! ;; ;; CLOJURE.SPEC (s/def ::sourcing_strategy_key #{ "CANDIDATE_SEARCH" "COMPANY_MARKETING" "AGENCIES"}) (s/def ::strategies (s/coll-of ::sourcing_strategy_key)) (comment (s/valid? ::sourcing_strategy_key "blahblah") (s/explain ::strategies ["OTHER" "AGENCIES" "blahblah"]) (s/explain-data ::sourcing_strategy_key "blahblah") (s/conform ::strategies ["CANDIDATE_SEARCH" "AGENCIES"]))
96373
(ns clojure-lunch-and-learn.core (:require [rhizome.viz :as viz] [clojure.spec :as s]) (:gen-class)) ;;; DATA ; Vector ["a" "b" "c"] ; List '(1 2 3) ; Keyword (similar to symbols in Ruby) :special-snowflake ; Map (Hash in Ruby) {:name "<NAME>" :city "New York"} ; Set #{"<NAME>" "<NAME>"} ; Composable (vector of maps) [{:name "<NAME>" :city "New York"} {:name "<NAME>" :city "San Francisco"}] ;;; EXAMPLES ; HTTP Headers {:user-agent "Mozilla/5.0 (Macintosh)", :cache-control "max-age=0", :host "localhost:3000", :accept-encoding "gzip, deflate, sdch, br", :connection "keep-alive", :accept-language "en-US,en;q=0.8", :accept "text/html"} ; SQL (if that's your thing) {:select [:a :b :c] :from [:foo] :where [:= :f.a "baz"]} ; HTML (Hiccup) ; <div id="hello" class="content"><p>Hello world!</p></div> [:div {:id "hello" :class "content"} [:p "Hello world!"]] ; Onyx Directed Acyclic Graph (DAG) (def graph {:in [:split-by-spaces] :split-by-spaces [:mixed-case] :mixed-case [:loud :question] :loud [:loud-output] :question [:question-output]}) (comment (viz/view-graph (keys graph) graph :node->descriptor (fn [n] {:label (name n)}))) ;; FUNCTIONS ; Invoking functions (+ 1 1) (str "aa" "bb" "cc" "dd") (+ (* 2 2) (* 3 2)) (defn welcome [name] (str "Hi " name "!")) (comment (welcome "everyone!")) ;; # IMMUTABLE DATA STRUCTURES (def company {:name "<NAME>"}) (assoc company :city "New York") (dissoc company :name) ;; How does this work? ;; - Structural sharing ;; - implemented efficiently ;; Why would you want to do this? ;; - knowing that your data structure ;; is not going to change means ;; it is shareable ;; - eliminates an entire class of bugs ;; Other examples of immutability in the wild ;; - Git ;; - Logging ;; SO HOW DO YOU MANAGE CHANGE? (def greenhouse-initial-state {:employees 200}) (def greenhouse (atom greenhouse-initial-state)) (comment (swap! greenhouse update :employees inc)) ;; ~90% your code ends up being pure ;; functions that don't have side-effects. 
;; And then there's a small part of your code ;; that deals with changing the world and ;; it is isolated ;; MACROS (defmacro unless [pred then else] `(if (not ~pred) ~then ~else)) ;; Macros are expanded at compile time (comment (unless false "Return me" "Not me") (macroexpand '(unless false "Return me" "Not me"))) ;; One of the benefits of LISP. ;; Because of this property, new ;; language features can be added ;; horizontally through libraries. ; ;; Core features of the language are ;; implemented this way ;; - core.async (golang-like async processing) ;; - core.match (pattern-matching) ;; - clojure.spec (data specification) ;; BONUS! ;; ;; CLOJURE.SPEC (s/def ::sourcing_strategy_key #{ "<KEY>" "<KEY>" "<KEY>"}) (s/def ::strategies (s/coll-of ::sourcing_strategy_key)) (comment (s/valid? ::sourcing_strategy_key "blahblah") (s/explain ::strategies ["OTHER" "AGENCIES" "blahblah"]) (s/explain-data ::sourcing_strategy_key "<KEY>") (s/conform ::strategies ["CANDIDATE_SEARCH" "AGENCIES"]))
true
(ns clojure-lunch-and-learn.core (:require [rhizome.viz :as viz] [clojure.spec :as s]) (:gen-class)) ;;; DATA ; Vector ["a" "b" "c"] ; List '(1 2 3) ; Keyword (similar to symbols in Ruby) :special-snowflake ; Map (Hash in Ruby) {:name "PI:NAME:<NAME>END_PI" :city "New York"} ; Set #{"PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI"} ; Composable (vector of maps) [{:name "PI:NAME:<NAME>END_PI" :city "New York"} {:name "PI:NAME:<NAME>END_PI" :city "San Francisco"}] ;;; EXAMPLES ; HTTP Headers {:user-agent "Mozilla/5.0 (Macintosh)", :cache-control "max-age=0", :host "localhost:3000", :accept-encoding "gzip, deflate, sdch, br", :connection "keep-alive", :accept-language "en-US,en;q=0.8", :accept "text/html"} ; SQL (if that's your thing) {:select [:a :b :c] :from [:foo] :where [:= :f.a "baz"]} ; HTML (Hiccup) ; <div id="hello" class="content"><p>Hello world!</p></div> [:div {:id "hello" :class "content"} [:p "Hello world!"]] ; Onyx Directed Acyclic Graph (DAG) (def graph {:in [:split-by-spaces] :split-by-spaces [:mixed-case] :mixed-case [:loud :question] :loud [:loud-output] :question [:question-output]}) (comment (viz/view-graph (keys graph) graph :node->descriptor (fn [n] {:label (name n)}))) ;; FUNCTIONS ; Invoking functions (+ 1 1) (str "aa" "bb" "cc" "dd") (+ (* 2 2) (* 3 2)) (defn welcome [name] (str "Hi " name "!")) (comment (welcome "everyone!")) ;; # IMMUTABLE DATA STRUCTURES (def company {:name "PI:NAME:<NAME>END_PI"}) (assoc company :city "New York") (dissoc company :name) ;; How does this work? ;; - Structural sharing ;; - implemented efficiently ;; Why would you want to do this? ;; - knowing that your data structure ;; is not going to change means ;; it is shareable ;; - eliminates an entire class of bugs ;; Other examples of immutability in the wild ;; - Git ;; - Logging ;; SO HOW DO YOU MANAGE CHANGE? (def greenhouse-initial-state {:employees 200}) (def greenhouse (atom greenhouse-initial-state)) (comment (swap! 
greenhouse update :employees inc)) ;; ~90% your code ends up being pure ;; functions that don't have side-effects. ;; And then there's a small part of your code ;; that deals with changing the world and ;; it is isolated ;; MACROS (defmacro unless [pred then else] `(if (not ~pred) ~then ~else)) ;; Macros are expanded at compile time (comment (unless false "Return me" "Not me") (macroexpand '(unless false "Return me" "Not me"))) ;; One of the benefits of LISP. ;; Because of this property, new ;; language features can be added ;; horizontally through libraries. ; ;; Core features of the language are ;; implemented this way ;; - core.async (golang-like async processing) ;; - core.match (pattern-matching) ;; - clojure.spec (data specification) ;; BONUS! ;; ;; CLOJURE.SPEC (s/def ::sourcing_strategy_key #{ "PI:KEY:<KEY>END_PI" "PI:KEY:<KEY>END_PI" "PI:KEY:<KEY>END_PI"}) (s/def ::strategies (s/coll-of ::sourcing_strategy_key)) (comment (s/valid? ::sourcing_strategy_key "blahblah") (s/explain ::strategies ["OTHER" "AGENCIES" "blahblah"]) (s/explain-data ::sourcing_strategy_key "PI:KEY:<KEY>END_PI") (s/conform ::strategies ["CANDIDATE_SEARCH" "AGENCIES"]))
[ { "context": "ased\n lookup. See [the wiki](https://github.com/marick/suchwow/wiki/such.relational)\n for more about t", "end": 298, "score": 0.9994475841522217, "start": 292, "tag": "USERNAME", "value": "marick" }, { "context": "nsider this `xrel`:\n\n (def xrel [ {:first \\\"Brian\\\" :order 1 :count 4}\n {:first \\", "end": 1181, "score": 0.9961248636245728, "start": 1176, "tag": "NAME", "value": "Brian" }, { "context": "\" :order 1 :count 6}\n {:first \\\"Paul\\\" :order 1 :count 5}\n {:first \\", "end": 1291, "score": 0.8399131894111633, "start": 1287, "tag": "NAME", "value": "Paul" }, { "context": "\" :order 1 :count 5}\n {:first \\\"Sophie\\\" :order 2 :count 9} ])\n\n Then `(index xrel [:", "end": 1348, "score": 0.9839024543762207, "start": 1342, "tag": "NAME", "value": "Sophie" }, { "context": "r])` is:\n\n {{:order 1}\n #{{:first \\\"Paul\\\", :order 1, :count 5}\n {:first \\\"Dawn\\", "end": 1456, "score": 0.9140253067016602, "start": 1452, "tag": "NAME", "value": "Paul" }, { "context": "\"Dawn\\\", :order 1, :count 6}\n {:first \\\"Brian\\\", :order 1, :count 4}},\n {:order 2}\n ", "end": 1555, "score": 0.9918792247772217, "start": 1550, "tag": "NAME", "value": "Brian" }, { "context": "t 4}},\n {:order 2}\n #{{:first \\\"Sophie\\\", :order 2, :count 9}}}\n\n ... 
and `(index xre", "end": 1630, "score": 0.9278425574302673, "start": 1624, "tag": "NAME", "value": "Sophie" }, { "context": " is:\n\n {{:order 1, :count 4} #{ {:first \\\"Brian\\\", :order 1, :count 4} },\n {:order 1, :cou", "end": 1754, "score": 0.9947082996368408, "start": 1749, "tag": "NAME", "value": "Brian" }, { "context": "4} },\n {:order 1, :count 6} #{ {:first \\\"Dawn\\\", :order 1, :count 6} },\n {:order 1, :cou", "end": 1829, "score": 0.9995220899581909, "start": 1825, "tag": "NAME", "value": "Dawn" }, { "context": "6} },\n {:order 1, :count 5} #{ {:first \\\"Paul\\\", :order 1, :count 5} },\n {:order 2, :cou", "end": 1904, "score": 0.9996465444564819, "start": 1900, "tag": "NAME", "value": "Paul" }, { "context": "5} },\n {:order 2, :count 9} #{ {:first \\\"Sophie\\\", :order 2, :count 9} }}\n\n If one of the `xrel", "end": 1981, "score": 0.9996599555015564, "start": 1975, "tag": "NAME", "value": "Sophie" }, { "context": "in an index.\n\n See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for examples.\n\n `k", "end": 9762, "score": 0.9988987445831299, "start": 9756, "tag": "USERNAME", "value": "marick" }, { "context": "-on]] index.\n\n See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for examples.\n\n Th", "end": 11619, "score": 0.9944250583648682, "start": 11613, "tag": "USERNAME", "value": "marick" }, { "context": "dp 1690}])\n\n (See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for details.)\n \"\n ", "end": 14774, "score": 0.9993616342544556, "start": 14768, "tag": "USERNAME", "value": "marick" } ]
src/such/relational.clj
marick/suchwow
70
(ns such.relational "This namespace provides two things: better documentation for relational functions in `clojure.set`, and an *experimental* set of functions for \"pre-joining\" relational tables for a more tree-structured or path-based lookup. See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for more about the latter. The API for the experimental functions may change without triggering a [semver](http://semver.org/) major number change." (:require [clojure.set :as set] [such.better-doc :as doc] [such.maps :as map] [such.imperfection :refer :all] [such.shorthand :refer :all] [such.wrongness :refer [boom!]] [such.metadata :as meta]) (:refer-clojure :exclude [any?])) (doc/update-and-make-local-copy! #'clojure.set/index "`xrel` is a collection of maps; consider it the result of an SQL SELECT. `ks` is a collection of values assumed to be keys of the maps (think table columns). The result maps from particular key-value pairs to a set of all the maps in `xrel` that contain them. Consider this `xrel`: (def xrel [ {:first \"Brian\" :order 1 :count 4} {:first \"Dawn\" :order 1 :count 6} {:first \"Paul\" :order 1 :count 5} {:first \"Sophie\" :order 2 :count 9} ]) Then `(index xrel [:order])` is: {{:order 1} #{{:first \"Paul\", :order 1, :count 5} {:first \"Dawn\", :order 1, :count 6} {:first \"Brian\", :order 1, :count 4}}, {:order 2} #{{:first \"Sophie\", :order 2, :count 9}}} ... and `(index xrel [:order :count])` is: {{:order 1, :count 4} #{ {:first \"Brian\", :order 1, :count 4} }, {:order 1, :count 6} #{ {:first \"Dawn\", :order 1, :count 6} }, {:order 1, :count 5} #{ {:first \"Paul\", :order 1, :count 5} }, {:order 2, :count 9} #{ {:first \"Sophie\", :order 2, :count 9} }} If one of the `xrel` maps doesn't have an key, it is assigned to an index without that key. 
Consider this `xrel`: (def xrel [ {:a 1, :b 1} {:a 1} {:b 1} {:c 1}]) Then `(index xrel [:a b])` is: { {:a 1, :b 1} #{ {:a 1 :b 1} } {:a 1 } #{ {:a 1} } { :b 1} #{ {:b 1} } { } #{ {:c 1} }}) ") (doc/update-and-make-local-copy! #'clojure.set/join "`xrel` and `yrel` are collections of maps (think SQL SELECT). In the first form, produces the [natural join](https://en.wikipedia.org/wiki/Join_%28SQL%29#Natural_join). That is, it joins on the shared keys. In the following, `:b` is shared: (def has-a-and-b [{:a 1, :b 2} {:a 2, :b 1} {:a 2, :b 2}]) (def has-b-and-c [{:b 1, :c 2} {:b 2, :c 1} {:b 2, :c 2}]) (join has-a-and-b has-b-and-c) => #{{:a 1, :b 2, :c 1} {:a 1, :b 2, :c 2} {:a 2, :b 1, :c 2} {:a 2, :b 2, :c 1} {:a 2, :b 2, :c 2}}} Alternately, you can use a map to describe which left-hand-side keys should be considered the same as which right-hand-side keys. In the above case, the sharing could be made explicit with `(join has-a-and-b has-b-and-c {:b :b})`. A more likely example is one where the two relations have slightly different \"b\" keys, like this: (def has-a-and-b [{:a 1, :b 2} {:a 2, :b 1} {:a 2, :b 2}]) (def has-b-and-c [{:blike 1, :c 2} {:blike 2, :c 1} {:blike 2, :c 2}]) In such a case, the join would look like this: (join has-a-and-b has-b-and-c {:b :blike}) => #{{:a 1, :b 2, :blike 2, :c 1} {:a 1, :b 2, :blike 2, :c 2} {:a 2, :b 1, :blike 1, :c 2} {:a 2, :b 2, :blike 2, :c 1} {:a 2, :b 2, :blike 2, :c 2}} Notice that the `:b` and `:blike` keys are both included. The join when there are no keys shared is the cross-product of the relations. (clojure.set/join [{:a 1} {:a 2}] [{:b 1} {:b 2}]) => #{{:a 1, :b 2} {:a 2, :b 1} {:a 1, :b 1} {:a 2, :b 2}} The behavior when maps are missing keys is probably not something you should depend on. ") (doc/update-and-make-local-copy! #'clojure.set/project "`xrel` is a collection of maps (think SQL `SELECT *`). This function produces a set of maps, each of which contains only the keys in `ks`. 
(project [{:a 1, :b 1} {:a 2, :b 2}] [:b]) => #{{:b 1} {:b 2}} `project` differs from `(map #(select-keys % ks) ...)` in two ways: 1. It returns a set, rather than a lazy sequence. 2. Any metadata on the original `xrel` is preserved. (It shares this behavior with [[rename]] but with no other relational functions.) ") (doc/update-and-make-local-copy! #'clojure.set/rename "`xrel` is a collection of maps. Transform each map according to the keys and values in `kmap`. Each map key that matches a `kmap` key is replaced with that `kmap` key's value. (rename [{:a 1, :b 2}] {:b :replacement}) => #{{:a 1, :replacement 2}} `rename` differs from `(map #(set/rename-keys % kmap) ...)` in two ways: 1. It returns a set, rather than a lazy sequence. 2. Any metadata on the original `xrel` is preserved. (It shares this behavior with [[project]] but with no other relational functions.) ") ;;; Extensions (defn- force-sequential [v] (if (sequential? v) v (vector v))) (defn- mkfn:key-for-index "Given [:x :y], produce a function that takes [1 2] and returns {:x 1 :y 2}" [map-keys] (fn [map-values] (apply hash-map (interleave map-keys map-values)))) (defn- multi-get [kvs keyseq] "(multi-get {:x 1, :y 2, :z 3} [:x :z]) => [1 3]" (vals (select-keys kvs (force-sequential keyseq)))) (defn- prefix-all-keys [kvs prefix] (letfn [(prefixer [k] (-> (str (name prefix) (name k)) (cond-> (keyword? k) keyword)))] (let [translation (apply hash-map (interleave (keys kvs) (map prefixer (keys kvs))))] (set/rename-keys kvs translation)))) (defn- option-controlled-merge [old new options] (if-let [destination (:into options)] (let [current (or (get old destination) []) extended (into current new)] (assoc old destination extended)) (merge old new))) ;; Use of indexes is controlled by metadata (defn- one-to-one-index? [index] (= :one-to-one (meta/get index ::type))) (defn- one-to-many-index? 
[index] (= :one-to-many (meta/get index ::type))) (defn- index-keyseq [index] (meta/get index ::keyseq)) (defn- with-one-to-one-metadata [index keyseq] (meta/assoc index ::type :one-to-one ::keyseq keyseq ; the keys this is an index on (a singleton like [:id] ;; convert a singleton sequence (a value like `[5]`) into the format ;; clojure.set/index wants: `{:id 5}` ::key-maker (mkfn:key-for-index keyseq) ::value-handler first ; the result is always a set containing one value ::key-selector select-keys ; how to pick a smaller (projected map) ::prefix-adder prefix-all-keys)) (defn- with-one-to-many-metadata [index keyseq] (meta/assoc index ::type :one-to-many ::keyseq keyseq ::key-maker (mkfn:key-for-index keyseq) ::value-handler identity ; multiple values are returned ::key-selector (fn [value keyseq] (mapv #(select-keys % keyseq) value)) ::prefix-adder (fn [value prefix] (mapv #(prefix-all-keys % prefix) value)))) ;;;; Public (defn one-to-one-index-on "`table` should be a sequence of maps. `keyseq` is either a single value (corresponding to a traditional `:id` or `:pk` entry) or a sequence of values (corresponding to a compound key). The resulting index provides fast access to individual maps. (def index:traditional (one-to-one-index-on table :id)) (index-select 5 :using index:traditional :keys [:key1 :key2]) (def index:compound (one-to-one-index-on table [\"intkey\" \"strkey\"))) (index-select [4 \"dawn\"] :using index:compound) Note that keys need not be Clojure keywords. " [table keyseq] (if (sequential? keyseq) (-> table (index keyseq) (with-one-to-one-metadata keyseq)) (one-to-one-index-on table [keyseq]))) (defn one-to-many-index-on "`table` should be a sequence of maps. `keyseq` is either a single value (corresponding to a traditional `:id` or `:pk` entry) or a sequence of values (corresponding to a compound key). The resulting index provides fast retrieval of vectors of matching maps. 
(def index:traditional (one-to-many-index-on table :id)) (index-select 5 :using index:traditional :keys [:key-i-want]) ; a vector of maps (def index:compound (one-to-many-index-on table [\"intkey\" \"strkey\"))) (index-select [4 \"dawn\"] :using index:compound) ; a vector of maps Keys may be either Clojure keywords or strings. " [table keyseq] (if (sequential? keyseq) (-> table (index keyseq) (with-one-to-many-metadata keyseq)) (one-to-many-index-on table [keyseq]))) (defn index-select "Produce a map by looking a key up in an index. See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for examples. `key` is a unique or compound key that's been indexed with [[one-to-one-index-on]] or [[one-to-many-index-on]]. The `options` may be given as N keys and values following `key` (Smalltalk style) or as a single map. They are: :using <index> (required) The index to use. :keys <[keys...]> (optional) Keys you're interested in (default is all of them) :prefix <prefix> (optional) Prepend the given prefix to all the keys in the selected map. The prefix may be either a string or keyword. The resulting key will be of the same type (string or keyword) as the original. The return value depends on the index. If it is `one-to-one`, a map is returned. If it is `one-to-many`, a vector of maps is returned. " ([key options] (assert (contains? options :using) "You must provide an index with `:using`.") (when-let [keys (options :keys)] (assert (vector? 
keys) ":keys takes a vector as an argument")) (let [index (get options :using) [key-maker value-handler key-selector prefix-adder] (mapv #(meta/get index %) [::key-maker ::value-handler ::key-selector ::prefix-adder]) [desired-keys prefix] (mapv #(get options %) [:keys :prefix])] (-> index (get (key-maker (force-sequential key))) value-handler (cond-> desired-keys (key-selector desired-keys)) (cond-> prefix (prefix-adder prefix))))) ([key k v & rest] ; k and v are to give this different arity than above (index-select key (apply hash-map k v rest)))) (defn extend-map "Add more key/value pairs to `kvs`. They are found by looking up values in a [[one-to-one-index-on]] or [[one-to-many-index-on]] index. See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for examples. The `options` control what maps are returned and how they're merged into the original `kvs`. They may be given as N keys and values following the `kvs` argument (Smalltalk style) or as a single map. They are: :using <index> (required) The index to use. :via <key> (required) A single foreign key or a sequence of them that is used to look up a map in the <index>. :into <key> (optional, relevant only to a one-to-many map). Since a one-to-many map can't be merged into the `kvs`, it has to be added \"under\" (as the value of) a particular `key`. :keys [key1 key2 key3 ...] (optional) Keys you're interested in (default is all of them) :prefix <prefix> (optional) Prepend the given prefix to all the keys in the selected map. The prefix may be either a string or keyword. The resulting key will be of the same type (string or keyword) as the original. " ([kvs options] (assert (contains? options :via) "You must provide a foreign key with `:via`.") (assert (contains? options :using) "You must provide an index with `:using`.") (when (one-to-many-index? (:using options)) (assert (contains? 
options :into) "When using a one-to-many index, you must provide `:into`")) (let [foreign-key-value (multi-get kvs (:via options))] (option-controlled-merge kvs (index-select foreign-key-value options) options))) ([kvs k v & rest] ; k and v are to give this different arity than above (extend-map kvs (apply hash-map k v rest)))) ;;;; (defn- select-along-path "This is not really intended for public use. Note: doesn't handle compound keys." [val starting-index & foreign-index-pairs] (loop [many-return-values? false result [{::sentinel-key val}] [foreign-key next-index & remainder :as all] (concat [::sentinel-key starting-index] foreign-index-pairs)] (cond (empty? all) result ; note that even 1-1 indexes return a set result. (one-to-one-index? next-index) (recur many-return-values? (set (map #(index-select (get % foreign-key) :using next-index) result)) remainder) :else (recur true (set (mapcat #(index-select (get % foreign-key) :using next-index) result)) remainder)))) (defn combined-index-on "Create an index that maps directly from values in the starting index to values in the last of the list of indexes, following keys to move from index to index. Example: (let [index:countries-by-person-id (subject/combined-index-on index:rulership-by-person-id :country_code index:country-by-country-code)] (subject/index-select 1 :using index:countries-by-person-id :keys [:gdp]) => [{:gdp 1690}]) (See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for details.) " {:arglists '([starting-index foreign-key next-index ...])} [starting-index & path-pairs] (let [raw-index (reduce (fn [so-far key-and-value-map] (let [starting-val (multi-get key-and-value-map (index-keyseq starting-index))] (assoc so-far key-and-value-map (apply select-along-path starting-val starting-index path-pairs)))) {} (keys starting-index)) ;; Bit of sliminess here in that we're checking the metadata on non-indexes metadata-adder (if (any? one-to-many-index? 
(cons starting-index path-pairs)) with-one-to-many-metadata with-one-to-one-metadata)] (metadata-adder raw-index (index-keyseq starting-index))))
29740
(ns such.relational "This namespace provides two things: better documentation for relational functions in `clojure.set`, and an *experimental* set of functions for \"pre-joining\" relational tables for a more tree-structured or path-based lookup. See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for more about the latter. The API for the experimental functions may change without triggering a [semver](http://semver.org/) major number change." (:require [clojure.set :as set] [such.better-doc :as doc] [such.maps :as map] [such.imperfection :refer :all] [such.shorthand :refer :all] [such.wrongness :refer [boom!]] [such.metadata :as meta]) (:refer-clojure :exclude [any?])) (doc/update-and-make-local-copy! #'clojure.set/index "`xrel` is a collection of maps; consider it the result of an SQL SELECT. `ks` is a collection of values assumed to be keys of the maps (think table columns). The result maps from particular key-value pairs to a set of all the maps in `xrel` that contain them. Consider this `xrel`: (def xrel [ {:first \"<NAME>\" :order 1 :count 4} {:first \"Dawn\" :order 1 :count 6} {:first \"<NAME>\" :order 1 :count 5} {:first \"<NAME>\" :order 2 :count 9} ]) Then `(index xrel [:order])` is: {{:order 1} #{{:first \"<NAME>\", :order 1, :count 5} {:first \"Dawn\", :order 1, :count 6} {:first \"<NAME>\", :order 1, :count 4}}, {:order 2} #{{:first \"<NAME>\", :order 2, :count 9}}} ... and `(index xrel [:order :count])` is: {{:order 1, :count 4} #{ {:first \"<NAME>\", :order 1, :count 4} }, {:order 1, :count 6} #{ {:first \"<NAME>\", :order 1, :count 6} }, {:order 1, :count 5} #{ {:first \"<NAME>\", :order 1, :count 5} }, {:order 2, :count 9} #{ {:first \"<NAME>\", :order 2, :count 9} }} If one of the `xrel` maps doesn't have an key, it is assigned to an index without that key. 
Consider this `xrel`: (def xrel [ {:a 1, :b 1} {:a 1} {:b 1} {:c 1}]) Then `(index xrel [:a b])` is: { {:a 1, :b 1} #{ {:a 1 :b 1} } {:a 1 } #{ {:a 1} } { :b 1} #{ {:b 1} } { } #{ {:c 1} }}) ") (doc/update-and-make-local-copy! #'clojure.set/join "`xrel` and `yrel` are collections of maps (think SQL SELECT). In the first form, produces the [natural join](https://en.wikipedia.org/wiki/Join_%28SQL%29#Natural_join). That is, it joins on the shared keys. In the following, `:b` is shared: (def has-a-and-b [{:a 1, :b 2} {:a 2, :b 1} {:a 2, :b 2}]) (def has-b-and-c [{:b 1, :c 2} {:b 2, :c 1} {:b 2, :c 2}]) (join has-a-and-b has-b-and-c) => #{{:a 1, :b 2, :c 1} {:a 1, :b 2, :c 2} {:a 2, :b 1, :c 2} {:a 2, :b 2, :c 1} {:a 2, :b 2, :c 2}}} Alternately, you can use a map to describe which left-hand-side keys should be considered the same as which right-hand-side keys. In the above case, the sharing could be made explicit with `(join has-a-and-b has-b-and-c {:b :b})`. A more likely example is one where the two relations have slightly different \"b\" keys, like this: (def has-a-and-b [{:a 1, :b 2} {:a 2, :b 1} {:a 2, :b 2}]) (def has-b-and-c [{:blike 1, :c 2} {:blike 2, :c 1} {:blike 2, :c 2}]) In such a case, the join would look like this: (join has-a-and-b has-b-and-c {:b :blike}) => #{{:a 1, :b 2, :blike 2, :c 1} {:a 1, :b 2, :blike 2, :c 2} {:a 2, :b 1, :blike 1, :c 2} {:a 2, :b 2, :blike 2, :c 1} {:a 2, :b 2, :blike 2, :c 2}} Notice that the `:b` and `:blike` keys are both included. The join when there are no keys shared is the cross-product of the relations. (clojure.set/join [{:a 1} {:a 2}] [{:b 1} {:b 2}]) => #{{:a 1, :b 2} {:a 2, :b 1} {:a 1, :b 1} {:a 2, :b 2}} The behavior when maps are missing keys is probably not something you should depend on. ") (doc/update-and-make-local-copy! #'clojure.set/project "`xrel` is a collection of maps (think SQL `SELECT *`). This function produces a set of maps, each of which contains only the keys in `ks`. 
(project [{:a 1, :b 1} {:a 2, :b 2}] [:b]) => #{{:b 1} {:b 2}} `project` differs from `(map #(select-keys % ks) ...)` in two ways: 1. It returns a set, rather than a lazy sequence. 2. Any metadata on the original `xrel` is preserved. (It shares this behavior with [[rename]] but with no other relational functions.) ") (doc/update-and-make-local-copy! #'clojure.set/rename "`xrel` is a collection of maps. Transform each map according to the keys and values in `kmap`. Each map key that matches a `kmap` key is replaced with that `kmap` key's value. (rename [{:a 1, :b 2}] {:b :replacement}) => #{{:a 1, :replacement 2}} `rename` differs from `(map #(set/rename-keys % kmap) ...)` in two ways: 1. It returns a set, rather than a lazy sequence. 2. Any metadata on the original `xrel` is preserved. (It shares this behavior with [[project]] but with no other relational functions.) ") ;;; Extensions (defn- force-sequential [v] (if (sequential? v) v (vector v))) (defn- mkfn:key-for-index "Given [:x :y], produce a function that takes [1 2] and returns {:x 1 :y 2}" [map-keys] (fn [map-values] (apply hash-map (interleave map-keys map-values)))) (defn- multi-get [kvs keyseq] "(multi-get {:x 1, :y 2, :z 3} [:x :z]) => [1 3]" (vals (select-keys kvs (force-sequential keyseq)))) (defn- prefix-all-keys [kvs prefix] (letfn [(prefixer [k] (-> (str (name prefix) (name k)) (cond-> (keyword? k) keyword)))] (let [translation (apply hash-map (interleave (keys kvs) (map prefixer (keys kvs))))] (set/rename-keys kvs translation)))) (defn- option-controlled-merge [old new options] (if-let [destination (:into options)] (let [current (or (get old destination) []) extended (into current new)] (assoc old destination extended)) (merge old new))) ;; Use of indexes is controlled by metadata (defn- one-to-one-index? [index] (= :one-to-one (meta/get index ::type))) (defn- one-to-many-index? 
[index] (= :one-to-many (meta/get index ::type))) (defn- index-keyseq [index] (meta/get index ::keyseq)) (defn- with-one-to-one-metadata [index keyseq] (meta/assoc index ::type :one-to-one ::keyseq keyseq ; the keys this is an index on (a singleton like [:id] ;; convert a singleton sequence (a value like `[5]`) into the format ;; clojure.set/index wants: `{:id 5}` ::key-maker (mkfn:key-for-index keyseq) ::value-handler first ; the result is always a set containing one value ::key-selector select-keys ; how to pick a smaller (projected map) ::prefix-adder prefix-all-keys)) (defn- with-one-to-many-metadata [index keyseq] (meta/assoc index ::type :one-to-many ::keyseq keyseq ::key-maker (mkfn:key-for-index keyseq) ::value-handler identity ; multiple values are returned ::key-selector (fn [value keyseq] (mapv #(select-keys % keyseq) value)) ::prefix-adder (fn [value prefix] (mapv #(prefix-all-keys % prefix) value)))) ;;;; Public (defn one-to-one-index-on "`table` should be a sequence of maps. `keyseq` is either a single value (corresponding to a traditional `:id` or `:pk` entry) or a sequence of values (corresponding to a compound key). The resulting index provides fast access to individual maps. (def index:traditional (one-to-one-index-on table :id)) (index-select 5 :using index:traditional :keys [:key1 :key2]) (def index:compound (one-to-one-index-on table [\"intkey\" \"strkey\"))) (index-select [4 \"dawn\"] :using index:compound) Note that keys need not be Clojure keywords. " [table keyseq] (if (sequential? keyseq) (-> table (index keyseq) (with-one-to-one-metadata keyseq)) (one-to-one-index-on table [keyseq]))) (defn one-to-many-index-on "`table` should be a sequence of maps. `keyseq` is either a single value (corresponding to a traditional `:id` or `:pk` entry) or a sequence of values (corresponding to a compound key). The resulting index provides fast retrieval of vectors of matching maps. 
(def index:traditional (one-to-many-index-on table :id)) (index-select 5 :using index:traditional :keys [:key-i-want]) ; a vector of maps (def index:compound (one-to-many-index-on table [\"intkey\" \"strkey\"))) (index-select [4 \"dawn\"] :using index:compound) ; a vector of maps Keys may be either Clojure keywords or strings. " [table keyseq] (if (sequential? keyseq) (-> table (index keyseq) (with-one-to-many-metadata keyseq)) (one-to-many-index-on table [keyseq]))) (defn index-select "Produce a map by looking a key up in an index. See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for examples. `key` is a unique or compound key that's been indexed with [[one-to-one-index-on]] or [[one-to-many-index-on]]. The `options` may be given as N keys and values following `key` (Smalltalk style) or as a single map. They are: :using <index> (required) The index to use. :keys <[keys...]> (optional) Keys you're interested in (default is all of them) :prefix <prefix> (optional) Prepend the given prefix to all the keys in the selected map. The prefix may be either a string or keyword. The resulting key will be of the same type (string or keyword) as the original. The return value depends on the index. If it is `one-to-one`, a map is returned. If it is `one-to-many`, a vector of maps is returned. " ([key options] (assert (contains? options :using) "You must provide an index with `:using`.") (when-let [keys (options :keys)] (assert (vector? 
keys) ":keys takes a vector as an argument")) (let [index (get options :using) [key-maker value-handler key-selector prefix-adder] (mapv #(meta/get index %) [::key-maker ::value-handler ::key-selector ::prefix-adder]) [desired-keys prefix] (mapv #(get options %) [:keys :prefix])] (-> index (get (key-maker (force-sequential key))) value-handler (cond-> desired-keys (key-selector desired-keys)) (cond-> prefix (prefix-adder prefix))))) ([key k v & rest] ; k and v are to give this different arity than above (index-select key (apply hash-map k v rest)))) (defn extend-map "Add more key/value pairs to `kvs`. They are found by looking up values in a [[one-to-one-index-on]] or [[one-to-many-index-on]] index. See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for examples. The `options` control what maps are returned and how they're merged into the original `kvs`. They may be given as N keys and values following the `kvs` argument (Smalltalk style) or as a single map. They are: :using <index> (required) The index to use. :via <key> (required) A single foreign key or a sequence of them that is used to look up a map in the <index>. :into <key> (optional, relevant only to a one-to-many map). Since a one-to-many map can't be merged into the `kvs`, it has to be added \"under\" (as the value of) a particular `key`. :keys [key1 key2 key3 ...] (optional) Keys you're interested in (default is all of them) :prefix <prefix> (optional) Prepend the given prefix to all the keys in the selected map. The prefix may be either a string or keyword. The resulting key will be of the same type (string or keyword) as the original. " ([kvs options] (assert (contains? options :via) "You must provide a foreign key with `:via`.") (assert (contains? options :using) "You must provide an index with `:using`.") (when (one-to-many-index? (:using options)) (assert (contains? 
options :into) "When using a one-to-many index, you must provide `:into`")) (let [foreign-key-value (multi-get kvs (:via options))] (option-controlled-merge kvs (index-select foreign-key-value options) options))) ([kvs k v & rest] ; k and v are to give this different arity than above (extend-map kvs (apply hash-map k v rest)))) ;;;; (defn- select-along-path "This is not really intended for public use. Note: doesn't handle compound keys." [val starting-index & foreign-index-pairs] (loop [many-return-values? false result [{::sentinel-key val}] [foreign-key next-index & remainder :as all] (concat [::sentinel-key starting-index] foreign-index-pairs)] (cond (empty? all) result ; note that even 1-1 indexes return a set result. (one-to-one-index? next-index) (recur many-return-values? (set (map #(index-select (get % foreign-key) :using next-index) result)) remainder) :else (recur true (set (mapcat #(index-select (get % foreign-key) :using next-index) result)) remainder)))) (defn combined-index-on "Create an index that maps directly from values in the starting index to values in the last of the list of indexes, following keys to move from index to index. Example: (let [index:countries-by-person-id (subject/combined-index-on index:rulership-by-person-id :country_code index:country-by-country-code)] (subject/index-select 1 :using index:countries-by-person-id :keys [:gdp]) => [{:gdp 1690}]) (See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for details.) " {:arglists '([starting-index foreign-key next-index ...])} [starting-index & path-pairs] (let [raw-index (reduce (fn [so-far key-and-value-map] (let [starting-val (multi-get key-and-value-map (index-keyseq starting-index))] (assoc so-far key-and-value-map (apply select-along-path starting-val starting-index path-pairs)))) {} (keys starting-index)) ;; Bit of sliminess here in that we're checking the metadata on non-indexes metadata-adder (if (any? one-to-many-index? 
(cons starting-index path-pairs)) with-one-to-many-metadata with-one-to-one-metadata)] (metadata-adder raw-index (index-keyseq starting-index))))
true
(ns such.relational "This namespace provides two things: better documentation for relational functions in `clojure.set`, and an *experimental* set of functions for \"pre-joining\" relational tables for a more tree-structured or path-based lookup. See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for more about the latter. The API for the experimental functions may change without triggering a [semver](http://semver.org/) major number change." (:require [clojure.set :as set] [such.better-doc :as doc] [such.maps :as map] [such.imperfection :refer :all] [such.shorthand :refer :all] [such.wrongness :refer [boom!]] [such.metadata :as meta]) (:refer-clojure :exclude [any?])) (doc/update-and-make-local-copy! #'clojure.set/index "`xrel` is a collection of maps; consider it the result of an SQL SELECT. `ks` is a collection of values assumed to be keys of the maps (think table columns). The result maps from particular key-value pairs to a set of all the maps in `xrel` that contain them. Consider this `xrel`: (def xrel [ {:first \"PI:NAME:<NAME>END_PI\" :order 1 :count 4} {:first \"Dawn\" :order 1 :count 6} {:first \"PI:NAME:<NAME>END_PI\" :order 1 :count 5} {:first \"PI:NAME:<NAME>END_PI\" :order 2 :count 9} ]) Then `(index xrel [:order])` is: {{:order 1} #{{:first \"PI:NAME:<NAME>END_PI\", :order 1, :count 5} {:first \"Dawn\", :order 1, :count 6} {:first \"PI:NAME:<NAME>END_PI\", :order 1, :count 4}}, {:order 2} #{{:first \"PI:NAME:<NAME>END_PI\", :order 2, :count 9}}} ... and `(index xrel [:order :count])` is: {{:order 1, :count 4} #{ {:first \"PI:NAME:<NAME>END_PI\", :order 1, :count 4} }, {:order 1, :count 6} #{ {:first \"PI:NAME:<NAME>END_PI\", :order 1, :count 6} }, {:order 1, :count 5} #{ {:first \"PI:NAME:<NAME>END_PI\", :order 1, :count 5} }, {:order 2, :count 9} #{ {:first \"PI:NAME:<NAME>END_PI\", :order 2, :count 9} }} If one of the `xrel` maps doesn't have an key, it is assigned to an index without that key. 
Consider this `xrel`: (def xrel [ {:a 1, :b 1} {:a 1} {:b 1} {:c 1}]) Then `(index xrel [:a b])` is: { {:a 1, :b 1} #{ {:a 1 :b 1} } {:a 1 } #{ {:a 1} } { :b 1} #{ {:b 1} } { } #{ {:c 1} }}) ") (doc/update-and-make-local-copy! #'clojure.set/join "`xrel` and `yrel` are collections of maps (think SQL SELECT). In the first form, produces the [natural join](https://en.wikipedia.org/wiki/Join_%28SQL%29#Natural_join). That is, it joins on the shared keys. In the following, `:b` is shared: (def has-a-and-b [{:a 1, :b 2} {:a 2, :b 1} {:a 2, :b 2}]) (def has-b-and-c [{:b 1, :c 2} {:b 2, :c 1} {:b 2, :c 2}]) (join has-a-and-b has-b-and-c) => #{{:a 1, :b 2, :c 1} {:a 1, :b 2, :c 2} {:a 2, :b 1, :c 2} {:a 2, :b 2, :c 1} {:a 2, :b 2, :c 2}}} Alternately, you can use a map to describe which left-hand-side keys should be considered the same as which right-hand-side keys. In the above case, the sharing could be made explicit with `(join has-a-and-b has-b-and-c {:b :b})`. A more likely example is one where the two relations have slightly different \"b\" keys, like this: (def has-a-and-b [{:a 1, :b 2} {:a 2, :b 1} {:a 2, :b 2}]) (def has-b-and-c [{:blike 1, :c 2} {:blike 2, :c 1} {:blike 2, :c 2}]) In such a case, the join would look like this: (join has-a-and-b has-b-and-c {:b :blike}) => #{{:a 1, :b 2, :blike 2, :c 1} {:a 1, :b 2, :blike 2, :c 2} {:a 2, :b 1, :blike 1, :c 2} {:a 2, :b 2, :blike 2, :c 1} {:a 2, :b 2, :blike 2, :c 2}} Notice that the `:b` and `:blike` keys are both included. The join when there are no keys shared is the cross-product of the relations. (clojure.set/join [{:a 1} {:a 2}] [{:b 1} {:b 2}]) => #{{:a 1, :b 2} {:a 2, :b 1} {:a 1, :b 1} {:a 2, :b 2}} The behavior when maps are missing keys is probably not something you should depend on. ") (doc/update-and-make-local-copy! #'clojure.set/project "`xrel` is a collection of maps (think SQL `SELECT *`). This function produces a set of maps, each of which contains only the keys in `ks`. 
(project [{:a 1, :b 1} {:a 2, :b 2}] [:b]) => #{{:b 1} {:b 2}} `project` differs from `(map #(select-keys % ks) ...)` in two ways: 1. It returns a set, rather than a lazy sequence. 2. Any metadata on the original `xrel` is preserved. (It shares this behavior with [[rename]] but with no other relational functions.) ") (doc/update-and-make-local-copy! #'clojure.set/rename "`xrel` is a collection of maps. Transform each map according to the keys and values in `kmap`. Each map key that matches a `kmap` key is replaced with that `kmap` key's value. (rename [{:a 1, :b 2}] {:b :replacement}) => #{{:a 1, :replacement 2}} `rename` differs from `(map #(set/rename-keys % kmap) ...)` in two ways: 1. It returns a set, rather than a lazy sequence. 2. Any metadata on the original `xrel` is preserved. (It shares this behavior with [[project]] but with no other relational functions.) ") ;;; Extensions (defn- force-sequential [v] (if (sequential? v) v (vector v))) (defn- mkfn:key-for-index "Given [:x :y], produce a function that takes [1 2] and returns {:x 1 :y 2}" [map-keys] (fn [map-values] (apply hash-map (interleave map-keys map-values)))) (defn- multi-get [kvs keyseq] "(multi-get {:x 1, :y 2, :z 3} [:x :z]) => [1 3]" (vals (select-keys kvs (force-sequential keyseq)))) (defn- prefix-all-keys [kvs prefix] (letfn [(prefixer [k] (-> (str (name prefix) (name k)) (cond-> (keyword? k) keyword)))] (let [translation (apply hash-map (interleave (keys kvs) (map prefixer (keys kvs))))] (set/rename-keys kvs translation)))) (defn- option-controlled-merge [old new options] (if-let [destination (:into options)] (let [current (or (get old destination) []) extended (into current new)] (assoc old destination extended)) (merge old new))) ;; Use of indexes is controlled by metadata (defn- one-to-one-index? [index] (= :one-to-one (meta/get index ::type))) (defn- one-to-many-index? 
[index] (= :one-to-many (meta/get index ::type))) (defn- index-keyseq [index] (meta/get index ::keyseq)) (defn- with-one-to-one-metadata [index keyseq] (meta/assoc index ::type :one-to-one ::keyseq keyseq ; the keys this is an index on (a singleton like [:id] ;; convert a singleton sequence (a value like `[5]`) into the format ;; clojure.set/index wants: `{:id 5}` ::key-maker (mkfn:key-for-index keyseq) ::value-handler first ; the result is always a set containing one value ::key-selector select-keys ; how to pick a smaller (projected map) ::prefix-adder prefix-all-keys)) (defn- with-one-to-many-metadata [index keyseq] (meta/assoc index ::type :one-to-many ::keyseq keyseq ::key-maker (mkfn:key-for-index keyseq) ::value-handler identity ; multiple values are returned ::key-selector (fn [value keyseq] (mapv #(select-keys % keyseq) value)) ::prefix-adder (fn [value prefix] (mapv #(prefix-all-keys % prefix) value)))) ;;;; Public (defn one-to-one-index-on "`table` should be a sequence of maps. `keyseq` is either a single value (corresponding to a traditional `:id` or `:pk` entry) or a sequence of values (corresponding to a compound key). The resulting index provides fast access to individual maps. (def index:traditional (one-to-one-index-on table :id)) (index-select 5 :using index:traditional :keys [:key1 :key2]) (def index:compound (one-to-one-index-on table [\"intkey\" \"strkey\"))) (index-select [4 \"dawn\"] :using index:compound) Note that keys need not be Clojure keywords. " [table keyseq] (if (sequential? keyseq) (-> table (index keyseq) (with-one-to-one-metadata keyseq)) (one-to-one-index-on table [keyseq]))) (defn one-to-many-index-on "`table` should be a sequence of maps. `keyseq` is either a single value (corresponding to a traditional `:id` or `:pk` entry) or a sequence of values (corresponding to a compound key). The resulting index provides fast retrieval of vectors of matching maps. 
(def index:traditional (one-to-many-index-on table :id)) (index-select 5 :using index:traditional :keys [:key-i-want]) ; a vector of maps (def index:compound (one-to-many-index-on table [\"intkey\" \"strkey\"))) (index-select [4 \"dawn\"] :using index:compound) ; a vector of maps Keys may be either Clojure keywords or strings. " [table keyseq] (if (sequential? keyseq) (-> table (index keyseq) (with-one-to-many-metadata keyseq)) (one-to-many-index-on table [keyseq]))) (defn index-select "Produce a map by looking a key up in an index. See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for examples. `key` is a unique or compound key that's been indexed with [[one-to-one-index-on]] or [[one-to-many-index-on]]. The `options` may be given as N keys and values following `key` (Smalltalk style) or as a single map. They are: :using <index> (required) The index to use. :keys <[keys...]> (optional) Keys you're interested in (default is all of them) :prefix <prefix> (optional) Prepend the given prefix to all the keys in the selected map. The prefix may be either a string or keyword. The resulting key will be of the same type (string or keyword) as the original. The return value depends on the index. If it is `one-to-one`, a map is returned. If it is `one-to-many`, a vector of maps is returned. " ([key options] (assert (contains? options :using) "You must provide an index with `:using`.") (when-let [keys (options :keys)] (assert (vector? 
keys) ":keys takes a vector as an argument")) (let [index (get options :using) [key-maker value-handler key-selector prefix-adder] (mapv #(meta/get index %) [::key-maker ::value-handler ::key-selector ::prefix-adder]) [desired-keys prefix] (mapv #(get options %) [:keys :prefix])] (-> index (get (key-maker (force-sequential key))) value-handler (cond-> desired-keys (key-selector desired-keys)) (cond-> prefix (prefix-adder prefix))))) ([key k v & rest] ; k and v are to give this different arity than above (index-select key (apply hash-map k v rest)))) (defn extend-map "Add more key/value pairs to `kvs`. They are found by looking up values in a [[one-to-one-index-on]] or [[one-to-many-index-on]] index. See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for examples. The `options` control what maps are returned and how they're merged into the original `kvs`. They may be given as N keys and values following the `kvs` argument (Smalltalk style) or as a single map. They are: :using <index> (required) The index to use. :via <key> (required) A single foreign key or a sequence of them that is used to look up a map in the <index>. :into <key> (optional, relevant only to a one-to-many map). Since a one-to-many map can't be merged into the `kvs`, it has to be added \"under\" (as the value of) a particular `key`. :keys [key1 key2 key3 ...] (optional) Keys you're interested in (default is all of them) :prefix <prefix> (optional) Prepend the given prefix to all the keys in the selected map. The prefix may be either a string or keyword. The resulting key will be of the same type (string or keyword) as the original. " ([kvs options] (assert (contains? options :via) "You must provide a foreign key with `:via`.") (assert (contains? options :using) "You must provide an index with `:using`.") (when (one-to-many-index? (:using options)) (assert (contains? 
options :into) "When using a one-to-many index, you must provide `:into`")) (let [foreign-key-value (multi-get kvs (:via options))] (option-controlled-merge kvs (index-select foreign-key-value options) options))) ([kvs k v & rest] ; k and v are to give this different arity than above (extend-map kvs (apply hash-map k v rest)))) ;;;; (defn- select-along-path "This is not really intended for public use. Note: doesn't handle compound keys." [val starting-index & foreign-index-pairs] (loop [many-return-values? false result [{::sentinel-key val}] [foreign-key next-index & remainder :as all] (concat [::sentinel-key starting-index] foreign-index-pairs)] (cond (empty? all) result ; note that even 1-1 indexes return a set result. (one-to-one-index? next-index) (recur many-return-values? (set (map #(index-select (get % foreign-key) :using next-index) result)) remainder) :else (recur true (set (mapcat #(index-select (get % foreign-key) :using next-index) result)) remainder)))) (defn combined-index-on "Create an index that maps directly from values in the starting index to values in the last of the list of indexes, following keys to move from index to index. Example: (let [index:countries-by-person-id (subject/combined-index-on index:rulership-by-person-id :country_code index:country-by-country-code)] (subject/index-select 1 :using index:countries-by-person-id :keys [:gdp]) => [{:gdp 1690}]) (See [the wiki](https://github.com/marick/suchwow/wiki/such.relational) for details.) " {:arglists '([starting-index foreign-key next-index ...])} [starting-index & path-pairs] (let [raw-index (reduce (fn [so-far key-and-value-map] (let [starting-val (multi-get key-and-value-map (index-keyseq starting-index))] (assoc so-far key-and-value-map (apply select-along-path starting-val starting-index path-pairs)))) {} (keys starting-index)) ;; Bit of sliminess here in that we're checking the metadata on non-indexes metadata-adder (if (any? one-to-many-index? 
(cons starting-index path-pairs)) with-one-to-many-metadata with-one-to-one-metadata)] (metadata-adder raw-index (index-keyseq starting-index))))
[ { "context": " (c/add-data case :customer-info {:name \"Carl-Jan Granqvist\"\n :age ", "end": 8424, "score": 0.999889075756073, "start": 8406, "tag": "NAME", "value": "Carl-Jan Granqvist" }, { "context": "dd-data case :currently-owned-real-estate {:name \"Villa villerkulla\"})\n (do\n (is= (c/next-actions", "end": 8561, "score": 0.999883234500885, "start": 8544, "tag": "NAME", "value": "Villa villerkulla" }, { "context": "dd-data case :currently-owned-real-estate {:name \"Villa villerkulla\"})\n (c/add-data case :loan-details {", "end": 18214, "score": 0.999120831489563, "start": 18197, "tag": "NAME", "value": "Villa villerkulla" }, { "context": "dd-data case :currently-owned-real-estate {:name \"Villa villerkulla\"})\n (c/add-data case :loan-details {", "end": 18930, "score": 0.9998840093612671, "start": 18913, "tag": "NAME", "value": "Villa villerkulla" }, { "context": " :valuator \"Karl Anka\"})\n (is (c/action-allowed? case-grap", "end": 19531, "score": 0.9998801946640015, "start": 19522, "tag": "NAME", "value": "Karl Anka" } ]
test/dativity/core_test.cljc
agentbellnorm/dativity
196
(ns dativity.core-test (:require [ysera.test :refer [is= is is-not deftest testing]] [dativity.core :as c] [dativity.define :as d])) (def case-graph (d/create-model {:actions [:create-case :consent-to-personal-data-retrieval-and-storage :fetch-supplimentary-info :know-your-customer :add-economy :get-currently-owned-real-estate :add-loan-details :add-collateral-valuation :add-collateral :create-collateral-link :calculate-amortization :produce-credit-application-document :sign-credit-application-document] :data [:case-id :customer-id :customer-info :consent :know-your-customer-data :economy :currently-owned-real-estate :loan-details :collateral :collateral-valuation :collateral-link :amortization :credit-application-reference :credit-application-signature] :roles [:applicant :system :officer] :action-produces [[:add-collateral :collateral] [:add-collateral-valuation :collateral-valuation] [:add-economy :economy] [:add-loan-details :loan-details] [:calculate-amortization :amortization] [:consent-to-personal-data-retrieval-and-storage :consent] [:create-case :case-id] [:create-case :customer-id] [:create-collateral-link :collateral-link] [:fetch-supplimentary-info :customer-info] [:get-currently-owned-real-estate :currently-owned-real-estate] [:know-your-customer :know-your-customer-data] [:produce-credit-application-document :credit-application-reference] [:sign-credit-application-document :credit-application-signature]] :action-requires [[:add-collateral :case-id] [:add-collateral-valuation :collateral] [:add-economy :customer-id] [:add-loan-details :case-id] [:calculate-amortization :collateral-link] [:calculate-amortization :economy] [:consent-to-personal-data-retrieval-and-storage :customer-id] [:create-collateral-link :collateral] [:create-collateral-link :currently-owned-real-estate] [:create-collateral-link :loan-details] [:fetch-supplimentary-info :consent] [:get-currently-owned-real-estate :consent] [:know-your-customer :consent] 
[:produce-credit-application-document :amortization] [:produce-credit-application-document :collateral-link] [:produce-credit-application-document :collateral] [:produce-credit-application-document :currently-owned-real-estate] [:produce-credit-application-document :customer-info] [:produce-credit-application-document :loan-details] [:sign-credit-application-document :credit-application-reference]] :role-performs [[:applicant :add-collateral] [:applicant :add-economy] [:applicant :add-loan-details] [:applicant :consent-to-personal-data-retrieval-and-storage] [:applicant :create-case] [:applicant :create-collateral-link] [:applicant :sign-credit-application-document] [:system :calculate-amortization] [:system :fetch-supplimentary-info] [:system :get-currently-owned-real-estate] [:system :know-your-customer]] :action-requires-conditional [{:action :create-collateral-link :requires :collateral-valuation :condition (fn [loan-details] (> (:amount loan-details) 2000000)) :condition-argument :loan-details}]})) (comment (dativity.visualize/generate-png case-graph)) (deftest actions-it (testing "runs a case through the whole flow and makes sure that only the right actions are available" (as-> {} case (do (is= (c/next-actions case-graph case) #{:create-case}) (is= (c/next-actions case-graph case :applicant) #{:create-case}) (is= (c/next-actions case-graph case :system) #{}) case) (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (do (is= (c/next-actions case-graph case) #{:add-loan-details :add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) case) (c/add-data case :loan-details {:amount 1000000 :product "Bolån"}) (do (is= (c/next-actions case-graph case) #{:add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details}) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is (c/action-allowed? 
case-graph case :add-loan-details)) case) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is-not (c/action-allowed? case-graph case :add-collateral-link)) case) (c/add-data case :consent {:uc true :lmv true :pep true}) (do (is= (c/next-actions case-graph case) #{:fetch-supplimentary-info :get-currently-owned-real-estate :add-collateral-valuation :add-economy :know-your-customer}) (is-not (c/action-allowed? case-graph case :create-collateral-link)) case) (c/add-data case :economy {:income 500000 :children 2}) (c/add-data case :customer-info {:name "Carl-Jan Granqvist" :age 63}) (c/add-data case :currently-owned-real-estate {:name "Villa villerkulla"}) (do (is= (c/next-actions case-graph case) #{:create-collateral-link :know-your-customer :add-collateral-valuation}) case)))) (deftest invalidate-action-it (testing "Given a case that has a few actions performed, when an action is invalidated, then the case should be 'rewinded' to that action that was invalidated. No data should be removed." (as-> {} case (do (is= (c/next-actions case-graph case) #{:create-case}) (is= (c/next-actions case-graph case :applicant) #{:create-case}) (is= (c/next-actions case-graph case :system) #{}) case) (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (do (is= (c/next-actions case-graph case) #{:add-loan-details :add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) case) (c/add-data case :loan-details {:amount 1000000 :product "Bolån"}) (do (is= (c/next-actions case-graph case) #{:add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details}) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is (c/action-allowed? 
case-graph case :add-loan-details)) case) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is-not (c/action-allowed? case-graph case :add-collateral-link)) case) (c/add-data case :consent {:uc true :lmv true :pep true}) (c/add-data case :know-your-customer-data {:income 10000}) (do (is= (c/next-actions case-graph case) #{:fetch-supplimentary-info :get-currently-owned-real-estate :add-collateral-valuation :add-economy}) (is-not (c/action-allowed? case-graph case :create-collateral-link)) case) (c/add-data case :currently-owned-real-estate {:address "Bägersta Byväg 17"}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral :get-currently-owned-real-estate :consent-to-personal-data-retrieval-and-storage :know-your-customer}) (is (c/action-allowed? case-graph case :create-collateral-link)) case) (c/invalidate-action case-graph case :consent-to-personal-data-retrieval-and-storage) ; INVALIDATION!! (do (is-not (c/action-allowed? case-graph case :fetch-supplimentary-info)) (is-not (c/action-allowed? case-graph case :know-your-customer)) (is= (c/next-actions case-graph case) #{:add-collateral-valuation :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is (c/action-allowed? case-graph case :add-loan-details)) (is (c/case-has-data? case :loan-details)) (is (c/case-has-data? case :collateral)) (is (c/case-has-data? case :consent)) (is (c/case-has-data? case :case-id)) (is (c/case-has-data? 
case :customer-id)))))) (deftest invalidate-data-it (testing "Given a case that has a few actions performed, when a data node is invalidated, then the case should be 'rewinded' to the action that produced that data. Actions that depend on that data should be invalidated. No data should be removed." (as-> {} case (do (is= (c/next-actions case-graph case) #{:create-case}) (is= (c/next-actions case-graph case :applicant) #{:create-case}) (is= (c/next-actions case-graph case :system) #{}) case) (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (do (is= (c/next-actions case-graph case) #{:add-loan-details :add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) case) (c/add-data case :loan-details {:amount 1000000 :product "Bolån"}) (do (is= (c/next-actions case-graph case) #{:add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details}) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is (c/action-allowed? case-graph case :add-loan-details)) case) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is-not (c/action-allowed? case-graph case :add-collateral-link)) case) (c/add-data case :consent {:uc true :lmv true :pep true}) (c/add-data case :know-your-customer-data {:income 10000}) (do (is= (c/next-actions case-graph case) #{:fetch-supplimentary-info :get-currently-owned-real-estate :add-collateral-valuation :add-economy}) (is-not (c/action-allowed? 
case-graph case :create-collateral-link)) case) (c/add-data case :currently-owned-real-estate {:address "Bägersta Byväg 17"}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral :get-currently-owned-real-estate :consent-to-personal-data-retrieval-and-storage :know-your-customer}) (is (c/action-allowed? case-graph case :create-collateral-link)) case) (c/invalidate-data case-graph case :consent) ; INVALIDATION!! (do (is-not (c/action-allowed? case-graph case :fetch-supplimentary-info)) (is-not (c/action-allowed? case-graph case :know-your-customer)) (is-not (c/action-allowed? case-graph case :get-currently-owned-real-estate)) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is= (c/next-actions case-graph case) #{:add-collateral-valuation :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is (c/action-allowed? case-graph case :add-loan-details)) (is (c/case-has-data? case :loan-details)) (is (c/case-has-data? case :collateral)) (is (c/case-has-data? case :consent)) (is (c/case-has-data? case :case-id)) (is (c/case-has-data? case :customer-id)))))) (deftest conditional-it (testing "When the loan amount is over 2 000 000 then the collateral needs to have a valuation. If the loan amount is lower than or equal to 2 000 000 then it's possible to proceed and create the collateral-link without a valuation. It's still possible to add it, but not required." (do (as-> {} case (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (c/add-data case :consent {:uc true :lmv true :pep true}) (c/add-data case :currently-owned-real-estate {:name "Villa villerkulla"}) (c/add-data case :loan-details {:amount 1000000 :product "Bolån"}) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is (c/action-allowed? 
case-graph case :create-collateral-link)))) (as-> {} case (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (c/add-data case :consent {:uc true :lmv true :pep true}) (c/add-data case :currently-owned-real-estate {:name "Villa villerkulla"}) (c/add-data case :loan-details {:amount 3000000 :product "Bolån"}) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is-not (c/action-allowed? case-graph case :create-collateral-link)) case) (c/add-data case :collateral-valuation {:valuation 5700000 :valuator "Karl Anka"}) (is (c/action-allowed? case-graph case :create-collateral-link))))))
33730
(ns dativity.core-test (:require [ysera.test :refer [is= is is-not deftest testing]] [dativity.core :as c] [dativity.define :as d])) (def case-graph (d/create-model {:actions [:create-case :consent-to-personal-data-retrieval-and-storage :fetch-supplimentary-info :know-your-customer :add-economy :get-currently-owned-real-estate :add-loan-details :add-collateral-valuation :add-collateral :create-collateral-link :calculate-amortization :produce-credit-application-document :sign-credit-application-document] :data [:case-id :customer-id :customer-info :consent :know-your-customer-data :economy :currently-owned-real-estate :loan-details :collateral :collateral-valuation :collateral-link :amortization :credit-application-reference :credit-application-signature] :roles [:applicant :system :officer] :action-produces [[:add-collateral :collateral] [:add-collateral-valuation :collateral-valuation] [:add-economy :economy] [:add-loan-details :loan-details] [:calculate-amortization :amortization] [:consent-to-personal-data-retrieval-and-storage :consent] [:create-case :case-id] [:create-case :customer-id] [:create-collateral-link :collateral-link] [:fetch-supplimentary-info :customer-info] [:get-currently-owned-real-estate :currently-owned-real-estate] [:know-your-customer :know-your-customer-data] [:produce-credit-application-document :credit-application-reference] [:sign-credit-application-document :credit-application-signature]] :action-requires [[:add-collateral :case-id] [:add-collateral-valuation :collateral] [:add-economy :customer-id] [:add-loan-details :case-id] [:calculate-amortization :collateral-link] [:calculate-amortization :economy] [:consent-to-personal-data-retrieval-and-storage :customer-id] [:create-collateral-link :collateral] [:create-collateral-link :currently-owned-real-estate] [:create-collateral-link :loan-details] [:fetch-supplimentary-info :consent] [:get-currently-owned-real-estate :consent] [:know-your-customer :consent] 
[:produce-credit-application-document :amortization] [:produce-credit-application-document :collateral-link] [:produce-credit-application-document :collateral] [:produce-credit-application-document :currently-owned-real-estate] [:produce-credit-application-document :customer-info] [:produce-credit-application-document :loan-details] [:sign-credit-application-document :credit-application-reference]] :role-performs [[:applicant :add-collateral] [:applicant :add-economy] [:applicant :add-loan-details] [:applicant :consent-to-personal-data-retrieval-and-storage] [:applicant :create-case] [:applicant :create-collateral-link] [:applicant :sign-credit-application-document] [:system :calculate-amortization] [:system :fetch-supplimentary-info] [:system :get-currently-owned-real-estate] [:system :know-your-customer]] :action-requires-conditional [{:action :create-collateral-link :requires :collateral-valuation :condition (fn [loan-details] (> (:amount loan-details) 2000000)) :condition-argument :loan-details}]})) (comment (dativity.visualize/generate-png case-graph)) (deftest actions-it (testing "runs a case through the whole flow and makes sure that only the right actions are available" (as-> {} case (do (is= (c/next-actions case-graph case) #{:create-case}) (is= (c/next-actions case-graph case :applicant) #{:create-case}) (is= (c/next-actions case-graph case :system) #{}) case) (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (do (is= (c/next-actions case-graph case) #{:add-loan-details :add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) case) (c/add-data case :loan-details {:amount 1000000 :product "Bolån"}) (do (is= (c/next-actions case-graph case) #{:add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details}) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is (c/action-allowed? 
case-graph case :add-loan-details)) case) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is-not (c/action-allowed? case-graph case :add-collateral-link)) case) (c/add-data case :consent {:uc true :lmv true :pep true}) (do (is= (c/next-actions case-graph case) #{:fetch-supplimentary-info :get-currently-owned-real-estate :add-collateral-valuation :add-economy :know-your-customer}) (is-not (c/action-allowed? case-graph case :create-collateral-link)) case) (c/add-data case :economy {:income 500000 :children 2}) (c/add-data case :customer-info {:name "<NAME>" :age 63}) (c/add-data case :currently-owned-real-estate {:name "<NAME>"}) (do (is= (c/next-actions case-graph case) #{:create-collateral-link :know-your-customer :add-collateral-valuation}) case)))) (deftest invalidate-action-it (testing "Given a case that has a few actions performed, when an action is invalidated, then the case should be 'rewinded' to that action that was invalidated. No data should be removed." (as-> {} case (do (is= (c/next-actions case-graph case) #{:create-case}) (is= (c/next-actions case-graph case :applicant) #{:create-case}) (is= (c/next-actions case-graph case :system) #{}) case) (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (do (is= (c/next-actions case-graph case) #{:add-loan-details :add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) case) (c/add-data case :loan-details {:amount 1000000 :product "Bolån"}) (do (is= (c/next-actions case-graph case) #{:add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details}) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is (c/action-allowed? 
case-graph case :add-loan-details)) case) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is-not (c/action-allowed? case-graph case :add-collateral-link)) case) (c/add-data case :consent {:uc true :lmv true :pep true}) (c/add-data case :know-your-customer-data {:income 10000}) (do (is= (c/next-actions case-graph case) #{:fetch-supplimentary-info :get-currently-owned-real-estate :add-collateral-valuation :add-economy}) (is-not (c/action-allowed? case-graph case :create-collateral-link)) case) (c/add-data case :currently-owned-real-estate {:address "Bägersta Byväg 17"}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral :get-currently-owned-real-estate :consent-to-personal-data-retrieval-and-storage :know-your-customer}) (is (c/action-allowed? case-graph case :create-collateral-link)) case) (c/invalidate-action case-graph case :consent-to-personal-data-retrieval-and-storage) ; INVALIDATION!! (do (is-not (c/action-allowed? case-graph case :fetch-supplimentary-info)) (is-not (c/action-allowed? case-graph case :know-your-customer)) (is= (c/next-actions case-graph case) #{:add-collateral-valuation :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is (c/action-allowed? case-graph case :add-loan-details)) (is (c/case-has-data? case :loan-details)) (is (c/case-has-data? case :collateral)) (is (c/case-has-data? case :consent)) (is (c/case-has-data? case :case-id)) (is (c/case-has-data? 
case :customer-id)))))) (deftest invalidate-data-it (testing "Given a case that has a few actions performed, when a data node is invalidated, then the case should be 'rewinded' to the action that produced that data. Actions that depend on that data should be invalidated. No data should be removed." (as-> {} case (do (is= (c/next-actions case-graph case) #{:create-case}) (is= (c/next-actions case-graph case :applicant) #{:create-case}) (is= (c/next-actions case-graph case :system) #{}) case) (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (do (is= (c/next-actions case-graph case) #{:add-loan-details :add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) case) (c/add-data case :loan-details {:amount 1000000 :product "Bolån"}) (do (is= (c/next-actions case-graph case) #{:add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details}) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is (c/action-allowed? case-graph case :add-loan-details)) case) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is-not (c/action-allowed? case-graph case :add-collateral-link)) case) (c/add-data case :consent {:uc true :lmv true :pep true}) (c/add-data case :know-your-customer-data {:income 10000}) (do (is= (c/next-actions case-graph case) #{:fetch-supplimentary-info :get-currently-owned-real-estate :add-collateral-valuation :add-economy}) (is-not (c/action-allowed? 
case-graph case :create-collateral-link)) case) (c/add-data case :currently-owned-real-estate {:address "Bägersta Byväg 17"}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral :get-currently-owned-real-estate :consent-to-personal-data-retrieval-and-storage :know-your-customer}) (is (c/action-allowed? case-graph case :create-collateral-link)) case) (c/invalidate-data case-graph case :consent) ; INVALIDATION!! (do (is-not (c/action-allowed? case-graph case :fetch-supplimentary-info)) (is-not (c/action-allowed? case-graph case :know-your-customer)) (is-not (c/action-allowed? case-graph case :get-currently-owned-real-estate)) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is= (c/next-actions case-graph case) #{:add-collateral-valuation :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is (c/action-allowed? case-graph case :add-loan-details)) (is (c/case-has-data? case :loan-details)) (is (c/case-has-data? case :collateral)) (is (c/case-has-data? case :consent)) (is (c/case-has-data? case :case-id)) (is (c/case-has-data? case :customer-id)))))) (deftest conditional-it (testing "When the loan amount is over 2 000 000 then the collateral needs to have a valuation. If the loan amount is lower than or equal to 2 000 000 then it's possible to proceed and create the collateral-link without a valuation. It's still possible to add it, but not required." (do (as-> {} case (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (c/add-data case :consent {:uc true :lmv true :pep true}) (c/add-data case :currently-owned-real-estate {:name "<NAME>"}) (c/add-data case :loan-details {:amount 1000000 :product "Bolån"}) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is (c/action-allowed? 
case-graph case :create-collateral-link)))) (as-> {} case (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (c/add-data case :consent {:uc true :lmv true :pep true}) (c/add-data case :currently-owned-real-estate {:name "<NAME>"}) (c/add-data case :loan-details {:amount 3000000 :product "Bolån"}) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is-not (c/action-allowed? case-graph case :create-collateral-link)) case) (c/add-data case :collateral-valuation {:valuation 5700000 :valuator "<NAME>"}) (is (c/action-allowed? case-graph case :create-collateral-link))))))
true
(ns dativity.core-test (:require [ysera.test :refer [is= is is-not deftest testing]] [dativity.core :as c] [dativity.define :as d])) (def case-graph (d/create-model {:actions [:create-case :consent-to-personal-data-retrieval-and-storage :fetch-supplimentary-info :know-your-customer :add-economy :get-currently-owned-real-estate :add-loan-details :add-collateral-valuation :add-collateral :create-collateral-link :calculate-amortization :produce-credit-application-document :sign-credit-application-document] :data [:case-id :customer-id :customer-info :consent :know-your-customer-data :economy :currently-owned-real-estate :loan-details :collateral :collateral-valuation :collateral-link :amortization :credit-application-reference :credit-application-signature] :roles [:applicant :system :officer] :action-produces [[:add-collateral :collateral] [:add-collateral-valuation :collateral-valuation] [:add-economy :economy] [:add-loan-details :loan-details] [:calculate-amortization :amortization] [:consent-to-personal-data-retrieval-and-storage :consent] [:create-case :case-id] [:create-case :customer-id] [:create-collateral-link :collateral-link] [:fetch-supplimentary-info :customer-info] [:get-currently-owned-real-estate :currently-owned-real-estate] [:know-your-customer :know-your-customer-data] [:produce-credit-application-document :credit-application-reference] [:sign-credit-application-document :credit-application-signature]] :action-requires [[:add-collateral :case-id] [:add-collateral-valuation :collateral] [:add-economy :customer-id] [:add-loan-details :case-id] [:calculate-amortization :collateral-link] [:calculate-amortization :economy] [:consent-to-personal-data-retrieval-and-storage :customer-id] [:create-collateral-link :collateral] [:create-collateral-link :currently-owned-real-estate] [:create-collateral-link :loan-details] [:fetch-supplimentary-info :consent] [:get-currently-owned-real-estate :consent] [:know-your-customer :consent] 
[:produce-credit-application-document :amortization] [:produce-credit-application-document :collateral-link] [:produce-credit-application-document :collateral] [:produce-credit-application-document :currently-owned-real-estate] [:produce-credit-application-document :customer-info] [:produce-credit-application-document :loan-details] [:sign-credit-application-document :credit-application-reference]] :role-performs [[:applicant :add-collateral] [:applicant :add-economy] [:applicant :add-loan-details] [:applicant :consent-to-personal-data-retrieval-and-storage] [:applicant :create-case] [:applicant :create-collateral-link] [:applicant :sign-credit-application-document] [:system :calculate-amortization] [:system :fetch-supplimentary-info] [:system :get-currently-owned-real-estate] [:system :know-your-customer]] :action-requires-conditional [{:action :create-collateral-link :requires :collateral-valuation :condition (fn [loan-details] (> (:amount loan-details) 2000000)) :condition-argument :loan-details}]})) (comment (dativity.visualize/generate-png case-graph)) (deftest actions-it (testing "runs a case through the whole flow and makes sure that only the right actions are available" (as-> {} case (do (is= (c/next-actions case-graph case) #{:create-case}) (is= (c/next-actions case-graph case :applicant) #{:create-case}) (is= (c/next-actions case-graph case :system) #{}) case) (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (do (is= (c/next-actions case-graph case) #{:add-loan-details :add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) case) (c/add-data case :loan-details {:amount 1000000 :product "Bolån"}) (do (is= (c/next-actions case-graph case) #{:add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details}) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is (c/action-allowed? 
case-graph case :add-loan-details)) case) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is-not (c/action-allowed? case-graph case :add-collateral-link)) case) (c/add-data case :consent {:uc true :lmv true :pep true}) (do (is= (c/next-actions case-graph case) #{:fetch-supplimentary-info :get-currently-owned-real-estate :add-collateral-valuation :add-economy :know-your-customer}) (is-not (c/action-allowed? case-graph case :create-collateral-link)) case) (c/add-data case :economy {:income 500000 :children 2}) (c/add-data case :customer-info {:name "PI:NAME:<NAME>END_PI" :age 63}) (c/add-data case :currently-owned-real-estate {:name "PI:NAME:<NAME>END_PI"}) (do (is= (c/next-actions case-graph case) #{:create-collateral-link :know-your-customer :add-collateral-valuation}) case)))) (deftest invalidate-action-it (testing "Given a case that has a few actions performed, when an action is invalidated, then the case should be 'rewinded' to that action that was invalidated. No data should be removed." (as-> {} case (do (is= (c/next-actions case-graph case) #{:create-case}) (is= (c/next-actions case-graph case :applicant) #{:create-case}) (is= (c/next-actions case-graph case :system) #{}) case) (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (do (is= (c/next-actions case-graph case) #{:add-loan-details :add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) case) (c/add-data case :loan-details {:amount 1000000 :product "Bolån"}) (do (is= (c/next-actions case-graph case) #{:add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details}) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is (c/action-allowed? 
case-graph case :add-loan-details)) case) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is-not (c/action-allowed? case-graph case :add-collateral-link)) case) (c/add-data case :consent {:uc true :lmv true :pep true}) (c/add-data case :know-your-customer-data {:income 10000}) (do (is= (c/next-actions case-graph case) #{:fetch-supplimentary-info :get-currently-owned-real-estate :add-collateral-valuation :add-economy}) (is-not (c/action-allowed? case-graph case :create-collateral-link)) case) (c/add-data case :currently-owned-real-estate {:address "Bägersta Byväg 17"}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral :get-currently-owned-real-estate :consent-to-personal-data-retrieval-and-storage :know-your-customer}) (is (c/action-allowed? case-graph case :create-collateral-link)) case) (c/invalidate-action case-graph case :consent-to-personal-data-retrieval-and-storage) ; INVALIDATION!! (do (is-not (c/action-allowed? case-graph case :fetch-supplimentary-info)) (is-not (c/action-allowed? case-graph case :know-your-customer)) (is= (c/next-actions case-graph case) #{:add-collateral-valuation :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is (c/action-allowed? case-graph case :add-loan-details)) (is (c/case-has-data? case :loan-details)) (is (c/case-has-data? case :collateral)) (is (c/case-has-data? case :consent)) (is (c/case-has-data? case :case-id)) (is (c/case-has-data? 
case :customer-id)))))) (deftest invalidate-data-it (testing "Given a case that has a few actions performed, when a data node is invalidated, then the case should be 'rewinded' to the action that produced that data. Actions that depend on that data should be invalidated. No data should be removed." (as-> {} case (do (is= (c/next-actions case-graph case) #{:create-case}) (is= (c/next-actions case-graph case :applicant) #{:create-case}) (is= (c/next-actions case-graph case :system) #{}) case) (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (do (is= (c/next-actions case-graph case) #{:add-loan-details :add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) case) (c/add-data case :loan-details {:amount 1000000 :product "Bolån"}) (do (is= (c/next-actions case-graph case) #{:add-collateral :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details}) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is (c/action-allowed? case-graph case :add-loan-details)) case) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is-not (c/action-allowed? case-graph case :add-collateral-link)) case) (c/add-data case :consent {:uc true :lmv true :pep true}) (c/add-data case :know-your-customer-data {:income 10000}) (do (is= (c/next-actions case-graph case) #{:fetch-supplimentary-info :get-currently-owned-real-estate :add-collateral-valuation :add-economy}) (is-not (c/action-allowed? 
case-graph case :create-collateral-link)) case) (c/add-data case :currently-owned-real-estate {:address "Bägersta Byväg 17"}) (do (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral :get-currently-owned-real-estate :consent-to-personal-data-retrieval-and-storage :know-your-customer}) (is (c/action-allowed? case-graph case :create-collateral-link)) case) (c/invalidate-data case-graph case :consent) ; INVALIDATION!! (do (is-not (c/action-allowed? case-graph case :fetch-supplimentary-info)) (is-not (c/action-allowed? case-graph case :know-your-customer)) (is-not (c/action-allowed? case-graph case :get-currently-owned-real-estate)) (is-not (c/action-allowed? case-graph case :produce-credit-application-document)) (is= (c/next-actions case-graph case) #{:add-collateral-valuation :consent-to-personal-data-retrieval-and-storage :add-economy}) (is= (c/actions-performed case-graph case) #{:create-case :add-loan-details :add-collateral}) (is (c/action-allowed? case-graph case :add-loan-details)) (is (c/case-has-data? case :loan-details)) (is (c/case-has-data? case :collateral)) (is (c/case-has-data? case :consent)) (is (c/case-has-data? case :case-id)) (is (c/case-has-data? case :customer-id)))))) (deftest conditional-it (testing "When the loan amount is over 2 000 000 then the collateral needs to have a valuation. If the loan amount is lower than or equal to 2 000 000 then it's possible to proceed and create the collateral-link without a valuation. It's still possible to add it, but not required." (do (as-> {} case (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (c/add-data case :consent {:uc true :lmv true :pep true}) (c/add-data case :currently-owned-real-estate {:name "PI:NAME:<NAME>END_PI"}) (c/add-data case :loan-details {:amount 1000000 :product "Bolån"}) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is (c/action-allowed? 
case-graph case :create-collateral-link)))) (as-> {} case (c/add-data case :case-id "100001") (c/add-data case :customer-id "9209041111") (c/add-data case :consent {:uc true :lmv true :pep true}) (c/add-data case :currently-owned-real-estate {:name "PI:NAME:<NAME>END_PI"}) (c/add-data case :loan-details {:amount 3000000 :product "Bolån"}) (c/add-data case :collateral {:designation {:municipality "Täby" :region "Pallen" :block "11:45"}}) (do (is-not (c/action-allowed? case-graph case :create-collateral-link)) case) (c/add-data case :collateral-valuation {:valuation 5700000 :valuator "PI:NAME:<NAME>END_PI"}) (is (c/action-allowed? case-graph case :create-collateral-link))))))
[ { "context": "ug \"test-decision\"\n ::user/email \"Marc\"})])\n =check=>\n (_/embeds?*\n ", "end": 11905, "score": 0.9778761863708496, "start": 11901, "tag": "NAME", "value": "Marc" } ]
src/test/decide/models/process_test.clj
fivejjs/decide3
4
(ns decide.models.process-test (:require [clojure.test :refer [deftest is use-fixtures testing are]] [decide.models.process :as process] [decide.models.process.mutations :as process.mutations] [decide.models.user :as user] [decide.server-components.pathom :as pathom] [decide.test-utils.common :refer [db-fixture *conn*]] [fulcro-spec.check :as _] [fulcro-spec.core :refer [specification provided behavior assertions component provided! => =fn=> =check=>]])) (use-fixtures :each db-fixture) (deftest unauthorized-user-integration-test (let [parser (pathom/build-parser {} *conn*)] (component "Someone not authorized" (let [parser-without-session #(parser {:ring/request {}} %)] (assertions "can query for an existing process." (parser-without-session [{[::process/slug "test-decision"] [::process/slug ::process/title]}]) => {[::process/slug "test-decision"] #::process{:slug "test-decision" :title "Meine Test-Entscheidung"}} "can not add a new process." (parser-without-session [{(list `process.mutations/add-process #::process{:slug "test" :title "My Test-Title" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) => {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "User is not logged in!" :data {}}}} "can not update an existing process." (parser-without-session [{`(process.mutations/update-process #::process{:slug "test-decision" :title "My NEW Test-Title"}) [::process/title]}]) => {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "User is not logged in!" :data {}}}}))))) (deftest parser-integration-test (let [parser (pathom/build-parser {} *conn*)] (behavior "An authorized user" (let [parser-existing-user (partial parser {:ring/request {:session {:id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb"}}})] (assertions "can query for an existing process." 
(parser-existing-user [{[::process/slug "test-decision"] [::process/slug ::process/title]}]) => {[::process/slug "test-decision"] #::process{:slug "test-decision" :title "Meine Test-Entscheidung"}} "can add a new process." (parser-existing-user [{(list `process.mutations/add-process #::process{:slug "test" :title "My Test-Title" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) => {`process.mutations/add-process #::process{:slug "test" :title "My Test-Title" :description "foobar"}} "can query for the new process." (parser-existing-user [{[::process/slug "test"] [::process/slug ::process/title ::process/description ::process/end-time {::process/moderators [::user/id]}]}]) => {[::process/slug "test"] #::process{:slug "test" :title "My Test-Title" :description "foobar" ::process/moderators [{::user/id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb"}]}} "can't get an end-time, since it wasn't specified on creation." (parser-existing-user [{[::process/slug "test"] [::process/end-time]}]) => {[::process/slug "test"] {}} "can query for the new moderators, who is just the user who created the process." (parser-existing-user [{[::process/slug "test"] [{::process/moderators [::user/id]}]}]) => {[::process/slug "test"] {::process/moderators [{::user/id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb"}]}}) (component "who is a moderator of the process" (let [parser-with-moderator parser-existing-user] (assertions "can update the process afterwards" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :title "My NEW Test-Title"}) [::process/title]}]) => {`process.mutations/update-process {::process/title "My NEW Test-Title"}} "can not update a process that is not in use." (parser-existing-user [{`(process.mutations/update-process #::process{:slug "i-do-not-exist" :title "My NEW Test-Title"}) [::process/title]}]) => {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Slug is not in use!" 
:data {:slug "i-do-not-exist"}}}} "can add an end-date afterwards" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :end-time #inst"2030"}) [::process/end-time]}]) => {`process.mutations/update-process {::process/end-time #inst"2030"}} "can remove an end-date afterwards" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :end-time nil}) [::process/slug ::process/end-time]}]) => {`process.mutations/update-process #::process{:slug "test"}} "can not remove an required attributes" (get-in (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :title nil}) [::process/slug ::process/end-time]}]) [`process.mutations/update-process :com.fulcrologic.rad.pathom/errors :message]) => "Failed validation!" "can not remove an required attributes" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :title nil}) [::process/slug ::process/end-time]}]) =check=> (_/embeds?* {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "can not remove slug" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug nil}) [::process/slug ::process/end-time]}]) =check=> (_/embeds?* {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "can not set title to empty" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :title ""}) [::process/slug ::process/end-time]}]) =check=> (_/embeds?* {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}})))) (component "who is not a moderator of the process" (let [parser-with-non-moderator (partial parser {:ring/request {:session {:id #uuid"001e7a7e-3eb2-4226-b9ab-36dddcf64106"}}})] (assertions "can not update the process." 
(parser-with-non-moderator [{`(process.mutations/update-process #::process{:slug "test" :title "My NEW Test-Title"}) [::process/title]}]) => {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Need moderation role for this operation" :data {::user/id #uuid"001e7a7e-3eb2-4226-b9ab-36dddcf64106" ::process/slug "test"}}}}))))))) (specification "Malformed add-process parameters" (let [parser (pathom/build-parser {} *conn*) parser-existing-user (partial parser {:ring/request {:session {:id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb"}}})] (behavior "process can not be added with" (assertions "malformed slug" (parser-existing-user [{`(process.mutations/add-process #::process{:slug "I AM NOT A CORRECT SLUG" :title "My Test-Title" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "empty title" (parser-existing-user [{(list `process.mutations/add-process #::process{:slug "correct-slug" :title "" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "missing field" (parser-existing-user [{(list `process.mutations/add-process #::process{:slug "correct-slug" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "string as end-date" (parser-existing-user [{(list `process.mutations/add-process #::process{:slug "correct-slug" :title "Bla" :description "foobar" :end-time "heute"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "slug is already in use" 
(parser-existing-user [{(list `process.mutations/add-process #::process{:slug "test-decision" :title "Bla" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Slug already in use"}}}))))) (specification "Moderator priviliges" (let [parser (pathom/build-parser {} *conn*) parser-with-alex-session (partial parser {:ring/request {:session {:id #uuid"000aa0e2-e4d6-463d-ae7c-46765e13a31b"}}})] (behavior "Only a moderator can" (behavior "add new moderators" (assertions (parser-with-alex-session [`(process.mutations/add-moderator {::process/slug "test-decision" ::user/email "Marc"})]) =check=> (_/embeds?* {`process.mutations/add-moderator {:com.fulcrologic.rad.pathom/errors {:message "Need moderation role for this operation"}}})))))) (specification "Private processes" (let [parser (pathom/build-parser {} *conn*) parser-with-alex-session (partial parser {:ring/request {:session {:id #uuid"000aa0e2-e4d6-463d-ae7c-46765e13a31b"}}})] (behavior "A private process" (behavior "can not be queried by everyone" (assertions (parser-with-alex-session [{:root/all-processes [::process/slug]}]) => #:root{:all-processes [#:decide.models.process{:slug "test-decision"}]})))))
51356
(ns decide.models.process-test (:require [clojure.test :refer [deftest is use-fixtures testing are]] [decide.models.process :as process] [decide.models.process.mutations :as process.mutations] [decide.models.user :as user] [decide.server-components.pathom :as pathom] [decide.test-utils.common :refer [db-fixture *conn*]] [fulcro-spec.check :as _] [fulcro-spec.core :refer [specification provided behavior assertions component provided! => =fn=> =check=>]])) (use-fixtures :each db-fixture) (deftest unauthorized-user-integration-test (let [parser (pathom/build-parser {} *conn*)] (component "Someone not authorized" (let [parser-without-session #(parser {:ring/request {}} %)] (assertions "can query for an existing process." (parser-without-session [{[::process/slug "test-decision"] [::process/slug ::process/title]}]) => {[::process/slug "test-decision"] #::process{:slug "test-decision" :title "Meine Test-Entscheidung"}} "can not add a new process." (parser-without-session [{(list `process.mutations/add-process #::process{:slug "test" :title "My Test-Title" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) => {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "User is not logged in!" :data {}}}} "can not update an existing process." (parser-without-session [{`(process.mutations/update-process #::process{:slug "test-decision" :title "My NEW Test-Title"}) [::process/title]}]) => {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "User is not logged in!" :data {}}}}))))) (deftest parser-integration-test (let [parser (pathom/build-parser {} *conn*)] (behavior "An authorized user" (let [parser-existing-user (partial parser {:ring/request {:session {:id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb"}}})] (assertions "can query for an existing process." 
(parser-existing-user [{[::process/slug "test-decision"] [::process/slug ::process/title]}]) => {[::process/slug "test-decision"] #::process{:slug "test-decision" :title "Meine Test-Entscheidung"}} "can add a new process." (parser-existing-user [{(list `process.mutations/add-process #::process{:slug "test" :title "My Test-Title" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) => {`process.mutations/add-process #::process{:slug "test" :title "My Test-Title" :description "foobar"}} "can query for the new process." (parser-existing-user [{[::process/slug "test"] [::process/slug ::process/title ::process/description ::process/end-time {::process/moderators [::user/id]}]}]) => {[::process/slug "test"] #::process{:slug "test" :title "My Test-Title" :description "foobar" ::process/moderators [{::user/id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb"}]}} "can't get an end-time, since it wasn't specified on creation." (parser-existing-user [{[::process/slug "test"] [::process/end-time]}]) => {[::process/slug "test"] {}} "can query for the new moderators, who is just the user who created the process." (parser-existing-user [{[::process/slug "test"] [{::process/moderators [::user/id]}]}]) => {[::process/slug "test"] {::process/moderators [{::user/id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb"}]}}) (component "who is a moderator of the process" (let [parser-with-moderator parser-existing-user] (assertions "can update the process afterwards" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :title "My NEW Test-Title"}) [::process/title]}]) => {`process.mutations/update-process {::process/title "My NEW Test-Title"}} "can not update a process that is not in use." (parser-existing-user [{`(process.mutations/update-process #::process{:slug "i-do-not-exist" :title "My NEW Test-Title"}) [::process/title]}]) => {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Slug is not in use!" 
:data {:slug "i-do-not-exist"}}}} "can add an end-date afterwards" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :end-time #inst"2030"}) [::process/end-time]}]) => {`process.mutations/update-process {::process/end-time #inst"2030"}} "can remove an end-date afterwards" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :end-time nil}) [::process/slug ::process/end-time]}]) => {`process.mutations/update-process #::process{:slug "test"}} "can not remove an required attributes" (get-in (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :title nil}) [::process/slug ::process/end-time]}]) [`process.mutations/update-process :com.fulcrologic.rad.pathom/errors :message]) => "Failed validation!" "can not remove an required attributes" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :title nil}) [::process/slug ::process/end-time]}]) =check=> (_/embeds?* {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "can not remove slug" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug nil}) [::process/slug ::process/end-time]}]) =check=> (_/embeds?* {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "can not set title to empty" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :title ""}) [::process/slug ::process/end-time]}]) =check=> (_/embeds?* {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}})))) (component "who is not a moderator of the process" (let [parser-with-non-moderator (partial parser {:ring/request {:session {:id #uuid"001e7a7e-3eb2-4226-b9ab-36dddcf64106"}}})] (assertions "can not update the process." 
(parser-with-non-moderator [{`(process.mutations/update-process #::process{:slug "test" :title "My NEW Test-Title"}) [::process/title]}]) => {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Need moderation role for this operation" :data {::user/id #uuid"001e7a7e-3eb2-4226-b9ab-36dddcf64106" ::process/slug "test"}}}}))))))) (specification "Malformed add-process parameters" (let [parser (pathom/build-parser {} *conn*) parser-existing-user (partial parser {:ring/request {:session {:id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb"}}})] (behavior "process can not be added with" (assertions "malformed slug" (parser-existing-user [{`(process.mutations/add-process #::process{:slug "I AM NOT A CORRECT SLUG" :title "My Test-Title" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "empty title" (parser-existing-user [{(list `process.mutations/add-process #::process{:slug "correct-slug" :title "" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "missing field" (parser-existing-user [{(list `process.mutations/add-process #::process{:slug "correct-slug" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "string as end-date" (parser-existing-user [{(list `process.mutations/add-process #::process{:slug "correct-slug" :title "Bla" :description "foobar" :end-time "heute"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "slug is already in use" 
(parser-existing-user [{(list `process.mutations/add-process #::process{:slug "test-decision" :title "Bla" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Slug already in use"}}}))))) (specification "Moderator priviliges" (let [parser (pathom/build-parser {} *conn*) parser-with-alex-session (partial parser {:ring/request {:session {:id #uuid"000aa0e2-e4d6-463d-ae7c-46765e13a31b"}}})] (behavior "Only a moderator can" (behavior "add new moderators" (assertions (parser-with-alex-session [`(process.mutations/add-moderator {::process/slug "test-decision" ::user/email "<NAME>"})]) =check=> (_/embeds?* {`process.mutations/add-moderator {:com.fulcrologic.rad.pathom/errors {:message "Need moderation role for this operation"}}})))))) (specification "Private processes" (let [parser (pathom/build-parser {} *conn*) parser-with-alex-session (partial parser {:ring/request {:session {:id #uuid"000aa0e2-e4d6-463d-ae7c-46765e13a31b"}}})] (behavior "A private process" (behavior "can not be queried by everyone" (assertions (parser-with-alex-session [{:root/all-processes [::process/slug]}]) => #:root{:all-processes [#:decide.models.process{:slug "test-decision"}]})))))
true
(ns decide.models.process-test (:require [clojure.test :refer [deftest is use-fixtures testing are]] [decide.models.process :as process] [decide.models.process.mutations :as process.mutations] [decide.models.user :as user] [decide.server-components.pathom :as pathom] [decide.test-utils.common :refer [db-fixture *conn*]] [fulcro-spec.check :as _] [fulcro-spec.core :refer [specification provided behavior assertions component provided! => =fn=> =check=>]])) (use-fixtures :each db-fixture) (deftest unauthorized-user-integration-test (let [parser (pathom/build-parser {} *conn*)] (component "Someone not authorized" (let [parser-without-session #(parser {:ring/request {}} %)] (assertions "can query for an existing process." (parser-without-session [{[::process/slug "test-decision"] [::process/slug ::process/title]}]) => {[::process/slug "test-decision"] #::process{:slug "test-decision" :title "Meine Test-Entscheidung"}} "can not add a new process." (parser-without-session [{(list `process.mutations/add-process #::process{:slug "test" :title "My Test-Title" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) => {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "User is not logged in!" :data {}}}} "can not update an existing process." (parser-without-session [{`(process.mutations/update-process #::process{:slug "test-decision" :title "My NEW Test-Title"}) [::process/title]}]) => {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "User is not logged in!" :data {}}}}))))) (deftest parser-integration-test (let [parser (pathom/build-parser {} *conn*)] (behavior "An authorized user" (let [parser-existing-user (partial parser {:ring/request {:session {:id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb"}}})] (assertions "can query for an existing process." 
(parser-existing-user [{[::process/slug "test-decision"] [::process/slug ::process/title]}]) => {[::process/slug "test-decision"] #::process{:slug "test-decision" :title "Meine Test-Entscheidung"}} "can add a new process." (parser-existing-user [{(list `process.mutations/add-process #::process{:slug "test" :title "My Test-Title" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) => {`process.mutations/add-process #::process{:slug "test" :title "My Test-Title" :description "foobar"}} "can query for the new process." (parser-existing-user [{[::process/slug "test"] [::process/slug ::process/title ::process/description ::process/end-time {::process/moderators [::user/id]}]}]) => {[::process/slug "test"] #::process{:slug "test" :title "My Test-Title" :description "foobar" ::process/moderators [{::user/id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb"}]}} "can't get an end-time, since it wasn't specified on creation." (parser-existing-user [{[::process/slug "test"] [::process/end-time]}]) => {[::process/slug "test"] {}} "can query for the new moderators, who is just the user who created the process." (parser-existing-user [{[::process/slug "test"] [{::process/moderators [::user/id]}]}]) => {[::process/slug "test"] {::process/moderators [{::user/id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb"}]}}) (component "who is a moderator of the process" (let [parser-with-moderator parser-existing-user] (assertions "can update the process afterwards" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :title "My NEW Test-Title"}) [::process/title]}]) => {`process.mutations/update-process {::process/title "My NEW Test-Title"}} "can not update a process that is not in use." (parser-existing-user [{`(process.mutations/update-process #::process{:slug "i-do-not-exist" :title "My NEW Test-Title"}) [::process/title]}]) => {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Slug is not in use!" 
:data {:slug "i-do-not-exist"}}}} "can add an end-date afterwards" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :end-time #inst"2030"}) [::process/end-time]}]) => {`process.mutations/update-process {::process/end-time #inst"2030"}} "can remove an end-date afterwards" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :end-time nil}) [::process/slug ::process/end-time]}]) => {`process.mutations/update-process #::process{:slug "test"}} "can not remove an required attributes" (get-in (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :title nil}) [::process/slug ::process/end-time]}]) [`process.mutations/update-process :com.fulcrologic.rad.pathom/errors :message]) => "Failed validation!" "can not remove an required attributes" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :title nil}) [::process/slug ::process/end-time]}]) =check=> (_/embeds?* {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "can not remove slug" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug nil}) [::process/slug ::process/end-time]}]) =check=> (_/embeds?* {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "can not set title to empty" (parser-with-moderator [{`(process.mutations/update-process #::process{:slug "test" :title ""}) [::process/slug ::process/end-time]}]) =check=> (_/embeds?* {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}})))) (component "who is not a moderator of the process" (let [parser-with-non-moderator (partial parser {:ring/request {:session {:id #uuid"001e7a7e-3eb2-4226-b9ab-36dddcf64106"}}})] (assertions "can not update the process." 
(parser-with-non-moderator [{`(process.mutations/update-process #::process{:slug "test" :title "My NEW Test-Title"}) [::process/title]}]) => {`process.mutations/update-process {:com.fulcrologic.rad.pathom/errors {:message "Need moderation role for this operation" :data {::user/id #uuid"001e7a7e-3eb2-4226-b9ab-36dddcf64106" ::process/slug "test"}}}}))))))) (specification "Malformed add-process parameters" (let [parser (pathom/build-parser {} *conn*) parser-existing-user (partial parser {:ring/request {:session {:id #uuid"0000fb5e-a9d0-44b6-b293-bb3c506fc0cb"}}})] (behavior "process can not be added with" (assertions "malformed slug" (parser-existing-user [{`(process.mutations/add-process #::process{:slug "I AM NOT A CORRECT SLUG" :title "My Test-Title" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "empty title" (parser-existing-user [{(list `process.mutations/add-process #::process{:slug "correct-slug" :title "" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "missing field" (parser-existing-user [{(list `process.mutations/add-process #::process{:slug "correct-slug" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "string as end-date" (parser-existing-user [{(list `process.mutations/add-process #::process{:slug "correct-slug" :title "Bla" :description "foobar" :end-time "heute"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Failed validation!"}}}) "slug is already in use" 
(parser-existing-user [{(list `process.mutations/add-process #::process{:slug "test-decision" :title "Bla" :description "foobar"}) [::process/slug ::process/title ::process/description]}]) =check=> (_/embeds?* {`process.mutations/add-process {:com.fulcrologic.rad.pathom/errors {:message "Slug already in use"}}}))))) (specification "Moderator priviliges" (let [parser (pathom/build-parser {} *conn*) parser-with-alex-session (partial parser {:ring/request {:session {:id #uuid"000aa0e2-e4d6-463d-ae7c-46765e13a31b"}}})] (behavior "Only a moderator can" (behavior "add new moderators" (assertions (parser-with-alex-session [`(process.mutations/add-moderator {::process/slug "test-decision" ::user/email "PI:NAME:<NAME>END_PI"})]) =check=> (_/embeds?* {`process.mutations/add-moderator {:com.fulcrologic.rad.pathom/errors {:message "Need moderation role for this operation"}}})))))) (specification "Private processes" (let [parser (pathom/build-parser {} *conn*) parser-with-alex-session (partial parser {:ring/request {:session {:id #uuid"000aa0e2-e4d6-463d-ae7c-46765e13a31b"}}})] (behavior "A private process" (behavior "can not be queried by everyone" (assertions (parser-with-alex-session [{:root/all-processes [::process/slug]}]) => #:root{:all-processes [#:decide.models.process{:slug "test-decision"}]})))))
[ { "context": " [{:assignee [{:scalar [{:identifier [\"mary\"]}]}]}\n {:is-a [", "end": 1593, "score": 0.8944257497787476, "start": 1589, "tag": "NAME", "value": "mary" }, { "context": " [{:identifier [\"mary\"]}\n ", "end": 2047, "score": 0.7701767683029175, "start": 2043, "tag": "NAME", "value": "mary" }, { "context": " [{:assignee [{:scalar [{:identifier [\"isla\"]}]}]}\n {:is [", "end": 4582, "score": 0.5065579414367676, "start": 4580, "tag": "NAME", "value": "is" }, { "context": " [{:assignee [{:scalar [{:identifier [\"mary\"]}]}]}\n {:is [:i", "end": 4884, "score": 0.6563595533370972, "start": 4880, "tag": "NAME", "value": "mary" }, { "context": "e-ass-and-value-ass\n (check-ast (parse \"name is 'Isla'\\nmary is a girl\\nwrite name\")\n {:r", "end": 6020, "score": 0.5992544889450073, "start": 6018, "tag": "NAME", "value": "Is" }, { "context": " [{:assignee [{:scalar [{:identifier [\"mary\"]}]}]}\n {:is-a [", "end": 6526, "score": 0.7531620860099792, "start": 6522, "tag": "NAME", "value": "mary" } ]
test/isla/test/parser.clj
maryrosecook/islaclj
38
(ns isla.test.parser (:use [isla.parser]) (:use [clojure.test]) (:use [clojure.pprint]) (:require [mrc.utils :as utils])) (defmulti check-ast (fn [_ expected] (class expected))) (defmethod check-ast java.util.Map [actual expected] (def actual-tag (:tag actual)) (is (contains? expected actual-tag)) ;; check parent (check-ast (:c actual) (actual-tag expected))) ;; recurse sub tree (defmethod check-ast java.util.List [actual expected] (is (= (count actual) (count expected))) ;; if not same len, got a problem (doseq [[actual-node expected-tag] (map vector actual expected)] (check-ast actual-node expected-tag))) (defmethod check-ast :default [actual expected] ;; keyword, string, int (is (= actual expected))) ;; nnode (deftest nnode-create (is (= (nnode :integer [1]) {:tag :integer :c [1]}))) ;; slot assignment (deftest test-slot-assignment (check-ast (parse "isla age is 1") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:object [{:identifier ["isla"]} {:identifier ["age"]}]}]} {:is [:is]} {:value [{:literal [{:integer [1]}]}]}]}]}]}]})) ;; type assignment (deftest type-assignment (check-ast (parse "mary is a girl") {:root [{:block [{:expression [{:type-assignment [{:assignee [{:scalar [{:identifier ["mary"]}]}]} {:is-a [:is-a]} {:identifier ["girl"]}]}]}]}]})) ;; type assignment to slot (deftest test-type-assignment-to-slot (check-ast (parse "mary friend is a person") {:root [{:block [{:expression [{:type-assignment [{:assignee [{:object [{:identifier ["mary"]} {:identifier ["friend"]}]}]} {:is-a [:is-a]} {:identifier ["person"]}]}]}]}]})) ;; integers (deftest test-single-digit-integer (= (utils/extract (parse "mary is 1") [:c 0 :c 0 :c 0 :c 2 :c 0 :c 0 :c 0]) 1)) (deftest test-multiple-digit-integer (= (utils/extract (parse "mary is 12345") [:c 0 :c 0 :c 0 :c 2 :c 0 :c 0 :c 0]) 12345)) (deftest test-integer-cannot-start-with-zero (try (parse "mary is 02345") (is false) ;; shouldn't get called (catch Exception e (is (re-find #"Got lost at" (.getMessage 
e)))))) ;; assignment to primitive variable (deftest assignment-number (check-ast (parse "mary is 1") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["mary"]}]}]} {:is [:is]} {:value [{:literal [{:integer [1]}]}]}]}]}]}]})) (deftest assignment-identifier (check-ast (parse "isla is age") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["isla"]}]}]} {:is [:is]} {:value [{:variable [{:scalar [{:identifier ["age"]}]}]}]}]}]}]}]})) (deftest assignment-string (check-ast (parse "isla is 'cool'") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["isla"]}]}]} {:is [:is]} {:value [{:literal [{:string ["cool"]}]}]}]}]}]}]})) (deftest assignment-string-double (check-ast (parse "isla is \"cool\"") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["isla"]}]}]} {:is [:is]} {:value [{:literal [{:string ["cool"]}]}]}]}]}]}]})) ;; blocks (deftest two-expression-block (check-ast (parse "isla is 1\nmary is 2") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["isla"]}]}]} {:is [:is]} {:value [{:literal [{:integer [1]}]}]}]}]} {:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["mary"]}]}]} {:is [:is]} {:value [{:literal [{:integer [2]}]}]}]}]}]}]})) (deftest three-expression-block (check-ast (parse "name is 'Isla'\nwrite 'la'\nwrite name") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["name"]}]}]} {:is [:is]} {:value [{:literal [{:string ["Isla"]}]}]}]}]} {:expression [{:invocation [{:identifier ["write"]} {:value [{:literal [{:string ["la"]}]}]}]}]} {:expression [{:invocation [{:identifier ["write"]} {:value [{:variable [{:scalar [{:identifier ["name"]}]}]}]}]}]}]}]})) (deftest test-block-with-type-ass-and-value-ass (check-ast (parse "name is 'Isla'\nmary is a girl\nwrite name") {:root [{:block [{:expression [{:value-assignment [{:assignee 
[{:scalar [{:identifier ["name"]}]}]} {:is [:is]} {:value [{:literal [{:string ["Isla"]}]}]}]}]} {:expression [{:type-assignment [{:assignee [{:scalar [{:identifier ["mary"]}]}]} {:is-a [:is-a]} {:identifier ["girl"]}]}]} {:expression [{:invocation [{:identifier ["write"]} {:value [{:variable [{:scalar [{:identifier ["name"]}]}]}]}]}]}]}]})) ;; invocation (deftest invoke-fn-scalar-variable (check-ast (parse "write isla") {:root [{:block [{:expression [{:invocation [{:identifier ["write"]} {:value [{:variable [{:scalar [{:identifier ["isla"]}]}]}]}]}]}]}]})) (deftest invoke-fn-scalar-param (check-ast (parse "write 'isla'") {:root [{:block [{:expression [{:invocation [{:identifier ["write"]} {:value [{:literal [{:string ["isla"]}]}]}]}]}]}]})) (deftest invoke-fn-object-attribute-variable (check-ast (parse "write isla age") {:root [{:block [{:expression [{:invocation [{:identifier ["write"]} {:value [{:variable [{:object [{:identifier ["isla"]} {:identifier ["age"]}]}]}]}]}]}]}]})) (deftest test-write-string-regression (check-ast (parse "write 'My name Isla'") {:root [{:block [{:expression [{:invocation [{:identifier ["write"]} {:value [{:literal [{:string ["My name Isla"]}]}]}]}]}]}]})) ;; lists (deftest test-list-instantiation (let [expected-ast {:root [{:block [{:expression [{:type-assignment [{:assignee [{:scalar [{:identifier ["items"]}]}]} {:is-a [:is-a]} {:identifier ["list"]}]}]}]}]}] (check-ast (parse "items is a list") expected-ast))) (deftest test-list-add (let [expected-ast {:root [{:block [{:expression [{:list-assignment [{:list-operation [{:add [:add]}]} {:value [{:variable [{:scalar [{:identifier ["sword"]}]}]}]} {:to-from [:to-from]} {:assignee [{:scalar [{:identifier ["items"]}]}]}]}]}]}]}] (check-ast (parse "add sword to items") expected-ast))) (deftest test-list-remove (let [expected-ast {:root [{:block [{:expression [{:list-assignment [{:list-operation [{:remove [:remove]}]} {:value [{:variable [{:scalar [{:identifier ["sword"]}]}]}]} {:to-from 
[:to-from]} {:assignee [{:scalar [{:identifier ["items"]}]}]}]}]}]}]}] (check-ast (parse "remove sword from items") expected-ast)))
59840
(ns isla.test.parser (:use [isla.parser]) (:use [clojure.test]) (:use [clojure.pprint]) (:require [mrc.utils :as utils])) (defmulti check-ast (fn [_ expected] (class expected))) (defmethod check-ast java.util.Map [actual expected] (def actual-tag (:tag actual)) (is (contains? expected actual-tag)) ;; check parent (check-ast (:c actual) (actual-tag expected))) ;; recurse sub tree (defmethod check-ast java.util.List [actual expected] (is (= (count actual) (count expected))) ;; if not same len, got a problem (doseq [[actual-node expected-tag] (map vector actual expected)] (check-ast actual-node expected-tag))) (defmethod check-ast :default [actual expected] ;; keyword, string, int (is (= actual expected))) ;; nnode (deftest nnode-create (is (= (nnode :integer [1]) {:tag :integer :c [1]}))) ;; slot assignment (deftest test-slot-assignment (check-ast (parse "isla age is 1") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:object [{:identifier ["isla"]} {:identifier ["age"]}]}]} {:is [:is]} {:value [{:literal [{:integer [1]}]}]}]}]}]}]})) ;; type assignment (deftest type-assignment (check-ast (parse "mary is a girl") {:root [{:block [{:expression [{:type-assignment [{:assignee [{:scalar [{:identifier ["<NAME>"]}]}]} {:is-a [:is-a]} {:identifier ["girl"]}]}]}]}]})) ;; type assignment to slot (deftest test-type-assignment-to-slot (check-ast (parse "mary friend is a person") {:root [{:block [{:expression [{:type-assignment [{:assignee [{:object [{:identifier ["<NAME>"]} {:identifier ["friend"]}]}]} {:is-a [:is-a]} {:identifier ["person"]}]}]}]}]})) ;; integers (deftest test-single-digit-integer (= (utils/extract (parse "mary is 1") [:c 0 :c 0 :c 0 :c 2 :c 0 :c 0 :c 0]) 1)) (deftest test-multiple-digit-integer (= (utils/extract (parse "mary is 12345") [:c 0 :c 0 :c 0 :c 2 :c 0 :c 0 :c 0]) 12345)) (deftest test-integer-cannot-start-with-zero (try (parse "mary is 02345") (is false) ;; shouldn't get called (catch Exception e (is (re-find #"Got lost at" 
(.getMessage e)))))) ;; assignment to primitive variable (deftest assignment-number (check-ast (parse "mary is 1") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["mary"]}]}]} {:is [:is]} {:value [{:literal [{:integer [1]}]}]}]}]}]}]})) (deftest assignment-identifier (check-ast (parse "isla is age") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["isla"]}]}]} {:is [:is]} {:value [{:variable [{:scalar [{:identifier ["age"]}]}]}]}]}]}]}]})) (deftest assignment-string (check-ast (parse "isla is 'cool'") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["isla"]}]}]} {:is [:is]} {:value [{:literal [{:string ["cool"]}]}]}]}]}]}]})) (deftest assignment-string-double (check-ast (parse "isla is \"cool\"") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["isla"]}]}]} {:is [:is]} {:value [{:literal [{:string ["cool"]}]}]}]}]}]}]})) ;; blocks (deftest two-expression-block (check-ast (parse "isla is 1\nmary is 2") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["<NAME>la"]}]}]} {:is [:is]} {:value [{:literal [{:integer [1]}]}]}]}]} {:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["<NAME>"]}]}]} {:is [:is]} {:value [{:literal [{:integer [2]}]}]}]}]}]}]})) (deftest three-expression-block (check-ast (parse "name is 'Isla'\nwrite 'la'\nwrite name") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["name"]}]}]} {:is [:is]} {:value [{:literal [{:string ["Isla"]}]}]}]}]} {:expression [{:invocation [{:identifier ["write"]} {:value [{:literal [{:string ["la"]}]}]}]}]} {:expression [{:invocation [{:identifier ["write"]} {:value [{:variable [{:scalar [{:identifier ["name"]}]}]}]}]}]}]}]})) (deftest test-block-with-type-ass-and-value-ass (check-ast (parse "name is '<NAME>la'\nmary is a girl\nwrite name") {:root [{:block [{:expression 
[{:value-assignment [{:assignee [{:scalar [{:identifier ["name"]}]}]} {:is [:is]} {:value [{:literal [{:string ["Isla"]}]}]}]}]} {:expression [{:type-assignment [{:assignee [{:scalar [{:identifier ["<NAME>"]}]}]} {:is-a [:is-a]} {:identifier ["girl"]}]}]} {:expression [{:invocation [{:identifier ["write"]} {:value [{:variable [{:scalar [{:identifier ["name"]}]}]}]}]}]}]}]})) ;; invocation (deftest invoke-fn-scalar-variable (check-ast (parse "write isla") {:root [{:block [{:expression [{:invocation [{:identifier ["write"]} {:value [{:variable [{:scalar [{:identifier ["isla"]}]}]}]}]}]}]}]})) (deftest invoke-fn-scalar-param (check-ast (parse "write 'isla'") {:root [{:block [{:expression [{:invocation [{:identifier ["write"]} {:value [{:literal [{:string ["isla"]}]}]}]}]}]}]})) (deftest invoke-fn-object-attribute-variable (check-ast (parse "write isla age") {:root [{:block [{:expression [{:invocation [{:identifier ["write"]} {:value [{:variable [{:object [{:identifier ["isla"]} {:identifier ["age"]}]}]}]}]}]}]}]})) (deftest test-write-string-regression (check-ast (parse "write 'My name Isla'") {:root [{:block [{:expression [{:invocation [{:identifier ["write"]} {:value [{:literal [{:string ["My name Isla"]}]}]}]}]}]}]})) ;; lists (deftest test-list-instantiation (let [expected-ast {:root [{:block [{:expression [{:type-assignment [{:assignee [{:scalar [{:identifier ["items"]}]}]} {:is-a [:is-a]} {:identifier ["list"]}]}]}]}]}] (check-ast (parse "items is a list") expected-ast))) (deftest test-list-add (let [expected-ast {:root [{:block [{:expression [{:list-assignment [{:list-operation [{:add [:add]}]} {:value [{:variable [{:scalar [{:identifier ["sword"]}]}]}]} {:to-from [:to-from]} {:assignee [{:scalar [{:identifier ["items"]}]}]}]}]}]}]}] (check-ast (parse "add sword to items") expected-ast))) (deftest test-list-remove (let [expected-ast {:root [{:block [{:expression [{:list-assignment [{:list-operation [{:remove [:remove]}]} {:value [{:variable [{:scalar 
[{:identifier ["sword"]}]}]}]} {:to-from [:to-from]} {:assignee [{:scalar [{:identifier ["items"]}]}]}]}]}]}]}] (check-ast (parse "remove sword from items") expected-ast)))
true
(ns isla.test.parser (:use [isla.parser]) (:use [clojure.test]) (:use [clojure.pprint]) (:require [mrc.utils :as utils])) (defmulti check-ast (fn [_ expected] (class expected))) (defmethod check-ast java.util.Map [actual expected] (def actual-tag (:tag actual)) (is (contains? expected actual-tag)) ;; check parent (check-ast (:c actual) (actual-tag expected))) ;; recurse sub tree (defmethod check-ast java.util.List [actual expected] (is (= (count actual) (count expected))) ;; if not same len, got a problem (doseq [[actual-node expected-tag] (map vector actual expected)] (check-ast actual-node expected-tag))) (defmethod check-ast :default [actual expected] ;; keyword, string, int (is (= actual expected))) ;; nnode (deftest nnode-create (is (= (nnode :integer [1]) {:tag :integer :c [1]}))) ;; slot assignment (deftest test-slot-assignment (check-ast (parse "isla age is 1") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:object [{:identifier ["isla"]} {:identifier ["age"]}]}]} {:is [:is]} {:value [{:literal [{:integer [1]}]}]}]}]}]}]})) ;; type assignment (deftest type-assignment (check-ast (parse "mary is a girl") {:root [{:block [{:expression [{:type-assignment [{:assignee [{:scalar [{:identifier ["PI:NAME:<NAME>END_PI"]}]}]} {:is-a [:is-a]} {:identifier ["girl"]}]}]}]}]})) ;; type assignment to slot (deftest test-type-assignment-to-slot (check-ast (parse "mary friend is a person") {:root [{:block [{:expression [{:type-assignment [{:assignee [{:object [{:identifier ["PI:NAME:<NAME>END_PI"]} {:identifier ["friend"]}]}]} {:is-a [:is-a]} {:identifier ["person"]}]}]}]}]})) ;; integers (deftest test-single-digit-integer (= (utils/extract (parse "mary is 1") [:c 0 :c 0 :c 0 :c 2 :c 0 :c 0 :c 0]) 1)) (deftest test-multiple-digit-integer (= (utils/extract (parse "mary is 12345") [:c 0 :c 0 :c 0 :c 2 :c 0 :c 0 :c 0]) 12345)) (deftest test-integer-cannot-start-with-zero (try (parse "mary is 02345") (is false) ;; shouldn't get called (catch Exception e (is 
(re-find #"Got lost at" (.getMessage e)))))) ;; assignment to primitive variable (deftest assignment-number (check-ast (parse "mary is 1") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["mary"]}]}]} {:is [:is]} {:value [{:literal [{:integer [1]}]}]}]}]}]}]})) (deftest assignment-identifier (check-ast (parse "isla is age") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["isla"]}]}]} {:is [:is]} {:value [{:variable [{:scalar [{:identifier ["age"]}]}]}]}]}]}]}]})) (deftest assignment-string (check-ast (parse "isla is 'cool'") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["isla"]}]}]} {:is [:is]} {:value [{:literal [{:string ["cool"]}]}]}]}]}]}]})) (deftest assignment-string-double (check-ast (parse "isla is \"cool\"") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["isla"]}]}]} {:is [:is]} {:value [{:literal [{:string ["cool"]}]}]}]}]}]}]})) ;; blocks (deftest two-expression-block (check-ast (parse "isla is 1\nmary is 2") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["PI:NAME:<NAME>END_PIla"]}]}]} {:is [:is]} {:value [{:literal [{:integer [1]}]}]}]}]} {:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["PI:NAME:<NAME>END_PI"]}]}]} {:is [:is]} {:value [{:literal [{:integer [2]}]}]}]}]}]}]})) (deftest three-expression-block (check-ast (parse "name is 'Isla'\nwrite 'la'\nwrite name") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["name"]}]}]} {:is [:is]} {:value [{:literal [{:string ["Isla"]}]}]}]}]} {:expression [{:invocation [{:identifier ["write"]} {:value [{:literal [{:string ["la"]}]}]}]}]} {:expression [{:invocation [{:identifier ["write"]} {:value [{:variable [{:scalar [{:identifier ["name"]}]}]}]}]}]}]}]})) (deftest test-block-with-type-ass-and-value-ass (check-ast (parse "name is 
'PI:NAME:<NAME>END_PIla'\nmary is a girl\nwrite name") {:root [{:block [{:expression [{:value-assignment [{:assignee [{:scalar [{:identifier ["name"]}]}]} {:is [:is]} {:value [{:literal [{:string ["Isla"]}]}]}]}]} {:expression [{:type-assignment [{:assignee [{:scalar [{:identifier ["PI:NAME:<NAME>END_PI"]}]}]} {:is-a [:is-a]} {:identifier ["girl"]}]}]} {:expression [{:invocation [{:identifier ["write"]} {:value [{:variable [{:scalar [{:identifier ["name"]}]}]}]}]}]}]}]})) ;; invocation (deftest invoke-fn-scalar-variable (check-ast (parse "write isla") {:root [{:block [{:expression [{:invocation [{:identifier ["write"]} {:value [{:variable [{:scalar [{:identifier ["isla"]}]}]}]}]}]}]}]})) (deftest invoke-fn-scalar-param (check-ast (parse "write 'isla'") {:root [{:block [{:expression [{:invocation [{:identifier ["write"]} {:value [{:literal [{:string ["isla"]}]}]}]}]}]}]})) (deftest invoke-fn-object-attribute-variable (check-ast (parse "write isla age") {:root [{:block [{:expression [{:invocation [{:identifier ["write"]} {:value [{:variable [{:object [{:identifier ["isla"]} {:identifier ["age"]}]}]}]}]}]}]}]})) (deftest test-write-string-regression (check-ast (parse "write 'My name Isla'") {:root [{:block [{:expression [{:invocation [{:identifier ["write"]} {:value [{:literal [{:string ["My name Isla"]}]}]}]}]}]}]})) ;; lists (deftest test-list-instantiation (let [expected-ast {:root [{:block [{:expression [{:type-assignment [{:assignee [{:scalar [{:identifier ["items"]}]}]} {:is-a [:is-a]} {:identifier ["list"]}]}]}]}]}] (check-ast (parse "items is a list") expected-ast))) (deftest test-list-add (let [expected-ast {:root [{:block [{:expression [{:list-assignment [{:list-operation [{:add [:add]}]} {:value [{:variable [{:scalar [{:identifier ["sword"]}]}]}]} {:to-from [:to-from]} {:assignee [{:scalar [{:identifier ["items"]}]}]}]}]}]}]}] (check-ast (parse "add sword to items") expected-ast))) (deftest test-list-remove (let [expected-ast {:root [{:block [{:expression 
[{:list-assignment [{:list-operation [{:remove [:remove]}]} {:value [{:variable [{:scalar [{:identifier ["sword"]}]}]}]} {:to-from [:to-from]} {:assignee [{:scalar [{:identifier ["items"]}]}]}]}]}]}]}] (check-ast (parse "remove sword from items") expected-ast)))
[ { "context": "inding]))\n\n;;;\n;;; This code is a modified copy of Zachary Tellman's Proteus library\n;;; (https://github.com/ztellma", "end": 225, "score": 0.9998637437820435, "start": 210, "tag": "NAME", "value": "Zachary Tellman" }, { "context": "Tellman's Proteus library\n;;; (https://github.com/ztellman/proteus\n;;;\n\n(declare transform-let-mutable-form)", "end": 276, "score": 0.9990132451057434, "start": 268, "tag": "USERNAME", "value": "ztellman" } ]
src/luaclj/proteus.clj
smartrockstars/luaclj
8
(ns luaclj.proteus (:require [riddley.walk :refer (walk-exprs)] [riddley.compiler :refer (locals)]) (:import [clojure.lang Compiler$LocalBinding])) ;;; ;;; This code is a modified copy of Zachary Tellman's Proteus library ;;; (https://github.com/ztellman/proteus ;;; (declare transform-let-mutable-form) (defn- add-meta [obj m] (with-meta obj (merge (or (meta obj) {}) m))) (defn- key* [x] (when x (key x))) (defn- mutable-vars [] (->> (locals) keys (filter (comp ::write-form meta)))) (defn- read-form [x] (if (not (:tag (meta x))) (-> (locals) (find x) key* meta ::read-form) nil)) (defn- write-form [x] (-> (locals) (find x) key* meta ::write-form)) (defn- transform-predicate [x] (or (symbol? x) (and (seq? x) (or (#{'letfn* 'set!} (first x)) (and (= 'fn* (first x)) (:local (meta x))))))) (defn- transform-handler [x] ;(println "handler:" x "meta:" (meta x)) (cond (:processed (meta x)) x (symbol? x) (or (read-form x) x) (= (first x) 'set!) (let [[_ k v] x] (if-let [f (write-form k)] (f (transform-let-mutable-form v)) x)) (= (first x) 'fn*) (let [vs (seq (mutable-vars))] `(let [~@(interleave vs (map read-form vs))] ~(with-meta x {:processed true}))) (= (first x) 'letfn*) (let [[_ bindings & body] x vs (seq (mutable-vars)) vs' (map #(gensym (name %)) vs)] `(let [~@(interleave vs' vs) ~@(interleave vs (map read-form vs))] (~'letfn* ~bindings (let [~@(interleave vs vs')] ~(transform-let-mutable-form `(do ~@body)))))) :else x)) (defn- transform-let-mutable-form [x] (walk-exprs transform-predicate transform-handler x)) ;;; (defn- typeof [x env] (if-let [^Compiler$LocalBinding binding (get env x)] (when (.hasJavaClass binding) (.getJavaClass binding)) (cond (instance? Boolean x) Boolean/TYPE (instance? Long x) Long/TYPE (instance? Double x) Double/TYPE))) (defmacro let-mutable "Acts as a let-binding for variables that can be modified with `set!`. (let-mutable [x 0] (dotimes [_ 100] (set! 
x (inc x))) x) The mutable variable cannot escape the scope in which it's defined; if the variable is closed over, the current value will be captured. Wherever possible, unboxed numbers are used, giving significantly better performance than `clojure.core/with-local-vars`." [bindings & body] (let [ks (->> bindings (partition 2) (map first)) vs (->> bindings (partition 2) (map second)) types (map #(condp = (typeof % &env) Boolean/TYPE "luaclj.proteus.Containers$B" Long/TYPE "luaclj.proteus.Containers$L" Double/TYPE "luaclj.proteus.Containers$D" "luaclj.proteus.Containers$O") vs) ks (map (fn [k type] (with-meta k {:tag type})) ks types)] (transform-let-mutable-form `(let [~@(interleave (map (fn [k] (with-meta k {::read-form `(.x ~k) ::write-form (fn [x] `(do (.set ~k ~x) nil))})) ks) (map (fn [v type] `(new ~(symbol type) ~v)) vs types))] ~@body))))
125120
(ns luaclj.proteus (:require [riddley.walk :refer (walk-exprs)] [riddley.compiler :refer (locals)]) (:import [clojure.lang Compiler$LocalBinding])) ;;; ;;; This code is a modified copy of <NAME>'s Proteus library ;;; (https://github.com/ztellman/proteus ;;; (declare transform-let-mutable-form) (defn- add-meta [obj m] (with-meta obj (merge (or (meta obj) {}) m))) (defn- key* [x] (when x (key x))) (defn- mutable-vars [] (->> (locals) keys (filter (comp ::write-form meta)))) (defn- read-form [x] (if (not (:tag (meta x))) (-> (locals) (find x) key* meta ::read-form) nil)) (defn- write-form [x] (-> (locals) (find x) key* meta ::write-form)) (defn- transform-predicate [x] (or (symbol? x) (and (seq? x) (or (#{'letfn* 'set!} (first x)) (and (= 'fn* (first x)) (:local (meta x))))))) (defn- transform-handler [x] ;(println "handler:" x "meta:" (meta x)) (cond (:processed (meta x)) x (symbol? x) (or (read-form x) x) (= (first x) 'set!) (let [[_ k v] x] (if-let [f (write-form k)] (f (transform-let-mutable-form v)) x)) (= (first x) 'fn*) (let [vs (seq (mutable-vars))] `(let [~@(interleave vs (map read-form vs))] ~(with-meta x {:processed true}))) (= (first x) 'letfn*) (let [[_ bindings & body] x vs (seq (mutable-vars)) vs' (map #(gensym (name %)) vs)] `(let [~@(interleave vs' vs) ~@(interleave vs (map read-form vs))] (~'letfn* ~bindings (let [~@(interleave vs vs')] ~(transform-let-mutable-form `(do ~@body)))))) :else x)) (defn- transform-let-mutable-form [x] (walk-exprs transform-predicate transform-handler x)) ;;; (defn- typeof [x env] (if-let [^Compiler$LocalBinding binding (get env x)] (when (.hasJavaClass binding) (.getJavaClass binding)) (cond (instance? Boolean x) Boolean/TYPE (instance? Long x) Long/TYPE (instance? Double x) Double/TYPE))) (defmacro let-mutable "Acts as a let-binding for variables that can be modified with `set!`. (let-mutable [x 0] (dotimes [_ 100] (set! 
x (inc x))) x) The mutable variable cannot escape the scope in which it's defined; if the variable is closed over, the current value will be captured. Wherever possible, unboxed numbers are used, giving significantly better performance than `clojure.core/with-local-vars`." [bindings & body] (let [ks (->> bindings (partition 2) (map first)) vs (->> bindings (partition 2) (map second)) types (map #(condp = (typeof % &env) Boolean/TYPE "luaclj.proteus.Containers$B" Long/TYPE "luaclj.proteus.Containers$L" Double/TYPE "luaclj.proteus.Containers$D" "luaclj.proteus.Containers$O") vs) ks (map (fn [k type] (with-meta k {:tag type})) ks types)] (transform-let-mutable-form `(let [~@(interleave (map (fn [k] (with-meta k {::read-form `(.x ~k) ::write-form (fn [x] `(do (.set ~k ~x) nil))})) ks) (map (fn [v type] `(new ~(symbol type) ~v)) vs types))] ~@body))))
true
(ns luaclj.proteus (:require [riddley.walk :refer (walk-exprs)] [riddley.compiler :refer (locals)]) (:import [clojure.lang Compiler$LocalBinding])) ;;; ;;; This code is a modified copy of PI:NAME:<NAME>END_PI's Proteus library ;;; (https://github.com/ztellman/proteus ;;; (declare transform-let-mutable-form) (defn- add-meta [obj m] (with-meta obj (merge (or (meta obj) {}) m))) (defn- key* [x] (when x (key x))) (defn- mutable-vars [] (->> (locals) keys (filter (comp ::write-form meta)))) (defn- read-form [x] (if (not (:tag (meta x))) (-> (locals) (find x) key* meta ::read-form) nil)) (defn- write-form [x] (-> (locals) (find x) key* meta ::write-form)) (defn- transform-predicate [x] (or (symbol? x) (and (seq? x) (or (#{'letfn* 'set!} (first x)) (and (= 'fn* (first x)) (:local (meta x))))))) (defn- transform-handler [x] ;(println "handler:" x "meta:" (meta x)) (cond (:processed (meta x)) x (symbol? x) (or (read-form x) x) (= (first x) 'set!) (let [[_ k v] x] (if-let [f (write-form k)] (f (transform-let-mutable-form v)) x)) (= (first x) 'fn*) (let [vs (seq (mutable-vars))] `(let [~@(interleave vs (map read-form vs))] ~(with-meta x {:processed true}))) (= (first x) 'letfn*) (let [[_ bindings & body] x vs (seq (mutable-vars)) vs' (map #(gensym (name %)) vs)] `(let [~@(interleave vs' vs) ~@(interleave vs (map read-form vs))] (~'letfn* ~bindings (let [~@(interleave vs vs')] ~(transform-let-mutable-form `(do ~@body)))))) :else x)) (defn- transform-let-mutable-form [x] (walk-exprs transform-predicate transform-handler x)) ;;; (defn- typeof [x env] (if-let [^Compiler$LocalBinding binding (get env x)] (when (.hasJavaClass binding) (.getJavaClass binding)) (cond (instance? Boolean x) Boolean/TYPE (instance? Long x) Long/TYPE (instance? Double x) Double/TYPE))) (defmacro let-mutable "Acts as a let-binding for variables that can be modified with `set!`. (let-mutable [x 0] (dotimes [_ 100] (set! 
x (inc x))) x) The mutable variable cannot escape the scope in which it's defined; if the variable is closed over, the current value will be captured. Wherever possible, unboxed numbers are used, giving significantly better performance than `clojure.core/with-local-vars`." [bindings & body] (let [ks (->> bindings (partition 2) (map first)) vs (->> bindings (partition 2) (map second)) types (map #(condp = (typeof % &env) Boolean/TYPE "luaclj.proteus.Containers$B" Long/TYPE "luaclj.proteus.Containers$L" Double/TYPE "luaclj.proteus.Containers$D" "luaclj.proteus.Containers$O") vs) ks (map (fn [k type] (with-meta k {:tag type})) ks types)] (transform-let-mutable-form `(let [~@(interleave (map (fn [k] (with-meta k {::read-form `(.x ~k) ::write-form (fn [x] `(do (.set ~k ~x) nil))})) ks) (map (fn [v type] `(new ~(symbol type) ~v)) vs types))] ~@body))))
[ { "context": " :Username username\n :Passwor", "end": 4415, "score": 0.9992349147796631, "start": 4407, "tag": "USERNAME", "value": "username" }, { "context": " :Password password\n :UserAtt", "end": 4475, "score": 0.9986844658851624, "start": 4467, "tag": "PASSWORD", "value": "password" }, { "context": " :Value username}])\n (clj->", "end": 4624, "score": 0.9666287899017334, "start": 4616, "tag": "USERNAME", "value": "username" }, { "context": "))))))\n\n(fx/reg-fx\n :amplify-verify\n (fn [{:keys [username code on-success on-failure] :as par}]\n (let [co", "end": 5635, "score": 0.9981111288070679, "start": 5627, "tag": "USERNAME", "value": "username" }, { "context": " :Username username\n :Confirm", "end": 6066, "score": 0.9990577697753906, "start": 6058, "tag": "USERNAME", "value": "username" }, { "context": " :AuthParameters {:USERNAME username\n ", "end": 7588, "score": 0.9892733693122864, "start": 7580, "tag": "USERNAME", "value": "username" }, { "context": " :PASSWORD password})\n (clj->j", "end": 7665, "score": 0.9992671608924866, "start": 7657, "tag": "PASSWORD", "value": "password" }, { "context": "fx/reg-fx\n :amplify-forgot-password\n (fn [{:keys [username on-success on-failure] :as par}]\n (let [config ", "end": 10851, "score": 0.9076935648918152, "start": 10843, "tag": "USERNAME", "value": "username" }, { "context": " :Username username)\n (clj->js", "end": 11277, "score": 0.9851182103157043, "start": 11269, "tag": "USERNAME", "value": "username" }, { "context": "x\n :amplify-conform-forgot-password\n (fn [{:keys [username password code on-success on-failure] :as par}]\n ", "end": 12238, "score": 0.9924321174621582, "start": 12230, "tag": "USERNAME", "value": "username" }, { "context": "ify-conform-forgot-password\n (fn [{:keys [username password code on-success on-failure] :as par}]\n (let [co", "end": 12247, "score": 0.7823870182037354, "start": 12239, "tag": "PASSWORD", "value": "password" }, { "context": " :Username username\n :Confirm", "end": 
12678, "score": 0.9976800084114075, "start": 12670, "tag": "USERNAME", "value": "username" }, { "context": " :Password password)\n (clj->js", "end": 12802, "score": 0.9981270432472229, "start": 12794, "tag": "PASSWORD", "value": "password" }, { "context": "\n :amplify-resend-confirmation-code\n (fn [{:keys [username on-success on-failure]}]\n (let [config (get-con", "end": 13771, "score": 0.9882488250732422, "start": 13763, "tag": "USERNAME", "value": "username" }, { "context": " :Username username)\n (clj->js", "end": 14189, "score": 0.9977061748504639, "start": 14181, "tag": "USERNAME", "value": "username" } ]
src/widget/login/core.cljs
alpha-prosoft/edd-core-web
0
(ns widget.login.core (:require [re-frame.fx :as fx] [clojure.string :as str] [re-frame.core :as rf] [edd.events :as edd-events] [widget.login.i18n :as i18n])) (defn init [] (rf/dispatch [:initialize-login-db]) (rf/dispatch [::edd-events/add-translation i18n/tr])) (defn get-config [] (let [config (js->clj (.-eddconfig js/window) :keywordize-keys true) oauth {:userPoolId (get config :AuthUserPoolId) :domain (get config :AuthUserPoolDomain) :scope ["email" "openid"] :redirectSignIn "http://localhost:3000/" :responseType "code" :user-pool-web-client-id (get config :AuthUserPoolClientId) :region (get config :Region "eu-central-1") :authenticationFlowType "USER_PASSWORD_AUTH"}] oauth)) (def known-messages [{:type "InvalidParameterException" :message "2 validation errors detected: Value at 'password' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[\\S]+.*[\\S]+$; Value at 'password' failed to satisfy constraint: Member must have length greater than or equal to 6" :search "Value at 'password' failed to satisfy constraint" :key :invalid-password} {:type "InvalidPasswordException" :message "Password did not conform with policy: Password must have uppercase characters" :search "Password did not conform with policy" :key :invalid-password} {:type "InvalidPasswordException" :message "Password did not conform with policy: Password must have numeric characters" :search "Password did not conform with policy" :key :invalid-password} {:type "InvalidParameterException" :message "1 validation error detected: Value at 'password' failed to satisfy constraint: Member must have length greater than or equal to 6" :search "Value at 'password' failed to satisfy constraint" :key :invalid-password} {:type "UsernameExistsException" :message "User already exists" :search "User already exists" :key :user-exists} {:type "InvalidParameterException" :message "Invalid email address format." :search "Invalid email address format." 
:key :invalid-email} {:message "Incorrect username or password." :search "Incorrect username or password." :type "NotAuthorizedException" :key :invalid-credentials} {:message "Invalid code provided, please request a code again." :type "ExpiredCodeException" :key :code-expired :search "Invalid code provided, please request a code again."} {:message "Attempt limit exceeded, please try after some time." :type "LimitExceededException" :search "Attempt limit exceeded, please try after some time." :key :attempt-limit-exceeded} {:message "Invalid verification code provided, please try again." :type "CodeMismatchException" :search "Invalid verification code provided" :key :invalid-code} {:message "Missing required parameter USERNAME" :type "InvalidParameterException" :search "Missing required parameter USERNAME" :key :missing-username} {:message "Missing required parameter PASSWORD" :type "InvalidParameterException" :search "Missing required parameter PASSWORD" :key :missing-password}]) (defn match-error-message [body] (let [message (.-message body) message-type (aget body "__type")] {:message (get (first (filter (fn [{:keys [search type]}] (and (= type message-type) (str/includes? message search))) known-messages)) :key) :type message-type})) (fx/reg-fx :amplify-register (fn [{:keys [username password on-success on-failure]}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." 
"eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username :Password password :UserAttributes [{:Name "email" :Value username}]) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.SignUp" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch [:bla (js->clj % :keywordize-keys true)]) (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-verify (fn [{:keys [username code on-success on-failure] :as par}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." "eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username :ConfirmationCode code) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.ConfirmSignUp" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-login (fn [{:keys [username password on-success on-failure]}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." 
"eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :AuthFlow "USER_PASSWORD_AUTH" :AuthParameters {:USERNAME username :PASSWORD password}) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.InitiateAuth" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (let [response (-> % (js->clj :keywordize-keys true) (:AuthenticationResult)) auth {:id-token (:IdToken response) :refresh-token (:RefreshToken response) :access-token (:AccessToken response)} auth-string (.stringify js/JSON (clj->js auth))] (-> js/window (.-localStorage) (.setItem "auth" auth-string)) (rf/dispatch (conj on-success auth))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-refresh-credentials (fn [{:keys [on-success]}] (let [config (get-config) auth-string (-> js/window (.-localStorage) (.getItem "auth")) auth (-> (.parse js/JSON auth-string) (js->clj :keywordize-keys true)) refresh-token (:refresh-token auth)] (when refresh-token (-> (.fetch js/window (str "https://" (:domain config) "/oauth2/token") (clj->js {:method "POST" :headers {"Content-Type" "application/x-www-form-urlencoded"} :body (str "grant_type=refresh_token&" "client_id=" (:user-pool-web-client-id config) "&" "refresh_token=" refresh-token)})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (-> (.text %) (.then (fn [body] (-> js/window (.-localStorage) (.setItem "auth" "{}"))))) (.json %))))) (.then (fn [%] (let [response (-> % (js->clj :keywordize-keys true) (:id_token)) auth {:id-token response}] (rf/dispatch (conj on-success auth)))))))))) (fx/reg-fx :amplify-forgot-password (fn [{:keys [username on-success on-failure] :as par}] (let [config (get-config)] (-> (.fetch js/window (str 
"https://cognito-idp." "eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.ForgotPassword" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-conform-forgot-password (fn [{:keys [username password code on-success on-failure] :as par}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." "eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username :ConfirmationCode code :Password password) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.ConfirmForgotPassword" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-resend-confirmation-code (fn [{:keys [username on-success on-failure]}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." 
"eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.ResendConfirmationCode" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-logout (fn [] (-> js/window (.-localStorage) (.setItem "auth" "{}"))))
124862
(ns widget.login.core (:require [re-frame.fx :as fx] [clojure.string :as str] [re-frame.core :as rf] [edd.events :as edd-events] [widget.login.i18n :as i18n])) (defn init [] (rf/dispatch [:initialize-login-db]) (rf/dispatch [::edd-events/add-translation i18n/tr])) (defn get-config [] (let [config (js->clj (.-eddconfig js/window) :keywordize-keys true) oauth {:userPoolId (get config :AuthUserPoolId) :domain (get config :AuthUserPoolDomain) :scope ["email" "openid"] :redirectSignIn "http://localhost:3000/" :responseType "code" :user-pool-web-client-id (get config :AuthUserPoolClientId) :region (get config :Region "eu-central-1") :authenticationFlowType "USER_PASSWORD_AUTH"}] oauth)) (def known-messages [{:type "InvalidParameterException" :message "2 validation errors detected: Value at 'password' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[\\S]+.*[\\S]+$; Value at 'password' failed to satisfy constraint: Member must have length greater than or equal to 6" :search "Value at 'password' failed to satisfy constraint" :key :invalid-password} {:type "InvalidPasswordException" :message "Password did not conform with policy: Password must have uppercase characters" :search "Password did not conform with policy" :key :invalid-password} {:type "InvalidPasswordException" :message "Password did not conform with policy: Password must have numeric characters" :search "Password did not conform with policy" :key :invalid-password} {:type "InvalidParameterException" :message "1 validation error detected: Value at 'password' failed to satisfy constraint: Member must have length greater than or equal to 6" :search "Value at 'password' failed to satisfy constraint" :key :invalid-password} {:type "UsernameExistsException" :message "User already exists" :search "User already exists" :key :user-exists} {:type "InvalidParameterException" :message "Invalid email address format." :search "Invalid email address format." 
:key :invalid-email} {:message "Incorrect username or password." :search "Incorrect username or password." :type "NotAuthorizedException" :key :invalid-credentials} {:message "Invalid code provided, please request a code again." :type "ExpiredCodeException" :key :code-expired :search "Invalid code provided, please request a code again."} {:message "Attempt limit exceeded, please try after some time." :type "LimitExceededException" :search "Attempt limit exceeded, please try after some time." :key :attempt-limit-exceeded} {:message "Invalid verification code provided, please try again." :type "CodeMismatchException" :search "Invalid verification code provided" :key :invalid-code} {:message "Missing required parameter USERNAME" :type "InvalidParameterException" :search "Missing required parameter USERNAME" :key :missing-username} {:message "Missing required parameter PASSWORD" :type "InvalidParameterException" :search "Missing required parameter PASSWORD" :key :missing-password}]) (defn match-error-message [body] (let [message (.-message body) message-type (aget body "__type")] {:message (get (first (filter (fn [{:keys [search type]}] (and (= type message-type) (str/includes? message search))) known-messages)) :key) :type message-type})) (fx/reg-fx :amplify-register (fn [{:keys [username password on-success on-failure]}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." 
"eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username :Password <PASSWORD> :UserAttributes [{:Name "email" :Value username}]) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.SignUp" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch [:bla (js->clj % :keywordize-keys true)]) (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-verify (fn [{:keys [username code on-success on-failure] :as par}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." "eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username :ConfirmationCode code) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.ConfirmSignUp" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-login (fn [{:keys [username password on-success on-failure]}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." 
"eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :AuthFlow "USER_PASSWORD_AUTH" :AuthParameters {:USERNAME username :PASSWORD <PASSWORD>}) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.InitiateAuth" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (let [response (-> % (js->clj :keywordize-keys true) (:AuthenticationResult)) auth {:id-token (:IdToken response) :refresh-token (:RefreshToken response) :access-token (:AccessToken response)} auth-string (.stringify js/JSON (clj->js auth))] (-> js/window (.-localStorage) (.setItem "auth" auth-string)) (rf/dispatch (conj on-success auth))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-refresh-credentials (fn [{:keys [on-success]}] (let [config (get-config) auth-string (-> js/window (.-localStorage) (.getItem "auth")) auth (-> (.parse js/JSON auth-string) (js->clj :keywordize-keys true)) refresh-token (:refresh-token auth)] (when refresh-token (-> (.fetch js/window (str "https://" (:domain config) "/oauth2/token") (clj->js {:method "POST" :headers {"Content-Type" "application/x-www-form-urlencoded"} :body (str "grant_type=refresh_token&" "client_id=" (:user-pool-web-client-id config) "&" "refresh_token=" refresh-token)})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (-> (.text %) (.then (fn [body] (-> js/window (.-localStorage) (.setItem "auth" "{}"))))) (.json %))))) (.then (fn [%] (let [response (-> % (js->clj :keywordize-keys true) (:id_token)) auth {:id-token response}] (rf/dispatch (conj on-success auth)))))))))) (fx/reg-fx :amplify-forgot-password (fn [{:keys [username on-success on-failure] :as par}] (let [config (get-config)] (-> (.fetch js/window (str 
"https://cognito-idp." "eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.ForgotPassword" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-conform-forgot-password (fn [{:keys [username <PASSWORD> code on-success on-failure] :as par}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." "eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username :ConfirmationCode code :Password <PASSWORD>) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.ConfirmForgotPassword" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-resend-confirmation-code (fn [{:keys [username on-success on-failure]}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." 
"eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.ResendConfirmationCode" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-logout (fn [] (-> js/window (.-localStorage) (.setItem "auth" "{}"))))
true
(ns widget.login.core (:require [re-frame.fx :as fx] [clojure.string :as str] [re-frame.core :as rf] [edd.events :as edd-events] [widget.login.i18n :as i18n])) (defn init [] (rf/dispatch [:initialize-login-db]) (rf/dispatch [::edd-events/add-translation i18n/tr])) (defn get-config [] (let [config (js->clj (.-eddconfig js/window) :keywordize-keys true) oauth {:userPoolId (get config :AuthUserPoolId) :domain (get config :AuthUserPoolDomain) :scope ["email" "openid"] :redirectSignIn "http://localhost:3000/" :responseType "code" :user-pool-web-client-id (get config :AuthUserPoolClientId) :region (get config :Region "eu-central-1") :authenticationFlowType "USER_PASSWORD_AUTH"}] oauth)) (def known-messages [{:type "InvalidParameterException" :message "2 validation errors detected: Value at 'password' failed to satisfy constraint: Member must satisfy regular expression pattern: ^[\\S]+.*[\\S]+$; Value at 'password' failed to satisfy constraint: Member must have length greater than or equal to 6" :search "Value at 'password' failed to satisfy constraint" :key :invalid-password} {:type "InvalidPasswordException" :message "Password did not conform with policy: Password must have uppercase characters" :search "Password did not conform with policy" :key :invalid-password} {:type "InvalidPasswordException" :message "Password did not conform with policy: Password must have numeric characters" :search "Password did not conform with policy" :key :invalid-password} {:type "InvalidParameterException" :message "1 validation error detected: Value at 'password' failed to satisfy constraint: Member must have length greater than or equal to 6" :search "Value at 'password' failed to satisfy constraint" :key :invalid-password} {:type "UsernameExistsException" :message "User already exists" :search "User already exists" :key :user-exists} {:type "InvalidParameterException" :message "Invalid email address format." :search "Invalid email address format." 
:key :invalid-email} {:message "Incorrect username or password." :search "Incorrect username or password." :type "NotAuthorizedException" :key :invalid-credentials} {:message "Invalid code provided, please request a code again." :type "ExpiredCodeException" :key :code-expired :search "Invalid code provided, please request a code again."} {:message "Attempt limit exceeded, please try after some time." :type "LimitExceededException" :search "Attempt limit exceeded, please try after some time." :key :attempt-limit-exceeded} {:message "Invalid verification code provided, please try again." :type "CodeMismatchException" :search "Invalid verification code provided" :key :invalid-code} {:message "Missing required parameter USERNAME" :type "InvalidParameterException" :search "Missing required parameter USERNAME" :key :missing-username} {:message "Missing required parameter PASSWORD" :type "InvalidParameterException" :search "Missing required parameter PASSWORD" :key :missing-password}]) (defn match-error-message [body] (let [message (.-message body) message-type (aget body "__type")] {:message (get (first (filter (fn [{:keys [search type]}] (and (= type message-type) (str/includes? message search))) known-messages)) :key) :type message-type})) (fx/reg-fx :amplify-register (fn [{:keys [username password on-success on-failure]}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." 
"eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username :Password PI:PASSWORD:<PASSWORD>END_PI :UserAttributes [{:Name "email" :Value username}]) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.SignUp" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch [:bla (js->clj % :keywordize-keys true)]) (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-verify (fn [{:keys [username code on-success on-failure] :as par}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." "eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username :ConfirmationCode code) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.ConfirmSignUp" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-login (fn [{:keys [username password on-success on-failure]}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." 
"eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :AuthFlow "USER_PASSWORD_AUTH" :AuthParameters {:USERNAME username :PASSWORD PI:PASSWORD:<PASSWORD>END_PI}) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.InitiateAuth" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (let [response (-> % (js->clj :keywordize-keys true) (:AuthenticationResult)) auth {:id-token (:IdToken response) :refresh-token (:RefreshToken response) :access-token (:AccessToken response)} auth-string (.stringify js/JSON (clj->js auth))] (-> js/window (.-localStorage) (.setItem "auth" auth-string)) (rf/dispatch (conj on-success auth))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-refresh-credentials (fn [{:keys [on-success]}] (let [config (get-config) auth-string (-> js/window (.-localStorage) (.getItem "auth")) auth (-> (.parse js/JSON auth-string) (js->clj :keywordize-keys true)) refresh-token (:refresh-token auth)] (when refresh-token (-> (.fetch js/window (str "https://" (:domain config) "/oauth2/token") (clj->js {:method "POST" :headers {"Content-Type" "application/x-www-form-urlencoded"} :body (str "grant_type=refresh_token&" "client_id=" (:user-pool-web-client-id config) "&" "refresh_token=" refresh-token)})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (-> (.text %) (.then (fn [body] (-> js/window (.-localStorage) (.setItem "auth" "{}"))))) (.json %))))) (.then (fn [%] (let [response (-> % (js->clj :keywordize-keys true) (:id_token)) auth {:id-token response}] (rf/dispatch (conj on-success auth)))))))))) (fx/reg-fx :amplify-forgot-password (fn [{:keys [username on-success on-failure] :as par}] (let [config (get-config)] (-> (.fetch js/window 
(str "https://cognito-idp." "eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.ForgotPassword" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-conform-forgot-password (fn [{:keys [username PI:PASSWORD:<PASSWORD>END_PI code on-success on-failure] :as par}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." "eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username :ConfirmationCode code :Password PI:PASSWORD:<PASSWORD>END_PI) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.ConfirmForgotPassword" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-resend-confirmation-code (fn [{:keys [username on-success on-failure]}] (let [config (get-config)] (-> (.fetch js/window (str "https://cognito-idp." 
"eu-west-1" ".amazonaws.com") (clj->js {:method "POST" :body (-> {} (assoc :ClientId (:user-pool-web-client-id config) :Username username) (clj->js) (#(.stringify js/JSON %))) :headers {"X-Amz-Target" "AWSCognitoIdentityProviderService.ResendConfirmationCode" "Content-Type" "application/x-amz-json-1.1"}})) (.then (fn [%] (let [status (.-status %)] (if (> status 299) (throw (ex-info status %)) (.json %))))) (.then (fn [%] (rf/dispatch (conj on-success (js->clj % :keywordize-keys true))))) (.catch (fn [e] (-> (ex-data e) (.json) (.then (fn [body] (rf/dispatch (conj on-failure (match-error-message body)))))))))))) (fx/reg-fx :amplify-logout (fn [] (-> js/window (.-localStorage) (.setItem "auth" "{}"))))
[ { "context": ":bob :a :2} @(future (q))))\n (is (= {:core :charlie :a :2} @(future (q))))\n (is (= {:core :alic", "end": 1747, "score": 0.9258917570114136, "start": 1740, "tag": "NAME", "value": "charlie" }, { "context": "rlie :a :2} @(future (q))))\n (is (= {:core :alice :a :3} @(future (q))))\n (is (= {:core :char", "end": 1798, "score": 0.9976179003715515, "start": 1793, "tag": "NAME", "value": "alice" }, { "context": "lice :a :3} @(future (q))))\n (is (= {:core :charlie :a :3} @(future (q))))\n (is (= {:core :char", "end": 1851, "score": 0.8458478450775146, "start": 1844, "tag": "NAME", "value": "charlie" }, { "context": "rlie :a :3} @(future (q))))\n (is (= {:core :charlie :a :4} @(future (q))))\n (is (= 0 (count q))", "end": 1904, "score": 0.9074752330780029, "start": 1897, "tag": "NAME", "value": "charlie" } ]
test/com/acrolinx/clj_queue_by_stress_test.clj
acrolinx/clj-queue-by
16
;; Copyright 2017-2019 Acrolinx GmbH ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or ;; implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. (ns com.acrolinx.clj-queue-by-stress-test (:require [clojure.test :refer [deftest testing is]] [com.acrolinx.clj-queue-by :as tt])) ;; FIXME: I'd feel better to have some real-world multi-threaded test ;; here and run it a 1000 times or so. (deftest queue-multi-threaded-basic-test (let [q (tt/queue-by :core)] (testing "Adding many and get them back as expected" (future (q {:core :alice :a :1}) (q {:core :bob :a :1}) (q {:core :alice :a :2}) (q {:core :alice :a :3}) (q {:core :bob :a :2}) (q {:core :charlie :a :1}) (q {:core :charlie :a :2}) (q {:core :charlie :a :3}) (q {:core :charlie :a :4})) ;; wait for the future to start (Thread/sleep 100) ;; all blocking on the deref of the future (is (= {:core :alice :a :1} @(future (q)))) (is (= {:core :bob :a :1} @(future (q)))) (is (= {:core :charlie :a :1} @(future (q)))) (is (= {:core :alice :a :2} @(future (q)))) (is (= {:core :bob :a :2} @(future (q)))) (is (= {:core :charlie :a :2} @(future (q)))) (is (= {:core :alice :a :3} @(future (q)))) (is (= {:core :charlie :a :3} @(future (q)))) (is (= {:core :charlie :a :4} @(future (q)))) (is (= 0 (count q)))))) (defn stress-reader [q max-reads] (let [cnt (atom 0) succ (atom {"t1" 0 "t2" 0 "t3" 0})] (loop [it nil] (swap! cnt inc) (cond (< max-reads @cnt) @succ (not (nil? it)) (do (swap! 
succ update-in [(:name it)] inc) ;; (printf "<%s" (:id it)) (recur (q))) :else (do ;; (Thread/sleep 1) (recur (q))))))) (defn stress-writer [q name n] (dotimes [i n] (q {:name name :id i}) ;; (printf ">%s" i) )) (defn stress-main [] (let [q (tt/queue-by :name 2800) r (future (stress-reader q 9000)) w1 (future (stress-writer q "t1" 900)) w2 (future (stress-writer q "t2" 900)) w3 (future (stress-writer q "t3" 900))] (= @r {"t1" 900, "t2" 900, "t3" 900}))) (deftest stress-test (is (stress-main)))
92830
;; Copyright 2017-2019 Acrolinx GmbH ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or ;; implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. (ns com.acrolinx.clj-queue-by-stress-test (:require [clojure.test :refer [deftest testing is]] [com.acrolinx.clj-queue-by :as tt])) ;; FIXME: I'd feel better to have some real-world multi-threaded test ;; here and run it a 1000 times or so. (deftest queue-multi-threaded-basic-test (let [q (tt/queue-by :core)] (testing "Adding many and get them back as expected" (future (q {:core :alice :a :1}) (q {:core :bob :a :1}) (q {:core :alice :a :2}) (q {:core :alice :a :3}) (q {:core :bob :a :2}) (q {:core :charlie :a :1}) (q {:core :charlie :a :2}) (q {:core :charlie :a :3}) (q {:core :charlie :a :4})) ;; wait for the future to start (Thread/sleep 100) ;; all blocking on the deref of the future (is (= {:core :alice :a :1} @(future (q)))) (is (= {:core :bob :a :1} @(future (q)))) (is (= {:core :charlie :a :1} @(future (q)))) (is (= {:core :alice :a :2} @(future (q)))) (is (= {:core :bob :a :2} @(future (q)))) (is (= {:core :<NAME> :a :2} @(future (q)))) (is (= {:core :<NAME> :a :3} @(future (q)))) (is (= {:core :<NAME> :a :3} @(future (q)))) (is (= {:core :<NAME> :a :4} @(future (q)))) (is (= 0 (count q)))))) (defn stress-reader [q max-reads] (let [cnt (atom 0) succ (atom {"t1" 0 "t2" 0 "t3" 0})] (loop [it nil] (swap! cnt inc) (cond (< max-reads @cnt) @succ (not (nil? it)) (do (swap! 
succ update-in [(:name it)] inc) ;; (printf "<%s" (:id it)) (recur (q))) :else (do ;; (Thread/sleep 1) (recur (q))))))) (defn stress-writer [q name n] (dotimes [i n] (q {:name name :id i}) ;; (printf ">%s" i) )) (defn stress-main [] (let [q (tt/queue-by :name 2800) r (future (stress-reader q 9000)) w1 (future (stress-writer q "t1" 900)) w2 (future (stress-writer q "t2" 900)) w3 (future (stress-writer q "t3" 900))] (= @r {"t1" 900, "t2" 900, "t3" 900}))) (deftest stress-test (is (stress-main)))
true
;; Copyright 2017-2019 Acrolinx GmbH ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or ;; implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. (ns com.acrolinx.clj-queue-by-stress-test (:require [clojure.test :refer [deftest testing is]] [com.acrolinx.clj-queue-by :as tt])) ;; FIXME: I'd feel better to have some real-world multi-threaded test ;; here and run it a 1000 times or so. (deftest queue-multi-threaded-basic-test (let [q (tt/queue-by :core)] (testing "Adding many and get them back as expected" (future (q {:core :alice :a :1}) (q {:core :bob :a :1}) (q {:core :alice :a :2}) (q {:core :alice :a :3}) (q {:core :bob :a :2}) (q {:core :charlie :a :1}) (q {:core :charlie :a :2}) (q {:core :charlie :a :3}) (q {:core :charlie :a :4})) ;; wait for the future to start (Thread/sleep 100) ;; all blocking on the deref of the future (is (= {:core :alice :a :1} @(future (q)))) (is (= {:core :bob :a :1} @(future (q)))) (is (= {:core :charlie :a :1} @(future (q)))) (is (= {:core :alice :a :2} @(future (q)))) (is (= {:core :bob :a :2} @(future (q)))) (is (= {:core :PI:NAME:<NAME>END_PI :a :2} @(future (q)))) (is (= {:core :PI:NAME:<NAME>END_PI :a :3} @(future (q)))) (is (= {:core :PI:NAME:<NAME>END_PI :a :3} @(future (q)))) (is (= {:core :PI:NAME:<NAME>END_PI :a :4} @(future (q)))) (is (= 0 (count q)))))) (defn stress-reader [q max-reads] (let [cnt (atom 0) succ (atom {"t1" 0 "t2" 0 "t3" 0})] (loop [it nil] (swap! cnt inc) (cond (< max-reads @cnt) @succ (not (nil? it)) (do (swap! 
succ update-in [(:name it)] inc) ;; (printf "<%s" (:id it)) (recur (q))) :else (do ;; (Thread/sleep 1) (recur (q))))))) (defn stress-writer [q name n] (dotimes [i n] (q {:name name :id i}) ;; (printf ">%s" i) )) (defn stress-main [] (let [q (tt/queue-by :name 2800) r (future (stress-reader q 9000)) w1 (future (stress-writer q "t1" 900)) w2 (future (stress-writer q "t2" 900)) w3 (future (stress-writer q "t3" 900))] (= @r {"t1" 900, "t2" 900, "t3" 900}))) (deftest stress-test (is (stress-main)))
[ { "context": "))\n\n(deftest no-unreadable-forms\n (is (= {:name \"John Doe\"\n :age 37\n :url {:prone.prep/va", "end": 1620, "score": 0.9990686178207397, "start": 1612, "tag": "NAME", "value": "John Doe" }, { "context": " (-> (prep-error-page {} {} {:session {:name \"John Doe\"\n :", "end": 2620, "score": 0.9996583461761475, "start": 2612, "tag": "NAME", "value": "John Doe" } ]
test/prone/prep_test.clj
timothypratley/prone
0
(ns prone.prep-test (:require [clojure.java.io :as io] [clojure.string :as str] [clojure.test :refer :all] [datomic.api :as d] [prone.prep :refer [prep-error-page prep-debug-page]]) (:import [java.io ByteArrayInputStream])) (defn prep-frames [frames & [application-name]] (-> (prep-error-page {:frames frames} {} {} application-name) :error :frames)) (deftest source-for-frames (is (re-find #"prone.prep-test" (:code (:source (first (prep-frames [{:class-path-url "prone/prep_test.clj"}])))))) (is (= "(unknown source file)" (:failure (:source (first (prep-frames [{}])))))) (is (= "(could not locate source file on class path)" (:failure (:source (first (prep-frames [{:class-path-url "plone/plep_test.clj"}]))))))) (deftest id-for-frames (is (= [0 1] (map :id (prep-frames [{:class-path-url "prone/prep_test.clj"} {:class-path-url "prone/prep_test.clj"}]))))) (deftest application-frames (is (= ["a"] (->> (prep-frames [{:name "a" :package "prone.prep-test"} {:name "b" :package "plone.plep-test"}] ["prone"]) (filter :application?) 
(map :name))))) (deftest frame-selection (is (= :application (:src-loc-selection (prep-error-page {:frames []} {} {} ""))))) (defrecord DefLeppard [num-hands]) (def conn (do (d/create-database "datomic:mem://test-db") (d/connect "datomic:mem://test-db"))) (deftest no-unreadable-forms (is (= {:name "John Doe" :age 37 :url {:prone.prep/value "http://example.com" :prone.prep/original-type "java.net.URL"} :body {:prone.prep/value "Hello" :prone.prep/original-type "java.io.ByteArrayInputStream"} :closed-stream {:prone.prep/value nil :prone.prep/original-type "java.io.BufferedInputStream"} :lazy '(2 3 4) :record {:prone.prep/value {:num-hands 1} :prone.prep/original-type "prone.prep_test.DefLeppard"} :datomic {:conn {:prone.prep/original-type "datomic.peer.LocalConnection", :prone.prep/value (str conn)} :db {:prone.prep/original-type "datomic.db.Db", :prone.prep/value 'Db} :entity {:prone.prep/original-type "datomic.query.EntityMap", :prone.prep/value "#:db{:id 1}"}}} (-> (prep-error-page {} {} {:session {:name "John Doe" :age 37 :url (java.net.URL. "http://example.com") :body (ByteArrayInputStream. (.getBytes "Hello")) :closed-stream (doto (io/input-stream "http://example.com") .close) :lazy (map inc [1 2 3]) :record (DefLeppard. 1) :datomic (let [db (d/db conn)] {:conn conn :db db :entity (d/entity db 1)})}} "") :browsables first :data :session)))) (deftest avoid-really-long-strings (is (= {:content {:prone.prep/value "ssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss..." 
:prone.prep/original-type "String with 20000 chars"}} (-> (prep-error-page {} {} {:content (str/join (repeat 20000 "s"))} "") :browsables first :data)))) (defn prep-debug [debug] (prep-debug-page debug {})) (deftest prep-debug-auxilliary-info (let [class-path-url "prone/debug_test.clj"] (is (= :clj (:lang (first (:debug-data (prep-debug [{}])))))) (is (= "test/prone/debug_test.clj" (:file-name (first (:debug-data (prep-debug [{:class-path-url class-path-url}])))))) (is (= "prone/debug_test.clj" (:class-path-url (first (:debug-data (prep-debug [{:class-path-url class-path-url}])))))) (is (= "prone.debug-test" (:package (first (:debug-data (prep-debug [{:class-path-url class-path-url}])))))) (let [source (:source (first (:debug-data (prep-debug [{:class-path-url class-path-url}]))))] (is (re-find #"^\(ns prone\.debug-test" (:code source))) (is (= 0 (:offset source))))))
97752
(ns prone.prep-test (:require [clojure.java.io :as io] [clojure.string :as str] [clojure.test :refer :all] [datomic.api :as d] [prone.prep :refer [prep-error-page prep-debug-page]]) (:import [java.io ByteArrayInputStream])) (defn prep-frames [frames & [application-name]] (-> (prep-error-page {:frames frames} {} {} application-name) :error :frames)) (deftest source-for-frames (is (re-find #"prone.prep-test" (:code (:source (first (prep-frames [{:class-path-url "prone/prep_test.clj"}])))))) (is (= "(unknown source file)" (:failure (:source (first (prep-frames [{}])))))) (is (= "(could not locate source file on class path)" (:failure (:source (first (prep-frames [{:class-path-url "plone/plep_test.clj"}]))))))) (deftest id-for-frames (is (= [0 1] (map :id (prep-frames [{:class-path-url "prone/prep_test.clj"} {:class-path-url "prone/prep_test.clj"}]))))) (deftest application-frames (is (= ["a"] (->> (prep-frames [{:name "a" :package "prone.prep-test"} {:name "b" :package "plone.plep-test"}] ["prone"]) (filter :application?) 
(map :name))))) (deftest frame-selection (is (= :application (:src-loc-selection (prep-error-page {:frames []} {} {} ""))))) (defrecord DefLeppard [num-hands]) (def conn (do (d/create-database "datomic:mem://test-db") (d/connect "datomic:mem://test-db"))) (deftest no-unreadable-forms (is (= {:name "<NAME>" :age 37 :url {:prone.prep/value "http://example.com" :prone.prep/original-type "java.net.URL"} :body {:prone.prep/value "Hello" :prone.prep/original-type "java.io.ByteArrayInputStream"} :closed-stream {:prone.prep/value nil :prone.prep/original-type "java.io.BufferedInputStream"} :lazy '(2 3 4) :record {:prone.prep/value {:num-hands 1} :prone.prep/original-type "prone.prep_test.DefLeppard"} :datomic {:conn {:prone.prep/original-type "datomic.peer.LocalConnection", :prone.prep/value (str conn)} :db {:prone.prep/original-type "datomic.db.Db", :prone.prep/value 'Db} :entity {:prone.prep/original-type "datomic.query.EntityMap", :prone.prep/value "#:db{:id 1}"}}} (-> (prep-error-page {} {} {:session {:name "<NAME>" :age 37 :url (java.net.URL. "http://example.com") :body (ByteArrayInputStream. (.getBytes "Hello")) :closed-stream (doto (io/input-stream "http://example.com") .close) :lazy (map inc [1 2 3]) :record (DefLeppard. 1) :datomic (let [db (d/db conn)] {:conn conn :db db :entity (d/entity db 1)})}} "") :browsables first :data :session)))) (deftest avoid-really-long-strings (is (= {:content {:prone.prep/value "ssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss..." 
:prone.prep/original-type "String with 20000 chars"}} (-> (prep-error-page {} {} {:content (str/join (repeat 20000 "s"))} "") :browsables first :data)))) (defn prep-debug [debug] (prep-debug-page debug {})) (deftest prep-debug-auxilliary-info (let [class-path-url "prone/debug_test.clj"] (is (= :clj (:lang (first (:debug-data (prep-debug [{}])))))) (is (= "test/prone/debug_test.clj" (:file-name (first (:debug-data (prep-debug [{:class-path-url class-path-url}])))))) (is (= "prone/debug_test.clj" (:class-path-url (first (:debug-data (prep-debug [{:class-path-url class-path-url}])))))) (is (= "prone.debug-test" (:package (first (:debug-data (prep-debug [{:class-path-url class-path-url}])))))) (let [source (:source (first (:debug-data (prep-debug [{:class-path-url class-path-url}]))))] (is (re-find #"^\(ns prone\.debug-test" (:code source))) (is (= 0 (:offset source))))))
true
(ns prone.prep-test (:require [clojure.java.io :as io] [clojure.string :as str] [clojure.test :refer :all] [datomic.api :as d] [prone.prep :refer [prep-error-page prep-debug-page]]) (:import [java.io ByteArrayInputStream])) (defn prep-frames [frames & [application-name]] (-> (prep-error-page {:frames frames} {} {} application-name) :error :frames)) (deftest source-for-frames (is (re-find #"prone.prep-test" (:code (:source (first (prep-frames [{:class-path-url "prone/prep_test.clj"}])))))) (is (= "(unknown source file)" (:failure (:source (first (prep-frames [{}])))))) (is (= "(could not locate source file on class path)" (:failure (:source (first (prep-frames [{:class-path-url "plone/plep_test.clj"}]))))))) (deftest id-for-frames (is (= [0 1] (map :id (prep-frames [{:class-path-url "prone/prep_test.clj"} {:class-path-url "prone/prep_test.clj"}]))))) (deftest application-frames (is (= ["a"] (->> (prep-frames [{:name "a" :package "prone.prep-test"} {:name "b" :package "plone.plep-test"}] ["prone"]) (filter :application?) 
(map :name))))) (deftest frame-selection (is (= :application (:src-loc-selection (prep-error-page {:frames []} {} {} ""))))) (defrecord DefLeppard [num-hands]) (def conn (do (d/create-database "datomic:mem://test-db") (d/connect "datomic:mem://test-db"))) (deftest no-unreadable-forms (is (= {:name "PI:NAME:<NAME>END_PI" :age 37 :url {:prone.prep/value "http://example.com" :prone.prep/original-type "java.net.URL"} :body {:prone.prep/value "Hello" :prone.prep/original-type "java.io.ByteArrayInputStream"} :closed-stream {:prone.prep/value nil :prone.prep/original-type "java.io.BufferedInputStream"} :lazy '(2 3 4) :record {:prone.prep/value {:num-hands 1} :prone.prep/original-type "prone.prep_test.DefLeppard"} :datomic {:conn {:prone.prep/original-type "datomic.peer.LocalConnection", :prone.prep/value (str conn)} :db {:prone.prep/original-type "datomic.db.Db", :prone.prep/value 'Db} :entity {:prone.prep/original-type "datomic.query.EntityMap", :prone.prep/value "#:db{:id 1}"}}} (-> (prep-error-page {} {} {:session {:name "PI:NAME:<NAME>END_PI" :age 37 :url (java.net.URL. "http://example.com") :body (ByteArrayInputStream. (.getBytes "Hello")) :closed-stream (doto (io/input-stream "http://example.com") .close) :lazy (map inc [1 2 3]) :record (DefLeppard. 1) :datomic (let [db (d/db conn)] {:conn conn :db db :entity (d/entity db 1)})}} "") :browsables first :data :session)))) (deftest avoid-really-long-strings (is (= {:content {:prone.prep/value "ssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss..." 
:prone.prep/original-type "String with 20000 chars"}} (-> (prep-error-page {} {} {:content (str/join (repeat 20000 "s"))} "") :browsables first :data)))) (defn prep-debug [debug] (prep-debug-page debug {})) (deftest prep-debug-auxilliary-info (let [class-path-url "prone/debug_test.clj"] (is (= :clj (:lang (first (:debug-data (prep-debug [{}])))))) (is (= "test/prone/debug_test.clj" (:file-name (first (:debug-data (prep-debug [{:class-path-url class-path-url}])))))) (is (= "prone/debug_test.clj" (:class-path-url (first (:debug-data (prep-debug [{:class-path-url class-path-url}])))))) (is (= "prone.debug-test" (:package (first (:debug-data (prep-debug [{:class-path-url class-path-url}])))))) (let [source (:source (first (:debug-data (prep-debug [{:class-path-url class-path-url}]))))] (is (re-find #"^\(ns prone\.debug-test" (:code source))) (is (= 0 (:offset source))))))
[ { "context": "rkBench -- Natural deduction\n\n; Copyright (c) 2016 Burkhardt Renz, THM. All rights reserved.\n; The use and distribu", "end": 79, "score": 0.99985671043396, "start": 65, "tag": "NAME", "value": "Burkhardt Renz" } ]
src/lwb/nd/examples/ltl_rules.clj
esb-lwb/lwb
22
; lwb Logic WorkBench -- Natural deduction ; Copyright (c) 2016 Burkhardt Renz, THM. All rights reserved. ; The use and distribution terms for this software are covered by the ; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php). ; By using this software in any fashion, you are agreeing to be bound by ; the terms of this license. (ns lwb.nd.examples.ltl-rules (:require [lwb.nd.repl :refer :all])) (load-logic :ltl) ; interactive checking in the repl for nd ; ----------------------------------------------------------------------------------------- ; atnext-introduction (proof '[(at[j] A) (succ i j)] '(at [i] (atnext A))) (step-f :atnext-i 1 2) (proof '[(at[j] A) (succ i j)] '(at [i] (atnext A))) (step-b :atnext-i 4 1) ; ----------------------------------------------------------------------------------------- ; atnext-elimination (proof '[(at [i] (atnext A)) (at [i'] B)] '(at [i'] (and A B))) (step-f :atnext-e 1 4) (swap '?1 'i' :checked) (step-b :and-i 6) ; ----------------------------------------------------------------------------------------- ; always-introduction (proof '[(at [i] (always A)) (<= i j)] '(at [j] (always A))) (step-b :always-i 4) (swap '?1 'k) (step-f :<=trans 2 3) (step-f :always-e 1 4) ; ----------------------------------------------------------------------------------------- ; always-elimination (proof '[(at [i] (always A)) (succ i j)] '(at [j] A)) (step-f :succ/<= 2) (step-f :always-e 1 3) ; ----------------------------------------------------------------------------------------- ; finally-introduction (proof '[(at [j] A) (<= i j)] '(at [i] (finally A))) (step-f :finally-i 1 2) (proof '[(at [j] A) (<= i j)] '(at [i] (finally A))) (step-b :finally-i 4) (swap '?1 'j) ; ----------------------------------------------------------------------------------------- ; finally-elimination (proof '(at [i] (finally A)) '(at [j] A)) (step-f :finally-e 1) (swap '?1 'j) (swap '?2 '(at [j] A)) ; or (proof '(at [i] (finally A)) '(at [j] 
A)) (step-f :finally-e 1 3) (swap '?1 'j)
29908
; lwb Logic WorkBench -- Natural deduction ; Copyright (c) 2016 <NAME>, THM. All rights reserved. ; The use and distribution terms for this software are covered by the ; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php). ; By using this software in any fashion, you are agreeing to be bound by ; the terms of this license. (ns lwb.nd.examples.ltl-rules (:require [lwb.nd.repl :refer :all])) (load-logic :ltl) ; interactive checking in the repl for nd ; ----------------------------------------------------------------------------------------- ; atnext-introduction (proof '[(at[j] A) (succ i j)] '(at [i] (atnext A))) (step-f :atnext-i 1 2) (proof '[(at[j] A) (succ i j)] '(at [i] (atnext A))) (step-b :atnext-i 4 1) ; ----------------------------------------------------------------------------------------- ; atnext-elimination (proof '[(at [i] (atnext A)) (at [i'] B)] '(at [i'] (and A B))) (step-f :atnext-e 1 4) (swap '?1 'i' :checked) (step-b :and-i 6) ; ----------------------------------------------------------------------------------------- ; always-introduction (proof '[(at [i] (always A)) (<= i j)] '(at [j] (always A))) (step-b :always-i 4) (swap '?1 'k) (step-f :<=trans 2 3) (step-f :always-e 1 4) ; ----------------------------------------------------------------------------------------- ; always-elimination (proof '[(at [i] (always A)) (succ i j)] '(at [j] A)) (step-f :succ/<= 2) (step-f :always-e 1 3) ; ----------------------------------------------------------------------------------------- ; finally-introduction (proof '[(at [j] A) (<= i j)] '(at [i] (finally A))) (step-f :finally-i 1 2) (proof '[(at [j] A) (<= i j)] '(at [i] (finally A))) (step-b :finally-i 4) (swap '?1 'j) ; ----------------------------------------------------------------------------------------- ; finally-elimination (proof '(at [i] (finally A)) '(at [j] A)) (step-f :finally-e 1) (swap '?1 'j) (swap '?2 '(at [j] A)) ; or (proof '(at [i] (finally A)) '(at [j] A)) 
(step-f :finally-e 1 3) (swap '?1 'j)
true
; lwb Logic WorkBench -- Natural deduction ; Copyright (c) 2016 PI:NAME:<NAME>END_PI, THM. All rights reserved. ; The use and distribution terms for this software are covered by the ; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php). ; By using this software in any fashion, you are agreeing to be bound by ; the terms of this license. (ns lwb.nd.examples.ltl-rules (:require [lwb.nd.repl :refer :all])) (load-logic :ltl) ; interactive checking in the repl for nd ; ----------------------------------------------------------------------------------------- ; atnext-introduction (proof '[(at[j] A) (succ i j)] '(at [i] (atnext A))) (step-f :atnext-i 1 2) (proof '[(at[j] A) (succ i j)] '(at [i] (atnext A))) (step-b :atnext-i 4 1) ; ----------------------------------------------------------------------------------------- ; atnext-elimination (proof '[(at [i] (atnext A)) (at [i'] B)] '(at [i'] (and A B))) (step-f :atnext-e 1 4) (swap '?1 'i' :checked) (step-b :and-i 6) ; ----------------------------------------------------------------------------------------- ; always-introduction (proof '[(at [i] (always A)) (<= i j)] '(at [j] (always A))) (step-b :always-i 4) (swap '?1 'k) (step-f :<=trans 2 3) (step-f :always-e 1 4) ; ----------------------------------------------------------------------------------------- ; always-elimination (proof '[(at [i] (always A)) (succ i j)] '(at [j] A)) (step-f :succ/<= 2) (step-f :always-e 1 3) ; ----------------------------------------------------------------------------------------- ; finally-introduction (proof '[(at [j] A) (<= i j)] '(at [i] (finally A))) (step-f :finally-i 1 2) (proof '[(at [j] A) (<= i j)] '(at [i] (finally A))) (step-b :finally-i 4) (swap '?1 'j) ; ----------------------------------------------------------------------------------------- ; finally-elimination (proof '(at [i] (finally A)) '(at [j] A)) (step-f :finally-e 1) (swap '?1 'j) (swap '?2 '(at [j] A)) ; or (proof '(at [i] (finally A)) '(at 
[j] A)) (step-f :finally-e 1 3) (swap '?1 'j)
[ { "context": " data ids))))\n\n(def perils\n {:rand_25 {:name \"Tremor\"\n :desc \"25% chance of {:hp} damage t", "end": 3120, "score": 0.6791923642158508, "start": 3114, "tag": "NAME", "value": "Tremor" }, { "context": "rand)))\n :hp 3}\n :slam {:name \"Slam\"\n :desc \"{:hp} damage to one live rai", "end": 3291, "score": 0.8629920482635498, "start": 3287, "tag": "NAME", "value": "Slam" }, { "context": "%))]))\n :hp 30}\n :rampage {:name \"Rampage\"\n :desc \"Don't let more than 5 people", "end": 3564, "score": 0.8926590085029602, "start": 3557, "tag": "NAME", "value": "Rampage" } ]
src/gridom/game.cljs
lxsli/gridom
0
(ns gridom.game (:require [om.core :as om :include-macros true] [gridom.utils :as utils])) ;; TODO score ;; TODO peril list ;; TODO penalise death (live list) ;; TODO key binds (def rows-n 5) (def cols-n 5) (def board-n (* rows-n cols-n)) (def mana-eff 6) (def peril-scale 1) (defn mkbox [[id _v]] {:id id :v 60}) (defn mkboard [size] (vec (map mkbox (zipmap (range size) (repeat 100))))) ; defonce to make this only initialize on hard reload (def app-state (atom {:mana 100 :board (mkboard board-n) :live (vec (range board-n))})) (defn dead? [box] (= 0 (:v box))) (defn alive? [box] (not (dead? box))) (defn heal [box v] (if (alive? box) (assoc-in box [:v] (utils/minmax 0 (+ (:v box) v) 100)) box)) (defn hurt [box v] (heal box (* (- v) peril-scale))) (defn heal-in [boxes pred v] (vec (map #(if (pred %) (heal % v) %) boxes))) (defn hurt-in [boxes pred v] (vec (map #(if (pred %) (hurt % v) %) boxes))) (defn mana-gain [data v] (assoc data :mana (utils/minmax 0 (+ (:mana data) v) 100))) (defn boxplus [a b] (let [[aid bid] [(:id a) (:id b)] m5 (mod aid cols-n)] (or (= bid aid) (= bid (- aid rows-n)) (= bid (+ aid rows-n)) (and (= bid (- aid 1)) (not= 0 m5)) (and (= bid (+ aid 1)) (not= 4 m5))))) (defn boxrow [a b] (let [[aid bid] [(:id a) (:id b)] c1 (- aid (mod aid cols-n))] (some #{bid} (range c1 (+ c1 cols-n))))) (defn boxcol [a b] (let [[aid bid] [(:id a) (:id b)] d (Math/abs (- aid bid))] (= 0 (mod d 5)))) (defn mana-regen [data] (let [c (count (:live data))] (* c (/ 1 25)))) (defn target-bonus [targets] (/ (Math/log targets) (Math/log 5))) (defn spell-cost [spell] (let [t (:targets spell) e (+ mana-eff (target-bonus t)) h (:hp spell)] (assoc spell :mana (/ (* h t) e)))) (def spells (utils/fmap spell-cost {:single {:name "Illumination" :hp 50 :targets 1 :pred (fn [center box] (= center box))} :row {:name "Wall of Light" :hp 30 :targets 5 :pred (fn [center box] (boxrow center box))} :col {:name "Lightray" :hp 30 :targets 5 :pred (fn [center box] (boxcol center box))} :plus 
{:name "Radiant Burst" :hp 30 :targets 5 :pred (fn [center box] (boxplus center box))} :all {:name "Serenity" :hp 10 :targets 25 :pred (fn [center box] (boxplus center box))} })) (def binds {:left :col :1 :col :mid :single :2 :single :right :row :3 :row}) (defn perbox_peril_fn [pred] (fn perbox [peril data] (assoc data :board (hurt-in (:board data) pred (:hp peril))))) (defn idlist_peril_fn [ids_fn] (fn [peril data] (let [ids (ids_fn data)] ;(print (:name peril) ids) (reduce (fn [acc n] (let [box (nth (get acc :board) n)] (assoc-in data [:board n] (hurt box (:hp peril))))) data ids)))) (def perils {:rand_25 {:name "Tremor" :desc "25% chance of {:hp} damage to each raid member" :func (perbox_peril_fn #(>= 0.25 (rand))) :hp 3} :slam {:name "Slam" :desc "{:hp} damage to one live raid member" :func (idlist_peril_fn #(condp = (:live %) [] [] [(rand-nth (:live %))])) :hp 30} :rampage {:name "Rampage" :desc "Don't let more than 5 people die!" :func (idlist_peril_fn #(condp >= (count (:live %)) 0 [] 19 [(first (:live %))] [])) :hp 100} }) (def perils_by_delay {150 [:rampage] 500 [:rand_25] ;; 1.5 dps * 25 1500 [:rand_25] ;; 0.5 dps * 25 3000 [:slam] ;; 10 dps }) (defn cast [data box input] (let [{:keys [hp mana pred]} (get spells (get binds input))] (if (and (>= (:mana @app-state) mana) (alive? box)) (do ; TODO combine (om/transact! data (fn [d] (assoc d :board (heal-in (:board d) (partial pred box) hp)))) (om/transact! data (fn [x] (mana-gain x (- mana)))) ))))
51818
(ns gridom.game (:require [om.core :as om :include-macros true] [gridom.utils :as utils])) ;; TODO score ;; TODO peril list ;; TODO penalise death (live list) ;; TODO key binds (def rows-n 5) (def cols-n 5) (def board-n (* rows-n cols-n)) (def mana-eff 6) (def peril-scale 1) (defn mkbox [[id _v]] {:id id :v 60}) (defn mkboard [size] (vec (map mkbox (zipmap (range size) (repeat 100))))) ; defonce to make this only initialize on hard reload (def app-state (atom {:mana 100 :board (mkboard board-n) :live (vec (range board-n))})) (defn dead? [box] (= 0 (:v box))) (defn alive? [box] (not (dead? box))) (defn heal [box v] (if (alive? box) (assoc-in box [:v] (utils/minmax 0 (+ (:v box) v) 100)) box)) (defn hurt [box v] (heal box (* (- v) peril-scale))) (defn heal-in [boxes pred v] (vec (map #(if (pred %) (heal % v) %) boxes))) (defn hurt-in [boxes pred v] (vec (map #(if (pred %) (hurt % v) %) boxes))) (defn mana-gain [data v] (assoc data :mana (utils/minmax 0 (+ (:mana data) v) 100))) (defn boxplus [a b] (let [[aid bid] [(:id a) (:id b)] m5 (mod aid cols-n)] (or (= bid aid) (= bid (- aid rows-n)) (= bid (+ aid rows-n)) (and (= bid (- aid 1)) (not= 0 m5)) (and (= bid (+ aid 1)) (not= 4 m5))))) (defn boxrow [a b] (let [[aid bid] [(:id a) (:id b)] c1 (- aid (mod aid cols-n))] (some #{bid} (range c1 (+ c1 cols-n))))) (defn boxcol [a b] (let [[aid bid] [(:id a) (:id b)] d (Math/abs (- aid bid))] (= 0 (mod d 5)))) (defn mana-regen [data] (let [c (count (:live data))] (* c (/ 1 25)))) (defn target-bonus [targets] (/ (Math/log targets) (Math/log 5))) (defn spell-cost [spell] (let [t (:targets spell) e (+ mana-eff (target-bonus t)) h (:hp spell)] (assoc spell :mana (/ (* h t) e)))) (def spells (utils/fmap spell-cost {:single {:name "Illumination" :hp 50 :targets 1 :pred (fn [center box] (= center box))} :row {:name "Wall of Light" :hp 30 :targets 5 :pred (fn [center box] (boxrow center box))} :col {:name "Lightray" :hp 30 :targets 5 :pred (fn [center box] (boxcol center box))} :plus 
{:name "Radiant Burst" :hp 30 :targets 5 :pred (fn [center box] (boxplus center box))} :all {:name "Serenity" :hp 10 :targets 25 :pred (fn [center box] (boxplus center box))} })) (def binds {:left :col :1 :col :mid :single :2 :single :right :row :3 :row}) (defn perbox_peril_fn [pred] (fn perbox [peril data] (assoc data :board (hurt-in (:board data) pred (:hp peril))))) (defn idlist_peril_fn [ids_fn] (fn [peril data] (let [ids (ids_fn data)] ;(print (:name peril) ids) (reduce (fn [acc n] (let [box (nth (get acc :board) n)] (assoc-in data [:board n] (hurt box (:hp peril))))) data ids)))) (def perils {:rand_25 {:name "<NAME>" :desc "25% chance of {:hp} damage to each raid member" :func (perbox_peril_fn #(>= 0.25 (rand))) :hp 3} :slam {:name "<NAME>" :desc "{:hp} damage to one live raid member" :func (idlist_peril_fn #(condp = (:live %) [] [] [(rand-nth (:live %))])) :hp 30} :rampage {:name "<NAME>" :desc "Don't let more than 5 people die!" :func (idlist_peril_fn #(condp >= (count (:live %)) 0 [] 19 [(first (:live %))] [])) :hp 100} }) (def perils_by_delay {150 [:rampage] 500 [:rand_25] ;; 1.5 dps * 25 1500 [:rand_25] ;; 0.5 dps * 25 3000 [:slam] ;; 10 dps }) (defn cast [data box input] (let [{:keys [hp mana pred]} (get spells (get binds input))] (if (and (>= (:mana @app-state) mana) (alive? box)) (do ; TODO combine (om/transact! data (fn [d] (assoc d :board (heal-in (:board d) (partial pred box) hp)))) (om/transact! data (fn [x] (mana-gain x (- mana)))) ))))
true
(ns gridom.game (:require [om.core :as om :include-macros true] [gridom.utils :as utils])) ;; TODO score ;; TODO peril list ;; TODO penalise death (live list) ;; TODO key binds (def rows-n 5) (def cols-n 5) (def board-n (* rows-n cols-n)) (def mana-eff 6) (def peril-scale 1) (defn mkbox [[id _v]] {:id id :v 60}) (defn mkboard [size] (vec (map mkbox (zipmap (range size) (repeat 100))))) ; defonce to make this only initialize on hard reload (def app-state (atom {:mana 100 :board (mkboard board-n) :live (vec (range board-n))})) (defn dead? [box] (= 0 (:v box))) (defn alive? [box] (not (dead? box))) (defn heal [box v] (if (alive? box) (assoc-in box [:v] (utils/minmax 0 (+ (:v box) v) 100)) box)) (defn hurt [box v] (heal box (* (- v) peril-scale))) (defn heal-in [boxes pred v] (vec (map #(if (pred %) (heal % v) %) boxes))) (defn hurt-in [boxes pred v] (vec (map #(if (pred %) (hurt % v) %) boxes))) (defn mana-gain [data v] (assoc data :mana (utils/minmax 0 (+ (:mana data) v) 100))) (defn boxplus [a b] (let [[aid bid] [(:id a) (:id b)] m5 (mod aid cols-n)] (or (= bid aid) (= bid (- aid rows-n)) (= bid (+ aid rows-n)) (and (= bid (- aid 1)) (not= 0 m5)) (and (= bid (+ aid 1)) (not= 4 m5))))) (defn boxrow [a b] (let [[aid bid] [(:id a) (:id b)] c1 (- aid (mod aid cols-n))] (some #{bid} (range c1 (+ c1 cols-n))))) (defn boxcol [a b] (let [[aid bid] [(:id a) (:id b)] d (Math/abs (- aid bid))] (= 0 (mod d 5)))) (defn mana-regen [data] (let [c (count (:live data))] (* c (/ 1 25)))) (defn target-bonus [targets] (/ (Math/log targets) (Math/log 5))) (defn spell-cost [spell] (let [t (:targets spell) e (+ mana-eff (target-bonus t)) h (:hp spell)] (assoc spell :mana (/ (* h t) e)))) (def spells (utils/fmap spell-cost {:single {:name "Illumination" :hp 50 :targets 1 :pred (fn [center box] (= center box))} :row {:name "Wall of Light" :hp 30 :targets 5 :pred (fn [center box] (boxrow center box))} :col {:name "Lightray" :hp 30 :targets 5 :pred (fn [center box] (boxcol center box))} :plus 
{:name "Radiant Burst" :hp 30 :targets 5 :pred (fn [center box] (boxplus center box))} :all {:name "Serenity" :hp 10 :targets 25 :pred (fn [center box] (boxplus center box))} })) (def binds {:left :col :1 :col :mid :single :2 :single :right :row :3 :row}) (defn perbox_peril_fn [pred] (fn perbox [peril data] (assoc data :board (hurt-in (:board data) pred (:hp peril))))) (defn idlist_peril_fn [ids_fn] (fn [peril data] (let [ids (ids_fn data)] ;(print (:name peril) ids) (reduce (fn [acc n] (let [box (nth (get acc :board) n)] (assoc-in data [:board n] (hurt box (:hp peril))))) data ids)))) (def perils {:rand_25 {:name "PI:NAME:<NAME>END_PI" :desc "25% chance of {:hp} damage to each raid member" :func (perbox_peril_fn #(>= 0.25 (rand))) :hp 3} :slam {:name "PI:NAME:<NAME>END_PI" :desc "{:hp} damage to one live raid member" :func (idlist_peril_fn #(condp = (:live %) [] [] [(rand-nth (:live %))])) :hp 30} :rampage {:name "PI:NAME:<NAME>END_PI" :desc "Don't let more than 5 people die!" :func (idlist_peril_fn #(condp >= (count (:live %)) 0 [] 19 [(first (:live %))] [])) :hp 100} }) (def perils_by_delay {150 [:rampage] 500 [:rand_25] ;; 1.5 dps * 25 1500 [:rand_25] ;; 0.5 dps * 25 3000 [:slam] ;; 10 dps }) (defn cast [data box input] (let [{:keys [hp mana pred]} (get spells (get binds input))] (if (and (>= (:mana @app-state) mana) (alive? box)) (do ; TODO combine (om/transact! data (fn [d] (assoc d :board (heal-in (:board d) (partial pred box) hp)))) (om/transact! data (fn [x] (mana-gain x (- mana)))) ))))
[ { "context": "-colls [c1-p2 c2-p2 c3-p2 c4-p2]\n tag-key \"tag1\"\n tag (tags/make-tag {:tag-key tag-key})\n ", "end": 5378, "score": 0.996431827545166, "start": 5374, "tag": "KEY", "value": "tag1" }, { "context": "l/coll-catalog-item-id \"PROV1\"))\n (let [tag-key \"tag1\"\n tag (tags/make-tag {:tag-key tag-key})\n ", "end": 8753, "score": 0.9979820251464844, "start": 8749, "tag": "KEY", "value": "tag1" }, { "context": "\n token (echo-util/login (system/context) \"user1\")\n {:keys [concept-id revision-id]} (tags/", "end": 8856, "score": 0.9380649328231812, "start": 8851, "tag": "USERNAME", "value": "user1" }, { "context": "all-prov2-colls all-prov3-colls)\n tag-key \"tag1\"\n tag (tags/make-tag {:tag-key tag-key})\n ", "end": 16082, "score": 0.8897675275802612, "start": 16078, "tag": "KEY", "value": "tag1" }, { "context": "\n token (echo-util/login (system/context) \"user1\")\n prov3-token (echo-util/login (system/co", "end": 16185, "score": 0.9201805591583252, "start": 16180, "tag": "USERNAME", "value": "user1" }, { "context": "em/context)\n \"prov3-user\"\n [group1-con", "end": 16290, "score": 0.8222905993461609, "start": 16280, "tag": "USERNAME", "value": "prov3-user" }, { "context": "l/coll-catalog-item-id \"PROV1\"))\n (let [tag-key \"tag1\"\n tag (tags/make-tag {:tag-key tag-key})\n ", "end": 18933, "score": 0.9990721940994263, "start": 18929, "tag": "KEY", "value": "tag1" }, { "context": " token (echo-util/login (system/context) \"user1\")\n _ (index/wait-until-indexed)\n ", "end": 21724, "score": 0.7678720951080322, "start": 21719, "tag": "USERNAME", "value": "user1" }, { "context": " token (echo-util/login (system/context) \"user1\")\n tag-key \"tag1\"\n assert-tag-a", "end": 22721, "score": 0.5794293284416199, "start": 22720, "tag": "USERNAME", "value": "1" }, { "context": "\n token (echo-util/login (system/context) \"user1\")\n _ (index/wait-until-indexed)\n ta", "end": 24488, "score": 0.9291319847106934, "start": 24483, "tag": "USERNAME", "value": 
"user1" }, { "context": "\n token (echo-util/login (system/context) \"user1\")]\n (tags/create-tag token (tags/make-tag {:ta", "end": 27595, "score": 0.9845421314239502, "start": 27590, "tag": "USERNAME", "value": "user1" }, { "context": "\n token (echo-util/login (system/context) \"user1\")\n tag-key \"tag1\"]\n (tags/create-tag to", "end": 30079, "score": 0.9541335701942444, "start": 30074, "tag": "USERNAME", "value": "user1" }, { "context": "/login (system/context) \"user1\")\n tag-key \"tag1\"]\n (tags/create-tag token (tags/make-tag {:tag", "end": 30103, "score": 0.9978699684143066, "start": 30099, "tag": "KEY", "value": "tag1" } ]
system-int-test/test/cmr/system_int_test/search/tagging/tag_association_test.clj
daniel-zamora/Common-Metadata-Repository
0
(ns cmr.system-int-test.search.tagging.tag-association-test "This tests associating tags with collections." (:require [clojure.test :refer :all] [cmr.common.util :refer [are2] :as util] [cmr.mock-echo.client.echo-util :as echo-util] [cmr.system-int-test.data2.collection :as collection] [cmr.system-int-test.data2.core :as data-core] [cmr.system-int-test.system :as system] [cmr.system-int-test.utils.index-util :as index] [cmr.system-int-test.utils.ingest-util :as ingest] [cmr.system-int-test.utils.metadata-db-util :as mdb] [cmr.system-int-test.utils.search-util :as search] [cmr.system-int-test.utils.tag-util :as tags] [cmr.transmit.tag :as transmit-tag])) (use-fixtures :each (join-fixtures [(ingest/reset-fixture {"provguid1" "PROV1" "provguid2" "PROV2" "provguid3" "PROV3"} {:grant-all-search? false}) tags/grant-all-tag-fixture])) (deftest associate-tags-by-query-with-collections-test ;; Grant all collections in PROV1 and 2 (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV2")) ;; Create 4 collections in each provider that are identical. ;; The first collection will have data: ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"} (let [[c1-p1 c2-p1 c3-p1 c4-p1 c1-p2 c2-p2 c3-p2 c4-p2 c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"] n (range 1 5)] (:concept-id (data-core/ingest p (collection/collection {:short-name (str "S" n) :version-id (str "V" n) :entry-title (str "ET" n)})))) all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1] all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2] tag (tags/make-tag) tag-key (:tag-key tag) token (echo-util/login (system/context) "user1") {:keys [concept-id]} (tags/create-tag token tag)] (index/wait-until-indexed) (testing "Successfully Associate tag with collections" (let [response (tags/associate-by-query token tag-key {:provider "PROV1"})] (tags/assert-tag-association-response-ok? 
{["C1200000013-PROV1"] {:concept-id "TA1200000026-CMR" :revision-id 1} ["C1200000014-PROV1"] {:concept-id "TA1200000027-CMR" :revision-id 1} ["C1200000015-PROV1"] {:concept-id "TA1200000028-CMR" :revision-id 1} ["C1200000016-PROV1"] {:concept-id "TA1200000029-CMR" :revision-id 1}} response))) (testing "Associate using query that finds nothing" (let [response (tags/associate-by-query token tag-key {:provider "foo"})] (tags/assert-tag-association-response-ok? {} response))) (testing "ACLs are applied to collections found" ;; None of PROV3's collections are visible (let [response (tags/associate-by-query token tag-key {:provider "PROV3"})] (tags/assert-tag-association-response-ok? {} response))) (testing "Associate more collections" ;; Associates all the version 2 collections which is c2-p1 (already in) and c2-p2 (new) (let [response (tags/associate-by-query token tag-key {:version "v2"})] (tags/assert-tag-association-response-ok? {["C1200000014-PROV1"] {:concept-id "TA1200000027-CMR" :revision-id 2} ["C1200000018-PROV2"] {:concept-id "TA1200000030-CMR" :revision-id 1}} response))))) (deftest associate-tags-by-concept-ids-with-collections-test ;; Grant all collections in PROV1 and 2 (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV2")) ;; Create 4 collections in each provider that are identical. 
;; The first collection will have data: ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"} (let [[c1-p1 c2-p1 c3-p1 c4-p1 c1-p2 c2-p2 c3-p2 c4-p2 c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"] n (range 1 5)] (:concept-id (data-core/ingest p (collection/collection {:short-name (str "S" n) :version-id (str "V" n) :entry-title (str "ET" n)})))) all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1] all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2] tag-key "tag1" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") {:keys [concept-id]} (tags/create-tag token tag)] (index/wait-until-indexed) (testing "Associate tag with collections by concept-ids" (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c1-p1} {:concept-id c3-p2}])] (tags/assert-tag-association-response-ok? {["C1200000013-PROV1"] {:concept-id "TA1200000026-CMR" :revision-id 1} ["C1200000019-PROV2"] {:concept-id "TA1200000027-CMR" :revision-id 1}} response))) (testing "Associate to no collections" (let [response (tags/associate-by-concept-ids token tag-key [])] (tags/assert-invalid-data-error ["At least one collection must be provided for tag association."] response))) (testing "Associate to collection revision and whole collection at the same time" (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c1-p1} {:concept-id c1-p1 :revision-id 1}])] (tags/assert-invalid-data-error [(format (str "Unable to create tag association on a collection revision and the whole " "collection at the same time for the following collections: %s.") c1-p1)] response))) (testing "Associate to non-existent collections" (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id "C100-P5"}])] (tags/assert-tag-association-response-ok? 
{["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}} response))) (testing "Associate to deleted collections" (let [c1-p1-concept (mdb/get-concept c1-p1) _ (ingest/delete-concept c1-p1-concept) _ (index/wait-until-indexed) response (tags/associate-by-concept-ids token tag-key [{:concept-id c1-p1}])] (tags/assert-tag-association-response-ok? {[c1-p1] {:errors [(format "Collection [%s] does not exist or is not visible." c1-p1)]}} response))) (testing "ACLs are applied to collections found" ;; None of PROV3's collections are visible (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c4-p3}])] (tags/assert-tag-association-response-ok? {[c4-p3] {:errors [(format "Collection [%s] does not exist or is not visible." c4-p3)]}} response))) (testing "Tag association mixed response" (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c2-p1} {:concept-id "C100-P5"}])] (tags/assert-tag-association-response-ok? {["C1200000014-PROV1"] {:concept-id "TA1200000028-CMR" :revision-id 1} ["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}} response))))) (deftest associate-tag-failure-test (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [tag-key "tag1" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag) ;; The stored updated tag would have user1 in the originator id tag (assoc tag :originator-id "user1") coll-concept-id (:concept-id (data-core/ingest "PROV1" (collection/collection)))] (testing "Associate tag using query sent with invalid content type" (are [associate-tag-fn request-json] (= {:status 400, :errors ["The mime types specified in the content-type header [application/xml] are not supported."]} (associate-tag-fn token tag-key request-json {:http-options {:content-type :xml}})) tags/associate-by-query {:provider "foo"} 
tags/associate-by-concept-ids [{:concept-id coll-concept-id}])) (testing "Associate applies JSON Query validations" (are [associate-tag-fn request-json message] (= {:status 400 :errors [message]} (associate-tag-fn token tag-key {:foo "bar"})) tags/associate-by-query {:foo "bar"} "#/condition: extraneous key [foo] is not permitted" tags/associate-by-concept-ids {:concept-id coll-concept-id} "#: expected type: JSONArray, found: JSONObject")) (testing "Associate tag that doesn't exist" (are [associate-tag-fn request-json] (= {:status 404 :errors ["Tag could not be found with tag-key [tag100]"]} (associate-tag-fn token "tag100" request-json)) tags/associate-by-query {:provider "foo"} tags/associate-by-concept-ids [{:concept-id coll-concept-id}])) (testing "Associate deleted tag" (tags/delete-tag token tag-key) (are [associate-tag-fn request-json] (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." tag-key)]} (associate-tag-fn token tag-key request-json)) tags/associate-by-query {:provider "foo"} tags/associate-by-concept-ids [{:concept-id coll-concept-id}])))) (deftest dissociate-tags-with-collections-by-query-test ;; Create 4 collections in each provider that are identical. 
;; The first collection will have data: ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"} (let [group1-concept-id (echo-util/get-or-create-group (system/context) "group1") ;; Grant all collections in PROV1 and 2 _ (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) _ (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV2")) _ (echo-util/grant-group (system/context) group1-concept-id (echo-util/coll-catalog-item-id "PROV3")) [c1-p1 c2-p1 c3-p1 c4-p1 c1-p2 c2-p2 c3-p2 c4-p2 c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"] n (range 1 5)] (data-core/ingest p (collection/collection {:short-name (str "S" n) :version-id (str "V" n) :entry-title (str "ET" n)}))) all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1] all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2] all-prov3-colls [c1-p3 c2-p3 c3-p3 c4-p3] all-colls (concat all-prov1-colls all-prov2-colls all-prov3-colls) tag-key "tag1" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") prov3-token (echo-util/login (system/context) "prov3-user" [group1-concept-id]) {:keys [concept-id]} (tags/create-tag token tag) assert-tag-associated (partial tags/assert-tag-associated-with-query prov3-token {:tag-key "tag1"})] (index/wait-until-indexed) ;; Associate the tag with every collection (tags/associate-by-query prov3-token tag-key {:or [{:provider "PROV1"} {:provider "PROV2"} {:provider "PROV3"}]}) (testing "Dissociate using query that finds nothing" (let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "foo"})] (is (= 200 status)) (assert-tag-associated all-colls))) (testing "ACLs are applied to collections found" ;; None of PROV3's collections are visible to normal users (let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "PROV3"})] (is (= 200 status)) (assert-tag-associated all-colls))) (testing "Successfully dissociate tag with collections" (let [{:keys 
[status]} (tags/dissociate-by-query token tag-key {:provider "PROV1"})] (is (= 200 status)) (assert-tag-associated (concat all-prov2-colls all-prov3-colls))) ;; dissociate tag again is OK. Since there is no existing tag association, it does nothing. (let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "PROV1"})] (is (= 200 status)) (assert-tag-associated (concat all-prov2-colls all-prov3-colls)))))) (deftest dissociate-tags-with-collections-by-concept-ids-test ;; Create 4 collections in each provider that are identical. ;; The first collection will have data: ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"} (let [group1-concept-id (echo-util/get-or-create-group (system/context) "group1") ;; Grant all collections in PROV1 and 2 _ (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) _ (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV2")) _ (echo-util/grant-group (system/context) group1-concept-id (echo-util/coll-catalog-item-id "PROV3")) [c1-p1 c2-p1 c3-p1 c4-p1 c1-p2 c2-p2 c3-p2 c4-p2 c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"] n (range 1 5)] (data-core/ingest p (collection/collection {:short-name (str "S" n) :version-id (str "V" n) :entry-title (str "ET" n)}))) all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1] all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2] all-prov3-colls [c1-p3 c2-p3 c3-p3 c4-p3] all-colls (concat all-prov1-colls all-prov2-colls all-prov3-colls) tag-key "tag1" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") prov3-token (echo-util/login (system/context) "prov3-user" [group1-concept-id]) {:keys [concept-id]} (tags/create-tag token tag) assert-tag-associated (partial tags/assert-tag-associated-with-query prov3-token {:tag-key "tag1"})] (index/wait-until-indexed) ;; Associate the tag with every collection (tags/associate-by-query prov3-token tag-key {:or [{:provider "PROV1"} 
{:provider "PROV2"} {:provider "PROV3"}]}) (testing "Successfully dissociate tag with collections" (let [{:keys [status]} (tags/dissociate-by-concept-ids token tag-key (map #(hash-map :concept-id (:concept-id %)) all-prov1-colls))] (is (= 200 status)) (assert-tag-associated (concat all-prov2-colls all-prov3-colls)))) (testing "Dissociate non-existent collections" (let [response (tags/dissociate-by-concept-ids token tag-key [{:concept-id "C100-P5"}])] (tags/assert-tag-dissociation-response-ok? {["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}} response))) (testing "Dissociate to deleted collections" (let [c1-p2-concept-id (:concept-id c1-p2) c1-p2-concept (mdb/get-concept c1-p2-concept-id) _ (ingest/delete-concept c1-p2-concept) _ (index/wait-until-indexed) response (tags/dissociate-by-concept-ids token tag-key [{:concept-id c1-p2-concept-id}])] (tags/assert-tag-dissociation-response-ok? {["C1200000019-PROV2"] {:errors [(format "Collection [%s] does not exist or is not visible." c1-p2-concept-id)]}} response))) (testing "ACLs are applied to collections found" ;; None of PROV3's collections are visible (let [coll-concept-id (:concept-id c4-p3) response (tags/dissociate-by-concept-ids token tag-key [{:concept-id coll-concept-id}])] (tags/assert-tag-dissociation-response-ok? {["C1200000026-PROV3"] {:errors [(format "Collection [%s] does not exist or is not visible." 
coll-concept-id)]}} response))))) (deftest dissociate-tag-failure-test (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [tag-key "tag1" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag) ;; The stored updated tag would have user1 in the originator id tag (assoc tag :originator-id "user1") coll-concept-id (:concept-id (data-core/ingest "PROV1" (collection/collection)))] (testing "Dissociate tag using query sent with invalid content type" (are [dissociate-tag-fn request-json] (= {:status 400, :errors ["The mime types specified in the content-type header [application/xml] are not supported."]} (dissociate-tag-fn token tag-key request-json {:http-options {:content-type :xml}})) tags/dissociate-by-query {:provider "foo"} tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}])) (testing "Dissociate applies JSON Query validations" (are [dissociate-tag-fn request-json message] (= {:status 400 :errors [message]} (dissociate-tag-fn token tag-key request-json)) tags/dissociate-by-query {:foo "bar"} "#/condition: extraneous key [foo] is not permitted" tags/dissociate-by-concept-ids {:concept-id coll-concept-id} "#: expected type: JSONArray, found: JSONObject")) (testing "Dissociate tag that doesn't exist" (are [dissociate-tag-fn request-json] (= {:status 404 :errors ["Tag could not be found with tag-key [tag100]"]} (dissociate-tag-fn token "tag100" request-json)) tags/dissociate-by-query {:provider "foo"} tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}])) (testing "Dissociate deleted tag" (tags/delete-tag token tag-key) (are [dissociate-tag-fn request-json] (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." 
tag-key)]} (dissociate-tag-fn token tag-key request-json)) tags/dissociate-by-query {:provider "foo"} tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}])))) (deftest dissociate-tags-with-partial-match-query-test (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (testing "dissociate tag with only some of the collections matching the query are associated with the tag is OK" (let [coll1 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET1"})) coll2 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET2"})) token (echo-util/login (system/context) "user1") _ (index/wait-until-indexed) tag (tags/save-tag token (tags/make-tag {:tag-key "tag1"}) [coll1]) assert-tag-associated (partial tags/assert-tag-associated-with-query token {:tag-key "tag1"})] (assert-tag-associated [coll1]) (let [{:keys [status errors]} (tags/dissociate-by-query token "tag1" {:provider "PROV1"})] (is (= 200 status)) (assert-tag-associated []))))) (deftest dissociate-tags-with-mixed-response-test (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (testing "dissociate tag with mixed success and failure response" (let [coll1 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET1"})) coll2 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET2"})) coll3 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET3"})) token (echo-util/login (system/context) "user1") tag-key "tag1" assert-tag-associated (partial tags/assert-tag-associated-with-query token {:tag-key "tag1"})] (tags/create-tag token (tags/make-tag {:tag-key tag-key})) (index/wait-until-indexed) (tags/associate-by-concept-ids token tag-key [{:concept-id (:concept-id coll1)} {:concept-id (:concept-id coll2) :revision-id (:revision-id coll2)}]) (assert-tag-associated [coll1 coll2]) (let [response (tags/dissociate-by-concept-ids token tag-key [{:concept-id "C100-P5"} ;; non-existent 
collection {:concept-id (:concept-id coll1)} ;; success {:concept-id (:concept-id coll2) :revision-id 1} ;; success {:concept-id (:concept-id coll3)}])] ;; no tag association (tags/assert-tag-dissociation-response-ok? {["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]} ["C1200000012-PROV1"] {:concept-id "TA1200000016-CMR" :revision-id 2} ["C1200000013-PROV1" 1] {:concept-id "TA1200000017-CMR" :revision-id 2} ["C1200000014-PROV1"] {:warnings ["Tag [tag1] is not associated with collection [C1200000014-PROV1]."]}} response) (assert-tag-associated []))))) ;; This tests association retention when collections and tags are updated or deleted. (deftest association-retention-test (echo-util/grant-all (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [coll (data-core/ingest "PROV1" (collection/collection)) token (echo-util/login (system/context) "user1") _ (index/wait-until-indexed) tag (tags/save-tag token (tags/make-tag {:tag-key "tag1"}) [coll]) assert-tag-associated (partial tags/assert-tag-associated-with-query nil {:tag-key "tag1"}) assert-tag-not-associated (fn [] (let [refs (search/find-refs :collection {:tag-key "tag1"})] (is (nil? (:errors refs))) (is (data-core/refs-match? 
[] refs))))] (index/wait-until-indexed) (testing "Tag initially associated with collection" (assert-tag-associated [coll])) (testing "Tag still associated with collection after updating collection" (let [updated-coll (data-core/ingest "PROV1" (dissoc coll :revision-id))] (is (= 200 (:status updated-coll))) (index/wait-until-indexed) (assert-tag-associated [updated-coll]))) (testing "Tag still associated with collection after deleting and recreating the collection" (is (= 200 (:status (ingest/delete-concept (data-core/item->concept coll))))) (let [recreated-coll (data-core/ingest "PROV1" (dissoc coll :revision-id))] (is (= 200 (:status recreated-coll))) (index/wait-until-indexed) (assert-tag-associated [recreated-coll]))) (let [latest-coll (assoc coll :revision-id 4)] (testing "Tag still associated with collection after updating tag" (let [updated-tag (tags/save-tag token tag)] (is (= {:status 200 :revision-id 2} (select-keys updated-tag [:status :revision-id]))) (index/wait-until-indexed) (assert-tag-associated [latest-coll]))) (testing "Tag not associated with collection after deleting and recreating the tag" (is (= {:status 200 :concept-id (:concept-id tag) :revision-id 3} (tags/delete-tag token (:tag-key tag)))) (index/wait-until-indexed) (testing "Not associated after tag deleted" (assert-tag-not-associated)) (is (= {:status 200 :concept-id (:concept-id tag) :revision-id 4} (tags/create-tag token (tags/make-tag {:tag-key "tag1"})))) (index/wait-until-indexed) (testing "Not associated after being recreated." (assert-tag-not-associated)))))) (defn- assert-tag-association "Assert the collections are associated with the tag for the given tag-key" [token colls tag-key] (is (data-core/refs-match? 
colls (search/find-refs :collection {:token token :tag-key tag-key})))) (deftest associate-dissociate-tag-with-collections-test ;; Grant all collections in PROV1 (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [[coll1 coll2 coll3] (for [n (range 1 4)] (data-core/ingest "PROV1" (collection/collection))) [coll1-id coll2-id coll3-id] (map :concept-id [coll1 coll2 coll3]) token (echo-util/login (system/context) "user1")] (tags/create-tag token (tags/make-tag {:tag-key "tag1"})) (tags/create-tag token (tags/make-tag {:tag-key "tag2"})) (index/wait-until-indexed) ;; associate tag1 to coll1, tag2 to coll2 ;; both :concept-id and :concept_id works as keys (tags/associate-by-concept-ids token "tag1" [{:concept_id coll1-id}]) (tags/associate-by-concept-ids token "tag2" [{:concept-id coll2-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll1] "tag1") (assert-tag-association token [coll2] "tag2") ;; associate tag1 to coll1 again (tags/associate-by-concept-ids token "tag1" [{:concept-id coll1-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll1] "tag1") (assert-tag-association token [coll2] "tag2") ;; associate tag1 to coll2 (tags/associate-by-concept-ids token "tag1" [{:concept-id coll2-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll1 coll2] "tag1") (assert-tag-association token [coll2] "tag2") ;; associate tag2 to coll1, coll2 and coll3 (tags/associate-by-concept-ids token "tag2" [{:concept-id coll1-id} {:concept-id coll2-id} {:concept-id coll3-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll1 coll2] "tag1") (assert-tag-association token [coll1 coll2 coll3] "tag2") ;; dissociate tag1 from coll1 (tags/dissociate-by-concept-ids token "tag1" [{:concept-id coll1-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll2] "tag1") 
(assert-tag-association token [coll1 coll2 coll3] "tag2") ;; dissociate tag2 from coll1 and coll2 (tags/dissociate-by-concept-ids token "tag2" [{:concept-id coll1-id} {:concept-id coll2-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll2] "tag1") (assert-tag-association token [coll3] "tag2"))) (deftest associate-tags-with-data-test (echo-util/grant-all (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [coll (data-core/ingest "PROV1" (collection/collection)) coll-concept-id (:concept-id coll) token (echo-util/login (system/context) "user1") tag-key "tag1"] (tags/create-tag token (tags/make-tag {:tag-key tag-key})) (index/wait-until-indexed) (testing "Associate tag with collections by concept-id and data" (are [data] (let [{:keys [status]} (tags/associate-by-concept-ids token tag-key [{:concept-id coll-concept-id :data data}])] (is (= 200 status))) "string data" true 100 123.45 [true "some string" 100] {"status" "reviewed" "action" "fix typos"})) (testing "Associate tag with collections with invalid data" (let [{:keys [status body]} (transmit-tag/associate-tag :concept-ids (system/context) tag-key nil {:raw? true :http-options {:body "{{{{"}}) error (-> body :errors first)] (is (= 400 status)) (is (re-find #"Invalid JSON: A JSONObject text must end with \'}\' at \d \[character \d line \d\]" error)))) (testing "Associate tag with collections with data exceed 32KB" (let [too-much-data {"a" (tags/string-of-length 32768)} expected-msg (format "Tag association data exceed the maximum length of 32KB for collection with concept id [%s] revision id [%s]." coll-concept-id nil) response (tags/associate-by-concept-ids token tag-key [{:concept-id coll-concept-id :data too-much-data}])] (tags/assert-tag-association-response-ok? 
{[coll-concept-id] {:errors [expected-msg]}} response))))) (deftest retrieve-concept-by-tag-association-concept-id-test (let [{:keys [status errors]} (search/get-search-failure-xml-data (search/retrieve-concept "TA10000-CMR" nil {:throw-exceptions true}))] (testing "Retrieve concept by tag association concept-id is invalid" (is (= [400 ["Retrieving concept by concept id is not supported for concept type [tag-association]."]] [status errors])))))
13329
(ns cmr.system-int-test.search.tagging.tag-association-test "This tests associating tags with collections." (:require [clojure.test :refer :all] [cmr.common.util :refer [are2] :as util] [cmr.mock-echo.client.echo-util :as echo-util] [cmr.system-int-test.data2.collection :as collection] [cmr.system-int-test.data2.core :as data-core] [cmr.system-int-test.system :as system] [cmr.system-int-test.utils.index-util :as index] [cmr.system-int-test.utils.ingest-util :as ingest] [cmr.system-int-test.utils.metadata-db-util :as mdb] [cmr.system-int-test.utils.search-util :as search] [cmr.system-int-test.utils.tag-util :as tags] [cmr.transmit.tag :as transmit-tag])) (use-fixtures :each (join-fixtures [(ingest/reset-fixture {"provguid1" "PROV1" "provguid2" "PROV2" "provguid3" "PROV3"} {:grant-all-search? false}) tags/grant-all-tag-fixture])) (deftest associate-tags-by-query-with-collections-test ;; Grant all collections in PROV1 and 2 (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV2")) ;; Create 4 collections in each provider that are identical. ;; The first collection will have data: ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"} (let [[c1-p1 c2-p1 c3-p1 c4-p1 c1-p2 c2-p2 c3-p2 c4-p2 c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"] n (range 1 5)] (:concept-id (data-core/ingest p (collection/collection {:short-name (str "S" n) :version-id (str "V" n) :entry-title (str "ET" n)})))) all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1] all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2] tag (tags/make-tag) tag-key (:tag-key tag) token (echo-util/login (system/context) "user1") {:keys [concept-id]} (tags/create-tag token tag)] (index/wait-until-indexed) (testing "Successfully Associate tag with collections" (let [response (tags/associate-by-query token tag-key {:provider "PROV1"})] (tags/assert-tag-association-response-ok? 
{["C1200000013-PROV1"] {:concept-id "TA1200000026-CMR" :revision-id 1} ["C1200000014-PROV1"] {:concept-id "TA1200000027-CMR" :revision-id 1} ["C1200000015-PROV1"] {:concept-id "TA1200000028-CMR" :revision-id 1} ["C1200000016-PROV1"] {:concept-id "TA1200000029-CMR" :revision-id 1}} response))) (testing "Associate using query that finds nothing" (let [response (tags/associate-by-query token tag-key {:provider "foo"})] (tags/assert-tag-association-response-ok? {} response))) (testing "ACLs are applied to collections found" ;; None of PROV3's collections are visible (let [response (tags/associate-by-query token tag-key {:provider "PROV3"})] (tags/assert-tag-association-response-ok? {} response))) (testing "Associate more collections" ;; Associates all the version 2 collections which is c2-p1 (already in) and c2-p2 (new) (let [response (tags/associate-by-query token tag-key {:version "v2"})] (tags/assert-tag-association-response-ok? {["C1200000014-PROV1"] {:concept-id "TA1200000027-CMR" :revision-id 2} ["C1200000018-PROV2"] {:concept-id "TA1200000030-CMR" :revision-id 1}} response))))) (deftest associate-tags-by-concept-ids-with-collections-test ;; Grant all collections in PROV1 and 2 (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV2")) ;; Create 4 collections in each provider that are identical. 
;; The first collection will have data: ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"} (let [[c1-p1 c2-p1 c3-p1 c4-p1 c1-p2 c2-p2 c3-p2 c4-p2 c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"] n (range 1 5)] (:concept-id (data-core/ingest p (collection/collection {:short-name (str "S" n) :version-id (str "V" n) :entry-title (str "ET" n)})))) all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1] all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2] tag-key "<KEY>" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") {:keys [concept-id]} (tags/create-tag token tag)] (index/wait-until-indexed) (testing "Associate tag with collections by concept-ids" (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c1-p1} {:concept-id c3-p2}])] (tags/assert-tag-association-response-ok? {["C1200000013-PROV1"] {:concept-id "TA1200000026-CMR" :revision-id 1} ["C1200000019-PROV2"] {:concept-id "TA1200000027-CMR" :revision-id 1}} response))) (testing "Associate to no collections" (let [response (tags/associate-by-concept-ids token tag-key [])] (tags/assert-invalid-data-error ["At least one collection must be provided for tag association."] response))) (testing "Associate to collection revision and whole collection at the same time" (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c1-p1} {:concept-id c1-p1 :revision-id 1}])] (tags/assert-invalid-data-error [(format (str "Unable to create tag association on a collection revision and the whole " "collection at the same time for the following collections: %s.") c1-p1)] response))) (testing "Associate to non-existent collections" (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id "C100-P5"}])] (tags/assert-tag-association-response-ok? 
{["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}} response))) (testing "Associate to deleted collections" (let [c1-p1-concept (mdb/get-concept c1-p1) _ (ingest/delete-concept c1-p1-concept) _ (index/wait-until-indexed) response (tags/associate-by-concept-ids token tag-key [{:concept-id c1-p1}])] (tags/assert-tag-association-response-ok? {[c1-p1] {:errors [(format "Collection [%s] does not exist or is not visible." c1-p1)]}} response))) (testing "ACLs are applied to collections found" ;; None of PROV3's collections are visible (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c4-p3}])] (tags/assert-tag-association-response-ok? {[c4-p3] {:errors [(format "Collection [%s] does not exist or is not visible." c4-p3)]}} response))) (testing "Tag association mixed response" (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c2-p1} {:concept-id "C100-P5"}])] (tags/assert-tag-association-response-ok? {["C1200000014-PROV1"] {:concept-id "TA1200000028-CMR" :revision-id 1} ["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}} response))))) (deftest associate-tag-failure-test (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [tag-key "<KEY>" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag) ;; The stored updated tag would have user1 in the originator id tag (assoc tag :originator-id "user1") coll-concept-id (:concept-id (data-core/ingest "PROV1" (collection/collection)))] (testing "Associate tag using query sent with invalid content type" (are [associate-tag-fn request-json] (= {:status 400, :errors ["The mime types specified in the content-type header [application/xml] are not supported."]} (associate-tag-fn token tag-key request-json {:http-options {:content-type :xml}})) tags/associate-by-query {:provider "foo"} 
tags/associate-by-concept-ids [{:concept-id coll-concept-id}])) (testing "Associate applies JSON Query validations" (are [associate-tag-fn request-json message] (= {:status 400 :errors [message]} (associate-tag-fn token tag-key {:foo "bar"})) tags/associate-by-query {:foo "bar"} "#/condition: extraneous key [foo] is not permitted" tags/associate-by-concept-ids {:concept-id coll-concept-id} "#: expected type: JSONArray, found: JSONObject")) (testing "Associate tag that doesn't exist" (are [associate-tag-fn request-json] (= {:status 404 :errors ["Tag could not be found with tag-key [tag100]"]} (associate-tag-fn token "tag100" request-json)) tags/associate-by-query {:provider "foo"} tags/associate-by-concept-ids [{:concept-id coll-concept-id}])) (testing "Associate deleted tag" (tags/delete-tag token tag-key) (are [associate-tag-fn request-json] (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." tag-key)]} (associate-tag-fn token tag-key request-json)) tags/associate-by-query {:provider "foo"} tags/associate-by-concept-ids [{:concept-id coll-concept-id}])))) (deftest dissociate-tags-with-collections-by-query-test ;; Create 4 collections in each provider that are identical. 
;; The first collection will have data: ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"} (let [group1-concept-id (echo-util/get-or-create-group (system/context) "group1") ;; Grant all collections in PROV1 and 2 _ (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) _ (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV2")) _ (echo-util/grant-group (system/context) group1-concept-id (echo-util/coll-catalog-item-id "PROV3")) [c1-p1 c2-p1 c3-p1 c4-p1 c1-p2 c2-p2 c3-p2 c4-p2 c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"] n (range 1 5)] (data-core/ingest p (collection/collection {:short-name (str "S" n) :version-id (str "V" n) :entry-title (str "ET" n)}))) all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1] all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2] all-prov3-colls [c1-p3 c2-p3 c3-p3 c4-p3] all-colls (concat all-prov1-colls all-prov2-colls all-prov3-colls) tag-key "tag1" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") prov3-token (echo-util/login (system/context) "prov3-user" [group1-concept-id]) {:keys [concept-id]} (tags/create-tag token tag) assert-tag-associated (partial tags/assert-tag-associated-with-query prov3-token {:tag-key "tag1"})] (index/wait-until-indexed) ;; Associate the tag with every collection (tags/associate-by-query prov3-token tag-key {:or [{:provider "PROV1"} {:provider "PROV2"} {:provider "PROV3"}]}) (testing "Dissociate using query that finds nothing" (let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "foo"})] (is (= 200 status)) (assert-tag-associated all-colls))) (testing "ACLs are applied to collections found" ;; None of PROV3's collections are visible to normal users (let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "PROV3"})] (is (= 200 status)) (assert-tag-associated all-colls))) (testing "Successfully dissociate tag with collections" (let [{:keys 
[status]} (tags/dissociate-by-query token tag-key {:provider "PROV1"})] (is (= 200 status)) (assert-tag-associated (concat all-prov2-colls all-prov3-colls))) ;; dissociate tag again is OK. Since there is no existing tag association, it does nothing. (let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "PROV1"})] (is (= 200 status)) (assert-tag-associated (concat all-prov2-colls all-prov3-colls)))))) (deftest dissociate-tags-with-collections-by-concept-ids-test ;; Create 4 collections in each provider that are identical. ;; The first collection will have data: ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"} (let [group1-concept-id (echo-util/get-or-create-group (system/context) "group1") ;; Grant all collections in PROV1 and 2 _ (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) _ (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV2")) _ (echo-util/grant-group (system/context) group1-concept-id (echo-util/coll-catalog-item-id "PROV3")) [c1-p1 c2-p1 c3-p1 c4-p1 c1-p2 c2-p2 c3-p2 c4-p2 c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"] n (range 1 5)] (data-core/ingest p (collection/collection {:short-name (str "S" n) :version-id (str "V" n) :entry-title (str "ET" n)}))) all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1] all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2] all-prov3-colls [c1-p3 c2-p3 c3-p3 c4-p3] all-colls (concat all-prov1-colls all-prov2-colls all-prov3-colls) tag-key "<KEY>" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") prov3-token (echo-util/login (system/context) "prov3-user" [group1-concept-id]) {:keys [concept-id]} (tags/create-tag token tag) assert-tag-associated (partial tags/assert-tag-associated-with-query prov3-token {:tag-key "tag1"})] (index/wait-until-indexed) ;; Associate the tag with every collection (tags/associate-by-query prov3-token tag-key {:or [{:provider "PROV1"} 
{:provider "PROV2"} {:provider "PROV3"}]}) (testing "Successfully dissociate tag with collections" (let [{:keys [status]} (tags/dissociate-by-concept-ids token tag-key (map #(hash-map :concept-id (:concept-id %)) all-prov1-colls))] (is (= 200 status)) (assert-tag-associated (concat all-prov2-colls all-prov3-colls)))) (testing "Dissociate non-existent collections" (let [response (tags/dissociate-by-concept-ids token tag-key [{:concept-id "C100-P5"}])] (tags/assert-tag-dissociation-response-ok? {["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}} response))) (testing "Dissociate to deleted collections" (let [c1-p2-concept-id (:concept-id c1-p2) c1-p2-concept (mdb/get-concept c1-p2-concept-id) _ (ingest/delete-concept c1-p2-concept) _ (index/wait-until-indexed) response (tags/dissociate-by-concept-ids token tag-key [{:concept-id c1-p2-concept-id}])] (tags/assert-tag-dissociation-response-ok? {["C1200000019-PROV2"] {:errors [(format "Collection [%s] does not exist or is not visible." c1-p2-concept-id)]}} response))) (testing "ACLs are applied to collections found" ;; None of PROV3's collections are visible (let [coll-concept-id (:concept-id c4-p3) response (tags/dissociate-by-concept-ids token tag-key [{:concept-id coll-concept-id}])] (tags/assert-tag-dissociation-response-ok? {["C1200000026-PROV3"] {:errors [(format "Collection [%s] does not exist or is not visible." 
coll-concept-id)]}} response))))) (deftest dissociate-tag-failure-test (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [tag-key "<KEY>" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag) ;; The stored updated tag would have user1 in the originator id tag (assoc tag :originator-id "user1") coll-concept-id (:concept-id (data-core/ingest "PROV1" (collection/collection)))] (testing "Dissociate tag using query sent with invalid content type" (are [dissociate-tag-fn request-json] (= {:status 400, :errors ["The mime types specified in the content-type header [application/xml] are not supported."]} (dissociate-tag-fn token tag-key request-json {:http-options {:content-type :xml}})) tags/dissociate-by-query {:provider "foo"} tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}])) (testing "Dissociate applies JSON Query validations" (are [dissociate-tag-fn request-json message] (= {:status 400 :errors [message]} (dissociate-tag-fn token tag-key request-json)) tags/dissociate-by-query {:foo "bar"} "#/condition: extraneous key [foo] is not permitted" tags/dissociate-by-concept-ids {:concept-id coll-concept-id} "#: expected type: JSONArray, found: JSONObject")) (testing "Dissociate tag that doesn't exist" (are [dissociate-tag-fn request-json] (= {:status 404 :errors ["Tag could not be found with tag-key [tag100]"]} (dissociate-tag-fn token "tag100" request-json)) tags/dissociate-by-query {:provider "foo"} tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}])) (testing "Dissociate deleted tag" (tags/delete-tag token tag-key) (are [dissociate-tag-fn request-json] (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." 
tag-key)]} (dissociate-tag-fn token tag-key request-json)) tags/dissociate-by-query {:provider "foo"} tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}])))) (deftest dissociate-tags-with-partial-match-query-test (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (testing "dissociate tag with only some of the collections matching the query are associated with the tag is OK" (let [coll1 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET1"})) coll2 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET2"})) token (echo-util/login (system/context) "user1") _ (index/wait-until-indexed) tag (tags/save-tag token (tags/make-tag {:tag-key "tag1"}) [coll1]) assert-tag-associated (partial tags/assert-tag-associated-with-query token {:tag-key "tag1"})] (assert-tag-associated [coll1]) (let [{:keys [status errors]} (tags/dissociate-by-query token "tag1" {:provider "PROV1"})] (is (= 200 status)) (assert-tag-associated []))))) (deftest dissociate-tags-with-mixed-response-test (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (testing "dissociate tag with mixed success and failure response" (let [coll1 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET1"})) coll2 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET2"})) coll3 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET3"})) token (echo-util/login (system/context) "user1") tag-key "tag1" assert-tag-associated (partial tags/assert-tag-associated-with-query token {:tag-key "tag1"})] (tags/create-tag token (tags/make-tag {:tag-key tag-key})) (index/wait-until-indexed) (tags/associate-by-concept-ids token tag-key [{:concept-id (:concept-id coll1)} {:concept-id (:concept-id coll2) :revision-id (:revision-id coll2)}]) (assert-tag-associated [coll1 coll2]) (let [response (tags/dissociate-by-concept-ids token tag-key [{:concept-id "C100-P5"} ;; non-existent 
collection {:concept-id (:concept-id coll1)} ;; success {:concept-id (:concept-id coll2) :revision-id 1} ;; success {:concept-id (:concept-id coll3)}])] ;; no tag association (tags/assert-tag-dissociation-response-ok? {["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]} ["C1200000012-PROV1"] {:concept-id "TA1200000016-CMR" :revision-id 2} ["C1200000013-PROV1" 1] {:concept-id "TA1200000017-CMR" :revision-id 2} ["C1200000014-PROV1"] {:warnings ["Tag [tag1] is not associated with collection [C1200000014-PROV1]."]}} response) (assert-tag-associated []))))) ;; This tests association retention when collections and tags are updated or deleted. (deftest association-retention-test (echo-util/grant-all (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [coll (data-core/ingest "PROV1" (collection/collection)) token (echo-util/login (system/context) "user1") _ (index/wait-until-indexed) tag (tags/save-tag token (tags/make-tag {:tag-key "tag1"}) [coll]) assert-tag-associated (partial tags/assert-tag-associated-with-query nil {:tag-key "tag1"}) assert-tag-not-associated (fn [] (let [refs (search/find-refs :collection {:tag-key "tag1"})] (is (nil? (:errors refs))) (is (data-core/refs-match? 
[] refs))))] (index/wait-until-indexed) (testing "Tag initially associated with collection" (assert-tag-associated [coll])) (testing "Tag still associated with collection after updating collection" (let [updated-coll (data-core/ingest "PROV1" (dissoc coll :revision-id))] (is (= 200 (:status updated-coll))) (index/wait-until-indexed) (assert-tag-associated [updated-coll]))) (testing "Tag still associated with collection after deleting and recreating the collection" (is (= 200 (:status (ingest/delete-concept (data-core/item->concept coll))))) (let [recreated-coll (data-core/ingest "PROV1" (dissoc coll :revision-id))] (is (= 200 (:status recreated-coll))) (index/wait-until-indexed) (assert-tag-associated [recreated-coll]))) (let [latest-coll (assoc coll :revision-id 4)] (testing "Tag still associated with collection after updating tag" (let [updated-tag (tags/save-tag token tag)] (is (= {:status 200 :revision-id 2} (select-keys updated-tag [:status :revision-id]))) (index/wait-until-indexed) (assert-tag-associated [latest-coll]))) (testing "Tag not associated with collection after deleting and recreating the tag" (is (= {:status 200 :concept-id (:concept-id tag) :revision-id 3} (tags/delete-tag token (:tag-key tag)))) (index/wait-until-indexed) (testing "Not associated after tag deleted" (assert-tag-not-associated)) (is (= {:status 200 :concept-id (:concept-id tag) :revision-id 4} (tags/create-tag token (tags/make-tag {:tag-key "tag1"})))) (index/wait-until-indexed) (testing "Not associated after being recreated." (assert-tag-not-associated)))))) (defn- assert-tag-association "Assert the collections are associated with the tag for the given tag-key" [token colls tag-key] (is (data-core/refs-match? 
colls (search/find-refs :collection {:token token :tag-key tag-key})))) (deftest associate-dissociate-tag-with-collections-test ;; Grant all collections in PROV1 (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [[coll1 coll2 coll3] (for [n (range 1 4)] (data-core/ingest "PROV1" (collection/collection))) [coll1-id coll2-id coll3-id] (map :concept-id [coll1 coll2 coll3]) token (echo-util/login (system/context) "user1")] (tags/create-tag token (tags/make-tag {:tag-key "tag1"})) (tags/create-tag token (tags/make-tag {:tag-key "tag2"})) (index/wait-until-indexed) ;; associate tag1 to coll1, tag2 to coll2 ;; both :concept-id and :concept_id works as keys (tags/associate-by-concept-ids token "tag1" [{:concept_id coll1-id}]) (tags/associate-by-concept-ids token "tag2" [{:concept-id coll2-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll1] "tag1") (assert-tag-association token [coll2] "tag2") ;; associate tag1 to coll1 again (tags/associate-by-concept-ids token "tag1" [{:concept-id coll1-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll1] "tag1") (assert-tag-association token [coll2] "tag2") ;; associate tag1 to coll2 (tags/associate-by-concept-ids token "tag1" [{:concept-id coll2-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll1 coll2] "tag1") (assert-tag-association token [coll2] "tag2") ;; associate tag2 to coll1, coll2 and coll3 (tags/associate-by-concept-ids token "tag2" [{:concept-id coll1-id} {:concept-id coll2-id} {:concept-id coll3-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll1 coll2] "tag1") (assert-tag-association token [coll1 coll2 coll3] "tag2") ;; dissociate tag1 from coll1 (tags/dissociate-by-concept-ids token "tag1" [{:concept-id coll1-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll2] "tag1") 
(assert-tag-association token [coll1 coll2 coll3] "tag2") ;; dissociate tag2 from coll1 and coll2 (tags/dissociate-by-concept-ids token "tag2" [{:concept-id coll1-id} {:concept-id coll2-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll2] "tag1") (assert-tag-association token [coll3] "tag2"))) (deftest associate-tags-with-data-test (echo-util/grant-all (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [coll (data-core/ingest "PROV1" (collection/collection)) coll-concept-id (:concept-id coll) token (echo-util/login (system/context) "user1") tag-key "<KEY>"] (tags/create-tag token (tags/make-tag {:tag-key tag-key})) (index/wait-until-indexed) (testing "Associate tag with collections by concept-id and data" (are [data] (let [{:keys [status]} (tags/associate-by-concept-ids token tag-key [{:concept-id coll-concept-id :data data}])] (is (= 200 status))) "string data" true 100 123.45 [true "some string" 100] {"status" "reviewed" "action" "fix typos"})) (testing "Associate tag with collections with invalid data" (let [{:keys [status body]} (transmit-tag/associate-tag :concept-ids (system/context) tag-key nil {:raw? true :http-options {:body "{{{{"}}) error (-> body :errors first)] (is (= 400 status)) (is (re-find #"Invalid JSON: A JSONObject text must end with \'}\' at \d \[character \d line \d\]" error)))) (testing "Associate tag with collections with data exceed 32KB" (let [too-much-data {"a" (tags/string-of-length 32768)} expected-msg (format "Tag association data exceed the maximum length of 32KB for collection with concept id [%s] revision id [%s]." coll-concept-id nil) response (tags/associate-by-concept-ids token tag-key [{:concept-id coll-concept-id :data too-much-data}])] (tags/assert-tag-association-response-ok? 
{[coll-concept-id] {:errors [expected-msg]}} response))))) (deftest retrieve-concept-by-tag-association-concept-id-test (let [{:keys [status errors]} (search/get-search-failure-xml-data (search/retrieve-concept "TA10000-CMR" nil {:throw-exceptions true}))] (testing "Retrieve concept by tag association concept-id is invalid" (is (= [400 ["Retrieving concept by concept id is not supported for concept type [tag-association]."]] [status errors])))))
true
(ns cmr.system-int-test.search.tagging.tag-association-test "This tests associating tags with collections." (:require [clojure.test :refer :all] [cmr.common.util :refer [are2] :as util] [cmr.mock-echo.client.echo-util :as echo-util] [cmr.system-int-test.data2.collection :as collection] [cmr.system-int-test.data2.core :as data-core] [cmr.system-int-test.system :as system] [cmr.system-int-test.utils.index-util :as index] [cmr.system-int-test.utils.ingest-util :as ingest] [cmr.system-int-test.utils.metadata-db-util :as mdb] [cmr.system-int-test.utils.search-util :as search] [cmr.system-int-test.utils.tag-util :as tags] [cmr.transmit.tag :as transmit-tag])) (use-fixtures :each (join-fixtures [(ingest/reset-fixture {"provguid1" "PROV1" "provguid2" "PROV2" "provguid3" "PROV3"} {:grant-all-search? false}) tags/grant-all-tag-fixture])) (deftest associate-tags-by-query-with-collections-test ;; Grant all collections in PROV1 and 2 (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV2")) ;; Create 4 collections in each provider that are identical. ;; The first collection will have data: ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"} (let [[c1-p1 c2-p1 c3-p1 c4-p1 c1-p2 c2-p2 c3-p2 c4-p2 c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"] n (range 1 5)] (:concept-id (data-core/ingest p (collection/collection {:short-name (str "S" n) :version-id (str "V" n) :entry-title (str "ET" n)})))) all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1] all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2] tag (tags/make-tag) tag-key (:tag-key tag) token (echo-util/login (system/context) "user1") {:keys [concept-id]} (tags/create-tag token tag)] (index/wait-until-indexed) (testing "Successfully Associate tag with collections" (let [response (tags/associate-by-query token tag-key {:provider "PROV1"})] (tags/assert-tag-association-response-ok? 
{["C1200000013-PROV1"] {:concept-id "TA1200000026-CMR" :revision-id 1} ["C1200000014-PROV1"] {:concept-id "TA1200000027-CMR" :revision-id 1} ["C1200000015-PROV1"] {:concept-id "TA1200000028-CMR" :revision-id 1} ["C1200000016-PROV1"] {:concept-id "TA1200000029-CMR" :revision-id 1}} response))) (testing "Associate using query that finds nothing" (let [response (tags/associate-by-query token tag-key {:provider "foo"})] (tags/assert-tag-association-response-ok? {} response))) (testing "ACLs are applied to collections found" ;; None of PROV3's collections are visible (let [response (tags/associate-by-query token tag-key {:provider "PROV3"})] (tags/assert-tag-association-response-ok? {} response))) (testing "Associate more collections" ;; Associates all the version 2 collections which is c2-p1 (already in) and c2-p2 (new) (let [response (tags/associate-by-query token tag-key {:version "v2"})] (tags/assert-tag-association-response-ok? {["C1200000014-PROV1"] {:concept-id "TA1200000027-CMR" :revision-id 2} ["C1200000018-PROV2"] {:concept-id "TA1200000030-CMR" :revision-id 1}} response))))) (deftest associate-tags-by-concept-ids-with-collections-test ;; Grant all collections in PROV1 and 2 (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV2")) ;; Create 4 collections in each provider that are identical. 
;; The first collection will have data: ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"} (let [[c1-p1 c2-p1 c3-p1 c4-p1 c1-p2 c2-p2 c3-p2 c4-p2 c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"] n (range 1 5)] (:concept-id (data-core/ingest p (collection/collection {:short-name (str "S" n) :version-id (str "V" n) :entry-title (str "ET" n)})))) all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1] all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2] tag-key "PI:KEY:<KEY>END_PI" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") {:keys [concept-id]} (tags/create-tag token tag)] (index/wait-until-indexed) (testing "Associate tag with collections by concept-ids" (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c1-p1} {:concept-id c3-p2}])] (tags/assert-tag-association-response-ok? {["C1200000013-PROV1"] {:concept-id "TA1200000026-CMR" :revision-id 1} ["C1200000019-PROV2"] {:concept-id "TA1200000027-CMR" :revision-id 1}} response))) (testing "Associate to no collections" (let [response (tags/associate-by-concept-ids token tag-key [])] (tags/assert-invalid-data-error ["At least one collection must be provided for tag association."] response))) (testing "Associate to collection revision and whole collection at the same time" (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c1-p1} {:concept-id c1-p1 :revision-id 1}])] (tags/assert-invalid-data-error [(format (str "Unable to create tag association on a collection revision and the whole " "collection at the same time for the following collections: %s.") c1-p1)] response))) (testing "Associate to non-existent collections" (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id "C100-P5"}])] (tags/assert-tag-association-response-ok? 
{["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}} response))) (testing "Associate to deleted collections" (let [c1-p1-concept (mdb/get-concept c1-p1) _ (ingest/delete-concept c1-p1-concept) _ (index/wait-until-indexed) response (tags/associate-by-concept-ids token tag-key [{:concept-id c1-p1}])] (tags/assert-tag-association-response-ok? {[c1-p1] {:errors [(format "Collection [%s] does not exist or is not visible." c1-p1)]}} response))) (testing "ACLs are applied to collections found" ;; None of PROV3's collections are visible (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c4-p3}])] (tags/assert-tag-association-response-ok? {[c4-p3] {:errors [(format "Collection [%s] does not exist or is not visible." c4-p3)]}} response))) (testing "Tag association mixed response" (let [response (tags/associate-by-concept-ids token tag-key [{:concept-id c2-p1} {:concept-id "C100-P5"}])] (tags/assert-tag-association-response-ok? {["C1200000014-PROV1"] {:concept-id "TA1200000028-CMR" :revision-id 1} ["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}} response))))) (deftest associate-tag-failure-test (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [tag-key "PI:KEY:<KEY>END_PI" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag) ;; The stored updated tag would have user1 in the originator id tag (assoc tag :originator-id "user1") coll-concept-id (:concept-id (data-core/ingest "PROV1" (collection/collection)))] (testing "Associate tag using query sent with invalid content type" (are [associate-tag-fn request-json] (= {:status 400, :errors ["The mime types specified in the content-type header [application/xml] are not supported."]} (associate-tag-fn token tag-key request-json {:http-options {:content-type :xml}})) tags/associate-by-query {:provider "foo"} 
tags/associate-by-concept-ids [{:concept-id coll-concept-id}])) (testing "Associate applies JSON Query validations" (are [associate-tag-fn request-json message] (= {:status 400 :errors [message]} (associate-tag-fn token tag-key {:foo "bar"})) tags/associate-by-query {:foo "bar"} "#/condition: extraneous key [foo] is not permitted" tags/associate-by-concept-ids {:concept-id coll-concept-id} "#: expected type: JSONArray, found: JSONObject")) (testing "Associate tag that doesn't exist" (are [associate-tag-fn request-json] (= {:status 404 :errors ["Tag could not be found with tag-key [tag100]"]} (associate-tag-fn token "tag100" request-json)) tags/associate-by-query {:provider "foo"} tags/associate-by-concept-ids [{:concept-id coll-concept-id}])) (testing "Associate deleted tag" (tags/delete-tag token tag-key) (are [associate-tag-fn request-json] (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." tag-key)]} (associate-tag-fn token tag-key request-json)) tags/associate-by-query {:provider "foo"} tags/associate-by-concept-ids [{:concept-id coll-concept-id}])))) (deftest dissociate-tags-with-collections-by-query-test ;; Create 4 collections in each provider that are identical. 
;; The first collection will have data: ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"} (let [group1-concept-id (echo-util/get-or-create-group (system/context) "group1") ;; Grant all collections in PROV1 and 2 _ (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) _ (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV2")) _ (echo-util/grant-group (system/context) group1-concept-id (echo-util/coll-catalog-item-id "PROV3")) [c1-p1 c2-p1 c3-p1 c4-p1 c1-p2 c2-p2 c3-p2 c4-p2 c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"] n (range 1 5)] (data-core/ingest p (collection/collection {:short-name (str "S" n) :version-id (str "V" n) :entry-title (str "ET" n)}))) all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1] all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2] all-prov3-colls [c1-p3 c2-p3 c3-p3 c4-p3] all-colls (concat all-prov1-colls all-prov2-colls all-prov3-colls) tag-key "tag1" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") prov3-token (echo-util/login (system/context) "prov3-user" [group1-concept-id]) {:keys [concept-id]} (tags/create-tag token tag) assert-tag-associated (partial tags/assert-tag-associated-with-query prov3-token {:tag-key "tag1"})] (index/wait-until-indexed) ;; Associate the tag with every collection (tags/associate-by-query prov3-token tag-key {:or [{:provider "PROV1"} {:provider "PROV2"} {:provider "PROV3"}]}) (testing "Dissociate using query that finds nothing" (let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "foo"})] (is (= 200 status)) (assert-tag-associated all-colls))) (testing "ACLs are applied to collections found" ;; None of PROV3's collections are visible to normal users (let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "PROV3"})] (is (= 200 status)) (assert-tag-associated all-colls))) (testing "Successfully dissociate tag with collections" (let [{:keys 
[status]} (tags/dissociate-by-query token tag-key {:provider "PROV1"})] (is (= 200 status)) (assert-tag-associated (concat all-prov2-colls all-prov3-colls))) ;; dissociate tag again is OK. Since there is no existing tag association, it does nothing. (let [{:keys [status]} (tags/dissociate-by-query token tag-key {:provider "PROV1"})] (is (= 200 status)) (assert-tag-associated (concat all-prov2-colls all-prov3-colls)))))) (deftest dissociate-tags-with-collections-by-concept-ids-test ;; Create 4 collections in each provider that are identical. ;; The first collection will have data: ;; {:entry-id "S1_V1", :entry_title "ET1", :short-name "S1", :version-id "V1"} (let [group1-concept-id (echo-util/get-or-create-group (system/context) "group1") ;; Grant all collections in PROV1 and 2 _ (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) _ (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV2")) _ (echo-util/grant-group (system/context) group1-concept-id (echo-util/coll-catalog-item-id "PROV3")) [c1-p1 c2-p1 c3-p1 c4-p1 c1-p2 c2-p2 c3-p2 c4-p2 c1-p3 c2-p3 c3-p3 c4-p3] (for [p ["PROV1" "PROV2" "PROV3"] n (range 1 5)] (data-core/ingest p (collection/collection {:short-name (str "S" n) :version-id (str "V" n) :entry-title (str "ET" n)}))) all-prov1-colls [c1-p1 c2-p1 c3-p1 c4-p1] all-prov2-colls [c1-p2 c2-p2 c3-p2 c4-p2] all-prov3-colls [c1-p3 c2-p3 c3-p3 c4-p3] all-colls (concat all-prov1-colls all-prov2-colls all-prov3-colls) tag-key "PI:KEY:<KEY>END_PI" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") prov3-token (echo-util/login (system/context) "prov3-user" [group1-concept-id]) {:keys [concept-id]} (tags/create-tag token tag) assert-tag-associated (partial tags/assert-tag-associated-with-query prov3-token {:tag-key "tag1"})] (index/wait-until-indexed) ;; Associate the tag with every collection (tags/associate-by-query prov3-token tag-key {:or [{:provider 
"PROV1"} {:provider "PROV2"} {:provider "PROV3"}]}) (testing "Successfully dissociate tag with collections" (let [{:keys [status]} (tags/dissociate-by-concept-ids token tag-key (map #(hash-map :concept-id (:concept-id %)) all-prov1-colls))] (is (= 200 status)) (assert-tag-associated (concat all-prov2-colls all-prov3-colls)))) (testing "Dissociate non-existent collections" (let [response (tags/dissociate-by-concept-ids token tag-key [{:concept-id "C100-P5"}])] (tags/assert-tag-dissociation-response-ok? {["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]}} response))) (testing "Dissociate to deleted collections" (let [c1-p2-concept-id (:concept-id c1-p2) c1-p2-concept (mdb/get-concept c1-p2-concept-id) _ (ingest/delete-concept c1-p2-concept) _ (index/wait-until-indexed) response (tags/dissociate-by-concept-ids token tag-key [{:concept-id c1-p2-concept-id}])] (tags/assert-tag-dissociation-response-ok? {["C1200000019-PROV2"] {:errors [(format "Collection [%s] does not exist or is not visible." c1-p2-concept-id)]}} response))) (testing "ACLs are applied to collections found" ;; None of PROV3's collections are visible (let [coll-concept-id (:concept-id c4-p3) response (tags/dissociate-by-concept-ids token tag-key [{:concept-id coll-concept-id}])] (tags/assert-tag-dissociation-response-ok? {["C1200000026-PROV3"] {:errors [(format "Collection [%s] does not exist or is not visible." 
coll-concept-id)]}} response))))) (deftest dissociate-tag-failure-test (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [tag-key "PI:KEY:<KEY>END_PI" tag (tags/make-tag {:tag-key tag-key}) token (echo-util/login (system/context) "user1") {:keys [concept-id revision-id]} (tags/create-tag token tag) ;; The stored updated tag would have user1 in the originator id tag (assoc tag :originator-id "user1") coll-concept-id (:concept-id (data-core/ingest "PROV1" (collection/collection)))] (testing "Dissociate tag using query sent with invalid content type" (are [dissociate-tag-fn request-json] (= {:status 400, :errors ["The mime types specified in the content-type header [application/xml] are not supported."]} (dissociate-tag-fn token tag-key request-json {:http-options {:content-type :xml}})) tags/dissociate-by-query {:provider "foo"} tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}])) (testing "Dissociate applies JSON Query validations" (are [dissociate-tag-fn request-json message] (= {:status 400 :errors [message]} (dissociate-tag-fn token tag-key request-json)) tags/dissociate-by-query {:foo "bar"} "#/condition: extraneous key [foo] is not permitted" tags/dissociate-by-concept-ids {:concept-id coll-concept-id} "#: expected type: JSONArray, found: JSONObject")) (testing "Dissociate tag that doesn't exist" (are [dissociate-tag-fn request-json] (= {:status 404 :errors ["Tag could not be found with tag-key [tag100]"]} (dissociate-tag-fn token "tag100" request-json)) tags/dissociate-by-query {:provider "foo"} tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}])) (testing "Dissociate deleted tag" (tags/delete-tag token tag-key) (are [dissociate-tag-fn request-json] (= {:status 404 :errors [(format "Tag with tag-key [%s] was deleted." 
tag-key)]} (dissociate-tag-fn token tag-key request-json)) tags/dissociate-by-query {:provider "foo"} tags/dissociate-by-concept-ids [{:concept-id coll-concept-id}])))) (deftest dissociate-tags-with-partial-match-query-test (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (testing "dissociate tag with only some of the collections matching the query are associated with the tag is OK" (let [coll1 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET1"})) coll2 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET2"})) token (echo-util/login (system/context) "user1") _ (index/wait-until-indexed) tag (tags/save-tag token (tags/make-tag {:tag-key "tag1"}) [coll1]) assert-tag-associated (partial tags/assert-tag-associated-with-query token {:tag-key "tag1"})] (assert-tag-associated [coll1]) (let [{:keys [status errors]} (tags/dissociate-by-query token "tag1" {:provider "PROV1"})] (is (= 200 status)) (assert-tag-associated []))))) (deftest dissociate-tags-with-mixed-response-test (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (testing "dissociate tag with mixed success and failure response" (let [coll1 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET1"})) coll2 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET2"})) coll3 (data-core/ingest "PROV1" (collection/collection {:entry-title "ET3"})) token (echo-util/login (system/context) "user1") tag-key "tag1" assert-tag-associated (partial tags/assert-tag-associated-with-query token {:tag-key "tag1"})] (tags/create-tag token (tags/make-tag {:tag-key tag-key})) (index/wait-until-indexed) (tags/associate-by-concept-ids token tag-key [{:concept-id (:concept-id coll1)} {:concept-id (:concept-id coll2) :revision-id (:revision-id coll2)}]) (assert-tag-associated [coll1 coll2]) (let [response (tags/dissociate-by-concept-ids token tag-key [{:concept-id "C100-P5"} ;; non-existent 
collection {:concept-id (:concept-id coll1)} ;; success {:concept-id (:concept-id coll2) :revision-id 1} ;; success {:concept-id (:concept-id coll3)}])] ;; no tag association (tags/assert-tag-dissociation-response-ok? {["C100-P5"] {:errors ["Collection [C100-P5] does not exist or is not visible."]} ["C1200000012-PROV1"] {:concept-id "TA1200000016-CMR" :revision-id 2} ["C1200000013-PROV1" 1] {:concept-id "TA1200000017-CMR" :revision-id 2} ["C1200000014-PROV1"] {:warnings ["Tag [tag1] is not associated with collection [C1200000014-PROV1]."]}} response) (assert-tag-associated []))))) ;; This tests association retention when collections and tags are updated or deleted. (deftest association-retention-test (echo-util/grant-all (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [coll (data-core/ingest "PROV1" (collection/collection)) token (echo-util/login (system/context) "user1") _ (index/wait-until-indexed) tag (tags/save-tag token (tags/make-tag {:tag-key "tag1"}) [coll]) assert-tag-associated (partial tags/assert-tag-associated-with-query nil {:tag-key "tag1"}) assert-tag-not-associated (fn [] (let [refs (search/find-refs :collection {:tag-key "tag1"})] (is (nil? (:errors refs))) (is (data-core/refs-match? 
[] refs))))] (index/wait-until-indexed) (testing "Tag initially associated with collection" (assert-tag-associated [coll])) (testing "Tag still associated with collection after updating collection" (let [updated-coll (data-core/ingest "PROV1" (dissoc coll :revision-id))] (is (= 200 (:status updated-coll))) (index/wait-until-indexed) (assert-tag-associated [updated-coll]))) (testing "Tag still associated with collection after deleting and recreating the collection" (is (= 200 (:status (ingest/delete-concept (data-core/item->concept coll))))) (let [recreated-coll (data-core/ingest "PROV1" (dissoc coll :revision-id))] (is (= 200 (:status recreated-coll))) (index/wait-until-indexed) (assert-tag-associated [recreated-coll]))) (let [latest-coll (assoc coll :revision-id 4)] (testing "Tag still associated with collection after updating tag" (let [updated-tag (tags/save-tag token tag)] (is (= {:status 200 :revision-id 2} (select-keys updated-tag [:status :revision-id]))) (index/wait-until-indexed) (assert-tag-associated [latest-coll]))) (testing "Tag not associated with collection after deleting and recreating the tag" (is (= {:status 200 :concept-id (:concept-id tag) :revision-id 3} (tags/delete-tag token (:tag-key tag)))) (index/wait-until-indexed) (testing "Not associated after tag deleted" (assert-tag-not-associated)) (is (= {:status 200 :concept-id (:concept-id tag) :revision-id 4} (tags/create-tag token (tags/make-tag {:tag-key "tag1"})))) (index/wait-until-indexed) (testing "Not associated after being recreated." (assert-tag-not-associated)))))) (defn- assert-tag-association "Assert the collections are associated with the tag for the given tag-key" [token colls tag-key] (is (data-core/refs-match? 
colls (search/find-refs :collection {:token token :tag-key tag-key})))) (deftest associate-dissociate-tag-with-collections-test ;; Grant all collections in PROV1 (echo-util/grant-registered-users (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [[coll1 coll2 coll3] (for [n (range 1 4)] (data-core/ingest "PROV1" (collection/collection))) [coll1-id coll2-id coll3-id] (map :concept-id [coll1 coll2 coll3]) token (echo-util/login (system/context) "user1")] (tags/create-tag token (tags/make-tag {:tag-key "tag1"})) (tags/create-tag token (tags/make-tag {:tag-key "tag2"})) (index/wait-until-indexed) ;; associate tag1 to coll1, tag2 to coll2 ;; both :concept-id and :concept_id works as keys (tags/associate-by-concept-ids token "tag1" [{:concept_id coll1-id}]) (tags/associate-by-concept-ids token "tag2" [{:concept-id coll2-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll1] "tag1") (assert-tag-association token [coll2] "tag2") ;; associate tag1 to coll1 again (tags/associate-by-concept-ids token "tag1" [{:concept-id coll1-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll1] "tag1") (assert-tag-association token [coll2] "tag2") ;; associate tag1 to coll2 (tags/associate-by-concept-ids token "tag1" [{:concept-id coll2-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll1 coll2] "tag1") (assert-tag-association token [coll2] "tag2") ;; associate tag2 to coll1, coll2 and coll3 (tags/associate-by-concept-ids token "tag2" [{:concept-id coll1-id} {:concept-id coll2-id} {:concept-id coll3-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll1 coll2] "tag1") (assert-tag-association token [coll1 coll2 coll3] "tag2") ;; dissociate tag1 from coll1 (tags/dissociate-by-concept-ids token "tag1" [{:concept-id coll1-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll2] "tag1") 
(assert-tag-association token [coll1 coll2 coll3] "tag2") ;; dissociate tag2 from coll1 and coll2 (tags/dissociate-by-concept-ids token "tag2" [{:concept-id coll1-id} {:concept-id coll2-id}]) (index/wait-until-indexed) ;; verify association (assert-tag-association token [coll2] "tag1") (assert-tag-association token [coll3] "tag2"))) (deftest associate-tags-with-data-test (echo-util/grant-all (system/context) (echo-util/coll-catalog-item-id "PROV1")) (let [coll (data-core/ingest "PROV1" (collection/collection)) coll-concept-id (:concept-id coll) token (echo-util/login (system/context) "user1") tag-key "PI:KEY:<KEY>END_PI"] (tags/create-tag token (tags/make-tag {:tag-key tag-key})) (index/wait-until-indexed) (testing "Associate tag with collections by concept-id and data" (are [data] (let [{:keys [status]} (tags/associate-by-concept-ids token tag-key [{:concept-id coll-concept-id :data data}])] (is (= 200 status))) "string data" true 100 123.45 [true "some string" 100] {"status" "reviewed" "action" "fix typos"})) (testing "Associate tag with collections with invalid data" (let [{:keys [status body]} (transmit-tag/associate-tag :concept-ids (system/context) tag-key nil {:raw? true :http-options {:body "{{{{"}}) error (-> body :errors first)] (is (= 400 status)) (is (re-find #"Invalid JSON: A JSONObject text must end with \'}\' at \d \[character \d line \d\]" error)))) (testing "Associate tag with collections with data exceed 32KB" (let [too-much-data {"a" (tags/string-of-length 32768)} expected-msg (format "Tag association data exceed the maximum length of 32KB for collection with concept id [%s] revision id [%s]." coll-concept-id nil) response (tags/associate-by-concept-ids token tag-key [{:concept-id coll-concept-id :data too-much-data}])] (tags/assert-tag-association-response-ok? 
{[coll-concept-id] {:errors [expected-msg]}} response))))) (deftest retrieve-concept-by-tag-association-concept-id-test (let [{:keys [status errors]} (search/get-search-failure-xml-data (search/retrieve-concept "TA10000-CMR" nil {:throw-exceptions true}))] (testing "Retrieve concept by tag association concept-id is invalid" (is (= [400 ["Retrieving concept by concept id is not supported for concept type [tag-association]."]] [status errors])))))
[ { "context": ";;;; Copyright 2016 Peter Stephens. All Rights Reserved.\r\n;;;;\r\n;;;; Licensed unde", "end": 36, "score": 0.999788224697113, "start": 22, "tag": "NAME", "value": "Peter Stephens" } ]
src/biblecli/main/utility.cljs
pstephens/kingjames.bible
23
;;;; Copyright 2016 Peter Stephens. All Rights Reserved. ;;;; ;;;; Licensed under the Apache License, Version 2.0 (the "License"); ;;;; you may not use this file except in compliance with the License. ;;;; You may obtain a copy of the License at ;;;; ;;;; http://www.apache.org/licenses/LICENSE-2.0 ;;;; ;;;; Unless required by applicable law or agreed to in writing, software ;;;; distributed under the License is distributed on an "AS IS" BASIS, ;;;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;;;; See the License for the specific language governing permissions and ;;;; limitations under the License. (ns biblecli.main.utility) (def path (js/require "path")) (def root-path (atom "")) (defn get-root-path [] @root-path) (defn set-root-path! [path] (reset! root-path path)) (defn default-parser [] "staggs") (defn default-parser-input [] (.join path (get-root-path) "./kjv-src/www.staggs.pair.com-kjbp/kjv.txt")) (defn default-parser-input-rel [] (.relative path "" (default-parser-input)))
24071
;;;; Copyright 2016 <NAME>. All Rights Reserved. ;;;; ;;;; Licensed under the Apache License, Version 2.0 (the "License"); ;;;; you may not use this file except in compliance with the License. ;;;; You may obtain a copy of the License at ;;;; ;;;; http://www.apache.org/licenses/LICENSE-2.0 ;;;; ;;;; Unless required by applicable law or agreed to in writing, software ;;;; distributed under the License is distributed on an "AS IS" BASIS, ;;;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;;;; See the License for the specific language governing permissions and ;;;; limitations under the License. (ns biblecli.main.utility) (def path (js/require "path")) (def root-path (atom "")) (defn get-root-path [] @root-path) (defn set-root-path! [path] (reset! root-path path)) (defn default-parser [] "staggs") (defn default-parser-input [] (.join path (get-root-path) "./kjv-src/www.staggs.pair.com-kjbp/kjv.txt")) (defn default-parser-input-rel [] (.relative path "" (default-parser-input)))
true
;;;; Copyright 2016 PI:NAME:<NAME>END_PI. All Rights Reserved. ;;;; ;;;; Licensed under the Apache License, Version 2.0 (the "License"); ;;;; you may not use this file except in compliance with the License. ;;;; You may obtain a copy of the License at ;;;; ;;;; http://www.apache.org/licenses/LICENSE-2.0 ;;;; ;;;; Unless required by applicable law or agreed to in writing, software ;;;; distributed under the License is distributed on an "AS IS" BASIS, ;;;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;;;; See the License for the specific language governing permissions and ;;;; limitations under the License. (ns biblecli.main.utility) (def path (js/require "path")) (def root-path (atom "")) (defn get-root-path [] @root-path) (defn set-root-path! [path] (reset! root-path path)) (defn default-parser [] "staggs") (defn default-parser-input [] (.join path (get-root-path) "./kjv-src/www.staggs.pair.com-kjbp/kjv.txt")) (defn default-parser-input-rel [] (.relative path "" (default-parser-input)))
[ { "context": ";;\n;;\n;; Copyright 2013-2015 Netflix, Inc.\n;;\n;; Licensed under the Apache License", "end": 37, "score": 0.7631281614303589, "start": 30, "tag": "NAME", "value": "Netflix" } ]
pigpen-core/src/main/clojure/pigpen/core/op.clj
ombagus/Netflix
327
;; ;; ;; Copyright 2013-2015 Netflix, Inc. ;; ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. ;; ;; (ns pigpen.core.op "*** ALPHA - Subject to change *** The raw pigpen operators. These are the basic building blocks that platforms implement. All higher level operators are defined in terms of these operators. These should be used to build custom PigPen operators. In these examples, fields refers to the fields that the underlying platform is aware of. Usually this is a single user field that represents arbitrary Clojure data. Note: You most likely don't want this namespace. 
Unless you are doing advanced things, stick to pigpen.core " (:require [pigpen.raw] [pigpen.runtime])) (intern *ns* (with-meta 'noop$ (meta #'pigpen.raw/noop$)) @#'pigpen.raw/noop$) ;; ********** IO ********** (intern *ns* (with-meta 'load$ (meta #'pigpen.raw/load$)) @#'pigpen.raw/load$) (intern *ns* (with-meta 'store$ (meta #'pigpen.raw/store$)) @#'pigpen.raw/store$) (intern *ns* (with-meta 'store-many$ (meta #'pigpen.raw/store-many$)) @#'pigpen.raw/store-many$) (intern *ns* (with-meta 'return$ (meta #'pigpen.raw/return$)) @#'pigpen.raw/return$) ;; ********** Map ********** (intern *ns* (with-meta 'code$ (meta #'pigpen.raw/code$)) @#'pigpen.raw/code$) (intern *ns* (with-meta 'projection-field$ (meta #'pigpen.raw/projection-field$)) @#'pigpen.raw/projection-field$) (intern *ns* (with-meta 'projection-func$ (meta #'pigpen.raw/projection-func$)) @#'pigpen.raw/projection-func$) (intern *ns* (with-meta 'project$ (meta #'pigpen.raw/project$)) @#'pigpen.raw/project$) (intern *ns* (with-meta 'map->bind (meta #'pigpen.runtime/map->bind)) @#'pigpen.runtime/map->bind) (intern *ns* (with-meta 'mapcat->bind (meta #'pigpen.runtime/mapcat->bind)) @#'pigpen.runtime/mapcat->bind) (intern *ns* (with-meta 'filter->bind (meta #'pigpen.runtime/filter->bind)) @#'pigpen.runtime/filter->bind) (intern *ns* (with-meta 'key-selector->bind (meta #'pigpen.runtime/key-selector->bind)) @#'pigpen.runtime/key-selector->bind) (intern *ns* (with-meta 'keyword-field-selector->bind (meta #'pigpen.runtime/keyword-field-selector->bind)) @#'pigpen.runtime/keyword-field-selector->bind) (intern *ns* (with-meta 'indexed-field-selector->bind (meta #'pigpen.runtime/indexed-field-selector->bind)) @#'pigpen.runtime/indexed-field-selector->bind) (intern *ns* (with-meta 'bind$ (meta #'pigpen.raw/bind$)) @#'pigpen.raw/bind$) (intern *ns* (with-meta 'sort$ (meta #'pigpen.raw/sort$)) @#'pigpen.raw/sort$) (intern *ns* (with-meta 'rank$ (meta #'pigpen.raw/rank$)) @#'pigpen.raw/rank$) ;; ********** Filter ********** 
(intern *ns* (with-meta 'take$ (meta #'pigpen.raw/take$)) @#'pigpen.raw/take$) (intern *ns* (with-meta 'sample$ (meta #'pigpen.raw/sample$)) @#'pigpen.raw/sample$) ;; ********** Set ********** (intern *ns* (with-meta 'distinct$ (meta #'pigpen.raw/distinct$)) @#'pigpen.raw/distinct$) (intern *ns* (with-meta 'concat$ (meta #'pigpen.raw/concat$)) @#'pigpen.raw/concat$) ;; ********** Join ********** (intern *ns* (with-meta 'reduce$ (meta #'pigpen.raw/reduce$)) @#'pigpen.raw/reduce$) (intern *ns* (with-meta 'group$ (meta #'pigpen.raw/group$)) @#'pigpen.raw/group$) (intern *ns* (with-meta 'join$ (meta #'pigpen.raw/join$)) @#'pigpen.raw/join$)
2260
;; ;; ;; Copyright 2013-2015 <NAME>, Inc. ;; ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. ;; ;; (ns pigpen.core.op "*** ALPHA - Subject to change *** The raw pigpen operators. These are the basic building blocks that platforms implement. All higher level operators are defined in terms of these operators. These should be used to build custom PigPen operators. In these examples, fields refers to the fields that the underlying platform is aware of. Usually this is a single user field that represents arbitrary Clojure data. Note: You most likely don't want this namespace. 
Unless you are doing advanced things, stick to pigpen.core " (:require [pigpen.raw] [pigpen.runtime])) (intern *ns* (with-meta 'noop$ (meta #'pigpen.raw/noop$)) @#'pigpen.raw/noop$) ;; ********** IO ********** (intern *ns* (with-meta 'load$ (meta #'pigpen.raw/load$)) @#'pigpen.raw/load$) (intern *ns* (with-meta 'store$ (meta #'pigpen.raw/store$)) @#'pigpen.raw/store$) (intern *ns* (with-meta 'store-many$ (meta #'pigpen.raw/store-many$)) @#'pigpen.raw/store-many$) (intern *ns* (with-meta 'return$ (meta #'pigpen.raw/return$)) @#'pigpen.raw/return$) ;; ********** Map ********** (intern *ns* (with-meta 'code$ (meta #'pigpen.raw/code$)) @#'pigpen.raw/code$) (intern *ns* (with-meta 'projection-field$ (meta #'pigpen.raw/projection-field$)) @#'pigpen.raw/projection-field$) (intern *ns* (with-meta 'projection-func$ (meta #'pigpen.raw/projection-func$)) @#'pigpen.raw/projection-func$) (intern *ns* (with-meta 'project$ (meta #'pigpen.raw/project$)) @#'pigpen.raw/project$) (intern *ns* (with-meta 'map->bind (meta #'pigpen.runtime/map->bind)) @#'pigpen.runtime/map->bind) (intern *ns* (with-meta 'mapcat->bind (meta #'pigpen.runtime/mapcat->bind)) @#'pigpen.runtime/mapcat->bind) (intern *ns* (with-meta 'filter->bind (meta #'pigpen.runtime/filter->bind)) @#'pigpen.runtime/filter->bind) (intern *ns* (with-meta 'key-selector->bind (meta #'pigpen.runtime/key-selector->bind)) @#'pigpen.runtime/key-selector->bind) (intern *ns* (with-meta 'keyword-field-selector->bind (meta #'pigpen.runtime/keyword-field-selector->bind)) @#'pigpen.runtime/keyword-field-selector->bind) (intern *ns* (with-meta 'indexed-field-selector->bind (meta #'pigpen.runtime/indexed-field-selector->bind)) @#'pigpen.runtime/indexed-field-selector->bind) (intern *ns* (with-meta 'bind$ (meta #'pigpen.raw/bind$)) @#'pigpen.raw/bind$) (intern *ns* (with-meta 'sort$ (meta #'pigpen.raw/sort$)) @#'pigpen.raw/sort$) (intern *ns* (with-meta 'rank$ (meta #'pigpen.raw/rank$)) @#'pigpen.raw/rank$) ;; ********** Filter ********** 
(intern *ns* (with-meta 'take$ (meta #'pigpen.raw/take$)) @#'pigpen.raw/take$) (intern *ns* (with-meta 'sample$ (meta #'pigpen.raw/sample$)) @#'pigpen.raw/sample$) ;; ********** Set ********** (intern *ns* (with-meta 'distinct$ (meta #'pigpen.raw/distinct$)) @#'pigpen.raw/distinct$) (intern *ns* (with-meta 'concat$ (meta #'pigpen.raw/concat$)) @#'pigpen.raw/concat$) ;; ********** Join ********** (intern *ns* (with-meta 'reduce$ (meta #'pigpen.raw/reduce$)) @#'pigpen.raw/reduce$) (intern *ns* (with-meta 'group$ (meta #'pigpen.raw/group$)) @#'pigpen.raw/group$) (intern *ns* (with-meta 'join$ (meta #'pigpen.raw/join$)) @#'pigpen.raw/join$)
true
;; ;; ;; Copyright 2013-2015 PI:NAME:<NAME>END_PI, Inc. ;; ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. ;; ;; (ns pigpen.core.op "*** ALPHA - Subject to change *** The raw pigpen operators. These are the basic building blocks that platforms implement. All higher level operators are defined in terms of these operators. These should be used to build custom PigPen operators. In these examples, fields refers to the fields that the underlying platform is aware of. Usually this is a single user field that represents arbitrary Clojure data. Note: You most likely don't want this namespace. 
Unless you are doing advanced things, stick to pigpen.core " (:require [pigpen.raw] [pigpen.runtime])) (intern *ns* (with-meta 'noop$ (meta #'pigpen.raw/noop$)) @#'pigpen.raw/noop$) ;; ********** IO ********** (intern *ns* (with-meta 'load$ (meta #'pigpen.raw/load$)) @#'pigpen.raw/load$) (intern *ns* (with-meta 'store$ (meta #'pigpen.raw/store$)) @#'pigpen.raw/store$) (intern *ns* (with-meta 'store-many$ (meta #'pigpen.raw/store-many$)) @#'pigpen.raw/store-many$) (intern *ns* (with-meta 'return$ (meta #'pigpen.raw/return$)) @#'pigpen.raw/return$) ;; ********** Map ********** (intern *ns* (with-meta 'code$ (meta #'pigpen.raw/code$)) @#'pigpen.raw/code$) (intern *ns* (with-meta 'projection-field$ (meta #'pigpen.raw/projection-field$)) @#'pigpen.raw/projection-field$) (intern *ns* (with-meta 'projection-func$ (meta #'pigpen.raw/projection-func$)) @#'pigpen.raw/projection-func$) (intern *ns* (with-meta 'project$ (meta #'pigpen.raw/project$)) @#'pigpen.raw/project$) (intern *ns* (with-meta 'map->bind (meta #'pigpen.runtime/map->bind)) @#'pigpen.runtime/map->bind) (intern *ns* (with-meta 'mapcat->bind (meta #'pigpen.runtime/mapcat->bind)) @#'pigpen.runtime/mapcat->bind) (intern *ns* (with-meta 'filter->bind (meta #'pigpen.runtime/filter->bind)) @#'pigpen.runtime/filter->bind) (intern *ns* (with-meta 'key-selector->bind (meta #'pigpen.runtime/key-selector->bind)) @#'pigpen.runtime/key-selector->bind) (intern *ns* (with-meta 'keyword-field-selector->bind (meta #'pigpen.runtime/keyword-field-selector->bind)) @#'pigpen.runtime/keyword-field-selector->bind) (intern *ns* (with-meta 'indexed-field-selector->bind (meta #'pigpen.runtime/indexed-field-selector->bind)) @#'pigpen.runtime/indexed-field-selector->bind) (intern *ns* (with-meta 'bind$ (meta #'pigpen.raw/bind$)) @#'pigpen.raw/bind$) (intern *ns* (with-meta 'sort$ (meta #'pigpen.raw/sort$)) @#'pigpen.raw/sort$) (intern *ns* (with-meta 'rank$ (meta #'pigpen.raw/rank$)) @#'pigpen.raw/rank$) ;; ********** Filter ********** 
(intern *ns* (with-meta 'take$ (meta #'pigpen.raw/take$)) @#'pigpen.raw/take$) (intern *ns* (with-meta 'sample$ (meta #'pigpen.raw/sample$)) @#'pigpen.raw/sample$) ;; ********** Set ********** (intern *ns* (with-meta 'distinct$ (meta #'pigpen.raw/distinct$)) @#'pigpen.raw/distinct$) (intern *ns* (with-meta 'concat$ (meta #'pigpen.raw/concat$)) @#'pigpen.raw/concat$) ;; ********** Join ********** (intern *ns* (with-meta 'reduce$ (meta #'pigpen.raw/reduce$)) @#'pigpen.raw/reduce$) (intern *ns* (with-meta 'group$ (meta #'pigpen.raw/group$)) @#'pigpen.raw/group$) (intern *ns* (with-meta 'join$ (meta #'pigpen.raw/join$)) @#'pigpen.raw/join$)
[ { "context": "nge\"\n [state side kw new-val delta]\n (let [key (name kw)]\n (system-msg state side\n (str", "end": 676, "score": 0.9592305421829224, "start": 669, "tag": "KEY", "value": "name kw" } ]
src/clj/game/core/change_vals.clj
ouroboros8/netrunner
0
(ns game.core.change-vals (:require [game.core.agendas :refer [update-all-agenda-points]] [game.core.effects :refer [register-floating-effect]] [game.core.engine :refer [trigger-event]] [game.core.gaining :refer [base-mod-size deduct gain]] [game.core.hand-size :refer [hand-size update-hand-size]] [game.core.link :refer [get-link update-link]] [game.core.memory :refer [available-mu update-mu]] [game.core.say :refer [system-msg]] [game.core.tags :refer [update-tag-status]] [game.macros :refer [req]])) (defn- change-msg "Send a system message indicating the property change" [state side kw new-val delta] (let [key (name kw)] (system-msg state side (str "sets " (.replace key "-" " ") " to " new-val " (" (if (pos? delta) (str "+" delta) delta) ")")))) (defn- change-map "Change a player's property using the :mod system" [state side key delta] (gain state side key {:mod delta}) (change-msg state side key (base-mod-size state side key) delta)) (defn- change-mu "Send a system message indicating how mu was changed" [state side delta] (register-floating-effect state side nil {:type :user-available-mu :value [:regular delta]}) (update-mu state) (system-msg state side (str "sets unused [mu] to " (available-mu state) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-tags "Change a player's tag count" [state delta] (gain state :runner :tag delta) (update-tag-status state) (system-msg state :runner (str "sets Tags to " (get-in @state [:runner :tag :total]) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-bad-pub "Change a player's base bad pub count" [state delta] (if (neg? delta) (deduct state :corp [:bad-publicity (Math/abs delta)]) (gain state :corp :bad-publicity delta)) (system-msg state :corp (str "sets Bad Publicity to " (get-in @state [:corp :bad-publicity :base]) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-agenda-points "Change a player's total agenda points, using floating effects." 
[state side delta] (register-floating-effect state side nil ;; This is needed as `req` creates/shadows the existing `side` already in scope. (let [user-side side] {:type :user-agenda-points ;; `target` is either `:corp` or `:runner` :req (req (= user-side target)) :value delta})) (update-all-agenda-points state side) (system-msg state side (str "sets their agenda points to " (get-in @state [side :agenda-point]) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-link "Change the runner's link, using floating effects." [state side delta] (register-floating-effect state side nil {:type :user-link :value delta}) (update-link state) (system-msg state side (str "sets their link to " (get-link state) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-hand-size "Change the player's hand-size, using floating effects." [state side delta] (register-floating-effect state side nil (let [user-side side] {:type :user-hand-size :req (req (= side user-side)) :value delta})) (update-hand-size state side) (system-msg state side (str "sets their hand size to " (hand-size state side) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-generic "Change a player's base generic property." [state side key delta] (if (neg? delta) (deduct state side [key (- delta)]) (swap! state update-in [side key] (partial + delta))) (change-msg state side key (get-in @state [side key]) delta)) (defn change "Increase/decrease a player's property (clicks, credits, MU, etc.) by delta." [state side {:keys [key delta]}] (case key :memory (change-mu state side delta) :hand-size (change-hand-size state side delta) :tag (change-tags state delta) :bad-publicity (change-bad-pub state delta) :agenda-point (change-agenda-points state side delta) :link (change-link state side delta) ; else (change-generic state side key delta)))
101470
(ns game.core.change-vals (:require [game.core.agendas :refer [update-all-agenda-points]] [game.core.effects :refer [register-floating-effect]] [game.core.engine :refer [trigger-event]] [game.core.gaining :refer [base-mod-size deduct gain]] [game.core.hand-size :refer [hand-size update-hand-size]] [game.core.link :refer [get-link update-link]] [game.core.memory :refer [available-mu update-mu]] [game.core.say :refer [system-msg]] [game.core.tags :refer [update-tag-status]] [game.macros :refer [req]])) (defn- change-msg "Send a system message indicating the property change" [state side kw new-val delta] (let [key (<KEY>)] (system-msg state side (str "sets " (.replace key "-" " ") " to " new-val " (" (if (pos? delta) (str "+" delta) delta) ")")))) (defn- change-map "Change a player's property using the :mod system" [state side key delta] (gain state side key {:mod delta}) (change-msg state side key (base-mod-size state side key) delta)) (defn- change-mu "Send a system message indicating how mu was changed" [state side delta] (register-floating-effect state side nil {:type :user-available-mu :value [:regular delta]}) (update-mu state) (system-msg state side (str "sets unused [mu] to " (available-mu state) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-tags "Change a player's tag count" [state delta] (gain state :runner :tag delta) (update-tag-status state) (system-msg state :runner (str "sets Tags to " (get-in @state [:runner :tag :total]) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-bad-pub "Change a player's base bad pub count" [state delta] (if (neg? delta) (deduct state :corp [:bad-publicity (Math/abs delta)]) (gain state :corp :bad-publicity delta)) (system-msg state :corp (str "sets Bad Publicity to " (get-in @state [:corp :bad-publicity :base]) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-agenda-points "Change a player's total agenda points, using floating effects." 
[state side delta] (register-floating-effect state side nil ;; This is needed as `req` creates/shadows the existing `side` already in scope. (let [user-side side] {:type :user-agenda-points ;; `target` is either `:corp` or `:runner` :req (req (= user-side target)) :value delta})) (update-all-agenda-points state side) (system-msg state side (str "sets their agenda points to " (get-in @state [side :agenda-point]) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-link "Change the runner's link, using floating effects." [state side delta] (register-floating-effect state side nil {:type :user-link :value delta}) (update-link state) (system-msg state side (str "sets their link to " (get-link state) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-hand-size "Change the player's hand-size, using floating effects." [state side delta] (register-floating-effect state side nil (let [user-side side] {:type :user-hand-size :req (req (= side user-side)) :value delta})) (update-hand-size state side) (system-msg state side (str "sets their hand size to " (hand-size state side) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-generic "Change a player's base generic property." [state side key delta] (if (neg? delta) (deduct state side [key (- delta)]) (swap! state update-in [side key] (partial + delta))) (change-msg state side key (get-in @state [side key]) delta)) (defn change "Increase/decrease a player's property (clicks, credits, MU, etc.) by delta." [state side {:keys [key delta]}] (case key :memory (change-mu state side delta) :hand-size (change-hand-size state side delta) :tag (change-tags state delta) :bad-publicity (change-bad-pub state delta) :agenda-point (change-agenda-points state side delta) :link (change-link state side delta) ; else (change-generic state side key delta)))
true
(ns game.core.change-vals (:require [game.core.agendas :refer [update-all-agenda-points]] [game.core.effects :refer [register-floating-effect]] [game.core.engine :refer [trigger-event]] [game.core.gaining :refer [base-mod-size deduct gain]] [game.core.hand-size :refer [hand-size update-hand-size]] [game.core.link :refer [get-link update-link]] [game.core.memory :refer [available-mu update-mu]] [game.core.say :refer [system-msg]] [game.core.tags :refer [update-tag-status]] [game.macros :refer [req]])) (defn- change-msg "Send a system message indicating the property change" [state side kw new-val delta] (let [key (PI:KEY:<KEY>END_PI)] (system-msg state side (str "sets " (.replace key "-" " ") " to " new-val " (" (if (pos? delta) (str "+" delta) delta) ")")))) (defn- change-map "Change a player's property using the :mod system" [state side key delta] (gain state side key {:mod delta}) (change-msg state side key (base-mod-size state side key) delta)) (defn- change-mu "Send a system message indicating how mu was changed" [state side delta] (register-floating-effect state side nil {:type :user-available-mu :value [:regular delta]}) (update-mu state) (system-msg state side (str "sets unused [mu] to " (available-mu state) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-tags "Change a player's tag count" [state delta] (gain state :runner :tag delta) (update-tag-status state) (system-msg state :runner (str "sets Tags to " (get-in @state [:runner :tag :total]) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-bad-pub "Change a player's base bad pub count" [state delta] (if (neg? delta) (deduct state :corp [:bad-publicity (Math/abs delta)]) (gain state :corp :bad-publicity delta)) (system-msg state :corp (str "sets Bad Publicity to " (get-in @state [:corp :bad-publicity :base]) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-agenda-points "Change a player's total agenda points, using floating effects." 
[state side delta] (register-floating-effect state side nil ;; This is needed as `req` creates/shadows the existing `side` already in scope. (let [user-side side] {:type :user-agenda-points ;; `target` is either `:corp` or `:runner` :req (req (= user-side target)) :value delta})) (update-all-agenda-points state side) (system-msg state side (str "sets their agenda points to " (get-in @state [side :agenda-point]) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-link "Change the runner's link, using floating effects." [state side delta] (register-floating-effect state side nil {:type :user-link :value delta}) (update-link state) (system-msg state side (str "sets their link to " (get-link state) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-hand-size "Change the player's hand-size, using floating effects." [state side delta] (register-floating-effect state side nil (let [user-side side] {:type :user-hand-size :req (req (= side user-side)) :value delta})) (update-hand-size state side) (system-msg state side (str "sets their hand size to " (hand-size state side) " (" (if (pos? delta) (str "+" delta) delta) ")"))) (defn- change-generic "Change a player's base generic property." [state side key delta] (if (neg? delta) (deduct state side [key (- delta)]) (swap! state update-in [side key] (partial + delta))) (change-msg state side key (get-in @state [side key]) delta)) (defn change "Increase/decrease a player's property (clicks, credits, MU, etc.) by delta." [state side {:keys [key delta]}] (case key :memory (change-mu state side delta) :hand-size (change-hand-size state side delta) :tag (change-tags state delta) :bad-publicity (change-bad-pub state delta) :agenda-point (change-agenda-points state side delta) :link (change-link state side delta) ; else (change-generic state side key delta)))
[ { "context": "alid, 2) anonymous -\n; a string like \"REMOTE_ADDR=212.92.245.244\" in the response.\n;\n; Produces the file \"proxies.", "end": 1313, "score": 0.9997474551200867, "start": 1299, "tag": "IP_ADDRESS", "value": "212.92.245.244" } ]
data/train/clojure/a09f77e550a0ddc73ee036c5691f34c98996217epa-fetch.clj
harshp8l/deep-learning-lang-detection
84
(require '[com.twinql.clojure.http :as http]) (require '[work.core :as work]) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Exception-free wrapper for clojure.http component. ; (defn try-get "Wrapper for the com.twinql.clojure.http component. Returns either valid clojure.http response, or the empty response. Requires URL, proxy host and port, clojure.http connection manager, timeout in ms, and the User-Agent string." [url host port ccm timeout browser] (try (http/get url :as :string :connection-manager ccm :parameters (http/map->params {:default-proxy (http/http-host :host host :port port) :so-timeout timeout :connection-timeout timeout :user-agent browser})) (catch Exception e {:code 0 :reason "" :content "" :headers {}}))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Proxy discovering stuff. Requires the list of proxies in ; data/proxy-list.txt. ; ; Accesses the echo server "http://xformstest.org/cgi-bin/echo.sh" ; via each of the listed proxies. ; ; Tries to find the proof of proxy being 1) valid, 2) anonymous - ; a string like "REMOTE_ADDR=212.92.245.244" in the response. ; ; Produces the file "proxies.txt" which contains valid anonymous ; proxies for later work. ; ; Warning: (discover) is a lengthy operation; 1 minute or so. ; It complains a lot on the servers; that's all right. ; (defn- split-proxy [x] (let [pair (re-seq #"[^:]+" x)] [ (first pair) (Integer/valueOf (second pair)) ])) (defn- check-proxy "Checks the proxy to be able to access xformstest.org diring the specified timeout and to provide its IP to the server, instead of local IP." 
[host port ccm timeout] (let [result (try-get "http://xformstest.org/cgi-bin/echo.sh" host port ccm timeout "Mozilla/4.0 (compatible)") retval (and (= (:code result) 200) (re-find (re-pattern (str "REMOTE_ADDR=" host)) (:content result)))] (if retval (println (str [host port] " OK.")) (println (str [host port]))) retval)) (defn discover "Examines proxies from data/proxy-list.txt who manage to proove that they are valid and anonymous during the specified timeout, and writes the valid proxies to proxies.txt" [timeout] (let [proxy-list (re-seq #"\S+" (slurp "data/proxy-list.txt")) candidates (vec (map split-proxy proxy-list))] (http/with-connection-manager [ccm :thread-safe] (defn- valid-proxy? [[host port]] ; something curryish. Lambdas are harder to read. (check-proxy host port ccm timeout)) (let [valid-proxies (vec (work/filter-work valid-proxy? candidates 200))] (println (count valid-proxies) " valid proxies found.") (println "Writing file 'proxies.txt'.") (spit "proxies.txt" valid-proxies) (println "Done."))))) ; (discover 1000) ; (discover 500) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Stuff to select random User-Agent strings according to distribution ; of browsers in the wild. ; (defn weighted-iter "Takes an array of weights (that is, frequencies), and returns a fn which returns a random integer which lies in (range (count weights)) and is distributed according to the weights." [weights] (let [part-sums (vec (reductions + (map float weights))) total-sum (last part-sums) total-num (count weights) index-range (range total-num)] (fn [] (let [scaled-rand (* (rand) total-sum) index (first (filter #(<= scaled-rand (get part-sums %)) index-range))] (if (nil? 
index) (dec total-num) index))))) ; (let [f (weighted-iter [1 2 3])] ; (dotimes [_ 1000] (prn (f)))) (let [agent-pairs (partition 2 (load-file "data/agent-pairs.txt")) user-agents (vec (map second agent-pairs)) agent-iter (weighted-iter (map first agent-pairs))] (defn weighted-random-user-agent [] (user-agents (agent-iter)))) ; (dotimes [_ 20] ; (prn (weighted-random-user-agent)) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Insistent anonymous fetching function. Tries to push the request ; through each of the (unreliable) proxies. Randomly orders the proxies ; for better overall throughput. Returns a valid clojure.http response ; on success, nil on failure (that is, when neither of the proxies ; managed to service the request during the specified timeout). ; (defn insist-get [url ccm proxies timeout] (let [shuffled-proxies (shuffle proxies) num-proxies (count proxies)] (loop [pos 0] (if (>= pos num-proxies) nil (let [the-proxy (shuffled-proxies pos) [host port] the-proxy user-agent (weighted-random-user-agent) result (try-get url host port ccm timeout user-agent)] (if (= (:code result) 200) result (recur (inc pos)))))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; parallel fetcher itself ; (defn pa-fetch [urls proxies timeout] (http/with-connection-manager [ccm :thread-safe] (defn- getter [url] ; something curryish. Lambdas are harder to read. (insist-get url ccm proxies timeout)) (work/map-work getter urls 200))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; ;; The top-level stuff. ;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Discovering proxies ; (def timeout 1000) ; proxies are far away and busy. 
(time (discover timeout)) ; ; Loading alive proxies for use ; (def proxies (load-file "proxies.txt")) ; ; Parallel test ; (doseq [num-urls [50 100 200]] (let [urls (repeat num-urls "http://www.example.com/")] (time (def responses (pa-fetch urls proxies timeout))) (prn (map :code responses)) ; should be (200 200 200 ... 200) ))
42702
(require '[com.twinql.clojure.http :as http]) (require '[work.core :as work]) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Exception-free wrapper for clojure.http component. ; (defn try-get "Wrapper for the com.twinql.clojure.http component. Returns either valid clojure.http response, or the empty response. Requires URL, proxy host and port, clojure.http connection manager, timeout in ms, and the User-Agent string." [url host port ccm timeout browser] (try (http/get url :as :string :connection-manager ccm :parameters (http/map->params {:default-proxy (http/http-host :host host :port port) :so-timeout timeout :connection-timeout timeout :user-agent browser})) (catch Exception e {:code 0 :reason "" :content "" :headers {}}))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Proxy discovering stuff. Requires the list of proxies in ; data/proxy-list.txt. ; ; Accesses the echo server "http://xformstest.org/cgi-bin/echo.sh" ; via each of the listed proxies. ; ; Tries to find the proof of proxy being 1) valid, 2) anonymous - ; a string like "REMOTE_ADDR=172.16.58.3" in the response. ; ; Produces the file "proxies.txt" which contains valid anonymous ; proxies for later work. ; ; Warning: (discover) is a lengthy operation; 1 minute or so. ; It complains a lot on the servers; that's all right. ; (defn- split-proxy [x] (let [pair (re-seq #"[^:]+" x)] [ (first pair) (Integer/valueOf (second pair)) ])) (defn- check-proxy "Checks the proxy to be able to access xformstest.org diring the specified timeout and to provide its IP to the server, instead of local IP." 
[host port ccm timeout] (let [result (try-get "http://xformstest.org/cgi-bin/echo.sh" host port ccm timeout "Mozilla/4.0 (compatible)") retval (and (= (:code result) 200) (re-find (re-pattern (str "REMOTE_ADDR=" host)) (:content result)))] (if retval (println (str [host port] " OK.")) (println (str [host port]))) retval)) (defn discover "Examines proxies from data/proxy-list.txt who manage to proove that they are valid and anonymous during the specified timeout, and writes the valid proxies to proxies.txt" [timeout] (let [proxy-list (re-seq #"\S+" (slurp "data/proxy-list.txt")) candidates (vec (map split-proxy proxy-list))] (http/with-connection-manager [ccm :thread-safe] (defn- valid-proxy? [[host port]] ; something curryish. Lambdas are harder to read. (check-proxy host port ccm timeout)) (let [valid-proxies (vec (work/filter-work valid-proxy? candidates 200))] (println (count valid-proxies) " valid proxies found.") (println "Writing file 'proxies.txt'.") (spit "proxies.txt" valid-proxies) (println "Done."))))) ; (discover 1000) ; (discover 500) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Stuff to select random User-Agent strings according to distribution ; of browsers in the wild. ; (defn weighted-iter "Takes an array of weights (that is, frequencies), and returns a fn which returns a random integer which lies in (range (count weights)) and is distributed according to the weights." [weights] (let [part-sums (vec (reductions + (map float weights))) total-sum (last part-sums) total-num (count weights) index-range (range total-num)] (fn [] (let [scaled-rand (* (rand) total-sum) index (first (filter #(<= scaled-rand (get part-sums %)) index-range))] (if (nil? 
index) (dec total-num) index))))) ; (let [f (weighted-iter [1 2 3])] ; (dotimes [_ 1000] (prn (f)))) (let [agent-pairs (partition 2 (load-file "data/agent-pairs.txt")) user-agents (vec (map second agent-pairs)) agent-iter (weighted-iter (map first agent-pairs))] (defn weighted-random-user-agent [] (user-agents (agent-iter)))) ; (dotimes [_ 20] ; (prn (weighted-random-user-agent)) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Insistent anonymous fetching function. Tries to push the request ; through each of the (unreliable) proxies. Randomly orders the proxies ; for better overall throughput. Returns a valid clojure.http response ; on success, nil on failure (that is, when neither of the proxies ; managed to service the request during the specified timeout). ; (defn insist-get [url ccm proxies timeout] (let [shuffled-proxies (shuffle proxies) num-proxies (count proxies)] (loop [pos 0] (if (>= pos num-proxies) nil (let [the-proxy (shuffled-proxies pos) [host port] the-proxy user-agent (weighted-random-user-agent) result (try-get url host port ccm timeout user-agent)] (if (= (:code result) 200) result (recur (inc pos)))))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; parallel fetcher itself ; (defn pa-fetch [urls proxies timeout] (http/with-connection-manager [ccm :thread-safe] (defn- getter [url] ; something curryish. Lambdas are harder to read. (insist-get url ccm proxies timeout)) (work/map-work getter urls 200))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; ;; The top-level stuff. ;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Discovering proxies ; (def timeout 1000) ; proxies are far away and busy. 
(time (discover timeout)) ; ; Loading alive proxies for use ; (def proxies (load-file "proxies.txt")) ; ; Parallel test ; (doseq [num-urls [50 100 200]] (let [urls (repeat num-urls "http://www.example.com/")] (time (def responses (pa-fetch urls proxies timeout))) (prn (map :code responses)) ; should be (200 200 200 ... 200) ))
true
(require '[com.twinql.clojure.http :as http]) (require '[work.core :as work]) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Exception-free wrapper for clojure.http component. ; (defn try-get "Wrapper for the com.twinql.clojure.http component. Returns either valid clojure.http response, or the empty response. Requires URL, proxy host and port, clojure.http connection manager, timeout in ms, and the User-Agent string." [url host port ccm timeout browser] (try (http/get url :as :string :connection-manager ccm :parameters (http/map->params {:default-proxy (http/http-host :host host :port port) :so-timeout timeout :connection-timeout timeout :user-agent browser})) (catch Exception e {:code 0 :reason "" :content "" :headers {}}))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Proxy discovering stuff. Requires the list of proxies in ; data/proxy-list.txt. ; ; Accesses the echo server "http://xformstest.org/cgi-bin/echo.sh" ; via each of the listed proxies. ; ; Tries to find the proof of proxy being 1) valid, 2) anonymous - ; a string like "REMOTE_ADDR=PI:IP_ADDRESS:172.16.58.3END_PI" in the response. ; ; Produces the file "proxies.txt" which contains valid anonymous ; proxies for later work. ; ; Warning: (discover) is a lengthy operation; 1 minute or so. ; It complains a lot on the servers; that's all right. ; (defn- split-proxy [x] (let [pair (re-seq #"[^:]+" x)] [ (first pair) (Integer/valueOf (second pair)) ])) (defn- check-proxy "Checks the proxy to be able to access xformstest.org diring the specified timeout and to provide its IP to the server, instead of local IP." 
[host port ccm timeout] (let [result (try-get "http://xformstest.org/cgi-bin/echo.sh" host port ccm timeout "Mozilla/4.0 (compatible)") retval (and (= (:code result) 200) (re-find (re-pattern (str "REMOTE_ADDR=" host)) (:content result)))] (if retval (println (str [host port] " OK.")) (println (str [host port]))) retval)) (defn discover "Examines proxies from data/proxy-list.txt who manage to proove that they are valid and anonymous during the specified timeout, and writes the valid proxies to proxies.txt" [timeout] (let [proxy-list (re-seq #"\S+" (slurp "data/proxy-list.txt")) candidates (vec (map split-proxy proxy-list))] (http/with-connection-manager [ccm :thread-safe] (defn- valid-proxy? [[host port]] ; something curryish. Lambdas are harder to read. (check-proxy host port ccm timeout)) (let [valid-proxies (vec (work/filter-work valid-proxy? candidates 200))] (println (count valid-proxies) " valid proxies found.") (println "Writing file 'proxies.txt'.") (spit "proxies.txt" valid-proxies) (println "Done."))))) ; (discover 1000) ; (discover 500) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Stuff to select random User-Agent strings according to distribution ; of browsers in the wild. ; (defn weighted-iter "Takes an array of weights (that is, frequencies), and returns a fn which returns a random integer which lies in (range (count weights)) and is distributed according to the weights." [weights] (let [part-sums (vec (reductions + (map float weights))) total-sum (last part-sums) total-num (count weights) index-range (range total-num)] (fn [] (let [scaled-rand (* (rand) total-sum) index (first (filter #(<= scaled-rand (get part-sums %)) index-range))] (if (nil? 
index) (dec total-num) index))))) ; (let [f (weighted-iter [1 2 3])] ; (dotimes [_ 1000] (prn (f)))) (let [agent-pairs (partition 2 (load-file "data/agent-pairs.txt")) user-agents (vec (map second agent-pairs)) agent-iter (weighted-iter (map first agent-pairs))] (defn weighted-random-user-agent [] (user-agents (agent-iter)))) ; (dotimes [_ 20] ; (prn (weighted-random-user-agent)) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Insistent anonymous fetching function. Tries to push the request ; through each of the (unreliable) proxies. Randomly orders the proxies ; for better overall throughput. Returns a valid clojure.http response ; on success, nil on failure (that is, when neither of the proxies ; managed to service the request during the specified timeout). ; (defn insist-get [url ccm proxies timeout] (let [shuffled-proxies (shuffle proxies) num-proxies (count proxies)] (loop [pos 0] (if (>= pos num-proxies) nil (let [the-proxy (shuffled-proxies pos) [host port] the-proxy user-agent (weighted-random-user-agent) result (try-get url host port ccm timeout user-agent)] (if (= (:code result) 200) result (recur (inc pos)))))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; parallel fetcher itself ; (defn pa-fetch [urls proxies timeout] (http/with-connection-manager [ccm :thread-safe] (defn- getter [url] ; something curryish. Lambdas are harder to read. (insist-get url ccm proxies timeout)) (work/map-work getter urls 200))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; ;; The top-level stuff. ;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ; Discovering proxies ; (def timeout 1000) ; proxies are far away and busy. 
(time (discover timeout)) ; ; Loading alive proxies for use ; (def proxies (load-file "proxies.txt")) ; ; Parallel test ; (doseq [num-urls [50 100 200]] (let [urls (repeat num-urls "http://www.example.com/")] (time (def responses (pa-fetch urls proxies timeout))) (prn (map :code responses)) ; should be (200 200 200 ... 200) ))
[ { "context": "t/result value))\n\n(def a-user (users/init {:auth \"Foobar\"}))\n\n(facts \"Callback URL should be calculated", "end": 900, "score": 0.8162456154823303, "start": 897, "tag": "USERNAME", "value": "Foo" }, { "context": "esult value))\n\n(def a-user (users/init {:auth \"Foobar\"}))\n\n(facts \"Callback URL should be calculated fr", "end": 903, "score": 0.5165241956710815, "start": 900, "tag": "PASSWORD", "value": "bar" }, { "context": "thorisation\"\n (twitter-auth->user {:screen_name \"Foo\"}) => (result-value? {:username \"Foo\" :auth {:twi", "end": 1707, "score": 0.42094889283180237, "start": 1704, "tag": "USERNAME", "value": "Foo" }, { "context": "screen_name \"Foo\"}) => (result-value? {:username \"Foo\" :auth {:twitter {:screen_name \"Foo\"}}}))\n\n(facts", "end": 1744, "score": 0.9797108769416809, "start": 1741, "tag": "USERNAME", "value": "Foo" } ]
test/londonstartup/controllers/auth_test.clj
emeka/londonstartup
1
(ns londonstartup.controllers.auth-test (:require [londonstartup.controllers.auth :as auth] [londonstartup.environment :as env] [londonstartup.models.users :as users] [londonstartup.common.result :as result] [londonstartup.services.twitter :as twitter] [londonstartup.services.session :as session] [londonstartup.views.auth :as views] [ring.util.response :as resp]) (:use midje.sweet) (:use [midje.util :only [expose-testables]])) (expose-testables londonstartup.controllers.auth) (defn valid-result? [fact] (result/error-free? fact)) (defn error-result? [fact] (result/has-error? fact)) (defn result-value? [value] (fn [fact] (= (result/value fact) value))) (defn error [value] (result/error value :field "Msg")) (defn value [value] (result/result value)) (def a-user (users/init {:auth "Foobar"})) (facts "Callback URL should be calculated from the environment." (against-background (env/get "BASE_URL" anything) => "http://baseurl.com", (env/get "URL") => anything) (fact "If BASE_URL or URL are given, it should return a result" (result/value (callback "foo")) => "http://baseurl.com/login?auto=true&redirect_to=foo") (fact "The redirect_to url is empty, it should redirect to '/'" (result/value (callback nil)) => "http://baseurl.com/login?auto=true&redirect_to=/") (fact "If BASE_URL and URL are empty, it should throw an exception" (callback "/") => (fn [actual] (result/has-error? actual)) (provided (env/get "BASE_URL" anything) => nil))) (facts "We should be able to initialise a user from a Twitter authorisation" (twitter-auth->user {:screen_name "Foo"}) => (result-value? {:username "Foo" :auth {:twitter {:screen_name "Foo"}}})) (facts "We should be able to find a user in the database from a session user" (fact "We should extract the Twitter user_id and search for it" (find-user {:auth {:twitter {:user_id "1" :foo "Foo"}}}) => ...User... (provided (users/user {"auth.twitter.user_id" "1"}) => ...User... 
:times 1)) ) (facts "We should update the user in the database if the twitter authorisation changed" (fact "We should do nothing if the Twitter authorisations are the same" (update-user! {:auth {:twitter "Foo"}} {:auth {:twitter "Foo"}}) => valid-result? (provided (users/update! anything) => anything :times 0)) (fact "We should update if the Twitter authorisations are different" (update-user! {:auth {:twitter {:user_id "1" :foo "Foo"}}} {:auth {:twitter {:user_id "1" :foo "Bar"}}}) => valid-result? (provided (users/update! anything) => (value ...Result...) :times 1)) (fact "We should return an error if the user_id values are different" (update-user! {:auth {:twitter {:user_id "1"}}} {:auth {:twitter {:user_id "2"}}}) => error-result? (provided (users/update! anything) => anything :times 0))) (facts "We should be able to get a request token for a callback url" (fact "We shoud return a valid result if we can calculate the callback and twitter request worked" (get-request-token "/") => valid-result? (provided (#'londonstartup.controllers.auth/callback "/") => (result/result "callback"), (twitter/request-token "callback") => (result/result "token"))) (fact "We should return an error if we cannot calculate the callback" (get-request-token "/") => error-result? (provided (#'londonstartup.controllers.auth/callback "/") => (result/error "callback" :callback "Msg"))) (fact "We should return an error if Twitter returns an error" (get-request-token "/") => error-result? (provided (#'londonstartup.controllers.auth/callback "/") => (result/result "callback"), (twitter/request-token "callback") => (result/error "token" :token "Msg")))) (facts "Twitter request-token must be confirmed" (fact "We should return a valid result if oath_callback_confirmed is \"true\"" (confirmed? {:oauth_callback_confirmed "true"}) => valid-result?) (fact "We should return an error result if oath_callback_confirmed is not \"true\"" (confirmed? {:oauth_callback_confirmed "foo"}) => error-result?) 
(fact "We should return an error result if no oauth_callback_confirmed is given" (confirmed? {}) => error-result?)) (facts "Adding a value in the session should just return the value as a valid result" (session-put! ..Value.. ..Key..) => (result/result ..Value..) (provided (session/put! ..Key.. ..Value..) => anything :times 1)) (facts "Getting a value from the session should just return the value as a valid result" (session-get ..Key..) => (result/result ..Value..) (provided (session/get ..Key..) => ..Value.. :times 1)) (facts "We should be able to redirect the user to the Twitter authorisation page" (fact "If we can get the approval URL we should redirect to it" (redirect-to-twitter-auth-page ..URL..) => ..Redirect.. (provided (#'londonstartup.controllers.auth/get-approval-url ..URL..) => (result/result ..Approval-URL..), (resp/redirect ..Approval-URL..) => ..Redirect.. :times 1)) (fact "If we cannot get the approval URL we should not redirect" (redirect-to-twitter-auth-page ..URL..) => anything (provided (#'londonstartup.controllers.auth/get-approval-url ..URL..) => (result/error ..Approval-URL.. :foo "Msg"), (resp/redirect ..Approval-URL..) => ..Redirect.. :times 0))) (facts "We should be able to log existing users" (fact "If we can update the user in the database, we should save them in the session and redirect to the given url" (log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => ..Redirect.. (provided (#'londonstartup.controllers.auth/update-user! ..db-user.. ..session-user..) => (result/result ..updated-user..), (session/put! :user ..updated-user..) => anything :times 1, (resp/redirect ..redirect-url..) => ..Redirect.. :times 1)) (fact "If we cannot update the user in the database, return an error" (log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => #(string? %) (provided (#'londonstartup.controllers.auth/update-user! ..db-user.. ..session-user..) => (result/error ..updated-user.. ..field.. ..Msg..), (session/put! 
:user ..updated-user..) => anything :times 0, (resp/redirect ..redirect-url..) => ..Redirect.. :times 0))) (facts "We should be able to log new users" (fact "If we can add the user in the database, we should save them in the session and redirect to the given url" (log-new-user ..session-user.. ..redirect-url..) => ..Redirect.. (provided (users/add! ..session-user..) => (result/result ..added-user..), (session/put! :user ..added-user..) => anything :times 1, (resp/redirect "/signup?redirect_to=..redirect-url..") => ..Redirect.. :times 1)) (fact "If we cannot update the user in the database, return an error" (log-new-user ..session-user.. ..redirect-url..) => #(string? %) (provided (users/add! ..session-user..) => (result/error ..added-user.. ..field.. ..Msg..), (session/put! :user ..added-user..) => anything :times 0, (resp/redirect "/signup?redirect_to=..redirect-url..") => ..Redirect.. :times 0))) (facts "We should be able to validate the oauth-token" (let [request-token {:oauth_token "foo"}] (fact "If the received oath-token is the same as the one in the request-token, we return a valid result" (validate-oauth-token request-token "foo") => valid-result?) (fact "If the received oath-token is different from the one in the request-token, we return an error" (validate-oauth-token request-token "bar") => error-result?) )) (facts "We should be able to authorize a user from the oauth-toke and the oauth-verifier" (fact "If we cannot create a session user, return an error" (authorise-user ..oauth-token.. ..oauth-verifier.. ..redirect-url..) => #(string? %) (provided (#'londonstartup.controllers.auth/create-session-user ..oauth-token.. ..oauth-verifier..) => (error ..result..) (#'londonstartup.controllers.auth/log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => anything :times 0 (#'londonstartup.controllers.auth/log-new-user ..session-user.. ..redirect-url..) 
=> anything :times 0)) (fact "If the users exist in the database, then we update the database user with the twitter authentication and log them" (authorise-user ..oauth-token.. ..oauth-verifier.. ..redirect-url..) => anything (provided (#'londonstartup.controllers.auth/create-session-user ..oauth-token.. ..oauth-verifier..) => (value ..session-user..) (#'londonstartup.controllers.auth/find-user ..session-user..) => (value ..db-user..) (#'londonstartup.controllers.auth/log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => anything :times 1 (#'londonstartup.controllers.auth/log-new-user ..session-user.. ..redirect-url..) => anything :times 0)) (fact "If the users do not exist in the database, then we add the user in the database user and log them" (authorise-user ..oauth-token.. ..oauth-verifier.. ..redirect-url..) => anything (provided (#'londonstartup.controllers.auth/create-session-user ..oauth-token.. ..oauth-verifier..) => (value ..session-user..) (#'londonstartup.controllers.auth/find-user ..session-user..) => (error ..db-user..) (#'londonstartup.controllers.auth/log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => anything :times 0 (#'londonstartup.controllers.auth/log-new-user ..session-user.. ..redirect-url..) => anything :times 1)) ) (facts "We should be able to process login requests" (fact "If a user is already logged, the we directly redirect to the given url" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. nil ..auto..) => anything (provided (session/get :user ) => a-user (resp/redirect "/redirect-url") => anything)) (fact "If the login is denied, we clean the session and we display the login page" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. "true" ..auto..) => ..login-page.. (provided (session/get :user ) => nil (session/clear!) => anything (session/flash! 
"ACCESS DENIED") => anything (views/login-page "/redirect-url") => ..login-page..)) (fact "If auto is nil, we display the login page" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. nil nil) => ..login-page.. (provided (session/get :user ) => nil (session/clear!) => anything :times 0 (views/login-page "/redirect-url") => ..login-page..)) (fact "If auto is false, we display the login page" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. nil "False") => ..login-page.. (provided (session/get :user ) => nil (session/clear!) => anything :times 0 (views/login-page "/redirect-url") => ..login-page..)) (fact "If the login is automatic but we don't have the Twitter authorisation tokens, we redirect to Twitter" (auth/login "/redirect-url" ..oauth-token.. nil nil "True") => ..redirect-to-twitter.. (provided (session/get :user ) => nil (#'londonstartup.controllers.auth/redirect-to-twitter-auth-page "/redirect-url") => ..redirect-to-twitter..)) (fact "If we get the Twitter tokens, we authorize user" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. nil "True") => ..authorizse-user.. (provided (session/get :user ) => nil (#'londonstartup.controllers.auth/authorise-user ..oauth-token.. ..oauth-verifier.. "/redirect-url") => ..authorizse-user..)) ) (facts "We should be able to process logout requests" (fact "We clear the session and redirect to the home page" (auth/logout) => ..redirect-to-home-page.. (provided (session/clear!) => anything (resp/redirect "/") => ..redirect-to-home-page..)))
117743
(ns londonstartup.controllers.auth-test (:require [londonstartup.controllers.auth :as auth] [londonstartup.environment :as env] [londonstartup.models.users :as users] [londonstartup.common.result :as result] [londonstartup.services.twitter :as twitter] [londonstartup.services.session :as session] [londonstartup.views.auth :as views] [ring.util.response :as resp]) (:use midje.sweet) (:use [midje.util :only [expose-testables]])) (expose-testables londonstartup.controllers.auth) (defn valid-result? [fact] (result/error-free? fact)) (defn error-result? [fact] (result/has-error? fact)) (defn result-value? [value] (fn [fact] (= (result/value fact) value))) (defn error [value] (result/error value :field "Msg")) (defn value [value] (result/result value)) (def a-user (users/init {:auth "Foo<PASSWORD>"})) (facts "Callback URL should be calculated from the environment." (against-background (env/get "BASE_URL" anything) => "http://baseurl.com", (env/get "URL") => anything) (fact "If BASE_URL or URL are given, it should return a result" (result/value (callback "foo")) => "http://baseurl.com/login?auto=true&redirect_to=foo") (fact "The redirect_to url is empty, it should redirect to '/'" (result/value (callback nil)) => "http://baseurl.com/login?auto=true&redirect_to=/") (fact "If BASE_URL and URL are empty, it should throw an exception" (callback "/") => (fn [actual] (result/has-error? actual)) (provided (env/get "BASE_URL" anything) => nil))) (facts "We should be able to initialise a user from a Twitter authorisation" (twitter-auth->user {:screen_name "Foo"}) => (result-value? {:username "Foo" :auth {:twitter {:screen_name "Foo"}}})) (facts "We should be able to find a user in the database from a session user" (fact "We should extract the Twitter user_id and search for it" (find-user {:auth {:twitter {:user_id "1" :foo "Foo"}}}) => ...User... (provided (users/user {"auth.twitter.user_id" "1"}) => ...User... 
:times 1)) ) (facts "We should update the user in the database if the twitter authorisation changed" (fact "We should do nothing if the Twitter authorisations are the same" (update-user! {:auth {:twitter "Foo"}} {:auth {:twitter "Foo"}}) => valid-result? (provided (users/update! anything) => anything :times 0)) (fact "We should update if the Twitter authorisations are different" (update-user! {:auth {:twitter {:user_id "1" :foo "Foo"}}} {:auth {:twitter {:user_id "1" :foo "Bar"}}}) => valid-result? (provided (users/update! anything) => (value ...Result...) :times 1)) (fact "We should return an error if the user_id values are different" (update-user! {:auth {:twitter {:user_id "1"}}} {:auth {:twitter {:user_id "2"}}}) => error-result? (provided (users/update! anything) => anything :times 0))) (facts "We should be able to get a request token for a callback url" (fact "We shoud return a valid result if we can calculate the callback and twitter request worked" (get-request-token "/") => valid-result? (provided (#'londonstartup.controllers.auth/callback "/") => (result/result "callback"), (twitter/request-token "callback") => (result/result "token"))) (fact "We should return an error if we cannot calculate the callback" (get-request-token "/") => error-result? (provided (#'londonstartup.controllers.auth/callback "/") => (result/error "callback" :callback "Msg"))) (fact "We should return an error if Twitter returns an error" (get-request-token "/") => error-result? (provided (#'londonstartup.controllers.auth/callback "/") => (result/result "callback"), (twitter/request-token "callback") => (result/error "token" :token "Msg")))) (facts "Twitter request-token must be confirmed" (fact "We should return a valid result if oath_callback_confirmed is \"true\"" (confirmed? {:oauth_callback_confirmed "true"}) => valid-result?) (fact "We should return an error result if oath_callback_confirmed is not \"true\"" (confirmed? {:oauth_callback_confirmed "foo"}) => error-result?) 
(fact "We should return an error result if no oauth_callback_confirmed is given" (confirmed? {}) => error-result?)) (facts "Adding a value in the session should just return the value as a valid result" (session-put! ..Value.. ..Key..) => (result/result ..Value..) (provided (session/put! ..Key.. ..Value..) => anything :times 1)) (facts "Getting a value from the session should just return the value as a valid result" (session-get ..Key..) => (result/result ..Value..) (provided (session/get ..Key..) => ..Value.. :times 1)) (facts "We should be able to redirect the user to the Twitter authorisation page" (fact "If we can get the approval URL we should redirect to it" (redirect-to-twitter-auth-page ..URL..) => ..Redirect.. (provided (#'londonstartup.controllers.auth/get-approval-url ..URL..) => (result/result ..Approval-URL..), (resp/redirect ..Approval-URL..) => ..Redirect.. :times 1)) (fact "If we cannot get the approval URL we should not redirect" (redirect-to-twitter-auth-page ..URL..) => anything (provided (#'londonstartup.controllers.auth/get-approval-url ..URL..) => (result/error ..Approval-URL.. :foo "Msg"), (resp/redirect ..Approval-URL..) => ..Redirect.. :times 0))) (facts "We should be able to log existing users" (fact "If we can update the user in the database, we should save them in the session and redirect to the given url" (log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => ..Redirect.. (provided (#'londonstartup.controllers.auth/update-user! ..db-user.. ..session-user..) => (result/result ..updated-user..), (session/put! :user ..updated-user..) => anything :times 1, (resp/redirect ..redirect-url..) => ..Redirect.. :times 1)) (fact "If we cannot update the user in the database, return an error" (log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => #(string? %) (provided (#'londonstartup.controllers.auth/update-user! ..db-user.. ..session-user..) => (result/error ..updated-user.. ..field.. ..Msg..), (session/put! 
:user ..updated-user..) => anything :times 0, (resp/redirect ..redirect-url..) => ..Redirect.. :times 0))) (facts "We should be able to log new users" (fact "If we can add the user in the database, we should save them in the session and redirect to the given url" (log-new-user ..session-user.. ..redirect-url..) => ..Redirect.. (provided (users/add! ..session-user..) => (result/result ..added-user..), (session/put! :user ..added-user..) => anything :times 1, (resp/redirect "/signup?redirect_to=..redirect-url..") => ..Redirect.. :times 1)) (fact "If we cannot update the user in the database, return an error" (log-new-user ..session-user.. ..redirect-url..) => #(string? %) (provided (users/add! ..session-user..) => (result/error ..added-user.. ..field.. ..Msg..), (session/put! :user ..added-user..) => anything :times 0, (resp/redirect "/signup?redirect_to=..redirect-url..") => ..Redirect.. :times 0))) (facts "We should be able to validate the oauth-token" (let [request-token {:oauth_token "foo"}] (fact "If the received oath-token is the same as the one in the request-token, we return a valid result" (validate-oauth-token request-token "foo") => valid-result?) (fact "If the received oath-token is different from the one in the request-token, we return an error" (validate-oauth-token request-token "bar") => error-result?) )) (facts "We should be able to authorize a user from the oauth-toke and the oauth-verifier" (fact "If we cannot create a session user, return an error" (authorise-user ..oauth-token.. ..oauth-verifier.. ..redirect-url..) => #(string? %) (provided (#'londonstartup.controllers.auth/create-session-user ..oauth-token.. ..oauth-verifier..) => (error ..result..) (#'londonstartup.controllers.auth/log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => anything :times 0 (#'londonstartup.controllers.auth/log-new-user ..session-user.. ..redirect-url..) 
=> anything :times 0)) (fact "If the users exist in the database, then we update the database user with the twitter authentication and log them" (authorise-user ..oauth-token.. ..oauth-verifier.. ..redirect-url..) => anything (provided (#'londonstartup.controllers.auth/create-session-user ..oauth-token.. ..oauth-verifier..) => (value ..session-user..) (#'londonstartup.controllers.auth/find-user ..session-user..) => (value ..db-user..) (#'londonstartup.controllers.auth/log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => anything :times 1 (#'londonstartup.controllers.auth/log-new-user ..session-user.. ..redirect-url..) => anything :times 0)) (fact "If the users do not exist in the database, then we add the user in the database user and log them" (authorise-user ..oauth-token.. ..oauth-verifier.. ..redirect-url..) => anything (provided (#'londonstartup.controllers.auth/create-session-user ..oauth-token.. ..oauth-verifier..) => (value ..session-user..) (#'londonstartup.controllers.auth/find-user ..session-user..) => (error ..db-user..) (#'londonstartup.controllers.auth/log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => anything :times 0 (#'londonstartup.controllers.auth/log-new-user ..session-user.. ..redirect-url..) => anything :times 1)) ) (facts "We should be able to process login requests" (fact "If a user is already logged, the we directly redirect to the given url" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. nil ..auto..) => anything (provided (session/get :user ) => a-user (resp/redirect "/redirect-url") => anything)) (fact "If the login is denied, we clean the session and we display the login page" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. "true" ..auto..) => ..login-page.. (provided (session/get :user ) => nil (session/clear!) => anything (session/flash! 
"ACCESS DENIED") => anything (views/login-page "/redirect-url") => ..login-page..)) (fact "If auto is nil, we display the login page" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. nil nil) => ..login-page.. (provided (session/get :user ) => nil (session/clear!) => anything :times 0 (views/login-page "/redirect-url") => ..login-page..)) (fact "If auto is false, we display the login page" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. nil "False") => ..login-page.. (provided (session/get :user ) => nil (session/clear!) => anything :times 0 (views/login-page "/redirect-url") => ..login-page..)) (fact "If the login is automatic but we don't have the Twitter authorisation tokens, we redirect to Twitter" (auth/login "/redirect-url" ..oauth-token.. nil nil "True") => ..redirect-to-twitter.. (provided (session/get :user ) => nil (#'londonstartup.controllers.auth/redirect-to-twitter-auth-page "/redirect-url") => ..redirect-to-twitter..)) (fact "If we get the Twitter tokens, we authorize user" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. nil "True") => ..authorizse-user.. (provided (session/get :user ) => nil (#'londonstartup.controllers.auth/authorise-user ..oauth-token.. ..oauth-verifier.. "/redirect-url") => ..authorizse-user..)) ) (facts "We should be able to process logout requests" (fact "We clear the session and redirect to the home page" (auth/logout) => ..redirect-to-home-page.. (provided (session/clear!) => anything (resp/redirect "/") => ..redirect-to-home-page..)))
true
(ns londonstartup.controllers.auth-test (:require [londonstartup.controllers.auth :as auth] [londonstartup.environment :as env] [londonstartup.models.users :as users] [londonstartup.common.result :as result] [londonstartup.services.twitter :as twitter] [londonstartup.services.session :as session] [londonstartup.views.auth :as views] [ring.util.response :as resp]) (:use midje.sweet) (:use [midje.util :only [expose-testables]])) (expose-testables londonstartup.controllers.auth) (defn valid-result? [fact] (result/error-free? fact)) (defn error-result? [fact] (result/has-error? fact)) (defn result-value? [value] (fn [fact] (= (result/value fact) value))) (defn error [value] (result/error value :field "Msg")) (defn value [value] (result/result value)) (def a-user (users/init {:auth "FooPI:PASSWORD:<PASSWORD>END_PI"})) (facts "Callback URL should be calculated from the environment." (against-background (env/get "BASE_URL" anything) => "http://baseurl.com", (env/get "URL") => anything) (fact "If BASE_URL or URL are given, it should return a result" (result/value (callback "foo")) => "http://baseurl.com/login?auto=true&redirect_to=foo") (fact "The redirect_to url is empty, it should redirect to '/'" (result/value (callback nil)) => "http://baseurl.com/login?auto=true&redirect_to=/") (fact "If BASE_URL and URL are empty, it should throw an exception" (callback "/") => (fn [actual] (result/has-error? actual)) (provided (env/get "BASE_URL" anything) => nil))) (facts "We should be able to initialise a user from a Twitter authorisation" (twitter-auth->user {:screen_name "Foo"}) => (result-value? {:username "Foo" :auth {:twitter {:screen_name "Foo"}}})) (facts "We should be able to find a user in the database from a session user" (fact "We should extract the Twitter user_id and search for it" (find-user {:auth {:twitter {:user_id "1" :foo "Foo"}}}) => ...User... (provided (users/user {"auth.twitter.user_id" "1"}) => ...User... 
:times 1)) ) (facts "We should update the user in the database if the twitter authorisation changed" (fact "We should do nothing if the Twitter authorisations are the same" (update-user! {:auth {:twitter "Foo"}} {:auth {:twitter "Foo"}}) => valid-result? (provided (users/update! anything) => anything :times 0)) (fact "We should update if the Twitter authorisations are different" (update-user! {:auth {:twitter {:user_id "1" :foo "Foo"}}} {:auth {:twitter {:user_id "1" :foo "Bar"}}}) => valid-result? (provided (users/update! anything) => (value ...Result...) :times 1)) (fact "We should return an error if the user_id values are different" (update-user! {:auth {:twitter {:user_id "1"}}} {:auth {:twitter {:user_id "2"}}}) => error-result? (provided (users/update! anything) => anything :times 0))) (facts "We should be able to get a request token for a callback url" (fact "We shoud return a valid result if we can calculate the callback and twitter request worked" (get-request-token "/") => valid-result? (provided (#'londonstartup.controllers.auth/callback "/") => (result/result "callback"), (twitter/request-token "callback") => (result/result "token"))) (fact "We should return an error if we cannot calculate the callback" (get-request-token "/") => error-result? (provided (#'londonstartup.controllers.auth/callback "/") => (result/error "callback" :callback "Msg"))) (fact "We should return an error if Twitter returns an error" (get-request-token "/") => error-result? (provided (#'londonstartup.controllers.auth/callback "/") => (result/result "callback"), (twitter/request-token "callback") => (result/error "token" :token "Msg")))) (facts "Twitter request-token must be confirmed" (fact "We should return a valid result if oath_callback_confirmed is \"true\"" (confirmed? {:oauth_callback_confirmed "true"}) => valid-result?) (fact "We should return an error result if oath_callback_confirmed is not \"true\"" (confirmed? {:oauth_callback_confirmed "foo"}) => error-result?) 
(fact "We should return an error result if no oauth_callback_confirmed is given" (confirmed? {}) => error-result?)) (facts "Adding a value in the session should just return the value as a valid result" (session-put! ..Value.. ..Key..) => (result/result ..Value..) (provided (session/put! ..Key.. ..Value..) => anything :times 1)) (facts "Getting a value from the session should just return the value as a valid result" (session-get ..Key..) => (result/result ..Value..) (provided (session/get ..Key..) => ..Value.. :times 1)) (facts "We should be able to redirect the user to the Twitter authorisation page" (fact "If we can get the approval URL we should redirect to it" (redirect-to-twitter-auth-page ..URL..) => ..Redirect.. (provided (#'londonstartup.controllers.auth/get-approval-url ..URL..) => (result/result ..Approval-URL..), (resp/redirect ..Approval-URL..) => ..Redirect.. :times 1)) (fact "If we cannot get the approval URL we should not redirect" (redirect-to-twitter-auth-page ..URL..) => anything (provided (#'londonstartup.controllers.auth/get-approval-url ..URL..) => (result/error ..Approval-URL.. :foo "Msg"), (resp/redirect ..Approval-URL..) => ..Redirect.. :times 0))) (facts "We should be able to log existing users" (fact "If we can update the user in the database, we should save them in the session and redirect to the given url" (log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => ..Redirect.. (provided (#'londonstartup.controllers.auth/update-user! ..db-user.. ..session-user..) => (result/result ..updated-user..), (session/put! :user ..updated-user..) => anything :times 1, (resp/redirect ..redirect-url..) => ..Redirect.. :times 1)) (fact "If we cannot update the user in the database, return an error" (log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => #(string? %) (provided (#'londonstartup.controllers.auth/update-user! ..db-user.. ..session-user..) => (result/error ..updated-user.. ..field.. ..Msg..), (session/put! 
:user ..updated-user..) => anything :times 0, (resp/redirect ..redirect-url..) => ..Redirect.. :times 0))) (facts "We should be able to log new users" (fact "If we can add the user in the database, we should save them in the session and redirect to the given url" (log-new-user ..session-user.. ..redirect-url..) => ..Redirect.. (provided (users/add! ..session-user..) => (result/result ..added-user..), (session/put! :user ..added-user..) => anything :times 1, (resp/redirect "/signup?redirect_to=..redirect-url..") => ..Redirect.. :times 1)) (fact "If we cannot update the user in the database, return an error" (log-new-user ..session-user.. ..redirect-url..) => #(string? %) (provided (users/add! ..session-user..) => (result/error ..added-user.. ..field.. ..Msg..), (session/put! :user ..added-user..) => anything :times 0, (resp/redirect "/signup?redirect_to=..redirect-url..") => ..Redirect.. :times 0))) (facts "We should be able to validate the oauth-token" (let [request-token {:oauth_token "foo"}] (fact "If the received oath-token is the same as the one in the request-token, we return a valid result" (validate-oauth-token request-token "foo") => valid-result?) (fact "If the received oath-token is different from the one in the request-token, we return an error" (validate-oauth-token request-token "bar") => error-result?) )) (facts "We should be able to authorize a user from the oauth-toke and the oauth-verifier" (fact "If we cannot create a session user, return an error" (authorise-user ..oauth-token.. ..oauth-verifier.. ..redirect-url..) => #(string? %) (provided (#'londonstartup.controllers.auth/create-session-user ..oauth-token.. ..oauth-verifier..) => (error ..result..) (#'londonstartup.controllers.auth/log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => anything :times 0 (#'londonstartup.controllers.auth/log-new-user ..session-user.. ..redirect-url..) 
=> anything :times 0)) (fact "If the users exist in the database, then we update the database user with the twitter authentication and log them" (authorise-user ..oauth-token.. ..oauth-verifier.. ..redirect-url..) => anything (provided (#'londonstartup.controllers.auth/create-session-user ..oauth-token.. ..oauth-verifier..) => (value ..session-user..) (#'londonstartup.controllers.auth/find-user ..session-user..) => (value ..db-user..) (#'londonstartup.controllers.auth/log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => anything :times 1 (#'londonstartup.controllers.auth/log-new-user ..session-user.. ..redirect-url..) => anything :times 0)) (fact "If the users do not exist in the database, then we add the user in the database user and log them" (authorise-user ..oauth-token.. ..oauth-verifier.. ..redirect-url..) => anything (provided (#'londonstartup.controllers.auth/create-session-user ..oauth-token.. ..oauth-verifier..) => (value ..session-user..) (#'londonstartup.controllers.auth/find-user ..session-user..) => (error ..db-user..) (#'londonstartup.controllers.auth/log-existing-user ..db-user.. ..session-user.. ..redirect-url..) => anything :times 0 (#'londonstartup.controllers.auth/log-new-user ..session-user.. ..redirect-url..) => anything :times 1)) ) (facts "We should be able to process login requests" (fact "If a user is already logged, the we directly redirect to the given url" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. nil ..auto..) => anything (provided (session/get :user ) => a-user (resp/redirect "/redirect-url") => anything)) (fact "If the login is denied, we clean the session and we display the login page" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. "true" ..auto..) => ..login-page.. (provided (session/get :user ) => nil (session/clear!) => anything (session/flash! 
"ACCESS DENIED") => anything (views/login-page "/redirect-url") => ..login-page..)) (fact "If auto is nil, we display the login page" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. nil nil) => ..login-page.. (provided (session/get :user ) => nil (session/clear!) => anything :times 0 (views/login-page "/redirect-url") => ..login-page..)) (fact "If auto is false, we display the login page" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. nil "False") => ..login-page.. (provided (session/get :user ) => nil (session/clear!) => anything :times 0 (views/login-page "/redirect-url") => ..login-page..)) (fact "If the login is automatic but we don't have the Twitter authorisation tokens, we redirect to Twitter" (auth/login "/redirect-url" ..oauth-token.. nil nil "True") => ..redirect-to-twitter.. (provided (session/get :user ) => nil (#'londonstartup.controllers.auth/redirect-to-twitter-auth-page "/redirect-url") => ..redirect-to-twitter..)) (fact "If we get the Twitter tokens, we authorize user" (auth/login "/redirect-url" ..oauth-token.. ..oauth-verifier.. nil "True") => ..authorizse-user.. (provided (session/get :user ) => nil (#'londonstartup.controllers.auth/authorise-user ..oauth-token.. ..oauth-verifier.. "/redirect-url") => ..authorizse-user..)) ) (facts "We should be able to process logout requests" (fact "We clear the session and redirect to the home page" (auth/logout) => ..redirect-to-home-page.. (provided (session/clear!) => anything (resp/redirect "/") => ..redirect-to-home-page..)))
[ { "context": "e format\"\n (trf \"A {a} B {name}\" :a 1 :name \"Sam\") => \"A 1 B Sam\"\n \"accepts a single map as a", "end": 4238, "score": 0.8861222267150879, "start": 4235, "tag": "NAME", "value": "Sam" }, { "context": " (trf \"A {a} B {name}\" :a 1 :name \"Sam\") => \"A 1 B Sam\"\n \"accepts a single map as arguments to the ", "end": 4254, "score": 0.5275896787643433, "start": 4251, "tag": "NAME", "value": "Sam" }, { "context": " format\"\n (trf \"A {a} B {name}\" {:a 1 :name \"Sam\"}) => \"A 1 B Sam\")\n (assertions \"formats numbe", "end": 4356, "score": 0.9138978123664856, "start": 4353, "tag": "NAME", "value": "Sam" }, { "context": "trf \"A {a} B {name}\" {:a 1 :name \"Sam\"}) => \"A 1 B Sam\")\n (assertions \"formats numbers - US\"\n (t", "end": 4373, "score": 0.7151486873626709, "start": 4370, "tag": "NAME", "value": "Sam" }, { "context": "o\"\n \"trf\"\n (trf \"Hi, {name}\" :name \"Tony\") => \"Hi, Tony\"\n \"trf map-based\"\n (", "end": 6889, "score": 0.9991739988327026, "start": 6885, "tag": "NAME", "value": "Tony" }, { "context": "f\"\n (trf \"Hi, {name}\" :name \"Tony\") => \"Hi, Tony\"\n \"trf map-based\"\n (trf \"Hi, {name}", "end": 6904, "score": 0.9969966411590576, "start": 6900, "tag": "NAME", "value": "Tony" }, { "context": "\"trf map-based\"\n (trf \"Hi, {name}\" {:name \"Tony\"}) => \"Hi, Tony\"))))\n\n#?(:clj\n (specification \"", "end": 6968, "score": 0.9994592666625977, "start": 6964, "tag": "NAME", "value": "Tony" }, { "context": "\n (trf \"Hi, {name}\" {:name \"Tony\"}) => \"Hi, Tony\"))))\n\n#?(:clj\n (specification \"Locale loading f", "end": 6984, "score": 0.9986546039581299, "start": 6980, "tag": "NAME", "value": "Tony" }, { "context": "il\n (dom/p nil (trf \"Hello, {name}\" {:name \"Sam\"}))\n (dom/p nil (trf \"It is {n,date}\" {:n (", "end": 8093, "score": 0.9759681224822998, "start": 8090, "tag": "NAME", "value": "Sam" } ]
src/test/com/fulcrologic/fulcro_i18n/i18n_spec.cljc
avisi-apps/fulcro-i18n
0
(ns com.fulcrologic.fulcro-i18n.i18n-spec (:require [clojure.string :as str] [fulcro-spec.core :refer [specification behavior provided assertions when-mocking]] [com.fulcrologic.fulcro-i18n.i18n :as i18n :refer [tr trf trc]] #?(:cljs ["intl-messageformat" :default IntlMessageFormat]) [com.fulcrologic.fulcro.components :as comp :refer [defsc]] [com.fulcrologic.fulcro.algorithms.server-render :as ssr] #?(:cljs [com.fulcrologic.fulcro.dom :as dom] :clj [com.fulcrologic.fulcro.dom-server :as dom]) [taoensso.timbre :as log]) #?(:clj (:import (com.ibm.icu.text MessageFormat) (java.util Locale)))) (def es-locale {::i18n/locale :es ::i18n/translations {["" "Hi"] "Ola" ["Abbreviation for Monday" "M"] "L" ["" "{n,plural,=0 {none} =1 {one} other {#}}"] "{n,plural,=0 {nada} =1 {uno} other {#}}"}}) (def locale-with-no-translations {::i18n/locale :en-US ::i18n/translations {}}) (def bad-locale {::i18n/locale :es ::i18n/translations {["" "Hi"] ""}}) (defn date [year month day hour min sec millis] #?(:clj (java.util.Date. (- year 1900) month day hour min sec) :cljs (js/Date. year month day hour min sec millis))) (defn deflt-format [{:keys [::i18n/localized-format-string ::i18n/locale ::i18n/format-options]}] #?(:cljs (let [locale-str (name locale) formatter (IntlMessageFormat. localized-format-string locale-str)] (.format formatter (clj->js format-options))) :clj (let [locale-str (name locale)] (try (let [formatter (new MessageFormat localized-format-string (Locale/forLanguageTag locale-str))] (.format formatter format-options)) (catch Exception e (log/error "Formatting failed!" 
e) "???"))))) (specification "Base translation -- tr" (assertions "returns the message key if there is no translation" (i18n/with-locale deflt-format locale-with-no-translations (tr "Hello")) => "Hello" "returns message key if translation is an empty string" (i18n/with-locale deflt-format bad-locale (tr "Hi")) => "Hi" "returns message key if no entry is found in the translations" (i18n/with-locale deflt-format es-locale (tr "Hello")) => "Hello" "Returns an error-marker string if anything but a literal string is used" (str/starts-with? (tr 4) "ERROR: tr requires a literal string") => true (str/starts-with? (tr map) "ERROR: tr requires a literal string") => true (str/starts-with? (tr :keyword) "ERROR: tr requires a literal string") => true " (error markers include namespace and line number)" (tr 4) =fn=> (fn [s] (re-matches #".*on line [1-9][0-9]* in com.fulcro.*" s)))) (specification "Message translations with context" (assertions "Requires literal strings for both arguments" (str/includes? (trc 1 4) "literal string") => true (str/includes? (trc 1 "msg") "literal string") => true (str/includes? (trc "c" 4) "literal string") => true " (error message includes line and namespace)" (trc 1 4) =fn=> (fn [s] (re-matches #".*on line [1-9][0-9]* in com.fulcro.*" s)) "Returns the message parameter if there is no translation" (trc "c" "m") => "m") (assertions "Formats in en-US locale" (trc "Abbreviation for Monday" "M") => "M") (assertions "Formats in an es locale" (i18n/with-locale deflt-format es-locale (trc "Abbreviation for Monday" "M")) => "L")) (specification "Message format translation -- trf" (i18n/with-locale deflt-format locale-with-no-translations (assertions "returns the string it is passed if there is no translation" (trf "Hello") => "Hello") (let [s "str"] (assertions "Requires that the format be a literal string" (str/includes? 
(trf s) "literal string") => true " (error includes line and namespace)" (trf s) =fn=> (fn [s] (re-matches #".*on line [1-9][0-9]* in com.fulcro.*" s)))) (assertions "accepts a sequence of k/v pairs as arguments to the format" (trf "A {a} B {name}" :a 1 :name "Sam") => "A 1 B Sam" "accepts a single map as arguments to the format" (trf "A {a} B {name}" {:a 1 :name "Sam"}) => "A 1 B Sam") (assertions "formats numbers - US" (trf "{a, number}" :a 18349) => "18,349") (assertions "formats dates - US" (trf "{a, date, long}" :a (date 1990 3 1 13 45 22 0)) => "April 1, 1990" (trf "{a, date, medium}" :a (date 1990 3 1 13 45 22 0)) => "Apr 1, 1990" (trf "{a, date, short}" :a (date 1990 3 1 13 45 22 0)) => "4/1/90") (behavior "formats plurals - US" (assertions (trf "{n, plural, =0 {no apples} =1 {1 apple} other {# apples}}" :n 0) => "no apples" (trf "{n, plural, =0 {no apples} =1 {1 apple} other {# apples}}" :n 1) => "1 apple" (trf "{n, plural, =0 {no apples} =1 {1 apple} other {# apples}}" :n 2) => "2 apples" (trf "{n, plural, =0 {no apples} =1 {1 apple} other {# apples}}" :n 146) => "146 apples")) (assertions "formats numbers - Germany" (i18n/with-locale deflt-format {::i18n/locale :de} (trf "{a, number}" :a 18349)) => "18.349") (i18n/with-locale deflt-format (assoc es-locale ::i18n/locale :es-MX) (behavior "NOTE: JVM and various browsers all do this a little differently due to nuances of the various formatting implementations!" 
#?(:cljs (assertions "formats dates - Mexico (client-side)" (trf "{a, date, long}" :a (date 1990 3 1 13 45 22 0)) => "1 de abril de 1990" (trf "{a, date, medium}" :a (date 1990 3 1 13 45 22 0)) =fn=> (fn [s] (re-matches #"^1 .*abr.*" s)) (trf "{a, date, short}" :a (date 1990 3 1 13 45 22 0)) => "1/4/90") :clj (assertions "formats dates - Mexico (server-side)" (trf "{a, date, long}" :a (date 1990 3 1 13 45 22 0)) => "1 de abril de 1990" (trf "{a, date, medium}" :a (date 1990 3 1 13 45 22 0)) =fn=> (fn [s] (re-matches #"1 abr 1990" s)) (trf "{a, date, short}" :a (date 1990 3 1 13 45 22 0)) => "01/04/90"))) (behavior "formats plurals - Spanish" (assertions (trf "{n,plural,=0 {none} =1 {one} other {#}}" :n 1) => "uno" (trf "{n,plural,=0 {none} =1 {one} other {#}}" :n 2) => "2" (trf "{n,plural,=0 {none} =1 {one} other {#}}" :n 146) => "146"))))) (specification "An undefined locale" (i18n/with-locale deflt-format nil (behavior "uses the in-code translation" (assertions "tr" (tr "Hello") => "Hello" "trc" (trc "context" "Hello") => "Hello" "trf" (trf "Hi, {name}" :name "Tony") => "Hi, Tony" "trf map-based" (trf "Hi, {name}" {:name "Tony"}) => "Hi, Tony")))) #?(:clj (specification "Locale loading from PO files." (when-mocking (log/-log! _ lvl _ _ _ _ _ _ _ _) => (assertions "Logs an error when no locale is found" lvl => :error) (let [xlation (i18n/load-locale "fulcro" :es) missing-xlation (i18n/load-locale "boo" :xx)] (assertions "Returns nil if no locale is found" missing-xlation => nil "Loaded translation exists" xlation => {::i18n/locale :es ::i18n/translations {["" "It is {n,date}"] "Es {n, date}" ["" "Hello, {name}"] "Hola {name}" ["Gender abbreviation" "M"] "M" ["" "Hello"] "Hola"}}))))) #?(:clj (defsc Child [this props] {:query [:ui/checked?] :initial-state {:ui/checked? 
false}} (dom/div nil (dom/p nil (trf "Hello, {name}" {:name "Sam"})) (dom/p nil (trf "It is {n,date}" {:n (java.util.Date.)})) (dom/p nil (trc "Gender abbreviation" "M")) (tr "Hello")))) #?(:clj (def ui-child (comp/factory Child))) #?(:clj (defsc Root [this {:keys [child locale-selector]}] {:query [{:locale-selector (comp/get-query i18n/LocaleSelector)} {::i18n/current-locale (comp/get-query i18n/Locale)} {:child (comp/get-query Child)}] :initial-state {:child {} ::i18n/current-locale {:locale :en :name "English" :translations {}} :locale-selector {:locales [{:locale :en :name "English"} {:locale :es :name "Espanol"} {:locale :de :name "Deutsch"}]}}} (dom/div nil (i18n/ui-locale-selector locale-selector) (ui-child child)))) #?(:clj (defn message-formatter [{:keys [::i18n/localized-format-string ::i18n/locale ::i18n/format-options]}] localized-format-string)) #?(:clj (specification "Locale override in SSR" (let [initial-tree (comp/get-initial-state Root {}) es-locale (i18n/load-locale "fulcro" :es) tree-with-locale (assoc initial-tree ::i18n/current-locale es-locale) ui-root (comp/factory Root) output-html (i18n/with-locale message-formatter es-locale (dom/render-to-str (ui-root tree-with-locale))) bad-locale-html (i18n/with-locale message-formatter nil (dom/render-to-str (ui-root initial-tree)))] (assertions "Renders properly even if the locale isn't set correctly" (str/includes? bad-locale-html "It is {n,date}") => true "Renders properly with the overridden locale" (str/includes? output-html "Es {n, date}") => true))))
8172
(ns com.fulcrologic.fulcro-i18n.i18n-spec (:require [clojure.string :as str] [fulcro-spec.core :refer [specification behavior provided assertions when-mocking]] [com.fulcrologic.fulcro-i18n.i18n :as i18n :refer [tr trf trc]] #?(:cljs ["intl-messageformat" :default IntlMessageFormat]) [com.fulcrologic.fulcro.components :as comp :refer [defsc]] [com.fulcrologic.fulcro.algorithms.server-render :as ssr] #?(:cljs [com.fulcrologic.fulcro.dom :as dom] :clj [com.fulcrologic.fulcro.dom-server :as dom]) [taoensso.timbre :as log]) #?(:clj (:import (com.ibm.icu.text MessageFormat) (java.util Locale)))) (def es-locale {::i18n/locale :es ::i18n/translations {["" "Hi"] "Ola" ["Abbreviation for Monday" "M"] "L" ["" "{n,plural,=0 {none} =1 {one} other {#}}"] "{n,plural,=0 {nada} =1 {uno} other {#}}"}}) (def locale-with-no-translations {::i18n/locale :en-US ::i18n/translations {}}) (def bad-locale {::i18n/locale :es ::i18n/translations {["" "Hi"] ""}}) (defn date [year month day hour min sec millis] #?(:clj (java.util.Date. (- year 1900) month day hour min sec) :cljs (js/Date. year month day hour min sec millis))) (defn deflt-format [{:keys [::i18n/localized-format-string ::i18n/locale ::i18n/format-options]}] #?(:cljs (let [locale-str (name locale) formatter (IntlMessageFormat. localized-format-string locale-str)] (.format formatter (clj->js format-options))) :clj (let [locale-str (name locale)] (try (let [formatter (new MessageFormat localized-format-string (Locale/forLanguageTag locale-str))] (.format formatter format-options)) (catch Exception e (log/error "Formatting failed!" 
e) "???"))))) (specification "Base translation -- tr" (assertions "returns the message key if there is no translation" (i18n/with-locale deflt-format locale-with-no-translations (tr "Hello")) => "Hello" "returns message key if translation is an empty string" (i18n/with-locale deflt-format bad-locale (tr "Hi")) => "Hi" "returns message key if no entry is found in the translations" (i18n/with-locale deflt-format es-locale (tr "Hello")) => "Hello" "Returns an error-marker string if anything but a literal string is used" (str/starts-with? (tr 4) "ERROR: tr requires a literal string") => true (str/starts-with? (tr map) "ERROR: tr requires a literal string") => true (str/starts-with? (tr :keyword) "ERROR: tr requires a literal string") => true " (error markers include namespace and line number)" (tr 4) =fn=> (fn [s] (re-matches #".*on line [1-9][0-9]* in com.fulcro.*" s)))) (specification "Message translations with context" (assertions "Requires literal strings for both arguments" (str/includes? (trc 1 4) "literal string") => true (str/includes? (trc 1 "msg") "literal string") => true (str/includes? (trc "c" 4) "literal string") => true " (error message includes line and namespace)" (trc 1 4) =fn=> (fn [s] (re-matches #".*on line [1-9][0-9]* in com.fulcro.*" s)) "Returns the message parameter if there is no translation" (trc "c" "m") => "m") (assertions "Formats in en-US locale" (trc "Abbreviation for Monday" "M") => "M") (assertions "Formats in an es locale" (i18n/with-locale deflt-format es-locale (trc "Abbreviation for Monday" "M")) => "L")) (specification "Message format translation -- trf" (i18n/with-locale deflt-format locale-with-no-translations (assertions "returns the string it is passed if there is no translation" (trf "Hello") => "Hello") (let [s "str"] (assertions "Requires that the format be a literal string" (str/includes? 
(trf s) "literal string") => true " (error includes line and namespace)" (trf s) =fn=> (fn [s] (re-matches #".*on line [1-9][0-9]* in com.fulcro.*" s)))) (assertions "accepts a sequence of k/v pairs as arguments to the format" (trf "A {a} B {name}" :a 1 :name "<NAME>") => "A 1 B <NAME>" "accepts a single map as arguments to the format" (trf "A {a} B {name}" {:a 1 :name "<NAME>"}) => "A 1 B <NAME>") (assertions "formats numbers - US" (trf "{a, number}" :a 18349) => "18,349") (assertions "formats dates - US" (trf "{a, date, long}" :a (date 1990 3 1 13 45 22 0)) => "April 1, 1990" (trf "{a, date, medium}" :a (date 1990 3 1 13 45 22 0)) => "Apr 1, 1990" (trf "{a, date, short}" :a (date 1990 3 1 13 45 22 0)) => "4/1/90") (behavior "formats plurals - US" (assertions (trf "{n, plural, =0 {no apples} =1 {1 apple} other {# apples}}" :n 0) => "no apples" (trf "{n, plural, =0 {no apples} =1 {1 apple} other {# apples}}" :n 1) => "1 apple" (trf "{n, plural, =0 {no apples} =1 {1 apple} other {# apples}}" :n 2) => "2 apples" (trf "{n, plural, =0 {no apples} =1 {1 apple} other {# apples}}" :n 146) => "146 apples")) (assertions "formats numbers - Germany" (i18n/with-locale deflt-format {::i18n/locale :de} (trf "{a, number}" :a 18349)) => "18.349") (i18n/with-locale deflt-format (assoc es-locale ::i18n/locale :es-MX) (behavior "NOTE: JVM and various browsers all do this a little differently due to nuances of the various formatting implementations!" 
#?(:cljs (assertions "formats dates - Mexico (client-side)" (trf "{a, date, long}" :a (date 1990 3 1 13 45 22 0)) => "1 de abril de 1990" (trf "{a, date, medium}" :a (date 1990 3 1 13 45 22 0)) =fn=> (fn [s] (re-matches #"^1 .*abr.*" s)) (trf "{a, date, short}" :a (date 1990 3 1 13 45 22 0)) => "1/4/90") :clj (assertions "formats dates - Mexico (server-side)" (trf "{a, date, long}" :a (date 1990 3 1 13 45 22 0)) => "1 de abril de 1990" (trf "{a, date, medium}" :a (date 1990 3 1 13 45 22 0)) =fn=> (fn [s] (re-matches #"1 abr 1990" s)) (trf "{a, date, short}" :a (date 1990 3 1 13 45 22 0)) => "01/04/90"))) (behavior "formats plurals - Spanish" (assertions (trf "{n,plural,=0 {none} =1 {one} other {#}}" :n 1) => "uno" (trf "{n,plural,=0 {none} =1 {one} other {#}}" :n 2) => "2" (trf "{n,plural,=0 {none} =1 {one} other {#}}" :n 146) => "146"))))) (specification "An undefined locale" (i18n/with-locale deflt-format nil (behavior "uses the in-code translation" (assertions "tr" (tr "Hello") => "Hello" "trc" (trc "context" "Hello") => "Hello" "trf" (trf "Hi, {name}" :name "<NAME>") => "Hi, <NAME>" "trf map-based" (trf "Hi, {name}" {:name "<NAME>"}) => "Hi, <NAME>")))) #?(:clj (specification "Locale loading from PO files." (when-mocking (log/-log! _ lvl _ _ _ _ _ _ _ _) => (assertions "Logs an error when no locale is found" lvl => :error) (let [xlation (i18n/load-locale "fulcro" :es) missing-xlation (i18n/load-locale "boo" :xx)] (assertions "Returns nil if no locale is found" missing-xlation => nil "Loaded translation exists" xlation => {::i18n/locale :es ::i18n/translations {["" "It is {n,date}"] "Es {n, date}" ["" "Hello, {name}"] "Hola {name}" ["Gender abbreviation" "M"] "M" ["" "Hello"] "Hola"}}))))) #?(:clj (defsc Child [this props] {:query [:ui/checked?] :initial-state {:ui/checked? 
false}} (dom/div nil (dom/p nil (trf "Hello, {name}" {:name "<NAME>"})) (dom/p nil (trf "It is {n,date}" {:n (java.util.Date.)})) (dom/p nil (trc "Gender abbreviation" "M")) (tr "Hello")))) #?(:clj (def ui-child (comp/factory Child))) #?(:clj (defsc Root [this {:keys [child locale-selector]}] {:query [{:locale-selector (comp/get-query i18n/LocaleSelector)} {::i18n/current-locale (comp/get-query i18n/Locale)} {:child (comp/get-query Child)}] :initial-state {:child {} ::i18n/current-locale {:locale :en :name "English" :translations {}} :locale-selector {:locales [{:locale :en :name "English"} {:locale :es :name "Espanol"} {:locale :de :name "Deutsch"}]}}} (dom/div nil (i18n/ui-locale-selector locale-selector) (ui-child child)))) #?(:clj (defn message-formatter [{:keys [::i18n/localized-format-string ::i18n/locale ::i18n/format-options]}] localized-format-string)) #?(:clj (specification "Locale override in SSR" (let [initial-tree (comp/get-initial-state Root {}) es-locale (i18n/load-locale "fulcro" :es) tree-with-locale (assoc initial-tree ::i18n/current-locale es-locale) ui-root (comp/factory Root) output-html (i18n/with-locale message-formatter es-locale (dom/render-to-str (ui-root tree-with-locale))) bad-locale-html (i18n/with-locale message-formatter nil (dom/render-to-str (ui-root initial-tree)))] (assertions "Renders properly even if the locale isn't set correctly" (str/includes? bad-locale-html "It is {n,date}") => true "Renders properly with the overridden locale" (str/includes? output-html "Es {n, date}") => true))))
true
(ns com.fulcrologic.fulcro-i18n.i18n-spec (:require [clojure.string :as str] [fulcro-spec.core :refer [specification behavior provided assertions when-mocking]] [com.fulcrologic.fulcro-i18n.i18n :as i18n :refer [tr trf trc]] #?(:cljs ["intl-messageformat" :default IntlMessageFormat]) [com.fulcrologic.fulcro.components :as comp :refer [defsc]] [com.fulcrologic.fulcro.algorithms.server-render :as ssr] #?(:cljs [com.fulcrologic.fulcro.dom :as dom] :clj [com.fulcrologic.fulcro.dom-server :as dom]) [taoensso.timbre :as log]) #?(:clj (:import (com.ibm.icu.text MessageFormat) (java.util Locale)))) (def es-locale {::i18n/locale :es ::i18n/translations {["" "Hi"] "Ola" ["Abbreviation for Monday" "M"] "L" ["" "{n,plural,=0 {none} =1 {one} other {#}}"] "{n,plural,=0 {nada} =1 {uno} other {#}}"}}) (def locale-with-no-translations {::i18n/locale :en-US ::i18n/translations {}}) (def bad-locale {::i18n/locale :es ::i18n/translations {["" "Hi"] ""}}) (defn date [year month day hour min sec millis] #?(:clj (java.util.Date. (- year 1900) month day hour min sec) :cljs (js/Date. year month day hour min sec millis))) (defn deflt-format [{:keys [::i18n/localized-format-string ::i18n/locale ::i18n/format-options]}] #?(:cljs (let [locale-str (name locale) formatter (IntlMessageFormat. localized-format-string locale-str)] (.format formatter (clj->js format-options))) :clj (let [locale-str (name locale)] (try (let [formatter (new MessageFormat localized-format-string (Locale/forLanguageTag locale-str))] (.format formatter format-options)) (catch Exception e (log/error "Formatting failed!" 
e) "???"))))) (specification "Base translation -- tr" (assertions "returns the message key if there is no translation" (i18n/with-locale deflt-format locale-with-no-translations (tr "Hello")) => "Hello" "returns message key if translation is an empty string" (i18n/with-locale deflt-format bad-locale (tr "Hi")) => "Hi" "returns message key if no entry is found in the translations" (i18n/with-locale deflt-format es-locale (tr "Hello")) => "Hello" "Returns an error-marker string if anything but a literal string is used" (str/starts-with? (tr 4) "ERROR: tr requires a literal string") => true (str/starts-with? (tr map) "ERROR: tr requires a literal string") => true (str/starts-with? (tr :keyword) "ERROR: tr requires a literal string") => true " (error markers include namespace and line number)" (tr 4) =fn=> (fn [s] (re-matches #".*on line [1-9][0-9]* in com.fulcro.*" s)))) (specification "Message translations with context" (assertions "Requires literal strings for both arguments" (str/includes? (trc 1 4) "literal string") => true (str/includes? (trc 1 "msg") "literal string") => true (str/includes? (trc "c" 4) "literal string") => true " (error message includes line and namespace)" (trc 1 4) =fn=> (fn [s] (re-matches #".*on line [1-9][0-9]* in com.fulcro.*" s)) "Returns the message parameter if there is no translation" (trc "c" "m") => "m") (assertions "Formats in en-US locale" (trc "Abbreviation for Monday" "M") => "M") (assertions "Formats in an es locale" (i18n/with-locale deflt-format es-locale (trc "Abbreviation for Monday" "M")) => "L")) (specification "Message format translation -- trf" (i18n/with-locale deflt-format locale-with-no-translations (assertions "returns the string it is passed if there is no translation" (trf "Hello") => "Hello") (let [s "str"] (assertions "Requires that the format be a literal string" (str/includes? 
(trf s) "literal string") => true " (error includes line and namespace)" (trf s) =fn=> (fn [s] (re-matches #".*on line [1-9][0-9]* in com.fulcro.*" s)))) (assertions "accepts a sequence of k/v pairs as arguments to the format" (trf "A {a} B {name}" :a 1 :name "PI:NAME:<NAME>END_PI") => "A 1 B PI:NAME:<NAME>END_PI" "accepts a single map as arguments to the format" (trf "A {a} B {name}" {:a 1 :name "PI:NAME:<NAME>END_PI"}) => "A 1 B PI:NAME:<NAME>END_PI") (assertions "formats numbers - US" (trf "{a, number}" :a 18349) => "18,349") (assertions "formats dates - US" (trf "{a, date, long}" :a (date 1990 3 1 13 45 22 0)) => "April 1, 1990" (trf "{a, date, medium}" :a (date 1990 3 1 13 45 22 0)) => "Apr 1, 1990" (trf "{a, date, short}" :a (date 1990 3 1 13 45 22 0)) => "4/1/90") (behavior "formats plurals - US" (assertions (trf "{n, plural, =0 {no apples} =1 {1 apple} other {# apples}}" :n 0) => "no apples" (trf "{n, plural, =0 {no apples} =1 {1 apple} other {# apples}}" :n 1) => "1 apple" (trf "{n, plural, =0 {no apples} =1 {1 apple} other {# apples}}" :n 2) => "2 apples" (trf "{n, plural, =0 {no apples} =1 {1 apple} other {# apples}}" :n 146) => "146 apples")) (assertions "formats numbers - Germany" (i18n/with-locale deflt-format {::i18n/locale :de} (trf "{a, number}" :a 18349)) => "18.349") (i18n/with-locale deflt-format (assoc es-locale ::i18n/locale :es-MX) (behavior "NOTE: JVM and various browsers all do this a little differently due to nuances of the various formatting implementations!" 
#?(:cljs (assertions "formats dates - Mexico (client-side)" (trf "{a, date, long}" :a (date 1990 3 1 13 45 22 0)) => "1 de abril de 1990" (trf "{a, date, medium}" :a (date 1990 3 1 13 45 22 0)) =fn=> (fn [s] (re-matches #"^1 .*abr.*" s)) (trf "{a, date, short}" :a (date 1990 3 1 13 45 22 0)) => "1/4/90") :clj (assertions "formats dates - Mexico (server-side)" (trf "{a, date, long}" :a (date 1990 3 1 13 45 22 0)) => "1 de abril de 1990" (trf "{a, date, medium}" :a (date 1990 3 1 13 45 22 0)) =fn=> (fn [s] (re-matches #"1 abr 1990" s)) (trf "{a, date, short}" :a (date 1990 3 1 13 45 22 0)) => "01/04/90"))) (behavior "formats plurals - Spanish" (assertions (trf "{n,plural,=0 {none} =1 {one} other {#}}" :n 1) => "uno" (trf "{n,plural,=0 {none} =1 {one} other {#}}" :n 2) => "2" (trf "{n,plural,=0 {none} =1 {one} other {#}}" :n 146) => "146"))))) (specification "An undefined locale" (i18n/with-locale deflt-format nil (behavior "uses the in-code translation" (assertions "tr" (tr "Hello") => "Hello" "trc" (trc "context" "Hello") => "Hello" "trf" (trf "Hi, {name}" :name "PI:NAME:<NAME>END_PI") => "Hi, PI:NAME:<NAME>END_PI" "trf map-based" (trf "Hi, {name}" {:name "PI:NAME:<NAME>END_PI"}) => "Hi, PI:NAME:<NAME>END_PI")))) #?(:clj (specification "Locale loading from PO files." (when-mocking (log/-log! _ lvl _ _ _ _ _ _ _ _) => (assertions "Logs an error when no locale is found" lvl => :error) (let [xlation (i18n/load-locale "fulcro" :es) missing-xlation (i18n/load-locale "boo" :xx)] (assertions "Returns nil if no locale is found" missing-xlation => nil "Loaded translation exists" xlation => {::i18n/locale :es ::i18n/translations {["" "It is {n,date}"] "Es {n, date}" ["" "Hello, {name}"] "Hola {name}" ["Gender abbreviation" "M"] "M" ["" "Hello"] "Hola"}}))))) #?(:clj (defsc Child [this props] {:query [:ui/checked?] :initial-state {:ui/checked? 
false}} (dom/div nil (dom/p nil (trf "Hello, {name}" {:name "PI:NAME:<NAME>END_PI"})) (dom/p nil (trf "It is {n,date}" {:n (java.util.Date.)})) (dom/p nil (trc "Gender abbreviation" "M")) (tr "Hello")))) #?(:clj (def ui-child (comp/factory Child))) #?(:clj (defsc Root [this {:keys [child locale-selector]}] {:query [{:locale-selector (comp/get-query i18n/LocaleSelector)} {::i18n/current-locale (comp/get-query i18n/Locale)} {:child (comp/get-query Child)}] :initial-state {:child {} ::i18n/current-locale {:locale :en :name "English" :translations {}} :locale-selector {:locales [{:locale :en :name "English"} {:locale :es :name "Espanol"} {:locale :de :name "Deutsch"}]}}} (dom/div nil (i18n/ui-locale-selector locale-selector) (ui-child child)))) #?(:clj (defn message-formatter [{:keys [::i18n/localized-format-string ::i18n/locale ::i18n/format-options]}] localized-format-string)) #?(:clj (specification "Locale override in SSR" (let [initial-tree (comp/get-initial-state Root {}) es-locale (i18n/load-locale "fulcro" :es) tree-with-locale (assoc initial-tree ::i18n/current-locale es-locale) ui-root (comp/factory Root) output-html (i18n/with-locale message-formatter es-locale (dom/render-to-str (ui-root tree-with-locale))) bad-locale-html (i18n/with-locale message-formatter nil (dom/render-to-str (ui-root initial-tree)))] (assertions "Renders properly even if the locale isn't set correctly" (str/includes? bad-locale-html "It is {n,date}") => true "Renders properly with the overridden locale" (str/includes? output-html "Es {n, date}") => true))))
[ { "context": "te false})\n\n(def registration-data\n {\n :email \"test.registration@example.com\"\n :first_name \"Testing\"\n :last_name \"Registra", "end": 323, "score": 0.9999232888221741, "start": 294, "tag": "EMAIL", "value": "test.registration@example.com" }, { "context": "l \"test.registration@example.com\"\n :first_name \"Testing\"\n :last_name \"Registration\"\n :language \"fi\"\n ", "end": 348, "score": 0.9997933506965637, "start": 341, "tag": "NAME", "value": "Testing" }, { "context": "mple.com\"\n :first_name \"Testing\"\n :last_name \"Registration\"\n :language \"fi\"\n :country \"FI\"\n :password ", "end": 377, "score": 0.9997778534889221, "start": 365, "tag": "NAME", "value": "Registration" }, { "context": "\n :language \"fi\"\n :country \"FI\"\n :password \"123456\"\n :password_verify \"123456\"\n :token \"register", "end": 434, "score": 0.9993534684181213, "start": 428, "tag": "PASSWORD", "value": "123456" }, { "context": "y \"FI\"\n :password \"123456\"\n :password_verify \"123456\"\n :token \"registerationToken\"})\n\n(t/deftest-ctx", "end": 463, "score": 0.9993808269500732, "start": 457, "tag": "PASSWORD", "value": "123456" }, { "context": " \"123456\"\n :password_verify \"123456\"\n :token \"registerationToken\"})\n\n(t/deftest-ctx main-test [ctx]\n\n #_ (testing ", "end": 494, "score": 0.7043641805648804, "start": 476, "tag": "PASSWORD", "value": "registerationToken" } ]
test/clj/salava/user/registration_test.clj
Vilikkki/salava
0
(ns salava.user.registration-test (:require [salava.user.db :as db] [clojure.test :refer :all] [salava.core.migrator :as migrator] [salava.core.test-utils :as t])) (def test-user {:id 1 :role "user" :private false}) (def registration-data { :email "test.registration@example.com" :first_name "Testing" :last_name "Registration" :language "fi" :country "FI" :password "123456" :password_verify "123456" :token "registerationToken"}) (t/deftest-ctx main-test [ctx] #_ (testing "register user" (let [connect (db/register-user ctx (:email registration-data) (:first_name registration-data) (:last_name registration-data) (:country registration-data) (:language registration-data) (:password registration-data) (:password-verify registration-data))] (is (= "success" (:status connect))) (is (= "" (:message connect))) )) #_ (testing "register user again with same data" (let [connect (db/register-user ctx (:email registration-data) (:first_name registration-data) (:last_name registration-data) (:country registration-data) (:language registration-data) (:password registration-data) (:password-verify registration-data))] (is (= "error" (:status connect))) (is (= "user/Enteredaddressisalready" (:message connect))) )) (testing "get current state of configs" (let [{:keys [status body]} (t/test-api-request ctx :post "/obpv1/user/register" {:params registration-data})] (is (= 200 status)) ;; (is (= "success" status)) )) ) ;(migrator/run-test-reset) (migrator/reset-seeds (migrator/test-config))
60761
(ns salava.user.registration-test (:require [salava.user.db :as db] [clojure.test :refer :all] [salava.core.migrator :as migrator] [salava.core.test-utils :as t])) (def test-user {:id 1 :role "user" :private false}) (def registration-data { :email "<EMAIL>" :first_name "<NAME>" :last_name "<NAME>" :language "fi" :country "FI" :password "<PASSWORD>" :password_verify "<PASSWORD>" :token "<PASSWORD>"}) (t/deftest-ctx main-test [ctx] #_ (testing "register user" (let [connect (db/register-user ctx (:email registration-data) (:first_name registration-data) (:last_name registration-data) (:country registration-data) (:language registration-data) (:password registration-data) (:password-verify registration-data))] (is (= "success" (:status connect))) (is (= "" (:message connect))) )) #_ (testing "register user again with same data" (let [connect (db/register-user ctx (:email registration-data) (:first_name registration-data) (:last_name registration-data) (:country registration-data) (:language registration-data) (:password registration-data) (:password-verify registration-data))] (is (= "error" (:status connect))) (is (= "user/Enteredaddressisalready" (:message connect))) )) (testing "get current state of configs" (let [{:keys [status body]} (t/test-api-request ctx :post "/obpv1/user/register" {:params registration-data})] (is (= 200 status)) ;; (is (= "success" status)) )) ) ;(migrator/run-test-reset) (migrator/reset-seeds (migrator/test-config))
true
(ns salava.user.registration-test (:require [salava.user.db :as db] [clojure.test :refer :all] [salava.core.migrator :as migrator] [salava.core.test-utils :as t])) (def test-user {:id 1 :role "user" :private false}) (def registration-data { :email "PI:EMAIL:<EMAIL>END_PI" :first_name "PI:NAME:<NAME>END_PI" :last_name "PI:NAME:<NAME>END_PI" :language "fi" :country "FI" :password "PI:PASSWORD:<PASSWORD>END_PI" :password_verify "PI:PASSWORD:<PASSWORD>END_PI" :token "PI:PASSWORD:<PASSWORD>END_PI"}) (t/deftest-ctx main-test [ctx] #_ (testing "register user" (let [connect (db/register-user ctx (:email registration-data) (:first_name registration-data) (:last_name registration-data) (:country registration-data) (:language registration-data) (:password registration-data) (:password-verify registration-data))] (is (= "success" (:status connect))) (is (= "" (:message connect))) )) #_ (testing "register user again with same data" (let [connect (db/register-user ctx (:email registration-data) (:first_name registration-data) (:last_name registration-data) (:country registration-data) (:language registration-data) (:password registration-data) (:password-verify registration-data))] (is (= "error" (:status connect))) (is (= "user/Enteredaddressisalready" (:message connect))) )) (testing "get current state of configs" (let [{:keys [status body]} (t/test-api-request ctx :post "/obpv1/user/register" {:params registration-data})] (is (= 200 status)) ;; (is (= "success" status)) )) ) ;(migrator/run-test-reset) (migrator/reset-seeds (migrator/test-config))
[ { "context": ";; Copyright 2014-2020 King\n;; Copyright 2009-2014 Ragnar Svensson, Christian Murray\n;; Licensed under the Defold Li", "end": 111, "score": 0.9998126029968262, "start": 96, "tag": "NAME", "value": "Ragnar Svensson" }, { "context": "-2020 King\n;; Copyright 2009-2014 Ragnar Svensson, Christian Murray\n;; Licensed under the Defold License version 1.0 ", "end": 129, "score": 0.9998201131820679, "start": 113, "tag": "NAME", "value": "Christian Murray" } ]
editor/src/clj/editor/scene_visibility.clj
cmarincia/defold
0
;; Copyright 2020-2022 The Defold Foundation ;; Copyright 2014-2020 King ;; Copyright 2009-2014 Ragnar Svensson, Christian Murray ;; Licensed under the Defold License version 1.0 (the "License"); you may not use ;; this file except in compliance with the License. ;; ;; You may obtain a copy of the License, together with FAQs at ;; https://www.defold.com/license ;; ;; Unless required by applicable law or agreed to in writing, software distributed ;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR ;; CONDITIONS OF ANY KIND, either express or implied. See the License for the ;; specific language governing permissions and limitations under the License. (ns editor.scene-visibility (:require [clojure.set :as set] [dynamo.graph :as g] [editor.handler :as handler] [editor.keymap :as keymap] [editor.system :as system] [editor.types :as types] [editor.ui :as ui] [editor.ui.popup :as popup] [editor.util :as util] [internal.util :as iutil] [schema.core :as s]) (:import [javafx.geometry Insets Point2D Pos] [javafx.scene Parent] [javafx.scene.control CheckBox Label PopupControl Separator] [javafx.scene.layout HBox Priority Region StackPane VBox])) (set! *warn-on-reflection* true) (def ^:private renderable-tag-toggles-info (cond-> [{:label "Collision Shapes" :tag :collision-shape} #_{:label "GUI Elements" :tag :gui} ; This tag exists, but we decided to hide it and put in granular control instead. Add back if we make the toggles hierarchical? {:label "GUI Bounds" :tag :gui-bounds} {:label "GUI Shapes" :tag :gui-shape} {:label "GUI Particle Effects" :tag :gui-particlefx} {:label "GUI Spine Scenes" :tag :gui-spine} {:label "GUI Text" :tag :gui-text} {:label "Models" :tag :model} {:label "Particle Effects" :tag :particlefx} {:label "Skeletons" :tag :skeleton} {:label "Spine Scenes" :tag :spine} {:label "Sprites" :tag :sprite} {:label "Text" :tag :text} {:label "Tile Maps" :tag :tilemap}] (system/defold-dev?) 
(into [{:label :separator} {:label "Scene Visibility Bounds" :tag :dev-visibility-bounds :appear-filtered false}]))) (def ^:private appear-filtered-renderable-tags (into #{} (keep (fn [{:keys [appear-filtered tag] :or {appear-filtered true}}] (when appear-filtered tag))) renderable-tag-toggles-info)) (defn filters-appear-active? "Returns true if some parts of the scene are hidden due to visibility filters. Does not consider scene elements that you'd not typically expect to be there, such as debug rendering of bounding volumes, etc." ([scene-visibility] (g/with-auto-evaluation-context evaluation-context (filters-appear-active? scene-visibility evaluation-context))) ([scene-visibility evaluation-context] (boolean (and (g/node-value scene-visibility :visibility-filters-enabled? evaluation-context) (some appear-filtered-renderable-tags (g/node-value scene-visibility :filtered-renderable-tags evaluation-context)))))) ;; ----------------------------------------------------------------------------- ;; SceneVisibilityNode ;; ----------------------------------------------------------------------------- ;; ;; A SceneHideHistoryNode manages visibility for a particular scene-resource-id. ;; Objects are identified with outline name paths, which are vectors of string ;; ids from the outline. We use the term "name" here to avoid confusing these ;; with node ids. These are not necessarily names or even strings, but currently ;; the schema enforces strings. ;; ;; The individual tokens are named "node-outline-key" elsewhere, but we use the ;; term "outline name path" in this file to distinguish these from ;; node-outline-key-paths, which include the resource node id at the beginning. (defn outline-name-path? [value] (and (vector? value) (every? string? 
value))) (def ^:private TOutlineNamePaths #{(s/pred outline-name-path?)}) (def ^:private THideHistory [(s/both TOutlineNamePaths (s/pred seq))]) (g/deftype HideHistory THideHistory) (g/deftype OutlineNamePaths TOutlineNamePaths) (g/deftype OutlineNamePathsByBool {s/Bool TOutlineNamePaths}) (g/deftype OutlineNamePathsByNodeID {s/Int (s/both TOutlineNamePaths (s/pred seq))}) (g/deftype SceneHideHistoryData [(s/one s/Int "scene-resource-node") (s/one THideHistory "hide-history")]) (defn- scene-outline-name-paths ([scene] (scene-outline-name-paths [] scene)) ([outline-name-path {:keys [node-id children] :as _scene}] (mapcat (fn [{child-node-id :node-id child-node-outline-key :node-outline-key :as child-scene}] (when (some? child-node-outline-key) (if (= node-id child-node-id) (scene-outline-name-paths outline-name-path child-scene) (let [child-outline-name-path (conj outline-name-path child-node-outline-key)] (cons child-outline-name-path (scene-outline-name-paths child-outline-name-path child-scene)))))) children))) (def ^:private outline-selection-entry->outline-name-path (comp not-empty vec next :node-outline-key-path)) (g/defnode SceneVisibilityNode (property visibility-filters-enabled? g/Bool (default true)) (property filtered-renderable-tags types/RenderableTags (default #{:dev-visibility-bounds})) (input active-resource-node g/NodeID) (input active-scene g/Any :substitute nil) (input outline-selection g/Any :substitute nil) (input scene-hide-history-datas SceneHideHistoryData :array) (output active-scene-resource-node g/NodeID (g/fnk [_basis active-resource-node] (when (some? active-resource-node) (when (g/has-output? 
(g/node-type* _basis active-resource-node) :scene) active-resource-node)))) (output hidden-outline-name-paths-by-scene-resource-node OutlineNamePathsByNodeID :cached (g/fnk [scene-hide-history-datas] (into {} (keep (fn [[scene-resource-node hide-history]] (when-some [hidden-outline-name-paths (not-empty (apply set/union hide-history))] [scene-resource-node hidden-outline-name-paths]))) scene-hide-history-datas))) (output hidden-renderable-tags types/RenderableTags :cached (g/fnk [filtered-renderable-tags visibility-filters-enabled?] (if visibility-filters-enabled? filtered-renderable-tags (set/intersection filtered-renderable-tags #{:grid :outline})))) (output hidden-node-outline-key-paths types/NodeOutlineKeyPaths :cached (g/fnk [hidden-outline-name-paths-by-scene-resource-node] (into #{} (mapcat (fn [[scene-resource-node hidden-outline-name-paths]] (map (partial into [scene-resource-node]) hidden-outline-name-paths))) hidden-outline-name-paths-by-scene-resource-node))) (output hidden-outline-name-paths OutlineNamePaths (g/fnk [active-scene-resource-node hidden-outline-name-paths-by-scene-resource-node] (hidden-outline-name-paths-by-scene-resource-node active-scene-resource-node))) (output outline-name-paths-by-selection-state OutlineNamePathsByBool :cached (g/fnk [active-scene outline-selection] (let [selected-outline-name-paths (into [] (keep outline-selection-entry->outline-name-path) outline-selection) outline-name-path-below-selection? (fn [outline-name-path] (boolean (some #(iutil/seq-starts-with? outline-name-path %) selected-outline-name-paths)))] (iutil/group-into {} #{} outline-name-path-below-selection? 
(scene-outline-name-paths active-scene))))) (output selected-outline-name-paths OutlineNamePaths (g/fnk [outline-name-paths-by-selection-state] (outline-name-paths-by-selection-state true))) (output unselected-outline-name-paths OutlineNamePaths (g/fnk [outline-name-paths-by-selection-state] (outline-name-paths-by-selection-state false))) (output unselected-hideable-outline-name-paths OutlineNamePaths :cached (g/fnk [hidden-outline-name-paths unselected-outline-name-paths] (not-empty (set/difference unselected-outline-name-paths hidden-outline-name-paths)))) (output selected-hideable-outline-name-paths OutlineNamePaths :cached (g/fnk [hidden-outline-name-paths selected-outline-name-paths] (not-empty (set/difference selected-outline-name-paths hidden-outline-name-paths)))) (output selected-showable-outline-name-paths OutlineNamePaths :cached (g/fnk [hidden-outline-name-paths selected-outline-name-paths] (not-empty (set/intersection selected-outline-name-paths hidden-outline-name-paths)))) (output last-hidden-outline-name-paths OutlineNamePaths :cached (g/fnk [active-scene-resource-node scene-hide-history-datas] (peek (some (fn [[scene-resource-node hide-history]] (when (= active-scene-resource-node scene-resource-node) hide-history)) scene-hide-history-datas))))) (defn make-scene-visibility-node! [view-graph] (g/make-node! 
view-graph SceneVisibilityNode)) ;; ----------------------------------------------------------------------------- ;; Per-Object Visibility ;; ----------------------------------------------------------------------------- (g/defnode SceneHideHistoryNode (property hide-history HideHistory) (input scene-resource-node g/NodeID) (output scene-hide-history-data SceneHideHistoryData (g/fnk [hide-history scene-resource-node] [scene-resource-node hide-history]))) (defn- find-scene-hide-history-node [scene-visibility scene-resource-node] (some (fn [[scene-hide-history-node]] (when (some-> (g/node-feeding-into scene-hide-history-node :scene-resource-node) (= scene-resource-node)) scene-hide-history-node)) (g/sources-of scene-visibility :scene-hide-history-datas))) (defn- show-outline-name-paths! [scene-visibility outline-name-paths] (assert (set? (not-empty outline-name-paths))) (assert (every? outline-name-path? outline-name-paths)) (let [scene-resource-node (g/node-value scene-visibility :active-scene-resource-node) scene-hide-history-node (find-scene-hide-history-node scene-visibility scene-resource-node)] ;; Remove the now-visible nodes from the hide history. This ensures the Show ;; Last Hidden Objects command works as expected if the user manually shows ;; nodes she has previously hidden. (g/update-property! scene-hide-history-node :hide-history (fn [hide-history] (into [] (keep (fn [hidden-outline-name-paths] (not-empty (set/difference hidden-outline-name-paths outline-name-paths)))) hide-history))) ;; Remove the SceneHideHistoryNode if its history is now empty. (when (empty? (g/node-value scene-hide-history-node :hide-history)) (g/delete-node! scene-hide-history-node)))) (defn- hide-outline-name-paths! [scene-visibility outline-name-paths] (assert (set? (not-empty outline-name-paths))) (assert (every? outline-name-path? 
outline-name-paths)) (let [scene-resource-node (g/node-value scene-visibility :active-scene-resource-node) scene-hide-history-node (find-scene-hide-history-node scene-visibility scene-resource-node)] (if (some? scene-hide-history-node) (g/update-property! scene-hide-history-node :hide-history conj outline-name-paths) (g/transact (g/make-nodes (g/node-id->graph-id scene-visibility) [scene-hide-history-node [SceneHideHistoryNode :hide-history [outline-name-paths]]] (g/connect scene-resource-node :_node-id scene-hide-history-node :scene-resource-node) (g/connect scene-hide-history-node :scene-hide-history-data scene-visibility :scene-hide-history-datas)))))) (handler/defhandler :hide-unselected :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :unselected-hideable-outline-name-paths evaluation-context)) (run [scene-visibility] (hide-outline-name-paths! scene-visibility (g/node-value scene-visibility :unselected-hideable-outline-name-paths)))) (handler/defhandler :hide-selected :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :selected-hideable-outline-name-paths evaluation-context)) (run [scene-visibility] (hide-outline-name-paths! scene-visibility (g/node-value scene-visibility :selected-hideable-outline-name-paths)))) (handler/defhandler :show-selected :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :selected-showable-outline-name-paths evaluation-context)) (run [scene-visibility] (show-outline-name-paths! 
scene-visibility (g/node-value scene-visibility :selected-showable-outline-name-paths)))) (handler/defhandler :show-last-hidden :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :last-hidden-outline-name-paths evaluation-context)) (run [scene-visibility] (show-outline-name-paths! scene-visibility (g/node-value scene-visibility :last-hidden-outline-name-paths)))) (handler/defhandler :show-all-hidden :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :hidden-outline-name-paths evaluation-context)) (run [scene-visibility] (show-outline-name-paths! scene-visibility (g/node-value scene-visibility :hidden-outline-name-paths)))) ;; ----------------------------------------------------------------------------- ;; Visibility Filters ;; ----------------------------------------------------------------------------- (defn- make-toggle [{:keys [label acc on-change]}] (let [check-box (CheckBox.) label (Label. label) acc (Label. acc)] (ui/on-action! check-box (fn [_] (on-change (ui/value check-box)))) (ui/remove-style! check-box "check-box") (ui/add-style! check-box "slide-switch") (HBox/setHgrow label Priority/ALWAYS) (ui/add-style! label "slide-switch-label") (when (util/is-mac-os?) (.setStyle acc "-fx-font-family: 'Lucida Grande';")) (ui/add-style! acc "accelerator-label") (let [hbox (doto (HBox.) (.setAlignment Pos/CENTER_LEFT) (ui/on-click! (fn [_] (ui/value! check-box (not (ui/value check-box))) (on-change (ui/value check-box)))) (ui/children! [check-box label acc])) update (fn [checked enabled] (ui/enable! hbox enabled) (ui/value! check-box checked))] [hbox update]))) (defn set-tag-visibility! [scene-visibility tag visible] (g/update-property! 
scene-visibility :filtered-renderable-tags (if visible disj conj) tag)) (defn toggle-tag-visibility! [scene-visibility tag] (g/update-property! scene-visibility :filtered-renderable-tags (fn [tags] (if (contains? tags tag) (disj tags tag) (conj tags tag))))) (defn set-filters-enabled! [scene-visibility enabled] (g/set-property! scene-visibility :visibility-filters-enabled? enabled)) (defn toggle-filters-enabled! [scene-visibility] (g/update-property! scene-visibility :visibility-filters-enabled? not)) (defn- make-visibility-toggles-list ^Region [scene-visibility] (let [make-control (fn [{:keys [label tag]}] (if (= :separator label) [(Separator.) nil] (let [[control update-fn] (make-toggle {:label label :acc "" :on-change (fn [checked] (set-tag-visibility! scene-visibility tag checked))}) update-from-hidden-tags (fn [hidden-tags enabled] (let [checked (not (contains? hidden-tags tag))] (update-fn checked enabled)))] [control update-from-hidden-tags]))) tag-toggles (mapv make-control renderable-tag-toggles-info) tag-toggle-update-fns (into [] (keep second) tag-toggles) update-tag-toggles (fn [hidden-tags enabled] (doseq [update-fn tag-toggle-update-fns] (update-fn hidden-tags enabled))) [filters-enabled-control filters-enabled-update-fn] (make-toggle {:label "Visibility Filters" :acc (keymap/key-combo->display-text "Shift+Shortcut+I") :on-change (fn [checked] (set-filters-enabled! scene-visibility checked))}) container (doto (StackPane.) (.setMinWidth 230) (ui/children! [(doto (Region.) ;; Move drop shadow down a bit so it does not interfere with toolbar clicks. (StackPane/setMargin (Insets. 16.0 0.0 0.0 0.0)) (ui/add-style! "visibility-toggles-shadow")) (doto (VBox.) (ui/add-style! "visibility-toggles-list") (ui/children! (into [filters-enabled-control] (map first) tag-toggles)))])) update-fn (fn [] (let [filtered-tags (g/node-value scene-visibility :filtered-renderable-tags) visibility-filters-enabled? 
(g/node-value scene-visibility :visibility-filters-enabled?)] (filters-enabled-update-fn visibility-filters-enabled? true) (update-tag-toggles filtered-tags visibility-filters-enabled?)))] (ui/add-style! filters-enabled-control "first-entry") [container update-fn])) (defn- pref-popup-position ^Point2D [^Parent container width y-gap] (let [container-screen-bounds (.localToScreen container (.getBoundsInLocal container))] (Point2D. (- (.getMaxX container-screen-bounds) width 10) (+ (.getMaxY container-screen-bounds) y-gap)))) (defn show-visibility-settings! [^Parent owner scene-visibility] (if-let [popup ^PopupControl (ui/user-data owner ::popup)] (.hide popup) (let [[^Region toggles update-fn] (make-visibility-toggles-list scene-visibility) popup (popup/make-popup owner toggles) anchor (pref-popup-position owner (.getMinWidth toggles) -5) refresh-timer (ui/->timer 13 "refresh-tag-filters" (fn [_ _] (update-fn)))] (update-fn) (ui/user-data! owner ::popup popup) (ui/on-closed! popup (fn [_] (ui/user-data! owner ::popup nil))) (ui/timer-stop-on-closed! popup refresh-timer) (ui/timer-start! refresh-timer) (.show popup owner (.getX anchor) (.getY anchor))))) (defn settings-visible? [^Parent owner] (some? (ui/user-data owner ::popup))) (handler/defhandler :toggle-visibility-filters :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (run [scene-visibility] (toggle-filters-enabled! scene-visibility))) (handler/defhandler :toggle-component-guides :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (run [scene-visibility] (toggle-tag-visibility! scene-visibility :outline))) (handler/defhandler :toggle-grid :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (run [scene-visibility] (toggle-tag-visibility! 
scene-visibility :grid)))
121046
;; Copyright 2020-2022 The Defold Foundation ;; Copyright 2014-2020 King ;; Copyright 2009-2014 <NAME>, <NAME> ;; Licensed under the Defold License version 1.0 (the "License"); you may not use ;; this file except in compliance with the License. ;; ;; You may obtain a copy of the License, together with FAQs at ;; https://www.defold.com/license ;; ;; Unless required by applicable law or agreed to in writing, software distributed ;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR ;; CONDITIONS OF ANY KIND, either express or implied. See the License for the ;; specific language governing permissions and limitations under the License. (ns editor.scene-visibility (:require [clojure.set :as set] [dynamo.graph :as g] [editor.handler :as handler] [editor.keymap :as keymap] [editor.system :as system] [editor.types :as types] [editor.ui :as ui] [editor.ui.popup :as popup] [editor.util :as util] [internal.util :as iutil] [schema.core :as s]) (:import [javafx.geometry Insets Point2D Pos] [javafx.scene Parent] [javafx.scene.control CheckBox Label PopupControl Separator] [javafx.scene.layout HBox Priority Region StackPane VBox])) (set! *warn-on-reflection* true) (def ^:private renderable-tag-toggles-info (cond-> [{:label "Collision Shapes" :tag :collision-shape} #_{:label "GUI Elements" :tag :gui} ; This tag exists, but we decided to hide it and put in granular control instead. Add back if we make the toggles hierarchical? {:label "GUI Bounds" :tag :gui-bounds} {:label "GUI Shapes" :tag :gui-shape} {:label "GUI Particle Effects" :tag :gui-particlefx} {:label "GUI Spine Scenes" :tag :gui-spine} {:label "GUI Text" :tag :gui-text} {:label "Models" :tag :model} {:label "Particle Effects" :tag :particlefx} {:label "Skeletons" :tag :skeleton} {:label "Spine Scenes" :tag :spine} {:label "Sprites" :tag :sprite} {:label "Text" :tag :text} {:label "Tile Maps" :tag :tilemap}] (system/defold-dev?) 
(into [{:label :separator} {:label "Scene Visibility Bounds" :tag :dev-visibility-bounds :appear-filtered false}]))) (def ^:private appear-filtered-renderable-tags (into #{} (keep (fn [{:keys [appear-filtered tag] :or {appear-filtered true}}] (when appear-filtered tag))) renderable-tag-toggles-info)) (defn filters-appear-active? "Returns true if some parts of the scene are hidden due to visibility filters. Does not consider scene elements that you'd not typically expect to be there, such as debug rendering of bounding volumes, etc." ([scene-visibility] (g/with-auto-evaluation-context evaluation-context (filters-appear-active? scene-visibility evaluation-context))) ([scene-visibility evaluation-context] (boolean (and (g/node-value scene-visibility :visibility-filters-enabled? evaluation-context) (some appear-filtered-renderable-tags (g/node-value scene-visibility :filtered-renderable-tags evaluation-context)))))) ;; ----------------------------------------------------------------------------- ;; SceneVisibilityNode ;; ----------------------------------------------------------------------------- ;; ;; A SceneHideHistoryNode manages visibility for a particular scene-resource-id. ;; Objects are identified with outline name paths, which are vectors of string ;; ids from the outline. We use the term "name" here to avoid confusing these ;; with node ids. These are not necessarily names or even strings, but currently ;; the schema enforces strings. ;; ;; The individual tokens are named "node-outline-key" elsewhere, but we use the ;; term "outline name path" in this file to distinguish these from ;; node-outline-key-paths, which include the resource node id at the beginning. (defn outline-name-path? [value] (and (vector? value) (every? string? 
value))) (def ^:private TOutlineNamePaths #{(s/pred outline-name-path?)}) (def ^:private THideHistory [(s/both TOutlineNamePaths (s/pred seq))]) (g/deftype HideHistory THideHistory) (g/deftype OutlineNamePaths TOutlineNamePaths) (g/deftype OutlineNamePathsByBool {s/Bool TOutlineNamePaths}) (g/deftype OutlineNamePathsByNodeID {s/Int (s/both TOutlineNamePaths (s/pred seq))}) (g/deftype SceneHideHistoryData [(s/one s/Int "scene-resource-node") (s/one THideHistory "hide-history")]) (defn- scene-outline-name-paths ([scene] (scene-outline-name-paths [] scene)) ([outline-name-path {:keys [node-id children] :as _scene}] (mapcat (fn [{child-node-id :node-id child-node-outline-key :node-outline-key :as child-scene}] (when (some? child-node-outline-key) (if (= node-id child-node-id) (scene-outline-name-paths outline-name-path child-scene) (let [child-outline-name-path (conj outline-name-path child-node-outline-key)] (cons child-outline-name-path (scene-outline-name-paths child-outline-name-path child-scene)))))) children))) (def ^:private outline-selection-entry->outline-name-path (comp not-empty vec next :node-outline-key-path)) (g/defnode SceneVisibilityNode (property visibility-filters-enabled? g/Bool (default true)) (property filtered-renderable-tags types/RenderableTags (default #{:dev-visibility-bounds})) (input active-resource-node g/NodeID) (input active-scene g/Any :substitute nil) (input outline-selection g/Any :substitute nil) (input scene-hide-history-datas SceneHideHistoryData :array) (output active-scene-resource-node g/NodeID (g/fnk [_basis active-resource-node] (when (some? active-resource-node) (when (g/has-output? 
(g/node-type* _basis active-resource-node) :scene) active-resource-node)))) (output hidden-outline-name-paths-by-scene-resource-node OutlineNamePathsByNodeID :cached (g/fnk [scene-hide-history-datas] (into {} (keep (fn [[scene-resource-node hide-history]] (when-some [hidden-outline-name-paths (not-empty (apply set/union hide-history))] [scene-resource-node hidden-outline-name-paths]))) scene-hide-history-datas))) (output hidden-renderable-tags types/RenderableTags :cached (g/fnk [filtered-renderable-tags visibility-filters-enabled?] (if visibility-filters-enabled? filtered-renderable-tags (set/intersection filtered-renderable-tags #{:grid :outline})))) (output hidden-node-outline-key-paths types/NodeOutlineKeyPaths :cached (g/fnk [hidden-outline-name-paths-by-scene-resource-node] (into #{} (mapcat (fn [[scene-resource-node hidden-outline-name-paths]] (map (partial into [scene-resource-node]) hidden-outline-name-paths))) hidden-outline-name-paths-by-scene-resource-node))) (output hidden-outline-name-paths OutlineNamePaths (g/fnk [active-scene-resource-node hidden-outline-name-paths-by-scene-resource-node] (hidden-outline-name-paths-by-scene-resource-node active-scene-resource-node))) (output outline-name-paths-by-selection-state OutlineNamePathsByBool :cached (g/fnk [active-scene outline-selection] (let [selected-outline-name-paths (into [] (keep outline-selection-entry->outline-name-path) outline-selection) outline-name-path-below-selection? (fn [outline-name-path] (boolean (some #(iutil/seq-starts-with? outline-name-path %) selected-outline-name-paths)))] (iutil/group-into {} #{} outline-name-path-below-selection? 
(scene-outline-name-paths active-scene))))) (output selected-outline-name-paths OutlineNamePaths (g/fnk [outline-name-paths-by-selection-state] (outline-name-paths-by-selection-state true))) (output unselected-outline-name-paths OutlineNamePaths (g/fnk [outline-name-paths-by-selection-state] (outline-name-paths-by-selection-state false))) (output unselected-hideable-outline-name-paths OutlineNamePaths :cached (g/fnk [hidden-outline-name-paths unselected-outline-name-paths] (not-empty (set/difference unselected-outline-name-paths hidden-outline-name-paths)))) (output selected-hideable-outline-name-paths OutlineNamePaths :cached (g/fnk [hidden-outline-name-paths selected-outline-name-paths] (not-empty (set/difference selected-outline-name-paths hidden-outline-name-paths)))) (output selected-showable-outline-name-paths OutlineNamePaths :cached (g/fnk [hidden-outline-name-paths selected-outline-name-paths] (not-empty (set/intersection selected-outline-name-paths hidden-outline-name-paths)))) (output last-hidden-outline-name-paths OutlineNamePaths :cached (g/fnk [active-scene-resource-node scene-hide-history-datas] (peek (some (fn [[scene-resource-node hide-history]] (when (= active-scene-resource-node scene-resource-node) hide-history)) scene-hide-history-datas))))) (defn make-scene-visibility-node! [view-graph] (g/make-node! 
view-graph SceneVisibilityNode)) ;; ----------------------------------------------------------------------------- ;; Per-Object Visibility ;; ----------------------------------------------------------------------------- (g/defnode SceneHideHistoryNode (property hide-history HideHistory) (input scene-resource-node g/NodeID) (output scene-hide-history-data SceneHideHistoryData (g/fnk [hide-history scene-resource-node] [scene-resource-node hide-history]))) (defn- find-scene-hide-history-node [scene-visibility scene-resource-node] (some (fn [[scene-hide-history-node]] (when (some-> (g/node-feeding-into scene-hide-history-node :scene-resource-node) (= scene-resource-node)) scene-hide-history-node)) (g/sources-of scene-visibility :scene-hide-history-datas))) (defn- show-outline-name-paths! [scene-visibility outline-name-paths] (assert (set? (not-empty outline-name-paths))) (assert (every? outline-name-path? outline-name-paths)) (let [scene-resource-node (g/node-value scene-visibility :active-scene-resource-node) scene-hide-history-node (find-scene-hide-history-node scene-visibility scene-resource-node)] ;; Remove the now-visible nodes from the hide history. This ensures the Show ;; Last Hidden Objects command works as expected if the user manually shows ;; nodes she has previously hidden. (g/update-property! scene-hide-history-node :hide-history (fn [hide-history] (into [] (keep (fn [hidden-outline-name-paths] (not-empty (set/difference hidden-outline-name-paths outline-name-paths)))) hide-history))) ;; Remove the SceneHideHistoryNode if its history is now empty. (when (empty? (g/node-value scene-hide-history-node :hide-history)) (g/delete-node! scene-hide-history-node)))) (defn- hide-outline-name-paths! [scene-visibility outline-name-paths] (assert (set? (not-empty outline-name-paths))) (assert (every? outline-name-path? 
outline-name-paths)) (let [scene-resource-node (g/node-value scene-visibility :active-scene-resource-node) scene-hide-history-node (find-scene-hide-history-node scene-visibility scene-resource-node)] (if (some? scene-hide-history-node) (g/update-property! scene-hide-history-node :hide-history conj outline-name-paths) (g/transact (g/make-nodes (g/node-id->graph-id scene-visibility) [scene-hide-history-node [SceneHideHistoryNode :hide-history [outline-name-paths]]] (g/connect scene-resource-node :_node-id scene-hide-history-node :scene-resource-node) (g/connect scene-hide-history-node :scene-hide-history-data scene-visibility :scene-hide-history-datas)))))) (handler/defhandler :hide-unselected :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :unselected-hideable-outline-name-paths evaluation-context)) (run [scene-visibility] (hide-outline-name-paths! scene-visibility (g/node-value scene-visibility :unselected-hideable-outline-name-paths)))) (handler/defhandler :hide-selected :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :selected-hideable-outline-name-paths evaluation-context)) (run [scene-visibility] (hide-outline-name-paths! scene-visibility (g/node-value scene-visibility :selected-hideable-outline-name-paths)))) (handler/defhandler :show-selected :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :selected-showable-outline-name-paths evaluation-context)) (run [scene-visibility] (show-outline-name-paths! 
scene-visibility (g/node-value scene-visibility :selected-showable-outline-name-paths)))) (handler/defhandler :show-last-hidden :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :last-hidden-outline-name-paths evaluation-context)) (run [scene-visibility] (show-outline-name-paths! scene-visibility (g/node-value scene-visibility :last-hidden-outline-name-paths)))) (handler/defhandler :show-all-hidden :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :hidden-outline-name-paths evaluation-context)) (run [scene-visibility] (show-outline-name-paths! scene-visibility (g/node-value scene-visibility :hidden-outline-name-paths)))) ;; ----------------------------------------------------------------------------- ;; Visibility Filters ;; ----------------------------------------------------------------------------- (defn- make-toggle [{:keys [label acc on-change]}] (let [check-box (CheckBox.) label (Label. label) acc (Label. acc)] (ui/on-action! check-box (fn [_] (on-change (ui/value check-box)))) (ui/remove-style! check-box "check-box") (ui/add-style! check-box "slide-switch") (HBox/setHgrow label Priority/ALWAYS) (ui/add-style! label "slide-switch-label") (when (util/is-mac-os?) (.setStyle acc "-fx-font-family: 'Lucida Grande';")) (ui/add-style! acc "accelerator-label") (let [hbox (doto (HBox.) (.setAlignment Pos/CENTER_LEFT) (ui/on-click! (fn [_] (ui/value! check-box (not (ui/value check-box))) (on-change (ui/value check-box)))) (ui/children! [check-box label acc])) update (fn [checked enabled] (ui/enable! hbox enabled) (ui/value! check-box checked))] [hbox update]))) (defn set-tag-visibility! [scene-visibility tag visible] (g/update-property! 
;; NOTE(review): this chunk stores many top-level forms collapsed onto single
;; physical lines (original newlines were lost), so inline ";;" comments already
;; present inside a line comment out the remainder of that physical line.
;; Comments below are added only as standalone lines; code tokens are untouched.
;;
;; Next line: tail of a `:filtered-renderable-tags` updater whose head lies
;; outside this chunk, followed by:
;;   toggle-tag-visibility!  - add/remove one renderable tag in the filter set.
;;   set-filters-enabled! / toggle-filters-enabled! - flip the master
;;     :visibility-filters-enabled? property on the scene-visibility node.
;;   make-visibility-toggles-list (continues on the following line) - builds the
;;     visibility popup UI: one slide-switch per entry of
;;     renderable-tag-toggles-info plus a master "Visibility Filters" toggle,
;;     stacked in a VBox inside a StackPane; returns [container update-fn].
scene-visibility :filtered-renderable-tags (if visible disj conj) tag)) (defn toggle-tag-visibility! [scene-visibility tag] (g/update-property! scene-visibility :filtered-renderable-tags (fn [tags] (if (contains? tags tag) (disj tags tag) (conj tags tag))))) (defn set-filters-enabled! [scene-visibility enabled] (g/set-property! scene-visibility :visibility-filters-enabled? enabled)) (defn toggle-filters-enabled! [scene-visibility] (g/update-property! scene-visibility :visibility-filters-enabled? not)) (defn- make-visibility-toggles-list ^Region [scene-visibility] (let [make-control (fn [{:keys [label tag]}] (if (= :separator label) [(Separator.) nil] (let [[control update-fn] (make-toggle {:label label :acc "" :on-change (fn [checked] (set-tag-visibility! scene-visibility tag checked))}) update-from-hidden-tags (fn [hidden-tags enabled] (let [checked (not (contains? hidden-tags tag))] (update-fn checked enabled)))] [control update-from-hidden-tags]))) tag-toggles (mapv make-control renderable-tag-toggles-info) tag-toggle-update-fns (into [] (keep second) tag-toggles) update-tag-toggles (fn [hidden-tags enabled] (doseq [update-fn tag-toggle-update-fns] (update-fn hidden-tags enabled))) [filters-enabled-control filters-enabled-update-fn] (make-toggle {:label "Visibility Filters" :acc (keymap/key-combo->display-text "Shift+Shortcut+I") :on-change (fn [checked] (set-filters-enabled! scene-visibility checked))}) container (doto (StackPane.) (.setMinWidth 230) (ui/children! [(doto (Region.) ;; Move drop shadow down a bit so it does not interfere with toolbar clicks. (StackPane/setMargin (Insets. 16.0 0.0 0.0 0.0)) (ui/add-style! "visibility-toggles-shadow")) (doto (VBox.) (ui/add-style! "visibility-toggles-list") (ui/children! (into [filters-enabled-control] (map first) tag-toggles)))])) update-fn (fn [] (let [filtered-tags (g/node-value scene-visibility :filtered-renderable-tags) visibility-filters-enabled? 
;; Next line: closes make-visibility-toggles-list's update-fn (syncs the
;; master toggle and all tag toggles from current graph state), then:
;;   pref-popup-position  - screen Point2D anchoring the popup under `container`.
;;   show-visibility-settings! - toggles the popup; refreshes toggle state via a
;;     13 Hz ui/->timer that is stopped when the popup closes; popup handle is
;;     stashed in the owner's ::popup user-data.
;;   settings-visible?    - true while that ::popup user-data is present.
;;   handler/defhandler forms for :toggle-visibility-filters and
;;   :toggle-component-guides; begins the :toggle-grid handler (closed on the
;;   following line). All three are active only with an active scene resource.
(g/node-value scene-visibility :visibility-filters-enabled?)] (filters-enabled-update-fn visibility-filters-enabled? true) (update-tag-toggles filtered-tags visibility-filters-enabled?)))] (ui/add-style! filters-enabled-control "first-entry") [container update-fn])) (defn- pref-popup-position ^Point2D [^Parent container width y-gap] (let [container-screen-bounds (.localToScreen container (.getBoundsInLocal container))] (Point2D. (- (.getMaxX container-screen-bounds) width 10) (+ (.getMaxY container-screen-bounds) y-gap)))) (defn show-visibility-settings! [^Parent owner scene-visibility] (if-let [popup ^PopupControl (ui/user-data owner ::popup)] (.hide popup) (let [[^Region toggles update-fn] (make-visibility-toggles-list scene-visibility) popup (popup/make-popup owner toggles) anchor (pref-popup-position owner (.getMinWidth toggles) -5) refresh-timer (ui/->timer 13 "refresh-tag-filters" (fn [_ _] (update-fn)))] (update-fn) (ui/user-data! owner ::popup popup) (ui/on-closed! popup (fn [_] (ui/user-data! owner ::popup nil))) (ui/timer-stop-on-closed! popup refresh-timer) (ui/timer-start! refresh-timer) (.show popup owner (.getX anchor) (.getY anchor))))) (defn settings-visible? [^Parent owner] (some? (ui/user-data owner ::popup))) (handler/defhandler :toggle-visibility-filters :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (run [scene-visibility] (toggle-filters-enabled! scene-visibility))) (handler/defhandler :toggle-component-guides :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (run [scene-visibility] (toggle-tag-visibility! scene-visibility :outline))) (handler/defhandler :toggle-grid :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (run [scene-visibility] (toggle-tag-visibility! 
;; Closes the :toggle-grid handler begun on the previous line.
scene-visibility :grid)))
true
;; NOTE(review): full `editor.scene-visibility` namespace, stored with original
;; newlines collapsed; lines 9-11 of this block are a duplicated copy of the
;; chunk that precedes this file in the dump. Comments are added only as
;; standalone lines; all original tokens are byte-identical.
;;
;; Next line: Defold license header, the ns form (graph, handler, keymap, ui,
;; popup, spec and JavaFX imports), reflection warning flag, and the start of
;; renderable-tag-toggles-info - the ordered label/tag pairs shown in the
;; visibility popup (closed on the following line).
;; Copyright 2020-2022 The Defold Foundation ;; Copyright 2014-2020 King ;; Copyright 2009-2014 PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI ;; Licensed under the Defold License version 1.0 (the "License"); you may not use ;; this file except in compliance with the License. ;; ;; You may obtain a copy of the License, together with FAQs at ;; https://www.defold.com/license ;; ;; Unless required by applicable law or agreed to in writing, software distributed ;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR ;; CONDITIONS OF ANY KIND, either express or implied. See the License for the ;; specific language governing permissions and limitations under the License. (ns editor.scene-visibility (:require [clojure.set :as set] [dynamo.graph :as g] [editor.handler :as handler] [editor.keymap :as keymap] [editor.system :as system] [editor.types :as types] [editor.ui :as ui] [editor.ui.popup :as popup] [editor.util :as util] [internal.util :as iutil] [schema.core :as s]) (:import [javafx.geometry Insets Point2D Pos] [javafx.scene Parent] [javafx.scene.control CheckBox Label PopupControl Separator] [javafx.scene.layout HBox Priority Region StackPane VBox])) (set! *warn-on-reflection* true) (def ^:private renderable-tag-toggles-info (cond-> [{:label "Collision Shapes" :tag :collision-shape} #_{:label "GUI Elements" :tag :gui} ; This tag exists, but we decided to hide it and put in granular control instead. Add back if we make the toggles hierarchical? {:label "GUI Bounds" :tag :gui-bounds} {:label "GUI Shapes" :tag :gui-shape} {:label "GUI Particle Effects" :tag :gui-particlefx} {:label "GUI Spine Scenes" :tag :gui-spine} {:label "GUI Text" :tag :gui-text} {:label "Models" :tag :model} {:label "Particle Effects" :tag :particlefx} {:label "Skeletons" :tag :skeleton} {:label "Spine Scenes" :tag :spine} {:label "Sprites" :tag :sprite} {:label "Text" :tag :text} {:label "Tile Maps" :tag :tilemap}] (system/defold-dev?) 
;; Next line: dev-only separator + "Scene Visibility Bounds" toggle appended to
;; the toggles-info; appear-filtered-renderable-tags (the subset of tags whose
;; filtering should count as user-visible, :appear-filtered defaults to true);
;; filters-appear-active? (true when filters are enabled AND some
;; appear-filtered tag is currently filtered); design commentary on outline
;; name paths; begins the outline-name-path? predicate (closed on next line).
(into [{:label :separator} {:label "Scene Visibility Bounds" :tag :dev-visibility-bounds :appear-filtered false}]))) (def ^:private appear-filtered-renderable-tags (into #{} (keep (fn [{:keys [appear-filtered tag] :or {appear-filtered true}}] (when appear-filtered tag))) renderable-tag-toggles-info)) (defn filters-appear-active? "Returns true if some parts of the scene are hidden due to visibility filters. Does not consider scene elements that you'd not typically expect to be there, such as debug rendering of bounding volumes, etc." ([scene-visibility] (g/with-auto-evaluation-context evaluation-context (filters-appear-active? scene-visibility evaluation-context))) ([scene-visibility evaluation-context] (boolean (and (g/node-value scene-visibility :visibility-filters-enabled? evaluation-context) (some appear-filtered-renderable-tags (g/node-value scene-visibility :filtered-renderable-tags evaluation-context)))))) ;; ----------------------------------------------------------------------------- ;; SceneVisibilityNode ;; ----------------------------------------------------------------------------- ;; ;; A SceneHideHistoryNode manages visibility for a particular scene-resource-id. ;; Objects are identified with outline name paths, which are vectors of string ;; ids from the outline. We use the term "name" here to avoid confusing these ;; with node ids. These are not necessarily names or even strings, but currently ;; the schema enforces strings. ;; ;; The individual tokens are named "node-outline-key" elsewhere, but we use the ;; term "outline name path" in this file to distinguish these from ;; node-outline-key-paths, which include the resource node id at the beginning. (defn outline-name-path? [value] (and (vector? value) (every? string? 
;; Next line: closes outline-name-path?; private schema types (TOutlineNamePaths,
;; THideHistory) and g/deftype graph types (HideHistory, OutlineNamePaths,
;; OutlineNamePathsByBool, OutlineNamePathsByNodeID, SceneHideHistoryData);
;; scene-outline-name-paths - walks a scene's children collecting outline name
;; paths (re-entering same-node-id children without extending the path);
;; outline-selection-entry->outline-name-path - drops the resource-node head of
;; a :node-outline-key-path; begins (g/defnode SceneVisibilityNode ...) with its
;; properties, inputs, and the active-scene-resource-node output (a node is
;; eligible only if its node type has a :scene output).
value))) (def ^:private TOutlineNamePaths #{(s/pred outline-name-path?)}) (def ^:private THideHistory [(s/both TOutlineNamePaths (s/pred seq))]) (g/deftype HideHistory THideHistory) (g/deftype OutlineNamePaths TOutlineNamePaths) (g/deftype OutlineNamePathsByBool {s/Bool TOutlineNamePaths}) (g/deftype OutlineNamePathsByNodeID {s/Int (s/both TOutlineNamePaths (s/pred seq))}) (g/deftype SceneHideHistoryData [(s/one s/Int "scene-resource-node") (s/one THideHistory "hide-history")]) (defn- scene-outline-name-paths ([scene] (scene-outline-name-paths [] scene)) ([outline-name-path {:keys [node-id children] :as _scene}] (mapcat (fn [{child-node-id :node-id child-node-outline-key :node-outline-key :as child-scene}] (when (some? child-node-outline-key) (if (= node-id child-node-id) (scene-outline-name-paths outline-name-path child-scene) (let [child-outline-name-path (conj outline-name-path child-node-outline-key)] (cons child-outline-name-path (scene-outline-name-paths child-outline-name-path child-scene)))))) children))) (def ^:private outline-selection-entry->outline-name-path (comp not-empty vec next :node-outline-key-path)) (g/defnode SceneVisibilityNode (property visibility-filters-enabled? g/Bool (default true)) (property filtered-renderable-tags types/RenderableTags (default #{:dev-visibility-bounds})) (input active-resource-node g/NodeID) (input active-scene g/Any :substitute nil) (input outline-selection g/Any :substitute nil) (input scene-hide-history-datas SceneHideHistoryData :array) (output active-scene-resource-node g/NodeID (g/fnk [_basis active-resource-node] (when (some? active-resource-node) (when (g/has-output? 
;; Next line: SceneVisibilityNode outputs continue -
;;   hidden-outline-name-paths-by-scene-resource-node: union of each scene's
;;     hide history, keyed by scene resource node, empty entries dropped;
;;   hidden-renderable-tags: the filter set when enabled, otherwise only its
;;     intersection with #{:grid :outline};
;;   hidden-node-outline-key-paths: hidden name paths prefixed with their
;;     scene resource node id;
;;   hidden-outline-name-paths: lookup for the active scene only;
;;   begins outline-name-paths-by-selection-state (groups the active scene's
;;   outline name paths by whether they sit at/below the current selection).
(g/node-type* _basis active-resource-node) :scene) active-resource-node)))) (output hidden-outline-name-paths-by-scene-resource-node OutlineNamePathsByNodeID :cached (g/fnk [scene-hide-history-datas] (into {} (keep (fn [[scene-resource-node hide-history]] (when-some [hidden-outline-name-paths (not-empty (apply set/union hide-history))] [scene-resource-node hidden-outline-name-paths]))) scene-hide-history-datas))) (output hidden-renderable-tags types/RenderableTags :cached (g/fnk [filtered-renderable-tags visibility-filters-enabled?] (if visibility-filters-enabled? filtered-renderable-tags (set/intersection filtered-renderable-tags #{:grid :outline})))) (output hidden-node-outline-key-paths types/NodeOutlineKeyPaths :cached (g/fnk [hidden-outline-name-paths-by-scene-resource-node] (into #{} (mapcat (fn [[scene-resource-node hidden-outline-name-paths]] (map (partial into [scene-resource-node]) hidden-outline-name-paths))) hidden-outline-name-paths-by-scene-resource-node))) (output hidden-outline-name-paths OutlineNamePaths (g/fnk [active-scene-resource-node hidden-outline-name-paths-by-scene-resource-node] (hidden-outline-name-paths-by-scene-resource-node active-scene-resource-node))) (output outline-name-paths-by-selection-state OutlineNamePathsByBool :cached (g/fnk [active-scene outline-selection] (let [selected-outline-name-paths (into [] (keep outline-selection-entry->outline-name-path) outline-selection) outline-name-path-below-selection? (fn [outline-name-path] (boolean (some #(iutil/seq-starts-with? outline-name-path %) selected-outline-name-paths)))] (iutil/group-into {} #{} outline-name-path-below-selection? 
;; Next line: remaining SceneVisibilityNode outputs - selected / unselected
;; name-path sets from the selection grouping; the derived
;; unselected-hideable / selected-hideable (set difference with hidden) and
;; selected-showable (set intersection with hidden) sets, each not-empty'd for
;; use as handler enabled? flags; last-hidden-outline-name-paths peeks the
;; active scene's hide history. Ends by beginning make-scene-visibility-node!
;; (closed on the following line).
(scene-outline-name-paths active-scene))))) (output selected-outline-name-paths OutlineNamePaths (g/fnk [outline-name-paths-by-selection-state] (outline-name-paths-by-selection-state true))) (output unselected-outline-name-paths OutlineNamePaths (g/fnk [outline-name-paths-by-selection-state] (outline-name-paths-by-selection-state false))) (output unselected-hideable-outline-name-paths OutlineNamePaths :cached (g/fnk [hidden-outline-name-paths unselected-outline-name-paths] (not-empty (set/difference unselected-outline-name-paths hidden-outline-name-paths)))) (output selected-hideable-outline-name-paths OutlineNamePaths :cached (g/fnk [hidden-outline-name-paths selected-outline-name-paths] (not-empty (set/difference selected-outline-name-paths hidden-outline-name-paths)))) (output selected-showable-outline-name-paths OutlineNamePaths :cached (g/fnk [hidden-outline-name-paths selected-outline-name-paths] (not-empty (set/intersection selected-outline-name-paths hidden-outline-name-paths)))) (output last-hidden-outline-name-paths OutlineNamePaths :cached (g/fnk [active-scene-resource-node scene-hide-history-datas] (peek (some (fn [[scene-resource-node hide-history]] (when (= active-scene-resource-node scene-resource-node) hide-history)) scene-hide-history-datas))))) (defn make-scene-visibility-node! [view-graph] (g/make-node! 
;; Next line: closes make-scene-visibility-node!; (g/defnode SceneHideHistoryNode
;; ...) pairing a scene resource node with its hide history;
;; find-scene-hide-history-node - locates the history node feeding a given
;; scene resource node; show-outline-name-paths! - removes now-visible paths
;; from every hide-history entry (so Show Last Hidden stays consistent) and
;; deletes the history node once its history is empty; begins
;; hide-outline-name-paths! (closed on the following line).
view-graph SceneVisibilityNode)) ;; ----------------------------------------------------------------------------- ;; Per-Object Visibility ;; ----------------------------------------------------------------------------- (g/defnode SceneHideHistoryNode (property hide-history HideHistory) (input scene-resource-node g/NodeID) (output scene-hide-history-data SceneHideHistoryData (g/fnk [hide-history scene-resource-node] [scene-resource-node hide-history]))) (defn- find-scene-hide-history-node [scene-visibility scene-resource-node] (some (fn [[scene-hide-history-node]] (when (some-> (g/node-feeding-into scene-hide-history-node :scene-resource-node) (= scene-resource-node)) scene-hide-history-node)) (g/sources-of scene-visibility :scene-hide-history-datas))) (defn- show-outline-name-paths! [scene-visibility outline-name-paths] (assert (set? (not-empty outline-name-paths))) (assert (every? outline-name-path? outline-name-paths)) (let [scene-resource-node (g/node-value scene-visibility :active-scene-resource-node) scene-hide-history-node (find-scene-hide-history-node scene-visibility scene-resource-node)] ;; Remove the now-visible nodes from the hide history. This ensures the Show ;; Last Hidden Objects command works as expected if the user manually shows ;; nodes she has previously hidden. (g/update-property! scene-hide-history-node :hide-history (fn [hide-history] (into [] (keep (fn [hidden-outline-name-paths] (not-empty (set/difference hidden-outline-name-paths outline-name-paths)))) hide-history))) ;; Remove the SceneHideHistoryNode if its history is now empty. (when (empty? (g/node-value scene-hide-history-node :hide-history)) (g/delete-node! scene-hide-history-node)))) (defn- hide-outline-name-paths! [scene-visibility outline-name-paths] (assert (set? (not-empty outline-name-paths))) (assert (every? outline-name-path? 
;; Next line: closes hide-outline-name-paths! - conj the path set onto an
;; existing history node, or transact a new SceneHideHistoryNode wired to both
;; the scene resource node and the scene-visibility node; then the
;; :hide-unselected and :hide-selected handlers, and begins :show-selected
;; (closed on the following line). Each handler gates active?/enabled? on the
;; corresponding SceneVisibilityNode output.
outline-name-paths)) (let [scene-resource-node (g/node-value scene-visibility :active-scene-resource-node) scene-hide-history-node (find-scene-hide-history-node scene-visibility scene-resource-node)] (if (some? scene-hide-history-node) (g/update-property! scene-hide-history-node :hide-history conj outline-name-paths) (g/transact (g/make-nodes (g/node-id->graph-id scene-visibility) [scene-hide-history-node [SceneHideHistoryNode :hide-history [outline-name-paths]]] (g/connect scene-resource-node :_node-id scene-hide-history-node :scene-resource-node) (g/connect scene-hide-history-node :scene-hide-history-data scene-visibility :scene-hide-history-datas)))))) (handler/defhandler :hide-unselected :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :unselected-hideable-outline-name-paths evaluation-context)) (run [scene-visibility] (hide-outline-name-paths! scene-visibility (g/node-value scene-visibility :unselected-hideable-outline-name-paths)))) (handler/defhandler :hide-selected :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :selected-hideable-outline-name-paths evaluation-context)) (run [scene-visibility] (hide-outline-name-paths! scene-visibility (g/node-value scene-visibility :selected-hideable-outline-name-paths)))) (handler/defhandler :show-selected :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :selected-showable-outline-name-paths evaluation-context)) (run [scene-visibility] (show-outline-name-paths! 
;; Next line: closes :show-selected; :show-last-hidden and :show-all-hidden
;; handlers; make-toggle - builds a [HBox update-fn] slide-switch row (CheckBox
;; restyled as "slide-switch", grow-always Label, accelerator Label with a
;; mac-specific font tweak); clicking the row flips the checkbox and fires
;; on-change. Begins set-tag-visibility! (closed on the following line).
scene-visibility (g/node-value scene-visibility :selected-showable-outline-name-paths)))) (handler/defhandler :show-last-hidden :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :last-hidden-outline-name-paths evaluation-context)) (run [scene-visibility] (show-outline-name-paths! scene-visibility (g/node-value scene-visibility :last-hidden-outline-name-paths)))) (handler/defhandler :show-all-hidden :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (enabled? [scene-visibility evaluation-context] (g/node-value scene-visibility :hidden-outline-name-paths evaluation-context)) (run [scene-visibility] (show-outline-name-paths! scene-visibility (g/node-value scene-visibility :hidden-outline-name-paths)))) ;; ----------------------------------------------------------------------------- ;; Visibility Filters ;; ----------------------------------------------------------------------------- (defn- make-toggle [{:keys [label acc on-change]}] (let [check-box (CheckBox.) label (Label. label) acc (Label. acc)] (ui/on-action! check-box (fn [_] (on-change (ui/value check-box)))) (ui/remove-style! check-box "check-box") (ui/add-style! check-box "slide-switch") (HBox/setHgrow label Priority/ALWAYS) (ui/add-style! label "slide-switch-label") (when (util/is-mac-os?) (.setStyle acc "-fx-font-family: 'Lucida Grande';")) (ui/add-style! acc "accelerator-label") (let [hbox (doto (HBox.) (.setAlignment Pos/CENTER_LEFT) (ui/on-click! (fn [_] (ui/value! check-box (not (ui/value check-box))) (on-change (ui/value check-box)))) (ui/children! [check-box label acc])) update (fn [checked enabled] (ui/enable! hbox enabled) (ui/value! check-box checked))] [hbox update]))) (defn set-tag-visibility! [scene-visibility tag visible] (g/update-property! 
;; NOTE(review): the next three lines are a byte-identical duplicate of the
;; chunk preceding this file in the dump (set-tag-visibility! tail through the
;; :toggle-grid handler) - likely an artifact of how the dataset was sliced.
scene-visibility :filtered-renderable-tags (if visible disj conj) tag)) (defn toggle-tag-visibility! [scene-visibility tag] (g/update-property! scene-visibility :filtered-renderable-tags (fn [tags] (if (contains? tags tag) (disj tags tag) (conj tags tag))))) (defn set-filters-enabled! [scene-visibility enabled] (g/set-property! scene-visibility :visibility-filters-enabled? enabled)) (defn toggle-filters-enabled! [scene-visibility] (g/update-property! scene-visibility :visibility-filters-enabled? not)) (defn- make-visibility-toggles-list ^Region [scene-visibility] (let [make-control (fn [{:keys [label tag]}] (if (= :separator label) [(Separator.) nil] (let [[control update-fn] (make-toggle {:label label :acc "" :on-change (fn [checked] (set-tag-visibility! scene-visibility tag checked))}) update-from-hidden-tags (fn [hidden-tags enabled] (let [checked (not (contains? hidden-tags tag))] (update-fn checked enabled)))] [control update-from-hidden-tags]))) tag-toggles (mapv make-control renderable-tag-toggles-info) tag-toggle-update-fns (into [] (keep second) tag-toggles) update-tag-toggles (fn [hidden-tags enabled] (doseq [update-fn tag-toggle-update-fns] (update-fn hidden-tags enabled))) [filters-enabled-control filters-enabled-update-fn] (make-toggle {:label "Visibility Filters" :acc (keymap/key-combo->display-text "Shift+Shortcut+I") :on-change (fn [checked] (set-filters-enabled! scene-visibility checked))}) container (doto (StackPane.) (.setMinWidth 230) (ui/children! [(doto (Region.) ;; Move drop shadow down a bit so it does not interfere with toolbar clicks. (StackPane/setMargin (Insets. 16.0 0.0 0.0 0.0)) (ui/add-style! "visibility-toggles-shadow")) (doto (VBox.) (ui/add-style! "visibility-toggles-list") (ui/children! (into [filters-enabled-control] (map first) tag-toggles)))])) update-fn (fn [] (let [filtered-tags (g/node-value scene-visibility :filtered-renderable-tags) visibility-filters-enabled? 
(g/node-value scene-visibility :visibility-filters-enabled?)] (filters-enabled-update-fn visibility-filters-enabled? true) (update-tag-toggles filtered-tags visibility-filters-enabled?)))] (ui/add-style! filters-enabled-control "first-entry") [container update-fn])) (defn- pref-popup-position ^Point2D [^Parent container width y-gap] (let [container-screen-bounds (.localToScreen container (.getBoundsInLocal container))] (Point2D. (- (.getMaxX container-screen-bounds) width 10) (+ (.getMaxY container-screen-bounds) y-gap)))) (defn show-visibility-settings! [^Parent owner scene-visibility] (if-let [popup ^PopupControl (ui/user-data owner ::popup)] (.hide popup) (let [[^Region toggles update-fn] (make-visibility-toggles-list scene-visibility) popup (popup/make-popup owner toggles) anchor (pref-popup-position owner (.getMinWidth toggles) -5) refresh-timer (ui/->timer 13 "refresh-tag-filters" (fn [_ _] (update-fn)))] (update-fn) (ui/user-data! owner ::popup popup) (ui/on-closed! popup (fn [_] (ui/user-data! owner ::popup nil))) (ui/timer-stop-on-closed! popup refresh-timer) (ui/timer-start! refresh-timer) (.show popup owner (.getX anchor) (.getY anchor))))) (defn settings-visible? [^Parent owner] (some? (ui/user-data owner ::popup))) (handler/defhandler :toggle-visibility-filters :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (run [scene-visibility] (toggle-filters-enabled! scene-visibility))) (handler/defhandler :toggle-component-guides :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (run [scene-visibility] (toggle-tag-visibility! scene-visibility :outline))) (handler/defhandler :toggle-grid :workbench (active? [scene-visibility evaluation-context] (g/node-value scene-visibility :active-scene-resource-node evaluation-context)) (run [scene-visibility] (toggle-tag-visibility! 
scene-visibility :grid)))
[ { "context": " sim-rng)\n\n ;; create a stochastic seq for bob\n bob-arma (arma-seq\n (m", "end": 14955, "score": 0.6911351084709167, "start": 14952, "tag": "NAME", "value": "bob" }, { "context": " {:seed bob-arma-seed}))\n\n ;; Bob's activity probability\n bob-prob (op-seq ", "end": 15088, "score": 0.8113678097724915, "start": 15085, "tag": "NAME", "value": "Bob" }, { "context": " mask])\n ;; to keep it deterministic, give bob another seeded RNG to take with him.\n ^Ra", "end": 15342, "score": 0.7586930990219116, "start": 15339, "tag": "NAME", "value": "bob" }, { "context": " ;; Compose the time (in minute increments), bob's probability\n ;; and his RNG and you hav", "end": 15492, "score": 0.7700924873352051, "start": 15489, "tag": "NAME", "value": "bob" } ]
src/main/com/yetanalytics/datasim/timeseries.clj
yetanalytics/datasim
12
;; NOTE(review): full `com.yetanalytics.datasim.timeseries` namespace, stored
;; with original newlines collapsed onto a few physical lines. Comments are
;; added only as standalone lines; all original tokens are byte-identical.
;;
;; Next line: ns form (clojure.spec, java-time, java.util.Random); specs for the
;; ARMA model (::phi, ::theta, ::std, ::c, ::seed, ::ar, ::ma, ::arma, ::rng)
;; and the arma-seq fdef; arma-seq-const - iteratively computes the value of the
;; stochastic sequence after n steps (seeded via ::seed when no :rng given);
;; begins the lazy arma-seq generator (closed on the following line).
(ns com.yetanalytics.datasim.timeseries (:require [clojure.spec.alpha :as s] [clojure.spec.gen.alpha :as sgen] ;; [incanter.interpolation :as interp] [com.yetanalytics.datasim.clock :as clock] [java-time :as t] [com.yetanalytics.datasim.util.maths :as maths]) (:import [java.util Random])) ;; Primitive seqs, just lazy seqs of numerics ;; ARMA, stochasic pseudorandom (s/def ::safe-double (s/double-in :infinite? false :NaN? false)) (s/def ::phi (s/coll-of ::safe-double :into [])) (s/def ::std ::safe-double) (s/def ::c ::safe-double) (s/def ::seed int?) (s/def ::ar (s/keys :req-un [::phi ::std ::c ::seed])) (s/def ::theta (s/coll-of ::safe-double :into [])) (s/def ::ma (s/keys :req-un [::theta ::std ::c ::seed])) (s/def ::arma (s/merge ::ar ::ma)) (s/def ::rng (s/with-gen #(instance? Random %) (fn [] (sgen/return (Random.))))) (s/def ::value ::safe-double) (s/def ::epsilon ::safe-double) (s/fdef arma-seq :args (s/cat :arma-model ::arma :recur-args (s/? (s/cat :prev-value ::value :prev-epsilon ::epsilon :rng ::rng))) :ret (s/every ::safe-double)) (defn arma-seq-const "Find the value of a stochastic sequence after n runs with the given model." [{:keys [std phi theta c seed rng value epsilon] :as arma-model :or {phi [] theta [] value 0.0 epsilon 0.0}} n] (let [^Random rng (or rng (and seed (Random. seed)) (Random.))] (loop [^Double v value ^Double e epsilon n' 0] (let [new-epsilon (* (.nextGaussian rng) std) sum-phi (reduce (fn [old nxt] (+ old (* nxt v))) 0.0 phi) sum-theta (reduce (fn [old nxt] (+ old (* nxt e))) 0.0 theta) ret (+ c new-epsilon sum-phi sum-theta)] (if (= n' (inc n)) v (recur ret new-epsilon (inc n'))))))) (defn arma-seq "Given arma params, return an infinite lazy seq of values" ([{:keys [seed] :as arma-model}] (lazy-seq (with-meta (arma-seq arma-model 0.0 0.0 (Random. 
;; Next line: closes arma-seq (recursive arity carries prev value/epsilon and
;; the seeded Random; each element is c + gaussian*std + phi-weighted prev
;; value + theta-weighted prev epsilon); a reader-discarded equivalence check
;; against arma-seq-const; constant-seq; rand-seq - infinite seeded stream of
;; :long / :double / :gauss values; sample REPL outputs in comments; begins the
;; reader-discarded (#_) interpolate-seq sketch (continues on the next line).
seed)) {::seed seed ::arma arma-model}))) ([{:keys [std phi theta c] :as arma-model :or {phi [] theta []}} ^Double prev-value ^Double prev-epsilon ^Random rng] (lazy-seq (let [new-epsilon (* (.nextGaussian rng) std) sum-phi (reduce (fn [old nxt] (+ old (* nxt prev-value))) 0.0 phi) sum-theta (reduce (fn [old nxt] (+ old (* nxt prev-epsilon))) 0.0 theta) ret (+ c new-epsilon sum-phi sum-theta)] (cons ret (arma-seq arma-model ret new-epsilon rng)))))) #_(let [model {:phi [0.5 0.2] :theta [] :std 0.25 :c 0.0 :seed 42}] (= (arma-seq-const model 1000) (nth (arma-seq model) 1000))) (defn constant-seq "Return an infinite sequence of the given constant value" [constant] (repeat constant)) (defn rand-seq [& {:keys [seed rng val-type gauss-mean gauss-sd] :or {val-type :long gauss-mean 0.0 gauss-sd 1.0}}] (lazy-seq (let [^Random rng (or rng (and seed (Random. seed)) (Random.))] (cons (case val-type :long (.nextLong rng) :gauss (+ (* gauss-sd (.nextGaussian rng)) gauss-mean) :double (.nextDouble rng)) (rand-seq :rng rng :val-type val-type :gauss-mean gauss-mean :gauss-sd gauss-sd))))) #_(take 10 (rand-seq :val-type :gauss :seed 42)) ;; => (1.1419053154730547 0.9194079489827879 -0.9498666368908959 -1.1069902863993377 0.2809776380727795 0.6846227956326554 -0.8172214073987268 -1.3966434026780434 -0.19094451307087512 1.4862133923906502) #_(take 10 (rand-seq :val-type :gauss :gauss-sd 100 :gauss-mean 500 :seed 42 )) ;; => (614.1905315473055 591.9407948982788 405.0133363109104 389.3009713600662 528.0977638072779 568.4622795632655 418.27785926012734 360.33565973219567 480.9055486929125 648.621339239065) #_(defn interpolate-seq "Given a series of point tuples where x is time and y is a known value, return an interpolated sequence of y every step" [& {:keys [;; init args points interpolation-type ;; Recur args step x interpolator interpolate-opts] :or {step 1 x 0 interpolation-type :cubic-hermite interpolate-opts []}}] (lazy-seq (let [interpolator (or interpolator (apply 
;; Next line: closes the discarded interpolate-seq and the discarded
;; continuize-seq (both depend on the commented-out incanter require);
;; cycle-seq - infinite repetition of a drop/take window of the input;
;; smooth-seq - moving average over windows of n; interval-seq - pairwise
;; differences; op-seq - variadic map over >=1 seqs; sum-seq / invert-seq;
;; begins scale-seq (closed on the following line).
interp/interpolate points interpolation-type interpolate-opts))] (cons (interpolator x) (interpolate-seq :step step :x (+ x step) :interpolator interpolator))))) #_(take 10 (interpolate-seq :points [[0 0] [4 6] [8 3]])) ;;=> (0.0 1.7109375 3.5625 5.1328125 6.0 5.8828125 5.0625 3.9609375 3.0 2.6015625) #_(defn continuize-seq "Return a lazy seq of interpolation functions for the input seq." [xs & {:keys [;; init args points interpolation-type ;; Recur args interpolate-opts] :or {step 1 x 0 interpolation-type :cubic-hermite interpolate-opts []}}] (map-indexed (fn [x [y z a b c]] (delay (apply interp/interpolate [[(- x 2) y] [(- x 1) z] [x a] [(+ x 1) b] [(+ x 2) c]] interpolation-type interpolate-opts))) (partition 5 1 (concat (repeat 2 0.0) xs))) ) (defn cycle-seq "Given a sequence, length and offset, return a seq that cycles forever over a portion of the seq." [xs & {:keys [length offset] :or {offset 0 length 0}}] (->> xs (drop offset) (take length) cycle)) #_(take 10 (cycle-seq (range 10) :length 3)) ;; => (0 1 2 0 1 2 0 1 2 0) (defn smooth-seq [xs & {:keys [n] :or {n 2}}] (map (fn [xs'] (double (/ (reduce + xs') (count xs')))) (partition n 1 xs))) (defn interval-seq "Return a seq representing the intervals of the input seq" [xs] (map (fn [[a b]] (- b a)) (partition 2 1 xs))) #_(interval-seq (range 10)) ;; => (1 1 1 1 1 1 1 1 1) #_(interval-seq [1 5 7 9]) ;; => (4 2 2) ;; Primitive seq ops that yield other seqs (defn op-seq "Perform actions on one or more seqs" [op seqs] (assert (<= 1 (count seqs)) "At least one seq is required") (apply map op seqs)) (defn sum-seq "Add together the values of any number of seqs" [& seqs] (op-seq + seqs)) (defn invert-seq "flip vals in a seq from positive to negative or visa versa" [xs] (map - xs)) (defn scale-seq "Given a seq and a scale, change the number of events to fit the scale" [xs scale] (assert (and (int? 
;; Next line: closes scale-seq (repeat each element `scale` times); overlap-seq
;; - unused instructional fn returning {:t :length (+ stats)} for each run
;; where (comp-fn a b) holds; take-sample - truncates a time series to
;; sample-millis given its period; local-seq-as - maps epoch-ms stamps to a
;; java-time field in a zone; begins time-seqs (closed across the next lines).
scale) (<= 1 scale))) (mapcat (partial repeat scale) xs)) ;; Complex (composite) seqs (defn overlap-seq "NOT USED, but instructive... Given two seqs a and b, for each period where a > b return the T (index) and length of the overlap." [a b & {:keys [comp-fn extra-stats] :or {comp-fn > extra-stats false}}] (keep (fn [[[t a' b'] & _ :as chunk]] (when (comp-fn a' b') (merge {:t t :length (count chunk)} (when extra-stats (let [a-seq (map #(get % 1) chunk) [a-min a-max] (apply (juxt min max) a-seq) b-seq (map #(get % 2) chunk) [b-min b-max] (apply (juxt min max) b-seq)] {:a-seq a-seq :a-edges ((juxt first last) a-seq) :a-min a-min :a-max a-max :b-seq b-seq :b-edges ((juxt first last) b-seq) :b-min b-min :b-max b-max :min (min a-min b-min) :max (max a-max b-max)}))))) (partition-by (fn [[_ a' b']] (comp-fn a' b')) (map vector (range) a b)))) #_(overlap-seq [1 2 3 4 5 6 5 4 3 2 1] [6 5 4 3 2 1 2 3 4 5 6]) ;; => ({:t 3, :length 5}) (defn take-sample "Take a sample of sample-millis from a time series. :from denotes the period of xs" [xs sample-millis & {:keys [from] :or {from :millis}}] (take (quot sample-millis (case from :millis 1 :seconds 1000 :minutes 60000 :hours 3600000 :days 86400000)) xs)) (defn- local-seq-as [xs zone as] (map (fn [stamp] (t/as (t/local-date-time stamp zone) as)) xs)) (defn time-seqs "Given a t-zero (simulation start), an upper bound of sample-n milliseconds and an optional local timezone, return a map of useful lazy time sequences." 
;; Next line: time-seqs body - bounded or infinite millisecond range from
;; t-zero; r-partial builds stepped ranges (capped by sample-n when given);
;; currently only :min-seq (minute steps), :mod-seq (minute-of-day in `zone`),
;; and :day-night-seq (cosine over the 24h cycle) are returned; the other
;; candidate sequences are commented out.
[& {:keys [t-zero sample-n ^java.time.ZoneRegion zone] :or {t-zero 0 zone ^java.time.ZoneRegion (t/zone-id "UTC")}}] (let [t-seq (if sample-n (range t-zero sample-n) (range)) r-partial (if sample-n (fn [step] (take (quot sample-n step) (range t-zero Long/MAX_VALUE step))) (partial range t-zero Long/MAX_VALUE)) ;; Primary ;; sec-seq (r-partial 1000) min-seq (r-partial 60000) ;; hour-seq (r-partial 3600000) ;; week-seq (r-partial 604800000) ;; day-seq (r-partial 86400000) ;; secondary/local ;; moh-seq (local-seq-as min-seq ;; zone ;; :minute-of-hour) mod-seq (local-seq-as min-seq zone :minute-of-day) day-night-seq (map (comp #(Math/cos ^Double %) #(double (* 2 Math/PI (/ % 86400000))) (partial * 60000)) mod-seq) ;; hod-seq (local-seq-as hour-seq ;; zone ;; :hour-of-day) ;; dow-seq (local-seq-as day-seq ;; zone ;; :day-of-week) ;; dom-seq (local-seq-as day-seq ;; zone ;; :day-of-month) ;; doy-seq (local-seq-as day-seq ;; zone ;; :day-of-year) ] {; :t-seq t-seq ; :sec-seq sec-seq :min-seq min-seq ;:hour-seq hour-seq ;:day-seq day-seq ;:week-seq week-seq ;:moh-seq moh-seq :mod-seq mod-seq :day-night-seq day-night-seq ;:hod-seq hod-seq ;:dow-seq dow-seq ;:dom-seq dom-seq ;:doy-seq doy-seq })) (comment (use '(incanter core stats charts io)) (time (let [sim-seed 42 ;; Create a master RNG for the sim. This is used only to generate other seeds ^Random sim-rng (Random. sim-seed) ;; the start of the sim, in ms since epoch t-zero 0;; (System/currentTimeMillis) ;; the amount of time, in MS, this sim covers sample-n (:whole (t/convert-amount 7 :days :millis)) ;; a local timezone timezone (t/zone-id "America/New_York") ;; Build useful time seqs, only get eval'd if used! 
;; Next line: continuation of the (comment ...) demo - derives a 7-day
;; simulation: a shared ARMA model, a group "challenge" mask max'd with the
;; day-night cycle and a lunch-hour square wave, an actor ("bob") ARMA series,
;; a clamped activity probability, and a seeded per-actor Random; closes on
;; the following line by zipping these into bob-seq and plotting with incanter.
{:keys [week-seq min-seq t-seq doy-seq moh-seq day-seq sec-seq dom-seq hod-seq hour-seq dow-seq mod-seq day-night-seq]} (time-seqs :t-zero t-zero :sample-n sample-n :zone timezone) ;; in our model, a timeseries for a given actor is measured against ;; a composite timeseries representing abstract challenge/diversity. ;; This is a combination of a stochastic series representing ;; unpredictable factors that apply to the group, higher is harder. ;; this series is max'd with a day night cycle (day is easier, night is ;; harder). Think of this combined series as a mask. ;; When an actor's series is greater than the challenge, the difference ;; between the two is the probability (from 0.0 to 1.0) that an event ;; will happen at that time. ;; random stochastic settings can (and probably should) be shared. common-arma {:phi [0.5 0.2] :theta [] :std 0.25 :c 0.0} ;; Generate a seed for the group group-seed (.nextLong sim-rng) ;; create a stochastic seq for the group group-arma (arma-seq (merge common-arma {:seed group-seed})) ;; Create a periodic seq for the lunch hour break lunch-hour-seq (map (fn [x] (if (<= 720 x 780) 1.0 -1.0)) mod-seq) ;; form a mask for the group + day-night + lunch mask (op-seq max [group-arma day-night-seq lunch-hour-seq]) ;; create a seed for Bob's seq bob-arma-seed (.nextLong sim-rng) ;; create a stochastic seq for bob bob-arma (arma-seq (merge common-arma {:seed bob-arma-seed})) ;; Bob's activity probability bob-prob (op-seq (fn [a b] (double (maths/min-max 0.0 (/ (- a b) 2) 1.0))) [bob-arma mask]) ;; to keep it deterministic, give bob another seeded RNG to take with him. ^Random bob-rng (Random. (.nextLong sim-rng)) ;; Compose the time (in minute increments), bob's probability ;; and his RNG and you have everything you need to generate events for ;; bob. Here the RNG is used to generate a sequence for demonstration, ;; in practice it would get handed off to a thread or some such. 
bob-seq (map (fn [t prob rand-long] {:t t :prob prob :r rand-long}) min-seq bob-prob (rand-seq :val-type :long :rng bob-rng))] (view (time-series-plot (map :t bob-seq) (map :prob bob-seq))) (view (time-series-plot (map :t bob-seq) (map :r bob-seq))))) )
11720
;; NOTE(review): the four lines below are a byte-identical duplicate of the
;; start of the `com.yetanalytics.datasim.timeseries` namespace that appears
;; earlier in this dump (ns + ARMA specs + arma-seq-const/arma-seq, the seq
;; combinators through scale-seq/overlap-seq/take-sample/local-seq-as), but the
;; copy is TRUNCATED: the final line ends after the `time-seqs` docstring, with
;; the argument vector and body missing, so this chunk does not read as a
;; complete file. Content is reproduced verbatim; only these standalone
;; comment lines were added.
(ns com.yetanalytics.datasim.timeseries (:require [clojure.spec.alpha :as s] [clojure.spec.gen.alpha :as sgen] ;; [incanter.interpolation :as interp] [com.yetanalytics.datasim.clock :as clock] [java-time :as t] [com.yetanalytics.datasim.util.maths :as maths]) (:import [java.util Random])) ;; Primitive seqs, just lazy seqs of numerics ;; ARMA, stochasic pseudorandom (s/def ::safe-double (s/double-in :infinite? false :NaN? false)) (s/def ::phi (s/coll-of ::safe-double :into [])) (s/def ::std ::safe-double) (s/def ::c ::safe-double) (s/def ::seed int?) (s/def ::ar (s/keys :req-un [::phi ::std ::c ::seed])) (s/def ::theta (s/coll-of ::safe-double :into [])) (s/def ::ma (s/keys :req-un [::theta ::std ::c ::seed])) (s/def ::arma (s/merge ::ar ::ma)) (s/def ::rng (s/with-gen #(instance? Random %) (fn [] (sgen/return (Random.))))) (s/def ::value ::safe-double) (s/def ::epsilon ::safe-double) (s/fdef arma-seq :args (s/cat :arma-model ::arma :recur-args (s/? (s/cat :prev-value ::value :prev-epsilon ::epsilon :rng ::rng))) :ret (s/every ::safe-double)) (defn arma-seq-const "Find the value of a stochastic sequence after n runs with the given model." [{:keys [std phi theta c seed rng value epsilon] :as arma-model :or {phi [] theta [] value 0.0 epsilon 0.0}} n] (let [^Random rng (or rng (and seed (Random. seed)) (Random.))] (loop [^Double v value ^Double e epsilon n' 0] (let [new-epsilon (* (.nextGaussian rng) std) sum-phi (reduce (fn [old nxt] (+ old (* nxt v))) 0.0 phi) sum-theta (reduce (fn [old nxt] (+ old (* nxt e))) 0.0 theta) ret (+ c new-epsilon sum-phi sum-theta)] (if (= n' (inc n)) v (recur ret new-epsilon (inc n'))))))) (defn arma-seq "Given arma params, return an infinite lazy seq of values" ([{:keys [seed] :as arma-model}] (lazy-seq (with-meta (arma-seq arma-model 0.0 0.0 (Random. 
seed)) {::seed seed ::arma arma-model}))) ([{:keys [std phi theta c] :as arma-model :or {phi [] theta []}} ^Double prev-value ^Double prev-epsilon ^Random rng] (lazy-seq (let [new-epsilon (* (.nextGaussian rng) std) sum-phi (reduce (fn [old nxt] (+ old (* nxt prev-value))) 0.0 phi) sum-theta (reduce (fn [old nxt] (+ old (* nxt prev-epsilon))) 0.0 theta) ret (+ c new-epsilon sum-phi sum-theta)] (cons ret (arma-seq arma-model ret new-epsilon rng)))))) #_(let [model {:phi [0.5 0.2] :theta [] :std 0.25 :c 0.0 :seed 42}] (= (arma-seq-const model 1000) (nth (arma-seq model) 1000))) (defn constant-seq "Return an infinite sequence of the given constant value" [constant] (repeat constant)) (defn rand-seq [& {:keys [seed rng val-type gauss-mean gauss-sd] :or {val-type :long gauss-mean 0.0 gauss-sd 1.0}}] (lazy-seq (let [^Random rng (or rng (and seed (Random. seed)) (Random.))] (cons (case val-type :long (.nextLong rng) :gauss (+ (* gauss-sd (.nextGaussian rng)) gauss-mean) :double (.nextDouble rng)) (rand-seq :rng rng :val-type val-type :gauss-mean gauss-mean :gauss-sd gauss-sd))))) #_(take 10 (rand-seq :val-type :gauss :seed 42)) ;; => (1.1419053154730547 0.9194079489827879 -0.9498666368908959 -1.1069902863993377 0.2809776380727795 0.6846227956326554 -0.8172214073987268 -1.3966434026780434 -0.19094451307087512 1.4862133923906502) #_(take 10 (rand-seq :val-type :gauss :gauss-sd 100 :gauss-mean 500 :seed 42 )) ;; => (614.1905315473055 591.9407948982788 405.0133363109104 389.3009713600662 528.0977638072779 568.4622795632655 418.27785926012734 360.33565973219567 480.9055486929125 648.621339239065) #_(defn interpolate-seq "Given a series of point tuples where x is time and y is a known value, return an interpolated sequence of y every step" [& {:keys [;; init args points interpolation-type ;; Recur args step x interpolator interpolate-opts] :or {step 1 x 0 interpolation-type :cubic-hermite interpolate-opts []}}] (lazy-seq (let [interpolator (or interpolator (apply 
interp/interpolate points interpolation-type interpolate-opts))] (cons (interpolator x) (interpolate-seq :step step :x (+ x step) :interpolator interpolator))))) #_(take 10 (interpolate-seq :points [[0 0] [4 6] [8 3]])) ;;=> (0.0 1.7109375 3.5625 5.1328125 6.0 5.8828125 5.0625 3.9609375 3.0 2.6015625) #_(defn continuize-seq "Return a lazy seq of interpolation functions for the input seq." [xs & {:keys [;; init args points interpolation-type ;; Recur args interpolate-opts] :or {step 1 x 0 interpolation-type :cubic-hermite interpolate-opts []}}] (map-indexed (fn [x [y z a b c]] (delay (apply interp/interpolate [[(- x 2) y] [(- x 1) z] [x a] [(+ x 1) b] [(+ x 2) c]] interpolation-type interpolate-opts))) (partition 5 1 (concat (repeat 2 0.0) xs))) ) (defn cycle-seq "Given a sequence, length and offset, return a seq that cycles forever over a portion of the seq." [xs & {:keys [length offset] :or {offset 0 length 0}}] (->> xs (drop offset) (take length) cycle)) #_(take 10 (cycle-seq (range 10) :length 3)) ;; => (0 1 2 0 1 2 0 1 2 0) (defn smooth-seq [xs & {:keys [n] :or {n 2}}] (map (fn [xs'] (double (/ (reduce + xs') (count xs')))) (partition n 1 xs))) (defn interval-seq "Return a seq representing the intervals of the input seq" [xs] (map (fn [[a b]] (- b a)) (partition 2 1 xs))) #_(interval-seq (range 10)) ;; => (1 1 1 1 1 1 1 1 1) #_(interval-seq [1 5 7 9]) ;; => (4 2 2) ;; Primitive seq ops that yield other seqs (defn op-seq "Perform actions on one or more seqs" [op seqs] (assert (<= 1 (count seqs)) "At least one seq is required") (apply map op seqs)) (defn sum-seq "Add together the values of any number of seqs" [& seqs] (op-seq + seqs)) (defn invert-seq "flip vals in a seq from positive to negative or visa versa" [xs] (map - xs)) (defn scale-seq "Given a seq and a scale, change the number of events to fit the scale" [xs scale] (assert (and (int? 
scale) (<= 1 scale))) (mapcat (partial repeat scale) xs)) ;; Complex (composite) seqs (defn overlap-seq "NOT USED, but instructive... Given two seqs a and b, for each period where a > b return the T (index) and length of the overlap." [a b & {:keys [comp-fn extra-stats] :or {comp-fn > extra-stats false}}] (keep (fn [[[t a' b'] & _ :as chunk]] (when (comp-fn a' b') (merge {:t t :length (count chunk)} (when extra-stats (let [a-seq (map #(get % 1) chunk) [a-min a-max] (apply (juxt min max) a-seq) b-seq (map #(get % 2) chunk) [b-min b-max] (apply (juxt min max) b-seq)] {:a-seq a-seq :a-edges ((juxt first last) a-seq) :a-min a-min :a-max a-max :b-seq b-seq :b-edges ((juxt first last) b-seq) :b-min b-min :b-max b-max :min (min a-min b-min) :max (max a-max b-max)}))))) (partition-by (fn [[_ a' b']] (comp-fn a' b')) (map vector (range) a b)))) #_(overlap-seq [1 2 3 4 5 6 5 4 3 2 1] [6 5 4 3 2 1 2 3 4 5 6]) ;; => ({:t 3, :length 5}) (defn take-sample "Take a sample of sample-millis from a time series. :from denotes the period of xs" [xs sample-millis & {:keys [from] :or {from :millis}}] (take (quot sample-millis (case from :millis 1 :seconds 1000 :minutes 60000 :hours 3600000 :days 86400000)) xs)) (defn- local-seq-as [xs zone as] (map (fn [stamp] (t/as (t/local-date-time stamp zone) as)) xs)) (defn time-seqs "Given a t-zero (simulation start), an upper bound of sample-n milliseconds and an optional local timezone, return a map of useful lazy time sequences." 
[& {:keys [t-zero sample-n ^java.time.ZoneRegion zone] :or {t-zero 0 zone ^java.time.ZoneRegion (t/zone-id "UTC")}}] (let [t-seq (if sample-n (range t-zero sample-n) (range)) r-partial (if sample-n (fn [step] (take (quot sample-n step) (range t-zero Long/MAX_VALUE step))) (partial range t-zero Long/MAX_VALUE)) ;; Primary ;; sec-seq (r-partial 1000) min-seq (r-partial 60000) ;; hour-seq (r-partial 3600000) ;; week-seq (r-partial 604800000) ;; day-seq (r-partial 86400000) ;; secondary/local ;; moh-seq (local-seq-as min-seq ;; zone ;; :minute-of-hour) mod-seq (local-seq-as min-seq zone :minute-of-day) day-night-seq (map (comp #(Math/cos ^Double %) #(double (* 2 Math/PI (/ % 86400000))) (partial * 60000)) mod-seq) ;; hod-seq (local-seq-as hour-seq ;; zone ;; :hour-of-day) ;; dow-seq (local-seq-as day-seq ;; zone ;; :day-of-week) ;; dom-seq (local-seq-as day-seq ;; zone ;; :day-of-month) ;; doy-seq (local-seq-as day-seq ;; zone ;; :day-of-year) ] {; :t-seq t-seq ; :sec-seq sec-seq :min-seq min-seq ;:hour-seq hour-seq ;:day-seq day-seq ;:week-seq week-seq ;:moh-seq moh-seq :mod-seq mod-seq :day-night-seq day-night-seq ;:hod-seq hod-seq ;:dow-seq dow-seq ;:dom-seq dom-seq ;:doy-seq doy-seq })) (comment (use '(incanter core stats charts io)) (time (let [sim-seed 42 ;; Create a master RNG for the sim. This is used only to generate other seeds ^Random sim-rng (Random. sim-seed) ;; the start of the sim, in ms since epoch t-zero 0;; (System/currentTimeMillis) ;; the amount of time, in MS, this sim covers sample-n (:whole (t/convert-amount 7 :days :millis)) ;; a local timezone timezone (t/zone-id "America/New_York") ;; Build useful time seqs, only get eval'd if used! 
{:keys [week-seq min-seq t-seq doy-seq moh-seq day-seq sec-seq dom-seq hod-seq hour-seq dow-seq mod-seq day-night-seq]} (time-seqs :t-zero t-zero :sample-n sample-n :zone timezone) ;; in our model, a timeseries for a given actor is measured against ;; a composite timeseries representing abstract challenge/diversity. ;; This is a combination of a stochastic series representing ;; unpredictable factors that apply to the group, higher is harder. ;; this series is max'd with a day night cycle (day is easier, night is ;; harder). Think of this combined series as a mask. ;; When an actor's series is greater than the challenge, the difference ;; between the two is the probability (from 0.0 to 1.0) that an event ;; will happen at that time. ;; random stochastic settings can (and probably should) be shared. common-arma {:phi [0.5 0.2] :theta [] :std 0.25 :c 0.0} ;; Generate a seed for the group group-seed (.nextLong sim-rng) ;; create a stochastic seq for the group group-arma (arma-seq (merge common-arma {:seed group-seed})) ;; Create a periodic seq for the lunch hour break lunch-hour-seq (map (fn [x] (if (<= 720 x 780) 1.0 -1.0)) mod-seq) ;; form a mask for the group + day-night + lunch mask (op-seq max [group-arma day-night-seq lunch-hour-seq]) ;; create a seed for Bob's seq bob-arma-seed (.nextLong sim-rng) ;; create a stochastic seq for <NAME> bob-arma (arma-seq (merge common-arma {:seed bob-arma-seed})) ;; <NAME>'s activity probability bob-prob (op-seq (fn [a b] (double (maths/min-max 0.0 (/ (- a b) 2) 1.0))) [bob-arma mask]) ;; to keep it deterministic, give <NAME> another seeded RNG to take with him. ^Random bob-rng (Random. (.nextLong sim-rng)) ;; Compose the time (in minute increments), <NAME>'s probability ;; and his RNG and you have everything you need to generate events for ;; bob. Here the RNG is used to generate a sequence for demonstration, ;; in practice it would get handed off to a thread or some such. 
bob-seq (map (fn [t prob rand-long] {:t t :prob prob :r rand-long}) min-seq bob-prob (rand-seq :val-type :long :rng bob-rng))] (view (time-series-plot (map :t bob-seq) (map :prob bob-seq))) (view (time-series-plot (map :t bob-seq) (map :r bob-seq))))) )
true
(ns com.yetanalytics.datasim.timeseries (:require [clojure.spec.alpha :as s] [clojure.spec.gen.alpha :as sgen] ;; [incanter.interpolation :as interp] [com.yetanalytics.datasim.clock :as clock] [java-time :as t] [com.yetanalytics.datasim.util.maths :as maths]) (:import [java.util Random])) ;; Primitive seqs, just lazy seqs of numerics ;; ARMA, stochasic pseudorandom (s/def ::safe-double (s/double-in :infinite? false :NaN? false)) (s/def ::phi (s/coll-of ::safe-double :into [])) (s/def ::std ::safe-double) (s/def ::c ::safe-double) (s/def ::seed int?) (s/def ::ar (s/keys :req-un [::phi ::std ::c ::seed])) (s/def ::theta (s/coll-of ::safe-double :into [])) (s/def ::ma (s/keys :req-un [::theta ::std ::c ::seed])) (s/def ::arma (s/merge ::ar ::ma)) (s/def ::rng (s/with-gen #(instance? Random %) (fn [] (sgen/return (Random.))))) (s/def ::value ::safe-double) (s/def ::epsilon ::safe-double) (s/fdef arma-seq :args (s/cat :arma-model ::arma :recur-args (s/? (s/cat :prev-value ::value :prev-epsilon ::epsilon :rng ::rng))) :ret (s/every ::safe-double)) (defn arma-seq-const "Find the value of a stochastic sequence after n runs with the given model." [{:keys [std phi theta c seed rng value epsilon] :as arma-model :or {phi [] theta [] value 0.0 epsilon 0.0}} n] (let [^Random rng (or rng (and seed (Random. seed)) (Random.))] (loop [^Double v value ^Double e epsilon n' 0] (let [new-epsilon (* (.nextGaussian rng) std) sum-phi (reduce (fn [old nxt] (+ old (* nxt v))) 0.0 phi) sum-theta (reduce (fn [old nxt] (+ old (* nxt e))) 0.0 theta) ret (+ c new-epsilon sum-phi sum-theta)] (if (= n' (inc n)) v (recur ret new-epsilon (inc n'))))))) (defn arma-seq "Given arma params, return an infinite lazy seq of values" ([{:keys [seed] :as arma-model}] (lazy-seq (with-meta (arma-seq arma-model 0.0 0.0 (Random. 
seed)) {::seed seed ::arma arma-model}))) ([{:keys [std phi theta c] :as arma-model :or {phi [] theta []}} ^Double prev-value ^Double prev-epsilon ^Random rng] (lazy-seq (let [new-epsilon (* (.nextGaussian rng) std) sum-phi (reduce (fn [old nxt] (+ old (* nxt prev-value))) 0.0 phi) sum-theta (reduce (fn [old nxt] (+ old (* nxt prev-epsilon))) 0.0 theta) ret (+ c new-epsilon sum-phi sum-theta)] (cons ret (arma-seq arma-model ret new-epsilon rng)))))) #_(let [model {:phi [0.5 0.2] :theta [] :std 0.25 :c 0.0 :seed 42}] (= (arma-seq-const model 1000) (nth (arma-seq model) 1000))) (defn constant-seq "Return an infinite sequence of the given constant value" [constant] (repeat constant)) (defn rand-seq [& {:keys [seed rng val-type gauss-mean gauss-sd] :or {val-type :long gauss-mean 0.0 gauss-sd 1.0}}] (lazy-seq (let [^Random rng (or rng (and seed (Random. seed)) (Random.))] (cons (case val-type :long (.nextLong rng) :gauss (+ (* gauss-sd (.nextGaussian rng)) gauss-mean) :double (.nextDouble rng)) (rand-seq :rng rng :val-type val-type :gauss-mean gauss-mean :gauss-sd gauss-sd))))) #_(take 10 (rand-seq :val-type :gauss :seed 42)) ;; => (1.1419053154730547 0.9194079489827879 -0.9498666368908959 -1.1069902863993377 0.2809776380727795 0.6846227956326554 -0.8172214073987268 -1.3966434026780434 -0.19094451307087512 1.4862133923906502) #_(take 10 (rand-seq :val-type :gauss :gauss-sd 100 :gauss-mean 500 :seed 42 )) ;; => (614.1905315473055 591.9407948982788 405.0133363109104 389.3009713600662 528.0977638072779 568.4622795632655 418.27785926012734 360.33565973219567 480.9055486929125 648.621339239065) #_(defn interpolate-seq "Given a series of point tuples where x is time and y is a known value, return an interpolated sequence of y every step" [& {:keys [;; init args points interpolation-type ;; Recur args step x interpolator interpolate-opts] :or {step 1 x 0 interpolation-type :cubic-hermite interpolate-opts []}}] (lazy-seq (let [interpolator (or interpolator (apply 
interp/interpolate points interpolation-type interpolate-opts))] (cons (interpolator x) (interpolate-seq :step step :x (+ x step) :interpolator interpolator))))) #_(take 10 (interpolate-seq :points [[0 0] [4 6] [8 3]])) ;;=> (0.0 1.7109375 3.5625 5.1328125 6.0 5.8828125 5.0625 3.9609375 3.0 2.6015625) #_(defn continuize-seq "Return a lazy seq of interpolation functions for the input seq." [xs & {:keys [;; init args points interpolation-type ;; Recur args interpolate-opts] :or {step 1 x 0 interpolation-type :cubic-hermite interpolate-opts []}}] (map-indexed (fn [x [y z a b c]] (delay (apply interp/interpolate [[(- x 2) y] [(- x 1) z] [x a] [(+ x 1) b] [(+ x 2) c]] interpolation-type interpolate-opts))) (partition 5 1 (concat (repeat 2 0.0) xs))) ) (defn cycle-seq "Given a sequence, length and offset, return a seq that cycles forever over a portion of the seq." [xs & {:keys [length offset] :or {offset 0 length 0}}] (->> xs (drop offset) (take length) cycle)) #_(take 10 (cycle-seq (range 10) :length 3)) ;; => (0 1 2 0 1 2 0 1 2 0) (defn smooth-seq [xs & {:keys [n] :or {n 2}}] (map (fn [xs'] (double (/ (reduce + xs') (count xs')))) (partition n 1 xs))) (defn interval-seq "Return a seq representing the intervals of the input seq" [xs] (map (fn [[a b]] (- b a)) (partition 2 1 xs))) #_(interval-seq (range 10)) ;; => (1 1 1 1 1 1 1 1 1) #_(interval-seq [1 5 7 9]) ;; => (4 2 2) ;; Primitive seq ops that yield other seqs (defn op-seq "Perform actions on one or more seqs" [op seqs] (assert (<= 1 (count seqs)) "At least one seq is required") (apply map op seqs)) (defn sum-seq "Add together the values of any number of seqs" [& seqs] (op-seq + seqs)) (defn invert-seq "flip vals in a seq from positive to negative or visa versa" [xs] (map - xs)) (defn scale-seq "Given a seq and a scale, change the number of events to fit the scale" [xs scale] (assert (and (int? 
scale) (<= 1 scale))) (mapcat (partial repeat scale) xs)) ;; Complex (composite) seqs (defn overlap-seq "NOT USED, but instructive... Given two seqs a and b, for each period where a > b return the T (index) and length of the overlap." [a b & {:keys [comp-fn extra-stats] :or {comp-fn > extra-stats false}}] (keep (fn [[[t a' b'] & _ :as chunk]] (when (comp-fn a' b') (merge {:t t :length (count chunk)} (when extra-stats (let [a-seq (map #(get % 1) chunk) [a-min a-max] (apply (juxt min max) a-seq) b-seq (map #(get % 2) chunk) [b-min b-max] (apply (juxt min max) b-seq)] {:a-seq a-seq :a-edges ((juxt first last) a-seq) :a-min a-min :a-max a-max :b-seq b-seq :b-edges ((juxt first last) b-seq) :b-min b-min :b-max b-max :min (min a-min b-min) :max (max a-max b-max)}))))) (partition-by (fn [[_ a' b']] (comp-fn a' b')) (map vector (range) a b)))) #_(overlap-seq [1 2 3 4 5 6 5 4 3 2 1] [6 5 4 3 2 1 2 3 4 5 6]) ;; => ({:t 3, :length 5}) (defn take-sample "Take a sample of sample-millis from a time series. :from denotes the period of xs" [xs sample-millis & {:keys [from] :or {from :millis}}] (take (quot sample-millis (case from :millis 1 :seconds 1000 :minutes 60000 :hours 3600000 :days 86400000)) xs)) (defn- local-seq-as [xs zone as] (map (fn [stamp] (t/as (t/local-date-time stamp zone) as)) xs)) (defn time-seqs "Given a t-zero (simulation start), an upper bound of sample-n milliseconds and an optional local timezone, return a map of useful lazy time sequences." 
[& {:keys [t-zero sample-n ^java.time.ZoneRegion zone] :or {t-zero 0 zone ^java.time.ZoneRegion (t/zone-id "UTC")}}] (let [t-seq (if sample-n (range t-zero sample-n) (range)) r-partial (if sample-n (fn [step] (take (quot sample-n step) (range t-zero Long/MAX_VALUE step))) (partial range t-zero Long/MAX_VALUE)) ;; Primary ;; sec-seq (r-partial 1000) min-seq (r-partial 60000) ;; hour-seq (r-partial 3600000) ;; week-seq (r-partial 604800000) ;; day-seq (r-partial 86400000) ;; secondary/local ;; moh-seq (local-seq-as min-seq ;; zone ;; :minute-of-hour) mod-seq (local-seq-as min-seq zone :minute-of-day) day-night-seq (map (comp #(Math/cos ^Double %) #(double (* 2 Math/PI (/ % 86400000))) (partial * 60000)) mod-seq) ;; hod-seq (local-seq-as hour-seq ;; zone ;; :hour-of-day) ;; dow-seq (local-seq-as day-seq ;; zone ;; :day-of-week) ;; dom-seq (local-seq-as day-seq ;; zone ;; :day-of-month) ;; doy-seq (local-seq-as day-seq ;; zone ;; :day-of-year) ] {; :t-seq t-seq ; :sec-seq sec-seq :min-seq min-seq ;:hour-seq hour-seq ;:day-seq day-seq ;:week-seq week-seq ;:moh-seq moh-seq :mod-seq mod-seq :day-night-seq day-night-seq ;:hod-seq hod-seq ;:dow-seq dow-seq ;:dom-seq dom-seq ;:doy-seq doy-seq })) (comment (use '(incanter core stats charts io)) (time (let [sim-seed 42 ;; Create a master RNG for the sim. This is used only to generate other seeds ^Random sim-rng (Random. sim-seed) ;; the start of the sim, in ms since epoch t-zero 0;; (System/currentTimeMillis) ;; the amount of time, in MS, this sim covers sample-n (:whole (t/convert-amount 7 :days :millis)) ;; a local timezone timezone (t/zone-id "America/New_York") ;; Build useful time seqs, only get eval'd if used! 
{:keys [week-seq min-seq t-seq doy-seq moh-seq day-seq sec-seq dom-seq hod-seq hour-seq dow-seq mod-seq day-night-seq]} (time-seqs :t-zero t-zero :sample-n sample-n :zone timezone) ;; in our model, a timeseries for a given actor is measured against ;; a composite timeseries representing abstract challenge/diversity. ;; This is a combination of a stochastic series representing ;; unpredictable factors that apply to the group, higher is harder. ;; this series is max'd with a day night cycle (day is easier, night is ;; harder). Think of this combined series as a mask. ;; When an actor's series is greater than the challenge, the difference ;; between the two is the probability (from 0.0 to 1.0) that an event ;; will happen at that time. ;; random stochastic settings can (and probably should) be shared. common-arma {:phi [0.5 0.2] :theta [] :std 0.25 :c 0.0} ;; Generate a seed for the group group-seed (.nextLong sim-rng) ;; create a stochastic seq for the group group-arma (arma-seq (merge common-arma {:seed group-seed})) ;; Create a periodic seq for the lunch hour break lunch-hour-seq (map (fn [x] (if (<= 720 x 780) 1.0 -1.0)) mod-seq) ;; form a mask for the group + day-night + lunch mask (op-seq max [group-arma day-night-seq lunch-hour-seq]) ;; create a seed for Bob's seq bob-arma-seed (.nextLong sim-rng) ;; create a stochastic seq for PI:NAME:<NAME>END_PI bob-arma (arma-seq (merge common-arma {:seed bob-arma-seed})) ;; PI:NAME:<NAME>END_PI's activity probability bob-prob (op-seq (fn [a b] (double (maths/min-max 0.0 (/ (- a b) 2) 1.0))) [bob-arma mask]) ;; to keep it deterministic, give PI:NAME:<NAME>END_PI another seeded RNG to take with him. ^Random bob-rng (Random. (.nextLong sim-rng)) ;; Compose the time (in minute increments), PI:NAME:<NAME>END_PI's probability ;; and his RNG and you have everything you need to generate events for ;; bob. Here the RNG is used to generate a sequence for demonstration, ;; in practice it would get handed off to a thread or some such. 
bob-seq (map (fn [t prob rand-long] {:t t :prob prob :r rand-long}) min-seq bob-prob (rand-seq :val-type :long :rng bob-rng))] (view (time-series-plot (map :t bob-seq) (map :prob bob-seq))) (view (time-series-plot (map :t bob-seq) (map :r bob-seq))))) )
[ { "context": " {:state false} (sut/adding-person? :person/add [\"Piet\"]))))\n (t/testing \":person/remove should do noth", "end": 772, "score": 0.9559964537620544, "start": 768, "tag": "NAME", "value": "Piet" }, { "context": "state false} (sut/adding-person? :person/remove [\"Piet\"] false))))\n )\n\n\n(t/deftest show-adding-person?\n", "end": 896, "score": 0.9163838624954224, "start": 892, "tag": "NAME", "value": "Piet" } ]
test/tfsa/app_state_tests.cljs
SneakyPeet/tax-free-savings-tracker
0
(ns tfsa.app-state-tests (:require [tfsa.app-state :as sut] [cljs.test :as t :include-macros true] [tfsa.reconciler :as reconciler] [tfsa.config :as conf] [citrus.core :as citrus] [tfsa.domain :as domain])) (t/deftest adding-person?-controller (t/testing ":init should return false" (t/is (= {:state false} (sut/adding-person? :init)))) (t/testing ":adding-person/show should return true" (t/is (= {:state true} (sut/adding-person? :adding-person/show)))) (t/testing ":adding-person/hide should return false" (t/is (= {:state false} (sut/adding-person? :adding-person/hide)))) (t/testing ":person/add should return false" (t/is (= {:state false} (sut/adding-person? :person/add ["Piet"])))) (t/testing ":person/remove should do nothing" (t/is (= {:state false} (sut/adding-person? :person/remove ["Piet"] false)))) ) (t/deftest show-adding-person? (let [state (reconciler/make-init)] (t/is (= false @(sut/show-adding-person? state))))) ;;;; Deposit Details (t/deftest deposit-details-controller (t/testing ":init should return initial details" (t/is (= {:state sut/initial-deposit-details} (sut/deposit-details :init)))) (t/testing ":deposit/set-field should set field" (t/is (= {:state (assoc sut/initial-deposit-details :amount 10)} (sut/deposit-details :deposit/set-field [:amount 10] sut/initial-deposit-details)))) (t/testing ":deposit/clear clears only amount and note" (t/is (= {:state (assoc sut/initial-deposit-details :year 2000 :month 1 :day 1)} (sut/deposit-details :deposit/clear nil (assoc sut/initial-deposit-details :year 2000 :month 1 :day 1 :amount 10 :note "")))))) (t/deftest deposit-form-detail (let [state (reconciler/make-init)] (t/is (= sut/initial-deposit-details @(sut/deposit-form-detail state))))) (t/deftest can-deposit? (let [state (reconciler/make-init)] (t/testing "false amount 0" (t/is (false? @(sut/can-deposit? state)))) (t/testing "true amount > 0" (citrus/dispatch-sync! state :deposit-details :deposit/set-field :amount 10) (t/is (true? @(sut/can-deposit? 
state)))) (t/testing "false if date is before tfsa started" (let [state (reconciler/make-init)] (citrus/dispatch-sync! state :deposit-details :deposit/set-field :year conf/first-tfsa-year) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :month 2) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :day 28) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :amount 10) (t/is (false? @(sut/can-deposit? state))))) (t/testing "false if date is in upcomming tax year" (let [state (reconciler/make-init)] (citrus/dispatch-sync! state :deposit-details :deposit/set-field :year (inc (:year (domain/current-tax-year-end-details)))) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :month 3) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :day 1) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :amount 10) (t/is (false? @(sut/can-deposit? state)))))))
101096
(ns tfsa.app-state-tests (:require [tfsa.app-state :as sut] [cljs.test :as t :include-macros true] [tfsa.reconciler :as reconciler] [tfsa.config :as conf] [citrus.core :as citrus] [tfsa.domain :as domain])) (t/deftest adding-person?-controller (t/testing ":init should return false" (t/is (= {:state false} (sut/adding-person? :init)))) (t/testing ":adding-person/show should return true" (t/is (= {:state true} (sut/adding-person? :adding-person/show)))) (t/testing ":adding-person/hide should return false" (t/is (= {:state false} (sut/adding-person? :adding-person/hide)))) (t/testing ":person/add should return false" (t/is (= {:state false} (sut/adding-person? :person/add ["<NAME>"])))) (t/testing ":person/remove should do nothing" (t/is (= {:state false} (sut/adding-person? :person/remove ["<NAME>"] false)))) ) (t/deftest show-adding-person? (let [state (reconciler/make-init)] (t/is (= false @(sut/show-adding-person? state))))) ;;;; Deposit Details (t/deftest deposit-details-controller (t/testing ":init should return initial details" (t/is (= {:state sut/initial-deposit-details} (sut/deposit-details :init)))) (t/testing ":deposit/set-field should set field" (t/is (= {:state (assoc sut/initial-deposit-details :amount 10)} (sut/deposit-details :deposit/set-field [:amount 10] sut/initial-deposit-details)))) (t/testing ":deposit/clear clears only amount and note" (t/is (= {:state (assoc sut/initial-deposit-details :year 2000 :month 1 :day 1)} (sut/deposit-details :deposit/clear nil (assoc sut/initial-deposit-details :year 2000 :month 1 :day 1 :amount 10 :note "")))))) (t/deftest deposit-form-detail (let [state (reconciler/make-init)] (t/is (= sut/initial-deposit-details @(sut/deposit-form-detail state))))) (t/deftest can-deposit? (let [state (reconciler/make-init)] (t/testing "false amount 0" (t/is (false? @(sut/can-deposit? state)))) (t/testing "true amount > 0" (citrus/dispatch-sync! state :deposit-details :deposit/set-field :amount 10) (t/is (true? @(sut/can-deposit? 
state)))) (t/testing "false if date is before tfsa started" (let [state (reconciler/make-init)] (citrus/dispatch-sync! state :deposit-details :deposit/set-field :year conf/first-tfsa-year) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :month 2) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :day 28) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :amount 10) (t/is (false? @(sut/can-deposit? state))))) (t/testing "false if date is in upcomming tax year" (let [state (reconciler/make-init)] (citrus/dispatch-sync! state :deposit-details :deposit/set-field :year (inc (:year (domain/current-tax-year-end-details)))) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :month 3) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :day 1) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :amount 10) (t/is (false? @(sut/can-deposit? state)))))))
true
(ns tfsa.app-state-tests (:require [tfsa.app-state :as sut] [cljs.test :as t :include-macros true] [tfsa.reconciler :as reconciler] [tfsa.config :as conf] [citrus.core :as citrus] [tfsa.domain :as domain])) (t/deftest adding-person?-controller (t/testing ":init should return false" (t/is (= {:state false} (sut/adding-person? :init)))) (t/testing ":adding-person/show should return true" (t/is (= {:state true} (sut/adding-person? :adding-person/show)))) (t/testing ":adding-person/hide should return false" (t/is (= {:state false} (sut/adding-person? :adding-person/hide)))) (t/testing ":person/add should return false" (t/is (= {:state false} (sut/adding-person? :person/add ["PI:NAME:<NAME>END_PI"])))) (t/testing ":person/remove should do nothing" (t/is (= {:state false} (sut/adding-person? :person/remove ["PI:NAME:<NAME>END_PI"] false)))) ) (t/deftest show-adding-person? (let [state (reconciler/make-init)] (t/is (= false @(sut/show-adding-person? state))))) ;;;; Deposit Details (t/deftest deposit-details-controller (t/testing ":init should return initial details" (t/is (= {:state sut/initial-deposit-details} (sut/deposit-details :init)))) (t/testing ":deposit/set-field should set field" (t/is (= {:state (assoc sut/initial-deposit-details :amount 10)} (sut/deposit-details :deposit/set-field [:amount 10] sut/initial-deposit-details)))) (t/testing ":deposit/clear clears only amount and note" (t/is (= {:state (assoc sut/initial-deposit-details :year 2000 :month 1 :day 1)} (sut/deposit-details :deposit/clear nil (assoc sut/initial-deposit-details :year 2000 :month 1 :day 1 :amount 10 :note "")))))) (t/deftest deposit-form-detail (let [state (reconciler/make-init)] (t/is (= sut/initial-deposit-details @(sut/deposit-form-detail state))))) (t/deftest can-deposit? (let [state (reconciler/make-init)] (t/testing "false amount 0" (t/is (false? @(sut/can-deposit? state)))) (t/testing "true amount > 0" (citrus/dispatch-sync! 
state :deposit-details :deposit/set-field :amount 10) (t/is (true? @(sut/can-deposit? state)))) (t/testing "false if date is before tfsa started" (let [state (reconciler/make-init)] (citrus/dispatch-sync! state :deposit-details :deposit/set-field :year conf/first-tfsa-year) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :month 2) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :day 28) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :amount 10) (t/is (false? @(sut/can-deposit? state))))) (t/testing "false if date is in upcomming tax year" (let [state (reconciler/make-init)] (citrus/dispatch-sync! state :deposit-details :deposit/set-field :year (inc (:year (domain/current-tax-year-end-details)))) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :month 3) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :day 1) (citrus/dispatch-sync! state :deposit-details :deposit/set-field :amount 10) (t/is (false? @(sut/can-deposit? state)))))))
[ { "context": "omponent\"\n (let [component-tree (person :tony \"Tony\" [(phone-number 1 \"555-1212\") (phone-number 2 \"12", "end": 12056, "score": 0.9967016577720642, "start": 12052, "tag": "NAME", "value": "Tony" }, { "context": "er 2 \"123-4555\")])\n sally {:id :sally :name \"Sally\" :numbers [[:phone/id 3]]}\n p", "end": 12153, "score": 0.8534520268440247, "start": 12148, "tag": "NAME", "value": "sally" }, { "context": "5\")])\n sally {:id :sally :name \"Sally\" :numbers [[:phone/id 3]]}\n phone-3 ", "end": 12166, "score": 0.9992788434028625, "start": 12161, "tag": "NAME", "value": "Sally" }, { "context": "-3}\n :person/id {:sally sally}}\n new-state-map (merge/merge-component", "end": 12404, "score": 0.718518853187561, "start": 12400, "tag": "NAME", "value": "ally" }, { "context": "on component-tree)\n expected-person {:id :tony :name \"Tony\" :numbers [[:phone/id 1] [:phone/id ", "end": 12523, "score": 0.7471346855163574, "start": 12520, "tag": "USERNAME", "value": "ton" }, { "context": "component-tree)\n expected-person {:id :tony :name \"Tony\" :numbers [[:phone/id 1] [:phone/id 2", "end": 12524, "score": 0.6118338108062744, "start": 12523, "tag": "NAME", "value": "y" }, { "context": "-tree)\n expected-person {:id :tony :name \"Tony\" :numbers [[:phone/id 1] [:phone/id 2]]}\n ", "end": 12536, "score": 0.9988735318183899, "start": 12532, "tag": "NAME", "value": "Tony" }, { "context": "ge-component\n {:person/id {1 {:id 1 :name \"Joe\" :numbers [[:phone/id 1]]}}}\n MPerson {:id", "end": 13377, "score": 0.9998199939727783, "start": 13374, "tag": "NAME", "value": "Joe" }, { "context": "ge-component\n {:person/id {1 {:id 1 :name \"Joe\" :numbers [[:phone/id 1]]}}}\n MPerson {:id", "end": 13629, "score": 0.9998007416725159, "start": 13626, "tag": "NAME", "value": "Joe" }, { "context": "umbers []})\n => {:person/id {1 {:id 1 :name \"Joe\" :numbers []}}}))\n\n (assertions\n \"Can merge b", "end": 13737, "score": 0.9997960925102234, "start": 13734, "tag": "NAME", 
"value": "Joe" }, { "context": "merge/merge-component {} MPersonPM (person :mary \"Mary\" [(phone-number 55 \"98765-4321\")]))\n => {:pers", "end": 13900, "score": 0.9992210268974304, "start": 13896, "tag": "NAME", "value": "Mary" }, { "context": "d :mary\n :name \"Mary\"\n :numbers [[:phone/id ", "end": 14020, "score": 0.9998102188110352, "start": 14016, "tag": "NAME", "value": "Mary" }, { "context": "\"\n (merge/merge-component {} MPersonPM {:name \"Mary2\" :numbers [{:number \"98765-4321\"}]}\n :replac", "end": 14335, "score": 0.9950382709503174, "start": 14330, "tag": "NAME", "value": "Mary2" }, { "context": " :name \"Mary2\"\n :number", "end": 14574, "score": 0.9996887445449829, "start": 14570, "tag": "NAME", "value": "Mary" }, { "context": "merge/merge-component {} MPersonPM (person :mary \"Mary\" [(phone-number 55 \"98765-4321\")]) :replace [:mai", "end": 16207, "score": 0.9995577335357666, "start": 16203, "tag": "NAME", "value": "Mary" }, { "context": "n-person])\n => {:person/id {:mary {:id :mary\n :name \"Mary\"\n ", "end": 16311, "score": 0.8830675482749939, "start": 16307, "tag": "NAME", "value": "mary" }, { "context": " :mary\n :name \"Mary\"\n :numbers [[:phone/i", "end": 16355, "score": 0.9996823072433472, "start": 16351, "tag": "NAME", "value": "Mary" }, { "context": "tree {'some-mutation {:user/id 1 :user/name \"Joe\"}}\n query [{(list 'some-mutation {:x", "end": 20954, "score": 0.9997562766075134, "start": 20951, "tag": "NAME", "value": "Joe" }, { "context": "new-state => {:user/id {1 {:user/id 1 :user/name \"Joe\"}}})))\n (behavior \"Does pre-merge processing\"\n ", "end": 21179, "score": 0.9997487664222717, "start": 21176, "tag": "NAME", "value": "Joe" }, { "context": "tree {'some-mutation {:user/id 1 :user/name \"Joe\"}}\n query [{(list 'some-mutation {:x", "end": 21313, "score": 0.9997637867927551, "start": 21310, "tag": "NAME", "value": "Joe" }, { "context": "r/id {1 {:ui/visible? 
true :user/id 1 :user/name \"Joe\"}}})))\n (behavior \"Does data targeting based on ", "end": 21558, "score": 0.9997789859771729, "start": 21555, "tag": "NAME", "value": "Joe" }, { "context": "tree {'some-mutation {:user/id 1 :user/name \"Joe\"}}\n query [{(list 'some-mutation {:x", "end": 21712, "score": 0.999753475189209, "start": 21709, "tag": "NAME", "value": "Joe" }, { "context": " :user/id {2 {:user/id 2 :user/name \"Joe\"}}})))\n (behavior \"Targeting on pre-merge overwr", "end": 22085, "score": 0.9997567534446716, "start": 22082, "tag": "NAME", "value": "Joe" }, { "context": "t [state {:user/id {1 {:user/id 1 :user/name \"Tom\"}}}\n tree {'some-mutation {:user/id", "end": 22222, "score": 0.9997240900993347, "start": 22219, "tag": "NAME", "value": "Tom" }, { "context": "tree {'some-mutation {:user/id 1 :user/name \"Joe\"}}\n query [{(list 'some-mutation {:x", "end": 22290, "score": 0.999728798866272, "start": 22287, "tag": "NAME", "value": "Joe" }, { "context": " :user/id {1 {:user/id 1 :user/name \"Tom\"}\n 2 {:user/id 2 :", "end": 22663, "score": 0.9996739625930786, "start": 22660, "tag": "NAME", "value": "Tom" }, { "context": " 2 {:user/id 2 :user/name \"Joe\"}}}))))\n\n(specification \"mark-missing\"\n (behavio", "end": 22727, "score": 0.9997842907905579, "start": 22724, "tag": "NAME", "value": "Joe" } ]
src/test/com/fulcrologic/fulcro/algorithms/merge_spec.cljc
mruzekw/fulcro
1,312
(ns com.fulcrologic.fulcro.algorithms.merge-spec (:require [clojure.test :refer [deftest are]] [com.fulcrologic.fulcro.components :as comp :refer [defsc]] [fulcro-spec.core :refer [assertions specification component when-mocking behavior provided]] [com.fulcrologic.fulcro.algorithms.merge :as merge] [com.fulcrologic.fulcro.algorithms.denormalize :as fdn] [com.fulcrologic.fulcro.algorithms.normalize :as fnorm] [com.fulcrologic.fulcro.algorithms.do-not-use :as util] [com.fulcrologic.fulcro.application :as app] [taoensso.timbre :as log] [com.fulcrologic.fulcro.algorithms.data-targeting :as targeting] [com.fulcrologic.fulcro.algorithms.scheduling :as sched])) (declare =>) (defsc MMChild [_ _] {:query [:db/id] :initial-state {:db/id :param/id}}) (defsc MMParent [_ _] {:query [:db/id {:main-child (comp/get-query MMChild)} {:children (comp/get-query MMChild)}] :initial-state {:db/id :param/id :main-child :param/main :children :param/children}}) (specification "Mixed Mode Initial State" (component "defsc components that use template initial state" (assertions "Accept maps of child parameters and automatically construct children from them" (comp/get-initial-state MMParent {:id 1 :main {:id 1} :children [{:id 1}]}) => {:db/id 1 :main-child {:db/id 1} :children [{:db/id 1}]} "Allow to-one children to be initialized directly with a call to get-initial-state" (comp/get-initial-state MMParent {:id 1 :main (comp/get-initial-state MMChild {:id 1}) :children [{:id 1}]}) => {:db/id 1 :main-child {:db/id 1} :children [{:db/id 1}]} "Allow to-many children to be initialized directly with calls to get-initial-state" (comp/get-initial-state MMParent {:id 1 :main {:id 1} :children [(comp/get-initial-state MMChild {:id 1})]}) => {:db/id 1 :main-child {:db/id 1} :children [{:db/id 1}]} "Allow to-many children to be initialized with a mix of maps and calls to get-initial-state" (comp/get-initial-state MMParent {:id 1 :main {:id 1} :children [{:id 3} (comp/get-initial-state MMChild {:id 
1})]}) => {:db/id 1 :main-child {:db/id 1} :children [{:db/id 3} {:db/id 1}]}))) (defsc MergeX [_ _] {:initial-state (fn [params] {:type :x :n :x}) :query (fn [] [:n :type])}) (defsc MergeY [_ _] {:initial-state (fn [params] {:type :y :n :y}) :query (fn [] [:n :type])}) (defsc MergeAChild [_ _] {:initial-state (fn [params] {:child :merge-a}) :ident (fn [] [:mergea :child]) :query (fn [] [:child])}) (defsc MergeA [_ _] {:initial-state (fn [params] {:type :a :n :a :child (comp/get-initial-state MergeAChild nil)}) :query (fn [] [:type :n {:child (comp/get-query MergeAChild)}])}) (defsc MergeB [_ _] {:initial-state (fn [params] {:type :b :n :b}) :query (fn [] [:n])}) (defsc MergeUnion [_ _] {:initial-state (fn [params] (comp/get-initial-state MergeA {})) :ident (fn [] [:mergea-or-b :at-union]) :query (fn [] {:a (comp/get-query MergeA) :b (comp/get-query MergeB)})}) (defsc MergeRoot [_ _] {:initial-state (fn [params] {:a 1 :b (comp/get-initial-state MergeUnion {})}) :query (fn [] [:a {:b (comp/get-query MergeUnion)}])}) (defsc U2 [_ _] {:initial-state (fn [params] (comp/get-initial-state MergeX {})) :query (fn [] {:x (comp/get-query MergeX) :y (comp/get-query MergeY)})}) ;; Nested routing tree ;; NestedRoot ;; | ;; U1 ;; / B A = MergeRoot B = MergeB ;; R2 ;; U2 A2 ;; X Y (defsc R2 [_ _] {:initial-state (fn [params] {:id 1 :u2 (comp/get-initial-state U2 {})}) :query (fn [] [:id {:u2 (comp/get-query U2)}])}) (defsc U1 [_ _] {:initial-state (fn [params] (comp/get-initial-state MergeB {})) :query (fn [] {:r2 (comp/get-query R2) :b (comp/get-query MergeB)})}) (defsc NestedRoot [_ _] {:initial-state (fn [params] {:u1 (comp/get-initial-state U1 {})}) :query (fn [] [{:u1 (comp/get-query U1)}])}) (defsc SU1 [_ props] {:initial-state (fn [params] (comp/get-initial-state MergeB {})) :ident (fn [] [(:type props) 1]) :query (fn [] {:a (comp/get-query MergeA) :b (comp/get-query MergeB)})}) ;; Sibling routing tree ;; SiblingRoot ;; | \ ;; SU1 SU2 ;; A B X Y (defsc SU2 [_ props] 
{:initial-state (fn [params] (comp/get-initial-state MergeX {})) :ident (fn [] [(:type props) 2]) :query (fn [] {:x (comp/get-query MergeX) :y (comp/get-query MergeY)})}) (defsc SiblingRoot [_ _] {:initial-state (fn [params] {:su1 (comp/get-initial-state SU1 {}) :su2 (comp/get-initial-state SU2 {})}) :query (fn [] [{:su1 (comp/get-query SU1)} {:su2 (comp/get-query SU2)}])}) (specification "merge-alternate-union-elements!" (behavior "For applications with sibling unions" (when-mocking (merge/merge-component! app comp state) =1x=> (do (assertions "Merges level one elements" comp => SU1 state => (comp/get-initial-state MergeA {}))) (merge/merge-component! app comp state) =1x=> (do (assertions "Merges only the state of branches that are not already initialized" comp => SU2 state => (comp/get-initial-state MergeY {}))) (merge/merge-alternate-union-elements! :app SiblingRoot))) (behavior "For applications with nested unions" (when-mocking (merge/merge-component! app comp state) =1x=> (do (assertions "Merges level one elements" comp => U1 state => (comp/get-initial-state R2 {}))) (merge/merge-component! app comp state) =1x=> (do (assertions "Merges only the state of branches that are not already initialized" comp => U2 state => (comp/get-initial-state MergeY {}))) (merge/merge-alternate-union-elements! :app NestedRoot))) (behavior "For applications with non-nested unions" (let [app (app/fulcro-app)] (when-mocking (merge/merge-component! app comp state) => (do (assertions "Merges only the state of branches that are not already initialized" comp => MergeUnion state => (comp/get-initial-state MergeB {}))) (merge/merge-alternate-union-elements! 
app MergeRoot))))) (defn phone-number [id n] {:id id :number n}) (defn person [id name numbers] {:id id :name name :numbers numbers}) (defsc MPhone [_ props] {:query (fn [] [:id :number]) :ident (fn [] [:phone/id (:id props)])}) (defsc MPerson [_ props] {:query (fn [] [:id :name {:numbers (comp/get-query MPhone)}]) :ident (fn [] [:person/id (:id props)])}) (defsc MPhonePM [_ _] {:ident [:phone/id :id] :query [:id :number] :pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {:id :sample-phone-id :ui/initial-flag :start} current-normalized data-tree))}) (defsc MPersonPM [_ props] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {:id :sample-person-id} current-normalized data-tree)) :ident [:person/id :id] :query [:id :name {:numbers (comp/get-query MPhonePM)}]}) (defsc Score [_ {::keys []}] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {:ui/expanded? false} current-normalized data-tree)) :ident [::score-id ::score-id] :query [::score-id ::points :ui/expanded?]}) (defsc Scoreboard [_ props] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (let [{::keys [scores]} data-tree high-score (apply max (map ::points scores)) scores (mapv (fn [{::keys [points] :as score}] (assoc score :ui/expanded? (= points high-score))) scores)] (merge current-normalized (assoc data-tree ::scores scores)))) :ident [::scoreboard-id ::scoreboard-id] :query [::scoreboard-id {::scores (comp/get-query Score)}]} "") (defonce id-counter (atom 0)) (defsc UiItem [_ _] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {::id (swap! 
id-counter inc)} current-normalized data-tree)) :ident [::id ::id] :query [::id ::title]}) (defsc UiLoadedItem [_ _] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {:ui/item {}} current-normalized data-tree)) :ident [::loaded-id ::loaded-id] :query [::loaded-id ::name {:ui/item (comp/get-query UiItem)}]}) (defsc UiCollectionHolder [_ _] {:ident [::col-id ::col-id] :query [::col-id {::load-items (comp/get-query UiLoadedItem)}]}) (defsc UiPreMergePlaceholderChild [_ _] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (let [id (or (:id data-tree) (:id current-normalized) #?(:clj (java.util.UUID/randomUUID) :cljs (random-uuid)))] (merge {:id id} current-normalized data-tree))) :ident :id :query [:id :child/value]}) (defsc UiPreMergePlaceholderRoot [_ _] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (let [id (or (:id data-tree) (:id current-normalized) #?(:clj (java.util.UUID/randomUUID) :cljs (random-uuid)))] (merge {:child/value 321 :id id :child {:id id}} current-normalized data-tree))) :ident :id :query [:id {:child (comp/get-query UiPreMergePlaceholderChild)}]}) (specification "merge*" (assertions "keep data defined by root merge component." (merge/merge* {} [{[:id 42] (comp/get-query UiPreMergePlaceholderRoot)}] {[:id 42] {:id 42}} {:remove-missing? true}) => {:id {42 {:id 42 :child/value 321 :child [:id 42]}}} "merge parent and children new data" (merge/merge* {} [{[:id 42] (comp/get-query UiPreMergePlaceholderRoot)}] {[:id 42] {:id 42 :child {:id 42 :child/value 123}}} {:remove-missing? 
true}) => {:id {42 {:id 42 :child [:id 42] :child/value 123}}})) (specification "merge-component" (let [component-tree (person :tony "Tony" [(phone-number 1 "555-1212") (phone-number 2 "123-4555")]) sally {:id :sally :name "Sally" :numbers [[:phone/id 3]]} phone-3 {:id 3 :number "111-2222"} state-map {:people [[:person/id :sally]] :phone/id {3 phone-3} :person/id {:sally sally}} new-state-map (merge/merge-component state-map MPerson component-tree) expected-person {:id :tony :name "Tony" :numbers [[:phone/id 1] [:phone/id 2]]} expected-phone-1 {:id 1 :number "555-1212"} expected-phone-2 {:id 2 :number "123-4555"}] (assertions "merges the top-level component with normalized links to children" (get-in new-state-map [:person/id :tony]) => expected-person "merges the normalized children" (get-in new-state-map [:phone/id 1]) => expected-phone-1 (get-in new-state-map [:phone/id 2]) => expected-phone-2 "leaves the original state untouched" (contains? new-state-map :people) => true (get-in new-state-map [:person/id :sally]) => sally (get-in new-state-map [:phone/id 3]) => phone-3 "honors sweep-merge (overwrites data that is in query but did not appear in result) if asked" (merge/merge-component {:person/id {1 {:id 1 :name "Joe" :numbers [[:phone/id 1]]}}} MPerson {:id 1 :numbers []} :remove-missing? 
true) => {:person/id {1 {:id 1 :numbers []}}} "Prevents sweep-merge by default" (merge/merge-component {:person/id {1 {:id 1 :name "Joe" :numbers [[:phone/id 1]]}}} MPerson {:id 1 :numbers []}) => {:person/id {1 {:id 1 :name "Joe" :numbers []}}})) (assertions "Can merge basic data into the database (pre-merge not override)" (merge/merge-component {} MPersonPM (person :mary "Mary" [(phone-number 55 "98765-4321")])) => {:person/id {:mary {:id :mary :name "Mary" :numbers [[:phone/id 55]]}} :phone/id {55 {:id 55 :number "98765-4321" :ui/initial-flag :start}}} "Can assign IDs to primary entities via pre-merge" (merge/merge-component {} MPersonPM {:name "Mary2" :numbers [{:number "98765-4321"}]} :replace [:global-ref]) => {:global-ref [:person/id :sample-person-id] :person/id {:sample-person-id {:id :sample-person-id :name "Mary2" :numbers [[:phone/id :sample-phone-id]]}} :phone/id {:sample-phone-id {:id :sample-phone-id :number "98765-4321" :ui/initial-flag :start}}} "can merge nested to-many items and apply pre-merge" (merge/merge-component {} Scoreboard {::scoreboard-id 123 ::scores [{::score-id 1 ::points 4} {::score-id 2 ::points 8} {::score-id 3 ::points 7}]}) => {::scoreboard-id {123 {::scoreboard-id 123 ::scores [[::score-id 1] [::score-id 2] [::score-id 3]]}} ::score-id {1 {::score-id 1 ::points 4 :ui/expanded? false} 2 {::score-id 2 ::points 8 :ui/expanded? true} 3 {::score-id 3 ::points 7 :ui/expanded? false}}} "can place ident via replace named parameter with pre-merge" (merge/merge-component {} MPersonPM (person :mary "Mary" [(phone-number 55 "98765-4321")]) :replace [:main-person]) => {:person/id {:mary {:id :mary :name "Mary" :numbers [[:phone/id 55]]}} :phone/id {55 {:id 55 :number "98765-4321" :ui/initial-flag :start}} :main-person [:person/id :mary]} "pre-merge step can assign an id to generated sub-elements (to-one)" (do (reset! 
id-counter 0) (merge/merge-component {} UiLoadedItem {::loaded-id 1 ::name "a"})) => {::loaded-id {1 {::loaded-id 1 ::name "a" :ui/item [::id 1]}} ::id {1 {::id 1}}} "pre-merge step can assign an id to generated sub-elements (to-many)" (do (reset! id-counter 0) (merge/merge-component {} UiCollectionHolder {::col-id 123 ::load-items [{::loaded-id 1 ::name "a"} {::loaded-id 2 ::name "b"}]})) => {::col-id {123 {::col-id 123 ::load-items [[::loaded-id 1] [::loaded-id 2]]}} ::loaded-id {1 {::loaded-id 1 ::name "a" :ui/item [::id 1]} 2 {::loaded-id 2 ::name "b" :ui/item [::id 2]}} ::id {1 {::id 1} 2 {::id 2}}})) (specification "merge-component!" (let [rendered (atom false) ;; needed because mocking cannot mock something you've closed over already app (assoc-in (app/fulcro-app {}) [::app/algorithms :com.fulcrologic.fulcro.algorithm/schedule-render!] (fn [& args] (reset! rendered true)))] (when-mocking (merge/merge-component s c d & np) => (do (assertions "calls merge-component with the component and data" c => MPerson d => {} "includes the correct named parameters" np => [:replace [:x]])) (merge/merge-component! 
app MPerson {} :replace [:x]) (assertions "schedules a render" @rendered => true)))) (def table-1 {:type :table :id 1 :rows [1 2 3]}) (defsc Table [_ _] {:initial-state (fn [p] table-1) :query (fn [] [:type :id :rows])}) (def graph-1 {:type :graph :id 1 :data [1 2 3]}) (defsc Graph [_ _] {:initial-state (fn [p] graph-1) :query (fn [] [:type :id :data])}) (defsc Reports [_ props] {:initial-state (fn [p] (comp/get-initial-state Graph nil)) ; initial state will already include Graph :ident (fn [] [(:type props) (:id props)]) :query (fn [] {:graph (comp/get-query Graph) :table (comp/get-query Table)})}) (defsc MRRoot [_ _] {:initial-state (fn [p] {:reports (comp/get-initial-state Reports nil)}) :query (fn [] [{:reports (comp/get-query Reports)}])}) (specification "merge-alternate-union-elements" (let [initial-state (merge (comp/get-initial-state MRRoot nil) {:a 1}) state-map (fnorm/tree->db MRRoot initial-state true) new-state (merge/merge-alternate-union-elements state-map MRRoot)] (assertions "can be used to merge alternate union elements to raw state" (get-in new-state [:table 1]) => table-1 "(existing state isn't touched)" (get new-state :a) => 1 (get new-state :reports) => [:graph 1] (get-in new-state [:graph 1]) => graph-1))) (defsc User [_ _] {:query [:user/id :user/name] :ident :user/id}) (defsc UserPM [_ _] {:query [:user/id :user/name] :pre-merge (fn [{:keys [data-tree current-normalized state-map]}] (merge {:ui/visible? 
true} current-normalized data-tree)) :ident :user/id}) (defsc UserPMT [_ _] {:query [:user/id :user/name] :pre-merge (fn [{:keys [data-tree current-normalized]}] ;; rewriting the ID to verify that targeting uses the correct (altered) ident (merge current-normalized data-tree {:user/id 2})) :ident :user/id}) (specification "merge-mutation-joins" :focus (behavior "Merges basic return values into app state" (let [state {} tree {'some-mutation {:user/id 1 :user/name "Joe"}} query [{(list 'some-mutation {:x 1}) (comp/get-query User)}] new-state (merge/merge-mutation-joins state query tree)] (assertions new-state => {:user/id {1 {:user/id 1 :user/name "Joe"}}}))) (behavior "Does pre-merge processing" (let [state {} tree {'some-mutation {:user/id 1 :user/name "Joe"}} query [{(list 'some-mutation {:x 1}) (comp/get-query UserPM)}] new-state (merge/merge-mutation-joins state query tree)] (assertions new-state => {:user/id {1 {:ui/visible? true :user/id 1 :user/name "Joe"}}}))) (behavior "Does data targeting based on pre-merge result" (let [state {} tree {'some-mutation {:user/id 1 :user/name "Joe"}} query [{(list 'some-mutation {:x 1}) (vary-meta (comp/get-query UserPMT) assoc ::targeting/target [:top-key])}] new-state (merge/merge-mutation-joins state query tree)] (assertions new-state => {:top-key [:user/id 2] :user/id {2 {:user/id 2 :user/name "Joe"}}}))) (behavior "Targeting on pre-merge overwrite with id re-assignment" (let [state {:user/id {1 {:user/id 1 :user/name "Tom"}}} tree {'some-mutation {:user/id 1 :user/name "Joe"}} query [{(list 'some-mutation {:x 1}) (vary-meta (comp/get-query UserPMT) assoc ::targeting/target [:top-key])}] new-state (merge/merge-mutation-joins state query tree)] (assertions new-state => {:top-key [:user/id 2] :user/id {1 {:user/id 1 :user/name "Tom"} 2 {:user/id 2 :user/name "Joe"}}})))) (specification "mark-missing" (behavior "correctly marks missing properties" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result 
query)) [:a :b] {:a 1} {:a 1 :b ::merge/not-found})) (behavior "joins -> one" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [:a {:b [:c]}] {:a 1} {:a 1 :b ::merge/not-found} [{:b [:c]}] {:b {}} {:b {:c ::merge/not-found}} [{:b [:c]}] {:b {:c 0}} {:b {:c 0}} [{:b [:c :d]}] {:b {:c 1}} {:b {:c 1 :d ::merge/not-found}})) (behavior "join -> many" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [{:a [:b :c]}] {:a [{:b 1 :c 2} {:b 1}]} {:a [{:b 1 :c 2} {:b 1 :c ::merge/not-found}]})) (behavior "idents and ident joins" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [{[:a 1] [:x]}] {[:a 1] {}} {[:a 1] {:x ::merge/not-found}} [{[:b 1] [:x]}] {[:b 1] {:x 2}} {[:b 1] {:x 2}} [{[:c 1] [:x]}] {} {[:c 1] {:x ::merge/not-found}} [{[:c 1] ['*]}] {} {[:c 1] {}} [{[:e 1] [:x :y :z]}] {[:e 1] {}} {[:e 1] {:x ::merge/not-found :y ::merge/not-found :z ::merge/not-found}} [[:d 1]] {} {[:d 1] {}})) (behavior "Ignores root link idents" (assertions "when the subquery exists" (merge/mark-missing {} [{[:a '_] [:x]}]) => {} "when it is a pure link" (merge/mark-missing {} [[:a '_]]) => {})) (behavior "parameterized" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) '[:z (:y {})] {:z 1} {:z 1 :y ::merge/not-found} '[:z (:y {})] {:z 1 :y 0} {:z 1 :y 0} '[:z ({:y [:x]} {})] {:z 1 :y {}} {:z 1 :y {:x ::merge/not-found}})) (behavior "nested" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [{:b [:c {:d [:e]}]}] {:b {:c 1}} {:b {:c 1 :d ::merge/not-found}} [{:b [:c {:d [:e]}]}] {:b {:c 1 :d {}}} {:b {:c 1 :d {:e ::merge/not-found}}})) (behavior "upgrades value to maps if necessary" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [{:l [:m]}] {:l 0} {:l {:m ::merge/not-found}} [{:b [:c]}] {:b nil} {:b {:c ::merge/not-found}})) (behavior "unions" (assertions "singletons" (merge/mark-missing 
{:j {:c {}}} [{:j {:a [:c] :b [:d]}}]) => {:j {:c {} :d ::merge/not-found}} "singleton with no result" (merge/mark-missing {} [{:j {:a [:c] :b [:d]}}]) => {:j ::merge/not-found} "list to-many with 1" (merge/mark-missing {:j [{:c "c"}]} [{:j {:a [:c] :b [:d]}}]) => {:j [{:c "c" :d ::merge/not-found}]} "list to-many with 2" (merge/mark-missing {:items [{:id 0 :image "img1"} {:id 1 :text "text1"}]} [{:items {:photo [:id :image] :text [:id :text]}}]) => {:items [{:id 0 :image "img1" :text ::merge/not-found} {:id 1 :image ::merge/not-found :text "text1"}]} "list to-many with no results" (merge/mark-missing {:j []} [{:j {:a [:c] :b [:d]}}]) => {:j []})) (behavior "if the query has a ui.*/ attribute, it should not be marked as missing" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [:a :ui/b :c] {:a {} :c {}} {:a {} :c {}} [{:j [:ui/b :c]}] {:j {:c 5}} {:j {:c 5}} [{:j [{:ui/b [:d]} :c]}] {:j {:c 5}} {:j {:c 5}})) (behavior "mutations!" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) '[(f) {:j [:a]}] {'f {} :j {}} {'f {} :j {:a ::merge/not-found}} '[(app/add-q {:p 1}) {:j1 [:p1]} {:j2 [:p2]}] {'app/add-q {:tempids {}} :j1 {} :j2 [{:p2 2} {}]} {'app/add-q {:tempids {}} :j1 {:p1 ::merge/not-found} :j2 [{:p2 2} {:p2 ::merge/not-found}]})) (behavior "correctly walks recursive queries to mark missing data" (behavior "when the recursive target is a singleton" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [:a {:b '...}] {:a 1 :b {:a 2}} {:a 1 :b {:a 2 :b ::merge/not-found}} [:a {:b '...}] {:a 1 :b {:a 2 :b {:a 3}}} {:a 1 :b {:a 2 :b {:a 3 :b ::merge/not-found}}} [:a {:b 9}] {:a 1 :b {:a 2 :b {:a 3 :b {:a 4}}}} {:a 1 :b {:a 2 :b {:a 3 :b {:a 4 :b ::merge/not-found}}}})) (behavior "when the recursive target is to-many" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [:a {:b '...}] {:a 1 :b [{:a 2 :b [{:a 3}]} {:a 4}]} {:a 1 :b [{:a 2 :b 
[{:a 3 :b ::merge/not-found}]} {:a 4 :b ::merge/not-found}]}))) (behavior "marks leaf data based on the query where" (letfn [(has-leaves [leaf-paths] (fn [result] (every? #(#'merge/leaf? (get-in result %)) leaf-paths)))] (assertions "plain data is always a leaf" (merge/mark-missing {:a 1 :b {:x 5}} [:a {:b [:x]}]) =fn=> (has-leaves [[:b :x] [:a] [:missing]]) "data structures are properly marked in singleton results" (merge/mark-missing {:b {:x {:data 1}}} [{:b [:x :y]}]) =fn=> (has-leaves [[:b :x]]) "data structures are properly marked in to-many results" (merge/mark-missing {:b [{:x {:data 1}} {:x {:data 2}}]} [{:b [:x]}]) =fn=> (has-leaves [[:b 0 :x] [:b 1 :x]]) (merge/mark-missing {:b []} [:a {:b [:x]}]) =fn=> (has-leaves [[:b]]) "unions are followed" (merge/mark-missing {:a [{:x {:data 1}} {:y {:data 2}}]} [{:a {:b [:x] :c [:y]}}]) =fn=> (has-leaves [[:a 0 :x] [:a 1 :y]]) "unions leaves data in place when the result is empty" (merge/mark-missing {:a 1} [:a {:z {:b [:x] :c [:y]}}]) =fn=> (has-leaves [[:a]]))))) (specification "Sweep one" (assertions "removes not-found values from maps" (#'merge/sweep-one {:a 1 :b ::merge/not-found}) => {:a 1} "removes tempids from maps" (#'merge/sweep-one {:tempids {3 4}}) => {} "is not recursive" (#'merge/sweep-one {:a 1 :b {:c ::merge/not-found}}) => {:a 1 :b {:c ::merge/not-found}} "maps over vectors not recursive" (#'merge/sweep-one [{:a 1 :b ::merge/not-found}]) => [{:a 1}] "retains metadata" (-> (#'merge/sweep-one (with-meta {:a 1 :b ::merge/not-found} {:meta :data})) meta) => {:meta :data} (-> (#'merge/sweep-one [(with-meta {:a 1 :b ::merge/not-found} {:meta :data})]) first meta) => {:meta :data} (-> (#'merge/sweep-one (with-meta [{:a 1 :b ::merge/not-found}] {:meta :data})) meta) => {:meta :data})) (specification "Sweep merge" (assertions "recursively merges maps" (merge/sweep-merge {:a 1 :c {:b 2}} {:a 2 :c 5}) => {:a 2 :c 5} (merge/sweep-merge {:a 1 :c {:b 2}} {:a 2 :c {:x 1}}) => {:a 2 :c {:b 2 :x 1}} "stops recursive 
merging if the source element is marked as a leaf" (merge/sweep-merge {:a 1 :c {:d {:x 2} :e 4}} {:a 2 :c (#'merge/as-leaf {:d {:x 1}})}) => {:a 2 :c {:d {:x 1}}} "sweeps tempids from maps" (merge/sweep-merge {:a 1 :c {:b 2}} {:a 2 :tempids {} :c {:b ::merge/not-found}}) => {:a 2 :c {}} "Merging into a sub-map should remove the explicitly marked keys" (merge/sweep-merge {:a 1 :c {:x 1 :b 42}} {:a 2 :c ::merge/not-found}) => {:a 2} (merge/sweep-merge {:a 1 :c {:x 1 :b 42}} {:a 2 :c {:b ::merge/not-found}}) => {:a 2 :c {:x 1}} (merge/sweep-merge {:a 1 :c {:x 1 :b 42}} {:a 2 :c {:x ::merge/not-found}}) => {:a 2 :c {:b 42}} "Merging from an empty map should leave the original unmodified" (merge/sweep-merge {:a 1 :c {:x 1 :b 42}} {:a 2 :c {}}) => {:a 2 :c {:x 1 :b 42}} "removes values that are marked as not found" (merge/sweep-merge {:a 1 :c {:b 2}} {:a 2 :c {:b ::merge/not-found}}) => {:a 2 :c {}} (merge/sweep-merge {:a 1 :c 2} {:a 2 :c [{:x 1 :b ::merge/not-found}]}) => {:a 2 :c [{:x 1}]} (merge/sweep-merge {:a 1 :c {:data-fetch :loading}} {:a 2 :c [{:x 1 :b ::merge/not-found}]}) => {:a 2 :c [{:x 1}]} (merge/sweep-merge {:a 1 :c nil} {:a 2 :c [{:x 1 :b ::merge/not-found}]}) => {:a 2 :c [{:x 1}]} (merge/sweep-merge {:a 1 :b {:c {}}} {:a 2 :b {:c [{:x 1 :b ::merge/not-found}]}}) => {:a 2 :b {:c [{:x 1}]}} "clears normalized table entries that has an id of not found" (merge/sweep-merge {:table {1 {:a 2}}} {:table {::merge/not-found {:db/id ::merge/not-found}}}) => {:table {1 {:a 2}}} "clears idents whose ids were not found" (merge/sweep-merge {} {:table {1 {:db/id 1 :the-thing [:table-1 ::merge/not-found]}} :thing [:table-2 ::merge/not-found]}) => {:table {1 {:db/id 1}}} "sweeps not-found values from normalized table merges" (merge/sweep-merge {:subpanel [:dashboard :panel] :dashboard {:panel {:view-mode :detail :surveys {:ui/fetch-state {:post-mutation 's}}}} } {:subpanel [:dashboard :panel] :dashboard {:panel {:view-mode :detail :surveys [[:s 1] [:s 2]]}} :s { 1 
{:db/id 1, :survey/launch-date ::merge/not-found} 2 {:db/id 2, :survey/launch-date "2012-12-22"} }}) => {:subpanel [:dashboard :panel] :dashboard {:panel {:view-mode :detail :surveys [[:s 1] [:s 2]]}} :s { 1 {:db/id 1} 2 {:db/id 2 :survey/launch-date "2012-12-22"} }} "overwrites target (non-map) value if incoming value is a map" (merge/sweep-merge {:a 1 :c 2} {:a 2 :c {:b 1}}) => {:a 2 :c {:b 1}}))
3046
(ns com.fulcrologic.fulcro.algorithms.merge-spec (:require [clojure.test :refer [deftest are]] [com.fulcrologic.fulcro.components :as comp :refer [defsc]] [fulcro-spec.core :refer [assertions specification component when-mocking behavior provided]] [com.fulcrologic.fulcro.algorithms.merge :as merge] [com.fulcrologic.fulcro.algorithms.denormalize :as fdn] [com.fulcrologic.fulcro.algorithms.normalize :as fnorm] [com.fulcrologic.fulcro.algorithms.do-not-use :as util] [com.fulcrologic.fulcro.application :as app] [taoensso.timbre :as log] [com.fulcrologic.fulcro.algorithms.data-targeting :as targeting] [com.fulcrologic.fulcro.algorithms.scheduling :as sched])) (declare =>) (defsc MMChild [_ _] {:query [:db/id] :initial-state {:db/id :param/id}}) (defsc MMParent [_ _] {:query [:db/id {:main-child (comp/get-query MMChild)} {:children (comp/get-query MMChild)}] :initial-state {:db/id :param/id :main-child :param/main :children :param/children}}) (specification "Mixed Mode Initial State" (component "defsc components that use template initial state" (assertions "Accept maps of child parameters and automatically construct children from them" (comp/get-initial-state MMParent {:id 1 :main {:id 1} :children [{:id 1}]}) => {:db/id 1 :main-child {:db/id 1} :children [{:db/id 1}]} "Allow to-one children to be initialized directly with a call to get-initial-state" (comp/get-initial-state MMParent {:id 1 :main (comp/get-initial-state MMChild {:id 1}) :children [{:id 1}]}) => {:db/id 1 :main-child {:db/id 1} :children [{:db/id 1}]} "Allow to-many children to be initialized directly with calls to get-initial-state" (comp/get-initial-state MMParent {:id 1 :main {:id 1} :children [(comp/get-initial-state MMChild {:id 1})]}) => {:db/id 1 :main-child {:db/id 1} :children [{:db/id 1}]} "Allow to-many children to be initialized with a mix of maps and calls to get-initial-state" (comp/get-initial-state MMParent {:id 1 :main {:id 1} :children [{:id 3} (comp/get-initial-state MMChild {:id 
1})]}) => {:db/id 1 :main-child {:db/id 1} :children [{:db/id 3} {:db/id 1}]}))) (defsc MergeX [_ _] {:initial-state (fn [params] {:type :x :n :x}) :query (fn [] [:n :type])}) (defsc MergeY [_ _] {:initial-state (fn [params] {:type :y :n :y}) :query (fn [] [:n :type])}) (defsc MergeAChild [_ _] {:initial-state (fn [params] {:child :merge-a}) :ident (fn [] [:mergea :child]) :query (fn [] [:child])}) (defsc MergeA [_ _] {:initial-state (fn [params] {:type :a :n :a :child (comp/get-initial-state MergeAChild nil)}) :query (fn [] [:type :n {:child (comp/get-query MergeAChild)}])}) (defsc MergeB [_ _] {:initial-state (fn [params] {:type :b :n :b}) :query (fn [] [:n])}) (defsc MergeUnion [_ _] {:initial-state (fn [params] (comp/get-initial-state MergeA {})) :ident (fn [] [:mergea-or-b :at-union]) :query (fn [] {:a (comp/get-query MergeA) :b (comp/get-query MergeB)})}) (defsc MergeRoot [_ _] {:initial-state (fn [params] {:a 1 :b (comp/get-initial-state MergeUnion {})}) :query (fn [] [:a {:b (comp/get-query MergeUnion)}])}) (defsc U2 [_ _] {:initial-state (fn [params] (comp/get-initial-state MergeX {})) :query (fn [] {:x (comp/get-query MergeX) :y (comp/get-query MergeY)})}) ;; Nested routing tree ;; NestedRoot ;; | ;; U1 ;; / B A = MergeRoot B = MergeB ;; R2 ;; U2 A2 ;; X Y (defsc R2 [_ _] {:initial-state (fn [params] {:id 1 :u2 (comp/get-initial-state U2 {})}) :query (fn [] [:id {:u2 (comp/get-query U2)}])}) (defsc U1 [_ _] {:initial-state (fn [params] (comp/get-initial-state MergeB {})) :query (fn [] {:r2 (comp/get-query R2) :b (comp/get-query MergeB)})}) (defsc NestedRoot [_ _] {:initial-state (fn [params] {:u1 (comp/get-initial-state U1 {})}) :query (fn [] [{:u1 (comp/get-query U1)}])}) (defsc SU1 [_ props] {:initial-state (fn [params] (comp/get-initial-state MergeB {})) :ident (fn [] [(:type props) 1]) :query (fn [] {:a (comp/get-query MergeA) :b (comp/get-query MergeB)})}) ;; Sibling routing tree ;; SiblingRoot ;; | \ ;; SU1 SU2 ;; A B X Y (defsc SU2 [_ props] 
{:initial-state (fn [params] (comp/get-initial-state MergeX {})) :ident (fn [] [(:type props) 2]) :query (fn [] {:x (comp/get-query MergeX) :y (comp/get-query MergeY)})}) (defsc SiblingRoot [_ _] {:initial-state (fn [params] {:su1 (comp/get-initial-state SU1 {}) :su2 (comp/get-initial-state SU2 {})}) :query (fn [] [{:su1 (comp/get-query SU1)} {:su2 (comp/get-query SU2)}])}) (specification "merge-alternate-union-elements!" (behavior "For applications with sibling unions" (when-mocking (merge/merge-component! app comp state) =1x=> (do (assertions "Merges level one elements" comp => SU1 state => (comp/get-initial-state MergeA {}))) (merge/merge-component! app comp state) =1x=> (do (assertions "Merges only the state of branches that are not already initialized" comp => SU2 state => (comp/get-initial-state MergeY {}))) (merge/merge-alternate-union-elements! :app SiblingRoot))) (behavior "For applications with nested unions" (when-mocking (merge/merge-component! app comp state) =1x=> (do (assertions "Merges level one elements" comp => U1 state => (comp/get-initial-state R2 {}))) (merge/merge-component! app comp state) =1x=> (do (assertions "Merges only the state of branches that are not already initialized" comp => U2 state => (comp/get-initial-state MergeY {}))) (merge/merge-alternate-union-elements! :app NestedRoot))) (behavior "For applications with non-nested unions" (let [app (app/fulcro-app)] (when-mocking (merge/merge-component! app comp state) => (do (assertions "Merges only the state of branches that are not already initialized" comp => MergeUnion state => (comp/get-initial-state MergeB {}))) (merge/merge-alternate-union-elements! 
app MergeRoot))))) (defn phone-number [id n] {:id id :number n}) (defn person [id name numbers] {:id id :name name :numbers numbers}) (defsc MPhone [_ props] {:query (fn [] [:id :number]) :ident (fn [] [:phone/id (:id props)])}) (defsc MPerson [_ props] {:query (fn [] [:id :name {:numbers (comp/get-query MPhone)}]) :ident (fn [] [:person/id (:id props)])}) (defsc MPhonePM [_ _] {:ident [:phone/id :id] :query [:id :number] :pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {:id :sample-phone-id :ui/initial-flag :start} current-normalized data-tree))}) (defsc MPersonPM [_ props] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {:id :sample-person-id} current-normalized data-tree)) :ident [:person/id :id] :query [:id :name {:numbers (comp/get-query MPhonePM)}]}) (defsc Score [_ {::keys []}] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {:ui/expanded? false} current-normalized data-tree)) :ident [::score-id ::score-id] :query [::score-id ::points :ui/expanded?]}) (defsc Scoreboard [_ props] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (let [{::keys [scores]} data-tree high-score (apply max (map ::points scores)) scores (mapv (fn [{::keys [points] :as score}] (assoc score :ui/expanded? (= points high-score))) scores)] (merge current-normalized (assoc data-tree ::scores scores)))) :ident [::scoreboard-id ::scoreboard-id] :query [::scoreboard-id {::scores (comp/get-query Score)}]} "") (defonce id-counter (atom 0)) (defsc UiItem [_ _] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {::id (swap! 
id-counter inc)} current-normalized data-tree)) :ident [::id ::id] :query [::id ::title]}) (defsc UiLoadedItem [_ _] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {:ui/item {}} current-normalized data-tree)) :ident [::loaded-id ::loaded-id] :query [::loaded-id ::name {:ui/item (comp/get-query UiItem)}]}) (defsc UiCollectionHolder [_ _] {:ident [::col-id ::col-id] :query [::col-id {::load-items (comp/get-query UiLoadedItem)}]}) (defsc UiPreMergePlaceholderChild [_ _] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (let [id (or (:id data-tree) (:id current-normalized) #?(:clj (java.util.UUID/randomUUID) :cljs (random-uuid)))] (merge {:id id} current-normalized data-tree))) :ident :id :query [:id :child/value]}) (defsc UiPreMergePlaceholderRoot [_ _] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (let [id (or (:id data-tree) (:id current-normalized) #?(:clj (java.util.UUID/randomUUID) :cljs (random-uuid)))] (merge {:child/value 321 :id id :child {:id id}} current-normalized data-tree))) :ident :id :query [:id {:child (comp/get-query UiPreMergePlaceholderChild)}]}) (specification "merge*" (assertions "keep data defined by root merge component." (merge/merge* {} [{[:id 42] (comp/get-query UiPreMergePlaceholderRoot)}] {[:id 42] {:id 42}} {:remove-missing? true}) => {:id {42 {:id 42 :child/value 321 :child [:id 42]}}} "merge parent and children new data" (merge/merge* {} [{[:id 42] (comp/get-query UiPreMergePlaceholderRoot)}] {[:id 42] {:id 42 :child {:id 42 :child/value 123}}} {:remove-missing? 
true}) => {:id {42 {:id 42 :child [:id 42] :child/value 123}}})) (specification "merge-component" (let [component-tree (person :tony "<NAME>" [(phone-number 1 "555-1212") (phone-number 2 "123-4555")]) sally {:id :<NAME> :name "<NAME>" :numbers [[:phone/id 3]]} phone-3 {:id 3 :number "111-2222"} state-map {:people [[:person/id :sally]] :phone/id {3 phone-3} :person/id {:sally s<NAME>}} new-state-map (merge/merge-component state-map MPerson component-tree) expected-person {:id :ton<NAME> :name "<NAME>" :numbers [[:phone/id 1] [:phone/id 2]]} expected-phone-1 {:id 1 :number "555-1212"} expected-phone-2 {:id 2 :number "123-4555"}] (assertions "merges the top-level component with normalized links to children" (get-in new-state-map [:person/id :tony]) => expected-person "merges the normalized children" (get-in new-state-map [:phone/id 1]) => expected-phone-1 (get-in new-state-map [:phone/id 2]) => expected-phone-2 "leaves the original state untouched" (contains? new-state-map :people) => true (get-in new-state-map [:person/id :sally]) => sally (get-in new-state-map [:phone/id 3]) => phone-3 "honors sweep-merge (overwrites data that is in query but did not appear in result) if asked" (merge/merge-component {:person/id {1 {:id 1 :name "<NAME>" :numbers [[:phone/id 1]]}}} MPerson {:id 1 :numbers []} :remove-missing? 
true) => {:person/id {1 {:id 1 :numbers []}}} "Prevents sweep-merge by default" (merge/merge-component {:person/id {1 {:id 1 :name "<NAME>" :numbers [[:phone/id 1]]}}} MPerson {:id 1 :numbers []}) => {:person/id {1 {:id 1 :name "<NAME>" :numbers []}}})) (assertions "Can merge basic data into the database (pre-merge not override)" (merge/merge-component {} MPersonPM (person :mary "<NAME>" [(phone-number 55 "98765-4321")])) => {:person/id {:mary {:id :mary :name "<NAME>" :numbers [[:phone/id 55]]}} :phone/id {55 {:id 55 :number "98765-4321" :ui/initial-flag :start}}} "Can assign IDs to primary entities via pre-merge" (merge/merge-component {} MPersonPM {:name "<NAME>" :numbers [{:number "98765-4321"}]} :replace [:global-ref]) => {:global-ref [:person/id :sample-person-id] :person/id {:sample-person-id {:id :sample-person-id :name "<NAME>2" :numbers [[:phone/id :sample-phone-id]]}} :phone/id {:sample-phone-id {:id :sample-phone-id :number "98765-4321" :ui/initial-flag :start}}} "can merge nested to-many items and apply pre-merge" (merge/merge-component {} Scoreboard {::scoreboard-id 123 ::scores [{::score-id 1 ::points 4} {::score-id 2 ::points 8} {::score-id 3 ::points 7}]}) => {::scoreboard-id {123 {::scoreboard-id 123 ::scores [[::score-id 1] [::score-id 2] [::score-id 3]]}} ::score-id {1 {::score-id 1 ::points 4 :ui/expanded? false} 2 {::score-id 2 ::points 8 :ui/expanded? true} 3 {::score-id 3 ::points 7 :ui/expanded? false}}} "can place ident via replace named parameter with pre-merge" (merge/merge-component {} MPersonPM (person :mary "<NAME>" [(phone-number 55 "98765-4321")]) :replace [:main-person]) => {:person/id {:mary {:id :<NAME> :name "<NAME>" :numbers [[:phone/id 55]]}} :phone/id {55 {:id 55 :number "98765-4321" :ui/initial-flag :start}} :main-person [:person/id :mary]} "pre-merge step can assign an id to generated sub-elements (to-one)" (do (reset! 
id-counter 0) (merge/merge-component {} UiLoadedItem {::loaded-id 1 ::name "a"})) => {::loaded-id {1 {::loaded-id 1 ::name "a" :ui/item [::id 1]}} ::id {1 {::id 1}}} "pre-merge step can assign an id to generated sub-elements (to-many)" (do (reset! id-counter 0) (merge/merge-component {} UiCollectionHolder {::col-id 123 ::load-items [{::loaded-id 1 ::name "a"} {::loaded-id 2 ::name "b"}]})) => {::col-id {123 {::col-id 123 ::load-items [[::loaded-id 1] [::loaded-id 2]]}} ::loaded-id {1 {::loaded-id 1 ::name "a" :ui/item [::id 1]} 2 {::loaded-id 2 ::name "b" :ui/item [::id 2]}} ::id {1 {::id 1} 2 {::id 2}}})) (specification "merge-component!" (let [rendered (atom false) ;; needed because mocking cannot mock something you've closed over already app (assoc-in (app/fulcro-app {}) [::app/algorithms :com.fulcrologic.fulcro.algorithm/schedule-render!] (fn [& args] (reset! rendered true)))] (when-mocking (merge/merge-component s c d & np) => (do (assertions "calls merge-component with the component and data" c => MPerson d => {} "includes the correct named parameters" np => [:replace [:x]])) (merge/merge-component! 
app MPerson {} :replace [:x]) (assertions "schedules a render" @rendered => true)))) (def table-1 {:type :table :id 1 :rows [1 2 3]}) (defsc Table [_ _] {:initial-state (fn [p] table-1) :query (fn [] [:type :id :rows])}) (def graph-1 {:type :graph :id 1 :data [1 2 3]}) (defsc Graph [_ _] {:initial-state (fn [p] graph-1) :query (fn [] [:type :id :data])}) (defsc Reports [_ props] {:initial-state (fn [p] (comp/get-initial-state Graph nil)) ; initial state will already include Graph :ident (fn [] [(:type props) (:id props)]) :query (fn [] {:graph (comp/get-query Graph) :table (comp/get-query Table)})}) (defsc MRRoot [_ _] {:initial-state (fn [p] {:reports (comp/get-initial-state Reports nil)}) :query (fn [] [{:reports (comp/get-query Reports)}])}) (specification "merge-alternate-union-elements" (let [initial-state (merge (comp/get-initial-state MRRoot nil) {:a 1}) state-map (fnorm/tree->db MRRoot initial-state true) new-state (merge/merge-alternate-union-elements state-map MRRoot)] (assertions "can be used to merge alternate union elements to raw state" (get-in new-state [:table 1]) => table-1 "(existing state isn't touched)" (get new-state :a) => 1 (get new-state :reports) => [:graph 1] (get-in new-state [:graph 1]) => graph-1))) (defsc User [_ _] {:query [:user/id :user/name] :ident :user/id}) (defsc UserPM [_ _] {:query [:user/id :user/name] :pre-merge (fn [{:keys [data-tree current-normalized state-map]}] (merge {:ui/visible? 
true} current-normalized data-tree)) :ident :user/id}) (defsc UserPMT [_ _] {:query [:user/id :user/name] :pre-merge (fn [{:keys [data-tree current-normalized]}] ;; rewriting the ID to verify that targeting uses the correct (altered) ident (merge current-normalized data-tree {:user/id 2})) :ident :user/id}) (specification "merge-mutation-joins" :focus (behavior "Merges basic return values into app state" (let [state {} tree {'some-mutation {:user/id 1 :user/name "<NAME>"}} query [{(list 'some-mutation {:x 1}) (comp/get-query User)}] new-state (merge/merge-mutation-joins state query tree)] (assertions new-state => {:user/id {1 {:user/id 1 :user/name "<NAME>"}}}))) (behavior "Does pre-merge processing" (let [state {} tree {'some-mutation {:user/id 1 :user/name "<NAME>"}} query [{(list 'some-mutation {:x 1}) (comp/get-query UserPM)}] new-state (merge/merge-mutation-joins state query tree)] (assertions new-state => {:user/id {1 {:ui/visible? true :user/id 1 :user/name "<NAME>"}}}))) (behavior "Does data targeting based on pre-merge result" (let [state {} tree {'some-mutation {:user/id 1 :user/name "<NAME>"}} query [{(list 'some-mutation {:x 1}) (vary-meta (comp/get-query UserPMT) assoc ::targeting/target [:top-key])}] new-state (merge/merge-mutation-joins state query tree)] (assertions new-state => {:top-key [:user/id 2] :user/id {2 {:user/id 2 :user/name "<NAME>"}}}))) (behavior "Targeting on pre-merge overwrite with id re-assignment" (let [state {:user/id {1 {:user/id 1 :user/name "<NAME>"}}} tree {'some-mutation {:user/id 1 :user/name "<NAME>"}} query [{(list 'some-mutation {:x 1}) (vary-meta (comp/get-query UserPMT) assoc ::targeting/target [:top-key])}] new-state (merge/merge-mutation-joins state query tree)] (assertions new-state => {:top-key [:user/id 2] :user/id {1 {:user/id 1 :user/name "<NAME>"} 2 {:user/id 2 :user/name "<NAME>"}}})))) (specification "mark-missing" (behavior "correctly marks missing properties" (are [query ?missing-result exp] (= exp 
(merge/mark-missing ?missing-result query)) [:a :b] {:a 1} {:a 1 :b ::merge/not-found})) (behavior "joins -> one" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [:a {:b [:c]}] {:a 1} {:a 1 :b ::merge/not-found} [{:b [:c]}] {:b {}} {:b {:c ::merge/not-found}} [{:b [:c]}] {:b {:c 0}} {:b {:c 0}} [{:b [:c :d]}] {:b {:c 1}} {:b {:c 1 :d ::merge/not-found}})) (behavior "join -> many" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [{:a [:b :c]}] {:a [{:b 1 :c 2} {:b 1}]} {:a [{:b 1 :c 2} {:b 1 :c ::merge/not-found}]})) (behavior "idents and ident joins" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [{[:a 1] [:x]}] {[:a 1] {}} {[:a 1] {:x ::merge/not-found}} [{[:b 1] [:x]}] {[:b 1] {:x 2}} {[:b 1] {:x 2}} [{[:c 1] [:x]}] {} {[:c 1] {:x ::merge/not-found}} [{[:c 1] ['*]}] {} {[:c 1] {}} [{[:e 1] [:x :y :z]}] {[:e 1] {}} {[:e 1] {:x ::merge/not-found :y ::merge/not-found :z ::merge/not-found}} [[:d 1]] {} {[:d 1] {}})) (behavior "Ignores root link idents" (assertions "when the subquery exists" (merge/mark-missing {} [{[:a '_] [:x]}]) => {} "when it is a pure link" (merge/mark-missing {} [[:a '_]]) => {})) (behavior "parameterized" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) '[:z (:y {})] {:z 1} {:z 1 :y ::merge/not-found} '[:z (:y {})] {:z 1 :y 0} {:z 1 :y 0} '[:z ({:y [:x]} {})] {:z 1 :y {}} {:z 1 :y {:x ::merge/not-found}})) (behavior "nested" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [{:b [:c {:d [:e]}]}] {:b {:c 1}} {:b {:c 1 :d ::merge/not-found}} [{:b [:c {:d [:e]}]}] {:b {:c 1 :d {}}} {:b {:c 1 :d {:e ::merge/not-found}}})) (behavior "upgrades value to maps if necessary" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [{:l [:m]}] {:l 0} {:l {:m ::merge/not-found}} [{:b [:c]}] {:b nil} {:b {:c ::merge/not-found}})) (behavior "unions" 
(assertions "singletons" (merge/mark-missing {:j {:c {}}} [{:j {:a [:c] :b [:d]}}]) => {:j {:c {} :d ::merge/not-found}} "singleton with no result" (merge/mark-missing {} [{:j {:a [:c] :b [:d]}}]) => {:j ::merge/not-found} "list to-many with 1" (merge/mark-missing {:j [{:c "c"}]} [{:j {:a [:c] :b [:d]}}]) => {:j [{:c "c" :d ::merge/not-found}]} "list to-many with 2" (merge/mark-missing {:items [{:id 0 :image "img1"} {:id 1 :text "text1"}]} [{:items {:photo [:id :image] :text [:id :text]}}]) => {:items [{:id 0 :image "img1" :text ::merge/not-found} {:id 1 :image ::merge/not-found :text "text1"}]} "list to-many with no results" (merge/mark-missing {:j []} [{:j {:a [:c] :b [:d]}}]) => {:j []})) (behavior "if the query has a ui.*/ attribute, it should not be marked as missing" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [:a :ui/b :c] {:a {} :c {}} {:a {} :c {}} [{:j [:ui/b :c]}] {:j {:c 5}} {:j {:c 5}} [{:j [{:ui/b [:d]} :c]}] {:j {:c 5}} {:j {:c 5}})) (behavior "mutations!" 
(are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) '[(f) {:j [:a]}] {'f {} :j {}} {'f {} :j {:a ::merge/not-found}} '[(app/add-q {:p 1}) {:j1 [:p1]} {:j2 [:p2]}] {'app/add-q {:tempids {}} :j1 {} :j2 [{:p2 2} {}]} {'app/add-q {:tempids {}} :j1 {:p1 ::merge/not-found} :j2 [{:p2 2} {:p2 ::merge/not-found}]})) (behavior "correctly walks recursive queries to mark missing data" (behavior "when the recursive target is a singleton" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [:a {:b '...}] {:a 1 :b {:a 2}} {:a 1 :b {:a 2 :b ::merge/not-found}} [:a {:b '...}] {:a 1 :b {:a 2 :b {:a 3}}} {:a 1 :b {:a 2 :b {:a 3 :b ::merge/not-found}}} [:a {:b 9}] {:a 1 :b {:a 2 :b {:a 3 :b {:a 4}}}} {:a 1 :b {:a 2 :b {:a 3 :b {:a 4 :b ::merge/not-found}}}})) (behavior "when the recursive target is to-many" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [:a {:b '...}] {:a 1 :b [{:a 2 :b [{:a 3}]} {:a 4}]} {:a 1 :b [{:a 2 :b [{:a 3 :b ::merge/not-found}]} {:a 4 :b ::merge/not-found}]}))) (behavior "marks leaf data based on the query where" (letfn [(has-leaves [leaf-paths] (fn [result] (every? #(#'merge/leaf? 
(get-in result %)) leaf-paths)))] (assertions "plain data is always a leaf" (merge/mark-missing {:a 1 :b {:x 5}} [:a {:b [:x]}]) =fn=> (has-leaves [[:b :x] [:a] [:missing]]) "data structures are properly marked in singleton results" (merge/mark-missing {:b {:x {:data 1}}} [{:b [:x :y]}]) =fn=> (has-leaves [[:b :x]]) "data structures are properly marked in to-many results" (merge/mark-missing {:b [{:x {:data 1}} {:x {:data 2}}]} [{:b [:x]}]) =fn=> (has-leaves [[:b 0 :x] [:b 1 :x]]) (merge/mark-missing {:b []} [:a {:b [:x]}]) =fn=> (has-leaves [[:b]]) "unions are followed" (merge/mark-missing {:a [{:x {:data 1}} {:y {:data 2}}]} [{:a {:b [:x] :c [:y]}}]) =fn=> (has-leaves [[:a 0 :x] [:a 1 :y]]) "unions leaves data in place when the result is empty" (merge/mark-missing {:a 1} [:a {:z {:b [:x] :c [:y]}}]) =fn=> (has-leaves [[:a]]))))) (specification "Sweep one" (assertions "removes not-found values from maps" (#'merge/sweep-one {:a 1 :b ::merge/not-found}) => {:a 1} "removes tempids from maps" (#'merge/sweep-one {:tempids {3 4}}) => {} "is not recursive" (#'merge/sweep-one {:a 1 :b {:c ::merge/not-found}}) => {:a 1 :b {:c ::merge/not-found}} "maps over vectors not recursive" (#'merge/sweep-one [{:a 1 :b ::merge/not-found}]) => [{:a 1}] "retains metadata" (-> (#'merge/sweep-one (with-meta {:a 1 :b ::merge/not-found} {:meta :data})) meta) => {:meta :data} (-> (#'merge/sweep-one [(with-meta {:a 1 :b ::merge/not-found} {:meta :data})]) first meta) => {:meta :data} (-> (#'merge/sweep-one (with-meta [{:a 1 :b ::merge/not-found}] {:meta :data})) meta) => {:meta :data})) (specification "Sweep merge" (assertions "recursively merges maps" (merge/sweep-merge {:a 1 :c {:b 2}} {:a 2 :c 5}) => {:a 2 :c 5} (merge/sweep-merge {:a 1 :c {:b 2}} {:a 2 :c {:x 1}}) => {:a 2 :c {:b 2 :x 1}} "stops recursive merging if the source element is marked as a leaf" (merge/sweep-merge {:a 1 :c {:d {:x 2} :e 4}} {:a 2 :c (#'merge/as-leaf {:d {:x 1}})}) => {:a 2 :c {:d {:x 1}}} "sweeps tempids from 
maps" (merge/sweep-merge {:a 1 :c {:b 2}} {:a 2 :tempids {} :c {:b ::merge/not-found}}) => {:a 2 :c {}} "Merging into a sub-map should remove the explicitly marked keys" (merge/sweep-merge {:a 1 :c {:x 1 :b 42}} {:a 2 :c ::merge/not-found}) => {:a 2} (merge/sweep-merge {:a 1 :c {:x 1 :b 42}} {:a 2 :c {:b ::merge/not-found}}) => {:a 2 :c {:x 1}} (merge/sweep-merge {:a 1 :c {:x 1 :b 42}} {:a 2 :c {:x ::merge/not-found}}) => {:a 2 :c {:b 42}} "Merging from an empty map should leave the original unmodified" (merge/sweep-merge {:a 1 :c {:x 1 :b 42}} {:a 2 :c {}}) => {:a 2 :c {:x 1 :b 42}} "removes values that are marked as not found" (merge/sweep-merge {:a 1 :c {:b 2}} {:a 2 :c {:b ::merge/not-found}}) => {:a 2 :c {}} (merge/sweep-merge {:a 1 :c 2} {:a 2 :c [{:x 1 :b ::merge/not-found}]}) => {:a 2 :c [{:x 1}]} (merge/sweep-merge {:a 1 :c {:data-fetch :loading}} {:a 2 :c [{:x 1 :b ::merge/not-found}]}) => {:a 2 :c [{:x 1}]} (merge/sweep-merge {:a 1 :c nil} {:a 2 :c [{:x 1 :b ::merge/not-found}]}) => {:a 2 :c [{:x 1}]} (merge/sweep-merge {:a 1 :b {:c {}}} {:a 2 :b {:c [{:x 1 :b ::merge/not-found}]}}) => {:a 2 :b {:c [{:x 1}]}} "clears normalized table entries that has an id of not found" (merge/sweep-merge {:table {1 {:a 2}}} {:table {::merge/not-found {:db/id ::merge/not-found}}}) => {:table {1 {:a 2}}} "clears idents whose ids were not found" (merge/sweep-merge {} {:table {1 {:db/id 1 :the-thing [:table-1 ::merge/not-found]}} :thing [:table-2 ::merge/not-found]}) => {:table {1 {:db/id 1}}} "sweeps not-found values from normalized table merges" (merge/sweep-merge {:subpanel [:dashboard :panel] :dashboard {:panel {:view-mode :detail :surveys {:ui/fetch-state {:post-mutation 's}}}} } {:subpanel [:dashboard :panel] :dashboard {:panel {:view-mode :detail :surveys [[:s 1] [:s 2]]}} :s { 1 {:db/id 1, :survey/launch-date ::merge/not-found} 2 {:db/id 2, :survey/launch-date "2012-12-22"} }}) => {:subpanel [:dashboard :panel] :dashboard {:panel {:view-mode :detail :surveys [[:s 1] 
[:s 2]]}} :s { 1 {:db/id 1} 2 {:db/id 2 :survey/launch-date "2012-12-22"} }} "overwrites target (non-map) value if incoming value is a map" (merge/sweep-merge {:a 1 :c 2} {:a 2 :c {:b 1}}) => {:a 2 :c {:b 1}}))
;; Stray top-level `true` — a bare constant expression evaluated (and discarded)
;; when the namespace is loaded; it has no runtime effect.
;; NOTE(review): this sits at the seam between what appear to be two concatenated
;; copies of the same spec file — an `ns` form for
;; com.fulcrologic.fulcro.algorithms.merge-spec restarts immediately after this
;; line, repeating the defsc/specification forms above. Likely a file-concatenation
;; artifact; confirm and remove both this literal and the duplicated copy.
true
(ns com.fulcrologic.fulcro.algorithms.merge-spec (:require [clojure.test :refer [deftest are]] [com.fulcrologic.fulcro.components :as comp :refer [defsc]] [fulcro-spec.core :refer [assertions specification component when-mocking behavior provided]] [com.fulcrologic.fulcro.algorithms.merge :as merge] [com.fulcrologic.fulcro.algorithms.denormalize :as fdn] [com.fulcrologic.fulcro.algorithms.normalize :as fnorm] [com.fulcrologic.fulcro.algorithms.do-not-use :as util] [com.fulcrologic.fulcro.application :as app] [taoensso.timbre :as log] [com.fulcrologic.fulcro.algorithms.data-targeting :as targeting] [com.fulcrologic.fulcro.algorithms.scheduling :as sched])) (declare =>) (defsc MMChild [_ _] {:query [:db/id] :initial-state {:db/id :param/id}}) (defsc MMParent [_ _] {:query [:db/id {:main-child (comp/get-query MMChild)} {:children (comp/get-query MMChild)}] :initial-state {:db/id :param/id :main-child :param/main :children :param/children}}) (specification "Mixed Mode Initial State" (component "defsc components that use template initial state" (assertions "Accept maps of child parameters and automatically construct children from them" (comp/get-initial-state MMParent {:id 1 :main {:id 1} :children [{:id 1}]}) => {:db/id 1 :main-child {:db/id 1} :children [{:db/id 1}]} "Allow to-one children to be initialized directly with a call to get-initial-state" (comp/get-initial-state MMParent {:id 1 :main (comp/get-initial-state MMChild {:id 1}) :children [{:id 1}]}) => {:db/id 1 :main-child {:db/id 1} :children [{:db/id 1}]} "Allow to-many children to be initialized directly with calls to get-initial-state" (comp/get-initial-state MMParent {:id 1 :main {:id 1} :children [(comp/get-initial-state MMChild {:id 1})]}) => {:db/id 1 :main-child {:db/id 1} :children [{:db/id 1}]} "Allow to-many children to be initialized with a mix of maps and calls to get-initial-state" (comp/get-initial-state MMParent {:id 1 :main {:id 1} :children [{:id 3} (comp/get-initial-state MMChild {:id 
1})]}) => {:db/id 1 :main-child {:db/id 1} :children [{:db/id 3} {:db/id 1}]}))) (defsc MergeX [_ _] {:initial-state (fn [params] {:type :x :n :x}) :query (fn [] [:n :type])}) (defsc MergeY [_ _] {:initial-state (fn [params] {:type :y :n :y}) :query (fn [] [:n :type])}) (defsc MergeAChild [_ _] {:initial-state (fn [params] {:child :merge-a}) :ident (fn [] [:mergea :child]) :query (fn [] [:child])}) (defsc MergeA [_ _] {:initial-state (fn [params] {:type :a :n :a :child (comp/get-initial-state MergeAChild nil)}) :query (fn [] [:type :n {:child (comp/get-query MergeAChild)}])}) (defsc MergeB [_ _] {:initial-state (fn [params] {:type :b :n :b}) :query (fn [] [:n])}) (defsc MergeUnion [_ _] {:initial-state (fn [params] (comp/get-initial-state MergeA {})) :ident (fn [] [:mergea-or-b :at-union]) :query (fn [] {:a (comp/get-query MergeA) :b (comp/get-query MergeB)})}) (defsc MergeRoot [_ _] {:initial-state (fn [params] {:a 1 :b (comp/get-initial-state MergeUnion {})}) :query (fn [] [:a {:b (comp/get-query MergeUnion)}])}) (defsc U2 [_ _] {:initial-state (fn [params] (comp/get-initial-state MergeX {})) :query (fn [] {:x (comp/get-query MergeX) :y (comp/get-query MergeY)})}) ;; Nested routing tree ;; NestedRoot ;; | ;; U1 ;; / B A = MergeRoot B = MergeB ;; R2 ;; U2 A2 ;; X Y (defsc R2 [_ _] {:initial-state (fn [params] {:id 1 :u2 (comp/get-initial-state U2 {})}) :query (fn [] [:id {:u2 (comp/get-query U2)}])}) (defsc U1 [_ _] {:initial-state (fn [params] (comp/get-initial-state MergeB {})) :query (fn [] {:r2 (comp/get-query R2) :b (comp/get-query MergeB)})}) (defsc NestedRoot [_ _] {:initial-state (fn [params] {:u1 (comp/get-initial-state U1 {})}) :query (fn [] [{:u1 (comp/get-query U1)}])}) (defsc SU1 [_ props] {:initial-state (fn [params] (comp/get-initial-state MergeB {})) :ident (fn [] [(:type props) 1]) :query (fn [] {:a (comp/get-query MergeA) :b (comp/get-query MergeB)})}) ;; Sibling routing tree ;; SiblingRoot ;; | \ ;; SU1 SU2 ;; A B X Y (defsc SU2 [_ props] 
{:initial-state (fn [params] (comp/get-initial-state MergeX {})) :ident (fn [] [(:type props) 2]) :query (fn [] {:x (comp/get-query MergeX) :y (comp/get-query MergeY)})}) (defsc SiblingRoot [_ _] {:initial-state (fn [params] {:su1 (comp/get-initial-state SU1 {}) :su2 (comp/get-initial-state SU2 {})}) :query (fn [] [{:su1 (comp/get-query SU1)} {:su2 (comp/get-query SU2)}])}) (specification "merge-alternate-union-elements!" (behavior "For applications with sibling unions" (when-mocking (merge/merge-component! app comp state) =1x=> (do (assertions "Merges level one elements" comp => SU1 state => (comp/get-initial-state MergeA {}))) (merge/merge-component! app comp state) =1x=> (do (assertions "Merges only the state of branches that are not already initialized" comp => SU2 state => (comp/get-initial-state MergeY {}))) (merge/merge-alternate-union-elements! :app SiblingRoot))) (behavior "For applications with nested unions" (when-mocking (merge/merge-component! app comp state) =1x=> (do (assertions "Merges level one elements" comp => U1 state => (comp/get-initial-state R2 {}))) (merge/merge-component! app comp state) =1x=> (do (assertions "Merges only the state of branches that are not already initialized" comp => U2 state => (comp/get-initial-state MergeY {}))) (merge/merge-alternate-union-elements! :app NestedRoot))) (behavior "For applications with non-nested unions" (let [app (app/fulcro-app)] (when-mocking (merge/merge-component! app comp state) => (do (assertions "Merges only the state of branches that are not already initialized" comp => MergeUnion state => (comp/get-initial-state MergeB {}))) (merge/merge-alternate-union-elements! 
app MergeRoot))))) (defn phone-number [id n] {:id id :number n}) (defn person [id name numbers] {:id id :name name :numbers numbers}) (defsc MPhone [_ props] {:query (fn [] [:id :number]) :ident (fn [] [:phone/id (:id props)])}) (defsc MPerson [_ props] {:query (fn [] [:id :name {:numbers (comp/get-query MPhone)}]) :ident (fn [] [:person/id (:id props)])}) (defsc MPhonePM [_ _] {:ident [:phone/id :id] :query [:id :number] :pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {:id :sample-phone-id :ui/initial-flag :start} current-normalized data-tree))}) (defsc MPersonPM [_ props] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {:id :sample-person-id} current-normalized data-tree)) :ident [:person/id :id] :query [:id :name {:numbers (comp/get-query MPhonePM)}]}) (defsc Score [_ {::keys []}] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {:ui/expanded? false} current-normalized data-tree)) :ident [::score-id ::score-id] :query [::score-id ::points :ui/expanded?]}) (defsc Scoreboard [_ props] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (let [{::keys [scores]} data-tree high-score (apply max (map ::points scores)) scores (mapv (fn [{::keys [points] :as score}] (assoc score :ui/expanded? (= points high-score))) scores)] (merge current-normalized (assoc data-tree ::scores scores)))) :ident [::scoreboard-id ::scoreboard-id] :query [::scoreboard-id {::scores (comp/get-query Score)}]} "") (defonce id-counter (atom 0)) (defsc UiItem [_ _] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {::id (swap! 
id-counter inc)} current-normalized data-tree)) :ident [::id ::id] :query [::id ::title]}) (defsc UiLoadedItem [_ _] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (merge {:ui/item {}} current-normalized data-tree)) :ident [::loaded-id ::loaded-id] :query [::loaded-id ::name {:ui/item (comp/get-query UiItem)}]}) (defsc UiCollectionHolder [_ _] {:ident [::col-id ::col-id] :query [::col-id {::load-items (comp/get-query UiLoadedItem)}]}) (defsc UiPreMergePlaceholderChild [_ _] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (let [id (or (:id data-tree) (:id current-normalized) #?(:clj (java.util.UUID/randomUUID) :cljs (random-uuid)))] (merge {:id id} current-normalized data-tree))) :ident :id :query [:id :child/value]}) (defsc UiPreMergePlaceholderRoot [_ _] {:pre-merge (fn [{:keys [current-normalized data-tree]}] (let [id (or (:id data-tree) (:id current-normalized) #?(:clj (java.util.UUID/randomUUID) :cljs (random-uuid)))] (merge {:child/value 321 :id id :child {:id id}} current-normalized data-tree))) :ident :id :query [:id {:child (comp/get-query UiPreMergePlaceholderChild)}]}) (specification "merge*" (assertions "keep data defined by root merge component." (merge/merge* {} [{[:id 42] (comp/get-query UiPreMergePlaceholderRoot)}] {[:id 42] {:id 42}} {:remove-missing? true}) => {:id {42 {:id 42 :child/value 321 :child [:id 42]}}} "merge parent and children new data" (merge/merge* {} [{[:id 42] (comp/get-query UiPreMergePlaceholderRoot)}] {[:id 42] {:id 42 :child {:id 42 :child/value 123}}} {:remove-missing? 
true}) => {:id {42 {:id 42 :child [:id 42] :child/value 123}}})) (specification "merge-component" (let [component-tree (person :tony "PI:NAME:<NAME>END_PI" [(phone-number 1 "555-1212") (phone-number 2 "123-4555")]) sally {:id :PI:NAME:<NAME>END_PI :name "PI:NAME:<NAME>END_PI" :numbers [[:phone/id 3]]} phone-3 {:id 3 :number "111-2222"} state-map {:people [[:person/id :sally]] :phone/id {3 phone-3} :person/id {:sally sPI:NAME:<NAME>END_PI}} new-state-map (merge/merge-component state-map MPerson component-tree) expected-person {:id :tonPI:NAME:<NAME>END_PI :name "PI:NAME:<NAME>END_PI" :numbers [[:phone/id 1] [:phone/id 2]]} expected-phone-1 {:id 1 :number "555-1212"} expected-phone-2 {:id 2 :number "123-4555"}] (assertions "merges the top-level component with normalized links to children" (get-in new-state-map [:person/id :tony]) => expected-person "merges the normalized children" (get-in new-state-map [:phone/id 1]) => expected-phone-1 (get-in new-state-map [:phone/id 2]) => expected-phone-2 "leaves the original state untouched" (contains? new-state-map :people) => true (get-in new-state-map [:person/id :sally]) => sally (get-in new-state-map [:phone/id 3]) => phone-3 "honors sweep-merge (overwrites data that is in query but did not appear in result) if asked" (merge/merge-component {:person/id {1 {:id 1 :name "PI:NAME:<NAME>END_PI" :numbers [[:phone/id 1]]}}} MPerson {:id 1 :numbers []} :remove-missing? 
true) => {:person/id {1 {:id 1 :numbers []}}} "Prevents sweep-merge by default" (merge/merge-component {:person/id {1 {:id 1 :name "PI:NAME:<NAME>END_PI" :numbers [[:phone/id 1]]}}} MPerson {:id 1 :numbers []}) => {:person/id {1 {:id 1 :name "PI:NAME:<NAME>END_PI" :numbers []}}})) (assertions "Can merge basic data into the database (pre-merge not override)" (merge/merge-component {} MPersonPM (person :mary "PI:NAME:<NAME>END_PI" [(phone-number 55 "98765-4321")])) => {:person/id {:mary {:id :mary :name "PI:NAME:<NAME>END_PI" :numbers [[:phone/id 55]]}} :phone/id {55 {:id 55 :number "98765-4321" :ui/initial-flag :start}}} "Can assign IDs to primary entities via pre-merge" (merge/merge-component {} MPersonPM {:name "PI:NAME:<NAME>END_PI" :numbers [{:number "98765-4321"}]} :replace [:global-ref]) => {:global-ref [:person/id :sample-person-id] :person/id {:sample-person-id {:id :sample-person-id :name "PI:NAME:<NAME>END_PI2" :numbers [[:phone/id :sample-phone-id]]}} :phone/id {:sample-phone-id {:id :sample-phone-id :number "98765-4321" :ui/initial-flag :start}}} "can merge nested to-many items and apply pre-merge" (merge/merge-component {} Scoreboard {::scoreboard-id 123 ::scores [{::score-id 1 ::points 4} {::score-id 2 ::points 8} {::score-id 3 ::points 7}]}) => {::scoreboard-id {123 {::scoreboard-id 123 ::scores [[::score-id 1] [::score-id 2] [::score-id 3]]}} ::score-id {1 {::score-id 1 ::points 4 :ui/expanded? false} 2 {::score-id 2 ::points 8 :ui/expanded? true} 3 {::score-id 3 ::points 7 :ui/expanded? 
false}}} "can place ident via replace named parameter with pre-merge" (merge/merge-component {} MPersonPM (person :mary "PI:NAME:<NAME>END_PI" [(phone-number 55 "98765-4321")]) :replace [:main-person]) => {:person/id {:mary {:id :PI:NAME:<NAME>END_PI :name "PI:NAME:<NAME>END_PI" :numbers [[:phone/id 55]]}} :phone/id {55 {:id 55 :number "98765-4321" :ui/initial-flag :start}} :main-person [:person/id :mary]} "pre-merge step can assign an id to generated sub-elements (to-one)" (do (reset! id-counter 0) (merge/merge-component {} UiLoadedItem {::loaded-id 1 ::name "a"})) => {::loaded-id {1 {::loaded-id 1 ::name "a" :ui/item [::id 1]}} ::id {1 {::id 1}}} "pre-merge step can assign an id to generated sub-elements (to-many)" (do (reset! id-counter 0) (merge/merge-component {} UiCollectionHolder {::col-id 123 ::load-items [{::loaded-id 1 ::name "a"} {::loaded-id 2 ::name "b"}]})) => {::col-id {123 {::col-id 123 ::load-items [[::loaded-id 1] [::loaded-id 2]]}} ::loaded-id {1 {::loaded-id 1 ::name "a" :ui/item [::id 1]} 2 {::loaded-id 2 ::name "b" :ui/item [::id 2]}} ::id {1 {::id 1} 2 {::id 2}}})) (specification "merge-component!" (let [rendered (atom false) ;; needed because mocking cannot mock something you've closed over already app (assoc-in (app/fulcro-app {}) [::app/algorithms :com.fulcrologic.fulcro.algorithm/schedule-render!] (fn [& args] (reset! rendered true)))] (when-mocking (merge/merge-component s c d & np) => (do (assertions "calls merge-component with the component and data" c => MPerson d => {} "includes the correct named parameters" np => [:replace [:x]])) (merge/merge-component! 
app MPerson {} :replace [:x]) (assertions "schedules a render" @rendered => true)))) (def table-1 {:type :table :id 1 :rows [1 2 3]}) (defsc Table [_ _] {:initial-state (fn [p] table-1) :query (fn [] [:type :id :rows])}) (def graph-1 {:type :graph :id 1 :data [1 2 3]}) (defsc Graph [_ _] {:initial-state (fn [p] graph-1) :query (fn [] [:type :id :data])}) (defsc Reports [_ props] {:initial-state (fn [p] (comp/get-initial-state Graph nil)) ; initial state will already include Graph :ident (fn [] [(:type props) (:id props)]) :query (fn [] {:graph (comp/get-query Graph) :table (comp/get-query Table)})}) (defsc MRRoot [_ _] {:initial-state (fn [p] {:reports (comp/get-initial-state Reports nil)}) :query (fn [] [{:reports (comp/get-query Reports)}])}) (specification "merge-alternate-union-elements" (let [initial-state (merge (comp/get-initial-state MRRoot nil) {:a 1}) state-map (fnorm/tree->db MRRoot initial-state true) new-state (merge/merge-alternate-union-elements state-map MRRoot)] (assertions "can be used to merge alternate union elements to raw state" (get-in new-state [:table 1]) => table-1 "(existing state isn't touched)" (get new-state :a) => 1 (get new-state :reports) => [:graph 1] (get-in new-state [:graph 1]) => graph-1))) (defsc User [_ _] {:query [:user/id :user/name] :ident :user/id}) (defsc UserPM [_ _] {:query [:user/id :user/name] :pre-merge (fn [{:keys [data-tree current-normalized state-map]}] (merge {:ui/visible? 
true} current-normalized data-tree)) :ident :user/id}) (defsc UserPMT [_ _] {:query [:user/id :user/name] :pre-merge (fn [{:keys [data-tree current-normalized]}] ;; rewriting the ID to verify that targeting uses the correct (altered) ident (merge current-normalized data-tree {:user/id 2})) :ident :user/id}) (specification "merge-mutation-joins" :focus (behavior "Merges basic return values into app state" (let [state {} tree {'some-mutation {:user/id 1 :user/name "PI:NAME:<NAME>END_PI"}} query [{(list 'some-mutation {:x 1}) (comp/get-query User)}] new-state (merge/merge-mutation-joins state query tree)] (assertions new-state => {:user/id {1 {:user/id 1 :user/name "PI:NAME:<NAME>END_PI"}}}))) (behavior "Does pre-merge processing" (let [state {} tree {'some-mutation {:user/id 1 :user/name "PI:NAME:<NAME>END_PI"}} query [{(list 'some-mutation {:x 1}) (comp/get-query UserPM)}] new-state (merge/merge-mutation-joins state query tree)] (assertions new-state => {:user/id {1 {:ui/visible? true :user/id 1 :user/name "PI:NAME:<NAME>END_PI"}}}))) (behavior "Does data targeting based on pre-merge result" (let [state {} tree {'some-mutation {:user/id 1 :user/name "PI:NAME:<NAME>END_PI"}} query [{(list 'some-mutation {:x 1}) (vary-meta (comp/get-query UserPMT) assoc ::targeting/target [:top-key])}] new-state (merge/merge-mutation-joins state query tree)] (assertions new-state => {:top-key [:user/id 2] :user/id {2 {:user/id 2 :user/name "PI:NAME:<NAME>END_PI"}}}))) (behavior "Targeting on pre-merge overwrite with id re-assignment" (let [state {:user/id {1 {:user/id 1 :user/name "PI:NAME:<NAME>END_PI"}}} tree {'some-mutation {:user/id 1 :user/name "PI:NAME:<NAME>END_PI"}} query [{(list 'some-mutation {:x 1}) (vary-meta (comp/get-query UserPMT) assoc ::targeting/target [:top-key])}] new-state (merge/merge-mutation-joins state query tree)] (assertions new-state => {:top-key [:user/id 2] :user/id {1 {:user/id 1 :user/name "PI:NAME:<NAME>END_PI"} 2 {:user/id 2 :user/name 
"PI:NAME:<NAME>END_PI"}}})))) (specification "mark-missing" (behavior "correctly marks missing properties" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [:a :b] {:a 1} {:a 1 :b ::merge/not-found})) (behavior "joins -> one" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [:a {:b [:c]}] {:a 1} {:a 1 :b ::merge/not-found} [{:b [:c]}] {:b {}} {:b {:c ::merge/not-found}} [{:b [:c]}] {:b {:c 0}} {:b {:c 0}} [{:b [:c :d]}] {:b {:c 1}} {:b {:c 1 :d ::merge/not-found}})) (behavior "join -> many" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [{:a [:b :c]}] {:a [{:b 1 :c 2} {:b 1}]} {:a [{:b 1 :c 2} {:b 1 :c ::merge/not-found}]})) (behavior "idents and ident joins" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [{[:a 1] [:x]}] {[:a 1] {}} {[:a 1] {:x ::merge/not-found}} [{[:b 1] [:x]}] {[:b 1] {:x 2}} {[:b 1] {:x 2}} [{[:c 1] [:x]}] {} {[:c 1] {:x ::merge/not-found}} [{[:c 1] ['*]}] {} {[:c 1] {}} [{[:e 1] [:x :y :z]}] {[:e 1] {}} {[:e 1] {:x ::merge/not-found :y ::merge/not-found :z ::merge/not-found}} [[:d 1]] {} {[:d 1] {}})) (behavior "Ignores root link idents" (assertions "when the subquery exists" (merge/mark-missing {} [{[:a '_] [:x]}]) => {} "when it is a pure link" (merge/mark-missing {} [[:a '_]]) => {})) (behavior "parameterized" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) '[:z (:y {})] {:z 1} {:z 1 :y ::merge/not-found} '[:z (:y {})] {:z 1 :y 0} {:z 1 :y 0} '[:z ({:y [:x]} {})] {:z 1 :y {}} {:z 1 :y {:x ::merge/not-found}})) (behavior "nested" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [{:b [:c {:d [:e]}]}] {:b {:c 1}} {:b {:c 1 :d ::merge/not-found}} [{:b [:c {:d [:e]}]}] {:b {:c 1 :d {}}} {:b {:c 1 :d {:e ::merge/not-found}}})) (behavior "upgrades value to maps if necessary" (are [query ?missing-result exp] (= exp (merge/mark-missing 
?missing-result query)) [{:l [:m]}] {:l 0} {:l {:m ::merge/not-found}} [{:b [:c]}] {:b nil} {:b {:c ::merge/not-found}})) (behavior "unions" (assertions "singletons" (merge/mark-missing {:j {:c {}}} [{:j {:a [:c] :b [:d]}}]) => {:j {:c {} :d ::merge/not-found}} "singleton with no result" (merge/mark-missing {} [{:j {:a [:c] :b [:d]}}]) => {:j ::merge/not-found} "list to-many with 1" (merge/mark-missing {:j [{:c "c"}]} [{:j {:a [:c] :b [:d]}}]) => {:j [{:c "c" :d ::merge/not-found}]} "list to-many with 2" (merge/mark-missing {:items [{:id 0 :image "img1"} {:id 1 :text "text1"}]} [{:items {:photo [:id :image] :text [:id :text]}}]) => {:items [{:id 0 :image "img1" :text ::merge/not-found} {:id 1 :image ::merge/not-found :text "text1"}]} "list to-many with no results" (merge/mark-missing {:j []} [{:j {:a [:c] :b [:d]}}]) => {:j []})) (behavior "if the query has a ui.*/ attribute, it should not be marked as missing" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [:a :ui/b :c] {:a {} :c {}} {:a {} :c {}} [{:j [:ui/b :c]}] {:j {:c 5}} {:j {:c 5}} [{:j [{:ui/b [:d]} :c]}] {:j {:c 5}} {:j {:c 5}})) (behavior "mutations!" 
(are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) '[(f) {:j [:a]}] {'f {} :j {}} {'f {} :j {:a ::merge/not-found}} '[(app/add-q {:p 1}) {:j1 [:p1]} {:j2 [:p2]}] {'app/add-q {:tempids {}} :j1 {} :j2 [{:p2 2} {}]} {'app/add-q {:tempids {}} :j1 {:p1 ::merge/not-found} :j2 [{:p2 2} {:p2 ::merge/not-found}]})) (behavior "correctly walks recursive queries to mark missing data" (behavior "when the recursive target is a singleton" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [:a {:b '...}] {:a 1 :b {:a 2}} {:a 1 :b {:a 2 :b ::merge/not-found}} [:a {:b '...}] {:a 1 :b {:a 2 :b {:a 3}}} {:a 1 :b {:a 2 :b {:a 3 :b ::merge/not-found}}} [:a {:b 9}] {:a 1 :b {:a 2 :b {:a 3 :b {:a 4}}}} {:a 1 :b {:a 2 :b {:a 3 :b {:a 4 :b ::merge/not-found}}}})) (behavior "when the recursive target is to-many" (are [query ?missing-result exp] (= exp (merge/mark-missing ?missing-result query)) [:a {:b '...}] {:a 1 :b [{:a 2 :b [{:a 3}]} {:a 4}]} {:a 1 :b [{:a 2 :b [{:a 3 :b ::merge/not-found}]} {:a 4 :b ::merge/not-found}]}))) (behavior "marks leaf data based on the query where" (letfn [(has-leaves [leaf-paths] (fn [result] (every? #(#'merge/leaf? 
(get-in result %)) leaf-paths)))] (assertions "plain data is always a leaf" (merge/mark-missing {:a 1 :b {:x 5}} [:a {:b [:x]}]) =fn=> (has-leaves [[:b :x] [:a] [:missing]]) "data structures are properly marked in singleton results" (merge/mark-missing {:b {:x {:data 1}}} [{:b [:x :y]}]) =fn=> (has-leaves [[:b :x]]) "data structures are properly marked in to-many results" (merge/mark-missing {:b [{:x {:data 1}} {:x {:data 2}}]} [{:b [:x]}]) =fn=> (has-leaves [[:b 0 :x] [:b 1 :x]]) (merge/mark-missing {:b []} [:a {:b [:x]}]) =fn=> (has-leaves [[:b]]) "unions are followed" (merge/mark-missing {:a [{:x {:data 1}} {:y {:data 2}}]} [{:a {:b [:x] :c [:y]}}]) =fn=> (has-leaves [[:a 0 :x] [:a 1 :y]]) "unions leaves data in place when the result is empty" (merge/mark-missing {:a 1} [:a {:z {:b [:x] :c [:y]}}]) =fn=> (has-leaves [[:a]]))))) (specification "Sweep one" (assertions "removes not-found values from maps" (#'merge/sweep-one {:a 1 :b ::merge/not-found}) => {:a 1} "removes tempids from maps" (#'merge/sweep-one {:tempids {3 4}}) => {} "is not recursive" (#'merge/sweep-one {:a 1 :b {:c ::merge/not-found}}) => {:a 1 :b {:c ::merge/not-found}} "maps over vectors not recursive" (#'merge/sweep-one [{:a 1 :b ::merge/not-found}]) => [{:a 1}] "retains metadata" (-> (#'merge/sweep-one (with-meta {:a 1 :b ::merge/not-found} {:meta :data})) meta) => {:meta :data} (-> (#'merge/sweep-one [(with-meta {:a 1 :b ::merge/not-found} {:meta :data})]) first meta) => {:meta :data} (-> (#'merge/sweep-one (with-meta [{:a 1 :b ::merge/not-found}] {:meta :data})) meta) => {:meta :data})) (specification "Sweep merge" (assertions "recursively merges maps" (merge/sweep-merge {:a 1 :c {:b 2}} {:a 2 :c 5}) => {:a 2 :c 5} (merge/sweep-merge {:a 1 :c {:b 2}} {:a 2 :c {:x 1}}) => {:a 2 :c {:b 2 :x 1}} "stops recursive merging if the source element is marked as a leaf" (merge/sweep-merge {:a 1 :c {:d {:x 2} :e 4}} {:a 2 :c (#'merge/as-leaf {:d {:x 1}})}) => {:a 2 :c {:d {:x 1}}} "sweeps tempids from 
maps" (merge/sweep-merge {:a 1 :c {:b 2}} {:a 2 :tempids {} :c {:b ::merge/not-found}}) => {:a 2 :c {}} "Merging into a sub-map should remove the explicitly marked keys" (merge/sweep-merge {:a 1 :c {:x 1 :b 42}} {:a 2 :c ::merge/not-found}) => {:a 2} (merge/sweep-merge {:a 1 :c {:x 1 :b 42}} {:a 2 :c {:b ::merge/not-found}}) => {:a 2 :c {:x 1}} (merge/sweep-merge {:a 1 :c {:x 1 :b 42}} {:a 2 :c {:x ::merge/not-found}}) => {:a 2 :c {:b 42}} "Merging from an empty map should leave the original unmodified" (merge/sweep-merge {:a 1 :c {:x 1 :b 42}} {:a 2 :c {}}) => {:a 2 :c {:x 1 :b 42}} "removes values that are marked as not found" (merge/sweep-merge {:a 1 :c {:b 2}} {:a 2 :c {:b ::merge/not-found}}) => {:a 2 :c {}} (merge/sweep-merge {:a 1 :c 2} {:a 2 :c [{:x 1 :b ::merge/not-found}]}) => {:a 2 :c [{:x 1}]} (merge/sweep-merge {:a 1 :c {:data-fetch :loading}} {:a 2 :c [{:x 1 :b ::merge/not-found}]}) => {:a 2 :c [{:x 1}]} (merge/sweep-merge {:a 1 :c nil} {:a 2 :c [{:x 1 :b ::merge/not-found}]}) => {:a 2 :c [{:x 1}]} (merge/sweep-merge {:a 1 :b {:c {}}} {:a 2 :b {:c [{:x 1 :b ::merge/not-found}]}}) => {:a 2 :b {:c [{:x 1}]}} "clears normalized table entries that has an id of not found" (merge/sweep-merge {:table {1 {:a 2}}} {:table {::merge/not-found {:db/id ::merge/not-found}}}) => {:table {1 {:a 2}}} "clears idents whose ids were not found" (merge/sweep-merge {} {:table {1 {:db/id 1 :the-thing [:table-1 ::merge/not-found]}} :thing [:table-2 ::merge/not-found]}) => {:table {1 {:db/id 1}}} "sweeps not-found values from normalized table merges" (merge/sweep-merge {:subpanel [:dashboard :panel] :dashboard {:panel {:view-mode :detail :surveys {:ui/fetch-state {:post-mutation 's}}}} } {:subpanel [:dashboard :panel] :dashboard {:panel {:view-mode :detail :surveys [[:s 1] [:s 2]]}} :s { 1 {:db/id 1, :survey/launch-date ::merge/not-found} 2 {:db/id 2, :survey/launch-date "2012-12-22"} }}) => {:subpanel [:dashboard :panel] :dashboard {:panel {:view-mode :detail :surveys [[:s 1] 
[:s 2]]}} :s { 1 {:db/id 1} 2 {:db/id 2 :survey/launch-date "2012-12-22"} }} "overwrites target (non-map) value if incoming value is a map" (merge/sweep-merge {:a 1 :c 2} {:a 2 :c {:b 1}}) => {:a 2 :c {:b 1}}))
[ { "context": "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n;; @ Copyright (c) Michael Leachim ", "end": 124, "score": 0.9997689723968506, "start": 109, "tag": "NAME", "value": "Michael Leachim" }, { "context": " @\n;; @@@@@@ At 2018-10-10 20:52 <mklimoff222@gmail.com> @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n\n(ns wirefra", "end": 502, "score": 0.9999297857284546, "start": 481, "tag": "EMAIL", "value": "mklimoff222@gmail.com" } ]
src/wireframe/utils.clj
MichaelLeachim/wireframecss
1
;; @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ;; @ Copyright (c) Michael Leachim @ ;; @ You can find additional information regarding licensing of this work in LICENSE.md @ ;; @ You must not remove this notice, or any other, from this software. @ ;; @ All rights reserved. @ ;; @@@@@@ At 2018-10-10 20:52 <mklimoff222@gmail.com> @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ (ns wireframe.utils (:require [garden.core :as garden] [wireframe.config :as config :refer [*ANGRY* *PREFIX*]])) (defn- black-font-calculator [r g b] (> (/ (+ (* 0.299 r) (* 0.587 g) (* 0.114 b)) 255) 0.5)) (defn black-font? [color] (let [colors (rest (clojure.string/split color #"")) red (take 2 colors) green (take 2 (drop 2 colors)) blue (take 2 (drop 4 colors))] (apply black-font-calculator (map #(-> (conj % "0x") (clojure.string/join) (read-string)) [red green blue])))) (defn scale-inplace [A B C D X] (+ (* C (- 1 (/ (- X A) (- B A)))) (* D (/ (- X A) (- B A))))) (defn get-class-names [item] (distinct (re-seq (re-pattern (str "\\." *PREFIX* "[^\\s:]+")) (garden/css item)))) (defn angry [item] (if config/*ANGRY* (str item " !important") item)) (defn any-to-string [item] (let [i (str item)] (if (= \: (first i)) (apply str (rest i)) i))) (defn kstr [& whatever] (as-> (map any-to-string whatever) $ (clojure.string/join "-" $) (clojure.string/split $ #"-+") (clojure.string/join "-" $))) (defn dstr [& whatever] (as-> (map any-to-string whatever) $ (if config/*ANGRY* (concat $ ["angry"]) $) (concat [(str "." *PREFIX*)] $) (clojure.string/join "-" $) (clojure.string/split $ #"-+") (clojure.string/join "-" $))) (defn whatever [] config/*ANGRY*)
110999
;; @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ;; @ Copyright (c) <NAME> @ ;; @ You can find additional information regarding licensing of this work in LICENSE.md @ ;; @ You must not remove this notice, or any other, from this software. @ ;; @ All rights reserved. @ ;; @@@@@@ At 2018-10-10 20:52 <<EMAIL>> @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ (ns wireframe.utils (:require [garden.core :as garden] [wireframe.config :as config :refer [*ANGRY* *PREFIX*]])) (defn- black-font-calculator [r g b] (> (/ (+ (* 0.299 r) (* 0.587 g) (* 0.114 b)) 255) 0.5)) (defn black-font? [color] (let [colors (rest (clojure.string/split color #"")) red (take 2 colors) green (take 2 (drop 2 colors)) blue (take 2 (drop 4 colors))] (apply black-font-calculator (map #(-> (conj % "0x") (clojure.string/join) (read-string)) [red green blue])))) (defn scale-inplace [A B C D X] (+ (* C (- 1 (/ (- X A) (- B A)))) (* D (/ (- X A) (- B A))))) (defn get-class-names [item] (distinct (re-seq (re-pattern (str "\\." *PREFIX* "[^\\s:]+")) (garden/css item)))) (defn angry [item] (if config/*ANGRY* (str item " !important") item)) (defn any-to-string [item] (let [i (str item)] (if (= \: (first i)) (apply str (rest i)) i))) (defn kstr [& whatever] (as-> (map any-to-string whatever) $ (clojure.string/join "-" $) (clojure.string/split $ #"-+") (clojure.string/join "-" $))) (defn dstr [& whatever] (as-> (map any-to-string whatever) $ (if config/*ANGRY* (concat $ ["angry"]) $) (concat [(str "." *PREFIX*)] $) (clojure.string/join "-" $) (clojure.string/split $ #"-+") (clojure.string/join "-" $))) (defn whatever [] config/*ANGRY*)
true
;; @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ;; @ Copyright (c) PI:NAME:<NAME>END_PI @ ;; @ You can find additional information regarding licensing of this work in LICENSE.md @ ;; @ You must not remove this notice, or any other, from this software. @ ;; @ All rights reserved. @ ;; @@@@@@ At 2018-10-10 20:52 <PI:EMAIL:<EMAIL>END_PI> @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ (ns wireframe.utils (:require [garden.core :as garden] [wireframe.config :as config :refer [*ANGRY* *PREFIX*]])) (defn- black-font-calculator [r g b] (> (/ (+ (* 0.299 r) (* 0.587 g) (* 0.114 b)) 255) 0.5)) (defn black-font? [color] (let [colors (rest (clojure.string/split color #"")) red (take 2 colors) green (take 2 (drop 2 colors)) blue (take 2 (drop 4 colors))] (apply black-font-calculator (map #(-> (conj % "0x") (clojure.string/join) (read-string)) [red green blue])))) (defn scale-inplace [A B C D X] (+ (* C (- 1 (/ (- X A) (- B A)))) (* D (/ (- X A) (- B A))))) (defn get-class-names [item] (distinct (re-seq (re-pattern (str "\\." *PREFIX* "[^\\s:]+")) (garden/css item)))) (defn angry [item] (if config/*ANGRY* (str item " !important") item)) (defn any-to-string [item] (let [i (str item)] (if (= \: (first i)) (apply str (rest i)) i))) (defn kstr [& whatever] (as-> (map any-to-string whatever) $ (clojure.string/join "-" $) (clojure.string/split $ #"-+") (clojure.string/join "-" $))) (defn dstr [& whatever] (as-> (map any-to-string whatever) $ (if config/*ANGRY* (concat $ ["angry"]) $) (concat [(str "." *PREFIX*)] $) (clojure.string/join "-" $) (clojure.string/split $ #"-+") (clojure.string/join "-" $))) (defn whatever [] config/*ANGRY*)
[ { "context": " -- functional string utilities for Clojure\n\n;; by Stuart Sierra, http://stuartsierra.com/\n;; August 19, 2009\n\n;; ", "end": 82, "score": 0.9998817443847656, "start": 69, "tag": "NAME", "value": "Stuart Sierra" }, { "context": "rtsierra.com/\n;; August 19, 2009\n\n;; Copyright (c) Stuart Sierra, 2009. All rights reserved. The use\n;; and distr", "end": 159, "score": 0.9998841285705566, "start": 146, "tag": "NAME", "value": "Stuart Sierra" }, { "context": " any other, from this software.\n\n\n(ns #^{:author \"Stuart Sierra\"\n :doc \"This is a library of string manipul", "end": 627, "score": 0.9998916983604431, "start": 614, "tag": "NAME", "value": "Stuart Sierra" }, { "context": "ome ideas are borrowed from\n http://github.com/francoisdevlin/devlinsf-clojure-utils/\"}\n clojure.contrib.str-ut", "end": 1182, "score": 0.9926236271858215, "start": 1168, "tag": "USERNAME", "value": "francoisdevlin" }, { "context": "\\n\" s)))\n\n;; borrowed from compojure.str-utils, by James Reeves, EPL 1.0\n(defn #^String map-str\n \"Apply f to eac", "end": 10193, "score": 0.999708890914917, "start": 10181, "tag": "NAME", "value": "James Reeves" }, { "context": " coll)))\n\n;; borrowed from compojure.str-utils, by James Reeves, EPL 1.0\n(defn grep\n \"Filters elements of coll b", "end": 10397, "score": 0.9997036457061768, "start": 10385, "tag": "NAME", "value": "James Reeves" } ]
ThirdParty/clojure-contrib-1.1.0/src/clojure/contrib/str_utils2.clj
allertonm/Couverjure
3
;;; str_utils2.clj -- functional string utilities for Clojure ;; by Stuart Sierra, http://stuartsierra.com/ ;; August 19, 2009 ;; Copyright (c) Stuart Sierra, 2009. All rights reserved. The use ;; and distribution terms for this software are covered by the Eclipse ;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) ;; which can be found in the file epl-v10.html at the root of this ;; distribution. By using this software in any fashion, you are ;; agreeing to be bound by the terms of this license. You must not ;; remove this notice, or any other, from this software. (ns #^{:author "Stuart Sierra" :doc "This is a library of string manipulation functions. It is intented as a replacement for clojure.contrib.str-utils. You cannot (use 'clojure.contrib.str-utils2) because it defines functions with the same names as functions in clojure.core. Instead, do (require '[clojure.contrib.str-utils2 :as s]) or something similar. Goals: 1. Be functional 2. String argument first, to work with -> 3. Performance linear in string length Some ideas are borrowed from http://github.com/francoisdevlin/devlinsf-clojure-utils/"} clojure.contrib.str-utils2 (:refer-clojure :exclude (take replace drop butlast partition contains? get repeat reverse partial)) (:import (java.util.regex Pattern))) (defmacro dochars "bindings => [name string] Repeatedly executes body, with name bound to each character in string. Does NOT handle Unicode supplementary characters (above U+FFFF)." [bindings & body] (assert (vector bindings)) (assert (= 2 (count bindings))) ;; This seems to be the fastest way to iterate over characters. `(let [#^String s# ~(second bindings)] (dotimes [i# (.length s#)] (let [~(first bindings) (.charAt s# i#)] ~@body)))) (defmacro docodepoints "bindings => [name string] Repeatedly executes body, with name bound to the integer code point of each Unicode character in the string. Handles Unicode supplementary characters (above U+FFFF) correctly." 
[bindings & body] (assert (vector bindings)) (assert (= 2 (count bindings))) (let [character (first bindings) string (second bindings)] `(let [#^String s# ~string len# (.length s#)] (loop [i# 0] (when (< i# len#) (let [~character (.charAt s# i#)] (if (Character/isHighSurrogate ~character) (let [~character (.codePointAt s# i#)] ~@body (recur (+ 2 i#))) (let [~character (int ~character)] ~@body (recur (inc i#)))))))))) (defn codepoints "Returns a sequence of integer Unicode code points in s. Handles Unicode supplementary characters (above U+FFFF) correctly." [#^String s] (let [len (.length s) f (fn thisfn [#^String s i] (when (< i len) (let [c (.charAt s i)] (if (Character/isHighSurrogate c) (cons (.codePointAt s i) (thisfn s (+ 2 i))) (cons (int c) (thisfn s (inc i)))))))] (lazy-seq (f s 0)))) (defn #^String escape "Returns a new String by applying cmap (a function or a map) to each character in s. If cmap returns nil, the original character is added to the output unchanged." [#^String s cmap] (let [buffer (StringBuilder. (.length s))] (dochars [c s] (if-let [r (cmap c)] (.append buffer r) (.append buffer c))) (.toString buffer))) (defn blank? "True if s is nil, empty, or contains only whitespace." [#^String s] (every? (fn [#^Character c] (Character/isWhitespace c)) s)) (defn #^String take "Take first n characters from s, up to the length of s. Note the argument order is the opposite of clojure.core/take; this is to keep the string as the first argument for use with ->" [#^String s n] (if (< (count s) n) s (.substring s 0 n))) (defn #^String drop "Drops first n characters from s. Returns an empty string if n is greater than the length of s. Note the argument order is the opposite of clojure.core/drop; this is to keep the string as the first argument for use with ->" [#^String s n] (if (< (count s) n) "" (.substring s n))) (defn #^String butlast "Returns s without the last n characters. Returns an empty string if n is greater than the length of s. 
Note the argument order is the opposite of clojure.core/butlast; this is to keep the string as the first argument for use with ->" [#^String s n] (if (< (count s) n) "" (.substring s 0 (- (count s) n)))) (defn #^String tail "Returns the last n characters of s." [#^String s n] (if (< (count s) n) s (.substring s (- (count s) n)))) (defn #^String repeat "Returns a new String containing s repeated n times." [#^String s n] (apply str (clojure.core/repeat n s))) (defn #^String reverse "Returns s with its characters reversed." [#^String s] (.toString (.reverse (StringBuilder. s)))) (defmulti #^{:doc "Replaces all instances of pattern in string with replacement. Allowed argument types for pattern and replacement are: 1. String and String 2. Character and Character 3. regex Pattern and String (Uses java.util.regex.Matcher.replaceAll) 4. regex Pattern and function (Calls function with re-groups of each match, uses return value as replacement.)" :arglists '([string pattern replacement]) :tag String} replace (fn [#^String string pattern replacement] [(class pattern) (class replacement)])) (defmethod replace [String String] [#^String s #^String a #^String b] (.replace s a b)) (defmethod replace [Character Character] [#^String s #^Character a #^Character b] (.replace s a b)) (defmethod replace [Pattern String] [#^String s re replacement] (.replaceAll (re-matcher re s) replacement)) (defmethod replace [Pattern clojure.lang.IFn] [#^String s re replacement] (let [m (re-matcher re s)] (let [buffer (StringBuffer. (.length s))] (loop [] (if (.find m) (do (.appendReplacement m buffer (replacement (re-groups m))) (recur)) (do (.appendTail m buffer) (.toString buffer))))))) (defmulti #^{:doc "Replaces the first instance of pattern in s with replacement. Allowed argument types for pattern and replacement are: 1. String and String 2. regex Pattern and String (Uses java.util.regex.Matcher.replaceAll) 3. 
regex Pattern and function " :arglists '([s pattern replacement]) :tag String} replace-first (fn [s pattern replacement] [(class pattern) (class replacement)])) (defmethod replace-first [String String] [#^String s pattern replacement] (.replaceFirst (re-matcher (Pattern/quote pattern) s) replacement)) (defmethod replace-first [Pattern String] [#^String s re replacement] (.replaceFirst (re-matcher re s) replacement)) (defmethod replace-first [Pattern clojure.lang.IFn] [#^String s #^Pattern re f] (let [m (re-matcher re s)] (let [buffer (StringBuffer.)] (if (.find m) (let [rep (f (re-groups m))] (.appendReplacement m buffer rep) (.appendTail m buffer) (str buffer)))))) (defn partition "Splits the string into a lazy sequence of substrings, alternating between substrings that match the patthern and the substrings between the matches. The sequence always starts with the substring before the first match, or an empty string if the beginning of the string matches. For example: (partition \"abc123def\" #\"[a-z]+\") returns: (\"\" \"abc\" \"123\" \"def\")" [#^String s #^Pattern re] (let [m (re-matcher re s)] ((fn step [prevend] (lazy-seq (if (.find m) (cons (.subSequence s prevend (.start m)) (cons (re-groups m) (step (+ (.start m) (count (.group m)))))) (when (< prevend (.length s)) (list (.subSequence s prevend (.length s))))))) 0))) (defn #^String join "Returns a string of all elements in coll, separated by separator. Like Perl's join." [#^String separator coll] (apply str (interpose separator coll))) (defn #^String chop "Removes the last character of string, does nothing on a zero-length string." [#^String s] (let [size (count s)] (if (zero? size) s (subs s 0 (dec (count s)))))) (defn #^String chomp "Removes all trailing newline \\n or return \\r characters from string. Note: String.trim() is similar and faster." [#^String s] (replace s #"[\r\n]+$" "")) (defn title-case [#^String s] (throw (Exception. 
"title-case not implemeted yet"))) (defn #^String swap-case "Changes upper case characters to lower case and vice-versa. Handles Unicode supplementary characters correctly. Uses the locale-sensitive String.toUpperCase() and String.toLowerCase() methods." [#^String s] (let [buffer (StringBuilder. (.length s)) ;; array to make a String from one code point #^"[I" array (make-array Integer/TYPE 1)] (docodepoints [c s] (aset-int array 0 c) (if (Character/isLowerCase c) ;; Character.toUpperCase is not locale-sensitive, but ;; String.toUpperCase is; so we use a String. (.append buffer (.toUpperCase (String. array 0 1))) (.append buffer (.toLowerCase (String. array 0 1))))) (.toString buffer))) (defn #^String capitalize "Converts first character of the string to upper-case, all other characters to lower-case." [#^String s] (if (< (count s) 2) (.toUpperCase s) (str (.toUpperCase #^String (subs s 0 1)) (.toLowerCase #^String (subs s 1))))) (defn #^String ltrim "Removes whitespace from the left side of string." [#^String s] (replace s #"^\s+" "")) (defn #^String rtrim "Removes whitespace from the right side of string." [#^String s] (replace s #"\s+$" "")) (defn split-lines "Splits s on \\n or \\r\\n." [#^String s] (seq (.split #"\r?\n" s))) ;; borrowed from compojure.str-utils, by James Reeves, EPL 1.0 (defn #^String map-str "Apply f to each element of coll, concatenate all results into a String." [f coll] (apply str (map f coll))) ;; borrowed from compojure.str-utils, by James Reeves, EPL 1.0 (defn grep "Filters elements of coll by a regular expression. The String representation (with str) of each element is tested with re-find." [re coll] (filter (fn [x] (re-find re (str x))) coll)) (defn partial "Like clojure.core/partial for functions that take their primary argument first. Takes a function f and its arguments, NOT INCLUDING the first argument. Returns a new function whose first argument will be the first argument to f. 
Example: (str-utils2/partial str-utils2/take 2) ;;=> (fn [s] (str-utils2/take s 2))" [f & args] (fn [s & more] (apply f s (concat args more)))) ;;; WRAPPERS ;; The following functions are simple wrappers around java.lang.String ;; functions. They are included here for completeness, and for use ;; when mapping over a collection of strings. (defn #^String upper-case "Converts string to all upper-case." [#^String s] (.toUpperCase s)) (defn #^String lower-case "Converts string to all lower-case." [#^String s] (.toLowerCase s)) (defn split "Splits string on a regular expression. Optional argument limit is the maximum number of splits." ([#^String s #^Pattern re] (seq (.split re s))) ([#^String s #^Pattern re limit] (seq (.split re s limit)))) (defn #^String trim "Removes whitespace from both ends of string." [#^String s] (.trim s)) (defn #^String contains? "True if s contains the substring." [#^String s substring] (.contains s substring)) (defn #^String get "Gets the i'th character in string." [#^String s i] (.charAt s i))
120697
;;; str_utils2.clj -- functional string utilities for Clojure ;; by <NAME>, http://stuartsierra.com/ ;; August 19, 2009 ;; Copyright (c) <NAME>, 2009. All rights reserved. The use ;; and distribution terms for this software are covered by the Eclipse ;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) ;; which can be found in the file epl-v10.html at the root of this ;; distribution. By using this software in any fashion, you are ;; agreeing to be bound by the terms of this license. You must not ;; remove this notice, or any other, from this software. (ns #^{:author "<NAME>" :doc "This is a library of string manipulation functions. It is intented as a replacement for clojure.contrib.str-utils. You cannot (use 'clojure.contrib.str-utils2) because it defines functions with the same names as functions in clojure.core. Instead, do (require '[clojure.contrib.str-utils2 :as s]) or something similar. Goals: 1. Be functional 2. String argument first, to work with -> 3. Performance linear in string length Some ideas are borrowed from http://github.com/francoisdevlin/devlinsf-clojure-utils/"} clojure.contrib.str-utils2 (:refer-clojure :exclude (take replace drop butlast partition contains? get repeat reverse partial)) (:import (java.util.regex Pattern))) (defmacro dochars "bindings => [name string] Repeatedly executes body, with name bound to each character in string. Does NOT handle Unicode supplementary characters (above U+FFFF)." [bindings & body] (assert (vector bindings)) (assert (= 2 (count bindings))) ;; This seems to be the fastest way to iterate over characters. `(let [#^String s# ~(second bindings)] (dotimes [i# (.length s#)] (let [~(first bindings) (.charAt s# i#)] ~@body)))) (defmacro docodepoints "bindings => [name string] Repeatedly executes body, with name bound to the integer code point of each Unicode character in the string. Handles Unicode supplementary characters (above U+FFFF) correctly." 
[bindings & body] (assert (vector bindings)) (assert (= 2 (count bindings))) (let [character (first bindings) string (second bindings)] `(let [#^String s# ~string len# (.length s#)] (loop [i# 0] (when (< i# len#) (let [~character (.charAt s# i#)] (if (Character/isHighSurrogate ~character) (let [~character (.codePointAt s# i#)] ~@body (recur (+ 2 i#))) (let [~character (int ~character)] ~@body (recur (inc i#)))))))))) (defn codepoints "Returns a sequence of integer Unicode code points in s. Handles Unicode supplementary characters (above U+FFFF) correctly." [#^String s] (let [len (.length s) f (fn thisfn [#^String s i] (when (< i len) (let [c (.charAt s i)] (if (Character/isHighSurrogate c) (cons (.codePointAt s i) (thisfn s (+ 2 i))) (cons (int c) (thisfn s (inc i)))))))] (lazy-seq (f s 0)))) (defn #^String escape "Returns a new String by applying cmap (a function or a map) to each character in s. If cmap returns nil, the original character is added to the output unchanged." [#^String s cmap] (let [buffer (StringBuilder. (.length s))] (dochars [c s] (if-let [r (cmap c)] (.append buffer r) (.append buffer c))) (.toString buffer))) (defn blank? "True if s is nil, empty, or contains only whitespace." [#^String s] (every? (fn [#^Character c] (Character/isWhitespace c)) s)) (defn #^String take "Take first n characters from s, up to the length of s. Note the argument order is the opposite of clojure.core/take; this is to keep the string as the first argument for use with ->" [#^String s n] (if (< (count s) n) s (.substring s 0 n))) (defn #^String drop "Drops first n characters from s. Returns an empty string if n is greater than the length of s. Note the argument order is the opposite of clojure.core/drop; this is to keep the string as the first argument for use with ->" [#^String s n] (if (< (count s) n) "" (.substring s n))) (defn #^String butlast "Returns s without the last n characters. Returns an empty string if n is greater than the length of s. 
Note the argument order is the opposite of clojure.core/butlast; this is to keep the string as the first argument for use with ->" [#^String s n] (if (< (count s) n) "" (.substring s 0 (- (count s) n)))) (defn #^String tail "Returns the last n characters of s." [#^String s n] (if (< (count s) n) s (.substring s (- (count s) n)))) (defn #^String repeat "Returns a new String containing s repeated n times." [#^String s n] (apply str (clojure.core/repeat n s))) (defn #^String reverse "Returns s with its characters reversed." [#^String s] (.toString (.reverse (StringBuilder. s)))) (defmulti #^{:doc "Replaces all instances of pattern in string with replacement. Allowed argument types for pattern and replacement are: 1. String and String 2. Character and Character 3. regex Pattern and String (Uses java.util.regex.Matcher.replaceAll) 4. regex Pattern and function (Calls function with re-groups of each match, uses return value as replacement.)" :arglists '([string pattern replacement]) :tag String} replace (fn [#^String string pattern replacement] [(class pattern) (class replacement)])) (defmethod replace [String String] [#^String s #^String a #^String b] (.replace s a b)) (defmethod replace [Character Character] [#^String s #^Character a #^Character b] (.replace s a b)) (defmethod replace [Pattern String] [#^String s re replacement] (.replaceAll (re-matcher re s) replacement)) (defmethod replace [Pattern clojure.lang.IFn] [#^String s re replacement] (let [m (re-matcher re s)] (let [buffer (StringBuffer. (.length s))] (loop [] (if (.find m) (do (.appendReplacement m buffer (replacement (re-groups m))) (recur)) (do (.appendTail m buffer) (.toString buffer))))))) (defmulti #^{:doc "Replaces the first instance of pattern in s with replacement. Allowed argument types for pattern and replacement are: 1. String and String 2. regex Pattern and String (Uses java.util.regex.Matcher.replaceAll) 3. 
regex Pattern and function " :arglists '([s pattern replacement]) :tag String} replace-first (fn [s pattern replacement] [(class pattern) (class replacement)])) (defmethod replace-first [String String] [#^String s pattern replacement] (.replaceFirst (re-matcher (Pattern/quote pattern) s) replacement)) (defmethod replace-first [Pattern String] [#^String s re replacement] (.replaceFirst (re-matcher re s) replacement)) (defmethod replace-first [Pattern clojure.lang.IFn] [#^String s #^Pattern re f] (let [m (re-matcher re s)] (let [buffer (StringBuffer.)] (if (.find m) (let [rep (f (re-groups m))] (.appendReplacement m buffer rep) (.appendTail m buffer) (str buffer)))))) (defn partition "Splits the string into a lazy sequence of substrings, alternating between substrings that match the patthern and the substrings between the matches. The sequence always starts with the substring before the first match, or an empty string if the beginning of the string matches. For example: (partition \"abc123def\" #\"[a-z]+\") returns: (\"\" \"abc\" \"123\" \"def\")" [#^String s #^Pattern re] (let [m (re-matcher re s)] ((fn step [prevend] (lazy-seq (if (.find m) (cons (.subSequence s prevend (.start m)) (cons (re-groups m) (step (+ (.start m) (count (.group m)))))) (when (< prevend (.length s)) (list (.subSequence s prevend (.length s))))))) 0))) (defn #^String join "Returns a string of all elements in coll, separated by separator. Like Perl's join." [#^String separator coll] (apply str (interpose separator coll))) (defn #^String chop "Removes the last character of string, does nothing on a zero-length string." [#^String s] (let [size (count s)] (if (zero? size) s (subs s 0 (dec (count s)))))) (defn #^String chomp "Removes all trailing newline \\n or return \\r characters from string. Note: String.trim() is similar and faster." [#^String s] (replace s #"[\r\n]+$" "")) (defn title-case [#^String s] (throw (Exception. 
"title-case not implemeted yet"))) (defn #^String swap-case "Changes upper case characters to lower case and vice-versa. Handles Unicode supplementary characters correctly. Uses the locale-sensitive String.toUpperCase() and String.toLowerCase() methods." [#^String s] (let [buffer (StringBuilder. (.length s)) ;; array to make a String from one code point #^"[I" array (make-array Integer/TYPE 1)] (docodepoints [c s] (aset-int array 0 c) (if (Character/isLowerCase c) ;; Character.toUpperCase is not locale-sensitive, but ;; String.toUpperCase is; so we use a String. (.append buffer (.toUpperCase (String. array 0 1))) (.append buffer (.toLowerCase (String. array 0 1))))) (.toString buffer))) (defn #^String capitalize "Converts first character of the string to upper-case, all other characters to lower-case." [#^String s] (if (< (count s) 2) (.toUpperCase s) (str (.toUpperCase #^String (subs s 0 1)) (.toLowerCase #^String (subs s 1))))) (defn #^String ltrim "Removes whitespace from the left side of string." [#^String s] (replace s #"^\s+" "")) (defn #^String rtrim "Removes whitespace from the right side of string." [#^String s] (replace s #"\s+$" "")) (defn split-lines "Splits s on \\n or \\r\\n." [#^String s] (seq (.split #"\r?\n" s))) ;; borrowed from compojure.str-utils, by <NAME>, EPL 1.0 (defn #^String map-str "Apply f to each element of coll, concatenate all results into a String." [f coll] (apply str (map f coll))) ;; borrowed from compojure.str-utils, by <NAME>, EPL 1.0 (defn grep "Filters elements of coll by a regular expression. The String representation (with str) of each element is tested with re-find." [re coll] (filter (fn [x] (re-find re (str x))) coll)) (defn partial "Like clojure.core/partial for functions that take their primary argument first. Takes a function f and its arguments, NOT INCLUDING the first argument. Returns a new function whose first argument will be the first argument to f. 
Example: (str-utils2/partial str-utils2/take 2) ;;=> (fn [s] (str-utils2/take s 2))" [f & args] (fn [s & more] (apply f s (concat args more)))) ;;; WRAPPERS ;; The following functions are simple wrappers around java.lang.String ;; functions. They are included here for completeness, and for use ;; when mapping over a collection of strings. (defn #^String upper-case "Converts string to all upper-case." [#^String s] (.toUpperCase s)) (defn #^String lower-case "Converts string to all lower-case." [#^String s] (.toLowerCase s)) (defn split "Splits string on a regular expression. Optional argument limit is the maximum number of splits." ([#^String s #^Pattern re] (seq (.split re s))) ([#^String s #^Pattern re limit] (seq (.split re s limit)))) (defn #^String trim "Removes whitespace from both ends of string." [#^String s] (.trim s)) (defn #^String contains? "True if s contains the substring." [#^String s substring] (.contains s substring)) (defn #^String get "Gets the i'th character in string." [#^String s i] (.charAt s i))
true
;;; str_utils2.clj -- functional string utilities for Clojure ;; by PI:NAME:<NAME>END_PI, http://stuartsierra.com/ ;; August 19, 2009 ;; Copyright (c) PI:NAME:<NAME>END_PI, 2009. All rights reserved. The use ;; and distribution terms for this software are covered by the Eclipse ;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) ;; which can be found in the file epl-v10.html at the root of this ;; distribution. By using this software in any fashion, you are ;; agreeing to be bound by the terms of this license. You must not ;; remove this notice, or any other, from this software. (ns #^{:author "PI:NAME:<NAME>END_PI" :doc "This is a library of string manipulation functions. It is intented as a replacement for clojure.contrib.str-utils. You cannot (use 'clojure.contrib.str-utils2) because it defines functions with the same names as functions in clojure.core. Instead, do (require '[clojure.contrib.str-utils2 :as s]) or something similar. Goals: 1. Be functional 2. String argument first, to work with -> 3. Performance linear in string length Some ideas are borrowed from http://github.com/francoisdevlin/devlinsf-clojure-utils/"} clojure.contrib.str-utils2 (:refer-clojure :exclude (take replace drop butlast partition contains? get repeat reverse partial)) (:import (java.util.regex Pattern))) (defmacro dochars "bindings => [name string] Repeatedly executes body, with name bound to each character in string. Does NOT handle Unicode supplementary characters (above U+FFFF)." [bindings & body] (assert (vector bindings)) (assert (= 2 (count bindings))) ;; This seems to be the fastest way to iterate over characters. `(let [#^String s# ~(second bindings)] (dotimes [i# (.length s#)] (let [~(first bindings) (.charAt s# i#)] ~@body)))) (defmacro docodepoints "bindings => [name string] Repeatedly executes body, with name bound to the integer code point of each Unicode character in the string. Handles Unicode supplementary characters (above U+FFFF) correctly." 
[bindings & body] (assert (vector bindings)) (assert (= 2 (count bindings))) (let [character (first bindings) string (second bindings)] `(let [#^String s# ~string len# (.length s#)] (loop [i# 0] (when (< i# len#) (let [~character (.charAt s# i#)] (if (Character/isHighSurrogate ~character) (let [~character (.codePointAt s# i#)] ~@body (recur (+ 2 i#))) (let [~character (int ~character)] ~@body (recur (inc i#)))))))))) (defn codepoints "Returns a sequence of integer Unicode code points in s. Handles Unicode supplementary characters (above U+FFFF) correctly." [#^String s] (let [len (.length s) f (fn thisfn [#^String s i] (when (< i len) (let [c (.charAt s i)] (if (Character/isHighSurrogate c) (cons (.codePointAt s i) (thisfn s (+ 2 i))) (cons (int c) (thisfn s (inc i)))))))] (lazy-seq (f s 0)))) (defn #^String escape "Returns a new String by applying cmap (a function or a map) to each character in s. If cmap returns nil, the original character is added to the output unchanged." [#^String s cmap] (let [buffer (StringBuilder. (.length s))] (dochars [c s] (if-let [r (cmap c)] (.append buffer r) (.append buffer c))) (.toString buffer))) (defn blank? "True if s is nil, empty, or contains only whitespace." [#^String s] (every? (fn [#^Character c] (Character/isWhitespace c)) s)) (defn #^String take "Take first n characters from s, up to the length of s. Note the argument order is the opposite of clojure.core/take; this is to keep the string as the first argument for use with ->" [#^String s n] (if (< (count s) n) s (.substring s 0 n))) (defn #^String drop "Drops first n characters from s. Returns an empty string if n is greater than the length of s. Note the argument order is the opposite of clojure.core/drop; this is to keep the string as the first argument for use with ->" [#^String s n] (if (< (count s) n) "" (.substring s n))) (defn #^String butlast "Returns s without the last n characters. Returns an empty string if n is greater than the length of s. 
Note the argument order is the opposite of clojure.core/butlast; this is to keep the string as the first argument for use with ->" [#^String s n] (if (< (count s) n) "" (.substring s 0 (- (count s) n)))) (defn #^String tail "Returns the last n characters of s." [#^String s n] (if (< (count s) n) s (.substring s (- (count s) n)))) (defn #^String repeat "Returns a new String containing s repeated n times." [#^String s n] (apply str (clojure.core/repeat n s))) (defn #^String reverse "Returns s with its characters reversed." [#^String s] (.toString (.reverse (StringBuilder. s)))) (defmulti #^{:doc "Replaces all instances of pattern in string with replacement. Allowed argument types for pattern and replacement are: 1. String and String 2. Character and Character 3. regex Pattern and String (Uses java.util.regex.Matcher.replaceAll) 4. regex Pattern and function (Calls function with re-groups of each match, uses return value as replacement.)" :arglists '([string pattern replacement]) :tag String} replace (fn [#^String string pattern replacement] [(class pattern) (class replacement)])) (defmethod replace [String String] [#^String s #^String a #^String b] (.replace s a b)) (defmethod replace [Character Character] [#^String s #^Character a #^Character b] (.replace s a b)) (defmethod replace [Pattern String] [#^String s re replacement] (.replaceAll (re-matcher re s) replacement)) (defmethod replace [Pattern clojure.lang.IFn] [#^String s re replacement] (let [m (re-matcher re s)] (let [buffer (StringBuffer. (.length s))] (loop [] (if (.find m) (do (.appendReplacement m buffer (replacement (re-groups m))) (recur)) (do (.appendTail m buffer) (.toString buffer))))))) (defmulti #^{:doc "Replaces the first instance of pattern in s with replacement. Allowed argument types for pattern and replacement are: 1. String and String 2. regex Pattern and String (Uses java.util.regex.Matcher.replaceAll) 3. 
regex Pattern and function " :arglists '([s pattern replacement]) :tag String} replace-first (fn [s pattern replacement] [(class pattern) (class replacement)])) (defmethod replace-first [String String] [#^String s pattern replacement] (.replaceFirst (re-matcher (Pattern/quote pattern) s) replacement)) (defmethod replace-first [Pattern String] [#^String s re replacement] (.replaceFirst (re-matcher re s) replacement)) (defmethod replace-first [Pattern clojure.lang.IFn] [#^String s #^Pattern re f] (let [m (re-matcher re s)] (let [buffer (StringBuffer.)] (if (.find m) (let [rep (f (re-groups m))] (.appendReplacement m buffer rep) (.appendTail m buffer) (str buffer)))))) (defn partition "Splits the string into a lazy sequence of substrings, alternating between substrings that match the patthern and the substrings between the matches. The sequence always starts with the substring before the first match, or an empty string if the beginning of the string matches. For example: (partition \"abc123def\" #\"[a-z]+\") returns: (\"\" \"abc\" \"123\" \"def\")" [#^String s #^Pattern re] (let [m (re-matcher re s)] ((fn step [prevend] (lazy-seq (if (.find m) (cons (.subSequence s prevend (.start m)) (cons (re-groups m) (step (+ (.start m) (count (.group m)))))) (when (< prevend (.length s)) (list (.subSequence s prevend (.length s))))))) 0))) (defn #^String join "Returns a string of all elements in coll, separated by separator. Like Perl's join." [#^String separator coll] (apply str (interpose separator coll))) (defn #^String chop "Removes the last character of string, does nothing on a zero-length string." [#^String s] (let [size (count s)] (if (zero? size) s (subs s 0 (dec (count s)))))) (defn #^String chomp "Removes all trailing newline \\n or return \\r characters from string. Note: String.trim() is similar and faster." [#^String s] (replace s #"[\r\n]+$" "")) (defn title-case [#^String s] (throw (Exception. 
"title-case not implemeted yet"))) (defn #^String swap-case "Changes upper case characters to lower case and vice-versa. Handles Unicode supplementary characters correctly. Uses the locale-sensitive String.toUpperCase() and String.toLowerCase() methods." [#^String s] (let [buffer (StringBuilder. (.length s)) ;; array to make a String from one code point #^"[I" array (make-array Integer/TYPE 1)] (docodepoints [c s] (aset-int array 0 c) (if (Character/isLowerCase c) ;; Character.toUpperCase is not locale-sensitive, but ;; String.toUpperCase is; so we use a String. (.append buffer (.toUpperCase (String. array 0 1))) (.append buffer (.toLowerCase (String. array 0 1))))) (.toString buffer))) (defn #^String capitalize "Converts first character of the string to upper-case, all other characters to lower-case." [#^String s] (if (< (count s) 2) (.toUpperCase s) (str (.toUpperCase #^String (subs s 0 1)) (.toLowerCase #^String (subs s 1))))) (defn #^String ltrim "Removes whitespace from the left side of string." [#^String s] (replace s #"^\s+" "")) (defn #^String rtrim "Removes whitespace from the right side of string." [#^String s] (replace s #"\s+$" "")) (defn split-lines "Splits s on \\n or \\r\\n." [#^String s] (seq (.split #"\r?\n" s))) ;; borrowed from compojure.str-utils, by PI:NAME:<NAME>END_PI, EPL 1.0 (defn #^String map-str "Apply f to each element of coll, concatenate all results into a String." [f coll] (apply str (map f coll))) ;; borrowed from compojure.str-utils, by PI:NAME:<NAME>END_PI, EPL 1.0 (defn grep "Filters elements of coll by a regular expression. The String representation (with str) of each element is tested with re-find." [re coll] (filter (fn [x] (re-find re (str x))) coll)) (defn partial "Like clojure.core/partial for functions that take their primary argument first. Takes a function f and its arguments, NOT INCLUDING the first argument. Returns a new function whose first argument will be the first argument to f. 
Example: (str-utils2/partial str-utils2/take 2) ;;=> (fn [s] (str-utils2/take s 2))" [f & args] (fn [s & more] (apply f s (concat args more)))) ;;; WRAPPERS ;; The following functions are simple wrappers around java.lang.String ;; functions. They are included here for completeness, and for use ;; when mapping over a collection of strings. (defn #^String upper-case "Converts string to all upper-case." [#^String s] (.toUpperCase s)) (defn #^String lower-case "Converts string to all lower-case." [#^String s] (.toLowerCase s)) (defn split "Splits string on a regular expression. Optional argument limit is the maximum number of splits." ([#^String s #^Pattern re] (seq (.split re s))) ([#^String s #^Pattern re limit] (seq (.split re s limit)))) (defn #^String trim "Removes whitespace from both ends of string." [#^String s] (.trim s)) (defn #^String contains? "True if s contains the substring." [#^String s substring] (.contains s substring)) (defn #^String get "Gets the i'th character in string." [#^String s i] (.charAt s i))
[ { "context": " :user \"postgres\"\n :password \"pgsql123\"}}\n )", "end": 1162, "score": 0.9992742538452148, "start": 1154, "tag": "PASSWORD", "value": "pgsql123" } ]
project.clj
willianns/clj-restful-todolist
1
(defproject clj-restful-todolist "0.1.0-SNAPSHOT" :description "FIXME: write description" :url "http://example.com/FIXME" :license {:name "EPL-2.0 OR GPL-2.0-or-later WITH Classpath-exception-2.0" :url "https://www.eclipse.org/legal/epl-2.0/"} :dependencies [[org.clojure/clojure "1.10.1"] ; API libs [prismatic/schema "1.1.12"] [metosin/compojure-api "2.0.0-alpha28"] [ring/ring-jetty-adapter "1.9.1"] ; Database libs [toucan "1.15.4"] [org.postgresql/postgresql "42.2.4"] [migratus "1.2.7"]] :plugins [[migratus-lein "0.7.3"]] :main ^:skip-aot clj-restful-todolist.core :target-path "target/%s" :profiles {:uberjar {:aot :all :jvm-opts ["-Dclojure.compiler.direct-linking=true"]}} :migratus {:store :database :migration-dir "migrations" :db {:classname "org.postgresql.Driver" :subprotocol "postgresql" :subname "//localhost:5432/todosdb" :user "postgres" :password "pgsql123"}} )
8666
(defproject clj-restful-todolist "0.1.0-SNAPSHOT" :description "FIXME: write description" :url "http://example.com/FIXME" :license {:name "EPL-2.0 OR GPL-2.0-or-later WITH Classpath-exception-2.0" :url "https://www.eclipse.org/legal/epl-2.0/"} :dependencies [[org.clojure/clojure "1.10.1"] ; API libs [prismatic/schema "1.1.12"] [metosin/compojure-api "2.0.0-alpha28"] [ring/ring-jetty-adapter "1.9.1"] ; Database libs [toucan "1.15.4"] [org.postgresql/postgresql "42.2.4"] [migratus "1.2.7"]] :plugins [[migratus-lein "0.7.3"]] :main ^:skip-aot clj-restful-todolist.core :target-path "target/%s" :profiles {:uberjar {:aot :all :jvm-opts ["-Dclojure.compiler.direct-linking=true"]}} :migratus {:store :database :migration-dir "migrations" :db {:classname "org.postgresql.Driver" :subprotocol "postgresql" :subname "//localhost:5432/todosdb" :user "postgres" :password "<PASSWORD>"}} )
true
(defproject clj-restful-todolist "0.1.0-SNAPSHOT" :description "FIXME: write description" :url "http://example.com/FIXME" :license {:name "EPL-2.0 OR GPL-2.0-or-later WITH Classpath-exception-2.0" :url "https://www.eclipse.org/legal/epl-2.0/"} :dependencies [[org.clojure/clojure "1.10.1"] ; API libs [prismatic/schema "1.1.12"] [metosin/compojure-api "2.0.0-alpha28"] [ring/ring-jetty-adapter "1.9.1"] ; Database libs [toucan "1.15.4"] [org.postgresql/postgresql "42.2.4"] [migratus "1.2.7"]] :plugins [[migratus-lein "0.7.3"]] :main ^:skip-aot clj-restful-todolist.core :target-path "target/%s" :profiles {:uberjar {:aot :all :jvm-opts ["-Dclojure.compiler.direct-linking=true"]}} :migratus {:store :database :migration-dir "migrations" :db {:classname "org.postgresql.Driver" :subprotocol "postgresql" :subname "//localhost:5432/todosdb" :user "postgres" :password "PI:PASSWORD:<PASSWORD>END_PI"}} )
[ { "context": " \"\"\n \"Game by Scot Brown & Dmitri Sotnikov\"]\n instructions (map-ind", "end": 2892, "score": 0.9998763203620911, "start": 2882, "tag": "NAME", "value": "Scot Brown" }, { "context": " \"\"\n \"Game by Scot Brown & Dmitri Sotnikov\"]\n instructions (map-indexed (fn [idx text", "end": 2910, "score": 0.9998536705970764, "start": 2895, "tag": "NAME", "value": "Dmitri Sotnikov" } ]
src/graviton/ui.cljs
jsdelivrbot/graviton
19
(ns graviton.ui (:require [graviton.engine :as engine] [graviton.prizes :as prizes])) (defn text-field [text size] (-> (js/PIXI.Text. text (js/PIXI.TextStyle. #js {:fill "#FF00FF" :fontSize size :fontFamily "Arial"})))) (defn text-box [{:keys [text x y]}] {:x x :y y :graphics (text-field text 30) :init (fn [text state] text) :update (fn [])}) (defn button [{:keys [label on-click x y width height]}] (let [text (engine/set-anchor (text-field label 30) 0.5 0.5) button (doto (js/PIXI.Graphics.) (.addChild (do (set! (.-x text) (/ width 2)) (set! (.-y text) (/ height 2)) text)) (.on "pointerdown" on-click) (.lineStyle 2 0xFF00FF 1) (.beginFill 0xFF00BB 0.25) (.drawRoundedRect 0 0 width height 15) (.endFill))] (set! (.-interactive button) true) (set! (.-buttonMode button) true) {:graphics button :x x :y y :init (fn [{:keys [graphics] :as button} state] (engine/set-graphics-position button) (.addChild (:stage state) graphics) button) :update (fn [])})) (defn start-button [state parent-graphics] (let [graphics (doto (js/PIXI.Graphics.) (.lineStyle 2 0xFF00FF 1) (.beginFill 0xFF00BB 0.25) (.drawRoundedRect -10 -3 120 40 15) (.endFill) (.addChild (text-field "START" 30)) (.on "pointerdown" #(do (.removeChild (:stage @state) parent-graphics) (vswap! state assoc :game-state :started) (vswap! state prizes/random-prizes) (engine/add-stage-on-click-event state) (engine/init-scene state))))] (set! (.-interactive graphics) true) (set! (.-buttonMode graphics) true) graphics)) (defn help-menu [state] (let [graphics (js/PIXI.Graphics.) text-lines ["Instructions:" "" "Drag to create gravity wells that will guide your ship to the prizes" "The ship will start moving along the gravity fields" "Keep placing attractors if you need to, but be careful! If you place too many, you'll create death zones that will end the game when touched!" "" "Have fun!" "" "" "Game by Scot Brown & Dmitri Sotnikov"] instructions (map-indexed (fn [idx text] (let [text (text-field text 15)] (set! 
(.-x text) 50) (set! (.-y text) (* 30 (inc idx))) text)) text-lines) button (start-button state graphics)] (doseq [line instructions] (.addChild graphics line)) (set! (.-x button) (- (/ (:width @state) 2) 60)) (set! (.-y button) (- (/ (:height @state) 2) 20)) (.addChild graphics button) (.addChild (:stage @state) graphics) state))
3476
(ns graviton.ui (:require [graviton.engine :as engine] [graviton.prizes :as prizes])) (defn text-field [text size] (-> (js/PIXI.Text. text (js/PIXI.TextStyle. #js {:fill "#FF00FF" :fontSize size :fontFamily "Arial"})))) (defn text-box [{:keys [text x y]}] {:x x :y y :graphics (text-field text 30) :init (fn [text state] text) :update (fn [])}) (defn button [{:keys [label on-click x y width height]}] (let [text (engine/set-anchor (text-field label 30) 0.5 0.5) button (doto (js/PIXI.Graphics.) (.addChild (do (set! (.-x text) (/ width 2)) (set! (.-y text) (/ height 2)) text)) (.on "pointerdown" on-click) (.lineStyle 2 0xFF00FF 1) (.beginFill 0xFF00BB 0.25) (.drawRoundedRect 0 0 width height 15) (.endFill))] (set! (.-interactive button) true) (set! (.-buttonMode button) true) {:graphics button :x x :y y :init (fn [{:keys [graphics] :as button} state] (engine/set-graphics-position button) (.addChild (:stage state) graphics) button) :update (fn [])})) (defn start-button [state parent-graphics] (let [graphics (doto (js/PIXI.Graphics.) (.lineStyle 2 0xFF00FF 1) (.beginFill 0xFF00BB 0.25) (.drawRoundedRect -10 -3 120 40 15) (.endFill) (.addChild (text-field "START" 30)) (.on "pointerdown" #(do (.removeChild (:stage @state) parent-graphics) (vswap! state assoc :game-state :started) (vswap! state prizes/random-prizes) (engine/add-stage-on-click-event state) (engine/init-scene state))))] (set! (.-interactive graphics) true) (set! (.-buttonMode graphics) true) graphics)) (defn help-menu [state] (let [graphics (js/PIXI.Graphics.) text-lines ["Instructions:" "" "Drag to create gravity wells that will guide your ship to the prizes" "The ship will start moving along the gravity fields" "Keep placing attractors if you need to, but be careful! If you place too many, you'll create death zones that will end the game when touched!" "" "Have fun!" "" "" "Game by <NAME> & <NAME>"] instructions (map-indexed (fn [idx text] (let [text (text-field text 15)] (set! (.-x text) 50) (set! 
(.-y text) (* 30 (inc idx))) text)) text-lines) button (start-button state graphics)] (doseq [line instructions] (.addChild graphics line)) (set! (.-x button) (- (/ (:width @state) 2) 60)) (set! (.-y button) (- (/ (:height @state) 2) 20)) (.addChild graphics button) (.addChild (:stage @state) graphics) state))
true
(ns graviton.ui (:require [graviton.engine :as engine] [graviton.prizes :as prizes])) (defn text-field [text size] (-> (js/PIXI.Text. text (js/PIXI.TextStyle. #js {:fill "#FF00FF" :fontSize size :fontFamily "Arial"})))) (defn text-box [{:keys [text x y]}] {:x x :y y :graphics (text-field text 30) :init (fn [text state] text) :update (fn [])}) (defn button [{:keys [label on-click x y width height]}] (let [text (engine/set-anchor (text-field label 30) 0.5 0.5) button (doto (js/PIXI.Graphics.) (.addChild (do (set! (.-x text) (/ width 2)) (set! (.-y text) (/ height 2)) text)) (.on "pointerdown" on-click) (.lineStyle 2 0xFF00FF 1) (.beginFill 0xFF00BB 0.25) (.drawRoundedRect 0 0 width height 15) (.endFill))] (set! (.-interactive button) true) (set! (.-buttonMode button) true) {:graphics button :x x :y y :init (fn [{:keys [graphics] :as button} state] (engine/set-graphics-position button) (.addChild (:stage state) graphics) button) :update (fn [])})) (defn start-button [state parent-graphics] (let [graphics (doto (js/PIXI.Graphics.) (.lineStyle 2 0xFF00FF 1) (.beginFill 0xFF00BB 0.25) (.drawRoundedRect -10 -3 120 40 15) (.endFill) (.addChild (text-field "START" 30)) (.on "pointerdown" #(do (.removeChild (:stage @state) parent-graphics) (vswap! state assoc :game-state :started) (vswap! state prizes/random-prizes) (engine/add-stage-on-click-event state) (engine/init-scene state))))] (set! (.-interactive graphics) true) (set! (.-buttonMode graphics) true) graphics)) (defn help-menu [state] (let [graphics (js/PIXI.Graphics.) text-lines ["Instructions:" "" "Drag to create gravity wells that will guide your ship to the prizes" "The ship will start moving along the gravity fields" "Keep placing attractors if you need to, but be careful! If you place too many, you'll create death zones that will end the game when touched!" "" "Have fun!" "" "" "Game by PI:NAME:<NAME>END_PI & PI:NAME:<NAME>END_PI"] instructions (map-indexed (fn [idx text] (let [text (text-field text 15)] (set! 
(.-x text) 50) (set! (.-y text) (* 30 (inc idx))) text)) text-lines) button (start-button state graphics)] (doseq [line instructions] (.addChild graphics line)) (set! (.-x button) (- (/ (:width @state) 2) 60)) (set! (.-y button) (- (/ (:height @state) 2) 20)) (.addChild graphics button) (.addChild (:stage @state) graphics) state))
[ { "context": "t [path \"path/passed/in\"\n token-passed-in \"fake-token\"\n vault-url \"https://vault.example.am", "end": 441, "score": 0.5334453582763672, "start": 437, "tag": "PASSWORD", "value": "fake" }, { "context": "th \"path/passed/in\"\n token-passed-in \"fake-token\"\n vault-url \"https://vault.example.amperit", "end": 447, "score": 0.5240418314933777, "start": 442, "tag": "KEY", "value": "token" }, { "context": "st\n (let [mount \"mount\"\n token-passed-in \"fake-token\"\n vault-url \"https://vault.example.am", "end": 1397, "score": 0.5551998019218445, "start": 1393, "tag": "PASSWORD", "value": "fake" }, { "context": "(let [mount \"mount\"\n token-passed-in \"fake-token\"\n vault-url \"https://vault.example.amperit", "end": 1403, "score": 0.4990828335285187, "start": 1398, "tag": "KEY", "value": "token" }, { "context": "\"}\n mount \"mount\"\n token-passed-in \"fake-token\"\n vault-url \"https://vault.example.amperit", "end": 2567, "score": 0.8353581428527832, "start": 2557, "tag": "PASSWORD", "value": "fake-token" }, { "context": "ssed-in \"path/passed/in\"\n token-passed-in \"fake-token\"\n vault-url \"https://vault.example.am", "end": 3701, "score": 0.7469824552536011, "start": 3697, "tag": "PASSWORD", "value": "fake" }, { "context": "ssed-in \"path/passed/in\"\n token-passed-in \"fake-token\"\n vault-url \"https://vault.example.am", "end": 6372, "score": 0.5049141645431519, "start": 6368, "tag": "PASSWORD", "value": "fake" }, { "context": "ssed-in \"path/passed/in\"\n token-passed-in \"fake-token\"\n vault-url \"https://vault.example.amperit", "end": 12661, "score": 0.47544264793395996, "start": 12651, "tag": "PASSWORD", "value": "fake-token" }, { "context": "ssed-in \"path/passed/in\"\n token-passed-in \"fake-token\"\n vault-url \"https://vault.example.amperit", "end": 14318, "score": 0.5386613011360168, "start": 14308, "tag": "PASSWORD", "value": "fake-token" }, { "context": "ssed-in \"path/passed/in\"\n token-passed-in \"fake-token\"\n vault-url 
\"https://vault.example.amperit", "end": 15710, "score": 0.6418955326080322, "start": 15700, "tag": "PASSWORD", "value": "fake-token" }, { "context": "ssed-in \"path/passed/in\"\n token-passed-in \"fake-token\"\n vault-url \"https://vault.example.amperit", "end": 16910, "score": 0.7760822772979736, "start": 16900, "tag": "PASSWORD", "value": "fake-token" }, { "context": "ssed-in \"path/passed/in\"\n token-passed-in \"fake-token\"\n vault-url \"https://vault.example.amperit", "end": 18318, "score": 0.7034502029418945, "start": 18308, "tag": "PASSWORD", "value": "fake-token" }, { "context": "as initialized with\"\n (is (= {:batman \"Bruce Wayne\"\n :captain-marvel \"Carol Danvers\"}\n ", "end": 20020, "score": 0.9998871088027954, "start": 20009, "tag": "NAME", "value": "Bruce Wayne" }, { "context": " \"Bruce Wayne\"\n :captain-marvel \"Carol Danvers\"}\n (vault-kvv2/read-secret (mock-client", "end": 20064, "score": 0.9998904466629028, "start": 20051, "tag": "NAME", "value": "Carol Danvers" } ]
test/vault/secrets/kvv2_test.clj
b-social/vault-clj
0
(ns vault.secrets.kvv2-test (:require [clj-http.client] [clojure.test :refer [testing deftest is]] [vault.client.api-util :as api-util] [vault.client.http :as http-client] [vault.client.mock-test :as mock-test] [vault.core :as vault] [vault.secrets.kvv2 :as vault-kvv2]) (:import (clojure.lang ExceptionInfo))) (deftest list-secrets-test (let [path "path/passed/in" token-passed-in "fake-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url) response {:auth nil :data {:keys ["foo" "foo/"]} :lease_duration 2764800 :lease_id "" :renewable false}] (vault/authenticate! client :token token-passed-in) (testing "List secrets has correct response and sends correct request" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/listmount/metadata/" path) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (true? (-> req :query-params :list))) {:body response})] (is (= ["foo" "foo/"] (vault-kvv2/list-secrets client "listmount" path))))))) (deftest write-config!-test (let [mount "mount" token-passed-in "fake-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url) new-config-kebab {:max-versions 5 :cas-required false :delete-version-after "3h25m19s"} new-config-snake {:max_versions 5 :cas_required false :delete_version_after "3h25m19s"}] (vault/authenticate! client :token token-passed-in) (testing "Write config sends correct request and returns true on valid call" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/config") (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= new-config-snake (:form-params req))) {:status 204})] (is (true? (vault-kvv2/write-config! 
client mount new-config-kebab))))))) (deftest read-config-test (let [config {:max-versions 5 :cas-required false :delete-version-after "3h25m19s"} mount "mount" token-passed-in "fake-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "Read config sends correct request and returns the config with valid call" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/config") (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:body {:data (api-util/snakeify-keys config)}})] (is (= config (vault-kvv2/read-config client mount))))))) (deftest read-test (let [lookup-response-valid-path {:data {:data {:foo "bar"} :metadata {:created_time "2018-03-22T02:24:06.945319214Z" :deletion_time "" :destroyed false :version 1}}} mount "mount" path-passed-in "path/passed/in" token-passed-in "fake-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! 
client :token token-passed-in) (testing "Read secrets sends correct request and responds correctly if secret is successfully located" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:body lookup-response-valid-path})] (is (= {:foo "bar"} (vault-kvv2/read-secret client mount path-passed-in))))) (testing "Read secrets sends correct request and responds correctly if secret with version is successfully located" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {"version" 3} (:query-params req))) {:body lookup-response-valid-path})] (is (= {:foo "bar"} (vault-kvv2/read-secret client mount path-passed-in {:version 3 :force-read true}))))) (testing "Read secrets sends correct request and responds correctly if no secret is found" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/data/different/path") (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (throw (ex-info "not found" {:errors [] :status 404 :type :vault.client.api-util/api-error})))] (try (is (= {:default-val :is-here} (vault-kvv2/read-secret client mount "different/path" {:not-found {:default-val :is-here}}))) (vault-kvv2/read-secret client mount "different/path") (is false) (catch ExceptionInfo e (is (= {:errors nil :status 404 :type ::api-util/api-error} (ex-data e))))))))) (deftest write!-test (let [create-success {:data {:created_time "2018-03-22T02:24:06.945319214Z" :deletion_time "" :destroyed false :version 1}} write-data {:foo "bar" :zip "zap"} mount "mount" path-passed-in "path/passed/in" token-passed-in "fake-token" vault-url "https://vault.example.amperity.com" client 
(http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "Write secrets sends correct request and returns true upon success" (with-redefs [clj-http.client/request (fn [req] (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= :post (:method req))) (if (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req)) (do (is (= {} (:form-params req))) {:errors [] :status 200}) (do (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= {:data write-data} (:form-params req))) {:body create-success :status 200})))] (is (= (:data create-success) (vault-kvv2/write-secret! client mount path-passed-in write-data))))) (testing "Write secrets sends correct request and returns false upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= :post (:method req))) (if (= (str vault-url "/v1/" mount "/metadata/other-path") (:url req)) (do (is (= {} (:form-params req))) {:errors [] :status 200}) (do (is (= (str vault-url "/v1/" mount "/data/other-path") (:url req))) (is (= {:data write-data} (:form-params req))) {:errors [] :status 500})))] (is (false? (vault-kvv2/write-secret! client mount "other-path" write-data))))))) (deftest delete-test (let [mount "mount" path-passed-in "path/passed/in" token-passed-in "fake-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "delete secrets send correct request and returns true upon success when no versions passed in" (with-redefs [clj-http.client/request (fn [req] (is (= :delete (:method req))) (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:status 204})] (is (true? (vault-kvv2/delete-secret! client mount path-passed-in)) (is (true? (vault-kvv2/delete-secret! 
client mount path-passed-in [])))) (testing "delete secrets send correct request and returns false upon failure when no versions passed in" (with-redefs [clj-http.client/request (fn [req] (is (= :delete (:method req))) (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:status 404})] (is (false? (vault-kvv2/delete-secret! client mount path-passed-in))))) (testing "delete secrets send correct request and returns true upon success when multiple versions passed in" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/delete/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions [12 14 147]} (:form-params req))) {:status 204})] (is (true? (vault-kvv2/delete-secret! client mount path-passed-in [12 14 147]))))) (testing "delete secrets send correct request and returns false upon failure when multiple versions passed in" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/delete/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions [123]} (:form-params req))) {:status 404})] (is (false? (vault-kvv2/delete-secret! 
client mount path-passed-in [123]))))))))) (deftest read-metadata-test (let [data {:data {:created_time "2018-03-22T02:24:06.945319214Z" :current_version 3 :max_versions 0 :oldest_version 0 :updated_time "2018-03-22T02:36:43.986212308Z" :versions {:1 {:created_time "2018-03-22T02:24:06.945319214Z" :deletion_time "" :destroyed false} :2 {:created_time "2018-03-22T02:36:33.954880664Z" :deletion_time "" :destroyed false} :3 {:created_time "2018-03-22T02:36:43.986212308Z" :deletion_time "" :destroyed false}}}} kebab-metadata {:created-time "2018-03-22T02:24:06.945319214Z" :current-version 3 :max-versions 0 :oldest-version 0 :updated-time "2018-03-22T02:36:43.986212308Z" :versions {:1 {:created-time "2018-03-22T02:24:06.945319214Z" :deletion-time "" :destroyed false} :2 {:created-time "2018-03-22T02:36:33.954880664Z" :deletion-time "" :destroyed false} :3 {:created-time "2018-03-22T02:36:43.986212308Z" :deletion-time "" :destroyed false}}} mount "mount" path-passed-in "path/passed/in" token-passed-in "fake-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "Sends correct request and responds correctly upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:body data :status 200})] (is (= kebab-metadata (vault-kvv2/read-metadata client mount path-passed-in))))) (testing "Sends correct request and responds correctly when metadata not found" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (throw (ex-info "not found" {:errors [] :status 404 :type :vault.client.api-util/api-error})))] (is (thrown? 
ExceptionInfo (vault-kvv2/read-metadata client mount path-passed-in {:force-read true}))) (is (= 3 (vault-kvv2/read-metadata client mount path-passed-in {:not-found 3 :force-read true}))))))) (deftest write-metadata-test (let [payload {:max-versions 5, :cas-required false, :delete-version-after "3h25m19s"} mount "mount" path-passed-in "path/passed/in" token-passed-in "fake-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "Write metadata sends correct request and responds with true upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= (api-util/snakeify-keys payload) (:form-params req))) {:status 204})] (is (true? (vault-kvv2/write-metadata! client mount path-passed-in payload))))) (testing "Write metadata sends correct request and responds with false upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= (api-util/snakeify-keys payload) (:form-params req))) {:status 500})] (is (false? (vault-kvv2/write-metadata! client mount path-passed-in payload))))))) (deftest delete-metadata-test (let [mount "mount" path-passed-in "path/passed/in" token-passed-in "fake-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! 
client :token token-passed-in) (testing "Sends correct request and responds correctly upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :delete (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:status 204})] (is (true? (vault-kvv2/delete-metadata! client mount path-passed-in))))) (testing "Sends correct request and responds correctly upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= :delete (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:status 500})] (is (false? (vault-kvv2/delete-metadata! client mount path-passed-in))))))) (deftest destroy!-test (let [mount "mount" path-passed-in "path/passed/in" token-passed-in "fake-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url) versions [1 2]] (vault/authenticate! client :token token-passed-in) (testing "Destroy secrets sends correct request and returns true upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/destroy/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions versions} (:form-params req))) {:status 204})] (is (true? (vault-kvv2/destroy-secret! client mount path-passed-in versions))))) (testing "Destroy secrets sends correct request and returns false upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/destroy/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions [1]} (:form-params req))) {:status 500})] (is (false? (vault-kvv2/destroy-secret! 
client mount path-passed-in [1]))))))) (deftest undelete-secret!-test (let [mount "mount" path-passed-in "path/passed/in" token-passed-in "fake-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url) versions [1 2]] (vault/authenticate! client :token token-passed-in) (testing "Undelete secrets sends correct request and returns true upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/undelete/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions versions} (:form-params req))) {:status 204})] (is (true? (vault-kvv2/undelete-secret! client mount path-passed-in versions))))) (testing "Undelete secrets sends correct request and returns false upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/undelete/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions [1]} (:form-params req))) {:status 500})] (is (false? (vault-kvv2/undelete-secret! client mount path-passed-in [1]))))))) ;; -------- Mock Client ------------------------------------------------------- (defn mock-client-kvv2 "Creates a mock client with the data in `vault/secrets/secret-fixture-kvv2.edn`" [] (mock-test/mock-client-authenticated "vault/secrets/secret-fixture-kvv2.edn")) (deftest mock-client-test (testing "Mock client can correctly read values it was initialized with" (is (= {:batman "Bruce Wayne" :captain-marvel "Carol Danvers"} (vault-kvv2/read-secret (mock-client-kvv2) "mount" "identities")))) (testing "Mock client correctly responds with a 404 to reading non-existent paths" (is (thrown-with-msg? ExceptionInfo #"No such secret: mount/data/hello" (vault-kvv2/read-secret (mock-client-kvv2) "mount" "hello"))) (is (thrown-with-msg? 
ExceptionInfo #"No such secret: mount/data/identities" (vault-kvv2/read-secret (vault/new-client "mock:-") "mount" "identities")))) (testing "Mock client can write/update and read data" (let [client (mock-client-kvv2)] (is (thrown-with-msg? ExceptionInfo #"No such secret: mount/data/hello" (vault-kvv2/read-secret client "mount" "hello"))) (is (true? (vault-kvv2/write-secret! client "mount" "hello" {:and-i-say "goodbye"}))) (is (true? (vault-kvv2/write-secret! client "mount" "identities" {:intersect "Chuck"}))) (is (= {:and-i-say "goodbye"} (vault-kvv2/read-secret client "mount" "hello"))) (is (= {:intersect "Chuck"} (vault-kvv2/read-secret client "mount" "identities"))))) (testing "Mock client can write and read config" (let [client (mock-client-kvv2) config {:max-versions 5 :cas-required false :delete-version-after "3h23m19s"}] (is (thrown? ExceptionInfo (vault-kvv2/read-config client "mount"))) (is (true? (vault-kvv2/write-config! client "mount" config))) (is (= config (vault-kvv2/read-config client "mount"))))) (testing "Mock client can write and read metadata" (let [client (mock-client-kvv2)] (is (thrown? ExceptionInfo (vault-kvv2/read-metadata client "mount" "doesn't exist" {:force-read true}))) (is (= {:created-time "2018-03-22T02:24:06.945319214Z" :current-version 1 :max-versions 0 :oldest-version 0 :updated-time "2018-03-22T02:36:43.986212308Z" :versions {:1 {:created-time "2018-03-22T02:24:06.945319214Z" :deletion-time "" :destroyed false}}} (vault-kvv2/read-metadata client "mount" "identities" {:force-read true}))) (is (true? (vault-kvv2/delete-metadata! client "mount" "identities"))) (is (thrown? ExceptionInfo (vault-kvv2/read-metadata client "mount" "identities" {:force-read true}))) (is (true? (vault-kvv2/write-metadata! 
client "mount" "hello" {:max-versions 3}))) (is (= 3 (:max-versions (vault-kvv2/read-metadata client "mount" "hello")))) (is (= 5 (vault-kvv2/read-metadata client "mount" "doesn't exist" {:force-read true :not-found 5}))))) (testing "Mock client returns true if path is found on delete for secret, false if not when no versions specified" (let [client (mock-client-kvv2)] (is (true? (vault-kvv2/delete-secret! client "mount" "identities"))) (is (false? (vault-kvv2/delete-secret! client "mount" "eggsactly"))))) (testing "Mock client always returns true on delete for secret when versions specified" (let [client (mock-client-kvv2)] (is (true? (vault-kvv2/delete-secret! client "mount" "identities" [1]))) (is (true? (vault-kvv2/delete-secret! client "mount" "eggsactly" [4 5 6]))))) (testing "Mock can list secrets from their associated metadata" (let [client (mock-client-kvv2)] (is (empty? (vault-kvv2/list-secrets client "hello" "yes"))) (is (true? (vault-kvv2/write-secret! client "mount" "hello" {:and-i-say "goodbye"}))) ;; Paths are good enough for mock right now, but be aware they are current (is (= ["identities" "hello"] (into [] (vault-kvv2/list-secrets client "mount" "")))))) (testing "Mock client does not crash upon destroy" (is (true? (vault-kvv2/destroy-secret! (mock-client-kvv2) "mount" "identities" [1])))) (testing "Mock client does not crash upon undelete" (is (true? (vault-kvv2/undelete-secret! (mock-client-kvv2) "mount" "identities" [1])))))
95163
(ns vault.secrets.kvv2-test (:require [clj-http.client] [clojure.test :refer [testing deftest is]] [vault.client.api-util :as api-util] [vault.client.http :as http-client] [vault.client.mock-test :as mock-test] [vault.core :as vault] [vault.secrets.kvv2 :as vault-kvv2]) (:import (clojure.lang ExceptionInfo))) (deftest list-secrets-test (let [path "path/passed/in" token-passed-in "<PASSWORD>-<KEY>" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url) response {:auth nil :data {:keys ["foo" "foo/"]} :lease_duration 2764800 :lease_id "" :renewable false}] (vault/authenticate! client :token token-passed-in) (testing "List secrets has correct response and sends correct request" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/listmount/metadata/" path) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (true? (-> req :query-params :list))) {:body response})] (is (= ["foo" "foo/"] (vault-kvv2/list-secrets client "listmount" path))))))) (deftest write-config!-test (let [mount "mount" token-passed-in "<PASSWORD>-<KEY>" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url) new-config-kebab {:max-versions 5 :cas-required false :delete-version-after "3h25m19s"} new-config-snake {:max_versions 5 :cas_required false :delete_version_after "3h25m19s"}] (vault/authenticate! client :token token-passed-in) (testing "Write config sends correct request and returns true on valid call" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/config") (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= new-config-snake (:form-params req))) {:status 204})] (is (true? (vault-kvv2/write-config! 
client mount new-config-kebab))))))) (deftest read-config-test (let [config {:max-versions 5 :cas-required false :delete-version-after "3h25m19s"} mount "mount" token-passed-in "<PASSWORD>" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "Read config sends correct request and returns the config with valid call" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/config") (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:body {:data (api-util/snakeify-keys config)}})] (is (= config (vault-kvv2/read-config client mount))))))) (deftest read-test (let [lookup-response-valid-path {:data {:data {:foo "bar"} :metadata {:created_time "2018-03-22T02:24:06.945319214Z" :deletion_time "" :destroyed false :version 1}}} mount "mount" path-passed-in "path/passed/in" token-passed-in "<PASSWORD>-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! 
client :token token-passed-in) (testing "Read secrets sends correct request and responds correctly if secret is successfully located" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:body lookup-response-valid-path})] (is (= {:foo "bar"} (vault-kvv2/read-secret client mount path-passed-in))))) (testing "Read secrets sends correct request and responds correctly if secret with version is successfully located" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {"version" 3} (:query-params req))) {:body lookup-response-valid-path})] (is (= {:foo "bar"} (vault-kvv2/read-secret client mount path-passed-in {:version 3 :force-read true}))))) (testing "Read secrets sends correct request and responds correctly if no secret is found" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/data/different/path") (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (throw (ex-info "not found" {:errors [] :status 404 :type :vault.client.api-util/api-error})))] (try (is (= {:default-val :is-here} (vault-kvv2/read-secret client mount "different/path" {:not-found {:default-val :is-here}}))) (vault-kvv2/read-secret client mount "different/path") (is false) (catch ExceptionInfo e (is (= {:errors nil :status 404 :type ::api-util/api-error} (ex-data e))))))))) (deftest write!-test (let [create-success {:data {:created_time "2018-03-22T02:24:06.945319214Z" :deletion_time "" :destroyed false :version 1}} write-data {:foo "bar" :zip "zap"} mount "mount" path-passed-in "path/passed/in" token-passed-in "<PASSWORD>-token" vault-url "https://vault.example.amperity.com" client 
(http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "Write secrets sends correct request and returns true upon success" (with-redefs [clj-http.client/request (fn [req] (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= :post (:method req))) (if (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req)) (do (is (= {} (:form-params req))) {:errors [] :status 200}) (do (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= {:data write-data} (:form-params req))) {:body create-success :status 200})))] (is (= (:data create-success) (vault-kvv2/write-secret! client mount path-passed-in write-data))))) (testing "Write secrets sends correct request and returns false upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= :post (:method req))) (if (= (str vault-url "/v1/" mount "/metadata/other-path") (:url req)) (do (is (= {} (:form-params req))) {:errors [] :status 200}) (do (is (= (str vault-url "/v1/" mount "/data/other-path") (:url req))) (is (= {:data write-data} (:form-params req))) {:errors [] :status 500})))] (is (false? (vault-kvv2/write-secret! client mount "other-path" write-data))))))) (deftest delete-test (let [mount "mount" path-passed-in "path/passed/in" token-passed-in "fake-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "delete secrets send correct request and returns true upon success when no versions passed in" (with-redefs [clj-http.client/request (fn [req] (is (= :delete (:method req))) (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:status 204})] (is (true? (vault-kvv2/delete-secret! client mount path-passed-in)) (is (true? (vault-kvv2/delete-secret! 
client mount path-passed-in [])))) (testing "delete secrets send correct request and returns false upon failure when no versions passed in" (with-redefs [clj-http.client/request (fn [req] (is (= :delete (:method req))) (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:status 404})] (is (false? (vault-kvv2/delete-secret! client mount path-passed-in))))) (testing "delete secrets send correct request and returns true upon success when multiple versions passed in" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/delete/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions [12 14 147]} (:form-params req))) {:status 204})] (is (true? (vault-kvv2/delete-secret! client mount path-passed-in [12 14 147]))))) (testing "delete secrets send correct request and returns false upon failure when multiple versions passed in" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/delete/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions [123]} (:form-params req))) {:status 404})] (is (false? (vault-kvv2/delete-secret! 
client mount path-passed-in [123]))))))))) (deftest read-metadata-test (let [data {:data {:created_time "2018-03-22T02:24:06.945319214Z" :current_version 3 :max_versions 0 :oldest_version 0 :updated_time "2018-03-22T02:36:43.986212308Z" :versions {:1 {:created_time "2018-03-22T02:24:06.945319214Z" :deletion_time "" :destroyed false} :2 {:created_time "2018-03-22T02:36:33.954880664Z" :deletion_time "" :destroyed false} :3 {:created_time "2018-03-22T02:36:43.986212308Z" :deletion_time "" :destroyed false}}}} kebab-metadata {:created-time "2018-03-22T02:24:06.945319214Z" :current-version 3 :max-versions 0 :oldest-version 0 :updated-time "2018-03-22T02:36:43.986212308Z" :versions {:1 {:created-time "2018-03-22T02:24:06.945319214Z" :deletion-time "" :destroyed false} :2 {:created-time "2018-03-22T02:36:33.954880664Z" :deletion-time "" :destroyed false} :3 {:created-time "2018-03-22T02:36:43.986212308Z" :deletion-time "" :destroyed false}}} mount "mount" path-passed-in "path/passed/in" token-passed-in "<PASSWORD>" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "Sends correct request and responds correctly upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:body data :status 200})] (is (= kebab-metadata (vault-kvv2/read-metadata client mount path-passed-in))))) (testing "Sends correct request and responds correctly when metadata not found" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (throw (ex-info "not found" {:errors [] :status 404 :type :vault.client.api-util/api-error})))] (is (thrown? 
ExceptionInfo (vault-kvv2/read-metadata client mount path-passed-in {:force-read true}))) (is (= 3 (vault-kvv2/read-metadata client mount path-passed-in {:not-found 3 :force-read true}))))))) (deftest write-metadata-test (let [payload {:max-versions 5, :cas-required false, :delete-version-after "3h25m19s"} mount "mount" path-passed-in "path/passed/in" token-passed-in "<PASSWORD>" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "Write metadata sends correct request and responds with true upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= (api-util/snakeify-keys payload) (:form-params req))) {:status 204})] (is (true? (vault-kvv2/write-metadata! client mount path-passed-in payload))))) (testing "Write metadata sends correct request and responds with false upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= (api-util/snakeify-keys payload) (:form-params req))) {:status 500})] (is (false? (vault-kvv2/write-metadata! client mount path-passed-in payload))))))) (deftest delete-metadata-test (let [mount "mount" path-passed-in "path/passed/in" token-passed-in "<PASSWORD>" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! 
client :token token-passed-in) (testing "Sends correct request and responds correctly upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :delete (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:status 204})] (is (true? (vault-kvv2/delete-metadata! client mount path-passed-in))))) (testing "Sends correct request and responds correctly upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= :delete (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:status 500})] (is (false? (vault-kvv2/delete-metadata! client mount path-passed-in))))))) (deftest destroy!-test (let [mount "mount" path-passed-in "path/passed/in" token-passed-in "<PASSWORD>" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url) versions [1 2]] (vault/authenticate! client :token token-passed-in) (testing "Destroy secrets sends correct request and returns true upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/destroy/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions versions} (:form-params req))) {:status 204})] (is (true? (vault-kvv2/destroy-secret! client mount path-passed-in versions))))) (testing "Destroy secrets sends correct request and returns false upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/destroy/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions [1]} (:form-params req))) {:status 500})] (is (false? (vault-kvv2/destroy-secret! 
client mount path-passed-in [1]))))))) (deftest undelete-secret!-test (let [mount "mount" path-passed-in "path/passed/in" token-passed-in "<PASSWORD>" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url) versions [1 2]] (vault/authenticate! client :token token-passed-in) (testing "Undelete secrets sends correct request and returns true upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/undelete/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions versions} (:form-params req))) {:status 204})] (is (true? (vault-kvv2/undelete-secret! client mount path-passed-in versions))))) (testing "Undelete secrets sends correct request and returns false upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/undelete/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions [1]} (:form-params req))) {:status 500})] (is (false? (vault-kvv2/undelete-secret! client mount path-passed-in [1]))))))) ;; -------- Mock Client ------------------------------------------------------- (defn mock-client-kvv2 "Creates a mock client with the data in `vault/secrets/secret-fixture-kvv2.edn`" [] (mock-test/mock-client-authenticated "vault/secrets/secret-fixture-kvv2.edn")) (deftest mock-client-test (testing "Mock client can correctly read values it was initialized with" (is (= {:batman "<NAME>" :captain-marvel "<NAME>"} (vault-kvv2/read-secret (mock-client-kvv2) "mount" "identities")))) (testing "Mock client correctly responds with a 404 to reading non-existent paths" (is (thrown-with-msg? ExceptionInfo #"No such secret: mount/data/hello" (vault-kvv2/read-secret (mock-client-kvv2) "mount" "hello"))) (is (thrown-with-msg? 
ExceptionInfo #"No such secret: mount/data/identities" (vault-kvv2/read-secret (vault/new-client "mock:-") "mount" "identities")))) (testing "Mock client can write/update and read data" (let [client (mock-client-kvv2)] (is (thrown-with-msg? ExceptionInfo #"No such secret: mount/data/hello" (vault-kvv2/read-secret client "mount" "hello"))) (is (true? (vault-kvv2/write-secret! client "mount" "hello" {:and-i-say "goodbye"}))) (is (true? (vault-kvv2/write-secret! client "mount" "identities" {:intersect "Chuck"}))) (is (= {:and-i-say "goodbye"} (vault-kvv2/read-secret client "mount" "hello"))) (is (= {:intersect "Chuck"} (vault-kvv2/read-secret client "mount" "identities"))))) (testing "Mock client can write and read config" (let [client (mock-client-kvv2) config {:max-versions 5 :cas-required false :delete-version-after "3h23m19s"}] (is (thrown? ExceptionInfo (vault-kvv2/read-config client "mount"))) (is (true? (vault-kvv2/write-config! client "mount" config))) (is (= config (vault-kvv2/read-config client "mount"))))) (testing "Mock client can write and read metadata" (let [client (mock-client-kvv2)] (is (thrown? ExceptionInfo (vault-kvv2/read-metadata client "mount" "doesn't exist" {:force-read true}))) (is (= {:created-time "2018-03-22T02:24:06.945319214Z" :current-version 1 :max-versions 0 :oldest-version 0 :updated-time "2018-03-22T02:36:43.986212308Z" :versions {:1 {:created-time "2018-03-22T02:24:06.945319214Z" :deletion-time "" :destroyed false}}} (vault-kvv2/read-metadata client "mount" "identities" {:force-read true}))) (is (true? (vault-kvv2/delete-metadata! client "mount" "identities"))) (is (thrown? ExceptionInfo (vault-kvv2/read-metadata client "mount" "identities" {:force-read true}))) (is (true? (vault-kvv2/write-metadata! 
client "mount" "hello" {:max-versions 3}))) (is (= 3 (:max-versions (vault-kvv2/read-metadata client "mount" "hello")))) (is (= 5 (vault-kvv2/read-metadata client "mount" "doesn't exist" {:force-read true :not-found 5}))))) (testing "Mock client returns true if path is found on delete for secret, false if not when no versions specified" (let [client (mock-client-kvv2)] (is (true? (vault-kvv2/delete-secret! client "mount" "identities"))) (is (false? (vault-kvv2/delete-secret! client "mount" "eggsactly"))))) (testing "Mock client always returns true on delete for secret when versions specified" (let [client (mock-client-kvv2)] (is (true? (vault-kvv2/delete-secret! client "mount" "identities" [1]))) (is (true? (vault-kvv2/delete-secret! client "mount" "eggsactly" [4 5 6]))))) (testing "Mock can list secrets from their associated metadata" (let [client (mock-client-kvv2)] (is (empty? (vault-kvv2/list-secrets client "hello" "yes"))) (is (true? (vault-kvv2/write-secret! client "mount" "hello" {:and-i-say "goodbye"}))) ;; Paths are good enough for mock right now, but be aware they are current (is (= ["identities" "hello"] (into [] (vault-kvv2/list-secrets client "mount" "")))))) (testing "Mock client does not crash upon destroy" (is (true? (vault-kvv2/destroy-secret! (mock-client-kvv2) "mount" "identities" [1])))) (testing "Mock client does not crash upon undelete" (is (true? (vault-kvv2/undelete-secret! (mock-client-kvv2) "mount" "identities" [1])))))
true
(ns vault.secrets.kvv2-test (:require [clj-http.client] [clojure.test :refer [testing deftest is]] [vault.client.api-util :as api-util] [vault.client.http :as http-client] [vault.client.mock-test :as mock-test] [vault.core :as vault] [vault.secrets.kvv2 :as vault-kvv2]) (:import (clojure.lang ExceptionInfo))) (deftest list-secrets-test (let [path "path/passed/in" token-passed-in "PI:PASSWORD:<PASSWORD>END_PI-PI:KEY:<KEY>END_PI" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url) response {:auth nil :data {:keys ["foo" "foo/"]} :lease_duration 2764800 :lease_id "" :renewable false}] (vault/authenticate! client :token token-passed-in) (testing "List secrets has correct response and sends correct request" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/listmount/metadata/" path) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (true? (-> req :query-params :list))) {:body response})] (is (= ["foo" "foo/"] (vault-kvv2/list-secrets client "listmount" path))))))) (deftest write-config!-test (let [mount "mount" token-passed-in "PI:PASSWORD:<PASSWORD>END_PI-PI:KEY:<KEY>END_PI" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url) new-config-kebab {:max-versions 5 :cas-required false :delete-version-after "3h25m19s"} new-config-snake {:max_versions 5 :cas_required false :delete_version_after "3h25m19s"}] (vault/authenticate! client :token token-passed-in) (testing "Write config sends correct request and returns true on valid call" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/config") (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= new-config-snake (:form-params req))) {:status 204})] (is (true? (vault-kvv2/write-config! 
client mount new-config-kebab))))))) (deftest read-config-test (let [config {:max-versions 5 :cas-required false :delete-version-after "3h25m19s"} mount "mount" token-passed-in "PI:PASSWORD:<PASSWORD>END_PI" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "Read config sends correct request and returns the config with valid call" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/config") (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:body {:data (api-util/snakeify-keys config)}})] (is (= config (vault-kvv2/read-config client mount))))))) (deftest read-test (let [lookup-response-valid-path {:data {:data {:foo "bar"} :metadata {:created_time "2018-03-22T02:24:06.945319214Z" :deletion_time "" :destroyed false :version 1}}} mount "mount" path-passed-in "path/passed/in" token-passed-in "PI:PASSWORD:<PASSWORD>END_PI-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! 
client :token token-passed-in) (testing "Read secrets sends correct request and responds correctly if secret is successfully located" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:body lookup-response-valid-path})] (is (= {:foo "bar"} (vault-kvv2/read-secret client mount path-passed-in))))) (testing "Read secrets sends correct request and responds correctly if secret with version is successfully located" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {"version" 3} (:query-params req))) {:body lookup-response-valid-path})] (is (= {:foo "bar"} (vault-kvv2/read-secret client mount path-passed-in {:version 3 :force-read true}))))) (testing "Read secrets sends correct request and responds correctly if no secret is found" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/data/different/path") (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (throw (ex-info "not found" {:errors [] :status 404 :type :vault.client.api-util/api-error})))] (try (is (= {:default-val :is-here} (vault-kvv2/read-secret client mount "different/path" {:not-found {:default-val :is-here}}))) (vault-kvv2/read-secret client mount "different/path") (is false) (catch ExceptionInfo e (is (= {:errors nil :status 404 :type ::api-util/api-error} (ex-data e))))))))) (deftest write!-test (let [create-success {:data {:created_time "2018-03-22T02:24:06.945319214Z" :deletion_time "" :destroyed false :version 1}} write-data {:foo "bar" :zip "zap"} mount "mount" path-passed-in "path/passed/in" token-passed-in "PI:PASSWORD:<PASSWORD>END_PI-token" vault-url "https://vault.example.amperity.com" 
client (http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "Write secrets sends correct request and returns true upon success" (with-redefs [clj-http.client/request (fn [req] (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= :post (:method req))) (if (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req)) (do (is (= {} (:form-params req))) {:errors [] :status 200}) (do (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= {:data write-data} (:form-params req))) {:body create-success :status 200})))] (is (= (:data create-success) (vault-kvv2/write-secret! client mount path-passed-in write-data))))) (testing "Write secrets sends correct request and returns false upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= :post (:method req))) (if (= (str vault-url "/v1/" mount "/metadata/other-path") (:url req)) (do (is (= {} (:form-params req))) {:errors [] :status 200}) (do (is (= (str vault-url "/v1/" mount "/data/other-path") (:url req))) (is (= {:data write-data} (:form-params req))) {:errors [] :status 500})))] (is (false? (vault-kvv2/write-secret! client mount "other-path" write-data))))))) (deftest delete-test (let [mount "mount" path-passed-in "path/passed/in" token-passed-in "fake-token" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "delete secrets send correct request and returns true upon success when no versions passed in" (with-redefs [clj-http.client/request (fn [req] (is (= :delete (:method req))) (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:status 204})] (is (true? (vault-kvv2/delete-secret! client mount path-passed-in)) (is (true? (vault-kvv2/delete-secret! 
client mount path-passed-in [])))) (testing "delete secrets send correct request and returns false upon failure when no versions passed in" (with-redefs [clj-http.client/request (fn [req] (is (= :delete (:method req))) (is (= (str vault-url "/v1/" mount "/data/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:status 404})] (is (false? (vault-kvv2/delete-secret! client mount path-passed-in))))) (testing "delete secrets send correct request and returns true upon success when multiple versions passed in" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/delete/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions [12 14 147]} (:form-params req))) {:status 204})] (is (true? (vault-kvv2/delete-secret! client mount path-passed-in [12 14 147]))))) (testing "delete secrets send correct request and returns false upon failure when multiple versions passed in" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/delete/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions [123]} (:form-params req))) {:status 404})] (is (false? (vault-kvv2/delete-secret! 
client mount path-passed-in [123]))))))))) (deftest read-metadata-test (let [data {:data {:created_time "2018-03-22T02:24:06.945319214Z" :current_version 3 :max_versions 0 :oldest_version 0 :updated_time "2018-03-22T02:36:43.986212308Z" :versions {:1 {:created_time "2018-03-22T02:24:06.945319214Z" :deletion_time "" :destroyed false} :2 {:created_time "2018-03-22T02:36:33.954880664Z" :deletion_time "" :destroyed false} :3 {:created_time "2018-03-22T02:36:43.986212308Z" :deletion_time "" :destroyed false}}}} kebab-metadata {:created-time "2018-03-22T02:24:06.945319214Z" :current-version 3 :max-versions 0 :oldest-version 0 :updated-time "2018-03-22T02:36:43.986212308Z" :versions {:1 {:created-time "2018-03-22T02:24:06.945319214Z" :deletion-time "" :destroyed false} :2 {:created-time "2018-03-22T02:36:33.954880664Z" :deletion-time "" :destroyed false} :3 {:created-time "2018-03-22T02:36:43.986212308Z" :deletion-time "" :destroyed false}}} mount "mount" path-passed-in "path/passed/in" token-passed-in "PI:PASSWORD:<PASSWORD>END_PI" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "Sends correct request and responds correctly upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:body data :status 200})] (is (= kebab-metadata (vault-kvv2/read-metadata client mount path-passed-in))))) (testing "Sends correct request and responds correctly when metadata not found" (with-redefs [clj-http.client/request (fn [req] (is (= :get (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (throw (ex-info "not found" {:errors [] :status 404 :type :vault.client.api-util/api-error})))] (is (thrown? 
ExceptionInfo (vault-kvv2/read-metadata client mount path-passed-in {:force-read true}))) (is (= 3 (vault-kvv2/read-metadata client mount path-passed-in {:not-found 3 :force-read true}))))))) (deftest write-metadata-test (let [payload {:max-versions 5, :cas-required false, :delete-version-after "3h25m19s"} mount "mount" path-passed-in "path/passed/in" token-passed-in "PI:PASSWORD:<PASSWORD>END_PI" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! client :token token-passed-in) (testing "Write metadata sends correct request and responds with true upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= (api-util/snakeify-keys payload) (:form-params req))) {:status 204})] (is (true? (vault-kvv2/write-metadata! client mount path-passed-in payload))))) (testing "Write metadata sends correct request and responds with false upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= (api-util/snakeify-keys payload) (:form-params req))) {:status 500})] (is (false? (vault-kvv2/write-metadata! client mount path-passed-in payload))))))) (deftest delete-metadata-test (let [mount "mount" path-passed-in "path/passed/in" token-passed-in "PI:PASSWORD:<PASSWORD>END_PI" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url)] (vault/authenticate! 
client :token token-passed-in) (testing "Sends correct request and responds correctly upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :delete (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:status 204})] (is (true? (vault-kvv2/delete-metadata! client mount path-passed-in))))) (testing "Sends correct request and responds correctly upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= :delete (:method req))) (is (= (str vault-url "/v1/" mount "/metadata/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) {:status 500})] (is (false? (vault-kvv2/delete-metadata! client mount path-passed-in))))))) (deftest destroy!-test (let [mount "mount" path-passed-in "path/passed/in" token-passed-in "PI:PASSWORD:<PASSWORD>END_PI" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url) versions [1 2]] (vault/authenticate! client :token token-passed-in) (testing "Destroy secrets sends correct request and returns true upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/destroy/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions versions} (:form-params req))) {:status 204})] (is (true? (vault-kvv2/destroy-secret! client mount path-passed-in versions))))) (testing "Destroy secrets sends correct request and returns false upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/destroy/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions [1]} (:form-params req))) {:status 500})] (is (false? (vault-kvv2/destroy-secret! 
client mount path-passed-in [1]))))))) (deftest undelete-secret!-test (let [mount "mount" path-passed-in "path/passed/in" token-passed-in "PI:PASSWORD:<PASSWORD>END_PI" vault-url "https://vault.example.amperity.com" client (http-client/http-client vault-url) versions [1 2]] (vault/authenticate! client :token token-passed-in) (testing "Undelete secrets sends correct request and returns true upon success" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/undelete/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions versions} (:form-params req))) {:status 204})] (is (true? (vault-kvv2/undelete-secret! client mount path-passed-in versions))))) (testing "Undelete secrets sends correct request and returns false upon failure" (with-redefs [clj-http.client/request (fn [req] (is (= :post (:method req))) (is (= (str vault-url "/v1/" mount "/undelete/" path-passed-in) (:url req))) (is (= token-passed-in (get (:headers req) "X-Vault-Token"))) (is (= {:versions [1]} (:form-params req))) {:status 500})] (is (false? (vault-kvv2/undelete-secret! client mount path-passed-in [1]))))))) ;; -------- Mock Client ------------------------------------------------------- (defn mock-client-kvv2 "Creates a mock client with the data in `vault/secrets/secret-fixture-kvv2.edn`" [] (mock-test/mock-client-authenticated "vault/secrets/secret-fixture-kvv2.edn")) (deftest mock-client-test (testing "Mock client can correctly read values it was initialized with" (is (= {:batman "PI:NAME:<NAME>END_PI" :captain-marvel "PI:NAME:<NAME>END_PI"} (vault-kvv2/read-secret (mock-client-kvv2) "mount" "identities")))) (testing "Mock client correctly responds with a 404 to reading non-existent paths" (is (thrown-with-msg? ExceptionInfo #"No such secret: mount/data/hello" (vault-kvv2/read-secret (mock-client-kvv2) "mount" "hello"))) (is (thrown-with-msg? 
ExceptionInfo #"No such secret: mount/data/identities" (vault-kvv2/read-secret (vault/new-client "mock:-") "mount" "identities")))) (testing "Mock client can write/update and read data" (let [client (mock-client-kvv2)] (is (thrown-with-msg? ExceptionInfo #"No such secret: mount/data/hello" (vault-kvv2/read-secret client "mount" "hello"))) (is (true? (vault-kvv2/write-secret! client "mount" "hello" {:and-i-say "goodbye"}))) (is (true? (vault-kvv2/write-secret! client "mount" "identities" {:intersect "Chuck"}))) (is (= {:and-i-say "goodbye"} (vault-kvv2/read-secret client "mount" "hello"))) (is (= {:intersect "Chuck"} (vault-kvv2/read-secret client "mount" "identities"))))) (testing "Mock client can write and read config" (let [client (mock-client-kvv2) config {:max-versions 5 :cas-required false :delete-version-after "3h23m19s"}] (is (thrown? ExceptionInfo (vault-kvv2/read-config client "mount"))) (is (true? (vault-kvv2/write-config! client "mount" config))) (is (= config (vault-kvv2/read-config client "mount"))))) (testing "Mock client can write and read metadata" (let [client (mock-client-kvv2)] (is (thrown? ExceptionInfo (vault-kvv2/read-metadata client "mount" "doesn't exist" {:force-read true}))) (is (= {:created-time "2018-03-22T02:24:06.945319214Z" :current-version 1 :max-versions 0 :oldest-version 0 :updated-time "2018-03-22T02:36:43.986212308Z" :versions {:1 {:created-time "2018-03-22T02:24:06.945319214Z" :deletion-time "" :destroyed false}}} (vault-kvv2/read-metadata client "mount" "identities" {:force-read true}))) (is (true? (vault-kvv2/delete-metadata! client "mount" "identities"))) (is (thrown? ExceptionInfo (vault-kvv2/read-metadata client "mount" "identities" {:force-read true}))) (is (true? (vault-kvv2/write-metadata! 
client "mount" "hello" {:max-versions 3}))) (is (= 3 (:max-versions (vault-kvv2/read-metadata client "mount" "hello")))) (is (= 5 (vault-kvv2/read-metadata client "mount" "doesn't exist" {:force-read true :not-found 5}))))) (testing "Mock client returns true if path is found on delete for secret, false if not when no versions specified" (let [client (mock-client-kvv2)] (is (true? (vault-kvv2/delete-secret! client "mount" "identities"))) (is (false? (vault-kvv2/delete-secret! client "mount" "eggsactly"))))) (testing "Mock client always returns true on delete for secret when versions specified" (let [client (mock-client-kvv2)] (is (true? (vault-kvv2/delete-secret! client "mount" "identities" [1]))) (is (true? (vault-kvv2/delete-secret! client "mount" "eggsactly" [4 5 6]))))) (testing "Mock can list secrets from their associated metadata" (let [client (mock-client-kvv2)] (is (empty? (vault-kvv2/list-secrets client "hello" "yes"))) (is (true? (vault-kvv2/write-secret! client "mount" "hello" {:and-i-say "goodbye"}))) ;; Paths are good enough for mock right now, but be aware they are current (is (= ["identities" "hello"] (into [] (vault-kvv2/list-secrets client "mount" "")))))) (testing "Mock client does not crash upon destroy" (is (true? (vault-kvv2/destroy-secret! (mock-client-kvv2) "mount" "identities" [1])))) (testing "Mock client does not crash upon undelete" (is (true? (vault-kvv2/undelete-secret! (mock-client-kvv2) "mount" "identities" [1])))))
[ { "context": " (is (future? (output-ref step)))\n (link \"Alice\" (:a step))\n (is (not (realized? (output-ref", "end": 428, "score": 0.9913868308067322, "start": 423, "tag": "NAME", "value": "Alice" }, { "context": "(not (realized? (output-ref step))))\n (link \"Bob\" (:b step))\n (link \"Charlie\" (:c step))\n ", "end": 504, "score": 0.9964213371276855, "start": 501, "tag": "NAME", "value": "Bob" }, { "context": "tep))))\n (link \"Bob\" (:b step))\n (link \"Charlie\" (:c step))\n (is (= \"Hello, AliceBobCharlie\"", "end": 537, "score": 0.9961918592453003, "start": 530, "tag": "NAME", "value": "Charlie" }, { "context": " (link \"Charlie\" (:c step))\n (is (= \"Hello, AliceBobCharlie\" (wait-for-output step)))\n (is (re", "end": 576, "score": 0.9103354811668396, "start": 571, "tag": "NAME", "value": "Alice" }, { "context": "\"Charlie\" (:c step))\n (is (= \"Hello, AliceBobCharlie\" (wait-for-output step)))\n (is (realized? (o", "end": 586, "score": 0.9413093328475952, "start": 579, "tag": "NAME", "value": "Charlie" }, { "context": "o [name] (str \"Hello, \" name))\n (link \"Alice\" (:name hello)))]\n ;(println wf)\n ", "end": 926, "score": 0.9911189675331116, "start": 921, "tag": "NAME", "value": "Alice" }, { "context": "\" (step-name (:hello wf))))\n (is (= \"Hello, Alice\") (wait-for-output hello))\n (is (= {:hello", "end": 1046, "score": 0.9945738911628723, "start": 1041, "tag": "NAME", "value": "Alice" }, { "context": " (wait-for-output hello))\n (is (= {:hello \"Hello, Alice\" } (wait-for-workflow wf)))\n ))\n\n (test", "end": 1103, "score": 0.7608236074447632, "start": 1098, "tag": "NAME", "value": "Hello" }, { "context": "-for-output hello))\n (is (= {:hello \"Hello, Alice\" } (wait-for-workflow wf)))\n ))\n\n (testing \"e", "end": 1110, "score": 0.993366539478302, "start": 1105, "tag": "NAME", "value": "Alice" } ]
test/flowing/core_test.clj
stain/flowing-clj
1
(ns flowing.core-test (:require [clojure.test :refer :all] [flowing.core :refer :all])) (deftest simple-workflow (testing "step" (let [step (step "hello" [a b c] (str "Hello, " a b c))] (is (= "hello" (step-name step))) (is (= [:a :b :c] (inputs step))) (is (ref? (:a step))) (is (ref? (:b step))) (is (ref? (:c step))) (is (future? (output-ref step))) (link "Alice" (:a step)) (is (not (realized? (output-ref step)))) (link "Bob" (:b step)) (link "Charlie" (:c step)) (is (= "Hello, AliceBobCharlie" (wait-for-output step))) (is (realized? (output-ref step))))) (testing "defstep" (def hello) (defstep hello [a b c] (str "Hello, " a b c)) (is (= "hello" (step-name hello)))) (testing "workflow" (def hello) (let [wf (workflow (defstep hello [name] (str "Hello, " name)) (link "Alice" (:name hello)))] ;(println wf) (is (= "hello" (step-name (:hello wf)))) (is (= "Hello, Alice") (wait-for-output hello)) (is (= {:hello "Hello, Alice" } (wait-for-workflow wf))) )) (testing "example workflow" (println "Example workflow") (def example-wf (workflow ; Define a series of steps, which can take 0 or more ; arguments. They do not need to be defined in any ; particular order. ; (The below are dummy examples with Thread/sleep to pretend ; to be doing some processing) (defstep get-sequence [id] (println "Retrieving sequence for id" id) (Thread/sleep 1000) "GATTAGCAT") (defstep alignment [sequence database] (println "Aligning" sequence "in" database) (Thread/sleep 2000) (if (= database "cat") (str ">" database " " sequence))) (defstep pathways [fasta] (println "Finding pathways for" fasta) (Thread/sleep 1000) { :dog :cat :cat :tree :fireman :tree}) (defstep similar [fasta paths] (println "Finding similarities in" (count paths) "paths") (Thread/sleep 1000) [ "tiger" "lion" ]) ; Now link them together. Links be provided ; in any order, but you can only link to an input ; parameter once. 
(link "CATGENE15" (:id get-sequence)) ; constant value ; Each input parameter linked separately (link get-sequence (:sequence alignment)) (link "cat" (:database alignment)) ; Same output do multiple destinations (link alignment (:fasta pathways)) (link alignment (:fasta similar)) ; Steps are executed as soon as all inputs are ready ; and in parallell threads, but this link would ; cause :similar to run after :alignment (link pathways (:paths similar)))) ; TODO: Explicit (run-workflow)? ; TODO: Ability to run a workflow definition more than once ; Outputs can be retrieved from any of the steps: (is (= "GATTAGCAT") (wait-for-output get-sequence)) ; (wait-for-output) block until the step ; has received all its upstream inputs ; and finished execution (is (= ["tiger" "lion"] (wait-for-output similar))) ; Or wait for the whole workflow to complete and get ; a handy map of the outputs: (let [results (wait-for-workflow example-wf)] (is (= #{:get-sequence :alignment :pathways :similar} (set (keys results)))) (is (= "GATTAGCAT" (:get-sequence results))) (is (= :tree (:fireman (:pathways results))))) ) )
6271
(ns flowing.core-test (:require [clojure.test :refer :all] [flowing.core :refer :all])) (deftest simple-workflow (testing "step" (let [step (step "hello" [a b c] (str "Hello, " a b c))] (is (= "hello" (step-name step))) (is (= [:a :b :c] (inputs step))) (is (ref? (:a step))) (is (ref? (:b step))) (is (ref? (:c step))) (is (future? (output-ref step))) (link "<NAME>" (:a step)) (is (not (realized? (output-ref step)))) (link "<NAME>" (:b step)) (link "<NAME>" (:c step)) (is (= "Hello, <NAME>Bob<NAME>" (wait-for-output step))) (is (realized? (output-ref step))))) (testing "defstep" (def hello) (defstep hello [a b c] (str "Hello, " a b c)) (is (= "hello" (step-name hello)))) (testing "workflow" (def hello) (let [wf (workflow (defstep hello [name] (str "Hello, " name)) (link "<NAME>" (:name hello)))] ;(println wf) (is (= "hello" (step-name (:hello wf)))) (is (= "Hello, <NAME>") (wait-for-output hello)) (is (= {:hello "<NAME>, <NAME>" } (wait-for-workflow wf))) )) (testing "example workflow" (println "Example workflow") (def example-wf (workflow ; Define a series of steps, which can take 0 or more ; arguments. They do not need to be defined in any ; particular order. ; (The below are dummy examples with Thread/sleep to pretend ; to be doing some processing) (defstep get-sequence [id] (println "Retrieving sequence for id" id) (Thread/sleep 1000) "GATTAGCAT") (defstep alignment [sequence database] (println "Aligning" sequence "in" database) (Thread/sleep 2000) (if (= database "cat") (str ">" database " " sequence))) (defstep pathways [fasta] (println "Finding pathways for" fasta) (Thread/sleep 1000) { :dog :cat :cat :tree :fireman :tree}) (defstep similar [fasta paths] (println "Finding similarities in" (count paths) "paths") (Thread/sleep 1000) [ "tiger" "lion" ]) ; Now link them together. Links be provided ; in any order, but you can only link to an input ; parameter once. 
(link "CATGENE15" (:id get-sequence)) ; constant value ; Each input parameter linked separately (link get-sequence (:sequence alignment)) (link "cat" (:database alignment)) ; Same output do multiple destinations (link alignment (:fasta pathways)) (link alignment (:fasta similar)) ; Steps are executed as soon as all inputs are ready ; and in parallell threads, but this link would ; cause :similar to run after :alignment (link pathways (:paths similar)))) ; TODO: Explicit (run-workflow)? ; TODO: Ability to run a workflow definition more than once ; Outputs can be retrieved from any of the steps: (is (= "GATTAGCAT") (wait-for-output get-sequence)) ; (wait-for-output) block until the step ; has received all its upstream inputs ; and finished execution (is (= ["tiger" "lion"] (wait-for-output similar))) ; Or wait for the whole workflow to complete and get ; a handy map of the outputs: (let [results (wait-for-workflow example-wf)] (is (= #{:get-sequence :alignment :pathways :similar} (set (keys results)))) (is (= "GATTAGCAT" (:get-sequence results))) (is (= :tree (:fireman (:pathways results))))) ) )
true
(ns flowing.core-test (:require [clojure.test :refer :all] [flowing.core :refer :all])) (deftest simple-workflow (testing "step" (let [step (step "hello" [a b c] (str "Hello, " a b c))] (is (= "hello" (step-name step))) (is (= [:a :b :c] (inputs step))) (is (ref? (:a step))) (is (ref? (:b step))) (is (ref? (:c step))) (is (future? (output-ref step))) (link "PI:NAME:<NAME>END_PI" (:a step)) (is (not (realized? (output-ref step)))) (link "PI:NAME:<NAME>END_PI" (:b step)) (link "PI:NAME:<NAME>END_PI" (:c step)) (is (= "Hello, PI:NAME:<NAME>END_PIBobPI:NAME:<NAME>END_PI" (wait-for-output step))) (is (realized? (output-ref step))))) (testing "defstep" (def hello) (defstep hello [a b c] (str "Hello, " a b c)) (is (= "hello" (step-name hello)))) (testing "workflow" (def hello) (let [wf (workflow (defstep hello [name] (str "Hello, " name)) (link "PI:NAME:<NAME>END_PI" (:name hello)))] ;(println wf) (is (= "hello" (step-name (:hello wf)))) (is (= "Hello, PI:NAME:<NAME>END_PI") (wait-for-output hello)) (is (= {:hello "PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI" } (wait-for-workflow wf))) )) (testing "example workflow" (println "Example workflow") (def example-wf (workflow ; Define a series of steps, which can take 0 or more ; arguments. They do not need to be defined in any ; particular order. ; (The below are dummy examples with Thread/sleep to pretend ; to be doing some processing) (defstep get-sequence [id] (println "Retrieving sequence for id" id) (Thread/sleep 1000) "GATTAGCAT") (defstep alignment [sequence database] (println "Aligning" sequence "in" database) (Thread/sleep 2000) (if (= database "cat") (str ">" database " " sequence))) (defstep pathways [fasta] (println "Finding pathways for" fasta) (Thread/sleep 1000) { :dog :cat :cat :tree :fireman :tree}) (defstep similar [fasta paths] (println "Finding similarities in" (count paths) "paths") (Thread/sleep 1000) [ "tiger" "lion" ]) ; Now link them together. 
Links be provided ; in any order, but you can only link to an input ; parameter once. (link "CATGENE15" (:id get-sequence)) ; constant value ; Each input parameter linked separately (link get-sequence (:sequence alignment)) (link "cat" (:database alignment)) ; Same output do multiple destinations (link alignment (:fasta pathways)) (link alignment (:fasta similar)) ; Steps are executed as soon as all inputs are ready ; and in parallell threads, but this link would ; cause :similar to run after :alignment (link pathways (:paths similar)))) ; TODO: Explicit (run-workflow)? ; TODO: Ability to run a workflow definition more than once ; Outputs can be retrieved from any of the steps: (is (= "GATTAGCAT") (wait-for-output get-sequence)) ; (wait-for-output) block until the step ; has received all its upstream inputs ; and finished execution (is (= ["tiger" "lion"] (wait-for-output similar))) ; Or wait for the whole workflow to complete and get ; a handy map of the outputs: (let [results (wait-for-workflow example-wf)] (is (= #{:get-sequence :alignment :pathways :similar} (set (keys results)))) (is (= "GATTAGCAT" (:get-sequence results))) (is (= :tree (:fireman (:pathways results))))) ) )
[ { "context": "r (* polling-interval 2)))))))))\n\n(def version-key \"_version\")\n\n(def export-option-keys\n [\"meta\" \"data_id\" \"g", "end": 4829, "score": 0.9933702349662781, "start": 4820, "tag": "KEY", "value": "\"_version" } ]
src/milia/api/async_export.cljc
onaio/milia
9
(ns milia.api.async-export #?(:cljs (:require-macros [cljs.core.async.macros :refer [go]])) (:require [chimera.seq :refer [select-values]] [chimera.string :refer [is-not-null?]] #?@(:cljs [[goog.string.format] [cljs.core.async :refer [<! chan put! timeout]]]) [clojure.string :refer [join]] [milia.api.dataset :refer [type->endpoint]] [milia.api.http :refer [parse-http]] [milia.utils.remote :refer [make-url *credentials*]] [milia.utils.retry :refer [retry-parse-http]])) (def export-async-url "export_async.json?format=") (def export-failure-status-msg "FAILURE") (def export-pending-status-msg "PENDING") (def initial-polling-interval 5000) ; Async export polling interval in ms (defn- temp-token-suffix "If a temp-token is set, add it as a query string parameter. The caller needs to explicity pass a question-mark or ampersand depending on whether this is the first or a subsequenty query parameter." [& s] (let [temp-token (:temp-token *credentials*)] (join (if (is-not-null? temp-token) (conj (vec s) "temp_token=" temp-token) s)))) (defn- handle-response "Handles API server's response and acts according to given callbacks." [{:as response :keys [status body]} {:as callbacks :keys [on-error on-export-url on-job-id on-stop] :or {on-stop (constantly nil) on-export-url identity on-error identity on-job-id identity}}] (let [{export-url :export_url job-status :job_status job-id :job_uuid} body is-failed-status? #(= job-status export-failure-status-msg) error-detail (or (:detail body) (:error body) (:details body) (when (is-failed-status?) job-status))] ;; sometimes API server returns an export-url quickly (when export-url (when (fn? on-export-url) (on-export-url export-url)) (on-stop)) ;; sometimes it doesn't. Instead, it may want us to wait and gives ;; us a job-uuid for the heavy-lifting export task. (when job-id (when (fn? on-job-id) (on-job-id job-id)) false) ;; or it just gives an error (when (or (>= status 400) (is-failed-status?)) (when (fn? 
on-error) (if (= status 403) (on-error error-detail (:url body)) (on-error error-detail))) (on-stop)))) #?(:cljs (defn- monitor-async-export! "Repeatedly polls the async export progress for the given job_uuid, When export_url is returned, fires callback on-export-url." [dataset-id job-id & {:keys [on-error on-export-url data-type]}] (go (loop [polling-interval initial-polling-interval] (let [job-suffix (str "export_async.json?job_uuid=" job-id) job-url (make-url (type->endpoint data-type) dataset-id job-suffix) response (<! (retry-parse-http :get job-url :no-cache? true))] ;; Never use `on-job-id` here b/c `on-job-id` should only be ;; triggered once in `trigger-async-export!` where it starts ;; `monitor-async-export!` itself (when (not= (handle-response response {:on-stop #(constantly :stop) :on-error on-error :on-export-url on-export-url}) :stop) (<! (timeout polling-interval)) (recur (* polling-interval 2)))))))) #?(:cljs (defn monitor-async-exports-per-form! "Repeatedly polls the export endpoint given a form_id while any of the export status is pending." [dataset-id callback] (go (loop [polling-interval initial-polling-interval] (let [export-url (make-url (temp-token-suffix "export.json?xform=" dataset-id "&")) {:keys [status body]} (<! (retry-parse-http :get export-url :no-cache? true)) pending-exports-list (vec (filter #(= (:job_status %) export-pending-status-msg) body))] (if (empty? pending-exports-list) (callback body) (do (<! (timeout polling-interval)) (recur (* polling-interval 2))))))))) (def version-key "_version") (def export-option-keys ["meta" "data_id" "group_delimiter" "do_not_split_select_multiples" "include_hxl" "include_images" "remove_group_name" version-key "query" "export_id" "include_labels" "include_labels_only" "win_excel_utf8" "redirect_uri" "binary_select_multiples" "value_select_multiples" "show_choice_labels" "include_reviews" "language"]) (def export-option-values [:meta-id :data-id :group-delimiter :do-not-split-multi-selects? 
:include-hxl? :include-images? :remove-group-name? :version :query :export_id :include-labels? :labels-only? :windows-compatible-csv? :redirect-uri :binary-select-multiples? :value-select-multiples? :show-choice-labels? :include-reviews? :language]) (defn- get-param [key value] (if (= key version-key) #?(:cljs (goog.string.format "&query={\"%s\":\"%s\"}" key value)) (str "&" key "=" value))) (defn- add-param [key value] (when (or value (= value false)) (get-param key value))) (defn build-export-suffix "Build the export options string to pass to the Ona API." [url data-format & [export-options]] (->> export-options ((apply juxt export-option-values)) (map add-param export-option-keys) (concat [url data-format]) (apply str))) #?(:cljs (defn trigger-async-export! "Triggers async export and watches it via polling. Fires on-job-id callback on receving :job_uuid from server, then monitors job via polling. On receiving :export_url from server, on-export-url fired." ([dataset-id & [{:keys [data-type data-format export-options ;; callbacks on-job-id on-export-url on-error on-done]}]] (go (let [export-suffix (build-export-suffix export-async-url data-format export-options) export-url (make-url (type->endpoint data-type) dataset-id export-suffix) response (<! (retry-parse-http :get export-url)) inner-on-job-id (fn [job-id] (on-job-id job-id) (monitor-async-export! dataset-id job-id :on-export-url on-export-url :on-error on-error :data-type data-type))] (when on-done (on-done response)) (handle-response response {:on-error on-error ;; new on-job-id that will be used in ;; handle-response :on-job-id (if on-done on-job-id inner-on-job-id) :on-export-url on-export-url})))))) #?(:cljs (defn get-async-export-url "Returns a channel, which will have the async export url when ready." [dataset-id data-format] (let [ch (chan 1)] (trigger-async-export! dataset-id {:data-format data-format :on-export-url #(put! 
ch %)}) ch))) #?(:cljs (defn get-async-export-data "Returns a channel, which will have the async _data_ downloaded using http-method when ready." [dataset-id fmt http-method & args] (go (let [url (<! (get-async-export-url dataset-id fmt))] (<! (apply parse-http (concat [http-method url] args))))))) (defn get-exports-per-form "Get exports based on a form id." [dataset-id] (parse-http :get (make-url (temp-token-suffix "export.json?xform=" dataset-id "&")))) (defn delete-export "Delete an export based on an export id" [export-id] (parse-http :delete (make-url "export" (temp-token-suffix export-id "?"))))
77476
(ns milia.api.async-export #?(:cljs (:require-macros [cljs.core.async.macros :refer [go]])) (:require [chimera.seq :refer [select-values]] [chimera.string :refer [is-not-null?]] #?@(:cljs [[goog.string.format] [cljs.core.async :refer [<! chan put! timeout]]]) [clojure.string :refer [join]] [milia.api.dataset :refer [type->endpoint]] [milia.api.http :refer [parse-http]] [milia.utils.remote :refer [make-url *credentials*]] [milia.utils.retry :refer [retry-parse-http]])) (def export-async-url "export_async.json?format=") (def export-failure-status-msg "FAILURE") (def export-pending-status-msg "PENDING") (def initial-polling-interval 5000) ; Async export polling interval in ms (defn- temp-token-suffix "If a temp-token is set, add it as a query string parameter. The caller needs to explicity pass a question-mark or ampersand depending on whether this is the first or a subsequenty query parameter." [& s] (let [temp-token (:temp-token *credentials*)] (join (if (is-not-null? temp-token) (conj (vec s) "temp_token=" temp-token) s)))) (defn- handle-response "Handles API server's response and acts according to given callbacks." [{:as response :keys [status body]} {:as callbacks :keys [on-error on-export-url on-job-id on-stop] :or {on-stop (constantly nil) on-export-url identity on-error identity on-job-id identity}}] (let [{export-url :export_url job-status :job_status job-id :job_uuid} body is-failed-status? #(= job-status export-failure-status-msg) error-detail (or (:detail body) (:error body) (:details body) (when (is-failed-status?) job-status))] ;; sometimes API server returns an export-url quickly (when export-url (when (fn? on-export-url) (on-export-url export-url)) (on-stop)) ;; sometimes it doesn't. Instead, it may want us to wait and gives ;; us a job-uuid for the heavy-lifting export task. (when job-id (when (fn? on-job-id) (on-job-id job-id)) false) ;; or it just gives an error (when (or (>= status 400) (is-failed-status?)) (when (fn? 
on-error) (if (= status 403) (on-error error-detail (:url body)) (on-error error-detail))) (on-stop)))) #?(:cljs (defn- monitor-async-export! "Repeatedly polls the async export progress for the given job_uuid, When export_url is returned, fires callback on-export-url." [dataset-id job-id & {:keys [on-error on-export-url data-type]}] (go (loop [polling-interval initial-polling-interval] (let [job-suffix (str "export_async.json?job_uuid=" job-id) job-url (make-url (type->endpoint data-type) dataset-id job-suffix) response (<! (retry-parse-http :get job-url :no-cache? true))] ;; Never use `on-job-id` here b/c `on-job-id` should only be ;; triggered once in `trigger-async-export!` where it starts ;; `monitor-async-export!` itself (when (not= (handle-response response {:on-stop #(constantly :stop) :on-error on-error :on-export-url on-export-url}) :stop) (<! (timeout polling-interval)) (recur (* polling-interval 2)))))))) #?(:cljs (defn monitor-async-exports-per-form! "Repeatedly polls the export endpoint given a form_id while any of the export status is pending." [dataset-id callback] (go (loop [polling-interval initial-polling-interval] (let [export-url (make-url (temp-token-suffix "export.json?xform=" dataset-id "&")) {:keys [status body]} (<! (retry-parse-http :get export-url :no-cache? true)) pending-exports-list (vec (filter #(= (:job_status %) export-pending-status-msg) body))] (if (empty? pending-exports-list) (callback body) (do (<! (timeout polling-interval)) (recur (* polling-interval 2))))))))) (def version-key <KEY>") (def export-option-keys ["meta" "data_id" "group_delimiter" "do_not_split_select_multiples" "include_hxl" "include_images" "remove_group_name" version-key "query" "export_id" "include_labels" "include_labels_only" "win_excel_utf8" "redirect_uri" "binary_select_multiples" "value_select_multiples" "show_choice_labels" "include_reviews" "language"]) (def export-option-values [:meta-id :data-id :group-delimiter :do-not-split-multi-selects? 
:include-hxl? :include-images? :remove-group-name? :version :query :export_id :include-labels? :labels-only? :windows-compatible-csv? :redirect-uri :binary-select-multiples? :value-select-multiples? :show-choice-labels? :include-reviews? :language]) (defn- get-param [key value] (if (= key version-key) #?(:cljs (goog.string.format "&query={\"%s\":\"%s\"}" key value)) (str "&" key "=" value))) (defn- add-param [key value] (when (or value (= value false)) (get-param key value))) (defn build-export-suffix "Build the export options string to pass to the Ona API." [url data-format & [export-options]] (->> export-options ((apply juxt export-option-values)) (map add-param export-option-keys) (concat [url data-format]) (apply str))) #?(:cljs (defn trigger-async-export! "Triggers async export and watches it via polling. Fires on-job-id callback on receving :job_uuid from server, then monitors job via polling. On receiving :export_url from server, on-export-url fired." ([dataset-id & [{:keys [data-type data-format export-options ;; callbacks on-job-id on-export-url on-error on-done]}]] (go (let [export-suffix (build-export-suffix export-async-url data-format export-options) export-url (make-url (type->endpoint data-type) dataset-id export-suffix) response (<! (retry-parse-http :get export-url)) inner-on-job-id (fn [job-id] (on-job-id job-id) (monitor-async-export! dataset-id job-id :on-export-url on-export-url :on-error on-error :data-type data-type))] (when on-done (on-done response)) (handle-response response {:on-error on-error ;; new on-job-id that will be used in ;; handle-response :on-job-id (if on-done on-job-id inner-on-job-id) :on-export-url on-export-url})))))) #?(:cljs (defn get-async-export-url "Returns a channel, which will have the async export url when ready." [dataset-id data-format] (let [ch (chan 1)] (trigger-async-export! dataset-id {:data-format data-format :on-export-url #(put! 
ch %)}) ch))) #?(:cljs (defn get-async-export-data "Returns a channel, which will have the async _data_ downloaded using http-method when ready." [dataset-id fmt http-method & args] (go (let [url (<! (get-async-export-url dataset-id fmt))] (<! (apply parse-http (concat [http-method url] args))))))) (defn get-exports-per-form "Get exports based on a form id." [dataset-id] (parse-http :get (make-url (temp-token-suffix "export.json?xform=" dataset-id "&")))) (defn delete-export "Delete an export based on an export id" [export-id] (parse-http :delete (make-url "export" (temp-token-suffix export-id "?"))))
true
(ns milia.api.async-export #?(:cljs (:require-macros [cljs.core.async.macros :refer [go]])) (:require [chimera.seq :refer [select-values]] [chimera.string :refer [is-not-null?]] #?@(:cljs [[goog.string.format] [cljs.core.async :refer [<! chan put! timeout]]]) [clojure.string :refer [join]] [milia.api.dataset :refer [type->endpoint]] [milia.api.http :refer [parse-http]] [milia.utils.remote :refer [make-url *credentials*]] [milia.utils.retry :refer [retry-parse-http]])) (def export-async-url "export_async.json?format=") (def export-failure-status-msg "FAILURE") (def export-pending-status-msg "PENDING") (def initial-polling-interval 5000) ; Async export polling interval in ms (defn- temp-token-suffix "If a temp-token is set, add it as a query string parameter. The caller needs to explicity pass a question-mark or ampersand depending on whether this is the first or a subsequenty query parameter." [& s] (let [temp-token (:temp-token *credentials*)] (join (if (is-not-null? temp-token) (conj (vec s) "temp_token=" temp-token) s)))) (defn- handle-response "Handles API server's response and acts according to given callbacks." [{:as response :keys [status body]} {:as callbacks :keys [on-error on-export-url on-job-id on-stop] :or {on-stop (constantly nil) on-export-url identity on-error identity on-job-id identity}}] (let [{export-url :export_url job-status :job_status job-id :job_uuid} body is-failed-status? #(= job-status export-failure-status-msg) error-detail (or (:detail body) (:error body) (:details body) (when (is-failed-status?) job-status))] ;; sometimes API server returns an export-url quickly (when export-url (when (fn? on-export-url) (on-export-url export-url)) (on-stop)) ;; sometimes it doesn't. Instead, it may want us to wait and gives ;; us a job-uuid for the heavy-lifting export task. (when job-id (when (fn? on-job-id) (on-job-id job-id)) false) ;; or it just gives an error (when (or (>= status 400) (is-failed-status?)) (when (fn? 
on-error) (if (= status 403) (on-error error-detail (:url body)) (on-error error-detail))) (on-stop)))) #?(:cljs (defn- monitor-async-export! "Repeatedly polls the async export progress for the given job_uuid, When export_url is returned, fires callback on-export-url." [dataset-id job-id & {:keys [on-error on-export-url data-type]}] (go (loop [polling-interval initial-polling-interval] (let [job-suffix (str "export_async.json?job_uuid=" job-id) job-url (make-url (type->endpoint data-type) dataset-id job-suffix) response (<! (retry-parse-http :get job-url :no-cache? true))] ;; Never use `on-job-id` here b/c `on-job-id` should only be ;; triggered once in `trigger-async-export!` where it starts ;; `monitor-async-export!` itself (when (not= (handle-response response {:on-stop #(constantly :stop) :on-error on-error :on-export-url on-export-url}) :stop) (<! (timeout polling-interval)) (recur (* polling-interval 2)))))))) #?(:cljs (defn monitor-async-exports-per-form! "Repeatedly polls the export endpoint given a form_id while any of the export status is pending." [dataset-id callback] (go (loop [polling-interval initial-polling-interval] (let [export-url (make-url (temp-token-suffix "export.json?xform=" dataset-id "&")) {:keys [status body]} (<! (retry-parse-http :get export-url :no-cache? true)) pending-exports-list (vec (filter #(= (:job_status %) export-pending-status-msg) body))] (if (empty? pending-exports-list) (callback body) (do (<! 
(timeout polling-interval)) (recur (* polling-interval 2))))))))) (def version-key PI:KEY:<KEY>END_PI") (def export-option-keys ["meta" "data_id" "group_delimiter" "do_not_split_select_multiples" "include_hxl" "include_images" "remove_group_name" version-key "query" "export_id" "include_labels" "include_labels_only" "win_excel_utf8" "redirect_uri" "binary_select_multiples" "value_select_multiples" "show_choice_labels" "include_reviews" "language"]) (def export-option-values [:meta-id :data-id :group-delimiter :do-not-split-multi-selects? :include-hxl? :include-images? :remove-group-name? :version :query :export_id :include-labels? :labels-only? :windows-compatible-csv? :redirect-uri :binary-select-multiples? :value-select-multiples? :show-choice-labels? :include-reviews? :language]) (defn- get-param [key value] (if (= key version-key) #?(:cljs (goog.string.format "&query={\"%s\":\"%s\"}" key value)) (str "&" key "=" value))) (defn- add-param [key value] (when (or value (= value false)) (get-param key value))) (defn build-export-suffix "Build the export options string to pass to the Ona API." [url data-format & [export-options]] (->> export-options ((apply juxt export-option-values)) (map add-param export-option-keys) (concat [url data-format]) (apply str))) #?(:cljs (defn trigger-async-export! "Triggers async export and watches it via polling. Fires on-job-id callback on receving :job_uuid from server, then monitors job via polling. On receiving :export_url from server, on-export-url fired." ([dataset-id & [{:keys [data-type data-format export-options ;; callbacks on-job-id on-export-url on-error on-done]}]] (go (let [export-suffix (build-export-suffix export-async-url data-format export-options) export-url (make-url (type->endpoint data-type) dataset-id export-suffix) response (<! (retry-parse-http :get export-url)) inner-on-job-id (fn [job-id] (on-job-id job-id) (monitor-async-export! 
dataset-id job-id :on-export-url on-export-url :on-error on-error :data-type data-type))] (when on-done (on-done response)) (handle-response response {:on-error on-error ;; new on-job-id that will be used in ;; handle-response :on-job-id (if on-done on-job-id inner-on-job-id) :on-export-url on-export-url})))))) #?(:cljs (defn get-async-export-url "Returns a channel, which will have the async export url when ready." [dataset-id data-format] (let [ch (chan 1)] (trigger-async-export! dataset-id {:data-format data-format :on-export-url #(put! ch %)}) ch))) #?(:cljs (defn get-async-export-data "Returns a channel, which will have the async _data_ downloaded using http-method when ready." [dataset-id fmt http-method & args] (go (let [url (<! (get-async-export-url dataset-id fmt))] (<! (apply parse-http (concat [http-method url] args))))))) (defn get-exports-per-form "Get exports based on a form id." [dataset-id] (parse-http :get (make-url (temp-token-suffix "export.json?xform=" dataset-id "&")))) (defn delete-export "Delete an export based on an export id" [export-id] (parse-http :delete (make-url "export" (temp-token-suffix export-id "?"))))
[ { "context": ")\n(println (format \"name=%s has %.5f\"\n \"Peter\" 5.0)) ;name=Peter has 5.00000", "end": 271, "score": 0.9960019588470459, "start": 266, "tag": "NAME", "value": "Peter" }, { "context": "name=%s has %.5f\"\n \"Peter\" 5.0)) ;name=Peter has 5.00000", "end": 291, "score": 0.9983888268470764, "start": 286, "tag": "NAME", "value": "Peter" } ]
quickstart-kotlin/src/main/clojure/com/bookislife/jgsk/clojure/_07_string/app.clj
youngzil/quickstart-framework
6
;Literals (def s "Hello World") ;Multiple Lines (def text "1, 2, 3 one, two, three \"x, \"y\", \"z\"") (println s) (println text) ;Concatenate (def ret (str "x" "y")) (println ret) ;xy ;Template (def salary 100.1) (println (format "name=%s has %.5f" "Peter" 5.0)) ;name=Peter has 5.00000
92813
;Literals (def s "Hello World") ;Multiple Lines (def text "1, 2, 3 one, two, three \"x, \"y\", \"z\"") (println s) (println text) ;Concatenate (def ret (str "x" "y")) (println ret) ;xy ;Template (def salary 100.1) (println (format "name=%s has %.5f" "<NAME>" 5.0)) ;name=<NAME> has 5.00000
true
;Literals (def s "Hello World") ;Multiple Lines (def text "1, 2, 3 one, two, three \"x, \"y\", \"z\"") (println s) (println text) ;Concatenate (def ret (str "x" "y")) (println ret) ;xy ;Template (def salary 100.1) (println (format "name=%s has %.5f" "PI:NAME:<NAME>END_PI" 5.0)) ;name=PI:NAME:<NAME>END_PI has 5.00000
[ { "context": "d '(:grace-hopper :leslie-lamport :frances-allen :john-mccarthy :barbara-liskov :alonzo-church :ada-lovelace :ala", "end": 776, "score": 0.9675511717796326, "start": 763, "tag": "NAME", "value": "john-mccarthy" }, { "context": "er :leslie-lamport :frances-allen :john-mccarthy :barbara-liskov :alonzo-church :ada-lovelace :alan-turing)\n ", "end": 792, "score": 0.996431291103363, "start": 778, "tag": "NAME", "value": "barbara-liskov" }, { "context": " (let [expected '(:leslie-lamport :frances-allen :john-mccarthy :barbara-liskov)\n actual (mediu", "end": 1001, "score": 0.927139401435852, "start": 997, "tag": "NAME", "value": "john" }, { "context": " [expected '(:leslie-lamport :frances-allen :john-mccarthy :barbara-liskov)\n actual (medium_2)]\n ", "end": 1010, "score": 0.9432359337806702, "start": 1002, "tag": "NAME", "value": "mccarthy" }, { "context": " '(:leslie-lamport :frances-allen :john-mccarthy :barbara-liskov)\n actual (medium_2)]\n (is (= e", "end": 1019, "score": 0.9948068261146545, "start": 1012, "tag": "NAME", "value": "barbara" }, { "context": "ie-lamport :frances-allen :john-mccarthy :barbara-liskov)\n actual (medium_2)]\n (is (= expected", "end": 1026, "score": 0.9934089779853821, "start": 1020, "tag": "NAME", "value": "liskov" } ]
test/logical_7in7/core_test.clj
zz-monad-zz/logical-7in7
0
(ns logical-7in7.core-test (:require [clojure.test :refer :all] [logical-7in7.core :refer :all])) (deftest a-test (testing "This a simple example." (let [expected '(true) actual (example)] (is (= expected actual))))) (deftest test_easy_1 (testing "easy_1" (let [expected '(3) actual (easy_1)] (is (= expected actual))))) (deftest test_easy_2 (testing "easy_2" (let [expected '((4 5)) actual (easy_2)] (is (= expected actual))))) (deftest test_easy_3 (testing "easy_3" (let [expected '(:barbara-liskov) actual (easy_3)] (is (= expected actual))))) (deftest test_medium_1 (testing "medium_1" (let [expected '(:grace-hopper :leslie-lamport :frances-allen :john-mccarthy :barbara-liskov :alonzo-church :ada-lovelace :alan-turing) actual (medium_1)] (is (= expected actual))))) (deftest test_medium_2 (testing "medium_2" (let [expected '(:leslie-lamport :frances-allen :john-mccarthy :barbara-liskov) actual (medium_2)] (is (= expected actual)))))
124262
(ns logical-7in7.core-test (:require [clojure.test :refer :all] [logical-7in7.core :refer :all])) (deftest a-test (testing "This a simple example." (let [expected '(true) actual (example)] (is (= expected actual))))) (deftest test_easy_1 (testing "easy_1" (let [expected '(3) actual (easy_1)] (is (= expected actual))))) (deftest test_easy_2 (testing "easy_2" (let [expected '((4 5)) actual (easy_2)] (is (= expected actual))))) (deftest test_easy_3 (testing "easy_3" (let [expected '(:barbara-liskov) actual (easy_3)] (is (= expected actual))))) (deftest test_medium_1 (testing "medium_1" (let [expected '(:grace-hopper :leslie-lamport :frances-allen :<NAME> :<NAME> :alonzo-church :ada-lovelace :alan-turing) actual (medium_1)] (is (= expected actual))))) (deftest test_medium_2 (testing "medium_2" (let [expected '(:leslie-lamport :frances-allen :<NAME>-<NAME> :<NAME>-<NAME>) actual (medium_2)] (is (= expected actual)))))
true
(ns logical-7in7.core-test (:require [clojure.test :refer :all] [logical-7in7.core :refer :all])) (deftest a-test (testing "This a simple example." (let [expected '(true) actual (example)] (is (= expected actual))))) (deftest test_easy_1 (testing "easy_1" (let [expected '(3) actual (easy_1)] (is (= expected actual))))) (deftest test_easy_2 (testing "easy_2" (let [expected '((4 5)) actual (easy_2)] (is (= expected actual))))) (deftest test_easy_3 (testing "easy_3" (let [expected '(:barbara-liskov) actual (easy_3)] (is (= expected actual))))) (deftest test_medium_1 (testing "medium_1" (let [expected '(:grace-hopper :leslie-lamport :frances-allen :PI:NAME:<NAME>END_PI :PI:NAME:<NAME>END_PI :alonzo-church :ada-lovelace :alan-turing) actual (medium_1)] (is (= expected actual))))) (deftest test_medium_2 (testing "medium_2" (let [expected '(:leslie-lamport :frances-allen :PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI :PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI) actual (medium_2)] (is (= expected actual)))))
[ { "context": "tion of the Glicko-2 rating algorithm\"\n :author \"Sylvain Ageneau\"\n :url \"https://github.com/ageneau/glicko2\"\n :l", "end": 127, "score": 0.9998950958251953, "start": 112, "tag": "NAME", "value": "Sylvain Ageneau" }, { "context": "or \"Sylvain Ageneau\"\n :url \"https://github.com/ageneau/glicko2\"\n :license {:name \"BSD 2-Clause \\\"Simpli", "end": 163, "score": 0.580270528793335, "start": 158, "tag": "USERNAME", "value": "eneau" } ]
project.clj
ageneau/glicko2
1
(defproject ageneau/glicko2 "0.1.2" :description "Implementation of the Glicko-2 rating algorithm" :author "Sylvain Ageneau" :url "https://github.com/ageneau/glicko2" :license {:name "BSD 2-Clause \"Simplified\" License" :url "https://opensource.org/licenses/BSD-2-Clause" :year 2018 :key "bsd-2-clause"} ;; Sets the values of global vars within Clojure. This example ;; disables all pre- and post-conditions and emits warnings on ;; reflective calls. See the Clojure documentation for the list of ;; valid global variables to set (and their meaningful values). :global-vars {*warn-on-reflection* true *assert* false} :dependencies [[org.clojure/clojure "1.9.0"] [org.clojure/clojurescript "1.9.946" :scope "provided"] [org.clojure/algo.generic "0.1.2"]] :profiles {:dev {:dependencies [[lein-doo "0.1.8"] [com.cemerick/piggieback "0.2.2"]] :repl-options {:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]} :plugins [[lein-doo "0.1.7"]] }} :plugins [[lein-cljsbuild "1.1.5"]] :cljsbuild {:builds [{:id "test" :source-paths ["src" "test" "target/classes"] :compiler {:output-to "target/js/testable.js" :output-dir "target/js/out" :main glicko2.test-runner :optimizations :none}} {:id "node-test" :source-paths ["src" "test" "target/classes"] :compiler {:output-to "target/nodejs/testable.js" :output-dir "target/nodejs/out" :main glicko2.test-runner :optimizations :none :target :nodejs}}]})
27352
(defproject ageneau/glicko2 "0.1.2" :description "Implementation of the Glicko-2 rating algorithm" :author "<NAME>" :url "https://github.com/ageneau/glicko2" :license {:name "BSD 2-Clause \"Simplified\" License" :url "https://opensource.org/licenses/BSD-2-Clause" :year 2018 :key "bsd-2-clause"} ;; Sets the values of global vars within Clojure. This example ;; disables all pre- and post-conditions and emits warnings on ;; reflective calls. See the Clojure documentation for the list of ;; valid global variables to set (and their meaningful values). :global-vars {*warn-on-reflection* true *assert* false} :dependencies [[org.clojure/clojure "1.9.0"] [org.clojure/clojurescript "1.9.946" :scope "provided"] [org.clojure/algo.generic "0.1.2"]] :profiles {:dev {:dependencies [[lein-doo "0.1.8"] [com.cemerick/piggieback "0.2.2"]] :repl-options {:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]} :plugins [[lein-doo "0.1.7"]] }} :plugins [[lein-cljsbuild "1.1.5"]] :cljsbuild {:builds [{:id "test" :source-paths ["src" "test" "target/classes"] :compiler {:output-to "target/js/testable.js" :output-dir "target/js/out" :main glicko2.test-runner :optimizations :none}} {:id "node-test" :source-paths ["src" "test" "target/classes"] :compiler {:output-to "target/nodejs/testable.js" :output-dir "target/nodejs/out" :main glicko2.test-runner :optimizations :none :target :nodejs}}]})
true
(defproject ageneau/glicko2 "0.1.2" :description "Implementation of the Glicko-2 rating algorithm" :author "PI:NAME:<NAME>END_PI" :url "https://github.com/ageneau/glicko2" :license {:name "BSD 2-Clause \"Simplified\" License" :url "https://opensource.org/licenses/BSD-2-Clause" :year 2018 :key "bsd-2-clause"} ;; Sets the values of global vars within Clojure. This example ;; disables all pre- and post-conditions and emits warnings on ;; reflective calls. See the Clojure documentation for the list of ;; valid global variables to set (and their meaningful values). :global-vars {*warn-on-reflection* true *assert* false} :dependencies [[org.clojure/clojure "1.9.0"] [org.clojure/clojurescript "1.9.946" :scope "provided"] [org.clojure/algo.generic "0.1.2"]] :profiles {:dev {:dependencies [[lein-doo "0.1.8"] [com.cemerick/piggieback "0.2.2"]] :repl-options {:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]} :plugins [[lein-doo "0.1.7"]] }} :plugins [[lein-cljsbuild "1.1.5"]] :cljsbuild {:builds [{:id "test" :source-paths ["src" "test" "target/classes"] :compiler {:output-to "target/js/testable.js" :output-dir "target/js/out" :main glicko2.test-runner :optimizations :none}} {:id "node-test" :source-paths ["src" "test" "target/classes"] :compiler {:output-to "target/nodejs/testable.js" :output-dir "target/nodejs/out" :main glicko2.test-runner :optimizations :none :target :nodejs}}]})
[ { "context": ";; The MIT License (MIT)\n\n;; Copyright (c) 2016 tyf00@aliyun.com, picked from https://github.com/TimeExceed/cellul", "end": 64, "score": 0.999901294708252, "start": 48, "tag": "EMAIL", "value": "tyf00@aliyun.com" }, { "context": " tyf00@aliyun.com, picked from https://github.com/TimeExceed/cellular\n\n;; Permission is hereby granted, free o", "end": 107, "score": 0.9993598461151123, "start": 97, "tag": "USERNAME", "value": "TimeExceed" } ]
src/cellular/core.cljc
TimeExceed/cellular
0
;; The MIT License (MIT) ;; Copyright (c) 2016 tyf00@aliyun.com, picked from https://github.com/TimeExceed/cellular ;; Permission is hereby granted, free of charge, to any person obtaining a copy ;; of this software and associated documentation files (the "Software"), to deal ;; in the Software without restriction, including without limitation the rights ;; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ;; copies of the Software, and to permit persons to whom the Software is ;; furnished to do so, subject to the following conditions: ;; The above copyright notice and this permission notice shall be included in all ;; copies or substantial portions of the Software. ;; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ;; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ;; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, ;; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE ;; SOFTWARE. (ns cellular.core (:use [clojure.set] [cellular.utils :only (if-nonnil-let* when-nonnil-let* enumerate)]) (:import [java.io Reader StringReader])) ;; streaming (defn reader->stream [^Reader rdr] (try (let [ch (.read rdr)] (if (neg? ch) [{:result :eof}] (lazy-seq (cons {:result (char ch)} (reader->stream rdr))))) (catch Exception ex {:error ex}))) (defn str->stream [^String str] (let [rdr (StringReader. 
str)] (reader->stream rdr))) (defn ->stream [xs] (for [x xs] {:result x})) (defn offset-stream [strm] (for [[offset item] (enumerate strm)] (assoc item :offset offset))) (defn- position-stream' [strm row column] (when (seq strm) (let [[head & tail] strm] (if (= (:result head) \newline) (lazy-seq (cons (assoc head :row row :column column) (position-stream' tail (inc row) 1))) (lazy-seq (cons (assoc head :row row :column column) (position-stream' tail row (inc column)))))))) (defn position-stream [strm] (position-stream' strm 1 1)) ;; basics (defn- issue [item msg] (-> item (dissoc :result) (assoc :error msg))) (defn expect-char-if [pred] (fn [strm] (let [[head & tail] strm] (if-not (nil? (:error head)) [head tail] (let [{ch :result} head] (if (pred ch) [{:result ch} tail] [(issue head (format "unexpect %s"(pr-str ch))) strm])))))) (defn expect-char-if-not [pred] (expect-char-if #(not (pred %)))) (defn expect-char [ch] (expect-char-if #(= % ch))) (defn expect-any-char [] (expect-char-if #(not= % :eof))) (defn- expect-eof-parser [strm] (let [[head & tail] strm] (if-not (nil? (:error head)) [head tail] (if (= (:result head) :eof) [{:result :eof} strm] [(issue head "expect :eof") strm])))) (defn expect-eof [] expect-eof-parser) (defn- expect-str-parser' [s strm] (if (empty? s) strm (let [[c & ss] s] (if (seq strm) (let [[x & xs] strm {ch :result} x] (if (= c ch) (recur ss xs))))))) (defn- expect-str-parser [s strm] (if-nonnil-let* [nxt-strm (expect-str-parser' s strm)] [{:result s} nxt-strm] [(issue (first strm) (format "expect %s" (pr-str s))) strm])) (defn expect-str [s] (partial expect-str-parser s)) (defn- skip-while-parser' [skipped pred strm] (let [[head & tail] strm] (if (pred (:result head)) (do (conj! skipped (:result head)) (recur skipped pred tail)) strm))) (defn- skip-while-parser [pred strm] (let [skipped (transient []) nxt-strm (skip-while-parser' skipped pred strm)] [{:result (persistent! 
skipped)} nxt-strm])) (defn skip-while [pred] (partial skip-while-parser pred)) ;; combinators (defn- optional-parser [f strm] (let [[res nxt-strm :as whole] (f strm)] (if (nil? (:result res)) [{:result :none} strm] whole))) (defn optional [f] (partial optional-parser f)) (defn- choice-parser [parsers strm] (if-not (seq parsers) [(issue (first strm) "match nothing") strm] (let [[parser & rest-parsers] parsers [res nxt-strm :as whole] (parser strm)] (if (:result res) whole (recur rest-parsers strm))))) (defn choice [& parsers] (partial choice-parser parsers)) (defn- choice*-parser [args strm] (condp = (count args) 0 [(issue (first strm) "match nothing") strm] 1 [{:result (first args)} strm] (let [[parser f & rest-args] args [res nxt-strm] (parser strm)] (if-nonnil-let* [res (:result res)] [{:result (f res)} nxt-strm] (recur rest-args strm))))) (defn choice* [& args] (partial choice*-parser args)) (defn- many-parser' [xs parser strm] (let [[res nxt-strm] (parser strm)] (if (nil? (:error res)) (do (conj! xs (:result res)) (recur xs parser nxt-strm)) strm))) (defn- many-parser [parser strm] (let [xs (transient []) nxt-strm (many-parser' xs parser strm)] [{:result (persistent! xs)} nxt-strm])) (defn many [parser] (partial many-parser parser)) (defn- many1-parser [parser strm] (let [[res nxt-strm :as whole] (many-parser parser strm)] (if-not (nil? (:error res)) whole (if (empty? (:result res)) [(issue (first strm) "match nothing in many1") strm] whole)))) (defn many1 [parser] (partial many1-parser parser)) (defn- chain-parser' [res-collector parsers strm] (if-not (seq parsers) [{} strm] (let [[p & ps] parsers [res nxt-strm :as whole] (p strm)] (if-not (nil? (:error res)) whole (do (conj! res-collector (:result res)) (recur res-collector ps nxt-strm)))))) (defn- chain-parser [parsers strm] (let [res (transient [])] (let [[err nxt-strm :as whole] (chain-parser' res parsers strm)] (if-not (nil? (:error err)) whole [{:result (persistent! 
res)} nxt-strm])))) (defn chain [& parsers] (partial chain-parser parsers)) (defn- foresee-parser [parser strm] (let [[result] (parser strm)] [result strm])) (defn foresee [parser] (partial foresee-parser parser)) (defn- between-parser [left middle right strm] (let [[left-res strm0 :as left-whole] (left strm)] (if (nil? (:result left-res)) left-whole (let [[mid-res strm1] (middle strm0)] (if (nil? (:result mid-res)) [mid-res strm] (let [[right-res strm2] (right strm1)] (if (nil? (:result right-res)) [right-res strm] [mid-res strm2]))))))) (defn between [left middle right] (partial between-parser left middle right)) (defn- separated-list-parser'' [result item sep strm] (let [[res0 strm0] (sep strm)] (if (nil? (:error res0)) (let [[res1 strm1 :as whole] (item strm0)] (if-not (nil? (:error res1)) whole (do (conj! result (:result res1)) (separated-list-parser'' result item sep strm1)))) [{:result (persistent! result)} strm]))) (defn- separated-list-parser' [result item sep strm] (let [[res0 strm0] (item strm)] (if-not (nil? (:error res0)) [res0 strm] (do (conj! 
result (:result res0)) (separated-list-parser'' result item sep strm0))))) (defn- separated-list-parser [item sep strm] (let [res-collector (transient [])] (separated-list-parser' res-collector item sep strm))) (defn separated-list [item sep] (partial separated-list-parser item sep)) ;; helpers (def digit #{\0 \1 \2 \3 \4 \5 \6 \7 \8 \9}) (def hexdigit (union #{\a \b \c \d \e \f \A \B \C \D \E \F} digit)) (def letter (set (for [x (range 128) :let [ch (char x)] :let [gta (>= (Character/compare ch \a) 0)] :let [ltz (<= (Character/compare ch \z) 0)] :let [gtA (>= (Character/compare ch \A) 0)] :let [ltZ (<= (Character/compare ch \Z) 0)] :when (or (and gta ltz) (and gtA ltZ))] ch))) (def whitespace #{\space \tab \formfeed \newline}) (defn- collect-str-between' [sb start end] (when-not (= start end) (let [[head & tail] start] (.append sb (:result head)) (collect-str-between' sb tail end)))) (defn collect-str-between [start end] (let [sb (StringBuilder.)] (collect-str-between' sb start end) (str sb)))
9463
;; The MIT License (MIT) ;; Copyright (c) 2016 <EMAIL>, picked from https://github.com/TimeExceed/cellular ;; Permission is hereby granted, free of charge, to any person obtaining a copy ;; of this software and associated documentation files (the "Software"), to deal ;; in the Software without restriction, including without limitation the rights ;; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ;; copies of the Software, and to permit persons to whom the Software is ;; furnished to do so, subject to the following conditions: ;; The above copyright notice and this permission notice shall be included in all ;; copies or substantial portions of the Software. ;; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ;; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ;; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, ;; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE ;; SOFTWARE. (ns cellular.core (:use [clojure.set] [cellular.utils :only (if-nonnil-let* when-nonnil-let* enumerate)]) (:import [java.io Reader StringReader])) ;; streaming (defn reader->stream [^Reader rdr] (try (let [ch (.read rdr)] (if (neg? ch) [{:result :eof}] (lazy-seq (cons {:result (char ch)} (reader->stream rdr))))) (catch Exception ex {:error ex}))) (defn str->stream [^String str] (let [rdr (StringReader. 
str)] (reader->stream rdr))) (defn ->stream [xs] (for [x xs] {:result x})) (defn offset-stream [strm] (for [[offset item] (enumerate strm)] (assoc item :offset offset))) (defn- position-stream' [strm row column] (when (seq strm) (let [[head & tail] strm] (if (= (:result head) \newline) (lazy-seq (cons (assoc head :row row :column column) (position-stream' tail (inc row) 1))) (lazy-seq (cons (assoc head :row row :column column) (position-stream' tail row (inc column)))))))) (defn position-stream [strm] (position-stream' strm 1 1)) ;; basics (defn- issue [item msg] (-> item (dissoc :result) (assoc :error msg))) (defn expect-char-if [pred] (fn [strm] (let [[head & tail] strm] (if-not (nil? (:error head)) [head tail] (let [{ch :result} head] (if (pred ch) [{:result ch} tail] [(issue head (format "unexpect %s"(pr-str ch))) strm])))))) (defn expect-char-if-not [pred] (expect-char-if #(not (pred %)))) (defn expect-char [ch] (expect-char-if #(= % ch))) (defn expect-any-char [] (expect-char-if #(not= % :eof))) (defn- expect-eof-parser [strm] (let [[head & tail] strm] (if-not (nil? (:error head)) [head tail] (if (= (:result head) :eof) [{:result :eof} strm] [(issue head "expect :eof") strm])))) (defn expect-eof [] expect-eof-parser) (defn- expect-str-parser' [s strm] (if (empty? s) strm (let [[c & ss] s] (if (seq strm) (let [[x & xs] strm {ch :result} x] (if (= c ch) (recur ss xs))))))) (defn- expect-str-parser [s strm] (if-nonnil-let* [nxt-strm (expect-str-parser' s strm)] [{:result s} nxt-strm] [(issue (first strm) (format "expect %s" (pr-str s))) strm])) (defn expect-str [s] (partial expect-str-parser s)) (defn- skip-while-parser' [skipped pred strm] (let [[head & tail] strm] (if (pred (:result head)) (do (conj! skipped (:result head)) (recur skipped pred tail)) strm))) (defn- skip-while-parser [pred strm] (let [skipped (transient []) nxt-strm (skip-while-parser' skipped pred strm)] [{:result (persistent! 
skipped)} nxt-strm])) (defn skip-while [pred] (partial skip-while-parser pred)) ;; combinators (defn- optional-parser [f strm] (let [[res nxt-strm :as whole] (f strm)] (if (nil? (:result res)) [{:result :none} strm] whole))) (defn optional [f] (partial optional-parser f)) (defn- choice-parser [parsers strm] (if-not (seq parsers) [(issue (first strm) "match nothing") strm] (let [[parser & rest-parsers] parsers [res nxt-strm :as whole] (parser strm)] (if (:result res) whole (recur rest-parsers strm))))) (defn choice [& parsers] (partial choice-parser parsers)) (defn- choice*-parser [args strm] (condp = (count args) 0 [(issue (first strm) "match nothing") strm] 1 [{:result (first args)} strm] (let [[parser f & rest-args] args [res nxt-strm] (parser strm)] (if-nonnil-let* [res (:result res)] [{:result (f res)} nxt-strm] (recur rest-args strm))))) (defn choice* [& args] (partial choice*-parser args)) (defn- many-parser' [xs parser strm] (let [[res nxt-strm] (parser strm)] (if (nil? (:error res)) (do (conj! xs (:result res)) (recur xs parser nxt-strm)) strm))) (defn- many-parser [parser strm] (let [xs (transient []) nxt-strm (many-parser' xs parser strm)] [{:result (persistent! xs)} nxt-strm])) (defn many [parser] (partial many-parser parser)) (defn- many1-parser [parser strm] (let [[res nxt-strm :as whole] (many-parser parser strm)] (if-not (nil? (:error res)) whole (if (empty? (:result res)) [(issue (first strm) "match nothing in many1") strm] whole)))) (defn many1 [parser] (partial many1-parser parser)) (defn- chain-parser' [res-collector parsers strm] (if-not (seq parsers) [{} strm] (let [[p & ps] parsers [res nxt-strm :as whole] (p strm)] (if-not (nil? (:error res)) whole (do (conj! res-collector (:result res)) (recur res-collector ps nxt-strm)))))) (defn- chain-parser [parsers strm] (let [res (transient [])] (let [[err nxt-strm :as whole] (chain-parser' res parsers strm)] (if-not (nil? (:error err)) whole [{:result (persistent! 
res)} nxt-strm])))) (defn chain [& parsers] (partial chain-parser parsers)) (defn- foresee-parser [parser strm] (let [[result] (parser strm)] [result strm])) (defn foresee [parser] (partial foresee-parser parser)) (defn- between-parser [left middle right strm] (let [[left-res strm0 :as left-whole] (left strm)] (if (nil? (:result left-res)) left-whole (let [[mid-res strm1] (middle strm0)] (if (nil? (:result mid-res)) [mid-res strm] (let [[right-res strm2] (right strm1)] (if (nil? (:result right-res)) [right-res strm] [mid-res strm2]))))))) (defn between [left middle right] (partial between-parser left middle right)) (defn- separated-list-parser'' [result item sep strm] (let [[res0 strm0] (sep strm)] (if (nil? (:error res0)) (let [[res1 strm1 :as whole] (item strm0)] (if-not (nil? (:error res1)) whole (do (conj! result (:result res1)) (separated-list-parser'' result item sep strm1)))) [{:result (persistent! result)} strm]))) (defn- separated-list-parser' [result item sep strm] (let [[res0 strm0] (item strm)] (if-not (nil? (:error res0)) [res0 strm] (do (conj! 
result (:result res0)) (separated-list-parser'' result item sep strm0))))) (defn- separated-list-parser [item sep strm] (let [res-collector (transient [])] (separated-list-parser' res-collector item sep strm))) (defn separated-list [item sep] (partial separated-list-parser item sep)) ;; helpers (def digit #{\0 \1 \2 \3 \4 \5 \6 \7 \8 \9}) (def hexdigit (union #{\a \b \c \d \e \f \A \B \C \D \E \F} digit)) (def letter (set (for [x (range 128) :let [ch (char x)] :let [gta (>= (Character/compare ch \a) 0)] :let [ltz (<= (Character/compare ch \z) 0)] :let [gtA (>= (Character/compare ch \A) 0)] :let [ltZ (<= (Character/compare ch \Z) 0)] :when (or (and gta ltz) (and gtA ltZ))] ch))) (def whitespace #{\space \tab \formfeed \newline}) (defn- collect-str-between' [sb start end] (when-not (= start end) (let [[head & tail] start] (.append sb (:result head)) (collect-str-between' sb tail end)))) (defn collect-str-between [start end] (let [sb (StringBuilder.)] (collect-str-between' sb start end) (str sb)))
true
;; The MIT License (MIT) ;; Copyright (c) 2016 PI:EMAIL:<EMAIL>END_PI, picked from https://github.com/TimeExceed/cellular ;; Permission is hereby granted, free of charge, to any person obtaining a copy ;; of this software and associated documentation files (the "Software"), to deal ;; in the Software without restriction, including without limitation the rights ;; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ;; copies of the Software, and to permit persons to whom the Software is ;; furnished to do so, subject to the following conditions: ;; The above copyright notice and this permission notice shall be included in all ;; copies or substantial portions of the Software. ;; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ;; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ;; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, ;; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE ;; SOFTWARE. (ns cellular.core (:use [clojure.set] [cellular.utils :only (if-nonnil-let* when-nonnil-let* enumerate)]) (:import [java.io Reader StringReader])) ;; streaming (defn reader->stream [^Reader rdr] (try (let [ch (.read rdr)] (if (neg? ch) [{:result :eof}] (lazy-seq (cons {:result (char ch)} (reader->stream rdr))))) (catch Exception ex {:error ex}))) (defn str->stream [^String str] (let [rdr (StringReader. 
str)] (reader->stream rdr))) (defn ->stream [xs] (for [x xs] {:result x})) (defn offset-stream [strm] (for [[offset item] (enumerate strm)] (assoc item :offset offset))) (defn- position-stream' [strm row column] (when (seq strm) (let [[head & tail] strm] (if (= (:result head) \newline) (lazy-seq (cons (assoc head :row row :column column) (position-stream' tail (inc row) 1))) (lazy-seq (cons (assoc head :row row :column column) (position-stream' tail row (inc column)))))))) (defn position-stream [strm] (position-stream' strm 1 1)) ;; basics (defn- issue [item msg] (-> item (dissoc :result) (assoc :error msg))) (defn expect-char-if [pred] (fn [strm] (let [[head & tail] strm] (if-not (nil? (:error head)) [head tail] (let [{ch :result} head] (if (pred ch) [{:result ch} tail] [(issue head (format "unexpect %s"(pr-str ch))) strm])))))) (defn expect-char-if-not [pred] (expect-char-if #(not (pred %)))) (defn expect-char [ch] (expect-char-if #(= % ch))) (defn expect-any-char [] (expect-char-if #(not= % :eof))) (defn- expect-eof-parser [strm] (let [[head & tail] strm] (if-not (nil? (:error head)) [head tail] (if (= (:result head) :eof) [{:result :eof} strm] [(issue head "expect :eof") strm])))) (defn expect-eof [] expect-eof-parser) (defn- expect-str-parser' [s strm] (if (empty? s) strm (let [[c & ss] s] (if (seq strm) (let [[x & xs] strm {ch :result} x] (if (= c ch) (recur ss xs))))))) (defn- expect-str-parser [s strm] (if-nonnil-let* [nxt-strm (expect-str-parser' s strm)] [{:result s} nxt-strm] [(issue (first strm) (format "expect %s" (pr-str s))) strm])) (defn expect-str [s] (partial expect-str-parser s)) (defn- skip-while-parser' [skipped pred strm] (let [[head & tail] strm] (if (pred (:result head)) (do (conj! skipped (:result head)) (recur skipped pred tail)) strm))) (defn- skip-while-parser [pred strm] (let [skipped (transient []) nxt-strm (skip-while-parser' skipped pred strm)] [{:result (persistent! 
skipped)} nxt-strm])) (defn skip-while [pred] (partial skip-while-parser pred)) ;; combinators (defn- optional-parser [f strm] (let [[res nxt-strm :as whole] (f strm)] (if (nil? (:result res)) [{:result :none} strm] whole))) (defn optional [f] (partial optional-parser f)) (defn- choice-parser [parsers strm] (if-not (seq parsers) [(issue (first strm) "match nothing") strm] (let [[parser & rest-parsers] parsers [res nxt-strm :as whole] (parser strm)] (if (:result res) whole (recur rest-parsers strm))))) (defn choice [& parsers] (partial choice-parser parsers)) (defn- choice*-parser [args strm] (condp = (count args) 0 [(issue (first strm) "match nothing") strm] 1 [{:result (first args)} strm] (let [[parser f & rest-args] args [res nxt-strm] (parser strm)] (if-nonnil-let* [res (:result res)] [{:result (f res)} nxt-strm] (recur rest-args strm))))) (defn choice* [& args] (partial choice*-parser args)) (defn- many-parser' [xs parser strm] (let [[res nxt-strm] (parser strm)] (if (nil? (:error res)) (do (conj! xs (:result res)) (recur xs parser nxt-strm)) strm))) (defn- many-parser [parser strm] (let [xs (transient []) nxt-strm (many-parser' xs parser strm)] [{:result (persistent! xs)} nxt-strm])) (defn many [parser] (partial many-parser parser)) (defn- many1-parser [parser strm] (let [[res nxt-strm :as whole] (many-parser parser strm)] (if-not (nil? (:error res)) whole (if (empty? (:result res)) [(issue (first strm) "match nothing in many1") strm] whole)))) (defn many1 [parser] (partial many1-parser parser)) (defn- chain-parser' [res-collector parsers strm] (if-not (seq parsers) [{} strm] (let [[p & ps] parsers [res nxt-strm :as whole] (p strm)] (if-not (nil? (:error res)) whole (do (conj! res-collector (:result res)) (recur res-collector ps nxt-strm)))))) (defn- chain-parser [parsers strm] (let [res (transient [])] (let [[err nxt-strm :as whole] (chain-parser' res parsers strm)] (if-not (nil? (:error err)) whole [{:result (persistent! 
res)} nxt-strm])))) (defn chain [& parsers] (partial chain-parser parsers)) (defn- foresee-parser [parser strm] (let [[result] (parser strm)] [result strm])) (defn foresee [parser] (partial foresee-parser parser)) (defn- between-parser [left middle right strm] (let [[left-res strm0 :as left-whole] (left strm)] (if (nil? (:result left-res)) left-whole (let [[mid-res strm1] (middle strm0)] (if (nil? (:result mid-res)) [mid-res strm] (let [[right-res strm2] (right strm1)] (if (nil? (:result right-res)) [right-res strm] [mid-res strm2]))))))) (defn between [left middle right] (partial between-parser left middle right)) (defn- separated-list-parser'' [result item sep strm] (let [[res0 strm0] (sep strm)] (if (nil? (:error res0)) (let [[res1 strm1 :as whole] (item strm0)] (if-not (nil? (:error res1)) whole (do (conj! result (:result res1)) (separated-list-parser'' result item sep strm1)))) [{:result (persistent! result)} strm]))) (defn- separated-list-parser' [result item sep strm] (let [[res0 strm0] (item strm)] (if-not (nil? (:error res0)) [res0 strm] (do (conj! 
result (:result res0)) (separated-list-parser'' result item sep strm0))))) (defn- separated-list-parser [item sep strm] (let [res-collector (transient [])] (separated-list-parser' res-collector item sep strm))) (defn separated-list [item sep] (partial separated-list-parser item sep)) ;; helpers (def digit #{\0 \1 \2 \3 \4 \5 \6 \7 \8 \9}) (def hexdigit (union #{\a \b \c \d \e \f \A \B \C \D \E \F} digit)) (def letter (set (for [x (range 128) :let [ch (char x)] :let [gta (>= (Character/compare ch \a) 0)] :let [ltz (<= (Character/compare ch \z) 0)] :let [gtA (>= (Character/compare ch \A) 0)] :let [ltZ (<= (Character/compare ch \Z) 0)] :when (or (and gta ltz) (and gtA ltZ))] ch))) (def whitespace #{\space \tab \formfeed \newline}) (defn- collect-str-between' [sb start end] (when-not (= start end) (let [[head & tail] start] (.append sb (:result head)) (collect-str-between' sb tail end)))) (defn collect-str-between [start end] (let [sb (StringBuilder.)] (collect-str-between' sb start end) (str sb)))
[ { "context": " NIMBUS-ADMINS [\"admin-user\"]\n ", "end": 26664, "score": 0.9898571372032166, "start": 26654, "tag": "USERNAME", "value": "admin-user" }, { "context": "\" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20, LOGS-USERS [\"alice\", (System/getProperty \"user.name\")]} topology)\n ", "end": 27763, "score": 0.9986996650695801, "start": 27758, "tag": "USERNAME", "value": "alice" }, { "context": " {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20, LOGS-GROUPS [\"alice-group\"]} topology)\n (bind storm-id-killgroup (St", "end": 28470, "score": 0.9477014541625977, "start": 28459, "tag": "USERNAME", "value": "alice-group" }, { "context": "\" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10, LOGS-USERS [\"alice\", (System/getProperty \"user.name\")]} topology)\n ", "end": 29911, "score": 0.9988994598388672, "start": 29906, "tag": "USERNAME", "value": "alice" }, { "context": "gy can not read\n (.submitTopology cluster \"testnoread\" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10, LOGS-USERS ", "end": 30265, "score": 0.6419968605041504, "start": 30257, "tag": "USERNAME", "value": "testnore" }, { "context": "\" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10, LOGS-USERS [\"alice\"]} topology)\n (.advanceClusterTime cluster", "end": 30322, "score": 0.9993380308151245, "start": 30317, "tag": "USERNAME", "value": "alice" }, { "context": "\"alice\"))))]\n (log-message \"Checking user alice \" hist-topo-ids)\n (is (= 5 (count hist-t", "end": 31660, "score": 0.498877614736557, "start": 31655, "tag": "NAME", "value": "alice" } ]
storm-core/test/clj/org/apache/storm/nimbus_test.clj
qinwang/storm
0
;; Licensed to the Apache Software Foundation (ASF) under one ;; or more contributor license agreements. See the NOTICE file ;; distributed with this work for additional information ;; regarding copyright ownership. The ASF licenses this file ;; to you under the Apache License, Version 2.0 (the ;; "License"); you may not use this file except in compliance ;; with the License. You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. (ns org.apache.storm.nimbus-test (:use [clojure test]) (:require [org.apache.storm [util :as util]]) (:import [java.util.function UnaryOperator]) (:import [org.apache.storm.testing InProcessZookeeper MockLeaderElector TestWordCounter TestWordSpout TestGlobalCount TestAggregatesCounter TestPlannerSpout TestPlannerBolt] [org.apache.storm.blobstore BlobStore] [org.apache.storm.nimbus InMemoryTopologyActionNotifier] [org.apache.storm.daemon.nimbus TopoCache Nimbus Nimbus$StandaloneINimbus] [org.apache.storm.generated GlobalStreamId TopologyStatus SupervisorInfo StormTopology StormBase] [org.apache.storm LocalCluster LocalCluster$Builder Thrift MockAutoCred Testing Testing$Condition] [org.apache.storm.stats BoltExecutorStats StatsUtil] [org.apache.storm.security.auth IGroupMappingServiceProvider IAuthorizer]) (:import [org.apache.storm.testing.staticmocking MockedZookeeper]) (:import [org.apache.storm.testing TmpPath]) (:import [org.apache.storm.scheduler INimbus]) (:import [org.mockito Mockito Matchers]) (:import [org.mockito.exceptions.base MockitoAssertionError]) (:import [org.apache.storm.nimbus ILeaderElector NimbusInfo]) (:import [org.apache.storm.testing.staticmocking MockedCluster]) 
(:import [org.apache.storm.generated Credentials NotAliveException SubmitOptions TopologyInitialStatus TopologyStatus AlreadyAliveException KillOptions RebalanceOptions InvalidTopologyException AuthorizationException LogConfig LogLevel LogLevelAction Assignment NodeInfo]) (:import [java.util Map HashMap HashSet Optional]) (:import [java.io File]) (:import [javax.security.auth Subject]) (:import [org.apache.storm.utils Time Time$SimulatedTime IPredicate StormCommonInstaller Utils$UptimeComputer ReflectionUtils Utils ConfigUtils ServerConfigUtils] [org.apache.storm.utils.staticmocking ServerConfigUtilsInstaller ReflectionUtilsInstaller UtilsInstaller]) (:import [org.apache.storm.zookeeper Zookeeper]) (:import [org.apache.commons.io FileUtils]) (:import [org.json.simple JSONValue]) (:import [org.apache.storm.daemon StormCommon]) (:import [org.apache.storm.cluster IStormClusterState StormClusterStateImpl ClusterStateContext ClusterUtils] [org.apache.storm.assignments LocalAssignmentsBackendFactory]) (:use [org.apache.storm util daemon-config config log]) (:require [conjure.core]) (:use [conjure core])) (def ^:dynamic *STORM-CONF* (clojurify-structure (ConfigUtils/readStormConfig))) (defn- mk-nimbus ([conf inimbus] (mk-nimbus conf inimbus nil nil nil nil)) ([conf inimbus blob-store leader-elector group-mapper cluster-state] ;blacklist scheduler requires nimbus-monitor-freq-secs as input parameter. (let [conf-with-nimbus-monitor-freq (merge {NIMBUS-MONITOR-FREQ-SECS 10} conf)] (Nimbus. 
conf-with-nimbus-monitor-freq inimbus cluster-state nil blob-store nil leader-elector group-mapper)))) (defn- from-json [^String str] (if str (clojurify-structure (JSONValue/parse str)) nil)) (defn storm-component->task-info [cluster storm-name] (let [storm-id (StormCommon/getStormId (.getClusterState cluster) storm-name) nimbus (.getNimbus cluster)] (-> (.getUserTopology nimbus storm-id) (#(StormCommon/stormTaskInfo % (from-json (.getTopologyConf nimbus storm-id)))) (Utils/reverseMap) clojurify-structure))) (defn getCredentials [cluster storm-name] (let [storm-id (StormCommon/getStormId (.getClusterState cluster) storm-name) creds (.credentials (.getClusterState cluster) storm-id nil)] (if creds (into {} (.get_creds creds))))) (defn storm-component->executor-info [cluster storm-name] (let [storm-id (StormCommon/getStormId (.getClusterState cluster) storm-name) nimbus (.getNimbus cluster) storm-conf (from-json (.getTopologyConf nimbus storm-id)) topology (.getUserTopology nimbus storm-id) task->component (clojurify-structure (StormCommon/stormTaskInfo topology storm-conf)) state (.getClusterState cluster) get-component (comp task->component first)] (->> (.assignmentInfo state storm-id nil) .get_executor_node_port .keySet clojurify-structure (map (fn [e] {e (get-component e)})) (apply merge) (Utils/reverseMap) clojurify-structure))) (defn storm-num-workers [state storm-name] (let [storm-id (StormCommon/getStormId state storm-name) assignment (.assignmentInfo state storm-id nil)] (.size (Utils/reverseMap (.get_executor_node_port assignment))))) (defn topology-nodes [state storm-name] (let [storm-id (StormCommon/getStormId state storm-name) assignment (.assignmentInfo state storm-id nil)] (->> assignment .get_executor_node_port .values (map (fn [np] (.get_node np))) set ))) (defn topology-slots [state storm-name] (let [storm-id (StormCommon/getStormId state storm-name) assignment (.assignmentInfo state storm-id nil)] (->> assignment .get_executor_node_port .values set 
))) ;TODO: when translating this function, don't call map-val, but instead use an inline for loop. ; map-val is a temporary kluge for clojure. (defn topology-node-distribution [state storm-name] (let [storm-id (StormCommon/getStormId state storm-name) assignment (.assignmentInfo state storm-id nil)] (->> assignment .get_executor_node_port .values set (group-by (fn [np] (.get_node np))) (map-val count) (map (fn [[_ amt]] {amt 1})) (apply merge-with +) ))) (defn topology-num-nodes [state storm-name] (count (topology-nodes state storm-name))) (defn executor-assignment [cluster storm-id executor-id] (let [state (.getClusterState cluster) assignment (.assignmentInfo state storm-id nil)] (.get (.get_executor_node_port assignment) executor-id) )) (defn executor-start-times [cluster storm-id] (let [state (.getClusterState cluster) assignment (.assignmentInfo state storm-id nil)] (clojurify-structure (.get_executor_start_time_secs assignment)))) (defn do-executor-heartbeat [cluster storm-id executor] (let [state (.getClusterState cluster) executor->node+port (.get_executor_node_port (.assignmentInfo state storm-id nil)) np (.get executor->node+port executor) node (.get_node np) port (first (.get_port np)) curr-beat (StatsUtil/convertZkWorkerHb (.getWorkerHeartbeat state storm-id node port)) stats (if (get curr-beat "executor-stats") (get curr-beat "executor-stats") (HashMap.))] (log-warn "curr-beat:" (prn-str curr-beat) ",stats:" (prn-str stats)) (log-warn "stats type:" (type stats)) (.put stats (StatsUtil/convertExecutor executor) (.renderStats (BoltExecutorStats. 
20 (*STORM-CONF* NUM-STAT-BUCKETS)))) (log-warn "merged:" stats) (.workerHeartbeat state storm-id node port (StatsUtil/thriftifyZkWorkerHb (StatsUtil/mkZkWorkerHb storm-id stats (int 10)))) (.sendSupervisorWorkerHeartbeat (.getNimbus cluster) (StatsUtil/thriftifyRPCWorkerHb storm-id executor)))) (defn slot-assignments [cluster storm-id] (let [state (.getClusterState cluster) assignment (.assignmentInfo state storm-id nil)] (clojurify-structure (Utils/reverseMap (.get_executor_node_port assignment))))) (defn task-ids [cluster storm-id] (let [nimbus (.getNimbus cluster)] (-> (.getUserTopology nimbus storm-id) (#(StormCommon/stormTaskInfo % (from-json (.getTopologyConf nimbus storm-id)))) clojurify-structure keys))) (defn topology-executors [cluster storm-id] (let [state (.getClusterState cluster) assignment (.assignmentInfo state storm-id nil) ret-keys (keys (.get_executor_node_port assignment)) _ (log-message "ret-keys: " (pr-str ret-keys)) ] ret-keys )) (defn check-distribution [items distribution] (let [counts (map long (map count items))] (is (Testing/multiseteq counts (map long distribution))))) (defn disjoint? [& sets] (let [combined (apply concat sets)] (= (count combined) (count (set combined))) )) (defn executor->tasks [executor-id] clojurify-structure (StormCommon/executorIdToTasks executor-id)) (defnk check-consistency [cluster storm-name :assigned? true] (let [state (.getClusterState cluster) storm-id (StormCommon/getStormId state storm-name) task-ids (task-ids cluster storm-id) assignment (.assignmentInfo state storm-id nil) executor->node+port (.get_executor_node_port assignment) task->node+port (StormCommon/taskToNodeport executor->node+port) assigned-task-ids (mapcat executor->tasks (keys executor->node+port)) all-nodes (set (map (fn [np] (.get_node np)) (.values executor->node+port)))] (when assigned? (is (= (sort task-ids) (sort assigned-task-ids))) (doseq [t task-ids] (is (not-nil? 
(.get task->node+port t))))) (doseq [[e s] executor->node+port] (is (not-nil? s))) (is (= all-nodes (set (keys (.get_node_host assignment))))) (doseq [[e s] executor->node+port] (is (not-nil? (.get (.get_executor_start_time_secs assignment) e)))) )) (deftest test-bogusId (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSupervisors 4) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [state (.getClusterState cluster) nimbus (.getNimbus cluster)] (is (thrown? NotAliveException (.getTopologyConf nimbus "bogus-id"))) (is (thrown? NotAliveException (.getTopology nimbus "bogus-id"))) (is (thrown? NotAliveException (.getUserTopology nimbus "bogus-id"))) (is (thrown? NotAliveException (.getTopologyInfo nimbus "bogus-id"))) (is (thrown? NotAliveException (.uploadNewCredentials nimbus "bogus-id" (Credentials.)))) ))) (deftest test-assignment (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 4) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [state (.getClusterState cluster) topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. false) (Integer. 3))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 4)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.))}) topology2 (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 12))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 6)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareGlobalGrouping)} (TestPlannerBolt.) (Integer. 
8)) "4" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareGlobalGrouping) (Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 4))}) _ (.submitTopology cluster "mystorm" {TOPOLOGY-WORKERS 4} topology) _ (.advanceClusterTime cluster 11) task-info (storm-component->task-info cluster "mystorm")] (check-consistency cluster "mystorm") ;; 3 should be assigned once (if it were optimized, we'd have ;; different topology) (is (= 1 (count (.assignments state nil)))) (is (= 1 (count (task-info "1")))) (is (= 4 (count (task-info "2")))) (is (= 1 (count (task-info "3")))) (is (= 4 (storm-num-workers state "mystorm"))) (.submitTopology cluster "storm2" {TOPOLOGY-WORKERS 20} topology2) (.advanceClusterTime cluster 11) (check-consistency cluster "storm2") (is (= 2 (count (.assignments state nil)))) (let [task-info (storm-component->task-info cluster "storm2")] (is (= 12 (count (task-info "1")))) (is (= 6 (count (task-info "2")))) (is (= 8 (count (task-info "3")))) (is (= 4 (count (task-info "4")))) (is (= 8 (storm-num-workers state "storm2"))) ) ))) (defn isolation-nimbus [] (let [standalone (Nimbus$StandaloneINimbus.)] (reify INimbus (prepare [this conf local-dir] (.prepare standalone conf local-dir) ) (allSlotsAvailableForScheduling [this supervisors topologies topologies-missing-assignments] (.allSlotsAvailableForScheduling standalone supervisors topologies topologies-missing-assignments)) (assignSlots [this topology slots] (.assignSlots standalone topology slots) ) (getForcedScheduler [this] (.getForcedScheduler standalone)) (getHostName [this supervisors node-id] node-id )))) (deftest test-auto-credentials (with-open [cluster (.build (doto (LocalCluster$Builder. 
) (.withSimulatedTime) (.withSupervisors 6) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0 NIMBUS-CREDENTIAL-RENEW-FREQ-SECS 10 NIMBUS-CREDENTIAL-RENEWERS (list "org.apache.storm.MockAutoCred") NIMBUS-AUTO-CRED-PLUGINS (list "org.apache.storm.MockAutoCred") })))] (let [state (.getClusterState cluster) topology-name "test-auto-cred-storm" submitOptions (SubmitOptions. TopologyInitialStatus/INACTIVE) - (.set_creds submitOptions (Credentials. (HashMap.))) topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. false) (Integer. 3))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 4)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.))}) _ (.submitTopologyWithOpts cluster topology-name {TOPOLOGY-WORKERS 4 TOPOLOGY-AUTO-CREDENTIALS (list "org.apache.storm.MockAutoCred") } topology submitOptions) credentials (getCredentials cluster topology-name)] ; check that the credentials have nimbus auto generated cred (is (= (.get credentials MockAutoCred/NIMBUS_CRED_KEY) MockAutoCred/NIMBUS_CRED_VAL)) ;advance cluster time so the renewers can execute (.advanceClusterTime cluster 20) ;check that renewed credentials replace the original credential. (is (= (.get (getCredentials cluster topology-name) MockAutoCred/NIMBUS_CRED_KEY) MockAutoCred/NIMBUS_CRED_RENEW_VAL)) (is (= (.get (getCredentials cluster topology-name) MockAutoCred/GATEWAY_CRED_KEY) MockAutoCred/GATEWAY_CRED_RENEW_VAL))))) (defmacro letlocals [& body] (let [[tobind lexpr] (split-at (dec (count body)) body) binded (vec (mapcat (fn [e] (if (and (list? e) (= 'bind (first e))) [(second e) (last e)] ['_ e] )) tobind))] `(let ~binded ~(first lexpr)))) (deftest test-isolated-assignment (with-open [cluster (.build (doto (LocalCluster$Builder. 
) (.withSimulatedTime) (.withSupervisors 6) (.withINimbus (isolation-nimbus)) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0 STORM-SCHEDULER "org.apache.storm.scheduler.IsolationScheduler" ISOLATION-SCHEDULER-MACHINES {"tester1" 3 "tester2" 2} NIMBUS-MONITOR-FREQ-SECS 10 })))] (letlocals (bind state (.getClusterState cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. false) (Integer. 3))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 5)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.))})) (.submitTopology cluster "noniso" {TOPOLOGY-WORKERS 4} topology) (.advanceClusterTime cluster 11) (is (= 4 (topology-num-nodes state "noniso"))) (is (= 4 (storm-num-workers state "noniso"))) (.submitTopology cluster "tester1" {TOPOLOGY-WORKERS 6} topology) (.submitTopology cluster "tester2" {TOPOLOGY-WORKERS 6} topology) (.advanceClusterTime cluster 11) (bind task-info-tester1 (storm-component->task-info cluster "tester1")) (bind task-info-tester2 (storm-component->task-info cluster "tester2")) (is (= 1 (topology-num-nodes state "noniso"))) (is (= 3 (storm-num-workers state "noniso"))) (is (= {2 3} (topology-node-distribution state "tester1"))) (is (= {3 2} (topology-node-distribution state "tester2"))) (is (apply disjoint? 
(map (partial topology-nodes state) ["noniso" "tester1" "tester2"]))) (check-consistency cluster "tester1") (check-consistency cluster "tester2") (check-consistency cluster "noniso") ;;check that nothing gets reassigned (bind tester1-slots (topology-slots state "tester1")) (bind tester2-slots (topology-slots state "tester2")) (bind noniso-slots (topology-slots state "noniso")) (.advanceClusterTime cluster 20) (is (= tester1-slots (topology-slots state "tester1"))) (is (= tester2-slots (topology-slots state "tester2"))) (is (= noniso-slots (topology-slots state "noniso"))) ))) (deftest test-zero-executor-or-tasks (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 6) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [state (.getClusterState cluster) topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. false) (Integer. 3) {TOPOLOGY-TASKS 0})} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 1) {TOPOLOGY-TASKS 2}) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) nil {TOPOLOGY-TASKS 5})}) _ (.submitTopology cluster "mystorm" {TOPOLOGY-WORKERS 4} topology) _ (.advanceClusterTime cluster 11) task-info (storm-component->task-info cluster "mystorm")] (check-consistency cluster "mystorm") (is (= 0 (count (task-info "1")))) (is (= 2 (count (task-info "2")))) (is (= 5 (count (task-info "3")))) (is (= 2 (storm-num-workers state "mystorm"))) ;; because only 2 executors ))) ;TODO: when translating this function, you should replace the map-val with a proper for loop HERE (deftest test-executor-assignments (with-open [cluster (.build (doto (LocalCluster$Builder. 
) (.withSimulatedTime) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3) {TOPOLOGY-TASKS 5})} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 8) {TOPOLOGY-TASKS 2}) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 3))}) _ (.submitTopology cluster "mystorm" {TOPOLOGY-WORKERS 4} topology) _ (.advanceClusterTime cluster 11) task-info (storm-component->task-info cluster "mystorm") executor-info (->> (storm-component->executor-info cluster "mystorm") (map-val #(map executor->tasks %)))] (check-consistency cluster "mystorm") (is (= 5 (count (task-info "1")))) (check-distribution (executor-info "1") [2 2 1]) (is (= 2 (count (task-info "2")))) (check-distribution (executor-info "2") [1 1]) (is (= 3 (count (task-info "3")))) (check-distribution (executor-info "3") [1 1 1]) ))) (deftest test-over-parallelism-assignment (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 2) (.withPortsPerSupervisor 5) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [state (.getClusterState cluster) topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 21))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 9)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 2)) "4" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 
10))}) _ (.submitTopology cluster "test" {TOPOLOGY-WORKERS 7} topology) _ (.advanceClusterTime cluster 11) task-info (storm-component->task-info cluster "test")] (check-consistency cluster "test") (is (= 21 (count (task-info "1")))) (is (= 9 (count (task-info "2")))) (is (= 2 (count (task-info "3")))) (is (= 10 (count (task-info "4")))) (is (= 7 (storm-num-workers state "test"))) ))) (deftest test-topo-history (let [group-mapper (Mockito/mock IGroupMappingServiceProvider)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 2) (.withPortsPerSupervisor 5) (.withGroupMapper group-mapper) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-ADMINS ["admin-user"] NIMBUS-TASK-TIMEOUT-SECS 30 NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-ACKER-EXECUTORS 0})))] (.thenReturn (Mockito/when (.getGroups group-mapper (Mockito/anyObject))) #{"alice-group"}) (letlocals (bind conf (.getDaemonConf cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 4))} {})) (bind state (.getClusterState cluster)) ; get topology history when there's no topology history (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) (System/getProperty "user.name")))))] (log-message "Checking user " (System/getProperty "user.name") " " hist-topo-ids) (is (= 0 (count hist-topo-ids)))) (.submitTopology cluster "test" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20, LOGS-USERS ["alice", (System/getProperty "user.name")]} topology) (bind storm-id (StormCommon/getStormId state "test")) (.advanceClusterTime cluster 5) (is (not-nil? (.stormBase state storm-id nil))) (is (not-nil? (.assignmentInfo state storm-id nil))) (.killTopology (.getNimbus cluster) "test") ;; check that storm is deactivated but alive (is (= TopologyStatus/KILLED (.get_status (.stormBase state storm-id nil)))) (is (not-nil? 
(.assignmentInfo state storm-id nil))) (.advanceClusterTime cluster 35) ;; kill topology read on group (.submitTopology cluster "killgrouptest" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20, LOGS-GROUPS ["alice-group"]} topology) (bind storm-id-killgroup (StormCommon/getStormId state "killgrouptest")) (.advanceClusterTime cluster 5) (is (not-nil? (.stormBase state storm-id-killgroup nil))) (is (not-nil? (.assignmentInfo state storm-id-killgroup nil))) (.killTopology (.getNimbus cluster) "killgrouptest") ;; check that storm is deactivated but alive (is (= TopologyStatus/KILLED (.get_status (.stormBase state storm-id-killgroup nil)))) (is (not-nil? (.assignmentInfo state storm-id-killgroup nil))) (.advanceClusterTime cluster 35) ;; kill topology can't read (.submitTopology cluster "killnoreadtest" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20} topology) (bind storm-id-killnoread (StormCommon/getStormId state "killnoreadtest")) (.advanceClusterTime cluster 5) (is (not-nil? (.stormBase state storm-id-killnoread nil))) (is (not-nil? (.assignmentInfo state storm-id-killnoread nil))) (.killTopology (.getNimbus cluster) "killnoreadtest") ;; check that storm is deactivated but alive (is (= TopologyStatus/KILLED (.get_status (.stormBase state storm-id-killnoread nil)))) (is (not-nil? (.assignmentInfo state storm-id-killnoread nil))) (.advanceClusterTime cluster 35) ;; active topology can read (.submitTopology cluster "2test" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10, LOGS-USERS ["alice", (System/getProperty "user.name")]} topology) (.advanceClusterTime cluster 11) (bind storm-id2 (StormCommon/getStormId state "2test")) (is (not-nil? (.stormBase state storm-id2 nil))) (is (not-nil? (.assignmentInfo state storm-id2 nil))) ;; active topology can not read (.submitTopology cluster "testnoread" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10, LOGS-USERS ["alice"]} topology) (.advanceClusterTime cluster 11) (bind storm-id3 (StormCommon/getStormId state "testnoread")) (is (not-nil? 
(.stormBase state storm-id3 nil))) (is (not-nil? (.assignmentInfo state storm-id3 nil))) ;; active topology can read based on group (.submitTopology cluster "testreadgroup" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10, LOGS-GROUPS ["alice-group"]} topology) (.advanceClusterTime cluster 11) (bind storm-id4 (StormCommon/getStormId state "testreadgroup")) (is (not-nil? (.stormBase state storm-id4 nil))) (is (not-nil? (.assignmentInfo state storm-id4 nil))) ;; at this point have 1 running, 1 killed topo (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) (System/getProperty "user.name")))))] (log-message "Checking user " (System/getProperty "user.name") " " hist-topo-ids) (is (= 4 (count hist-topo-ids))) (is (= storm-id2 (get hist-topo-ids 0))) (is (= storm-id-killgroup (get hist-topo-ids 1))) (is (= storm-id (get hist-topo-ids 2))) (is (= storm-id4 (get hist-topo-ids 3)))) (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) "alice"))))] (log-message "Checking user alice " hist-topo-ids) (is (= 5 (count hist-topo-ids))) (is (= storm-id2 (get hist-topo-ids 0))) (is (= storm-id-killgroup (get hist-topo-ids 1))) (is (= storm-id (get hist-topo-ids 2))) (is (= storm-id3 (get hist-topo-ids 3))) (is (= storm-id4 (get hist-topo-ids 4)))) (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) "admin-user"))))] (log-message "Checking user admin-user " hist-topo-ids) (is (= 6 (count hist-topo-ids))) (is (= storm-id2 (get hist-topo-ids 0))) (is (= storm-id-killgroup (get hist-topo-ids 1))) (is (= storm-id-killnoread (get hist-topo-ids 2))) (is (= storm-id (get hist-topo-ids 3))) (is (= storm-id3 (get hist-topo-ids 4))) (is (= storm-id4 (get hist-topo-ids 5)))) (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) "group-only-user"))))] (log-message "Checking user group-only-user " hist-topo-ids) (is (= 2 (count hist-topo-ids))) (is (= storm-id-killgroup 
(get hist-topo-ids 0))) (is (= storm-id4 (get hist-topo-ids 1)))))))) (deftest test-kill-storm (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 2) (.withPortsPerSupervisor 5) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-TOPOLOGY-BLOBSTORE-DELETION-DELAY-MS 0 NIMBUS-TASK-TIMEOUT-SECS 30 NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind conf (.getDaemonConf cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 14))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20} topology) (bind storm-id (StormCommon/getStormId state "test")) (.advanceClusterTime cluster 15) (is (not-nil? (.stormBase state storm-id nil))) (is (not-nil? (.assignmentInfo state storm-id nil))) (.killTopology (.getNimbus cluster) "test") ;; check that storm is deactivated but alive (is (= TopologyStatus/KILLED (.get_status (.stormBase state storm-id nil)))) (is (not-nil? (.assignmentInfo state storm-id nil))) (.advanceClusterTime cluster 18) ;; check that storm is deactivated but alive (is (= 1 (count (.heartbeatStorms state)))) (.advanceClusterTime cluster 3) (is (nil? (.stormBase state storm-id nil))) (is (nil? (.assignmentInfo state storm-id nil))) ;; cleanup happens on monitoring thread (.advanceClusterTime cluster 11) (is (empty? (.heartbeatStorms state))) ;; TODO: check that code on nimbus was cleaned up locally... (is (thrown? NotAliveException (.killTopology (.getNimbus cluster) "lalala"))) (.submitTopology cluster "2test" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10} topology) (.advanceClusterTime cluster 11) (is (thrown? AlreadyAliveException (.submitTopology cluster "2test" {} topology))) (.advanceClusterTime cluster 11) (bind storm-id (StormCommon/getStormId state "2test")) (is (not-nil? 
(.stormBase state storm-id nil))) (.killTopology (.getNimbus cluster) "2test") (is (thrown? AlreadyAliveException (.submitTopology cluster "2test" {} topology))) (.advanceClusterTime cluster 11) (is (= 1 (count (.heartbeatStorms state)))) (.advanceClusterTime cluster 6) (is (nil? (.stormBase state storm-id nil))) (is (nil? (.assignmentInfo state storm-id nil))) (.advanceClusterTime cluster 11) (is (= 0 (count (.heartbeatStorms state)))) (.submitTopology cluster "test3" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 5} topology) (bind storm-id3 (StormCommon/getStormId state "test3")) (.advanceClusterTime cluster 11) ;; this guarantees an immediate kill notification (.killTopology (.getNimbus cluster) "test3") (.advanceClusterTime cluster 41) (is (nil? (.stormBase state storm-id3 nil))) (is (nil? (.assignmentInfo state storm-id3 nil))) (is (= 0 (count (.heartbeatStorms state)))) ;; this guarantees that monitor thread won't trigger for 10 more seconds (Time/advanceTimeSecs 11) (.waitForIdle cluster) (.submitTopology cluster "test3" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 5} topology) (bind storm-id3 (StormCommon/getStormId state "test3")) (.advanceClusterTime cluster 11) (bind executor-id (first (topology-executors cluster storm-id3))) (do-executor-heartbeat cluster storm-id3 executor-id) (.killTopology (.getNimbus cluster) "test3") (.advanceClusterTime cluster 6) (is (= 1 (count (.heartbeatStorms state)))) (.advanceClusterTime cluster 5) (is (= 0 (count (.heartbeatStorms state)))) ;; test kill with opts (.submitTopology cluster "test4" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 100} topology) (.advanceClusterTime cluster 11) (.killTopologyWithOpts (.getNimbus cluster) "test4" (doto (KillOptions.) (.set_wait_secs 10))) (bind storm-id4 (StormCommon/getStormId state "test4")) (.advanceClusterTime cluster 9) (is (not-nil? (.assignmentInfo state storm-id4 nil))) (.advanceClusterTime cluster 2) (is (nil? 
(.assignmentInfo state storm-id4 nil))) ))) (deftest test-reassignment (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 2) (.withPortsPerSupervisor 5) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-TASK-LAUNCH-SECS 60 NIMBUS-TASK-TIMEOUT-SECS 20 NIMBUS-MONITOR-FREQ-SECS 10 NIMBUS-SUPERVISOR-TIMEOUT-SECS 100 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind conf (.getDaemonConf cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 2))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 2} topology) (.advanceClusterTime cluster 11) (check-consistency cluster "test") (bind storm-id (StormCommon/getStormId state "test")) (bind [executor-id1 executor-id2] (topology-executors cluster storm-id)) (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (bind _ (log-message "ass1, t0: " (pr-str ass1))) (bind _ (log-message "ass2, t0: " (pr-str ass2))) (.advanceClusterTime cluster 30) (bind _ (log-message "ass1, t30, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t30, pre beat: " (pr-str ass2))) (do-executor-heartbeat cluster storm-id executor-id1) (do-executor-heartbeat cluster storm-id executor-id2) (bind _ (log-message "ass1, t30, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t30, post beat: " (pr-str ass2))) (.advanceClusterTime cluster 13) (bind _ (log-message "ass1, t43, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t43, pre beat: " (pr-str ass2))) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (is (= ass2 (executor-assignment cluster storm-id executor-id2))) (do-executor-heartbeat cluster storm-id executor-id1) (bind _ (log-message "ass1, t43, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t43, post beat: " (pr-str ass2))) (.advanceClusterTime cluster 11) 
(bind _ (log-message "ass1, t54, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t54, pre beat: " (pr-str ass2))) (do-executor-heartbeat cluster storm-id executor-id1) (bind _ (log-message "ass1, t54, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t54, post beat: " (pr-str ass2))) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (check-consistency cluster "test") ; have to wait an extra 10 seconds because nimbus may not ; resynchronize its heartbeat time till monitor-time secs after (.advanceClusterTime cluster 11) (bind _ (log-message "ass1, t65, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t65, pre beat: " (pr-str ass2))) (do-executor-heartbeat cluster storm-id executor-id1) (bind _ (log-message "ass1, t65, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t65, post beat: " (pr-str ass2))) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (check-consistency cluster "test") (.advanceClusterTime cluster 11) (bind _ (log-message "ass1, t76, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t76, pre beat: " (pr-str ass2))) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (is (not= ass2 (executor-assignment cluster storm-id executor-id2))) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (bind _ (log-message "ass1, t76, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t76, post beat: " (pr-str ass2))) (check-consistency cluster "test") (.advanceClusterTime cluster 31) (is (not= ass1 (executor-assignment cluster storm-id executor-id1))) (is (= ass2 (executor-assignment cluster storm-id executor-id2))) ; tests launch timeout (check-consistency cluster "test") (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind active-supervisor (.get_node ass2)) (.killSupervisor cluster active-supervisor) (doseq [i (range 12)] (do-executor-heartbeat cluster storm-id executor-id1) (do-executor-heartbeat cluster storm-id executor-id2) (.advanceClusterTime 
cluster 10) ) ;; tests that it doesn't reassign executors if they're heartbeating even if supervisor times out (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (is (= ass2 (executor-assignment cluster storm-id executor-id2))) (check-consistency cluster "test") (.advanceClusterTime cluster 30) (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (is (not-nil? ass1)) (is (not-nil? ass2)) (is (not= active-supervisor (.get_node (executor-assignment cluster storm-id executor-id2)))) (is (not= active-supervisor (.get_node (executor-assignment cluster storm-id executor-id1)))) (check-consistency cluster "test") (doseq [supervisor-id (.supervisors state nil)] (.killSupervisor cluster supervisor-id)) (.advanceClusterTime cluster 90) (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (is (nil? ass1)) (is (nil? ass2)) (check-consistency cluster "test" :assigned? false) (.addSupervisor cluster) (.advanceClusterTime cluster 11) (check-consistency cluster "test") ))) (deftest test-reassignment-to-constrained-cluster (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 0) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-TASK-LAUNCH-SECS 60 NIMBUS-TASK-TIMEOUT-SECS 20 NIMBUS-MONITOR-FREQ-SECS 10 NIMBUS-SUPERVISOR-TIMEOUT-SECS 100 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (.addSupervisor cluster 1 "a") (.addSupervisor cluster 1 "b") (bind conf (.getDaemonConf cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 
2))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 2} topology) (.advanceClusterTime cluster 11) (check-consistency cluster "test") (bind storm-id (StormCommon/getStormId state "test")) (bind [executor-id1 executor-id2] (topology-executors cluster storm-id)) (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (.advanceClusterTime cluster 30) (do-executor-heartbeat cluster storm-id executor-id1) (do-executor-heartbeat cluster storm-id executor-id2) (.advanceClusterTime cluster 13) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (is (= ass2 (executor-assignment cluster storm-id executor-id2))) ;; with rpc reporting mode, only heartbeats from killed supervisor will time out (.killSupervisor cluster (.get_node ass2)) (do-executor-heartbeat cluster storm-id executor-id1) (.advanceClusterTime cluster 11) (do-executor-heartbeat cluster storm-id executor-id1) (.advanceClusterTime cluster 11) (do-executor-heartbeat cluster storm-id executor-id1) (.advanceClusterTime cluster 11) (do-executor-heartbeat cluster storm-id executor-id1) (.advanceClusterTime cluster 11) (do-executor-heartbeat cluster storm-id executor-id1) (check-consistency cluster "test") (is (= 1 (storm-num-workers state "test"))) ))) (defn check-executor-distribution [slot-executors distribution] (check-distribution (vals slot-executors) distribution)) (defn check-num-nodes [slot-executors num-nodes] (let [nodes (->> slot-executors keys (map (fn [np] (.get_node np))) set)] (is (= num-nodes (count nodes))) )) (deftest test-reassign-squeezed-topology (with-open [cluster (.build (doto (LocalCluster$Builder. 
) (.withSimulatedTime) (.withSupervisors 1) (.withPortsPerSupervisor 1) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-TASK-LAUNCH-SECS 60 NIMBUS-TASK-TIMEOUT-SECS 20 NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 9))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 4} topology) ; distribution should be 2, 2, 2, 3 ideally (.advanceClusterTime cluster 11) (bind storm-id (StormCommon/getStormId state "test")) (bind slot-executors (slot-assignments cluster storm-id)) (check-executor-distribution slot-executors [9]) (check-consistency cluster "test") (.addSupervisor cluster 2) (.advanceClusterTime cluster 11) (bind slot-executors (slot-assignments cluster storm-id)) (bind executor->start (executor-start-times cluster storm-id)) (check-executor-distribution slot-executors [3 3 3]) (check-consistency cluster "test") (.addSupervisor cluster 8) ;; this actually works for any time > 0, since zookeeper fires an event causing immediate reassignment ;; doesn't work for time = 0 because it's not waiting for cluster yet, so test might happen before reassignment finishes (.advanceClusterTime cluster 11) (bind slot-executors2 (slot-assignments cluster storm-id)) (bind executor->start2 (executor-start-times cluster storm-id)) (check-executor-distribution slot-executors2 [2 2 2 3]) (check-consistency cluster "test") (bind common (first (Utils/findOne (proxy [IPredicate] [] (test [[k v]] (= 3 (count v)))) slot-executors2))) (is (not-nil? 
common))
      (is (= (slot-executors2 common) (slot-executors common)))
      ;; check that start times are changed for everything but the common one
      (bind same-executors (slot-executors2 common))
      (bind changed-executors (apply concat (vals (dissoc slot-executors2 common))))
      (doseq [t same-executors] (is (= (executor->start t) (executor->start2 t))))
      (doseq [t changed-executors] (is (not= (executor->start t) (executor->start2 t))))
      )))

;; Checks the per-owner resource roll-up reported by Nimbus for 0, 1 and many
;; topologies, for the submitting user, and for a user owning nothing.
;; NOTE(fix): several assertions were written as (is (= (.getter summary)) <expected>),
;; i.e. the expected value sat OUTSIDE the = call, so `is` only evaluated the
;; always-truthy one-argument (= x) and the checks could never fail.  The expected
;; values have been moved inside the comparison.
(deftest test-get-owner-resource-summaries
  (with-open [cluster (.build (doto (LocalCluster$Builder. )
                                (.withSimulatedTime)
                                (.withSupervisors 1)
                                (.withPortsPerSupervisor 12)
                                (.withDaemonConf {SUPERVISOR-ENABLE false
                                                  NIMBUS-MONITOR-FREQ-SECS 10
                                                  TOPOLOGY-MESSAGE-TIMEOUT-SECS 30
                                                  TOPOLOGY-ACKER-EXECUTORS 0
                                                  TOPOLOGY-EVENTLOGGER-EXECUTORS 0 })))]
    (letlocals
      ;;test for 0-topology case
      (.advanceClusterTime cluster 11)
      (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) nil))
      (bind summary (first owner-resource-summaries))
      (is (nil? summary))
      ;;test for 1-topology case
      (bind topology (Thrift/buildTopology
                       {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))}
                       {}))
      (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology)
      (.advanceClusterTime cluster 11)
      (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) nil))
      (bind summary (first owner-resource-summaries))
      (is (= (.get_total_workers summary) 3))
      (is (= (.get_total_executors summary) 3))
      (is (= (.get_total_topologies summary) 1))
      ;;test for many-topology case
      (bind topology2 (Thrift/buildTopology
                        {"2" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 4))}
                        {}))
      (bind topology3 (Thrift/buildTopology
                        {"3" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 5))}
                        {}))
      (.submitTopology cluster "test2" {TOPOLOGY-WORKERS 4 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology2)
      (.submitTopology cluster "test3" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology3)
      (.advanceClusterTime cluster 11)
      (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) nil))
      (bind summary (first owner-resource-summaries))
      (is (= (.get_total_workers summary) 10))
      (is (= (.get_total_executors summary) 12))
      (is (= (.get_total_topologies summary) 3))
      ;;test for specific owner
      (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster)
                                                                 (System/getProperty "user.name")))
      (bind summary (first owner-resource-summaries))
      (is (= (.get_total_workers summary) 10))
      (is (= (.get_total_executors summary) 12))
      (is (= (.get_total_topologies summary) 3))
      ;;test for other user
      (bind other-user (str "not-" (System/getProperty "user.name")))
      (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) other-user))
      (bind summary (first owner-resource-summaries))
      (is (= (.get_total_workers summary) 0))
      (is (= (.get_total_executors summary) 0))
      (is (= (.get_total_topologies summary) 0))
      )))

(deftest test-rebalance
  (with-open [cluster (.build (doto (LocalCluster$Builder. )
                                (.withSimulatedTime)
                                (.withSupervisors 1)
                                (.withDaemonConf {SUPERVISOR-ENABLE false
                                                  NIMBUS-MONITOR-FREQ-SECS 10
                                                  TOPOLOGY-MESSAGE-TIMEOUT-SECS 30
                                                  TOPOLOGY-ACKER-EXECUTORS 0
                                                  TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))]
    (letlocals
      (bind topology (Thrift/buildTopology
                       {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer.
3))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 60} topology) (.advanceClusterTime cluster 11) (bind storm-id (StormCommon/getStormId state "test")) (.addSupervisor cluster 3) (.addSupervisor cluster 3) (.advanceClusterTime cluster 11) (bind slot-executors (slot-assignments cluster storm-id)) ;; check that all workers are on one machine (check-executor-distribution slot-executors [1 1 1]) (check-num-nodes slot-executors 1) (.rebalance (.getNimbus cluster) "test" (RebalanceOptions.)) (.advanceClusterTime cluster 30) (check-executor-distribution slot-executors [1 1 1]) (check-num-nodes slot-executors 1) (.advanceClusterTime cluster 30) (bind slot-executors (slot-assignments cluster storm-id)) (check-executor-distribution slot-executors [1 1 1]) (check-num-nodes slot-executors 3) (is (thrown? InvalidTopologyException (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) (.set_num_executors {"1" (int 0)}) )))) ))) ;TODO: when translating this function, you should replace the map-val with a proper for loop HERE (deftest test-rebalance-change-parallelism (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 4) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 6) {TOPOLOGY-TASKS 12})} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30} topology) (.advanceClusterTime cluster 11) (bind storm-id (StormCommon/getStormId state "test")) (bind checker (fn [distribution] (check-executor-distribution (slot-assignments cluster storm-id) distribution))) (checker [2 2 2]) (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) 
(.set_num_workers (int 6)) )) (.advanceClusterTime cluster 29) (checker [2 2 2]) (.advanceClusterTime cluster 3) (checker [1 1 1 1 1 1]) (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) (.set_num_executors {"1" (int 1)}) )) (.advanceClusterTime cluster 29) (checker [1 1 1 1 1 1]) (.advanceClusterTime cluster 3) (checker [1]) (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) (.set_num_executors {"1" (int 8)}) (.set_num_workers 4) )) (.advanceClusterTime cluster 32) (checker [2 2 2 2]) (check-consistency cluster "test") (bind executor-info (->> (storm-component->executor-info cluster "test") (map-val #(map executor->tasks %)))) (check-distribution (executor-info "1") [2 2 2 2 1 1 1 1]) ))) (defn check-for-collisions [state] (log-message "Checking for collision") (let [assignments (.assignments state nil)] (log-message "Assignemts: " assignments) (let [id->node->ports (into {} (for [id assignments :let [executor->node+port (.get_executor_node_port (.assignmentInfo state id nil)) node+ports (set (.values executor->node+port)) node->ports (apply merge-with (fn [a b] (distinct (concat a b))) (for [np node+ports] {(.get_node np) [(first (.get_port np))]}))]] {id node->ports})) _ (log-message "id->node->ports: " id->node->ports) all-nodes (apply merge-with (fn [a b] (let [ret (concat a b)] (log-message "Can we combine " (pr-str a) " and " (pr-str b) " without collisions? " (apply distinct? ret) " => " (pr-str ret)) (is (apply distinct? ret)) (distinct ret))) (.values id->node->ports))] ))) (deftest test-rebalance-constrained-cluster (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 1) (.withPortsPerSupervisor 4) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 
3))} {})) (bind topology2 (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})) (bind topology3 (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology) (.submitTopology cluster "test2" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology2) (.submitTopology cluster "test3" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology3) (.advanceClusterTime cluster 11) (check-for-collisions state) (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) (.set_num_workers 4) (.set_wait_secs 0) )) (.advanceClusterTime cluster 11) (check-for-collisions state) (.advanceClusterTime cluster 30) (check-for-collisions state) ))) (deftest test-submit-invalid (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0 NIMBUS-EXECUTORS-PER-TOPOLOGY 8 NIMBUS-SLOTS-PER-TOPOLOGY 8})))] (letlocals (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 1) {TOPOLOGY-TASKS 1})} {})) (is (thrown? InvalidTopologyException (.submitTopology cluster "test/aaa" {} topology))) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 16) {TOPOLOGY-TASKS 16})} {})) (bind state (.getClusterState cluster)) (is (thrown? InvalidTopologyException (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3} topology))) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 5) {TOPOLOGY-TASKS 5})} {})) (is (thrown? InvalidTopologyException (.submitTopology cluster "test" {TOPOLOGY-WORKERS 16} topology)))))) (deftest test-clean-inbox "Tests that the inbox correctly cleans jar files." 
  (with-open [_ (Time$SimulatedTime.)
              tmp-path (TmpPath. )]
    (let [dir-location (.getPath tmp-path)
          dir (File. dir-location)
          ;; create an (empty) file under the inbox dir and back-date its
          ;; mtime by `seconds-ago` relative to simulated time
          mk-file (fn [name seconds-ago]
                    (let [f (File. (str dir-location "/" name))
                          t (- (Time/currentTimeMillis) (* seconds-ago 1000))]
                      (FileUtils/touch f)
                      (.setLastModified f t)))
          ;; assert that exactly the given .jar file names remain in the dir
          assert-files-in-dir (fn [compare-file-names]
                                (let [file-names (map #(.getName %) (file-seq dir))]
                                  (is (= (sort compare-file-names)
                                         (sort (filter #(.endsWith % ".jar") file-names))
                                         ))))]
      ;; Make three files a.jar, b.jar, c.jar.
      ;; a and b are older than c and should be deleted first.
      (Time/advanceTimeSecs 100)
      (doseq [fs [["a.jar" 20] ["b.jar" 20] ["c.jar" 0]]]
        (apply mk-file fs))
      (assert-files-in-dir ["a.jar" "b.jar" "c.jar"])
      (Nimbus/cleanInbox dir-location 10)
      (assert-files-in-dir ["c.jar"])
      ;; Clean it again, c.jar should stay
      (Time/advanceTimeSecs 5)
      (Nimbus/cleanInbox dir-location 10)
      (assert-files-in-dir ["c.jar"])
      ;; Advance time, clean again, c.jar should be deleted.
      (Time/advanceTimeSecs 5)
      (Nimbus/cleanInbox dir-location 10)
      (assert-files-in-dir [])
      )))

(defn wait-for-status
  "Poll the cluster summary (every 100ms, bounded by Testing/whileTimeout
   5000) until the topology named `name` reports `status`.  A topology that
   is not present in the summary is treated as \"NOT-RUNNING\"."
  [nimbus name status]
  (Testing/whileTimeout 5000
    (reify Testing$Condition
      (exec [this]
        (let [topo-summary (first (filter (fn [topo] (= name (.get_name topo))) (.get_topologies (.getClusterInfo nimbus))))
              topo-status (if topo-summary (.get_status topo-summary) "NOT-RUNNING")]
          (log-message "WAITING FOR "name" TO BE " status " CURRENT " topo-status)
          (not= topo-status status))))
    (fn [] (Thread/sleep 100))))

(deftest test-leadership
  "Tests that leader actions can only be performed by master and non leader fails to perform the same actions."
  (with-open [zk (InProcessZookeeper. )]
    ;; the mocked Zookeeper makes this nimbus instance win leader election
    (with-open [tmp-nimbus-dir (TmpPath.)
                _ (MockedZookeeper. (proxy [Zookeeper] []
                    (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector.
))))] (let [nimbus-dir (.getPath tmp-nimbus-dir)] (letlocals (bind conf (merge (clojurify-structure (ConfigUtils/readStormConfig)) {STORM-ZOOKEEPER-SERVERS ["localhost"] STORM-CLUSTER-MODE "local" STORM-ZOOKEEPER-PORT (.getPort zk) STORM-LOCAL-DIR nimbus-dir})) (bind ass-backend (LocalAssignmentsBackendFactory/getDefault)) (bind cluster-state (ClusterUtils/mkStormClusterState conf ass-backend (ClusterStateContext.))) (bind nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil cluster-state)) (.launchServer nimbus) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})) (with-open [_ (MockedZookeeper. (proxy [Zookeeper] [] (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector. false))))] (letlocals (bind non-leader-cluster-state (ClusterUtils/mkStormClusterState conf ass-backend (ClusterStateContext.))) (bind non-leader-nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil non-leader-cluster-state)) (.launchServer non-leader-nimbus) ;first we verify that the master nimbus can perform all actions, even with another nimbus present. (.submitTopology nimbus "t1" nil "{}" topology) ;; Instead of sleeping until topology is scheduled, rebalance topology so mk-assignments is called. (.rebalance nimbus "t1" (doto (RebalanceOptions.) (.set_wait_secs 0))) (wait-for-status nimbus "t1" "ACTIVE") (.deactivate nimbus "t1") (.activate nimbus "t1") (.rebalance nimbus "t1" (RebalanceOptions.)) (.killTopology nimbus "t1") ;now we verify that non master nimbus can not perform any of the actions. (is (thrown? RuntimeException (.submitTopology non-leader-nimbus "failing" nil "{}" topology))) (is (thrown? RuntimeException (.killTopology non-leader-nimbus "t1"))) (is (thrown? RuntimeException (.activate non-leader-nimbus "t1"))) (is (thrown? RuntimeException (.deactivate non-leader-nimbus "t1"))) (is (thrown? 
RuntimeException (.rebalance non-leader-nimbus "t1" (RebalanceOptions.)))) (.shutdown non-leader-nimbus) (.disconnect non-leader-cluster-state) )) (.shutdown nimbus) (.disconnect cluster-state)))))) (deftest test-nimbus-iface-submitTopologyWithOpts-checks-authorization (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.DenyAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.DenyAuthorizer"})))] (let [ topology (Thrift/buildTopology {} {}) ] (is (thrown? AuthorizationException (.submitTopologyWithOpts cluster "mystorm" {} topology (SubmitOptions. TopologyInitialStatus/INACTIVE)) )) ) ) ) (deftest test-nimbus-iface-methods-check-authorization (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.DenyAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.DenyAuthorizer"})))] (let [nimbus (.getNimbus cluster) topology-name "test" topology-id "test-id"] (.thenReturn (Mockito/when (.getTopoId cluster-state topology-name)) (Optional/of topology-id)) (is (thrown? AuthorizationException (.rebalance nimbus topology-name (RebalanceOptions.)))) (is (thrown? AuthorizationException (.activate nimbus topology-name))) (is (thrown? AuthorizationException (.deactivate nimbus topology-name))))))) (deftest test-nimbus-check-authorization-params (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. 
) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withNimbusWrapper (reify UnaryOperator (apply [this nimbus] (Mockito/spy nimbus)))) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (let [nimbus (.getNimbus cluster) topology-name "test-nimbus-check-autho-params" topology-id "fake-id" topology (Thrift/buildTopology {} {}) expected-name topology-name expected-conf {TOPOLOGY-NAME expected-name "foo" "bar"}] (.thenReturn (Mockito/when (.getTopoId cluster-state topology-name)) (Optional/of topology-id)) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/anyObject))) expected-conf) (.thenReturn (Mockito/when (.readTopology tc (Mockito/any String) (Mockito/anyObject))) nil) (testing "getTopologyConf calls check-authorization! with the correct parameters." (let [expected-operation "getTopologyConf"] (try (is (= expected-conf (->> (.getTopologyConf nimbus topology-id) JSONValue/parse clojurify-structure))) (catch NotAliveException e) (finally (.checkAuthorization (Mockito/verify nimbus) nil nil "getClusterInfo") (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq topology-name) (Mockito/any Map) (Mockito/eq expected-operation)))))) (testing "getTopology calls check-authorization! with the correct parameters." (let [expected-operation "getTopology" common-spy (->> (proxy [StormCommon] [] (systemTopologyImpl [conf topology] nil)) Mockito/spy)] (with-open [- (StormCommonInstaller. common-spy)] (try (.getTopology nimbus topology-id) (catch NotAliveException e) (finally (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq topology-name) (Mockito/any Map) (Mockito/eq expected-operation)) (. (Mockito/verify common-spy) (systemTopologyImpl (Matchers/any Map) (Matchers/any)))))))) (testing "getUserTopology calls check-authorization with the correct parameters." 
(let [expected-operation "getUserTopology"] (try (.getUserTopology nimbus topology-id) (catch NotAliveException e) (finally (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq topology-name) (Mockito/any Map) (Mockito/eq expected-operation)) ;;One for this time and one for getTopology call (.readTopology (Mockito/verify tc (Mockito/times 2)) (Mockito/eq topology-id) (Mockito/anyObject)))))))))) (deftest test-check-authorization-getSupervisorPageInfo (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withNimbusWrapper (reify UnaryOperator (apply [this nimbus] (Mockito/spy nimbus)))) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (let [nimbus (.getNimbus cluster) expected-name "test-nimbus-check-autho-params" expected-conf {TOPOLOGY-NAME expected-name TOPOLOGY-WORKERS 1 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30 "foo" "bar"} expected-operation "getTopology" assignment (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super1" #{1}), [2 2] (NodeInfo. "super2" #{2})})) topology (doto (StormTopology. ) (.set_spouts {}) (.set_bolts {}) (.set_state_spouts {})) topo-assignment {expected-name assignment} check-auth-state (atom []) mock-check-authorization (fn [nimbus storm-name storm-conf operation] (swap! check-auth-state conj {:nimbus nimbus :storm-name storm-name :storm-conf storm-conf :operation operation})) all-supervisors (doto (HashMap.) (.put "super1" (doto (SupervisorInfo.) (.set_hostname "host1") (.set_meta [(long 1234)]) (.set_uptime_secs (long 123)) (.set_meta [1 2 3]) (.set_used_ports []) (.set_resources_map {}))) (.put "super2" (doto (SupervisorInfo.) 
(.set_hostname "host2") (.set_meta [(long 1234)]) (.set_uptime_secs (long 123)) (.set_meta [1 2 3]) (.set_used_ports []) (.set_resources_map {}))))] (.thenReturn (Mockito/when (.allSupervisorInfo cluster-state)) all-supervisors) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/any Subject))) expected-conf) (.thenReturn (Mockito/when (.readTopology tc (Mockito/any String) (Mockito/any Subject))) topology) (.thenReturn (Mockito/when (.assignmentsInfo cluster-state)) topo-assignment) (.getSupervisorPageInfo nimbus "super1" nil true) ;; afterwards, it should get called twice (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq expected-name) (Mockito/any Map) (Mockito/eq "getSupervisorPageInfo")) (.checkAuthorization (Mockito/verify nimbus) nil nil "getClusterInfo") (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq expected-name) (Mockito/any Map) (Mockito/eq "getTopology")))))) (deftest test-nimbus-iface-getTopology-methods-throw-correctly (with-open [cluster (LocalCluster. )] (let [ nimbus (.getNimbus cluster) id "bogus ID" ] (is (thrown? NotAliveException (.getTopology nimbus id))) (try (.getTopology nimbus id) (catch NotAliveException e (is (= id (.get_msg e))) ) ) (is (thrown? NotAliveException (.getTopologyConf nimbus id))) (try (.getTopologyConf nimbus id) (catch NotAliveException e (is (= id (.get_msg e))) ) ) (is (thrown? NotAliveException (.getTopologyInfo nimbus id))) (try (.getTopologyInfo nimbus id) (catch NotAliveException e (is (= id (.get_msg e))) ) ) (is (thrown? NotAliveException (.getUserTopology nimbus id))) (try (.getUserTopology nimbus id) (catch NotAliveException e (is (= id (.get_msg e))) ) ) ) ) ) (defn mkStormBase [launch-time-secs storm-name status] (doto (StormBase.) 
(.set_name storm-name) (.set_launch_time_secs (int launch-time-secs)) (.set_status status))) (deftest test-nimbus-iface-getClusterInfo-filters-topos-without-bases (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withTopoCache tc) (.withBlobStore blob-store)))] (let [nimbus (.getNimbus cluster) bogus-secs 42 bogus-type TopologyStatus/ACTIVE bogus-bases { "1" nil "2" (mkStormBase bogus-secs "id2-name" bogus-type) "3" nil "4" (mkStormBase bogus-secs "id4-name" bogus-type) } topo-name "test-topo" topo-conf {TOPOLOGY-NAME topo-name TOPOLOGY-WORKERS 1 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30} storm-base (StormBase. ) topology (doto (StormTopology. ) (.set_spouts {}) (.set_bolts {}) (.set_state_spouts {})) ] (.thenReturn (Mockito/when (.stormBase cluster-state (Mockito/any String) (Mockito/anyObject))) storm-base) (.thenReturn (Mockito/when (.topologyBases cluster-state)) bogus-bases) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/any Subject))) topo-conf) (.thenReturn (Mockito/when (.readTopology tc (Mockito/any String) (Mockito/any Subject))) topology) (let [topos (.get_topologies (.getClusterInfo nimbus))] ; The number of topologies in the summary is correct. (is (= (count (filter (fn [b] (second b)) bogus-bases)) (count topos))) ; Each topology present has a valid name. (is (empty? (filter (fn [t] (or (nil? t) (nil? (.get_name t)))) topos))) ; The topologies are those with valid bases. (is (empty? (filter (fn [t] (or (nil? t) (not (number? (read-string (.get_id t)))) (odd? (read-string (.get_id t))) )) topos))) ) ) ) )) (deftest test-file-bogus-download (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [nimbus (.getNimbus cluster)] (is (thrown-cause? 
IllegalArgumentException (.beginFileDownload nimbus nil))) (is (thrown-cause? IllegalArgumentException (.beginFileDownload nimbus ""))) (is (thrown-cause? IllegalArgumentException (.beginFileDownload nimbus "/bogus-path/foo"))) ))) (deftest test-validate-topo-config-on-submit (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (.thenReturn (Mockito/when (.getTopoId cluster-state "test")) (Optional/empty)) (let [topology (Thrift/buildTopology {} {}) bad-config {"topology.isolate.machines" "2"}] (is (thrown-cause? InvalidTopologyException (.submitTopologyWithOpts cluster "test" bad-config topology (SubmitOptions.)))))))) (deftest test-stateless-with-scheduled-topology-to-be-killed ; tests regression of STORM-856 (with-open [zk (InProcessZookeeper. )] (with-open [tmp-nimbus-dir (TmpPath. )] (let [nimbus-dir (.getPath tmp-nimbus-dir)] (letlocals (bind conf (merge (clojurify-structure (ConfigUtils/readStormConfig)) {STORM-ZOOKEEPER-SERVERS ["localhost"] STORM-CLUSTER-MODE "local" STORM-ZOOKEEPER-PORT (.getPort zk) STORM-LOCAL-DIR nimbus-dir})) (bind cluster-state (ClusterUtils/mkStormClusterState conf (ClusterStateContext.))) (bind nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil nil)) (.launchServer nimbus) (Time/sleepSecs 1) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 
3))} {})) (.submitTopology nimbus "t1" nil (str "{\"" TOPOLOGY-MESSAGE-TIMEOUT-SECS "\": 30}") topology) ; make transition for topology t1 to be killed -> nimbus applies this event to cluster state (.killTopology nimbus "t1") ; shutdown nimbus immediately to achieve nimbus doesn't handle event right now (.shutdown nimbus) ; in startup of nimbus it reads cluster state and take proper actions ; in this case nimbus registers topology transition event to scheduler again ; before applying STORM-856 nimbus was killed with NPE (bind nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil nil)) (.launchServer nimbus) (.shutdown nimbus) (.disconnect cluster-state) ))))) (deftest test-topology-action-notifier (with-open [zk (InProcessZookeeper. )] (with-open [tmp-nimbus-dir (TmpPath.) _ (MockedZookeeper. (proxy [Zookeeper] [] (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector. ))))] (let [nimbus-dir (.getPath tmp-nimbus-dir)] (letlocals (bind conf (merge (clojurify-structure (ConfigUtils/readStormConfig)) {STORM-ZOOKEEPER-SERVERS ["localhost"] STORM-CLUSTER-MODE "local" STORM-ZOOKEEPER-PORT (.getPort zk) STORM-LOCAL-DIR nimbus-dir NIMBUS-TOPOLOGY-ACTION-NOTIFIER-PLUGIN (.getName InMemoryTopologyActionNotifier)})) (bind cluster-state (ClusterUtils/mkStormClusterState conf (ClusterStateContext.))) (bind nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil nil)) (.launchServer nimbus) (bind notifier (InMemoryTopologyActionNotifier.)) (Time/sleepSecs 1) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})) (.submitTopology nimbus "test-notification" nil (str "{\"" TOPOLOGY-MESSAGE-TIMEOUT-SECS "\": 30}") topology) (.deactivate nimbus "test-notification") (.activate nimbus "test-notification") (.rebalance nimbus "test-notification" (doto (RebalanceOptions.) (.set_wait_secs 0))) (.killTopologyWithOpts nimbus "test-notification" (doto (KillOptions.) 
(.set_wait_secs 0))) (.shutdown nimbus) ; ensure notifier was invoked for each action,and in the correct order. (is (= ["submitTopology", "activate", "deactivate", "activate", "rebalance", "killTopology"] (.getTopologyActions notifier "test-notification"))) (.disconnect cluster-state) ))))) (deftest test-debug-on-component (with-open [cluster (LocalCluster. )] (let [nimbus (.getNimbus cluster) topology (Thrift/buildTopology {"spout" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})] (.submitTopology cluster "t1" {TOPOLOGY-WORKERS 1} topology) (.debug nimbus "t1" "spout" true 100)))) (deftest test-debug-on-global (with-open [cluster (LocalCluster. )] (let [nimbus (.getNimbus cluster) topology (Thrift/buildTopology {"spout" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})] (.submitTopology cluster "t1" {TOPOLOGY-WORKERS 1} topology) (.debug nimbus "t1" "" true 100)))) ;; if the user sends an empty log config, nimbus will say that all ;; log configs it contains are LogLevelAction/UNCHANGED (deftest empty-save-config-results-in-all-unchanged-actions (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (let [nimbus (.getNimbus cluster) previous-config (LogConfig.) mock-config (LogConfig.) expected-config (LogConfig.)] ;; send something with content to nimbus beforehand (.put_to_named_logger_level previous-config "test" (doto (LogLevel.) (.set_target_log_level "ERROR") (.set_action LogLevelAction/UPDATE))) (.put_to_named_logger_level expected-config "test" (doto (LogLevel.) 
(.set_target_log_level "ERROR") (.set_action LogLevelAction/UNCHANGED))) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/anyObject))) {}) (.thenReturn (Mockito/when (.topologyLogConfig cluster-state (Mockito/any String) (Mockito/anyObject))) previous-config) (.setLogConfig nimbus "foo" mock-config) (.setTopologyLogConfig (Mockito/verify cluster-state) (Mockito/any String) (Mockito/eq expected-config) (Mockito/any Map)))))) (deftest log-level-update-merges-and-flags-existent-log-level (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (let [nimbus (.getNimbus cluster) previous-config (LogConfig.) mock-config (LogConfig.) expected-config (LogConfig.)] ;; send something with content to nimbus beforehand (.put_to_named_logger_level previous-config "test" (doto (LogLevel.) (.set_target_log_level "ERROR") (.set_action LogLevelAction/UPDATE))) (.put_to_named_logger_level previous-config "other-test" (doto (LogLevel.) (.set_target_log_level "DEBUG") (.set_action LogLevelAction/UPDATE))) ;; only change "test" (.put_to_named_logger_level mock-config "test" (doto (LogLevel.) (.set_target_log_level "INFO") (.set_action LogLevelAction/UPDATE))) (.put_to_named_logger_level expected-config "test" (doto (LogLevel.) (.set_target_log_level "INFO") (.set_action LogLevelAction/UPDATE))) (.put_to_named_logger_level expected-config "other-test" (doto (LogLevel.) 
(.set_target_log_level "DEBUG") (.set_action LogLevelAction/UNCHANGED))) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/anyObject))) {}) (.thenReturn (Mockito/when (.topologyLogConfig cluster-state (Mockito/any String) (Mockito/anyObject))) previous-config) (.setLogConfig nimbus "foo" mock-config) (.setTopologyLogConfig (Mockito/verify cluster-state) (Mockito/any String) (Mockito/eq expected-config) (Mockito/any Map)))))) (defn mock-cluster-state ([] (mock-cluster-state nil nil)) ([active-topos inactive-topos] (mock-cluster-state active-topos inactive-topos inactive-topos inactive-topos inactive-topos)) ([active-topos hb-topos error-topos bp-topos] (mock-cluster-state active-topos hb-topos error-topos bp-topos nil)) ([active-topos hb-topos error-topos bp-topos wt-topos] (let [cluster-state (Mockito/mock IStormClusterState)] (.thenReturn (Mockito/when (.activeStorms cluster-state)) active-topos) (.thenReturn (Mockito/when (.heartbeatStorms cluster-state)) hb-topos) (.thenReturn (Mockito/when (.errorTopologies cluster-state)) error-topos) (.thenReturn (Mockito/when (.backpressureTopologies cluster-state)) bp-topos) (.thenReturn (Mockito/when (.idsOfTopologiesWithPrivateWorkerKeys cluster-state)) (into #{} wt-topos)) cluster-state))) (deftest cleanup-storm-ids-returns-inactive-topos (let [mock-state (mock-cluster-state (list "topo1") (list "topo1" "topo2" "topo3")) store (Mockito/mock BlobStore)] (.thenReturn (Mockito/when (.storedTopoIds store)) #{}) (is (= (Nimbus/topoIdsToClean mock-state store {NIMBUS-TOPOLOGY-BLOBSTORE-DELETION-DELAY-MS 0}) #{"topo2" "topo3"})))) (deftest cleanup-storm-ids-performs-union-of-storm-ids-with-active-znodes (let [active-topos (list "hb1" "e2" "bp3") hb-topos (list "hb1" "hb2" "hb3") error-topos (list "e1" "e2" "e3") bp-topos (list "bp1" "bp2" "bp3") mock-state (mock-cluster-state active-topos hb-topos error-topos bp-topos) store (Mockito/mock BlobStore)] (.thenReturn (Mockito/when (.storedTopoIds store)) 
#{}) (is (= (Nimbus/topoIdsToClean mock-state store {NIMBUS-TOPOLOGY-BLOBSTORE-DELETION-DELAY-MS 0}) #{"hb2" "hb3" "e1" "e3" "bp1" "bp2"})))) (deftest cleanup-storm-ids-returns-empty-set-when-all-topos-are-active (let [active-topos (list "hb1" "hb2" "hb3" "e1" "e2" "e3" "bp1" "bp2" "bp3") hb-topos (list "hb1" "hb2" "hb3") error-topos (list "e1" "e2" "e3") bp-topos (list "bp1" "bp2" "bp3") mock-state (mock-cluster-state active-topos hb-topos error-topos bp-topos) store (Mockito/mock BlobStore)] (.thenReturn (Mockito/when (.storedTopoIds store)) #{}) (is (= (Nimbus/topoIdsToClean mock-state store (new java.util.HashMap)) #{})))) (deftest do-cleanup-removes-inactive-znodes (let [inactive-topos (list "topo2" "topo3") hb-cache (into {}(map vector inactive-topos '(nil nil))) mock-state (mock-cluster-state) mock-blob-store (Mockito/mock BlobStore) conf {NIMBUS-MONITOR-FREQ-SECS 10 NIMBUS-TOPOLOGY-BLOBSTORE-DELETION-DELAY-MS 0}] (with-open [_ (MockedZookeeper. (proxy [Zookeeper] [] (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector. ))))] (let [nimbus (Mockito/spy (Nimbus. conf nil mock-state nil mock-blob-store nil nil))] (.set (.getHeartbeatsCache nimbus) hb-cache) (.thenReturn (Mockito/when (.storedTopoIds mock-blob-store)) (HashSet. 
                     inactive-topos))
        (.doCleanup nimbus)
        ;; removed heartbeats znode
        (.teardownHeartbeats (Mockito/verify mock-state) "topo2")
        (.teardownHeartbeats (Mockito/verify mock-state) "topo3")
        ;; removed topo errors znode
        (.teardownTopologyErrors (Mockito/verify mock-state) "topo2")
        (.teardownTopologyErrors (Mockito/verify mock-state) "topo3")
        ;; removed topo directories
        (.forceDeleteTopoDistDir (Mockito/verify nimbus) "topo2")
        (.forceDeleteTopoDistDir (Mockito/verify nimbus) "topo3")
        ;; removed blob store topo keys
        (.rmTopologyKeys (Mockito/verify nimbus) "topo2")
        (.rmTopologyKeys (Mockito/verify nimbus) "topo3")
        ;; removed topology dependencies
        (.rmDependencyJarsInTopology (Mockito/verify nimbus) "topo2")
        (.rmDependencyJarsInTopology (Mockito/verify nimbus) "topo3")
        ;; remove topos from heartbeat cache
        (is (= (count (.get (.getHeartbeatsCache nimbus))) 0))))))

;; With no inactive topologies, doCleanup must not tear down any state and
;; the heartbeat cache must keep both of its entries.
(deftest do-cleanup-does-not-teardown-active-topos
  (let [inactive-topos ()
        hb-cache {"topo1" nil "topo2" nil}
        mock-state (mock-cluster-state)
        mock-blob-store (Mockito/mock BlobStore)
        conf {NIMBUS-MONITOR-FREQ-SECS 10}]
    (with-open [_ (MockedZookeeper. (proxy [Zookeeper] []
                    (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector. ))))]
      (let [nimbus (Mockito/spy (Nimbus. conf nil mock-state nil mock-blob-store nil nil))]
        (.set (.getHeartbeatsCache nimbus) hb-cache)
        (.thenReturn (Mockito/when (.storedTopoIds mock-blob-store)) (set inactive-topos))
        (.doCleanup nimbus)
        (.teardownHeartbeats (Mockito/verify mock-state (Mockito/never)) (Mockito/any))
        (.teardownTopologyErrors (Mockito/verify mock-state (Mockito/never)) (Mockito/any))
        (.forceDeleteTopoDistDir (Mockito/verify nimbus (Mockito/times 0)) (Mockito/anyObject))
        (.rmTopologyKeys (Mockito/verify nimbus (Mockito/times 0)) (Mockito/anyObject))
        ;; hb-cache keeps both entries: inactive-topos is empty, so nothing is
        ;; evicted (the old comment claiming it "goes down to 1" was stale)
        (is (= (count (.get (.getHeartbeatsCache nimbus))) 2))
        (is (contains? (.get (.getHeartbeatsCache nimbus)) "topo1"))
        (is (contains?
(.get (.getHeartbeatsCache nimbus)) "topo2")))))) (deftest user-topologies-for-supervisor (let [assignment (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super1" #{1}), [2 2] (NodeInfo. "super2" #{2})})) assignment2 (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super2" #{2}), [2 2] (NodeInfo. "super2" #{2})})) assignments {"topo1" assignment, "topo2" assignment2} mock-state (mock-cluster-state) mock-blob-store (Mockito/mock BlobStore) mock-tc (Mockito/mock TopoCache) nimbus (Nimbus. {NIMBUS-MONITOR-FREQ-SECS 10} nil mock-state nil mock-blob-store mock-tc (MockLeaderElector. ) nil)] (let [supervisor1-topologies (clojurify-structure (Nimbus/topologiesOnSupervisor assignments "super1")) user1-topologies (clojurify-structure (.filterAuthorized nimbus "getTopology" supervisor1-topologies)) supervisor2-topologies (clojurify-structure (Nimbus/topologiesOnSupervisor assignments "super2")) user2-topologies (clojurify-structure (.filterAuthorized nimbus "getTopology" supervisor2-topologies))] (is (= (list "topo1") supervisor1-topologies)) (is (= #{"topo1"} user1-topologies)) (is (= (list "topo1" "topo2") supervisor2-topologies)) (is (= #{"topo1" "topo2"} user2-topologies))))) (deftest user-topologies-for-supervisor-with-unauthorized-user (let [assignment (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super1" #{1}), [2 2] (NodeInfo. "super2" #{2})})) assignment2 (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super1" #{2}), [2 2] (NodeInfo. "super2" #{2})})) assignments {"topo1" assignment, "authorized" assignment2} mock-state (mock-cluster-state) mock-blob-store (Mockito/mock BlobStore) mock-tc (Mockito/mock TopoCache) nimbus (Nimbus. {NIMBUS-MONITOR-FREQ-SECS 10} nil mock-state nil mock-blob-store mock-tc (MockLeaderElector. 
) nil)] (.thenReturn (Mockito/when (.readTopoConf mock-tc (Mockito/eq "authorized") (Mockito/anyObject))) {TOPOLOGY-NAME "authorized"}) (.thenReturn (Mockito/when (.readTopoConf mock-tc (Mockito/eq "topo1") (Mockito/anyObject))) {TOPOLOGY-NAME "topo1"}) (.setAuthorizationHandler nimbus (reify IAuthorizer (permit [this context operation topo-conf] (= "authorized" (get topo-conf TOPOLOGY-NAME))))) (let [supervisor-topologies (clojurify-structure (Nimbus/topologiesOnSupervisor assignments "super1")) user-topologies (clojurify-structure (.filterAuthorized nimbus "getTopology" supervisor-topologies))] (is (= (list "topo1" "authorized") supervisor-topologies)) (is (= #{"authorized"} user-topologies)))))
8775
;; File header: Apache 2.0 license boilerplate followed by the `ns` declaration for
;; org.apache.storm.nimbus-test (clojure.test based integration tests for the Nimbus
;; daemon). The ns form's :import/:require clauses continue onto the next physical line.
;; Licensed to the Apache Software Foundation (ASF) under one ;; or more contributor license agreements. See the NOTICE file ;; distributed with this work for additional information ;; regarding copyright ownership. The ASF licenses this file ;; to you under the Apache License, Version 2.0 (the ;; "License"); you may not use this file except in compliance ;; with the License. You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. (ns org.apache.storm.nimbus-test (:use [clojure test]) (:require [org.apache.storm [util :as util]]) (:import [java.util.function UnaryOperator]) (:import [org.apache.storm.testing InProcessZookeeper MockLeaderElector TestWordCounter TestWordSpout TestGlobalCount TestAggregatesCounter TestPlannerSpout TestPlannerBolt] [org.apache.storm.blobstore BlobStore] [org.apache.storm.nimbus InMemoryTopologyActionNotifier] [org.apache.storm.daemon.nimbus TopoCache Nimbus Nimbus$StandaloneINimbus] [org.apache.storm.generated GlobalStreamId TopologyStatus SupervisorInfo StormTopology StormBase] [org.apache.storm LocalCluster LocalCluster$Builder Thrift MockAutoCred Testing Testing$Condition] [org.apache.storm.stats BoltExecutorStats StatsUtil] [org.apache.storm.security.auth IGroupMappingServiceProvider IAuthorizer]) (:import [org.apache.storm.testing.staticmocking MockedZookeeper]) (:import [org.apache.storm.testing TmpPath]) (:import [org.apache.storm.scheduler INimbus]) (:import [org.mockito Mockito Matchers]) (:import [org.mockito.exceptions.base MockitoAssertionError]) (:import [org.apache.storm.nimbus ILeaderElector NimbusInfo]) (:import [org.apache.storm.testing.staticmocking MockedCluster]) 
;; ns form tail (remaining :import/:use/:require clauses), then:
;;   *STORM-CONF*  - dynamic var holding the default storm config read via
;;                   ConfigUtils/readStormConfig, clojurified.
;;   mk-nimbus     - private test helper that constructs a Nimbus instance; the 2-arity
;;                   delegates to the 6-arity with nils, and NIMBUS-MONITOR-FREQ-SECS 10
;;                   is merged in because (per the inline comment) the blacklist
;;                   scheduler requires it. The constructor call completes on the
;;                   next physical line.
(:import [org.apache.storm.generated Credentials NotAliveException SubmitOptions TopologyInitialStatus TopologyStatus AlreadyAliveException KillOptions RebalanceOptions InvalidTopologyException AuthorizationException LogConfig LogLevel LogLevelAction Assignment NodeInfo]) (:import [java.util Map HashMap HashSet Optional]) (:import [java.io File]) (:import [javax.security.auth Subject]) (:import [org.apache.storm.utils Time Time$SimulatedTime IPredicate StormCommonInstaller Utils$UptimeComputer ReflectionUtils Utils ConfigUtils ServerConfigUtils] [org.apache.storm.utils.staticmocking ServerConfigUtilsInstaller ReflectionUtilsInstaller UtilsInstaller]) (:import [org.apache.storm.zookeeper Zookeeper]) (:import [org.apache.commons.io FileUtils]) (:import [org.json.simple JSONValue]) (:import [org.apache.storm.daemon StormCommon]) (:import [org.apache.storm.cluster IStormClusterState StormClusterStateImpl ClusterStateContext ClusterUtils] [org.apache.storm.assignments LocalAssignmentsBackendFactory]) (:use [org.apache.storm util daemon-config config log]) (:require [conjure.core]) (:use [conjure core])) (def ^:dynamic *STORM-CONF* (clojurify-structure (ConfigUtils/readStormConfig))) (defn- mk-nimbus ([conf inimbus] (mk-nimbus conf inimbus nil nil nil nil)) ([conf inimbus blob-store leader-elector group-mapper cluster-state] ;blacklist scheduler requires nimbus-monitor-freq-secs as input parameter. (let [conf-with-nimbus-monitor-freq (merge {NIMBUS-MONITOR-FREQ-SECS 10} conf)] (Nimbus. 
;; Helper layer (continues mk-nimbus, then query helpers over cluster state):
;;   from-json                       - parse a JSON string to a clojure structure, nil-safe.
;;   storm-component->task-info      - component id -> task ids for a named topology,
;;                                     via StormCommon/stormTaskInfo reversed.
;;   getCredentials                  - credentials map stored for a topology, or nil.
;;   storm-component->executor-info  - component id -> executor ids from the assignment.
;;   storm-num-workers               - number of distinct node+port slots assigned.
;;   topology-nodes                  - set of node ids hosting the topology.
;;   topology-slots                  - set of NodeInfo slots (completes on next line).
conf-with-nimbus-monitor-freq inimbus cluster-state nil blob-store nil leader-elector group-mapper)))) (defn- from-json [^String str] (if str (clojurify-structure (JSONValue/parse str)) nil)) (defn storm-component->task-info [cluster storm-name] (let [storm-id (StormCommon/getStormId (.getClusterState cluster) storm-name) nimbus (.getNimbus cluster)] (-> (.getUserTopology nimbus storm-id) (#(StormCommon/stormTaskInfo % (from-json (.getTopologyConf nimbus storm-id)))) (Utils/reverseMap) clojurify-structure))) (defn getCredentials [cluster storm-name] (let [storm-id (StormCommon/getStormId (.getClusterState cluster) storm-name) creds (.credentials (.getClusterState cluster) storm-id nil)] (if creds (into {} (.get_creds creds))))) (defn storm-component->executor-info [cluster storm-name] (let [storm-id (StormCommon/getStormId (.getClusterState cluster) storm-name) nimbus (.getNimbus cluster) storm-conf (from-json (.getTopologyConf nimbus storm-id)) topology (.getUserTopology nimbus storm-id) task->component (clojurify-structure (StormCommon/stormTaskInfo topology storm-conf)) state (.getClusterState cluster) get-component (comp task->component first)] (->> (.assignmentInfo state storm-id nil) .get_executor_node_port .keySet clojurify-structure (map (fn [e] {e (get-component e)})) (apply merge) (Utils/reverseMap) clojurify-structure))) (defn storm-num-workers [state storm-name] (let [storm-id (StormCommon/getStormId state storm-name) assignment (.assignmentInfo state storm-id nil)] (.size (Utils/reverseMap (.get_executor_node_port assignment))))) (defn topology-nodes [state storm-name] (let [storm-id (StormCommon/getStormId state storm-name) assignment (.assignmentInfo state storm-id nil)] (->> assignment .get_executor_node_port .values (map (fn [np] (.get_node np))) set ))) (defn topology-slots [state storm-name] (let [storm-id (StormCommon/getStormId state storm-name) assignment (.assignmentInfo state storm-id nil)] (->> assignment .get_executor_node_port .values set 
;; More helpers:
;;   topology-node-distribution - {workers-per-node -> node-count} histogram (the TODO
;;                                about replacing map-val with a loop is original).
;;   topology-num-nodes         - count of distinct nodes for a topology.
;;   executor-assignment        - NodeInfo slot assigned to one executor id.
;;   executor-start-times       - executor -> start-time-secs map.
;;   do-executor-heartbeat      - writes a fabricated worker heartbeat (BoltExecutorStats)
;;                                into cluster state and sends the RPC heartbeat to
;;                                Nimbus; its BoltExecutorStats ctor call completes on
;;                                the next physical line.
))) ;TODO: when translating this function, don't call map-val, but instead use an inline for loop. ; map-val is a temporary kluge for clojure. (defn topology-node-distribution [state storm-name] (let [storm-id (StormCommon/getStormId state storm-name) assignment (.assignmentInfo state storm-id nil)] (->> assignment .get_executor_node_port .values set (group-by (fn [np] (.get_node np))) (map-val count) (map (fn [[_ amt]] {amt 1})) (apply merge-with +) ))) (defn topology-num-nodes [state storm-name] (count (topology-nodes state storm-name))) (defn executor-assignment [cluster storm-id executor-id] (let [state (.getClusterState cluster) assignment (.assignmentInfo state storm-id nil)] (.get (.get_executor_node_port assignment) executor-id) )) (defn executor-start-times [cluster storm-id] (let [state (.getClusterState cluster) assignment (.assignmentInfo state storm-id nil)] (clojurify-structure (.get_executor_start_time_secs assignment)))) (defn do-executor-heartbeat [cluster storm-id executor] (let [state (.getClusterState cluster) executor->node+port (.get_executor_node_port (.assignmentInfo state storm-id nil)) np (.get executor->node+port executor) node (.get_node np) port (first (.get_port np)) curr-beat (StatsUtil/convertZkWorkerHb (.getWorkerHeartbeat state storm-id node port)) stats (if (get curr-beat "executor-stats") (get curr-beat "executor-stats") (HashMap.))] (log-warn "curr-beat:" (prn-str curr-beat) ",stats:" (prn-str stats)) (log-warn "stats type:" (type stats)) (.put stats (StatsUtil/convertExecutor executor) (.renderStats (BoltExecutorStats. 
;; Tail of do-executor-heartbeat, then:
;;   slot-assignments   - slot (NodeInfo) -> executors, reversed from the assignment.
;;   task-ids           - all task ids for a topology.
;;   topology-executors - executor ids present in the assignment (with debug logging).
;;   check-distribution - asserts multiset equality of per-slot executor counts.
;;   disjoint?          - true when the given sets share no elements.
;;   executor->tasks    - NOTE(review): the body is two expressions — a bare
;;                        `clojurify-structure` (evaluated and discarded) and the
;;                        StormCommon call, so the raw Java list is returned
;;                        un-clojurified. Looks like missing parens around
;;                        (clojurify-structure (StormCommon/executorIdToTasks ...));
;;                        verify against upstream before fixing.
;;   check-consistency  - defnk asserting that every task is assigned (when :assigned?),
;;                        and node/start-time bookkeeping agrees; completes next line.
20 (*STORM-CONF* NUM-STAT-BUCKETS)))) (log-warn "merged:" stats) (.workerHeartbeat state storm-id node port (StatsUtil/thriftifyZkWorkerHb (StatsUtil/mkZkWorkerHb storm-id stats (int 10)))) (.sendSupervisorWorkerHeartbeat (.getNimbus cluster) (StatsUtil/thriftifyRPCWorkerHb storm-id executor)))) (defn slot-assignments [cluster storm-id] (let [state (.getClusterState cluster) assignment (.assignmentInfo state storm-id nil)] (clojurify-structure (Utils/reverseMap (.get_executor_node_port assignment))))) (defn task-ids [cluster storm-id] (let [nimbus (.getNimbus cluster)] (-> (.getUserTopology nimbus storm-id) (#(StormCommon/stormTaskInfo % (from-json (.getTopologyConf nimbus storm-id)))) clojurify-structure keys))) (defn topology-executors [cluster storm-id] (let [state (.getClusterState cluster) assignment (.assignmentInfo state storm-id nil) ret-keys (keys (.get_executor_node_port assignment)) _ (log-message "ret-keys: " (pr-str ret-keys)) ] ret-keys )) (defn check-distribution [items distribution] (let [counts (map long (map count items))] (is (Testing/multiseteq counts (map long distribution))))) (defn disjoint? [& sets] (let [combined (apply concat sets)] (= (count combined) (count (set combined))) )) (defn executor->tasks [executor-id] clojurify-structure (StormCommon/executorIdToTasks executor-id)) (defnk check-consistency [cluster storm-name :assigned? true] (let [state (.getClusterState cluster) storm-id (StormCommon/getStormId state storm-name) task-ids (task-ids cluster storm-id) assignment (.assignmentInfo state storm-id nil) executor->node+port (.get_executor_node_port assignment) task->node+port (StormCommon/taskToNodeport executor->node+port) assigned-task-ids (mapcat executor->tasks (keys executor->node+port)) all-nodes (set (map (fn [np] (.get_node np)) (.values executor->node+port)))] (when assigned? (is (= (sort task-ids) (sort assigned-task-ids))) (doseq [t task-ids] (is (not-nil? 
;; check-consistency tail, then:
;;   test-bogusId    - every Nimbus getter throws NotAliveException for an unknown id.
;;   test-assignment - submits two topologies on a 4-supervisor simulated cluster and
;;                     checks task counts / worker counts per component; the second
;;                     topology's builder call completes on the next physical line.
(.get task->node+port t))))) (doseq [[e s] executor->node+port] (is (not-nil? s))) (is (= all-nodes (set (keys (.get_node_host assignment))))) (doseq [[e s] executor->node+port] (is (not-nil? (.get (.get_executor_start_time_secs assignment) e)))) )) (deftest test-bogusId (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSupervisors 4) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [state (.getClusterState cluster) nimbus (.getNimbus cluster)] (is (thrown? NotAliveException (.getTopologyConf nimbus "bogus-id"))) (is (thrown? NotAliveException (.getTopology nimbus "bogus-id"))) (is (thrown? NotAliveException (.getUserTopology nimbus "bogus-id"))) (is (thrown? NotAliveException (.getTopologyInfo nimbus "bogus-id"))) (is (thrown? NotAliveException (.uploadNewCredentials nimbus "bogus-id" (Credentials.)))) ))) (deftest test-assignment (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 4) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [state (.getClusterState cluster) topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. false) (Integer. 3))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 4)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.))}) topology2 (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 12))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 6)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareGlobalGrouping)} (TestPlannerBolt.) (Integer. 
;; test-assignment tail (submits "mystorm" and "storm2", asserts per-component task
;; counts and worker counts), then:
;;   isolation-nimbus      - reify of INimbus delegating to Nimbus$StandaloneINimbus,
;;                           except getHostName returns the node-id itself.
;;   test-auto-credentials - opens on this line; its LocalCluster$Builder call
;;                           completes on the next physical line.
8)) "4" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareGlobalGrouping) (Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 4))}) _ (.submitTopology cluster "mystorm" {TOPOLOGY-WORKERS 4} topology) _ (.advanceClusterTime cluster 11) task-info (storm-component->task-info cluster "mystorm")] (check-consistency cluster "mystorm") ;; 3 should be assigned once (if it were optimized, we'd have ;; different topology) (is (= 1 (count (.assignments state nil)))) (is (= 1 (count (task-info "1")))) (is (= 4 (count (task-info "2")))) (is (= 1 (count (task-info "3")))) (is (= 4 (storm-num-workers state "mystorm"))) (.submitTopology cluster "storm2" {TOPOLOGY-WORKERS 20} topology2) (.advanceClusterTime cluster 11) (check-consistency cluster "storm2") (is (= 2 (count (.assignments state nil)))) (let [task-info (storm-component->task-info cluster "storm2")] (is (= 12 (count (task-info "1")))) (is (= 6 (count (task-info "2")))) (is (= 8 (count (task-info "3")))) (is (= 4 (count (task-info "4")))) (is (= 8 (storm-num-workers state "storm2"))) ) ))) (defn isolation-nimbus [] (let [standalone (Nimbus$StandaloneINimbus.)] (reify INimbus (prepare [this conf local-dir] (.prepare standalone conf local-dir) ) (allSlotsAvailableForScheduling [this supervisors topologies topologies-missing-assignments] (.allSlotsAvailableForScheduling standalone supervisors topologies topologies-missing-assignments)) (assignSlots [this topology slots] (.assignSlots standalone topology slots) ) (getForcedScheduler [this] (.getForcedScheduler standalone)) (getHostName [this supervisors node-id] node-id )))) (deftest test-auto-credentials (with-open [cluster (.build (doto (LocalCluster$Builder. 
;; test-auto-credentials body: configures MockAutoCred as both credential renewer and
;; auto-cred plugin, submits an INACTIVE topology (the binding named `-` is an
;; intentional throwaway for the .set_creds side effect), then asserts the nimbus-
;; generated credential appears and, after advancing simulated time past
;; NIMBUS-CREDENTIAL-RENEW-FREQ-SECS, that renewed values replace the originals.
;; Also defines:
;;   letlocals - macro giving sequential (bind sym expr) forms sugar over let.
;; and opens test-isolated-assignment (builder completes on the next physical line).
) (.withSimulatedTime) (.withSupervisors 6) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0 NIMBUS-CREDENTIAL-RENEW-FREQ-SECS 10 NIMBUS-CREDENTIAL-RENEWERS (list "org.apache.storm.MockAutoCred") NIMBUS-AUTO-CRED-PLUGINS (list "org.apache.storm.MockAutoCred") })))] (let [state (.getClusterState cluster) topology-name "test-auto-cred-storm" submitOptions (SubmitOptions. TopologyInitialStatus/INACTIVE) - (.set_creds submitOptions (Credentials. (HashMap.))) topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. false) (Integer. 3))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 4)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.))}) _ (.submitTopologyWithOpts cluster topology-name {TOPOLOGY-WORKERS 4 TOPOLOGY-AUTO-CREDENTIALS (list "org.apache.storm.MockAutoCred") } topology submitOptions) credentials (getCredentials cluster topology-name)] ; check that the credentials have nimbus auto generated cred (is (= (.get credentials MockAutoCred/NIMBUS_CRED_KEY) MockAutoCred/NIMBUS_CRED_VAL)) ;advance cluster time so the renewers can execute (.advanceClusterTime cluster 20) ;check that renewed credentials replace the original credential. (is (= (.get (getCredentials cluster topology-name) MockAutoCred/NIMBUS_CRED_KEY) MockAutoCred/NIMBUS_CRED_RENEW_VAL)) (is (= (.get (getCredentials cluster topology-name) MockAutoCred/GATEWAY_CRED_KEY) MockAutoCred/GATEWAY_CRED_RENEW_VAL))))) (defmacro letlocals [& body] (let [[tobind lexpr] (split-at (dec (count body)) body) binded (vec (mapcat (fn [e] (if (and (list? e) (= 'bind (first e))) [(second e) (last e)] ['_ e] )) tobind))] `(let ~binded ~(first lexpr)))) (deftest test-isolated-assignment (with-open [cluster (.build (doto (LocalCluster$Builder. 
;; test-isolated-assignment body: uses the IsolationScheduler with machine quotas
;; {"tester1" 3, "tester2" 2}. Submits a non-isolated topology first ("noniso"),
;; then the two isolated ones, and asserts node/worker counts plus per-topology
;; node distributions. Continues onto the next physical line with the disjointness
;; check over all three topologies' node sets.
) (.withSimulatedTime) (.withSupervisors 6) (.withINimbus (isolation-nimbus)) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0 STORM-SCHEDULER "org.apache.storm.scheduler.IsolationScheduler" ISOLATION-SCHEDULER-MACHINES {"tester1" 3 "tester2" 2} NIMBUS-MONITOR-FREQ-SECS 10 })))] (letlocals (bind state (.getClusterState cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. false) (Integer. 3))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 5)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.))})) (.submitTopology cluster "noniso" {TOPOLOGY-WORKERS 4} topology) (.advanceClusterTime cluster 11) (is (= 4 (topology-num-nodes state "noniso"))) (is (= 4 (storm-num-workers state "noniso"))) (.submitTopology cluster "tester1" {TOPOLOGY-WORKERS 6} topology) (.submitTopology cluster "tester2" {TOPOLOGY-WORKERS 6} topology) (.advanceClusterTime cluster 11) (bind task-info-tester1 (storm-component->task-info cluster "tester1")) (bind task-info-tester2 (storm-component->task-info cluster "tester2")) (is (= 1 (topology-num-nodes state "noniso"))) (is (= 3 (storm-num-workers state "noniso"))) (is (= {2 3} (topology-node-distribution state "tester1"))) (is (= {3 2} (topology-node-distribution state "tester2"))) (is (apply disjoint? 
;; test-isolated-assignment tail (stability check: slot assignments unchanged after
;; 20 more simulated seconds), then:
;;   test-zero-executor-or-tasks - components with TOPOLOGY-TASKS 0/2/5 produce the
;;                                 expected task counts and only 2 workers.
;;   test-executor-assignments   - opens here (the original TODO about map-val is
;;                                 preserved); builder completes on the next line.
(map (partial topology-nodes state) ["noniso" "tester1" "tester2"]))) (check-consistency cluster "tester1") (check-consistency cluster "tester2") (check-consistency cluster "noniso") ;;check that nothing gets reassigned (bind tester1-slots (topology-slots state "tester1")) (bind tester2-slots (topology-slots state "tester2")) (bind noniso-slots (topology-slots state "noniso")) (.advanceClusterTime cluster 20) (is (= tester1-slots (topology-slots state "tester1"))) (is (= tester2-slots (topology-slots state "tester2"))) (is (= noniso-slots (topology-slots state "noniso"))) ))) (deftest test-zero-executor-or-tasks (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 6) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [state (.getClusterState cluster) topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. false) (Integer. 3) {TOPOLOGY-TASKS 0})} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 1) {TOPOLOGY-TASKS 2}) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) nil {TOPOLOGY-TASKS 5})}) _ (.submitTopology cluster "mystorm" {TOPOLOGY-WORKERS 4} topology) _ (.advanceClusterTime cluster 11) task-info (storm-component->task-info cluster "mystorm")] (check-consistency cluster "mystorm") (is (= 0 (count (task-info "1")))) (is (= 2 (count (task-info "2")))) (is (= 5 (count (task-info "3")))) (is (= 2 (storm-num-workers state "mystorm"))) ;; because only 2 executors ))) ;TODO: when translating this function, you should replace the map-val with a proper for loop HERE (deftest test-executor-assignments (with-open [cluster (.build (doto (LocalCluster$Builder. 
;; test-executor-assignments body: checks how tasks are packed into executors
;; (e.g. 5 tasks across 3 executors -> distribution [2 2 1]), then opens
;; test-over-parallelism-assignment, whose topology builder completes on the
;; next physical line.
) (.withSimulatedTime) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3) {TOPOLOGY-TASKS 5})} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 8) {TOPOLOGY-TASKS 2}) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 3))}) _ (.submitTopology cluster "mystorm" {TOPOLOGY-WORKERS 4} topology) _ (.advanceClusterTime cluster 11) task-info (storm-component->task-info cluster "mystorm") executor-info (->> (storm-component->executor-info cluster "mystorm") (map-val #(map executor->tasks %)))] (check-consistency cluster "mystorm") (is (= 5 (count (task-info "1")))) (check-distribution (executor-info "1") [2 2 1]) (is (= 2 (count (task-info "2")))) (check-distribution (executor-info "2") [1 1]) (is (= 3 (count (task-info "3")))) (check-distribution (executor-info "3") [1 1 1]) ))) (deftest test-over-parallelism-assignment (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 2) (.withPortsPerSupervisor 5) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [state (.getClusterState cluster) topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 21))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 9)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 2)) "4" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 
;; test-over-parallelism-assignment tail: 42 requested executors are squeezed into
;; the 10 available slots / 7 requested workers. Then test-topo-history opens:
;; a mocked IGroupMappingServiceProvider maps every user to #{"alice-group"}, and
;; a series of topologies are submitted/killed to build up history; continues on
;; the next physical line.
10))}) _ (.submitTopology cluster "test" {TOPOLOGY-WORKERS 7} topology) _ (.advanceClusterTime cluster 11) task-info (storm-component->task-info cluster "test")] (check-consistency cluster "test") (is (= 21 (count (task-info "1")))) (is (= 9 (count (task-info "2")))) (is (= 2 (count (task-info "3")))) (is (= 10 (count (task-info "4")))) (is (= 7 (storm-num-workers state "test"))) ))) (deftest test-topo-history (let [group-mapper (Mockito/mock IGroupMappingServiceProvider)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 2) (.withPortsPerSupervisor 5) (.withGroupMapper group-mapper) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-ADMINS ["admin-user"] NIMBUS-TASK-TIMEOUT-SECS 30 NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-ACKER-EXECUTORS 0})))] (.thenReturn (Mockito/when (.getGroups group-mapper (Mockito/anyObject))) #{"alice-group"}) (letlocals (bind conf (.getDaemonConf cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 4))} {})) (bind state (.getClusterState cluster)) ; get topology history when there's no topology history (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) (System/getProperty "user.name")))))] (log-message "Checking user " (System/getProperty "user.name") " " hist-topo-ids) (is (= 0 (count hist-topo-ids)))) (.submitTopology cluster "test" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20, LOGS-USERS ["alice", (System/getProperty "user.name")]} topology) (bind storm-id (StormCommon/getStormId state "test")) (.advanceClusterTime cluster 5) (is (not-nil? (.stormBase state storm-id nil))) (is (not-nil? (.assignmentInfo state storm-id nil))) (.killTopology (.getNimbus cluster) "test") ;; check that storm is deactivated but alive (is (= TopologyStatus/KILLED (.get_status (.stormBase state storm-id nil)))) (is (not-nil? 
;; test-topo-history continuation: kills topologies visible by user (LOGS-USERS),
;; by group (LOGS-GROUPS), and one readable by neither, then submits active
;; topologies with varying visibility; history assertions follow on the next line.
(.assignmentInfo state storm-id nil))) (.advanceClusterTime cluster 35) ;; kill topology read on group (.submitTopology cluster "killgrouptest" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20, LOGS-GROUPS ["alice-group"]} topology) (bind storm-id-killgroup (StormCommon/getStormId state "killgrouptest")) (.advanceClusterTime cluster 5) (is (not-nil? (.stormBase state storm-id-killgroup nil))) (is (not-nil? (.assignmentInfo state storm-id-killgroup nil))) (.killTopology (.getNimbus cluster) "killgrouptest") ;; check that storm is deactivated but alive (is (= TopologyStatus/KILLED (.get_status (.stormBase state storm-id-killgroup nil)))) (is (not-nil? (.assignmentInfo state storm-id-killgroup nil))) (.advanceClusterTime cluster 35) ;; kill topology can't read (.submitTopology cluster "killnoreadtest" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20} topology) (bind storm-id-killnoread (StormCommon/getStormId state "killnoreadtest")) (.advanceClusterTime cluster 5) (is (not-nil? (.stormBase state storm-id-killnoread nil))) (is (not-nil? (.assignmentInfo state storm-id-killnoread nil))) (.killTopology (.getNimbus cluster) "killnoreadtest") ;; check that storm is deactivated but alive (is (= TopologyStatus/KILLED (.get_status (.stormBase state storm-id-killnoread nil)))) (is (not-nil? (.assignmentInfo state storm-id-killnoread nil))) (.advanceClusterTime cluster 35) ;; active topology can read (.submitTopology cluster "2test" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10, LOGS-USERS ["alice", (System/getProperty "user.name")]} topology) (.advanceClusterTime cluster 11) (bind storm-id2 (StormCommon/getStormId state "2test")) (is (not-nil? (.stormBase state storm-id2 nil))) (is (not-nil? (.assignmentInfo state storm-id2 nil))) ;; active topology can not read (.submitTopology cluster "testnoread" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10, LOGS-USERS ["alice"]} topology) (.advanceClusterTime cluster 11) (bind storm-id3 (StormCommon/getStormId state "testnoread")) (is (not-nil? 
;; test-topo-history tail: asserts which topology ids each principal sees in
;; .getTopologyHistory — the submitting user (4), "alice" (5), the nimbus admin
;; "admin-user" (all 6), and a group-only user (2).
;; NOTE(review): the log-message string "Checking user <NAME> " contains a literal
;; "<NAME>" placeholder — an anonymization artifact (upstream logs "alice"). It is a
;; runtime string, so it is left untouched here; restore from upstream if desired.
;; test-kill-storm then opens: kill/timeout lifecycle assertions under simulated time.
(.stormBase state storm-id3 nil))) (is (not-nil? (.assignmentInfo state storm-id3 nil))) ;; active topology can read based on group (.submitTopology cluster "testreadgroup" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10, LOGS-GROUPS ["alice-group"]} topology) (.advanceClusterTime cluster 11) (bind storm-id4 (StormCommon/getStormId state "testreadgroup")) (is (not-nil? (.stormBase state storm-id4 nil))) (is (not-nil? (.assignmentInfo state storm-id4 nil))) ;; at this point have 1 running, 1 killed topo (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) (System/getProperty "user.name")))))] (log-message "Checking user " (System/getProperty "user.name") " " hist-topo-ids) (is (= 4 (count hist-topo-ids))) (is (= storm-id2 (get hist-topo-ids 0))) (is (= storm-id-killgroup (get hist-topo-ids 1))) (is (= storm-id (get hist-topo-ids 2))) (is (= storm-id4 (get hist-topo-ids 3)))) (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) "alice"))))] (log-message "Checking user <NAME> " hist-topo-ids) (is (= 5 (count hist-topo-ids))) (is (= storm-id2 (get hist-topo-ids 0))) (is (= storm-id-killgroup (get hist-topo-ids 1))) (is (= storm-id (get hist-topo-ids 2))) (is (= storm-id3 (get hist-topo-ids 3))) (is (= storm-id4 (get hist-topo-ids 4)))) (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) "admin-user"))))] (log-message "Checking user admin-user " hist-topo-ids) (is (= 6 (count hist-topo-ids))) (is (= storm-id2 (get hist-topo-ids 0))) (is (= storm-id-killgroup (get hist-topo-ids 1))) (is (= storm-id-killnoread (get hist-topo-ids 2))) (is (= storm-id (get hist-topo-ids 3))) (is (= storm-id3 (get hist-topo-ids 4))) (is (= storm-id4 (get hist-topo-ids 5)))) (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) "group-only-user"))))] (log-message "Checking user group-only-user " hist-topo-ids) (is (= 2 (count hist-topo-ids))) (is (= storm-id-killgroup 
;; test-topo-history final assertions, then test-kill-storm: exercises the full kill
;; lifecycle under simulated time — a killed topology stays KILLED-but-assigned until
;; its wait elapses, cleanup happens on the monitor thread, killing an unknown name
;; throws NotAliveException, and resubmitting a live name throws AlreadyAliveException.
(get hist-topo-ids 0))) (is (= storm-id4 (get hist-topo-ids 1)))))))) (deftest test-kill-storm (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 2) (.withPortsPerSupervisor 5) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-TOPOLOGY-BLOBSTORE-DELETION-DELAY-MS 0 NIMBUS-TASK-TIMEOUT-SECS 30 NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind conf (.getDaemonConf cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 14))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20} topology) (bind storm-id (StormCommon/getStormId state "test")) (.advanceClusterTime cluster 15) (is (not-nil? (.stormBase state storm-id nil))) (is (not-nil? (.assignmentInfo state storm-id nil))) (.killTopology (.getNimbus cluster) "test") ;; check that storm is deactivated but alive (is (= TopologyStatus/KILLED (.get_status (.stormBase state storm-id nil)))) (is (not-nil? (.assignmentInfo state storm-id nil))) (.advanceClusterTime cluster 18) ;; check that storm is deactivated but alive (is (= 1 (count (.heartbeatStorms state)))) (.advanceClusterTime cluster 3) (is (nil? (.stormBase state storm-id nil))) (is (nil? (.assignmentInfo state storm-id nil))) ;; cleanup happens on monitoring thread (.advanceClusterTime cluster 11) (is (empty? (.heartbeatStorms state))) ;; TODO: check that code on nimbus was cleaned up locally... (is (thrown? NotAliveException (.killTopology (.getNimbus cluster) "lalala"))) (.submitTopology cluster "2test" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10} topology) (.advanceClusterTime cluster 11) (is (thrown? AlreadyAliveException (.submitTopology cluster "2test" {} topology))) (.advanceClusterTime cluster 11) (bind storm-id (StormCommon/getStormId state "2test")) (is (not-nil? 
;; test-kill-storm continuation: immediate-kill path ("test3"), a kill while an
;; executor is still heartbeating, and kill-with-opts ("test4", .set_wait_secs 10 —
;; assignment survives 9 more seconds, is gone 2 seconds later).
(.stormBase state storm-id nil))) (.killTopology (.getNimbus cluster) "2test") (is (thrown? AlreadyAliveException (.submitTopology cluster "2test" {} topology))) (.advanceClusterTime cluster 11) (is (= 1 (count (.heartbeatStorms state)))) (.advanceClusterTime cluster 6) (is (nil? (.stormBase state storm-id nil))) (is (nil? (.assignmentInfo state storm-id nil))) (.advanceClusterTime cluster 11) (is (= 0 (count (.heartbeatStorms state)))) (.submitTopology cluster "test3" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 5} topology) (bind storm-id3 (StormCommon/getStormId state "test3")) (.advanceClusterTime cluster 11) ;; this guarantees an immediate kill notification (.killTopology (.getNimbus cluster) "test3") (.advanceClusterTime cluster 41) (is (nil? (.stormBase state storm-id3 nil))) (is (nil? (.assignmentInfo state storm-id3 nil))) (is (= 0 (count (.heartbeatStorms state)))) ;; this guarantees that monitor thread won't trigger for 10 more seconds (Time/advanceTimeSecs 11) (.waitForIdle cluster) (.submitTopology cluster "test3" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 5} topology) (bind storm-id3 (StormCommon/getStormId state "test3")) (.advanceClusterTime cluster 11) (bind executor-id (first (topology-executors cluster storm-id3))) (do-executor-heartbeat cluster storm-id3 executor-id) (.killTopology (.getNimbus cluster) "test3") (.advanceClusterTime cluster 6) (is (= 1 (count (.heartbeatStorms state)))) (.advanceClusterTime cluster 5) (is (= 0 (count (.heartbeatStorms state)))) ;; test kill with opts (.submitTopology cluster "test4" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 100} topology) (.advanceClusterTime cluster 11) (.killTopologyWithOpts (.getNimbus cluster) "test4" (doto (KillOptions.) (.set_wait_secs 10))) (bind storm-id4 (StormCommon/getStormId state "test4")) (.advanceClusterTime cluster 9) (is (not-nil? (.assignmentInfo state storm-id4 nil))) (.advanceClusterTime cluster 2) (is (nil? 
;; test-kill-storm tail, then test-reassignment: two-executor topology; heartbeats
;; keep assignments stable through successive time advances (the many log-message
;; bind forms trace assignment state at each simulated timestamp t0..t76), until a
;; missed heartbeat forces executor-id2 to be reassigned; also covers launch timeout
;; when the active supervisor is killed. Continues on the next physical line.
(.assignmentInfo state storm-id4 nil)) ))) (deftest test-reassignment (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 2) (.withPortsPerSupervisor 5) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-TASK-LAUNCH-SECS 60 NIMBUS-TASK-TIMEOUT-SECS 20 NIMBUS-MONITOR-FREQ-SECS 10 NIMBUS-SUPERVISOR-TIMEOUT-SECS 100 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind conf (.getDaemonConf cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 2))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 2} topology) (.advanceClusterTime cluster 11) (check-consistency cluster "test") (bind storm-id (StormCommon/getStormId state "test")) (bind [executor-id1 executor-id2] (topology-executors cluster storm-id)) (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (bind _ (log-message "ass1, t0: " (pr-str ass1))) (bind _ (log-message "ass2, t0: " (pr-str ass2))) (.advanceClusterTime cluster 30) (bind _ (log-message "ass1, t30, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t30, pre beat: " (pr-str ass2))) (do-executor-heartbeat cluster storm-id executor-id1) (do-executor-heartbeat cluster storm-id executor-id2) (bind _ (log-message "ass1, t30, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t30, post beat: " (pr-str ass2))) (.advanceClusterTime cluster 13) (bind _ (log-message "ass1, t43, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t43, pre beat: " (pr-str ass2))) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (is (= ass2 (executor-assignment cluster storm-id executor-id2))) (do-executor-heartbeat cluster storm-id executor-id1) (bind _ (log-message "ass1, t43, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t43, post beat: " (pr-str ass2))) (.advanceClusterTime cluster 11) 
;; test-reassignment continuation: only executor-id1 keeps heartbeating, so after the
;; timeout window executor-id2's assignment changes while ass1 stays put; killing the
;; supervisor hosting ass2 while both executors heartbeat verifies heartbeats prevent
;; reassignment even past supervisor timeout; killing all supervisors empties the
;; assignment, and adding one back restores it.
(bind _ (log-message "ass1, t54, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t54, pre beat: " (pr-str ass2))) (do-executor-heartbeat cluster storm-id executor-id1) (bind _ (log-message "ass1, t54, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t54, post beat: " (pr-str ass2))) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (check-consistency cluster "test") ; have to wait an extra 10 seconds because nimbus may not ; resynchronize its heartbeat time till monitor-time secs after (.advanceClusterTime cluster 11) (bind _ (log-message "ass1, t65, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t65, pre beat: " (pr-str ass2))) (do-executor-heartbeat cluster storm-id executor-id1) (bind _ (log-message "ass1, t65, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t65, post beat: " (pr-str ass2))) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (check-consistency cluster "test") (.advanceClusterTime cluster 11) (bind _ (log-message "ass1, t76, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t76, pre beat: " (pr-str ass2))) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (is (not= ass2 (executor-assignment cluster storm-id executor-id2))) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (bind _ (log-message "ass1, t76, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t76, post beat: " (pr-str ass2))) (check-consistency cluster "test") (.advanceClusterTime cluster 31) (is (not= ass1 (executor-assignment cluster storm-id executor-id1))) (is (= ass2 (executor-assignment cluster storm-id executor-id2))) ; tests launch timeout (check-consistency cluster "test") (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind active-supervisor (.get_node ass2)) (.killSupervisor cluster active-supervisor) (doseq [i (range 12)] (do-executor-heartbeat cluster storm-id executor-id1) (do-executor-heartbeat cluster storm-id executor-id2) (.advanceClusterTime 
;; test-reassignment tail (supervisor-death recovery assertions), then
;; test-reassignment-to-constrained-cluster opens: a 0-supervisor cluster to which
;; two named single-port supervisors "a"/"b" are added; its spout builder completes
;; on the next physical line.
cluster 10) ) ;; tests that it doesn't reassign executors if they're heartbeating even if supervisor times out (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (is (= ass2 (executor-assignment cluster storm-id executor-id2))) (check-consistency cluster "test") (.advanceClusterTime cluster 30) (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (is (not-nil? ass1)) (is (not-nil? ass2)) (is (not= active-supervisor (.get_node (executor-assignment cluster storm-id executor-id2)))) (is (not= active-supervisor (.get_node (executor-assignment cluster storm-id executor-id1)))) (check-consistency cluster "test") (doseq [supervisor-id (.supervisors state nil)] (.killSupervisor cluster supervisor-id)) (.advanceClusterTime cluster 90) (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (is (nil? ass1)) (is (nil? ass2)) (check-consistency cluster "test" :assigned? false) (.addSupervisor cluster) (.advanceClusterTime cluster 11) (check-consistency cluster "test") ))) (deftest test-reassignment-to-constrained-cluster (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 0) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-TASK-LAUNCH-SECS 60 NIMBUS-TASK-TIMEOUT-SECS 20 NIMBUS-MONITOR-FREQ-SECS 10 NIMBUS-SUPERVISOR-TIMEOUT-SECS 100 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (.addSupervisor cluster 1 "a") (.addSupervisor cluster 1 "b") (bind conf (.getDaemonConf cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 
;; test-reassignment-to-constrained-cluster tail: after killing the supervisor for
;; ass2, only executor-id1's heartbeats survive, and with no spare slots the topology
;; ends up with a single worker. Then two small assertion helpers:
;;   check-executor-distribution - delegates to check-distribution over slot values.
;;   check-num-nodes             - asserts the number of distinct nodes in slots.
;; The final form, test-reassign-squeezed-topology, is truncated at the end of this
;; chunk — do not edit it without the remainder of the file.
2))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 2} topology) (.advanceClusterTime cluster 11) (check-consistency cluster "test") (bind storm-id (StormCommon/getStormId state "test")) (bind [executor-id1 executor-id2] (topology-executors cluster storm-id)) (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (.advanceClusterTime cluster 30) (do-executor-heartbeat cluster storm-id executor-id1) (do-executor-heartbeat cluster storm-id executor-id2) (.advanceClusterTime cluster 13) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (is (= ass2 (executor-assignment cluster storm-id executor-id2))) ;; with rpc reporting mode, only heartbeats from killed supervisor will time out (.killSupervisor cluster (.get_node ass2)) (do-executor-heartbeat cluster storm-id executor-id1) (.advanceClusterTime cluster 11) (do-executor-heartbeat cluster storm-id executor-id1) (.advanceClusterTime cluster 11) (do-executor-heartbeat cluster storm-id executor-id1) (.advanceClusterTime cluster 11) (do-executor-heartbeat cluster storm-id executor-id1) (.advanceClusterTime cluster 11) (do-executor-heartbeat cluster storm-id executor-id1) (check-consistency cluster "test") (is (= 1 (storm-num-workers state "test"))) ))) (defn check-executor-distribution [slot-executors distribution] (check-distribution (vals slot-executors) distribution)) (defn check-num-nodes [slot-executors num-nodes] (let [nodes (->> slot-executors keys (map (fn [np] (.get_node np))) set)] (is (= num-nodes (count nodes))) )) (deftest test-reassign-squeezed-topology (with-open [cluster (.build (doto (LocalCluster$Builder. 
) (.withSimulatedTime) (.withSupervisors 1) (.withPortsPerSupervisor 1) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-TASK-LAUNCH-SECS 60 NIMBUS-TASK-TIMEOUT-SECS 20 NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 9))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 4} topology) ; distribution should be 2, 2, 2, 3 ideally (.advanceClusterTime cluster 11) (bind storm-id (StormCommon/getStormId state "test")) (bind slot-executors (slot-assignments cluster storm-id)) (check-executor-distribution slot-executors [9]) (check-consistency cluster "test") (.addSupervisor cluster 2) (.advanceClusterTime cluster 11) (bind slot-executors (slot-assignments cluster storm-id)) (bind executor->start (executor-start-times cluster storm-id)) (check-executor-distribution slot-executors [3 3 3]) (check-consistency cluster "test") (.addSupervisor cluster 8) ;; this actually works for any time > 0, since zookeeper fires an event causing immediate reassignment ;; doesn't work for time = 0 because it's not waiting for cluster yet, so test might happen before reassignment finishes (.advanceClusterTime cluster 11) (bind slot-executors2 (slot-assignments cluster storm-id)) (bind executor->start2 (executor-start-times cluster storm-id)) (check-executor-distribution slot-executors2 [2 2 2 3]) (check-consistency cluster "test") (bind common (first (Utils/findOne (proxy [IPredicate] [] (test [[k v]] (= 3 (count v)))) slot-executors2))) (is (not-nil? 
common))
      (is (= (slot-executors2 common) (slot-executors common)))
      ;; check that start times are changed for everything but the common one
      (bind same-executors (slot-executors2 common))
      (bind changed-executors (apply concat (vals (dissoc slot-executors2 common))))
      (doseq [t same-executors]
        (is (= (executor->start t) (executor->start2 t))))
      (doseq [t changed-executors]
        (is (not= (executor->start t) (executor->start2 t))))
      )))

;; Exercises Nimbus.getOwnerResourceSummaries for 0, 1 and many topologies,
;; for the submitting owner explicitly, and for an owner with no topologies.
(deftest test-get-owner-resource-summaries
  (with-open [cluster (.build (doto (LocalCluster$Builder. )
                        (.withSimulatedTime)
                        (.withSupervisors 1)
                        (.withPortsPerSupervisor 12)
                        (.withDaemonConf {SUPERVISOR-ENABLE false
                                          NIMBUS-MONITOR-FREQ-SECS 10
                                          TOPOLOGY-MESSAGE-TIMEOUT-SECS 30
                                          TOPOLOGY-ACKER-EXECUTORS 0
                                          TOPOLOGY-EVENTLOGGER-EXECUTORS 0 })))]
    (letlocals
      ;;test for 0-topology case
      (.advanceClusterTime cluster 11)
      (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) nil))
      (bind summary (first owner-resource-summaries))
      (is (nil? summary))

      ;;test for 1-topology case
      (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {}))
      (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology)
      (.advanceClusterTime cluster 11)
      (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) nil))
      (bind summary (first owner-resource-summaries))
      ;; FIX: the executor/topology assertions in this test used to be written as
      ;; (is (= (.get_total_executors summary)) 3) -- a one-argument (= x), which
      ;; is always true, with the expected value silently consumed as the `is`
      ;; failure message. The misplaced closing paren made every one of those
      ;; checks vacuous; the expected value now sits inside the comparison so
      ;; the assertions can actually fail.
      (is (= (.get_total_workers summary) 3))
      (is (= (.get_total_executors summary) 3))
      (is (= (.get_total_topologies summary) 1))

      ;;test for many-topology case
      (bind topology2 (Thrift/buildTopology {"2" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 4))} {}))
      (bind topology3 (Thrift/buildTopology {"3" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 5))} {}))
      (.submitTopology cluster "test2" {TOPOLOGY-WORKERS 4 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology2)
      (.submitTopology cluster "test3" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology3)
      (.advanceClusterTime cluster 11)
      (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) nil))
      (bind summary (first owner-resource-summaries))
      ;; 3 + 4 + 3 workers and 3 + 4 + 5 executors across the 3 topologies
      (is (= (.get_total_workers summary) 10))
      (is (= (.get_total_executors summary) 12))
      (is (= (.get_total_topologies summary) 3))

      ;;test for specific owner
      (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) (System/getProperty "user.name")))
      (bind summary (first owner-resource-summaries))
      (is (= (.get_total_workers summary) 10))
      (is (= (.get_total_executors summary) 12))
      (is (= (.get_total_topologies summary) 3))

      ;;test for other user
      (bind other-user (str "not-" (System/getProperty "user.name")))
      (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) other-user))
      (bind summary (first owner-resource-summaries))
      (is (= (.get_total_workers summary) 0))
      (is (= (.get_total_executors summary) 0))
      (is (= (.get_total_topologies summary) 0))
      )))

(deftest test-rebalance
  (with-open [cluster (.build (doto (LocalCluster$Builder. )
                        (.withSimulatedTime)
                        (.withSupervisors 1)
                        (.withDaemonConf {SUPERVISOR-ENABLE false
                                          NIMBUS-MONITOR-FREQ-SECS 10
                                          TOPOLOGY-MESSAGE-TIMEOUT-SECS 30
                                          TOPOLOGY-ACKER-EXECUTORS 0
                                          TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))]
    (letlocals
      (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer.
3))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 60} topology) (.advanceClusterTime cluster 11) (bind storm-id (StormCommon/getStormId state "test")) (.addSupervisor cluster 3) (.addSupervisor cluster 3) (.advanceClusterTime cluster 11) (bind slot-executors (slot-assignments cluster storm-id)) ;; check that all workers are on one machine (check-executor-distribution slot-executors [1 1 1]) (check-num-nodes slot-executors 1) (.rebalance (.getNimbus cluster) "test" (RebalanceOptions.)) (.advanceClusterTime cluster 30) (check-executor-distribution slot-executors [1 1 1]) (check-num-nodes slot-executors 1) (.advanceClusterTime cluster 30) (bind slot-executors (slot-assignments cluster storm-id)) (check-executor-distribution slot-executors [1 1 1]) (check-num-nodes slot-executors 3) (is (thrown? InvalidTopologyException (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) (.set_num_executors {"1" (int 0)}) )))) ))) ;TODO: when translating this function, you should replace the map-val with a proper for loop HERE (deftest test-rebalance-change-parallelism (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 4) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 6) {TOPOLOGY-TASKS 12})} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30} topology) (.advanceClusterTime cluster 11) (bind storm-id (StormCommon/getStormId state "test")) (bind checker (fn [distribution] (check-executor-distribution (slot-assignments cluster storm-id) distribution))) (checker [2 2 2]) (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) 
(.set_num_workers (int 6)) )) (.advanceClusterTime cluster 29) (checker [2 2 2]) (.advanceClusterTime cluster 3) (checker [1 1 1 1 1 1]) (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) (.set_num_executors {"1" (int 1)}) )) (.advanceClusterTime cluster 29) (checker [1 1 1 1 1 1]) (.advanceClusterTime cluster 3) (checker [1]) (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) (.set_num_executors {"1" (int 8)}) (.set_num_workers 4) )) (.advanceClusterTime cluster 32) (checker [2 2 2 2]) (check-consistency cluster "test") (bind executor-info (->> (storm-component->executor-info cluster "test") (map-val #(map executor->tasks %)))) (check-distribution (executor-info "1") [2 2 2 2 1 1 1 1]) ))) (defn check-for-collisions [state] (log-message "Checking for collision") (let [assignments (.assignments state nil)] (log-message "Assignemts: " assignments) (let [id->node->ports (into {} (for [id assignments :let [executor->node+port (.get_executor_node_port (.assignmentInfo state id nil)) node+ports (set (.values executor->node+port)) node->ports (apply merge-with (fn [a b] (distinct (concat a b))) (for [np node+ports] {(.get_node np) [(first (.get_port np))]}))]] {id node->ports})) _ (log-message "id->node->ports: " id->node->ports) all-nodes (apply merge-with (fn [a b] (let [ret (concat a b)] (log-message "Can we combine " (pr-str a) " and " (pr-str b) " without collisions? " (apply distinct? ret) " => " (pr-str ret)) (is (apply distinct? ret)) (distinct ret))) (.values id->node->ports))] ))) (deftest test-rebalance-constrained-cluster (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 1) (.withPortsPerSupervisor 4) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 
3))} {})) (bind topology2 (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})) (bind topology3 (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology) (.submitTopology cluster "test2" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology2) (.submitTopology cluster "test3" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology3) (.advanceClusterTime cluster 11) (check-for-collisions state) (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) (.set_num_workers 4) (.set_wait_secs 0) )) (.advanceClusterTime cluster 11) (check-for-collisions state) (.advanceClusterTime cluster 30) (check-for-collisions state) ))) (deftest test-submit-invalid (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0 NIMBUS-EXECUTORS-PER-TOPOLOGY 8 NIMBUS-SLOTS-PER-TOPOLOGY 8})))] (letlocals (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 1) {TOPOLOGY-TASKS 1})} {})) (is (thrown? InvalidTopologyException (.submitTopology cluster "test/aaa" {} topology))) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 16) {TOPOLOGY-TASKS 16})} {})) (bind state (.getClusterState cluster)) (is (thrown? InvalidTopologyException (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3} topology))) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 5) {TOPOLOGY-TASKS 5})} {})) (is (thrown? InvalidTopologyException (.submitTopology cluster "test" {TOPOLOGY-WORKERS 16} topology)))))) (deftest test-clean-inbox "Tests that the inbox correctly cleans jar files." 
(with-open [_ (Time$SimulatedTime.) tmp-path (TmpPath. )] (let [dir-location (.getPath tmp-path) dir (File. dir-location) mk-file (fn [name seconds-ago] (let [f (File. (str dir-location "/" name)) t (- (Time/currentTimeMillis) (* seconds-ago 1000))] (FileUtils/touch f) (.setLastModified f t))) assert-files-in-dir (fn [compare-file-names] (let [file-names (map #(.getName %) (file-seq dir))] (is (= (sort compare-file-names) (sort (filter #(.endsWith % ".jar") file-names)) ))))] ;; Make three files a.jar, b.jar, c.jar. ;; a and b are older than c and should be deleted first. (Time/advanceTimeSecs 100) (doseq [fs [["a.jar" 20] ["b.jar" 20] ["c.jar" 0]]] (apply mk-file fs)) (assert-files-in-dir ["a.jar" "b.jar" "c.jar"]) (Nimbus/cleanInbox dir-location 10) (assert-files-in-dir ["c.jar"]) ;; Cleanit again, c.jar should stay (Time/advanceTimeSecs 5) (Nimbus/cleanInbox dir-location 10) (assert-files-in-dir ["c.jar"]) ;; Advance time, clean again, c.jar should be deleted. (Time/advanceTimeSecs 5) (Nimbus/cleanInbox dir-location 10) (assert-files-in-dir []) ))) (defn wait-for-status [nimbus name status] (Testing/whileTimeout 5000 (reify Testing$Condition (exec [this] (let [topo-summary (first (filter (fn [topo] (= name (.get_name topo))) (.get_topologies (.getClusterInfo nimbus)))) topo-status (if topo-summary (.get_status topo-summary) "NOT-RUNNING")] (log-message "WAITING FOR "name" TO BE " status " CURRENT " topo-status) (not= topo-status status)))) (fn [] (Thread/sleep 100)))) (deftest test-leadership "Tests that leader actions can only be performed by master and non leader fails to perform the same actions." (with-open [zk (InProcessZookeeper. )] (with-open [tmp-nimbus-dir (TmpPath.) _ (MockedZookeeper. (proxy [Zookeeper] [] (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector. 
))))] (let [nimbus-dir (.getPath tmp-nimbus-dir)] (letlocals (bind conf (merge (clojurify-structure (ConfigUtils/readStormConfig)) {STORM-ZOOKEEPER-SERVERS ["localhost"] STORM-CLUSTER-MODE "local" STORM-ZOOKEEPER-PORT (.getPort zk) STORM-LOCAL-DIR nimbus-dir})) (bind ass-backend (LocalAssignmentsBackendFactory/getDefault)) (bind cluster-state (ClusterUtils/mkStormClusterState conf ass-backend (ClusterStateContext.))) (bind nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil cluster-state)) (.launchServer nimbus) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})) (with-open [_ (MockedZookeeper. (proxy [Zookeeper] [] (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector. false))))] (letlocals (bind non-leader-cluster-state (ClusterUtils/mkStormClusterState conf ass-backend (ClusterStateContext.))) (bind non-leader-nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil non-leader-cluster-state)) (.launchServer non-leader-nimbus) ;first we verify that the master nimbus can perform all actions, even with another nimbus present. (.submitTopology nimbus "t1" nil "{}" topology) ;; Instead of sleeping until topology is scheduled, rebalance topology so mk-assignments is called. (.rebalance nimbus "t1" (doto (RebalanceOptions.) (.set_wait_secs 0))) (wait-for-status nimbus "t1" "ACTIVE") (.deactivate nimbus "t1") (.activate nimbus "t1") (.rebalance nimbus "t1" (RebalanceOptions.)) (.killTopology nimbus "t1") ;now we verify that non master nimbus can not perform any of the actions. (is (thrown? RuntimeException (.submitTopology non-leader-nimbus "failing" nil "{}" topology))) (is (thrown? RuntimeException (.killTopology non-leader-nimbus "t1"))) (is (thrown? RuntimeException (.activate non-leader-nimbus "t1"))) (is (thrown? RuntimeException (.deactivate non-leader-nimbus "t1"))) (is (thrown? 
RuntimeException (.rebalance non-leader-nimbus "t1" (RebalanceOptions.)))) (.shutdown non-leader-nimbus) (.disconnect non-leader-cluster-state) )) (.shutdown nimbus) (.disconnect cluster-state)))))) (deftest test-nimbus-iface-submitTopologyWithOpts-checks-authorization (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.DenyAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.DenyAuthorizer"})))] (let [ topology (Thrift/buildTopology {} {}) ] (is (thrown? AuthorizationException (.submitTopologyWithOpts cluster "mystorm" {} topology (SubmitOptions. TopologyInitialStatus/INACTIVE)) )) ) ) ) (deftest test-nimbus-iface-methods-check-authorization (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.DenyAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.DenyAuthorizer"})))] (let [nimbus (.getNimbus cluster) topology-name "test" topology-id "test-id"] (.thenReturn (Mockito/when (.getTopoId cluster-state topology-name)) (Optional/of topology-id)) (is (thrown? AuthorizationException (.rebalance nimbus topology-name (RebalanceOptions.)))) (is (thrown? AuthorizationException (.activate nimbus topology-name))) (is (thrown? AuthorizationException (.deactivate nimbus topology-name))))))) (deftest test-nimbus-check-authorization-params (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. 
) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withNimbusWrapper (reify UnaryOperator (apply [this nimbus] (Mockito/spy nimbus)))) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (let [nimbus (.getNimbus cluster) topology-name "test-nimbus-check-autho-params" topology-id "fake-id" topology (Thrift/buildTopology {} {}) expected-name topology-name expected-conf {TOPOLOGY-NAME expected-name "foo" "bar"}] (.thenReturn (Mockito/when (.getTopoId cluster-state topology-name)) (Optional/of topology-id)) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/anyObject))) expected-conf) (.thenReturn (Mockito/when (.readTopology tc (Mockito/any String) (Mockito/anyObject))) nil) (testing "getTopologyConf calls check-authorization! with the correct parameters." (let [expected-operation "getTopologyConf"] (try (is (= expected-conf (->> (.getTopologyConf nimbus topology-id) JSONValue/parse clojurify-structure))) (catch NotAliveException e) (finally (.checkAuthorization (Mockito/verify nimbus) nil nil "getClusterInfo") (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq topology-name) (Mockito/any Map) (Mockito/eq expected-operation)))))) (testing "getTopology calls check-authorization! with the correct parameters." (let [expected-operation "getTopology" common-spy (->> (proxy [StormCommon] [] (systemTopologyImpl [conf topology] nil)) Mockito/spy)] (with-open [- (StormCommonInstaller. common-spy)] (try (.getTopology nimbus topology-id) (catch NotAliveException e) (finally (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq topology-name) (Mockito/any Map) (Mockito/eq expected-operation)) (. (Mockito/verify common-spy) (systemTopologyImpl (Matchers/any Map) (Matchers/any)))))))) (testing "getUserTopology calls check-authorization with the correct parameters." 
(let [expected-operation "getUserTopology"] (try (.getUserTopology nimbus topology-id) (catch NotAliveException e) (finally (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq topology-name) (Mockito/any Map) (Mockito/eq expected-operation)) ;;One for this time and one for getTopology call (.readTopology (Mockito/verify tc (Mockito/times 2)) (Mockito/eq topology-id) (Mockito/anyObject)))))))))) (deftest test-check-authorization-getSupervisorPageInfo (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withNimbusWrapper (reify UnaryOperator (apply [this nimbus] (Mockito/spy nimbus)))) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (let [nimbus (.getNimbus cluster) expected-name "test-nimbus-check-autho-params" expected-conf {TOPOLOGY-NAME expected-name TOPOLOGY-WORKERS 1 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30 "foo" "bar"} expected-operation "getTopology" assignment (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super1" #{1}), [2 2] (NodeInfo. "super2" #{2})})) topology (doto (StormTopology. ) (.set_spouts {}) (.set_bolts {}) (.set_state_spouts {})) topo-assignment {expected-name assignment} check-auth-state (atom []) mock-check-authorization (fn [nimbus storm-name storm-conf operation] (swap! check-auth-state conj {:nimbus nimbus :storm-name storm-name :storm-conf storm-conf :operation operation})) all-supervisors (doto (HashMap.) (.put "super1" (doto (SupervisorInfo.) (.set_hostname "host1") (.set_meta [(long 1234)]) (.set_uptime_secs (long 123)) (.set_meta [1 2 3]) (.set_used_ports []) (.set_resources_map {}))) (.put "super2" (doto (SupervisorInfo.) 
(.set_hostname "host2") (.set_meta [(long 1234)]) (.set_uptime_secs (long 123)) (.set_meta [1 2 3]) (.set_used_ports []) (.set_resources_map {}))))] (.thenReturn (Mockito/when (.allSupervisorInfo cluster-state)) all-supervisors) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/any Subject))) expected-conf) (.thenReturn (Mockito/when (.readTopology tc (Mockito/any String) (Mockito/any Subject))) topology) (.thenReturn (Mockito/when (.assignmentsInfo cluster-state)) topo-assignment) (.getSupervisorPageInfo nimbus "super1" nil true) ;; afterwards, it should get called twice (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq expected-name) (Mockito/any Map) (Mockito/eq "getSupervisorPageInfo")) (.checkAuthorization (Mockito/verify nimbus) nil nil "getClusterInfo") (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq expected-name) (Mockito/any Map) (Mockito/eq "getTopology")))))) (deftest test-nimbus-iface-getTopology-methods-throw-correctly (with-open [cluster (LocalCluster. )] (let [ nimbus (.getNimbus cluster) id "bogus ID" ] (is (thrown? NotAliveException (.getTopology nimbus id))) (try (.getTopology nimbus id) (catch NotAliveException e (is (= id (.get_msg e))) ) ) (is (thrown? NotAliveException (.getTopologyConf nimbus id))) (try (.getTopologyConf nimbus id) (catch NotAliveException e (is (= id (.get_msg e))) ) ) (is (thrown? NotAliveException (.getTopologyInfo nimbus id))) (try (.getTopologyInfo nimbus id) (catch NotAliveException e (is (= id (.get_msg e))) ) ) (is (thrown? NotAliveException (.getUserTopology nimbus id))) (try (.getUserTopology nimbus id) (catch NotAliveException e (is (= id (.get_msg e))) ) ) ) ) ) (defn mkStormBase [launch-time-secs storm-name status] (doto (StormBase.) 
(.set_name storm-name) (.set_launch_time_secs (int launch-time-secs)) (.set_status status))) (deftest test-nimbus-iface-getClusterInfo-filters-topos-without-bases (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withTopoCache tc) (.withBlobStore blob-store)))] (let [nimbus (.getNimbus cluster) bogus-secs 42 bogus-type TopologyStatus/ACTIVE bogus-bases { "1" nil "2" (mkStormBase bogus-secs "id2-name" bogus-type) "3" nil "4" (mkStormBase bogus-secs "id4-name" bogus-type) } topo-name "test-topo" topo-conf {TOPOLOGY-NAME topo-name TOPOLOGY-WORKERS 1 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30} storm-base (StormBase. ) topology (doto (StormTopology. ) (.set_spouts {}) (.set_bolts {}) (.set_state_spouts {})) ] (.thenReturn (Mockito/when (.stormBase cluster-state (Mockito/any String) (Mockito/anyObject))) storm-base) (.thenReturn (Mockito/when (.topologyBases cluster-state)) bogus-bases) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/any Subject))) topo-conf) (.thenReturn (Mockito/when (.readTopology tc (Mockito/any String) (Mockito/any Subject))) topology) (let [topos (.get_topologies (.getClusterInfo nimbus))] ; The number of topologies in the summary is correct. (is (= (count (filter (fn [b] (second b)) bogus-bases)) (count topos))) ; Each topology present has a valid name. (is (empty? (filter (fn [t] (or (nil? t) (nil? (.get_name t)))) topos))) ; The topologies are those with valid bases. (is (empty? (filter (fn [t] (or (nil? t) (not (number? (read-string (.get_id t)))) (odd? (read-string (.get_id t))) )) topos))) ) ) ) )) (deftest test-file-bogus-download (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [nimbus (.getNimbus cluster)] (is (thrown-cause? 
IllegalArgumentException (.beginFileDownload nimbus nil))) (is (thrown-cause? IllegalArgumentException (.beginFileDownload nimbus ""))) (is (thrown-cause? IllegalArgumentException (.beginFileDownload nimbus "/bogus-path/foo"))) ))) (deftest test-validate-topo-config-on-submit (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (.thenReturn (Mockito/when (.getTopoId cluster-state "test")) (Optional/empty)) (let [topology (Thrift/buildTopology {} {}) bad-config {"topology.isolate.machines" "2"}] (is (thrown-cause? InvalidTopologyException (.submitTopologyWithOpts cluster "test" bad-config topology (SubmitOptions.)))))))) (deftest test-stateless-with-scheduled-topology-to-be-killed ; tests regression of STORM-856 (with-open [zk (InProcessZookeeper. )] (with-open [tmp-nimbus-dir (TmpPath. )] (let [nimbus-dir (.getPath tmp-nimbus-dir)] (letlocals (bind conf (merge (clojurify-structure (ConfigUtils/readStormConfig)) {STORM-ZOOKEEPER-SERVERS ["localhost"] STORM-CLUSTER-MODE "local" STORM-ZOOKEEPER-PORT (.getPort zk) STORM-LOCAL-DIR nimbus-dir})) (bind cluster-state (ClusterUtils/mkStormClusterState conf (ClusterStateContext.))) (bind nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil nil)) (.launchServer nimbus) (Time/sleepSecs 1) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 
3))} {})) (.submitTopology nimbus "t1" nil (str "{\"" TOPOLOGY-MESSAGE-TIMEOUT-SECS "\": 30}") topology) ; make transition for topology t1 to be killed -> nimbus applies this event to cluster state (.killTopology nimbus "t1") ; shutdown nimbus immediately to achieve nimbus doesn't handle event right now (.shutdown nimbus) ; in startup of nimbus it reads cluster state and take proper actions ; in this case nimbus registers topology transition event to scheduler again ; before applying STORM-856 nimbus was killed with NPE (bind nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil nil)) (.launchServer nimbus) (.shutdown nimbus) (.disconnect cluster-state) ))))) (deftest test-topology-action-notifier (with-open [zk (InProcessZookeeper. )] (with-open [tmp-nimbus-dir (TmpPath.) _ (MockedZookeeper. (proxy [Zookeeper] [] (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector. ))))] (let [nimbus-dir (.getPath tmp-nimbus-dir)] (letlocals (bind conf (merge (clojurify-structure (ConfigUtils/readStormConfig)) {STORM-ZOOKEEPER-SERVERS ["localhost"] STORM-CLUSTER-MODE "local" STORM-ZOOKEEPER-PORT (.getPort zk) STORM-LOCAL-DIR nimbus-dir NIMBUS-TOPOLOGY-ACTION-NOTIFIER-PLUGIN (.getName InMemoryTopologyActionNotifier)})) (bind cluster-state (ClusterUtils/mkStormClusterState conf (ClusterStateContext.))) (bind nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil nil)) (.launchServer nimbus) (bind notifier (InMemoryTopologyActionNotifier.)) (Time/sleepSecs 1) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})) (.submitTopology nimbus "test-notification" nil (str "{\"" TOPOLOGY-MESSAGE-TIMEOUT-SECS "\": 30}") topology) (.deactivate nimbus "test-notification") (.activate nimbus "test-notification") (.rebalance nimbus "test-notification" (doto (RebalanceOptions.) (.set_wait_secs 0))) (.killTopologyWithOpts nimbus "test-notification" (doto (KillOptions.) 
(.set_wait_secs 0)))
      (.shutdown nimbus)
      ;; ensure notifier was invoked for each action, and in the correct order.
      (is (= ["submitTopology", "activate", "deactivate", "activate", "rebalance", "killTopology"]
             (.getTopologyActions notifier "test-notification")))
      (.disconnect cluster-state)
      )))))

;; Smoke test: enabling debug sampling on a single named component ("spout")
;; of a running topology must be accepted by nimbus without throwing.
(deftest test-debug-on-component
  (with-open [cluster (LocalCluster. )]
    (let [nimbus (.getNimbus cluster)
          topology (Thrift/buildTopology
                     {"spout" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))}
                     {})]
      (.submitTopology cluster "t1" {TOPOLOGY-WORKERS 1} topology)
      ;; args: topology name, component id, enable flag, sampling percentage
      (.debug nimbus "t1" "spout" true 100))))

;; Same as above but with an empty component name, which targets the
;; topology as a whole rather than a single component.
(deftest test-debug-on-global
  (with-open [cluster (LocalCluster. )]
    (let [nimbus (.getNimbus cluster)
          topology (Thrift/buildTopology
                     {"spout" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))}
                     {})]
      (.submitTopology cluster "t1" {TOPOLOGY-WORKERS 1} topology)
      (.debug nimbus "t1" "" true 100))))

;; if the user sends an empty log config, nimbus will say that all
;; log configs it contains are LogLevelAction/UNCHANGED
(deftest empty-save-config-results-in-all-unchanged-actions
  (let [cluster-state (Mockito/mock IStormClusterState)
        blob-store (Mockito/mock BlobStore)
        tc (Mockito/mock TopoCache)]
    (with-open [cluster (.build (doto (LocalCluster$Builder. )
                          (.withClusterState cluster-state)
                          (.withBlobStore blob-store)
                          (.withTopoCache tc)
                          (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"
                                            SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))]
      (let [nimbus (.getNimbus cluster)
            previous-config (LogConfig.)
            mock-config (LogConfig.)
            expected-config (LogConfig.)]
        ;; send something with content to nimbus beforehand
        (.put_to_named_logger_level previous-config "test"
          (doto (LogLevel.)
            (.set_target_log_level "ERROR")
            (.set_action LogLevelAction/UPDATE)))
        ;; after an empty save, the pre-existing level must come back flagged UNCHANGED
        (.put_to_named_logger_level expected-config "test"
          (doto (LogLevel.)
(.set_target_log_level "ERROR") (.set_action LogLevelAction/UNCHANGED))) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/anyObject))) {}) (.thenReturn (Mockito/when (.topologyLogConfig cluster-state (Mockito/any String) (Mockito/anyObject))) previous-config) (.setLogConfig nimbus "foo" mock-config) (.setTopologyLogConfig (Mockito/verify cluster-state) (Mockito/any String) (Mockito/eq expected-config) (Mockito/any Map)))))) (deftest log-level-update-merges-and-flags-existent-log-level (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (let [nimbus (.getNimbus cluster) previous-config (LogConfig.) mock-config (LogConfig.) expected-config (LogConfig.)] ;; send something with content to nimbus beforehand (.put_to_named_logger_level previous-config "test" (doto (LogLevel.) (.set_target_log_level "ERROR") (.set_action LogLevelAction/UPDATE))) (.put_to_named_logger_level previous-config "other-test" (doto (LogLevel.) (.set_target_log_level "DEBUG") (.set_action LogLevelAction/UPDATE))) ;; only change "test" (.put_to_named_logger_level mock-config "test" (doto (LogLevel.) (.set_target_log_level "INFO") (.set_action LogLevelAction/UPDATE))) (.put_to_named_logger_level expected-config "test" (doto (LogLevel.) (.set_target_log_level "INFO") (.set_action LogLevelAction/UPDATE))) (.put_to_named_logger_level expected-config "other-test" (doto (LogLevel.) 
(.set_target_log_level "DEBUG") (.set_action LogLevelAction/UNCHANGED))) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/anyObject))) {}) (.thenReturn (Mockito/when (.topologyLogConfig cluster-state (Mockito/any String) (Mockito/anyObject))) previous-config) (.setLogConfig nimbus "foo" mock-config) (.setTopologyLogConfig (Mockito/verify cluster-state) (Mockito/any String) (Mockito/eq expected-config) (Mockito/any Map)))))) (defn mock-cluster-state ([] (mock-cluster-state nil nil)) ([active-topos inactive-topos] (mock-cluster-state active-topos inactive-topos inactive-topos inactive-topos inactive-topos)) ([active-topos hb-topos error-topos bp-topos] (mock-cluster-state active-topos hb-topos error-topos bp-topos nil)) ([active-topos hb-topos error-topos bp-topos wt-topos] (let [cluster-state (Mockito/mock IStormClusterState)] (.thenReturn (Mockito/when (.activeStorms cluster-state)) active-topos) (.thenReturn (Mockito/when (.heartbeatStorms cluster-state)) hb-topos) (.thenReturn (Mockito/when (.errorTopologies cluster-state)) error-topos) (.thenReturn (Mockito/when (.backpressureTopologies cluster-state)) bp-topos) (.thenReturn (Mockito/when (.idsOfTopologiesWithPrivateWorkerKeys cluster-state)) (into #{} wt-topos)) cluster-state))) (deftest cleanup-storm-ids-returns-inactive-topos (let [mock-state (mock-cluster-state (list "topo1") (list "topo1" "topo2" "topo3")) store (Mockito/mock BlobStore)] (.thenReturn (Mockito/when (.storedTopoIds store)) #{}) (is (= (Nimbus/topoIdsToClean mock-state store {NIMBUS-TOPOLOGY-BLOBSTORE-DELETION-DELAY-MS 0}) #{"topo2" "topo3"})))) (deftest cleanup-storm-ids-performs-union-of-storm-ids-with-active-znodes (let [active-topos (list "hb1" "e2" "bp3") hb-topos (list "hb1" "hb2" "hb3") error-topos (list "e1" "e2" "e3") bp-topos (list "bp1" "bp2" "bp3") mock-state (mock-cluster-state active-topos hb-topos error-topos bp-topos) store (Mockito/mock BlobStore)] (.thenReturn (Mockito/when (.storedTopoIds store)) 
#{}) (is (= (Nimbus/topoIdsToClean mock-state store {NIMBUS-TOPOLOGY-BLOBSTORE-DELETION-DELAY-MS 0}) #{"hb2" "hb3" "e1" "e3" "bp1" "bp2"})))) (deftest cleanup-storm-ids-returns-empty-set-when-all-topos-are-active (let [active-topos (list "hb1" "hb2" "hb3" "e1" "e2" "e3" "bp1" "bp2" "bp3") hb-topos (list "hb1" "hb2" "hb3") error-topos (list "e1" "e2" "e3") bp-topos (list "bp1" "bp2" "bp3") mock-state (mock-cluster-state active-topos hb-topos error-topos bp-topos) store (Mockito/mock BlobStore)] (.thenReturn (Mockito/when (.storedTopoIds store)) #{}) (is (= (Nimbus/topoIdsToClean mock-state store (new java.util.HashMap)) #{})))) (deftest do-cleanup-removes-inactive-znodes (let [inactive-topos (list "topo2" "topo3") hb-cache (into {}(map vector inactive-topos '(nil nil))) mock-state (mock-cluster-state) mock-blob-store (Mockito/mock BlobStore) conf {NIMBUS-MONITOR-FREQ-SECS 10 NIMBUS-TOPOLOGY-BLOBSTORE-DELETION-DELAY-MS 0}] (with-open [_ (MockedZookeeper. (proxy [Zookeeper] [] (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector. ))))] (let [nimbus (Mockito/spy (Nimbus. conf nil mock-state nil mock-blob-store nil nil))] (.set (.getHeartbeatsCache nimbus) hb-cache) (.thenReturn (Mockito/when (.storedTopoIds mock-blob-store)) (HashSet. 
inactive-topos)) (.doCleanup nimbus) ;; removed heartbeats znode (.teardownHeartbeats (Mockito/verify mock-state) "topo2") (.teardownHeartbeats (Mockito/verify mock-state) "topo3") ;; removed topo errors znode (.teardownTopologyErrors (Mockito/verify mock-state) "topo2") (.teardownTopologyErrors (Mockito/verify mock-state) "topo3") ;; removed topo directories (.forceDeleteTopoDistDir (Mockito/verify nimbus) "topo2") (.forceDeleteTopoDistDir (Mockito/verify nimbus) "topo3") ;; removed blob store topo keys (.rmTopologyKeys (Mockito/verify nimbus) "topo2") (.rmTopologyKeys (Mockito/verify nimbus) "topo3") ;; removed topology dependencies (.rmDependencyJarsInTopology (Mockito/verify nimbus) "topo2") (.rmDependencyJarsInTopology (Mockito/verify nimbus) "topo3") ;; remove topos from heartbeat cache (is (= (count (.get (.getHeartbeatsCache nimbus))) 0)))))) (deftest do-cleanup-does-not-teardown-active-topos (let [inactive-topos () hb-cache {"topo1" nil "topo2" nil} mock-state (mock-cluster-state) mock-blob-store (Mockito/mock BlobStore) conf {NIMBUS-MONITOR-FREQ-SECS 10}] (with-open [_ (MockedZookeeper. (proxy [Zookeeper] [] (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector. ))))] (let [nimbus (Mockito/spy (Nimbus. conf nil mock-state nil mock-blob-store nil nil))] (.set (.getHeartbeatsCache nimbus) hb-cache) (.thenReturn (Mockito/when (.storedTopoIds mock-blob-store)) (set inactive-topos)) (.doCleanup nimbus) (.teardownHeartbeats (Mockito/verify mock-state (Mockito/never)) (Mockito/any)) (.teardownTopologyErrors (Mockito/verify mock-state (Mockito/never)) (Mockito/any)) (.forceDeleteTopoDistDir (Mockito/verify nimbus (Mockito/times 0)) (Mockito/anyObject)) (.rmTopologyKeys (Mockito/verify nimbus (Mockito/times 0)) (Mockito/anyObject)) ;; hb-cache goes down to 1 because only one topo was inactive (is (= (count (.get (.getHeartbeatsCache nimbus))) 2)) (is (contains? (.get (.getHeartbeatsCache nimbus)) "topo1")) (is (contains? 
(.get (.getHeartbeatsCache nimbus)) "topo2")))))) (deftest user-topologies-for-supervisor (let [assignment (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super1" #{1}), [2 2] (NodeInfo. "super2" #{2})})) assignment2 (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super2" #{2}), [2 2] (NodeInfo. "super2" #{2})})) assignments {"topo1" assignment, "topo2" assignment2} mock-state (mock-cluster-state) mock-blob-store (Mockito/mock BlobStore) mock-tc (Mockito/mock TopoCache) nimbus (Nimbus. {NIMBUS-MONITOR-FREQ-SECS 10} nil mock-state nil mock-blob-store mock-tc (MockLeaderElector. ) nil)] (let [supervisor1-topologies (clojurify-structure (Nimbus/topologiesOnSupervisor assignments "super1")) user1-topologies (clojurify-structure (.filterAuthorized nimbus "getTopology" supervisor1-topologies)) supervisor2-topologies (clojurify-structure (Nimbus/topologiesOnSupervisor assignments "super2")) user2-topologies (clojurify-structure (.filterAuthorized nimbus "getTopology" supervisor2-topologies))] (is (= (list "topo1") supervisor1-topologies)) (is (= #{"topo1"} user1-topologies)) (is (= (list "topo1" "topo2") supervisor2-topologies)) (is (= #{"topo1" "topo2"} user2-topologies))))) (deftest user-topologies-for-supervisor-with-unauthorized-user (let [assignment (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super1" #{1}), [2 2] (NodeInfo. "super2" #{2})})) assignment2 (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super1" #{2}), [2 2] (NodeInfo. "super2" #{2})})) assignments {"topo1" assignment, "authorized" assignment2} mock-state (mock-cluster-state) mock-blob-store (Mockito/mock BlobStore) mock-tc (Mockito/mock TopoCache) nimbus (Nimbus. {NIMBUS-MONITOR-FREQ-SECS 10} nil mock-state nil mock-blob-store mock-tc (MockLeaderElector. 
) nil)] (.thenReturn (Mockito/when (.readTopoConf mock-tc (Mockito/eq "authorized") (Mockito/anyObject))) {TOPOLOGY-NAME "authorized"}) (.thenReturn (Mockito/when (.readTopoConf mock-tc (Mockito/eq "topo1") (Mockito/anyObject))) {TOPOLOGY-NAME "topo1"}) (.setAuthorizationHandler nimbus (reify IAuthorizer (permit [this context operation topo-conf] (= "authorized" (get topo-conf TOPOLOGY-NAME))))) (let [supervisor-topologies (clojurify-structure (Nimbus/topologiesOnSupervisor assignments "super1")) user-topologies (clojurify-structure (.filterAuthorized nimbus "getTopology" supervisor-topologies))] (is (= (list "topo1" "authorized") supervisor-topologies)) (is (= #{"authorized"} user-topologies)))))
;; NOTE(review): stray top-level `true` — almost certainly a file-concatenation or
;; load-marker artifact. It is harmless (evaluates to true and the value is
;; discarded) but should be removed in a cleanup pass.
true
;; Licensed to the Apache Software Foundation (ASF) under one ;; or more contributor license agreements. See the NOTICE file ;; distributed with this work for additional information ;; regarding copyright ownership. The ASF licenses this file ;; to you under the Apache License, Version 2.0 (the ;; "License"); you may not use this file except in compliance ;; with the License. You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. (ns org.apache.storm.nimbus-test (:use [clojure test]) (:require [org.apache.storm [util :as util]]) (:import [java.util.function UnaryOperator]) (:import [org.apache.storm.testing InProcessZookeeper MockLeaderElector TestWordCounter TestWordSpout TestGlobalCount TestAggregatesCounter TestPlannerSpout TestPlannerBolt] [org.apache.storm.blobstore BlobStore] [org.apache.storm.nimbus InMemoryTopologyActionNotifier] [org.apache.storm.daemon.nimbus TopoCache Nimbus Nimbus$StandaloneINimbus] [org.apache.storm.generated GlobalStreamId TopologyStatus SupervisorInfo StormTopology StormBase] [org.apache.storm LocalCluster LocalCluster$Builder Thrift MockAutoCred Testing Testing$Condition] [org.apache.storm.stats BoltExecutorStats StatsUtil] [org.apache.storm.security.auth IGroupMappingServiceProvider IAuthorizer]) (:import [org.apache.storm.testing.staticmocking MockedZookeeper]) (:import [org.apache.storm.testing TmpPath]) (:import [org.apache.storm.scheduler INimbus]) (:import [org.mockito Mockito Matchers]) (:import [org.mockito.exceptions.base MockitoAssertionError]) (:import [org.apache.storm.nimbus ILeaderElector NimbusInfo]) (:import [org.apache.storm.testing.staticmocking MockedCluster]) 
(:import [org.apache.storm.generated Credentials NotAliveException SubmitOptions TopologyInitialStatus TopologyStatus AlreadyAliveException KillOptions RebalanceOptions InvalidTopologyException AuthorizationException LogConfig LogLevel LogLevelAction Assignment NodeInfo]) (:import [java.util Map HashMap HashSet Optional]) (:import [java.io File]) (:import [javax.security.auth Subject]) (:import [org.apache.storm.utils Time Time$SimulatedTime IPredicate StormCommonInstaller Utils$UptimeComputer ReflectionUtils Utils ConfigUtils ServerConfigUtils] [org.apache.storm.utils.staticmocking ServerConfigUtilsInstaller ReflectionUtilsInstaller UtilsInstaller]) (:import [org.apache.storm.zookeeper Zookeeper]) (:import [org.apache.commons.io FileUtils]) (:import [org.json.simple JSONValue]) (:import [org.apache.storm.daemon StormCommon]) (:import [org.apache.storm.cluster IStormClusterState StormClusterStateImpl ClusterStateContext ClusterUtils] [org.apache.storm.assignments LocalAssignmentsBackendFactory]) (:use [org.apache.storm util daemon-config config log]) (:require [conjure.core]) (:use [conjure core])) (def ^:dynamic *STORM-CONF* (clojurify-structure (ConfigUtils/readStormConfig))) (defn- mk-nimbus ([conf inimbus] (mk-nimbus conf inimbus nil nil nil nil)) ([conf inimbus blob-store leader-elector group-mapper cluster-state] ;blacklist scheduler requires nimbus-monitor-freq-secs as input parameter. (let [conf-with-nimbus-monitor-freq (merge {NIMBUS-MONITOR-FREQ-SECS 10} conf)] (Nimbus. 
conf-with-nimbus-monitor-freq inimbus cluster-state nil blob-store nil leader-elector group-mapper))))

;; Parse a JSON string into Clojure data structures; nil-safe (nil in -> nil out).
(defn- from-json [^String str]
  (if str
    (clojurify-structure (JSONValue/parse str))
    nil))

;; Map of component id -> task ids for the named topology, derived from the
;; user topology and its submitted topology conf.
(defn storm-component->task-info [cluster storm-name]
  (let [storm-id (StormCommon/getStormId (.getClusterState cluster) storm-name)
        nimbus (.getNimbus cluster)]
    (-> (.getUserTopology nimbus storm-id)
        (#(StormCommon/stormTaskInfo % (from-json (.getTopologyConf nimbus storm-id))))
        (Utils/reverseMap)
        clojurify-structure)))

;; Credentials map stored in cluster state for the named topology, or nil when
;; no credentials node exists for it.
(defn getCredentials [cluster storm-name]
  (let [storm-id (StormCommon/getStormId (.getClusterState cluster) storm-name)
        creds (.credentials (.getClusterState cluster) storm-id nil)]
    (if creds (into {} (.get_creds creds)))))

;; Map of component id -> executor ids for the named topology, computed by
;; mapping each assigned executor to its component and reversing the result.
(defn storm-component->executor-info [cluster storm-name]
  (let [storm-id (StormCommon/getStormId (.getClusterState cluster) storm-name)
        nimbus (.getNimbus cluster)
        storm-conf (from-json (.getTopologyConf nimbus storm-id))
        topology (.getUserTopology nimbus storm-id)
        task->component (clojurify-structure (StormCommon/stormTaskInfo topology storm-conf))
        state (.getClusterState cluster)
        ;; an executor id is [start-task end-task]; its component is the
        ;; component of its first task
        get-component (comp task->component first)]
    (->> (.assignmentInfo state storm-id nil)
         .get_executor_node_port
         .keySet
         clojurify-structure
         (map (fn [e] {e (get-component e)}))
         (apply merge)
         (Utils/reverseMap)
         clojurify-structure)))

;; Number of distinct worker slots (node+port pairs) the topology occupies.
(defn storm-num-workers [state storm-name]
  (let [storm-id (StormCommon/getStormId state storm-name)
        assignment (.assignmentInfo state storm-id nil)]
    (.size (Utils/reverseMap (.get_executor_node_port assignment)))))

;; Set of node ids that the topology's executors are assigned to.
(defn topology-nodes [state storm-name]
  (let [storm-id (StormCommon/getStormId state storm-name)
        assignment (.assignmentInfo state storm-id nil)]
    (->> assignment
         .get_executor_node_port
         .values
         (map (fn [np] (.get_node np)))
         set)))

;; Set of NodeInfo slots occupied by the topology (form continues on the next
;; source line, which supplies the closing parens).
(defn topology-slots [state storm-name]
  (let [storm-id (StormCommon/getStormId state storm-name)
        assignment (.assignmentInfo state storm-id nil)]
    (->> assignment
         .get_executor_node_port
         .values
         set
)))

;TODO: when translating this function, don't call map-val, but instead use an inline for loop.
; map-val is a temporary kluge for clojure.
;; Worker distribution per node for the topology, as a map of
;; {workers-on-a-node -> number-of-nodes-with-that-many-workers}.
(defn topology-node-distribution [state storm-name]
  (let [storm-id (StormCommon/getStormId state storm-name)
        assignment (.assignmentInfo state storm-id nil)]
    (->> assignment
         .get_executor_node_port
         .values
         set
         (group-by (fn [np] (.get_node np)))
         (map-val count)
         (map (fn [[_ amt]] {amt 1}))
         (apply merge-with +))))

;; Count of distinct nodes hosting the topology.
(defn topology-num-nodes [state storm-name]
  (count (topology-nodes state storm-name)))

;; NodeInfo (node+port) currently assigned to the given executor id.
(defn executor-assignment [cluster storm-id executor-id]
  (let [state (.getClusterState cluster)
        assignment (.assignmentInfo state storm-id nil)]
    (.get (.get_executor_node_port assignment) executor-id)))

;; Map of executor id -> start time (secs) from the topology's current assignment.
(defn executor-start-times [cluster storm-id]
  (let [state (.getClusterState cluster)
        assignment (.assignmentInfo state storm-id nil)]
    (clojurify-structure (.get_executor_start_time_secs assignment))))

;; Record a worker heartbeat for `executor`: merges fresh BoltExecutorStats
;; into any executor-stats already present in the current ZK heartbeat, writes
;; the merged heartbeat back to cluster state, and also sends an RPC worker
;; heartbeat to nimbus. (Body continues on the next source line.)
(defn do-executor-heartbeat [cluster storm-id executor]
  (let [state (.getClusterState cluster)
        executor->node+port (.get_executor_node_port (.assignmentInfo state storm-id nil))
        np (.get executor->node+port executor)
        node (.get_node np)
        port (first (.get_port np))
        curr-beat (StatsUtil/convertZkWorkerHb (.getWorkerHeartbeat state storm-id node port))
        ;; reuse existing executor-stats from the current heartbeat, if any
        stats (if (get curr-beat "executor-stats")
                (get curr-beat "executor-stats")
                (HashMap.))]
    (log-warn "curr-beat:" (prn-str curr-beat) ",stats:" (prn-str stats))
    (log-warn "stats type:" (type stats))
    (.put stats (StatsUtil/convertExecutor executor)
          (.renderStats (BoltExecutorStats.
20 (*STORM-CONF* NUM-STAT-BUCKETS))))
    (log-warn "merged:" stats)
    (.workerHeartbeat state storm-id node port
                      (StatsUtil/thriftifyZkWorkerHb (StatsUtil/mkZkWorkerHb storm-id stats (int 10))))
    (.sendSupervisorWorkerHeartbeat (.getNimbus cluster) (StatsUtil/thriftifyRPCWorkerHb storm-id executor))))

;; Reverse map of node+port -> executor ids for the topology's current assignment.
(defn slot-assignments [cluster storm-id]
  (let [state (.getClusterState cluster)
        assignment (.assignmentInfo state storm-id nil)]
    (clojurify-structure (Utils/reverseMap (.get_executor_node_port assignment)))))

;; All task ids of a topology, derived from the user topology and its conf.
(defn task-ids [cluster storm-id]
  (let [nimbus (.getNimbus cluster)]
    (-> (.getUserTopology nimbus storm-id)
        (#(StormCommon/stormTaskInfo % (from-json (.getTopologyConf nimbus storm-id))))
        clojurify-structure
        keys)))

;; Executor ids currently assigned for the topology (logged for debugging).
(defn topology-executors [cluster storm-id]
  (let [state (.getClusterState cluster)
        assignment (.assignmentInfo state storm-id nil)
        ret-keys (keys (.get_executor_node_port assignment))
        _ (log-message "ret-keys: " (pr-str ret-keys))]
    ret-keys))

;; Assert that the multiset of collection sizes in `items` matches `distribution`.
(defn check-distribution [items distribution]
  (let [counts (map long (map count items))]
    (is (Testing/multiseteq counts (map long distribution)))))

;; True when the given sets share no elements (total size equals distinct size).
(defn disjoint? [& sets]
  (let [combined (apply concat sets)]
    (= (count combined) (count (set combined)))))

;; Expand an executor id ([start-task end-task]) into the seq of task ids it covers.
;; FIX: the original body was two separate expressions —
;;   clojurify-structure (StormCommon/executorIdToTasks executor-id)
;; — so `clojurify-structure` was evaluated as a bare var and discarded, and the
;; raw (un-clojurified) Java collection was returned. Wrap the call so the result
;; is actually converted to a Clojure structure.
(defn executor->tasks [executor-id]
  (clojurify-structure (StormCommon/executorIdToTasks executor-id)))

;; Verify the cluster state for `storm-name` is internally consistent: when
;; :assigned? is true, every task is assigned exactly once; every executor has
;; a node+port; node-host covers all assigned nodes; start times exist.
;; (Form continues on the next source line.)
(defnk check-consistency [cluster storm-name :assigned? true]
  (let [state (.getClusterState cluster)
        storm-id (StormCommon/getStormId state storm-name)
        task-ids (task-ids cluster storm-id)
        assignment (.assignmentInfo state storm-id nil)
        executor->node+port (.get_executor_node_port assignment)
        task->node+port (StormCommon/taskToNodeport executor->node+port)
        assigned-task-ids (mapcat executor->tasks (keys executor->node+port))
        all-nodes (set (map (fn [np] (.get_node np)) (.values executor->node+port)))]
    (when assigned?
      (is (= (sort task-ids) (sort assigned-task-ids)))
      (doseq [t task-ids]
        (is (not-nil?
(.get task->node+port t))))) (doseq [[e s] executor->node+port] (is (not-nil? s))) (is (= all-nodes (set (keys (.get_node_host assignment))))) (doseq [[e s] executor->node+port] (is (not-nil? (.get (.get_executor_start_time_secs assignment) e)))) )) (deftest test-bogusId (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSupervisors 4) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [state (.getClusterState cluster) nimbus (.getNimbus cluster)] (is (thrown? NotAliveException (.getTopologyConf nimbus "bogus-id"))) (is (thrown? NotAliveException (.getTopology nimbus "bogus-id"))) (is (thrown? NotAliveException (.getUserTopology nimbus "bogus-id"))) (is (thrown? NotAliveException (.getTopologyInfo nimbus "bogus-id"))) (is (thrown? NotAliveException (.uploadNewCredentials nimbus "bogus-id" (Credentials.)))) ))) (deftest test-assignment (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 4) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [state (.getClusterState cluster) topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. false) (Integer. 3))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 4)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.))}) topology2 (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 12))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 6)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareGlobalGrouping)} (TestPlannerBolt.) (Integer. 
8)) "4" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareGlobalGrouping) (Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 4))}) _ (.submitTopology cluster "mystorm" {TOPOLOGY-WORKERS 4} topology) _ (.advanceClusterTime cluster 11) task-info (storm-component->task-info cluster "mystorm")] (check-consistency cluster "mystorm") ;; 3 should be assigned once (if it were optimized, we'd have ;; different topology) (is (= 1 (count (.assignments state nil)))) (is (= 1 (count (task-info "1")))) (is (= 4 (count (task-info "2")))) (is (= 1 (count (task-info "3")))) (is (= 4 (storm-num-workers state "mystorm"))) (.submitTopology cluster "storm2" {TOPOLOGY-WORKERS 20} topology2) (.advanceClusterTime cluster 11) (check-consistency cluster "storm2") (is (= 2 (count (.assignments state nil)))) (let [task-info (storm-component->task-info cluster "storm2")] (is (= 12 (count (task-info "1")))) (is (= 6 (count (task-info "2")))) (is (= 8 (count (task-info "3")))) (is (= 4 (count (task-info "4")))) (is (= 8 (storm-num-workers state "storm2"))) ) ))) (defn isolation-nimbus [] (let [standalone (Nimbus$StandaloneINimbus.)] (reify INimbus (prepare [this conf local-dir] (.prepare standalone conf local-dir) ) (allSlotsAvailableForScheduling [this supervisors topologies topologies-missing-assignments] (.allSlotsAvailableForScheduling standalone supervisors topologies topologies-missing-assignments)) (assignSlots [this topology slots] (.assignSlots standalone topology slots) ) (getForcedScheduler [this] (.getForcedScheduler standalone)) (getHostName [this supervisors node-id] node-id )))) (deftest test-auto-credentials (with-open [cluster (.build (doto (LocalCluster$Builder. 
) (.withSimulatedTime) (.withSupervisors 6) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0 NIMBUS-CREDENTIAL-RENEW-FREQ-SECS 10 NIMBUS-CREDENTIAL-RENEWERS (list "org.apache.storm.MockAutoCred") NIMBUS-AUTO-CRED-PLUGINS (list "org.apache.storm.MockAutoCred") })))] (let [state (.getClusterState cluster) topology-name "test-auto-cred-storm" submitOptions (SubmitOptions. TopologyInitialStatus/INACTIVE) - (.set_creds submitOptions (Credentials. (HashMap.))) topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. false) (Integer. 3))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 4)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.))}) _ (.submitTopologyWithOpts cluster topology-name {TOPOLOGY-WORKERS 4 TOPOLOGY-AUTO-CREDENTIALS (list "org.apache.storm.MockAutoCred") } topology submitOptions) credentials (getCredentials cluster topology-name)] ; check that the credentials have nimbus auto generated cred (is (= (.get credentials MockAutoCred/NIMBUS_CRED_KEY) MockAutoCred/NIMBUS_CRED_VAL)) ;advance cluster time so the renewers can execute (.advanceClusterTime cluster 20) ;check that renewed credentials replace the original credential. (is (= (.get (getCredentials cluster topology-name) MockAutoCred/NIMBUS_CRED_KEY) MockAutoCred/NIMBUS_CRED_RENEW_VAL)) (is (= (.get (getCredentials cluster topology-name) MockAutoCred/GATEWAY_CRED_KEY) MockAutoCred/GATEWAY_CRED_RENEW_VAL))))) (defmacro letlocals [& body] (let [[tobind lexpr] (split-at (dec (count body)) body) binded (vec (mapcat (fn [e] (if (and (list? e) (= 'bind (first e))) [(second e) (last e)] ['_ e] )) tobind))] `(let ~binded ~(first lexpr)))) (deftest test-isolated-assignment (with-open [cluster (.build (doto (LocalCluster$Builder. 
) (.withSimulatedTime) (.withSupervisors 6) (.withINimbus (isolation-nimbus)) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0 STORM-SCHEDULER "org.apache.storm.scheduler.IsolationScheduler" ISOLATION-SCHEDULER-MACHINES {"tester1" 3 "tester2" 2} NIMBUS-MONITOR-FREQ-SECS 10 })))] (letlocals (bind state (.getClusterState cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. false) (Integer. 3))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 5)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.))})) (.submitTopology cluster "noniso" {TOPOLOGY-WORKERS 4} topology) (.advanceClusterTime cluster 11) (is (= 4 (topology-num-nodes state "noniso"))) (is (= 4 (storm-num-workers state "noniso"))) (.submitTopology cluster "tester1" {TOPOLOGY-WORKERS 6} topology) (.submitTopology cluster "tester2" {TOPOLOGY-WORKERS 6} topology) (.advanceClusterTime cluster 11) (bind task-info-tester1 (storm-component->task-info cluster "tester1")) (bind task-info-tester2 (storm-component->task-info cluster "tester2")) (is (= 1 (topology-num-nodes state "noniso"))) (is (= 3 (storm-num-workers state "noniso"))) (is (= {2 3} (topology-node-distribution state "tester1"))) (is (= {3 2} (topology-node-distribution state "tester2"))) (is (apply disjoint? 
(map (partial topology-nodes state) ["noniso" "tester1" "tester2"]))) (check-consistency cluster "tester1") (check-consistency cluster "tester2") (check-consistency cluster "noniso") ;;check that nothing gets reassigned (bind tester1-slots (topology-slots state "tester1")) (bind tester2-slots (topology-slots state "tester2")) (bind noniso-slots (topology-slots state "noniso")) (.advanceClusterTime cluster 20) (is (= tester1-slots (topology-slots state "tester1"))) (is (= tester2-slots (topology-slots state "tester2"))) (is (= noniso-slots (topology-slots state "noniso"))) ))) (deftest test-zero-executor-or-tasks (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 6) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [state (.getClusterState cluster) topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. false) (Integer. 3) {TOPOLOGY-TASKS 0})} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 1) {TOPOLOGY-TASKS 2}) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) nil {TOPOLOGY-TASKS 5})}) _ (.submitTopology cluster "mystorm" {TOPOLOGY-WORKERS 4} topology) _ (.advanceClusterTime cluster 11) task-info (storm-component->task-info cluster "mystorm")] (check-consistency cluster "mystorm") (is (= 0 (count (task-info "1")))) (is (= 2 (count (task-info "2")))) (is (= 5 (count (task-info "3")))) (is (= 2 (storm-num-workers state "mystorm"))) ;; because only 2 executors ))) ;TODO: when translating this function, you should replace the map-val with a proper for loop HERE (deftest test-executor-assignments (with-open [cluster (.build (doto (LocalCluster$Builder. 
) (.withSimulatedTime) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3) {TOPOLOGY-TASKS 5})} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 8) {TOPOLOGY-TASKS 2}) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "2" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 3))}) _ (.submitTopology cluster "mystorm" {TOPOLOGY-WORKERS 4} topology) _ (.advanceClusterTime cluster 11) task-info (storm-component->task-info cluster "mystorm") executor-info (->> (storm-component->executor-info cluster "mystorm") (map-val #(map executor->tasks %)))] (check-consistency cluster "mystorm") (is (= 5 (count (task-info "1")))) (check-distribution (executor-info "1") [2 2 1]) (is (= 2 (count (task-info "2")))) (check-distribution (executor-info "2") [1 1]) (is (= 3 (count (task-info "3")))) (check-distribution (executor-info "3") [1 1 1]) ))) (deftest test-over-parallelism-assignment (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 2) (.withPortsPerSupervisor 5) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [state (.getClusterState cluster) topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 21))} {"2" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 9)) "3" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 2)) "4" (Thrift/prepareBoltDetails {(Utils/getGlobalStreamId "1" nil) (Thrift/prepareNoneGrouping)} (TestPlannerBolt.) (Integer. 
10))}) _ (.submitTopology cluster "test" {TOPOLOGY-WORKERS 7} topology) _ (.advanceClusterTime cluster 11) task-info (storm-component->task-info cluster "test")] (check-consistency cluster "test") (is (= 21 (count (task-info "1")))) (is (= 9 (count (task-info "2")))) (is (= 2 (count (task-info "3")))) (is (= 10 (count (task-info "4")))) (is (= 7 (storm-num-workers state "test"))) ))) (deftest test-topo-history (let [group-mapper (Mockito/mock IGroupMappingServiceProvider)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 2) (.withPortsPerSupervisor 5) (.withGroupMapper group-mapper) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-ADMINS ["admin-user"] NIMBUS-TASK-TIMEOUT-SECS 30 NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-ACKER-EXECUTORS 0})))] (.thenReturn (Mockito/when (.getGroups group-mapper (Mockito/anyObject))) #{"alice-group"}) (letlocals (bind conf (.getDaemonConf cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 4))} {})) (bind state (.getClusterState cluster)) ; get topology history when there's no topology history (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) (System/getProperty "user.name")))))] (log-message "Checking user " (System/getProperty "user.name") " " hist-topo-ids) (is (= 0 (count hist-topo-ids)))) (.submitTopology cluster "test" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20, LOGS-USERS ["alice", (System/getProperty "user.name")]} topology) (bind storm-id (StormCommon/getStormId state "test")) (.advanceClusterTime cluster 5) (is (not-nil? (.stormBase state storm-id nil))) (is (not-nil? (.assignmentInfo state storm-id nil))) (.killTopology (.getNimbus cluster) "test") ;; check that storm is deactivated but alive (is (= TopologyStatus/KILLED (.get_status (.stormBase state storm-id nil)))) (is (not-nil? 
(.assignmentInfo state storm-id nil))) (.advanceClusterTime cluster 35) ;; kill topology read on group (.submitTopology cluster "killgrouptest" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20, LOGS-GROUPS ["alice-group"]} topology) (bind storm-id-killgroup (StormCommon/getStormId state "killgrouptest")) (.advanceClusterTime cluster 5) (is (not-nil? (.stormBase state storm-id-killgroup nil))) (is (not-nil? (.assignmentInfo state storm-id-killgroup nil))) (.killTopology (.getNimbus cluster) "killgrouptest") ;; check that storm is deactivated but alive (is (= TopologyStatus/KILLED (.get_status (.stormBase state storm-id-killgroup nil)))) (is (not-nil? (.assignmentInfo state storm-id-killgroup nil))) (.advanceClusterTime cluster 35) ;; kill topology can't read (.submitTopology cluster "killnoreadtest" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20} topology) (bind storm-id-killnoread (StormCommon/getStormId state "killnoreadtest")) (.advanceClusterTime cluster 5) (is (not-nil? (.stormBase state storm-id-killnoread nil))) (is (not-nil? (.assignmentInfo state storm-id-killnoread nil))) (.killTopology (.getNimbus cluster) "killnoreadtest") ;; check that storm is deactivated but alive (is (= TopologyStatus/KILLED (.get_status (.stormBase state storm-id-killnoread nil)))) (is (not-nil? (.assignmentInfo state storm-id-killnoread nil))) (.advanceClusterTime cluster 35) ;; active topology can read (.submitTopology cluster "2test" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10, LOGS-USERS ["alice", (System/getProperty "user.name")]} topology) (.advanceClusterTime cluster 11) (bind storm-id2 (StormCommon/getStormId state "2test")) (is (not-nil? (.stormBase state storm-id2 nil))) (is (not-nil? (.assignmentInfo state storm-id2 nil))) ;; active topology can not read (.submitTopology cluster "testnoread" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10, LOGS-USERS ["alice"]} topology) (.advanceClusterTime cluster 11) (bind storm-id3 (StormCommon/getStormId state "testnoread")) (is (not-nil? 
(.stormBase state storm-id3 nil))) (is (not-nil? (.assignmentInfo state storm-id3 nil))) ;; active topology can read based on group (.submitTopology cluster "testreadgroup" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10, LOGS-GROUPS ["alice-group"]} topology) (.advanceClusterTime cluster 11) (bind storm-id4 (StormCommon/getStormId state "testreadgroup")) (is (not-nil? (.stormBase state storm-id4 nil))) (is (not-nil? (.assignmentInfo state storm-id4 nil))) ;; at this point have 1 running, 1 killed topo (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) (System/getProperty "user.name")))))] (log-message "Checking user " (System/getProperty "user.name") " " hist-topo-ids) (is (= 4 (count hist-topo-ids))) (is (= storm-id2 (get hist-topo-ids 0))) (is (= storm-id-killgroup (get hist-topo-ids 1))) (is (= storm-id (get hist-topo-ids 2))) (is (= storm-id4 (get hist-topo-ids 3)))) (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) "alice"))))] (log-message "Checking user PI:NAME:<NAME>END_PI " hist-topo-ids) (is (= 5 (count hist-topo-ids))) (is (= storm-id2 (get hist-topo-ids 0))) (is (= storm-id-killgroup (get hist-topo-ids 1))) (is (= storm-id (get hist-topo-ids 2))) (is (= storm-id3 (get hist-topo-ids 3))) (is (= storm-id4 (get hist-topo-ids 4)))) (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) "admin-user"))))] (log-message "Checking user admin-user " hist-topo-ids) (is (= 6 (count hist-topo-ids))) (is (= storm-id2 (get hist-topo-ids 0))) (is (= storm-id-killgroup (get hist-topo-ids 1))) (is (= storm-id-killnoread (get hist-topo-ids 2))) (is (= storm-id (get hist-topo-ids 3))) (is (= storm-id3 (get hist-topo-ids 4))) (is (= storm-id4 (get hist-topo-ids 5)))) (let [hist-topo-ids (vec (sort (.get_topo_ids (.getTopologyHistory (.getNimbus cluster) "group-only-user"))))] (log-message "Checking user group-only-user " hist-topo-ids) (is (= 2 (count hist-topo-ids))) (is (= 
storm-id-killgroup (get hist-topo-ids 0))) (is (= storm-id4 (get hist-topo-ids 1)))))))) (deftest test-kill-storm (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 2) (.withPortsPerSupervisor 5) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-TOPOLOGY-BLOBSTORE-DELETION-DELAY-MS 0 NIMBUS-TASK-TIMEOUT-SECS 30 NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind conf (.getDaemonConf cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 14))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 20} topology) (bind storm-id (StormCommon/getStormId state "test")) (.advanceClusterTime cluster 15) (is (not-nil? (.stormBase state storm-id nil))) (is (not-nil? (.assignmentInfo state storm-id nil))) (.killTopology (.getNimbus cluster) "test") ;; check that storm is deactivated but alive (is (= TopologyStatus/KILLED (.get_status (.stormBase state storm-id nil)))) (is (not-nil? (.assignmentInfo state storm-id nil))) (.advanceClusterTime cluster 18) ;; check that storm is deactivated but alive (is (= 1 (count (.heartbeatStorms state)))) (.advanceClusterTime cluster 3) (is (nil? (.stormBase state storm-id nil))) (is (nil? (.assignmentInfo state storm-id nil))) ;; cleanup happens on monitoring thread (.advanceClusterTime cluster 11) (is (empty? (.heartbeatStorms state))) ;; TODO: check that code on nimbus was cleaned up locally... (is (thrown? NotAliveException (.killTopology (.getNimbus cluster) "lalala"))) (.submitTopology cluster "2test" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 10} topology) (.advanceClusterTime cluster 11) (is (thrown? AlreadyAliveException (.submitTopology cluster "2test" {} topology))) (.advanceClusterTime cluster 11) (bind storm-id (StormCommon/getStormId state "2test")) (is (not-nil? 
(.stormBase state storm-id nil))) (.killTopology (.getNimbus cluster) "2test") (is (thrown? AlreadyAliveException (.submitTopology cluster "2test" {} topology))) (.advanceClusterTime cluster 11) (is (= 1 (count (.heartbeatStorms state)))) (.advanceClusterTime cluster 6) (is (nil? (.stormBase state storm-id nil))) (is (nil? (.assignmentInfo state storm-id nil))) (.advanceClusterTime cluster 11) (is (= 0 (count (.heartbeatStorms state)))) (.submitTopology cluster "test3" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 5} topology) (bind storm-id3 (StormCommon/getStormId state "test3")) (.advanceClusterTime cluster 11) ;; this guarantees an immediate kill notification (.killTopology (.getNimbus cluster) "test3") (.advanceClusterTime cluster 41) (is (nil? (.stormBase state storm-id3 nil))) (is (nil? (.assignmentInfo state storm-id3 nil))) (is (= 0 (count (.heartbeatStorms state)))) ;; this guarantees that monitor thread won't trigger for 10 more seconds (Time/advanceTimeSecs 11) (.waitForIdle cluster) (.submitTopology cluster "test3" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 5} topology) (bind storm-id3 (StormCommon/getStormId state "test3")) (.advanceClusterTime cluster 11) (bind executor-id (first (topology-executors cluster storm-id3))) (do-executor-heartbeat cluster storm-id3 executor-id) (.killTopology (.getNimbus cluster) "test3") (.advanceClusterTime cluster 6) (is (= 1 (count (.heartbeatStorms state)))) (.advanceClusterTime cluster 5) (is (= 0 (count (.heartbeatStorms state)))) ;; test kill with opts (.submitTopology cluster "test4" {TOPOLOGY-MESSAGE-TIMEOUT-SECS 100} topology) (.advanceClusterTime cluster 11) (.killTopologyWithOpts (.getNimbus cluster) "test4" (doto (KillOptions.) (.set_wait_secs 10))) (bind storm-id4 (StormCommon/getStormId state "test4")) (.advanceClusterTime cluster 9) (is (not-nil? (.assignmentInfo state storm-id4 nil))) (.advanceClusterTime cluster 2) (is (nil? 
(.assignmentInfo state storm-id4 nil))) ))) (deftest test-reassignment (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 2) (.withPortsPerSupervisor 5) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-TASK-LAUNCH-SECS 60 NIMBUS-TASK-TIMEOUT-SECS 20 NIMBUS-MONITOR-FREQ-SECS 10 NIMBUS-SUPERVISOR-TIMEOUT-SECS 100 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind conf (.getDaemonConf cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 2))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 2} topology) (.advanceClusterTime cluster 11) (check-consistency cluster "test") (bind storm-id (StormCommon/getStormId state "test")) (bind [executor-id1 executor-id2] (topology-executors cluster storm-id)) (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (bind _ (log-message "ass1, t0: " (pr-str ass1))) (bind _ (log-message "ass2, t0: " (pr-str ass2))) (.advanceClusterTime cluster 30) (bind _ (log-message "ass1, t30, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t30, pre beat: " (pr-str ass2))) (do-executor-heartbeat cluster storm-id executor-id1) (do-executor-heartbeat cluster storm-id executor-id2) (bind _ (log-message "ass1, t30, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t30, post beat: " (pr-str ass2))) (.advanceClusterTime cluster 13) (bind _ (log-message "ass1, t43, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t43, pre beat: " (pr-str ass2))) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (is (= ass2 (executor-assignment cluster storm-id executor-id2))) (do-executor-heartbeat cluster storm-id executor-id1) (bind _ (log-message "ass1, t43, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t43, post beat: " (pr-str ass2))) (.advanceClusterTime cluster 11) 
(bind _ (log-message "ass1, t54, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t54, pre beat: " (pr-str ass2))) (do-executor-heartbeat cluster storm-id executor-id1) (bind _ (log-message "ass1, t54, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t54, post beat: " (pr-str ass2))) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (check-consistency cluster "test") ; have to wait an extra 10 seconds because nimbus may not ; resynchronize its heartbeat time till monitor-time secs after (.advanceClusterTime cluster 11) (bind _ (log-message "ass1, t65, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t65, pre beat: " (pr-str ass2))) (do-executor-heartbeat cluster storm-id executor-id1) (bind _ (log-message "ass1, t65, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t65, post beat: " (pr-str ass2))) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (check-consistency cluster "test") (.advanceClusterTime cluster 11) (bind _ (log-message "ass1, t76, pre beat: " (pr-str ass1))) (bind _ (log-message "ass2, t76, pre beat: " (pr-str ass2))) (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (is (not= ass2 (executor-assignment cluster storm-id executor-id2))) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (bind _ (log-message "ass1, t76, post beat: " (pr-str ass1))) (bind _ (log-message "ass2, t76, post beat: " (pr-str ass2))) (check-consistency cluster "test") (.advanceClusterTime cluster 31) (is (not= ass1 (executor-assignment cluster storm-id executor-id1))) (is (= ass2 (executor-assignment cluster storm-id executor-id2))) ; tests launch timeout (check-consistency cluster "test") (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind active-supervisor (.get_node ass2)) (.killSupervisor cluster active-supervisor) (doseq [i (range 12)] (do-executor-heartbeat cluster storm-id executor-id1) (do-executor-heartbeat cluster storm-id executor-id2) (.advanceClusterTime 
cluster 10) ) ;; tests that it doesn't reassign executors if they're heartbeating even if supervisor times out (is (= ass1 (executor-assignment cluster storm-id executor-id1))) (is (= ass2 (executor-assignment cluster storm-id executor-id2))) (check-consistency cluster "test") (.advanceClusterTime cluster 30) (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (is (not-nil? ass1)) (is (not-nil? ass2)) (is (not= active-supervisor (.get_node (executor-assignment cluster storm-id executor-id2)))) (is (not= active-supervisor (.get_node (executor-assignment cluster storm-id executor-id1)))) (check-consistency cluster "test") (doseq [supervisor-id (.supervisors state nil)] (.killSupervisor cluster supervisor-id)) (.advanceClusterTime cluster 90) (bind ass1 (executor-assignment cluster storm-id executor-id1)) (bind ass2 (executor-assignment cluster storm-id executor-id2)) (is (nil? ass1)) (is (nil? ass2)) (check-consistency cluster "test" :assigned? false) (.addSupervisor cluster) (.advanceClusterTime cluster 11) (check-consistency cluster "test") ))) (deftest test-reassignment-to-constrained-cluster (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 0) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-TASK-LAUNCH-SECS 60 NIMBUS-TASK-TIMEOUT-SECS 20 NIMBUS-MONITOR-FREQ-SECS 10 NIMBUS-SUPERVISOR-TIMEOUT-SECS 100 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (.addSupervisor cluster 1 "a") (.addSupervisor cluster 1 "b") (bind conf (.getDaemonConf cluster)) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 
2))} {}))
      (bind state (.getClusterState cluster))
      ;; Ask for 2 workers on the constrained (2 supervisors x 1 port) cluster.
      (.submitTopology cluster "test" {TOPOLOGY-WORKERS 2} topology)
      (.advanceClusterTime cluster 11)
      (check-consistency cluster "test")
      (bind storm-id (StormCommon/getStormId state "test"))
      (bind [executor-id1 executor-id2] (topology-executors cluster storm-id))
      (bind ass1 (executor-assignment cluster storm-id executor-id1))
      (bind ass2 (executor-assignment cluster storm-id executor-id2))
      (.advanceClusterTime cluster 30)
      ;; Heartbeat both executors so neither assignment times out yet.
      (do-executor-heartbeat cluster storm-id executor-id1)
      (do-executor-heartbeat cluster storm-id executor-id2)
      (.advanceClusterTime cluster 13)
      (is (= ass1 (executor-assignment cluster storm-id executor-id1)))
      (is (= ass2 (executor-assignment cluster storm-id executor-id2)))
      ;; with rpc reporting mode, only heartbeats from killed supervisor will time out
      (.killSupervisor cluster (.get_node ass2))
      ;; Keep executor-1 alive across several monitor cycles while executor-2's
      ;; supervisor stays dead.
      (do-executor-heartbeat cluster storm-id executor-id1)
      (.advanceClusterTime cluster 11)
      (do-executor-heartbeat cluster storm-id executor-id1)
      (.advanceClusterTime cluster 11)
      (do-executor-heartbeat cluster storm-id executor-id1)
      (.advanceClusterTime cluster 11)
      (do-executor-heartbeat cluster storm-id executor-id1)
      (.advanceClusterTime cluster 11)
      (do-executor-heartbeat cluster storm-id executor-id1)
      (check-consistency cluster "test")
      ;; Everything should have been squeezed onto the single surviving slot.
      (is (= 1 (storm-num-workers state "test")))
      )))

(defn check-executor-distribution
  "Assert that the per-slot executor counts in `slot-executors` match
   `distribution` (checked via check-distribution)."
  [slot-executors distribution]
  (check-distribution (vals slot-executors) distribution))

(defn check-num-nodes
  "Assert that the slots in `slot-executors` span exactly `num-nodes`
   distinct supervisor nodes."
  [slot-executors num-nodes]
  (let [nodes (->> slot-executors
                   keys
                   ;; keys are node+port structs; project out the node id
                   (map (fn [np] (.get_node np)))
                   set)]
    (is (= num-nodes (count nodes)))
    ))

(deftest test-reassign-squeezed-topology
  (with-open [cluster (.build (doto (LocalCluster$Builder.
) (.withSimulatedTime) (.withSupervisors 1) (.withPortsPerSupervisor 1) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-TASK-LAUNCH-SECS 60 NIMBUS-TASK-TIMEOUT-SECS 20 NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 9))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 4} topology) ; distribution should be 2, 2, 2, 3 ideally (.advanceClusterTime cluster 11) (bind storm-id (StormCommon/getStormId state "test")) (bind slot-executors (slot-assignments cluster storm-id)) (check-executor-distribution slot-executors [9]) (check-consistency cluster "test") (.addSupervisor cluster 2) (.advanceClusterTime cluster 11) (bind slot-executors (slot-assignments cluster storm-id)) (bind executor->start (executor-start-times cluster storm-id)) (check-executor-distribution slot-executors [3 3 3]) (check-consistency cluster "test") (.addSupervisor cluster 8) ;; this actually works for any time > 0, since zookeeper fires an event causing immediate reassignment ;; doesn't work for time = 0 because it's not waiting for cluster yet, so test might happen before reassignment finishes (.advanceClusterTime cluster 11) (bind slot-executors2 (slot-assignments cluster storm-id)) (bind executor->start2 (executor-start-times cluster storm-id)) (check-executor-distribution slot-executors2 [2 2 2 3]) (check-consistency cluster "test") (bind common (first (Utils/findOne (proxy [IPredicate] [] (test [[k v]] (= 3 (count v)))) slot-executors2))) (is (not-nil? 
common)) (is (= (slot-executors2 common) (slot-executors common))) ;; check that start times are changed for everything but the common one (bind same-executors (slot-executors2 common)) (bind changed-executors (apply concat (vals (dissoc slot-executors2 common)))) (doseq [t same-executors] (is (= (executor->start t) (executor->start2 t)))) (doseq [t changed-executors] (is (not= (executor->start t) (executor->start2 t)))) ))) (deftest test-get-owner-resource-summaries (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 1) (.withPortsPerSupervisor 12) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0 })))] (letlocals ;;test for 0-topology case (.advanceClusterTime cluster 11) (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) nil)) (bind summary (first owner-resource-summaries)) (is (nil? summary)) ;;test for 1-topology case (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology) (.advanceClusterTime cluster 11) (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) nil)) (bind summary (first owner-resource-summaries)) (is (= (.get_total_workers summary) 3)) (is (= (.get_total_executors summary)) 3) (is (= (.get_total_topologies summary)) 1) ;;test for many-topology case (bind topology2 (Thrift/buildTopology {"2" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 4))} {})) (bind topology3 (Thrift/buildTopology {"3" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 
5))} {})) (.submitTopology cluster "test2" {TOPOLOGY-WORKERS 4 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology2) (.submitTopology cluster "test3" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology3) (.advanceClusterTime cluster 11) (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) nil)) (bind summary (first owner-resource-summaries)) (is (= (.get_total_workers summary) 10)) (is (= (.get_total_executors summary)) 12) (is (= (.get_total_topologies summary)) 3) ;;test for specific owner (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) (System/getProperty "user.name"))) (bind summary (first owner-resource-summaries)) (is (= (.get_total_workers summary) 10)) (is (= (.get_total_executors summary)) 12) (is (= (.get_total_topologies summary)) 3) ;;test for other user (bind other-user (str "not-" (System/getProperty "user.name"))) (bind owner-resource-summaries (.getOwnerResourceSummaries (.getNimbus cluster) other-user)) (bind summary (first owner-resource-summaries)) (is (= (.get_total_workers summary) 0)) (is (= (.get_total_executors summary)) 0) (is (= (.get_total_topologies summary)) 0) ))) (deftest test-rebalance (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 1) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 
3))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 60} topology) (.advanceClusterTime cluster 11) (bind storm-id (StormCommon/getStormId state "test")) (.addSupervisor cluster 3) (.addSupervisor cluster 3) (.advanceClusterTime cluster 11) (bind slot-executors (slot-assignments cluster storm-id)) ;; check that all workers are on one machine (check-executor-distribution slot-executors [1 1 1]) (check-num-nodes slot-executors 1) (.rebalance (.getNimbus cluster) "test" (RebalanceOptions.)) (.advanceClusterTime cluster 30) (check-executor-distribution slot-executors [1 1 1]) (check-num-nodes slot-executors 1) (.advanceClusterTime cluster 30) (bind slot-executors (slot-assignments cluster storm-id)) (check-executor-distribution slot-executors [1 1 1]) (check-num-nodes slot-executors 3) (is (thrown? InvalidTopologyException (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) (.set_num_executors {"1" (int 0)}) )))) ))) ;TODO: when translating this function, you should replace the map-val with a proper for loop HERE (deftest test-rebalance-change-parallelism (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 4) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 6) {TOPOLOGY-TASKS 12})} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30} topology) (.advanceClusterTime cluster 11) (bind storm-id (StormCommon/getStormId state "test")) (bind checker (fn [distribution] (check-executor-distribution (slot-assignments cluster storm-id) distribution))) (checker [2 2 2]) (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) 
(.set_num_workers (int 6)) )) (.advanceClusterTime cluster 29) (checker [2 2 2]) (.advanceClusterTime cluster 3) (checker [1 1 1 1 1 1]) (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) (.set_num_executors {"1" (int 1)}) )) (.advanceClusterTime cluster 29) (checker [1 1 1 1 1 1]) (.advanceClusterTime cluster 3) (checker [1]) (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) (.set_num_executors {"1" (int 8)}) (.set_num_workers 4) )) (.advanceClusterTime cluster 32) (checker [2 2 2 2]) (check-consistency cluster "test") (bind executor-info (->> (storm-component->executor-info cluster "test") (map-val #(map executor->tasks %)))) (check-distribution (executor-info "1") [2 2 2 2 1 1 1 1]) ))) (defn check-for-collisions [state] (log-message "Checking for collision") (let [assignments (.assignments state nil)] (log-message "Assignemts: " assignments) (let [id->node->ports (into {} (for [id assignments :let [executor->node+port (.get_executor_node_port (.assignmentInfo state id nil)) node+ports (set (.values executor->node+port)) node->ports (apply merge-with (fn [a b] (distinct (concat a b))) (for [np node+ports] {(.get_node np) [(first (.get_port np))]}))]] {id node->ports})) _ (log-message "id->node->ports: " id->node->ports) all-nodes (apply merge-with (fn [a b] (let [ret (concat a b)] (log-message "Can we combine " (pr-str a) " and " (pr-str b) " without collisions? " (apply distinct? ret) " => " (pr-str ret)) (is (apply distinct? ret)) (distinct ret))) (.values id->node->ports))] ))) (deftest test-rebalance-constrained-cluster (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withSupervisors 1) (.withPortsPerSupervisor 4) (.withDaemonConf {SUPERVISOR-ENABLE false NIMBUS-MONITOR-FREQ-SECS 10 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30 TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (letlocals (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 
3))} {})) (bind topology2 (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})) (bind topology3 (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})) (bind state (.getClusterState cluster)) (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology) (.submitTopology cluster "test2" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology2) (.submitTopology cluster "test3" {TOPOLOGY-WORKERS 3 TOPOLOGY-MESSAGE-TIMEOUT-SECS 90} topology3) (.advanceClusterTime cluster 11) (check-for-collisions state) (.rebalance (.getNimbus cluster) "test" (doto (RebalanceOptions.) (.set_num_workers 4) (.set_wait_secs 0) )) (.advanceClusterTime cluster 11) (check-for-collisions state) (.advanceClusterTime cluster 30) (check-for-collisions state) ))) (deftest test-submit-invalid (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withSimulatedTime) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0 NIMBUS-EXECUTORS-PER-TOPOLOGY 8 NIMBUS-SLOTS-PER-TOPOLOGY 8})))] (letlocals (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 1) {TOPOLOGY-TASKS 1})} {})) (is (thrown? InvalidTopologyException (.submitTopology cluster "test/aaa" {} topology))) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 16) {TOPOLOGY-TASKS 16})} {})) (bind state (.getClusterState cluster)) (is (thrown? InvalidTopologyException (.submitTopology cluster "test" {TOPOLOGY-WORKERS 3} topology))) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 5) {TOPOLOGY-TASKS 5})} {})) (is (thrown? InvalidTopologyException (.submitTopology cluster "test" {TOPOLOGY-WORKERS 16} topology)))))) (deftest test-clean-inbox "Tests that the inbox correctly cleans jar files." 
(with-open [_ (Time$SimulatedTime.)
            tmp-path (TmpPath. )]
  (let [dir-location (.getPath tmp-path)
        dir (File. dir-location)
        ;; Create a file in the inbox dir whose mtime is `seconds-ago` seconds
        ;; in the (simulated) past.
        mk-file (fn [name seconds-ago]
                  (let [f (File. (str dir-location "/" name))
                        t (- (Time/currentTimeMillis) (* seconds-ago 1000))]
                    (FileUtils/touch f)
                    (.setLastModified f t)))
        ;; Assert that exactly the given .jar files remain in the inbox dir.
        assert-files-in-dir (fn [compare-file-names]
                              (let [file-names (map #(.getName %) (file-seq dir))]
                                (is (= (sort compare-file-names)
                                       (sort (filter #(.endsWith % ".jar") file-names))
                                       ))))]
    ;; Make three files a.jar, b.jar, c.jar.
    ;; a and b are older than c and should be deleted first.
    (Time/advanceTimeSecs 100)
    (doseq [fs [["a.jar" 20] ["b.jar" 20] ["c.jar" 0]]]
      (apply mk-file fs))
    (assert-files-in-dir ["a.jar" "b.jar" "c.jar"])
    (Nimbus/cleanInbox dir-location 10)
    (assert-files-in-dir ["c.jar"])
    ;; Clean it again; c.jar should stay
    (Time/advanceTimeSecs 5)
    (Nimbus/cleanInbox dir-location 10)
    (assert-files-in-dir ["c.jar"])
    ;; Advance time, clean again, c.jar should be deleted.
    (Time/advanceTimeSecs 5)
    (Nimbus/cleanInbox dir-location 10)
    (assert-files-in-dir [])
    )))

(defn wait-for-status
  "Poll nimbus' cluster summary (every 100ms, up to 5s) until the topology
   named `name` reports `status`. A topology missing from the summary is
   treated as status \"NOT-RUNNING\"."
  [nimbus name status]
  (Testing/whileTimeout 5000
    (reify Testing$Condition
      (exec [this]
        (let [topo-summary (first (filter (fn [topo] (= name (.get_name topo)))
                                          (.get_topologies (.getClusterInfo nimbus))))
              topo-status (if topo-summary
                            (.get_status topo-summary)
                            "NOT-RUNNING")]
          (log-message "WAITING FOR "name" TO BE " status " CURRENT " topo-status)
          ;; whileTimeout keeps looping while this condition is true
          (not= topo-status status))))
    (fn [] (Thread/sleep 100))))

(deftest test-leadership
  "Tests that leader actions can only be performed by master and non leader fails to perform the same actions."
  (with-open [zk (InProcessZookeeper. )]
    (with-open [tmp-nimbus-dir (TmpPath.)
                ;; Mock the leader elector so this nimbus always believes it is leader.
                _ (MockedZookeeper.
                    (proxy [Zookeeper] []
                      (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls]
                        (MockLeaderElector.
))))] (let [nimbus-dir (.getPath tmp-nimbus-dir)] (letlocals (bind conf (merge (clojurify-structure (ConfigUtils/readStormConfig)) {STORM-ZOOKEEPER-SERVERS ["localhost"] STORM-CLUSTER-MODE "local" STORM-ZOOKEEPER-PORT (.getPort zk) STORM-LOCAL-DIR nimbus-dir})) (bind ass-backend (LocalAssignmentsBackendFactory/getDefault)) (bind cluster-state (ClusterUtils/mkStormClusterState conf ass-backend (ClusterStateContext.))) (bind nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil cluster-state)) (.launchServer nimbus) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})) (with-open [_ (MockedZookeeper. (proxy [Zookeeper] [] (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector. false))))] (letlocals (bind non-leader-cluster-state (ClusterUtils/mkStormClusterState conf ass-backend (ClusterStateContext.))) (bind non-leader-nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil non-leader-cluster-state)) (.launchServer non-leader-nimbus) ;first we verify that the master nimbus can perform all actions, even with another nimbus present. (.submitTopology nimbus "t1" nil "{}" topology) ;; Instead of sleeping until topology is scheduled, rebalance topology so mk-assignments is called. (.rebalance nimbus "t1" (doto (RebalanceOptions.) (.set_wait_secs 0))) (wait-for-status nimbus "t1" "ACTIVE") (.deactivate nimbus "t1") (.activate nimbus "t1") (.rebalance nimbus "t1" (RebalanceOptions.)) (.killTopology nimbus "t1") ;now we verify that non master nimbus can not perform any of the actions. (is (thrown? RuntimeException (.submitTopology non-leader-nimbus "failing" nil "{}" topology))) (is (thrown? RuntimeException (.killTopology non-leader-nimbus "t1"))) (is (thrown? RuntimeException (.activate non-leader-nimbus "t1"))) (is (thrown? RuntimeException (.deactivate non-leader-nimbus "t1"))) (is (thrown? 
RuntimeException (.rebalance non-leader-nimbus "t1" (RebalanceOptions.))))
            (.shutdown non-leader-nimbus)
            (.disconnect non-leader-cluster-state)
            ))
        (.shutdown nimbus)
        (.disconnect cluster-state))))))

(deftest test-nimbus-iface-submitTopologyWithOpts-checks-authorization
  ;; With a deny-everything authorizer configured, submitTopologyWithOpts must
  ;; be rejected with AuthorizationException.
  (with-open [cluster (.build
                        (doto (LocalCluster$Builder. )
                          (.withDaemonConf
                            {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.DenyAuthorizer"
                             SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.DenyAuthorizer"})))]
    (let [ topology (Thrift/buildTopology {} {}) ]
      (is (thrown? AuthorizationException
            (.submitTopologyWithOpts cluster "mystorm" {} topology
              (SubmitOptions. TopologyInitialStatus/INACTIVE))
            ))
      )
    )
  )

(deftest test-nimbus-iface-methods-check-authorization
  ;; With a deny-everything authorizer, rebalance/activate/deactivate on an
  ;; existing topology id must all be rejected with AuthorizationException.
  (let [cluster-state (Mockito/mock IStormClusterState)
        blob-store (Mockito/mock BlobStore)
        tc (Mockito/mock TopoCache)]
    (with-open [cluster (.build
                          (doto (LocalCluster$Builder. )
                            (.withClusterState cluster-state)
                            (.withBlobStore blob-store)
                            (.withTopoCache tc)
                            (.withDaemonConf
                              {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.DenyAuthorizer"
                               SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.DenyAuthorizer"})))]
      (let [nimbus (.getNimbus cluster)
            topology-name "test"
            topology-id "test-id"]
        ;; Stub the mocked cluster state so the name resolves to an id and the
        ;; calls reach the authorization check rather than failing the lookup.
        (.thenReturn (Mockito/when (.getTopoId cluster-state topology-name)) (Optional/of topology-id))
        (is (thrown? AuthorizationException (.rebalance nimbus topology-name (RebalanceOptions.))))
        (is (thrown? AuthorizationException (.activate nimbus topology-name)))
        (is (thrown? AuthorizationException (.deactivate nimbus topology-name)))))))

(deftest test-nimbus-check-authorization-params
  (let [cluster-state (Mockito/mock IStormClusterState)
        blob-store (Mockito/mock BlobStore)
        tc (Mockito/mock TopoCache)]
    (with-open [cluster (.build (doto (LocalCluster$Builder.
) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withNimbusWrapper (reify UnaryOperator (apply [this nimbus] (Mockito/spy nimbus)))) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (let [nimbus (.getNimbus cluster) topology-name "test-nimbus-check-autho-params" topology-id "fake-id" topology (Thrift/buildTopology {} {}) expected-name topology-name expected-conf {TOPOLOGY-NAME expected-name "foo" "bar"}] (.thenReturn (Mockito/when (.getTopoId cluster-state topology-name)) (Optional/of topology-id)) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/anyObject))) expected-conf) (.thenReturn (Mockito/when (.readTopology tc (Mockito/any String) (Mockito/anyObject))) nil) (testing "getTopologyConf calls check-authorization! with the correct parameters." (let [expected-operation "getTopologyConf"] (try (is (= expected-conf (->> (.getTopologyConf nimbus topology-id) JSONValue/parse clojurify-structure))) (catch NotAliveException e) (finally (.checkAuthorization (Mockito/verify nimbus) nil nil "getClusterInfo") (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq topology-name) (Mockito/any Map) (Mockito/eq expected-operation)))))) (testing "getTopology calls check-authorization! with the correct parameters." (let [expected-operation "getTopology" common-spy (->> (proxy [StormCommon] [] (systemTopologyImpl [conf topology] nil)) Mockito/spy)] (with-open [- (StormCommonInstaller. common-spy)] (try (.getTopology nimbus topology-id) (catch NotAliveException e) (finally (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq topology-name) (Mockito/any Map) (Mockito/eq expected-operation)) (. (Mockito/verify common-spy) (systemTopologyImpl (Matchers/any Map) (Matchers/any)))))))) (testing "getUserTopology calls check-authorization with the correct parameters." 
(let [expected-operation "getUserTopology"] (try (.getUserTopology nimbus topology-id) (catch NotAliveException e) (finally (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq topology-name) (Mockito/any Map) (Mockito/eq expected-operation)) ;;One for this time and one for getTopology call (.readTopology (Mockito/verify tc (Mockito/times 2)) (Mockito/eq topology-id) (Mockito/anyObject)))))))))) (deftest test-check-authorization-getSupervisorPageInfo (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withNimbusWrapper (reify UnaryOperator (apply [this nimbus] (Mockito/spy nimbus)))) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (let [nimbus (.getNimbus cluster) expected-name "test-nimbus-check-autho-params" expected-conf {TOPOLOGY-NAME expected-name TOPOLOGY-WORKERS 1 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30 "foo" "bar"} expected-operation "getTopology" assignment (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super1" #{1}), [2 2] (NodeInfo. "super2" #{2})})) topology (doto (StormTopology. ) (.set_spouts {}) (.set_bolts {}) (.set_state_spouts {})) topo-assignment {expected-name assignment} check-auth-state (atom []) mock-check-authorization (fn [nimbus storm-name storm-conf operation] (swap! check-auth-state conj {:nimbus nimbus :storm-name storm-name :storm-conf storm-conf :operation operation})) all-supervisors (doto (HashMap.) (.put "super1" (doto (SupervisorInfo.) (.set_hostname "host1") (.set_meta [(long 1234)]) (.set_uptime_secs (long 123)) (.set_meta [1 2 3]) (.set_used_ports []) (.set_resources_map {}))) (.put "super2" (doto (SupervisorInfo.) 
(.set_hostname "host2") (.set_meta [(long 1234)]) (.set_uptime_secs (long 123)) (.set_meta [1 2 3]) (.set_used_ports []) (.set_resources_map {}))))] (.thenReturn (Mockito/when (.allSupervisorInfo cluster-state)) all-supervisors) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/any Subject))) expected-conf) (.thenReturn (Mockito/when (.readTopology tc (Mockito/any String) (Mockito/any Subject))) topology) (.thenReturn (Mockito/when (.assignmentsInfo cluster-state)) topo-assignment) (.getSupervisorPageInfo nimbus "super1" nil true) ;; afterwards, it should get called twice (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq expected-name) (Mockito/any Map) (Mockito/eq "getSupervisorPageInfo")) (.checkAuthorization (Mockito/verify nimbus) nil nil "getClusterInfo") (.checkAuthorization (Mockito/verify nimbus) (Mockito/eq expected-name) (Mockito/any Map) (Mockito/eq "getTopology")))))) (deftest test-nimbus-iface-getTopology-methods-throw-correctly (with-open [cluster (LocalCluster. )] (let [ nimbus (.getNimbus cluster) id "bogus ID" ] (is (thrown? NotAliveException (.getTopology nimbus id))) (try (.getTopology nimbus id) (catch NotAliveException e (is (= id (.get_msg e))) ) ) (is (thrown? NotAliveException (.getTopologyConf nimbus id))) (try (.getTopologyConf nimbus id) (catch NotAliveException e (is (= id (.get_msg e))) ) ) (is (thrown? NotAliveException (.getTopologyInfo nimbus id))) (try (.getTopologyInfo nimbus id) (catch NotAliveException e (is (= id (.get_msg e))) ) ) (is (thrown? NotAliveException (.getUserTopology nimbus id))) (try (.getUserTopology nimbus id) (catch NotAliveException e (is (= id (.get_msg e))) ) ) ) ) ) (defn mkStormBase [launch-time-secs storm-name status] (doto (StormBase.) 
(.set_name storm-name) (.set_launch_time_secs (int launch-time-secs)) (.set_status status))) (deftest test-nimbus-iface-getClusterInfo-filters-topos-without-bases (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withTopoCache tc) (.withBlobStore blob-store)))] (let [nimbus (.getNimbus cluster) bogus-secs 42 bogus-type TopologyStatus/ACTIVE bogus-bases { "1" nil "2" (mkStormBase bogus-secs "id2-name" bogus-type) "3" nil "4" (mkStormBase bogus-secs "id4-name" bogus-type) } topo-name "test-topo" topo-conf {TOPOLOGY-NAME topo-name TOPOLOGY-WORKERS 1 TOPOLOGY-MESSAGE-TIMEOUT-SECS 30} storm-base (StormBase. ) topology (doto (StormTopology. ) (.set_spouts {}) (.set_bolts {}) (.set_state_spouts {})) ] (.thenReturn (Mockito/when (.stormBase cluster-state (Mockito/any String) (Mockito/anyObject))) storm-base) (.thenReturn (Mockito/when (.topologyBases cluster-state)) bogus-bases) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/any Subject))) topo-conf) (.thenReturn (Mockito/when (.readTopology tc (Mockito/any String) (Mockito/any Subject))) topology) (let [topos (.get_topologies (.getClusterInfo nimbus))] ; The number of topologies in the summary is correct. (is (= (count (filter (fn [b] (second b)) bogus-bases)) (count topos))) ; Each topology present has a valid name. (is (empty? (filter (fn [t] (or (nil? t) (nil? (.get_name t)))) topos))) ; The topologies are those with valid bases. (is (empty? (filter (fn [t] (or (nil? t) (not (number? (read-string (.get_id t)))) (odd? (read-string (.get_id t))) )) topos))) ) ) ) )) (deftest test-file-bogus-download (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withDaemonConf {SUPERVISOR-ENABLE false TOPOLOGY-ACKER-EXECUTORS 0 TOPOLOGY-EVENTLOGGER-EXECUTORS 0})))] (let [nimbus (.getNimbus cluster)] (is (thrown-cause? 
IllegalArgumentException (.beginFileDownload nimbus nil))) (is (thrown-cause? IllegalArgumentException (.beginFileDownload nimbus ""))) (is (thrown-cause? IllegalArgumentException (.beginFileDownload nimbus "/bogus-path/foo"))) ))) (deftest test-validate-topo-config-on-submit (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (.thenReturn (Mockito/when (.getTopoId cluster-state "test")) (Optional/empty)) (let [topology (Thrift/buildTopology {} {}) bad-config {"topology.isolate.machines" "2"}] (is (thrown-cause? InvalidTopologyException (.submitTopologyWithOpts cluster "test" bad-config topology (SubmitOptions.)))))))) (deftest test-stateless-with-scheduled-topology-to-be-killed ; tests regression of STORM-856 (with-open [zk (InProcessZookeeper. )] (with-open [tmp-nimbus-dir (TmpPath. )] (let [nimbus-dir (.getPath tmp-nimbus-dir)] (letlocals (bind conf (merge (clojurify-structure (ConfigUtils/readStormConfig)) {STORM-ZOOKEEPER-SERVERS ["localhost"] STORM-CLUSTER-MODE "local" STORM-ZOOKEEPER-PORT (.getPort zk) STORM-LOCAL-DIR nimbus-dir})) (bind cluster-state (ClusterUtils/mkStormClusterState conf (ClusterStateContext.))) (bind nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil nil)) (.launchServer nimbus) (Time/sleepSecs 1) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 
3))} {})) (.submitTopology nimbus "t1" nil (str "{\"" TOPOLOGY-MESSAGE-TIMEOUT-SECS "\": 30}") topology) ; make transition for topology t1 to be killed -> nimbus applies this event to cluster state (.killTopology nimbus "t1") ; shutdown nimbus immediately to achieve nimbus doesn't handle event right now (.shutdown nimbus) ; in startup of nimbus it reads cluster state and take proper actions ; in this case nimbus registers topology transition event to scheduler again ; before applying STORM-856 nimbus was killed with NPE (bind nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil nil)) (.launchServer nimbus) (.shutdown nimbus) (.disconnect cluster-state) ))))) (deftest test-topology-action-notifier (with-open [zk (InProcessZookeeper. )] (with-open [tmp-nimbus-dir (TmpPath.) _ (MockedZookeeper. (proxy [Zookeeper] [] (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector. ))))] (let [nimbus-dir (.getPath tmp-nimbus-dir)] (letlocals (bind conf (merge (clojurify-structure (ConfigUtils/readStormConfig)) {STORM-ZOOKEEPER-SERVERS ["localhost"] STORM-CLUSTER-MODE "local" STORM-ZOOKEEPER-PORT (.getPort zk) STORM-LOCAL-DIR nimbus-dir NIMBUS-TOPOLOGY-ACTION-NOTIFIER-PLUGIN (.getName InMemoryTopologyActionNotifier)})) (bind cluster-state (ClusterUtils/mkStormClusterState conf (ClusterStateContext.))) (bind nimbus (mk-nimbus conf (Nimbus$StandaloneINimbus.) nil nil nil nil)) (.launchServer nimbus) (bind notifier (InMemoryTopologyActionNotifier.)) (Time/sleepSecs 1) (bind topology (Thrift/buildTopology {"1" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})) (.submitTopology nimbus "test-notification" nil (str "{\"" TOPOLOGY-MESSAGE-TIMEOUT-SECS "\": 30}") topology) (.deactivate nimbus "test-notification") (.activate nimbus "test-notification") (.rebalance nimbus "test-notification" (doto (RebalanceOptions.) (.set_wait_secs 0))) (.killTopologyWithOpts nimbus "test-notification" (doto (KillOptions.) 
(.set_wait_secs 0))) (.shutdown nimbus) ; ensure notifier was invoked for each action,and in the correct order. (is (= ["submitTopology", "activate", "deactivate", "activate", "rebalance", "killTopology"] (.getTopologyActions notifier "test-notification"))) (.disconnect cluster-state) ))))) (deftest test-debug-on-component (with-open [cluster (LocalCluster. )] (let [nimbus (.getNimbus cluster) topology (Thrift/buildTopology {"spout" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})] (.submitTopology cluster "t1" {TOPOLOGY-WORKERS 1} topology) (.debug nimbus "t1" "spout" true 100)))) (deftest test-debug-on-global (with-open [cluster (LocalCluster. )] (let [nimbus (.getNimbus cluster) topology (Thrift/buildTopology {"spout" (Thrift/prepareSpoutDetails (TestPlannerSpout. true) (Integer. 3))} {})] (.submitTopology cluster "t1" {TOPOLOGY-WORKERS 1} topology) (.debug nimbus "t1" "" true 100)))) ;; if the user sends an empty log config, nimbus will say that all ;; log configs it contains are LogLevelAction/UNCHANGED (deftest empty-save-config-results-in-all-unchanged-actions (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (let [nimbus (.getNimbus cluster) previous-config (LogConfig.) mock-config (LogConfig.) expected-config (LogConfig.)] ;; send something with content to nimbus beforehand (.put_to_named_logger_level previous-config "test" (doto (LogLevel.) (.set_target_log_level "ERROR") (.set_action LogLevelAction/UPDATE))) (.put_to_named_logger_level expected-config "test" (doto (LogLevel.) 
(.set_target_log_level "ERROR") (.set_action LogLevelAction/UNCHANGED))) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/anyObject))) {}) (.thenReturn (Mockito/when (.topologyLogConfig cluster-state (Mockito/any String) (Mockito/anyObject))) previous-config) (.setLogConfig nimbus "foo" mock-config) (.setTopologyLogConfig (Mockito/verify cluster-state) (Mockito/any String) (Mockito/eq expected-config) (Mockito/any Map)))))) (deftest log-level-update-merges-and-flags-existent-log-level (let [cluster-state (Mockito/mock IStormClusterState) blob-store (Mockito/mock BlobStore) tc (Mockito/mock TopoCache)] (with-open [cluster (.build (doto (LocalCluster$Builder. ) (.withClusterState cluster-state) (.withBlobStore blob-store) (.withTopoCache tc) (.withDaemonConf {NIMBUS-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer" SUPERVISOR-AUTHORIZER "org.apache.storm.security.auth.authorizer.NoopAuthorizer"})))] (let [nimbus (.getNimbus cluster) previous-config (LogConfig.) mock-config (LogConfig.) expected-config (LogConfig.)] ;; send something with content to nimbus beforehand (.put_to_named_logger_level previous-config "test" (doto (LogLevel.) (.set_target_log_level "ERROR") (.set_action LogLevelAction/UPDATE))) (.put_to_named_logger_level previous-config "other-test" (doto (LogLevel.) (.set_target_log_level "DEBUG") (.set_action LogLevelAction/UPDATE))) ;; only change "test" (.put_to_named_logger_level mock-config "test" (doto (LogLevel.) (.set_target_log_level "INFO") (.set_action LogLevelAction/UPDATE))) (.put_to_named_logger_level expected-config "test" (doto (LogLevel.) (.set_target_log_level "INFO") (.set_action LogLevelAction/UPDATE))) (.put_to_named_logger_level expected-config "other-test" (doto (LogLevel.) 
(.set_target_log_level "DEBUG") (.set_action LogLevelAction/UNCHANGED))) (.thenReturn (Mockito/when (.readTopoConf tc (Mockito/any String) (Mockito/anyObject))) {}) (.thenReturn (Mockito/when (.topologyLogConfig cluster-state (Mockito/any String) (Mockito/anyObject))) previous-config) (.setLogConfig nimbus "foo" mock-config) (.setTopologyLogConfig (Mockito/verify cluster-state) (Mockito/any String) (Mockito/eq expected-config) (Mockito/any Map)))))) (defn mock-cluster-state ([] (mock-cluster-state nil nil)) ([active-topos inactive-topos] (mock-cluster-state active-topos inactive-topos inactive-topos inactive-topos inactive-topos)) ([active-topos hb-topos error-topos bp-topos] (mock-cluster-state active-topos hb-topos error-topos bp-topos nil)) ([active-topos hb-topos error-topos bp-topos wt-topos] (let [cluster-state (Mockito/mock IStormClusterState)] (.thenReturn (Mockito/when (.activeStorms cluster-state)) active-topos) (.thenReturn (Mockito/when (.heartbeatStorms cluster-state)) hb-topos) (.thenReturn (Mockito/when (.errorTopologies cluster-state)) error-topos) (.thenReturn (Mockito/when (.backpressureTopologies cluster-state)) bp-topos) (.thenReturn (Mockito/when (.idsOfTopologiesWithPrivateWorkerKeys cluster-state)) (into #{} wt-topos)) cluster-state))) (deftest cleanup-storm-ids-returns-inactive-topos (let [mock-state (mock-cluster-state (list "topo1") (list "topo1" "topo2" "topo3")) store (Mockito/mock BlobStore)] (.thenReturn (Mockito/when (.storedTopoIds store)) #{}) (is (= (Nimbus/topoIdsToClean mock-state store {NIMBUS-TOPOLOGY-BLOBSTORE-DELETION-DELAY-MS 0}) #{"topo2" "topo3"})))) (deftest cleanup-storm-ids-performs-union-of-storm-ids-with-active-znodes (let [active-topos (list "hb1" "e2" "bp3") hb-topos (list "hb1" "hb2" "hb3") error-topos (list "e1" "e2" "e3") bp-topos (list "bp1" "bp2" "bp3") mock-state (mock-cluster-state active-topos hb-topos error-topos bp-topos) store (Mockito/mock BlobStore)] (.thenReturn (Mockito/when (.storedTopoIds store)) 
#{}) (is (= (Nimbus/topoIdsToClean mock-state store {NIMBUS-TOPOLOGY-BLOBSTORE-DELETION-DELAY-MS 0}) #{"hb2" "hb3" "e1" "e3" "bp1" "bp2"})))) (deftest cleanup-storm-ids-returns-empty-set-when-all-topos-are-active (let [active-topos (list "hb1" "hb2" "hb3" "e1" "e2" "e3" "bp1" "bp2" "bp3") hb-topos (list "hb1" "hb2" "hb3") error-topos (list "e1" "e2" "e3") bp-topos (list "bp1" "bp2" "bp3") mock-state (mock-cluster-state active-topos hb-topos error-topos bp-topos) store (Mockito/mock BlobStore)] (.thenReturn (Mockito/when (.storedTopoIds store)) #{}) (is (= (Nimbus/topoIdsToClean mock-state store (new java.util.HashMap)) #{})))) (deftest do-cleanup-removes-inactive-znodes (let [inactive-topos (list "topo2" "topo3") hb-cache (into {}(map vector inactive-topos '(nil nil))) mock-state (mock-cluster-state) mock-blob-store (Mockito/mock BlobStore) conf {NIMBUS-MONITOR-FREQ-SECS 10 NIMBUS-TOPOLOGY-BLOBSTORE-DELETION-DELAY-MS 0}] (with-open [_ (MockedZookeeper. (proxy [Zookeeper] [] (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector. ))))] (let [nimbus (Mockito/spy (Nimbus. conf nil mock-state nil mock-blob-store nil nil))] (.set (.getHeartbeatsCache nimbus) hb-cache) (.thenReturn (Mockito/when (.storedTopoIds mock-blob-store)) (HashSet. 
inactive-topos)) (.doCleanup nimbus) ;; removed heartbeats znode (.teardownHeartbeats (Mockito/verify mock-state) "topo2") (.teardownHeartbeats (Mockito/verify mock-state) "topo3") ;; removed topo errors znode (.teardownTopologyErrors (Mockito/verify mock-state) "topo2") (.teardownTopologyErrors (Mockito/verify mock-state) "topo3") ;; removed topo directories (.forceDeleteTopoDistDir (Mockito/verify nimbus) "topo2") (.forceDeleteTopoDistDir (Mockito/verify nimbus) "topo3") ;; removed blob store topo keys (.rmTopologyKeys (Mockito/verify nimbus) "topo2") (.rmTopologyKeys (Mockito/verify nimbus) "topo3") ;; removed topology dependencies (.rmDependencyJarsInTopology (Mockito/verify nimbus) "topo2") (.rmDependencyJarsInTopology (Mockito/verify nimbus) "topo3") ;; remove topos from heartbeat cache (is (= (count (.get (.getHeartbeatsCache nimbus))) 0)))))) (deftest do-cleanup-does-not-teardown-active-topos (let [inactive-topos () hb-cache {"topo1" nil "topo2" nil} mock-state (mock-cluster-state) mock-blob-store (Mockito/mock BlobStore) conf {NIMBUS-MONITOR-FREQ-SECS 10}] (with-open [_ (MockedZookeeper. (proxy [Zookeeper] [] (zkLeaderElectorImpl [conf zk blob-store tc cluster-state acls] (MockLeaderElector. ))))] (let [nimbus (Mockito/spy (Nimbus. conf nil mock-state nil mock-blob-store nil nil))] (.set (.getHeartbeatsCache nimbus) hb-cache) (.thenReturn (Mockito/when (.storedTopoIds mock-blob-store)) (set inactive-topos)) (.doCleanup nimbus) (.teardownHeartbeats (Mockito/verify mock-state (Mockito/never)) (Mockito/any)) (.teardownTopologyErrors (Mockito/verify mock-state (Mockito/never)) (Mockito/any)) (.forceDeleteTopoDistDir (Mockito/verify nimbus (Mockito/times 0)) (Mockito/anyObject)) (.rmTopologyKeys (Mockito/verify nimbus (Mockito/times 0)) (Mockito/anyObject)) ;; hb-cache goes down to 1 because only one topo was inactive (is (= (count (.get (.getHeartbeatsCache nimbus))) 2)) (is (contains? (.get (.getHeartbeatsCache nimbus)) "topo1")) (is (contains? 
(.get (.getHeartbeatsCache nimbus)) "topo2")))))) (deftest user-topologies-for-supervisor (let [assignment (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super1" #{1}), [2 2] (NodeInfo. "super2" #{2})})) assignment2 (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super2" #{2}), [2 2] (NodeInfo. "super2" #{2})})) assignments {"topo1" assignment, "topo2" assignment2} mock-state (mock-cluster-state) mock-blob-store (Mockito/mock BlobStore) mock-tc (Mockito/mock TopoCache) nimbus (Nimbus. {NIMBUS-MONITOR-FREQ-SECS 10} nil mock-state nil mock-blob-store mock-tc (MockLeaderElector. ) nil)] (let [supervisor1-topologies (clojurify-structure (Nimbus/topologiesOnSupervisor assignments "super1")) user1-topologies (clojurify-structure (.filterAuthorized nimbus "getTopology" supervisor1-topologies)) supervisor2-topologies (clojurify-structure (Nimbus/topologiesOnSupervisor assignments "super2")) user2-topologies (clojurify-structure (.filterAuthorized nimbus "getTopology" supervisor2-topologies))] (is (= (list "topo1") supervisor1-topologies)) (is (= #{"topo1"} user1-topologies)) (is (= (list "topo1" "topo2") supervisor2-topologies)) (is (= #{"topo1" "topo2"} user2-topologies))))) (deftest user-topologies-for-supervisor-with-unauthorized-user (let [assignment (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super1" #{1}), [2 2] (NodeInfo. "super2" #{2})})) assignment2 (doto (Assignment.) (.set_executor_node_port {[1 1] (NodeInfo. "super1" #{2}), [2 2] (NodeInfo. "super2" #{2})})) assignments {"topo1" assignment, "authorized" assignment2} mock-state (mock-cluster-state) mock-blob-store (Mockito/mock BlobStore) mock-tc (Mockito/mock TopoCache) nimbus (Nimbus. {NIMBUS-MONITOR-FREQ-SECS 10} nil mock-state nil mock-blob-store mock-tc (MockLeaderElector. 
) nil)] (.thenReturn (Mockito/when (.readTopoConf mock-tc (Mockito/eq "authorized") (Mockito/anyObject))) {TOPOLOGY-NAME "authorized"}) (.thenReturn (Mockito/when (.readTopoConf mock-tc (Mockito/eq "topo1") (Mockito/anyObject))) {TOPOLOGY-NAME "topo1"}) (.setAuthorizationHandler nimbus (reify IAuthorizer (permit [this context operation topo-conf] (= "authorized" (get topo-conf TOPOLOGY-NAME))))) (let [supervisor-topologies (clojurify-structure (Nimbus/topologiesOnSupervisor assignments "super1")) user-topologies (clojurify-structure (.filterAuthorized nimbus "getTopology" supervisor-topologies))] (is (= (list "topo1" "authorized") supervisor-topologies)) (is (= #{"authorized"} user-topologies)))))
[ { "context": " id)]\n (hash-map :username (:id usr), :password (:password usr), :roles #{::user})\n nil))\n\n(defn from-clj", "end": 915, "score": 0.6364927291870117, "start": 905, "tag": "PASSWORD", "value": "(:password" }, { "context": "hash-map :username (:id usr), :password (:password usr), :roles #{::user})\n nil))\n\n(defn from-cljs [c", "end": 919, "score": 0.8576701283454895, "start": 916, "tag": "PASSWORD", "value": "usr" } ]
src/api/secure.clj
dannicolici/news
0
(ns api.secure (:require [api.core :as api] [infrastructure.persistence :as db] [compojure.core :refer :all] [ring.middleware.defaults :refer [wrap-defaults site-defaults]] [ring.adapter.jetty :refer [run-jetty]] [ring.middleware.session :refer [wrap-session]] [ring.middleware.params :refer [wrap-params]] [ring.middleware.keyword-params :refer [wrap-keyword-params]] [ring.middleware.json :refer [wrap-json-response]] [cemerick.friend [workflows :as workflows] [credentials :as creds]] [cemerick.friend :as friend] [compojure.route :as route] [hiccup.core :refer :all])) (defn user [id] "Looks up a user by id and transforms it to a friend auth map with 'user' role" (if-let [usr (db/get-user-by-id id)] (hash-map :username (:id usr), :password (:password usr), :roles #{::user}) nil)) (defn from-cljs [cljs-namespace page-title] "Uses main.js to inject content defined in cljs-namespace into a div DOM element, with id=root" (html [:head [:title page-title]] [:body [:div {:id "root"}] [:script {:src "js/main.js"}] [:script (str cljs-namespace ".start()")]])) (def news-page (from-cljs "news.core" "News")) (def menu-page (from-cljs "menu.core" "Menu")) (def login-page (from-cljs "menu.login" "Login")) (def register-page (from-cljs "menu.register" "Register")) (defroutes app-routes api/news-api-routes (GET "/news" [] news-page) (friend/logout (POST "/logout" [] menu-page))) (defroutes public-routes api/user-api-routes (GET "/" [] menu-page) (GET "/register" [] register-page) (GET "/login" [] login-page)) (defroutes static-routes (route/resources "/public")) (defroutes secured-app-routes (-> (wrap-routes app-routes friend/wrap-authorize #{::user}) (friend/authenticate {:credential-fn (partial creds/bcrypt-credential-fn user) :workflows [(workflows/interactive-form)]}) wrap-session wrap-params wrap-keyword-params)) (def app (-> (routes static-routes (wrap-defaults (routes public-routes secured-app-routes) (assoc-in site-defaults [:security :anti-forgery] false)) (route/not-found 
"Not Found")) wrap-json-response)) (defn -main [& args] (db/init-db) (run-jetty #'app {:port 8080}))
100013
(ns api.secure (:require [api.core :as api] [infrastructure.persistence :as db] [compojure.core :refer :all] [ring.middleware.defaults :refer [wrap-defaults site-defaults]] [ring.adapter.jetty :refer [run-jetty]] [ring.middleware.session :refer [wrap-session]] [ring.middleware.params :refer [wrap-params]] [ring.middleware.keyword-params :refer [wrap-keyword-params]] [ring.middleware.json :refer [wrap-json-response]] [cemerick.friend [workflows :as workflows] [credentials :as creds]] [cemerick.friend :as friend] [compojure.route :as route] [hiccup.core :refer :all])) (defn user [id] "Looks up a user by id and transforms it to a friend auth map with 'user' role" (if-let [usr (db/get-user-by-id id)] (hash-map :username (:id usr), :password <PASSWORD> <PASSWORD>), :roles #{::user}) nil)) (defn from-cljs [cljs-namespace page-title] "Uses main.js to inject content defined in cljs-namespace into a div DOM element, with id=root" (html [:head [:title page-title]] [:body [:div {:id "root"}] [:script {:src "js/main.js"}] [:script (str cljs-namespace ".start()")]])) (def news-page (from-cljs "news.core" "News")) (def menu-page (from-cljs "menu.core" "Menu")) (def login-page (from-cljs "menu.login" "Login")) (def register-page (from-cljs "menu.register" "Register")) (defroutes app-routes api/news-api-routes (GET "/news" [] news-page) (friend/logout (POST "/logout" [] menu-page))) (defroutes public-routes api/user-api-routes (GET "/" [] menu-page) (GET "/register" [] register-page) (GET "/login" [] login-page)) (defroutes static-routes (route/resources "/public")) (defroutes secured-app-routes (-> (wrap-routes app-routes friend/wrap-authorize #{::user}) (friend/authenticate {:credential-fn (partial creds/bcrypt-credential-fn user) :workflows [(workflows/interactive-form)]}) wrap-session wrap-params wrap-keyword-params)) (def app (-> (routes static-routes (wrap-defaults (routes public-routes secured-app-routes) (assoc-in site-defaults [:security :anti-forgery] false)) 
(route/not-found "Not Found")) wrap-json-response)) (defn -main [& args] (db/init-db) (run-jetty #'app {:port 8080}))
true
(ns api.secure (:require [api.core :as api] [infrastructure.persistence :as db] [compojure.core :refer :all] [ring.middleware.defaults :refer [wrap-defaults site-defaults]] [ring.adapter.jetty :refer [run-jetty]] [ring.middleware.session :refer [wrap-session]] [ring.middleware.params :refer [wrap-params]] [ring.middleware.keyword-params :refer [wrap-keyword-params]] [ring.middleware.json :refer [wrap-json-response]] [cemerick.friend [workflows :as workflows] [credentials :as creds]] [cemerick.friend :as friend] [compojure.route :as route] [hiccup.core :refer :all])) (defn user [id] "Looks up a user by id and transforms it to a friend auth map with 'user' role" (if-let [usr (db/get-user-by-id id)] (hash-map :username (:id usr), :password PI:PASSWORD:<PASSWORD>END_PI PI:PASSWORD:<PASSWORD>END_PI), :roles #{::user}) nil)) (defn from-cljs [cljs-namespace page-title] "Uses main.js to inject content defined in cljs-namespace into a div DOM element, with id=root" (html [:head [:title page-title]] [:body [:div {:id "root"}] [:script {:src "js/main.js"}] [:script (str cljs-namespace ".start()")]])) (def news-page (from-cljs "news.core" "News")) (def menu-page (from-cljs "menu.core" "Menu")) (def login-page (from-cljs "menu.login" "Login")) (def register-page (from-cljs "menu.register" "Register")) (defroutes app-routes api/news-api-routes (GET "/news" [] news-page) (friend/logout (POST "/logout" [] menu-page))) (defroutes public-routes api/user-api-routes (GET "/" [] menu-page) (GET "/register" [] register-page) (GET "/login" [] login-page)) (defroutes static-routes (route/resources "/public")) (defroutes secured-app-routes (-> (wrap-routes app-routes friend/wrap-authorize #{::user}) (friend/authenticate {:credential-fn (partial creds/bcrypt-credential-fn user) :workflows [(workflows/interactive-form)]}) wrap-session wrap-params wrap-keyword-params)) (def app (-> (routes static-routes (wrap-defaults (routes public-routes secured-app-routes) (assoc-in site-defaults 
[:security :anti-forgery] false)) (route/not-found "Not Found")) wrap-json-response)) (defn -main [& args] (db/init-db) (run-jetty #'app {:port 8080}))
[ { "context": "-------------------------------\n;;\n;;\n;; Author: PLIQUE Guillaume (Yomguithereal)\n;; Version: 0.1\n;;\n(ns clj-fuzz", "end": 198, "score": 0.9998354315757751, "start": 182, "tag": "NAME", "value": "PLIQUE Guillaume" }, { "context": "------------\n;;\n;;\n;; Author: PLIQUE Guillaume (Yomguithereal)\n;; Version: 0.1\n;;\n(ns clj-fuzzy.soundex\n (:r", "end": 213, "score": 0.9991893768310547, "start": 200, "tag": "USERNAME", "value": "Yomguithereal" } ]
src/clj_fuzzy/soundex.cljc
sooheon/clj-fuzzy
0
;; ------------------------------------------------------------------- ;; clj-fuzzy Soundex ;; ------------------------------------------------------------------- ;; ;; ;; Author: PLIQUE Guillaume (Yomguithereal) ;; Version: 0.1 ;; (ns clj-fuzzy.soundex (:require clojure.string) (:use [clj-fuzzy.helpers :only [clean-non-alphabetical distinct-consecutive]])) ;; Utilities (def ^:private translation (zipmap "AEIOUYWHBPFVCSKGJQXZDTLMNR" "000000DD111122222222334556")) (defn- get-code [character] (translation character)) (defn- pad [word] (subs (str word "000") 0 4)) (defn- word-tail [word] (clojure.string/join (drop 1 word))) (defn- compute-code-sequence "Compute the initial soundex code sequence for a [processed-word] tail." [processed-word] (filter #(not= \D %) (map get-code processed-word))) (defn- clean-code-sequence "Clean the [code-sequence] by checking [first-letter] collocation." [code-sequence first-letter] (if (= (first code-sequence) (translation first-letter)) (drop 1 code-sequence) code-sequence)) ;; Main (defn process "Return the soundex code of a specific [word]." [word] (let [processed-word (clean-non-alphabetical(clojure.string/upper-case word)) first-letter (first processed-word) code-sequence (compute-code-sequence (word-tail processed-word)) cleaned-sequence (clean-code-sequence code-sequence first-letter)] (pad (str first-letter (clojure.string/join (filter #(not= \0 %) (distinct-consecutive cleaned-sequence)))))))
50655
;; ------------------------------------------------------------------- ;; clj-fuzzy Soundex ;; ------------------------------------------------------------------- ;; ;; ;; Author: <NAME> (Yomguithereal) ;; Version: 0.1 ;; (ns clj-fuzzy.soundex (:require clojure.string) (:use [clj-fuzzy.helpers :only [clean-non-alphabetical distinct-consecutive]])) ;; Utilities (def ^:private translation (zipmap "AEIOUYWHBPFVCSKGJQXZDTLMNR" "000000DD111122222222334556")) (defn- get-code [character] (translation character)) (defn- pad [word] (subs (str word "000") 0 4)) (defn- word-tail [word] (clojure.string/join (drop 1 word))) (defn- compute-code-sequence "Compute the initial soundex code sequence for a [processed-word] tail." [processed-word] (filter #(not= \D %) (map get-code processed-word))) (defn- clean-code-sequence "Clean the [code-sequence] by checking [first-letter] collocation." [code-sequence first-letter] (if (= (first code-sequence) (translation first-letter)) (drop 1 code-sequence) code-sequence)) ;; Main (defn process "Return the soundex code of a specific [word]." [word] (let [processed-word (clean-non-alphabetical(clojure.string/upper-case word)) first-letter (first processed-word) code-sequence (compute-code-sequence (word-tail processed-word)) cleaned-sequence (clean-code-sequence code-sequence first-letter)] (pad (str first-letter (clojure.string/join (filter #(not= \0 %) (distinct-consecutive cleaned-sequence)))))))
true
;; ------------------------------------------------------------------- ;; clj-fuzzy Soundex ;; ------------------------------------------------------------------- ;; ;; ;; Author: PI:NAME:<NAME>END_PI (Yomguithereal) ;; Version: 0.1 ;; (ns clj-fuzzy.soundex (:require clojure.string) (:use [clj-fuzzy.helpers :only [clean-non-alphabetical distinct-consecutive]])) ;; Utilities (def ^:private translation (zipmap "AEIOUYWHBPFVCSKGJQXZDTLMNR" "000000DD111122222222334556")) (defn- get-code [character] (translation character)) (defn- pad [word] (subs (str word "000") 0 4)) (defn- word-tail [word] (clojure.string/join (drop 1 word))) (defn- compute-code-sequence "Compute the initial soundex code sequence for a [processed-word] tail." [processed-word] (filter #(not= \D %) (map get-code processed-word))) (defn- clean-code-sequence "Clean the [code-sequence] by checking [first-letter] collocation." [code-sequence first-letter] (if (= (first code-sequence) (translation first-letter)) (drop 1 code-sequence) code-sequence)) ;; Main (defn process "Return the soundex code of a specific [word]." [word] (let [processed-word (clean-non-alphabetical(clojure.string/upper-case word)) first-letter (first processed-word) code-sequence (compute-code-sequence (word-tail processed-word)) cleaned-sequence (clean-code-sequence code-sequence first-letter)] (pad (str first-letter (clojure.string/join (filter #(not= \0 %) (distinct-consecutive cleaned-sequence)))))))
[ { "context": " (is (= ((fn [name] (str \"Hello, \" name \"!\")) \"Dave\") \"Hello, Dave!\"))\n (is (= ((fn [name] (str \"H", "end": 162, "score": 0.9988808631896973, "start": 158, "tag": "NAME", "value": "Dave" }, { "context": "n [name] (str \"Hello, \" name \"!\")) \"Dave\") \"Hello, Dave!\"))\n (is (= ((fn [name] (str \"Hello, \" name \"!", "end": 177, "score": 0.9981470108032227, "start": 173, "tag": "NAME", "value": "Dave" }, { "context": " (is (= ((fn [name] (str \"Hello, \" name \"!\")) \"Jenn\") \"Hello, Jenn!\"))\n (is (= ((fn [name] (str \"H", "end": 236, "score": 0.9966709613800049, "start": 232, "tag": "NAME", "value": "Jenn" }, { "context": "n [name] (str \"Hello, \" name \"!\")) \"Jenn\") \"Hello, Jenn!\"))\n (is (= ((fn [name] (str \"Hello, \" name \"!", "end": 251, "score": 0.9942666292190552, "start": 247, "tag": "NAME", "value": "Jenn" } ]
test/exercises/016_4clojure_test.clj
arturoherrero/4clojure
0
(ns exercises.016-4clojure-test (:require [clojure.test :refer :all])) (deftest exercise-test (testing (is (= ((fn [name] (str "Hello, " name "!")) "Dave") "Hello, Dave!")) (is (= ((fn [name] (str "Hello, " name "!")) "Jenn") "Hello, Jenn!")) (is (= ((fn [name] (str "Hello, " name "!")) "Rhea") "Hello, Rhea!")))) ;; https://www.4clojure.com/problem/16 ;; Write a function which returns a personalized greeting.
31089
(ns exercises.016-4clojure-test (:require [clojure.test :refer :all])) (deftest exercise-test (testing (is (= ((fn [name] (str "Hello, " name "!")) "<NAME>") "Hello, <NAME>!")) (is (= ((fn [name] (str "Hello, " name "!")) "<NAME>") "Hello, <NAME>!")) (is (= ((fn [name] (str "Hello, " name "!")) "Rhea") "Hello, Rhea!")))) ;; https://www.4clojure.com/problem/16 ;; Write a function which returns a personalized greeting.
true
(ns exercises.016-4clojure-test (:require [clojure.test :refer :all])) (deftest exercise-test (testing (is (= ((fn [name] (str "Hello, " name "!")) "PI:NAME:<NAME>END_PI") "Hello, PI:NAME:<NAME>END_PI!")) (is (= ((fn [name] (str "Hello, " name "!")) "PI:NAME:<NAME>END_PI") "Hello, PI:NAME:<NAME>END_PI!")) (is (= ((fn [name] (str "Hello, " name "!")) "Rhea") "Hello, Rhea!")))) ;; https://www.4clojure.com/problem/16 ;; Write a function which returns a personalized greeting.
[ { "context": "date-time\" current-time-ms\n \"last-update-user\" \"auth-user\"\n \"owner\" \"test-user\"\n \"previous\" {\"last-upda", "end": 2568, "score": 0.8607563972473145, "start": 2559, "tag": "USERNAME", "value": "auth-user" }, { "context": "-ms\n \"last-update-user\" \"auth-user\"\n \"owner\" \"test-user\"\n \"previous\" {\"last-update-time\" (- current-tim", "end": 2591, "score": 0.9657420516014099, "start": 2582, "tag": "USERNAME", "value": "test-user" }, { "context": "pi)\n limit 10\n token-name (str \"test-token-hard-delete-\" (UUID/randomUUID))]\n (try\n ;; ARRAN", "end": 3571, "score": 0.7145630121231079, "start": 3549, "tag": "PASSWORD", "value": "test-token-hard-delete" }, { "context": "pi)\n limit 10\n token-name (str \"test-token-soft-delete-\" (UUID/randomUUID))]\n (try\n ;; ARRAN", "end": 6313, "score": 0.6766828894615173, "start": 6291, "tag": "PASSWORD", "value": "test-token-soft-delete" }, { "context": "pi)\n limit 10\n token-name (str \"test-token-update-previously-soft-deleted-\" (UUID/rando", "end": 9277, "score": 0.7021282315254211, "start": 9273, "tag": "PASSWORD", "value": "test" }, { "context": " limit 10\n token-name (str \"test-token-update-previously-soft-deleted-\" (UUID/randomUUID))]\n (try\n ", "end": 9301, "score": 0.6270956993103027, "start": 9284, "tag": "PASSWORD", "value": "update-previously" }, { "context": " token-name (str \"test-token-update-previously-soft-deleted-\" (UUID/randomUUID))]\n (try\n ", "end": 9306, "score": 0.5987756848335266, "start": 9302, "tag": "PASSWORD", "value": "soft" }, { "context": " \"last-update-user\" \"auth-user-0\"\n \"owner\"", "end": 21743, "score": 0.7107157707214355, "start": 21732, "tag": "USERNAME", "value": "auth-user-0" }, { "context": "pi)\n limit 10\n token-name (str \"test-token-diff-root-diff-user\" (UUID/randomUUID))]\n (try\n ;; ARRANG", "end": 24267, "score": 0.6548987030982971, "start": 24237, "tag": "KEY", "value": "test-token-diff-root-diff-user" }, { "context": "-name 
(str \"test-token-diff-root-diff-user\" (UUID/randomUUID))]\n (try\n ;; ARRANGE\n (let [cu", "end": 24285, "score": 0.5609484314918518, "start": 24275, "tag": "KEY", "value": "randomUUID" }, { "context": "-\" index)\n \"owner\" \"test-user\"\n \"previous\" {\"last", "end": 24954, "score": 0.9969291687011719, "start": 24945, "tag": "USERNAME", "value": "test-user" }, { "context": " \"last-update-user\" \"foo-user\"}\n \"root\" wait", "end": 25110, "score": 0.5905522704124451, "start": 25107, "tag": "USERNAME", "value": "foo" }, { "context": " \"last-update-user\" \"auth-user-0\"\n \"owner\"", "end": 25770, "score": 0.9747071266174316, "start": 25759, "tag": "USERNAME", "value": "auth-user-0" }, { "context": " \"owner\" \"test-user\"\n \"previo", "end": 25831, "score": 0.9980151653289795, "start": 25822, "tag": "USERNAME", "value": "test-user" }, { "context": " \"owner\" \"test-user\"\n ", "end": 26963, "score": 0.9873247146606445, "start": 26954, "tag": "USERNAME", "value": "test-user" }, { "context": " \"last-update-user\" \"foo-user\"}\n ", "end": 27190, "score": 0.7191834449768066, "start": 27186, "tag": "USERNAME", "value": "user" }, { "context": " \"owner\" \"test-user\"\n \"", "end": 29287, "score": 0.9862365126609802, "start": 29278, "tag": "USERNAME", "value": "test-user" }, { "context": " \"last-update-user\" \"foo-user\"}\n ", "end": 29480, "score": 0.6884111762046814, "start": 29476, "tag": "USERNAME", "value": "user" }, { "context": "pi)\n limit 10\n token-name (str \"diff-root-same-user\" (UUID/randomUUID))]\n (try\n ;; ARRANG", "end": 30277, "score": 0.8795523643493652, "start": 30258, "tag": "PASSWORD", "value": "diff-root-same-user" }, { "context": " \"last-update-user\" \"auth-user\"\n \"owner\" \"test-use", "end": 30900, "score": 0.9936426281929016, "start": 30891, "tag": "USERNAME", "value": "auth-user" }, { "context": "uth-user\"\n \"owner\" \"test-user\"\n \"previous\" {\"last", "end": 30951, "score": 0.9803280830383301, "start": 30942, "tag": 
"USERNAME", "value": "test-user" }, { "context": " \"last-update-user\" \"foo-user\"}\n \"root\" waiter-ur", "end": 31112, "score": 0.9614291787147522, "start": 31104, "tag": "USERNAME", "value": "foo-user" }, { "context": " \"last-update-user\" \"auth-user\"\n \"owner\"", "end": 31765, "score": 0.9960660934448242, "start": 31756, "tag": "USERNAME", "value": "auth-user" }, { "context": " \"owner\" \"test-user\"\n \"previo", "end": 31826, "score": 0.9759199023246765, "start": 31817, "tag": "USERNAME", "value": "test-user" }, { "context": " \"last-update-user\" \"foo-user\"}\n \"", "end": 32002, "score": 0.6561791300773621, "start": 31999, "tag": "USERNAME", "value": "foo" }, { "context": " \"last-update-user\" \"auth-user\"\n \"owner\" \"test-use", "end": 35328, "score": 0.998369038105011, "start": 35319, "tag": "USERNAME", "value": "auth-user" }, { "context": "uth-user\"\n \"owner\" \"test-user\"\n \"previous\" {\"last", "end": 35379, "score": 0.99778813123703, "start": 35370, "tag": "USERNAME", "value": "test-user" }, { "context": " \"last-update-user\" \"foo-user\"}\n \"root\" waiter-ur", "end": 35540, "score": 0.8803969025611877, "start": 35532, "tag": "USERNAME", "value": "foo-user" }, { "context": " \"last-update-user\" \"auth-user\"\n \"owner\"", "end": 36249, "score": 0.9982554912567139, "start": 36240, "tag": "USERNAME", "value": "auth-user" }, { "context": " \"owner\" \"test-user\"\n \"previo", "end": 36310, "score": 0.9972863793373108, "start": 36301, "tag": "USERNAME", "value": "test-user" }, { "context": " \"last-update-user\" \"foo-user\"}\n \"root\"", "end": 36491, "score": 0.8647539019584656, "start": 36483, "tag": "USERNAME", "value": "foo-user" }, { "context": "-\" index)\n \"owner\" \"test-user\"\n \"previous\" {\"last", "end": 39459, "score": 0.9946949481964111, "start": 39450, "tag": "USERNAME", "value": "test-user" }, { "context": " \"last-update-user\" \"foo-user\"}\n \"root\" waiter-ur", "end": 39620, "score": 0.8816534876823425, "start": 
39612, "tag": "USERNAME", "value": "foo-user" }, { "context": " \"owner\" \"test-user\"\n \"previo", "end": 40286, "score": 0.9983100295066833, "start": 40277, "tag": "USERNAME", "value": "test-user" }, { "context": " \"last-update-user\" \"foo-user\"}\n \"root\"", "end": 40467, "score": 0.9007943272590637, "start": 40459, "tag": "USERNAME", "value": "foo-user" }, { "context": " \"owner\" \"test-user\"\n \"", "end": 42449, "score": 0.9987233281135559, "start": 42440, "tag": "USERNAME", "value": "test-user" }, { "context": " \"last-update-user\" \"foo-user\"}\n ", "end": 42642, "score": 0.8940319418907166, "start": 42634, "tag": "USERNAME", "value": "foo-user" }, { "context": "sful health check\"\n (let [token-name (str \"test-ping-tokens-\" (UUID/randomUUID))]\n (try\n", "end": 45132, "score": 0.7823355197906494, "start": 45128, "tag": "KEY", "value": "test" }, { "context": "-time-ms\n \"last-update-user\" \"auth-user\"\n \"owner\" \"test-user\"\n ", "end": 47498, "score": 0.9979469180107117, "start": 47489, "tag": "USERNAME", "value": "auth-user" }, { "context": "e-user\" \"auth-user\"\n \"owner\" \"test-user\"\n \"root\" cluster-url))\n\n ", "end": 47539, "score": 0.9978318810462952, "start": 47530, "tag": "USERNAME", "value": "test-user" } ]
token-syncer/integration/token_syncer/basic_test.clj
mokshjawa/waiter
0
;; ;; Copyright (c) Two Sigma Open Source, LLC ;; ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. ;; (ns token-syncer.basic-test (:require [clojure.string :as str] [clojure.test :refer :all] [clojure.tools.logging :as log] [plumbing.core :as pc] [token-syncer.commands.cleanup :as cleanup] [token-syncer.commands.ping :as ping] [token-syncer.commands.syncer :as syncer] [token-syncer.main :as main]) (:import (java.util UUID))) (defn- kitchen-cmd [] (str (System/getenv "WAITER_TEST_KITCHEN_CMD") " -p $PORT0")) (def basic-description {"cmd" (kitchen-cmd) "cpus" 1 "mem" 2048 "metric-group" "syncer-test"}) (defn- waiter-urls "Retrieves urls to the waiter clusters provided in the WAITER_URIS environment variable." [] (let [waiter-uris (System/getenv "WAITER_URIS")] (is waiter-uris) (-> waiter-uris (str/split #",") sort))) (defn- waiter-url->cluster "Retrieves the cluster name corresponding to the provided cluster," [waiter-url] (str "cluster-" (hash waiter-url))) (defn- waiter-api "Initializes and returns the Waiter API functions." 
[] (main/init-waiter-api {:connection-timeout-ms 5000, :idle-timeout-ms 5000})) (deftest ^:integration test-environment (log/info "Running: test-environment") (testing "verifies presence of environment variables for running integration tests" (testing "waiter cluster uris" (log/info "env.WAITER_URIS" (System/getenv "WAITER_URIS")) (is (System/getenv "WAITER_URIS")) (is (> (count (waiter-urls)) 1))) (testing "kitchen command" (log/info "env.WAITER_TEST_KITCHEN_CMD" (System/getenv "WAITER_TEST_KITCHEN_CMD")) (is (not (str/blank? (System/getenv "WAITER_TEST_KITCHEN_CMD"))))))) (defn- basic-token-metadata "Returns the common metadata used in the tests." [current-time-ms] {"cluster" "cl.1" "last-update-time" current-time-ms "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "another-auth-user"} "root" "src1"}) (defn- token->etag "Retrieves the etag for a token on a waiter router." [{:keys [load-token]} waiter-url token-name] (-> (load-token waiter-url token-name) (get :token-etag))) (defn- cleanup-token "'Hard' deletes the token on all provided clusters." 
[{:keys [hard-delete-token] :as waiter-api} waiter-urls token-name] (log/info "Cleaning up token:" token-name) (doseq [waiter-url waiter-urls] (try (let [token-etag (token->etag waiter-api waiter-url token-name)] (hard-delete-token waiter-url token-name token-etag)) (catch Exception _)))) (deftest ^:integration test-token-hard-delete (testing "token sync hard-delete" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-hard-delete-" (UUID/randomUUID))] (try ;; ARRANGE (let [last-update-time-ms (- (System/currentTimeMillis) 10000) token-metadata (-> (basic-token-metadata last-update-time-ms) (assoc "deleted" true)) token-description (merge basic-description token-metadata)] (doseq [waiter-url waiter-urls] (let [token-etag (token->etag waiter-api waiter-url token-name)] (store-token waiter-url token-name token-etag token-description))) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/hard-delete :details {:etag (str token-etag) :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description token-description :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result waiter-urls)}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (let [response (load-token waiter-url token-name)] (is (= 404 (:status response)) (str waiter-url " responded with " response)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration 
test-token-soft-delete (testing "token sync soft-delete" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-soft-delete-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description (merge basic-description token-metadata)] (store-token (first waiter-urls) token-name nil (assoc token-description "deleted" true)) (doseq [waiter-url (rest waiter-urls)] (let [last-update-time-ms (- current-time-ms 10000)] (store-token waiter-url token-name nil (assoc token-description "last-update-time" last-update-time-ms)))) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/soft-delete :details {:etag token-etag :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description (assoc token-description "deleted" true) :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result (rest waiter-urls))}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description (assoc token-description "deleted" true) :headers {"content-type" "application/json"} :status 200} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-hard-delete-previously-soft-deleted (testing "token sync hard-delete previously soft-deleted" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) 
limit 10 token-name (str "test-token-update-previously-soft-deleted-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description (merge basic-description token-metadata)] (store-token (first waiter-urls) token-name nil (assoc token-description "deleted" true)) (doseq [waiter-url (rest waiter-urls)] (let [last-update-time-ms (- current-time-ms 10000)] (store-token waiter-url token-name nil (assoc token-description "deleted" true "last-update-time" last-update-time-ms)))) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/hard-delete :details {:etag "" :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description (assoc token-description "deleted" true) :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result waiter-urls)}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description {} :headers {"content-type" "application/json"} :status 404} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-token-on-single-cluster (testing "token exists on single cluster" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-token-on-single-cluster-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) 
token-description (merge basic-description token-metadata)] (store-token (first waiter-urls) token-name nil token-description) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/sync-update :details {:etag token-etag :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description token-description :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result (rest waiter-urls))}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description token-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-already-synced (testing "token already synced" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-already-synced-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description (merge basic-description token-metadata)] (doseq [waiter-url waiter-urls] (store-token waiter-url token-name nil token-description)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [expected-result {:details {} :summary {:sync {:failed #{} :unmodified #{} :updated #{}} :tokens {:pending 
{:count 0 :value #{}} :previously-synced {:count 1 :value #{token-name}} :processed {:count 0 :value #{}} :selected {:count 0 :value #{}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description token-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-update (testing "token sync update" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-update-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description (merge basic-description token-metadata)] (let [last-update-time-ms (- current-time-ms 10000)] (store-token (first waiter-urls) token-name nil token-description) (doseq [waiter-url (rest waiter-urls)] (store-token waiter-url token-name nil (assoc token-description "cpus" 2 "mem" 2048 "last-update-time" last-update-time-ms)))) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/sync-update :details {:etag token-etag :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description token-description :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result (rest waiter-urls))}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) 
(doseq [waiter-url waiter-urls] (is (= {:description token-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-different-owners-but-same-root (testing "token sync update with different owners but same root" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-different-owners-but-same-root-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "last-update-time" (- last-update-time-ms index) "last-update-user" (str "auth-user-" index) "owner" (str "test-user-" index) "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" "common-root"))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "cpus" 1 "last-update-time" last-update-time-ms "last-update-user" "auth-user-0" "owner" "test-user-0" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" "common-root") waiter-sync-result (constantly {:code :success/sync-update :details {:etag token-etag :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result (rest waiter-urls))}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending 
{:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description latest-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-different-roots-and-last-update-user (testing "token sync update with different roots" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-diff-root-diff-user" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "last-update-time" (- last-update-time-ms index) "last-update-user" (str "auth-user-" index) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "cpus" 1 "last-update-time" last-update-time-ms "last-update-user" "auth-user-0" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" (first waiter-urls)) sync-result (->> (rest waiter-urls) (map-indexed (fn [index waiter-url] [waiter-url {:code :error/root-mismatch :details {:cluster (assoc basic-description "cluster" (waiter-url->cluster 
waiter-url) "cpus" (+ index 2) "last-update-time" (- last-update-time-ms index 1) "last-update-user" (str "auth-user-" (inc index)) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url) :latest latest-description}}])) (into {})) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result sync-result}} :summary {:sync {:failed #{token-name} :unmodified #{} :updated #{}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doall (map-indexed (fn [index waiter-url] (let [token-last-modified-time (- last-update-time-ms index) token-etag (token->etag waiter-api waiter-url token-name)] (is (= {:description (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "last-update-time" token-last-modified-time "last-update-user" (str "auth-user-" index) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url) :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name))))) waiter-urls)))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-different-roots-but-same-last-update-user (testing "token sync update with different roots" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "diff-root-same-user" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc 
basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "last-update-time" (- last-update-time-ms index) "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "cpus" 1 "last-update-time" last-update-time-ms "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" (first waiter-urls)) sync-result (->> (rest waiter-urls) (map (fn [waiter-url] [waiter-url {:code :success/sync-update :details {:etag token-etag :status 200}}])) (into {})) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result sync-result}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doall (map (fn [waiter-url] (let [token-etag (token->etag waiter-api waiter-url token-name)] (is (= {:description latest-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name))))) waiter-urls)))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-different-roots-and-deleted (testing "token sync hard-delete deleted tokens with different different roots" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as 
waiter-api} (waiter-api) limit 10 token-name (str "test-token-different-roots-and-deleted-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "deleted" true "last-update-time" (- last-update-time-ms index) "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "cpus" 1 "deleted" true "last-update-time" last-update-time-ms "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" (first waiter-urls)) sync-result (pc/map-from-keys (constantly {:code :success/hard-delete :details {:etag "" :status 200}}) waiter-urls) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result sync-result}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doall (map (fn [waiter-url] (is (= {:description {} :headers {"content-type" "application/json"} :status 404} (load-token waiter-url token-name)))) waiter-urls)))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration 
test-token-same-user-params-but-different-root (testing "token sync update with different difference only in roots and system metadata" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-same-user-params-but-different-root-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "last-update-time" (- last-update-time-ms index) "last-update-user" (str "auth-user-" index) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "last-update-time" last-update-time-ms "last-update-user" "auth-user-0" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" (first waiter-urls)) sync-result (pc/map-from-keys (constantly {:code :skip/token-sync}) (rest waiter-urls)) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result sync-result}} :summary {:sync {:failed #{token-name} :unmodified #{} :updated #{}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doall (map-indexed (fn [index waiter-url] (let [token-etag (token->etag waiter-api waiter-url token-name)] (is (= 
{:description (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "last-update-time" (- last-update-time-ms index) "last-update-user" (str "auth-user-" index) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url) :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name))))) waiter-urls)))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-ping-tokens (testing "token ping on clusters" (let [waiter-urls (waiter-urls) queue-timeout-ms 120000 {:keys [store-token] :as waiter-api} (waiter-api)] (testing "successful health check" (let [token-name (str "test-ping-tokens-" (UUID/randomUUID))] (try ;; ARRANGE (doall (map (fn [waiter-url] (->> (assoc basic-description "health-check-url" "/status" "idle-timeout-mins" 2 "run-as-user" "*" "version" "lorem-ipsum") (store-token waiter-url token-name nil))) waiter-urls)) ;; ACT (let [actual-result (ping/ping-token waiter-api waiter-urls token-name queue-timeout-ms)] ;; ASSERT (let [expected-result {:details (pc/map-from-keys (fn [cluster-url] {:exit-code 0 :message (str "successfully pinged token " token-name " on " cluster-url ", reason: health check returned status code 200")}) waiter-urls) :exit-code 0 :message (str "pinging token " token-name " on " (-> waiter-urls vec println with-out-str str/trim) " was successful") :token token-name}] (is (= expected-result actual-result)))) (finally (cleanup-token waiter-api waiter-urls token-name))))) (testing "unsuccessful health check" (let [token-name (str "test-ping-tokens-" (UUID/randomUUID))] (try ;; ARRANGE (doall (map (fn [waiter-url] (->> (assoc basic-description "health-check-url" "/bad-status" "idle-timeout-mins" 2 "run-as-user" "*" "version" "lorem-ipsum") (store-token waiter-url token-name nil))) waiter-urls)) ;; ACT (let [actual-result (ping/ping-token waiter-api 
waiter-urls token-name queue-timeout-ms)] ;; ASSERT (let [expected-result {:details (pc/map-from-keys (fn [cluster-url] {:exit-code 1 :message (str "unable to ping token " token-name " on " cluster-url ", reason: health check returned status code 503")}) waiter-urls) :exit-code (count waiter-urls) :message (str "pinging token " token-name " on " (-> waiter-urls vec println with-out-str str/trim) " failed") :token token-name}] (is (= expected-result actual-result)))) (finally (cleanup-token waiter-api waiter-urls token-name)))))))) (deftest ^:integration test-cleanup-token (testing "token cleanup on cluster" (let [cluster-url (first (waiter-urls)) {:keys [load-token store-token] :as waiter-api} (waiter-api) token-name (str "test-cleanup-token-" (UUID/randomUUID)) current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] ;; ARRANGE (store-token cluster-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster cluster-url) "cpus" 1.0 "deleted" true "last-update-time" last-update-time-ms "last-update-user" "auth-user" "owner" "test-user" "root" cluster-url)) (try ;; ACT (let [actual-result (cleanup/cleanup-tokens waiter-api cluster-url current-time-ms)] ;; ASSERT (is (= #{token-name} actual-result)) (is (= {:description {} :headers {"content-type" "application/json"} :status 404} (load-token cluster-url token-name)))) (finally (cleanup-token waiter-api [cluster-url] token-name))))))
89341
;; ;; Copyright (c) Two Sigma Open Source, LLC ;; ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. ;; (ns token-syncer.basic-test (:require [clojure.string :as str] [clojure.test :refer :all] [clojure.tools.logging :as log] [plumbing.core :as pc] [token-syncer.commands.cleanup :as cleanup] [token-syncer.commands.ping :as ping] [token-syncer.commands.syncer :as syncer] [token-syncer.main :as main]) (:import (java.util UUID))) (defn- kitchen-cmd [] (str (System/getenv "WAITER_TEST_KITCHEN_CMD") " -p $PORT0")) (def basic-description {"cmd" (kitchen-cmd) "cpus" 1 "mem" 2048 "metric-group" "syncer-test"}) (defn- waiter-urls "Retrieves urls to the waiter clusters provided in the WAITER_URIS environment variable." [] (let [waiter-uris (System/getenv "WAITER_URIS")] (is waiter-uris) (-> waiter-uris (str/split #",") sort))) (defn- waiter-url->cluster "Retrieves the cluster name corresponding to the provided cluster," [waiter-url] (str "cluster-" (hash waiter-url))) (defn- waiter-api "Initializes and returns the Waiter API functions." 
[] (main/init-waiter-api {:connection-timeout-ms 5000, :idle-timeout-ms 5000})) (deftest ^:integration test-environment (log/info "Running: test-environment") (testing "verifies presence of environment variables for running integration tests" (testing "waiter cluster uris" (log/info "env.WAITER_URIS" (System/getenv "WAITER_URIS")) (is (System/getenv "WAITER_URIS")) (is (> (count (waiter-urls)) 1))) (testing "kitchen command" (log/info "env.WAITER_TEST_KITCHEN_CMD" (System/getenv "WAITER_TEST_KITCHEN_CMD")) (is (not (str/blank? (System/getenv "WAITER_TEST_KITCHEN_CMD"))))))) (defn- basic-token-metadata "Returns the common metadata used in the tests." [current-time-ms] {"cluster" "cl.1" "last-update-time" current-time-ms "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "another-auth-user"} "root" "src1"}) (defn- token->etag "Retrieves the etag for a token on a waiter router." [{:keys [load-token]} waiter-url token-name] (-> (load-token waiter-url token-name) (get :token-etag))) (defn- cleanup-token "'Hard' deletes the token on all provided clusters." 
[{:keys [hard-delete-token] :as waiter-api} waiter-urls token-name] (log/info "Cleaning up token:" token-name) (doseq [waiter-url waiter-urls] (try (let [token-etag (token->etag waiter-api waiter-url token-name)] (hard-delete-token waiter-url token-name token-etag)) (catch Exception _)))) (deftest ^:integration test-token-hard-delete (testing "token sync hard-delete" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "<PASSWORD>-" (UUID/randomUUID))] (try ;; ARRANGE (let [last-update-time-ms (- (System/currentTimeMillis) 10000) token-metadata (-> (basic-token-metadata last-update-time-ms) (assoc "deleted" true)) token-description (merge basic-description token-metadata)] (doseq [waiter-url waiter-urls] (let [token-etag (token->etag waiter-api waiter-url token-name)] (store-token waiter-url token-name token-etag token-description))) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/hard-delete :details {:etag (str token-etag) :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description token-description :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result waiter-urls)}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (let [response (load-token waiter-url token-name)] (is (= 404 (:status response)) (str waiter-url " responded with " response)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-soft-delete (testing 
"token sync soft-delete" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "<PASSWORD>-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description (merge basic-description token-metadata)] (store-token (first waiter-urls) token-name nil (assoc token-description "deleted" true)) (doseq [waiter-url (rest waiter-urls)] (let [last-update-time-ms (- current-time-ms 10000)] (store-token waiter-url token-name nil (assoc token-description "last-update-time" last-update-time-ms)))) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/soft-delete :details {:etag token-etag :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description (assoc token-description "deleted" true) :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result (rest waiter-urls))}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description (assoc token-description "deleted" true) :headers {"content-type" "application/json"} :status 200} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-hard-delete-previously-soft-deleted (testing "token sync hard-delete previously soft-deleted" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str 
"<PASSWORD>-token-<PASSWORD>-<PASSWORD>-deleted-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description (merge basic-description token-metadata)] (store-token (first waiter-urls) token-name nil (assoc token-description "deleted" true)) (doseq [waiter-url (rest waiter-urls)] (let [last-update-time-ms (- current-time-ms 10000)] (store-token waiter-url token-name nil (assoc token-description "deleted" true "last-update-time" last-update-time-ms)))) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/hard-delete :details {:etag "" :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description (assoc token-description "deleted" true) :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result waiter-urls)}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description {} :headers {"content-type" "application/json"} :status 404} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-token-on-single-cluster (testing "token exists on single cluster" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-token-on-single-cluster-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description 
(merge basic-description token-metadata)] (store-token (first waiter-urls) token-name nil token-description) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/sync-update :details {:etag token-etag :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description token-description :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result (rest waiter-urls))}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description token-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-already-synced (testing "token already synced" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-already-synced-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description (merge basic-description token-metadata)] (doseq [waiter-url waiter-urls] (store-token waiter-url token-name nil token-description)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [expected-result {:details {} :summary {:sync {:failed #{} :unmodified #{} :updated #{}} :tokens {:pending {:count 0 :value 
#{}} :previously-synced {:count 1 :value #{token-name}} :processed {:count 0 :value #{}} :selected {:count 0 :value #{}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description token-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-update (testing "token sync update" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-update-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description (merge basic-description token-metadata)] (let [last-update-time-ms (- current-time-ms 10000)] (store-token (first waiter-urls) token-name nil token-description) (doseq [waiter-url (rest waiter-urls)] (store-token waiter-url token-name nil (assoc token-description "cpus" 2 "mem" 2048 "last-update-time" last-update-time-ms)))) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/sync-update :details {:etag token-etag :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description token-description :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result (rest waiter-urls))}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url 
waiter-urls] (is (= {:description token-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-different-owners-but-same-root (testing "token sync update with different owners but same root" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-different-owners-but-same-root-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "last-update-time" (- last-update-time-ms index) "last-update-user" (str "auth-user-" index) "owner" (str "test-user-" index) "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" "common-root"))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "cpus" 1 "last-update-time" last-update-time-ms "last-update-user" "auth-user-0" "owner" "test-user-0" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" "common-root") waiter-sync-result (constantly {:code :success/sync-update :details {:etag token-etag :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result (rest waiter-urls))}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value 
#{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description latest-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-different-roots-and-last-update-user (testing "token sync update with different roots" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "<KEY>" (UUID/<KEY>))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "last-update-time" (- last-update-time-ms index) "last-update-user" (str "auth-user-" index) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "cpus" 1 "last-update-time" last-update-time-ms "last-update-user" "auth-user-0" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" (first waiter-urls)) sync-result (->> (rest waiter-urls) (map-indexed (fn [index waiter-url] [waiter-url {:code :error/root-mismatch :details {:cluster (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (+ index 2) "last-update-time" (- 
last-update-time-ms index 1) "last-update-user" (str "auth-user-" (inc index)) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url) :latest latest-description}}])) (into {})) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result sync-result}} :summary {:sync {:failed #{token-name} :unmodified #{} :updated #{}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doall (map-indexed (fn [index waiter-url] (let [token-last-modified-time (- last-update-time-ms index) token-etag (token->etag waiter-api waiter-url token-name)] (is (= {:description (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "last-update-time" token-last-modified-time "last-update-user" (str "auth-user-" index) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url) :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name))))) waiter-urls)))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-different-roots-but-same-last-update-user (testing "token sync update with different roots" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "<PASSWORD>" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc 
index) "last-update-time" (- last-update-time-ms index) "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "cpus" 1 "last-update-time" last-update-time-ms "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" (first waiter-urls)) sync-result (->> (rest waiter-urls) (map (fn [waiter-url] [waiter-url {:code :success/sync-update :details {:etag token-etag :status 200}}])) (into {})) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result sync-result}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doall (map (fn [waiter-url] (let [token-etag (token->etag waiter-api waiter-url token-name)] (is (= {:description latest-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name))))) waiter-urls)))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-different-roots-and-deleted (testing "token sync hard-delete deleted tokens with different different roots" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str 
"test-token-different-roots-and-deleted-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "deleted" true "last-update-time" (- last-update-time-ms index) "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "cpus" 1 "deleted" true "last-update-time" last-update-time-ms "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" (first waiter-urls)) sync-result (pc/map-from-keys (constantly {:code :success/hard-delete :details {:etag "" :status 200}}) waiter-urls) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result sync-result}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doall (map (fn [waiter-url] (is (= {:description {} :headers {"content-type" "application/json"} :status 404} (load-token waiter-url token-name)))) waiter-urls)))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-same-user-params-but-different-root (testing "token 
sync update with different difference only in roots and system metadata" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-same-user-params-but-different-root-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "last-update-time" (- last-update-time-ms index) "last-update-user" (str "auth-user-" index) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "last-update-time" last-update-time-ms "last-update-user" "auth-user-0" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" (first waiter-urls)) sync-result (pc/map-from-keys (constantly {:code :skip/token-sync}) (rest waiter-urls)) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result sync-result}} :summary {:sync {:failed #{token-name} :unmodified #{} :updated #{}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doall (map-indexed (fn [index waiter-url] (let [token-etag (token->etag waiter-api waiter-url token-name)] (is (= {:description (assoc basic-description "cluster" (waiter-url->cluster 
waiter-url) "last-update-time" (- last-update-time-ms index) "last-update-user" (str "auth-user-" index) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url) :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name))))) waiter-urls)))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-ping-tokens (testing "token ping on clusters" (let [waiter-urls (waiter-urls) queue-timeout-ms 120000 {:keys [store-token] :as waiter-api} (waiter-api)] (testing "successful health check" (let [token-name (str "test-ping-tokens-" (UUID/randomUUID))] (try ;; ARRANGE (doall (map (fn [waiter-url] (->> (assoc basic-description "health-check-url" "/status" "idle-timeout-mins" 2 "run-as-user" "*" "version" "lorem-ipsum") (store-token waiter-url token-name nil))) waiter-urls)) ;; ACT (let [actual-result (ping/ping-token waiter-api waiter-urls token-name queue-timeout-ms)] ;; ASSERT (let [expected-result {:details (pc/map-from-keys (fn [cluster-url] {:exit-code 0 :message (str "successfully pinged token " token-name " on " cluster-url ", reason: health check returned status code 200")}) waiter-urls) :exit-code 0 :message (str "pinging token " token-name " on " (-> waiter-urls vec println with-out-str str/trim) " was successful") :token token-name}] (is (= expected-result actual-result)))) (finally (cleanup-token waiter-api waiter-urls token-name))))) (testing "unsuccessful health check" (let [token-name (str "<PASSWORD>-ping-tokens-" (UUID/randomUUID))] (try ;; ARRANGE (doall (map (fn [waiter-url] (->> (assoc basic-description "health-check-url" "/bad-status" "idle-timeout-mins" 2 "run-as-user" "*" "version" "lorem-ipsum") (store-token waiter-url token-name nil))) waiter-urls)) ;; ACT (let [actual-result (ping/ping-token waiter-api waiter-urls token-name queue-timeout-ms)] ;; ASSERT (let 
[expected-result {:details (pc/map-from-keys (fn [cluster-url] {:exit-code 1 :message (str "unable to ping token " token-name " on " cluster-url ", reason: health check returned status code 503")}) waiter-urls) :exit-code (count waiter-urls) :message (str "pinging token " token-name " on " (-> waiter-urls vec println with-out-str str/trim) " failed") :token token-name}] (is (= expected-result actual-result)))) (finally (cleanup-token waiter-api waiter-urls token-name)))))))) (deftest ^:integration test-cleanup-token (testing "token cleanup on cluster" (let [cluster-url (first (waiter-urls)) {:keys [load-token store-token] :as waiter-api} (waiter-api) token-name (str "test-cleanup-token-" (UUID/randomUUID)) current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] ;; ARRANGE (store-token cluster-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster cluster-url) "cpus" 1.0 "deleted" true "last-update-time" last-update-time-ms "last-update-user" "auth-user" "owner" "test-user" "root" cluster-url)) (try ;; ACT (let [actual-result (cleanup/cleanup-tokens waiter-api cluster-url current-time-ms)] ;; ASSERT (is (= #{token-name} actual-result)) (is (= {:description {} :headers {"content-type" "application/json"} :status 404} (load-token cluster-url token-name)))) (finally (cleanup-token waiter-api [cluster-url] token-name))))))
true
;; ;; Copyright (c) Two Sigma Open Source, LLC ;; ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. ;; (ns token-syncer.basic-test (:require [clojure.string :as str] [clojure.test :refer :all] [clojure.tools.logging :as log] [plumbing.core :as pc] [token-syncer.commands.cleanup :as cleanup] [token-syncer.commands.ping :as ping] [token-syncer.commands.syncer :as syncer] [token-syncer.main :as main]) (:import (java.util UUID))) (defn- kitchen-cmd [] (str (System/getenv "WAITER_TEST_KITCHEN_CMD") " -p $PORT0")) (def basic-description {"cmd" (kitchen-cmd) "cpus" 1 "mem" 2048 "metric-group" "syncer-test"}) (defn- waiter-urls "Retrieves urls to the waiter clusters provided in the WAITER_URIS environment variable." [] (let [waiter-uris (System/getenv "WAITER_URIS")] (is waiter-uris) (-> waiter-uris (str/split #",") sort))) (defn- waiter-url->cluster "Retrieves the cluster name corresponding to the provided cluster," [waiter-url] (str "cluster-" (hash waiter-url))) (defn- waiter-api "Initializes and returns the Waiter API functions." 
[] (main/init-waiter-api {:connection-timeout-ms 5000, :idle-timeout-ms 5000})) (deftest ^:integration test-environment (log/info "Running: test-environment") (testing "verifies presence of environment variables for running integration tests" (testing "waiter cluster uris" (log/info "env.WAITER_URIS" (System/getenv "WAITER_URIS")) (is (System/getenv "WAITER_URIS")) (is (> (count (waiter-urls)) 1))) (testing "kitchen command" (log/info "env.WAITER_TEST_KITCHEN_CMD" (System/getenv "WAITER_TEST_KITCHEN_CMD")) (is (not (str/blank? (System/getenv "WAITER_TEST_KITCHEN_CMD"))))))) (defn- basic-token-metadata "Returns the common metadata used in the tests." [current-time-ms] {"cluster" "cl.1" "last-update-time" current-time-ms "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "another-auth-user"} "root" "src1"}) (defn- token->etag "Retrieves the etag for a token on a waiter router." [{:keys [load-token]} waiter-url token-name] (-> (load-token waiter-url token-name) (get :token-etag))) (defn- cleanup-token "'Hard' deletes the token on all provided clusters." 
[{:keys [hard-delete-token] :as waiter-api} waiter-urls token-name] (log/info "Cleaning up token:" token-name) (doseq [waiter-url waiter-urls] (try (let [token-etag (token->etag waiter-api waiter-url token-name)] (hard-delete-token waiter-url token-name token-etag)) (catch Exception _)))) (deftest ^:integration test-token-hard-delete (testing "token sync hard-delete" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "PI:PASSWORD:<PASSWORD>END_PI-" (UUID/randomUUID))] (try ;; ARRANGE (let [last-update-time-ms (- (System/currentTimeMillis) 10000) token-metadata (-> (basic-token-metadata last-update-time-ms) (assoc "deleted" true)) token-description (merge basic-description token-metadata)] (doseq [waiter-url waiter-urls] (let [token-etag (token->etag waiter-api waiter-url token-name)] (store-token waiter-url token-name token-etag token-description))) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/hard-delete :details {:etag (str token-etag) :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description token-description :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result waiter-urls)}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (let [response (load-token waiter-url token-name)] (is (= 404 (:status response)) (str waiter-url " responded with " response)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration 
test-token-soft-delete (testing "token sync soft-delete" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "PI:PASSWORD:<PASSWORD>END_PI-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description (merge basic-description token-metadata)] (store-token (first waiter-urls) token-name nil (assoc token-description "deleted" true)) (doseq [waiter-url (rest waiter-urls)] (let [last-update-time-ms (- current-time-ms 10000)] (store-token waiter-url token-name nil (assoc token-description "last-update-time" last-update-time-ms)))) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/soft-delete :details {:etag token-etag :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description (assoc token-description "deleted" true) :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result (rest waiter-urls))}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description (assoc token-description "deleted" true) :headers {"content-type" "application/json"} :status 200} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-hard-delete-previously-soft-deleted (testing "token sync hard-delete previously soft-deleted" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} 
(waiter-api) limit 10 token-name (str "PI:PASSWORD:<PASSWORD>END_PI-token-PI:PASSWORD:<PASSWORD>END_PI-PI:PASSWORD:<PASSWORD>END_PI-deleted-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description (merge basic-description token-metadata)] (store-token (first waiter-urls) token-name nil (assoc token-description "deleted" true)) (doseq [waiter-url (rest waiter-urls)] (let [last-update-time-ms (- current-time-ms 10000)] (store-token waiter-url token-name nil (assoc token-description "deleted" true "last-update-time" last-update-time-ms)))) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/hard-delete :details {:etag "" :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description (assoc token-description "deleted" true) :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result waiter-urls)}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description {} :headers {"content-type" "application/json"} :status 404} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-token-on-single-cluster (testing "token exists on single cluster" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-token-on-single-cluster-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms 
(System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description (merge basic-description token-metadata)] (store-token (first waiter-urls) token-name nil token-description) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/sync-update :details {:etag token-etag :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description token-description :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result (rest waiter-urls))}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description token-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-already-synced (testing "token already synced" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-already-synced-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description (merge basic-description token-metadata)] (doseq [waiter-url waiter-urls] (store-token waiter-url token-name nil token-description)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [expected-result {:details 
{} :summary {:sync {:failed #{} :unmodified #{} :updated #{}} :tokens {:pending {:count 0 :value #{}} :previously-synced {:count 1 :value #{token-name}} :processed {:count 0 :value #{}} :selected {:count 0 :value #{}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description token-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-update (testing "token sync update" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-update-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) token-metadata (basic-token-metadata current-time-ms) token-description (merge basic-description token-metadata)] (let [last-update-time-ms (- current-time-ms 10000)] (store-token (first waiter-urls) token-name nil token-description) (doseq [waiter-url (rest waiter-urls)] (store-token waiter-url token-name nil (assoc token-description "cpus" 2 "mem" 2048 "last-update-time" last-update-time-ms)))) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [waiter-sync-result (constantly {:code :success/sync-update :details {:etag token-etag :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description token-description :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result (rest waiter-urls))}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total 
{:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description token-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-different-owners-but-same-root (testing "token sync update with different owners but same root" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-different-owners-but-same-root-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "last-update-time" (- last-update-time-ms index) "last-update-user" (str "auth-user-" index) "owner" (str "test-user-" index) "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" "common-root"))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "cpus" 1 "last-update-time" last-update-time-ms "last-update-user" "auth-user-0" "owner" "test-user-0" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" "common-root") waiter-sync-result (constantly {:code :success/sync-update :details {:etag token-etag :status 200}}) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result (pc/map-from-keys waiter-sync-result (rest waiter-urls))}} :summary {:sync 
{:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doseq [waiter-url waiter-urls] (is (= {:description latest-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name)))))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-different-roots-and-last-update-user (testing "token sync update with different roots" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "PI:KEY:<KEY>END_PI" (UUID/PI:KEY:<KEY>END_PI))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "last-update-time" (- last-update-time-ms index) "last-update-user" (str "auth-user-" index) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "cpus" 1 "last-update-time" last-update-time-ms "last-update-user" "auth-user-0" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" (first waiter-urls)) sync-result (->> (rest waiter-urls) (map-indexed (fn [index waiter-url] [waiter-url {:code :error/root-mismatch :details 
{:cluster (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (+ index 2) "last-update-time" (- last-update-time-ms index 1) "last-update-user" (str "auth-user-" (inc index)) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url) :latest latest-description}}])) (into {})) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result sync-result}} :summary {:sync {:failed #{token-name} :unmodified #{} :updated #{}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doall (map-indexed (fn [index waiter-url] (let [token-last-modified-time (- last-update-time-ms index) token-etag (token->etag waiter-api waiter-url token-name)] (is (= {:description (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "last-update-time" token-last-modified-time "last-update-user" (str "auth-user-" index) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url) :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name))))) waiter-urls)))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-different-roots-but-same-last-update-user (testing "token sync update with different roots" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "PI:PASSWORD:<PASSWORD>END_PI" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn 
[index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "last-update-time" (- last-update-time-ms index) "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "cpus" 1 "last-update-time" last-update-time-ms "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" (first waiter-urls)) sync-result (->> (rest waiter-urls) (map (fn [waiter-url] [waiter-url {:code :success/sync-update :details {:etag token-etag :status 200}}])) (into {})) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result sync-result}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doall (map (fn [waiter-url] (let [token-etag (token->etag waiter-api waiter-url token-name)] (is (= {:description latest-description :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name))))) waiter-urls)))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-token-different-roots-and-deleted (testing "token sync hard-delete deleted tokens with different different roots" (let 
[waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-different-roots-and-deleted-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "cpus" (inc index) "deleted" true "last-update-time" (- last-update-time-ms index) "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "cpus" 1 "deleted" true "last-update-time" last-update-time-ms "last-update-user" "auth-user" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" (first waiter-urls)) sync-result (pc/map-from-keys (constantly {:code :success/hard-delete :details {:etag "" :status 200}}) waiter-urls) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result sync-result}} :summary {:sync {:failed #{} :unmodified #{} :updated #{token-name}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doall (map (fn [waiter-url] (is (= {:description {} :headers {"content-type" "application/json"} :status 404} (load-token waiter-url token-name)))) waiter-urls)))))) (finally (cleanup-token waiter-api 
waiter-urls token-name)))))) (deftest ^:integration test-token-same-user-params-but-different-root (testing "token sync update with different difference only in roots and system metadata" (let [waiter-urls (waiter-urls) {:keys [load-token store-token] :as waiter-api} (waiter-api) limit 10 token-name (str "test-token-same-user-params-but-different-root-" (UUID/randomUUID))] (try ;; ARRANGE (let [current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] (doall (map-indexed (fn [index waiter-url] (store-token waiter-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "last-update-time" (- last-update-time-ms index) "last-update-user" (str "auth-user-" index) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url))) waiter-urls)) (let [token-etag (token->etag waiter-api (first waiter-urls) token-name)] ;; ACT (let [actual-result (syncer/sync-tokens waiter-api waiter-urls limit)] ;; ASSERT (let [latest-description (assoc basic-description "cluster" (waiter-url->cluster (first waiter-urls)) "last-update-time" last-update-time-ms "last-update-user" "auth-user-0" "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" (first waiter-urls)) sync-result (pc/map-from-keys (constantly {:code :skip/token-sync}) (rest waiter-urls)) expected-result {:details {token-name {:latest {:cluster-url (first waiter-urls) :description latest-description :token-etag token-etag} :sync-result sync-result}} :summary {:sync {:failed #{token-name} :unmodified #{} :updated #{}} :tokens {:pending {:count 1 :value #{token-name}} :previously-synced {:count 0 :value #{}} :processed {:count 1 :value #{token-name}} :selected {:count 1 :value #{token-name}} :total {:count 1 :value #{token-name}}}}}] (is (= expected-result actual-result)) (doall (map-indexed (fn [index waiter-url] (let [token-etag (token->etag 
waiter-api waiter-url token-name)] (is (= {:description (assoc basic-description "cluster" (waiter-url->cluster waiter-url) "last-update-time" (- last-update-time-ms index) "last-update-user" (str "auth-user-" index) "owner" "test-user" "previous" {"last-update-time" (- current-time-ms 30000) "last-update-user" "foo-user"} "root" waiter-url) :headers {"content-type" "application/json" "etag" token-etag} :status 200 :token-etag token-etag} (load-token waiter-url token-name))))) waiter-urls)))))) (finally (cleanup-token waiter-api waiter-urls token-name)))))) (deftest ^:integration test-ping-tokens (testing "token ping on clusters" (let [waiter-urls (waiter-urls) queue-timeout-ms 120000 {:keys [store-token] :as waiter-api} (waiter-api)] (testing "successful health check" (let [token-name (str "test-ping-tokens-" (UUID/randomUUID))] (try ;; ARRANGE (doall (map (fn [waiter-url] (->> (assoc basic-description "health-check-url" "/status" "idle-timeout-mins" 2 "run-as-user" "*" "version" "lorem-ipsum") (store-token waiter-url token-name nil))) waiter-urls)) ;; ACT (let [actual-result (ping/ping-token waiter-api waiter-urls token-name queue-timeout-ms)] ;; ASSERT (let [expected-result {:details (pc/map-from-keys (fn [cluster-url] {:exit-code 0 :message (str "successfully pinged token " token-name " on " cluster-url ", reason: health check returned status code 200")}) waiter-urls) :exit-code 0 :message (str "pinging token " token-name " on " (-> waiter-urls vec println with-out-str str/trim) " was successful") :token token-name}] (is (= expected-result actual-result)))) (finally (cleanup-token waiter-api waiter-urls token-name))))) (testing "unsuccessful health check" (let [token-name (str "PI:KEY:<PASSWORD>END_PI-ping-tokens-" (UUID/randomUUID))] (try ;; ARRANGE (doall (map (fn [waiter-url] (->> (assoc basic-description "health-check-url" "/bad-status" "idle-timeout-mins" 2 "run-as-user" "*" "version" "lorem-ipsum") (store-token waiter-url token-name nil))) waiter-urls)) 
;; ACT (let [actual-result (ping/ping-token waiter-api waiter-urls token-name queue-timeout-ms)] ;; ASSERT (let [expected-result {:details (pc/map-from-keys (fn [cluster-url] {:exit-code 1 :message (str "unable to ping token " token-name " on " cluster-url ", reason: health check returned status code 503")}) waiter-urls) :exit-code (count waiter-urls) :message (str "pinging token " token-name " on " (-> waiter-urls vec println with-out-str str/trim) " failed") :token token-name}] (is (= expected-result actual-result)))) (finally (cleanup-token waiter-api waiter-urls token-name)))))))) (deftest ^:integration test-cleanup-token (testing "token cleanup on cluster" (let [cluster-url (first (waiter-urls)) {:keys [load-token store-token] :as waiter-api} (waiter-api) token-name (str "test-cleanup-token-" (UUID/randomUUID)) current-time-ms (System/currentTimeMillis) last-update-time-ms (- current-time-ms 10000)] ;; ARRANGE (store-token cluster-url token-name nil (assoc basic-description "cluster" (waiter-url->cluster cluster-url) "cpus" 1.0 "deleted" true "last-update-time" last-update-time-ms "last-update-user" "auth-user" "owner" "test-user" "root" cluster-url)) (try ;; ACT (let [actual-result (cleanup/cleanup-tokens waiter-api cluster-url current-time-ms)] ;; ASSERT (is (= #{token-name} actual-result)) (is (= {:description {} :headers {"content-type" "application/json"} :status 404} (load-token cluster-url token-name)))) (finally (cleanup-token waiter-api [cluster-url] token-name))))))
[ { "context": ";; Copyright 2014-2020 King\n;; Copyright 2009-2014 Ragnar Svensson, Christian Murray\n;; Licensed under the Defold Li", "end": 111, "score": 0.9998135566711426, "start": 96, "tag": "NAME", "value": "Ragnar Svensson" }, { "context": "-2020 King\n;; Copyright 2009-2014 Ragnar Svensson, Christian Murray\n;; Licensed under the Defold License version 1.0 ", "end": 129, "score": 0.9998183250427246, "start": 113, "tag": "NAME", "value": "Christian Murray" }, { "context": " pull-controls (ui/collect-controls pull-root [\"username-field\" \"password-field\" \"save-password-checkbox\" \"pull-", "end": 27551, "score": 0.9514161944389343, "start": 27537, "tag": "USERNAME", "value": "username-field" }, { "context": " (cond-> {:username username}\n ", "end": 41930, "score": 0.9743221998214722, "start": 41922, "tag": "USERNAME", "value": "username" }, { "context": " save-password (assoc :password password))))\n ", "end": 42047, "score": 0.939476490020752, "start": 42039, "tag": "PASSWORD", "value": "password" }, { "context": " {:username username\n ", "end": 42624, "score": 0.8225043416023254, "start": 42616, "tag": "USERNAME", "value": "username" }, { "context": " {:username username\n ", "end": 42633, "score": 0.9421193599700928, "start": 42625, "tag": "USERNAME", "value": "username" }, { "context": " :password password})))\n ", "end": 42739, "score": 0.9991745352745056, "start": 42731, "tag": "PASSWORD", "value": "password" } ]
editor/src/clj/editor/sync.clj
cmarincia/defold
0
;; Copyright 2020-2022 The Defold Foundation ;; Copyright 2014-2020 King ;; Copyright 2009-2014 Ragnar Svensson, Christian Murray ;; Licensed under the Defold License version 1.0 (the "License"); you may not use ;; this file except in compliance with the License. ;; ;; You may obtain a copy of the License, together with FAQs at ;; https://www.defold.com/license ;; ;; Unless required by applicable law or agreed to in writing, software distributed ;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR ;; CONDITIONS OF ANY KIND, either express or implied. See the License for the ;; specific language governing permissions and limitations under the License. (ns editor.sync (:require [clojure.edn :as edn] [clojure.java.io :as io] [clojure.string :as string] [editor.dialogs :as dialogs] [editor.diff-view :as diff-view] [editor.fs :as fs] [editor.fxui :as fxui] [editor.git :as git] [editor.git-credentials :as git-credentials] [editor.handler :as handler] [editor.ui :as ui] [editor.vcs-status :as vcs-status] [editor.progress :as progress] [service.log :as log]) (:import [org.eclipse.jgit.api Git PullResult] [org.eclipse.jgit.api.errors StashApplyFailureException TransportException] [org.eclipse.jgit.errors MissingObjectException] [org.eclipse.jgit.revwalk RevCommit] [java.net URI] [javafx.scene Parent Scene] [javafx.scene.control Button ListView SelectionMode TextInputControl] [javafx.scene.input KeyCode KeyEvent] [javafx.scene.text Text])) (set! *warn-on-reflection* true) ;; ================================================================================= ;; Flow state-diagram ;; 1. Pull ;; ;; :start -> :pulling -> ---------> :done ;; \ / ;; -> :resolve ;; \ ;; -> :cancel ;; 2. 
;; Push
;;
;; <PULL-FLOW> -> :start -> :staging -> :committing -> :pushing -> :done
;;                   \           /                                    \
;;                    <-----------------------------------------------  -> :cancel

;; Convert a RevCommit ref to its name string for EDN serialization (nil-safe).
(defn- serialize-ref [^RevCommit ref]
  (some->> ref .getName))

;; Resolve a serialized revision string back to a RevCommit via the repository (nil-safe).
(defn- deserialize-ref [revision ^Git git]
  (some->> revision (git/get-commit (.getRepository git))))

(defn- serialize-stash-info [stash-info]
  (some-> stash-info (update :ref serialize-ref)))

(defn- deserialize-stash-info [stash-info ^Git git]
  (some-> stash-info (update :ref deserialize-ref git)))

;; Strip the live Git handle and convert refs to strings so the flow can be
;; written to the journal file as EDN.
(defn- serialize-flow [flow]
  (-> flow
      (dissoc :git)
      (update :start-ref serialize-ref)
      (update :stash-info serialize-stash-info)))

;; Inverse of serialize-flow: reattach the Git handle and resolve refs.
;; May throw (e.g. MissingObjectException) if a serialized ref no longer exists.
(defn- deserialize-flow [serialized-flow ^Git git]
  (assert (instance? Git git))
  (-> serialized-flow
      (assoc :git git)
      (update :start-ref deserialize-ref git)
      (update :stash-info deserialize-stash-info git)))

;; Build the initial flow state map for a sync session. creds, when present,
;; must already be encrypted (see editor.git-credentials).
(defn- make-flow [^Git git creds start-ref stash-info]
  (assert (instance? Git git))
  (assert (or (nil? creds) (git-credentials/encrypted-credentials? creds)))
  {:state :pull/start
   :git git
   :creds creds
   :start-ref start-ref
   :stash-info stash-info
   :progress (progress/make "pull" 4)
   :conflicts {}
   :resolved {}
   :staged #{}
   :modified #{}})

;; Decide whether a flow change warrants rewriting the journal file: any change
;; to the durable keys, a progress message change, or progress reaching its end.
(defn- should-update-journal? [old-flow new-flow]
  (when (or (:git old-flow) (:git new-flow))
    (let [simple-keys [:state :start-ref :stash-info :conflicts :resolved :staged :modified]]
      (or (not= (select-keys old-flow simple-keys)
                (select-keys new-flow simple-keys))
          (let [old-progress (:progress old-flow)
                new-progress (:progress new-flow)]
            (or (not= (:message old-progress) (:message new-progress))
                (and (= (:pos new-progress) (:size new-progress))
                     (not= (:pos old-progress) (:size old-progress)))))))))

;; Location of the sync journal inside the repository work tree.
(defn flow-journal-file ^java.io.File [^Git git]
  (some-> git .getRepository .getWorkTree (io/file ".internal/.sync-in-progress")))

;; Persist the (serialized) flow to the journal file.
(defn- write-flow-journal! [{:keys [git] :as flow}]
  (let [file (flow-journal-file git)
        data (serialize-flow flow)]
    (fs/create-file! file (pr-str data))))

;; Atom watch fn: keep the journal file in sync with the in-memory flow.
(defn- on-flow-changed [_ _ old-flow new-flow]
  (when (should-update-journal? old-flow new-flow)
    (write-flow-journal! new-flow)))

;; True when a sync journal file exists, i.e. a previous sync was interrupted.
(defn flow-in-progress? [^Git git]
  (if-let [file (flow-journal-file git)]
    (.exists file)
    false))

;; Read a single EDN form from the journal file.
(defn read-journal [file]
  (with-open [reader (java.io.PushbackReader. (io/reader file))]
    (edn/read reader)))

;; Attempt to load and validate a flow from the journal file. Returns a result
;; map of :type :success/:error with :can-retry? indicating whether the caller
;; may retry after the user intervenes.
(defn- try-load-flow [^Git git file]
  (let [data (try (read-journal file) (catch Throwable e e))]
    (if (instance? Throwable data)
      ;; We failed to read or parse the data from the journal file. Possibly a
      ;; permissions issue, or it could be corrupt. We should notify the user,
      ;; but we cannot retry.
      {:type :error
       :code :read-error
       :exception data
       :can-retry? false}
      ;; The flow journal file parsed OK, but it might contain invalid refs.
      (let [flow (try (deserialize-flow data git) (catch Throwable e e))]
        (cond
          (map? flow)
          ;; We got a map back. Verify it contains the required data.
          (if (and (instance? RevCommit (:start-ref flow))
                   (instance? RevCommit (:ref (:stash-info flow))))
            ;; The journal file looks good! We can proceed with reverting to the
            ;; pre-sync state. If that fails, we can retry.
            {:type :success
             :flow flow}
            ;; The journal file appears malformed. We cannot retry.
            {:type :error
             :code :invalid-data-error
             :data flow
             :can-retry? false})

          (instance? MissingObjectException flow)
          ;; One of the refs from the journal file are invalid. Presumably the
          ;; stash was deleted. We should notify the user, but we cannot retry.
          {:type :error
           :code :invalid-ref-error
           :exception flow
           :can-retry? false}

          (instance? Throwable flow)
          ;; We somehow failed to deserialize the data we read from the journal
          ;; file. We should notify the user, but we cannot retry.
          {:type :error
           :code :deserialize-error
           :exception flow
           :can-retry? false}

          :else
          ;; Programming error - should not happen.
          (throw (ex-info (str "Unhandled return value from deserialize-flow: " (pr-str flow))
                          {:data data
                           :return-value flow})))))))

;; Revert the work tree to start-ref, then re-apply and drop the stash (if any).
;; Short-circuits on the first failure via `or`; a nil from a successful step
;; falls through to the next. Returns a result map; a failed stash-drop is only
;; a :warning since the project state itself was restored.
(defn- try-revert-to-stashed! [^Git git start-ref stash-info]
  (or (when-some [locked-files (not-empty (git/locked-files git))]
        {:type :error
         :code :locked-files-error
         :locked-files locked-files
         :can-retry? true})
      (try
        (git/revert-to-revision! git start-ref)
        nil
        (catch Throwable e
          {:type :error
           :code :revert-to-start-ref-error
           :exception e
           :can-retry? true}))
      (when stash-info
        (try
          (git/stash-apply! git stash-info)
          nil
          (catch Throwable e
            {:type :error
             :code :stash-apply-error
             :exception e
             :can-retry? true})))
      (when stash-info
        (try
          (git/stash-drop! git stash-info)
          nil
          (catch Throwable e
            {:type :warning
             :code :stash-drop-error
             :exception e
             :can-retry? false})))
      {:type :success}))

;; Cancel an interrupted sync found on disk (no in-memory flow): load the
;; journal, revert to the pre-sync state, and report the outcome.
(defn cancel-flow-in-progress! [^Git git]
  (let [file (flow-journal-file git)
        load-result (try-load-flow git file)]
    ;; Begin by deleting the flow journal file, since the revert could
    ;; potentially delete it if it is not .gitignored. In case something
    ;; goes wrong we will re-write it unless we think it is invalid.
    (fs/delete-file! file {:fail :silently})
    (if (not= :success (:type load-result))
      ;; There was an error loading the flow journal file. We cannot retry,
      ;; so we return the error info without restoring the journal file.
      load-result
      ;; The journal file looks good! Proceed with reverting to the
      ;; pre-sync state. In case something goes wrong, we restore the flow
      ;; journal file so we can retry the operation.
      (let [{:keys [start-ref stash-info] :as flow} (:flow load-result)
            revert-result (try-revert-to-stashed! git start-ref stash-info)]
        (when (and (not= :success (:type revert-result))
                   (:can-retry? revert-result))
          ;; Something went wrong. Re-write the journal file so we can retry.
          (write-flow-journal! flow))
        revert-result))))

;; Cancel a live sync flow: detach the journal watch and revert the work tree
;; to the pre-sync state. Returns the revert result map.
(defn cancel-flow! [!flow]
  (remove-watch !flow ::on-flow-changed)
  (let [flow @!flow
        state (:state flow)]
    (case (namespace state)
      ("pull" "push")
      (let [{:keys [git start-ref stash-info]} flow
            file (flow-journal-file git)
            file-existed? (.exists file)]
        ;; Always delete the flow journal file, since the revert could potentially
        ;; delete it if it is not .gitignored. In case something goes wrong we will
        ;; re-write it unless we think it is invalid.
        (when file-existed?
          (fs/delete-file! file {:fail :silently}))
        ;; Proceed with reverting to the pre-sync state. In case something goes
        ;; wrong, we restore the flow journal file so we can retry the operation.
        (let [revert-result (try-revert-to-stashed! git start-ref stash-info)]
          (when (and file-existed?
                     (not= :success (:type revert-result))
                     (:can-retry? revert-result))
            ;; Something went wrong. Re-write the journal file so we can retry.
            (write-flow-journal! flow))
          revert-result)))))

;; Map a cancel result map to a human-readable message for the dialog.
(defn- cancel-result-message [cancel-result]
  (if (= :success (:type cancel-result))
    "Successfully restored the project to the pre-sync state."
    (case (:code cancel-result)
      :deserialize-error "The sync journal file is corrupt. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually."
      :invalid-data-error "The sync journal file is malformed. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually."
      :invalid-ref-error "The sync journal file references invalid Git objects. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually."
      :locked-files-error (git/locked-files-error-message (:locked-files cancel-result))
      :read-error "Failed to read the sync journal file. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually."
      :revert-to-start-ref-error "Failed to revert the project to the commit your changes were made on. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually."
      :stash-apply-error "Failed to apply your stashed changes on top of the base commit. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually."
      :stash-drop-error "Successfully restored the project to the pre-sync state, but was unable to drop the Git stash with your pre-sync changes. You might want to clean it up manually."
      ;; Fallback for unrecognized codes / malformed results.
      (let [{:keys [code exception]} cancel-result]
        (if (instance? Throwable exception)
          (let [message (.getMessage ^Throwable exception)]
            (if (keyword? code)
              (str "Unknown error " code ". " message)
              (str "Unknown error. " message)))
          (str "Malformed sync cancellation result " (pr-str cancel-result)))))))

;; Run cancel-fn, and on a retryable failure show a dialog offering
;; Retry / Lose Changes; loop while the user keeps choosing Retry.
(defn interactive-cancel! [cancel-fn]
  (loop []
    (let [result (cancel-fn)
          dialog-props {:title "Unable to Cancel Sync"
                        :icon :icon/triangle-error
                        :header "An error occurred"
                        :content {:fx/type fxui/label
                                  :style-class "dialog-content-padding"
                                  :text (cancel-result-message result)}}]
      (when (not= :success (:type result))
        (if (:can-retry? result)
          (when (dialogs/make-info-dialog
                  (assoc dialog-props
                    :buttons [{:text "Lose Changes"
                               :cancel-button true
                               :result false}
                              {:text "Retry"
                               :default-button true
                               :result true}]))
            (recur))
          (dialogs/make-info-dialog dialog-props))))))

;; Start a new sync flow: stash local changes, write the journal, and return
;; the flow atom with the journal-updating watch attached. On failure the
;; partially-started flow is cancelled before rethrowing.
(defn begin-flow! [^Git git prefs]
  (let [start-ref (git/get-current-commit-ref git)
        stash-info (git/stash! git)
        creds (git-credentials/read-encrypted-credentials prefs git)
        flow (make-flow git creds start-ref stash-info)
        !flow (atom flow)]
    (try
      (write-flow-journal! flow)
      (add-watch !flow ::on-flow-changed on-flow-changed)
      !flow
      (catch Exception e
        (cancel-flow! !flow)
        (throw e)))))

;; Resume an interrupted flow from the journal file. Unlike try-load-flow,
;; this throws on read/deserialize failures.
(defn resume-flow [^Git git]
  (let [file (flow-journal-file git)
        data (with-open [reader (java.io.PushbackReader. (io/reader file))]
               (edn/read reader))
        flow (deserialize-flow data git)
        !flow (atom flow)]
    (add-watch !flow ::on-flow-changed on-flow-changed)
    !flow))

;; Complete the flow: remove the watch, delete the journal, drop the stash.
(defn finish-flow! [!flow]
  (remove-watch !flow ::on-flow-changed)
  (let [{:keys [git stash-info]} @!flow
        file (flow-journal-file git)]
    (fs/delete-file! file {:fail :silently})
    (when stash-info
      (git/stash-drop! git stash-info))))

;; Transition the flow to new-state and advance progress by n steps (default 1).
(defn- tick
  ([flow new-state]
   (tick flow new-state 1))
  ([flow new-state n]
   (-> flow
       (assoc :state new-state)
       (update :progress #(progress/advance % n)))))

;; Partition the unified status changes into :staged / :modified sets based on
;; the index status (added/changed/removed path sets). A rename that is only
;; half staged is split into separate add and delete changes.
(defn find-git-state [{:keys [added changed removed]} unified-status]
  (reduce (fn [result {:keys [change-type old-path new-path] :as change}]
            (case change-type
              :add (if (contains? added new-path)
                     (update result :staged conj (dissoc change :score))
                     (update result :modified conj (dissoc change :score)))
              :delete (if (contains? removed old-path)
                        (update result :staged conj (dissoc change :score))
                        (update result :modified conj (dissoc change :score)))
              :modify (if (contains? changed new-path)
                        (update result :staged conj (dissoc change :score))
                        (update result :modified conj (dissoc change :score)))
              :rename (let [add-staged (contains? added new-path)
                            delete-staged (contains? removed old-path)]
                        (cond
                          (and add-staged delete-staged)
                          (update result :staged conj (dissoc change :score))

                          add-staged
                          (-> result
                              (update :staged conj (git/make-add-change new-path))
                              (update :modified conj (git/make-delete-change old-path)))

                          delete-staged
                          (-> result
                              (update :staged conj (git/make-delete-change old-path))
                              (update :modified conj (git/make-add-change new-path)))

                          :else
                          (update result :modified conj (dissoc change :score))))))
          {:modified #{}
           :staged #{}}
          unified-status))

;; Refresh the :staged/:modified sets in the flow from the current git status.
(defn refresh-git-state [{:keys [git] :as flow}]
  (let [status (git/status git)]
    (merge flow (find-git-state status (git/unified-status git status)))))

;; True when exception is a TransportException whose message matches re-pattern.
;; NOTE(review): the parameter name `re-pattern` shadows clojure.core/re-pattern
;; within this fn; harmless here but worth renaming at some point.
(defn- matching-transport-exception? [re-pattern exception]
  (and (instance? TransportException exception)
       (some? (re-find re-pattern (ex-message exception)))))

(def ^:private https-not-authorized-exception?
  (partial matching-transport-exception? #"(?i)\bnot authorized\b"))

;; Drive the sync state machine forward from the current :state, recursively
;; advancing through automatic transitions until a state that requires user
;; input (or a terminal state) is reached. render-progress is called with the
;; flow's progress on entry at each step.
(defn advance-flow [{:keys [git state progress creds conflicts stash-info message] :as flow} render-progress]
  (render-progress progress)
  (condp = state
    :pull/start
    (if (nil? (:error flow))
      (advance-flow (tick flow :pull/pulling) render-progress)
      flow)

    :pull/pulling
    (let [dry-run-push-error
          (when (:verify-creds flow)
            ;; Pulling from a public repository will not use the
            ;; credentials. Do a dry-run push here to check if our
            ;; credentials are valid for the push state.
            (try
              (git/push! git {:encrypted-credentials creds :dry-run true})
              nil
              (catch Exception error error)))]
      (if (https-not-authorized-exception? dry-run-push-error)
        (advance-flow (-> flow
                          (tick :pull/start)
                          (assoc :error :https-not-authorized))
                      render-progress)
        (let [pull-result-or-error (try
                                     (git/pull! git {:encrypted-credentials creds})
                                     (catch Exception error error))]
          (cond
            (and (instance? PullResult pull-result-or-error)
                 (.isSuccessful ^PullResult pull-result-or-error))
            (advance-flow (tick flow :pull/applying) render-progress)

            (https-not-authorized-exception? pull-result-or-error)
            (advance-flow (-> flow
                              (tick :pull/start)
                              (assoc :error :https-not-authorized))
                          render-progress)

            :else
            (do (log/error :exception pull-result-or-error
                           :msg (format "Error pulling during sync: %s" (ex-message pull-result-or-error)))
                (advance-flow (tick flow :pull/error) render-progress))))))

    :pull/applying
    ;; Re-apply the stashed local changes on top of the pulled commits.
    ;; stash-res is :conflict on merge conflicts, nil on a generic failure
    ;; (println returns nil, routing us to :pull/error below).
    (let [stash-res (when stash-info
                      (try
                        (git/stash-apply! git stash-info)
                        (catch StashApplyFailureException _
                          :conflict)
                        (catch Exception e
                          (println e))))
          status (git/status git)]
      (cond
        (nil? stash-info) (advance-flow (tick flow :pull/done 2) render-progress)
        (= :conflict stash-res) (advance-flow (-> flow
                                                  (tick :pull/conflicts)
                                                  (assoc :conflicts (:conflicting-stage-state status)))
                                              render-progress)
        stash-res (advance-flow (tick flow :pull/done 2) render-progress)
        :else (advance-flow (tick flow :pull/error) render-progress)))

    :pull/conflicts
    (if (empty? conflicts)
      (advance-flow (tick flow :pull/done) render-progress)
      flow)

    :pull/done
    (refresh-git-state flow) ; Affects info text and Push command availability.

    :pull/error
    flow

    :push/start
    (advance-flow (tick (refresh-git-state flow) :push/staging) render-progress)

    :push/staging
    flow

    :push/committing
    (do (git/commit git message)
        (advance-flow (tick flow :push/pushing) render-progress))

    :push/pushing
    (try
      (git/push! git {:encrypted-credentials creds})
      (advance-flow (tick flow :push/done) render-progress)
      (catch Exception e
        (println e)
        (advance-flow (tick flow :push/error) render-progress)))

    :push/done
    flow

    :push/error
    flow))

(handler/register-menu! ::conflicts-menu
  [{:label "View Diff"
    :command :show-diff}
   {:label "Use Ours"
    :command :use-ours}
   {:label "Use Theirs"
    :command :use-theirs}])

(handler/register-menu! ::staging-menu
  [{:label "View Diff"
    :command :show-change-diff}
   {:label "Stage Change"
    :command :stage-change}])

(handler/register-menu! ::unstaging-menu
  [{:label "View Diff"
    :command :show-change-diff}
   {:label "Unstage Change"
    :command :unstage-change}])

;; "Theirs" = the file content as it exists upstream after the pull.
(defn- get-theirs [{:keys [git] :as flow} file]
  (when-let [their-bytes (git/show-file git file)]
    (String. their-bytes)))

;; "Ours" = the file content from the pre-sync stash commit.
(defn- get-ours [{:keys [git stash-info] :as flow} file]
  (when-let [stash-ref ^RevCommit (:ref stash-info)]
    (when-let [our-bytes (git/show-file git file (.name stash-ref))]
      (String. our-bytes))))

;; Mark a conflicted file as resolved: stage its add/remove, reset the index
;; entry, and move it from :conflicts to :resolved in the flow.
(defn- resolve-file! [!flow file]
  (let [{:keys [^Git git conflicts]} @!flow]
    (when-let [entry (get conflicts file)]
      (if (.exists (git/file git file))
        (-> git .add (.addFilepattern file) .call)
        (-> git .rm (.addFilepattern file) .call))
      (-> git .reset (.addPath file) .call)
      (swap! !flow #(-> %
                        (update :conflicts dissoc file)
                        (update :resolved assoc file entry))))))

;; Resolve a conflict by keeping our (stashed) version; a nil "ours" means we
;; had deleted the file, so delete it.
(defn use-ours! [!flow file]
  (if-let [ours (get-ours @!flow file)]
    (spit (git/file (:git @!flow) file) ours)
    (fs/delete-file! (git/file (:git @!flow) file) {:fail :silently}))
  (resolve-file! !flow file))

;; Resolve a conflict by keeping their (upstream) version; a nil "theirs"
;; means upstream deleted the file, so delete it.
(defn use-theirs! [!flow file]
  (if-let [theirs (get-theirs @!flow file)]
    (spit (git/file (:git @!flow) file) theirs)
    (fs/delete-file! (git/file (:git @!flow) file) {:fail :silently}))
  (resolve-file! !flow file))

(handler/defhandler :show-diff :sync
  (enabled? [selection] (= 1 (count selection)))
  (run [selection !flow]
       (let [file (first selection)
             ours (get-ours @!flow file)
             theirs (get-theirs @!flow file)]
         (when (and ours theirs)
           (diff-view/make-diff-viewer (str "Theirs '" file "'") theirs
                                       (str "Ours '" file "'") ours)))))

(handler/defhandler :show-change-diff :sync
  (enabled? [selection] (git/selection-diffable? selection))
  (run [selection !flow]
       (diff-view/present-diff-data (git/selection-diff-data (:git @!flow) selection))))

(handler/defhandler :use-ours :sync
  (enabled? [selection] (pos? (count selection)))
  (run [selection !flow]
       (doseq [f selection]
         (use-ours! !flow f))))

(handler/defhandler :use-theirs :sync
  (enabled? [selection] (pos? (count selection)))
  (run [selection !flow]
       (doseq [f selection]
         (use-theirs! !flow f))))

(handler/defhandler :stage-change :sync
  (enabled? [selection] (pos? (count selection)))
  (run [selection !flow]
       (doseq [change selection]
         (git/stage-change! (:git @!flow) change))
       (swap! !flow refresh-git-state)))

(handler/defhandler :unstage-change :sync
  (enabled? [selection] (pos? (count selection)))
  (run [selection !flow]
       (doseq [change selection]
         (git/unstage-change! (:git @!flow) change))
       (swap! !flow refresh-git-state)))

;; =================================================================================

(def ^:private ^String ssh-not-supported-info-text
  (string/join "\n\n"
               ["This project is configured to synchronize over the SSH protocol, possibly due to having been initially cloned from an SSH URL."
                "It is not currently possible to synchronize from the Defold editor over SSH, but you can use external tools to do so."
                "If you want to be able to sync directly from the Defold editor, you must clone the project from a HTTPS link."]))

(def ^:private ^String pull-done-info-text
  (string/join "\n\n"
               ["You are up-to-date with the latest changes from the server."
                "Press Push if you want to also upload your local changes to the server, or Done to return to the editor without pushing your changes."]))

(def ^:private ^String pull-done-no-local-changes-info-text
  (string/join "\n\n"
               ["You are up-to-date with the latest changes from the server."
                "There are no local changes to Push to the server. Press Done to return to the editor."]))

(def ^:private ^String pull-error-info-text
  (string/join "\n\n"
               ["Something went wrong when we tried to get the latest changes from the server."
                "Click Cancel to restore the project the pre-sync state."]))

(def ^:private ^String push-done-info-text
  (string/join "\n\n"
               ["Your changes were successfully pushed to the server."
                "Click Done to return to the editor."]))

(def ^:private ^String push-error-info-text
  (string/join "\n\n"
               ["Something went wrong when we tried to push your local changes to the server."
                "Click Done to return to the editor without pushing. You will still have the latest changes from the server unless you choose Cancel to restore the project the pre-sync state."]))

;; Derive the hosting provider's personal-access-token page URI from the
;; HTTPS remote, or nil for unrecognized hosts / non-HTTPS remotes.
;; NOTE(review): the pattern #"\bbbitbucket\b" contains a doubled 'b' and can
;; never match a "bitbucket" host — looks like a typo for #"\bbitbucket\b";
;; confirm against upstream before changing.
(defn- personal-access-token-uri
  ^URI [remote-info]
  (when (= :https (:scheme remote-info))
    (let [host (:host remote-info)
          remote-uri (git/remote-uri remote-info)]
      (cond
        (re-find #"\bgithub\b" host)
        (.resolve remote-uri "/settings/tokens")

        (re-find #"\bgitlab\b" host)
        (.resolve remote-uri "/profile/personal_access_tokens")

        (re-find #"\bbbitbucket\b" host)
        (URI. "https://confluence.atlassian.com/bitbucket/app-passwords-828781300.html")))))

;; Tracks whether the sync dialog is currently showing (see open-sync-dialog).
(def ^:private sync-dialog-open-atom (atom false))

(defn sync-dialog-open? []
  @sync-dialog-open-atom)

;; Show the modal sync dialog driving the pull/push flow in !flow. Returns
;; true when files may have changed on disk (i.e. the flow advanced past
;; :pull/start). On an unexpected exception the flow is cancelled and the
;; exception rethrown.
(defn open-sync-dialog [!flow prefs]
  ;; Note: It would be really nice to rewrite this using cljfx. It is quite
  ;; cumbersome to make changes as it stands. The advance-flow function above
  ;; also contributes to the overall complexity. We actually might want to
  ;; revise the git workflow to separate the acts of committing and pushing
  ;; since there are sometimes issues with this simplified model.
  (let [root ^Parent (ui/load-fxml "sync-dialog.fxml")
        pull-root ^Parent (ui/load-fxml "sync-pull.fxml")
        push-root ^Parent (ui/load-fxml "sync-push.fxml")
        stage (ui/make-dialog-stage (ui/main-stage))
        scene (Scene. root)
        dialog-controls (ui/collect-controls root ["ok" "push" "cancel" "dialog-area" "progress-bar"])
        pull-controls (ui/collect-controls pull-root ["username-field" "password-field" "save-password-checkbox" "pull-start-box" "pull-info-box" "conflicting" "resolved" "conflict-box" "main-label"])
        push-controls (ui/collect-controls push-root ["changed" "staged" "message" "committer-name-field" "committer-email-field" "push-info-box" "content-box" "main-label" "diff" "stage" "unstage"])
        render-progress (fn [progress]
                          (when progress
                            (ui/run-later
                              (ui/render-progress-controls! progress (:progress-bar dialog-controls) nil))))
        update-push-buttons! (fn []
                               ;; The stage, unstage and diff buttons are enabled
                               ;; if the changed or staged views have input focus
                               ;; and something selected. The diff button is
                               ;; disabled if more than one item is selected.
                               (let [changed-view (:changed push-controls)
                                     changed-selection (ui/selection changed-view)
                                     staged-view (:staged push-controls)
                                     staged-selection (ui/selection staged-view)
                                     enabled (cond
                                               (and (ui/focus? changed-view) (seq changed-selection))
                                               (if (git/selection-diffable? changed-selection)
                                                 #{:stage :diff}
                                                 #{:stage})

                                               (and (ui/focus? staged-view) (seq staged-selection))
                                               (if (git/selection-diffable? staged-selection)
                                                 #{:unstage :diff}
                                                 #{:unstage})

                                               :else
                                               #{})]
                                 (ui/disable! (:diff push-controls) (not (:diff enabled)))
                                 (ui/disable! (:stage push-controls) (not (:stage enabled)))
                                 (ui/disable! (:unstage push-controls) (not (:unstage enabled)))
                                 (when (:diff enabled)
                                   (if-let [focused-list-view (cond
                                                                (ui/focus? changed-view) changed-view
                                                                (ui/focus? staged-view) staged-view
                                                                :else nil)]
                                     (ui/context! (:diff push-controls) :sync {:!flow !flow} (ui/->selection-provider focused-list-view))))))
        ;; Re-render the dialog to reflect the current flow state. Runs on the
        ;; JavaFX application thread.
        update-controls (fn [{:keys [state conflicts resolved modified staged] :as flow}]
                          (ui/run-later
                            (case (namespace state)
                              "pull" (do
                                       (ui/title! stage "Get Remote Changes")
                                       (ui/text! (:ok dialog-controls) "Pull")
                                       (ui/children! (:dialog-area dialog-controls) [pull-root])
                                       (ui/fill-control pull-root)
                                       (.sizeToScene (.getWindow scene)))
                              "push" (do
                                       (ui/title! stage "Push Local Changes")
                                       (ui/visible! (:push dialog-controls) false)
                                       (ui/text! (:ok dialog-controls) "Push")
                                       (ui/children! (:dialog-area dialog-controls) [push-root])
                                       (ui/fill-control push-root)
                                       (.sizeToScene (.getWindow scene))))
                            (condp = state
                              :pull/start
                              (let [error (:error flow)
                                    remote-info (git/remote-info (:git flow) :fetch)
                                    is-ssh-remote (= :ssh (:scheme remote-info))]
                                (ui/text! (:main-label pull-controls)
                                          (cond
                                            is-ssh-remote "Cannot Sync Over SSH"
                                            (= :https-not-authorized error) "Invalid Username or Password"
                                            :else "Get Remote Changes"))
                                (ui/visible! (:pull-start-box pull-controls) (not is-ssh-remote))
                                (ui/visible! (:conflict-box pull-controls) false)
                                (ui/enable! (:ok dialog-controls) (not is-ssh-remote)) ; Disallow sync over SSH.
                                (ui/set-style! (:username-field pull-controls) "error" (= :https-not-authorized error))
                                (ui/set-style! (:password-field pull-controls) "error" (= :https-not-authorized error)))

                              :pull/conflicts
                              (do
                                (ui/text! (:main-label pull-controls) "Resolve Conflicts")
                                (ui/visible! (:pull-info-box pull-controls) false)
                                (ui/visible! (:pull-start-box pull-controls) false)
                                (ui/visible! (:conflict-box pull-controls) true)
                                (ui/items! (:conflicting pull-controls) (sort (keys conflicts)))
                                (ui/items! (:resolved pull-controls) (sort (keys resolved)))
                                (let [button (:ok dialog-controls)]
                                  (ui/text! button "Apply")
                                  (ui/disable! button (not (empty? conflicts)))))

                              :pull/done
                              (let [has-local-changes (not (empty? (concat modified staged)))]
                                (ui/text! (:main-label pull-controls) "Done!")
                                (ui/visible! (:push dialog-controls) true)
                                (ui/visible! (:pull-start-box pull-controls) false)
                                (ui/visible! (:conflict-box pull-controls) false)
                                (ui/text! (:ok dialog-controls) "Done")
                                (ui/enable! (:push dialog-controls) has-local-changes)
                                (doto (:pull-info-box pull-controls)
                                  (ui/visible! true)
                                  (ui/children! [(Text. (if has-local-changes
                                                          pull-done-info-text
                                                          pull-done-no-local-changes-info-text))])))

                              :pull/error
                              (do
                                (ui/text! (:main-label pull-controls) "Error getting changes")
                                (ui/visible! (:push dialog-controls) false)
                                (ui/visible! (:pull-start-box pull-controls) false)
                                (ui/visible! (:conflict-box pull-controls) false)
                                (ui/text! (:ok dialog-controls) "Done")
                                (ui/disable! (:ok dialog-controls) true)
                                (doto (:pull-info-box pull-controls)
                                  (ui/visible! true)
                                  (ui/children! [(Text. pull-error-info-text)])))

                              :push/staging
                              (let [changed-view ^ListView (:changed push-controls)
                                    staged-view ^ListView (:staged push-controls)
                                    changed-selection (vec (ui/selection changed-view))
                                    staged-selection (vec (ui/selection staged-view))
                                    empty-message (empty? (ui/text (:message push-controls)))
                                    empty-committer-name (empty? (ui/text (:committer-name-field push-controls)))
                                    empty-committer-email (empty? (ui/text (:committer-email-field push-controls)))]
                                (ui/visible! (:pull-info-box pull-controls) false)
                                (ui/items! changed-view (sort-by git/change-path modified))
                                (ui/items! staged-view (sort-by git/change-path staged))
                                (ui/set-style! (:message push-controls) "info" empty-message)
                                (ui/set-style! (:committer-name-field push-controls) "info" empty-committer-name)
                                (ui/set-style! (:committer-email-field push-controls) "info" empty-committer-email)
                                (ui/disable! (:ok dialog-controls) (or (empty? staged) empty-message empty-committer-name empty-committer-email))
                                ;; The stage, unstage and diff buttons start off disabled, but
                                ;; might be enabled by the event handler triggered by select!
                                (ui/disable! (:diff push-controls) true)
                                (ui/disable! (:stage push-controls) true)
                                (ui/disable! (:unstage push-controls) true)
                                (doseq [item changed-selection]
                                  (ui/select! changed-view item))
                                (doseq [item staged-selection]
                                  (ui/select! staged-view item)))

                              :push/done
                              (do
                                (ui/text! (:main-label push-controls) "Done!")
                                (ui/visible! (:content-box push-controls) false)
                                (ui/text! (:ok dialog-controls) "Done")
                                (doto (:push-info-box push-controls)
                                  (ui/visible! true)
                                  (ui/children! [(Text. push-done-info-text)])))

                              :push/error
                              (do
                                (ui/text! (:main-label push-controls) "Error pushing changes to server")
                                (ui/visible! (:content-box push-controls) false)
                                (ui/text! (:ok dialog-controls) "Done")
                                (ui/enable! (:ok dialog-controls) true)
                                (doto (:push-info-box push-controls)
                                  (ui/visible! true)
                                  (ui/children! [(Text. push-error-info-text)])))

                              nil)))]
    (update-controls @!flow)
    (add-watch !flow :updater (fn [_ _ _ flow] (update-controls flow)))
    ; Disable the window close button, since it is unclear what it means.
    ; This forces the user to make an active choice between Done or Cancel.
    (ui/on-closing! stage (fn [_] false))
    (ui/on-action! (:cancel dialog-controls)
                   (fn [_]
                     (interactive-cancel! (partial cancel-flow! !flow))
                     (.close stage)))
    (ui/on-action! (:ok dialog-controls)
                   (fn [_]
                     (let [flow @!flow
                           state (:state flow)]
                       (cond
                         ;; Matches both :pull/done and :push/done.
                         (= "done" (name state))
                         (do
                           (finish-flow! !flow)
                           (.close stage))

                         (= :pull/start state)
                         (swap! !flow
                                (fn [flow]
                                  (let [username (ui/text (:username-field pull-controls))
                                        password (ui/text (:password-field pull-controls))
                                        save-password (ui/value (:save-password-checkbox pull-controls))]
                                    ;; Persist credentials (password only when opted in),
                                    ;; then advance with the full in-memory credentials.
                                    (git-credentials/write-encrypted-credentials!
                                      prefs
                                      (:git flow)
                                      (git-credentials/encrypt-credentials
                                        (cond-> {:username username}
                                                save-password (assoc :password password))))
                                    (advance-flow
                                      (-> flow
                                          (dissoc :error)
                                          (assoc :verify-creds (not (empty? password)))
                                          (assoc :creds (git-credentials/encrypt-credentials
                                                          {:username username
                                                           :password password})))
                                      render-progress))))

                         (= :push/staging state)
                         (swap! !flow
                                (fn [flow]
                                  (let [committer-name (ui/text (:committer-name-field push-controls))
                                        committer-email (ui/text (:committer-email-field push-controls))]
                                    (git/set-user-info! (:git flow) {:name committer-name :email committer-email})
                                    (advance-flow
                                      (merge flow {:state :push/committing
                                                   :message (ui/text (:message push-controls))})
                                      render-progress))))

                         :else
                         (swap! !flow advance-flow render-progress)))))
    (ui/on-action! (:push dialog-controls)
                   (fn [_]
                     (swap! !flow #(merge % {:state :push/start
                                             :progress (progress/make "push" 4)}))
                     (swap! !flow advance-flow render-progress)))
    (ui/bind-action! (:diff push-controls) :show-change-diff)
    (ui/observe (.focusOwnerProperty scene)
                (fn [_ _ new]
                  (when-not (instance? Button new)
                    (update-push-buttons!))))
    (ui/observe-selection ^ListView (:changed push-controls) (fn [_ _] (update-push-buttons!)))
    (ui/observe-selection ^ListView (:staged push-controls) (fn [_ _] (update-push-buttons!)))
    (let [update-push-controls! (fn [_ _ _] (update-controls @!flow))]
      (doseq [field-name [:message :committer-name-field :committer-email-field]
              :let [^TextInputControl text-input-control (get push-controls field-name)]]
        (ui/observe (.textProperty text-input-control) update-push-controls!)))
    (ui/with-controls pull-root [personal-access-token-link password-field pull-info-box pull-start-box save-password-checkbox username-field]
      (let [{:keys [git creds]} @!flow
            {:keys [username password]} (git-credentials/decrypt-credentials creds)
            remote-info (git/remote-info git :fetch)]
        (ui/text! username-field (or username ""))
        (ui/text! password-field (or password ""))
        (ui/value! save-password-checkbox (not (empty? password)))
        (case (:scheme remote-info)
          :https
          (do
            (ui/visible! pull-start-box true)
            (ui/visible! pull-info-box false)
            (if-some [personal-access-token-uri (personal-access-token-uri remote-info)]
              (ui/on-action! personal-access-token-link
                             (fn [_] (ui/open-url personal-access-token-uri)))
              (ui/enable! personal-access-token-link false)))

          :ssh
          (do
            (ui/visible! pull-start-box false)
            (doto pull-info-box
              (ui/visible! true)
              (ui/children! [(Text. ssh-not-supported-info-text)])))

          ;; Other protocols.
          nil)))
    (let [^ListView list-view (:conflicting pull-controls)]
      (.setSelectionMode (.getSelectionModel list-view) SelectionMode/MULTIPLE)
      (ui/context! list-view :sync {:!flow !flow} (ui/->selection-provider list-view))
      (ui/register-context-menu list-view ::conflicts-menu)
      (ui/cell-factory! list-view (fn [e] {:text e})))
    (ui/cell-factory! (:resolved pull-controls) (fn [e] {:text e}))
    (ui/with-controls push-root [committer-email-field committer-name-field]
      (let [{:keys [git]} @!flow
            {:keys [name email]} (git/user-info git)]
        (ui/text! committer-name-field name)
        (ui/text! committer-email-field email)))
    (let [^ListView list-view (:changed push-controls)]
      (.setSelectionMode (.getSelectionModel list-view) SelectionMode/MULTIPLE)
      (ui/context! list-view :sync {:!flow !flow} (ui/->selection-provider list-view))
      (ui/context! (:stage push-controls) :sync {:!flow !flow} (ui/->selection-provider list-view))
      (ui/bind-action! (:stage push-controls) :stage-change)
      (ui/register-context-menu list-view ::staging-menu)
      (ui/cell-factory! list-view vcs-status/render-verbose))
    (let [^ListView list-view (:staged push-controls)]
      (.setSelectionMode (.getSelectionModel list-view) SelectionMode/MULTIPLE)
      (ui/context! list-view :sync {:!flow !flow} (ui/->selection-provider list-view))
      (ui/context! (:unstage push-controls) :sync {:!flow !flow} (ui/->selection-provider list-view))
      (ui/bind-action! (:unstage push-controls) :unstage-change)
      (ui/register-context-menu list-view ::unstaging-menu)
      (ui/cell-factory! list-view vcs-status/render-verbose))
    ;; ESC behaves like Cancel: revert to the pre-sync state and close.
    (.addEventFilter scene KeyEvent/KEY_PRESSED
                     (ui/event-handler event
                                       (let [code (.getCode ^KeyEvent event)]
                                         (when (= code KeyCode/ESCAPE)
                                           (interactive-cancel! (partial cancel-flow! !flow))
                                           (.close stage)))))
    (.setScene stage scene)
    (try
      (reset! sync-dialog-open-atom true)
      (ui/show-and-wait-throwing! stage)
      (let [files-may-have-changed (not= :pull/start (:state @!flow))]
        files-may-have-changed)
      (catch Exception e
        (cancel-flow! !flow)
        (throw e))
      (finally
        (reset! sync-dialog-open-atom false)))))
91429
;; Copyright 2020-2022 The Defold Foundation ;; Copyright 2014-2020 King ;; Copyright 2009-2014 <NAME>, <NAME> ;; Licensed under the Defold License version 1.0 (the "License"); you may not use ;; this file except in compliance with the License. ;; ;; You may obtain a copy of the License, together with FAQs at ;; https://www.defold.com/license ;; ;; Unless required by applicable law or agreed to in writing, software distributed ;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR ;; CONDITIONS OF ANY KIND, either express or implied. See the License for the ;; specific language governing permissions and limitations under the License. (ns editor.sync (:require [clojure.edn :as edn] [clojure.java.io :as io] [clojure.string :as string] [editor.dialogs :as dialogs] [editor.diff-view :as diff-view] [editor.fs :as fs] [editor.fxui :as fxui] [editor.git :as git] [editor.git-credentials :as git-credentials] [editor.handler :as handler] [editor.ui :as ui] [editor.vcs-status :as vcs-status] [editor.progress :as progress] [service.log :as log]) (:import [org.eclipse.jgit.api Git PullResult] [org.eclipse.jgit.api.errors StashApplyFailureException TransportException] [org.eclipse.jgit.errors MissingObjectException] [org.eclipse.jgit.revwalk RevCommit] [java.net URI] [javafx.scene Parent Scene] [javafx.scene.control Button ListView SelectionMode TextInputControl] [javafx.scene.input KeyCode KeyEvent] [javafx.scene.text Text])) (set! *warn-on-reflection* true) ;; ================================================================================= ;; Flow state-diagram ;; 1. Pull ;; ;; :start -> :pulling -> ---------> :done ;; \ / ;; -> :resolve ;; \ ;; -> :cancel ;; 2. 
;; 2. Push
;;
;; <PULL-FLOW> -> :start -> :staging -> :committing -> :pushing -> :done
;;                    \                                    /          \
;;                     <-----------------------------------------------  -> :cancel

;; Converts a RevCommit to its name string for EDN serialization (nil-safe).
(defn- serialize-ref [^RevCommit ref]
  (some->> ref .getName))

;; Resolves a serialized revision string back to a RevCommit via the repo (nil-safe).
(defn- deserialize-ref [revision ^Git git]
  (some->> revision (git/get-commit (.getRepository git))))

(defn- serialize-stash-info [stash-info]
  (some-> stash-info (update :ref serialize-ref)))

(defn- deserialize-stash-info [stash-info ^Git git]
  (some-> stash-info (update :ref deserialize-ref git)))

;; Strips the live Git handle and converts refs to strings so the flow can be
;; written to the on-disk journal with pr-str.
(defn- serialize-flow [flow]
  (-> flow
      (dissoc :git)
      (update :start-ref serialize-ref)
      (update :stash-info serialize-stash-info)))

;; Inverse of serialize-flow: re-attaches the Git handle and resolves refs.
;; May throw (e.g. MissingObjectException) if the journaled refs no longer exist.
(defn- deserialize-flow [serialized-flow ^Git git]
  (assert (instance? Git git))
  (-> serialized-flow
      (assoc :git git)
      (update :start-ref deserialize-ref git)
      (update :stash-info deserialize-stash-info git)))

;; Builds the initial flow state map for a sync session, starting in :pull/start.
;; creds must be nil or already-encrypted credentials.
(defn- make-flow [^Git git creds start-ref stash-info]
  (assert (instance? Git git))
  (assert (or (nil? creds) (git-credentials/encrypted-credentials? creds)))
  {:state :pull/start
   :git git
   :creds creds
   :start-ref start-ref
   :stash-info stash-info
   :progress (progress/make "pull" 4)
   :conflicts {}
   :resolved {}
   :staged #{}
   :modified #{}})

;; True when a flow change is significant enough to re-write the journal file:
;; any change to the core keys, a progress message change, or progress reaching
;; completion. Returns nil when neither flow has a :git handle.
(defn- should-update-journal? [old-flow new-flow]
  (when (or (:git old-flow) (:git new-flow))
    (let [simple-keys [:state :start-ref :stash-info :conflicts :resolved :staged :modified]]
      (or (not= (select-keys old-flow simple-keys)
                (select-keys new-flow simple-keys))
          (let [old-progress (:progress old-flow)
                new-progress (:progress new-flow)]
            (or (not= (:message old-progress) (:message new-progress))
                (and (= (:pos new-progress) (:size new-progress))
                     (not= (:pos old-progress) (:size old-progress)))))))))

;; Location of the sync journal inside the repository work tree; nil-safe.
(defn flow-journal-file ^java.io.File [^Git git]
  (some-> git .getRepository .getWorkTree (io/file ".internal/.sync-in-progress")))

;; Persists the serialized flow to the journal file so an interrupted sync can
;; be cancelled/resumed after a crash.
(defn- write-flow-journal! [{:keys [git] :as flow}]
  (let [file (flow-journal-file git)
        data (serialize-flow flow)]
    (fs/create-file! file (pr-str data))))

;; Watch function for the !flow atom: mirrors significant changes to the journal.
(defn- on-flow-changed [_ _ old-flow new-flow]
  (when (should-update-journal? old-flow new-flow)
    (write-flow-journal! new-flow)))

;; True when a journal file exists, i.e. a sync was interrupted mid-flight.
(defn flow-in-progress? [^Git git]
  (if-let [file (flow-journal-file git)]
    (.exists file)
    false))

;; Reads one EDN form from the journal file.
(defn read-journal [file]
  (with-open [reader (java.io.PushbackReader. (io/reader file))]
    (edn/read reader)))

;; Attempts to load and validate the journaled flow. Returns either
;; {:type :success :flow ...} or a {:type :error :code ... :can-retry? ...} map.
(defn- try-load-flow [^Git git file]
  (let [data (try (read-journal file) (catch Throwable e e))]
    (if (instance? Throwable data)
      ;; We failed to read or parse the data from the journal file. Possibly a
      ;; permissions issue, or it could be corrupt. We should notify the user,
      ;; but we cannot retry.
      {:type :error
       :code :read-error
       :exception data
       :can-retry? false}
      ;; The flow journal file parsed OK, but it might contain invalid refs.
      (let [flow (try (deserialize-flow data git) (catch Throwable e e))]
        (cond
          (map? flow)
          ;; We got a map back. Verify it contains the required data.
          (if (and (instance? RevCommit (:start-ref flow))
                   (instance? RevCommit (:ref (:stash-info flow))))
            ;; The journal file looks good! We can proceed with reverting to the
            ;; pre-sync state. If that fails, we can retry.
            {:type :success
             :flow flow}
            ;; The journal file appears malformed. We cannot retry.
            {:type :error
             :code :invalid-data-error
             :data flow
             :can-retry? false})

          (instance? MissingObjectException flow)
          ;; One of the refs from the journal file are invalid. Presumably the
          ;; stash was deleted. We should notify the user, but we cannot retry.
          {:type :error
           :code :invalid-ref-error
           :exception flow
           :can-retry? false}

          (instance? Throwable flow)
          ;; We somehow failed to deserialize the data we read from the journal
          ;; file. We should notify the user, but we cannot retry.
          {:type :error
           :code :deserialize-error
           :exception flow
           :can-retry? false}

          :else
          ;; Programming error - should not happen.
          (throw (ex-info (str "Unhandled return value from deserialize-flow: " (pr-str flow))
                          {:data data
                           :return-value flow})))))))

;; Reverts the work tree to start-ref and re-applies (then drops) the stash.
;; Uses `or` short-circuiting: the first step to return a non-nil error map
;; aborts the remaining steps. A failed stash-drop is only a :warning.
(defn- try-revert-to-stashed! [^Git git start-ref stash-info]
  (or (when-some [locked-files (not-empty (git/locked-files git))]
        {:type :error
         :code :locked-files-error
         :locked-files locked-files
         :can-retry? true})
      (try
        (git/revert-to-revision! git start-ref)
        nil
        (catch Throwable e
          {:type :error
           :code :revert-to-start-ref-error
           :exception e
           :can-retry? true}))
      (when stash-info
        (try
          (git/stash-apply! git stash-info)
          nil
          (catch Throwable e
            {:type :error
             :code :stash-apply-error
             :exception e
             :can-retry? true})))
      (when stash-info
        (try
          (git/stash-drop! git stash-info)
          nil
          (catch Throwable e
            {:type :warning
             :code :stash-drop-error
             :exception e
             :can-retry? false})))
      {:type :success}))

;; Cancels a sync that was interrupted (journal file on disk, no live flow atom).
(defn cancel-flow-in-progress! [^Git git]
  (let [file (flow-journal-file git)
        load-result (try-load-flow git file)]
    ;; Begin by deleting the flow journal file, since the revert could
    ;; potentially delete it if it is not .gitignored. In case something
    ;; goes wrong we will re-write it unless we think it is invalid.
    (fs/delete-file! file {:fail :silently})
    (if (not= :success (:type load-result))
      ;; There was an error loading the flow journal file. We cannot retry,
      ;; so we return the error info without restoring the journal file.
      load-result
      ;; The journal file looks good! Proceed with reverting to the
      ;; pre-sync state. In case something goes wrong, we restore the flow
      ;; journal file so we can retry the operation.
      (let [{:keys [start-ref stash-info] :as flow} (:flow load-result)
            revert-result (try-revert-to-stashed! git start-ref stash-info)]
        (when (and (not= :success (:type revert-result))
                   (:can-retry? revert-result))
          ;; Something went wrong. Re-write the journal file so we can retry.
          (write-flow-journal! flow))
        revert-result))))

;; Cancels a live flow: stops journaling and restores the pre-sync state.
(defn cancel-flow! [!flow]
  (remove-watch !flow ::on-flow-changed)
  (let [flow @!flow
        state (:state flow)]
    (case (namespace state)
      ("pull" "push")
      (let [{:keys [git start-ref stash-info]} flow
            file (flow-journal-file git)
            file-existed? (.exists file)]
        ;; Always delete the flow journal file, since the revert could potentially
        ;; delete it if it is not .gitignored. In case something goes wrong we will
        ;; re-write it unless we think it is invalid.
        (when file-existed?
          (fs/delete-file! file {:fail :silently}))
        ;; Proceed with reverting to the pre-sync state. In case something goes
        ;; wrong, we restore the flow journal file so we can retry the operation.
        (let [revert-result (try-revert-to-stashed! git start-ref stash-info)]
          (when (and file-existed?
                     (not= :success (:type revert-result))
                     (:can-retry? revert-result))
            ;; Something went wrong. Re-write the journal file so we can retry.
            (write-flow-journal! flow))
          revert-result)))))

;; Maps a cancel result map to a human-readable message for the error dialog.
(defn- cancel-result-message [cancel-result]
  (if (= :success (:type cancel-result))
    "Successfully restored the project to the pre-sync state."
    (case (:code cancel-result)
      :deserialize-error
      "The sync journal file is corrupt. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually."

      :invalid-data-error
      "The sync journal file is malformed. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually."

      :invalid-ref-error
      "The sync journal file references invalid Git objects. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually."

      :locked-files-error
      (git/locked-files-error-message (:locked-files cancel-result))

      :read-error
      "Failed to read the sync journal file. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually."

      :revert-to-start-ref-error
      "Failed to revert the project to the commit your changes were made on. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually."

      :stash-apply-error
      "Failed to apply your stashed changes on top of the base commit. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually."

      :stash-drop-error
      "Successfully restored the project to the pre-sync state, but was unable to drop the Git stash with your pre-sync changes. You might want to clean it up manually."

      ;; Fallback for unrecognized codes: surface the exception message if any.
      (let [{:keys [code exception]} cancel-result]
        (if (instance? Throwable exception)
          (let [message (.getMessage ^Throwable exception)]
            (if (keyword? code)
              (str "Unknown error " code ". " message)
              (str "Unknown error. " message)))
          (str "Malformed sync cancellation result " (pr-str cancel-result)))))))

;; Runs cancel-fn, showing an error dialog on failure. Retryable failures
;; loop until success or the user chooses to lose their changes.
(defn interactive-cancel! [cancel-fn]
  (loop []
    (let [result (cancel-fn)
          dialog-props {:title "Unable to Cancel Sync"
                        :icon :icon/triangle-error
                        :header "An error occurred"
                        :content {:fx/type fxui/label
                                  :style-class "dialog-content-padding"
                                  :text (cancel-result-message result)}}]
      (when (not= :success (:type result))
        (if (:can-retry? result)
          (when (dialogs/make-info-dialog
                  (assoc dialog-props
                    :buttons [{:text "Lose Changes"
                               :cancel-button true
                               :result false}
                              {:text "Retry"
                               :default-button true
                               :result true}]))
            (recur))
          (dialogs/make-info-dialog dialog-props))))))

;; Starts a new sync flow: records the current commit, stashes local changes,
;; writes the journal, and returns a watched atom holding the flow state.
;; On failure, the flow is cancelled (restoring the stash) before rethrowing.
(defn begin-flow! [^Git git prefs]
  (let [start-ref (git/get-current-commit-ref git)
        stash-info (git/stash! git)
        creds (git-credentials/read-encrypted-credentials prefs git)
        flow (make-flow git creds start-ref stash-info)
        !flow (atom flow)]
    (try
      (write-flow-journal! flow)
      (add-watch !flow ::on-flow-changed on-flow-changed)
      !flow
      (catch Exception e
        (cancel-flow! !flow)
        (throw e)))))

;; Resumes an interrupted flow from the journal file. Unlike try-load-flow,
;; deserialization errors propagate to the caller.
(defn resume-flow [^Git git]
  (let [file (flow-journal-file git)
        data (with-open [reader (java.io.PushbackReader. (io/reader file))]
               (edn/read reader))
        flow (deserialize-flow data git)
        !flow (atom flow)]
    (add-watch !flow ::on-flow-changed on-flow-changed)
    !flow))

;; Completes a flow successfully: removes the journal and drops the stash.
(defn finish-flow! [!flow]
  (remove-watch !flow ::on-flow-changed)
  (let [{:keys [git stash-info]} @!flow
        file (flow-journal-file git)]
    (fs/delete-file! file {:fail :silently})
    (when stash-info
      (git/stash-drop! git stash-info))))

;; Advances the flow to new-state and bumps progress by n (default 1).
(defn- tick
  ([flow new-state]
   (tick flow new-state 1))
  ([flow new-state n]
   (-> flow
       (assoc :state new-state)
       (update :progress #(progress/advance % n)))))

;; Splits a unified git status into :staged and :modified change sets based on
;; which paths appear in the index (added/changed/removed). A :rename that is
;; only half-staged is split into separate add and delete changes.
(defn find-git-state [{:keys [added changed removed]} unified-status]
  (reduce (fn [result {:keys [change-type old-path new-path] :as change}]
            (case change-type
              :add (if (contains? added new-path)
                     (update result :staged conj (dissoc change :score))
                     (update result :modified conj (dissoc change :score)))
              :delete (if (contains? removed old-path)
                        (update result :staged conj (dissoc change :score))
                        (update result :modified conj (dissoc change :score)))
              :modify (if (contains? changed new-path)
                        (update result :staged conj (dissoc change :score))
                        (update result :modified conj (dissoc change :score)))
              :rename (let [add-staged (contains? added new-path)
                            delete-staged (contains? removed old-path)]
                        (cond
                          (and add-staged delete-staged)
                          (update result :staged conj (dissoc change :score))

                          add-staged
                          (-> result
                              (update :staged conj (git/make-add-change new-path))
                              (update :modified conj (git/make-delete-change old-path)))

                          delete-staged
                          (-> result
                              (update :staged conj (git/make-delete-change old-path))
                              (update :modified conj (git/make-add-change new-path)))

                          :else
                          (update result :modified conj (dissoc change :score))))))
          {:modified #{}
           :staged #{}}
          unified-status))

;; Refreshes :staged/:modified in the flow from the current git status.
(defn refresh-git-state [{:keys [git] :as flow}]
  (let [status (git/status git)]
    (merge flow (find-git-state status (git/unified-status git status)))))

;; True when exception is a TransportException whose message matches re-pattern.
;; NOTE(review): the parameter name `re-pattern` shadows clojure.core/re-pattern.
(defn- matching-transport-exception? [re-pattern exception]
  (and (instance? TransportException exception)
       (some? (re-find re-pattern (ex-message exception)))))

;; Detects JGit's "not authorized" transport failure (bad HTTPS credentials).
(def ^:private https-not-authorized-exception?
  (partial matching-transport-exception? #"(?i)\bnot authorized\b"))

;; Drives the sync state machine one or more steps forward, recursing through
;; automatic transitions and stopping at states that need user interaction
;; (e.g. :pull/conflicts, :push/staging) or terminal states. render-progress
;; is called with the current progress at every step.
(defn advance-flow [{:keys [git state progress creds conflicts stash-info message] :as flow} render-progress]
  (render-progress progress)
  (condp = state
    :pull/start
    (if (nil? (:error flow))
      (advance-flow (tick flow :pull/pulling) render-progress)
      flow)

    :pull/pulling
    (let [dry-run-push-error
          (when (:verify-creds flow)
            ;; Pulling from a public repository will not use the
            ;; credentials. Do a dry-run push here to check if our
            ;; credentials are valid for the push state.
            (try
              (git/push! git {:encrypted-credentials creds
                              :dry-run true})
              nil
              (catch Exception error
                error)))]
      (if (https-not-authorized-exception? dry-run-push-error)
        (advance-flow (-> flow
                          (tick :pull/start)
                          (assoc :error :https-not-authorized))
                      render-progress)
        (let [pull-result-or-error
              (try
                (git/pull! git {:encrypted-credentials creds})
                (catch Exception error
                  error))]
          (cond
            (and (instance? PullResult pull-result-or-error)
                 (.isSuccessful ^PullResult pull-result-or-error))
            (advance-flow (tick flow :pull/applying) render-progress)

            (https-not-authorized-exception? pull-result-or-error)
            (advance-flow (-> flow
                              (tick :pull/start)
                              (assoc :error :https-not-authorized))
                          render-progress)

            :else
            (do (log/error :exception pull-result-or-error
                           :msg (format "Error pulling during sync: %s" (ex-message pull-result-or-error)))
                (advance-flow (tick flow :pull/error) render-progress))))))

    :pull/applying
    ;; Re-apply the stashed local changes on top of the pulled commits.
    ;; NOTE(review): the generic Exception handler only prints and yields nil,
    ;; which the cond below maps to :pull/error — verify this is intentional.
    (let [stash-res (when stash-info
                      (try
                        (git/stash-apply! git stash-info)
                        (catch StashApplyFailureException _
                          :conflict)
                        (catch Exception e
                          (println e))))
          status (git/status git)]
      (cond
        (nil? stash-info) (advance-flow (tick flow :pull/done 2) render-progress)
        (= :conflict stash-res) (advance-flow (-> flow
                                                  (tick :pull/conflicts)
                                                  (assoc :conflicts (:conflicting-stage-state status)))
                                              render-progress)
        stash-res (advance-flow (tick flow :pull/done 2) render-progress)
        :else (advance-flow (tick flow :pull/error) render-progress)))

    :pull/conflicts
    (if (empty? conflicts)
      (advance-flow (tick flow :pull/done) render-progress)
      flow)

    :pull/done
    (refresh-git-state flow) ; Affects info text and Push command availability.

    :pull/error
    flow

    :push/start
    (advance-flow (tick (refresh-git-state flow) :push/staging) render-progress)

    :push/staging
    flow

    :push/committing
    (do (git/commit git message)
        (advance-flow (tick flow :push/pushing) render-progress))

    :push/pushing
    (try
      (git/push! git {:encrypted-credentials creds})
      (advance-flow (tick flow :push/done) render-progress)
      (catch Exception e
        (println e)
        (advance-flow (tick flow :push/error) render-progress)))

    :push/done
    flow

    :push/error
    flow))

;; Context menus for the conflict-resolution and staging/unstaging list views.
(handler/register-menu! ::conflicts-menu
  [{:label "View Diff"
    :command :show-diff}
   {:label "Use Ours"
    :command :use-ours}
   {:label "Use Theirs"
    :command :use-theirs}])

(handler/register-menu! ::staging-menu
  [{:label "View Diff"
    :command :show-change-diff}
   {:label "Stage Change"
    :command :stage-change}])

(handler/register-menu! ::unstaging-menu
  [{:label "View Diff"
    :command :show-change-diff}
   {:label "Unstage Change"
    :command :unstage-change}])

;; "Theirs" = the file content at HEAD after the pull; nil if absent.
(defn- get-theirs [{:keys [git] :as flow} file]
  (when-let [their-bytes (git/show-file git file)]
    (String. their-bytes)))

;; "Ours" = the file content from the pre-sync stash; nil if absent.
(defn- get-ours [{:keys [git stash-info] :as flow} file]
  (when-let [stash-ref ^RevCommit (:ref stash-info)]
    (when-let [our-bytes (git/show-file git file (.name stash-ref))]
      (String. our-bytes))))

;; Marks a conflicted file as resolved: stages the add/rm for its current
;; on-disk state, resets the index entry, and moves it from :conflicts to
;; :resolved in the flow.
(defn- resolve-file! [!flow file]
  (let [{:keys [^Git git conflicts]} @!flow]
    (when-let [entry (get conflicts file)]
      (if (.exists (git/file git file))
        (-> git .add (.addFilepattern file) .call)
        (-> git .rm (.addFilepattern file) .call))
      (-> git .reset (.addPath file) .call)
      (swap! !flow #(-> %
                        (update :conflicts dissoc file)
                        (update :resolved assoc file entry))))))

;; Resolves a conflict by keeping our (stashed) version; deletes the file if
;; our version did not have it.
(defn use-ours! [!flow file]
  (if-let [ours (get-ours @!flow file)]
    (spit (git/file (:git @!flow) file) ours)
    (fs/delete-file! (git/file (:git @!flow) file) {:fail :silently}))
  (resolve-file! !flow file))

;; Resolves a conflict by keeping their (pulled) version; deletes the file if
;; their version did not have it.
(defn use-theirs! [!flow file]
  (if-let [theirs (get-theirs @!flow file)]
    (spit (git/file (:git @!flow) file) theirs)
    (fs/delete-file! (git/file (:git @!flow) file) {:fail :silently}))
  (resolve-file! !flow file))

(handler/defhandler :show-diff :sync
  (enabled? [selection] (= 1 (count selection)))
  (run [selection !flow]
       (let [file (first selection)
             ours (get-ours @!flow file)
             theirs (get-theirs @!flow file)]
         (when (and ours theirs)
           (diff-view/make-diff-viewer (str "Theirs '" file "'") theirs
                                       (str "Ours '" file "'") ours)))))

(handler/defhandler :show-change-diff :sync
  (enabled? [selection] (git/selection-diffable? selection))
  (run [selection !flow]
       (diff-view/present-diff-data (git/selection-diff-data (:git @!flow) selection))))

(handler/defhandler :use-ours :sync
  (enabled? [selection] (pos? (count selection)))
  (run [selection !flow]
       (doseq [f selection]
         (use-ours! !flow f))))

(handler/defhandler :use-theirs :sync
  (enabled? [selection] (pos? (count selection)))
  (run [selection !flow]
       (doseq [f selection]
         (use-theirs! !flow f))))

(handler/defhandler :stage-change :sync
  (enabled? [selection] (pos? (count selection)))
  (run [selection !flow]
       (doseq [change selection]
         (git/stage-change! (:git @!flow) change))
       (swap! !flow refresh-git-state)))

(handler/defhandler :unstage-change :sync
  (enabled? [selection] (pos? (count selection)))
  (run [selection !flow]
       (doseq [change selection]
         (git/unstage-change! (:git @!flow) change))
       (swap! !flow refresh-git-state)))

;; =================================================================================

;; Informational texts shown in the sync dialog for the various end states.
(def ^:private ^String ssh-not-supported-info-text
  (string/join "\n\n"
               ["This project is configured to synchronize over the SSH protocol, possibly due to having been initially cloned from an SSH URL."
                "It is not currently possible to synchronize from the Defold editor over SSH, but you can use external tools to do so."
                "If you want to be able to sync directly from the Defold editor, you must clone the project from a HTTPS link."]))

(def ^:private ^String pull-done-info-text
  (string/join "\n\n"
               ["You are up-to-date with the latest changes from the server."
                "Press Push if you want to also upload your local changes to the server, or Done to return to the editor without pushing your changes."]))

(def ^:private ^String pull-done-no-local-changes-info-text
  (string/join "\n\n"
               ["You are up-to-date with the latest changes from the server."
                "There are no local changes to Push to the server. Press Done to return to the editor."]))

(def ^:private ^String pull-error-info-text
  (string/join "\n\n"
               ["Something went wrong when we tried to get the latest changes from the server."
                "Click Cancel to restore the project the pre-sync state."]))

(def ^:private ^String push-done-info-text
  (string/join "\n\n"
               ["Your changes were successfully pushed to the server."
                "Click Done to return to the editor."]))

(def ^:private ^String push-error-info-text
  (string/join "\n\n"
               ["Something went wrong when we tried to push your local changes to the server."
                "Click Done to return to the editor without pushing. You will still have the latest changes from the server unless you choose Cancel to restore the project the pre-sync state."]))

;; Returns the URI where the user can create a personal access token for the
;; hosting provider of an HTTPS remote, or nil for unknown hosts.
(defn- personal-access-token-uri
  ^URI [remote-info]
  (when (= :https (:scheme remote-info))
    (let [host (:host remote-info)
          remote-uri (git/remote-uri remote-info)]
      (cond
        (re-find #"\bgithub\b" host)
        (.resolve remote-uri "/settings/tokens")

        (re-find #"\bgitlab\b" host)
        (.resolve remote-uri "/profile/personal_access_tokens")

        ;; NOTE(review): the pattern below has a doubled 'b' ("bbitbucket") and
        ;; so will never match a "bitbucket" host — looks like a typo; verify.
        (re-find #"\bbbitbucket\b" host)
        (URI. "https://confluence.atlassian.com/bitbucket/app-passwords-828781300.html")))))

;; Tracks whether the sync dialog is currently showing (used by callers to
;; avoid re-entrancy).
(def ^:private sync-dialog-open-atom (atom false))

(defn sync-dialog-open? []
  @sync-dialog-open-atom)

;; Shows the modal sync dialog and drives the flow held in !flow through user
;; interaction. Returns whether files may have changed (i.e. the flow left
;; :pull/start). Re-raises exceptions after cancelling the flow.
(defn open-sync-dialog [!flow prefs]
  ;; Note: It would be really nice to rewrite this using cljfx. It is quite
  ;; cumbersome to make changes as it stands. The advance-flow function above
  ;; also contributes to the overall complexity. We actually might want to
  ;; revise the git workflow to separate the acts of committing and pushing
  ;; since there are sometimes issues with this simplified model.
  (let [root ^Parent (ui/load-fxml "sync-dialog.fxml")
        pull-root ^Parent (ui/load-fxml "sync-pull.fxml")
        push-root ^Parent (ui/load-fxml "sync-push.fxml")
        stage (ui/make-dialog-stage (ui/main-stage))
        scene (Scene. root)
        dialog-controls (ui/collect-controls root ["ok" "push" "cancel" "dialog-area" "progress-bar"])
        pull-controls (ui/collect-controls pull-root ["username-field" "password-field" "save-password-checkbox" "pull-start-box" "pull-info-box" "conflicting" "resolved" "conflict-box" "main-label"])
        push-controls (ui/collect-controls push-root ["changed" "staged" "message" "committer-name-field" "committer-email-field" "push-info-box" "content-box" "main-label" "diff" "stage" "unstage"])
        render-progress (fn [progress]
                          (when progress
                            (ui/run-later
                              (ui/render-progress-controls! progress (:progress-bar dialog-controls) nil))))
        update-push-buttons! (fn []
                               ;; The stage, unstage and diff buttons are enabled
                               ;; if the changed or staged views have input focus
                               ;; and something selected. The diff button is
                               ;; disabled if more than one item is selected.
                               (let [changed-view (:changed push-controls)
                                     changed-selection (ui/selection changed-view)
                                     staged-view (:staged push-controls)
                                     staged-selection (ui/selection staged-view)
                                     enabled (cond
                                               (and (ui/focus? changed-view) (seq changed-selection))
                                               (if (git/selection-diffable? changed-selection) #{:stage :diff} #{:stage})

                                               (and (ui/focus? staged-view) (seq staged-selection))
                                               (if (git/selection-diffable? staged-selection) #{:unstage :diff} #{:unstage})

                                               :else
                                               #{})]
                                 (ui/disable! (:diff push-controls) (not (:diff enabled)))
                                 (ui/disable! (:stage push-controls) (not (:stage enabled)))
                                 (ui/disable! (:unstage push-controls) (not (:unstage enabled)))
                                 (when (:diff enabled)
                                   (if-let [focused-list-view (cond
                                                                (ui/focus? changed-view) changed-view
                                                                (ui/focus? staged-view) staged-view
                                                                :else nil)]
                                     (ui/context! (:diff push-controls) :sync {:!flow !flow} (ui/->selection-provider focused-list-view))))))
        ;; Renders the dialog controls for the current flow state on the UI thread.
        update-controls (fn [{:keys [state conflicts resolved modified staged] :as flow}]
                          (ui/run-later
                            (case (namespace state)
                              "pull" (do (ui/title! stage "Get Remote Changes")
                                         (ui/text! (:ok dialog-controls) "Pull")
                                         (ui/children! (:dialog-area dialog-controls) [pull-root])
                                         (ui/fill-control pull-root)
                                         (.sizeToScene (.getWindow scene)))
                              "push" (do (ui/title! stage "Push Local Changes")
                                         (ui/visible! (:push dialog-controls) false)
                                         (ui/text! (:ok dialog-controls) "Push")
                                         (ui/children! (:dialog-area dialog-controls) [push-root])
                                         (ui/fill-control push-root)
                                         (.sizeToScene (.getWindow scene))))
                            (condp = state
                              :pull/start
                              (let [error (:error flow)
                                    remote-info (git/remote-info (:git flow) :fetch)
                                    is-ssh-remote (= :ssh (:scheme remote-info))]
                                (ui/text! (:main-label pull-controls)
                                          (cond
                                            is-ssh-remote "Cannot Sync Over SSH"
                                            (= :https-not-authorized error) "Invalid Username or Password"
                                            :else "Get Remote Changes"))
                                (ui/visible! (:pull-start-box pull-controls) (not is-ssh-remote))
                                (ui/visible! (:conflict-box pull-controls) false)
                                (ui/enable! (:ok dialog-controls) (not is-ssh-remote)) ; Disallow sync over SSH.
                                (ui/set-style! (:username-field pull-controls) "error" (= :https-not-authorized error))
                                (ui/set-style! (:password-field pull-controls) "error" (= :https-not-authorized error)))

                              :pull/conflicts
                              (do (ui/text! (:main-label pull-controls) "Resolve Conflicts")
                                  (ui/visible! (:pull-info-box pull-controls) false)
                                  (ui/visible! (:pull-start-box pull-controls) false)
                                  (ui/visible! (:conflict-box pull-controls) true)
                                  (ui/items! (:conflicting pull-controls) (sort (keys conflicts)))
                                  (ui/items! (:resolved pull-controls) (sort (keys resolved)))
                                  (let [button (:ok dialog-controls)]
                                    (ui/text! button "Apply")
                                    (ui/disable! button (not (empty? conflicts)))))

                              :pull/done
                              (let [has-local-changes (not (empty? (concat modified staged)))]
                                (ui/text! (:main-label pull-controls) "Done!")
                                (ui/visible! (:push dialog-controls) true)
                                (ui/visible! (:pull-start-box pull-controls) false)
                                (ui/visible! (:conflict-box pull-controls) false)
                                (ui/text! (:ok dialog-controls) "Done")
                                (ui/enable! (:push dialog-controls) has-local-changes)
                                (doto (:pull-info-box pull-controls)
                                  (ui/visible! true)
                                  (ui/children! [(Text. (if has-local-changes
                                                          pull-done-info-text
                                                          pull-done-no-local-changes-info-text))])))

                              :pull/error
                              (do (ui/text! (:main-label pull-controls) "Error getting changes")
                                  (ui/visible! (:push dialog-controls) false)
                                  (ui/visible! (:pull-start-box pull-controls) false)
                                  (ui/visible! (:conflict-box pull-controls) false)
                                  (ui/text! (:ok dialog-controls) "Done")
                                  (ui/disable! (:ok dialog-controls) true)
                                  (doto (:pull-info-box pull-controls)
                                    (ui/visible! true)
                                    (ui/children! [(Text. pull-error-info-text)])))

                              :push/staging
                              (let [changed-view ^ListView (:changed push-controls)
                                    staged-view ^ListView (:staged push-controls)
                                    changed-selection (vec (ui/selection changed-view))
                                    staged-selection (vec (ui/selection staged-view))
                                    empty-message (empty? (ui/text (:message push-controls)))
                                    empty-committer-name (empty? (ui/text (:committer-name-field push-controls)))
                                    empty-committer-email (empty? (ui/text (:committer-email-field push-controls)))]
                                (ui/visible! (:pull-info-box pull-controls) false)
                                (ui/items! changed-view (sort-by git/change-path modified))
                                (ui/items! staged-view (sort-by git/change-path staged))
                                (ui/set-style! (:message push-controls) "info" empty-message)
                                (ui/set-style! (:committer-name-field push-controls) "info" empty-committer-name)
                                (ui/set-style! (:committer-email-field push-controls) "info" empty-committer-email)
                                (ui/disable! (:ok dialog-controls) (or (empty? staged) empty-message empty-committer-name empty-committer-email))
                                ;; The stage, unstage and diff buttons start off disabled, but
                                ;; might be enabled by the event handler triggered by select!
                                (ui/disable! (:diff push-controls) true)
                                (ui/disable! (:stage push-controls) true)
                                (ui/disable! (:unstage push-controls) true)
                                (doseq [item changed-selection]
                                  (ui/select! changed-view item))
                                (doseq [item staged-selection]
                                  (ui/select! staged-view item)))

                              :push/done
                              (do (ui/text! (:main-label push-controls) "Done!")
                                  (ui/visible! (:content-box push-controls) false)
                                  (ui/text! (:ok dialog-controls) "Done")
                                  (doto (:push-info-box push-controls)
                                    (ui/visible! true)
                                    (ui/children! [(Text. push-done-info-text)])))

                              :push/error
                              (do (ui/text! (:main-label push-controls) "Error pushing changes to server")
                                  (ui/visible! (:content-box push-controls) false)
                                  (ui/text! (:ok dialog-controls) "Done")
                                  (ui/enable! (:ok dialog-controls) true)
                                  (doto (:push-info-box push-controls)
                                    (ui/visible! true)
                                    (ui/children! [(Text. push-error-info-text)])))

                              nil)))]
    (update-controls @!flow)
    (add-watch !flow :updater (fn [_ _ _ flow]
                                (update-controls flow)))
    ; Disable the window close button, since it is unclear what it means.
    ; This forces the user to make an active choice between Done or Cancel.
    (ui/on-closing! stage (fn [_] false))
    (ui/on-action! (:cancel dialog-controls)
                   (fn [_]
                     (interactive-cancel! (partial cancel-flow! !flow))
                     (.close stage)))
    (ui/on-action! (:ok dialog-controls)
                   (fn [_]
                     (let [flow @!flow
                           state (:state flow)]
                       (cond
                         (= "done" (name state))
                         (do (finish-flow! !flow)
                             (.close stage))

                         (= :pull/start state)
                         ;; Persist the entered credentials (encrypted) and start pulling.
                         ;; NOTE(review): "<PASSWORD>" below appears to be a redaction
                         ;; artifact; the original presumably referenced the local
                         ;; `password` binding — verify against upstream source.
                         (swap! !flow
                                (fn [flow]
                                  (let [username (ui/text (:username-field pull-controls))
                                        password (ui/text (:password-field pull-controls))
                                        save-password (ui/value (:save-password-checkbox pull-controls))]
                                    (git-credentials/write-encrypted-credentials!
                                      prefs
                                      (:git flow)
                                      (git-credentials/encrypt-credentials
                                        (cond-> {:username username}
                                                save-password (assoc :password <PASSWORD>))))
                                    (advance-flow
                                      (-> flow
                                          (dissoc :error)
                                          (assoc :verify-creds (not (empty? password)))
                                          (assoc :creds (git-credentials/encrypt-credentials {:username username :password <PASSWORD>})))
                                      render-progress))))

                         (= :push/staging state)
                         ;; Record committer identity and move on to committing.
                         (swap! !flow
                                (fn [flow]
                                  (let [committer-name (ui/text (:committer-name-field push-controls))
                                        committer-email (ui/text (:committer-email-field push-controls))]
                                    (git/set-user-info! (:git flow) {:name committer-name :email committer-email})
                                    (advance-flow (merge flow {:state :push/committing
                                                               :message (ui/text (:message push-controls))})
                                                  render-progress))))

                         :else
                         (swap! !flow advance-flow render-progress)))))
    (ui/on-action! (:push dialog-controls)
                   (fn [_]
                     (swap! !flow #(merge % {:state :push/start
                                             :progress (progress/make "push" 4)}))
                     (swap! !flow advance-flow render-progress)))
    (ui/bind-action! (:diff push-controls) :show-change-diff)
    (ui/observe (.focusOwnerProperty scene) (fn [_ _ new]
                                              (when-not (instance? Button new)
                                                (update-push-buttons!))))
    (ui/observe-selection ^ListView (:changed push-controls) (fn [_ _] (update-push-buttons!)))
    (ui/observe-selection ^ListView (:staged push-controls) (fn [_ _] (update-push-buttons!)))
    (let [update-push-controls! (fn [_ _ _] (update-controls @!flow))]
      (doseq [field-name [:message :committer-name-field :committer-email-field]
              :let [^TextInputControl text-input-control (get push-controls field-name)]]
        (ui/observe (.textProperty text-input-control) update-push-controls!)))
    (ui/with-controls pull-root [personal-access-token-link password-field pull-info-box pull-start-box save-password-checkbox username-field]
      (let [{:keys [git creds]} @!flow
            {:keys [username password]} (git-credentials/decrypt-credentials creds)
            remote-info (git/remote-info git :fetch)]
        (ui/text! username-field (or username ""))
        (ui/text! password-field (or password ""))
        (ui/value! save-password-checkbox (not (empty? password)))
        (case (:scheme remote-info)
          :https
          (do (ui/visible! pull-start-box true)
              (ui/visible! pull-info-box false)
              (if-some [personal-access-token-uri (personal-access-token-uri remote-info)]
                (ui/on-action! personal-access-token-link
                               (fn [_] (ui/open-url personal-access-token-uri)))
                (ui/enable! personal-access-token-link false)))

          :ssh
          (do (ui/visible! pull-start-box false)
              (doto pull-info-box
                (ui/visible! true)
                (ui/children! [(Text. ssh-not-supported-info-text)])))

          ;; Other protocols.
          nil)))
    (let [^ListView list-view (:conflicting pull-controls)]
      (.setSelectionMode (.getSelectionModel list-view) SelectionMode/MULTIPLE)
      (ui/context! list-view :sync {:!flow !flow} (ui/->selection-provider list-view))
      (ui/register-context-menu list-view ::conflicts-menu)
      (ui/cell-factory! list-view (fn [e] {:text e})))
    (ui/cell-factory! (:resolved pull-controls) (fn [e] {:text e}))
    (ui/with-controls push-root [committer-email-field committer-name-field]
      (let [{:keys [git]} @!flow
            {:keys [name email]} (git/user-info git)]
        (ui/text! committer-name-field name)
        (ui/text! committer-email-field email)))
    (let [^ListView list-view (:changed push-controls)]
      (.setSelectionMode (.getSelectionModel list-view) SelectionMode/MULTIPLE)
      (ui/context! list-view :sync {:!flow !flow} (ui/->selection-provider list-view))
      (ui/context! (:stage push-controls) :sync {:!flow !flow} (ui/->selection-provider list-view))
      (ui/bind-action! (:stage push-controls) :stage-change)
      (ui/register-context-menu list-view ::staging-menu)
      (ui/cell-factory! list-view vcs-status/render-verbose))
    (let [^ListView list-view (:staged push-controls)]
      (.setSelectionMode (.getSelectionModel list-view) SelectionMode/MULTIPLE)
      (ui/context! list-view :sync {:!flow !flow} (ui/->selection-provider list-view))
      (ui/context! (:unstage push-controls) :sync {:!flow !flow} (ui/->selection-provider list-view))
      (ui/bind-action! (:unstage push-controls) :unstage-change)
      (ui/register-context-menu list-view ::unstaging-menu)
      (ui/cell-factory! list-view vcs-status/render-verbose))
    (.addEventFilter scene KeyEvent/KEY_PRESSED
                     (ui/event-handler event
                                       (let [code (.getCode ^KeyEvent event)]
                                         (when (= code KeyCode/ESCAPE)
                                           (interactive-cancel! (partial cancel-flow! !flow))
                                           (.close stage)))))
    (.setScene stage scene)
    (try
      (reset! sync-dialog-open-atom true)
      (ui/show-and-wait-throwing! stage)
      (let [files-may-have-changed (not= :pull/start (:state @!flow))]
        files-may-have-changed)
      (catch Exception e
        (cancel-flow! !flow)
        (throw e))
      (finally
        (reset! sync-dialog-open-atom false)))))

;; NOTE(review): stray top-level `true` below — harmless when evaluated, but it
;; looks like a concatenation artifact rather than intentional code; verify.
true
;; Copyright 2020-2022 The Defold Foundation ;; Copyright 2014-2020 King ;; Copyright 2009-2014 PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI ;; Licensed under the Defold License version 1.0 (the "License"); you may not use ;; this file except in compliance with the License. ;; ;; You may obtain a copy of the License, together with FAQs at ;; https://www.defold.com/license ;; ;; Unless required by applicable law or agreed to in writing, software distributed ;; under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR ;; CONDITIONS OF ANY KIND, either express or implied. See the License for the ;; specific language governing permissions and limitations under the License. (ns editor.sync (:require [clojure.edn :as edn] [clojure.java.io :as io] [clojure.string :as string] [editor.dialogs :as dialogs] [editor.diff-view :as diff-view] [editor.fs :as fs] [editor.fxui :as fxui] [editor.git :as git] [editor.git-credentials :as git-credentials] [editor.handler :as handler] [editor.ui :as ui] [editor.vcs-status :as vcs-status] [editor.progress :as progress] [service.log :as log]) (:import [org.eclipse.jgit.api Git PullResult] [org.eclipse.jgit.api.errors StashApplyFailureException TransportException] [org.eclipse.jgit.errors MissingObjectException] [org.eclipse.jgit.revwalk RevCommit] [java.net URI] [javafx.scene Parent Scene] [javafx.scene.control Button ListView SelectionMode TextInputControl] [javafx.scene.input KeyCode KeyEvent] [javafx.scene.text Text])) (set! *warn-on-reflection* true) ;; ================================================================================= ;; Flow state-diagram ;; 1. Pull ;; ;; :start -> :pulling -> ---------> :done ;; \ / ;; -> :resolve ;; \ ;; -> :cancel ;; 2. 
Push ;; ;; <PULL-FLOW> -> :start -> :staging -> :committing -> :pushing -> :done ;; \ / \ ;; <----------------------------------------------- -> :cancel (defn- serialize-ref [^RevCommit ref] (some->> ref .getName)) (defn- deserialize-ref [revision ^Git git] (some->> revision (git/get-commit (.getRepository git)))) (defn- serialize-stash-info [stash-info] (some-> stash-info (update :ref serialize-ref))) (defn- deserialize-stash-info [stash-info ^Git git] (some-> stash-info (update :ref deserialize-ref git))) (defn- serialize-flow [flow] (-> flow (dissoc :git) (update :start-ref serialize-ref) (update :stash-info serialize-stash-info))) (defn- deserialize-flow [serialized-flow ^Git git] (assert (instance? Git git)) (-> serialized-flow (assoc :git git) (update :start-ref deserialize-ref git) (update :stash-info deserialize-stash-info git))) (defn- make-flow [^Git git creds start-ref stash-info] (assert (instance? Git git)) (assert (or (nil? creds) (git-credentials/encrypted-credentials? creds))) {:state :pull/start :git git :creds creds :start-ref start-ref :stash-info stash-info :progress (progress/make "pull" 4) :conflicts {} :resolved {} :staged #{} :modified #{}}) (defn- should-update-journal? [old-flow new-flow] (when (or (:git old-flow) (:git new-flow)) (let [simple-keys [:state :start-ref :stash-info :conflicts :resolved :staged :modified]] (or (not= (select-keys old-flow simple-keys) (select-keys new-flow simple-keys)) (let [old-progress (:progress old-flow) new-progress (:progress new-flow)] (or (not= (:message old-progress) (:message new-progress)) (and (= (:pos new-progress) (:size new-progress)) (not= (:pos old-progress) (:size old-progress))))))))) (defn flow-journal-file ^java.io.File [^Git git] (some-> git .getRepository .getWorkTree (io/file ".internal/.sync-in-progress"))) (defn- write-flow-journal! [{:keys [git] :as flow}] (let [file (flow-journal-file git) data (serialize-flow flow)] (fs/create-file! 
file (pr-str data)))) (defn- on-flow-changed [_ _ old-flow new-flow] (when (should-update-journal? old-flow new-flow) (write-flow-journal! new-flow))) (defn flow-in-progress? [^Git git] (if-let [file (flow-journal-file git)] (.exists file) false)) (defn read-journal [file] (with-open [reader (java.io.PushbackReader. (io/reader file))] (edn/read reader))) (defn- try-load-flow [^Git git file] (let [data (try (read-journal file) (catch Throwable e e))] (if (instance? Throwable data) ;; We failed to read or parse the data from the journal file. Possibly a ;; permissions issue, or it could be corrupt. We should notify the user, ;; but we cannot retry. {:type :error :code :read-error :exception data :can-retry? false} ;; The flow journal file parsed OK, but it might contain invalid refs. (let [flow (try (deserialize-flow data git) (catch Throwable e e))] (cond (map? flow) ;; We got a map back. Verify it contains the required data. (if (and (instance? RevCommit (:start-ref flow)) (instance? RevCommit (:ref (:stash-info flow)))) ;; The journal file looks good! We can proceed with reverting to the ;; pre-sync state. If that fails, we can retry. {:type :success :flow flow} ;; The journal file appears malformed. We cannot retry. {:type :error :code :invalid-data-error :data flow :can-retry? false}) (instance? MissingObjectException flow) ;; One of the refs from the journal file are invalid. Presumably the ;; stash was deleted. We should notify the user, but we cannot retry. {:type :error :code :invalid-ref-error :exception flow :can-retry? false} (instance? Throwable flow) ;; We somehow failed to deserialize the data we read from the journal ;; file. We should notify the user, but we cannot retry. {:type :error :code :deserialize-error :exception flow :can-retry? false} :else ;; Programming error - should not happen. (throw (ex-info (str "Unhandled return value from deserialize-flow: " (pr-str flow)) {:data data :return-value flow}))))))) (defn- try-revert-to-stashed! 
[^Git git start-ref stash-info] (or (when-some [locked-files (not-empty (git/locked-files git))] {:type :error :code :locked-files-error :locked-files locked-files :can-retry? true}) (try (git/revert-to-revision! git start-ref) nil (catch Throwable e {:type :error :code :revert-to-start-ref-error :exception e :can-retry? true})) (when stash-info (try (git/stash-apply! git stash-info) nil (catch Throwable e {:type :error :code :stash-apply-error :exception e :can-retry? true}))) (when stash-info (try (git/stash-drop! git stash-info) nil (catch Throwable e {:type :warning :code :stash-drop-error :exception e :can-retry? false}))) {:type :success})) (defn cancel-flow-in-progress! [^Git git] (let [file (flow-journal-file git) load-result (try-load-flow git file)] ;; Begin by deleting the flow journal file, since the revert could ;; potentially delete it if it is not .gitignored. In case something ;; goes wrong we will re-write it unless we think it is invalid. (fs/delete-file! file {:fail :silently}) (if (not= :success (:type load-result)) ;; There was an error loading the flow journal file. We cannot retry, ;; so we return the error info without restoring the journal file. load-result ;; The journal file looks good! Proceed with reverting to the ;; pre-sync state. In case something goes wrong, we restore the flow ;; journal file so we can retry the operation. (let [{:keys [start-ref stash-info] :as flow} (:flow load-result) revert-result (try-revert-to-stashed! git start-ref stash-info)] (when (and (not= :success (:type revert-result)) (:can-retry? revert-result)) ;; Something went wrong. Re-write the journal file so we can retry. (write-flow-journal! flow)) revert-result)))) (defn cancel-flow! [!flow] (remove-watch !flow ::on-flow-changed) (let [flow @!flow state (:state flow)] (case (namespace state) ("pull" "push") (let [{:keys [git start-ref stash-info]} flow file (flow-journal-file git) file-existed? 
(.exists file)] ;; Always delete the flow journal file, since the revert could potentially ;; delete it if it is not .gitignored. In case something goes wrong we will ;; re-write it unless we think it is invalid. (when file-existed? (fs/delete-file! file {:fail :silently})) ;; Proceed with reverting to the pre-sync state. In case something goes ;; wrong, we restore the flow journal file so we can retry the operation. (let [revert-result (try-revert-to-stashed! git start-ref stash-info)] (when (and file-existed? (not= :success (:type revert-result)) (:can-retry? revert-result)) ;; Something went wrong. Re-write the journal file so we can retry. (write-flow-journal! flow)) revert-result))))) (defn- cancel-result-message [cancel-result] (if (= :success (:type cancel-result)) "Successfully restored the project to the pre-sync state." (case (:code cancel-result) :deserialize-error "The sync journal file is corrupt. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually." :invalid-data-error "The sync journal file is malformed. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually." :invalid-ref-error "The sync journal file references invalid Git objects. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually." :locked-files-error (git/locked-files-error-message (:locked-files cancel-result)) :read-error "Failed to read the sync journal file. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually." :revert-to-start-ref-error "Failed to revert the project to the commit your changes were made on. Unable to restore the project to the pre-sync state. 
A Git stash with your changes might exist, should you want to attempt to restore it manually." :stash-apply-error "Failed to apply your stashed changes on top of the base commit. Unable to restore the project to the pre-sync state. A Git stash with your changes might exist, should you want to attempt to restore it manually." :stash-drop-error "Successfully restored the project to the pre-sync state, but was unable to drop the Git stash with your pre-sync changes. You might want to clean it up manually." (let [{:keys [code exception]} cancel-result] (if (instance? Throwable exception) (let [message (.getMessage ^Throwable exception)] (if (keyword? code) (str "Unknown error " code ". " message) (str "Unknown error. " message))) (str "Malformed sync cancellation result " (pr-str cancel-result))))))) (defn interactive-cancel! [cancel-fn] (loop [] (let [result (cancel-fn) dialog-props {:title "Unable to Cancel Sync" :icon :icon/triangle-error :header "An error occurred" :content {:fx/type fxui/label :style-class "dialog-content-padding" :text (cancel-result-message result)}}] (when (not= :success (:type result)) (if (:can-retry? result) (when (dialogs/make-info-dialog (assoc dialog-props :buttons [{:text "Lose Changes" :cancel-button true :result false} {:text "Retry" :default-button true :result true}])) (recur)) (dialogs/make-info-dialog dialog-props)))))) (defn begin-flow! [^Git git prefs] (let [start-ref (git/get-current-commit-ref git) stash-info (git/stash! git) creds (git-credentials/read-encrypted-credentials prefs git) flow (make-flow git creds start-ref stash-info) !flow (atom flow)] (try (write-flow-journal! flow) (add-watch !flow ::on-flow-changed on-flow-changed) !flow (catch Exception e (cancel-flow! !flow) (throw e))))) (defn resume-flow [^Git git] (let [file (flow-journal-file git) data (with-open [reader (java.io.PushbackReader. 
(io/reader file))] (edn/read reader)) flow (deserialize-flow data git) !flow (atom flow)] (add-watch !flow ::on-flow-changed on-flow-changed) !flow)) (defn finish-flow! [!flow] (remove-watch !flow ::on-flow-changed) (let [{:keys [git stash-info]} @!flow file (flow-journal-file git)] (fs/delete-file! file {:fail :silently}) (when stash-info (git/stash-drop! git stash-info)))) (defn- tick ([flow new-state] (tick flow new-state 1)) ([flow new-state n] (-> flow (assoc :state new-state) (update :progress #(progress/advance % n))))) (defn find-git-state [{:keys [added changed removed]} unified-status] (reduce (fn [result {:keys [change-type old-path new-path] :as change}] (case change-type :add (if (contains? added new-path) (update result :staged conj (dissoc change :score)) (update result :modified conj (dissoc change :score))) :delete (if (contains? removed old-path) (update result :staged conj (dissoc change :score)) (update result :modified conj (dissoc change :score))) :modify (if (contains? changed new-path) (update result :staged conj (dissoc change :score)) (update result :modified conj (dissoc change :score))) :rename (let [add-staged (contains? added new-path) delete-staged (contains? removed old-path)] (cond (and add-staged delete-staged) (update result :staged conj (dissoc change :score)) add-staged (-> result (update :staged conj (git/make-add-change new-path)) (update :modified conj (git/make-delete-change old-path))) delete-staged (-> result (update :staged conj (git/make-delete-change old-path)) (update :modified conj (git/make-add-change new-path))) :else (update result :modified conj (dissoc change :score)))))) {:modified #{} :staged #{}} unified-status)) (defn refresh-git-state [{:keys [git] :as flow}] (let [status (git/status git)] (merge flow (find-git-state status (git/unified-status git status))))) (defn- matching-transport-exception? [re-pattern exception] (and (instance? TransportException exception) (some? 
(re-find re-pattern (ex-message exception))))) (def ^:private https-not-authorized-exception? (partial matching-transport-exception? #"(?i)\bnot authorized\b")) (defn advance-flow [{:keys [git state progress creds conflicts stash-info message] :as flow} render-progress] (render-progress progress) (condp = state :pull/start (if (nil? (:error flow)) (advance-flow (tick flow :pull/pulling) render-progress) flow) :pull/pulling (let [dry-run-push-error (when (:verify-creds flow) ;; Pulling from a public repository will not use the ;; credentials. Do a dry-run push here to check if our ;; credentials are valid for the push state. (try (git/push! git {:encrypted-credentials creds :dry-run true}) nil (catch Exception error error)))] (if (https-not-authorized-exception? dry-run-push-error) (advance-flow (-> flow (tick :pull/start) (assoc :error :https-not-authorized)) render-progress) (let [pull-result-or-error (try (git/pull! git {:encrypted-credentials creds}) (catch Exception error error))] (cond (and (instance? PullResult pull-result-or-error) (.isSuccessful ^PullResult pull-result-or-error)) (advance-flow (tick flow :pull/applying) render-progress) (https-not-authorized-exception? pull-result-or-error) (advance-flow (-> flow (tick :pull/start) (assoc :error :https-not-authorized)) render-progress) :else (do (log/error :exception pull-result-or-error :msg (format "Error pulling during sync: %s" (ex-message pull-result-or-error))) (advance-flow (tick flow :pull/error) render-progress)))))) :pull/applying (let [stash-res (when stash-info (try (git/stash-apply! git stash-info) (catch StashApplyFailureException _ :conflict) (catch Exception e (println e)))) status (git/status git)] (cond (nil? 
stash-info) (advance-flow (tick flow :pull/done 2) render-progress) (= :conflict stash-res) (advance-flow (-> flow (tick :pull/conflicts) (assoc :conflicts (:conflicting-stage-state status))) render-progress) stash-res (advance-flow (tick flow :pull/done 2) render-progress) :else (advance-flow (tick flow :pull/error) render-progress))) :pull/conflicts (if (empty? conflicts) (advance-flow (tick flow :pull/done) render-progress) flow) :pull/done (refresh-git-state flow) ; Affects info text and Push command availability. :pull/error flow :push/start (advance-flow (tick (refresh-git-state flow) :push/staging) render-progress) :push/staging flow :push/committing (do (git/commit git message) (advance-flow (tick flow :push/pushing) render-progress)) :push/pushing (try (git/push! git {:encrypted-credentials creds}) (advance-flow (tick flow :push/done) render-progress) (catch Exception e (println e) (advance-flow (tick flow :push/error) render-progress))) :push/done flow :push/error flow)) (handler/register-menu! ::conflicts-menu [{:label "View Diff" :command :show-diff} {:label "Use Ours" :command :use-ours} {:label "Use Theirs" :command :use-theirs}]) (handler/register-menu! ::staging-menu [{:label "View Diff" :command :show-change-diff} {:label "Stage Change" :command :stage-change}]) (handler/register-menu! ::unstaging-menu [{:label "View Diff" :command :show-change-diff} {:label "Unstage Change" :command :unstage-change}]) (defn- get-theirs [{:keys [git] :as flow} file] (when-let [their-bytes (git/show-file git file)] (String. their-bytes))) (defn- get-ours [{:keys [git stash-info] :as flow} file] (when-let [stash-ref ^RevCommit (:ref stash-info)] (when-let [our-bytes (git/show-file git file (.name stash-ref))] (String. our-bytes)))) (defn- resolve-file! 
[!flow file] (let [{:keys [^Git git conflicts]} @!flow] (when-let [entry (get conflicts file)] (if (.exists (git/file git file)) (-> git .add (.addFilepattern file) .call) (-> git .rm (.addFilepattern file) .call)) (-> git .reset (.addPath file) .call) (swap! !flow #(-> % (update :conflicts dissoc file) (update :resolved assoc file entry)))))) (defn use-ours! [!flow file] (if-let [ours (get-ours @!flow file)] (spit (git/file (:git @!flow) file) ours) (fs/delete-file! (git/file (:git @!flow) file) {:fail :silently})) (resolve-file! !flow file)) (defn use-theirs! [!flow file] (if-let [theirs (get-theirs @!flow file)] (spit (git/file (:git @!flow) file) theirs) (fs/delete-file! (git/file (:git @!flow) file) {:fail :silently})) (resolve-file! !flow file)) (handler/defhandler :show-diff :sync (enabled? [selection] (= 1 (count selection))) (run [selection !flow] (let [file (first selection) ours (get-ours @!flow file) theirs (get-theirs @!flow file)] (when (and ours theirs) (diff-view/make-diff-viewer (str "Theirs '" file "'") theirs (str "Ours '" file "'") ours))))) (handler/defhandler :show-change-diff :sync (enabled? [selection] (git/selection-diffable? selection)) (run [selection !flow] (diff-view/present-diff-data (git/selection-diff-data (:git @!flow) selection)))) (handler/defhandler :use-ours :sync (enabled? [selection] (pos? (count selection))) (run [selection !flow] (doseq [f selection] (use-ours! !flow f)))) (handler/defhandler :use-theirs :sync (enabled? [selection] (pos? (count selection))) (run [selection !flow] (doseq [f selection] (use-theirs! !flow f)))) (handler/defhandler :stage-change :sync (enabled? [selection] (pos? (count selection))) (run [selection !flow] (doseq [change selection] (git/stage-change! (:git @!flow) change)) (swap! !flow refresh-git-state))) (handler/defhandler :unstage-change :sync (enabled? [selection] (pos? (count selection))) (run [selection !flow] (doseq [change selection] (git/unstage-change! (:git @!flow) change)) (swap! 
!flow refresh-git-state))) ;; ================================================================================= (def ^:private ^String ssh-not-supported-info-text (string/join "\n\n" ["This project is configured to synchronize over the SSH protocol, possibly due to having been initially cloned from an SSH URL." "It is not currently possible to synchronize from the Defold editor over SSH, but you can use external tools to do so." "If you want to be able to sync directly from the Defold editor, you must clone the project from a HTTPS link."])) (def ^:private ^String pull-done-info-text (string/join "\n\n" ["You are up-to-date with the latest changes from the server." "Press Push if you want to also upload your local changes to the server, or Done to return to the editor without pushing your changes."])) (def ^:private ^String pull-done-no-local-changes-info-text (string/join "\n\n" ["You are up-to-date with the latest changes from the server." "There are no local changes to Push to the server. Press Done to return to the editor."])) (def ^:private ^String pull-error-info-text (string/join "\n\n" ["Something went wrong when we tried to get the latest changes from the server." "Click Cancel to restore the project the pre-sync state."])) (def ^:private ^String push-done-info-text (string/join "\n\n" ["Your changes were successfully pushed to the server." "Click Done to return to the editor."])) (def ^:private ^String push-error-info-text (string/join "\n\n" ["Something went wrong when we tried to push your local changes to the server." "Click Done to return to the editor without pushing. 
You will still have the latest changes from the server unless you choose Cancel to restore the project the pre-sync state."])) (defn- personal-access-token-uri ^URI [remote-info] (when (= :https (:scheme remote-info)) (let [host (:host remote-info) remote-uri (git/remote-uri remote-info)] (cond (re-find #"\bgithub\b" host) (.resolve remote-uri "/settings/tokens") (re-find #"\bgitlab\b" host) (.resolve remote-uri "/profile/personal_access_tokens") (re-find #"\bbbitbucket\b" host) (URI. "https://confluence.atlassian.com/bitbucket/app-passwords-828781300.html"))))) (def ^:private sync-dialog-open-atom (atom false)) (defn sync-dialog-open? [] @sync-dialog-open-atom) (defn open-sync-dialog [!flow prefs] ;; Note: It would be really nice to rewrite this using cljfx. It is quite ;; cumbersome to make changes as it stands. The advance-flow function above ;; also contributes to the overall complexity. We actually might want to ;; revise the git workflow to separate the acts of committing and pushing ;; since there are sometimes issues with this simplified model. (let [root ^Parent (ui/load-fxml "sync-dialog.fxml") pull-root ^Parent (ui/load-fxml "sync-pull.fxml") push-root ^Parent (ui/load-fxml "sync-push.fxml") stage (ui/make-dialog-stage (ui/main-stage)) scene (Scene. root) dialog-controls (ui/collect-controls root ["ok" "push" "cancel" "dialog-area" "progress-bar"]) pull-controls (ui/collect-controls pull-root ["username-field" "password-field" "save-password-checkbox" "pull-start-box" "pull-info-box" "conflicting" "resolved" "conflict-box" "main-label"]) push-controls (ui/collect-controls push-root ["changed" "staged" "message" "committer-name-field" "committer-email-field" "push-info-box" "content-box" "main-label" "diff" "stage" "unstage"]) render-progress (fn [progress] (when progress (ui/run-later (ui/render-progress-controls! progress (:progress-bar dialog-controls) nil)))) update-push-buttons! 
(fn [] ;; The stage, unstage and diff buttons are enabled ;; if the changed or staged views have input focus ;; and something selected. The diff button is ;; disabled if more than one item is selected. (let [changed-view (:changed push-controls) changed-selection (ui/selection changed-view) staged-view (:staged push-controls) staged-selection (ui/selection staged-view) enabled (cond (and (ui/focus? changed-view) (seq changed-selection)) (if (git/selection-diffable? changed-selection) #{:stage :diff} #{:stage}) (and (ui/focus? staged-view) (seq staged-selection)) (if (git/selection-diffable? staged-selection) #{:unstage :diff} #{:unstage}) :else #{})] (ui/disable! (:diff push-controls) (not (:diff enabled))) (ui/disable! (:stage push-controls) (not (:stage enabled))) (ui/disable! (:unstage push-controls) (not (:unstage enabled))) (when (:diff enabled) (if-let [focused-list-view (cond (ui/focus? changed-view) changed-view (ui/focus? staged-view) staged-view :else nil)] (ui/context! (:diff push-controls) :sync {:!flow !flow} (ui/->selection-provider focused-list-view)))))) update-controls (fn [{:keys [state conflicts resolved modified staged] :as flow}] (ui/run-later (case (namespace state) "pull" (do (ui/title! stage "Get Remote Changes") (ui/text! (:ok dialog-controls) "Pull") (ui/children! (:dialog-area dialog-controls) [pull-root]) (ui/fill-control pull-root) (.sizeToScene (.getWindow scene))) "push" (do (ui/title! stage "Push Local Changes") (ui/visible! (:push dialog-controls) false) (ui/text! (:ok dialog-controls) "Push") (ui/children! (:dialog-area dialog-controls) [push-root]) (ui/fill-control push-root) (.sizeToScene (.getWindow scene)))) (condp = state :pull/start (let [error (:error flow) remote-info (git/remote-info (:git flow) :fetch) is-ssh-remote (= :ssh (:scheme remote-info))] (ui/text! 
(:main-label pull-controls) (cond is-ssh-remote "Cannot Sync Over SSH" (= :https-not-authorized error) "Invalid Username or Password" :else "Get Remote Changes")) (ui/visible! (:pull-start-box pull-controls) (not is-ssh-remote)) (ui/visible! (:conflict-box pull-controls) false) (ui/enable! (:ok dialog-controls) (not is-ssh-remote)) ; Disallow sync over SSH. (ui/set-style! (:username-field pull-controls) "error" (= :https-not-authorized error)) (ui/set-style! (:password-field pull-controls) "error" (= :https-not-authorized error))) :pull/conflicts (do (ui/text! (:main-label pull-controls) "Resolve Conflicts") (ui/visible! (:pull-info-box pull-controls) false) (ui/visible! (:pull-start-box pull-controls) false) (ui/visible! (:conflict-box pull-controls) true) (ui/items! (:conflicting pull-controls) (sort (keys conflicts))) (ui/items! (:resolved pull-controls) (sort (keys resolved))) (let [button (:ok dialog-controls)] (ui/text! button "Apply") (ui/disable! button (not (empty? conflicts))))) :pull/done (let [has-local-changes (not (empty? (concat modified staged)))] (ui/text! (:main-label pull-controls) "Done!") (ui/visible! (:push dialog-controls) true) (ui/visible! (:pull-start-box pull-controls) false) (ui/visible! (:conflict-box pull-controls) false) (ui/text! (:ok dialog-controls) "Done") (ui/enable! (:push dialog-controls) has-local-changes) (doto (:pull-info-box pull-controls) (ui/visible! true) (ui/children! [(Text. (if has-local-changes pull-done-info-text pull-done-no-local-changes-info-text))]))) :pull/error (do (ui/text! (:main-label pull-controls) "Error getting changes") (ui/visible! (:push dialog-controls) false) (ui/visible! (:pull-start-box pull-controls) false) (ui/visible! (:conflict-box pull-controls) false) (ui/text! (:ok dialog-controls) "Done") (ui/disable! (:ok dialog-controls) true) (doto (:pull-info-box pull-controls) (ui/visible! true) (ui/children! [(Text. 
pull-error-info-text)]))) :push/staging (let [changed-view ^ListView (:changed push-controls) staged-view ^ListView (:staged push-controls) changed-selection (vec (ui/selection changed-view)) staged-selection (vec (ui/selection staged-view)) empty-message (empty? (ui/text (:message push-controls))) empty-committer-name (empty? (ui/text (:committer-name-field push-controls))) empty-committer-email (empty? (ui/text (:committer-email-field push-controls)))] (ui/visible! (:pull-info-box pull-controls) false) (ui/items! changed-view (sort-by git/change-path modified)) (ui/items! staged-view (sort-by git/change-path staged)) (ui/set-style! (:message push-controls) "info" empty-message) (ui/set-style! (:committer-name-field push-controls) "info" empty-committer-name) (ui/set-style! (:committer-email-field push-controls) "info" empty-committer-email) (ui/disable! (:ok dialog-controls) (or (empty? staged) empty-message empty-committer-name empty-committer-email)) ;; The stage, unstage and diff buttons start off disabled, but ;; might be enabled by the event handler triggered by select! (ui/disable! (:diff push-controls) true) (ui/disable! (:stage push-controls) true) (ui/disable! (:unstage push-controls) true) (doseq [item changed-selection] (ui/select! changed-view item)) (doseq [item staged-selection] (ui/select! staged-view item))) :push/done (do (ui/text! (:main-label push-controls) "Done!") (ui/visible! (:content-box push-controls) false) (ui/text! (:ok dialog-controls) "Done") (doto (:push-info-box push-controls) (ui/visible! true) (ui/children! [(Text. push-done-info-text)]))) :push/error (do (ui/text! (:main-label push-controls) "Error pushing changes to server") (ui/visible! (:content-box push-controls) false) (ui/text! (:ok dialog-controls) "Done") (ui/enable! (:ok dialog-controls) true) (doto (:push-info-box push-controls) (ui/visible! true) (ui/children! [(Text. 
push-error-info-text)]))) nil)))] (update-controls @!flow) (add-watch !flow :updater (fn [_ _ _ flow] (update-controls flow))) ; Disable the window close button, since it is unclear what it means. ; This forces the user to make an active choice between Done or Cancel. (ui/on-closing! stage (fn [_] false)) (ui/on-action! (:cancel dialog-controls) (fn [_] (interactive-cancel! (partial cancel-flow! !flow)) (.close stage))) (ui/on-action! (:ok dialog-controls) (fn [_] (let [flow @!flow state (:state flow)] (cond (= "done" (name state)) (do (finish-flow! !flow) (.close stage)) (= :pull/start state) (swap! !flow (fn [flow] (let [username (ui/text (:username-field pull-controls)) password (ui/text (:password-field pull-controls)) save-password (ui/value (:save-password-checkbox pull-controls))] (git-credentials/write-encrypted-credentials! prefs (:git flow) (git-credentials/encrypt-credentials (cond-> {:username username} save-password (assoc :password PI:PASSWORD:<PASSWORD>END_PI)))) (advance-flow (-> flow (dissoc :error) (assoc :verify-creds (not (empty? password))) (assoc :creds (git-credentials/encrypt-credentials {:username username :password PI:PASSWORD:<PASSWORD>END_PI}))) render-progress)))) (= :push/staging state) (swap! !flow (fn [flow] (let [committer-name (ui/text (:committer-name-field push-controls)) committer-email (ui/text (:committer-email-field push-controls))] (git/set-user-info! (:git flow) {:name committer-name :email committer-email}) (advance-flow (merge flow {:state :push/committing :message (ui/text (:message push-controls))}) render-progress)))) :else (swap! !flow advance-flow render-progress))))) (ui/on-action! (:push dialog-controls) (fn [_] (swap! !flow #(merge % {:state :push/start :progress (progress/make "push" 4)})) (swap! !flow advance-flow render-progress))) (ui/bind-action! (:diff push-controls) :show-change-diff) (ui/observe (.focusOwnerProperty scene) (fn [_ _ new] (when-not (instance? 
Button new) (update-push-buttons!)))) (ui/observe-selection ^ListView (:changed push-controls) (fn [_ _] (update-push-buttons!))) (ui/observe-selection ^ListView (:staged push-controls) (fn [_ _] (update-push-buttons!))) (let [update-push-controls! (fn [_ _ _] (update-controls @!flow))] (doseq [field-name [:message :committer-name-field :committer-email-field] :let [^TextInputControl text-input-control (get push-controls field-name)]] (ui/observe (.textProperty text-input-control) update-push-controls!))) (ui/with-controls pull-root [personal-access-token-link password-field pull-info-box pull-start-box save-password-checkbox username-field] (let [{:keys [git creds]} @!flow {:keys [username password]} (git-credentials/decrypt-credentials creds) remote-info (git/remote-info git :fetch)] (ui/text! username-field (or username "")) (ui/text! password-field (or password "")) (ui/value! save-password-checkbox (not (empty? password))) (case (:scheme remote-info) :https (do (ui/visible! pull-start-box true) (ui/visible! pull-info-box false) (if-some [personal-access-token-uri (personal-access-token-uri remote-info)] (ui/on-action! personal-access-token-link (fn [_] (ui/open-url personal-access-token-uri))) (ui/enable! personal-access-token-link false))) :ssh (do (ui/visible! pull-start-box false) (doto pull-info-box (ui/visible! true) (ui/children! [(Text. ssh-not-supported-info-text)]))) ;; Other protocols. nil))) (let [^ListView list-view (:conflicting pull-controls)] (.setSelectionMode (.getSelectionModel list-view) SelectionMode/MULTIPLE) (ui/context! list-view :sync {:!flow !flow} (ui/->selection-provider list-view)) (ui/register-context-menu list-view ::conflicts-menu) (ui/cell-factory! list-view (fn [e] {:text e}))) (ui/cell-factory! (:resolved pull-controls) (fn [e] {:text e})) (ui/with-controls push-root [committer-email-field committer-name-field] (let [{:keys [git]} @!flow {:keys [name email]} (git/user-info git)] (ui/text! committer-name-field name) (ui/text! 
committer-email-field email))) (let [^ListView list-view (:changed push-controls)] (.setSelectionMode (.getSelectionModel list-view) SelectionMode/MULTIPLE) (ui/context! list-view :sync {:!flow !flow} (ui/->selection-provider list-view)) (ui/context! (:stage push-controls) :sync {:!flow !flow} (ui/->selection-provider list-view)) (ui/bind-action! (:stage push-controls) :stage-change) (ui/register-context-menu list-view ::staging-menu) (ui/cell-factory! list-view vcs-status/render-verbose)) (let [^ListView list-view (:staged push-controls)] (.setSelectionMode (.getSelectionModel list-view) SelectionMode/MULTIPLE) (ui/context! list-view :sync {:!flow !flow} (ui/->selection-provider list-view)) (ui/context! (:unstage push-controls) :sync {:!flow !flow} (ui/->selection-provider list-view)) (ui/bind-action! (:unstage push-controls) :unstage-change) (ui/register-context-menu list-view ::unstaging-menu) (ui/cell-factory! list-view vcs-status/render-verbose)) (.addEventFilter scene KeyEvent/KEY_PRESSED (ui/event-handler event (let [code (.getCode ^KeyEvent event)] (when (= code KeyCode/ESCAPE) (interactive-cancel! (partial cancel-flow! !flow)) (.close stage))))) (.setScene stage scene) (try (reset! sync-dialog-open-atom true) (ui/show-and-wait-throwing! stage) (let [files-may-have-changed (not= :pull/start (:state @!flow))] files-may-have-changed) (catch Exception e (cancel-flow! !flow) (throw e)) (finally (reset! sync-dialog-open-atom false)))))
[ { "context": "1 2)\n\n (deserialize \"{ \\\"person\\\": { \\\"name\\\": \\\"Garrett\\\" } }\")\n\n (js->clj (deserialize \"{ \\\"person\\\": {", "end": 1589, "score": 0.999678373336792, "start": 1582, "tag": "NAME", "value": "Garrett" }, { "context": "js->clj (deserialize \"{ \\\"person\\\": { \\\"name\\\": \\\"Garrett\\\" } }\"))\n\n (apply str [\"hello\" \" \" \"world\"])\n\n ", "end": 1659, "score": 0.9997161030769348, "start": 1652, "tag": "NAME", "value": "Garrett" } ]
src/main/blink/app/node_wrappers/http.cljs
GHvW/blink
0
(ns blink.app.node-wrappers.http (:require ["https" :as https] [cljs.core.match :refer-macros [match]] [cljs.core.async :refer [go chan <! put! >! close! reduce]])) (defn deserialize [data] (.parse js/JSON data)) (defn node-get ([url] (node-get url #js {})) ([url options] (let [out-chan (chan)] (-> (.get https url options (fn [res] (let [status-code (.-statusCode res)] (if (not= status-code 200) (do (put! out-chan [:error {:code status-code :message "error"}]) (close! out-chan) ;; close channel first in case .resume creates a data event (.resume res)) (do (.setEncoding res "utf8") (.on res "data" (fn [chunk] (put! out-chan [:data chunk]))) (.on res "end" (fn [] (close! out-chan)))))))) (.on "error" (fn [err] (put! out-chan [:error "cant perform request"]) (close! out-chan)))) (reduce (fn [buff next] (match [next] [[:error message]] (conj buff message) [[:data data]] (conj buff data))) [] out-chan)))) (defn get-request ([url] (get-request url nil)) ([url options] ;; (println options) (let [out (chan 1)] (go (>! out (apply str (<! (node-get url options))))) out))) (comment (not= 1 2) (deserialize "{ \"person\": { \"name\": \"Garrett\" } }") (js->clj (deserialize "{ \"person\": { \"name\": \"Garrett\" } }")) (apply str ["hello" " " "world"]) (apply str (conj [] "hello")) (+ 1 2))
34815
(ns blink.app.node-wrappers.http (:require ["https" :as https] [cljs.core.match :refer-macros [match]] [cljs.core.async :refer [go chan <! put! >! close! reduce]])) (defn deserialize [data] (.parse js/JSON data)) (defn node-get ([url] (node-get url #js {})) ([url options] (let [out-chan (chan)] (-> (.get https url options (fn [res] (let [status-code (.-statusCode res)] (if (not= status-code 200) (do (put! out-chan [:error {:code status-code :message "error"}]) (close! out-chan) ;; close channel first in case .resume creates a data event (.resume res)) (do (.setEncoding res "utf8") (.on res "data" (fn [chunk] (put! out-chan [:data chunk]))) (.on res "end" (fn [] (close! out-chan)))))))) (.on "error" (fn [err] (put! out-chan [:error "cant perform request"]) (close! out-chan)))) (reduce (fn [buff next] (match [next] [[:error message]] (conj buff message) [[:data data]] (conj buff data))) [] out-chan)))) (defn get-request ([url] (get-request url nil)) ([url options] ;; (println options) (let [out (chan 1)] (go (>! out (apply str (<! (node-get url options))))) out))) (comment (not= 1 2) (deserialize "{ \"person\": { \"name\": \"<NAME>\" } }") (js->clj (deserialize "{ \"person\": { \"name\": \"<NAME>\" } }")) (apply str ["hello" " " "world"]) (apply str (conj [] "hello")) (+ 1 2))
true
(ns blink.app.node-wrappers.http (:require ["https" :as https] [cljs.core.match :refer-macros [match]] [cljs.core.async :refer [go chan <! put! >! close! reduce]])) (defn deserialize [data] (.parse js/JSON data)) (defn node-get ([url] (node-get url #js {})) ([url options] (let [out-chan (chan)] (-> (.get https url options (fn [res] (let [status-code (.-statusCode res)] (if (not= status-code 200) (do (put! out-chan [:error {:code status-code :message "error"}]) (close! out-chan) ;; close channel first in case .resume creates a data event (.resume res)) (do (.setEncoding res "utf8") (.on res "data" (fn [chunk] (put! out-chan [:data chunk]))) (.on res "end" (fn [] (close! out-chan)))))))) (.on "error" (fn [err] (put! out-chan [:error "cant perform request"]) (close! out-chan)))) (reduce (fn [buff next] (match [next] [[:error message]] (conj buff message) [[:data data]] (conj buff data))) [] out-chan)))) (defn get-request ([url] (get-request url nil)) ([url options] ;; (println options) (let [out (chan 1)] (go (>! out (apply str (<! (node-get url options))))) out))) (comment (not= 1 2) (deserialize "{ \"person\": { \"name\": \"PI:NAME:<NAME>END_PI\" } }") (js->clj (deserialize "{ \"person\": { \"name\": \"PI:NAME:<NAME>END_PI\" } }")) (apply str ["hello" " " "world"]) (apply str (conj [] "hello")) (+ 1 2))
[ { "context": "ses/MIT\"\n :year 2015\n :key \"mit\"}\n :description \"Serves holojam data to the web ", "end": 175, "score": 0.6542733907699585, "start": 172, "tag": "KEY", "value": "mit" } ]
project.clj
WFT/holojam-playback-server
0
(defproject playback-server "0.1.0-SNAPSHOT" :license {:name "MIT License" :url "http://opensource.org/licenses/MIT" :year 2015 :key "mit"} :description "Serves holojam data to the web playback client." :dependencies [[org.clojure/clojure "1.7.0"] [compojure "1.4.0"] [ring/ring-defaults "0.1.5"] [aleph "0.4.0"] [manifold "0.1.1"] [clj-struct "0.1.0"] [org.clojure/tools.cli "0.3.3"]] :main ^:skip-aot playback-server.core :target-path "target/%s" :profiles {:uberjar {:aot :all}})
32959
(defproject playback-server "0.1.0-SNAPSHOT" :license {:name "MIT License" :url "http://opensource.org/licenses/MIT" :year 2015 :key "<KEY>"} :description "Serves holojam data to the web playback client." :dependencies [[org.clojure/clojure "1.7.0"] [compojure "1.4.0"] [ring/ring-defaults "0.1.5"] [aleph "0.4.0"] [manifold "0.1.1"] [clj-struct "0.1.0"] [org.clojure/tools.cli "0.3.3"]] :main ^:skip-aot playback-server.core :target-path "target/%s" :profiles {:uberjar {:aot :all}})
true
(defproject playback-server "0.1.0-SNAPSHOT" :license {:name "MIT License" :url "http://opensource.org/licenses/MIT" :year 2015 :key "PI:KEY:<KEY>END_PI"} :description "Serves holojam data to the web playback client." :dependencies [[org.clojure/clojure "1.7.0"] [compojure "1.4.0"] [ring/ring-defaults "0.1.5"] [aleph "0.4.0"] [manifold "0.1.1"] [clj-struct "0.1.0"] [org.clojure/tools.cli "0.3.3"]] :main ^:skip-aot playback-server.core :target-path "target/%s" :profiles {:uberjar {:aot :all}})
[ { "context": " user2 (:identity req2)\n habit1 {:name \"piano\" :owner_id (parseLong (:id user1))}\n habit", "end": 526, "score": 0.9894514083862305, "start": 521, "tag": "NAME", "value": "piano" } ]
groove-api/test/groove/handlers/groove_test.clj
MrLys/vicissitudes
1
(ns groove.handlers.groove-test (:require [clojure.test :refer :all] [groove.util.utils :refer [parseLong]] [groove.test-utils :refer :all] [groove.db-test-utils :refer :all] [groove.handlers.groove :as handler] [groove.bulwark :as blwrk] [groove.db :as db])) (deftest update-groove-test "Update grooves" (let [req (create-user) user1 (:identity req) req2 (create-user 1) user2 (:identity req2) habit1 {:name "piano" :owner_id (parseLong (:id user1))} habit2 {:name "exercise" :owner_id (parseLong (:id user1))} habit3 {:name "exercise" :owner_id (parseLong (:id user2))} h1 (blwrk/create-habit habit1 req) h2 (blwrk/create-habit habit2 req) today (java.time.LocalDate/now java.time.ZoneOffset/UTC) g1 (create-groove req h1 today -5 "success") g2 (create-groove req h1 today 1 "fail") ;; invalid g3 (create-groove req h1 today -4 "success") g4 (create-groove req h2 today -3 "success") g5 (create-groove req h2 today 0 "fail") g6 (create-groove req h2 today -22 "success") g7 (create-groove req2 h1 today -22 "success") ;; req 2 should not be able to create groove for habit 1. resp1 (handler/update-groove g1 req) resp2 (handler/update-groove g2 req) ;; should fail resp3 (handler/update-groove g3 req) resp4 (handler/update-groove g4 req) resp5 (handler/update-groove g5 req) resp6 (handler/update-groove g6 req) resp7 (handler/update-groove g7 req2) resp8 (handler/get-all-grooves-handler req user1 (.plusDays today -22) today)] (and (is (nil? (:error resp1))) (is (not (nil? (:error resp2)))) (is (nil? (:error resp3))) (is (nil? (:error resp4))) (is (nil? (:error resp5))) (is (nil? (:error resp6))) (is (not (nil? (:error resp7)))) (is (not (empty? resp8))) (is (= (.length resp8) 5))))) (use-fixtures :once once-fixture) (use-fixtures :each each-fixture)
10383
(ns groove.handlers.groove-test (:require [clojure.test :refer :all] [groove.util.utils :refer [parseLong]] [groove.test-utils :refer :all] [groove.db-test-utils :refer :all] [groove.handlers.groove :as handler] [groove.bulwark :as blwrk] [groove.db :as db])) (deftest update-groove-test "Update grooves" (let [req (create-user) user1 (:identity req) req2 (create-user 1) user2 (:identity req2) habit1 {:name "<NAME>" :owner_id (parseLong (:id user1))} habit2 {:name "exercise" :owner_id (parseLong (:id user1))} habit3 {:name "exercise" :owner_id (parseLong (:id user2))} h1 (blwrk/create-habit habit1 req) h2 (blwrk/create-habit habit2 req) today (java.time.LocalDate/now java.time.ZoneOffset/UTC) g1 (create-groove req h1 today -5 "success") g2 (create-groove req h1 today 1 "fail") ;; invalid g3 (create-groove req h1 today -4 "success") g4 (create-groove req h2 today -3 "success") g5 (create-groove req h2 today 0 "fail") g6 (create-groove req h2 today -22 "success") g7 (create-groove req2 h1 today -22 "success") ;; req 2 should not be able to create groove for habit 1. resp1 (handler/update-groove g1 req) resp2 (handler/update-groove g2 req) ;; should fail resp3 (handler/update-groove g3 req) resp4 (handler/update-groove g4 req) resp5 (handler/update-groove g5 req) resp6 (handler/update-groove g6 req) resp7 (handler/update-groove g7 req2) resp8 (handler/get-all-grooves-handler req user1 (.plusDays today -22) today)] (and (is (nil? (:error resp1))) (is (not (nil? (:error resp2)))) (is (nil? (:error resp3))) (is (nil? (:error resp4))) (is (nil? (:error resp5))) (is (nil? (:error resp6))) (is (not (nil? (:error resp7)))) (is (not (empty? resp8))) (is (= (.length resp8) 5))))) (use-fixtures :once once-fixture) (use-fixtures :each each-fixture)
true
(ns groove.handlers.groove-test (:require [clojure.test :refer :all] [groove.util.utils :refer [parseLong]] [groove.test-utils :refer :all] [groove.db-test-utils :refer :all] [groove.handlers.groove :as handler] [groove.bulwark :as blwrk] [groove.db :as db])) (deftest update-groove-test "Update grooves" (let [req (create-user) user1 (:identity req) req2 (create-user 1) user2 (:identity req2) habit1 {:name "PI:NAME:<NAME>END_PI" :owner_id (parseLong (:id user1))} habit2 {:name "exercise" :owner_id (parseLong (:id user1))} habit3 {:name "exercise" :owner_id (parseLong (:id user2))} h1 (blwrk/create-habit habit1 req) h2 (blwrk/create-habit habit2 req) today (java.time.LocalDate/now java.time.ZoneOffset/UTC) g1 (create-groove req h1 today -5 "success") g2 (create-groove req h1 today 1 "fail") ;; invalid g3 (create-groove req h1 today -4 "success") g4 (create-groove req h2 today -3 "success") g5 (create-groove req h2 today 0 "fail") g6 (create-groove req h2 today -22 "success") g7 (create-groove req2 h1 today -22 "success") ;; req 2 should not be able to create groove for habit 1. resp1 (handler/update-groove g1 req) resp2 (handler/update-groove g2 req) ;; should fail resp3 (handler/update-groove g3 req) resp4 (handler/update-groove g4 req) resp5 (handler/update-groove g5 req) resp6 (handler/update-groove g6 req) resp7 (handler/update-groove g7 req2) resp8 (handler/get-all-grooves-handler req user1 (.plusDays today -22) today)] (and (is (nil? (:error resp1))) (is (not (nil? (:error resp2)))) (is (nil? (:error resp3))) (is (nil? (:error resp4))) (is (nil? (:error resp5))) (is (nil? (:error resp6))) (is (not (nil? (:error resp7)))) (is (not (empty? resp8))) (is (= (.length resp8) 5))))) (use-fixtures :once once-fixture) (use-fixtures :each each-fixture)
[ { "context": "re]\n (str (:name were-creature) \" will try to eat Harry Potter for dinner\"))\n\n(full-moon-behavior {:were-type :h", "end": 221, "score": 0.9982450008392334, "start": 209, "tag": "NAME", "value": "Harry Potter" }, { "context": "e :hogwarts-professor\n :name \"Lupin\"})\n\n; 2\n\n(defprotocol WereCreature\n (full-moon-b", "end": 322, "score": 0.9995726943016052, "start": 317, "tag": "NAME", "value": "Lupin" }, { "context": "he oldies\")))\n\n(full-moon-behavior (WereSimmons. \"Bob\"))\n\n; 3 & 4\n\n; Skipped\n", "end": 563, "score": 0.998623788356781, "start": 560, "tag": "NAME", "value": "Bob" } ]
chapter-13.clj
peferron/clojure-brave-true
1
; 1 (defmulti full-moon-behavior (fn [were-creature] (:were-type were-creature))) (defmethod full-moon-behavior :hogwarts-professor [were-creature] (str (:name were-creature) " will try to eat Harry Potter for dinner")) (full-moon-behavior {:were-type :hogwarts-professor :name "Lupin"}) ; 2 (defprotocol WereCreature (full-moon-behavior [x])) (defrecord WereSimmons [name] WereCreature (full-moon-behavior [x] (str name " will encourage people and sweat to the oldies"))) (full-moon-behavior (WereSimmons. "Bob")) ; 3 & 4 ; Skipped
79892
; 1 (defmulti full-moon-behavior (fn [were-creature] (:were-type were-creature))) (defmethod full-moon-behavior :hogwarts-professor [were-creature] (str (:name were-creature) " will try to eat <NAME> for dinner")) (full-moon-behavior {:were-type :hogwarts-professor :name "<NAME>"}) ; 2 (defprotocol WereCreature (full-moon-behavior [x])) (defrecord WereSimmons [name] WereCreature (full-moon-behavior [x] (str name " will encourage people and sweat to the oldies"))) (full-moon-behavior (WereSimmons. "<NAME>")) ; 3 & 4 ; Skipped
true
; 1 (defmulti full-moon-behavior (fn [were-creature] (:were-type were-creature))) (defmethod full-moon-behavior :hogwarts-professor [were-creature] (str (:name were-creature) " will try to eat PI:NAME:<NAME>END_PI for dinner")) (full-moon-behavior {:were-type :hogwarts-professor :name "PI:NAME:<NAME>END_PI"}) ; 2 (defprotocol WereCreature (full-moon-behavior [x])) (defrecord WereSimmons [name] WereCreature (full-moon-behavior [x] (str name " will encourage people and sweat to the oldies"))) (full-moon-behavior (WereSimmons. "PI:NAME:<NAME>END_PI")) ; 3 & 4 ; Skipped
[ { "context": "parse-mentor-handle\n [^String mentor-handle]\n ;; karl@foo.com\n (when (and mentor-handle (re-seq #\"@\" mentor-ha", "end": 726, "score": 0.9999099373817444, "start": 714, "tag": "EMAIL", "value": "karl@foo.com" } ]
src/krukow/mentor_match/sheets_parser.clj
krukow/mentor-match
0
(ns krukow.mentor-match.sheets-parser (:require [krukow.mentor-match.google-api :as api] [clojure.string :as s])) (defn- spreadsheet-url->id [spreadsheetUrl] (let [url-path (.getPath (java.net.URL. spreadsheetUrl)) id-action-segment (last (clojure.string/split url-path #"/spreadsheets/d/"))] ;; "1QkBeMNGfsHHga85c-UsLAwnpmz7QyhvFK_n31CzDe7c/edit" (first (s/split id-action-segment #"/")))) (defn- parse-preferences [^String prefs-raw] (let [lines (s/split-lines prefs-raw) prefs (map #(second (re-matches #"(?:\s*\d+\.\s*)?(@\w+).*" %)) lines)] (filter (complement nil?) prefs))) (defn- parse-mentor-handle [^String mentor-handle] ;; karl@foo.com (when (and mentor-handle (re-seq #"@" mentor-handle)) (let [h (s/split mentor-handle #"@")] (str "@" (first h))))) (def sheet-columns {"Timestamp" :timestamp "Email Address" :mentee "Admin Facilitating" :admin "Status" :status "3 Mentor Choices" :mentor-preferences "Fit Check Meeting Date" :fit-check-date "Mentor" :mentor "Start Date" :start-date "Notes" :notes "Are you comfortable committing to spending 45-90 minutes a month on this program?" :comfort "Where are you located?" :location "How comfortable are you receiving direct, honest feedback?" :feedback "What areas of growth are you looking to focus on during the next 6 months?" :areas-of-growth "What are you looking for in a mentor?" :looking-for "Is there anything you're explicitly not looking for in a mentor?" :not-looking-for "Is there anyone you have in mind for a mentor?" :mentor-in-mind "Is there anyone you wouldn't feel comfortable having as a mentor?" :non-mentors "Tell us a little bit about yourself outside of programming." :yourself "Anything else you would like us to know?" 
:anything-else "Mentor handle" :mentor-handle }) (def row-key->column (zipmap (vals sheet-columns) (keys sheet-columns))) (defn parse-sheet [url google-config] (let [data-raw (api/eval-range google-config (spreadsheet-url->id url) "A:T") row-key->index (map #(get sheet-columns (s/trim %) (s/trim %)) (first data-raw))] (->> (rest data-raw) (map (fn [row] (let [raw-map (zipmap row-key->index row) prefs (get raw-map :mentor-preferences) mentor-handle (get raw-map :mentor-handle)] (assoc raw-map :mentor-preferences (parse-preferences prefs) :mentor-handle (parse-mentor-handle mentor-handle)))))))) (comment (def spreadsheet-url "https://docs.google.com/spreadsheets/d/13eowmhtoWvTsrT1v9mXC6W3TH-fLme0S0jCOVaPgpKc/edit#gid=1314170866") (parse-sheet spreadsheet-url {:token-directory "./tokens"}) )
25124
(ns krukow.mentor-match.sheets-parser (:require [krukow.mentor-match.google-api :as api] [clojure.string :as s])) (defn- spreadsheet-url->id [spreadsheetUrl] (let [url-path (.getPath (java.net.URL. spreadsheetUrl)) id-action-segment (last (clojure.string/split url-path #"/spreadsheets/d/"))] ;; "1QkBeMNGfsHHga85c-UsLAwnpmz7QyhvFK_n31CzDe7c/edit" (first (s/split id-action-segment #"/")))) (defn- parse-preferences [^String prefs-raw] (let [lines (s/split-lines prefs-raw) prefs (map #(second (re-matches #"(?:\s*\d+\.\s*)?(@\w+).*" %)) lines)] (filter (complement nil?) prefs))) (defn- parse-mentor-handle [^String mentor-handle] ;; <EMAIL> (when (and mentor-handle (re-seq #"@" mentor-handle)) (let [h (s/split mentor-handle #"@")] (str "@" (first h))))) (def sheet-columns {"Timestamp" :timestamp "Email Address" :mentee "Admin Facilitating" :admin "Status" :status "3 Mentor Choices" :mentor-preferences "Fit Check Meeting Date" :fit-check-date "Mentor" :mentor "Start Date" :start-date "Notes" :notes "Are you comfortable committing to spending 45-90 minutes a month on this program?" :comfort "Where are you located?" :location "How comfortable are you receiving direct, honest feedback?" :feedback "What areas of growth are you looking to focus on during the next 6 months?" :areas-of-growth "What are you looking for in a mentor?" :looking-for "Is there anything you're explicitly not looking for in a mentor?" :not-looking-for "Is there anyone you have in mind for a mentor?" :mentor-in-mind "Is there anyone you wouldn't feel comfortable having as a mentor?" :non-mentors "Tell us a little bit about yourself outside of programming." :yourself "Anything else you would like us to know?" 
:anything-else "Mentor handle" :mentor-handle }) (def row-key->column (zipmap (vals sheet-columns) (keys sheet-columns))) (defn parse-sheet [url google-config] (let [data-raw (api/eval-range google-config (spreadsheet-url->id url) "A:T") row-key->index (map #(get sheet-columns (s/trim %) (s/trim %)) (first data-raw))] (->> (rest data-raw) (map (fn [row] (let [raw-map (zipmap row-key->index row) prefs (get raw-map :mentor-preferences) mentor-handle (get raw-map :mentor-handle)] (assoc raw-map :mentor-preferences (parse-preferences prefs) :mentor-handle (parse-mentor-handle mentor-handle)))))))) (comment (def spreadsheet-url "https://docs.google.com/spreadsheets/d/13eowmhtoWvTsrT1v9mXC6W3TH-fLme0S0jCOVaPgpKc/edit#gid=1314170866") (parse-sheet spreadsheet-url {:token-directory "./tokens"}) )
true
(ns krukow.mentor-match.sheets-parser (:require [krukow.mentor-match.google-api :as api] [clojure.string :as s])) (defn- spreadsheet-url->id [spreadsheetUrl] (let [url-path (.getPath (java.net.URL. spreadsheetUrl)) id-action-segment (last (clojure.string/split url-path #"/spreadsheets/d/"))] ;; "1QkBeMNGfsHHga85c-UsLAwnpmz7QyhvFK_n31CzDe7c/edit" (first (s/split id-action-segment #"/")))) (defn- parse-preferences [^String prefs-raw] (let [lines (s/split-lines prefs-raw) prefs (map #(second (re-matches #"(?:\s*\d+\.\s*)?(@\w+).*" %)) lines)] (filter (complement nil?) prefs))) (defn- parse-mentor-handle [^String mentor-handle] ;; PI:EMAIL:<EMAIL>END_PI (when (and mentor-handle (re-seq #"@" mentor-handle)) (let [h (s/split mentor-handle #"@")] (str "@" (first h))))) (def sheet-columns {"Timestamp" :timestamp "Email Address" :mentee "Admin Facilitating" :admin "Status" :status "3 Mentor Choices" :mentor-preferences "Fit Check Meeting Date" :fit-check-date "Mentor" :mentor "Start Date" :start-date "Notes" :notes "Are you comfortable committing to spending 45-90 minutes a month on this program?" :comfort "Where are you located?" :location "How comfortable are you receiving direct, honest feedback?" :feedback "What areas of growth are you looking to focus on during the next 6 months?" :areas-of-growth "What are you looking for in a mentor?" :looking-for "Is there anything you're explicitly not looking for in a mentor?" :not-looking-for "Is there anyone you have in mind for a mentor?" :mentor-in-mind "Is there anyone you wouldn't feel comfortable having as a mentor?" :non-mentors "Tell us a little bit about yourself outside of programming." :yourself "Anything else you would like us to know?" 
:anything-else "Mentor handle" :mentor-handle }) (def row-key->column (zipmap (vals sheet-columns) (keys sheet-columns))) (defn parse-sheet [url google-config] (let [data-raw (api/eval-range google-config (spreadsheet-url->id url) "A:T") row-key->index (map #(get sheet-columns (s/trim %) (s/trim %)) (first data-raw))] (->> (rest data-raw) (map (fn [row] (let [raw-map (zipmap row-key->index row) prefs (get raw-map :mentor-preferences) mentor-handle (get raw-map :mentor-handle)] (assoc raw-map :mentor-preferences (parse-preferences prefs) :mentor-handle (parse-mentor-handle mentor-handle)))))))) (comment (def spreadsheet-url "https://docs.google.com/spreadsheets/d/13eowmhtoWvTsrT1v9mXC6W3TH-fLme0S0jCOVaPgpKc/edit#gid=1314170866") (parse-sheet spreadsheet-url {:token-directory "./tokens"}) )
[ { "context": ") ;; warnings in cheshire.generate\n(ns ^{:author \"wahpenayo at gmail dot com\" \n :date \"2017-12-06\"\n :doc \"Probabilit", "end": 137, "score": 0.9344533681869507, "start": 111, "tag": "EMAIL", "value": "wahpenayo at gmail dot com" } ]
src/main/clojure/zana/prob/measure.clj
wahpenayo/zana
2
(set! *warn-on-reflection* true) (set! *unchecked-math* false) ;; warnings in cheshire.generate (ns ^{:author "wahpenayo at gmail dot com" :date "2017-12-06" :doc "Probability measures over <b>R</b>." } zana.prob.measure (:refer-clojure :exclude [every?]) (:require [cheshire.generate] [zana.commons.core :as zcc] [zana.collections.clojurize :as zccl] [zana.io.edn :as zedn] [zana.stats.statistics :as zss]) (:import [java.util Arrays Map] [com.carrotsearch.hppc DoubleArrayList FloatArrayList] [clojure.lang IFn$DO IFn$DDO] [org.apache.commons.math3.distribution NormalDistribution RealDistribution UniformRealDistribution] [org.apache.commons.math3.random RandomGenerator] [zana.java.arrays Sorter] [zana.java.math Statistics] [zana.java.prob ApproximatelyEqual TranslatedRealDistribution WECDF WEPDF])) (set! *unchecked-math* :warn-on-boxed) ;;---------------------------------------------------------------- ;; TODO: use float arrays but calculate in double to eliminate ;; Math/ulp in comparisons? ;; Probably want to move interface and classes to Java in that ;; case... ;;---------------------------------------------------------------- (defn- to-double ^double [x] (cond (instance? Number x) (.doubleValue ^Number x) (instance? String x) (Double/parseDouble ^String x) :else (throw (IllegalArgumentException. (str "can't convert" (class x) "to double."))))) ;;---------------------------------------------------------------- ;; TODO: move elsewhere? (defn- to-doubles ^doubles [z] (assert (not (nil? z))) (cond (zcc/double-array? z) z (instance? DoubleArrayList z) (.toArray ^DoubleArrayList z) (vector? z) (double-array z) :else (throw (IllegalArgumentException. (str "can't convert " (class z) " to double[]"))))) ;;---------------------------------------------------------------- ;; TODO: move elsewhere? 
(defn- doubles-to-floats ^floats [^doubles z] (let [n (alength z) f (float-array n)] (dotimes [i n] (aset-float f i (float (aget z i)))) f)) ;;---------------------------------------------------------------- ;; TODO: move elsewhere? (defn- to-floats ^floats [z] (assert (not (nil? z))) (cond (zcc/float-array? z) z (zcc/double-array? z) (doubles-to-floats z) (instance? FloatArrayList z) (.toArray ^FloatArrayList z) (instance? DoubleArrayList z) (doubles-to-floats (.toArray ^DoubleArrayList z)) (vector? z) (float-array z) :else (throw (IllegalArgumentException. (str "can't convert " (class z) " to float[]"))))) ;;---------------------------------------------------------------- (defn make-wepdf "Create an instance of <code>WEPDF</code>." (^WEPDF [^RandomGenerator prng z w] (WEPDF/make prng (to-floats z) (to-floats w))) (^WEPDF [z w] (WEPDF/make (to-floats z) (to-floats w))) (^WEPDF [z] (WEPDF/make (to-floats z)))) ;;---------------------------------------------------------------- (defn make-wecdf "Create an instance of <code>WECDF</code>." (^WECDF [^RandomGenerator prng z w] (WECDF/make prng (to-floats z) (to-floats w))) (^WECDF [z w] (WECDF/make (to-floats z) (to-floats w))) (^WECDF [z] (WECDF/make (to-floats z)))) ;;---------------------------------------------------------------- (defn wepdf-to-wecdf "Convert a point mass density representation to a cumulative one." ^WECDF [^WEPDF pdf] (WECDF/make pdf)) (defn wecdf-to-wepdf "Convert a cumulative representation to a point mass density one." 
^WEPDF [^WECDF cdf] (WEPDF/make cdf)) ;;---------------------------------------------------------------- ;; TODO: generic function api for general probability measures (defn pointmass ^double [^RealDistribution rpm ^double z] (.probability rpm z)) (defn cdf ^double [^RealDistribution rpm ^double z] (.cumulativeProbability rpm z)) (defn quantile ^double [^RealDistribution rpm ^double p] (.inverseCumulativeProbability rpm p)) ;;---------------------------------------------------------------- ;; text serialization ;;---------------------------------------------------------------- ;; TODO: JSON/END serialization for RandomGenerator classes (defn map->WEPDF [m] (WEPDF/sortedAndNormalized (:rng m) (to-floats (:z m)) (to-floats (:w m)))) (defn map<-WEPDF [^WEPDF d] {#_:rng #_(.rng d) :z (.getZ d) :w (.getW d)}) (defmethod zccl/clojurize WEPDF [this] (map<-WEPDF this)) (defmethod print-method WEPDF [^WEPDF this ^java.io.Writer w] (if *print-readably* (do (.write w " #zana.java.prob.WEPDF{") #_(.write w ":rng ") #_(.write w (print-str (.rng this))) (.write w " :z ") (.write w (print-str (.getZ this))) (.write w " :w ") (.write w (print-str (.getW this))) (.write w "} ")) (.write w (print-str (map<-WEPDF this))))) ;;---------------------------------------------------------------- (defn map->WECDF [m] (WECDF/sortedAndNormalized (:rng m) (to-floats (:z m)) (to-floats (:w m)))) (defn map<-WECDF [^WECDF d] {#_:rng #_(.rng d) :z (.getZ d) :w (.getW d)}) (defmethod zccl/clojurize WECDF [this] (map<-WECDF this)) (defmethod print-method WECDF [^WECDF this ^java.io.Writer w] (if *print-readably* (do (.write w " #zana.java.prob.WECDF{") #_(.write w ":rng ") #_(.write w (print-str (.rng this))) (.write w " :z ") (.write w (print-str (.getZ this))) (.write w " :w ") (.write w (print-str (.getW this))) (.write w "} ")) (.write w (print-str (map<-WECDF this))))) ;;---------------------------------------------------------------- ;; serializing other RealDistributions to Strings that can be 
used ;; in EDN or TSV files. ;; Not attempting to serialize RandomGenerators. ;;---------------------------------------------------------------- (defn gaussian-distribution ^RealDistribution [^RandomGenerator prng ^double mean ^double standard-deviation] (NormalDistribution. prng mean standard-deviation)) (defn- map<-NormalDistribution ^Map [^NormalDistribution d] {:mean (.getMean d) :standardDeviation (.getStandardDeviation d)}) (defn- map->NormalDistribution ^NormalDistribution [^Map m] (NormalDistribution. (to-double (:mean m)) (to-double (:standardDeviation m)))) (defmethod zccl/clojurize NormalDistribution [^NormalDistribution this] (map<-NormalDistribution this)) (defmethod print-method NormalDistribution [^NormalDistribution this ^java.io.Writer w] (.write w "#org.apache.commons.math3.distribution.") (.write w "NormalDistribution{") (.write w ",:mean,") (.write w (Double/toString (.getMean this))) (.write w ",:standardDeviation,") (.write w (Double/toString (.getStandardDeviation this))) (.write w "}")) ;;---------------------------------------------------------------- (defn uniform-distribution ^RealDistribution [^RandomGenerator prng ^double lower ^double upper] (UniformRealDistribution. prng lower upper)) (defn- map<-UniformRealDistribution ^Map [^UniformRealDistribution d] {:supportLowerBound (.getSupportLowerBound d) :supportUpperBound (.getSupportUpperBound d)}) (defn- map->UniformRealDistribution ^UniformRealDistribution [^Map m] (UniformRealDistribution. 
(to-double (:supportLowerBound m)) (to-double (:supportUpperBound m)))) (defmethod zccl/clojurize UniformRealDistribution [^UniformRealDistribution this] (map<-UniformRealDistribution this)) (defmethod print-method UniformRealDistribution [^UniformRealDistribution this ^java.io.Writer w] (.write w "#org.apache.commons.math3.distribution.") (.write w "UniformRealDistribution{") (.write w ",:supportLowerBound,") (.write w (Double/toString (.getSupportLowerBound this))) (.write w ",:supportUpperBound,") (.write w (Double/toString (.getSupportUpperBound this))) (.write w "}")) ;;---------------------------------------------------------------- (defn- map<-TranslatedRealDistribution ^Map [^TranslatedRealDistribution d] {:dz (.getDz d) :rd (.getRd d)}) (defn- map->TranslatedRealDistribution ^TranslatedRealDistribution [^Map m] ;; handle case where inner distribution is represented by an EDN ;; string (let [dz (to-double (:dz m)) rd (:rd m) rd (if (string? rd) (zedn/read-edn rd) rd)] (TranslatedRealDistribution/shift rd dz))) (defmethod zccl/clojurize TranslatedRealDistribution [^TranslatedRealDistribution this] (map<-TranslatedRealDistribution this)) (defmethod print-method TranslatedRealDistribution [^TranslatedRealDistribution this ^java.io.Writer w] (.write w " #zana.java.prob.") (.write w "TranslatedRealDistribution{") (.write w ",:dz,") (.write w (Double/toString (.getDz this))) (.write w ",:rd,") (.write w (print-str (.getRd this))) (.write w "}")) ;;---------------------------------------------------------------- ;; EDN ;;---------------------------------------------------------------- (zedn/add-edn-readers! 
{'zana.java.prob.WEPDF map->WEPDF 'zana.java.prob.WECDF map->WECDF 'org.apache.commons.math3.distribution.NormalDistribution map->NormalDistribution 'org.apache.commons.math3.distribution.UniformRealDistribution map->UniformRealDistribution 'zana.java.prob.TranslatedRealDistribution map->TranslatedRealDistribution}) ;;---------------------------------------------------------------- ;; JSON output (input not supported) ;;---------------------------------------------------------------- (defn- WEPDF-encoder [^WEPDF d json-generator] (cheshire.generate/encode-map (map<-WEPDF d) json-generator)) (cheshire.generate/add-encoder zana.java.prob.WEPDF WEPDF-encoder) (defn- WECDF-encoder [^WECDF d json-generator] (cheshire.generate/encode-map (map<-WECDF d) json-generator)) (cheshire.generate/add-encoder zana.java.prob.WECDF WECDF-encoder) (defn- NormalDistribution-encoder [^NormalDistribution d json-generator] (cheshire.generate/encode-map (map<-NormalDistribution d) json-generator)) (cheshire.generate/add-encoder org.apache.commons.math3.distribution.NormalDistribution NormalDistribution-encoder) (defn- UniformRealDistribution-encoder [^UniformRealDistribution d json-generator] (cheshire.generate/encode-map (map<-UniformRealDistribution d) json-generator)) (cheshire.generate/add-encoder org.apache.commons.math3.distribution.UniformRealDistribution UniformRealDistribution-encoder) ;;----------------------------------------------------------------
15471
(set! *warn-on-reflection* true) (set! *unchecked-math* false) ;; warnings in cheshire.generate (ns ^{:author "<EMAIL>" :date "2017-12-06" :doc "Probability measures over <b>R</b>." } zana.prob.measure (:refer-clojure :exclude [every?]) (:require [cheshire.generate] [zana.commons.core :as zcc] [zana.collections.clojurize :as zccl] [zana.io.edn :as zedn] [zana.stats.statistics :as zss]) (:import [java.util Arrays Map] [com.carrotsearch.hppc DoubleArrayList FloatArrayList] [clojure.lang IFn$DO IFn$DDO] [org.apache.commons.math3.distribution NormalDistribution RealDistribution UniformRealDistribution] [org.apache.commons.math3.random RandomGenerator] [zana.java.arrays Sorter] [zana.java.math Statistics] [zana.java.prob ApproximatelyEqual TranslatedRealDistribution WECDF WEPDF])) (set! *unchecked-math* :warn-on-boxed) ;;---------------------------------------------------------------- ;; TODO: use float arrays but calculate in double to eliminate ;; Math/ulp in comparisons? ;; Probably want to move interface and classes to Java in that ;; case... ;;---------------------------------------------------------------- (defn- to-double ^double [x] (cond (instance? Number x) (.doubleValue ^Number x) (instance? String x) (Double/parseDouble ^String x) :else (throw (IllegalArgumentException. (str "can't convert" (class x) "to double."))))) ;;---------------------------------------------------------------- ;; TODO: move elsewhere? (defn- to-doubles ^doubles [z] (assert (not (nil? z))) (cond (zcc/double-array? z) z (instance? DoubleArrayList z) (.toArray ^DoubleArrayList z) (vector? z) (double-array z) :else (throw (IllegalArgumentException. (str "can't convert " (class z) " to double[]"))))) ;;---------------------------------------------------------------- ;; TODO: move elsewhere? 
(defn- doubles-to-floats ^floats [^doubles z] (let [n (alength z) f (float-array n)] (dotimes [i n] (aset-float f i (float (aget z i)))) f)) ;;---------------------------------------------------------------- ;; TODO: move elsewhere? (defn- to-floats ^floats [z] (assert (not (nil? z))) (cond (zcc/float-array? z) z (zcc/double-array? z) (doubles-to-floats z) (instance? FloatArrayList z) (.toArray ^FloatArrayList z) (instance? DoubleArrayList z) (doubles-to-floats (.toArray ^DoubleArrayList z)) (vector? z) (float-array z) :else (throw (IllegalArgumentException. (str "can't convert " (class z) " to float[]"))))) ;;---------------------------------------------------------------- (defn make-wepdf "Create an instance of <code>WEPDF</code>." (^WEPDF [^RandomGenerator prng z w] (WEPDF/make prng (to-floats z) (to-floats w))) (^WEPDF [z w] (WEPDF/make (to-floats z) (to-floats w))) (^WEPDF [z] (WEPDF/make (to-floats z)))) ;;---------------------------------------------------------------- (defn make-wecdf "Create an instance of <code>WECDF</code>." (^WECDF [^RandomGenerator prng z w] (WECDF/make prng (to-floats z) (to-floats w))) (^WECDF [z w] (WECDF/make (to-floats z) (to-floats w))) (^WECDF [z] (WECDF/make (to-floats z)))) ;;---------------------------------------------------------------- (defn wepdf-to-wecdf "Convert a point mass density representation to a cumulative one." ^WECDF [^WEPDF pdf] (WECDF/make pdf)) (defn wecdf-to-wepdf "Convert a cumulative representation to a point mass density one." 
^WEPDF [^WECDF cdf] (WEPDF/make cdf)) ;;---------------------------------------------------------------- ;; TODO: generic function api for general probability measures (defn pointmass ^double [^RealDistribution rpm ^double z] (.probability rpm z)) (defn cdf ^double [^RealDistribution rpm ^double z] (.cumulativeProbability rpm z)) (defn quantile ^double [^RealDistribution rpm ^double p] (.inverseCumulativeProbability rpm p)) ;;---------------------------------------------------------------- ;; text serialization ;;---------------------------------------------------------------- ;; TODO: JSON/END serialization for RandomGenerator classes (defn map->WEPDF [m] (WEPDF/sortedAndNormalized (:rng m) (to-floats (:z m)) (to-floats (:w m)))) (defn map<-WEPDF [^WEPDF d] {#_:rng #_(.rng d) :z (.getZ d) :w (.getW d)}) (defmethod zccl/clojurize WEPDF [this] (map<-WEPDF this)) (defmethod print-method WEPDF [^WEPDF this ^java.io.Writer w] (if *print-readably* (do (.write w " #zana.java.prob.WEPDF{") #_(.write w ":rng ") #_(.write w (print-str (.rng this))) (.write w " :z ") (.write w (print-str (.getZ this))) (.write w " :w ") (.write w (print-str (.getW this))) (.write w "} ")) (.write w (print-str (map<-WEPDF this))))) ;;---------------------------------------------------------------- (defn map->WECDF [m] (WECDF/sortedAndNormalized (:rng m) (to-floats (:z m)) (to-floats (:w m)))) (defn map<-WECDF [^WECDF d] {#_:rng #_(.rng d) :z (.getZ d) :w (.getW d)}) (defmethod zccl/clojurize WECDF [this] (map<-WECDF this)) (defmethod print-method WECDF [^WECDF this ^java.io.Writer w] (if *print-readably* (do (.write w " #zana.java.prob.WECDF{") #_(.write w ":rng ") #_(.write w (print-str (.rng this))) (.write w " :z ") (.write w (print-str (.getZ this))) (.write w " :w ") (.write w (print-str (.getW this))) (.write w "} ")) (.write w (print-str (map<-WECDF this))))) ;;---------------------------------------------------------------- ;; serializing other RealDistributions to Strings that can be 
used ;; in EDN or TSV files. ;; Not attempting to serialize RandomGenerators. ;;---------------------------------------------------------------- (defn gaussian-distribution ^RealDistribution [^RandomGenerator prng ^double mean ^double standard-deviation] (NormalDistribution. prng mean standard-deviation)) (defn- map<-NormalDistribution ^Map [^NormalDistribution d] {:mean (.getMean d) :standardDeviation (.getStandardDeviation d)}) (defn- map->NormalDistribution ^NormalDistribution [^Map m] (NormalDistribution. (to-double (:mean m)) (to-double (:standardDeviation m)))) (defmethod zccl/clojurize NormalDistribution [^NormalDistribution this] (map<-NormalDistribution this)) (defmethod print-method NormalDistribution [^NormalDistribution this ^java.io.Writer w] (.write w "#org.apache.commons.math3.distribution.") (.write w "NormalDistribution{") (.write w ",:mean,") (.write w (Double/toString (.getMean this))) (.write w ",:standardDeviation,") (.write w (Double/toString (.getStandardDeviation this))) (.write w "}")) ;;---------------------------------------------------------------- (defn uniform-distribution ^RealDistribution [^RandomGenerator prng ^double lower ^double upper] (UniformRealDistribution. prng lower upper)) (defn- map<-UniformRealDistribution ^Map [^UniformRealDistribution d] {:supportLowerBound (.getSupportLowerBound d) :supportUpperBound (.getSupportUpperBound d)}) (defn- map->UniformRealDistribution ^UniformRealDistribution [^Map m] (UniformRealDistribution. 
(to-double (:supportLowerBound m)) (to-double (:supportUpperBound m)))) (defmethod zccl/clojurize UniformRealDistribution [^UniformRealDistribution this] (map<-UniformRealDistribution this)) (defmethod print-method UniformRealDistribution [^UniformRealDistribution this ^java.io.Writer w] (.write w "#org.apache.commons.math3.distribution.") (.write w "UniformRealDistribution{") (.write w ",:supportLowerBound,") (.write w (Double/toString (.getSupportLowerBound this))) (.write w ",:supportUpperBound,") (.write w (Double/toString (.getSupportUpperBound this))) (.write w "}")) ;;---------------------------------------------------------------- (defn- map<-TranslatedRealDistribution ^Map [^TranslatedRealDistribution d] {:dz (.getDz d) :rd (.getRd d)}) (defn- map->TranslatedRealDistribution ^TranslatedRealDistribution [^Map m] ;; handle case where inner distribution is represented by an EDN ;; string (let [dz (to-double (:dz m)) rd (:rd m) rd (if (string? rd) (zedn/read-edn rd) rd)] (TranslatedRealDistribution/shift rd dz))) (defmethod zccl/clojurize TranslatedRealDistribution [^TranslatedRealDistribution this] (map<-TranslatedRealDistribution this)) (defmethod print-method TranslatedRealDistribution [^TranslatedRealDistribution this ^java.io.Writer w] (.write w " #zana.java.prob.") (.write w "TranslatedRealDistribution{") (.write w ",:dz,") (.write w (Double/toString (.getDz this))) (.write w ",:rd,") (.write w (print-str (.getRd this))) (.write w "}")) ;;---------------------------------------------------------------- ;; EDN ;;---------------------------------------------------------------- (zedn/add-edn-readers! 
{'zana.java.prob.WEPDF map->WEPDF 'zana.java.prob.WECDF map->WECDF 'org.apache.commons.math3.distribution.NormalDistribution map->NormalDistribution 'org.apache.commons.math3.distribution.UniformRealDistribution map->UniformRealDistribution 'zana.java.prob.TranslatedRealDistribution map->TranslatedRealDistribution}) ;;---------------------------------------------------------------- ;; JSON output (input not supported) ;;---------------------------------------------------------------- (defn- WEPDF-encoder [^WEPDF d json-generator] (cheshire.generate/encode-map (map<-WEPDF d) json-generator)) (cheshire.generate/add-encoder zana.java.prob.WEPDF WEPDF-encoder) (defn- WECDF-encoder [^WECDF d json-generator] (cheshire.generate/encode-map (map<-WECDF d) json-generator)) (cheshire.generate/add-encoder zana.java.prob.WECDF WECDF-encoder) (defn- NormalDistribution-encoder [^NormalDistribution d json-generator] (cheshire.generate/encode-map (map<-NormalDistribution d) json-generator)) (cheshire.generate/add-encoder org.apache.commons.math3.distribution.NormalDistribution NormalDistribution-encoder) (defn- UniformRealDistribution-encoder [^UniformRealDistribution d json-generator] (cheshire.generate/encode-map (map<-UniformRealDistribution d) json-generator)) (cheshire.generate/add-encoder org.apache.commons.math3.distribution.UniformRealDistribution UniformRealDistribution-encoder) ;;----------------------------------------------------------------
true
(set! *warn-on-reflection* true) (set! *unchecked-math* false) ;; warnings in cheshire.generate (ns ^{:author "PI:EMAIL:<EMAIL>END_PI" :date "2017-12-06" :doc "Probability measures over <b>R</b>." } zana.prob.measure (:refer-clojure :exclude [every?]) (:require [cheshire.generate] [zana.commons.core :as zcc] [zana.collections.clojurize :as zccl] [zana.io.edn :as zedn] [zana.stats.statistics :as zss]) (:import [java.util Arrays Map] [com.carrotsearch.hppc DoubleArrayList FloatArrayList] [clojure.lang IFn$DO IFn$DDO] [org.apache.commons.math3.distribution NormalDistribution RealDistribution UniformRealDistribution] [org.apache.commons.math3.random RandomGenerator] [zana.java.arrays Sorter] [zana.java.math Statistics] [zana.java.prob ApproximatelyEqual TranslatedRealDistribution WECDF WEPDF])) (set! *unchecked-math* :warn-on-boxed) ;;---------------------------------------------------------------- ;; TODO: use float arrays but calculate in double to eliminate ;; Math/ulp in comparisons? ;; Probably want to move interface and classes to Java in that ;; case... ;;---------------------------------------------------------------- (defn- to-double ^double [x] (cond (instance? Number x) (.doubleValue ^Number x) (instance? String x) (Double/parseDouble ^String x) :else (throw (IllegalArgumentException. (str "can't convert" (class x) "to double."))))) ;;---------------------------------------------------------------- ;; TODO: move elsewhere? (defn- to-doubles ^doubles [z] (assert (not (nil? z))) (cond (zcc/double-array? z) z (instance? DoubleArrayList z) (.toArray ^DoubleArrayList z) (vector? z) (double-array z) :else (throw (IllegalArgumentException. (str "can't convert " (class z) " to double[]"))))) ;;---------------------------------------------------------------- ;; TODO: move elsewhere? 
(defn- doubles-to-floats ^floats [^doubles z] (let [n (alength z) f (float-array n)] (dotimes [i n] (aset-float f i (float (aget z i)))) f)) ;;---------------------------------------------------------------- ;; TODO: move elsewhere? (defn- to-floats ^floats [z] (assert (not (nil? z))) (cond (zcc/float-array? z) z (zcc/double-array? z) (doubles-to-floats z) (instance? FloatArrayList z) (.toArray ^FloatArrayList z) (instance? DoubleArrayList z) (doubles-to-floats (.toArray ^DoubleArrayList z)) (vector? z) (float-array z) :else (throw (IllegalArgumentException. (str "can't convert " (class z) " to float[]"))))) ;;---------------------------------------------------------------- (defn make-wepdf "Create an instance of <code>WEPDF</code>." (^WEPDF [^RandomGenerator prng z w] (WEPDF/make prng (to-floats z) (to-floats w))) (^WEPDF [z w] (WEPDF/make (to-floats z) (to-floats w))) (^WEPDF [z] (WEPDF/make (to-floats z)))) ;;---------------------------------------------------------------- (defn make-wecdf "Create an instance of <code>WECDF</code>." (^WECDF [^RandomGenerator prng z w] (WECDF/make prng (to-floats z) (to-floats w))) (^WECDF [z w] (WECDF/make (to-floats z) (to-floats w))) (^WECDF [z] (WECDF/make (to-floats z)))) ;;---------------------------------------------------------------- (defn wepdf-to-wecdf "Convert a point mass density representation to a cumulative one." ^WECDF [^WEPDF pdf] (WECDF/make pdf)) (defn wecdf-to-wepdf "Convert a cumulative representation to a point mass density one." 
^WEPDF [^WECDF cdf] (WEPDF/make cdf)) ;;---------------------------------------------------------------- ;; TODO: generic function api for general probability measures (defn pointmass ^double [^RealDistribution rpm ^double z] (.probability rpm z)) (defn cdf ^double [^RealDistribution rpm ^double z] (.cumulativeProbability rpm z)) (defn quantile ^double [^RealDistribution rpm ^double p] (.inverseCumulativeProbability rpm p)) ;;---------------------------------------------------------------- ;; text serialization ;;---------------------------------------------------------------- ;; TODO: JSON/END serialization for RandomGenerator classes (defn map->WEPDF [m] (WEPDF/sortedAndNormalized (:rng m) (to-floats (:z m)) (to-floats (:w m)))) (defn map<-WEPDF [^WEPDF d] {#_:rng #_(.rng d) :z (.getZ d) :w (.getW d)}) (defmethod zccl/clojurize WEPDF [this] (map<-WEPDF this)) (defmethod print-method WEPDF [^WEPDF this ^java.io.Writer w] (if *print-readably* (do (.write w " #zana.java.prob.WEPDF{") #_(.write w ":rng ") #_(.write w (print-str (.rng this))) (.write w " :z ") (.write w (print-str (.getZ this))) (.write w " :w ") (.write w (print-str (.getW this))) (.write w "} ")) (.write w (print-str (map<-WEPDF this))))) ;;---------------------------------------------------------------- (defn map->WECDF [m] (WECDF/sortedAndNormalized (:rng m) (to-floats (:z m)) (to-floats (:w m)))) (defn map<-WECDF [^WECDF d] {#_:rng #_(.rng d) :z (.getZ d) :w (.getW d)}) (defmethod zccl/clojurize WECDF [this] (map<-WECDF this)) (defmethod print-method WECDF [^WECDF this ^java.io.Writer w] (if *print-readably* (do (.write w " #zana.java.prob.WECDF{") #_(.write w ":rng ") #_(.write w (print-str (.rng this))) (.write w " :z ") (.write w (print-str (.getZ this))) (.write w " :w ") (.write w (print-str (.getW this))) (.write w "} ")) (.write w (print-str (map<-WECDF this))))) ;;---------------------------------------------------------------- ;; serializing other RealDistributions to Strings that can be 
used ;; in EDN or TSV files. ;; Not attempting to serialize RandomGenerators. ;;---------------------------------------------------------------- (defn gaussian-distribution ^RealDistribution [^RandomGenerator prng ^double mean ^double standard-deviation] (NormalDistribution. prng mean standard-deviation)) (defn- map<-NormalDistribution ^Map [^NormalDistribution d] {:mean (.getMean d) :standardDeviation (.getStandardDeviation d)}) (defn- map->NormalDistribution ^NormalDistribution [^Map m] (NormalDistribution. (to-double (:mean m)) (to-double (:standardDeviation m)))) (defmethod zccl/clojurize NormalDistribution [^NormalDistribution this] (map<-NormalDistribution this)) (defmethod print-method NormalDistribution [^NormalDistribution this ^java.io.Writer w] (.write w "#org.apache.commons.math3.distribution.") (.write w "NormalDistribution{") (.write w ",:mean,") (.write w (Double/toString (.getMean this))) (.write w ",:standardDeviation,") (.write w (Double/toString (.getStandardDeviation this))) (.write w "}")) ;;---------------------------------------------------------------- (defn uniform-distribution ^RealDistribution [^RandomGenerator prng ^double lower ^double upper] (UniformRealDistribution. prng lower upper)) (defn- map<-UniformRealDistribution ^Map [^UniformRealDistribution d] {:supportLowerBound (.getSupportLowerBound d) :supportUpperBound (.getSupportUpperBound d)}) (defn- map->UniformRealDistribution ^UniformRealDistribution [^Map m] (UniformRealDistribution. 
(to-double (:supportLowerBound m)) (to-double (:supportUpperBound m)))) (defmethod zccl/clojurize UniformRealDistribution [^UniformRealDistribution this] (map<-UniformRealDistribution this)) (defmethod print-method UniformRealDistribution [^UniformRealDistribution this ^java.io.Writer w] (.write w "#org.apache.commons.math3.distribution.") (.write w "UniformRealDistribution{") (.write w ",:supportLowerBound,") (.write w (Double/toString (.getSupportLowerBound this))) (.write w ",:supportUpperBound,") (.write w (Double/toString (.getSupportUpperBound this))) (.write w "}")) ;;---------------------------------------------------------------- (defn- map<-TranslatedRealDistribution ^Map [^TranslatedRealDistribution d] {:dz (.getDz d) :rd (.getRd d)}) (defn- map->TranslatedRealDistribution ^TranslatedRealDistribution [^Map m] ;; handle case where inner distribution is represented by an EDN ;; string (let [dz (to-double (:dz m)) rd (:rd m) rd (if (string? rd) (zedn/read-edn rd) rd)] (TranslatedRealDistribution/shift rd dz))) (defmethod zccl/clojurize TranslatedRealDistribution [^TranslatedRealDistribution this] (map<-TranslatedRealDistribution this)) (defmethod print-method TranslatedRealDistribution [^TranslatedRealDistribution this ^java.io.Writer w] (.write w " #zana.java.prob.") (.write w "TranslatedRealDistribution{") (.write w ",:dz,") (.write w (Double/toString (.getDz this))) (.write w ",:rd,") (.write w (print-str (.getRd this))) (.write w "}")) ;;---------------------------------------------------------------- ;; EDN ;;---------------------------------------------------------------- (zedn/add-edn-readers! 
{'zana.java.prob.WEPDF map->WEPDF 'zana.java.prob.WECDF map->WECDF 'org.apache.commons.math3.distribution.NormalDistribution map->NormalDistribution 'org.apache.commons.math3.distribution.UniformRealDistribution map->UniformRealDistribution 'zana.java.prob.TranslatedRealDistribution map->TranslatedRealDistribution}) ;;---------------------------------------------------------------- ;; JSON output (input not supported) ;;---------------------------------------------------------------- (defn- WEPDF-encoder [^WEPDF d json-generator] (cheshire.generate/encode-map (map<-WEPDF d) json-generator)) (cheshire.generate/add-encoder zana.java.prob.WEPDF WEPDF-encoder) (defn- WECDF-encoder [^WECDF d json-generator] (cheshire.generate/encode-map (map<-WECDF d) json-generator)) (cheshire.generate/add-encoder zana.java.prob.WECDF WECDF-encoder) (defn- NormalDistribution-encoder [^NormalDistribution d json-generator] (cheshire.generate/encode-map (map<-NormalDistribution d) json-generator)) (cheshire.generate/add-encoder org.apache.commons.math3.distribution.NormalDistribution NormalDistribution-encoder) (defn- UniformRealDistribution-encoder [^UniformRealDistribution d json-generator] (cheshire.generate/encode-map (map<-UniformRealDistribution d) json-generator)) (cheshire.generate/add-encoder org.apache.commons.math3.distribution.UniformRealDistribution UniformRealDistribution-encoder) ;;----------------------------------------------------------------
[ { "context": " [c/text-input \"Email\" :email \"enter a email, EX: example@xx.com\" reg-data]\n [c/password-input \"密码\" :passw", "end": 3801, "score": 0.9999153017997742, "start": 3787, "tag": "EMAIL", "value": "example@xx.com" }, { "context": "-data]\n [c/password-input \"密码\" :password \"输入密码最少8位\" reg-data]\n [c/password-input \"确认密码\" :", "end": 3861, "score": 0.9868144392967224, "start": 3856, "tag": "PASSWORD", "value": "输入密码最" } ]
src/cljs/soul_talk/register.cljs
yuppieghost/soul-talk
0
(ns soul-talk.register (:require [domina :as dom] [reagent.core :as reagent :refer [atom]] [soul-talk.auth-validate :as validate] [ajax.core :as ajax :refer [POST]] [reagent.session :as session] [taoensso.timbre :as log] [soul-talk.components.common :as c] )) (defn validate-invalid [input vali-fun] (if-not (vali-fun (.-value input)) (dom/add-class! input "is-invalid") (dom/remove-class! input "is-invalid"))) (defn register! [reg-date errors] (reset! errors (validate/reg-errors @reg-date)) (if-not @errors (POST "/register" {:format :json :headers {"Accept" "application/transit+json"} :params @reg-date :handler #(do (session/put! :identity (:email @reg-date)) (reset! reg-date {}) (js/alert "注册成功") (set! (.. js/window -location -href) "/login")) :error-handler #(reset! errors {:server-error (get-in % [:response "message"])})}) (let [error (vals @errors) msg (ffirst error)] (js/alert msg)))) (defn register-component [] (let [reg-data (atom {}) error (atom nil)] (fn [] [:div.container [:div#loginForm.form-signin [:h1.h3.mb-3.font-weight-normal.text-center "注册"] [:div.form-group [:label "邮箱"] [:input#email.form-control {:type "text" :name "email" :auto-focus true :placeholder "xx@xx.xx" :on-change (fn [e] (let [d (.. e -target)] (swap! reg-data assoc :email (.-value d)) (validate-invalid d validate/validate-email))) :value (:email @reg-data)}] [:div.invalid-feedback "无效的 Email"]] [:div.form-group [:label "密码"] [:input#password.form-control {:type "password" :name "password" :placeholder "密码" :on-change (fn [e] (let [d (.-target e)] (swap! reg-data assoc :password (.-value d)) (validate-invalid d validate/validate-passoword))) :value (:password @reg-data)}] [:div.invalid-feedback "无效的密码"]] [:div.form-group [:label "重复密码"] [:input#pass-confirm.form-control {:type "password" :name "pass-confirm" :placeholder "重复密码" :on-change (fn [e] (let [d (.-target e)] (swap! 
reg-data assoc :pass-confirm (.-value d)) (validate-invalid d validate/validate-passoword))) :value (:pass-confirm @reg-data)}] [:div.invalid-feedback "无效的密码"]] (when-not [error (:client-error @error)] [:div#error.alert.alert-danger error]) (when-not [error (:server-error @error)] [:div#error.alert.alert-danger error]) [:input#submit.btn.btn-primary.btn-lg.btn-block {:type :submit :value "保存" :on-click #(register! reg-data error)}] [:p.mt-5.mb-3.text-muted "©copy @2018"]]]))) (defn register-component [] (let [reg-data (atom {}) error (atom nil)] (fn [] [:div.container [:div#loginForm.form-signin [:h1.h3.mb-3.font-weight-normal.text-center "Soul Talk"] [:div [:div.well.well-sm "* 为必填"] [c/text-input "Email" :email "enter a email, EX: example@xx.com" reg-data] [c/password-input "密码" :password "输入密码最少8位" reg-data] [c/password-input "确认密码" :pass-confirm "确认密码和上面一样" reg-data] (when-let [error (:server-error @error)] [:div.alert.alert-danger error])] [:div [:input.btn.btn-primary.btn-block {:type :submit :value "注册" :on-click #(register! reg-data error)}] [:input.btn.btn-primary.btn-block {:type :submit :value "登录" :on-click #(set! (.. js/window -location -href) "/login")}]] [:p.mt-5.mb-3.text-muted "©copy @2018"]]]))) (defn load-page [] (reagent/render [register-component] (dom/by-id "app"))) (defn ^:export init [] (if (and js/document (.-getElementById js/document)) (load-page)))
123653
(ns soul-talk.register (:require [domina :as dom] [reagent.core :as reagent :refer [atom]] [soul-talk.auth-validate :as validate] [ajax.core :as ajax :refer [POST]] [reagent.session :as session] [taoensso.timbre :as log] [soul-talk.components.common :as c] )) (defn validate-invalid [input vali-fun] (if-not (vali-fun (.-value input)) (dom/add-class! input "is-invalid") (dom/remove-class! input "is-invalid"))) (defn register! [reg-date errors] (reset! errors (validate/reg-errors @reg-date)) (if-not @errors (POST "/register" {:format :json :headers {"Accept" "application/transit+json"} :params @reg-date :handler #(do (session/put! :identity (:email @reg-date)) (reset! reg-date {}) (js/alert "注册成功") (set! (.. js/window -location -href) "/login")) :error-handler #(reset! errors {:server-error (get-in % [:response "message"])})}) (let [error (vals @errors) msg (ffirst error)] (js/alert msg)))) (defn register-component [] (let [reg-data (atom {}) error (atom nil)] (fn [] [:div.container [:div#loginForm.form-signin [:h1.h3.mb-3.font-weight-normal.text-center "注册"] [:div.form-group [:label "邮箱"] [:input#email.form-control {:type "text" :name "email" :auto-focus true :placeholder "xx@xx.xx" :on-change (fn [e] (let [d (.. e -target)] (swap! reg-data assoc :email (.-value d)) (validate-invalid d validate/validate-email))) :value (:email @reg-data)}] [:div.invalid-feedback "无效的 Email"]] [:div.form-group [:label "密码"] [:input#password.form-control {:type "password" :name "password" :placeholder "密码" :on-change (fn [e] (let [d (.-target e)] (swap! reg-data assoc :password (.-value d)) (validate-invalid d validate/validate-passoword))) :value (:password @reg-data)}] [:div.invalid-feedback "无效的密码"]] [:div.form-group [:label "重复密码"] [:input#pass-confirm.form-control {:type "password" :name "pass-confirm" :placeholder "重复密码" :on-change (fn [e] (let [d (.-target e)] (swap! 
reg-data assoc :pass-confirm (.-value d)) (validate-invalid d validate/validate-passoword))) :value (:pass-confirm @reg-data)}] [:div.invalid-feedback "无效的密码"]] (when-not [error (:client-error @error)] [:div#error.alert.alert-danger error]) (when-not [error (:server-error @error)] [:div#error.alert.alert-danger error]) [:input#submit.btn.btn-primary.btn-lg.btn-block {:type :submit :value "保存" :on-click #(register! reg-data error)}] [:p.mt-5.mb-3.text-muted "©copy @2018"]]]))) (defn register-component [] (let [reg-data (atom {}) error (atom nil)] (fn [] [:div.container [:div#loginForm.form-signin [:h1.h3.mb-3.font-weight-normal.text-center "Soul Talk"] [:div [:div.well.well-sm "* 为必填"] [c/text-input "Email" :email "enter a email, EX: <EMAIL>" reg-data] [c/password-input "密码" :password "<PASSWORD>少8位" reg-data] [c/password-input "确认密码" :pass-confirm "确认密码和上面一样" reg-data] (when-let [error (:server-error @error)] [:div.alert.alert-danger error])] [:div [:input.btn.btn-primary.btn-block {:type :submit :value "注册" :on-click #(register! reg-data error)}] [:input.btn.btn-primary.btn-block {:type :submit :value "登录" :on-click #(set! (.. js/window -location -href) "/login")}]] [:p.mt-5.mb-3.text-muted "©copy @2018"]]]))) (defn load-page [] (reagent/render [register-component] (dom/by-id "app"))) (defn ^:export init [] (if (and js/document (.-getElementById js/document)) (load-page)))
true
(ns soul-talk.register (:require [domina :as dom] [reagent.core :as reagent :refer [atom]] [soul-talk.auth-validate :as validate] [ajax.core :as ajax :refer [POST]] [reagent.session :as session] [taoensso.timbre :as log] [soul-talk.components.common :as c] )) (defn validate-invalid [input vali-fun] (if-not (vali-fun (.-value input)) (dom/add-class! input "is-invalid") (dom/remove-class! input "is-invalid"))) (defn register! [reg-date errors] (reset! errors (validate/reg-errors @reg-date)) (if-not @errors (POST "/register" {:format :json :headers {"Accept" "application/transit+json"} :params @reg-date :handler #(do (session/put! :identity (:email @reg-date)) (reset! reg-date {}) (js/alert "注册成功") (set! (.. js/window -location -href) "/login")) :error-handler #(reset! errors {:server-error (get-in % [:response "message"])})}) (let [error (vals @errors) msg (ffirst error)] (js/alert msg)))) (defn register-component [] (let [reg-data (atom {}) error (atom nil)] (fn [] [:div.container [:div#loginForm.form-signin [:h1.h3.mb-3.font-weight-normal.text-center "注册"] [:div.form-group [:label "邮箱"] [:input#email.form-control {:type "text" :name "email" :auto-focus true :placeholder "xx@xx.xx" :on-change (fn [e] (let [d (.. e -target)] (swap! reg-data assoc :email (.-value d)) (validate-invalid d validate/validate-email))) :value (:email @reg-data)}] [:div.invalid-feedback "无效的 Email"]] [:div.form-group [:label "密码"] [:input#password.form-control {:type "password" :name "password" :placeholder "密码" :on-change (fn [e] (let [d (.-target e)] (swap! reg-data assoc :password (.-value d)) (validate-invalid d validate/validate-passoword))) :value (:password @reg-data)}] [:div.invalid-feedback "无效的密码"]] [:div.form-group [:label "重复密码"] [:input#pass-confirm.form-control {:type "password" :name "pass-confirm" :placeholder "重复密码" :on-change (fn [e] (let [d (.-target e)] (swap! 
reg-data assoc :pass-confirm (.-value d)) (validate-invalid d validate/validate-passoword))) :value (:pass-confirm @reg-data)}] [:div.invalid-feedback "无效的密码"]] (when-not [error (:client-error @error)] [:div#error.alert.alert-danger error]) (when-not [error (:server-error @error)] [:div#error.alert.alert-danger error]) [:input#submit.btn.btn-primary.btn-lg.btn-block {:type :submit :value "保存" :on-click #(register! reg-data error)}] [:p.mt-5.mb-3.text-muted "©copy @2018"]]]))) (defn register-component [] (let [reg-data (atom {}) error (atom nil)] (fn [] [:div.container [:div#loginForm.form-signin [:h1.h3.mb-3.font-weight-normal.text-center "Soul Talk"] [:div [:div.well.well-sm "* 为必填"] [c/text-input "Email" :email "enter a email, EX: PI:EMAIL:<EMAIL>END_PI" reg-data] [c/password-input "密码" :password "PI:PASSWORD:<PASSWORD>END_PI少8位" reg-data] [c/password-input "确认密码" :pass-confirm "确认密码和上面一样" reg-data] (when-let [error (:server-error @error)] [:div.alert.alert-danger error])] [:div [:input.btn.btn-primary.btn-block {:type :submit :value "注册" :on-click #(register! reg-data error)}] [:input.btn.btn-primary.btn-block {:type :submit :value "登录" :on-click #(set! (.. js/window -location -href) "/login")}]] [:p.mt-5.mb-3.text-muted "©copy @2018"]]]))) (defn load-page [] (reagent/render [register-component] (dom/by-id "app"))) (defn ^:export init [] (if (and js/document (.-getElementById js/document)) (load-page)))
[ { "context": ">> feed-items\n (rss/channel-xml {:title \"John Jacobsen\"\n :link (str \"http:/", "end": 2217, "score": 0.9991579055786133, "start": 2206, "tag": "NAME", "value": "John Jacobs" }, { "context": " :description \"Posts by John Jacobsen\"})\n (spit rss-file-path)))))\n\n(defn rss-", "end": 2347, "score": 0.999664843082428, "start": 2334, "tag": "NAME", "value": "John Jacobsen" } ]
src/organa/rss.clj
eigenhombre/organa
14
(ns organa.rss (:require [clj-rss.core :as rss] [clojure.walk :as walk] [net.cgrand.enlive-html :as html] [organa.config :refer [config]] [organa.html :as h]) (:import [java.time Instant])) ;; FIXME: (def ^:private target-dir (:target-dir config)) ;; FIXME: (def ^:private remote-host (:remote-host config)) (defn ^:private html-for-rss " Remove JavaScript and CSS bits from Org-generated HTML for RSS feed. " [parsed-html] (walk/prewalk (fn [x] (if (and (map? x) (#{"text/css" "text/javascript"} (get-in x [:attrs :type]))) (dissoc (into {} x) :content) x)) parsed-html)) (defn make-rss-feeds " Create RSS feeds for blog. FIXME: reduce arity FIXME: make more configurable " {:doc/format :markdown} ([topic rss-file-name org-files] (make-rss-feeds rss-file-name (filter (comp (partial some #{topic}) :tags) org-files))) ([rss-file-name org-files] (let [rss-file-path (str target-dir "/" rss-file-name) posts-for-feed (->> org-files (remove :static?) (remove :draft?) (take 20)) feed-items (for [f posts-for-feed :let [file-name (:file-name f) link-path (format "http://%s/%s.html" remote-host file-name)]] {:title (:title f) :link link-path :pubDate (Instant/ofEpochMilli (.getMillis ^org.joda.time.DateTime (:date f))) :description (format "<![CDATA[ %s ]]>" (->> f :parsed html-for-rss html/emit* (apply str)))})] (->> feed-items (rss/channel-xml {:title "John Jacobsen" :link (str "http://" remote-host) :description "Posts by John Jacobsen"}) (spit rss-file-path))))) (defn rss-links " Create HTML paragraph (`p`) of links for available RSS feeds. " {:doc/format :markdown} [] (h/p ["Subscribe: " (h/a {:href "feed.xml" :class "rss"} ["RSS feed ... all topics"]) " ... or " (h/a {:href "feed.clojure.xml" :class "rss"} ["Clojure only"]) " / " (h/a {:href "feed.lisp.xml" :class "rss"} ["Lisp only"])]))
27644
(ns organa.rss (:require [clj-rss.core :as rss] [clojure.walk :as walk] [net.cgrand.enlive-html :as html] [organa.config :refer [config]] [organa.html :as h]) (:import [java.time Instant])) ;; FIXME: (def ^:private target-dir (:target-dir config)) ;; FIXME: (def ^:private remote-host (:remote-host config)) (defn ^:private html-for-rss " Remove JavaScript and CSS bits from Org-generated HTML for RSS feed. " [parsed-html] (walk/prewalk (fn [x] (if (and (map? x) (#{"text/css" "text/javascript"} (get-in x [:attrs :type]))) (dissoc (into {} x) :content) x)) parsed-html)) (defn make-rss-feeds " Create RSS feeds for blog. FIXME: reduce arity FIXME: make more configurable " {:doc/format :markdown} ([topic rss-file-name org-files] (make-rss-feeds rss-file-name (filter (comp (partial some #{topic}) :tags) org-files))) ([rss-file-name org-files] (let [rss-file-path (str target-dir "/" rss-file-name) posts-for-feed (->> org-files (remove :static?) (remove :draft?) (take 20)) feed-items (for [f posts-for-feed :let [file-name (:file-name f) link-path (format "http://%s/%s.html" remote-host file-name)]] {:title (:title f) :link link-path :pubDate (Instant/ofEpochMilli (.getMillis ^org.joda.time.DateTime (:date f))) :description (format "<![CDATA[ %s ]]>" (->> f :parsed html-for-rss html/emit* (apply str)))})] (->> feed-items (rss/channel-xml {:title "<NAME>en" :link (str "http://" remote-host) :description "Posts by <NAME>"}) (spit rss-file-path))))) (defn rss-links " Create HTML paragraph (`p`) of links for available RSS feeds. " {:doc/format :markdown} [] (h/p ["Subscribe: " (h/a {:href "feed.xml" :class "rss"} ["RSS feed ... all topics"]) " ... or " (h/a {:href "feed.clojure.xml" :class "rss"} ["Clojure only"]) " / " (h/a {:href "feed.lisp.xml" :class "rss"} ["Lisp only"])]))
true
(ns organa.rss (:require [clj-rss.core :as rss] [clojure.walk :as walk] [net.cgrand.enlive-html :as html] [organa.config :refer [config]] [organa.html :as h]) (:import [java.time Instant])) ;; FIXME: (def ^:private target-dir (:target-dir config)) ;; FIXME: (def ^:private remote-host (:remote-host config)) (defn ^:private html-for-rss " Remove JavaScript and CSS bits from Org-generated HTML for RSS feed. " [parsed-html] (walk/prewalk (fn [x] (if (and (map? x) (#{"text/css" "text/javascript"} (get-in x [:attrs :type]))) (dissoc (into {} x) :content) x)) parsed-html)) (defn make-rss-feeds " Create RSS feeds for blog. FIXME: reduce arity FIXME: make more configurable " {:doc/format :markdown} ([topic rss-file-name org-files] (make-rss-feeds rss-file-name (filter (comp (partial some #{topic}) :tags) org-files))) ([rss-file-name org-files] (let [rss-file-path (str target-dir "/" rss-file-name) posts-for-feed (->> org-files (remove :static?) (remove :draft?) (take 20)) feed-items (for [f posts-for-feed :let [file-name (:file-name f) link-path (format "http://%s/%s.html" remote-host file-name)]] {:title (:title f) :link link-path :pubDate (Instant/ofEpochMilli (.getMillis ^org.joda.time.DateTime (:date f))) :description (format "<![CDATA[ %s ]]>" (->> f :parsed html-for-rss html/emit* (apply str)))})] (->> feed-items (rss/channel-xml {:title "PI:NAME:<NAME>END_PIen" :link (str "http://" remote-host) :description "Posts by PI:NAME:<NAME>END_PI"}) (spit rss-file-path))))) (defn rss-links " Create HTML paragraph (`p`) of links for available RSS feeds. " {:doc/format :markdown} [] (h/p ["Subscribe: " (h/a {:href "feed.xml" :class "rss"} ["RSS feed ... all topics"]) " ... or " (h/a {:href "feed.clojure.xml" :class "rss"} ["Clojure only"]) " / " (h/a {:href "feed.lisp.xml" :class "rss"} ["Lisp only"])]))
[ { "context": "[{:id 1 :label \"Aldous-Broder\"}\n {:id 2 :label \"Wilson\"}\n {:id 3 :label \"Depth-First Search\"}\n {:id ", "end": 338, "score": 0.7845740914344788, "start": 332, "tag": "NAME", "value": "Wilson" } ]
src/cljs/amaze/config.cljs
brown131/amaze
1
(ns amaze.config (:require [reagent.core :as reagent])) (def debug? ^boolean goog.DEBUG) (def directions "Maze directions with coordinate deltas. Origin is upper left." {:north [0 -1] :south [0 1] :east [1 0] :west [-1 0]}) (def maze-algorithms "Maze generation algorithms" [{:id 1 :label "Aldous-Broder"} {:id 2 :label "Wilson"} {:id 3 :label "Depth-First Search"} {:id 4 :label "Aldous-Broder/Wilson Hybrid"} {:id 5 :label "AB/DFS/W Hybrid"}]) (def db "State of the maze UI controls." {:width (reagent/atom "20") :height (reagent/atom "20") :thickness (reagent/atom "10") :breadth (reagent/atom "15") :algorithm (reagent/atom 1) :entrance (reagent/atom [0 0]) :exit (reagent/atom [19 19]) :size (reagent/atom 400) :ball-position (reagent/atom [0 0])}) (defn get-db-value [kw] (js/parseInt @(kw db)))
60185
(ns amaze.config (:require [reagent.core :as reagent])) (def debug? ^boolean goog.DEBUG) (def directions "Maze directions with coordinate deltas. Origin is upper left." {:north [0 -1] :south [0 1] :east [1 0] :west [-1 0]}) (def maze-algorithms "Maze generation algorithms" [{:id 1 :label "Aldous-Broder"} {:id 2 :label "<NAME>"} {:id 3 :label "Depth-First Search"} {:id 4 :label "Aldous-Broder/Wilson Hybrid"} {:id 5 :label "AB/DFS/W Hybrid"}]) (def db "State of the maze UI controls." {:width (reagent/atom "20") :height (reagent/atom "20") :thickness (reagent/atom "10") :breadth (reagent/atom "15") :algorithm (reagent/atom 1) :entrance (reagent/atom [0 0]) :exit (reagent/atom [19 19]) :size (reagent/atom 400) :ball-position (reagent/atom [0 0])}) (defn get-db-value [kw] (js/parseInt @(kw db)))
true
(ns amaze.config (:require [reagent.core :as reagent])) (def debug? ^boolean goog.DEBUG) (def directions "Maze directions with coordinate deltas. Origin is upper left." {:north [0 -1] :south [0 1] :east [1 0] :west [-1 0]}) (def maze-algorithms "Maze generation algorithms" [{:id 1 :label "Aldous-Broder"} {:id 2 :label "PI:NAME:<NAME>END_PI"} {:id 3 :label "Depth-First Search"} {:id 4 :label "Aldous-Broder/Wilson Hybrid"} {:id 5 :label "AB/DFS/W Hybrid"}]) (def db "State of the maze UI controls." {:width (reagent/atom "20") :height (reagent/atom "20") :thickness (reagent/atom "10") :breadth (reagent/atom "15") :algorithm (reagent/atom 1) :entrance (reagent/atom [0 0]) :exit (reagent/atom [19 19]) :size (reagent/atom 400) :ball-position (reagent/atom [0 0])}) (defn get-db-value [kw] (js/parseInt @(kw db)))
[ { "context": "r\n #^{:doc \"Clojure monitor timer\"\n :author \"dennis<xzhuang@avos.com>\"}\n (:import (org.quartz JobDet", "end": 76, "score": 0.9933557510375977, "start": 70, "tag": "USERNAME", "value": "dennis" }, { "context": ":doc \"Clojure monitor timer\"\n :author \"dennis<xzhuang@avos.com>\"}\n (:import (org.quartz JobDetail Scheduler Tri", "end": 93, "score": 0.9999325275421143, "start": 77, "tag": "EMAIL", "value": "xzhuang@avos.com" } ]
src/clj/monitor/timer.clj
killme2008/clj.monitor
3
(ns clj.monitor.timer #^{:doc "Clojure monitor timer" :author "dennis<xzhuang@avos.com>"} (:import (org.quartz JobDetail Scheduler Trigger CronScheduleBuilder JobBuilder TriggerBuilder Job JobDataMap JobExecutionContext) (org.quartz.impl DirectSchedulerFactory))) (defn init-scheduler "Initialize a quartz scheduler" [n] (when (.. DirectSchedulerFactory (getInstance) (getAllSchedulers) (isEmpty)) (.. DirectSchedulerFactory (getInstance) (createVolatileScheduler n))) (.. DirectSchedulerFactory (getInstance) (getScheduler))) (def ^:private FUN "quartz-fn") (deftype FunctionJob [] Job (execute [this ctx] (let [^JobDataMap m (.. ctx (getJobDetail) (getJobDataMap)) f (.get m FUN)] (when f (f))))) (defn schedule-task "Schedule a function with crontab-like string" [^Scheduler s f ^String cron] (let [job (.. (JobBuilder/newJob FunctionJob) (build)) trigger (.. (TriggerBuilder/newTrigger) (withSchedule (CronScheduleBuilder/cronSchedule cron)) (forJob job) (build))] (.. job (getJobDataMap) (put FUN f)) (. s (scheduleJob job trigger)))) (defn start-scheduler "Start the quartz scheduler" [^Scheduler s] (.start s)) (defn stop-scheduler "Stop the quartz scheduler" [^Scheduler s] (.shutdown s true))
62896
(ns clj.monitor.timer #^{:doc "Clojure monitor timer" :author "dennis<<EMAIL>>"} (:import (org.quartz JobDetail Scheduler Trigger CronScheduleBuilder JobBuilder TriggerBuilder Job JobDataMap JobExecutionContext) (org.quartz.impl DirectSchedulerFactory))) (defn init-scheduler "Initialize a quartz scheduler" [n] (when (.. DirectSchedulerFactory (getInstance) (getAllSchedulers) (isEmpty)) (.. DirectSchedulerFactory (getInstance) (createVolatileScheduler n))) (.. DirectSchedulerFactory (getInstance) (getScheduler))) (def ^:private FUN "quartz-fn") (deftype FunctionJob [] Job (execute [this ctx] (let [^JobDataMap m (.. ctx (getJobDetail) (getJobDataMap)) f (.get m FUN)] (when f (f))))) (defn schedule-task "Schedule a function with crontab-like string" [^Scheduler s f ^String cron] (let [job (.. (JobBuilder/newJob FunctionJob) (build)) trigger (.. (TriggerBuilder/newTrigger) (withSchedule (CronScheduleBuilder/cronSchedule cron)) (forJob job) (build))] (.. job (getJobDataMap) (put FUN f)) (. s (scheduleJob job trigger)))) (defn start-scheduler "Start the quartz scheduler" [^Scheduler s] (.start s)) (defn stop-scheduler "Stop the quartz scheduler" [^Scheduler s] (.shutdown s true))
true
(ns clj.monitor.timer #^{:doc "Clojure monitor timer" :author "dennis<PI:EMAIL:<EMAIL>END_PI>"} (:import (org.quartz JobDetail Scheduler Trigger CronScheduleBuilder JobBuilder TriggerBuilder Job JobDataMap JobExecutionContext) (org.quartz.impl DirectSchedulerFactory))) (defn init-scheduler "Initialize a quartz scheduler" [n] (when (.. DirectSchedulerFactory (getInstance) (getAllSchedulers) (isEmpty)) (.. DirectSchedulerFactory (getInstance) (createVolatileScheduler n))) (.. DirectSchedulerFactory (getInstance) (getScheduler))) (def ^:private FUN "quartz-fn") (deftype FunctionJob [] Job (execute [this ctx] (let [^JobDataMap m (.. ctx (getJobDetail) (getJobDataMap)) f (.get m FUN)] (when f (f))))) (defn schedule-task "Schedule a function with crontab-like string" [^Scheduler s f ^String cron] (let [job (.. (JobBuilder/newJob FunctionJob) (build)) trigger (.. (TriggerBuilder/newTrigger) (withSchedule (CronScheduleBuilder/cronSchedule cron)) (forJob job) (build))] (.. job (getJobDataMap) (put FUN f)) (. s (scheduleJob job trigger)))) (defn start-scheduler "Start the quartz scheduler" [^Scheduler s] (.start s)) (defn stop-scheduler "Stop the quartz scheduler" [^Scheduler s] (.shutdown s true))
[ { "context": "document editor.\n;; Copyright 2011-2013, Vixu.com, F.M. (Filip) de Waard <fmw@vixu.com>.\n;;\n;; Licensed under the Apache L", "end": 125, "score": 0.9878386855125427, "start": 104, "tag": "NAME", "value": "F.M. (Filip) de Waard" }, { "context": "right 2011-2013, Vixu.com, F.M. (Filip) de Waard <fmw@vixu.com>.\n;;\n;; Licensed under the Apache License, Versio", "end": 139, "score": 0.9999327063560486, "start": 127, "tag": "EMAIL", "value": "fmw@vixu.com" } ]
src/cljs/src/views/editor.cljs
fmw/vix
22
;; cljs/src/views/editor.cljs: UI implementation for document editor. ;; Copyright 2011-2013, Vixu.com, F.M. (Filip) de Waard <fmw@vixu.com>. ;; ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. (ns vix.views.editor (:use-macros [vix.crossover.macros :only [defhandler get-cdn-hostname]]) (:require [vix.document :as document] [vix.ui :as ui] [vix.util :as util] [clojure.string :as string] [domina :as domina] [domina.events :as events] [goog.dom :as dom] [goog.editor.Field :as Field] [goog.editor.plugins.BasicTextFormatter :as BasicTextFormatter] [goog.editor.plugins.RemoveFormatting :as RemoveFormatting] [goog.editor.plugins.UndoRedo :as UndoRedo] [goog.editor.plugins.ListTabHandler :as ListTabHandler] [goog.editor.plugins.SpacesTabHandler :as SpacesTabHandler] [goog.editor.plugins.EnterHandler :as EnterHandler] [goog.editor.plugins.HeaderFormatter :as HeaderFormatter] [goog.editor.plugins.LinkDialogPlugin :as LinkDialogPlugin] [goog.editor.plugins.LinkBubble :as LinkBubble] [goog.editor.Command :as buttons] [goog.ui.editor.DefaultToolbar :as DefaultToolbar] [goog.ui.editor.ToolbarController :as ToolbarController] [goog.crypt.base64 :as base64]) (:use [domina.css :only [sel]] [domina.xpath :only [xpath]])) (def error-messages {:document-title-required-error "The document title value is required." :slug-has-invalid-chars-error "Slugs can only contain '/', '-', '.' and alphanumeric characters." 
:slug-has-consecutive-dashes-or-slashes-error "Slugs shouldn't contain any consecutive '-' or '/' characters." :slug-required-error "A valid slug is required for every document." :slug-initial-slash-required-error "The slug needs to start with a '/'." :slug-not-unique-error "This slug is not unique (document already exists)." :could-not-save-document-error "Something went wrong while saving the document." :document-update-conflict-error (str "The document has been changed after you loaded it. " "Please copy your changes, refresh the page and edit the " "most recent version of the document.") :document-already-exists-error (str "There already is an existing document with the provided slug. " "Please use a different slug.") :invalid-filetype-error "The filetype for the file you are trying to upload isn't supported." :file-required-error "This editing mode requires a file to be added." :link-label-required-error "The link label is required." :link-label-has-invalid-chars-error (str "Labels can only contain '/', '-', '.', '?', '!' " "and alphanumeric characters.")}) (defn slug-has-invalid-chars? "Checks whether the slug contains any invalid characters (i.e. not a forward slash, dash, period or alphanumeric character)." [slug] (nil? (re-matches #"[/\-a-zA-Z0-9\.]+" slug))) (defn validate-slug "Validates the provided slug string and returns either a map with a :message corresponding to a human-readable error message and an :error key corresponding to the the internal error keyword. If there is no error the function returns they keyword :pass. Slugs must: - not be blank (:slug-required-error), - not have invalid characters (:slug-has-invalid-chars-error; see the slug-has-invalid-chars? fn), - not have any consecutive dashes or slashes (:slug-has-consecutive-dashes-or-slashes-error), and, - start with a slash (:slug-initial-slash-required-error)." [slug] (cond (string/blank? 
slug) {:error :slug-required-error :message (:slug-required-error error-messages)} (slug-has-invalid-chars? slug) {:error :slug-has-invalid-chars-error :message (:slug-has-invalid-chars-error error-messages)} (util/has-consecutive-dashes-or-slashes? slug) {:error :slug-has-consecutive-dashes-or-slashes-error :message (:slug-has-consecutive-dashes-or-slashes-error error-messages)} (not (= (first slug) "/")) {:error :slug-initial-slash-required-error :message (:slug-initial-slash-required-error error-messages)} :default :pass)) (defn link-label-has-invalid-chars? "Returns true if the provided label contains any invalid characters and false if it doesn't (\\u0080 to \\uffff, alphanumeric characters, periods, question marks, exclamation marks, spaces and dashes are valid)." [label] (nil? (re-matches #"[\u0080-\uffffa-zA-Z0-9.?! -]+" label))) (defn html-with-clean-image-uris "Accepts a string with an HTML value and converts any relative paths to images to absolute paths. Returns the improved HTML string." [html] (let [get-num-sub-paths (fn [s] (when (string? s) (count (re-seq #"../" s)))) unsorted-pairs (map (fn [re-pair] (let [orig-src (nth re-pair 1)] [orig-src (str (when-not (re-matches #"^http[s]{0,1}(.*)" orig-src) "/") (string/replace orig-src "../" ""))])) (re-seq #"<img src=\"(.*?)\"" html))] ;; need to sort, otherwise shorter links to the same image mess ;; up longer ones (loop [modified-html html img-uri-pairs (sort-by get-num-sub-paths unsorted-pairs)] (if (pos? (count img-uri-pairs)) (recur (string/replace modified-html (first (last img-uri-pairs)) (last (last img-uri-pairs))) (butlast img-uri-pairs)) modified-html)))) (defn create-editor-field! "Creates a goog.editor.Field instance with provided element-id." [element-id] (goog.editor.Field. element-id)) (defn register-editor-plugins! "Accepts a goog.editor.Field instance (created by the create-editor-field! 
fn) and registers the following goog.editor.plugins: BasicTextFormatter, RemoveFormatting, UndoRedo, ListTabHandler, SpacesTabHandler, EnterHandler, HeaderFormatter, LinkDialogPlugin and LinkBubble." [editor] (doto editor (.registerPlugin (goog.editor.plugins.BasicTextFormatter.)) (.registerPlugin (goog.editor.plugins.RemoveFormatting.)) (.registerPlugin (goog.editor.plugins.UndoRedo.)) (.registerPlugin (goog.editor.plugins.ListTabHandler.)) (.registerPlugin (goog.editor.plugins.SpacesTabHandler.)) (.registerPlugin (goog.editor.plugins.EnterHandler.)) (.registerPlugin (goog.editor.plugins.HeaderFormatter.)) (.registerPlugin (goog.editor.plugins.LinkDialogPlugin.)) (.registerPlugin (goog.editor.plugins.LinkBubble.)))) (defn create-editor-toolbar! "Creates a toolbar using the node with given element-id as a containe and optionally accepts buttons that are to be included as optional further elements. If no optional arguments are provided the default buttons are used. The available buttons are: :bold :italic :underline :font-color :background-color :font-face :font-size, :format-block :link :image :undo :redo :unordered-list :ordered-list :indent :outdent :justify-left :justify-center, :justify-right :subscript :superscript :strike-through, :remove-format :edit-html The default buttons are: :bold :italic :underline :strike-through :font-color :background-color :font-face :font-size :link :image :undo :redo :unordered-list :ordered-list :indent :outdent :justify-left :justify-center :justify-right :subscript :superscript :remove-format" [element-id & included-buttons] (let [buttons-map {:bold buttons/BOLD :italic buttons/ITALIC :underline buttons/UNDERLINE :font-color buttons/FONT_COLOR :background-color buttons/BACKGROUND_COLOR :font-face buttons/FONT_FACE :font-size buttons/FONT_SIZE :format-block buttons/FORMAT_BLOCK :link buttons/LINK :image buttons/IMAGE :undo buttons/UNDO :redo buttons/REDO :unordered-list buttons/UNORDERED_LIST :ordered-list buttons/ORDERED_LIST 
:indent buttons/INDENT :outdent buttons/OUTDENT :justify-left buttons/JUSTIFY_LEFT :justify-center buttons/JUSTIFY_CENTER :justify-right buttons/JUSTIFY_RIGHT :subscript buttons/SUBSCRIPT :superscript buttons/SUPERSCRIPT :strike-through buttons/STRIKE_THROUGH :remove-format buttons/REMOVE_FORMAT :edit-html buttons/EDIT_HTML} buttons (to-array (map #(get buttons-map %) (or included-buttons [:bold :italic :underline :strike-through :font-color :background-color :font-face :font-size :link :image :undo :redo :unordered-list :ordered-list :indent :outdent :justify-left :justify-center :justify-right :subscript :superscript :remove-format])))] (DefaultToolbar/makeToolbar buttons (util/get-element element-id)))) (def editor-fields (atom {})) (defn make-editable! "Turns the node with provided element-id into an editable field, using the node with the provided toolbar-id as a toolbar and swapping the value into the editor-fields map using the provided field-key. Sets the content to the provided html-content string." [field-key element-id toolbar-id html-content] (let [editor (create-editor-field! element-id)] (swap! editor-fields assoc field-key editor) (when html-content (. editor (setHtml false html-content true false))) (register-editor-plugins! editor) (goog.ui.editor.ToolbarController. editor (create-editor-toolbar! toolbar-id :bold :italic :underline :strike-through :format-block :link :image :undo :redo :unordered-list :ordered-list :indent :outdent :justify-left :justify-center :justify-right :subscript :superscript :remove-format)) (. editor (makeEditable)))) (def *file* (atom {})) (defn create-slug "Returns a slug using the provided title and using the :default-slug-format and :default-document-type keys from the feed map provided as the second argument through the vix.util/create-slug fn." [title {:keys [default-slug-format default-document-type] :as feed}] (util/create-slug default-slug-format title feed (util/date-now!) 
(if (and (= default-document-type "image") (:extension (:data @*file*))) (:extension (:data @*file*)) "html"))) (defn strip-filename-extension "Returns a the provided filename string, removing the extension if it has any." [filename] (let [pieces (re-find #"^(.*?)\.[a-zA-Z0-9]{1,10}$" filename)] (if (= (count pieces) 2) (nth pieces 1) filename))) (defn image-filename-to-title "Translates the provided image filename to a document title suggestion by joining the alphanumeric segments separated by spaces." [filename] (string/join " " (filter #(not (string/blank? %)) (string/split (strip-filename-extension filename) #"[^a-zA-Z0-9]")))) (defn select-feed-by-language-and-name "Returns the feed map with the value of the :language key matching the given language string and the value of the :name key matching the given feed-name string from the provided feeds sequence." [language feed-name feeds] (first (filter (fn [feed] (and (= feed-name (:name feed)) (= language (:language feed)))) feeds))) (defn get-editor-content "Returns the clean HTML string content from the editor with the provided key (e.g. :content or :description) or nil if that editor doesn't exist in the @editor-fields map." [field-key] (when-let [editor-field (field-key @editor-fields)] (string/replace (html-with-clean-image-uris (.getCleanContents editor-field editor-field)) #"id=\"image-dialog-preview\"" ""))) (defn parse-related-document-id "Parses a related document id attribute value (e.g. related-image@@@/en/images/foo@@@Foo) which is either provided as a string or as a Domina element with the value in the id attribute. Returns a map with the document slug mapped to the :slug key and the title mapped to the :title key." [el-or-string] (let [[_ slug title] (string/split (if (string? el-or-string) el-or-string (domina/attr el-or-string :id)) #"@@@")] {:slug slug :title title})) (defn get-document-relations! 
"Retrieves a vector of related pages from the DOM, using the provided domina-css-selector string to select the desired elements." [domina-css-selector] (vec (map parse-related-document-id (domina/nodes (sel domina-css-selector))))) (defn get-icon-image! "Retrieves a map with the icon image details from the DOM, with a :title key for the title and a :slug key for the slug." [] (when-let [icon-container-el (domina/single-node (sel ".image-icon-container"))] (parse-related-document-id icon-container-el))) (defn get-document-data-from-dom! "Returns a map with the document data extracted from the DOM using the provided feed-map for the language and feed-name values, as well as to determine the document type." [feed-map] (let [form-fields [:title :subtitle :slug :start-time :end-time]] (merge (zipmap form-fields (map (fn [field-name] (try (ui/get-form-value (name field-name)) (catch js/Error e nil))) form-fields)) {:language (:language feed-map) :feed (:name feed-map) :draft (ui/is-checked? "draft") :content (if (= (:default-document-type feed-map) "menu") (get-menu-string-from-dom!) (get-editor-content :content)) :description (get-editor-content :description) :icon (get-icon-image!) :related-pages (get-document-relations! ".related-page") :related-images (get-document-relations! ".related-image")}))) (defn save-document! "Saves the document and either returns the new version or an error message. Takes a singular feed map, a boolean value for new? that determines if its a new or edited document, the current-document-state (a map if editing an existing document), the future-state map with the output from the get-document-data-from! function and slug-validation with the output from a validate-slug call." [feed-map new? current-document-state future-state slug-validation] (let [save-button-el (sel "#save-document")] (domina/set-attr! save-button-el :disabled "disabled") (document/append-to-document (merge future-state (if new? 
{:action :create} {:action :update :previous-id (:_id current-document-state)})) (fn [status document-states] (let [[{:keys [slug title]} & _] document-states] (cond (= status 201) (util/navigate-replace-state (str (:language feed-map) "/" (:name feed-map) "/edit" slug) (str "Edit \"" title "\"")) (= status 200) (display-editor! (:language feed-map) (:name feed-map) slug) :else (ui/display-error (sel "#status-message") (cond (= (str "There is an existing document " "with the provided slug.") document-states) (:document-already-exists-error error-messages) (= (str "This document map doesn't contain " "the most recent :previous-id.") document-states) (:document-update-conflict-error error-messages) :default (:could-not-save-feed-error error-messages)))) (domina/remove-attr! save-button-el :disabled)))))) (defn save-image-document! "Wrapper for save-document! that deals with image documents. See the save-document! function for the arguments. If the user is trying to add a new image without providing a file an error message is displayed." [feed-map new? current-document-state future-state slug-validation] (let [file (:obj @*file*) reader (new js/FileReader)] (cond ;; if the image has been changed, load it and call save-document!: (not (nil? file)) (do (set! (.-onload reader) (fn [evt] (save-document! feed-map new? current-document-state (merge future-state {:attachment {:type (.-type file) :data (base64/encodeString (.-result (.-target evt)))}}) slug-validation))) (. reader (readAsBinaryString file))) ;; allow editing existing documents without changing the image: (not new?) (save-document! feed-map new? current-document-state (merge future-state (select-keys current-document-state [:attachments])) slug-validation) ;; display an error message if the user is trying to save a new ;; document without providing an image: :default (ui/display-error (sel "#status-message") (:file-required-error error-messages))))) (defhandler save-document-button-click-callback! 
"Performs validation when the default-slug-format value changes and displays and removes errors when necessary. If the validation is successful the save-document! fn is called. Takes a singular feed map, a boolean value for new? that determines if this is a fresh or existing document, a sequence of document states if editing an existing document and a Domina event object." [feed-map new? [current-document-state & _] evt] (let [status-message-el (sel "#status-message") future-state (get-document-data-from-dom! feed-map) slug-validation (validate-slug (:slug future-state))] (cond (string/blank? (:title future-state)) (ui/display-error status-message-el (:document-title-required-error error-messages)) (not (= :pass slug-validation)) (ui/display-error status-message-el (:message slug-validation)) (= (:default-document-type feed-map) "image") (save-image-document! feed-map new? current-document-state future-state slug-validation) :default (when-not (= (select-keys current-document-state (keys future-state)) future-state) ;; don't save if there are no changes (save-document! feed-map new? current-document-state future-state slug-validation))))) (defn detect-duplicate-custom-slug-callback! "Detects if the custom slug is a duplicate and adds/removes UI error messages accordingly. Used as a callback for vix.document/get-doc." [status [doc-current-state & _]] (let [slug-el (sel "#slug") slug-label-el (sel "#slug-label") status-el (sel "#status-message")] (if (and (= status 200) (not (= (:action doc-current-state) :delete))) (ui/display-error status-el (:slug-not-unique-error error-messages) slug-el slug-label-el) (ui/remove-error status-el slug-el slug-label-el)))) (defn increment-slug "Increments the provided slug string by either incrementing the last character if it is a number or adding '-2' to the end of the string." 
[slug] (if-let [slug-matches (re-matches #"(.*?)-([0-9]+)$" slug)] (str (nth slug-matches 1) "-" (inc (js/parseInt (last slug-matches)))) (str slug "-2"))) (defhandler slug-validation-callback! "Validates custom slugs. Displays an error if required, otherwise the doc with the desired slug is requested and validated by the detect-duplicate-custom-slug-callback!." [evt] (when (ui/is-checked? "custom-slug") (let [slug-el (events/target evt) slug (ui/get-form-value slug-el) slug-label-el (sel "#slug-label") status-el (sel "#status-message") slug-validation (validate-slug slug)] (if (= :pass slug-validation) (do (ui/remove-error status-el slug-el slug-label-el) (document/get-doc slug detect-duplicate-custom-slug-callback!)) (ui/display-error status-el (:message slug-validation) slug-el slug-label-el))))) (def slugs (atom #{})) (defn handle-duplicate-slug-callback! "Automatically appends '-2' to the end of a duplicate slug, or increments n if the slug already ends with '-n' where n is an integer. Keeps calling itself until no more duplicates are found and finally sets the unique value if that value doesn't match the original value." [status [doc-current-state & _]] (if (and (= status 200) (not (= (:action doc-current-state) :delete))) ;; add incremented slug to @slugs and rerun the get-doc call with ;; the new slug and this function as the callback: (let [new-slug (increment-slug (document/add-initial-slash (:slug doc-current-state)))] (swap! slugs conj new-slug) (document/get-doc new-slug handle-duplicate-slug-callback!)) ;; set the final slug value if needed: (let [final-slug (last (sort-by count @slugs)) slug-el (sel "#slug")] (when-not (= (ui/get-form-value slug-el) final-slug) (ui/set-form-value slug-el final-slug))))) (defn synchronize-slug "Automatically generates a slug value based on the value of the provided title element." [title-el feed] (let [new-slug (create-slug (ui/get-form-value title-el) feed)] (when (nil? (ui/get-form-value "custom-slug")) (reset! 
slugs #{new-slug}) (document/get-doc new-slug handle-duplicate-slug-callback!)))) (defhandler custom-slug-toggle-callback! "Toggles the slug input between editable (i.e. custom) and disabled." [feed evt] (if (nil? (ui/get-form-value (events/target evt))) (do (ui/disable-element "slug") (ui/disable-element "slug-label") (synchronize-slug (sel "#title") feed)) (do (ui/enable-element "slug") (ui/enable-element "slug-label")))) (defhandler title-edited-callback! "Automatically generates a slug for current title if custom slug isn't checked." [feed evt] (synchronize-slug (events/target evt) feed)) (defhandler handle-datefield-click-callback! "Displays a datepicker on click." [evt] (ui/display-datepicker (fn [{:keys [date-string date-object hour minute]}] (ui/set-form-value (events/target evt) (if date-object (str date-string " " hour ":" minute) ""))) true)) (defn display-image-preview "Displays a preview of the image contained in given file object with the provided title." [file title] (let [reader (new js/FileReader)] (set! (.-onload reader) (fn [evt] (domina/set-attrs! (sel "#image-preview") {:title title :src (.-result (.-target evt))}))) (. reader (readAsDataURL file)))) (def mimetype-to-extension-map {:image/png "png" :image/gif "gif" :image/jpeg "jpg"}) (def *file* (atom {})) (defn handle-image-drop-callback! "Is called when a file is dropped on the image drop target. If the file is recognized as an image it is displayed in the interface and swapped into the *file* atom (under the :obj key). Otherwise an invalid filetype message is shown." [current-feed-state new? evt] ;; domina/prevent-default & domina/stop-propagation don't work for ;; these events, so not using defhandler, but a regular function. (do (. evt (preventDefault)) (. 
evt (stopPropagation))) (if-let [file (aget (.-files (.-dataTransfer evt)) 0)] (let [status-el (sel "#status-message") image-information-el (sel "#image-information-container") title (image-filename-to-title (.-name file))] (if-let [extension (mimetype-to-extension-map (keyword (.-type file)))] (do (swap! *file* assoc :obj file :data {:extension extension}) (ui/remove-error status-el) (ui/set-form-value (dom/getElement "title") title) (display-image-preview file title) (domina/remove-class! image-information-el "hide") (domina/set-text! (sel "#image-filename") (.-name file)) (domina/set-text! (sel "#image-filetype") (.-type file)) (domina/set-text! (sel "#image-size") (format "%.2f"(/ (.-size file) 1000))) (when new? (synchronize-slug (sel "#title") current-feed-state))) (do (swap! *file* dissoc :obj :data) (domina/add-class! image-information-el "hide") (ui/display-error status-el (:invalid-filetype-error error-messages))))))) (defn remove-self-from-documents "Removes the map with the given self-slug string as a :slug value from the provided sequence of document maps." [self-slug documents] (remove #(= (:slug %) self-slug) documents)) (defn remove-existing-from-documents "Removes any documents with a :slug value that is part of the existing-documents vector." [existing-documents documents] (remove (fn [{:keys [slug]}] (not (nil? (some #{slug} (map :slug existing-documents))))) documents)) (defn format-option "Replaces keyword values in the given option-format sequence with the string value for that keyword in the provided option-map and leaves string values in the option-format sequence as-is. Returns a string version of the new sequence." [option-map option-format] (apply str (map (fn [v] (if (string? v) v (option-map v))) option-format))) (defn update-options-in-dialog! "Adds select options to the DOM. 
Takes the select-el, which is the Domina element node the options are being added to, the snippet-key, which is the key for the HTML snippets in the ui/snippets map, the option-xpath-selector, which is the xpath selector required to select the newly added option, option-maps, which is a sequence of maps containing values for the option, title-format and value-format, which are sequences describing the format for the particular values (see the format-option function for the structure of the format sequences)." [select-el snippet-key option-xpath-selector option-maps title-format value-format] (domina/destroy-children! select-el) (doseq [option option-maps] (domina/append! select-el (ui/snippets snippet-key)) (doto (xpath option-xpath-selector) (domina/set-text! (format-option option title-format)) (domina/set-attr! :value (format-option option value-format))))) (defn update-feed-options-in-dialog! "Appends select options for given feeds to the provided feeds-el." [feeds-el feeds] (update-options-in-dialog! feeds-el :editor/.feed-select-option "(//option[@class='feed-select-option'])[last()]" feeds [:title] ["['" :language "','" :name "']"])) (defn update-document-options-in-dialog! "Appends provided select options for given documents to documents-el, while removing the document matching self-slug. Any documents in the removable-documents vector are removed from the documents list." [self-slug documents-el documents removable-documents] (update-options-in-dialog! documents-el :editor/.document-select-option "(//option[@class='document-select-option'])[last()]" (remove-existing-from-documents removable-documents (remove-self-from-documents self-slug documents)) [:title] ["option@@@" :slug "@@@" :title])) (defn update-menu-links-options-in-dialog! "Adds the provided sequence of document maps to the dialog as select options." [documents] (update-options-in-dialog! 
(sel "select#internal-link") :editor/.document-select-option "(//option[@class='document-select-option'])[last()]" documents [:title] [:slug])) (defhandler remove-parent-callback! "Event callback that removes the parent of the event target DOM node." [evt] (domina/destroy! (xpath (events/target evt) ".."))) (defn add-related-page! "Adds a li element for the given page to the DOM. This function is used as a ui/display-dialog callback!, which provides the dialog-status (either :ok or :cancel) and a map with the form fields and their values as callback arguments." [dialog-status {:keys [internal-link-feed internal-link]}] (when (and (= dialog-status :ok) (not (nil? internal-link))) (domina/append! (sel "#related-pages-container") (:editor/.related-page ui/snippets)) (let [{:keys [slug title]} (parse-related-document-id internal-link)] (doto (xpath "(//li[@class='related-page'])[last()]") (domina/set-attr! :id (str "related-page@@@" slug "@@@" title))) (doto (xpath "(//span[@class='related-page-title'])[last()]") (domina/set-text! title)) (events/listen! (xpath "(//a[@class='related-page-delete-link'])[last()]") :click remove-parent-callback!)))) (defhandler image-preview-link-click-callback! "Handler for clicks on an image preview link that displays the image in a dialog." [evt] (let [{:keys [title slug]} (parse-related-document-id (xpath (events/target evt) ".."))] (ui/display-dialog! "Image Preview" (:editor/image-preview-in-dialog-container ui/snippets) nil {:auto-close? true :modal? true} (fn []) :cancel) (domina/set-attrs! (sel "#image-dialog-preview") {:src (str (get-cdn-hostname) slug) :alt title :title title}))) (defn add-related-image! "Adds a li element for the given image to the DOM. This function is used as a ui/display-dialog callback, which provides the dialog-status (either :ok or :cancel) and a map with the form fields and their values as callback arguments." 
;; NOTE(review): the argument vector below belongs to add-related-image!,
;; whose defn head and docstring immediately precede it.
[dialog-status {:keys [internal-link-feed internal-link]}]
(when (and (= dialog-status :ok)
           (not (nil? internal-link)))
  (domina/append! (sel "#related-images-container")
                  (:editor/.related-image ui/snippets))
  (let [{:keys [slug title]} (parse-related-document-id internal-link)]
    (doto (xpath "(//li[@class='related-image'])[last()]")
      ;; FIX: use the "related-image@@@" id prefix, matching the ids that
      ;; display-editor-callback! generates for persisted related images;
      ;; previously this was inconsistently set to "related-page@@@".
      (domina/set-attr! :id (str "related-image@@@" slug "@@@" title)))
    (doto (xpath "(//span[@class='related-image-title'])[last()]")
      (domina/set-text! title))
    ;; Wire up the per-item delete and preview links for the new entry.
    (events/listen! (xpath "(//a[@class='related-image-delete-link'])[last()]")
                    :click
                    remove-parent-callback!)
    (events/listen! (xpath "(//a[@class='related-image-preview-link'])[last()]")
                    :click
                    image-preview-link-click-callback!))))

(defn create-icon-image-events!
  "Creates delete and preview events for image icon."
  []
  (events/listen! (sel "#icon-image-delete-link")
                  :click
                  remove-parent-callback!)
  (events/listen! (sel "#icon-image-preview-link")
                  :click
                  image-preview-link-click-callback!))

(defn add-icon-image!
  "Adds a li element for the given image to the DOM. This function is used
  as a ui/display-dialog callback, which provides the dialog-status (either
  :ok or :cancel) and a map with the form fields and their values as
  callback arguments."
  [dialog-status {:keys [internal-link-feed internal-link]}]
  (when (and (= dialog-status :ok)
             (not (nil? internal-link)))
    (let [{:keys [slug title]} (parse-related-document-id internal-link)]
      ;; A document has at most one icon: remove any existing icon
      ;; container before appending the replacement.
      (domina/destroy! (sel ".image-icon-container"))
      (domina/append! (sel "#icon-container")
                      (:editor/.image-icon-container ui/snippets))
      (domina/set-attr! (sel ".image-icon-container")
                        :id
                        (str "icon@@@" slug "@@@" title))
      (domina/set-text! (sel "#icon-image-title") title)
      (create-icon-image-events!))))

(defn update-dialog-image-preview!
  "Sets the image preview in the image selection dialog to the slug and
  title of the provided document map."
  [{:keys [slug title]}]
  ;; Only update when a real slug is present (e.g. the feed may be empty).
  (when (string? slug)
    (domina/set-attrs!
(sel "#image-dialog-preview") {:src (str (get-cdn-hostname) slug) :alt title :title title}))) (defn create-dialog-feed-change-event! "Creates an event that tracks changes on feed-el and executes the update-document-options-fn if a change happens, which will update the document select field. The update-document-options-fn needs to accept the retrieved document sequence as the first and only value. If the dialog contains an image-dialog-preview image the update-dialog-image-preview! function is executed as well, using the optional relations argument to purge already related images from the options." [feed-el update-document-options-fn & [relations]] (events/listen! feed-el :change (fn [evt] (let [[language feed-name] (util/pair-from-string (ui/get-form-value (events/target evt)))] (document/get-documents-for-feed language feed-name (fn [status {:keys [documents]}] (update-document-options-fn documents) (when (domina/single-node (sel "img#image-dialog-preview")) (update-dialog-image-preview! (first (remove-existing-from-documents (or relations []) documents)))))))))) (defn fill-document-select-dialog! "Fills the internal-link and internal-link-feeds select fields in a previously created dialog and adds an event listener that loads the new documents if another feed is selected. Takes node-selector (nil or a Domina CSS selector; to pass to the get-document-relations! function), self-slug with the slug for the active document, feeds with a sequence of feed maps and documents with a sequence of document maps for the first feed." [node-selector self-slug feeds documents] (let [internal-link-el (sel "#internal-link") internal-link-feed-el (sel "#internal-link-feed") relations (if node-selector (get-document-relations! node-selector) [])] (update-feed-options-in-dialog! internal-link-feed-el feeds) (update-document-options-in-dialog! self-slug internal-link-el documents (if node-selector (get-document-relations! node-selector) [])) (create-dialog-feed-change-event! 
internal-link-feed-el #(update-document-options-in-dialog! self-slug internal-link-el % relations) relations))) (defhandler add-related-page-link-callback! "Handler for clicks on the add-related-page link that displays a dialog that can be used to select a related page for the document. Takes self-slug, the slug of the current document, to filter it out of the results, as well as a sequence of singular feed maps called feeds and a Domina event called evt." [self-slug all-feeds evt] (let [[{:keys [language name]} :as feeds] (filter #(not (= "image" (:default-document-type %))) all-feeds)] (document/get-documents-for-feed language name (fn [status {:keys [documents]}] (ui/display-dialog! "Add Related Page" (:editor/add-related-page-dialog-form ui/snippets) (sel "#add-related-page-dialog-form") {:auto-close? true :modal? true} add-related-page!) (fill-document-select-dialog! ".related-page" self-slug feeds documents))))) (defn create-image-preview-event! "Creates the event that changes the preview image when a new image is selected in an image dialog." [] (events/listen! (sel "select#internal-link") :change (fn [evt] (update-dialog-image-preview! (parse-related-document-id (ui/get-form-value (events/target evt))))))) (defhandler add-related-image-link-callback! "Handler for clicks on the add-related-image link that displays a dialog that can be used to select a related image for the document. Takes self-slug, the slug of the current document, to filter it out of the results, as well as a sequence of singular feed maps called feeds and a Domina event called evt." [self-slug all-feeds evt] (let [[{:keys [language name]} :as feeds] (filter #(= "image" (:default-document-type %)) all-feeds)] (document/get-documents-for-feed language name (fn [status {:keys [documents]}] (ui/display-dialog! "Add Related Image" (:editor/add-image-dialog-form ui/snippets) (sel "#add-image-dialog-form") {:auto-close? true :modal? true} add-related-image!) (update-dialog-image-preview! 
(first (remove-existing-from-documents (get-document-relations! ".related-image") documents))) (create-image-preview-event!) (fill-document-select-dialog! ".related-image" self-slug feeds documents))))) (defhandler add-icon-image-link-callback! "Handler for clicks on the add-icon-image-link that displays a dialog that can be used to select an icon image for the document. Takes self-slug, the slug of the current document, to filter it out of the results, as well as a sequence of singular feed maps called feeds and a Domina event called evt." [self-slug all-feeds evt] (let [[{:keys [language name]} :as feeds] (filter #(= "image" (:default-document-type %)) all-feeds)] (document/get-documents-for-feed language name (fn [status {:keys [documents]}] (ui/display-dialog! "Add Icon Image" (:editor/add-image-dialog-form ui/snippets) (sel "#add-image-dialog-form") {:auto-close? true :modal? true} add-icon-image!) (update-dialog-image-preview! (first documents)) (create-image-preview-event!) (fill-document-select-dialog! nil self-slug feeds documents))))) (defhandler editor-add-image-button-callback! "Handler for clicks on the add image button in editor toolbars that displays a non-modal image selection dialog. Images can be dragged into the editor field from the dialog. The all-feeds argument is a sequence of singular feed maps and the evt argument contains the Domina event object." [all-feeds evt] (when-not (domina/single-node (sel "div.modal-dialog")) (let [[{:keys [language name]} :as feeds] (filter #(= "image" (:default-document-type %)) all-feeds)] (document/get-documents-for-feed language name (fn [status {:keys [documents]}] (ui/display-dialog! "Add Image" (:editor/add-image-dialog-form ui/snippets) (sel "#add-image-dialog-form") {:auto-close? true :modal? false} add-icon-image! :ok) (domina/remove-class! (sel "h5#drag-to-editor-instruction") "hide") (update-dialog-image-preview! (first documents)) (create-image-preview-event!) (fill-document-select-dialog! 
nil "" feeds documents)))))) (defn create-common-editor-events! "Creates events that are shared by all editor modes. Takes a boolean value new? which determines if it concerns a fresh document and feed-map, a singular map containing feed information for the active document." [new? feed-map] (when new? (events/listen! (sel "#title") :keyup (partial title-edited-callback! feed-map))) (events/listen! (sel "#custom-slug") :change (partial custom-slug-toggle-callback! feed-map)) (events/listen! (sel "#slug") :keyup slug-validation-callback!)) (defn display-editor-callback! "Displays the editor UI and creates the relevant events. The feeds argument contains a sequence containing all available feeds. The feed-map is a map containing feed information for this specific document. The status is either an HTTP status integer if editing an existing document or nil if editing a new document. The last argument contains a map that describes the document as stored in the database." [feeds {:keys [default-document-type] :as feed-map} status [{:keys [title content slug _id description draft related-pages language feed subtitle icon related-images created start-time-rfc3339 start-time published previous-id end-time-rfc3339 end-time datestamp attachments]} :as document-states]] (let [new? (nil? status)] (ui/show! {:snippet :editor/back-to-overview-link :transformations [{:selector "//p[@id='back-to-overview-link']/a[1]" :attrs {:href (str "/admin/" language "/" feed "/overview")}}]} {:snippet :ui/status-message} (when (= default-document-type "image") [{:snippet :editor/image-drop-target} {:snippet :editor/image-information-container :transformations [(when slug {:selector "//img[@id='image-preview']" :attrs {:src (str "data:" (get-in attachments [:original :type]) ";base64," (get-in attachments [:original :data])) :alt title :name title}})]}]) {:snippet :ui/caption :transformations [{:selector "//h3[@id='caption']" :text (if new? 
"Create Document" "Edit Document")}]} {:snippet :editor/title-row :transformations [{:selector "//input[@id='title']" :value title} {:selector "//input[@id='draft']" :checked draft}]} {:snippet :editor/subtitle-row :transformations [{:selector "//input[@id='subtitle']" :value subtitle}]} {:snippet :editor/slug-row :transformations (concat [{:selector "//input[@id='slug']" :value slug}] (when new? [{:selector "//input[@id='custom-slug']" :remove-attr "disabled"} {:selector "//label[@id='custom-slug-label']" :remove-class "disabled"}]))} (when (= default-document-type "event") [{:snippet :editor/start-time-row :transformations [{:selector "//input[@id='start-time']" :value start-time}]} {:snippet :editor/end-time-row :transformations [{:selector "//input[@id='end-time']" :value end-time}]}]) [(merge {:snippet :editor/icon-container} (when-not (nil? icon) {:children [{:snippet :editor/.image-icon-container :transformations [{:selector "//div[@class='image-icon-container']" :attrs {:id (str "icon@@@" (:slug icon) "@@@" (:title icon))}} {:selector "//span[@id='icon-image-title']" :text (:title icon)}]}]})) {:snippet :editor/document-relations :children (doall (map (fn [{:keys [title slug]}] {:snippet :editor/.related-page :parent "//ul[@id='related-pages-container']" :transformations [{:selector "(//li[@class='related-page'])[last()]" :attrs {:id (str "related-page@@@" slug "@@@" title)}} {:selector "(//span[@class='related-page-title'])[last()]" :text title}]}) related-pages))} {:snippet :editor/image-relations :children (doall (map (fn [{:keys [title slug]}] {:snippet :editor/.related-image :parent "//ul[@id='related-images-container']" :transformations [{:selector "(//li[@class='related-image'])[last()]" :attrs {:id (str "related-image@@@" slug "@@@" title)}} {:selector "(//span[@class='related-image-title'])[last()]" :text title}]}) related-images))} {:snippet :editor/editor-images} (when (= default-document-type "with-description") {:snippet 
:editor/description-container}) {:snippet :editor/content-container}] {:snippet :editor/save-button-container}) (make-editable! :content "content" "toolbar" content) (when (= default-document-type "with-description") (make-editable! :description "description-content" "description-toolbar" description)) (when-let [drop-target-el (dom/getElement "image-drop-target")] (. drop-target-el (addEventListener "drop" (partial handle-image-drop-callback! feed-map new?) false)) (. drop-target-el (addEventListener "dragover" (fn [e] (. e (preventDefault)) (. e (stopPropagation))) false))) (create-common-editor-events! new? feed-map) (create-icon-image-events!) (events/listen! (sel "#start-time") :click handle-datefield-click-callback!) (events/listen! (sel "#end-time") :click handle-datefield-click-callback!) (events/listen! (sel "#add-icon-image-link") :click (partial add-icon-image-link-callback! slug feeds)) (events/listen! (sel ".related-page-delete-link") :click remove-parent-callback!) (events/listen! (sel ".related-image-delete-link") :click remove-parent-callback!) (events/listen! (sel ".related-image-preview-link") :click image-preview-link-click-callback!) (events/listen! (sel "#add-related-page-link") :click (partial add-related-page-link-callback! slug feeds)) (events/listen! (sel "#add-related-image-link") :click (partial add-related-image-link-callback! slug feeds)) (events/listen! (xpath "//div[@class='tr-icon tr-image']/../..") :click (partial editor-add-image-button-callback! feeds)) (events/listen! (sel "#save-document") :click (partial save-document-button-click-callback! feed-map new? document-states)))) (defhandler toggle-add-menu-item-dialog-link-type-callback! "Toggles between showing the internal-link-row and the external-link-row, depending on the link-type field value." [evt] (if (= (ui/get-form-value (events/target evt)) "internal") (do (domina/remove-class! (sel "tr#internal-link-row") "hide") (domina/add-class! 
(sel "tr#external-link-row") "hide")) (do (domina/remove-class! (sel "tr#external-link-row") "hide") (domina/add-class! (sel "tr#internal-link-row") "hide")))) (defn append-add-sub-item-link! "Adds an add-sub-item link the the provided item-details-el, accepting a sequence of singular feed maps called as the second argument to pass to the display-add-menu-item-dialog-callback! function." [item-details-el all-feeds] (domina/append! item-details-el (:editor/.add-sub-item ui/snippets)) (events/listen! (xpath item-details-el "(.//a[@class='add-sub-item'])[last()]") :click (partial display-add-menu-item-dialog-callback! (domina/single-node (xpath item-details-el "..")) all-feeds))) (defhandler delete-menu-item-callback! "Deletes the menu item containing the active delete item link, as well as the ul containing it if it is the only item node left. If the latter is true, the add-sub-item link is also re-added to the parent. Accepts a sequence containing singular feed maps as the first argument and the triggered Domina event as the second argument." [all-feeds evt] (let [three-levels-up (xpath (events/target evt) "../../..") four-levels-up (xpath three-levels-up "..")] (if (and (= (domina/attr three-levels-up "class") "nested-menu-category") (= (count (domina/children three-levels-up)) 2)) (do ;; re-add add-sub-item to parent if deleting the whole category (append-add-sub-item-link! (domina/single-node (xpath four-levels-up "(.//span[@class='item-details'])")) all-feeds) ;; and remove the sub-menu ul (domina/destroy! three-levels-up) ) ;; just remove the li containing the current item if it has siblings (domina/destroy! (xpath (events/target evt) "../.."))))) (defn get-nested-menu-category-el! "Returns the .nested-menu-category element for parent-el, creating it if necessary." [parent-el] (or (domina/single-node (xpath parent-el "(.//ul[@class='nested-menu-category'])[last()]")) (do (domina/append! 
parent-el (:editor/.nested-menu-category ui/snippets)) (get-nested-menu-category-el! parent-el)))) (defn add-item-details! "Adds the item-details span to parent-el, using the link-label, link-type, internal-links and external-link arguments to determine the contents. Accepts a sequence of singular feed maps as the second argument, to pass to the detele-menu-item-callback! function." [parent-el all-feeds link-label link-type internal-link external-link] (domina/append! parent-el (:editor/.item-details ui/snippets)) (let [item-details-el (xpath parent-el ".//span[@class='item-details']")] (domina/set-text! (xpath item-details-el ".//span[@class='link-label']") link-label) (domina/set-text! (xpath item-details-el ".//span[@class='link-uri']") (if (= link-type "internal") internal-link external-link)) (events/listen! (xpath item-details-el ".//a[@class='menu-item-delete-link']") :click (partial delete-menu-item-callback! all-feeds)))) (defn add-nested-menu-item-to-dom! "Adds a nested menu item to the menu builder, removes the add-sub-item link from the parent element and if this is the first nested item also adds an add-item node to the newly created ul. Accepts a keyword determining the source (either :string or :dialog; determines whether the item is appended or prepended) as the first argument. Accepts all-feeds as the second argument, containing a sequence of singular feeds maps, parent-el, pointing to the element to add the item to, and link-label, link-type, internal-link and external-link values to pass to the add-item-details! function." [source all-feeds parent-el link-label link-type internal-link external-link] (domina/destroy! (domina/single-node (xpath parent-el "(.//a[@class='add-sub-item'])"))) (let [nested-menu-category-el (get-nested-menu-category-el! parent-el)] ;; if this is the first item, create an add-item node (when (zero? (count (domina/children nested-menu-category-el))) (domina/append! 
nested-menu-category-el (:editor/.add-item-node ui/snippets)) (events/listen! (xpath nested-menu-category-el "(.//li)[last()]/a") :click (partial display-add-menu-item-dialog-callback! parent-el all-feeds))) (if (= source :dialog) (domina/prepend! nested-menu-category-el (:editor/.nested-menu-item ui/snippets)) (domina/insert-before! (xpath nested-menu-category-el "(.//li)[last()]") (:editor/.nested-menu-item ui/snippets))) (add-item-details! (if (= source :dialog) ;; use first li as parent with a :dialog source (xpath nested-menu-category-el "(.//li[@class='nested-menu-item draggable'])[1]") ;; use last .nested-menu-item-li as parent if :string (xpath nested-menu-category-el (str "(.//li[@class='nested-menu-item draggable'])" "[last()]"))) all-feeds link-label link-type internal-link external-link) (ui/remove-dialog!))) (defn add-menu-item-to-dom! "Adds a top level menu item to the menu builder. Accepts all-feeds as the first argument, containing a sequence of singular feeds maps, parent-el, pointing to the element to add the item to, and link-label, link-type, internal-link and external-link values to pass to the add-item-details! function." [all-feeds link-label link-type internal-link external-link] (domina/append! (sel "#menu-container") (:editor/.top-level-menu-item ui/snippets)) (let [menu-item-el (xpath "(//li[@class='top-level-menu-item draggable'])[last()]")] (add-item-details! menu-item-el all-feeds link-label link-type internal-link external-link) (append-add-sub-item-link! (xpath menu-item-el "(.//span[@class='item-details'])[last()]") all-feeds)) (ui/remove-dialog!)) (defn handle-add-menu-item-dialog-completion "Validates the add-menu-item dialog. If the dialog doesn't validate an error message is displayed, if it does it is added the the menu builder and the dialog is closed. The next-child-parent argument is either nil (in case of interaction with the top level) or points to the parent element that the new child is supposed to be added to. 
The all-feeds argument is a sequence of singular feed maps, the dialog status is passed by the ui/display-dialog! function (either :ok or :cancel) and the final argument is a map with form values passed by the ui/display-dialog! function." [next-child-parent-el all-feeds dialog-status {:keys [external-link internal-link link-label link-type]}] (when (= :ok dialog-status) (cond (string/blank? link-label) (ui/display-error (sel "p#add-link-status") (:link-label-required-error error-messages)) (link-label-has-invalid-chars? link-label) (ui/display-error (sel "p#add-link-status") (:link-label-has-invalid-chars-error error-messages)) next-child-parent-el (add-nested-menu-item-to-dom! :dialog all-feeds next-child-parent-el link-label link-type internal-link external-link) :default (add-menu-item-to-dom! all-feeds link-label link-type internal-link external-link)))) (defn parse-dummy-menu-ul! "Reads the menu data from the dummy DOM object created by parse-menu-content-string! and its children. Accepts the dummy DOM element as the first and only argument (also used for the child UL if calling itself recursively)." [element] (map (fn [el] (let [children (domina/children el) last-child (last children)] {:label (domina/text (first children)) :uri (domina/attr (first children) "href") :children (when (= (.-tagName last-child) "UL") (parse-dummy-menu-ul! last-child))})) (domina/children element))) (defn parse-menu-content-string! "Takes string s and adds it to a dummy DOM elemenet to be parsed by the parse-dummy-menu-ul! function." [s] (if (string? s) (let [dummy-list-el (dom/createElement "ul")] (set! (.-innerHTML dummy-list-el) (subs s 4 (- (count s) 5))) (parse-dummy-menu-ul! dummy-list-el)))) (defn display-existing-menu-from-string! "Takes a menu string containing the HTML version of the menu and creates the menu builder UI for it, using a sequence of singular feed maps as the first argument to the function call." [all-feeds menu-string] (domina/destroy-children! 
(sel "ul#menu-container")) (doseq [{:keys [label uri children]} (parse-menu-content-string! menu-string)] (add-menu-item-to-dom! all-feeds label "internal" uri nil) (doseq [{:keys [label uri]} children] (add-nested-menu-item-to-dom! :string all-feeds (domina/single-node ;; force computation of Domina xpath lazy seq (xpath "(//li[@class='top-level-menu-item draggable'])[last()]")) label "internal" uri nil)))) (defn get-item-details-from-dom! "Extracts the link label and URI from the text value of the .link-label and .link-uri span elements in the provided parent-el DOM element. Returns a map with the link label mapped to :label and the URI mapped to :uri." [parent-el] {:label (domina/text (xpath parent-el ".//span[@class='link-label']")) :uri (domina/text (xpath parent-el ".//span[@class='link-uri']"))}) (defn get-menu-data-from-dom! "Converts the DOM structure of the menu builder to a sequence of maps with :label and :uri keys for the corresponding values, as well as a :children key for the children elements of top level nodes." [] (map (fn [top-level-menu-item-el] (assoc (get-item-details-from-dom! top-level-menu-item-el) :children (map get-item-details-from-dom! (reverse (domina/nodes (xpath top-level-menu-item-el ".//li[@class='nested-menu-item draggable']")))))) (reverse (domina/nodes (xpath "//li[@class='top-level-menu-item draggable']"))))) (defn get-menu-string-from-dom! "Returns the HTML string represenation of the menu builder." [] ;; FIX rewrite using Hiccup or Enfocus (let [dummy-list-el (dom/createElement "ul")] (doseq [{:keys [label uri children]} (get-menu-data-from-dom!)] (let [dummy-li-el (dom/createElement "li") dummy-link-el (dom/createElement "a")] (domina/set-attr! dummy-link-el "href" uri) (domina/set-text! dummy-link-el label) (domina/append! dummy-li-el dummy-link-el) (when (pos? (count children)) (let [dummy-sub-menu-el (dom/createElement "ul")] (domina/add-class! 
dummy-sub-menu-el "sub-menu") (doseq [{:keys [label uri]} children] (let [dummy-li-el (dom/createElement "li") dummy-link-el (dom/createElement "a")] (domina/set-attr! dummy-link-el "href" uri) (domina/set-text! dummy-link-el label) (domina/append! dummy-li-el dummy-link-el) (domina/append! dummy-sub-menu-el dummy-li-el))) (domina/append! dummy-li-el dummy-sub-menu-el))) (domina/append! dummy-list-el dummy-li-el))) (str "<ul id=\"menu\">"(.-innerHTML dummy-list-el) "</ul>"))) (defhandler display-add-menu-item-dialog-callback! "Callback for the add menu item links. If the first element is not nil it is expected to be the parent element of the child element that is being added. The second argument is a sequence of singular feed maps and the third argument is a Domina event." [next-child-parent-el all-feeds evt] (let [[{:keys [language name]} :as feeds] (filter #(not (= "image" (:default-document-type %))) all-feeds) feed-el (sel "select#internal-link-feed")] (document/get-documents-for-feed language name (fn [status {:keys [documents]}] (ui/display-dialog! "Add Menu Item" (:editor/add-menu-item-dialog-form ui/snippets) (sel "form#add-menu-item-dialog-form") {:auto-close? false :modal? true} (partial handle-add-menu-item-dialog-completion next-child-parent-el all-feeds)) (update-feed-options-in-dialog! feed-el feeds) (update-menu-links-options-in-dialog! documents) (create-dialog-feed-change-event! feed-el update-menu-links-options-in-dialog!) (events/listen! (sel "input#link-type-internal") :change toggle-add-menu-item-dialog-link-type-callback!) (events/listen! (sel "input#link-type-external") :change toggle-add-menu-item-dialog-link-type-callback!))))) (defn display-menu-editor-callback! "Displays the UI for the menu editor and creates the relevant events. The feeds argument contains a sequence containing all available feeds. The feed-map is a map containing feed information for this specific document. 
The status is either an HTTP status integer if editing an existing document or nil if editing a new document. The last argument contains a map that describes the document as stored in the database." [feeds feed-map status [{:keys [title content slug _id draft language feed created published previous-id datestamp]} :as document-states]] (let [new? (nil? status)] (ui/show! {:snippet :editor/back-to-overview-link :transformations [{:selector "//p[@id='back-to-overview-link']/a[1]" :attrs {:href (str "/admin/" language "/" feed "/overview")}}]} {:snippet :ui/status-message} {:snippet :ui/caption :transformations [{:selector "//h3[@id='caption']" :text (if new? "Create Menu" "Edit Menu")}]} {:snippet :editor/title-row :transformations [{:selector "//input[@id='title']" :value title} {:selector "//input[@id='draft']" :checked draft}]} {:snippet :editor/slug-row :transformations (concat [{:selector "//input[@id='slug']" :value slug}] (when new? [{:selector "//input[@id='custom-slug']" :remove-attr "disabled"} {:selector "//label[@id='custom-slug-label']" :remove-class "disabled"}]))} {:snippet :editor/menu-builder} {:snippet :editor/add-menu-item-container} {:snippet :editor/save-button-container}) (create-common-editor-events! new? feed-map) (display-existing-menu-from-string! feeds content) ;; TODO reimplement sorting ;; TODO add edit link feature ;; TODO add blank link (i.e. '#') option (events/listen! (sel "a#add-menu-item") :click (partial display-add-menu-item-dialog-callback! nil feeds)) (events/listen! (sel "#save-document") :click (partial save-document-button-click-callback! feed-map new? document-states)))) (defn display-editor! "Displays the editor for given language and feed-name strings, as well as document slug if an existing document is being edited." [language feed-name & [slug]] (swap! 
*file* dissoc :obj :data) (document/get-feeds-list (fn [status feeds] (let [feed-map (select-feed-by-language-and-name language feed-name feeds) editor-callback-fn (cond (= (:default-document-type feed-map) "menu") display-menu-editor-callback! :default display-editor-callback!)] (if slug (document/get-doc slug (partial editor-callback-fn feeds feed-map)) (editor-callback-fn feeds feed-map nil [{:language language :feed feed-name}]))))))
109143
;; cljs/src/views/editor.cljs: UI implementation for document editor. ;; Copyright 2011-2013, Vixu.com, <NAME> <<EMAIL>>. ;; ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. (ns vix.views.editor (:use-macros [vix.crossover.macros :only [defhandler get-cdn-hostname]]) (:require [vix.document :as document] [vix.ui :as ui] [vix.util :as util] [clojure.string :as string] [domina :as domina] [domina.events :as events] [goog.dom :as dom] [goog.editor.Field :as Field] [goog.editor.plugins.BasicTextFormatter :as BasicTextFormatter] [goog.editor.plugins.RemoveFormatting :as RemoveFormatting] [goog.editor.plugins.UndoRedo :as UndoRedo] [goog.editor.plugins.ListTabHandler :as ListTabHandler] [goog.editor.plugins.SpacesTabHandler :as SpacesTabHandler] [goog.editor.plugins.EnterHandler :as EnterHandler] [goog.editor.plugins.HeaderFormatter :as HeaderFormatter] [goog.editor.plugins.LinkDialogPlugin :as LinkDialogPlugin] [goog.editor.plugins.LinkBubble :as LinkBubble] [goog.editor.Command :as buttons] [goog.ui.editor.DefaultToolbar :as DefaultToolbar] [goog.ui.editor.ToolbarController :as ToolbarController] [goog.crypt.base64 :as base64]) (:use [domina.css :only [sel]] [domina.xpath :only [xpath]])) (def error-messages {:document-title-required-error "The document title value is required." :slug-has-invalid-chars-error "Slugs can only contain '/', '-', '.' and alphanumeric characters." 
;; Remainder of the error-messages map (the map literal opens on the
;; preceding line). These are user-facing strings displayed by the editor
;; UI and are reproduced verbatim.
:slug-has-consecutive-dashes-or-slashes-error
"Slugs shouldn't contain any consecutive '-' or '/' characters."
:slug-required-error
"A valid slug is required for every document."
:slug-initial-slash-required-error
"The slug needs to start with a '/'."
:slug-not-unique-error
"This slug is not unique (document already exists)."
:could-not-save-document-error
"Something went wrong while saving the document."
:document-update-conflict-error
(str "The document has been changed after you loaded it. "
     "Please copy your changes, refresh the page and edit the "
     "most recent version of the document.")
:document-already-exists-error
(str "There already is an existing document with the provided slug. "
     "Please use a different slug.")
:invalid-filetype-error
"The filetype for the file you are trying to upload isn't supported."
:file-required-error
"This editing mode requires a file to be added."
:link-label-required-error
"The link label is required."
:link-label-has-invalid-chars-error
(str "Labels can only contain '/', '-', '.', '?', '!' "
     "and alphanumeric characters.")})

(defn slug-has-invalid-chars?
  "Checks whether the slug contains any invalid characters (i.e. not a
  forward slash, dash, period or alphanumeric character)."
  [slug]
  ;; re-matches must consume the entire string, so any character outside
  ;; the whitelist makes it return nil (i.e. the slug is invalid).
  (nil? (re-matches #"[/\-a-zA-Z0-9\.]+" slug)))

(defn validate-slug
  "Validates the provided slug string and returns either a map with a
  :message corresponding to a human-readable error message and an :error
  key corresponding to the internal error keyword. If there is no error
  the function returns the keyword :pass.

  Slugs must:
  - not be blank (:slug-required-error),
  - not have invalid characters (:slug-has-invalid-chars-error; see the
    slug-has-invalid-chars? fn),
  - not have any consecutive dashes or slashes
    (:slug-has-consecutive-dashes-or-slashes-error), and,
  - start with a slash (:slug-initial-slash-required-error)."
  [slug]
  ;; Checks run in order; the first failing condition determines the error.
  (cond (string/blank?
slug)
        {:error :slug-required-error
         :message (:slug-required-error error-messages)}
        (slug-has-invalid-chars? slug)
        {:error :slug-has-invalid-chars-error
         :message (:slug-has-invalid-chars-error error-messages)}
        (util/has-consecutive-dashes-or-slashes? slug)
        {:error :slug-has-consecutive-dashes-or-slashes-error
         :message (:slug-has-consecutive-dashes-or-slashes-error
                   error-messages)}
        (not (= (first slug) "/"))
        {:error :slug-initial-slash-required-error
         :message (:slug-initial-slash-required-error error-messages)}
        :default :pass))

(defn link-label-has-invalid-chars?
  "Returns true if the provided label contains any invalid characters and
  false if it doesn't (\\u0080 to \\uffff, alphanumeric characters,
  periods, question marks, exclamation marks, spaces and dashes are
  valid)."
  [label]
  (nil? (re-matches #"[\u0080-\uffffa-zA-Z0-9.?! -]+" label)))

(defn html-with-clean-image-uris
  "Accepts a string with an HTML value and converts any relative paths to
  images to absolute paths. Returns the improved HTML string."
  [html]
  (let [;; Counts '../' segments in s; nil-safe (non-string input yields
        ;; nil, which sorts before numbers).
        ;; FIX: the dots are now escaped. The previous pattern #\"../\"
        ;; matched ANY two characters followed by a slash, which
        ;; overcounted and could break the longest-first replacement
        ;; ordering relied on below.
        get-num-sub-paths (fn [s]
                            (when (string? s)
                              (count (re-seq #"\.\./" s))))
        ;; Pairs of [original-src cleaned-src]: '../' segments stripped
        ;; and a leading '/' added unless the src is already an absolute
        ;; http(s) URI.
        unsorted-pairs
        (map (fn [re-pair]
               (let [orig-src (nth re-pair 1)]
                 [orig-src
                  (str (when-not (re-matches #"^http[s]{0,1}(.*)"
                                             orig-src)
                         "/")
                       (string/replace orig-src "../" ""))]))
             (re-seq #"<img src=\"(.*?)\"" html))]
    ;; need to sort, otherwise shorter links to the same image mess
    ;; up longer ones
    (loop [modified-html html
           img-uri-pairs (sort-by get-num-sub-paths unsorted-pairs)]
      (if (pos? (count img-uri-pairs))
        (recur (string/replace modified-html
                               (first (last img-uri-pairs))
                               (last (last img-uri-pairs)))
               (butlast img-uri-pairs))
        modified-html))))

(defn create-editor-field!
  "Creates a goog.editor.Field instance with provided element-id."
  [element-id]
  (goog.editor.Field. element-id))

(defn register-editor-plugins!
  "Accepts a goog.editor.Field instance (created by the create-editor-field!
fn) and registers the following goog.editor.plugins: BasicTextFormatter, RemoveFormatting, UndoRedo, ListTabHandler, SpacesTabHandler, EnterHandler, HeaderFormatter, LinkDialogPlugin and LinkBubble." [editor] (doto editor (.registerPlugin (goog.editor.plugins.BasicTextFormatter.)) (.registerPlugin (goog.editor.plugins.RemoveFormatting.)) (.registerPlugin (goog.editor.plugins.UndoRedo.)) (.registerPlugin (goog.editor.plugins.ListTabHandler.)) (.registerPlugin (goog.editor.plugins.SpacesTabHandler.)) (.registerPlugin (goog.editor.plugins.EnterHandler.)) (.registerPlugin (goog.editor.plugins.HeaderFormatter.)) (.registerPlugin (goog.editor.plugins.LinkDialogPlugin.)) (.registerPlugin (goog.editor.plugins.LinkBubble.)))) (defn create-editor-toolbar! "Creates a toolbar using the node with given element-id as a containe and optionally accepts buttons that are to be included as optional further elements. If no optional arguments are provided the default buttons are used. The available buttons are: :bold :italic :underline :font-color :background-color :font-face :font-size, :format-block :link :image :undo :redo :unordered-list :ordered-list :indent :outdent :justify-left :justify-center, :justify-right :subscript :superscript :strike-through, :remove-format :edit-html The default buttons are: :bold :italic :underline :strike-through :font-color :background-color :font-face :font-size :link :image :undo :redo :unordered-list :ordered-list :indent :outdent :justify-left :justify-center :justify-right :subscript :superscript :remove-format" [element-id & included-buttons] (let [buttons-map {:bold buttons/BOLD :italic buttons/ITALIC :underline buttons/UNDERLINE :font-color buttons/FONT_COLOR :background-color buttons/BACKGROUND_COLOR :font-face buttons/FONT_FACE :font-size buttons/FONT_SIZE :format-block buttons/FORMAT_BLOCK :link buttons/LINK :image buttons/IMAGE :undo buttons/UNDO :redo buttons/REDO :unordered-list buttons/UNORDERED_LIST :ordered-list buttons/ORDERED_LIST 
:indent buttons/INDENT :outdent buttons/OUTDENT :justify-left buttons/JUSTIFY_LEFT :justify-center buttons/JUSTIFY_CENTER :justify-right buttons/JUSTIFY_RIGHT :subscript buttons/SUBSCRIPT :superscript buttons/SUPERSCRIPT :strike-through buttons/STRIKE_THROUGH :remove-format buttons/REMOVE_FORMAT :edit-html buttons/EDIT_HTML} buttons (to-array (map #(get buttons-map %) (or included-buttons [:bold :italic :underline :strike-through :font-color :background-color :font-face :font-size :link :image :undo :redo :unordered-list :ordered-list :indent :outdent :justify-left :justify-center :justify-right :subscript :superscript :remove-format])))] (DefaultToolbar/makeToolbar buttons (util/get-element element-id)))) (def editor-fields (atom {})) (defn make-editable! "Turns the node with provided element-id into an editable field, using the node with the provided toolbar-id as a toolbar and swapping the value into the editor-fields map using the provided field-key. Sets the content to the provided html-content string." [field-key element-id toolbar-id html-content] (let [editor (create-editor-field! element-id)] (swap! editor-fields assoc field-key editor) (when html-content (. editor (setHtml false html-content true false))) (register-editor-plugins! editor) (goog.ui.editor.ToolbarController. editor (create-editor-toolbar! toolbar-id :bold :italic :underline :strike-through :format-block :link :image :undo :redo :unordered-list :ordered-list :indent :outdent :justify-left :justify-center :justify-right :subscript :superscript :remove-format)) (. editor (makeEditable)))) (def *file* (atom {})) (defn create-slug "Returns a slug using the provided title and using the :default-slug-format and :default-document-type keys from the feed map provided as the second argument through the vix.util/create-slug fn." [title {:keys [default-slug-format default-document-type] :as feed}] (util/create-slug default-slug-format title feed (util/date-now!) 
;; Tail of create-slug (the defn opens on the previous line): use the
;; uploaded file's extension for image feeds when one is known, otherwise
;; default to "html".
(if (and (= default-document-type "image")
         (:extension (:data @*file*)))
  (:extension (:data @*file*))
  "html")))

(defn strip-filename-extension
  "Returns the provided filename string, removing the extension if it has
   any."
  [filename]
  ;; re-find with one capture group returns [whole-match group] on
  ;; success, so a count of 2 means the extension pattern matched
  (let [pieces (re-find #"^(.*?)\.[a-zA-Z0-9]{1,10}$" filename)]
    (if (= (count pieces) 2)
      (nth pieces 1)
      filename)))

(defn image-filename-to-title
  "Translates the provided image filename to a document title suggestion
   by joining the alphanumeric segments separated by spaces."
  [filename]
  (string/join " "
               (filter #(not (string/blank? %))
                       (string/split (strip-filename-extension filename)
                                     #"[^a-zA-Z0-9]"))))

(defn select-feed-by-language-and-name
  "Returns the feed map with the value of the :language key matching the
   given language string and the value of the :name key matching the
   given feed-name string from the provided feeds sequence."
  [language feed-name feeds]
  (first (filter (fn [feed]
                   (and (= feed-name (:name feed))
                        (= language (:language feed))))
                 feeds)))

(defn get-editor-content
  "Returns the clean HTML string content from the editor with the
   provided key (e.g. :content or :description) or nil if that editor
   doesn't exist in the @editor-fields map."
  [field-key]
  (when-let [editor-field (field-key @editor-fields)]
    ;; strip the id of the preview element that may have been dragged in
    ;; from the image selection dialog
    (string/replace (html-with-clean-image-uris
                     (.getCleanContents editor-field editor-field))
                    #"id=\"image-dialog-preview\""
                    "")))

(defn parse-related-document-id
  "Parses a related document id attribute value
   (e.g. related-image@@@/en/images/foo@@@Foo) which is either provided
   as a string or as a Domina element with the value in the id attribute.
   Returns a map with the document slug mapped to the :slug key and the
   title mapped to the :title key."
  [el-or-string]
  ;; the prefix before the first @@@ separator is discarded (bound to _)
  (let [[_ slug title] (string/split (if (string? el-or-string)
                                       el-or-string
                                       (domina/attr el-or-string :id))
                                     #"@@@")]
    {:slug slug :title title}))

(defn get-document-relations!
"Retrieves a vector of related pages from the DOM, using the provided domina-css-selector string to select the desired elements." [domina-css-selector] (vec (map parse-related-document-id (domina/nodes (sel domina-css-selector))))) (defn get-icon-image! "Retrieves a map with the icon image details from the DOM, with a :title key for the title and a :slug key for the slug." [] (when-let [icon-container-el (domina/single-node (sel ".image-icon-container"))] (parse-related-document-id icon-container-el))) (defn get-document-data-from-dom! "Returns a map with the document data extracted from the DOM using the provided feed-map for the language and feed-name values, as well as to determine the document type." [feed-map] (let [form-fields [:title :subtitle :slug :start-time :end-time]] (merge (zipmap form-fields (map (fn [field-name] (try (ui/get-form-value (name field-name)) (catch js/Error e nil))) form-fields)) {:language (:language feed-map) :feed (:name feed-map) :draft (ui/is-checked? "draft") :content (if (= (:default-document-type feed-map) "menu") (get-menu-string-from-dom!) (get-editor-content :content)) :description (get-editor-content :description) :icon (get-icon-image!) :related-pages (get-document-relations! ".related-page") :related-images (get-document-relations! ".related-image")}))) (defn save-document! "Saves the document and either returns the new version or an error message. Takes a singular feed map, a boolean value for new? that determines if its a new or edited document, the current-document-state (a map if editing an existing document), the future-state map with the output from the get-document-data-from! function and slug-validation with the output from a validate-slug call." [feed-map new? current-document-state future-state slug-validation] (let [save-button-el (sel "#save-document")] (domina/set-attr! save-button-el :disabled "disabled") (document/append-to-document (merge future-state (if new? 
;; Tail of save-document! (the defn opens on the previous line): merge in
;; the :create/:update action and handle the server response in the
;; append-to-document callback.
     {:action :create}
     {:action :update
      :previous-id (:_id current-document-state)}))
   (fn [status document-states]
     (let [[{:keys [slug title]} & _] document-states]
       (cond
         ;; 201: a new document was created; point the address bar at its
         ;; edit URI (slug already starts with "/")
         (= status 201)
         (util/navigate-replace-state
          (str (:language feed-map) "/" (:name feed-map) "/edit" slug)
          (str "Edit \"" title "\""))
         ;; 200: an existing document was updated; re-display the editor
         (= status 200)
         (display-editor! (:language feed-map) (:name feed-map) slug)
         ;; anything else is an error; map the server's message to a
         ;; human-readable one from error-messages
         :else
         (ui/display-error
          (sel "#status-message")
          (cond
            (= (str "There is an existing document "
                    "with the provided slug.")
               document-states)
            (:document-already-exists-error error-messages)
            (= (str "This document map doesn't contain "
                    "the most recent :previous-id.")
               document-states)
            (:document-update-conflict-error error-messages)
            ;; FIX: previously looked up :could-not-save-feed-error, which
            ;; is not a key in the error-messages map (the lookup returned
            ;; nil, so no fallback message was shown); the key defined in
            ;; error-messages is :could-not-save-document-error.
            :default
            (:could-not-save-document-error error-messages))))
       ;; re-enable the save button regardless of the outcome
       (domina/remove-attr! save-button-el :disabled))))))

(defn save-image-document!
  "Wrapper for save-document! that deals with image documents. See the
   save-document! function for the arguments. If the user is trying to
   add a new image without providing a file an error message is
   displayed."
  [feed-map new? current-document-state future-state slug-validation]
  (let [file (:obj @*file*)
        reader (new js/FileReader)]
    (cond
      ;; if the image has been changed, load it and call save-document!:
      (not (nil? file))
      (do (set! (.-onload reader)
                (fn [evt]
                  (save-document!
                   feed-map
                   new?
                   current-document-state
                   (merge future-state
                          ;; NOTE(review): stores the new file under
                          ;; :attachment (singular) while the edit branch
                          ;; below carries over :attachments (plural) —
                          ;; presumably the server converts on create;
                          ;; confirm against the document API.
                          {:attachment
                           {:type (.-type file)
                            :data (base64/encodeString
                                   (.-result (.-target evt)))}})
                   slug-validation)))
          (. reader (readAsBinaryString file)))
      ;; allow editing existing documents without changing the image:
      (not new?)
      (save-document! feed-map
                      new?
                      current-document-state
                      (merge future-state
                             (select-keys current-document-state
                                          [:attachments]))
                      slug-validation)
      ;; display an error message if the user is trying to save a new
      ;; document without providing an image:
      :default
      (ui/display-error (sel "#status-message")
                        (:file-required-error error-messages)))))

(defhandler save-document-button-click-callback!
"Performs validation when the default-slug-format value changes and displays and removes errors when necessary. If the validation is successful the save-document! fn is called. Takes a singular feed map, a boolean value for new? that determines if this is a fresh or existing document, a sequence of document states if editing an existing document and a Domina event object." [feed-map new? [current-document-state & _] evt] (let [status-message-el (sel "#status-message") future-state (get-document-data-from-dom! feed-map) slug-validation (validate-slug (:slug future-state))] (cond (string/blank? (:title future-state)) (ui/display-error status-message-el (:document-title-required-error error-messages)) (not (= :pass slug-validation)) (ui/display-error status-message-el (:message slug-validation)) (= (:default-document-type feed-map) "image") (save-image-document! feed-map new? current-document-state future-state slug-validation) :default (when-not (= (select-keys current-document-state (keys future-state)) future-state) ;; don't save if there are no changes (save-document! feed-map new? current-document-state future-state slug-validation))))) (defn detect-duplicate-custom-slug-callback! "Detects if the custom slug is a duplicate and adds/removes UI error messages accordingly. Used as a callback for vix.document/get-doc." [status [doc-current-state & _]] (let [slug-el (sel "#slug") slug-label-el (sel "#slug-label") status-el (sel "#status-message")] (if (and (= status 200) (not (= (:action doc-current-state) :delete))) (ui/display-error status-el (:slug-not-unique-error error-messages) slug-el slug-label-el) (ui/remove-error status-el slug-el slug-label-el)))) (defn increment-slug "Increments the provided slug string by either incrementing the last character if it is a number or adding '-2' to the end of the string." 
[slug] (if-let [slug-matches (re-matches #"(.*?)-([0-9]+)$" slug)] (str (nth slug-matches 1) "-" (inc (js/parseInt (last slug-matches)))) (str slug "-2"))) (defhandler slug-validation-callback! "Validates custom slugs. Displays an error if required, otherwise the doc with the desired slug is requested and validated by the detect-duplicate-custom-slug-callback!." [evt] (when (ui/is-checked? "custom-slug") (let [slug-el (events/target evt) slug (ui/get-form-value slug-el) slug-label-el (sel "#slug-label") status-el (sel "#status-message") slug-validation (validate-slug slug)] (if (= :pass slug-validation) (do (ui/remove-error status-el slug-el slug-label-el) (document/get-doc slug detect-duplicate-custom-slug-callback!)) (ui/display-error status-el (:message slug-validation) slug-el slug-label-el))))) (def slugs (atom #{})) (defn handle-duplicate-slug-callback! "Automatically appends '-2' to the end of a duplicate slug, or increments n if the slug already ends with '-n' where n is an integer. Keeps calling itself until no more duplicates are found and finally sets the unique value if that value doesn't match the original value." [status [doc-current-state & _]] (if (and (= status 200) (not (= (:action doc-current-state) :delete))) ;; add incremented slug to @slugs and rerun the get-doc call with ;; the new slug and this function as the callback: (let [new-slug (increment-slug (document/add-initial-slash (:slug doc-current-state)))] (swap! slugs conj new-slug) (document/get-doc new-slug handle-duplicate-slug-callback!)) ;; set the final slug value if needed: (let [final-slug (last (sort-by count @slugs)) slug-el (sel "#slug")] (when-not (= (ui/get-form-value slug-el) final-slug) (ui/set-form-value slug-el final-slug))))) (defn synchronize-slug "Automatically generates a slug value based on the value of the provided title element." [title-el feed] (let [new-slug (create-slug (ui/get-form-value title-el) feed)] (when (nil? (ui/get-form-value "custom-slug")) (reset! 
slugs #{new-slug}) (document/get-doc new-slug handle-duplicate-slug-callback!)))) (defhandler custom-slug-toggle-callback! "Toggles the slug input between editable (i.e. custom) and disabled." [feed evt] (if (nil? (ui/get-form-value (events/target evt))) (do (ui/disable-element "slug") (ui/disable-element "slug-label") (synchronize-slug (sel "#title") feed)) (do (ui/enable-element "slug") (ui/enable-element "slug-label")))) (defhandler title-edited-callback! "Automatically generates a slug for current title if custom slug isn't checked." [feed evt] (synchronize-slug (events/target evt) feed)) (defhandler handle-datefield-click-callback! "Displays a datepicker on click." [evt] (ui/display-datepicker (fn [{:keys [date-string date-object hour minute]}] (ui/set-form-value (events/target evt) (if date-object (str date-string " " hour ":" minute) ""))) true)) (defn display-image-preview "Displays a preview of the image contained in given file object with the provided title." [file title] (let [reader (new js/FileReader)] (set! (.-onload reader) (fn [evt] (domina/set-attrs! (sel "#image-preview") {:title title :src (.-result (.-target evt))}))) (. reader (readAsDataURL file)))) (def mimetype-to-extension-map {:image/png "png" :image/gif "gif" :image/jpeg "jpg"}) (def *file* (atom {})) (defn handle-image-drop-callback! "Is called when a file is dropped on the image drop target. If the file is recognized as an image it is displayed in the interface and swapped into the *file* atom (under the :obj key). Otherwise an invalid filetype message is shown." [current-feed-state new? evt] ;; domina/prevent-default & domina/stop-propagation don't work for ;; these events, so not using defhandler, but a regular function. (do (. evt (preventDefault)) (. 
evt (stopPropagation))) (if-let [file (aget (.-files (.-dataTransfer evt)) 0)] (let [status-el (sel "#status-message") image-information-el (sel "#image-information-container") title (image-filename-to-title (.-name file))] (if-let [extension (mimetype-to-extension-map (keyword (.-type file)))] (do (swap! *file* assoc :obj file :data {:extension extension}) (ui/remove-error status-el) (ui/set-form-value (dom/getElement "title") title) (display-image-preview file title) (domina/remove-class! image-information-el "hide") (domina/set-text! (sel "#image-filename") (.-name file)) (domina/set-text! (sel "#image-filetype") (.-type file)) (domina/set-text! (sel "#image-size") (format "%.2f"(/ (.-size file) 1000))) (when new? (synchronize-slug (sel "#title") current-feed-state))) (do (swap! *file* dissoc :obj :data) (domina/add-class! image-information-el "hide") (ui/display-error status-el (:invalid-filetype-error error-messages))))))) (defn remove-self-from-documents "Removes the map with the given self-slug string as a :slug value from the provided sequence of document maps." [self-slug documents] (remove #(= (:slug %) self-slug) documents)) (defn remove-existing-from-documents "Removes any documents with a :slug value that is part of the existing-documents vector." [existing-documents documents] (remove (fn [{:keys [slug]}] (not (nil? (some #{slug} (map :slug existing-documents))))) documents)) (defn format-option "Replaces keyword values in the given option-format sequence with the string value for that keyword in the provided option-map and leaves string values in the option-format sequence as-is. Returns a string version of the new sequence." [option-map option-format] (apply str (map (fn [v] (if (string? v) v (option-map v))) option-format))) (defn update-options-in-dialog! "Adds select options to the DOM. 
Takes the select-el, which is the Domina element node the options are being added to, the snippet-key, which is the key for the HTML snippets in the ui/snippets map, the option-xpath-selector, which is the xpath selector required to select the newly added option, option-maps, which is a sequence of maps containing values for the option, title-format and value-format, which are sequences describing the format for the particular values (see the format-option function for the structure of the format sequences)." [select-el snippet-key option-xpath-selector option-maps title-format value-format] (domina/destroy-children! select-el) (doseq [option option-maps] (domina/append! select-el (ui/snippets snippet-key)) (doto (xpath option-xpath-selector) (domina/set-text! (format-option option title-format)) (domina/set-attr! :value (format-option option value-format))))) (defn update-feed-options-in-dialog! "Appends select options for given feeds to the provided feeds-el." [feeds-el feeds] (update-options-in-dialog! feeds-el :editor/.feed-select-option "(//option[@class='feed-select-option'])[last()]" feeds [:title] ["['" :language "','" :name "']"])) (defn update-document-options-in-dialog! "Appends provided select options for given documents to documents-el, while removing the document matching self-slug. Any documents in the removable-documents vector are removed from the documents list." [self-slug documents-el documents removable-documents] (update-options-in-dialog! documents-el :editor/.document-select-option "(//option[@class='document-select-option'])[last()]" (remove-existing-from-documents removable-documents (remove-self-from-documents self-slug documents)) [:title] ["option@@@" :slug "@@@" :title])) (defn update-menu-links-options-in-dialog! "Adds the provided sequence of document maps to the dialog as select options." [documents] (update-options-in-dialog! 
(sel "select#internal-link") :editor/.document-select-option "(//option[@class='document-select-option'])[last()]" documents [:title] [:slug])) (defhandler remove-parent-callback! "Event callback that removes the parent of the event target DOM node." [evt] (domina/destroy! (xpath (events/target evt) ".."))) (defn add-related-page! "Adds a li element for the given page to the DOM. This function is used as a ui/display-dialog callback!, which provides the dialog-status (either :ok or :cancel) and a map with the form fields and their values as callback arguments." [dialog-status {:keys [internal-link-feed internal-link]}] (when (and (= dialog-status :ok) (not (nil? internal-link))) (domina/append! (sel "#related-pages-container") (:editor/.related-page ui/snippets)) (let [{:keys [slug title]} (parse-related-document-id internal-link)] (doto (xpath "(//li[@class='related-page'])[last()]") (domina/set-attr! :id (str "related-page@@@" slug "@@@" title))) (doto (xpath "(//span[@class='related-page-title'])[last()]") (domina/set-text! title)) (events/listen! (xpath "(//a[@class='related-page-delete-link'])[last()]") :click remove-parent-callback!)))) (defhandler image-preview-link-click-callback! "Handler for clicks on an image preview link that displays the image in a dialog." [evt] (let [{:keys [title slug]} (parse-related-document-id (xpath (events/target evt) ".."))] (ui/display-dialog! "Image Preview" (:editor/image-preview-in-dialog-container ui/snippets) nil {:auto-close? true :modal? true} (fn []) :cancel) (domina/set-attrs! (sel "#image-dialog-preview") {:src (str (get-cdn-hostname) slug) :alt title :title title}))) (defn add-related-image! "Adds a li element for the given image to the DOM. This function is used as a ui/display-dialog callback, which provides the dialog-status (either :ok or :cancel) and a map with the form fields and their values as callback arguments." 
;; Remainder of add-related-image! (the defn and its docstring open on the
;; previous line): argument vector and body.
[dialog-status {:keys [internal-link-feed internal-link]}]
(when (and (= dialog-status :ok) (not (nil? internal-link)))
  (domina/append! (sel "#related-images-container")
                  (:editor/.related-image ui/snippets))
  (let [{:keys [slug title]} (parse-related-document-id internal-link)]
    (doto (xpath "(//li[@class='related-image'])[last()]")
      ;; FIX: the id prefix used to read "related-page@@@", which was
      ;; inconsistent with the "related-image@@@" ids that
      ;; display-editor-callback! produces for the same elements. The
      ;; prefix is discarded by parse-related-document-id, so behavior is
      ;; unchanged.
      (domina/set-attr! :id (str "related-image@@@" slug "@@@" title)))
    (doto (xpath "(//span[@class='related-image-title'])[last()]")
      (domina/set-text! title))
    ;; wire up the per-item delete and preview links
    (events/listen! (xpath "(//a[@class='related-image-delete-link'])[last()]")
                    :click
                    remove-parent-callback!)
    (events/listen!
     (xpath "(//a[@class='related-image-preview-link'])[last()]")
     :click
     image-preview-link-click-callback!))))

(defn create-icon-image-events!
  "Creates delete and preview events for image icon."
  []
  (events/listen! (sel "#icon-image-delete-link")
                  :click
                  remove-parent-callback!)
  (events/listen! (sel "#icon-image-preview-link")
                  :click
                  image-preview-link-click-callback!))

(defn add-icon-image!
  "Adds a li element for the given image to the DOM. This function is
   used as a ui/display-dialog callback, which provides the dialog-status
   (either :ok or :cancel) and a map with the form fields and their
   values as callback arguments."
  [dialog-status {:keys [internal-link-feed internal-link]}]
  (when (and (= dialog-status :ok) (not (nil? internal-link)))
    (let [{:keys [slug title]} (parse-related-document-id internal-link)]
      ;; replace any previously selected icon before appending the new one
      (domina/destroy! (sel ".image-icon-container"))
      (domina/append! (sel "#icon-container")
                      (:editor/.image-icon-container ui/snippets))
      (domina/set-attr! (sel ".image-icon-container")
                        :id
                        (str "icon@@@" slug "@@@" title))
      (domina/set-text! (sel "#icon-image-title") title)
      (create-icon-image-events!))))

(defn update-dialog-image-preview!
  "Sets the image preview in the image selection dialog to the slug and
   title of the provided document map."
  [{:keys [slug title]}]
  (when (string? slug)
    (domina/set-attrs!
(sel "#image-dialog-preview") {:src (str (get-cdn-hostname) slug) :alt title :title title}))) (defn create-dialog-feed-change-event! "Creates an event that tracks changes on feed-el and executes the update-document-options-fn if a change happens, which will update the document select field. The update-document-options-fn needs to accept the retrieved document sequence as the first and only value. If the dialog contains an image-dialog-preview image the update-dialog-image-preview! function is executed as well, using the optional relations argument to purge already related images from the options." [feed-el update-document-options-fn & [relations]] (events/listen! feed-el :change (fn [evt] (let [[language feed-name] (util/pair-from-string (ui/get-form-value (events/target evt)))] (document/get-documents-for-feed language feed-name (fn [status {:keys [documents]}] (update-document-options-fn documents) (when (domina/single-node (sel "img#image-dialog-preview")) (update-dialog-image-preview! (first (remove-existing-from-documents (or relations []) documents)))))))))) (defn fill-document-select-dialog! "Fills the internal-link and internal-link-feeds select fields in a previously created dialog and adds an event listener that loads the new documents if another feed is selected. Takes node-selector (nil or a Domina CSS selector; to pass to the get-document-relations! function), self-slug with the slug for the active document, feeds with a sequence of feed maps and documents with a sequence of document maps for the first feed." [node-selector self-slug feeds documents] (let [internal-link-el (sel "#internal-link") internal-link-feed-el (sel "#internal-link-feed") relations (if node-selector (get-document-relations! node-selector) [])] (update-feed-options-in-dialog! internal-link-feed-el feeds) (update-document-options-in-dialog! self-slug internal-link-el documents (if node-selector (get-document-relations! node-selector) [])) (create-dialog-feed-change-event! 
internal-link-feed-el #(update-document-options-in-dialog! self-slug internal-link-el % relations) relations))) (defhandler add-related-page-link-callback! "Handler for clicks on the add-related-page link that displays a dialog that can be used to select a related page for the document. Takes self-slug, the slug of the current document, to filter it out of the results, as well as a sequence of singular feed maps called feeds and a Domina event called evt." [self-slug all-feeds evt] (let [[{:keys [language name]} :as feeds] (filter #(not (= "image" (:default-document-type %))) all-feeds)] (document/get-documents-for-feed language name (fn [status {:keys [documents]}] (ui/display-dialog! "Add Related Page" (:editor/add-related-page-dialog-form ui/snippets) (sel "#add-related-page-dialog-form") {:auto-close? true :modal? true} add-related-page!) (fill-document-select-dialog! ".related-page" self-slug feeds documents))))) (defn create-image-preview-event! "Creates the event that changes the preview image when a new image is selected in an image dialog." [] (events/listen! (sel "select#internal-link") :change (fn [evt] (update-dialog-image-preview! (parse-related-document-id (ui/get-form-value (events/target evt))))))) (defhandler add-related-image-link-callback! "Handler for clicks on the add-related-image link that displays a dialog that can be used to select a related image for the document. Takes self-slug, the slug of the current document, to filter it out of the results, as well as a sequence of singular feed maps called feeds and a Domina event called evt." [self-slug all-feeds evt] (let [[{:keys [language name]} :as feeds] (filter #(= "image" (:default-document-type %)) all-feeds)] (document/get-documents-for-feed language name (fn [status {:keys [documents]}] (ui/display-dialog! "Add Related Image" (:editor/add-image-dialog-form ui/snippets) (sel "#add-image-dialog-form") {:auto-close? true :modal? true} add-related-image!) (update-dialog-image-preview! 
(first (remove-existing-from-documents (get-document-relations! ".related-image") documents))) (create-image-preview-event!) (fill-document-select-dialog! ".related-image" self-slug feeds documents))))) (defhandler add-icon-image-link-callback! "Handler for clicks on the add-icon-image-link that displays a dialog that can be used to select an icon image for the document. Takes self-slug, the slug of the current document, to filter it out of the results, as well as a sequence of singular feed maps called feeds and a Domina event called evt." [self-slug all-feeds evt] (let [[{:keys [language name]} :as feeds] (filter #(= "image" (:default-document-type %)) all-feeds)] (document/get-documents-for-feed language name (fn [status {:keys [documents]}] (ui/display-dialog! "Add Icon Image" (:editor/add-image-dialog-form ui/snippets) (sel "#add-image-dialog-form") {:auto-close? true :modal? true} add-icon-image!) (update-dialog-image-preview! (first documents)) (create-image-preview-event!) (fill-document-select-dialog! nil self-slug feeds documents))))) (defhandler editor-add-image-button-callback! "Handler for clicks on the add image button in editor toolbars that displays a non-modal image selection dialog. Images can be dragged into the editor field from the dialog. The all-feeds argument is a sequence of singular feed maps and the evt argument contains the Domina event object." [all-feeds evt] (when-not (domina/single-node (sel "div.modal-dialog")) (let [[{:keys [language name]} :as feeds] (filter #(= "image" (:default-document-type %)) all-feeds)] (document/get-documents-for-feed language name (fn [status {:keys [documents]}] (ui/display-dialog! "Add Image" (:editor/add-image-dialog-form ui/snippets) (sel "#add-image-dialog-form") {:auto-close? true :modal? false} add-icon-image! :ok) (domina/remove-class! (sel "h5#drag-to-editor-instruction") "hide") (update-dialog-image-preview! (first documents)) (create-image-preview-event!) (fill-document-select-dialog! 
nil "" feeds documents)))))) (defn create-common-editor-events! "Creates events that are shared by all editor modes. Takes a boolean value new? which determines if it concerns a fresh document and feed-map, a singular map containing feed information for the active document." [new? feed-map] (when new? (events/listen! (sel "#title") :keyup (partial title-edited-callback! feed-map))) (events/listen! (sel "#custom-slug") :change (partial custom-slug-toggle-callback! feed-map)) (events/listen! (sel "#slug") :keyup slug-validation-callback!)) (defn display-editor-callback! "Displays the editor UI and creates the relevant events. The feeds argument contains a sequence containing all available feeds. The feed-map is a map containing feed information for this specific document. The status is either an HTTP status integer if editing an existing document or nil if editing a new document. The last argument contains a map that describes the document as stored in the database." [feeds {:keys [default-document-type] :as feed-map} status [{:keys [title content slug _id description draft related-pages language feed subtitle icon related-images created start-time-rfc3339 start-time published previous-id end-time-rfc3339 end-time datestamp attachments]} :as document-states]] (let [new? (nil? status)] (ui/show! {:snippet :editor/back-to-overview-link :transformations [{:selector "//p[@id='back-to-overview-link']/a[1]" :attrs {:href (str "/admin/" language "/" feed "/overview")}}]} {:snippet :ui/status-message} (when (= default-document-type "image") [{:snippet :editor/image-drop-target} {:snippet :editor/image-information-container :transformations [(when slug {:selector "//img[@id='image-preview']" :attrs {:src (str "data:" (get-in attachments [:original :type]) ";base64," (get-in attachments [:original :data])) :alt title :name title}})]}]) {:snippet :ui/caption :transformations [{:selector "//h3[@id='caption']" :text (if new? 
"Create Document" "Edit Document")}]} {:snippet :editor/title-row :transformations [{:selector "//input[@id='title']" :value title} {:selector "//input[@id='draft']" :checked draft}]} {:snippet :editor/subtitle-row :transformations [{:selector "//input[@id='subtitle']" :value subtitle}]} {:snippet :editor/slug-row :transformations (concat [{:selector "//input[@id='slug']" :value slug}] (when new? [{:selector "//input[@id='custom-slug']" :remove-attr "disabled"} {:selector "//label[@id='custom-slug-label']" :remove-class "disabled"}]))} (when (= default-document-type "event") [{:snippet :editor/start-time-row :transformations [{:selector "//input[@id='start-time']" :value start-time}]} {:snippet :editor/end-time-row :transformations [{:selector "//input[@id='end-time']" :value end-time}]}]) [(merge {:snippet :editor/icon-container} (when-not (nil? icon) {:children [{:snippet :editor/.image-icon-container :transformations [{:selector "//div[@class='image-icon-container']" :attrs {:id (str "icon@@@" (:slug icon) "@@@" (:title icon))}} {:selector "//span[@id='icon-image-title']" :text (:title icon)}]}]})) {:snippet :editor/document-relations :children (doall (map (fn [{:keys [title slug]}] {:snippet :editor/.related-page :parent "//ul[@id='related-pages-container']" :transformations [{:selector "(//li[@class='related-page'])[last()]" :attrs {:id (str "related-page@@@" slug "@@@" title)}} {:selector "(//span[@class='related-page-title'])[last()]" :text title}]}) related-pages))} {:snippet :editor/image-relations :children (doall (map (fn [{:keys [title slug]}] {:snippet :editor/.related-image :parent "//ul[@id='related-images-container']" :transformations [{:selector "(//li[@class='related-image'])[last()]" :attrs {:id (str "related-image@@@" slug "@@@" title)}} {:selector "(//span[@class='related-image-title'])[last()]" :text title}]}) related-images))} {:snippet :editor/editor-images} (when (= default-document-type "with-description") {:snippet 
:editor/description-container}) {:snippet :editor/content-container}] {:snippet :editor/save-button-container}) (make-editable! :content "content" "toolbar" content) (when (= default-document-type "with-description") (make-editable! :description "description-content" "description-toolbar" description)) (when-let [drop-target-el (dom/getElement "image-drop-target")] (. drop-target-el (addEventListener "drop" (partial handle-image-drop-callback! feed-map new?) false)) (. drop-target-el (addEventListener "dragover" (fn [e] (. e (preventDefault)) (. e (stopPropagation))) false))) (create-common-editor-events! new? feed-map) (create-icon-image-events!) (events/listen! (sel "#start-time") :click handle-datefield-click-callback!) (events/listen! (sel "#end-time") :click handle-datefield-click-callback!) (events/listen! (sel "#add-icon-image-link") :click (partial add-icon-image-link-callback! slug feeds)) (events/listen! (sel ".related-page-delete-link") :click remove-parent-callback!) (events/listen! (sel ".related-image-delete-link") :click remove-parent-callback!) (events/listen! (sel ".related-image-preview-link") :click image-preview-link-click-callback!) (events/listen! (sel "#add-related-page-link") :click (partial add-related-page-link-callback! slug feeds)) (events/listen! (sel "#add-related-image-link") :click (partial add-related-image-link-callback! slug feeds)) (events/listen! (xpath "//div[@class='tr-icon tr-image']/../..") :click (partial editor-add-image-button-callback! feeds)) (events/listen! (sel "#save-document") :click (partial save-document-button-click-callback! feed-map new? document-states)))) (defhandler toggle-add-menu-item-dialog-link-type-callback! "Toggles between showing the internal-link-row and the external-link-row, depending on the link-type field value." [evt] (if (= (ui/get-form-value (events/target evt)) "internal") (do (domina/remove-class! (sel "tr#internal-link-row") "hide") (domina/add-class! 
(sel "tr#external-link-row") "hide")) (do (domina/remove-class! (sel "tr#external-link-row") "hide") (domina/add-class! (sel "tr#internal-link-row") "hide")))) (defn append-add-sub-item-link! "Adds an add-sub-item link the the provided item-details-el, accepting a sequence of singular feed maps called as the second argument to pass to the display-add-menu-item-dialog-callback! function." [item-details-el all-feeds] (domina/append! item-details-el (:editor/.add-sub-item ui/snippets)) (events/listen! (xpath item-details-el "(.//a[@class='add-sub-item'])[last()]") :click (partial display-add-menu-item-dialog-callback! (domina/single-node (xpath item-details-el "..")) all-feeds))) (defhandler delete-menu-item-callback! "Deletes the menu item containing the active delete item link, as well as the ul containing it if it is the only item node left. If the latter is true, the add-sub-item link is also re-added to the parent. Accepts a sequence containing singular feed maps as the first argument and the triggered Domina event as the second argument." [all-feeds evt] (let [three-levels-up (xpath (events/target evt) "../../..") four-levels-up (xpath three-levels-up "..")] (if (and (= (domina/attr three-levels-up "class") "nested-menu-category") (= (count (domina/children three-levels-up)) 2)) (do ;; re-add add-sub-item to parent if deleting the whole category (append-add-sub-item-link! (domina/single-node (xpath four-levels-up "(.//span[@class='item-details'])")) all-feeds) ;; and remove the sub-menu ul (domina/destroy! three-levels-up) ) ;; just remove the li containing the current item if it has siblings (domina/destroy! (xpath (events/target evt) "../.."))))) (defn get-nested-menu-category-el! "Returns the .nested-menu-category element for parent-el, creating it if necessary." [parent-el] (or (domina/single-node (xpath parent-el "(.//ul[@class='nested-menu-category'])[last()]")) (do (domina/append! 
parent-el (:editor/.nested-menu-category ui/snippets)) (get-nested-menu-category-el! parent-el)))) (defn add-item-details! "Adds the item-details span to parent-el, using the link-label, link-type, internal-links and external-link arguments to determine the contents. Accepts a sequence of singular feed maps as the second argument, to pass to the detele-menu-item-callback! function." [parent-el all-feeds link-label link-type internal-link external-link] (domina/append! parent-el (:editor/.item-details ui/snippets)) (let [item-details-el (xpath parent-el ".//span[@class='item-details']")] (domina/set-text! (xpath item-details-el ".//span[@class='link-label']") link-label) (domina/set-text! (xpath item-details-el ".//span[@class='link-uri']") (if (= link-type "internal") internal-link external-link)) (events/listen! (xpath item-details-el ".//a[@class='menu-item-delete-link']") :click (partial delete-menu-item-callback! all-feeds)))) (defn add-nested-menu-item-to-dom! "Adds a nested menu item to the menu builder, removes the add-sub-item link from the parent element and if this is the first nested item also adds an add-item node to the newly created ul. Accepts a keyword determining the source (either :string or :dialog; determines whether the item is appended or prepended) as the first argument. Accepts all-feeds as the second argument, containing a sequence of singular feeds maps, parent-el, pointing to the element to add the item to, and link-label, link-type, internal-link and external-link values to pass to the add-item-details! function." [source all-feeds parent-el link-label link-type internal-link external-link] (domina/destroy! (domina/single-node (xpath parent-el "(.//a[@class='add-sub-item'])"))) (let [nested-menu-category-el (get-nested-menu-category-el! parent-el)] ;; if this is the first item, create an add-item node (when (zero? (count (domina/children nested-menu-category-el))) (domina/append! 
nested-menu-category-el (:editor/.add-item-node ui/snippets)) (events/listen! (xpath nested-menu-category-el "(.//li)[last()]/a") :click (partial display-add-menu-item-dialog-callback! parent-el all-feeds))) (if (= source :dialog) (domina/prepend! nested-menu-category-el (:editor/.nested-menu-item ui/snippets)) (domina/insert-before! (xpath nested-menu-category-el "(.//li)[last()]") (:editor/.nested-menu-item ui/snippets))) (add-item-details! (if (= source :dialog) ;; use first li as parent with a :dialog source (xpath nested-menu-category-el "(.//li[@class='nested-menu-item draggable'])[1]") ;; use last .nested-menu-item-li as parent if :string (xpath nested-menu-category-el (str "(.//li[@class='nested-menu-item draggable'])" "[last()]"))) all-feeds link-label link-type internal-link external-link) (ui/remove-dialog!))) (defn add-menu-item-to-dom! "Adds a top level menu item to the menu builder. Accepts all-feeds as the first argument, containing a sequence of singular feeds maps, parent-el, pointing to the element to add the item to, and link-label, link-type, internal-link and external-link values to pass to the add-item-details! function." [all-feeds link-label link-type internal-link external-link] (domina/append! (sel "#menu-container") (:editor/.top-level-menu-item ui/snippets)) (let [menu-item-el (xpath "(//li[@class='top-level-menu-item draggable'])[last()]")] (add-item-details! menu-item-el all-feeds link-label link-type internal-link external-link) (append-add-sub-item-link! (xpath menu-item-el "(.//span[@class='item-details'])[last()]") all-feeds)) (ui/remove-dialog!)) (defn handle-add-menu-item-dialog-completion "Validates the add-menu-item dialog. If the dialog doesn't validate an error message is displayed, if it does it is added the the menu builder and the dialog is closed. The next-child-parent argument is either nil (in case of interaction with the top level) or points to the parent element that the new child is supposed to be added to. 
The all-feeds argument is a sequence of singular feed maps, the dialog status is passed by the ui/display-dialog! function (either :ok or :cancel) and the final argument is a map with form values passed by the ui/display-dialog! function." [next-child-parent-el all-feeds dialog-status {:keys [external-link internal-link link-label link-type]}] (when (= :ok dialog-status) (cond (string/blank? link-label) (ui/display-error (sel "p#add-link-status") (:link-label-required-error error-messages)) (link-label-has-invalid-chars? link-label) (ui/display-error (sel "p#add-link-status") (:link-label-has-invalid-chars-error error-messages)) next-child-parent-el (add-nested-menu-item-to-dom! :dialog all-feeds next-child-parent-el link-label link-type internal-link external-link) :default (add-menu-item-to-dom! all-feeds link-label link-type internal-link external-link)))) (defn parse-dummy-menu-ul! "Reads the menu data from the dummy DOM object created by parse-menu-content-string! and its children. Accepts the dummy DOM element as the first and only argument (also used for the child UL if calling itself recursively)." [element] (map (fn [el] (let [children (domina/children el) last-child (last children)] {:label (domina/text (first children)) :uri (domina/attr (first children) "href") :children (when (= (.-tagName last-child) "UL") (parse-dummy-menu-ul! last-child))})) (domina/children element))) (defn parse-menu-content-string! "Takes string s and adds it to a dummy DOM elemenet to be parsed by the parse-dummy-menu-ul! function." [s] (if (string? s) (let [dummy-list-el (dom/createElement "ul")] (set! (.-innerHTML dummy-list-el) (subs s 4 (- (count s) 5))) (parse-dummy-menu-ul! dummy-list-el)))) (defn display-existing-menu-from-string! "Takes a menu string containing the HTML version of the menu and creates the menu builder UI for it, using a sequence of singular feed maps as the first argument to the function call." [all-feeds menu-string] (domina/destroy-children! 
(sel "ul#menu-container")) (doseq [{:keys [label uri children]} (parse-menu-content-string! menu-string)] (add-menu-item-to-dom! all-feeds label "internal" uri nil) (doseq [{:keys [label uri]} children] (add-nested-menu-item-to-dom! :string all-feeds (domina/single-node ;; force computation of Domina xpath lazy seq (xpath "(//li[@class='top-level-menu-item draggable'])[last()]")) label "internal" uri nil)))) (defn get-item-details-from-dom! "Extracts the link label and URI from the text value of the .link-label and .link-uri span elements in the provided parent-el DOM element. Returns a map with the link label mapped to :label and the URI mapped to :uri." [parent-el] {:label (domina/text (xpath parent-el ".//span[@class='link-label']")) :uri (domina/text (xpath parent-el ".//span[@class='link-uri']"))}) (defn get-menu-data-from-dom! "Converts the DOM structure of the menu builder to a sequence of maps with :label and :uri keys for the corresponding values, as well as a :children key for the children elements of top level nodes." [] (map (fn [top-level-menu-item-el] (assoc (get-item-details-from-dom! top-level-menu-item-el) :children (map get-item-details-from-dom! (reverse (domina/nodes (xpath top-level-menu-item-el ".//li[@class='nested-menu-item draggable']")))))) (reverse (domina/nodes (xpath "//li[@class='top-level-menu-item draggable']"))))) (defn get-menu-string-from-dom! "Returns the HTML string represenation of the menu builder." [] ;; FIX rewrite using Hiccup or Enfocus (let [dummy-list-el (dom/createElement "ul")] (doseq [{:keys [label uri children]} (get-menu-data-from-dom!)] (let [dummy-li-el (dom/createElement "li") dummy-link-el (dom/createElement "a")] (domina/set-attr! dummy-link-el "href" uri) (domina/set-text! dummy-link-el label) (domina/append! dummy-li-el dummy-link-el) (when (pos? (count children)) (let [dummy-sub-menu-el (dom/createElement "ul")] (domina/add-class! 
dummy-sub-menu-el "sub-menu") (doseq [{:keys [label uri]} children] (let [dummy-li-el (dom/createElement "li") dummy-link-el (dom/createElement "a")] (domina/set-attr! dummy-link-el "href" uri) (domina/set-text! dummy-link-el label) (domina/append! dummy-li-el dummy-link-el) (domina/append! dummy-sub-menu-el dummy-li-el))) (domina/append! dummy-li-el dummy-sub-menu-el))) (domina/append! dummy-list-el dummy-li-el))) (str "<ul id=\"menu\">"(.-innerHTML dummy-list-el) "</ul>"))) (defhandler display-add-menu-item-dialog-callback! "Callback for the add menu item links. If the first element is not nil it is expected to be the parent element of the child element that is being added. The second argument is a sequence of singular feed maps and the third argument is a Domina event." [next-child-parent-el all-feeds evt] (let [[{:keys [language name]} :as feeds] (filter #(not (= "image" (:default-document-type %))) all-feeds) feed-el (sel "select#internal-link-feed")] (document/get-documents-for-feed language name (fn [status {:keys [documents]}] (ui/display-dialog! "Add Menu Item" (:editor/add-menu-item-dialog-form ui/snippets) (sel "form#add-menu-item-dialog-form") {:auto-close? false :modal? true} (partial handle-add-menu-item-dialog-completion next-child-parent-el all-feeds)) (update-feed-options-in-dialog! feed-el feeds) (update-menu-links-options-in-dialog! documents) (create-dialog-feed-change-event! feed-el update-menu-links-options-in-dialog!) (events/listen! (sel "input#link-type-internal") :change toggle-add-menu-item-dialog-link-type-callback!) (events/listen! (sel "input#link-type-external") :change toggle-add-menu-item-dialog-link-type-callback!))))) (defn display-menu-editor-callback! "Displays the UI for the menu editor and creates the relevant events. The feeds argument contains a sequence containing all available feeds. The feed-map is a map containing feed information for this specific document. 
The status is either an HTTP status integer if editing an existing document or nil if editing a new document. The last argument contains a map that describes the document as stored in the database." [feeds feed-map status [{:keys [title content slug _id draft language feed created published previous-id datestamp]} :as document-states]] (let [new? (nil? status)] (ui/show! {:snippet :editor/back-to-overview-link :transformations [{:selector "//p[@id='back-to-overview-link']/a[1]" :attrs {:href (str "/admin/" language "/" feed "/overview")}}]} {:snippet :ui/status-message} {:snippet :ui/caption :transformations [{:selector "//h3[@id='caption']" :text (if new? "Create Menu" "Edit Menu")}]} {:snippet :editor/title-row :transformations [{:selector "//input[@id='title']" :value title} {:selector "//input[@id='draft']" :checked draft}]} {:snippet :editor/slug-row :transformations (concat [{:selector "//input[@id='slug']" :value slug}] (when new? [{:selector "//input[@id='custom-slug']" :remove-attr "disabled"} {:selector "//label[@id='custom-slug-label']" :remove-class "disabled"}]))} {:snippet :editor/menu-builder} {:snippet :editor/add-menu-item-container} {:snippet :editor/save-button-container}) (create-common-editor-events! new? feed-map) (display-existing-menu-from-string! feeds content) ;; TODO reimplement sorting ;; TODO add edit link feature ;; TODO add blank link (i.e. '#') option (events/listen! (sel "a#add-menu-item") :click (partial display-add-menu-item-dialog-callback! nil feeds)) (events/listen! (sel "#save-document") :click (partial save-document-button-click-callback! feed-map new? document-states)))) (defn display-editor! "Displays the editor for given language and feed-name strings, as well as document slug if an existing document is being edited." [language feed-name & [slug]] (swap! 
*file* dissoc :obj :data) (document/get-feeds-list (fn [status feeds] (let [feed-map (select-feed-by-language-and-name language feed-name feeds) editor-callback-fn (cond (= (:default-document-type feed-map) "menu") display-menu-editor-callback! :default display-editor-callback!)] (if slug (document/get-doc slug (partial editor-callback-fn feeds feed-map)) (editor-callback-fn feeds feed-map nil [{:language language :feed feed-name}]))))))
;; NOTE(review): stray top-level `true` — evaluating it is a no-op, and it sits
;; exactly between the end of the file's last defn and the file header below,
;; which suggests a file-concatenation/chunking artifact rather than
;; intentional code. Confirm and remove.
true
;; cljs/src/views/editor.cljs: UI implementation for document editor. ;; Copyright 2011-2013, Vixu.com, PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>. ;; ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. ;; You may obtain a copy of the License at ;; ;; http://www.apache.org/licenses/LICENSE-2.0 ;; ;; Unless required by applicable law or agreed to in writing, software ;; distributed under the License is distributed on an "AS IS" BASIS, ;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ;; See the License for the specific language governing permissions and ;; limitations under the License. (ns vix.views.editor (:use-macros [vix.crossover.macros :only [defhandler get-cdn-hostname]]) (:require [vix.document :as document] [vix.ui :as ui] [vix.util :as util] [clojure.string :as string] [domina :as domina] [domina.events :as events] [goog.dom :as dom] [goog.editor.Field :as Field] [goog.editor.plugins.BasicTextFormatter :as BasicTextFormatter] [goog.editor.plugins.RemoveFormatting :as RemoveFormatting] [goog.editor.plugins.UndoRedo :as UndoRedo] [goog.editor.plugins.ListTabHandler :as ListTabHandler] [goog.editor.plugins.SpacesTabHandler :as SpacesTabHandler] [goog.editor.plugins.EnterHandler :as EnterHandler] [goog.editor.plugins.HeaderFormatter :as HeaderFormatter] [goog.editor.plugins.LinkDialogPlugin :as LinkDialogPlugin] [goog.editor.plugins.LinkBubble :as LinkBubble] [goog.editor.Command :as buttons] [goog.ui.editor.DefaultToolbar :as DefaultToolbar] [goog.ui.editor.ToolbarController :as ToolbarController] [goog.crypt.base64 :as base64]) (:use [domina.css :only [sel]] [domina.xpath :only [xpath]])) (def error-messages {:document-title-required-error "The document title value is required." :slug-has-invalid-chars-error "Slugs can only contain '/', '-', '.' and alphanumeric characters." 
:slug-has-consecutive-dashes-or-slashes-error
   "Slugs shouldn't contain any consecutive '-' or '/' characters."
   :slug-required-error
   "A valid slug is required for every document."
   :slug-initial-slash-required-error
   "The slug needs to start with a '/'."
   :slug-not-unique-error
   "This slug is not unique (document already exists)."
   :could-not-save-document-error
   "Something went wrong while saving the document."
   :document-update-conflict-error
   (str "The document has been changed after you loaded it. "
        "Please copy your changes, refresh the page and edit the "
        "most recent version of the document.")
   :document-already-exists-error
   (str "There already is an existing document with the provided slug. "
        "Please use a different slug.")
   :invalid-filetype-error
   "The filetype for the file you are trying to upload isn't supported."
   :file-required-error
   "This editing mode requires a file to be added."
   :link-label-required-error
   "The link label is required."
   :link-label-has-invalid-chars-error
   (str "Labels can only contain '/', '-', '.', '?', '!' "
        "and alphanumeric characters.")})

(defn slug-has-invalid-chars?
  "Checks whether the slug contains any invalid characters (i.e. not a
  forward slash, dash, period or alphanumeric character)."
  [slug]
  ;; re-matches must consume the whole string, so any character outside
  ;; the class makes the match nil
  (nil? (re-matches #"[/\-a-zA-Z0-9\.]+" slug)))

(defn validate-slug
  "Validates the provided slug string and returns either a map with a
  :message corresponding to a human-readable error message and an
  :error key corresponding to the internal error keyword. If there is
  no error the function returns the keyword :pass.

  Slugs must:
  - not be blank (:slug-required-error),
  - not have invalid characters (:slug-has-invalid-chars-error;
    see the slug-has-invalid-chars? fn),
  - not have any consecutive dashes or slashes
    (:slug-has-consecutive-dashes-or-slashes-error), and,
  - start with a slash (:slug-initial-slash-required-error)."
  [slug]
  (cond (string/blank?
slug) {:error :slug-required-error :message (:slug-required-error error-messages)} (slug-has-invalid-chars? slug) {:error :slug-has-invalid-chars-error :message (:slug-has-invalid-chars-error error-messages)} (util/has-consecutive-dashes-or-slashes? slug) {:error :slug-has-consecutive-dashes-or-slashes-error :message (:slug-has-consecutive-dashes-or-slashes-error error-messages)} (not (= (first slug) "/")) {:error :slug-initial-slash-required-error :message (:slug-initial-slash-required-error error-messages)} :default :pass)) (defn link-label-has-invalid-chars? "Returns true if the provided label contains any invalid characters and false if it doesn't (\\u0080 to \\uffff, alphanumeric characters, periods, question marks, exclamation marks, spaces and dashes are valid)." [label] (nil? (re-matches #"[\u0080-\uffffa-zA-Z0-9.?! -]+" label))) (defn html-with-clean-image-uris "Accepts a string with an HTML value and converts any relative paths to images to absolute paths. Returns the improved HTML string." [html] (let [get-num-sub-paths (fn [s] (when (string? s) (count (re-seq #"../" s)))) unsorted-pairs (map (fn [re-pair] (let [orig-src (nth re-pair 1)] [orig-src (str (when-not (re-matches #"^http[s]{0,1}(.*)" orig-src) "/") (string/replace orig-src "../" ""))])) (re-seq #"<img src=\"(.*?)\"" html))] ;; need to sort, otherwise shorter links to the same image mess ;; up longer ones (loop [modified-html html img-uri-pairs (sort-by get-num-sub-paths unsorted-pairs)] (if (pos? (count img-uri-pairs)) (recur (string/replace modified-html (first (last img-uri-pairs)) (last (last img-uri-pairs))) (butlast img-uri-pairs)) modified-html)))) (defn create-editor-field! "Creates a goog.editor.Field instance with provided element-id." [element-id] (goog.editor.Field. element-id)) (defn register-editor-plugins! "Accepts a goog.editor.Field instance (created by the create-editor-field! 
fn) and registers the following goog.editor.plugins: BasicTextFormatter, RemoveFormatting, UndoRedo, ListTabHandler, SpacesTabHandler, EnterHandler, HeaderFormatter, LinkDialogPlugin and LinkBubble." [editor] (doto editor (.registerPlugin (goog.editor.plugins.BasicTextFormatter.)) (.registerPlugin (goog.editor.plugins.RemoveFormatting.)) (.registerPlugin (goog.editor.plugins.UndoRedo.)) (.registerPlugin (goog.editor.plugins.ListTabHandler.)) (.registerPlugin (goog.editor.plugins.SpacesTabHandler.)) (.registerPlugin (goog.editor.plugins.EnterHandler.)) (.registerPlugin (goog.editor.plugins.HeaderFormatter.)) (.registerPlugin (goog.editor.plugins.LinkDialogPlugin.)) (.registerPlugin (goog.editor.plugins.LinkBubble.)))) (defn create-editor-toolbar! "Creates a toolbar using the node with given element-id as a containe and optionally accepts buttons that are to be included as optional further elements. If no optional arguments are provided the default buttons are used. The available buttons are: :bold :italic :underline :font-color :background-color :font-face :font-size, :format-block :link :image :undo :redo :unordered-list :ordered-list :indent :outdent :justify-left :justify-center, :justify-right :subscript :superscript :strike-through, :remove-format :edit-html The default buttons are: :bold :italic :underline :strike-through :font-color :background-color :font-face :font-size :link :image :undo :redo :unordered-list :ordered-list :indent :outdent :justify-left :justify-center :justify-right :subscript :superscript :remove-format" [element-id & included-buttons] (let [buttons-map {:bold buttons/BOLD :italic buttons/ITALIC :underline buttons/UNDERLINE :font-color buttons/FONT_COLOR :background-color buttons/BACKGROUND_COLOR :font-face buttons/FONT_FACE :font-size buttons/FONT_SIZE :format-block buttons/FORMAT_BLOCK :link buttons/LINK :image buttons/IMAGE :undo buttons/UNDO :redo buttons/REDO :unordered-list buttons/UNORDERED_LIST :ordered-list buttons/ORDERED_LIST 
:indent buttons/INDENT :outdent buttons/OUTDENT :justify-left buttons/JUSTIFY_LEFT :justify-center buttons/JUSTIFY_CENTER :justify-right buttons/JUSTIFY_RIGHT :subscript buttons/SUBSCRIPT :superscript buttons/SUPERSCRIPT :strike-through buttons/STRIKE_THROUGH :remove-format buttons/REMOVE_FORMAT :edit-html buttons/EDIT_HTML} buttons (to-array (map #(get buttons-map %) (or included-buttons [:bold :italic :underline :strike-through :font-color :background-color :font-face :font-size :link :image :undo :redo :unordered-list :ordered-list :indent :outdent :justify-left :justify-center :justify-right :subscript :superscript :remove-format])))] (DefaultToolbar/makeToolbar buttons (util/get-element element-id)))) (def editor-fields (atom {})) (defn make-editable! "Turns the node with provided element-id into an editable field, using the node with the provided toolbar-id as a toolbar and swapping the value into the editor-fields map using the provided field-key. Sets the content to the provided html-content string." [field-key element-id toolbar-id html-content] (let [editor (create-editor-field! element-id)] (swap! editor-fields assoc field-key editor) (when html-content (. editor (setHtml false html-content true false))) (register-editor-plugins! editor) (goog.ui.editor.ToolbarController. editor (create-editor-toolbar! toolbar-id :bold :italic :underline :strike-through :format-block :link :image :undo :redo :unordered-list :ordered-list :indent :outdent :justify-left :justify-center :justify-right :subscript :superscript :remove-format)) (. editor (makeEditable)))) (def *file* (atom {})) (defn create-slug "Returns a slug using the provided title and using the :default-slug-format and :default-document-type keys from the feed map provided as the second argument through the vix.util/create-slug fn." [title {:keys [default-slug-format default-document-type] :as feed}] (util/create-slug default-slug-format title feed (util/date-now!) 
(if (and (= default-document-type "image") (:extension (:data @*file*))) (:extension (:data @*file*)) "html"))) (defn strip-filename-extension "Returns a the provided filename string, removing the extension if it has any." [filename] (let [pieces (re-find #"^(.*?)\.[a-zA-Z0-9]{1,10}$" filename)] (if (= (count pieces) 2) (nth pieces 1) filename))) (defn image-filename-to-title "Translates the provided image filename to a document title suggestion by joining the alphanumeric segments separated by spaces." [filename] (string/join " " (filter #(not (string/blank? %)) (string/split (strip-filename-extension filename) #"[^a-zA-Z0-9]")))) (defn select-feed-by-language-and-name "Returns the feed map with the value of the :language key matching the given language string and the value of the :name key matching the given feed-name string from the provided feeds sequence." [language feed-name feeds] (first (filter (fn [feed] (and (= feed-name (:name feed)) (= language (:language feed)))) feeds))) (defn get-editor-content "Returns the clean HTML string content from the editor with the provided key (e.g. :content or :description) or nil if that editor doesn't exist in the @editor-fields map." [field-key] (when-let [editor-field (field-key @editor-fields)] (string/replace (html-with-clean-image-uris (.getCleanContents editor-field editor-field)) #"id=\"image-dialog-preview\"" ""))) (defn parse-related-document-id "Parses a related document id attribute value (e.g. related-image@@@/en/images/foo@@@Foo) which is either provided as a string or as a Domina element with the value in the id attribute. Returns a map with the document slug mapped to the :slug key and the title mapped to the :title key." [el-or-string] (let [[_ slug title] (string/split (if (string? el-or-string) el-or-string (domina/attr el-or-string :id)) #"@@@")] {:slug slug :title title})) (defn get-document-relations! 
"Retrieves a vector of related pages from the DOM, using the provided
  domina-css-selector string to select the desired elements."
  [domina-css-selector]
  (vec (map parse-related-document-id
            (domina/nodes (sel domina-css-selector)))))

(defn get-icon-image!
  "Retrieves a map with the icon image details from the DOM, with a
  :title key for the title and a :slug key for the slug. Returns nil
  when no icon container element is present."
  []
  (when-let [icon-container-el (domina/single-node
                                (sel ".image-icon-container"))]
    (parse-related-document-id icon-container-el)))

(defn get-document-data-from-dom!
  "Returns a map with the document data extracted from the DOM using the
  provided feed-map for the language and feed-name values, as well as to
  determine the document type."
  [feed-map]
  (let [form-fields [:title :subtitle :slug :start-time :end-time]]
    (merge (zipmap form-fields
                   (map (fn [field-name]
                          ;; not every editor mode renders every form field,
                          ;; so a missing input maps to nil instead of raising
                          (try
                            (ui/get-form-value (name field-name))
                            (catch js/Error e
                              nil)))
                        form-fields))
           {:language (:language feed-map)
            :feed (:name feed-map)
            :draft (ui/is-checked? "draft")
            :content (if (= (:default-document-type feed-map) "menu")
                       (get-menu-string-from-dom!)
                       (get-editor-content :content))
            :description (get-editor-content :description)
            :icon (get-icon-image!)
            :related-pages (get-document-relations! ".related-page")
            :related-images (get-document-relations! ".related-image")})))

(defn save-document!
  "Saves the document and either returns the new version or an error
  message. Takes a singular feed map, a boolean value for new? that
  determines if it is a new or edited document, the
  current-document-state (a map if editing an existing document), the
  future-state map with the output from the get-document-data-from-dom!
  function and slug-validation with the output from a validate-slug
  call."
  [feed-map new? current-document-state future-state slug-validation]
  (let [save-button-el (sel "#save-document")]
    ;; disable the save button while the request is in flight to avoid
    ;; duplicate submissions; re-enabled again in the callback below
    (domina/set-attr! save-button-el :disabled "disabled")
    (document/append-to-document
     (merge future-state
            (if new?
              {:action :create}
              {:action :update
               :previous-id (:_id current-document-state)}))
     (fn [status document-states]
       (let [[{:keys [slug title]} & _] document-states]
         (cond
          ;; 201: newly created; swap the URI for the edit URI
          (= status 201)
          (util/navigate-replace-state (str (:language feed-map)
                                            "/"
                                            (:name feed-map)
                                            "/edit"
                                            slug)
                                       (str "Edit \"" title "\""))
          ;; 200: updated; reload the editor with the fresh document state
          (= status 200)
          (display-editor! (:language feed-map) (:name feed-map) slug)
          :else
          (ui/display-error
           (sel "#status-message")
           (cond
            (= (str "There is an existing document "
                    "with the provided slug.")
               document-states)
            (:document-already-exists-error error-messages)
            (= (str "This document map doesn't contain "
                    "the most recent :previous-id.")
               document-states)
            (:document-update-conflict-error error-messages)
            ;; FIX: was (:could-not-save-feed-error error-messages), a key
            ;; that doesn't exist in the error-messages map, so the generic
            ;; save-failure fallback displayed an empty message.
            :default
            (:could-not-save-document-error error-messages))))
         (domina/remove-attr! save-button-el :disabled))))))

(defn save-image-document!
  "Wrapper for save-document! that deals with image documents. See the
  save-document! function for the arguments. If the user is trying to
  add a new image without providing a file an error message is
  displayed."
  [feed-map new? current-document-state future-state slug-validation]
  (let [file (:obj @*file*)
        reader (new js/FileReader)]
    (cond
     ;; if the image has been changed, load it and call save-document!:
     (not (nil? file))
     (do
       (set! (.-onload reader)
             (fn [evt]
               (save-document! feed-map
                               new?
                               current-document-state
                               (merge future-state
                                      {:attachment
                                       {:type (.-type file)
                                        :data (base64/encodeString
                                               (.-result (.-target evt)))}})
                               slug-validation)))
       (. reader (readAsBinaryString file)))
     ;; allow editing existing documents without changing the image:
     (not new?)
     (save-document! feed-map
                     new?
                     current-document-state
                     (merge future-state
                            (select-keys current-document-state
                                         [:attachments]))
                     slug-validation)
     ;; display an error message if the user is trying to save a new
     ;; document without providing an image:
     :default
     (ui/display-error (sel "#status-message")
                       (:file-required-error error-messages)))))

(defhandler save-document-button-click-callback!
"Performs validation when the default-slug-format value changes and displays and removes errors when necessary. If the validation is successful the save-document! fn is called. Takes a singular feed map, a boolean value for new? that determines if this is a fresh or existing document, a sequence of document states if editing an existing document and a Domina event object." [feed-map new? [current-document-state & _] evt] (let [status-message-el (sel "#status-message") future-state (get-document-data-from-dom! feed-map) slug-validation (validate-slug (:slug future-state))] (cond (string/blank? (:title future-state)) (ui/display-error status-message-el (:document-title-required-error error-messages)) (not (= :pass slug-validation)) (ui/display-error status-message-el (:message slug-validation)) (= (:default-document-type feed-map) "image") (save-image-document! feed-map new? current-document-state future-state slug-validation) :default (when-not (= (select-keys current-document-state (keys future-state)) future-state) ;; don't save if there are no changes (save-document! feed-map new? current-document-state future-state slug-validation))))) (defn detect-duplicate-custom-slug-callback! "Detects if the custom slug is a duplicate and adds/removes UI error messages accordingly. Used as a callback for vix.document/get-doc." [status [doc-current-state & _]] (let [slug-el (sel "#slug") slug-label-el (sel "#slug-label") status-el (sel "#status-message")] (if (and (= status 200) (not (= (:action doc-current-state) :delete))) (ui/display-error status-el (:slug-not-unique-error error-messages) slug-el slug-label-el) (ui/remove-error status-el slug-el slug-label-el)))) (defn increment-slug "Increments the provided slug string by either incrementing the last character if it is a number or adding '-2' to the end of the string." 
[slug]
 ;; "foo-3" -> "foo-4" (trailing -<digits> is incremented), otherwise
 ;; "foo" -> "foo-2".
 (if-let [slug-matches (re-matches #"(.*?)-([0-9]+)$" slug)] (str (nth slug-matches 1) "-" (inc (js/parseInt (last slug-matches)))) (str slug "-2")))

(defhandler slug-validation-callback! "Validates custom slugs. Displays an error if required, otherwise the doc with the desired slug is requested and validated by the detect-duplicate-custom-slug-callback!." [evt] (when (ui/is-checked? "custom-slug") (let [slug-el (events/target evt) slug (ui/get-form-value slug-el) slug-label-el (sel "#slug-label") status-el (sel "#status-message") slug-validation (validate-slug slug)] (if (= :pass slug-validation) (do (ui/remove-error status-el slug-el slug-label-el) (document/get-doc slug detect-duplicate-custom-slug-callback!)) (ui/display-error status-el (:message slug-validation) slug-el slug-label-el)))))

;; Accumulates candidate slugs while handle-duplicate-slug-callback! probes
;; the server for a free one; reset by synchronize-slug.
(def slugs (atom #{}))

(defn handle-duplicate-slug-callback! "Automatically appends '-2' to the end of a duplicate slug, or increments n if the slug already ends with '-n' where n is an integer. Keeps calling itself until no more duplicates are found and finally sets the unique value if that value doesn't match the original value." [status [doc-current-state & _]] (if (and (= status 200) (not (= (:action doc-current-state) :delete)))
 ;; add incremented slug to @slugs and rerun the get-doc call with
 ;; the new slug and this function as the callback:
 (let [new-slug (increment-slug (document/add-initial-slash (:slug doc-current-state)))] (swap! slugs conj new-slug) (document/get-doc new-slug handle-duplicate-slug-callback!))
 ;; set the final slug value if needed:
 ;; (the longest accumulated slug is the last one generated)
 (let [final-slug (last (sort-by count @slugs)) slug-el (sel "#slug")] (when-not (= (ui/get-form-value slug-el) final-slug) (ui/set-form-value slug-el final-slug)))))

(defn synchronize-slug "Automatically generates a slug value based on the value of the provided title element." [title-el feed]
 ;; only auto-generate while the custom-slug checkbox is unchecked
 (let [new-slug (create-slug (ui/get-form-value title-el) feed)] (when (nil? (ui/get-form-value "custom-slug")) (reset!
slugs #{new-slug}) (document/get-doc new-slug handle-duplicate-slug-callback!))))

(defhandler custom-slug-toggle-callback! "Toggles the slug input between editable (i.e. custom) and disabled." [feed evt] (if (nil? (ui/get-form-value (events/target evt))) (do (ui/disable-element "slug") (ui/disable-element "slug-label") (synchronize-slug (sel "#title") feed)) (do (ui/enable-element "slug") (ui/enable-element "slug-label"))))

(defhandler title-edited-callback! "Automatically generates a slug for current title if custom slug isn't checked." [feed evt] (synchronize-slug (events/target evt) feed))

(defhandler handle-datefield-click-callback! "Displays a datepicker on click." [evt] (ui/display-datepicker (fn [{:keys [date-string date-object hour minute]}] (ui/set-form-value (events/target evt) (if date-object (str date-string " " hour ":" minute) ""))) true))

(defn display-image-preview "Displays a preview of the image contained in given file object with the provided title." [file title] (let [reader (new js/FileReader)] (set! (.-onload reader) (fn [evt] (domina/set-attrs! (sel "#image-preview") {:title title :src (.-result (.-target evt))}))) (. reader (readAsDataURL file))))

;; Only these three image MIME types are accepted by the drop handler below.
(def mimetype-to-extension-map {:image/png "png" :image/gif "gif" :image/jpeg "jpg"})

;; Holds the most recently dropped image file under :obj plus derived
;; metadata under :data; consumed by save-image-document! above.
(def *file* (atom {}))

(defn handle-image-drop-callback! "Is called when a file is dropped on the image drop target. If the file is recognized as an image it is displayed in the interface and swapped into the *file* atom (under the :obj key). Otherwise an invalid filetype message is shown." [current-feed-state new? evt]
 ;; domina/prevent-default & domina/stop-propagation don't work for
 ;; these events, so not using defhandler, but a regular function.
 (do (. evt (preventDefault)) (.
evt (stopPropagation)))
 ;; first file of the drop's dataTransfer, if any
 (if-let [file (aget (.-files (.-dataTransfer evt)) 0)] (let [status-el (sel "#status-message") image-information-el (sel "#image-information-container") title (image-filename-to-title (.-name file))] (if-let [extension (mimetype-to-extension-map (keyword (.-type file)))] (do (swap! *file* assoc :obj file :data {:extension extension}) (ui/remove-error status-el) (ui/set-form-value (dom/getElement "title") title) (display-image-preview file title) (domina/remove-class! image-information-el "hide") (domina/set-text! (sel "#image-filename") (.-name file)) (domina/set-text! (sel "#image-filetype") (.-type file))
 ;; size is reported in units of 1000 bytes (kB)
 (domina/set-text! (sel "#image-size") (format "%.2f"(/ (.-size file) 1000))) (when new? (synchronize-slug (sel "#title") current-feed-state))) (do (swap! *file* dissoc :obj :data) (domina/add-class! image-information-el "hide") (ui/display-error status-el (:invalid-filetype-error error-messages)))))))

(defn remove-self-from-documents "Removes the map with the given self-slug string as a :slug value from the provided sequence of document maps." [self-slug documents] (remove #(= (:slug %) self-slug) documents))

(defn remove-existing-from-documents "Removes any documents with a :slug value that is part of the existing-documents vector." [existing-documents documents] (remove (fn [{:keys [slug]}] (not (nil? (some #{slug} (map :slug existing-documents))))) documents))

(defn format-option "Replaces keyword values in the given option-format sequence with the string value for that keyword in the provided option-map and leaves string values in the option-format sequence as-is. Returns a string version of the new sequence." [option-map option-format] (apply str (map (fn [v] (if (string? v) v (option-map v))) option-format)))

(defn update-options-in-dialog! "Adds select options to the DOM.
Takes the select-el, which is the Domina element node the options are being added to, the snippet-key, which is the key for the HTML snippets in the ui/snippets map, the option-xpath-selector, which is the xpath selector required to select the newly added option, option-maps, which is a sequence of maps containing values for the option, title-format and value-format, which are sequences describing the format for the particular values (see the format-option function for the structure of the format sequences)." [select-el snippet-key option-xpath-selector option-maps title-format value-format]
 ;; replaces ALL existing options before appending the new ones
 (domina/destroy-children! select-el) (doseq [option option-maps] (domina/append! select-el (ui/snippets snippet-key)) (doto (xpath option-xpath-selector) (domina/set-text! (format-option option title-format)) (domina/set-attr! :value (format-option option value-format)))))

;; Option values encode [language, name] as a printable pair consumed by
;; util/pair-from-string in create-dialog-feed-change-event! below.
(defn update-feed-options-in-dialog! "Appends select options for given feeds to the provided feeds-el." [feeds-el feeds] (update-options-in-dialog! feeds-el :editor/.feed-select-option "(//option[@class='feed-select-option'])[last()]" feeds [:title] ["['" :language "','" :name "']"]))

;; Option values use the "option@@@<slug>@@@<title>" convention parsed by
;; parse-related-document-id.
(defn update-document-options-in-dialog! "Appends provided select options for given documents to documents-el, while removing the document matching self-slug. Any documents in the removable-documents vector are removed from the documents list." [self-slug documents-el documents removable-documents] (update-options-in-dialog! documents-el :editor/.document-select-option "(//option[@class='document-select-option'])[last()]" (remove-existing-from-documents removable-documents (remove-self-from-documents self-slug documents)) [:title] ["option@@@" :slug "@@@" :title]))

(defn update-menu-links-options-in-dialog! "Adds the provided sequence of document maps to the dialog as select options." [documents] (update-options-in-dialog!
(sel "select#internal-link")
 :editor/.document-select-option
 "(//option[@class='document-select-option'])[last()]"
 ;; menu links use the bare :slug as the option value (no @@@ encoding)
 documents
 [:title]
 [:slug]))

(defhandler remove-parent-callback!
  "Event callback that removes the parent of the event target DOM node."
  [evt]
  (domina/destroy! (xpath (events/target evt) "..")))

(defn add-related-page!
  "Adds a li element for the given page to the DOM. This function is
   used as a ui/display-dialog callback!, which provides the
   dialog-status (either :ok or :cancel) and a map with the form fields
   and their values as callback arguments."
  [dialog-status {:keys [internal-link-feed internal-link]}]
  (when (and (= dialog-status :ok) (not (nil? internal-link)))
    (domina/append! (sel "#related-pages-container")
                    (:editor/.related-page ui/snippets))
    (let [{:keys [slug title]} (parse-related-document-id internal-link)]
      ;; id follows the "related-page@@@<slug>@@@<title>" convention used
      ;; by display-editor-callback! for server-loaded relations
      (doto (xpath "(//li[@class='related-page'])[last()]")
        (domina/set-attr! :id (str "related-page@@@" slug "@@@" title)))
      (doto (xpath "(//span[@class='related-page-title'])[last()]")
        (domina/set-text! title))
      (events/listen!
       (xpath "(//a[@class='related-page-delete-link'])[last()]")
       :click
       remove-parent-callback!))))

(defhandler image-preview-link-click-callback!
  "Handler for clicks on an image preview link that displays the image
   in a dialog."
  [evt]
  (let [{:keys [title slug]} (parse-related-document-id
                              (xpath (events/target evt) ".."))]
    (ui/display-dialog! "Image Preview"
                        (:editor/image-preview-in-dialog-container
                         ui/snippets)
                        nil
                        {:auto-close? true :modal? true}
                        (fn [])
                        :cancel)
    ;; slug carries a leading slash, so CDN hostname + slug forms the URI
    (domina/set-attrs! (sel "#image-dialog-preview")
                       {:src (str (get-cdn-hostname) slug)
                        :alt title
                        :title title})))

(defn add-related-image!
  "Adds a li element for the given image to the DOM. This function is
   used as a ui/display-dialog callback, which provides the
   dialog-status (either :ok or :cancel) and a map with the form fields
   and their values as callback arguments."
  [dialog-status {:keys [internal-link-feed internal-link]}]
  (when (and (= dialog-status :ok) (not (nil? internal-link)))
    (domina/append! (sel "#related-images-container")
                    (:editor/.related-image ui/snippets))
    (let [{:keys [slug title]} (parse-related-document-id internal-link)]
      ;; FIX: was "related-page@@@" (copy-paste from add-related-page!);
      ;; display-editor-callback! stamps server-loaded related-image lis
      ;; with "related-image@@@<slug>@@@<title>", so use the same prefix
      ;; here for consistency.
      (doto (xpath "(//li[@class='related-image'])[last()]")
        (domina/set-attr! :id (str "related-image@@@" slug "@@@" title)))
      (doto (xpath "(//span[@class='related-image-title'])[last()]")
        (domina/set-text! title))
      (events/listen!
       (xpath "(//a[@class='related-image-delete-link'])[last()]")
       :click
       remove-parent-callback!)
      (events/listen!
       (xpath "(//a[@class='related-image-preview-link'])[last()]")
       :click
       image-preview-link-click-callback!))))

(defn create-icon-image-events!
  "Creates delete and preview events for image icon."
  []
  (events/listen! (sel "#icon-image-delete-link")
                  :click
                  remove-parent-callback!)
  (events/listen! (sel "#icon-image-preview-link")
                  :click
                  image-preview-link-click-callback!))

(defn add-icon-image!
  "Adds a li element for the given image to the DOM. This function is
   used as a ui/display-dialog callback, which provides the
   dialog-status (either :ok or :cancel) and a map with the form fields
   and their values as callback arguments."
  [dialog-status {:keys [internal-link-feed internal-link]}]
  (when (and (= dialog-status :ok) (not (nil? internal-link)))
    (let [{:keys [slug title]} (parse-related-document-id internal-link)]
      ;; a document has at most one icon: destroy any existing container
      (domina/destroy! (sel ".image-icon-container"))
      (domina/append! (sel "#icon-container")
                      (:editor/.image-icon-container ui/snippets))
      (domina/set-attr! (sel ".image-icon-container")
                        :id
                        (str "icon@@@" slug "@@@" title))
      (domina/set-text! (sel "#icon-image-title") title)
      (create-icon-image-events!))))

(defn update-dialog-image-preview!
  "Sets the image preview in the image selection dialog to the slug and
   title of the provided document map."
  [{:keys [slug title]}]
  ;; no-op when there is no document to preview (slug nil)
  (when (string? slug)
    (domina/set-attrs!
(sel "#image-dialog-preview")
     {:src (str (get-cdn-hostname) slug)
      :alt title
      :title title})))

(defn create-dialog-feed-change-event!
  "Creates an event that tracks changes on feed-el and executes the
   update-document-options-fn if a change happens, which will update
   the document select field. The update-document-options-fn needs to
   accept the retrieved document sequence as the first and only
   value. If the dialog contains an image-dialog-preview image the
   update-dialog-image-preview! function is executed as well, using the
   optional relations argument to purge already related images from the
   options."
  [feed-el update-document-options-fn & [relations]]
  (events/listen!
   feed-el
   :change
   (fn [evt]
     ;; the option value is a printable [language name] pair (see
     ;; update-feed-options-in-dialog!)
     (let [[language feed-name] (util/pair-from-string
                                 (ui/get-form-value (events/target evt)))]
       (document/get-documents-for-feed
        language
        feed-name
        (fn [status {:keys [documents]}]
          (update-document-options-fn documents)
          ;; only image dialogs contain the preview img element
          (when (domina/single-node (sel "img#image-dialog-preview"))
            (update-dialog-image-preview!
             (first (remove-existing-from-documents (or relations [])
                                                    documents))))))))))

(defn fill-document-select-dialog!
  "Fills the internal-link and internal-link-feeds select fields in a
   previously created dialog and adds an event listener that loads the
   new documents if another feed is selected. Takes node-selector (nil
   or a Domina CSS selector; to pass to the get-document-relations!
   function), self-slug with the slug for the active document, feeds
   with a sequence of feed maps and documents with a sequence of
   document maps for the first feed."
  [node-selector self-slug feeds documents]
  (let [internal-link-el (sel "#internal-link")
        internal-link-feed-el (sel "#internal-link-feed")
        relations (if node-selector
                    (get-document-relations! node-selector)
                    [])]
    (update-feed-options-in-dialog! internal-link-feed-el feeds)
    ;; FIX: reuse the relations binding instead of recomputing the
    ;; identical (if node-selector (get-document-relations! ...) [])
    ;; expression a second time (it reads the same DOM state).
    (update-document-options-in-dialog! self-slug
                                        internal-link-el
                                        documents
                                        relations)
    (create-dialog-feed-change-event!
     internal-link-feed-el
     #(update-document-options-in-dialog! self-slug internal-link-el % relations)
     relations)))

(defhandler add-related-page-link-callback!
  "Handler for clicks on the add-related-page link that displays a
   dialog that can be used to select a related page for the
   document. Takes self-slug, the slug of the current document, to
   filter it out of the results, as well as a sequence of singular feed
   maps called feeds and a Domina event called evt."
  [self-slug all-feeds evt]
  ;; related pages come from non-image feeds only
  (let [[{:keys [language name]} :as feeds]
        (filter #(not (= "image" (:default-document-type %))) all-feeds)]
    (document/get-documents-for-feed
     language
     name
     (fn [status {:keys [documents]}]
       (ui/display-dialog! "Add Related Page"
                           (:editor/add-related-page-dialog-form ui/snippets)
                           (sel "#add-related-page-dialog-form")
                           {:auto-close? true :modal? true}
                           add-related-page!)
       (fill-document-select-dialog! ".related-page"
                                     self-slug
                                     feeds
                                     documents)))))

(defn create-image-preview-event!
  "Creates the event that changes the preview image when a new image is
   selected in an image dialog."
  []
  (events/listen!
   (sel "select#internal-link")
   :change
   (fn [evt]
     (update-dialog-image-preview!
      (parse-related-document-id (ui/get-form-value (events/target evt)))))))

(defhandler add-related-image-link-callback!
  "Handler for clicks on the add-related-image link that displays a
   dialog that can be used to select a related image for the
   document. Takes self-slug, the slug of the current document, to
   filter it out of the results, as well as a sequence of singular feed
   maps called feeds and a Domina event called evt."
  [self-slug all-feeds evt]
  ;; related images come from image feeds only
  (let [[{:keys [language name]} :as feeds]
        (filter #(= "image" (:default-document-type %)) all-feeds)]
    (document/get-documents-for-feed
     language
     name
     (fn [status {:keys [documents]}]
       (ui/display-dialog! "Add Related Image"
                           (:editor/add-image-dialog-form ui/snippets)
                           (sel "#add-image-dialog-form")
                           {:auto-close? true :modal? true}
                           add-related-image!)
       (update-dialog-image-preview!
(first (remove-existing-from-documents (get-document-relations! ".related-image") documents))) (create-image-preview-event!) (fill-document-select-dialog! ".related-image" self-slug feeds documents)))))

(defhandler add-icon-image-link-callback! "Handler for clicks on the add-icon-image-link that displays a dialog that can be used to select an icon image for the document. Takes self-slug, the slug of the current document, to filter it out of the results, as well as a sequence of singular feed maps called feeds and a Domina event called evt." [self-slug all-feeds evt] (let [[{:keys [language name]} :as feeds] (filter #(= "image" (:default-document-type %)) all-feeds)] (document/get-documents-for-feed language name (fn [status {:keys [documents]}] (ui/display-dialog! "Add Icon Image" (:editor/add-image-dialog-form ui/snippets) (sel "#add-image-dialog-form") {:auto-close? true :modal? true} add-icon-image!) (update-dialog-image-preview! (first documents)) (create-image-preview-event!)
 ;; node-selector nil: icons have no existing-relations list to purge
 (fill-document-select-dialog! nil self-slug feeds documents)))))

(defhandler editor-add-image-button-callback! "Handler for clicks on the add image button in editor toolbars that displays a non-modal image selection dialog. Images can be dragged into the editor field from the dialog. The all-feeds argument is a sequence of singular feed maps and the evt argument contains the Domina event object." [all-feeds evt]
 ;; only open when no other (modal) dialog is already on screen
 (when-not (domina/single-node (sel "div.modal-dialog")) (let [[{:keys [language name]} :as feeds] (filter #(= "image" (:default-document-type %)) all-feeds)] (document/get-documents-for-feed language name (fn [status {:keys [documents]}]
 ;; NOTE(review): this dialog reuses add-icon-image! as its callback and
 ;; passes :ok as an extra argument — confirm that is intended for the
 ;; drag-to-editor flow rather than a copy-paste from the icon dialog.
 (ui/display-dialog! "Add Image" (:editor/add-image-dialog-form ui/snippets) (sel "#add-image-dialog-form") {:auto-close? true :modal? false} add-icon-image! :ok) (domina/remove-class! (sel "h5#drag-to-editor-instruction") "hide") (update-dialog-image-preview! (first documents)) (create-image-preview-event!) (fill-document-select-dialog!
nil "" feeds documents))))))

(defn create-common-editor-events! "Creates events that are shared by all editor modes. Takes a boolean value new? which determines if it concerns a fresh document and feed-map, a singular map containing feed information for the active document." [new? feed-map]
 ;; slug auto-generation from the title only applies to new documents
 (when new? (events/listen! (sel "#title") :keyup (partial title-edited-callback! feed-map))) (events/listen! (sel "#custom-slug") :change (partial custom-slug-toggle-callback! feed-map)) (events/listen! (sel "#slug") :keyup slug-validation-callback!))

(defn display-editor-callback! "Displays the editor UI and creates the relevant events. The feeds argument contains a sequence containing all available feeds. The feed-map is a map containing feed information for this specific document. The status is either an HTTP status integer if editing an existing document or nil if editing a new document. The last argument contains a map that describes the document as stored in the database." [feeds {:keys [default-document-type] :as feed-map} status [{:keys [title content slug _id description draft related-pages language feed subtitle icon related-images created start-time-rfc3339 start-time published previous-id end-time-rfc3339 end-time datestamp attachments]} :as document-states]]
 ;; nil status <=> creating a new document (no server round-trip yet)
 (let [new? (nil? status)] (ui/show! {:snippet :editor/back-to-overview-link :transformations [{:selector "//p[@id='back-to-overview-link']/a[1]" :attrs {:href (str "/admin/" language "/" feed "/overview")}}]} {:snippet :ui/status-message}
 ;; image feeds additionally render the drop target and, for existing
 ;; documents, an inline base64 preview of the stored attachment
 (when (= default-document-type "image") [{:snippet :editor/image-drop-target} {:snippet :editor/image-information-container :transformations [(when slug {:selector "//img[@id='image-preview']" :attrs {:src (str "data:" (get-in attachments [:original :type]) ";base64," (get-in attachments [:original :data])) :alt title :name title}})]}]) {:snippet :ui/caption :transformations [{:selector "//h3[@id='caption']" :text (if new?
"Create Document" "Edit Document")}]}
 ;; title / draft checkbox row
 {:snippet :editor/title-row :transformations [{:selector "//input[@id='title']" :value title} {:selector "//input[@id='draft']" :checked draft}]} {:snippet :editor/subtitle-row :transformations [{:selector "//input[@id='subtitle']" :value subtitle}]}
 ;; slug row; the custom-slug toggle is only enabled for new documents
 {:snippet :editor/slug-row :transformations (concat [{:selector "//input[@id='slug']" :value slug}] (when new? [{:selector "//input[@id='custom-slug']" :remove-attr "disabled"} {:selector "//label[@id='custom-slug-label']" :remove-class "disabled"}]))}
 ;; event documents additionally get start/end time rows
 (when (= default-document-type "event") [{:snippet :editor/start-time-row :transformations [{:selector "//input[@id='start-time']" :value start-time}]} {:snippet :editor/end-time-row :transformations [{:selector "//input[@id='end-time']" :value end-time}]}])
 ;; icon container (with existing icon rendered when present), plus the
 ;; related pages/images lists rebuilt from the stored document
 [(merge {:snippet :editor/icon-container} (when-not (nil? icon) {:children [{:snippet :editor/.image-icon-container :transformations [{:selector "//div[@class='image-icon-container']" :attrs {:id (str "icon@@@" (:slug icon) "@@@" (:title icon))}} {:selector "//span[@id='icon-image-title']" :text (:title icon)}]}]})) {:snippet :editor/document-relations :children (doall (map (fn [{:keys [title slug]}] {:snippet :editor/.related-page :parent "//ul[@id='related-pages-container']" :transformations [{:selector "(//li[@class='related-page'])[last()]" :attrs {:id (str "related-page@@@" slug "@@@" title)}} {:selector "(//span[@class='related-page-title'])[last()]" :text title}]}) related-pages))} {:snippet :editor/image-relations :children (doall (map (fn [{:keys [title slug]}] {:snippet :editor/.related-image :parent "//ul[@id='related-images-container']" :transformations [{:selector "(//li[@class='related-image'])[last()]" :attrs {:id (str "related-image@@@" slug "@@@" title)}} {:selector "(//span[@class='related-image-title'])[last()]" :text title}]}) related-images))} {:snippet :editor/editor-images} (when (= default-document-type "with-description") {:snippet
:editor/description-container}) {:snippet :editor/content-container}] {:snippet :editor/save-button-container})
 ;; rich-text fields: content always; description only for
 ;; "with-description" documents
 (make-editable! :content "content" "toolbar" content) (when (= default-document-type "with-description") (make-editable! :description "description-content" "description-toolbar" description))
 ;; raw addEventListener because Domina's event helpers don't cover
 ;; drag & drop here (see handle-image-drop-callback!)
 (when-let [drop-target-el (dom/getElement "image-drop-target")] (. drop-target-el (addEventListener "drop" (partial handle-image-drop-callback! feed-map new?) false)) (. drop-target-el (addEventListener "dragover" (fn [e] (. e (preventDefault)) (. e (stopPropagation))) false)))
 (create-common-editor-events! new? feed-map) (create-icon-image-events!) (events/listen! (sel "#start-time") :click handle-datefield-click-callback!) (events/listen! (sel "#end-time") :click handle-datefield-click-callback!) (events/listen! (sel "#add-icon-image-link") :click (partial add-icon-image-link-callback! slug feeds)) (events/listen! (sel ".related-page-delete-link") :click remove-parent-callback!) (events/listen! (sel ".related-image-delete-link") :click remove-parent-callback!) (events/listen! (sel ".related-image-preview-link") :click image-preview-link-click-callback!) (events/listen! (sel "#add-related-page-link") :click (partial add-related-page-link-callback! slug feeds)) (events/listen! (sel "#add-related-image-link") :click (partial add-related-image-link-callback! slug feeds))
 ;; the editor toolbar's image button is located via its icon div
 (events/listen! (xpath "//div[@class='tr-icon tr-image']/../..") :click (partial editor-add-image-button-callback! feeds)) (events/listen! (sel "#save-document") :click (partial save-document-button-click-callback! feed-map new? document-states))))

(defhandler toggle-add-menu-item-dialog-link-type-callback! "Toggles between showing the internal-link-row and the external-link-row, depending on the link-type field value." [evt] (if (= (ui/get-form-value (events/target evt)) "internal") (do (domina/remove-class! (sel "tr#internal-link-row") "hide") (domina/add-class!
(sel "tr#external-link-row") "hide")) (do (domina/remove-class! (sel "tr#external-link-row") "hide") (domina/add-class! (sel "tr#internal-link-row") "hide"))))

(defn append-add-sub-item-link! "Adds an add-sub-item link the the provided item-details-el, accepting a sequence of singular feed maps called as the second argument to pass to the display-add-menu-item-dialog-callback! function." [item-details-el all-feeds] (domina/append! item-details-el (:editor/.add-sub-item ui/snippets)) (events/listen! (xpath item-details-el "(.//a[@class='add-sub-item'])[last()]") :click (partial display-add-menu-item-dialog-callback! (domina/single-node (xpath item-details-el "..")) all-feeds)))

(defhandler delete-menu-item-callback! "Deletes the menu item containing the active delete item link, as well as the ul containing it if it is the only item node left. If the latter is true, the add-sub-item link is also re-added to the parent. Accepts a sequence containing singular feed maps as the first argument and the triggered Domina event as the second argument." [all-feeds evt]
 ;; NOTE(review): "only item node left" is detected as a
 ;; nested-menu-category ul with exactly 2 children (the item li plus the
 ;; trailing add-item node) — verify against the DOM structure built by
 ;; add-nested-menu-item-to-dom!.
 (let [three-levels-up (xpath (events/target evt) "../../..") four-levels-up (xpath three-levels-up "..")] (if (and (= (domina/attr three-levels-up "class") "nested-menu-category") (= (count (domina/children three-levels-up)) 2)) (do
 ;; re-add add-sub-item to parent if deleting the whole category
 (append-add-sub-item-link! (domina/single-node (xpath four-levels-up "(.//span[@class='item-details'])")) all-feeds)
 ;; and remove the sub-menu ul
 (domina/destroy! three-levels-up) )
 ;; just remove the li containing the current item if it has siblings
 (domina/destroy! (xpath (events/target evt) "../..")))))

(defn get-nested-menu-category-el! "Returns the .nested-menu-category element for parent-el, creating it if necessary." [parent-el] (or (domina/single-node (xpath parent-el "(.//ul[@class='nested-menu-category'])[last()]")) (do (domina/append!
parent-el (:editor/.nested-menu-category ui/snippets))
 ;; recurse once: the freshly appended ul is now found by the first branch
 (get-nested-menu-category-el! parent-el))))

(defn add-item-details! "Adds the item-details span to parent-el, using the link-label, link-type, internal-links and external-link arguments to determine the contents. Accepts a sequence of singular feed maps as the second argument, to pass to the detele-menu-item-callback! function." [parent-el all-feeds link-label link-type internal-link external-link] (domina/append! parent-el (:editor/.item-details ui/snippets)) (let [item-details-el (xpath parent-el ".//span[@class='item-details']")] (domina/set-text! (xpath item-details-el ".//span[@class='link-label']") link-label)
 ;; the displayed URI depends on the chosen link type
 (domina/set-text! (xpath item-details-el ".//span[@class='link-uri']") (if (= link-type "internal") internal-link external-link)) (events/listen! (xpath item-details-el ".//a[@class='menu-item-delete-link']") :click (partial delete-menu-item-callback! all-feeds))))

(defn add-nested-menu-item-to-dom! "Adds a nested menu item to the menu builder, removes the add-sub-item link from the parent element and if this is the first nested item also adds an add-item node to the newly created ul. Accepts a keyword determining the source (either :string or :dialog; determines whether the item is appended or prepended) as the first argument. Accepts all-feeds as the second argument, containing a sequence of singular feeds maps, parent-el, pointing to the element to add the item to, and link-label, link-type, internal-link and external-link values to pass to the add-item-details! function." [source all-feeds parent-el link-label link-type internal-link external-link] (domina/destroy! (domina/single-node (xpath parent-el "(.//a[@class='add-sub-item'])"))) (let [nested-menu-category-el (get-nested-menu-category-el! parent-el)]
 ;; if this is the first item, create an add-item node
 (when (zero? (count (domina/children nested-menu-category-el))) (domina/append!
nested-menu-category-el (:editor/.add-item-node ui/snippets)) (events/listen! (xpath nested-menu-category-el "(.//li)[last()]/a") :click (partial display-add-menu-item-dialog-callback! parent-el all-feeds)))
 ;; :dialog prepends (newest first); :string inserts before the trailing
 ;; add-item node to preserve stored order
 (if (= source :dialog) (domina/prepend! nested-menu-category-el (:editor/.nested-menu-item ui/snippets)) (domina/insert-before! (xpath nested-menu-category-el "(.//li)[last()]") (:editor/.nested-menu-item ui/snippets))) (add-item-details! (if (= source :dialog)
 ;; use first li as parent with a :dialog source
 (xpath nested-menu-category-el "(.//li[@class='nested-menu-item draggable'])[1]")
 ;; use last .nested-menu-item-li as parent if :string
 (xpath nested-menu-category-el (str "(.//li[@class='nested-menu-item draggable'])" "[last()]"))) all-feeds link-label link-type internal-link external-link) (ui/remove-dialog!)))

(defn add-menu-item-to-dom! "Adds a top level menu item to the menu builder. Accepts all-feeds as the first argument, containing a sequence of singular feeds maps, parent-el, pointing to the element to add the item to, and link-label, link-type, internal-link and external-link values to pass to the add-item-details! function." [all-feeds link-label link-type internal-link external-link] (domina/append! (sel "#menu-container") (:editor/.top-level-menu-item ui/snippets)) (let [menu-item-el (xpath "(//li[@class='top-level-menu-item draggable'])[last()]")] (add-item-details! menu-item-el all-feeds link-label link-type internal-link external-link) (append-add-sub-item-link! (xpath menu-item-el "(.//span[@class='item-details'])[last()]") all-feeds)) (ui/remove-dialog!))

(defn handle-add-menu-item-dialog-completion "Validates the add-menu-item dialog. If the dialog doesn't validate an error message is displayed, if it does it is added the the menu builder and the dialog is closed. The next-child-parent argument is either nil (in case of interaction with the top level) or points to the parent element that the new child is supposed to be added to.
The all-feeds argument is a sequence of singular feed maps, the dialog status is passed by the ui/display-dialog! function (either :ok or :cancel) and the final argument is a map with form values passed by the ui/display-dialog! function." [next-child-parent-el all-feeds dialog-status {:keys [external-link internal-link link-label link-type]}]
 ;; :cancel closes without touching the menu builder
 (when (= :ok dialog-status) (cond (string/blank? link-label) (ui/display-error (sel "p#add-link-status") (:link-label-required-error error-messages)) (link-label-has-invalid-chars? link-label) (ui/display-error (sel "p#add-link-status") (:link-label-has-invalid-chars-error error-messages)) next-child-parent-el (add-nested-menu-item-to-dom! :dialog all-feeds next-child-parent-el link-label link-type internal-link external-link) :default (add-menu-item-to-dom! all-feeds link-label link-type internal-link external-link))))

(defn parse-dummy-menu-ul! "Reads the menu data from the dummy DOM object created by parse-menu-content-string! and its children. Accepts the dummy DOM element as the first and only argument (also used for the child UL if calling itself recursively)." [element] (map (fn [el] (let [children (domina/children el) last-child (last children)] {:label (domina/text (first children)) :uri (domina/attr (first children) "href")
 ;; a trailing UL child marks a sub-menu; recurse one level
 :children (when (= (.-tagName last-child) "UL") (parse-dummy-menu-ul! last-child))})) (domina/children element)))

;; NOTE(review): (subs s 4 (- (count s) 5)) strips a 4-char "<ul>" prefix and
;; the 5-char "</ul>" suffix, but get-menu-string-from-dom! below emits
;; "<ul id=\"menu\">" (14 chars) — the leftover 'id="menu">' text appears to
;; be tolerated as a non-element node by the HTML parser; verify.
(defn parse-menu-content-string! "Takes string s and adds it to a dummy DOM elemenet to be parsed by the parse-dummy-menu-ul! function." [s] (if (string? s) (let [dummy-list-el (dom/createElement "ul")] (set! (.-innerHTML dummy-list-el) (subs s 4 (- (count s) 5))) (parse-dummy-menu-ul! dummy-list-el))))

(defn display-existing-menu-from-string! "Takes a menu string containing the HTML version of the menu and creates the menu builder UI for it, using a sequence of singular feed maps as the first argument to the function call." [all-feeds menu-string] (domina/destroy-children!
(sel "ul#menu-container")) (doseq [{:keys [label uri children]} (parse-menu-content-string! menu-string)] (add-menu-item-to-dom! all-feeds label "internal" uri nil) (doseq [{:keys [label uri]} children] (add-nested-menu-item-to-dom! :string all-feeds (domina/single-node
 ;; force computation of Domina xpath lazy seq
 (xpath "(//li[@class='top-level-menu-item draggable'])[last()]")) label "internal" uri nil))))

(defn get-item-details-from-dom! "Extracts the link label and URI from the text value of the .link-label and .link-uri span elements in the provided parent-el DOM element. Returns a map with the link label mapped to :label and the URI mapped to :uri." [parent-el] {:label (domina/text (xpath parent-el ".//span[@class='link-label']")) :uri (domina/text (xpath parent-el ".//span[@class='link-uri']"))})

(defn get-menu-data-from-dom! "Converts the DOM structure of the menu builder to a sequence of maps with :label and :uri keys for the corresponding values, as well as a :children key for the children elements of top level nodes." []
 ;; items are reversed because :dialog inserts prepend (newest first)
 (map (fn [top-level-menu-item-el] (assoc (get-item-details-from-dom! top-level-menu-item-el) :children (map get-item-details-from-dom! (reverse (domina/nodes (xpath top-level-menu-item-el ".//li[@class='nested-menu-item draggable']")))))) (reverse (domina/nodes (xpath "//li[@class='top-level-menu-item draggable']")))))

(defn get-menu-string-from-dom! "Returns the HTML string represenation of the menu builder." []
 ;; FIX rewrite using Hiccup or Enfocus
 (let [dummy-list-el (dom/createElement "ul")] (doseq [{:keys [label uri children]} (get-menu-data-from-dom!)] (let [dummy-li-el (dom/createElement "li") dummy-link-el (dom/createElement "a")] (domina/set-attr! dummy-link-el "href" uri) (domina/set-text! dummy-link-el label) (domina/append! dummy-li-el dummy-link-el) (when (pos? (count children)) (let [dummy-sub-menu-el (dom/createElement "ul")] (domina/add-class!
dummy-sub-menu-el "sub-menu") (doseq [{:keys [label uri]} children] (let [dummy-li-el (dom/createElement "li") dummy-link-el (dom/createElement "a")] (domina/set-attr! dummy-link-el "href" uri) (domina/set-text! dummy-link-el label) (domina/append! dummy-li-el dummy-link-el) (domina/append! dummy-sub-menu-el dummy-li-el))) (domina/append! dummy-li-el dummy-sub-menu-el))) (domina/append! dummy-list-el dummy-li-el))) (str "<ul id=\"menu\">"(.-innerHTML dummy-list-el) "</ul>"))) (defhandler display-add-menu-item-dialog-callback! "Callback for the add menu item links. If the first element is not nil it is expected to be the parent element of the child element that is being added. The second argument is a sequence of singular feed maps and the third argument is a Domina event." [next-child-parent-el all-feeds evt] (let [[{:keys [language name]} :as feeds] (filter #(not (= "image" (:default-document-type %))) all-feeds) feed-el (sel "select#internal-link-feed")] (document/get-documents-for-feed language name (fn [status {:keys [documents]}] (ui/display-dialog! "Add Menu Item" (:editor/add-menu-item-dialog-form ui/snippets) (sel "form#add-menu-item-dialog-form") {:auto-close? false :modal? true} (partial handle-add-menu-item-dialog-completion next-child-parent-el all-feeds)) (update-feed-options-in-dialog! feed-el feeds) (update-menu-links-options-in-dialog! documents) (create-dialog-feed-change-event! feed-el update-menu-links-options-in-dialog!) (events/listen! (sel "input#link-type-internal") :change toggle-add-menu-item-dialog-link-type-callback!) (events/listen! (sel "input#link-type-external") :change toggle-add-menu-item-dialog-link-type-callback!))))) (defn display-menu-editor-callback! "Displays the UI for the menu editor and creates the relevant events. The feeds argument contains a sequence containing all available feeds. The feed-map is a map containing feed information for this specific document. 
The status is either an HTTP status integer if editing an existing document or nil if editing a new document. The last argument contains a map that describes the document as stored in the database." [feeds feed-map status [{:keys [title content slug _id draft language feed created published previous-id datestamp]} :as document-states]] (let [new? (nil? status)] (ui/show! {:snippet :editor/back-to-overview-link :transformations [{:selector "//p[@id='back-to-overview-link']/a[1]" :attrs {:href (str "/admin/" language "/" feed "/overview")}}]} {:snippet :ui/status-message} {:snippet :ui/caption :transformations [{:selector "//h3[@id='caption']" :text (if new? "Create Menu" "Edit Menu")}]} {:snippet :editor/title-row :transformations [{:selector "//input[@id='title']" :value title} {:selector "//input[@id='draft']" :checked draft}]} {:snippet :editor/slug-row :transformations (concat [{:selector "//input[@id='slug']" :value slug}] (when new? [{:selector "//input[@id='custom-slug']" :remove-attr "disabled"} {:selector "//label[@id='custom-slug-label']" :remove-class "disabled"}]))} {:snippet :editor/menu-builder} {:snippet :editor/add-menu-item-container} {:snippet :editor/save-button-container}) (create-common-editor-events! new? feed-map) (display-existing-menu-from-string! feeds content) ;; TODO reimplement sorting ;; TODO add edit link feature ;; TODO add blank link (i.e. '#') option (events/listen! (sel "a#add-menu-item") :click (partial display-add-menu-item-dialog-callback! nil feeds)) (events/listen! (sel "#save-document") :click (partial save-document-button-click-callback! feed-map new? document-states)))) (defn display-editor! "Displays the editor for given language and feed-name strings, as well as document slug if an existing document is being edited." [language feed-name & [slug]] (swap! 
*file* dissoc :obj :data) (document/get-feeds-list (fn [status feeds] (let [feed-map (select-feed-by-language-and-name language feed-name feeds) editor-callback-fn (cond (= (:default-document-type feed-map) "menu") display-menu-editor-callback! :default display-editor-callback!)] (if slug (document/get-doc slug (partial editor-callback-fn feeds feed-map)) (editor-callback-fn feeds feed-map nil [{:language language :feed feed-name}]))))))
[ { "context": "0.0,\n :to 100.0000000001}\n {:key \"100 to 250 meters\",\n :from 100.0,\n :t", "end": 3722, "score": 0.7861198782920837, "start": 3720, "tag": "KEY", "value": "00" }, { "context": " :to 100.0000000001}\n {:key \"100 to 250 meters\",\n :from 100.0,\n :to 250.0", "end": 3729, "score": 0.7991794347763062, "start": 3727, "tag": "KEY", "value": "50" }, { "context": "0.0,\n :to 250.0000000001}\n {:key \"250 to 500 meters\",\n :from 250.0,\n :t", "end": 3808, "score": 0.8811342716217041, "start": 3806, "tag": "KEY", "value": "50" }, { "context": " :to 250.0000000001}\n {:key \"250 to 500 meters\",\n :from 250.0,\n :to 500.0", "end": 3815, "score": 0.8015167117118835, "start": 3813, "tag": "KEY", "value": "00" }, { "context": "50.0,\n :to 500.0000000001}\n {:key \"500 to 1000 meters\",\n :from 500.0,\n :", "end": 3894, "score": 0.8782999515533447, "start": 3891, "tag": "KEY", "value": "500" }, { "context": " :to 500.0000000001}\n {:key \"500 to 1000 meters\",\n :from 500.0,\n :to 1000.0000000", "end": 3909, "score": 0.8074772953987122, "start": 3898, "tag": "KEY", "value": "1000 meters" }, { "context": "rom 1000.0, :to 10000.0000000001}\n {:key \"10 to 50 km\",\n :from 10000.0,\n :to 5", "end": 4045, "score": 0.6219130754470825, "start": 4044, "tag": "KEY", "value": "0" }, { "context": "00.0, :to 10000.0000000001}\n {:key \"10 to 50 km\",\n :from 10000.0,\n :to 50000.0", "end": 4051, "score": 0.5919694900512695, "start": 4050, "tag": "KEY", "value": "0" }, { "context": ".0,\n :to 50000.0000000001}\n {:key \"50 to 100 km\",\n :from 50000.0,\n :to ", "end": 4129, "score": 0.7940579652786255, "start": 4127, "tag": "KEY", "value": "50" }, { "context": " :to 50000.0000000001}\n {:key \"50 to 100 km\",\n :from 50000.0,\n :to 100000.000", "end": 4139, "score": 0.7831355929374695, "start": 4133, "tag": "KEY", "value": "100 km" }, { "context": "0,\n :to 100000.0000000001}\n {:key \"100 to 250 km\",\n :from 100000.0,\n :to", "end": 4216, "score": 0.8155474662780762, 
"start": 4213, "tag": "KEY", "value": "100" }, { "context": " :to 100000.0000000001}\n {:key \"100 to 250 km\",\n :from 100000.0,\n :to 250000.00", "end": 4226, "score": 0.8517955541610718, "start": 4220, "tag": "KEY", "value": "250 km" }, { "context": "0,\n :to 250000.0000000001}\n {:key \"250 to 500 km\",\n :from 250000.0,\n :to", "end": 4304, "score": 0.8400194048881531, "start": 4301, "tag": "KEY", "value": "250" }, { "context": " :to 250000.0000000001}\n {:key \"250 to 500 km\",\n :from 250000.0,\n :to 500000.00", "end": 4314, "score": 0.8517643809318542, "start": 4308, "tag": "KEY", "value": "500 km" }, { "context": "0,\n :to 500000.0000000001}\n {:key \"500 to 1000 km\",\n :from 500000.0,\n :t", "end": 4392, "score": 0.8405890464782715, "start": 4389, "tag": "KEY", "value": "500" }, { "context": " :to 500000.0000000001}\n {:key \"500 to 1000 km\",\n :from 500000.0,\n :to 1000000.0", "end": 4403, "score": 0.8861740827560425, "start": 4396, "tag": "KEY", "value": "1000 km" }, { "context": " {:key \"250 to 500 meters\",\n ", "end": 15769, "score": 0.9758626222610474, "start": 15752, "tag": "KEY", "value": "250 to 500 meters" }, { "context": " {:key \"500 to 1000 meters\",\n ", "end": 16278, "score": 0.9910644292831421, "start": 16260, "tag": "KEY", "value": "500 to 1000 meters" }, { "context": " {:key \"1 to 10 km\",\n ", "end": 16780, "score": 0.956519365310669, "start": 16770, "tag": "KEY", "value": "1 to 10 km" }, { "context": " {:key \"10 to 50 km\",\n ", "end": 17285, "score": 0.9825162887573242, "start": 17274, "tag": "KEY", "value": "10 to 50 km" }, { "context": " {:key \"50 to 100 km\",\n ", "end": 17792, "score": 0.9894735813140869, "start": 17780, "tag": "KEY", "value": "50 to 100 km" }, { "context": " {:key \"100 to 250 km\",\n ", "end": 18301, "score": 0.9884111881256104, "start": 18288, "tag": "KEY", "value": "100 to 250 km" }, { "context": " {:key \"250 to 500 km\",\n ", "end": 18811, "score": 0.9903792142868042, "start": 18798, "tag": "KEY", 
"value": "250 to 500 km" }, { "context": " {:key \"500 to 1000 km\",\n ", "end": 19322, "score": 0.9943685531616211, "start": 19308, "tag": "KEY", "value": "500 to 1000 km" } ]
search-app/test/cmr/search/test/services/query_execution/facets/facets_v2_results_feature_test.clj
cgokey/Common-Metadata-Repository
0
(ns cmr.search.test.services.query-execution.facets.facets-v2-results-feature-test (:require [clojure.test :refer :all] [cmr.common-app.services.search.query-execution :as query-execution] [cmr.search.services.humanizers.humanizer-range-facet-service :as rfs] [cmr.search.services.query-execution.facets.facets-v2-results-feature :as v2-facets] [cmr.search.services.query-execution.facets.collection-v2-facets :as cv2f])) (def expected-pre-process-query-result-feature-result {:project-h {:nested {:path :project-sn-humanized}, :aggs {:values {:terms {:field :project-sn-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field :project-sn-humanized.priority}}}}}}, :science-keywords-h {:nested {:path :science-keywords-humanized}, :aggs {:category {:terms {:field "science-keywords-humanized.category", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :topic {:terms {:field "science-keywords-humanized.topic", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :term {:terms {:field "science-keywords-humanized.term", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :detailed-variable {:terms {:field "science-keywords-humanized.detailed-variable", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}}}}}}}}}}}, :data-center-h {:nested {:path :organization-humanized}, :aggs {:values {:terms {:field :organization-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field :organization-humanized.priority}}}}}}, :processing-level-id-h {:nested {:path :processing-level-id-humanized}, :aggs {:values {:terms {:field :processing-level-id-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field 
:processing-level-id-humanized.priority}}}}}}, :latency-h {:terms {:field :latency, :size 50}} :granule-data-format-h {:nested {:path :granule-data-format-humanized}, :aggs {:values {:terms {:field :granule-data-format-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field :granule-data-format-humanized.priority}}}}}}, :instrument-h {:nested {:path :instrument-sn-humanized}, :aggs {:values {:terms {:field :instrument-sn-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field :instrument-sn-humanized.priority}}}}}}, :horizontal-data-resolution-range {:nested {:path :horizontal-data-resolutions}, :aggs {:values {:range {:field :horizontal-data-resolutions.value, :ranges [{:key "0 to 1 meter", :from 0.0, :to 1.0000000001} {:key "1 to 30 meters", :from 1.0, :to 30.0000000001} {:key "30 to 100 meters", :from 30.0, :to 100.0000000001} {:key "100 to 250 meters", :from 100.0, :to 250.0000000001} {:key "250 to 500 meters", :from 250.0, :to 500.0000000001} {:key "500 to 1000 meters", :from 500.0, :to 1000.0000000001} {:key "1 to 10 km", :from 1000.0, :to 10000.0000000001} {:key "10 to 50 km", :from 10000.0, :to 50000.0000000001} {:key "50 to 100 km", :from 50000.0, :to 100000.0000000001} {:key "100 to 250 km", :from 100000.0, :to 250000.0000000001} {:key "250 to 500 km", :from 250000.0, :to 500000.0000000001} {:key "500 to 1000 km", :from 500000.0, :to 1000000.0000000001} {:key "1000 km & beyond", :from 1000000.0, :to 3.4028234663852886E38}]}, :aggs {:priority {:avg {:field :horizontal-data-resolutions.priority}}}}}}, :platforms-h {:nested {:path :platforms2-humanized}, :aggs {:basis {:terms {:field "platforms2-humanized.basis", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :category {:terms {:field "platforms2-humanized.category", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field 
:concept-id, :size 1}}}}, :short-name {:terms {:field "platforms2-humanized.short-name", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}}}, :sub-category {:terms {:field "platforms2-humanized.sub-category", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}}}}}}}}}, :two-d-coordinate-system-name-h {:terms {:field :two-d-coord-name, :size 50}}, :variables-h {:nested {:path :variables}, :aggs {:measurement {:terms {:field "variables.measurement", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :variable {:terms {:field "variables.variable", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}}}}}}}}) (deftest pre-process-query-result-feature-test (testing "Testing the preprocessing of the query without facets in the query." (let [context {:query-string "keyword=*&include_facets=v2&pretty=true"} query {:concept-type :collection :facet-fields nil :facets-size nil}] (is (= expected-pre-process-query-result-feature-result (:aggregations (query-execution/pre-process-query-result-feature context query :facets-v2)))))) (testing "Test query includes a facet." (let [context {:query-string "keyword=*&include_facets=v2&pretty=true&two_d_coordinate_system_name%5B%5D=MODIS+Tile+EASE"} query {:concept-type :collection :facet-fields [:two-d-coordinate-system-name] :facets-size nil}] (is (= {:two-d-coordinate-system-name-h {:terms {:field :two-d-coord-name, :size 50}}} (:aggregations (query-execution/pre-process-query-result-feature context query :facets-v2)))))) (testing "Test query includes a horizontal-data-resolutions facet." 
(let [context {:query-string "keyword=*&include_facets=v2&pretty=true&horizontal-data-resolution-range%5B%5D=%5B%5D=1%20to%2030%20meters"} query {:concept-type :collection :facet-fields [:horizontal-data-resolution-range]}] (is (= {:horizontal-data-resolution-range {:nested {:path :horizontal-data-resolutions}, :aggs {:values {:range {:field :horizontal-data-resolutions.value, :ranges [{:key "0 to 1 meter", :from 0.0, :to (+ 1.0 rfs/addition-factor)} {:key "1 to 30 meters", :from 1.0, :to (+ 30.0 rfs/addition-factor)} {:key "30 to 100 meters", :from 30.0, :to (+ 100.0 rfs/addition-factor)} {:key "100 to 250 meters", :from 100.0, :to (+ 250.0 rfs/addition-factor)} {:key "250 to 500 meters", :from 250.0, :to (+ 500.0 rfs/addition-factor)} {:key "500 to 1000 meters", :from 500.0, :to (+ 1000.0 rfs/addition-factor)} {:key "1 to 10 km", :from 1000.0, :to (+ 10000.0 rfs/addition-factor)} {:key "10 to 50 km", :from 10000.0, :to (+ 50000.0 rfs/addition-factor)} {:key "50 to 100 km", :from 50000.0, :to (+ 100000.0 rfs/addition-factor)} {:key "100 to 250 km", :from 100000.0, :to (+ 250000.0 rfs/addition-factor)} {:key "250 to 500 km", :from 250000.0, :to (+ 500000.0 rfs/addition-factor)} {:key "500 to 1000 km", :from 500000.0, :to (+ 1000000.0 rfs/addition-factor)} {:key "1000 km & beyond", :from 1000000.0, :to (Float/MAX_VALUE)}]}, :aggs {:priority {:avg {:field :horizontal-data-resolutions.priority}}}}}}} (:aggregations (query-execution/pre-process-query-result-feature context query :facets-v2))))))) (deftest post-process-query-result-feature-test (testing "Testing the post processing of the query without facets in the query." 
(let [context {:query-string "keyword=*&include_facets=v2&pretty=true" :system {:public-conf {:protocol "http", :port 3003, :host "localhost", :relative-root-url ""}}} query {:concept-type :collection :facet-fields nil} elastic-results {:aggregations {:two-d-coordinate-system-name-h {:doc_count_error_upper_bound 0, :buckets [{:key "MODIS Tile EASE", :doc_count 1,}] :sum_other_doc_count 0}}} query-results nil] (is (= {:title "Browse Collections", :type :group, :has_children true, :children [{:title "Tiling System", :type :group, :applied false, :has_children true, :children [{:title "MODIS Tile EASE", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&two_d_coordinate_system_name%5B%5D=MODIS+Tile+EASE"}, :has_children false}]}]} (:facets (query-execution/post-process-query-result-feature context query elastic-results query-results :facets-v2)))))) (testing "Testing the post processing of the query with facets in the query." 
(let [context {:query-string "keyword=*&include_facets=v2&pretty=true&two_d_coordinate_system_name%5B%5D=MODIS+Tile+EASE" :system {:public-conf {:protocol "http", :port 3003, :host "localhost", :relative-root-url ""}}} query {:concept-type :collection :facet-fields [:two-d-coordinate-system-name]} elastic-results {:aggregations {:two-d-coordinate-system-name-h {:doc_count_error_upper_bound 0, :buckets [{:key "MODIS Tile EASE", :doc_count 1,}] :sum_other_doc_count 0}}} query-results nil] (is (= {:title "Browse Collections", :type :group, :has_children true, :children [{:title "Tiling System", :type :group, :applied true, :has_children true, :children [{:title "MODIS Tile EASE", :type :filter, :applied true, :count 1, :links {:remove "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true"}, :has_children false}]}]} (:facets (query-execution/post-process-query-result-feature context query elastic-results query-results :facets-v2)))))) (testing "Testing the post processing of the query without facets in the query for horizontal data resolutions." 
(let [context {:query-string "keyword=*&include_facets=v2&pretty=true" :system {:public-conf {:protocol "http", :port 3003, :host "localhost", :relative-root-url ""}}} query {:concept-type :collection :facet-fields nil} elastic-results {:aggregations {:horizontal-data-resolution-range {:doc_count 1, :values {:buckets [{:key "0 to 1 meter", :from 0.0, :to 1.0, :doc_count 1, :priority {:value 0.0}} {:key "1 to 30 meters", :from 1.0, :to 30.0, :doc_count 1, :priority {:value 0.0}} {:key "30 to 100 meters", :from 30.0, :to 100.0, :doc_count 0, :priority {:value nil}} {:key "100 to 250 meters", :from 100.0, :to 250.0, :doc_count 0, :priority {:value nil}} {:key "250 to 500 meters", :from 250.0, :to 500.0, :doc_count 0, :priority {:value nil}} {:key "500 to 1000 meters", :from 500.0, :to 1000.0, :doc_count 0, :priority {:value nil}} {:key "1 to 10 km", :from 1000.0, :to 10000.0, :doc_count 1, :priority {:value 0.0}} {:key "10 to 50 km", :from 10000.0, :to 50000.0, :doc_count 1, :priority {:value 0.0}} {:key "50 to 100 km", :from 50000.0, :to 100000.0, :doc_count 1, :priority {:value 0.0}} {:key "100 to 250 km", :from 100000.0, :to 250000.0, :doc_count 1, :priority {:value 0.0}} {:key "250 to 500 km", :from 250000.0, :to 500000.0, :doc_count 0, :priority {:value nil}} {:key "500 to 1000 km", :from 500000.0, :to 1000000.0, :doc_count 0, :priority {:value nil}} {:key "1000 km & beyond", :from 1000000.0, :to (Float/MAX_VALUE), :doc_count 0, :priority {:value nil}}]}}}} query-results nil] (is (= {:title "Browse Collections", :type :group, :has_children true, :children [{:title "Horizontal Data Resolution", :type :group, :applied false, :has_children true, :children [{:title "0 to 1 meter", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false} {:title "1 to 30 meters", :type :filter, :applied false, :count 1, :links 
{:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1+to+30+meters"}, :has_children false} {:title "30 to 100 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=30+to+100+meters"}, :has_children false} {:title "100 to 250 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=100+to+250+meters"}, :has_children false} {:title "250 to 500 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=250+to+500+meters"}, :has_children false} {:title "500 to 1000 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=500+to+1000+meters"}, :has_children false} {:title "1 to 10 km", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1+to+10+km"}, :has_children false} {:title "10 to 50 km", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=10+to+50+km"}, :has_children false} {:title "50 to 100 km", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=50+to+100+km"}, :has_children false} {:title "100 to 250 km", :type :filter, :applied false, :count 1, :links {:apply 
"http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=100+to+250+km"}, :has_children false} {:title "250 to 500 km", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=250+to+500+km"}, :has_children false} {:title "500 to 1000 km", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=500+to+1000+km"}, :has_children false} {:title "1000 km & beyond", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1000+km+%26+beyond"}, :has_children false}]}]} (:facets (query-execution/post-process-query-result-feature context query elastic-results query-results :facets-v2)))))) (testing "Testing the post processing of the query with facets in the query for horizontal data resolutions." (let [context {:query-string "keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=0+to+1+meter" :system {:public-conf {:protocol "http", :port 3003, :host "localhost", :relative-root-url ""}}} query {:concept-type :collection :condition {:operation :and :conditions [{:field :keyword :query-str *} {:path :horizontal-data-resolutions :condition {:field :horizontal-data-resolutions.value :min-value 0.0 :max-value 1.0 :exclusive? 
false}}]}} facet-fields [:horizontal-data-resolutions] elastic-results {:aggregations {:horizontal-data-resolution-range {:doc_count 1 :values {:buckets [{:key "0 to 1 meter" :from 0.0 :to 1.0 :doc_count 1 :priority {:value 0.0}} {:key "1 to 30 meters" :from 1.0 :to 30.0 :doc_count 1 :priority {:value 0.0}} {:key "30 to 100 meters" :from 30.0 :to 100.0 :doc_count 0 :priority {:value nil}} {:key "1 to 10 km" :from 1000.0 :to 10000.0 :doc_count 1 :priority {:value 0.0}} {:key "1000 km & beyond" :from 1000000.0 :to (Float/MAX_VALUE) :doc_count 0 :priority {:value nil}}]}}}} query-results nil] (is (= {:title "Browse Collections", :type :group, :has_children true, :children [{:title "Horizontal Data Resolution", :type :group, :applied true, :has_children true, :children [{:title "0 to 1 meter", :type :filter, :applied true, :count 1, :links {:remove "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true"}, :has_children false} {:title "1 to 30 meters", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1+to+30+meters&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false} {:title "30 to 100 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=30+to+100+meters&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false} {:title "1 to 10 km", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1+to+10+km&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false} {:title "1000 km & beyond", :type :filter, :applied false, :count 0, :links {:apply 
"http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1000+km+%26+beyond&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false}]}]} (:facets (query-execution/post-process-query-result-feature context query elastic-results query-results :facets-v2)))))))
105042
(ns cmr.search.test.services.query-execution.facets.facets-v2-results-feature-test (:require [clojure.test :refer :all] [cmr.common-app.services.search.query-execution :as query-execution] [cmr.search.services.humanizers.humanizer-range-facet-service :as rfs] [cmr.search.services.query-execution.facets.facets-v2-results-feature :as v2-facets] [cmr.search.services.query-execution.facets.collection-v2-facets :as cv2f])) (def expected-pre-process-query-result-feature-result {:project-h {:nested {:path :project-sn-humanized}, :aggs {:values {:terms {:field :project-sn-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field :project-sn-humanized.priority}}}}}}, :science-keywords-h {:nested {:path :science-keywords-humanized}, :aggs {:category {:terms {:field "science-keywords-humanized.category", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :topic {:terms {:field "science-keywords-humanized.topic", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :term {:terms {:field "science-keywords-humanized.term", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :detailed-variable {:terms {:field "science-keywords-humanized.detailed-variable", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}}}}}}}}}}}, :data-center-h {:nested {:path :organization-humanized}, :aggs {:values {:terms {:field :organization-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field :organization-humanized.priority}}}}}}, :processing-level-id-h {:nested {:path :processing-level-id-humanized}, :aggs {:values {:terms {:field :processing-level-id-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field 
:processing-level-id-humanized.priority}}}}}}, :latency-h {:terms {:field :latency, :size 50}} :granule-data-format-h {:nested {:path :granule-data-format-humanized}, :aggs {:values {:terms {:field :granule-data-format-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field :granule-data-format-humanized.priority}}}}}}, :instrument-h {:nested {:path :instrument-sn-humanized}, :aggs {:values {:terms {:field :instrument-sn-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field :instrument-sn-humanized.priority}}}}}}, :horizontal-data-resolution-range {:nested {:path :horizontal-data-resolutions}, :aggs {:values {:range {:field :horizontal-data-resolutions.value, :ranges [{:key "0 to 1 meter", :from 0.0, :to 1.0000000001} {:key "1 to 30 meters", :from 1.0, :to 30.0000000001} {:key "30 to 100 meters", :from 30.0, :to 100.0000000001} {:key "1<KEY> to 2<KEY> meters", :from 100.0, :to 250.0000000001} {:key "2<KEY> to 5<KEY> meters", :from 250.0, :to 500.0000000001} {:key "<KEY> to <KEY>", :from 500.0, :to 1000.0000000001} {:key "1 to 10 km", :from 1000.0, :to 10000.0000000001} {:key "1<KEY> to 5<KEY> km", :from 10000.0, :to 50000.0000000001} {:key "<KEY> to <KEY>", :from 50000.0, :to 100000.0000000001} {:key "<KEY> to <KEY>", :from 100000.0, :to 250000.0000000001} {:key "<KEY> to <KEY>", :from 250000.0, :to 500000.0000000001} {:key "<KEY> to <KEY>", :from 500000.0, :to 1000000.0000000001} {:key "1000 km & beyond", :from 1000000.0, :to 3.4028234663852886E38}]}, :aggs {:priority {:avg {:field :horizontal-data-resolutions.priority}}}}}}, :platforms-h {:nested {:path :platforms2-humanized}, :aggs {:basis {:terms {:field "platforms2-humanized.basis", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :category {:terms {:field "platforms2-humanized.category", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs 
{:concept-id {:terms {:field :concept-id, :size 1}}}}, :short-name {:terms {:field "platforms2-humanized.short-name", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}}}, :sub-category {:terms {:field "platforms2-humanized.sub-category", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}}}}}}}}}, :two-d-coordinate-system-name-h {:terms {:field :two-d-coord-name, :size 50}}, :variables-h {:nested {:path :variables}, :aggs {:measurement {:terms {:field "variables.measurement", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :variable {:terms {:field "variables.variable", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}}}}}}}}) (deftest pre-process-query-result-feature-test (testing "Testing the preprocessing of the query without facets in the query." (let [context {:query-string "keyword=*&include_facets=v2&pretty=true"} query {:concept-type :collection :facet-fields nil :facets-size nil}] (is (= expected-pre-process-query-result-feature-result (:aggregations (query-execution/pre-process-query-result-feature context query :facets-v2)))))) (testing "Test query includes a facet." (let [context {:query-string "keyword=*&include_facets=v2&pretty=true&two_d_coordinate_system_name%5B%5D=MODIS+Tile+EASE"} query {:concept-type :collection :facet-fields [:two-d-coordinate-system-name] :facets-size nil}] (is (= {:two-d-coordinate-system-name-h {:terms {:field :two-d-coord-name, :size 50}}} (:aggregations (query-execution/pre-process-query-result-feature context query :facets-v2)))))) (testing "Test query includes a horizontal-data-resolutions facet." 
(let [context {:query-string "keyword=*&include_facets=v2&pretty=true&horizontal-data-resolution-range%5B%5D=%5B%5D=1%20to%2030%20meters"} query {:concept-type :collection :facet-fields [:horizontal-data-resolution-range]}] (is (= {:horizontal-data-resolution-range {:nested {:path :horizontal-data-resolutions}, :aggs {:values {:range {:field :horizontal-data-resolutions.value, :ranges [{:key "0 to 1 meter", :from 0.0, :to (+ 1.0 rfs/addition-factor)} {:key "1 to 30 meters", :from 1.0, :to (+ 30.0 rfs/addition-factor)} {:key "30 to 100 meters", :from 30.0, :to (+ 100.0 rfs/addition-factor)} {:key "100 to 250 meters", :from 100.0, :to (+ 250.0 rfs/addition-factor)} {:key "250 to 500 meters", :from 250.0, :to (+ 500.0 rfs/addition-factor)} {:key "500 to 1000 meters", :from 500.0, :to (+ 1000.0 rfs/addition-factor)} {:key "1 to 10 km", :from 1000.0, :to (+ 10000.0 rfs/addition-factor)} {:key "10 to 50 km", :from 10000.0, :to (+ 50000.0 rfs/addition-factor)} {:key "50 to 100 km", :from 50000.0, :to (+ 100000.0 rfs/addition-factor)} {:key "100 to 250 km", :from 100000.0, :to (+ 250000.0 rfs/addition-factor)} {:key "250 to 500 km", :from 250000.0, :to (+ 500000.0 rfs/addition-factor)} {:key "500 to 1000 km", :from 500000.0, :to (+ 1000000.0 rfs/addition-factor)} {:key "1000 km & beyond", :from 1000000.0, :to (Float/MAX_VALUE)}]}, :aggs {:priority {:avg {:field :horizontal-data-resolutions.priority}}}}}}} (:aggregations (query-execution/pre-process-query-result-feature context query :facets-v2))))))) (deftest post-process-query-result-feature-test (testing "Testing the post processing of the query without facets in the query." 
(let [context {:query-string "keyword=*&include_facets=v2&pretty=true" :system {:public-conf {:protocol "http", :port 3003, :host "localhost", :relative-root-url ""}}} query {:concept-type :collection :facet-fields nil} elastic-results {:aggregations {:two-d-coordinate-system-name-h {:doc_count_error_upper_bound 0, :buckets [{:key "MODIS Tile EASE", :doc_count 1,}] :sum_other_doc_count 0}}} query-results nil] (is (= {:title "Browse Collections", :type :group, :has_children true, :children [{:title "Tiling System", :type :group, :applied false, :has_children true, :children [{:title "MODIS Tile EASE", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&two_d_coordinate_system_name%5B%5D=MODIS+Tile+EASE"}, :has_children false}]}]} (:facets (query-execution/post-process-query-result-feature context query elastic-results query-results :facets-v2)))))) (testing "Testing the post processing of the query with facets in the query." 
(let [context {:query-string "keyword=*&include_facets=v2&pretty=true&two_d_coordinate_system_name%5B%5D=MODIS+Tile+EASE" :system {:public-conf {:protocol "http", :port 3003, :host "localhost", :relative-root-url ""}}} query {:concept-type :collection :facet-fields [:two-d-coordinate-system-name]} elastic-results {:aggregations {:two-d-coordinate-system-name-h {:doc_count_error_upper_bound 0, :buckets [{:key "MODIS Tile EASE", :doc_count 1,}] :sum_other_doc_count 0}}} query-results nil] (is (= {:title "Browse Collections", :type :group, :has_children true, :children [{:title "Tiling System", :type :group, :applied true, :has_children true, :children [{:title "MODIS Tile EASE", :type :filter, :applied true, :count 1, :links {:remove "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true"}, :has_children false}]}]} (:facets (query-execution/post-process-query-result-feature context query elastic-results query-results :facets-v2)))))) (testing "Testing the post processing of the query without facets in the query for horizontal data resolutions." 
(let [context {:query-string "keyword=*&include_facets=v2&pretty=true" :system {:public-conf {:protocol "http", :port 3003, :host "localhost", :relative-root-url ""}}} query {:concept-type :collection :facet-fields nil} elastic-results {:aggregations {:horizontal-data-resolution-range {:doc_count 1, :values {:buckets [{:key "0 to 1 meter", :from 0.0, :to 1.0, :doc_count 1, :priority {:value 0.0}} {:key "1 to 30 meters", :from 1.0, :to 30.0, :doc_count 1, :priority {:value 0.0}} {:key "30 to 100 meters", :from 30.0, :to 100.0, :doc_count 0, :priority {:value nil}} {:key "100 to 250 meters", :from 100.0, :to 250.0, :doc_count 0, :priority {:value nil}} {:key "<KEY>", :from 250.0, :to 500.0, :doc_count 0, :priority {:value nil}} {:key "<KEY>", :from 500.0, :to 1000.0, :doc_count 0, :priority {:value nil}} {:key "<KEY>", :from 1000.0, :to 10000.0, :doc_count 1, :priority {:value 0.0}} {:key "<KEY>", :from 10000.0, :to 50000.0, :doc_count 1, :priority {:value 0.0}} {:key "<KEY>", :from 50000.0, :to 100000.0, :doc_count 1, :priority {:value 0.0}} {:key "<KEY>", :from 100000.0, :to 250000.0, :doc_count 1, :priority {:value 0.0}} {:key "<KEY>", :from 250000.0, :to 500000.0, :doc_count 0, :priority {:value nil}} {:key "<KEY>", :from 500000.0, :to 1000000.0, :doc_count 0, :priority {:value nil}} {:key "1000 km & beyond", :from 1000000.0, :to (Float/MAX_VALUE), :doc_count 0, :priority {:value nil}}]}}}} query-results nil] (is (= {:title "Browse Collections", :type :group, :has_children true, :children [{:title "Horizontal Data Resolution", :type :group, :applied false, :has_children true, :children [{:title "0 to 1 meter", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false} {:title "1 to 30 meters", :type :filter, :applied false, :count 1, :links {:apply 
"http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1+to+30+meters"}, :has_children false} {:title "30 to 100 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=30+to+100+meters"}, :has_children false} {:title "100 to 250 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=100+to+250+meters"}, :has_children false} {:title "250 to 500 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=250+to+500+meters"}, :has_children false} {:title "500 to 1000 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=500+to+1000+meters"}, :has_children false} {:title "1 to 10 km", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1+to+10+km"}, :has_children false} {:title "10 to 50 km", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=10+to+50+km"}, :has_children false} {:title "50 to 100 km", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=50+to+100+km"}, :has_children false} {:title "100 to 250 km", :type :filter, :applied false, :count 1, :links {:apply 
"http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=100+to+250+km"}, :has_children false} {:title "250 to 500 km", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=250+to+500+km"}, :has_children false} {:title "500 to 1000 km", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=500+to+1000+km"}, :has_children false} {:title "1000 km & beyond", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1000+km+%26+beyond"}, :has_children false}]}]} (:facets (query-execution/post-process-query-result-feature context query elastic-results query-results :facets-v2)))))) (testing "Testing the post processing of the query with facets in the query for horizontal data resolutions." (let [context {:query-string "keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=0+to+1+meter" :system {:public-conf {:protocol "http", :port 3003, :host "localhost", :relative-root-url ""}}} query {:concept-type :collection :condition {:operation :and :conditions [{:field :keyword :query-str *} {:path :horizontal-data-resolutions :condition {:field :horizontal-data-resolutions.value :min-value 0.0 :max-value 1.0 :exclusive? 
false}}]}} facet-fields [:horizontal-data-resolutions] elastic-results {:aggregations {:horizontal-data-resolution-range {:doc_count 1 :values {:buckets [{:key "0 to 1 meter" :from 0.0 :to 1.0 :doc_count 1 :priority {:value 0.0}} {:key "1 to 30 meters" :from 1.0 :to 30.0 :doc_count 1 :priority {:value 0.0}} {:key "30 to 100 meters" :from 30.0 :to 100.0 :doc_count 0 :priority {:value nil}} {:key "1 to 10 km" :from 1000.0 :to 10000.0 :doc_count 1 :priority {:value 0.0}} {:key "1000 km & beyond" :from 1000000.0 :to (Float/MAX_VALUE) :doc_count 0 :priority {:value nil}}]}}}} query-results nil] (is (= {:title "Browse Collections", :type :group, :has_children true, :children [{:title "Horizontal Data Resolution", :type :group, :applied true, :has_children true, :children [{:title "0 to 1 meter", :type :filter, :applied true, :count 1, :links {:remove "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true"}, :has_children false} {:title "1 to 30 meters", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1+to+30+meters&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false} {:title "30 to 100 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=30+to+100+meters&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false} {:title "1 to 10 km", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1+to+10+km&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false} {:title "1000 km & beyond", :type :filter, :applied false, :count 0, :links {:apply 
"http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1000+km+%26+beyond&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false}]}]} (:facets (query-execution/post-process-query-result-feature context query elastic-results query-results :facets-v2)))))))
true
(ns cmr.search.test.services.query-execution.facets.facets-v2-results-feature-test (:require [clojure.test :refer :all] [cmr.common-app.services.search.query-execution :as query-execution] [cmr.search.services.humanizers.humanizer-range-facet-service :as rfs] [cmr.search.services.query-execution.facets.facets-v2-results-feature :as v2-facets] [cmr.search.services.query-execution.facets.collection-v2-facets :as cv2f])) (def expected-pre-process-query-result-feature-result {:project-h {:nested {:path :project-sn-humanized}, :aggs {:values {:terms {:field :project-sn-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field :project-sn-humanized.priority}}}}}}, :science-keywords-h {:nested {:path :science-keywords-humanized}, :aggs {:category {:terms {:field "science-keywords-humanized.category", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :topic {:terms {:field "science-keywords-humanized.topic", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :term {:terms {:field "science-keywords-humanized.term", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :detailed-variable {:terms {:field "science-keywords-humanized.detailed-variable", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}}}}}}}}}}}, :data-center-h {:nested {:path :organization-humanized}, :aggs {:values {:terms {:field :organization-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field :organization-humanized.priority}}}}}}, :processing-level-id-h {:nested {:path :processing-level-id-humanized}, :aggs {:values {:terms {:field :processing-level-id-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field 
:processing-level-id-humanized.priority}}}}}}, :latency-h {:terms {:field :latency, :size 50}} :granule-data-format-h {:nested {:path :granule-data-format-humanized}, :aggs {:values {:terms {:field :granule-data-format-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field :granule-data-format-humanized.priority}}}}}}, :instrument-h {:nested {:path :instrument-sn-humanized}, :aggs {:values {:terms {:field :instrument-sn-humanized.value, :size 50, :order [{:priority :desc} {:_count :desc}]}, :aggs {:priority {:avg {:field :instrument-sn-humanized.priority}}}}}}, :horizontal-data-resolution-range {:nested {:path :horizontal-data-resolutions}, :aggs {:values {:range {:field :horizontal-data-resolutions.value, :ranges [{:key "0 to 1 meter", :from 0.0, :to 1.0000000001} {:key "1 to 30 meters", :from 1.0, :to 30.0000000001} {:key "30 to 100 meters", :from 30.0, :to 100.0000000001} {:key "1PI:KEY:<KEY>END_PI to 2PI:KEY:<KEY>END_PI meters", :from 100.0, :to 250.0000000001} {:key "2PI:KEY:<KEY>END_PI to 5PI:KEY:<KEY>END_PI meters", :from 250.0, :to 500.0000000001} {:key "PI:KEY:<KEY>END_PI to PI:KEY:<KEY>END_PI", :from 500.0, :to 1000.0000000001} {:key "1 to 10 km", :from 1000.0, :to 10000.0000000001} {:key "1PI:KEY:<KEY>END_PI to 5PI:KEY:<KEY>END_PI km", :from 10000.0, :to 50000.0000000001} {:key "PI:KEY:<KEY>END_PI to PI:KEY:<KEY>END_PI", :from 50000.0, :to 100000.0000000001} {:key "PI:KEY:<KEY>END_PI to PI:KEY:<KEY>END_PI", :from 100000.0, :to 250000.0000000001} {:key "PI:KEY:<KEY>END_PI to PI:KEY:<KEY>END_PI", :from 250000.0, :to 500000.0000000001} {:key "PI:KEY:<KEY>END_PI to PI:KEY:<KEY>END_PI", :from 500000.0, :to 1000000.0000000001} {:key "1000 km & beyond", :from 1000000.0, :to 3.4028234663852886E38}]}, :aggs {:priority {:avg {:field :horizontal-data-resolutions.priority}}}}}}, :platforms-h {:nested {:path :platforms2-humanized}, :aggs {:basis {:terms {:field "platforms2-humanized.basis", :size 50}, :aggs {:coll-count 
{:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :category {:terms {:field "platforms2-humanized.category", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :short-name {:terms {:field "platforms2-humanized.short-name", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}}}, :sub-category {:terms {:field "platforms2-humanized.sub-category", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}}}}}}}}}, :two-d-coordinate-system-name-h {:terms {:field :two-d-coord-name, :size 50}}, :variables-h {:nested {:path :variables}, :aggs {:measurement {:terms {:field "variables.measurement", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}, :variable {:terms {:field "variables.variable", :size 50}, :aggs {:coll-count {:reverse_nested {}, :aggs {:concept-id {:terms {:field :concept-id, :size 1}}}}}}}}}}}) (deftest pre-process-query-result-feature-test (testing "Testing the preprocessing of the query without facets in the query." (let [context {:query-string "keyword=*&include_facets=v2&pretty=true"} query {:concept-type :collection :facet-fields nil :facets-size nil}] (is (= expected-pre-process-query-result-feature-result (:aggregations (query-execution/pre-process-query-result-feature context query :facets-v2)))))) (testing "Test query includes a facet." 
(let [context {:query-string "keyword=*&include_facets=v2&pretty=true&two_d_coordinate_system_name%5B%5D=MODIS+Tile+EASE"} query {:concept-type :collection :facet-fields [:two-d-coordinate-system-name] :facets-size nil}] (is (= {:two-d-coordinate-system-name-h {:terms {:field :two-d-coord-name, :size 50}}} (:aggregations (query-execution/pre-process-query-result-feature context query :facets-v2)))))) (testing "Test query includes a horizontal-data-resolutions facet." (let [context {:query-string "keyword=*&include_facets=v2&pretty=true&horizontal-data-resolution-range%5B%5D=%5B%5D=1%20to%2030%20meters"} query {:concept-type :collection :facet-fields [:horizontal-data-resolution-range]}] (is (= {:horizontal-data-resolution-range {:nested {:path :horizontal-data-resolutions}, :aggs {:values {:range {:field :horizontal-data-resolutions.value, :ranges [{:key "0 to 1 meter", :from 0.0, :to (+ 1.0 rfs/addition-factor)} {:key "1 to 30 meters", :from 1.0, :to (+ 30.0 rfs/addition-factor)} {:key "30 to 100 meters", :from 30.0, :to (+ 100.0 rfs/addition-factor)} {:key "100 to 250 meters", :from 100.0, :to (+ 250.0 rfs/addition-factor)} {:key "250 to 500 meters", :from 250.0, :to (+ 500.0 rfs/addition-factor)} {:key "500 to 1000 meters", :from 500.0, :to (+ 1000.0 rfs/addition-factor)} {:key "1 to 10 km", :from 1000.0, :to (+ 10000.0 rfs/addition-factor)} {:key "10 to 50 km", :from 10000.0, :to (+ 50000.0 rfs/addition-factor)} {:key "50 to 100 km", :from 50000.0, :to (+ 100000.0 rfs/addition-factor)} {:key "100 to 250 km", :from 100000.0, :to (+ 250000.0 rfs/addition-factor)} {:key "250 to 500 km", :from 250000.0, :to (+ 500000.0 rfs/addition-factor)} {:key "500 to 1000 km", :from 500000.0, :to (+ 1000000.0 rfs/addition-factor)} {:key "1000 km & beyond", :from 1000000.0, :to (Float/MAX_VALUE)}]}, :aggs {:priority {:avg {:field :horizontal-data-resolutions.priority}}}}}}} (:aggregations (query-execution/pre-process-query-result-feature context query :facets-v2))))))) (deftest 
post-process-query-result-feature-test (testing "Testing the post processing of the query without facets in the query." (let [context {:query-string "keyword=*&include_facets=v2&pretty=true" :system {:public-conf {:protocol "http", :port 3003, :host "localhost", :relative-root-url ""}}} query {:concept-type :collection :facet-fields nil} elastic-results {:aggregations {:two-d-coordinate-system-name-h {:doc_count_error_upper_bound 0, :buckets [{:key "MODIS Tile EASE", :doc_count 1,}] :sum_other_doc_count 0}}} query-results nil] (is (= {:title "Browse Collections", :type :group, :has_children true, :children [{:title "Tiling System", :type :group, :applied false, :has_children true, :children [{:title "MODIS Tile EASE", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&two_d_coordinate_system_name%5B%5D=MODIS+Tile+EASE"}, :has_children false}]}]} (:facets (query-execution/post-process-query-result-feature context query elastic-results query-results :facets-v2)))))) (testing "Testing the post processing of the query with facets in the query." 
(let [context {:query-string "keyword=*&include_facets=v2&pretty=true&two_d_coordinate_system_name%5B%5D=MODIS+Tile+EASE" :system {:public-conf {:protocol "http", :port 3003, :host "localhost", :relative-root-url ""}}} query {:concept-type :collection :facet-fields [:two-d-coordinate-system-name]} elastic-results {:aggregations {:two-d-coordinate-system-name-h {:doc_count_error_upper_bound 0, :buckets [{:key "MODIS Tile EASE", :doc_count 1,}] :sum_other_doc_count 0}}} query-results nil] (is (= {:title "Browse Collections", :type :group, :has_children true, :children [{:title "Tiling System", :type :group, :applied true, :has_children true, :children [{:title "MODIS Tile EASE", :type :filter, :applied true, :count 1, :links {:remove "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true"}, :has_children false}]}]} (:facets (query-execution/post-process-query-result-feature context query elastic-results query-results :facets-v2)))))) (testing "Testing the post processing of the query without facets in the query for horizontal data resolutions." 
(let [context {:query-string "keyword=*&include_facets=v2&pretty=true" :system {:public-conf {:protocol "http", :port 3003, :host "localhost", :relative-root-url ""}}} query {:concept-type :collection :facet-fields nil} elastic-results {:aggregations {:horizontal-data-resolution-range {:doc_count 1, :values {:buckets [{:key "0 to 1 meter", :from 0.0, :to 1.0, :doc_count 1, :priority {:value 0.0}} {:key "1 to 30 meters", :from 1.0, :to 30.0, :doc_count 1, :priority {:value 0.0}} {:key "30 to 100 meters", :from 30.0, :to 100.0, :doc_count 0, :priority {:value nil}} {:key "100 to 250 meters", :from 100.0, :to 250.0, :doc_count 0, :priority {:value nil}} {:key "PI:KEY:<KEY>END_PI", :from 250.0, :to 500.0, :doc_count 0, :priority {:value nil}} {:key "PI:KEY:<KEY>END_PI", :from 500.0, :to 1000.0, :doc_count 0, :priority {:value nil}} {:key "PI:KEY:<KEY>END_PI", :from 1000.0, :to 10000.0, :doc_count 1, :priority {:value 0.0}} {:key "PI:KEY:<KEY>END_PI", :from 10000.0, :to 50000.0, :doc_count 1, :priority {:value 0.0}} {:key "PI:KEY:<KEY>END_PI", :from 50000.0, :to 100000.0, :doc_count 1, :priority {:value 0.0}} {:key "PI:KEY:<KEY>END_PI", :from 100000.0, :to 250000.0, :doc_count 1, :priority {:value 0.0}} {:key "PI:KEY:<KEY>END_PI", :from 250000.0, :to 500000.0, :doc_count 0, :priority {:value nil}} {:key "PI:KEY:<KEY>END_PI", :from 500000.0, :to 1000000.0, :doc_count 0, :priority {:value nil}} {:key "1000 km & beyond", :from 1000000.0, :to (Float/MAX_VALUE), :doc_count 0, :priority {:value nil}}]}}}} query-results nil] (is (= {:title "Browse Collections", :type :group, :has_children true, :children [{:title "Horizontal Data Resolution", :type :group, :applied false, :has_children true, :children [{:title "0 to 1 meter", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false} {:title "1 to 30 meters", :type :filter, 
:applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1+to+30+meters"}, :has_children false} {:title "30 to 100 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=30+to+100+meters"}, :has_children false} {:title "100 to 250 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=100+to+250+meters"}, :has_children false} {:title "250 to 500 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=250+to+500+meters"}, :has_children false} {:title "500 to 1000 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=500+to+1000+meters"}, :has_children false} {:title "1 to 10 km", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1+to+10+km"}, :has_children false} {:title "10 to 50 km", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=10+to+50+km"}, :has_children false} {:title "50 to 100 km", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=50+to+100+km"}, :has_children false} {:title "100 to 250 km", :type :filter, :applied false, :count 1, :links {:apply 
"http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=100+to+250+km"}, :has_children false} {:title "250 to 500 km", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=250+to+500+km"}, :has_children false} {:title "500 to 1000 km", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=500+to+1000+km"}, :has_children false} {:title "1000 km & beyond", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1000+km+%26+beyond"}, :has_children false}]}]} (:facets (query-execution/post-process-query-result-feature context query elastic-results query-results :facets-v2)))))) (testing "Testing the post processing of the query with facets in the query for horizontal data resolutions." (let [context {:query-string "keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=0+to+1+meter" :system {:public-conf {:protocol "http", :port 3003, :host "localhost", :relative-root-url ""}}} query {:concept-type :collection :condition {:operation :and :conditions [{:field :keyword :query-str *} {:path :horizontal-data-resolutions :condition {:field :horizontal-data-resolutions.value :min-value 0.0 :max-value 1.0 :exclusive? 
false}}]}} facet-fields [:horizontal-data-resolutions] elastic-results {:aggregations {:horizontal-data-resolution-range {:doc_count 1 :values {:buckets [{:key "0 to 1 meter" :from 0.0 :to 1.0 :doc_count 1 :priority {:value 0.0}} {:key "1 to 30 meters" :from 1.0 :to 30.0 :doc_count 1 :priority {:value 0.0}} {:key "30 to 100 meters" :from 30.0 :to 100.0 :doc_count 0 :priority {:value nil}} {:key "1 to 10 km" :from 1000.0 :to 10000.0 :doc_count 1 :priority {:value 0.0}} {:key "1000 km & beyond" :from 1000000.0 :to (Float/MAX_VALUE) :doc_count 0 :priority {:value nil}}]}}}} query-results nil] (is (= {:title "Browse Collections", :type :group, :has_children true, :children [{:title "Horizontal Data Resolution", :type :group, :applied true, :has_children true, :children [{:title "0 to 1 meter", :type :filter, :applied true, :count 1, :links {:remove "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true"}, :has_children false} {:title "1 to 30 meters", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1+to+30+meters&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false} {:title "30 to 100 meters", :type :filter, :applied false, :count 0, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=30+to+100+meters&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false} {:title "1 to 10 km", :type :filter, :applied false, :count 1, :links {:apply "http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1+to+10+km&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false} {:title "1000 km & beyond", :type :filter, :applied false, :count 0, :links {:apply 
"http://localhost:3003/collections.json?keyword=*&include_facets=v2&pretty=true&horizontal_data_resolution_range%5B%5D=1000+km+%26+beyond&horizontal_data_resolution_range%5B%5D=0+to+1+meter"}, :has_children false}]}]} (:facets (query-execution/post-process-query-result-feature context query elastic-results query-results :facets-v2)))))))
[ { "context": "(ns ^{:author \"Mike Ananev\"}\n org.rssys.vault.util\n (:require [matcho.core", "end": 26, "score": 0.9998871684074402, "start": 15, "tag": "NAME", "value": "Mike Ananev" } ]
src/org/rssys/vault/util.clj
redstarssystems/vault
0
(ns ^{:author "Mike Ananev"} org.rssys.vault.util (:require [matcho.core :as matcho])) (defmacro match? "Match against each pattern and return true if match and false if not" [x & pattern] `(let [x# ~x patterns# [~@pattern] errors# (apply matcho/match* x# patterns#)] (empty? errors#))) (defmacro not-match? "Match against each pattern and return true if not match and false match." [x & pattern] `(let [x# ~x patterns# [~@pattern] errors# (apply matcho/match* x# patterns#)] (not (empty? errors#)))) (defmacro match-explain "Match against each pattern and return :success if match and String explanation if not" [x & pattern] `(let [x# ~x patterns# [~@pattern] errors# (apply matcho/match* x# patterns#)] (if-not (empty? errors#) (pr-str errors# x# patterns#) :success))) (comment (match? {:a 1 :b 2} {:a pos-int? :b 2}) (not-match? {:a 1 :b 2} {:a pos-int? :b 3}) (match-explain {:a 1 :b 2} {:a 1 :b 2}) (match-explain {:a 1 :b 2} {:a 1 :b 3}) )
1285
(ns ^{:author "<NAME>"} org.rssys.vault.util (:require [matcho.core :as matcho])) (defmacro match? "Match against each pattern and return true if match and false if not" [x & pattern] `(let [x# ~x patterns# [~@pattern] errors# (apply matcho/match* x# patterns#)] (empty? errors#))) (defmacro not-match? "Match against each pattern and return true if not match and false match." [x & pattern] `(let [x# ~x patterns# [~@pattern] errors# (apply matcho/match* x# patterns#)] (not (empty? errors#)))) (defmacro match-explain "Match against each pattern and return :success if match and String explanation if not" [x & pattern] `(let [x# ~x patterns# [~@pattern] errors# (apply matcho/match* x# patterns#)] (if-not (empty? errors#) (pr-str errors# x# patterns#) :success))) (comment (match? {:a 1 :b 2} {:a pos-int? :b 2}) (not-match? {:a 1 :b 2} {:a pos-int? :b 3}) (match-explain {:a 1 :b 2} {:a 1 :b 2}) (match-explain {:a 1 :b 2} {:a 1 :b 3}) )
true
(ns ^{:author "PI:NAME:<NAME>END_PI"} org.rssys.vault.util (:require [matcho.core :as matcho])) (defmacro match? "Match against each pattern and return true if match and false if not" [x & pattern] `(let [x# ~x patterns# [~@pattern] errors# (apply matcho/match* x# patterns#)] (empty? errors#))) (defmacro not-match? "Match against each pattern and return true if not match and false match." [x & pattern] `(let [x# ~x patterns# [~@pattern] errors# (apply matcho/match* x# patterns#)] (not (empty? errors#)))) (defmacro match-explain "Match against each pattern and return :success if match and String explanation if not" [x & pattern] `(let [x# ~x patterns# [~@pattern] errors# (apply matcho/match* x# patterns#)] (if-not (empty? errors#) (pr-str errors# x# patterns#) :success))) (comment (match? {:a 1 :b 2} {:a pos-int? :b 2}) (not-match? {:a 1 :b 2} {:a pos-int? :b 3}) (match-explain {:a 1 :b 2} {:a 1 :b 2}) (match-explain {:a 1 :b 2} {:a 1 :b 3}) )
[ { "context": "n.\n\n(ns ^{:doc \"The Retry pattern\"\n :author \"Vijay Mathew <vijay@anvetsu.com>\"}\n recoil.retry\n (:requir", "end": 741, "score": 0.9998748898506165, "start": 729, "tag": "NAME", "value": "Vijay Mathew" }, { "context": " \"The Retry pattern\"\n :author \"Vijay Mathew <vijay@anvetsu.com>\"}\n recoil.retry\n (:require [recoil.util :as ", "end": 760, "score": 0.999934732913971, "start": 743, "tag": "EMAIL", "value": "vijay@anvetsu.com" } ]
src/recoil/retry.clj
anvetsu/recoil
0
;; Copyright (c) 2018 Anvetsu Technologies. All rights reserved. ;; The use and distribution terms for this software are covered by the ;; MIT License (https://mit-license.org/) ;; which can be found in the file `LICENSE` at the root of this distribution. ;; By using this software in any fashion, you are agreeing to be bound by ;; the terms of this license. ;; You must not remove this notice, or any other, from this software. ;; Implements the Retry pattern which enable an application to handle transient failures when ;; it tries to connect to a service or network resource, by transparently retrying a failed operation. ;; This can improve the stability of the application. (ns ^{:doc "The Retry pattern" :author "Vijay Mathew <vijay@anvetsu.com>"} recoil.retry (:require [recoil.util :as ru])) (declare do-wait) (defn executor "Returns a function that execute retries for a user-defined request based on some `policies`. `policies` is a map with following keys: :handle - list of exceptions that can cause a restart. any other exception will be re-thrown :retry - the number of retries, defaults to 1 :wait-secs - number of seconds to wait before each retry :wait-fn - a function to dynamically compute the seconds to wait based on current response and wait-secs The user-defined `request-fn` must return `{:ok result}` on success. Any other value will trigger a retry. If all retries are expired, `{:status :no-retries-left :result <result>}` will be returned, where <result> will the return value of the last failed attempt." [policies] (let [handle (:handle policies) retry (or (:retry policies) 1) orig-wait-secs (:wait-secs policies) wait-fn (:wait-fn policies) no-retries {:status :no-retries-left}] (fn [request-fn] (loop [wait-secs orig-wait-secs r retry] (let [result (ru/try-call request-fn handle :retry)] (if (or (get result :ok) (= :unhandled-exception (get result :error))) result (if (zero? 
r) (assoc no-retries :result result) (if (or wait-secs wait-fn) (recur (do-wait wait-secs wait-fn result r) (dec r)) (recur wait-secs (dec r)))))))))) (defn- do-wait [wait-secs wait-fn last-result n-retry] (let [actual-wait-secs (if wait-fn (wait-fn last-result wait-secs n-retry) wait-secs)] (try (do (Thread/sleep (* actual-wait-secs 1000)) actual-wait-secs) (catch InterruptedException _ actual-wait-secs))))
109190
;; Copyright (c) 2018 Anvetsu Technologies. All rights reserved. ;; The use and distribution terms for this software are covered by the ;; MIT License (https://mit-license.org/) ;; which can be found in the file `LICENSE` at the root of this distribution. ;; By using this software in any fashion, you are agreeing to be bound by ;; the terms of this license. ;; You must not remove this notice, or any other, from this software. ;; Implements the Retry pattern which enable an application to handle transient failures when ;; it tries to connect to a service or network resource, by transparently retrying a failed operation. ;; This can improve the stability of the application. (ns ^{:doc "The Retry pattern" :author "<NAME> <<EMAIL>>"} recoil.retry (:require [recoil.util :as ru])) (declare do-wait) (defn executor "Returns a function that execute retries for a user-defined request based on some `policies`. `policies` is a map with following keys: :handle - list of exceptions that can cause a restart. any other exception will be re-thrown :retry - the number of retries, defaults to 1 :wait-secs - number of seconds to wait before each retry :wait-fn - a function to dynamically compute the seconds to wait based on current response and wait-secs The user-defined `request-fn` must return `{:ok result}` on success. Any other value will trigger a retry. If all retries are expired, `{:status :no-retries-left :result <result>}` will be returned, where <result> will the return value of the last failed attempt." [policies] (let [handle (:handle policies) retry (or (:retry policies) 1) orig-wait-secs (:wait-secs policies) wait-fn (:wait-fn policies) no-retries {:status :no-retries-left}] (fn [request-fn] (loop [wait-secs orig-wait-secs r retry] (let [result (ru/try-call request-fn handle :retry)] (if (or (get result :ok) (= :unhandled-exception (get result :error))) result (if (zero? 
r) (assoc no-retries :result result) (if (or wait-secs wait-fn) (recur (do-wait wait-secs wait-fn result r) (dec r)) (recur wait-secs (dec r)))))))))) (defn- do-wait [wait-secs wait-fn last-result n-retry] (let [actual-wait-secs (if wait-fn (wait-fn last-result wait-secs n-retry) wait-secs)] (try (do (Thread/sleep (* actual-wait-secs 1000)) actual-wait-secs) (catch InterruptedException _ actual-wait-secs))))
true
;; Copyright (c) 2018 Anvetsu Technologies. All rights reserved. ;; The use and distribution terms for this software are covered by the ;; MIT License (https://mit-license.org/) ;; which can be found in the file `LICENSE` at the root of this distribution. ;; By using this software in any fashion, you are agreeing to be bound by ;; the terms of this license. ;; You must not remove this notice, or any other, from this software. ;; Implements the Retry pattern which enable an application to handle transient failures when ;; it tries to connect to a service or network resource, by transparently retrying a failed operation. ;; This can improve the stability of the application. (ns ^{:doc "The Retry pattern" :author "PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>"} recoil.retry (:require [recoil.util :as ru])) (declare do-wait) (defn executor "Returns a function that execute retries for a user-defined request based on some `policies`. `policies` is a map with following keys: :handle - list of exceptions that can cause a restart. any other exception will be re-thrown :retry - the number of retries, defaults to 1 :wait-secs - number of seconds to wait before each retry :wait-fn - a function to dynamically compute the seconds to wait based on current response and wait-secs The user-defined `request-fn` must return `{:ok result}` on success. Any other value will trigger a retry. If all retries are expired, `{:status :no-retries-left :result <result>}` will be returned, where <result> will the return value of the last failed attempt." [policies] (let [handle (:handle policies) retry (or (:retry policies) 1) orig-wait-secs (:wait-secs policies) wait-fn (:wait-fn policies) no-retries {:status :no-retries-left}] (fn [request-fn] (loop [wait-secs orig-wait-secs r retry] (let [result (ru/try-call request-fn handle :retry)] (if (or (get result :ok) (= :unhandled-exception (get result :error))) result (if (zero? 
r) (assoc no-retries :result result) (if (or wait-secs wait-fn) (recur (do-wait wait-secs wait-fn result r) (dec r)) (recur wait-secs (dec r)))))))))) (defn- do-wait [wait-secs wait-fn last-result n-retry] (let [actual-wait-secs (if wait-fn (wait-fn last-result wait-secs n-retry) wait-secs)] (try (do (Thread/sleep (* actual-wait-secs 1000)) actual-wait-secs) (catch InterruptedException _ actual-wait-secs))))
[ { "context": "`)\n * `:db-user` (`DB_USER`)\n * `:db-password (`DB_PASSWORD`)\"\n [env]\n (env->config env [[:db-name :databas", "end": 1755, "score": 0.6466527581214905, "start": 1744, "tag": "PASSWORD", "value": "DB_PASSWORD" }, { "context": "ser :username]\n [:db-password :password]]))\n\n(defn new-database-from-env\n \"Returns a new", "end": 1943, "score": 0.9550702571868896, "start": 1935, "tag": "PASSWORD", "value": "password" } ]
data/train/clojure/222220058fcfcfe06fcd2ab88a3b15ede9a3c34adatabase.clj
harshp8l/deep-learning-lang-detection
84
(ns cdc-util.components.database "A component to manage (pooled) database connections using HikariCP. Defaults to using the Oracle Thin driver for connections with a min/max pool size of 2/20 connections." (:require [com.stuartsierra.component :as component] [hikari-cp.core :as hk] [cdc-util.env :refer [env->config]])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Defaults (def default-options (merge hk/default-datasource-options {:auto-commit true :read-only false :minimum-idle 2 :maximum-pool-size 20 :adapter "oracle" :driver-type "thin" :port-number 1521 :implicit-caching-enabled true :max-statements 200})) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Component (defrecord Database [options] component/Lifecycle (start [this] (let [o (merge default-options options) ds (hk/make-datasource o)] (assoc this :options o :datasource ds))) (stop [this] (if-let [ds (:datasource this)] (hk/close-datasource ds)) (dissoc this :datasource))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Public (defn new-database "Returns a new, un-started, database component. Must be provided with a map of datasource options specifying, at a minimum, the: * `:database-name` * `:server-name` * `:username` * `:password`" [opts] (component/using (->Database opts) [])) (defn env->database-opts "Returns a map of component options derived from the given environment variable map. The supported env variables are: * `:db-name` (`DB_NAME`) * `:db-server` (`DB_SERVER`) * `:db-user` (`DB_USER`) * `:db-password (`DB_PASSWORD`)" [env] (env->config env [[:db-name :database-name] [:db-server :server-name] [:db-user :username] [:db-password :password]])) (defn new-database-from-env "Returns a new, un-started, database component initialized with options from the given map of environment variables (per `env->database-opts`.)" [env] (new-database (env->database-opts env)))
98340
(ns cdc-util.components.database "A component to manage (pooled) database connections using HikariCP. Defaults to using the Oracle Thin driver for connections with a min/max pool size of 2/20 connections." (:require [com.stuartsierra.component :as component] [hikari-cp.core :as hk] [cdc-util.env :refer [env->config]])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Defaults (def default-options (merge hk/default-datasource-options {:auto-commit true :read-only false :minimum-idle 2 :maximum-pool-size 20 :adapter "oracle" :driver-type "thin" :port-number 1521 :implicit-caching-enabled true :max-statements 200})) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Component (defrecord Database [options] component/Lifecycle (start [this] (let [o (merge default-options options) ds (hk/make-datasource o)] (assoc this :options o :datasource ds))) (stop [this] (if-let [ds (:datasource this)] (hk/close-datasource ds)) (dissoc this :datasource))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Public (defn new-database "Returns a new, un-started, database component. Must be provided with a map of datasource options specifying, at a minimum, the: * `:database-name` * `:server-name` * `:username` * `:password`" [opts] (component/using (->Database opts) [])) (defn env->database-opts "Returns a map of component options derived from the given environment variable map. The supported env variables are: * `:db-name` (`DB_NAME`) * `:db-server` (`DB_SERVER`) * `:db-user` (`DB_USER`) * `:db-password (`<PASSWORD>`)" [env] (env->config env [[:db-name :database-name] [:db-server :server-name] [:db-user :username] [:db-password :<PASSWORD>]])) (defn new-database-from-env "Returns a new, un-started, database component initialized with options from the given map of environment variables (per `env->database-opts`.)" [env] (new-database (env->database-opts env)))
true
(ns cdc-util.components.database "A component to manage (pooled) database connections using HikariCP. Defaults to using the Oracle Thin driver for connections with a min/max pool size of 2/20 connections." (:require [com.stuartsierra.component :as component] [hikari-cp.core :as hk] [cdc-util.env :refer [env->config]])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Defaults (def default-options (merge hk/default-datasource-options {:auto-commit true :read-only false :minimum-idle 2 :maximum-pool-size 20 :adapter "oracle" :driver-type "thin" :port-number 1521 :implicit-caching-enabled true :max-statements 200})) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Component (defrecord Database [options] component/Lifecycle (start [this] (let [o (merge default-options options) ds (hk/make-datasource o)] (assoc this :options o :datasource ds))) (stop [this] (if-let [ds (:datasource this)] (hk/close-datasource ds)) (dissoc this :datasource))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Public (defn new-database "Returns a new, un-started, database component. Must be provided with a map of datasource options specifying, at a minimum, the: * `:database-name` * `:server-name` * `:username` * `:password`" [opts] (component/using (->Database opts) [])) (defn env->database-opts "Returns a map of component options derived from the given environment variable map. The supported env variables are: * `:db-name` (`DB_NAME`) * `:db-server` (`DB_SERVER`) * `:db-user` (`DB_USER`) * `:db-password (`PI:PASSWORD:<PASSWORD>END_PI`)" [env] (env->config env [[:db-name :database-name] [:db-server :server-name] [:db-user :username] [:db-password :PI:PASSWORD:<PASSWORD>END_PI]])) (defn new-database-from-env "Returns a new, un-started, database component initialized with options from the given map of environment variables (per `env->database-opts`.)" [env] (new-database (env->database-opts env)))
[ { "context": "ptors\n\n(def session-interceptor\n (let [cookie-key (.getBytes (get-in config [:cookie :key]))\n cookie-na", "end": 5406, "score": 0.8126959204673767, "start": 5396, "tag": "KEY", "value": "(.getBytes" } ]
src/codes/stel/functional_news/handler.clj
stelcodes/functional-news
4
(ns codes.stel.functional-news.handler (:require [reitit.ring :refer [create-resource-handler routes redirect-trailing-slash-handler create-default-handler]] [reitit.http :as http] [taoensso.timbre :refer [info debug]] [reitit.http.coercion :refer [coerce-request-interceptor coerce-response-interceptor]] [reitit.coercion.malli] [ring.util.response :refer [redirect bad-request]] [codes.stel.functional-news.views :as views] [codes.stel.functional-news.state :as state] [codes.stel.functional-news.util :refer [validate-url validate-email validate-password pp]] [codes.stel.functional-news.config :refer [config]] [clojure.string :as str] [ring.middleware.session :refer [session-request session-response]] [ring.middleware.session.cookie :refer [cookie-store]] [reitit.http.interceptors.muuntaja :refer [format-interceptor]] [reitit.http.interceptors.parameters :refer [parameters-interceptor]] [reitit.http.interceptors.multipart :refer [multipart-interceptor]] [reitit.spec :as rspec] [reitit.dev.pretty :as pretty] [reitit.interceptor.sieppari :as sieppari])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Helper functions (defn good-html-response [body] {:status 200, :content-type "text/html", :body body}) (defn bad-html-response [body] {:status 400, :content-type "text/html", :body body}) (defn error-page-handler [{:keys [user]}] (bad-html-response (views/not-found user))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; GET request handler functions (defn hot-submissions-page-handler [{:keys [user], :as request}] (let [submissions (state/get-hot-submissions)] (good-html-response (views/submission-list user submissions)))) (defn new-submissions-page-handler [{:keys [user], :as request}] (let [submissions (state/get-new-submissions)] (good-html-response (views/submission-list user submissions)))) (defn login-page-handler [_request] (good-html-response (views/login-page))) (defn submit-page-handler [{:keys [user], :as request}] (if user (good-html-response (views/submit-page user nil)) 
(redirect "/login" :see-other))) (defn submission-page-handler [{:keys [user], :as request}] (try (let [submission-id (get-in request [:parameters :path :id]) submission (state/find-submission submission-id) comments (state/find-comments submission-id)] (good-html-response (views/submission-page user submission comments))) (catch Exception _ (error-page-handler request)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; POST request handler functions (defn authorize-user-handler [request] (try (let [email (get-in request [:parameters :form :email]) password (get-in request [:parameters :form :password]) user (state/auth-user email password) session-body {:id (:users/id user)}] (assoc (redirect "/" :see-other) :session session-body)) (catch Exception _ (bad-html-response (views/login-page {:login "We didn't recognize that email password combination ;-;"}))))) (defn create-comment-handler [request] (try (let [user-id (get-in request [:session :id]) submission-id (get-in request [:parameters :form :submission-id]) body (get-in request [:parameters :form :body]) _result (state/create-comment user-id submission-id body)] (redirect (str "/submissions/" submission-id) :see-other)) (catch Exception _ (redirect "/login" :see-other)))) (defn create-submission-handler [{:keys [user], :as request}] (try (let [user-id (get-in request [:session :id]) title (get-in request [:parameters :form :title]) url (get-in request [:parameters :form :url]) _ (validate-url url) submission (state/create-submission title url user-id) submission-id (:submissions/id submission)] (redirect (str "/submissions/" submission-id) :see-other)) (catch Exception e (bad-request (views/submit-page user (.getMessage e)))))) (defn logout-handler [_request] (assoc (redirect "/") :session nil)) (defn upvote-handler [{:keys [user], :as request}] (if-not user (redirect "/login" :see-other) (try (let [submission-id (get-in request [:parameters :path :id]) user-id (get-in request [:session :id]) location (get-in request 
[:headers "referer"]) _ (state/create-upvote user-id submission-id)] (redirect location :see-other)) (catch Exception _ (redirect "/" :see-other))))) (defn signup-handler [request] (try (let [email (get-in request [:parameters :form :email]) password (get-in request [:parameters :form :password]) _ (validate-email email) _ (validate-password password) result (state/create-user email password) session-body {:id (:users/id result)}] (assoc (redirect "/" :see-other) :session session-body)) (catch Exception e (debug "Signup Handler Exception - " e) (bad-request (views/login-page {:signup (.getMessage e)}))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Interceptors (def session-interceptor (let [cookie-key (.getBytes (get-in config [:cookie :key])) cookie-name (get-in config [:cookie :name]) options {:store (cookie-store {:key cookie-key}), :cookie-name cookie-name}] {:enter (fn [{:keys [request], :as context}] (let [new-request (session-request request options) user-id (get-in new-request [:session :id]) user (when user-id (try (state/find-user user-id) (catch Exception _ nil))) new-request (assoc new-request :user user)] (assoc context :request new-request))), :leave (fn [{:keys [response request], :as context}] (assoc context :response (session-response response request options)))})) (def log-info-interceptor {:leave (fn [{:keys [request response], :as context}] (let [method-str (-> request (:request-method) (name) (str) (str/upper-case))] (info "\n📫" method-str (:status response) (:uri request)) context))}) (def debug-interceptor {:enter (fn [{:keys [request], :as context}] (when-not (re-find #"^.*\.(jpg|png|gif|svg|ico|css|js)$" (:uri request)) (let [keep-keys [:session :request-method :headers :scheme :user :body :path-params :query-string :server-name :uri :character-encoding :content-type :query-params :websocket? 
:form-params :content-length :server-port :parameters :params :cookies :remote-addr] filtered-request (select-keys request keep-keys) sorted-request (into (sorted-map) filtered-request)] (debug "\n================================================" "\n🐞 REQUEST INTERCEPTOR\n" (pp sorted-request) "================================================"))) context)}) (def error-interceptor {:error (fn [{:keys [request error], :as context}] (debug error) (assoc context :error nil :response (error-page-handler request)))}) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Reitit App (def app-routes [["/" {:handler hot-submissions-page-handler}] ["/assets/*" {:handler (create-resource-handler {:root "public/assets"})}] ["/new" {:handler new-submissions-page-handler}] ["/submit" {:get {:handler submit-page-handler}, :post {:handler create-submission-handler, :parameters {:form [:map [:title string?] [:url string?]]}}}] ["/logout" {:get {:handler logout-handler}}] ["/submissions/:id" {:handler submission-page-handler, :parameters {:path [:map [:id int?]]}}] ["/upvote/:id" {:get {:name ::upvote, :handler upvote-handler, :parameters {:path [:map [:id int?]]}}}] ["/login" {:get {:handler login-page-handler}, :post {:name ::authorize-user, :handler authorize-user-handler, :parameters {:form [:map [:email string?] [:password string?]]}}}] ["/signup" {:post {:name ::signup, :handler signup-handler, :parameters {:form [:map [:email string?] [:password string?]]}}}] ["/comments" {:post {:handler create-comment-handler, :name ::create-comment, :parameters {:form [:map [:submission-id int?] 
[:body string?]]}}}]]) (def router-options {:validate rspec/validate, :exception pretty/exception, :data {:coercion reitit.coercion.malli/coercion}}) (def app (http/ring-handler (http/router app-routes router-options) (routes (redirect-trailing-slash-handler) (create-default-handler {:not-found error-page-handler, :method-not-allowed error-page-handler, :not-acceptable error-page-handler})) {:interceptors [error-interceptor log-info-interceptor (format-interceptor) (parameters-interceptor) (multipart-interceptor) session-interceptor (coerce-request-interceptor) (when-not (config :prod) debug-interceptor) (coerce-response-interceptor)], :executor sieppari/executor}))
116879
(ns codes.stel.functional-news.handler (:require [reitit.ring :refer [create-resource-handler routes redirect-trailing-slash-handler create-default-handler]] [reitit.http :as http] [taoensso.timbre :refer [info debug]] [reitit.http.coercion :refer [coerce-request-interceptor coerce-response-interceptor]] [reitit.coercion.malli] [ring.util.response :refer [redirect bad-request]] [codes.stel.functional-news.views :as views] [codes.stel.functional-news.state :as state] [codes.stel.functional-news.util :refer [validate-url validate-email validate-password pp]] [codes.stel.functional-news.config :refer [config]] [clojure.string :as str] [ring.middleware.session :refer [session-request session-response]] [ring.middleware.session.cookie :refer [cookie-store]] [reitit.http.interceptors.muuntaja :refer [format-interceptor]] [reitit.http.interceptors.parameters :refer [parameters-interceptor]] [reitit.http.interceptors.multipart :refer [multipart-interceptor]] [reitit.spec :as rspec] [reitit.dev.pretty :as pretty] [reitit.interceptor.sieppari :as sieppari])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Helper functions (defn good-html-response [body] {:status 200, :content-type "text/html", :body body}) (defn bad-html-response [body] {:status 400, :content-type "text/html", :body body}) (defn error-page-handler [{:keys [user]}] (bad-html-response (views/not-found user))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; GET request handler functions (defn hot-submissions-page-handler [{:keys [user], :as request}] (let [submissions (state/get-hot-submissions)] (good-html-response (views/submission-list user submissions)))) (defn new-submissions-page-handler [{:keys [user], :as request}] (let [submissions (state/get-new-submissions)] (good-html-response (views/submission-list user submissions)))) (defn login-page-handler [_request] (good-html-response (views/login-page))) (defn submit-page-handler [{:keys [user], :as request}] (if user (good-html-response (views/submit-page user nil)) 
(redirect "/login" :see-other))) (defn submission-page-handler [{:keys [user], :as request}] (try (let [submission-id (get-in request [:parameters :path :id]) submission (state/find-submission submission-id) comments (state/find-comments submission-id)] (good-html-response (views/submission-page user submission comments))) (catch Exception _ (error-page-handler request)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; POST request handler functions (defn authorize-user-handler [request] (try (let [email (get-in request [:parameters :form :email]) password (get-in request [:parameters :form :password]) user (state/auth-user email password) session-body {:id (:users/id user)}] (assoc (redirect "/" :see-other) :session session-body)) (catch Exception _ (bad-html-response (views/login-page {:login "We didn't recognize that email password combination ;-;"}))))) (defn create-comment-handler [request] (try (let [user-id (get-in request [:session :id]) submission-id (get-in request [:parameters :form :submission-id]) body (get-in request [:parameters :form :body]) _result (state/create-comment user-id submission-id body)] (redirect (str "/submissions/" submission-id) :see-other)) (catch Exception _ (redirect "/login" :see-other)))) (defn create-submission-handler [{:keys [user], :as request}] (try (let [user-id (get-in request [:session :id]) title (get-in request [:parameters :form :title]) url (get-in request [:parameters :form :url]) _ (validate-url url) submission (state/create-submission title url user-id) submission-id (:submissions/id submission)] (redirect (str "/submissions/" submission-id) :see-other)) (catch Exception e (bad-request (views/submit-page user (.getMessage e)))))) (defn logout-handler [_request] (assoc (redirect "/") :session nil)) (defn upvote-handler [{:keys [user], :as request}] (if-not user (redirect "/login" :see-other) (try (let [submission-id (get-in request [:parameters :path :id]) user-id (get-in request [:session :id]) location (get-in request 
[:headers "referer"]) _ (state/create-upvote user-id submission-id)] (redirect location :see-other)) (catch Exception _ (redirect "/" :see-other))))) (defn signup-handler [request] (try (let [email (get-in request [:parameters :form :email]) password (get-in request [:parameters :form :password]) _ (validate-email email) _ (validate-password password) result (state/create-user email password) session-body {:id (:users/id result)}] (assoc (redirect "/" :see-other) :session session-body)) (catch Exception e (debug "Signup Handler Exception - " e) (bad-request (views/login-page {:signup (.getMessage e)}))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Interceptors (def session-interceptor (let [cookie-key <KEY> (get-in config [:cookie :key])) cookie-name (get-in config [:cookie :name]) options {:store (cookie-store {:key cookie-key}), :cookie-name cookie-name}] {:enter (fn [{:keys [request], :as context}] (let [new-request (session-request request options) user-id (get-in new-request [:session :id]) user (when user-id (try (state/find-user user-id) (catch Exception _ nil))) new-request (assoc new-request :user user)] (assoc context :request new-request))), :leave (fn [{:keys [response request], :as context}] (assoc context :response (session-response response request options)))})) (def log-info-interceptor {:leave (fn [{:keys [request response], :as context}] (let [method-str (-> request (:request-method) (name) (str) (str/upper-case))] (info "\n📫" method-str (:status response) (:uri request)) context))}) (def debug-interceptor {:enter (fn [{:keys [request], :as context}] (when-not (re-find #"^.*\.(jpg|png|gif|svg|ico|css|js)$" (:uri request)) (let [keep-keys [:session :request-method :headers :scheme :user :body :path-params :query-string :server-name :uri :character-encoding :content-type :query-params :websocket? 
:form-params :content-length :server-port :parameters :params :cookies :remote-addr] filtered-request (select-keys request keep-keys) sorted-request (into (sorted-map) filtered-request)] (debug "\n================================================" "\n🐞 REQUEST INTERCEPTOR\n" (pp sorted-request) "================================================"))) context)}) (def error-interceptor {:error (fn [{:keys [request error], :as context}] (debug error) (assoc context :error nil :response (error-page-handler request)))}) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Reitit App (def app-routes [["/" {:handler hot-submissions-page-handler}] ["/assets/*" {:handler (create-resource-handler {:root "public/assets"})}] ["/new" {:handler new-submissions-page-handler}] ["/submit" {:get {:handler submit-page-handler}, :post {:handler create-submission-handler, :parameters {:form [:map [:title string?] [:url string?]]}}}] ["/logout" {:get {:handler logout-handler}}] ["/submissions/:id" {:handler submission-page-handler, :parameters {:path [:map [:id int?]]}}] ["/upvote/:id" {:get {:name ::upvote, :handler upvote-handler, :parameters {:path [:map [:id int?]]}}}] ["/login" {:get {:handler login-page-handler}, :post {:name ::authorize-user, :handler authorize-user-handler, :parameters {:form [:map [:email string?] [:password string?]]}}}] ["/signup" {:post {:name ::signup, :handler signup-handler, :parameters {:form [:map [:email string?] [:password string?]]}}}] ["/comments" {:post {:handler create-comment-handler, :name ::create-comment, :parameters {:form [:map [:submission-id int?] 
[:body string?]]}}}]]) (def router-options {:validate rspec/validate, :exception pretty/exception, :data {:coercion reitit.coercion.malli/coercion}}) (def app (http/ring-handler (http/router app-routes router-options) (routes (redirect-trailing-slash-handler) (create-default-handler {:not-found error-page-handler, :method-not-allowed error-page-handler, :not-acceptable error-page-handler})) {:interceptors [error-interceptor log-info-interceptor (format-interceptor) (parameters-interceptor) (multipart-interceptor) session-interceptor (coerce-request-interceptor) (when-not (config :prod) debug-interceptor) (coerce-response-interceptor)], :executor sieppari/executor}))
true
(ns codes.stel.functional-news.handler (:require [reitit.ring :refer [create-resource-handler routes redirect-trailing-slash-handler create-default-handler]] [reitit.http :as http] [taoensso.timbre :refer [info debug]] [reitit.http.coercion :refer [coerce-request-interceptor coerce-response-interceptor]] [reitit.coercion.malli] [ring.util.response :refer [redirect bad-request]] [codes.stel.functional-news.views :as views] [codes.stel.functional-news.state :as state] [codes.stel.functional-news.util :refer [validate-url validate-email validate-password pp]] [codes.stel.functional-news.config :refer [config]] [clojure.string :as str] [ring.middleware.session :refer [session-request session-response]] [ring.middleware.session.cookie :refer [cookie-store]] [reitit.http.interceptors.muuntaja :refer [format-interceptor]] [reitit.http.interceptors.parameters :refer [parameters-interceptor]] [reitit.http.interceptors.multipart :refer [multipart-interceptor]] [reitit.spec :as rspec] [reitit.dev.pretty :as pretty] [reitit.interceptor.sieppari :as sieppari])) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Helper functions (defn good-html-response [body] {:status 200, :content-type "text/html", :body body}) (defn bad-html-response [body] {:status 400, :content-type "text/html", :body body}) (defn error-page-handler [{:keys [user]}] (bad-html-response (views/not-found user))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; GET request handler functions (defn hot-submissions-page-handler [{:keys [user], :as request}] (let [submissions (state/get-hot-submissions)] (good-html-response (views/submission-list user submissions)))) (defn new-submissions-page-handler [{:keys [user], :as request}] (let [submissions (state/get-new-submissions)] (good-html-response (views/submission-list user submissions)))) (defn login-page-handler [_request] (good-html-response (views/login-page))) (defn submit-page-handler [{:keys [user], :as request}] (if user (good-html-response (views/submit-page user nil)) 
(redirect "/login" :see-other))) (defn submission-page-handler [{:keys [user], :as request}] (try (let [submission-id (get-in request [:parameters :path :id]) submission (state/find-submission submission-id) comments (state/find-comments submission-id)] (good-html-response (views/submission-page user submission comments))) (catch Exception _ (error-page-handler request)))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; POST request handler functions (defn authorize-user-handler [request] (try (let [email (get-in request [:parameters :form :email]) password (get-in request [:parameters :form :password]) user (state/auth-user email password) session-body {:id (:users/id user)}] (assoc (redirect "/" :see-other) :session session-body)) (catch Exception _ (bad-html-response (views/login-page {:login "We didn't recognize that email password combination ;-;"}))))) (defn create-comment-handler [request] (try (let [user-id (get-in request [:session :id]) submission-id (get-in request [:parameters :form :submission-id]) body (get-in request [:parameters :form :body]) _result (state/create-comment user-id submission-id body)] (redirect (str "/submissions/" submission-id) :see-other)) (catch Exception _ (redirect "/login" :see-other)))) (defn create-submission-handler [{:keys [user], :as request}] (try (let [user-id (get-in request [:session :id]) title (get-in request [:parameters :form :title]) url (get-in request [:parameters :form :url]) _ (validate-url url) submission (state/create-submission title url user-id) submission-id (:submissions/id submission)] (redirect (str "/submissions/" submission-id) :see-other)) (catch Exception e (bad-request (views/submit-page user (.getMessage e)))))) (defn logout-handler [_request] (assoc (redirect "/") :session nil)) (defn upvote-handler [{:keys [user], :as request}] (if-not user (redirect "/login" :see-other) (try (let [submission-id (get-in request [:parameters :path :id]) user-id (get-in request [:session :id]) location (get-in request 
[:headers "referer"]) _ (state/create-upvote user-id submission-id)] (redirect location :see-other)) (catch Exception _ (redirect "/" :see-other))))) (defn signup-handler [request] (try (let [email (get-in request [:parameters :form :email]) password (get-in request [:parameters :form :password]) _ (validate-email email) _ (validate-password password) result (state/create-user email password) session-body {:id (:users/id result)}] (assoc (redirect "/" :see-other) :session session-body)) (catch Exception e (debug "Signup Handler Exception - " e) (bad-request (views/login-page {:signup (.getMessage e)}))))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Interceptors (def session-interceptor (let [cookie-key PI:KEY:<KEY>END_PI (get-in config [:cookie :key])) cookie-name (get-in config [:cookie :name]) options {:store (cookie-store {:key cookie-key}), :cookie-name cookie-name}] {:enter (fn [{:keys [request], :as context}] (let [new-request (session-request request options) user-id (get-in new-request [:session :id]) user (when user-id (try (state/find-user user-id) (catch Exception _ nil))) new-request (assoc new-request :user user)] (assoc context :request new-request))), :leave (fn [{:keys [response request], :as context}] (assoc context :response (session-response response request options)))})) (def log-info-interceptor {:leave (fn [{:keys [request response], :as context}] (let [method-str (-> request (:request-method) (name) (str) (str/upper-case))] (info "\n📫" method-str (:status response) (:uri request)) context))}) (def debug-interceptor {:enter (fn [{:keys [request], :as context}] (when-not (re-find #"^.*\.(jpg|png|gif|svg|ico|css|js)$" (:uri request)) (let [keep-keys [:session :request-method :headers :scheme :user :body :path-params :query-string :server-name :uri :character-encoding :content-type :query-params :websocket? 
:form-params :content-length :server-port :parameters :params :cookies :remote-addr] filtered-request (select-keys request keep-keys) sorted-request (into (sorted-map) filtered-request)] (debug "\n================================================" "\n🐞 REQUEST INTERCEPTOR\n" (pp sorted-request) "================================================"))) context)}) (def error-interceptor {:error (fn [{:keys [request error], :as context}] (debug error) (assoc context :error nil :response (error-page-handler request)))}) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Reitit App (def app-routes [["/" {:handler hot-submissions-page-handler}] ["/assets/*" {:handler (create-resource-handler {:root "public/assets"})}] ["/new" {:handler new-submissions-page-handler}] ["/submit" {:get {:handler submit-page-handler}, :post {:handler create-submission-handler, :parameters {:form [:map [:title string?] [:url string?]]}}}] ["/logout" {:get {:handler logout-handler}}] ["/submissions/:id" {:handler submission-page-handler, :parameters {:path [:map [:id int?]]}}] ["/upvote/:id" {:get {:name ::upvote, :handler upvote-handler, :parameters {:path [:map [:id int?]]}}}] ["/login" {:get {:handler login-page-handler}, :post {:name ::authorize-user, :handler authorize-user-handler, :parameters {:form [:map [:email string?] [:password string?]]}}}] ["/signup" {:post {:name ::signup, :handler signup-handler, :parameters {:form [:map [:email string?] [:password string?]]}}}] ["/comments" {:post {:handler create-comment-handler, :name ::create-comment, :parameters {:form [:map [:submission-id int?] 
[:body string?]]}}}]]) (def router-options {:validate rspec/validate, :exception pretty/exception, :data {:coercion reitit.coercion.malli/coercion}}) (def app (http/ring-handler (http/router app-routes router-options) (routes (redirect-trailing-slash-handler) (create-default-handler {:not-found error-page-handler, :method-not-allowed error-page-handler, :not-acceptable error-page-handler})) {:interceptors [error-interceptor log-info-interceptor (format-interceptor) (parameters-interceptor) (multipart-interceptor) session-interceptor (coerce-request-interceptor) (when-not (config :prod) debug-interceptor) (coerce-response-interceptor)], :executor sieppari/executor}))
[ { "context": "(comment \n re-core, Copyright 2012 Ronen Narkis, narkisr.com\n Licensed under the Apache License", "end": 49, "score": 0.9998838305473328, "start": 37, "tag": "NAME", "value": "Ronen Narkis" }, { "context": "omment \n re-core, Copyright 2012 Ronen Narkis, narkisr.com\n Licensed under the Apache License,\n Version ", "end": 62, "score": 0.8008121848106384, "start": 52, "tag": "EMAIL", "value": "arkisr.com" } ]
src/re_core/persistency/actions.clj
celestial-ops/core
1
(comment re-core, Copyright 2012 Ronen Narkis, narkisr.com Licensed under the Apache License, Version 2.0 (the "License") you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.) (ns re-core.persistency.actions "Actions persistency" (:refer-clojure :exclude [name type]) (:require [puny.migrations :refer (Migration register)] [clojure.string :refer (join escape)] [re-core.persistency.common :as c] [re-core.model :refer (figure-rem)] [slingshot.slingshot :refer [throw+]] [subs.core :as subs :refer (validate! validation every-kv combine)] [clojure.core.strint :refer (<<)] [puny.core :refer (entity)])) (declare unique-name with-provided) (entity action :indices [operates-on] :intercept {:create [unique-name with-provided] :update [with-provided]}) (defn remoter [action] (get action (figure-rem action))) (defn add-provided [action] "appends action expected arguments derived from args strings" (if (:args (remoter action)) action ; not migrated yet (reduce (fn [m e] (assoc-in m [(figure-rem m) e :provided] (remove #{"target" "hostname"} (c/args-of (join " " ((remoter m) e :args)))))) action (keys (remoter action))))) (def with-provided (partial c/with-transform add-provided)) (defn find-action-for [name type] (let [ids (get-action-index :operates-on type) actions (map #(-> % Long/parseLong get-action) ids)] (first (filter #(= (-> % :name) name) actions)))) (defn unique-name [f & [{:keys [name operates-on]} & r :as args]] (when (and name operates-on (find-action-for name operates-on)) (throw+ {:type ::duplicated-action } (<< "action for ~{operates-on} named ~{name} already 
exists"))) (apply f args)) (validation :git-based* (every-kv { :args #{:required :Vector} :timeout #{:required :Integer} })) (def action-validation {:operates-on #{:required :String :type-exists} :name #{:required :String} :src #{:required :String}}) (defn validate-action [action] (let [remoter-validation {(figure-rem action) #{:required :git-based*}}] (validate! action (combine action-validation remoter-validation) :error ::invalid-action))) (defrecord Timeout [identifier] Migration (apply- [this] (doseq [id (all-actions)] (when-not ((get-action id) :timeout) (update-action id (assoc (get-action id) :timeout (* 1000 60 15)))))) (rollback [this])) (defn register-migrations [] (register :actions (Timeout. :default-timeout)))
116440
(comment re-core, Copyright 2012 <NAME>, n<EMAIL> Licensed under the Apache License, Version 2.0 (the "License") you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.) (ns re-core.persistency.actions "Actions persistency" (:refer-clojure :exclude [name type]) (:require [puny.migrations :refer (Migration register)] [clojure.string :refer (join escape)] [re-core.persistency.common :as c] [re-core.model :refer (figure-rem)] [slingshot.slingshot :refer [throw+]] [subs.core :as subs :refer (validate! validation every-kv combine)] [clojure.core.strint :refer (<<)] [puny.core :refer (entity)])) (declare unique-name with-provided) (entity action :indices [operates-on] :intercept {:create [unique-name with-provided] :update [with-provided]}) (defn remoter [action] (get action (figure-rem action))) (defn add-provided [action] "appends action expected arguments derived from args strings" (if (:args (remoter action)) action ; not migrated yet (reduce (fn [m e] (assoc-in m [(figure-rem m) e :provided] (remove #{"target" "hostname"} (c/args-of (join " " ((remoter m) e :args)))))) action (keys (remoter action))))) (def with-provided (partial c/with-transform add-provided)) (defn find-action-for [name type] (let [ids (get-action-index :operates-on type) actions (map #(-> % Long/parseLong get-action) ids)] (first (filter #(= (-> % :name) name) actions)))) (defn unique-name [f & [{:keys [name operates-on]} & r :as args]] (when (and name operates-on (find-action-for name operates-on)) (throw+ {:type ::duplicated-action } (<< "action for ~{operates-on} named ~{name} already exists"))) (apply 
f args)) (validation :git-based* (every-kv { :args #{:required :Vector} :timeout #{:required :Integer} })) (def action-validation {:operates-on #{:required :String :type-exists} :name #{:required :String} :src #{:required :String}}) (defn validate-action [action] (let [remoter-validation {(figure-rem action) #{:required :git-based*}}] (validate! action (combine action-validation remoter-validation) :error ::invalid-action))) (defrecord Timeout [identifier] Migration (apply- [this] (doseq [id (all-actions)] (when-not ((get-action id) :timeout) (update-action id (assoc (get-action id) :timeout (* 1000 60 15)))))) (rollback [this])) (defn register-migrations [] (register :actions (Timeout. :default-timeout)))
true
(comment re-core, Copyright 2012 PI:NAME:<NAME>END_PI, nPI:EMAIL:<EMAIL>END_PI Licensed under the Apache License, Version 2.0 (the "License") you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.) (ns re-core.persistency.actions "Actions persistency" (:refer-clojure :exclude [name type]) (:require [puny.migrations :refer (Migration register)] [clojure.string :refer (join escape)] [re-core.persistency.common :as c] [re-core.model :refer (figure-rem)] [slingshot.slingshot :refer [throw+]] [subs.core :as subs :refer (validate! validation every-kv combine)] [clojure.core.strint :refer (<<)] [puny.core :refer (entity)])) (declare unique-name with-provided) (entity action :indices [operates-on] :intercept {:create [unique-name with-provided] :update [with-provided]}) (defn remoter [action] (get action (figure-rem action))) (defn add-provided [action] "appends action expected arguments derived from args strings" (if (:args (remoter action)) action ; not migrated yet (reduce (fn [m e] (assoc-in m [(figure-rem m) e :provided] (remove #{"target" "hostname"} (c/args-of (join " " ((remoter m) e :args)))))) action (keys (remoter action))))) (def with-provided (partial c/with-transform add-provided)) (defn find-action-for [name type] (let [ids (get-action-index :operates-on type) actions (map #(-> % Long/parseLong get-action) ids)] (first (filter #(= (-> % :name) name) actions)))) (defn unique-name [f & [{:keys [name operates-on]} & r :as args]] (when (and name operates-on (find-action-for name operates-on)) (throw+ {:type ::duplicated-action } (<< "action for ~{operates-on} named 
~{name} already exists"))) (apply f args)) (validation :git-based* (every-kv { :args #{:required :Vector} :timeout #{:required :Integer} })) (def action-validation {:operates-on #{:required :String :type-exists} :name #{:required :String} :src #{:required :String}}) (defn validate-action [action] (let [remoter-validation {(figure-rem action) #{:required :git-based*}}] (validate! action (combine action-validation remoter-validation) :error ::invalid-action))) (defrecord Timeout [identifier] Migration (apply- [this] (doseq [id (all-actions)] (when-not ((get-action id) :timeout) (update-action id (assoc (get-action id) :timeout (* 1000 60 15)))))) (rollback [this])) (defn register-migrations [] (register :actions (Timeout. :default-timeout)))
[ { "context": "mplementations\"\n\n (key-factory)\n => (\"DSA\" \"DiffieHellman\" \"EC\" \"RSA\")\n\n (key-factory \"RSA\")\n => j", "end": 1212, "score": 0.6353322267532349, "start": 1210, "tag": "NAME", "value": "ie" }, { "context": "ementations\"\n\n (key-factory)\n => (\"DSA\" \"DiffieHellman\" \"EC\" \"RSA\")\n\n (key-factory \"RSA\")\n => java.", "end": 1216, "score": 0.6613558530807495, "start": 1213, "tag": "NAME", "value": "ell" }, { "context": "ementations\"\n\n (key-pair-generator)\n => (\"DSA\" \"DiffieHellman\" \"EC\" \"RSA\")\n\n (key-pair-generator \"RSA\")\n => j", "end": 1735, "score": 0.9806656837463379, "start": 1722, "tag": "NAME", "value": "DiffieHellman" } ]
test/hara/security/base/provider_test.clj
zcaudate/hara
309
(ns hara.security.base.provider-test (:use hara.test) (:require [hara.security.base.provider :refer :all])) ^{:refer hara.security.base.provider/list-providers :added "3.0"} (comment "list all security providers" (list-providers) => ["Apple" "SUN" "SunEC" "SunJCE" "SunJGSS" "SunJSSE" "SunPCSC" "SunRsaSign" "SunSASL" "XMLDSig"]) ^{:refer hara.security.base.provider/sort-services :added "3.0"} (comment "filters and sorts the services by type") ^{:refer hara.security.base.provider/list-services :added "3.0"} (comment "lists all services that are available" (list-services) => ("AlgorithmParameterGenerator" "AlgorithmParameters" ...) (list-services "Cipher") => ("AES" "AESWrap" "AESWrap_128" ...) (list-services "KeyGenerator" "SunJCE") => ("AES" "ARCFOUR" "Blowfish" "DES" "DESede" ...)) ^{:refer hara.security.base.provider/cipher :added "3.0"} (comment "lists or returns available `Cipher` implementations" (cipher) => ("AES" "AESWrap" "AESWrap_128" ...) (cipher "AES") => javax.crypto.Cipher) ^{:refer hara.security.base.provider/key-factory :added "3.0"} (comment "lists or returns available `KeyFactory` implementations" (key-factory) => ("DSA" "DiffieHellman" "EC" "RSA") (key-factory "RSA") => java.security.KeyFactory) ^{:refer hara.security.base.provider/key-generator :added "3.0"} (comment "lists or returns available `KeyGenerator` implementations" (key-generator) => ("AES" "ARCFOUR" "Blowfish" ...) 
(key-generator "Blowfish") => javax.crypto.KeyGenerator) ^{:refer hara.security.base.provider/key-pair-generator :added "3.0"} (comment "lists or returns available `KeyPairGenerator` implementations" (key-pair-generator) => ("DSA" "DiffieHellman" "EC" "RSA") (key-pair-generator "RSA") => java.security.KeyPairGenerator) ^{:refer hara.security.base.provider/key-store :added "3.0"} (comment "lists or returns available `KeyStore` implementations" (key-store) => ("CaseExactJKS" "DKS" "JCEKS" "JKS" "KeychainStore" "PKCS12") (key-store "JKS") => java.security.KeyStore) ^{:refer hara.security.base.provider/mac :added "3.0"} (comment "lists or returns available `Mac` implementations" (mac) => ("HmacMD5" "HmacPBESHA1" "HmacSHA1" ...) (mac "HmacMD5") => javax.crypto.Mac) ^{:refer hara.security.base.provider/message-digest :added "3.0"} (comment "lists or returns available `MessageDigest` implementations" (message-digest) => ("MD2" "MD5" "SHA" "SHA-224" "SHA-256" "SHA-384" "SHA-512") (message-digest "MD2") => java.security.MessageDigest$Delegate) ^{:refer hara.security.base.provider/signature :added "3.0"} (comment "lists or returns available `Signature` implementations" (signature) => ("MD2withRSA" "MD5andSHA1withRSA" "MD5withRSA" ...) (signature "MD2withRSA") => java.security.Signature$Delegate)
69051
(ns hara.security.base.provider-test (:use hara.test) (:require [hara.security.base.provider :refer :all])) ^{:refer hara.security.base.provider/list-providers :added "3.0"} (comment "list all security providers" (list-providers) => ["Apple" "SUN" "SunEC" "SunJCE" "SunJGSS" "SunJSSE" "SunPCSC" "SunRsaSign" "SunSASL" "XMLDSig"]) ^{:refer hara.security.base.provider/sort-services :added "3.0"} (comment "filters and sorts the services by type") ^{:refer hara.security.base.provider/list-services :added "3.0"} (comment "lists all services that are available" (list-services) => ("AlgorithmParameterGenerator" "AlgorithmParameters" ...) (list-services "Cipher") => ("AES" "AESWrap" "AESWrap_128" ...) (list-services "KeyGenerator" "SunJCE") => ("AES" "ARCFOUR" "Blowfish" "DES" "DESede" ...)) ^{:refer hara.security.base.provider/cipher :added "3.0"} (comment "lists or returns available `Cipher` implementations" (cipher) => ("AES" "AESWrap" "AESWrap_128" ...) (cipher "AES") => javax.crypto.Cipher) ^{:refer hara.security.base.provider/key-factory :added "3.0"} (comment "lists or returns available `KeyFactory` implementations" (key-factory) => ("DSA" "Diff<NAME>H<NAME>man" "EC" "RSA") (key-factory "RSA") => java.security.KeyFactory) ^{:refer hara.security.base.provider/key-generator :added "3.0"} (comment "lists or returns available `KeyGenerator` implementations" (key-generator) => ("AES" "ARCFOUR" "Blowfish" ...) 
(key-generator "Blowfish") => javax.crypto.KeyGenerator) ^{:refer hara.security.base.provider/key-pair-generator :added "3.0"} (comment "lists or returns available `KeyPairGenerator` implementations" (key-pair-generator) => ("DSA" "<NAME>" "EC" "RSA") (key-pair-generator "RSA") => java.security.KeyPairGenerator) ^{:refer hara.security.base.provider/key-store :added "3.0"} (comment "lists or returns available `KeyStore` implementations" (key-store) => ("CaseExactJKS" "DKS" "JCEKS" "JKS" "KeychainStore" "PKCS12") (key-store "JKS") => java.security.KeyStore) ^{:refer hara.security.base.provider/mac :added "3.0"} (comment "lists or returns available `Mac` implementations" (mac) => ("HmacMD5" "HmacPBESHA1" "HmacSHA1" ...) (mac "HmacMD5") => javax.crypto.Mac) ^{:refer hara.security.base.provider/message-digest :added "3.0"} (comment "lists or returns available `MessageDigest` implementations" (message-digest) => ("MD2" "MD5" "SHA" "SHA-224" "SHA-256" "SHA-384" "SHA-512") (message-digest "MD2") => java.security.MessageDigest$Delegate) ^{:refer hara.security.base.provider/signature :added "3.0"} (comment "lists or returns available `Signature` implementations" (signature) => ("MD2withRSA" "MD5andSHA1withRSA" "MD5withRSA" ...) (signature "MD2withRSA") => java.security.Signature$Delegate)
true
(ns hara.security.base.provider-test (:use hara.test) (:require [hara.security.base.provider :refer :all])) ^{:refer hara.security.base.provider/list-providers :added "3.0"} (comment "list all security providers" (list-providers) => ["Apple" "SUN" "SunEC" "SunJCE" "SunJGSS" "SunJSSE" "SunPCSC" "SunRsaSign" "SunSASL" "XMLDSig"]) ^{:refer hara.security.base.provider/sort-services :added "3.0"} (comment "filters and sorts the services by type") ^{:refer hara.security.base.provider/list-services :added "3.0"} (comment "lists all services that are available" (list-services) => ("AlgorithmParameterGenerator" "AlgorithmParameters" ...) (list-services "Cipher") => ("AES" "AESWrap" "AESWrap_128" ...) (list-services "KeyGenerator" "SunJCE") => ("AES" "ARCFOUR" "Blowfish" "DES" "DESede" ...)) ^{:refer hara.security.base.provider/cipher :added "3.0"} (comment "lists or returns available `Cipher` implementations" (cipher) => ("AES" "AESWrap" "AESWrap_128" ...) (cipher "AES") => javax.crypto.Cipher) ^{:refer hara.security.base.provider/key-factory :added "3.0"} (comment "lists or returns available `KeyFactory` implementations" (key-factory) => ("DSA" "DiffPI:NAME:<NAME>END_PIHPI:NAME:<NAME>END_PIman" "EC" "RSA") (key-factory "RSA") => java.security.KeyFactory) ^{:refer hara.security.base.provider/key-generator :added "3.0"} (comment "lists or returns available `KeyGenerator` implementations" (key-generator) => ("AES" "ARCFOUR" "Blowfish" ...) 
(key-generator "Blowfish") => javax.crypto.KeyGenerator) ^{:refer hara.security.base.provider/key-pair-generator :added "3.0"} (comment "lists or returns available `KeyPairGenerator` implementations" (key-pair-generator) => ("DSA" "PI:NAME:<NAME>END_PI" "EC" "RSA") (key-pair-generator "RSA") => java.security.KeyPairGenerator) ^{:refer hara.security.base.provider/key-store :added "3.0"} (comment "lists or returns available `KeyStore` implementations" (key-store) => ("CaseExactJKS" "DKS" "JCEKS" "JKS" "KeychainStore" "PKCS12") (key-store "JKS") => java.security.KeyStore) ^{:refer hara.security.base.provider/mac :added "3.0"} (comment "lists or returns available `Mac` implementations" (mac) => ("HmacMD5" "HmacPBESHA1" "HmacSHA1" ...) (mac "HmacMD5") => javax.crypto.Mac) ^{:refer hara.security.base.provider/message-digest :added "3.0"} (comment "lists or returns available `MessageDigest` implementations" (message-digest) => ("MD2" "MD5" "SHA" "SHA-224" "SHA-256" "SHA-384" "SHA-512") (message-digest "MD2") => java.security.MessageDigest$Delegate) ^{:refer hara.security.base.provider/signature :added "3.0"} (comment "lists or returns available `Signature` implementations" (signature) => ("MD2withRSA" "MD5andSHA1withRSA" "MD5withRSA" ...) (signature "MD2withRSA") => java.security.Signature$Delegate)
[ { "context": "\n (render \\\"Hello {first-name}!\\\" {:first-name \\\"John\\\"})\n => Hello John!\n\n (render \\\"Hello {[user fi", "end": 1700, "score": 0.9991718530654907, "start": 1696, "tag": "NAME", "value": "John" }, { "context": "{first-name}!\\\" {:first-name \\\"John\\\"})\n => Hello John!\n\n (render \\\"Hello {[user first-name]}!\\\" {:user", "end": 1720, "score": 0.9996908903121948, "start": 1716, "tag": "NAME", "value": "John" }, { "context": "ello {[user first-name]}!\\\" {:user {:first-name \\\"John\\\"}})\n => Hello John!\"\n [^String template contex", "end": 1790, "score": 0.9995279312133789, "start": 1786, "tag": "NAME", "value": "John" }, { "context": "me]}!\\\" {:user {:first-name \\\"John\\\"}})\n => Hello John!\"\n [^String template context]\n {:pre [template]", "end": 1811, "score": 0.9995279908180237, "start": 1807, "tag": "NAME", "value": "John" } ]
src/tekton_watcher/misc.clj
nubank/tekton-watcher
3
(ns tekton-watcher.misc (:require [clojure.edn :as edn] [clojure.spec.alpha :as s] [clojure.string :as string]) (:import java.io.File java.text.DecimalFormat [java.time Duration Instant] java.time.format.DateTimeFormatter java.time.temporal.TemporalQuery java.util.Locale)) (def ^:private digits-and-letters (keep (comp #(when (Character/isLetterOrDigit %) %) char) (range 48 123))) (defn correlation-id "Returns a random string composed of numbers ([0-9]) and letters ([a-zA-Z]) to be used as a correlation identifier." [] (apply str (repeatedly 7 #(rand-nth digits-and-letters)))) (defn file-exists? "Returns true if the file exists or false otherwise." [^File file] (.exists file)) (defn map-vals "Applies the function f to each value in the map m and return the resulting map." [f m] (into {} (map (fn [[k v]] [k (f v)]) m))) (defn read-edn "Reads an EDN object and parses it as Clojure data. input can be any object supported by clojure.core/slurp." [input] (edn/read-string (slurp input))) (defn render "Given a template string containing one or more placeholders between curly braces and a context map of arbitrary values whose keys should match the placeholders, returns a new string where placeholders were replaced with values taken from matching keys in the context map. Placeholders can refer either to keys in the root level of the context map or to keys in nested data structures. In the former case, use {key} and in the later one, {[key1 key2]}. Example: (render \"Hello {first-name}!\" {:first-name \"John\"}) => Hello John! (render \"Hello {[user first-name]}!\" {:user {:first-name \"John\"}}) => Hello John!" [^String template context] {:pre [template]} (letfn [(parse [^String path] (let [ks (edn/read-string path)] (if (vector? 
ks) (map keyword ks) [(keyword ks)])))] (string/replace template #"\{([^\}]+)\}" (fn [match] (str (get-in context (parse (last match)) (first match))))))) (defn parse-input "Given a spec and an arbitrary data structure as the input, tries to conform the input using the supplied spec. Returns the conformed data or throws an exception if the data doesn't conform to the spec." [spec input] (let [result (s/conform spec input)] (if-not (s/invalid? result) result (throw (ex-info "Data doesn't conform to the spec" (s/explain-data spec input)))))) (def ^:private instant-query "Implements java.time.temporal.TemporalQuery by delegating to java.time.Instant/from." (reify TemporalQuery (queryFrom [this temporal] (Instant/from temporal)))) (defn ^Duration duration "Returns a java.time.Duration object representing the duration of the task run in question." [task-run] (let [parse (fn [^String instant] (.. DateTimeFormatter ISO_INSTANT (parse instant instant-query))) ^String start-time (get-in task-run [:status :startTime]) ^String end-time (get-in task-run [:status :completionTime])] (Duration/between (parse start-time) (parse end-time)))) (def ^:private formatter "Instance of java.text.Decimalformat used internally to format decimal values." (let [decimal-format (DecimalFormat/getInstance (Locale/ENGLISH))] (.applyPattern decimal-format "#.##") decimal-format)) (defn ^String display-duration "Returns a friendly representation of the duration of the task-run in question." [task-run] (let [format-duration (fn [value time-unit] (str (.format formatter value) " " (if (= (float value) 1.0) (name time-unit) (str (name time-unit) "s")))) millis (.toMillis (duration task-run))] (cond (< millis 60000) (format-duration (float (/ millis 1000)) :second) (< millis 3600000) (format-duration (float (/ millis 60000)) :minute) :else (format-duration (float (/ millis 3600000)) :hour))))
53981
(ns tekton-watcher.misc (:require [clojure.edn :as edn] [clojure.spec.alpha :as s] [clojure.string :as string]) (:import java.io.File java.text.DecimalFormat [java.time Duration Instant] java.time.format.DateTimeFormatter java.time.temporal.TemporalQuery java.util.Locale)) (def ^:private digits-and-letters (keep (comp #(when (Character/isLetterOrDigit %) %) char) (range 48 123))) (defn correlation-id "Returns a random string composed of numbers ([0-9]) and letters ([a-zA-Z]) to be used as a correlation identifier." [] (apply str (repeatedly 7 #(rand-nth digits-and-letters)))) (defn file-exists? "Returns true if the file exists or false otherwise." [^File file] (.exists file)) (defn map-vals "Applies the function f to each value in the map m and return the resulting map." [f m] (into {} (map (fn [[k v]] [k (f v)]) m))) (defn read-edn "Reads an EDN object and parses it as Clojure data. input can be any object supported by clojure.core/slurp." [input] (edn/read-string (slurp input))) (defn render "Given a template string containing one or more placeholders between curly braces and a context map of arbitrary values whose keys should match the placeholders, returns a new string where placeholders were replaced with values taken from matching keys in the context map. Placeholders can refer either to keys in the root level of the context map or to keys in nested data structures. In the former case, use {key} and in the later one, {[key1 key2]}. Example: (render \"Hello {first-name}!\" {:first-name \"<NAME>\"}) => Hello <NAME>! (render \"Hello {[user first-name]}!\" {:user {:first-name \"<NAME>\"}}) => Hello <NAME>!" [^String template context] {:pre [template]} (letfn [(parse [^String path] (let [ks (edn/read-string path)] (if (vector? 
ks) (map keyword ks) [(keyword ks)])))] (string/replace template #"\{([^\}]+)\}" (fn [match] (str (get-in context (parse (last match)) (first match))))))) (defn parse-input "Given a spec and an arbitrary data structure as the input, tries to conform the input using the supplied spec. Returns the conformed data or throws an exception if the data doesn't conform to the spec." [spec input] (let [result (s/conform spec input)] (if-not (s/invalid? result) result (throw (ex-info "Data doesn't conform to the spec" (s/explain-data spec input)))))) (def ^:private instant-query "Implements java.time.temporal.TemporalQuery by delegating to java.time.Instant/from." (reify TemporalQuery (queryFrom [this temporal] (Instant/from temporal)))) (defn ^Duration duration "Returns a java.time.Duration object representing the duration of the task run in question." [task-run] (let [parse (fn [^String instant] (.. DateTimeFormatter ISO_INSTANT (parse instant instant-query))) ^String start-time (get-in task-run [:status :startTime]) ^String end-time (get-in task-run [:status :completionTime])] (Duration/between (parse start-time) (parse end-time)))) (def ^:private formatter "Instance of java.text.Decimalformat used internally to format decimal values." (let [decimal-format (DecimalFormat/getInstance (Locale/ENGLISH))] (.applyPattern decimal-format "#.##") decimal-format)) (defn ^String display-duration "Returns a friendly representation of the duration of the task-run in question." [task-run] (let [format-duration (fn [value time-unit] (str (.format formatter value) " " (if (= (float value) 1.0) (name time-unit) (str (name time-unit) "s")))) millis (.toMillis (duration task-run))] (cond (< millis 60000) (format-duration (float (/ millis 1000)) :second) (< millis 3600000) (format-duration (float (/ millis 60000)) :minute) :else (format-duration (float (/ millis 3600000)) :hour))))
true
(ns tekton-watcher.misc (:require [clojure.edn :as edn] [clojure.spec.alpha :as s] [clojure.string :as string]) (:import java.io.File java.text.DecimalFormat [java.time Duration Instant] java.time.format.DateTimeFormatter java.time.temporal.TemporalQuery java.util.Locale)) (def ^:private digits-and-letters (keep (comp #(when (Character/isLetterOrDigit %) %) char) (range 48 123))) (defn correlation-id "Returns a random string composed of numbers ([0-9]) and letters ([a-zA-Z]) to be used as a correlation identifier." [] (apply str (repeatedly 7 #(rand-nth digits-and-letters)))) (defn file-exists? "Returns true if the file exists or false otherwise." [^File file] (.exists file)) (defn map-vals "Applies the function f to each value in the map m and return the resulting map." [f m] (into {} (map (fn [[k v]] [k (f v)]) m))) (defn read-edn "Reads an EDN object and parses it as Clojure data. input can be any object supported by clojure.core/slurp." [input] (edn/read-string (slurp input))) (defn render "Given a template string containing one or more placeholders between curly braces and a context map of arbitrary values whose keys should match the placeholders, returns a new string where placeholders were replaced with values taken from matching keys in the context map. Placeholders can refer either to keys in the root level of the context map or to keys in nested data structures. In the former case, use {key} and in the later one, {[key1 key2]}. Example: (render \"Hello {first-name}!\" {:first-name \"PI:NAME:<NAME>END_PI\"}) => Hello PI:NAME:<NAME>END_PI! (render \"Hello {[user first-name]}!\" {:user {:first-name \"PI:NAME:<NAME>END_PI\"}}) => Hello PI:NAME:<NAME>END_PI!" [^String template context] {:pre [template]} (letfn [(parse [^String path] (let [ks (edn/read-string path)] (if (vector? 
ks) (map keyword ks) [(keyword ks)])))] (string/replace template #"\{([^\}]+)\}" (fn [match] (str (get-in context (parse (last match)) (first match))))))) (defn parse-input "Given a spec and an arbitrary data structure as the input, tries to conform the input using the supplied spec. Returns the conformed data or throws an exception if the data doesn't conform to the spec." [spec input] (let [result (s/conform spec input)] (if-not (s/invalid? result) result (throw (ex-info "Data doesn't conform to the spec" (s/explain-data spec input)))))) (def ^:private instant-query "Implements java.time.temporal.TemporalQuery by delegating to java.time.Instant/from." (reify TemporalQuery (queryFrom [this temporal] (Instant/from temporal)))) (defn ^Duration duration "Returns a java.time.Duration object representing the duration of the task run in question." [task-run] (let [parse (fn [^String instant] (.. DateTimeFormatter ISO_INSTANT (parse instant instant-query))) ^String start-time (get-in task-run [:status :startTime]) ^String end-time (get-in task-run [:status :completionTime])] (Duration/between (parse start-time) (parse end-time)))) (def ^:private formatter "Instance of java.text.Decimalformat used internally to format decimal values." (let [decimal-format (DecimalFormat/getInstance (Locale/ENGLISH))] (.applyPattern decimal-format "#.##") decimal-format)) (defn ^String display-duration "Returns a friendly representation of the duration of the task-run in question." [task-run] (let [format-duration (fn [value time-unit] (str (.format formatter value) " " (if (= (float value) 1.0) (name time-unit) (str (name time-unit) "s")))) millis (.toMillis (duration task-run))] (cond (< millis 60000) (format-duration (float (/ millis 1000)) :second) (< millis 3600000) (format-duration (float (/ millis 60000)) :minute) :else (format-duration (float (/ millis 3600000)) :hour))))
[ { "context": " (k/session test-app)\n (steps/register \"existing@user.com\" \"password\")\n (steps/sign-out)\n ", "end": 2086, "score": 0.9999158978462219, "start": 2069, "tag": "EMAIL", "value": "existing@user.com" }, { "context": ")\n (steps/register \"existing@user.com\" \"password\")\n (steps/sign-out)\n (k/visit", "end": 2097, "score": 0.9936698079109192, "start": 2089, "tag": "PASSWORD", "value": "password" }, { "context": "kc/check-and-fill-in ks/registration-email-input \"existing@user.com\")\n (kc/check-and-fill-in ks/registratio", "end": 2232, "score": 0.9999176859855652, "start": 2215, "tag": "EMAIL", "value": "existing@user.com" }, { "context": "check-and-fill-in ks/registration-password-input \"password\")\n (kc/check-and-press ks/registration-", "end": 2308, "score": 0.9978693723678589, "start": 2300, "tag": "PASSWORD", "value": "password" }, { "context": " (k/session test-app)\n (steps/register \"Frank\" \"Lasty\" \"email@server.com\" \"valid-password\")\n ", "end": 2576, "score": 0.9998371601104736, "start": 2571, "tag": "NAME", "value": "Frank" }, { "context": "ion test-app)\n (steps/register \"Frank\" \"Lasty\" \"email@server.com\" \"valid-password\")\n ", "end": 2584, "score": 0.9997696876525879, "start": 2579, "tag": "NAME", "value": "Lasty" }, { "context": "-app)\n (steps/register \"Frank\" \"Lasty\" \"email@server.com\" \"valid-password\")\n (kc/check-and-follo", "end": 2603, "score": 0.999791145324707, "start": 2587, "tag": "EMAIL", "value": "email@server.com" }, { "context": "teps/register \"Frank\" \"Lasty\" \"email@server.com\" \"valid-password\")\n (kc/check-and-follow-redirect)\n ", "end": 2620, "score": 0.9933056235313416, "start": 2606, "tag": "PASSWORD", "value": "valid-password" }, { "context": "ctor-includes-content [ks/profile-created-flash] \"email@server.com\")\n\n (k/visit \"/profile\")\n (kc", "end": 2833, "score": 0.9998014569282532, "start": 2817, "tag": "EMAIL", "value": "email@server.com" }, { "context": "des-content 
[ks/profile-page-profile-card-email] \"email@server.com\")\n (kc/selector-includes-content [ks/pr", "end": 3031, "score": 0.9998241662979126, "start": 3015, "tag": "EMAIL", "value": "email@server.com" }, { "context": "udes-content [ks/profile-page-profile-card-name] \"Frank Lasty\")\n (kc/selector-has-attribute-with-cont", "end": 3123, "score": 0.9997898936271667, "start": 3112, "tag": "NAME", "value": "Frank Lasty" }, { "context": "sion test-app)\n (steps/accept-invite \"Bob\" \"Invitee\" \"valid-password\" invitation-store \"ema", "end": 3453, "score": 0.9987069368362427, "start": 3450, "tag": "NAME", "value": "Bob" }, { "context": "est-app)\n (steps/accept-invite \"Bob\" \"Invitee\" \"valid-password\" invitation-store \"email-1@serve", "end": 3463, "score": 0.8615220785140991, "start": 3456, "tag": "NAME", "value": "Invitee" }, { "context": " (steps/accept-invite \"Bob\" \"Invitee\" \"valid-password\" invitation-store \"email-1@server.com\" (test-time", "end": 3480, "score": 0.9887523651123047, "start": 3466, "tag": "PASSWORD", "value": "valid-password" }, { "context": "Bob\" \"Invitee\" \"valid-password\" invitation-store \"email-1@server.com\" (test-time/new-stub-clock 0) 7)\n (kc", "end": 3518, "score": 0.9996126890182495, "start": 3500, "tag": "EMAIL", "value": "email-1@server.com" }, { "context": "ctor-includes-content [ks/profile-created-flash] \"email-1@server.com\"))))\n\n(facts \"User is redirected to index page wh", "end": 3770, "score": 0.9997650384902954, "start": 3752, "tag": "EMAIL", "value": "email-1@server.com" }, { "context": "> (k/session test-app)\n (steps/sign-in \"email@server.com\" \"valid-password\")\n (kc/check-and-follo", "end": 4129, "score": 0.9994601011276245, "start": 4113, "tag": "EMAIL", "value": "email@server.com" }, { "context": "pp)\n (steps/sign-in \"email@server.com\" \"valid-password\")\n (kc/check-and-follow-redirect)\n ", "end": 4146, "score": 0.9693511128425598, "start": 4132, "tag": "PASSWORD", "value": "valid-password" 
}, { "context": " (kc/selector-includes-content [:body] \"email@server.com\")))\n\n(facts \"User can sign out\"\n (-> (k/ses", "end": 4324, "score": 0.9995846748352051, "start": 4308, "tag": "EMAIL", "value": "email@server.com" }, { "context": "> (k/session test-app)\n (steps/sign-in \"email@server.com\" \"valid-password\")\n (k/visit \"/profile\"", "end": 4432, "score": 0.9996347427368164, "start": 4416, "tag": "EMAIL", "value": "email@server.com" }, { "context": "pp)\n (steps/sign-in \"email@server.com\" \"valid-password\")\n (k/visit \"/profile\")\n (k/f", "end": 4449, "score": 0.9526448249816895, "start": 4435, "tag": "PASSWORD", "value": "valid-password" }, { "context": "ofile-picture-store stores-m)\n email \"email@server.com\"\n uid (ih/get-uid user-store email)]\n", "end": 4822, "score": 0.9996910095214844, "start": 4806, "tag": "EMAIL", "value": "email@server.com" }, { "context": "sion test-app)\n (steps/sign-in email \"valid-password\")\n (ih/add-profile-image profile-pict", "end": 4955, "score": 0.9545443654060364, "start": 4941, "tag": "PASSWORD", "value": "valid-password" }, { "context": "> (k/session test-app)\n (steps/sign-in \"email@server.com\" \"valid-password\")\n (k/visit \"/\")\n ", "end": 5384, "score": 0.9998971819877625, "start": 5368, "tag": "EMAIL", "value": "email@server.com" }, { "context": " (steps/sign-in \"email@server.com\" \"valid-password\")\n (k/visit \"/\")\n (kc", "end": 5392, "score": 0.5535330176353455, "start": 5392, "tag": "PASSWORD", "value": "" }, { "context": " (k/session test-app)\n (steps/register \"email2@server.com\" \"valid-password\")\n (k/visit \"/\")\n ", "end": 5685, "score": 0.9999015927314758, "start": 5668, "tag": "EMAIL", "value": "email2@server.com" }, { "context": " (steps/register \"email2@server.com\" \"valid-password\")\n (k/visit \"/\")\n (kc", "end": 5693, "score": 0.5380769968032837, "start": 5693, "tag": "PASSWORD", "value": "" }, { "context": " (k/session test-app)\n (steps/register 
\"user@withclient.com\" \"valid-password\"))\n (setup-add-client-to-u", "end": 5966, "score": 0.9999065399169922, "start": 5947, "tag": "EMAIL", "value": "user@withclient.com" }, { "context": " (steps/register \"user@withclient.com\" \"valid-password\"))\n (setup-add-client-to-user! \"user@withcl", "end": 5983, "score": 0.9428615570068359, "start": 5969, "tag": "PASSWORD", "value": "valid-password" }, { "context": "id-password\"))\n (setup-add-client-to-user! \"user@withclient.com\" \"myapp\")\n (-> (k/session test-app)\n ", "end": 6041, "score": 0.9999019503593445, "start": 6022, "tag": "EMAIL", "value": "user@withclient.com" }, { "context": "> (k/session test-app)\n (steps/sign-in \"user@withclient.com\" \"valid-password\")\n (k/visit \"/profile\"", "end": 6130, "score": 0.9998874664306641, "start": 6111, "tag": "EMAIL", "value": "user@withclient.com" }, { "context": "\n (steps/sign-in \"user@withclient.com\" \"valid-password\")\n (k/visit \"/profile\")\n (kc/", "end": 6147, "score": 0.9065840840339661, "start": 6133, "tag": "PASSWORD", "value": "valid-password" }, { "context": "> (k/session test-app)\n (steps/sign-in \"user@withclient.com\" \"valid-password\")\n (k/visit \"/profile\"", "end": 6388, "score": 0.9999054074287415, "start": 6369, "tag": "EMAIL", "value": "user@withclient.com" }, { "context": "\n (steps/sign-in \"user@withclient.com\" \"valid-password\")\n (k/visit \"/profile\")\n (kc/", "end": 6405, "score": 0.9352841973304749, "start": 6391, "tag": "PASSWORD", "value": "valid-password" }, { "context": "> (k/session test-app)\n (steps/sign-in \"user@withclient.com\" \"valid-password\")\n (k/visit \"/profile\"", "end": 7054, "score": 0.9998429417610168, "start": 7035, "tag": "EMAIL", "value": "user@withclient.com" }, { "context": "\n (steps/sign-in \"user@withclient.com\" \"valid-password\")\n (k/visit \"/profile\")\n (kc/", "end": 7071, "score": 0.9443631768226624, "start": 7057, "tag": "PASSWORD", "value": "valid-password" }, { "context": "> 
(k/session test-app)\n (steps/sign-in \"user@withclient.com\" \"valid-password\")\n (k/visit \"/profile\"", "end": 7458, "score": 0.9997970461845398, "start": 7439, "tag": "EMAIL", "value": "user@withclient.com" }, { "context": "\n (steps/sign-in \"user@withclient.com\" \"valid-password\")\n (k/visit \"/profile\")\n (k/f", "end": 7475, "score": 0.8977524638175964, "start": 7461, "tag": "PASSWORD", "value": "valid-password" }, { "context": "ill-in ks/change-password-current-password-input \"valid-password\")\n (kc/check-and-fill-in ks/change-pass", "end": 7742, "score": 0.9176506996154785, "start": 7728, "tag": "PASSWORD", "value": "valid-password" }, { "context": "nd-fill-in ks/change-password-new-password-input \"new-valid-password\")\n (kc/check-and-press ks/change-passwo", "end": 7835, "score": 0.9616573452949524, "start": 7817, "tag": "PASSWORD", "value": "new-valid-password" }, { "context": " (k/session test-app)\n (steps/register \"account_to_be@deleted.com\" \"valid-password\")\n (k/visit \"/profile\"", "end": 8185, "score": 0.9925987720489502, "start": 8160, "tag": "EMAIL", "value": "account_to_be@deleted.com" }, { "context": " (steps/register \"account_to_be@deleted.com\" \"valid-password\")\n (k/visit \"/profile\")\n (k/f", "end": 8202, "score": 0.9602691531181335, "start": 8188, "tag": "PASSWORD", "value": "valid-password" }, { "context": "> (k/session test-app)\n (steps/sign-in \"user@withclient.com\" \"new-valid-password\")\n (k/visit \"/prof", "end": 8686, "score": 0.9978492856025696, "start": 8667, "tag": "EMAIL", "value": "user@withclient.com" }, { "context": "\n (steps/sign-in \"user@withclient.com\" \"new-valid-password\")\n (k/visit \"/profile\")\n (k/f", "end": 8707, "score": 0.9897343516349792, "start": 8689, "tag": "PASSWORD", "value": "new-valid-password" }, { "context": " (kc/check-and-fill-in ks/change-email-input \"new_email@somewhere.com\")\n (k/follow ks/change-email-cancel-but", "end": 8954, "score": 0.9998681545257568, "start": 8931, 
"tag": "EMAIL", "value": "new_email@somewhere.com" }, { "context": "des-content [ks/profile-page-profile-card-email] \"user@withclient.com\")\n (kc/selector-does-not-include-conten", "end": 9174, "score": 0.9998960494995117, "start": 9155, "tag": "EMAIL", "value": "user@withclient.com" }, { "context": "ude-content [ks/profile-page-profile-card-email] \"new_email@somewhere.com\")\n (k/follow ks/profile-change-email-li", "end": 9287, "score": 0.9998252391815186, "start": 9264, "tag": "EMAIL", "value": "new_email@somewhere.com" }, { "context": " (kc/check-and-fill-in ks/change-email-input \"new_email@somewhere.com\")\n (kc/check-and-press ks/change-email-", "end": 9502, "score": 0.9998834133148193, "start": 9479, "tag": "EMAIL", "value": "new_email@somewhere.com" }, { "context": "s-content [ks/profile-unconfirmed-email-message] \"new_email@somewhere.com\")\n (kc/selector-includes-content [ks/pr", "end": 9761, "score": 0.9998670816421509, "start": 9738, "tag": "EMAIL", "value": "new_email@somewhere.com" }, { "context": "des-content [ks/profile-page-profile-card-email] \"new_email@somewhere.com\")))\n\n(facts \"User can change profile details, and", "end": 10022, "score": 0.999862790107727, "start": 9999, "tag": "EMAIL", "value": "new_email@somewhere.com" }, { "context": "> (k/session test-app)\n (steps/sign-in \"new_email@somewhere.com\" \"new-valid-password\")\n (k/visit \"/chan", "end": 10178, "score": 0.9998045563697815, "start": 10155, "tag": "EMAIL", "value": "new_email@somewhere.com" }, { "context": " (steps/sign-in \"new_email@somewhere.com\" \"new-valid-password\")\n (k/visit \"/change-profile\")\n ", "end": 10199, "score": 0.9873817563056946, "start": 10181, "tag": "PASSWORD", "value": "new-valid-password" }, { "context": "ssion test-app)\n (steps/register \"csrf@email.com\" \"valid-password\")\n (steps/sign-i", "end": 13653, "score": 0.9999201893806458, "start": 13639, "tag": "EMAIL", "value": "csrf@email.com" }, { "context": " (steps/register 
\"csrf@email.com\" \"valid-password\")\n (steps/sign-in \"csrf@", "end": 13661, "score": 0.8006471991539001, "start": 13656, "tag": "PASSWORD", "value": "valid" }, { "context": " (steps/register \"csrf@email.com\" \"valid-password\")\n (steps/sign-in \"csrf@email.com", "end": 13670, "score": 0.5828612446784973, "start": 13662, "tag": "PASSWORD", "value": "password" }, { "context": "valid-password\")\n (steps/sign-in \"csrf@email.com\" \"valid-password\")\n (kc/replay-la", "end": 13720, "score": 0.9999194145202637, "start": 13706, "tag": "EMAIL", "value": "csrf@email.com" }, { "context": " (steps/sign-in \"csrf@email.com\" \"valid-password\")\n (kc/replay-last-request)\n ", "end": 13737, "score": 0.9243901371955872, "start": 13723, "tag": "PASSWORD", "value": "valid-password" } ]
test/stonecutter/integration/kerodon/user.clj
d-cent/stonecutter
39
(ns stonecutter.integration.kerodon.user (:require [midje.sweet :refer :all] [kerodon.core :as k] [clauth.client :as cl-client] [stonecutter.email :as email] [stonecutter.db.storage :as s] [stonecutter.logging :as l] [stonecutter.db.user :as user] [stonecutter.view.index :as index] [stonecutter.integration.integration-helpers :as ih] [stonecutter.integration.kerodon.kerodon-selectors :as ks] [stonecutter.integration.kerodon.kerodon-checkers :as kc] [stonecutter.integration.kerodon.steps :as steps] [stonecutter.test.util.time :as test-time] [stonecutter.config :as config])) (l/init-logger!) (ih/setup-db) (def stores-m (s/create-mongo-stores (ih/get-test-db) (ih/get-test-db-connection) "stonecutter")) (defn setup-add-client-to-user! [email client-name] (let [client (cl-client/register-client (s/get-client-store stores-m) client-name "myclient.com") client-id (:client-id client)] (user/add-authorised-client-for-user! (s/get-user-store stores-m) email client-id))) (def email-sender (email/bash-sender-factory "test-resources/mail_stub.sh")) (def test-app (ih/build-app {:stores-m stores-m :email-sender email-sender})) (defn debug [state] (prn state) state) (facts "User can access index page" (-> (k/session test-app) (k/visit "/") (kc/check-page-is :index [ks/index-page-body]))) (facts "User is returned to index page when registration is invalid" (-> (k/session test-app) (k/visit "/") (kc/check-and-fill-in ks/registration-email-input "invalid-email") (kc/check-and-press ks/registration-submit) (kc/check-page-is :index [ks/index-page-body]) (kc/selector-includes-content [ks/registration-email-validation-element] "Enter a valid email address"))) (facts "User is returned to same page when existing email is used" (-> (k/session test-app) (steps/register "existing@user.com" "password") (steps/sign-out) (k/visit "/") (kc/check-and-fill-in ks/registration-email-input "existing@user.com") (kc/check-and-fill-in ks/registration-password-input "password") (kc/check-and-press 
ks/registration-submit) (kc/check-page-is :index [ks/index-page-body]))) (facts "Index page redirects to profile-created page and profile card is displayed" (-> (k/session test-app) (steps/register "Frank" "Lasty" "email@server.com" "valid-password") (kc/check-and-follow-redirect) (kc/check-page-is :show-profile-created [ks/profile-created-page-body]) (kc/selector-includes-content [ks/profile-created-flash] "email@server.com") (k/visit "/profile") (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-includes-content [ks/profile-page-profile-card-email] "email@server.com") (kc/selector-includes-content [ks/profile-page-profile-card-name] "Frank Lasty") (kc/selector-has-attribute-with-content [ks/profile-page-profile-card-image :img] :src config/default-profile-picture))) (facts "Accept invite page redirects to profile-created page" (let [invitation-store (:invitation-store stores-m)] (-> (k/session test-app) (steps/accept-invite "Bob" "Invitee" "valid-password" invitation-store "email-1@server.com" (test-time/new-stub-clock 0) 7) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile-created [ks/profile-created-page-body]) (kc/selector-includes-content [ks/profile-created-flash] "email-1@server.com")))) (facts "User is redirected to index page when accessing profile page not signed in" (-> (k/session test-app) (k/visit "/profile") (kc/check-and-follow-redirect) (kc/check-page-is :index [ks/index-page-body]))) (facts "User can sign in" (-> (k/session test-app) (steps/sign-in "email@server.com" "valid-password") (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-includes-content [:body] "email@server.com"))) (facts "User can sign out" (-> (k/session test-app) (steps/sign-in "email@server.com" "valid-password") (k/visit "/profile") (k/follow ks/sign-out-link) (kc/check-and-follow-redirect) (kc/check-page-is :index [ks/index-page-body]))) (facts "User can see their profile picture" (let [user-store 
(:user-store stores-m) profile-picture-store (:profile-picture-store stores-m) email "email@server.com" uid (ih/get-uid user-store email)] (-> (k/session test-app) (steps/sign-in email "valid-password") (ih/add-profile-image profile-picture-store uid) (k/visit "/profile") (kc/selector-has-attribute-with-content [ks/profile-page-profile-card-image :img] :src (ih/get-encoded-image))) (ih/remove-profile-image profile-picture-store uid))) (facts "Index url redirects to profile page if user is signed in" (-> (k/session test-app) (steps/sign-in "email@server.com" "valid-password") (k/visit "/") (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]))) (facts "Index url redirects to profile page if user is registered" (-> (k/session test-app) (steps/register "email2@server.com" "valid-password") (k/visit "/") (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]))) (facts "Clients appear on user profile page" (-> (k/session test-app) (steps/register "user@withclient.com" "valid-password")) (setup-add-client-to-user! 
"user@withclient.com" "myapp") (-> (k/session test-app) (steps/sign-in "user@withclient.com" "valid-password") (k/visit "/profile") (kc/selector-includes-content [ks/profile-authorised-client-list] "myapp"))) (facts "User can unshare profile card" (-> (k/session test-app) (steps/sign-in "user@withclient.com" "valid-password") (k/visit "/profile") (kc/selector-includes-content [ks/profile-authorised-client-list] "myapp") (k/follow ks/profile-authorised-client-unshare-link) (kc/page-uri-contains "/unshare-profile-card") (kc/check-and-press ks/unshare-profile-card-confirm-button) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-does-not-include-content [ks/profile-authorised-client-list] "myapp"))) (facts "User can request a new confirmation email" (-> (k/session test-app) (steps/sign-in "user@withclient.com" "valid-password") (k/visit "/profile") (kc/check-and-press ks/profile-resend-confirmation-email) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-exists [ks/profile-flash-message]))) (facts "User can change password" (-> (k/session test-app) (steps/sign-in "user@withclient.com" "valid-password") (k/visit "/profile") (k/follow ks/profile-change-password-link) (kc/check-page-is :show-change-password-form [ks/change-password-page-body]) (kc/check-and-fill-in ks/change-password-current-password-input "valid-password") (kc/check-and-fill-in ks/change-password-new-password-input "new-valid-password") (kc/check-and-press ks/change-password-submit) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-exists [ks/profile-flash-message]))) (facts "User can delete account" (-> (k/session test-app) (steps/register "account_to_be@deleted.com" "valid-password") (k/visit "/profile") (k/follow ks/profile-delete-account-link) (kc/check-page-is :show-delete-account-confirmation [ks/delete-account-page-body]) (kc/check-and-press 
ks/delete-account-button) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile-deleted [ks/profile-deleted-page-body]))) (facts "User can change email address" (-> (k/session test-app) (steps/sign-in "user@withclient.com" "new-valid-password") (k/visit "/profile") (k/follow ks/profile-change-email-link) (kc/check-page-is :show-change-email-form [ks/change-email-page-body]) (kc/check-and-fill-in ks/change-email-input "new_email@somewhere.com") (k/follow ks/change-email-cancel-button) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-includes-content [ks/profile-page-profile-card-email] "user@withclient.com") (kc/selector-does-not-include-content [ks/profile-page-profile-card-email] "new_email@somewhere.com") (k/follow ks/profile-change-email-link) (kc/check-page-is :show-change-email-form [ks/change-email-page-body]) (kc/check-and-fill-in ks/change-email-input "new_email@somewhere.com") (kc/check-and-press ks/change-email-button) (kc/check-and-follow-redirect) (kc/selector-exists [ks/profile-page-body]) (kc/selector-includes-content [ks/profile-unconfirmed-email-message] "new_email@somewhere.com") (kc/selector-includes-content [ks/profile-flash-message] "email") (kc/selector-includes-content [ks/profile-flash-message] "changed") (kc/selector-includes-content [ks/profile-page-profile-card-email] "new_email@somewhere.com"))) (facts "User can change profile details, and fields are pre-filled" (-> (k/session test-app) (steps/sign-in "new_email@somewhere.com" "new-valid-password") (k/visit "/change-profile") (kc/check-page-is :show-change-profile-form [ks/change-profile-page-body]) (kc/selector-has-attribute-with-content [ks/change-profile-first-name-input] :value "dummy first") (kc/selector-has-attribute-with-content [ks/change-profile-last-name-input] :value "dummy last") (kc/check-and-fill-in ks/change-profile-first-name-input "new first") (kc/check-and-fill-in ks/change-profile-last-name-input "") (kc/check-and-press 
ks/change-details-button) (kc/check-page-is :show-change-profile-form [ks/change-profile-page-body]) (kc/selector-has-attribute-with-content [ks/change-profile-first-name-input] :value "new first") (kc/selector-has-attribute-with-content [ks/change-profile-last-name-input] :value "") (kc/check-and-fill-in ks/change-profile-last-name-input "new last") (kc/check-and-press ks/change-details-button) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-includes-content [ks/profile-flash-message] "profile") (kc/selector-includes-content [ks/profile-flash-message] "updated") (k/visit "/change-profile") (k/follow ks/change-profile-back-button) (kc/check-page-is :show-profile [ks/profile-page-body]))) (facts "Not found page is shown for unknown url" (-> (k/session test-app) (k/visit "/wrong-url") (kc/response-status-is 404) (kc/selector-exists [ks/error-404-page-body]))) (fact "Error page is shown if an exception is thrown" (against-background (index/index anything) =throws=> (Exception.)) (-> (k/session (ih/build-app {:prone-stack-tracing? false})) (k/visit "/") (kc/response-status-is 500) (kc/selector-exists [ks/error-500-page-body])) (fact "if prone stack-tracing is enabled then error middleware isn't invoked (exception not caught)" (-> (k/session (ih/build-app {:prone-stack-tracing? 
true})) (k/visit "/")) => (throws Exception))) (fact "theme.css file is generated using environment variables" (-> (k/session (ih/build-app {:config-m {:secure "false" :header-bg-color "#012345" :header-font-color "#ABCDEF" :header-font-color-hover "#FEDCBA" :static-resources-dir-path "./test-resources" :logo-file-name "beautiful_logo.png"}})) (k/visit "/stylesheets/theme.css") (kc/response-status-is 200) (kc/response-body-contains "#012345") (kc/response-body-contains "#abcdef") (kc/response-body-contains "#fedcba") (kc/response-body-contains "/beautiful_logo.png"))) (fact "Correct app-name is used when config includes an :app-name" (-> (k/session (ih/build-app {:config-m {:secure "false" :app-name "My App Name"}})) (k/visit "/") (kc/selector-includes-content [ks/index-app-name] "My App Name"))) ;; 06 Jul 2015 (future-fact "Replaying the same post will generate a 403 from the csrf handling" (-> (k/session test-app) (steps/register "csrf@email.com" "valid-password") (steps/sign-in "csrf@email.com" "valid-password") (kc/replay-last-request) (kc/response-status-is 403))) (ih/teardown-db)
69671
(ns stonecutter.integration.kerodon.user (:require [midje.sweet :refer :all] [kerodon.core :as k] [clauth.client :as cl-client] [stonecutter.email :as email] [stonecutter.db.storage :as s] [stonecutter.logging :as l] [stonecutter.db.user :as user] [stonecutter.view.index :as index] [stonecutter.integration.integration-helpers :as ih] [stonecutter.integration.kerodon.kerodon-selectors :as ks] [stonecutter.integration.kerodon.kerodon-checkers :as kc] [stonecutter.integration.kerodon.steps :as steps] [stonecutter.test.util.time :as test-time] [stonecutter.config :as config])) (l/init-logger!) (ih/setup-db) (def stores-m (s/create-mongo-stores (ih/get-test-db) (ih/get-test-db-connection) "stonecutter")) (defn setup-add-client-to-user! [email client-name] (let [client (cl-client/register-client (s/get-client-store stores-m) client-name "myclient.com") client-id (:client-id client)] (user/add-authorised-client-for-user! (s/get-user-store stores-m) email client-id))) (def email-sender (email/bash-sender-factory "test-resources/mail_stub.sh")) (def test-app (ih/build-app {:stores-m stores-m :email-sender email-sender})) (defn debug [state] (prn state) state) (facts "User can access index page" (-> (k/session test-app) (k/visit "/") (kc/check-page-is :index [ks/index-page-body]))) (facts "User is returned to index page when registration is invalid" (-> (k/session test-app) (k/visit "/") (kc/check-and-fill-in ks/registration-email-input "invalid-email") (kc/check-and-press ks/registration-submit) (kc/check-page-is :index [ks/index-page-body]) (kc/selector-includes-content [ks/registration-email-validation-element] "Enter a valid email address"))) (facts "User is returned to same page when existing email is used" (-> (k/session test-app) (steps/register "<EMAIL>" "<PASSWORD>") (steps/sign-out) (k/visit "/") (kc/check-and-fill-in ks/registration-email-input "<EMAIL>") (kc/check-and-fill-in ks/registration-password-input "<PASSWORD>") (kc/check-and-press ks/registration-submit) 
(kc/check-page-is :index [ks/index-page-body]))) (facts "Index page redirects to profile-created page and profile card is displayed" (-> (k/session test-app) (steps/register "<NAME>" "<NAME>" "<EMAIL>" "<PASSWORD>") (kc/check-and-follow-redirect) (kc/check-page-is :show-profile-created [ks/profile-created-page-body]) (kc/selector-includes-content [ks/profile-created-flash] "<EMAIL>") (k/visit "/profile") (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-includes-content [ks/profile-page-profile-card-email] "<EMAIL>") (kc/selector-includes-content [ks/profile-page-profile-card-name] "<NAME>") (kc/selector-has-attribute-with-content [ks/profile-page-profile-card-image :img] :src config/default-profile-picture))) (facts "Accept invite page redirects to profile-created page" (let [invitation-store (:invitation-store stores-m)] (-> (k/session test-app) (steps/accept-invite "<NAME>" "<NAME>" "<PASSWORD>" invitation-store "<EMAIL>" (test-time/new-stub-clock 0) 7) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile-created [ks/profile-created-page-body]) (kc/selector-includes-content [ks/profile-created-flash] "<EMAIL>")))) (facts "User is redirected to index page when accessing profile page not signed in" (-> (k/session test-app) (k/visit "/profile") (kc/check-and-follow-redirect) (kc/check-page-is :index [ks/index-page-body]))) (facts "User can sign in" (-> (k/session test-app) (steps/sign-in "<EMAIL>" "<PASSWORD>") (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-includes-content [:body] "<EMAIL>"))) (facts "User can sign out" (-> (k/session test-app) (steps/sign-in "<EMAIL>" "<PASSWORD>") (k/visit "/profile") (k/follow ks/sign-out-link) (kc/check-and-follow-redirect) (kc/check-page-is :index [ks/index-page-body]))) (facts "User can see their profile picture" (let [user-store (:user-store stores-m) profile-picture-store (:profile-picture-store stores-m) email "<EMAIL>" uid (ih/get-uid 
user-store email)] (-> (k/session test-app) (steps/sign-in email "<PASSWORD>") (ih/add-profile-image profile-picture-store uid) (k/visit "/profile") (kc/selector-has-attribute-with-content [ks/profile-page-profile-card-image :img] :src (ih/get-encoded-image))) (ih/remove-profile-image profile-picture-store uid))) (facts "Index url redirects to profile page if user is signed in" (-> (k/session test-app) (steps/sign-in "<EMAIL>" "valid<PASSWORD>-password") (k/visit "/") (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]))) (facts "Index url redirects to profile page if user is registered" (-> (k/session test-app) (steps/register "<EMAIL>" "valid<PASSWORD>-password") (k/visit "/") (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]))) (facts "Clients appear on user profile page" (-> (k/session test-app) (steps/register "<EMAIL>" "<PASSWORD>")) (setup-add-client-to-user! "<EMAIL>" "myapp") (-> (k/session test-app) (steps/sign-in "<EMAIL>" "<PASSWORD>") (k/visit "/profile") (kc/selector-includes-content [ks/profile-authorised-client-list] "myapp"))) (facts "User can unshare profile card" (-> (k/session test-app) (steps/sign-in "<EMAIL>" "<PASSWORD>") (k/visit "/profile") (kc/selector-includes-content [ks/profile-authorised-client-list] "myapp") (k/follow ks/profile-authorised-client-unshare-link) (kc/page-uri-contains "/unshare-profile-card") (kc/check-and-press ks/unshare-profile-card-confirm-button) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-does-not-include-content [ks/profile-authorised-client-list] "myapp"))) (facts "User can request a new confirmation email" (-> (k/session test-app) (steps/sign-in "<EMAIL>" "<PASSWORD>") (k/visit "/profile") (kc/check-and-press ks/profile-resend-confirmation-email) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-exists [ks/profile-flash-message]))) (facts 
"User can change password" (-> (k/session test-app) (steps/sign-in "<EMAIL>" "<PASSWORD>") (k/visit "/profile") (k/follow ks/profile-change-password-link) (kc/check-page-is :show-change-password-form [ks/change-password-page-body]) (kc/check-and-fill-in ks/change-password-current-password-input "<PASSWORD>") (kc/check-and-fill-in ks/change-password-new-password-input "<PASSWORD>") (kc/check-and-press ks/change-password-submit) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-exists [ks/profile-flash-message]))) (facts "User can delete account" (-> (k/session test-app) (steps/register "<EMAIL>" "<PASSWORD>") (k/visit "/profile") (k/follow ks/profile-delete-account-link) (kc/check-page-is :show-delete-account-confirmation [ks/delete-account-page-body]) (kc/check-and-press ks/delete-account-button) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile-deleted [ks/profile-deleted-page-body]))) (facts "User can change email address" (-> (k/session test-app) (steps/sign-in "<EMAIL>" "<PASSWORD>") (k/visit "/profile") (k/follow ks/profile-change-email-link) (kc/check-page-is :show-change-email-form [ks/change-email-page-body]) (kc/check-and-fill-in ks/change-email-input "<EMAIL>") (k/follow ks/change-email-cancel-button) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-includes-content [ks/profile-page-profile-card-email] "<EMAIL>") (kc/selector-does-not-include-content [ks/profile-page-profile-card-email] "<EMAIL>") (k/follow ks/profile-change-email-link) (kc/check-page-is :show-change-email-form [ks/change-email-page-body]) (kc/check-and-fill-in ks/change-email-input "<EMAIL>") (kc/check-and-press ks/change-email-button) (kc/check-and-follow-redirect) (kc/selector-exists [ks/profile-page-body]) (kc/selector-includes-content [ks/profile-unconfirmed-email-message] "<EMAIL>") (kc/selector-includes-content [ks/profile-flash-message] "email") (kc/selector-includes-content 
[ks/profile-flash-message] "changed") (kc/selector-includes-content [ks/profile-page-profile-card-email] "<EMAIL>"))) (facts "User can change profile details, and fields are pre-filled" (-> (k/session test-app) (steps/sign-in "<EMAIL>" "<PASSWORD>") (k/visit "/change-profile") (kc/check-page-is :show-change-profile-form [ks/change-profile-page-body]) (kc/selector-has-attribute-with-content [ks/change-profile-first-name-input] :value "dummy first") (kc/selector-has-attribute-with-content [ks/change-profile-last-name-input] :value "dummy last") (kc/check-and-fill-in ks/change-profile-first-name-input "new first") (kc/check-and-fill-in ks/change-profile-last-name-input "") (kc/check-and-press ks/change-details-button) (kc/check-page-is :show-change-profile-form [ks/change-profile-page-body]) (kc/selector-has-attribute-with-content [ks/change-profile-first-name-input] :value "new first") (kc/selector-has-attribute-with-content [ks/change-profile-last-name-input] :value "") (kc/check-and-fill-in ks/change-profile-last-name-input "new last") (kc/check-and-press ks/change-details-button) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-includes-content [ks/profile-flash-message] "profile") (kc/selector-includes-content [ks/profile-flash-message] "updated") (k/visit "/change-profile") (k/follow ks/change-profile-back-button) (kc/check-page-is :show-profile [ks/profile-page-body]))) (facts "Not found page is shown for unknown url" (-> (k/session test-app) (k/visit "/wrong-url") (kc/response-status-is 404) (kc/selector-exists [ks/error-404-page-body]))) (fact "Error page is shown if an exception is thrown" (against-background (index/index anything) =throws=> (Exception.)) (-> (k/session (ih/build-app {:prone-stack-tracing? 
false})) (k/visit "/") (kc/response-status-is 500) (kc/selector-exists [ks/error-500-page-body])) (fact "if prone stack-tracing is enabled then error middleware isn't invoked (exception not caught)" (-> (k/session (ih/build-app {:prone-stack-tracing? true})) (k/visit "/")) => (throws Exception))) (fact "theme.css file is generated using environment variables" (-> (k/session (ih/build-app {:config-m {:secure "false" :header-bg-color "#012345" :header-font-color "#ABCDEF" :header-font-color-hover "#FEDCBA" :static-resources-dir-path "./test-resources" :logo-file-name "beautiful_logo.png"}})) (k/visit "/stylesheets/theme.css") (kc/response-status-is 200) (kc/response-body-contains "#012345") (kc/response-body-contains "#abcdef") (kc/response-body-contains "#fedcba") (kc/response-body-contains "/beautiful_logo.png"))) (fact "Correct app-name is used when config includes an :app-name" (-> (k/session (ih/build-app {:config-m {:secure "false" :app-name "My App Name"}})) (k/visit "/") (kc/selector-includes-content [ks/index-app-name] "My App Name"))) ;; 06 Jul 2015 (future-fact "Replaying the same post will generate a 403 from the csrf handling" (-> (k/session test-app) (steps/register "<EMAIL>" "<PASSWORD>-<PASSWORD>") (steps/sign-in "<EMAIL>" "<PASSWORD>") (kc/replay-last-request) (kc/response-status-is 403))) (ih/teardown-db)
true
(ns stonecutter.integration.kerodon.user (:require [midje.sweet :refer :all] [kerodon.core :as k] [clauth.client :as cl-client] [stonecutter.email :as email] [stonecutter.db.storage :as s] [stonecutter.logging :as l] [stonecutter.db.user :as user] [stonecutter.view.index :as index] [stonecutter.integration.integration-helpers :as ih] [stonecutter.integration.kerodon.kerodon-selectors :as ks] [stonecutter.integration.kerodon.kerodon-checkers :as kc] [stonecutter.integration.kerodon.steps :as steps] [stonecutter.test.util.time :as test-time] [stonecutter.config :as config])) (l/init-logger!) (ih/setup-db) (def stores-m (s/create-mongo-stores (ih/get-test-db) (ih/get-test-db-connection) "stonecutter")) (defn setup-add-client-to-user! [email client-name] (let [client (cl-client/register-client (s/get-client-store stores-m) client-name "myclient.com") client-id (:client-id client)] (user/add-authorised-client-for-user! (s/get-user-store stores-m) email client-id))) (def email-sender (email/bash-sender-factory "test-resources/mail_stub.sh")) (def test-app (ih/build-app {:stores-m stores-m :email-sender email-sender})) (defn debug [state] (prn state) state) (facts "User can access index page" (-> (k/session test-app) (k/visit "/") (kc/check-page-is :index [ks/index-page-body]))) (facts "User is returned to index page when registration is invalid" (-> (k/session test-app) (k/visit "/") (kc/check-and-fill-in ks/registration-email-input "invalid-email") (kc/check-and-press ks/registration-submit) (kc/check-page-is :index [ks/index-page-body]) (kc/selector-includes-content [ks/registration-email-validation-element] "Enter a valid email address"))) (facts "User is returned to same page when existing email is used" (-> (k/session test-app) (steps/register "PI:EMAIL:<EMAIL>END_PI" "PI:PASSWORD:<PASSWORD>END_PI") (steps/sign-out) (k/visit "/") (kc/check-and-fill-in ks/registration-email-input "PI:EMAIL:<EMAIL>END_PI") (kc/check-and-fill-in ks/registration-password-input 
"PI:PASSWORD:<PASSWORD>END_PI") (kc/check-and-press ks/registration-submit) (kc/check-page-is :index [ks/index-page-body]))) (facts "Index page redirects to profile-created page and profile card is displayed" (-> (k/session test-app) (steps/register "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:EMAIL:<EMAIL>END_PI" "PI:PASSWORD:<PASSWORD>END_PI") (kc/check-and-follow-redirect) (kc/check-page-is :show-profile-created [ks/profile-created-page-body]) (kc/selector-includes-content [ks/profile-created-flash] "PI:EMAIL:<EMAIL>END_PI") (k/visit "/profile") (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-includes-content [ks/profile-page-profile-card-email] "PI:EMAIL:<EMAIL>END_PI") (kc/selector-includes-content [ks/profile-page-profile-card-name] "PI:NAME:<NAME>END_PI") (kc/selector-has-attribute-with-content [ks/profile-page-profile-card-image :img] :src config/default-profile-picture))) (facts "Accept invite page redirects to profile-created page" (let [invitation-store (:invitation-store stores-m)] (-> (k/session test-app) (steps/accept-invite "PI:NAME:<NAME>END_PI" "PI:NAME:<NAME>END_PI" "PI:PASSWORD:<PASSWORD>END_PI" invitation-store "PI:EMAIL:<EMAIL>END_PI" (test-time/new-stub-clock 0) 7) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile-created [ks/profile-created-page-body]) (kc/selector-includes-content [ks/profile-created-flash] "PI:EMAIL:<EMAIL>END_PI")))) (facts "User is redirected to index page when accessing profile page not signed in" (-> (k/session test-app) (k/visit "/profile") (kc/check-and-follow-redirect) (kc/check-page-is :index [ks/index-page-body]))) (facts "User can sign in" (-> (k/session test-app) (steps/sign-in "PI:EMAIL:<EMAIL>END_PI" "PI:PASSWORD:<PASSWORD>END_PI") (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-includes-content [:body] "PI:EMAIL:<EMAIL>END_PI"))) (facts "User can sign out" (-> (k/session test-app) (steps/sign-in "PI:EMAIL:<EMAIL>END_PI" 
"PI:PASSWORD:<PASSWORD>END_PI") (k/visit "/profile") (k/follow ks/sign-out-link) (kc/check-and-follow-redirect) (kc/check-page-is :index [ks/index-page-body]))) (facts "User can see their profile picture" (let [user-store (:user-store stores-m) profile-picture-store (:profile-picture-store stores-m) email "PI:EMAIL:<EMAIL>END_PI" uid (ih/get-uid user-store email)] (-> (k/session test-app) (steps/sign-in email "PI:PASSWORD:<PASSWORD>END_PI") (ih/add-profile-image profile-picture-store uid) (k/visit "/profile") (kc/selector-has-attribute-with-content [ks/profile-page-profile-card-image :img] :src (ih/get-encoded-image))) (ih/remove-profile-image profile-picture-store uid))) (facts "Index url redirects to profile page if user is signed in" (-> (k/session test-app) (steps/sign-in "PI:EMAIL:<EMAIL>END_PI" "validPI:PASSWORD:<PASSWORD>END_PI-password") (k/visit "/") (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]))) (facts "Index url redirects to profile page if user is registered" (-> (k/session test-app) (steps/register "PI:EMAIL:<EMAIL>END_PI" "validPI:PASSWORD:<PASSWORD>END_PI-password") (k/visit "/") (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]))) (facts "Clients appear on user profile page" (-> (k/session test-app) (steps/register "PI:EMAIL:<EMAIL>END_PI" "PI:PASSWORD:<PASSWORD>END_PI")) (setup-add-client-to-user! 
"PI:EMAIL:<EMAIL>END_PI" "myapp") (-> (k/session test-app) (steps/sign-in "PI:EMAIL:<EMAIL>END_PI" "PI:PASSWORD:<PASSWORD>END_PI") (k/visit "/profile") (kc/selector-includes-content [ks/profile-authorised-client-list] "myapp"))) (facts "User can unshare profile card" (-> (k/session test-app) (steps/sign-in "PI:EMAIL:<EMAIL>END_PI" "PI:PASSWORD:<PASSWORD>END_PI") (k/visit "/profile") (kc/selector-includes-content [ks/profile-authorised-client-list] "myapp") (k/follow ks/profile-authorised-client-unshare-link) (kc/page-uri-contains "/unshare-profile-card") (kc/check-and-press ks/unshare-profile-card-confirm-button) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-does-not-include-content [ks/profile-authorised-client-list] "myapp"))) (facts "User can request a new confirmation email" (-> (k/session test-app) (steps/sign-in "PI:EMAIL:<EMAIL>END_PI" "PI:PASSWORD:<PASSWORD>END_PI") (k/visit "/profile") (kc/check-and-press ks/profile-resend-confirmation-email) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-exists [ks/profile-flash-message]))) (facts "User can change password" (-> (k/session test-app) (steps/sign-in "PI:EMAIL:<EMAIL>END_PI" "PI:PASSWORD:<PASSWORD>END_PI") (k/visit "/profile") (k/follow ks/profile-change-password-link) (kc/check-page-is :show-change-password-form [ks/change-password-page-body]) (kc/check-and-fill-in ks/change-password-current-password-input "PI:PASSWORD:<PASSWORD>END_PI") (kc/check-and-fill-in ks/change-password-new-password-input "PI:PASSWORD:<PASSWORD>END_PI") (kc/check-and-press ks/change-password-submit) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-exists [ks/profile-flash-message]))) (facts "User can delete account" (-> (k/session test-app) (steps/register "PI:EMAIL:<EMAIL>END_PI" "PI:PASSWORD:<PASSWORD>END_PI") (k/visit "/profile") (k/follow ks/profile-delete-account-link) 
(kc/check-page-is :show-delete-account-confirmation [ks/delete-account-page-body]) (kc/check-and-press ks/delete-account-button) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile-deleted [ks/profile-deleted-page-body]))) (facts "User can change email address" (-> (k/session test-app) (steps/sign-in "PI:EMAIL:<EMAIL>END_PI" "PI:PASSWORD:<PASSWORD>END_PI") (k/visit "/profile") (k/follow ks/profile-change-email-link) (kc/check-page-is :show-change-email-form [ks/change-email-page-body]) (kc/check-and-fill-in ks/change-email-input "PI:EMAIL:<EMAIL>END_PI") (k/follow ks/change-email-cancel-button) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-includes-content [ks/profile-page-profile-card-email] "PI:EMAIL:<EMAIL>END_PI") (kc/selector-does-not-include-content [ks/profile-page-profile-card-email] "PI:EMAIL:<EMAIL>END_PI") (k/follow ks/profile-change-email-link) (kc/check-page-is :show-change-email-form [ks/change-email-page-body]) (kc/check-and-fill-in ks/change-email-input "PI:EMAIL:<EMAIL>END_PI") (kc/check-and-press ks/change-email-button) (kc/check-and-follow-redirect) (kc/selector-exists [ks/profile-page-body]) (kc/selector-includes-content [ks/profile-unconfirmed-email-message] "PI:EMAIL:<EMAIL>END_PI") (kc/selector-includes-content [ks/profile-flash-message] "email") (kc/selector-includes-content [ks/profile-flash-message] "changed") (kc/selector-includes-content [ks/profile-page-profile-card-email] "PI:EMAIL:<EMAIL>END_PI"))) (facts "User can change profile details, and fields are pre-filled" (-> (k/session test-app) (steps/sign-in "PI:EMAIL:<EMAIL>END_PI" "PI:PASSWORD:<PASSWORD>END_PI") (k/visit "/change-profile") (kc/check-page-is :show-change-profile-form [ks/change-profile-page-body]) (kc/selector-has-attribute-with-content [ks/change-profile-first-name-input] :value "dummy first") (kc/selector-has-attribute-with-content [ks/change-profile-last-name-input] :value "dummy last") (kc/check-and-fill-in 
ks/change-profile-first-name-input "new first") (kc/check-and-fill-in ks/change-profile-last-name-input "") (kc/check-and-press ks/change-details-button) (kc/check-page-is :show-change-profile-form [ks/change-profile-page-body]) (kc/selector-has-attribute-with-content [ks/change-profile-first-name-input] :value "new first") (kc/selector-has-attribute-with-content [ks/change-profile-last-name-input] :value "") (kc/check-and-fill-in ks/change-profile-last-name-input "new last") (kc/check-and-press ks/change-details-button) (kc/check-and-follow-redirect) (kc/check-page-is :show-profile [ks/profile-page-body]) (kc/selector-includes-content [ks/profile-flash-message] "profile") (kc/selector-includes-content [ks/profile-flash-message] "updated") (k/visit "/change-profile") (k/follow ks/change-profile-back-button) (kc/check-page-is :show-profile [ks/profile-page-body]))) (facts "Not found page is shown for unknown url" (-> (k/session test-app) (k/visit "/wrong-url") (kc/response-status-is 404) (kc/selector-exists [ks/error-404-page-body]))) (fact "Error page is shown if an exception is thrown" (against-background (index/index anything) =throws=> (Exception.)) (-> (k/session (ih/build-app {:prone-stack-tracing? false})) (k/visit "/") (kc/response-status-is 500) (kc/selector-exists [ks/error-500-page-body])) (fact "if prone stack-tracing is enabled then error middleware isn't invoked (exception not caught)" (-> (k/session (ih/build-app {:prone-stack-tracing? 
true})) (k/visit "/")) => (throws Exception))) (fact "theme.css file is generated using environment variables" (-> (k/session (ih/build-app {:config-m {:secure "false" :header-bg-color "#012345" :header-font-color "#ABCDEF" :header-font-color-hover "#FEDCBA" :static-resources-dir-path "./test-resources" :logo-file-name "beautiful_logo.png"}})) (k/visit "/stylesheets/theme.css") (kc/response-status-is 200) (kc/response-body-contains "#012345") (kc/response-body-contains "#abcdef") (kc/response-body-contains "#fedcba") (kc/response-body-contains "/beautiful_logo.png"))) (fact "Correct app-name is used when config includes an :app-name" (-> (k/session (ih/build-app {:config-m {:secure "false" :app-name "My App Name"}})) (k/visit "/") (kc/selector-includes-content [ks/index-app-name] "My App Name"))) ;; 06 Jul 2015 (future-fact "Replaying the same post will generate a 403 from the csrf handling" (-> (k/session test-app) (steps/register "PI:EMAIL:<EMAIL>END_PI" "PI:PASSWORD:<PASSWORD>END_PI-PI:PASSWORD:<PASSWORD>END_PI") (steps/sign-in "PI:EMAIL:<EMAIL>END_PI" "PI:PASSWORD:<PASSWORD>END_PI") (kc/replay-last-request) (kc/response-status-is 403))) (ih/teardown-db)
[ { "context": "(ns clj-heroku-api.test.core\n ^{:author \"Matthew Burns\"\n :doc \"Unit/Component tests for the functiona", "end": 55, "score": 0.9998651742935181, "start": 42, "tag": "NAME", "value": "Matthew Burns" }, { "context": "==========================\n(def test-ssh-pub-key \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCz29znMi/UJX/nvkRSO", "end": 1804, "score": 0.8714783191680908, "start": 1797, "tag": "KEY", "value": "ssh-rsa" }, { "context": "===================\n(def test-ssh-pub-key \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCz29znMi/UJX/nvkRSO5FFugKhU9DkkI53E0vXUnP8zeLFxMgyUqmXryPVjWtGzz2LRWqjm14SbqHAmM44pGHVfBIp6wCKBWSUYGv/FxOulwYgtWzz4moxWLZrFyWWgJAnehcVUifHNgzKwT2ovWm2ns52681Z8yFK3K8/uLStDjLIaPePEOaxaTvgIxZNsfyEoXoHcyTPwdR1GtQuDTuDYqYmjmPCoKybYnXrTQ1QFuQxDneBkswQYSl0H2aLf3uBK4F01hr+azXQuSe39eSV4I/TqzmNJlanpILT9Jz3/J1i4r6brpF3AxLnFnb9ufIbzQAIa/VZIulfrZkcBsUl user@company.com\")\n(def plain-test-ssh-pub-key \"A", "end": 2177, "score": 0.9856827855110168, "start": 1805, "tag": "KEY", "value": "AAAAB3NzaC1yc2EAAAADAQABAAABAQCz29znMi/UJX/nvkRSO5FFugKhU9DkkI53E0vXUnP8zeLFxMgyUqmXryPVjWtGzz2LRWqjm14SbqHAmM44pGHVfBIp6wCKBWSUYGv/FxOulwYgtWzz4moxWLZrFyWWgJAnehcVUifHNgzKwT2ovWm2ns52681Z8yFK3K8/uLStDjLIaPePEOaxaTvgIxZNsfyEoXoHcyTPwdR1GtQuDTuDYqYmjmPCoKybYnXrTQ1QFuQxDneBkswQYSl0H2aLf3uBK4F01hr+azXQuSe39eSV4I/TqzmNJlanpILT9Jz3/J1i4r6brpF3AxLnFnb9ufIbzQAIa/VZIulfrZkcBsUl" }, { "context": "LT9Jz3/J1i4r6brpF3AxLnFnb9ufIbzQAIa/VZIulfrZkcBsUl user@company.com\")\n(def plain-test-ssh-pub-key \"AAAAB3", "end": 2182, "score": 0.8925743103027344, "start": 2178, "tag": "EMAIL", "value": "user" }, { "context": "3/J1i4r6brpF3AxLnFnb9ufIbzQAIa/VZIulfrZkcBsUl user@company.com\")\n(def plain-test-ssh-pub-key \"AAAAB3N", "end": 2183, "score": 0.998683512210846, "start": 2182, "tag": "KEY", "value": "@" }, { "context": "/J1i4r6brpF3AxLnFnb9ufIbzQAIa/VZIulfrZkcBsUl user@company.com\")\n(def plain-test-ssh-pub-key \"AAAAB3NzaC1yc2EAAA", "end": 2194, "score": 0.9979146122932434, "start": 
2183, "tag": "EMAIL", "value": "company.com" }, { "context": "l user@company.com\")\n(def plain-test-ssh-pub-key \"AAAAB3NzaC1yc2EAAAADAQABAAABAQCz29znMi/UJX/nvkRSO5FFugKhU9DkkI53E0vXUnP8zeLFxMgyUqmXryPVjWtGzz2LRWqjm14SbqHAmM44pGHVfBIp6wCKBWSUYGv/FxOulwYgtWzz4moxWLZrFyWWgJAnehcVUifHNgzKwT2ovWm2ns52681Z8yFK3K8/uLStDjLIaPePEOaxaTvgIxZNsfyEoXoHcyTPwdR1GtQuDTuDYqYmjmPCoKybYnXrTQ1QFuQxDneBkswQYSl0H2aLf3uBK4F01hr+azXQuSe39eSV4I/TqzmNJlanpILT9Jz3/J1i4r6brpF3AxLnFnb9ufIbzQAIa/VZIulfrZkcBsUl\")\n\n(def add-ssh-pub-key-result @(future (add-key ", "end": 2598, "score": 0.9997692108154297, "start": 2226, "tag": "KEY", "value": "AAAAB3NzaC1yc2EAAAADAQABAAABAQCz29znMi/UJX/nvkRSO5FFugKhU9DkkI53E0vXUnP8zeLFxMgyUqmXryPVjWtGzz2LRWqjm14SbqHAmM44pGHVfBIp6wCKBWSUYGv/FxOulwYgtWzz4moxWLZrFyWWgJAnehcVUifHNgzKwT2ovWm2ns52681Z8yFK3K8/uLStDjLIaPePEOaxaTvgIxZNsfyEoXoHcyTPwdR1GtQuDTuDYqYmjmPCoKybYnXrTQ1QFuQxDneBkswQYSl0H2aLf3uBK4F01hr+azXQuSe39eSV4I/TqzmNJlanpILT9Jz3/J1i4r6brpF3AxLnFnb9ufIbzQAIa/VZIulfrZkcBsUl" } ]
test/clj_heroku_api/test/core.clj
mateoconfeugo/clj-heroku-api
0
(ns clj-heroku-api.test.core ^{:author "Matthew Burns" :doc "Unit/Component tests for the functionality offered by the Heroku API via this clojure wrapper library"} (:use [clj-heroku-api.core] ;; Library being tested [clojure.core]) (:require [expectations :refer [expect]]) (:import [java.lang Exception])) ;;======================================================================== ;; SETUP: Obtain secret key from environment and a bogus name ;;======================================================================== (def heroku-api-token (if-let [token (System/getenv "HEROKU_API_TOKEN")] token (throw (Exception. "No api token environment variable")))) ;; TODO make this some random name or adjust just to use the random name heroku assigns ;;(def test-app-name "nobody-would-ever-use-this") ;;======================================================================== ;; UNIT TEST: New app added and removed from heroku app hosting cloud ;;======================================================================== (def test-heroku-app @(future (create-app heroku-api-token))) (def test-app-name @(future (.getName test-heroku-app))) (def test-app-present @(future (app-exists? test-app-name heroku-api-token))) ;; DANGER DANGER DANGER fix this because its just the opposite of this testing for false when it should be true ahh! (expect false test-app-present) (def removal-result @(future (remove-app test-app-name heroku-api-token))) (def test-app-present @(future (app-exists? 
test-app-name heroku-api-token))) (expect false test-app-present) ;;======================================================================== ;; UNIT TEST: New ssh public key add and remove ;;======================================================================== (def test-ssh-pub-key "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCz29znMi/UJX/nvkRSO5FFugKhU9DkkI53E0vXUnP8zeLFxMgyUqmXryPVjWtGzz2LRWqjm14SbqHAmM44pGHVfBIp6wCKBWSUYGv/FxOulwYgtWzz4moxWLZrFyWWgJAnehcVUifHNgzKwT2ovWm2ns52681Z8yFK3K8/uLStDjLIaPePEOaxaTvgIxZNsfyEoXoHcyTPwdR1GtQuDTuDYqYmjmPCoKybYnXrTQ1QFuQxDneBkswQYSl0H2aLf3uBK4F01hr+azXQuSe39eSV4I/TqzmNJlanpILT9Jz3/J1i4r6brpF3AxLnFnb9ufIbzQAIa/VZIulfrZkcBsUl user@company.com") (def plain-test-ssh-pub-key "AAAAB3NzaC1yc2EAAAADAQABAAABAQCz29znMi/UJX/nvkRSO5FFugKhU9DkkI53E0vXUnP8zeLFxMgyUqmXryPVjWtGzz2LRWqjm14SbqHAmM44pGHVfBIp6wCKBWSUYGv/FxOulwYgtWzz4moxWLZrFyWWgJAnehcVUifHNgzKwT2ovWm2ns52681Z8yFK3K8/uLStDjLIaPePEOaxaTvgIxZNsfyEoXoHcyTPwdR1GtQuDTuDYqYmjmPCoKybYnXrTQ1QFuQxDneBkswQYSl0H2aLf3uBK4F01hr+azXQuSe39eSV4I/TqzmNJlanpILT9Jz3/J1i4r6brpF3AxLnFnb9ufIbzQAIa/VZIulfrZkcBsUl") (def add-ssh-pub-key-result @(future (add-key test-ssh-pub-key heroku-api-token))) (def app-count @(future (count (list-keys heroku-api-token)))) ;; DANGER DANGER DANGER fix this because its just the opposite of this testing for false when it should be true ahh! (expect false (> app-count 1)) (def delete-ssh-pub-key-result @(future (remove-key plain-test-ssh-pub-key heroku-api-token))) (def app-count @(future (count (list-keys heroku-api-token)))) ;; DANGER DANGER DANGER fix this because its just the opposite of this testing for false when it should be true ahh! 
(expect true (= app-count 1)) ;;======================================================================== ;; UNIT TEST: List, Add and remove application configuration settings ;;======================================================================== (def test-heroku-app @(future (create-app heroku-api-token))) (def test-app-name @(future (.getName test-heroku-app))) (def test-app-present @(future (app-exists? test-app-name heroku-api-token))) (when test-app-present (add-config test-app-name {:ham "foo"} heroku-api-token) (expect true (contains? @(future (list-config test-app-name heroku-api-token)) "ham")) (remove-config test-app-name "ham" heroku-api-token) (expect false (contains? @(future (list-config test-app-name heroku-api-token)) "ham")) (remove-app test-app-name heroku-api-token))
36782
(ns clj-heroku-api.test.core ^{:author "<NAME>" :doc "Unit/Component tests for the functionality offered by the Heroku API via this clojure wrapper library"} (:use [clj-heroku-api.core] ;; Library being tested [clojure.core]) (:require [expectations :refer [expect]]) (:import [java.lang Exception])) ;;======================================================================== ;; SETUP: Obtain secret key from environment and a bogus name ;;======================================================================== (def heroku-api-token (if-let [token (System/getenv "HEROKU_API_TOKEN")] token (throw (Exception. "No api token environment variable")))) ;; TODO make this some random name or adjust just to use the random name heroku assigns ;;(def test-app-name "nobody-would-ever-use-this") ;;======================================================================== ;; UNIT TEST: New app added and removed from heroku app hosting cloud ;;======================================================================== (def test-heroku-app @(future (create-app heroku-api-token))) (def test-app-name @(future (.getName test-heroku-app))) (def test-app-present @(future (app-exists? test-app-name heroku-api-token))) ;; DANGER DANGER DANGER fix this because its just the opposite of this testing for false when it should be true ahh! (expect false test-app-present) (def removal-result @(future (remove-app test-app-name heroku-api-token))) (def test-app-present @(future (app-exists? 
test-app-name heroku-api-token))) (expect false test-app-present) ;;======================================================================== ;; UNIT TEST: New ssh public key add and remove ;;======================================================================== (def test-ssh-pub-key "<KEY> <KEY> <EMAIL> <KEY> <EMAIL>") (def plain-test-ssh-pub-key "<KEY>") (def add-ssh-pub-key-result @(future (add-key test-ssh-pub-key heroku-api-token))) (def app-count @(future (count (list-keys heroku-api-token)))) ;; DANGER DANGER DANGER fix this because its just the opposite of this testing for false when it should be true ahh! (expect false (> app-count 1)) (def delete-ssh-pub-key-result @(future (remove-key plain-test-ssh-pub-key heroku-api-token))) (def app-count @(future (count (list-keys heroku-api-token)))) ;; DANGER DANGER DANGER fix this because its just the opposite of this testing for false when it should be true ahh! (expect true (= app-count 1)) ;;======================================================================== ;; UNIT TEST: List, Add and remove application configuration settings ;;======================================================================== (def test-heroku-app @(future (create-app heroku-api-token))) (def test-app-name @(future (.getName test-heroku-app))) (def test-app-present @(future (app-exists? test-app-name heroku-api-token))) (when test-app-present (add-config test-app-name {:ham "foo"} heroku-api-token) (expect true (contains? @(future (list-config test-app-name heroku-api-token)) "ham")) (remove-config test-app-name "ham" heroku-api-token) (expect false (contains? @(future (list-config test-app-name heroku-api-token)) "ham")) (remove-app test-app-name heroku-api-token))
true
(ns clj-heroku-api.test.core ^{:author "PI:NAME:<NAME>END_PI" :doc "Unit/Component tests for the functionality offered by the Heroku API via this clojure wrapper library"} (:use [clj-heroku-api.core] ;; Library being tested [clojure.core]) (:require [expectations :refer [expect]]) (:import [java.lang Exception])) ;;======================================================================== ;; SETUP: Obtain secret key from environment and a bogus name ;;======================================================================== (def heroku-api-token (if-let [token (System/getenv "HEROKU_API_TOKEN")] token (throw (Exception. "No api token environment variable")))) ;; TODO make this some random name or adjust just to use the random name heroku assigns ;;(def test-app-name "nobody-would-ever-use-this") ;;======================================================================== ;; UNIT TEST: New app added and removed from heroku app hosting cloud ;;======================================================================== (def test-heroku-app @(future (create-app heroku-api-token))) (def test-app-name @(future (.getName test-heroku-app))) (def test-app-present @(future (app-exists? test-app-name heroku-api-token))) ;; DANGER DANGER DANGER fix this because its just the opposite of this testing for false when it should be true ahh! (expect false test-app-present) (def removal-result @(future (remove-app test-app-name heroku-api-token))) (def test-app-present @(future (app-exists? 
test-app-name heroku-api-token))) (expect false test-app-present) ;;======================================================================== ;; UNIT TEST: New ssh public key add and remove ;;======================================================================== (def test-ssh-pub-key "PI:KEY:<KEY>END_PI PI:KEY:<KEY>END_PI PI:EMAIL:<EMAIL>END_PI PI:KEY:<KEY>END_PI PI:EMAIL:<EMAIL>END_PI") (def plain-test-ssh-pub-key "PI:KEY:<KEY>END_PI") (def add-ssh-pub-key-result @(future (add-key test-ssh-pub-key heroku-api-token))) (def app-count @(future (count (list-keys heroku-api-token)))) ;; DANGER DANGER DANGER fix this because its just the opposite of this testing for false when it should be true ahh! (expect false (> app-count 1)) (def delete-ssh-pub-key-result @(future (remove-key plain-test-ssh-pub-key heroku-api-token))) (def app-count @(future (count (list-keys heroku-api-token)))) ;; DANGER DANGER DANGER fix this because its just the opposite of this testing for false when it should be true ahh! (expect true (= app-count 1)) ;;======================================================================== ;; UNIT TEST: List, Add and remove application configuration settings ;;======================================================================== (def test-heroku-app @(future (create-app heroku-api-token))) (def test-app-name @(future (.getName test-heroku-app))) (def test-app-present @(future (app-exists? test-app-name heroku-api-token))) (when test-app-present (add-config test-app-name {:ham "foo"} heroku-api-token) (expect true (contains? @(future (list-config test-app-name heroku-api-token)) "ham")) (remove-config test-app-name "ham" heroku-api-token) (expect false (contains? @(future (list-config test-app-name heroku-api-token)) "ham")) (remove-app test-app-name heroku-api-token))
[ { "context": ";; Generic interface for functors\n\n;; by Konrad Hinsen\n;; last updated May 3, 2009\n\n;; Copyright (c) Kon", "end": 54, "score": 0.9998754262924194, "start": 41, "tag": "NAME", "value": "Konrad Hinsen" }, { "context": "nsen\n;; last updated May 3, 2009\n\n;; Copyright (c) Konrad Hinsen, 2009. All rights reserved. The use\n;; and distr", "end": 114, "score": 0.9998688697814941, "start": 101, "tag": "NAME", "value": "Konrad Hinsen" }, { "context": "any other, from this software.\n\n(ns\n #^{:author \"Konrad Hinsen\"\n :doc \"Generic functor interface (fmap)\"}\n ", "end": 583, "score": 0.9998852014541626, "start": 570, "tag": "NAME", "value": "Konrad Hinsen" } ]
ThirdParty/clojure-contrib-1.1.0/src/clojure/contrib/generic/functor.clj
allertonm/Couverjure
3
;; Generic interface for functors ;; by Konrad Hinsen ;; last updated May 3, 2009 ;; Copyright (c) Konrad Hinsen, 2009. All rights reserved. The use ;; and distribution terms for this software are covered by the Eclipse ;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) ;; which can be found in the file epl-v10.html at the root of this ;; distribution. By using this software in any fashion, you are ;; agreeing to be bound by the terms of this license. You must not ;; remove this notice, or any other, from this software. (ns #^{:author "Konrad Hinsen" :doc "Generic functor interface (fmap)"} clojure.contrib.generic.functor) (defmulti fmap "Applies function f to each item in the data structure s and returns a structure of the same kind." {:arglists '([f s])} (fn [f s] (type s))) (defmethod fmap clojure.lang.IPersistentList [f v] (into (empty v) (map f v))) (defmethod fmap clojure.lang.IPersistentVector [f v] (into (empty v) (map f v))) (defmethod fmap clojure.lang.IPersistentMap [f m] (into (empty m) (for [[k v] m] [k (f v)]))) (defmethod fmap clojure.lang.IPersistentSet [f s] (into (empty s) (map f s)))
64570
;; Generic interface for functors ;; by <NAME> ;; last updated May 3, 2009 ;; Copyright (c) <NAME>, 2009. All rights reserved. The use ;; and distribution terms for this software are covered by the Eclipse ;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) ;; which can be found in the file epl-v10.html at the root of this ;; distribution. By using this software in any fashion, you are ;; agreeing to be bound by the terms of this license. You must not ;; remove this notice, or any other, from this software. (ns #^{:author "<NAME>" :doc "Generic functor interface (fmap)"} clojure.contrib.generic.functor) (defmulti fmap "Applies function f to each item in the data structure s and returns a structure of the same kind." {:arglists '([f s])} (fn [f s] (type s))) (defmethod fmap clojure.lang.IPersistentList [f v] (into (empty v) (map f v))) (defmethod fmap clojure.lang.IPersistentVector [f v] (into (empty v) (map f v))) (defmethod fmap clojure.lang.IPersistentMap [f m] (into (empty m) (for [[k v] m] [k (f v)]))) (defmethod fmap clojure.lang.IPersistentSet [f s] (into (empty s) (map f s)))
true
;; Generic interface for functors ;; by PI:NAME:<NAME>END_PI ;; last updated May 3, 2009 ;; Copyright (c) PI:NAME:<NAME>END_PI, 2009. All rights reserved. The use ;; and distribution terms for this software are covered by the Eclipse ;; Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php) ;; which can be found in the file epl-v10.html at the root of this ;; distribution. By using this software in any fashion, you are ;; agreeing to be bound by the terms of this license. You must not ;; remove this notice, or any other, from this software. (ns #^{:author "PI:NAME:<NAME>END_PI" :doc "Generic functor interface (fmap)"} clojure.contrib.generic.functor) (defmulti fmap "Applies function f to each item in the data structure s and returns a structure of the same kind." {:arglists '([f s])} (fn [f s] (type s))) (defmethod fmap clojure.lang.IPersistentList [f v] (into (empty v) (map f v))) (defmethod fmap clojure.lang.IPersistentVector [f v] (into (empty v) (map f v))) (defmethod fmap clojure.lang.IPersistentMap [f m] (into (empty m) (for [[k v] m] [k (f v)]))) (defmethod fmap clojure.lang.IPersistentSet [f s] (into (empty s) (map f s)))
[ { "context": " \"subnet-f929df91\"\n :provisioning-user {:login \"ubuntu\"\n :ssh-key-name ssh-key-nam", "end": 3140, "score": 0.9928012490272522, "start": 3134, "tag": "USERNAME", "value": "ubuntu" }, { "context": "{:key-id (secret/resolve-secret key-id :passphrase passphrase)\n :key-secret (secret/resolve-secret key-s", "end": 3664, "score": 0.9982383251190186, "start": 3654, "tag": "PASSWORD", "value": "passphrase" }, { "context": "cret (secret/resolve-secret key-secret :passphrase passphrase)}))))\n\n(defn provider\n [resolved-aws-context]\n ", "end": 3742, "score": 0.99850994348526, "start": 3732, "tag": "PASSWORD", "value": "passphrase" } ]
main/src/dda/pallet/commons/aws.clj
DomainDrivenArchitecture/dda-pallet-commons
2
; Licensed to the Apache Software Foundation (ASF) under one ; or more contributor license agreements. See the NOTICE file ; distributed with this work for additional information ; regarding copyright ownership. The ASF licenses this file ; to you under the Apache License, Version 2.0 (the ; "License"); you may not use this file except in compliance ; with the License. You may obtain a copy of the License at ; ; http://www.apache.org/licenses/LICENSE-2.0 ; ; Unless required by applicable law or agreed to in writing, software ; distributed under the License is distributed on an "AS IS" BASIS, ; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ; See the License for the specific language governing permissions and ; limitations under the License. (ns dda.pallet.commons.aws (:require [schema.core :as s] [pallet.api :as api] [pallet.compute :as compute] [dda.pallet.commons.external-config :as ext-config] [dda.pallet.commons.secret :as secret])) ; TODO: refactor - move to config commons (def AwsContext {:key-id secret/Secret :key-secret secret/Secret :region s/Str :subnet-ids [s/Str]}) ; TODO: refactor - move to config commons (def ProvisioningUser {:login s/Str :ssh-key-name s/Str}) ; TODO: refactor - move to config commons (def AwsNodeSpec {:region s/Str :ami-id s/Str :hardware-id s/Str :security-group-ids [s/Str] :subnet-id s/Str :provisioning-user ProvisioningUser}) ; TODO: refactor - move to config commons (def Targets {:context AwsContext :node-spec AwsNodeSpec}) (def AwsContextResolved (secret/create-resolved-schema AwsContext)) (def TargetsResolved (secret/create-resolved-schema Targets)) (s/defn ^:always-validate resolve-targets :- TargetsResolved [targets :- Targets] (secret/resolve-secrets targets Targets)) ; TODO: refactor - move to config commons (s/defn ^:always-validate load-targets :- Targets [file-name :- s/Str] (ext-config/parse-config file-name)) (s/defn ^:always-validate meissa-unencrypted-context :- AwsContext [] (let 
[aws-decrypted-credentials (get-in (pallet.configure/pallet-config) [:services :aws])] {:key-id {:plain (get-in aws-decrypted-credentials [:account])} :key-secret {:plain (get-in aws-decrypted-credentials [:secret])} :region "eu-central-1" :subnet-ids ["subnet-f929df91"]})) (s/defn ^:always-validate meissa-encrypted-context :- AwsContext [key-id :- s/Str] {:key-id {:pallet-secret {:service-path [:services :aws] :record-element :account :key-id key-id}} :key-secret {:pallet-secret {:service-path [:services :aws] :record-element :secret :key-id key-id}} :region "eu-central-1" :subnet-ids ["subnet-f929df91"]}) (s/defn meissa-default-node-spec :- AwsNodeSpec [ssh-key-name :- s/Str] {:region "eu-central-1a" :ami-id "ami-82cf0aed" :hardware-id "t2.micro" :security-group-ids ["sg-0606b16e"] :subnet-id "subnet-f929df91" :provisioning-user {:login "ubuntu" :ssh-key-name ssh-key-name}}) (s/defn resolve-secrets :- AwsContextResolved ([context :- AwsContext] (let [{:keys [key-id key-secret]} context] (merge context {:key-id (secret/resolve-secret key-id) :key-secret (secret/resolve-secret key-secret)}))) ([context :- AwsContext passphrase :- s/Str] (let [{:keys [key-id key-secret]} context] (println key-id) (merge context {:key-id (secret/resolve-secret key-id :passphrase passphrase) :key-secret (secret/resolve-secret key-secret :passphrase passphrase)})))) (defn provider [resolved-aws-context] (let [{:keys [key-id key-secret region subnet-ids]} resolved-aws-context] (compute/instantiate-provider :pallet-ec2 :identity key-id :credential key-secret :endpoint region :subnet-ids subnet-ids))) (s/defn ^:always-validate node-spec [aws-node-spec :- AwsNodeSpec] (let [{:keys [region ami-id hardware-id security-group-ids subnet-id provisioning-user]} aws-node-spec {:keys [login ssh-key-name]} provisioning-user] (api/node-spec :location {:location-id region} :image {:os-family :ubuntu ;eu-central-1 16-04 LTS hvm :image-id ami-id ;eu-west1 16-04 LTS hvm :image-id "ami-07174474" 
;us-east-1 16-04 LTS hvm :image-id "ami-45b69e52" :os-version "16.04" :key-name ssh-key-name :login-user login} :hardware {:hardware-id hardware-id} :provider {:pallet-ec2 {:network-interfaces [{:device-index 0 :groups security-group-ids :subnet-id subnet-id :associate-public-ip-address true :delete-on-termination true}]}})))
5869
; Licensed to the Apache Software Foundation (ASF) under one ; or more contributor license agreements. See the NOTICE file ; distributed with this work for additional information ; regarding copyright ownership. The ASF licenses this file ; to you under the Apache License, Version 2.0 (the ; "License"); you may not use this file except in compliance ; with the License. You may obtain a copy of the License at ; ; http://www.apache.org/licenses/LICENSE-2.0 ; ; Unless required by applicable law or agreed to in writing, software ; distributed under the License is distributed on an "AS IS" BASIS, ; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ; See the License for the specific language governing permissions and ; limitations under the License. (ns dda.pallet.commons.aws (:require [schema.core :as s] [pallet.api :as api] [pallet.compute :as compute] [dda.pallet.commons.external-config :as ext-config] [dda.pallet.commons.secret :as secret])) ; TODO: refactor - move to config commons (def AwsContext {:key-id secret/Secret :key-secret secret/Secret :region s/Str :subnet-ids [s/Str]}) ; TODO: refactor - move to config commons (def ProvisioningUser {:login s/Str :ssh-key-name s/Str}) ; TODO: refactor - move to config commons (def AwsNodeSpec {:region s/Str :ami-id s/Str :hardware-id s/Str :security-group-ids [s/Str] :subnet-id s/Str :provisioning-user ProvisioningUser}) ; TODO: refactor - move to config commons (def Targets {:context AwsContext :node-spec AwsNodeSpec}) (def AwsContextResolved (secret/create-resolved-schema AwsContext)) (def TargetsResolved (secret/create-resolved-schema Targets)) (s/defn ^:always-validate resolve-targets :- TargetsResolved [targets :- Targets] (secret/resolve-secrets targets Targets)) ; TODO: refactor - move to config commons (s/defn ^:always-validate load-targets :- Targets [file-name :- s/Str] (ext-config/parse-config file-name)) (s/defn ^:always-validate meissa-unencrypted-context :- AwsContext [] (let 
[aws-decrypted-credentials (get-in (pallet.configure/pallet-config) [:services :aws])] {:key-id {:plain (get-in aws-decrypted-credentials [:account])} :key-secret {:plain (get-in aws-decrypted-credentials [:secret])} :region "eu-central-1" :subnet-ids ["subnet-f929df91"]})) (s/defn ^:always-validate meissa-encrypted-context :- AwsContext [key-id :- s/Str] {:key-id {:pallet-secret {:service-path [:services :aws] :record-element :account :key-id key-id}} :key-secret {:pallet-secret {:service-path [:services :aws] :record-element :secret :key-id key-id}} :region "eu-central-1" :subnet-ids ["subnet-f929df91"]}) (s/defn meissa-default-node-spec :- AwsNodeSpec [ssh-key-name :- s/Str] {:region "eu-central-1a" :ami-id "ami-82cf0aed" :hardware-id "t2.micro" :security-group-ids ["sg-0606b16e"] :subnet-id "subnet-f929df91" :provisioning-user {:login "ubuntu" :ssh-key-name ssh-key-name}}) (s/defn resolve-secrets :- AwsContextResolved ([context :- AwsContext] (let [{:keys [key-id key-secret]} context] (merge context {:key-id (secret/resolve-secret key-id) :key-secret (secret/resolve-secret key-secret)}))) ([context :- AwsContext passphrase :- s/Str] (let [{:keys [key-id key-secret]} context] (println key-id) (merge context {:key-id (secret/resolve-secret key-id :passphrase <PASSWORD>) :key-secret (secret/resolve-secret key-secret :passphrase <PASSWORD>)})))) (defn provider [resolved-aws-context] (let [{:keys [key-id key-secret region subnet-ids]} resolved-aws-context] (compute/instantiate-provider :pallet-ec2 :identity key-id :credential key-secret :endpoint region :subnet-ids subnet-ids))) (s/defn ^:always-validate node-spec [aws-node-spec :- AwsNodeSpec] (let [{:keys [region ami-id hardware-id security-group-ids subnet-id provisioning-user]} aws-node-spec {:keys [login ssh-key-name]} provisioning-user] (api/node-spec :location {:location-id region} :image {:os-family :ubuntu ;eu-central-1 16-04 LTS hvm :image-id ami-id ;eu-west1 16-04 LTS hvm :image-id "ami-07174474" 
;us-east-1 16-04 LTS hvm :image-id "ami-45b69e52" :os-version "16.04" :key-name ssh-key-name :login-user login} :hardware {:hardware-id hardware-id} :provider {:pallet-ec2 {:network-interfaces [{:device-index 0 :groups security-group-ids :subnet-id subnet-id :associate-public-ip-address true :delete-on-termination true}]}})))
true
; Licensed to the Apache Software Foundation (ASF) under one ; or more contributor license agreements. See the NOTICE file ; distributed with this work for additional information ; regarding copyright ownership. The ASF licenses this file ; to you under the Apache License, Version 2.0 (the ; "License"); you may not use this file except in compliance ; with the License. You may obtain a copy of the License at ; ; http://www.apache.org/licenses/LICENSE-2.0 ; ; Unless required by applicable law or agreed to in writing, software ; distributed under the License is distributed on an "AS IS" BASIS, ; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ; See the License for the specific language governing permissions and ; limitations under the License. (ns dda.pallet.commons.aws (:require [schema.core :as s] [pallet.api :as api] [pallet.compute :as compute] [dda.pallet.commons.external-config :as ext-config] [dda.pallet.commons.secret :as secret])) ; TODO: refactor - move to config commons (def AwsContext {:key-id secret/Secret :key-secret secret/Secret :region s/Str :subnet-ids [s/Str]}) ; TODO: refactor - move to config commons (def ProvisioningUser {:login s/Str :ssh-key-name s/Str}) ; TODO: refactor - move to config commons (def AwsNodeSpec {:region s/Str :ami-id s/Str :hardware-id s/Str :security-group-ids [s/Str] :subnet-id s/Str :provisioning-user ProvisioningUser}) ; TODO: refactor - move to config commons (def Targets {:context AwsContext :node-spec AwsNodeSpec}) (def AwsContextResolved (secret/create-resolved-schema AwsContext)) (def TargetsResolved (secret/create-resolved-schema Targets)) (s/defn ^:always-validate resolve-targets :- TargetsResolved [targets :- Targets] (secret/resolve-secrets targets Targets)) ; TODO: refactor - move to config commons (s/defn ^:always-validate load-targets :- Targets [file-name :- s/Str] (ext-config/parse-config file-name)) (s/defn ^:always-validate meissa-unencrypted-context :- AwsContext [] (let 
[aws-decrypted-credentials (get-in (pallet.configure/pallet-config) [:services :aws])] {:key-id {:plain (get-in aws-decrypted-credentials [:account])} :key-secret {:plain (get-in aws-decrypted-credentials [:secret])} :region "eu-central-1" :subnet-ids ["subnet-f929df91"]})) (s/defn ^:always-validate meissa-encrypted-context :- AwsContext [key-id :- s/Str] {:key-id {:pallet-secret {:service-path [:services :aws] :record-element :account :key-id key-id}} :key-secret {:pallet-secret {:service-path [:services :aws] :record-element :secret :key-id key-id}} :region "eu-central-1" :subnet-ids ["subnet-f929df91"]}) (s/defn meissa-default-node-spec :- AwsNodeSpec [ssh-key-name :- s/Str] {:region "eu-central-1a" :ami-id "ami-82cf0aed" :hardware-id "t2.micro" :security-group-ids ["sg-0606b16e"] :subnet-id "subnet-f929df91" :provisioning-user {:login "ubuntu" :ssh-key-name ssh-key-name}}) (s/defn resolve-secrets :- AwsContextResolved ([context :- AwsContext] (let [{:keys [key-id key-secret]} context] (merge context {:key-id (secret/resolve-secret key-id) :key-secret (secret/resolve-secret key-secret)}))) ([context :- AwsContext passphrase :- s/Str] (let [{:keys [key-id key-secret]} context] (println key-id) (merge context {:key-id (secret/resolve-secret key-id :passphrase PI:PASSWORD:<PASSWORD>END_PI) :key-secret (secret/resolve-secret key-secret :passphrase PI:PASSWORD:<PASSWORD>END_PI)})))) (defn provider [resolved-aws-context] (let [{:keys [key-id key-secret region subnet-ids]} resolved-aws-context] (compute/instantiate-provider :pallet-ec2 :identity key-id :credential key-secret :endpoint region :subnet-ids subnet-ids))) (s/defn ^:always-validate node-spec [aws-node-spec :- AwsNodeSpec] (let [{:keys [region ami-id hardware-id security-group-ids subnet-id provisioning-user]} aws-node-spec {:keys [login ssh-key-name]} provisioning-user] (api/node-spec :location {:location-id region} :image {:os-family :ubuntu ;eu-central-1 16-04 LTS hvm :image-id ami-id ;eu-west1 16-04 LTS 
hvm :image-id "ami-07174474" ;us-east-1 16-04 LTS hvm :image-id "ami-45b69e52" :os-version "16.04" :key-name ssh-key-name :login-user login} :hardware {:hardware-id hardware-id} :provider {:pallet-ec2 {:network-interfaces [{:device-index 0 :groups security-group-ids :subnet-id subnet-id :associate-public-ip-address true :delete-on-termination true}]}})))
[ { "context": "e [[zensols.example.sa-tp-eval]].\"\n :author \"Paul Landes\"}\n zensols.example.sa-tp-feature\n (:require [", "end": 172, "score": 0.9998868107795715, "start": 161, "tag": "NAME", "value": "Paul Landes" }, { "context": "ols.model.eval-classifier :as ec]))\n\n(def id-key :id)\n\n(def ^{:dynamic true :private true} *context* n", "end": 634, "score": 0.5255217552185059, "start": 632, "tag": "KEY", "value": "id" } ]
src/clojure/zensols/example/sa_tp_feature.clj
plandes/clj-example-nlp-ml
13
(ns ^{:doc "Just like [[zensols.example.sa_feature]] but this shows how to use a two pass cross validation. See [[zensols.example.sa-tp-eval]]." :author "Paul Landes"} zensols.example.sa-tp-feature (:require [clojure.tools.logging :as log]) (:require [zensols.nlparse.parse :as p] [zensols.util.string :as zs] [zensols.nlparse.feature.lang :as fe] [zensols.nlparse.feature.word :as fw] [zensols.nlparse.feature.word-count :as wc] [zensols.model.execute-classifier :refer (with-model-conf)] [zensols.model.eval-classifier :as ec])) (def id-key :id) (def ^{:dynamic true :private true} *context* nil) (def ^:private classes ["answer" "question" "expressive"]) (defn- context [& {:keys [no-throw?] :or {no-throw? false}}] (or *context* (and (not no-throw?) (throw (ex-info "No context bound" {}))))) (defmacro with-feature-context {:style/indent 1} [context & forms] `(binding [*context* ~context] ~@forms)) (defn create-features ([panon] (create-features panon nil)) ([panon context] (log/debugf "creating features (context=<%s>) for <%s>" (zs/trunc context) (zs/trunc panon)) (let [{:keys [word-count-stats]} context tokens (p/tokens panon)] (merge (fe/verb-features (->> panon :sents first)) (fw/token-features panon tokens) (fe/pos-tag-features tokens) (if word-count-stats (wc/label-count-score-features panon word-count-stats)))))) (defn- flatten-keys [adb-keys] (mapcat #(into [] %) adb-keys)) (defn create-feature-sets [& {:keys [context] :as adb-keys}] (log/infof "creating features with keys=%s: %s" adb-keys (zs/trunc adb-keys)) (let [context (or context *context*) {:keys [anons-fn]} context anons (apply anons-fn (->> (flatten-keys adb-keys) (concat [:include-ids? true]))) ;; we must provide keys so the second pass can correlate results back ;; in the testing/training for each fold; the label is also needed for ;; every instance fs (if (ec/executing-two-pass?) 
(->> anons (map (fn [{:keys [id class-label]}] {:sa class-label id-key id}))) (->> anons (map (fn [{:keys [class-label instance id]}] (merge {:sa class-label :utterance (->> instance :text) id-key id} (create-features instance context))))))] (log/debugf "ids: %s" (->> fs (map #(-> % id-key read-string)) pr-str)) fs)) (defn create-context [& {:keys [anons-fn] :as adb-keys}] (let [fkeys (flatten-keys adb-keys) anons (apply anons-fn fkeys)] (log/infof "creating context with key=%s anon count: %d" (zs/trunc adb-keys) (count anons)) (log/tracef "adb-keys: %s" (pr-str adb-keys)) (->> anons wc/calculate-feature-stats (hash-map :anons-fn anons-fn :word-count-stats)))) (defn feature-metas [& _] (concat (fe/verb-feature-metas) (fw/token-feature-metas) (fe/pos-tag-feature-metas) (wc/label-word-count-feature-metas classes))) (defn- class-feature-meta [] [:sa classes]) (defn create-model-config [] {:name "speech-act-two-pass" :create-feature-sets-fn create-feature-sets :create-features-fn create-features :feature-metas-fn feature-metas :class-feature-meta-fn class-feature-meta :create-two-pass-context-fn create-context :model-return-keys #{:label :distributions :features}}) (defn- main [& actions] (with-feature-context (create-context) (->> actions (map (fn [action] (case action 1 (with-model-conf (create-model-config) (ec/display-features :max 10))))) doall)))
41419
(ns ^{:doc "Just like [[zensols.example.sa_feature]] but this shows how to use a two pass cross validation. See [[zensols.example.sa-tp-eval]]." :author "<NAME>"} zensols.example.sa-tp-feature (:require [clojure.tools.logging :as log]) (:require [zensols.nlparse.parse :as p] [zensols.util.string :as zs] [zensols.nlparse.feature.lang :as fe] [zensols.nlparse.feature.word :as fw] [zensols.nlparse.feature.word-count :as wc] [zensols.model.execute-classifier :refer (with-model-conf)] [zensols.model.eval-classifier :as ec])) (def id-key :<KEY>) (def ^{:dynamic true :private true} *context* nil) (def ^:private classes ["answer" "question" "expressive"]) (defn- context [& {:keys [no-throw?] :or {no-throw? false}}] (or *context* (and (not no-throw?) (throw (ex-info "No context bound" {}))))) (defmacro with-feature-context {:style/indent 1} [context & forms] `(binding [*context* ~context] ~@forms)) (defn create-features ([panon] (create-features panon nil)) ([panon context] (log/debugf "creating features (context=<%s>) for <%s>" (zs/trunc context) (zs/trunc panon)) (let [{:keys [word-count-stats]} context tokens (p/tokens panon)] (merge (fe/verb-features (->> panon :sents first)) (fw/token-features panon tokens) (fe/pos-tag-features tokens) (if word-count-stats (wc/label-count-score-features panon word-count-stats)))))) (defn- flatten-keys [adb-keys] (mapcat #(into [] %) adb-keys)) (defn create-feature-sets [& {:keys [context] :as adb-keys}] (log/infof "creating features with keys=%s: %s" adb-keys (zs/trunc adb-keys)) (let [context (or context *context*) {:keys [anons-fn]} context anons (apply anons-fn (->> (flatten-keys adb-keys) (concat [:include-ids? true]))) ;; we must provide keys so the second pass can correlate results back ;; in the testing/training for each fold; the label is also needed for ;; every instance fs (if (ec/executing-two-pass?) 
(->> anons (map (fn [{:keys [id class-label]}] {:sa class-label id-key id}))) (->> anons (map (fn [{:keys [class-label instance id]}] (merge {:sa class-label :utterance (->> instance :text) id-key id} (create-features instance context))))))] (log/debugf "ids: %s" (->> fs (map #(-> % id-key read-string)) pr-str)) fs)) (defn create-context [& {:keys [anons-fn] :as adb-keys}] (let [fkeys (flatten-keys adb-keys) anons (apply anons-fn fkeys)] (log/infof "creating context with key=%s anon count: %d" (zs/trunc adb-keys) (count anons)) (log/tracef "adb-keys: %s" (pr-str adb-keys)) (->> anons wc/calculate-feature-stats (hash-map :anons-fn anons-fn :word-count-stats)))) (defn feature-metas [& _] (concat (fe/verb-feature-metas) (fw/token-feature-metas) (fe/pos-tag-feature-metas) (wc/label-word-count-feature-metas classes))) (defn- class-feature-meta [] [:sa classes]) (defn create-model-config [] {:name "speech-act-two-pass" :create-feature-sets-fn create-feature-sets :create-features-fn create-features :feature-metas-fn feature-metas :class-feature-meta-fn class-feature-meta :create-two-pass-context-fn create-context :model-return-keys #{:label :distributions :features}}) (defn- main [& actions] (with-feature-context (create-context) (->> actions (map (fn [action] (case action 1 (with-model-conf (create-model-config) (ec/display-features :max 10))))) doall)))
true
(ns ^{:doc "Just like [[zensols.example.sa_feature]] but this shows how to use a two pass cross validation. See [[zensols.example.sa-tp-eval]]." :author "PI:NAME:<NAME>END_PI"} zensols.example.sa-tp-feature (:require [clojure.tools.logging :as log]) (:require [zensols.nlparse.parse :as p] [zensols.util.string :as zs] [zensols.nlparse.feature.lang :as fe] [zensols.nlparse.feature.word :as fw] [zensols.nlparse.feature.word-count :as wc] [zensols.model.execute-classifier :refer (with-model-conf)] [zensols.model.eval-classifier :as ec])) (def id-key :PI:KEY:<KEY>END_PI) (def ^{:dynamic true :private true} *context* nil) (def ^:private classes ["answer" "question" "expressive"]) (defn- context [& {:keys [no-throw?] :or {no-throw? false}}] (or *context* (and (not no-throw?) (throw (ex-info "No context bound" {}))))) (defmacro with-feature-context {:style/indent 1} [context & forms] `(binding [*context* ~context] ~@forms)) (defn create-features ([panon] (create-features panon nil)) ([panon context] (log/debugf "creating features (context=<%s>) for <%s>" (zs/trunc context) (zs/trunc panon)) (let [{:keys [word-count-stats]} context tokens (p/tokens panon)] (merge (fe/verb-features (->> panon :sents first)) (fw/token-features panon tokens) (fe/pos-tag-features tokens) (if word-count-stats (wc/label-count-score-features panon word-count-stats)))))) (defn- flatten-keys [adb-keys] (mapcat #(into [] %) adb-keys)) (defn create-feature-sets [& {:keys [context] :as adb-keys}] (log/infof "creating features with keys=%s: %s" adb-keys (zs/trunc adb-keys)) (let [context (or context *context*) {:keys [anons-fn]} context anons (apply anons-fn (->> (flatten-keys adb-keys) (concat [:include-ids? true]))) ;; we must provide keys so the second pass can correlate results back ;; in the testing/training for each fold; the label is also needed for ;; every instance fs (if (ec/executing-two-pass?) 
(->> anons (map (fn [{:keys [id class-label]}] {:sa class-label id-key id}))) (->> anons (map (fn [{:keys [class-label instance id]}] (merge {:sa class-label :utterance (->> instance :text) id-key id} (create-features instance context))))))] (log/debugf "ids: %s" (->> fs (map #(-> % id-key read-string)) pr-str)) fs)) (defn create-context [& {:keys [anons-fn] :as adb-keys}] (let [fkeys (flatten-keys adb-keys) anons (apply anons-fn fkeys)] (log/infof "creating context with key=%s anon count: %d" (zs/trunc adb-keys) (count anons)) (log/tracef "adb-keys: %s" (pr-str adb-keys)) (->> anons wc/calculate-feature-stats (hash-map :anons-fn anons-fn :word-count-stats)))) (defn feature-metas [& _] (concat (fe/verb-feature-metas) (fw/token-feature-metas) (fe/pos-tag-feature-metas) (wc/label-word-count-feature-metas classes))) (defn- class-feature-meta [] [:sa classes]) (defn create-model-config [] {:name "speech-act-two-pass" :create-feature-sets-fn create-feature-sets :create-features-fn create-features :feature-metas-fn feature-metas :class-feature-meta-fn class-feature-meta :create-two-pass-context-fn create-context :model-return-keys #{:label :distributions :features}}) (defn- main [& actions] (with-feature-context (create-context) (->> actions (map (fn [action] (case action 1 (with-model-conf (create-model-config) (ec/display-features :max 10))))) doall)))