Dataset columns (the records below repeat these fields in order):

column        type           min       max
_id           stringlengths  64        64
repository    stringlengths  6         84
name          stringlengths  4         110
content       stringlengths  0         248k
license       null
download_url  stringlengths  89        454
language      stringclasses  7 values
comments      stringlengths  0         74.6k
code          stringlengths  0         248k
510ed41a1228deb50ae2649d8e3d185f65628b0345e79e68e69e7c606b9774d8
dbuenzli/vz
evidence_diamonds.mli
(*---------------------------------------------------------------------------
   Copyright (c) 2022 The vz programmers. All rights reserved.
   Distributed under the ISC license, see terms at the end of the file.
  ---------------------------------------------------------------------------*)

(** Diamonds dataset.

    This is the {{:}diamonds} dataset distributed with [ggplot2]. *)

open Evidence

type obs = float * string * string * string * float * float * int * float * float * float
(** The type for diamond observations. The 10 variables are as follows,
    in order. *)

val carat : (obs, float) Var.t
(** [carat] is the diamond weight. *)

val cut : (obs, string) Var.t
(** [cut] is the cut quality, one of ["Fair"], ["Good"], ["Very Good"],
    ["Premium"], ["Ideal"]. *)

val color : (obs, string) Var.t
(** [color] is the diamond color from ["D"] (best) to ["J"] (worst). *)

val clarity : (obs, string) Var.t
(** [clarity] is a measurement of how clear the diamond is, one of
    ["I1"] (worst), ["SI2"], ["SI1"], ["VS2"], ["VS1"], ["VVS2"], ["VVS1"],
    ["IF"] (best). *)

val depth : (obs, float) Var.t
(** [depth] is the total depth percentage z / mean(x, y) = 2 * z / (x + y). *)

val table : (obs, float) Var.t
(** [table] is the width of top of diamond relative to widest point. *)

val price : (obs, int) Var.t
(** [price] is the diamond price in USD. *)

val x : (obs, float) Var.t
(** [x] is the diamond length in mm. *)

val y : (obs, float) Var.t
(** [y] is the diamond width in mm. *)

val z : (obs, float) Var.t
(** [z] is the diamond depth in mm. *)

val dataset : obs Dataset.t
(** [dataset] is a list of 53940 diamond observations. *)

(*---------------------------------------------------------------------------
   Copyright (c) 2022 The vz programmers

   Permission to use, copy, modify, and/or distribute this software for any
   purpose with or without fee is hereby granted, provided that the above
   copyright notice and this permission notice appear in all copies.

   THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
   WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
   MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
   ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
   WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
   ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
   OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  ---------------------------------------------------------------------------*)
null
https://raw.githubusercontent.com/dbuenzli/vz/11056a7b5927519b4b7fa7ae762cdd2b7532936b/samples/evidence_diamonds.mli
ocaml
* [carat] is the diamond weight. * [color] is the diamond color from ["D"] (best) to ["J"] (worst). * [table] is the width of top of diamond relative to widest point. * [price] is the diamond price in USD.
(*---------------------------------------------------------------------------
   Copyright (c) 2022 The vz programmers. All rights reserved.
   Distributed under the ISC license, see terms at the end of the file.
  ---------------------------------------------------------------------------*)

(** Diamonds dataset.

    This is the {{:}diamonds} dataset distributed with [ggplot2]. *)

open Evidence

type obs = float * string * string * string * float * float * int * float * float * float
(** The type for diamond observations. The 10 variables are as follows,
    in order. *)

val carat : (obs, float) Var.t

val cut : (obs, string) Var.t
(** [cut] is the cut quality, one of ["Fair"], ["Good"], ["Very Good"],
    ["Premium"], ["Ideal"]. *)

val color : (obs, string) Var.t

val clarity : (obs, string) Var.t
(** [clarity] is a measurement of how clear the diamond is, one of
    ["I1"] (worst), ["SI2"], ["SI1"], ["VS2"], ["VS1"], ["VVS2"], ["VVS1"],
    ["IF"] (best). *)

val depth : (obs, float) Var.t
(** [depth] is the total depth percentage z / mean(x, y) = 2 * z / (x + y). *)

val table : (obs, float) Var.t

val price : (obs, int) Var.t

val x : (obs, float) Var.t
(** [x] is the diamond length in mm. *)

val y : (obs, float) Var.t
(** [y] is the diamond width in mm. *)

val z : (obs, float) Var.t
(** [z] is the diamond depth in mm. *)

val dataset : obs Dataset.t
(** [dataset] is a list of 53940 diamond observations. *)

(*---------------------------------------------------------------------------
   Copyright (c) 2022 The vz programmers

   Permission to use, copy, modify, and/or distribute this software for any
   purpose with or without fee is hereby granted, provided that the above
   copyright notice and this permission notice appear in all copies.

   THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
   WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
   MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
   ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
   WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
   ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
   OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  ---------------------------------------------------------------------------*)
8f4e911b7abfa82417a19148554acbf79906cd910230894114f515fc814884b4
fulcrologic/fulcro-rad-tutorial
database_queries.clj
(ns com.example.components.database-queries (:require [com.fulcrologic.rad.database-adapters.datomic :as datomic] [datomic.api :as d] [taoensso.timbre :as log] [taoensso.encore :as enc])) (defn get-all-accounts [env query-params] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (let [ids (if (:show-inactive? query-params) (d/q [:find '[?uuid ...] :where ['?dbid :account/id '?uuid]] db) (d/q [:find '[?uuid ...] :where ['?dbid :account/active? true] ['?dbid :account/id '?uuid]] db))] (mapv (fn [id] {:account/id id}) ids)) (log/error "No database atom for production schema!"))) (defn get-all-items [env {:category/keys [id]}] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (let [ids (if id (d/q '[:find [?uuid ...] :in $ ?catid :where [?c :category/id ?catid] [?i :item/category ?c] [?i :item/id ?uuid]] db id) (d/q '[:find [?uuid ...] :where [_ :item/id ?uuid]] db))] (mapv (fn [id] {:item/id id}) ids)) (log/error "No database atom for production schema!"))) (defn get-customer-invoices [env {:account/keys [id]}] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (let [ids (d/q '[:find [?uuid ...] :in $ ?cid :where [?dbid :invoice/id ?uuid] [?dbid :invoice/customer ?c] [?c :account/id ?cid]] db id)] (mapv (fn [id] {:invoice/id id}) ids)) (log/error "No database atom for production schema!"))) (defn get-all-invoices [env query-params] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (let [ids (d/q [:find '[?uuid ...] :where ['?dbid :invoice/id '?uuid]] db)] (mapv (fn [id] {:invoice/id id}) ids)) (log/error "No database atom for production schema!"))) (defn get-invoice-customer-id [env invoice-id] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (d/q '[:find ?account-uuid . :in $ ?invoice-uuid :where [?i :invoice/id ?invoice-uuid] [?i :invoice/customer ?c] [?c :account/id ?account-uuid]] db invoice-id) (log/error "No database atom for production schema!"))) (defn get-all-categories [env query-params] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (let [ids (d/q '[:find [?id ...] :where [?e :category/label] [?e :category/id ?id]] db)] (mapv (fn [id] {:category/id id}) ids)) (log/error "No database atom for production schema!"))) (defn get-line-item-category [env line-item-id] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (let [id (d/q '[:find ?cid . :in $ ?line-item-id :where [?e :line-item/id ?line-item-id] [?e :line-item/item ?item] [?item :item/category ?c] [?c :category/id ?cid]] db line-item-id)] id) (log/error "No database atom for production schema!"))) (defn get-login-info "Get the account name, time zone, and password info via a username (email)." [{::datomic/keys [databases] :as env} username] (enc/if-let [db @(:production databases)] (d/pull db [:account/name {:time-zone/zone-id [:db/ident]} :password/hashed-value :password/salt :password/iterations] [:account/email username])))
null
https://raw.githubusercontent.com/fulcrologic/fulcro-rad-tutorial/809b8f8833363be7dc0c9a66307b1fa164da4d70/src/main/com/example/components/database_queries.clj
clojure
(ns com.example.components.database-queries (:require [com.fulcrologic.rad.database-adapters.datomic :as datomic] [datomic.api :as d] [taoensso.timbre :as log] [taoensso.encore :as enc])) (defn get-all-accounts [env query-params] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (let [ids (if (:show-inactive? query-params) (d/q [:find '[?uuid ...] :where ['?dbid :account/id '?uuid]] db) (d/q [:find '[?uuid ...] :where ['?dbid :account/active? true] ['?dbid :account/id '?uuid]] db))] (mapv (fn [id] {:account/id id}) ids)) (log/error "No database atom for production schema!"))) (defn get-all-items [env {:category/keys [id]}] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (let [ids (if id (d/q '[:find [?uuid ...] :in $ ?catid :where [?c :category/id ?catid] [?i :item/category ?c] [?i :item/id ?uuid]] db id) (d/q '[:find [?uuid ...] :where [_ :item/id ?uuid]] db))] (mapv (fn [id] {:item/id id}) ids)) (log/error "No database atom for production schema!"))) (defn get-customer-invoices [env {:account/keys [id]}] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (let [ids (d/q '[:find [?uuid ...] :in $ ?cid :where [?dbid :invoice/id ?uuid] [?dbid :invoice/customer ?c] [?c :account/id ?cid]] db id)] (mapv (fn [id] {:invoice/id id}) ids)) (log/error "No database atom for production schema!"))) (defn get-all-invoices [env query-params] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (let [ids (d/q [:find '[?uuid ...] :where ['?dbid :invoice/id '?uuid]] db)] (mapv (fn [id] {:invoice/id id}) ids)) (log/error "No database atom for production schema!"))) (defn get-invoice-customer-id [env invoice-id] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (d/q '[:find ?account-uuid . :in $ ?invoice-uuid :where [?i :invoice/id ?invoice-uuid] [?i :invoice/customer ?c] [?c :account/id ?account-uuid]] db invoice-id) (log/error "No database atom for production schema!"))) (defn get-all-categories [env query-params] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (let [ids (d/q '[:find [?id ...] :where [?e :category/label] [?e :category/id ?id]] db)] (mapv (fn [id] {:category/id id}) ids)) (log/error "No database atom for production schema!"))) (defn get-line-item-category [env line-item-id] (if-let [db (some-> (get-in env [::datomic/databases :production]) deref)] (let [id (d/q '[:find ?cid . :in $ ?line-item-id :where [?e :line-item/id ?line-item-id] [?e :line-item/item ?item] [?item :item/category ?c] [?c :category/id ?cid]] db line-item-id)] id) (log/error "No database atom for production schema!"))) (defn get-login-info "Get the account name, time zone, and password info via a username (email)." [{::datomic/keys [databases] :as env} username] (enc/if-let [db @(:production databases)] (d/pull db [:account/name {:time-zone/zone-id [:db/ident]} :password/hashed-value :password/salt :password/iterations] [:account/email username])))
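The Fulcro RAD query helpers above all follow the same pattern: pull the production Datomic value out of the env under ::datomic/databases, deref it, and run a Datalog query, logging an error when the database atom is missing. Below is a minimal sketch of how get-all-accounts might be exercised against an in-memory Datomic database; the demo namespace name, the datomic:mem URI, and the two-attribute schema are assumptions made only for illustration and are not part of the tutorial repository.

(ns com.example.components.database-queries-demo
  "Hypothetical demo namespace; only illustrates the env shape the helpers expect."
  (:require [com.example.components.database-queries :as queries]
            [com.fulcrologic.rad.database-adapters.datomic :as datomic]
            [datomic.api :as d]))

;; Minimal schema so :account/id and :account/active? resolve in the queries.
(def schema
  [{:db/ident       :account/id
    :db/valueType   :db.type/uuid
    :db/cardinality :db.cardinality/one
    :db/unique      :db.unique/identity}
   {:db/ident       :account/active?
    :db/valueType   :db.type/boolean
    :db/cardinality :db.cardinality/one}])

(defn demo []
  (let [uri "datomic:mem://queries-demo"]
    (d/create-database uri)
    (let [conn (d/connect uri)]
      @(d/transact conn schema)
      @(d/transact conn [{:account/id      (java.util.UUID/randomUUID)
                          :account/active? true}])
      ;; The helpers deref whatever sits under [::datomic/databases :production],
      ;; so an atom holding a current db value is enough here.
      (queries/get-all-accounts
        {::datomic/databases {:production (atom (d/db conn))}}
        {}))))
;; => [{:account/id #uuid "..."}]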
23c48897b3b2a8d20da910681c51cf1b80ef5e82389c947e2c1b4eecd3c1a29f
boot-clj/boot
gpg.clj
(ns boot.gpg (:require [clojure.java.io :as io] [clojure.java.shell :as shell] [boot.pod :as pod] [boot.util :as util]) (:import [java.io StringReader File])) (defn ^{:boot/from :technomancy/leiningen} gpg-program "Lookup the gpg program to use, defaulting to 'gpg'" [] (or (boot.App/config "BOOT_GPG_COMMAND") "gpg")) (defn- ^{:boot/from :technomancy/leiningen} get-english-env [] "Returns environment variables as a map with clojure keywords and LANGUAGE set to 'en'" (let [env (System/getenv) keywords (map #(keyword %) (keys env))] (merge (zipmap keywords (vals env)) {:LANGUAGE "en"}))) (defn ^{:boot/from :technomancy/leiningen} gpg "Shells out to (gpg-program) with the given arguments" [& args] (let [env (get-english-env)] (try (shell/with-sh-env env (apply shell/sh (gpg-program) args)) (catch java.io.IOException e {:exit 1 :err (.getMessage e)})))) (defn ^{:boot/from :technomancy/leiningen} signing-args "Produce GPG arguments for signing a file." [file opts] (let [key-spec (if-let [key (:gpg-key opts)] ["--default-key" key]) passphrase-spec (if-let [pass (:gpg-passphrase opts)] ["--passphrase-fd" "0"]) passphrase-in-spec (if-let [pass (:gpg-passphrase opts)] [:in (StringReader. pass)])] `["--yes" "-ab" ~@key-spec ~@passphrase-spec "--" ~file ~@passphrase-in-spec])) (defn ^{:boot/from :technomancy/leiningen} sign "Create a detached signature and return the signature file name." [file opts] (let [{:keys [err exit]} (apply gpg (signing-args file opts))] (when-not (zero? exit) (util/fail (str "Could not sign " file "\n" err "\n\nIf you don't expect people to need to verify the " "authorship of your jar, don't set :gpg-sign option of push task to true.\n"))) (str file ".asc"))) (defn sign-it "Sign a java.io.File given the options." [f gpg-options] (slurp (sign (.getPath f) gpg-options))) (defn sign-pom "Materialize and sign the pom contained in jarfile. Returns an artifact-map entry - a map from partial coordinates to file path or File (see pomegranate/aether.clj for details). If you receive a \"Could not sign ... gpg: no default secret key: secret key not available\" error, make sure boot is using the right gpg executable. You can use the BOOT_GPG_COMMAND environment variable for that. In order to use gpg2, for instance, run: BOOT_GPG_COMMAND=gpg2 boot push --gpg-sign ... You rarely need to use this directly, use the push task instead." [outdir jarfile pompath gpg-options] (shell/with-sh-dir outdir (let [jarname (.getName jarfile) pomfile (doto (File/createTempFile "pom" ".xml") (.deleteOnExit) (spit (pod/pom-xml jarfile pompath))) pomout (io/file outdir (.replaceAll jarname "\\.jar$" ".pom.asc")) ] (spit pomout (sign-it pomfile gpg-options)) [[:extension "pom.asc"] (.getPath pomout)]))) (defn sign-jar "Sign a jar. Returns an artifact-map entry - a map from partial coordinates to file path or File (see pomegranate/aether.clj for details). If you receive a \"Could not sign ... gpg: no default secret key: secret key not available\" error, make sure boot is using the right gpg executable. You can use the BOOT_GPG_COMMAND environment variable for that. In order to use gpg2, for instance, run: BOOT_GPG_COMMAND=gpg2 boot push --gpg-sign ... You rarely need to use this directly, use the push task instead." 
[outdir jarfile gpg-options] (shell/with-sh-dir outdir (let [jarname (.getName jarfile) jarout (io/file outdir (str jarname ".asc"))] (spit jarout (sign-it jarfile gpg-options)) [[:extension "jar.asc"] (.getPath jarout)]))) (defn decrypt "Use gpg to decrypt a file -- returns string contents of file." [file] (let [path (.getPath (io/file file)) {:keys [out err exit]} (gpg "--quiet" "--batch" "--decrypt" "--" path)] (assert (zero? exit) err) out))
null
https://raw.githubusercontent.com/boot-clj/boot/64334b4d5744b1444634f4a5a5a52b3ae67dddeb/boot/pod/src/boot/gpg.clj
clojure
(ns boot.gpg (:require [clojure.java.io :as io] [clojure.java.shell :as shell] [boot.pod :as pod] [boot.util :as util]) (:import [java.io StringReader File])) (defn ^{:boot/from :technomancy/leiningen} gpg-program "Lookup the gpg program to use, defaulting to 'gpg'" [] (or (boot.App/config "BOOT_GPG_COMMAND") "gpg")) (defn- ^{:boot/from :technomancy/leiningen} get-english-env [] "Returns environment variables as a map with clojure keywords and LANGUAGE set to 'en'" (let [env (System/getenv) keywords (map #(keyword %) (keys env))] (merge (zipmap keywords (vals env)) {:LANGUAGE "en"}))) (defn ^{:boot/from :technomancy/leiningen} gpg "Shells out to (gpg-program) with the given arguments" [& args] (let [env (get-english-env)] (try (shell/with-sh-env env (apply shell/sh (gpg-program) args)) (catch java.io.IOException e {:exit 1 :err (.getMessage e)})))) (defn ^{:boot/from :technomancy/leiningen} signing-args "Produce GPG arguments for signing a file." [file opts] (let [key-spec (if-let [key (:gpg-key opts)] ["--default-key" key]) passphrase-spec (if-let [pass (:gpg-passphrase opts)] ["--passphrase-fd" "0"]) passphrase-in-spec (if-let [pass (:gpg-passphrase opts)] [:in (StringReader. pass)])] `["--yes" "-ab" ~@key-spec ~@passphrase-spec "--" ~file ~@passphrase-in-spec])) (defn ^{:boot/from :technomancy/leiningen} sign "Create a detached signature and return the signature file name." [file opts] (let [{:keys [err exit]} (apply gpg (signing-args file opts))] (when-not (zero? exit) (util/fail (str "Could not sign " file "\n" err "\n\nIf you don't expect people to need to verify the " "authorship of your jar, don't set :gpg-sign option of push task to true.\n"))) (str file ".asc"))) (defn sign-it "Sign a java.io.File given the options." [f gpg-options] (slurp (sign (.getPath f) gpg-options))) (defn sign-pom "Materialize and sign the pom contained in jarfile. Returns an artifact-map entry - a map from partial coordinates to file path or File (see pomegranate/aether.clj for details). If you receive a \"Could not sign ... gpg: no default secret key: secret key not available\" error, make sure boot is using the right gpg executable. You can use the BOOT_GPG_COMMAND environment variable for that. In order to use gpg2, for instance, run: BOOT_GPG_COMMAND=gpg2 boot push --gpg-sign ... You rarely need to use this directly, use the push task instead." [outdir jarfile pompath gpg-options] (shell/with-sh-dir outdir (let [jarname (.getName jarfile) pomfile (doto (File/createTempFile "pom" ".xml") (.deleteOnExit) (spit (pod/pom-xml jarfile pompath))) pomout (io/file outdir (.replaceAll jarname "\\.jar$" ".pom.asc")) ] (spit pomout (sign-it pomfile gpg-options)) [[:extension "pom.asc"] (.getPath pomout)]))) (defn sign-jar "Sign a jar. Returns an artifact-map entry - a map from partial coordinates to file path or File (see pomegranate/aether.clj for details). If you receive a \"Could not sign ... gpg: no default secret key: secret key not available\" error, make sure boot is using the right gpg executable. You can use the BOOT_GPG_COMMAND environment variable for that. In order to use gpg2, for instance, run: BOOT_GPG_COMMAND=gpg2 boot push --gpg-sign ... You rarely need to use this directly, use the push task instead." 
[outdir jarfile gpg-options] (shell/with-sh-dir outdir (let [jarname (.getName jarfile) jarout (io/file outdir (str jarname ".asc"))] (spit jarout (sign-it jarfile gpg-options)) [[:extension "jar.asc"] (.getPath jarout)]))) (defn decrypt "Use gpg to decrypt a file -- returns string contents of file." [file] (let [path (.getPath (io/file file)) {:keys [out err exit]} (gpg "--quiet" "--batch" "--decrypt" "--" path)] (assert (zero? exit) err) out))
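Because signing-args is a pure function, its behaviour can be inspected at a REPL without invoking gpg at all; sign and sign-jar then merely shell out with that argument vector. The snippet below is a sketch that assumes it runs inside a boot REPL (boot.gpg reads configuration through the boot.App runtime class, so it will not load in a plain Clojure process); the jar path and key id are made up.

(require '[boot.gpg :as gpg])

;; No key or passphrase configured: only the generic flags and the file.
(gpg/signing-args "target/project-1.0.0.jar" {})
;; => ["--yes" "-ab" "--" "target/project-1.0.0.jar"]

;; With a key and passphrase the extra flags are spliced in, and the
;; passphrase is fed to gpg on stdin via the trailing :in StringReader.
(gpg/signing-args "target/project-1.0.0.jar"
                  {:gpg-key "0xDEADBEEF" :gpg-passphrase "s3cret"})
;; => ["--yes" "-ab" "--default-key" "0xDEADBEEF" "--passphrase-fd" "0"
;;     "--" "target/project-1.0.0.jar" :in #object[java.io.StringReader ...]]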
8f3ee4d3139aea8a58f73c91cc91973ee2fb8f8c9707f1215c8ed968d42d2719
nomnom-insights/nomnom.duckula
swagger_test.clj
(ns duckula.swagger-test (:require [clojure.edn :as edn] [clojure.java.io :as io] [clojure.test :refer [deftest is testing]] [duckula.swagger :as swag] [duckula.test.server :as test.server])) (deftest it-generates-a-swagger-config (testing "empty endpoints" (let [conf (swag/generate {:name "empty"} #_test.server/config)] (is (= {:consumes ["application/json"] :definitions {} :info {:title "Swagger API: empty" :version "0.0.1"} :paths {} :produces ["application/json"] :swagger "2.0"} conf)))) (testing "endpoint with no validations" (let [conf (swag/generate {:name "no validation" :prefix "/SOAP" :endpoints {"/foo" {:handler identity}}})] (is (= {:consumes ["application/json"] :definitions {"Error" {:additionalProperties false :properties {:error {:type "string"} :message {:type "string"} :metadata {:$ref "#/definitions/ErrorMetadata"}} :required [:message :error :metadata] :type "object"} "ErrorMetadata" {:additionalProperties {}, :type "object"}} :info {:title "Swagger API: no validation", :version "0.0.1"} :paths {"/SOAP/foo" {:post {:description ":no-doc:" :parameters [{:description "" :in "body" :name "/SOAP/foo" :required true :schema {}}] :responses {200 {:description ":no-doc:" :schema {}} 410 {:description "Request data didn't conform to the request data schema" :schema {:$ref "#/definitions/Error"}} 500 {:description "Internal server error, or response couldn't be serialized according to the response schema" :schema {:$ref "#/definitions/Error"}}} :summary "/SOAP/foo"}}} :produces ["application/json"] :swagger "2.0"} conf))))) (deftest working-server-example-config (let [conf (swag/generate test.server/config)] (is (= (edn/read-string (slurp (io/resource "duckula/test_swagger.edn"))) conf)))) (deftest with-inline-schema (let [conf (swag/generate {:name "empty" :endpoints {"/test" {:request {:type "record" :name "test.Empty" :fields [{:name "exit_status" :type "string"}]}}}})] (is (= {:consumes ["application/json"] :definitions {"Error" {:additionalProperties false :properties {:error {:type "string"} :message {:type "string"} :metadata {:$ref "#/definitions/ErrorMetadata"}} :required [:message :error :metadata] :type "object"} "ErrorMetadata" {:additionalProperties {}, :type "object"} "test.Empty" {:additionalProperties false :properties {:exit_status {:type "string"}} :required [:exit_status] :type "object"}} :info {:title "Swagger API: empty", :version "0.0.1"} :paths {"/test" {:post {:description "" :parameters [{:description "" :in "body" :name "test.Empty" :required true :schema {:$ref "#/definitions/test.Empty"}}] :responses {200 {:description ":no-doc:", :schema {}} 410 {:description "Request data didn't conform to the request data schema" :schema {:$ref "#/definitions/Error"}} 500 {:description "Internal server error, or response couldn't be serialized according to the response schema" :schema {:$ref "#/definitions/Error"}}} :summary "/test"}}} :produces ["application/json"] :swagger "2.0"} conf)))) (deftest name-mangling-in-docs (let [conf (swag/generate {:name "empty" :kebab-case-names? true ;; :same as: ;; mangle-names? true ;; or ;; snake-case-names? 
false :endpoints {"/test" {:request {:type "record" :name "test.Empty" :fields [{:name "status_code" :type {:name "Code" :type "enum" :symbols ["foo_bar" "bar_baz"]}} {:name "status_field" :type "string"}]}}}})] (is (= {:consumes ["application/json"] :definitions {"Error" {:additionalProperties false :properties {:error {:type "string"} :message {:type "string"} :metadata {:$ref "#/definitions/ErrorMetadata"}} :required [:message :error :metadata] :type "object"} "ErrorMetadata" {:additionalProperties {}, :type "object"} "test.Empty" {:additionalProperties false :properties {:status-field {:type "string"} :status-code {:enum '("foo-bar" :foo-bar :bar-baz "bar-baz") :type "string"}} :required [:status-field :status-code] :type "object"}} :info {:title "Swagger API: empty", :version "0.0.1"} :paths {"/test" {:post {:description "" :parameters [{:description "" :in "body" :name "test.Empty" :required true :schema {:$ref "#/definitions/test.Empty"}}] :responses {200 {:description ":no-doc:", :schema {}} 410 {:description "Request data didn't conform to the request data schema" :schema {:$ref "#/definitions/Error"}} 500 {:description "Internal server error, or response couldn't be serialized according to the response schema" :schema {:$ref "#/definitions/Error"}}} :summary "/test"}}} :produces ["application/json"] :swagger "2.0"} conf))))
null
https://raw.githubusercontent.com/nomnom-insights/nomnom.duckula/a6c8ee965da5717c5909bfbda5745be7a287d2ce/test/duckula/swagger_test.clj
clojure
:same as: mangle-names? true or snake-case-names? false
(ns duckula.swagger-test (:require [clojure.edn :as edn] [clojure.java.io :as io] [clojure.test :refer [deftest is testing]] [duckula.swagger :as swag] [duckula.test.server :as test.server])) (deftest it-generates-a-swagger-config (testing "empty endpoints" (let [conf (swag/generate {:name "empty"} #_test.server/config)] (is (= {:consumes ["application/json"] :definitions {} :info {:title "Swagger API: empty" :version "0.0.1"} :paths {} :produces ["application/json"] :swagger "2.0"} conf)))) (testing "endpoint with no validations" (let [conf (swag/generate {:name "no validation" :prefix "/SOAP" :endpoints {"/foo" {:handler identity}}})] (is (= {:consumes ["application/json"] :definitions {"Error" {:additionalProperties false :properties {:error {:type "string"} :message {:type "string"} :metadata {:$ref "#/definitions/ErrorMetadata"}} :required [:message :error :metadata] :type "object"} "ErrorMetadata" {:additionalProperties {}, :type "object"}} :info {:title "Swagger API: no validation", :version "0.0.1"} :paths {"/SOAP/foo" {:post {:description ":no-doc:" :parameters [{:description "" :in "body" :name "/SOAP/foo" :required true :schema {}}] :responses {200 {:description ":no-doc:" :schema {}} 410 {:description "Request data didn't conform to the request data schema" :schema {:$ref "#/definitions/Error"}} 500 {:description "Internal server error, or response couldn't be serialized according to the response schema" :schema {:$ref "#/definitions/Error"}}} :summary "/SOAP/foo"}}} :produces ["application/json"] :swagger "2.0"} conf))))) (deftest working-server-example-config (let [conf (swag/generate test.server/config)] (is (= (edn/read-string (slurp (io/resource "duckula/test_swagger.edn"))) conf)))) (deftest with-inline-schema (let [conf (swag/generate {:name "empty" :endpoints {"/test" {:request {:type "record" :name "test.Empty" :fields [{:name "exit_status" :type "string"}]}}}})] (is (= {:consumes ["application/json"] :definitions {"Error" {:additionalProperties false :properties {:error {:type "string"} :message {:type "string"} :metadata {:$ref "#/definitions/ErrorMetadata"}} :required [:message :error :metadata] :type "object"} "ErrorMetadata" {:additionalProperties {}, :type "object"} "test.Empty" {:additionalProperties false :properties {:exit_status {:type "string"}} :required [:exit_status] :type "object"}} :info {:title "Swagger API: empty", :version "0.0.1"} :paths {"/test" {:post {:description "" :parameters [{:description "" :in "body" :name "test.Empty" :required true :schema {:$ref "#/definitions/test.Empty"}}] :responses {200 {:description ":no-doc:", :schema {}} 410 {:description "Request data didn't conform to the request data schema" :schema {:$ref "#/definitions/Error"}} 500 {:description "Internal server error, or response couldn't be serialized according to the response schema" :schema {:$ref "#/definitions/Error"}}} :summary "/test"}}} :produces ["application/json"] :swagger "2.0"} conf)))) (deftest name-mangling-in-docs (let [conf (swag/generate {:name "empty" :kebab-case-names? 
true :endpoints {"/test" {:request {:type "record" :name "test.Empty" :fields [{:name "status_code" :type {:name "Code" :type "enum" :symbols ["foo_bar" "bar_baz"]}} {:name "status_field" :type "string"}]}}}})] (is (= {:consumes ["application/json"] :definitions {"Error" {:additionalProperties false :properties {:error {:type "string"} :message {:type "string"} :metadata {:$ref "#/definitions/ErrorMetadata"}} :required [:message :error :metadata] :type "object"} "ErrorMetadata" {:additionalProperties {}, :type "object"} "test.Empty" {:additionalProperties false :properties {:status-field {:type "string"} :status-code {:enum '("foo-bar" :foo-bar :bar-baz "bar-baz") :type "string"}} :required [:status-field :status-code] :type "object"}} :info {:title "Swagger API: empty", :version "0.0.1"} :paths {"/test" {:post {:description "" :parameters [{:description "" :in "body" :name "test.Empty" :required true :schema {:$ref "#/definitions/test.Empty"}}] :responses {200 {:description ":no-doc:", :schema {}} 410 {:description "Request data didn't conform to the request data schema" :schema {:$ref "#/definitions/Error"}} 500 {:description "Internal server error, or response couldn't be serialized according to the response schema" :schema {:$ref "#/definitions/Error"}}} :summary "/test"}}} :produces ["application/json"] :swagger "2.0"} conf))))
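The expected maps in these tests double as documentation for duckula.swagger/generate: with no endpoints it emits a bare Swagger 2.0 skeleton, and each endpoint then contributes a POST path plus the Error/ErrorMetadata definitions. A short REPL sketch mirroring the first assertion, assuming duckula.swagger is on the classpath:

(require '[duckula.swagger :as swag])

;; An endpoint-less config produces only the Swagger 2.0 skeleton,
;; with the service name folded into :info (see the first deftest above).
(swag/generate {:name "empty"})
;; => {:swagger "2.0"
;;     :info {:title "Swagger API: empty" :version "0.0.1"}
;;     :consumes ["application/json"]
;;     :produces ["application/json"]
;;     :definitions {}
;;     :paths {}}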
b798cff54603bb6fcff799ed3697af131d7fbd57275b14f9282a3068639a2a56
pietervdvn/ALGT
TypeSystemData.hs
module TypeSystem.TypeSystemData (module TypeSystem) where {- This module reexports all things you need -} import TypeSystem.Types as TypeSystem import TypeSystem.BNF as TypeSystem import TypeSystem.Syntax as TypeSystem import TypeSystem.SyntaxStyle as TypeSystem import TypeSystem.ParseTree as TypeSystem import TypeSystem.Expression as TypeSystem import TypeSystem.Function as TypeSystem import TypeSystem.Relation as TypeSystem import TypeSystem.Rule as TypeSystem import TypeSystem.Proof as TypeSystem import TypeSystem.PropertyProof as TypeSystem import TypeSystem.TypeSystem as TypeSystem
null
https://raw.githubusercontent.com/pietervdvn/ALGT/43a2811931be6daf1362f37cb16f99375ca4999e/src/TypeSystem/TypeSystemData.hs
haskell
This module reexports all things you need
module TypeSystem.TypeSystemData (module TypeSystem) where import TypeSystem.Types as TypeSystem import TypeSystem.BNF as TypeSystem import TypeSystem.Syntax as TypeSystem import TypeSystem.SyntaxStyle as TypeSystem import TypeSystem.ParseTree as TypeSystem import TypeSystem.Expression as TypeSystem import TypeSystem.Function as TypeSystem import TypeSystem.Relation as TypeSystem import TypeSystem.Rule as TypeSystem import TypeSystem.Proof as TypeSystem import TypeSystem.PropertyProof as TypeSystem import TypeSystem.TypeSystem as TypeSystem
8cf8ef33edb7ebddf14386b75644d3d05e0c5a4ee00138a11bad98824c4e4c7d
donnerpeter/Constructor
Sense.hs
module Constructor.Sense (Sense(..), Frame(..), Fact(..), allFrames, allFrameFacts, toFrame, fDeclaredValue, sDeclaredValue, earlier, usages, usage, allUsages, usageFacts, isFrameReachable, reachableFrames, framesTo, makeSense, composeSense) where import Constructor.Variable import Data.List (intercalate, findIndex, intersect, sort) import Data.Maybe import qualified Data.Map as Map import qualified Data.Set as Set import qualified Constructor.LinkedSet as LS type EqClass = [Variable] data EqClasses = EqClasses { baseVars:: Map.Map Variable Variable, eqClasses :: Map.Map Variable EqClass } toBase (EqClasses baseVars _) var = baseVars Map.! var addEqClass :: EqClasses -> EqClass -> (EqClasses, ClassUpdate) addEqClass (EqClasses baseVars eqClasses) vars = (EqClasses newBaseVars newClasses, ClassUpdate addedClasses removedClasses) where basesToUnify = catMaybes $ map (\v -> Map.lookup v baseVars) vars mergedClass = sort $ LS.removeDups $ concat (map (\v -> eqClasses Map.! v) basesToUnify) ++ vars singleBase = head mergedClass newClasses = Map.insert singleBase mergedClass $ foldl (\m v -> Map.delete v m) eqClasses basesToUnify newBaseVars = foldl (\m v -> Map.insert v singleBase m) baseVars mergedClass addedClasses = Set.fromList [mergedClass] removedClasses = Set.fromList $ map (\v -> eqClasses Map.! v) basesToUnify data ClassUpdate = ClassUpdate { removedClasses:: Set.Set EqClass, addedClasses:: Set.Set EqClass } composeUpdates u1 u2 = ClassUpdate added removed where added = Set.union (Set.difference (addedClasses u1) (removedClasses u2)) (addedClasses u2) removed = Set.union (removedClasses u1) (removedClasses u2) data Fact = Fact { variable:: Variable, value:: SemValue } deriving (Eq, Ord) instance Show Fact where show (Fact var value) = (show var)++"."++(show value) data FactMap = FactMap { knownVariables :: Set.Set Variable, var2Values :: Map.Map EqClass (LS.LinkedSet SemValue), var2Usages :: Map.Map EqClass (LS.LinkedSet Fact), children :: [FactMap] } type MapGetter a = FactMap -> Map.Map EqClass (LS.LinkedSet a) composeFactMaps factMaps baseFM update = FactMap knownVars valueCache usageCache factMaps where knownVars = foldl Set.union Set.empty $ map knownVariables factMaps updateCache getter = let withoutRemoved = Set.foldl (\m c -> Map.delete c m) (getter baseFM) (removedClasses update) withAdded = Set.foldl (\m c -> Map.insert c (combineFacts getter factMaps c) m) withoutRemoved (addedClasses update) in withAdded valueCache = updateCache var2Values usageCache = updateCache var2Usages combineFacts :: (Ord a) => MapGetter a -> [FactMap] -> [Variable] -> LS.LinkedSet a combineFacts getter factMaps vars = if length facts == 1 then head facts else foldl LS.union LS.empty facts where facts = filter (not . 
LS.isEmpty) $ map (\fm -> getFactsFromMap getter fm $ filter (flip Set.member (knownVariables fm)) vars) factMaps getFactsFromMap :: (Ord a) => MapGetter a -> FactMap -> [Variable] -> LS.LinkedSet a getFactsFromMap getter factMap vars = if null vars then LS.empty else Map.findWithDefault calcValue vars $ getter factMap where calcValue = if null $ children factMap then foldl LS.union LS.empty [facts | (vs, facts) <- Map.toList $ getter factMap, not (null $ intersect vs vars)] else combineFacts getter (children factMap) vars data Sense = Sense { allFrameVars:: [Variable], varClasses:: EqClasses, factMap:: FactMap, factCache:: Map.Map Variable [Fact], usageCache:: Map.Map Variable [Fact], bareFacts:: [Fact] } instance Show Sense where show sense = Data.List.intercalate "\n" (map show $ LS.removeDups $ map (normalizeFact $ varClasses sense) $ bareFacts sense) instance Eq Sense where s1 == s2 = bareFacts s1 == bareFacts s2 data Frame = Frame { var:: Variable, sense:: Sense } deriving (Eq) instance Show Frame where show frame = "{" ++ (Data.List.intercalate "," (map show $ allFrameFacts frame)) ++ "}" instance Ord Frame where compare s1 s2 = compare (var s1) (var s2) makeSense bareFacts unifications = makeSenseInternal bareFacts allBaseVars varClasses factMap where factVars = LS.removeDups $ map variable bareFacts ++ [v | Fact {value=VarValue _ v} <- bareFacts] singleVarClasses = foldl (\ec var -> fst $ addEqClass ec [var]) (EqClasses Map.empty Map.empty) factVars varClasses = foldl (\ec (var1, var2) -> fst $ addEqClass ec [var1, var2]) singleVarClasses unifications allBaseVars = LS.removeDups $ factVars ++ Map.keys (baseVars varClasses) cacheMap pairs = let base2Facts = Map.fromListWith (flip (++)) [(toBase varClasses v, facts) | (v, facts) <- pairs] in Map.fromList [(cacheKey varClasses v, LS.fromList $ Map.findWithDefault [] v base2Facts) | v <- allBaseVars] factMap = FactMap { knownVariables = Map.keysSet (baseVars varClasses), var2Values = cacheMap [(variable fact, [value fact]) | fact <- bareFacts], var2Usages = cacheMap [(v, [fact]) | fact@(Fact {value=VarValue _ v}) <- bareFacts], children = [] } cacheKey varClasses v = Map.findWithDefault [v] v $ eqClasses varClasses makeSenseInternal bareFacts allFrameVars varClasses factMap = Sense allFrameVars varClasses factMap factCache usageCache bareFacts where factCache = Map.fromList [(var, [Fact var (normalizeValue varClasses value) | value <- LS.elements $ getFactsFromMap var2Values factMap $ cacheKey varClasses var]) | var <- allFrameVars] usageCache = Map.fromList [(var, map (normalizeFact varClasses) $ LS.elements $ getFactsFromMap var2Usages factMap $ cacheKey varClasses var) | var <- allFrameVars] normalizeFact varClasses (Fact var1 value) = Fact (toBase varClasses var1) (normalizeValue varClasses value) toFrame sense var = Frame (toBase (varClasses sense) var) sense normalizeValue varClasses value = case value of StrValue _ _ -> value VarValue prop var2 -> VarValue prop $ toBase varClasses var2 composeSense senses = makeSenseInternal _bareFacts _allFrameVars mergedClasses _factMap where _bareFacts = senses >>= bareFacts baseIndex = snd $ maximum $ zip (map (Map.size . eqClasses . varClasses) senses) [0..] baseSense = senses !! baseIndex addedSenses = take baseIndex senses ++ drop (baseIndex + 1) senses folder (ec1, u1) vars = let (ec2, u2) = addEqClass ec1 vars in (ec2, composeUpdates u1 u2) classesToAdd = addedSenses >>= Map.elems . eqClasses . 
varClasses (mergedClasses, compositeUpdate) = foldl folder (varClasses baseSense, ClassUpdate Set.empty Set.empty) classesToAdd _allFrameVars = LS.removeDups $ map (toBase mergedClasses) $ (senses >>= allFrameVars) _factMap = composeFactMaps (map factMap senses) (factMap baseSense) compositeUpdate toFrames sense vars = map (flip Frame sense) vars allFrames sense = toFrames sense $ allFrameVars sense allFrameFacts frame = Map.findWithDefault [] (var frame) $ factCache (sense frame) singleListElement list = case list of [single] -> Just single _ -> Nothing earlier f1 attr1 f2 attr2 = let allFacts = bareFacts $ sense f1 eqClasses = varClasses $ sense f1 mi1 = findIndex (isStrAssignment (var f1) attr1) allFacts mi2 = findIndex (isStrAssignment (var f2) attr2) allFacts isStrAssignment var attr = \case Fact {variable=v, value=StrValue a _} | a == attr && toBase eqClasses v == var -> True _ -> False in case (mi1, mi2) of (Just i1, Just i2) | i1 < i2 -> True _ -> False sDeclaredValue attr frame = singleListElement $ allFrameFacts frame >>= \case Fact {value=StrValue a s} | a == attr -> [s] _ -> [] fDeclaredValue attr frame = singleListElement $ allFrameFacts frame >>= \case Fact {value=VarValue a v} | a == attr -> [Frame v (sense frame)] _ -> [] usageFacts frame = Map.findWithDefault [] (var frame) $ usageCache (sense frame) allUsages attrs frame = toFrames (sense frame) $ [v | Fact {variable=v, value=VarValue s _} <- usageFacts frame, s `elem` attrs] usages attr frame = toFrames (sense frame) $ [v | Fact {variable=v, value=VarValue s _} <- usageFacts frame, s == attr] usage attr frame = singleListElement $ usages attr frame reachableFrames :: Frame -> Set.Set Frame reachableFrames origin = visitFrame Set.empty origin where visitFrame visited frame = let neighbours = Set.fromList [toFrame (sense frame) v | (Fact {value=(VarValue _ v)}) <- allFrameFacts frame ] goFurther = Set.foldl visitFrame (Set.insert frame visited) neighbours in if Set.member frame visited then visited else goFurther isFrameReachable src dest = Set.member dest (reachableFrames src) framesTo :: Frame -> Frame -> Int -> Maybe (Set.Set Frame) framesTo src dst maxPathLength = inner Set.empty src where sens = sense src inner path current = let allNeighbors = [toFrame sens v | (Fact {value=(VarValue _ v)}) <- allFrameFacts current] newNeighbors = filter (not . flip Set.member path) allNeighbors pathWithCurrent = Set.insert current path in if current == dst then Just path else if Set.size path >= maxPathLength then Nothing else Just $ Set.unions $ mapMaybe (inner pathWithCurrent) newNeighbors
null
https://raw.githubusercontent.com/donnerpeter/Constructor/5b76403eb2356127fcf2a6d86c23cf3447a77402/src_hs/Constructor/Sense.hs
haskell
module Constructor.Sense (Sense(..), Frame(..), Fact(..), allFrames, allFrameFacts, toFrame, fDeclaredValue, sDeclaredValue, earlier, usages, usage, allUsages, usageFacts, isFrameReachable, reachableFrames, framesTo, makeSense, composeSense) where import Constructor.Variable import Data.List (intercalate, findIndex, intersect, sort) import Data.Maybe import qualified Data.Map as Map import qualified Data.Set as Set import qualified Constructor.LinkedSet as LS type EqClass = [Variable] data EqClasses = EqClasses { baseVars:: Map.Map Variable Variable, eqClasses :: Map.Map Variable EqClass } toBase (EqClasses baseVars _) var = baseVars Map.! var addEqClass :: EqClasses -> EqClass -> (EqClasses, ClassUpdate) addEqClass (EqClasses baseVars eqClasses) vars = (EqClasses newBaseVars newClasses, ClassUpdate addedClasses removedClasses) where basesToUnify = catMaybes $ map (\v -> Map.lookup v baseVars) vars mergedClass = sort $ LS.removeDups $ concat (map (\v -> eqClasses Map.! v) basesToUnify) ++ vars singleBase = head mergedClass newClasses = Map.insert singleBase mergedClass $ foldl (\m v -> Map.delete v m) eqClasses basesToUnify newBaseVars = foldl (\m v -> Map.insert v singleBase m) baseVars mergedClass addedClasses = Set.fromList [mergedClass] removedClasses = Set.fromList $ map (\v -> eqClasses Map.! v) basesToUnify data ClassUpdate = ClassUpdate { removedClasses:: Set.Set EqClass, addedClasses:: Set.Set EqClass } composeUpdates u1 u2 = ClassUpdate added removed where added = Set.union (Set.difference (addedClasses u1) (removedClasses u2)) (addedClasses u2) removed = Set.union (removedClasses u1) (removedClasses u2) data Fact = Fact { variable:: Variable, value:: SemValue } deriving (Eq, Ord) instance Show Fact where show (Fact var value) = (show var)++"."++(show value) data FactMap = FactMap { knownVariables :: Set.Set Variable, var2Values :: Map.Map EqClass (LS.LinkedSet SemValue), var2Usages :: Map.Map EqClass (LS.LinkedSet Fact), children :: [FactMap] } type MapGetter a = FactMap -> Map.Map EqClass (LS.LinkedSet a) composeFactMaps factMaps baseFM update = FactMap knownVars valueCache usageCache factMaps where knownVars = foldl Set.union Set.empty $ map knownVariables factMaps updateCache getter = let withoutRemoved = Set.foldl (\m c -> Map.delete c m) (getter baseFM) (removedClasses update) withAdded = Set.foldl (\m c -> Map.insert c (combineFacts getter factMaps c) m) withoutRemoved (addedClasses update) in withAdded valueCache = updateCache var2Values usageCache = updateCache var2Usages combineFacts :: (Ord a) => MapGetter a -> [FactMap] -> [Variable] -> LS.LinkedSet a combineFacts getter factMaps vars = if length facts == 1 then head facts else foldl LS.union LS.empty facts where facts = filter (not . 
LS.isEmpty) $ map (\fm -> getFactsFromMap getter fm $ filter (flip Set.member (knownVariables fm)) vars) factMaps getFactsFromMap :: (Ord a) => MapGetter a -> FactMap -> [Variable] -> LS.LinkedSet a getFactsFromMap getter factMap vars = if null vars then LS.empty else Map.findWithDefault calcValue vars $ getter factMap where calcValue = if null $ children factMap then foldl LS.union LS.empty [facts | (vs, facts) <- Map.toList $ getter factMap, not (null $ intersect vs vars)] else combineFacts getter (children factMap) vars data Sense = Sense { allFrameVars:: [Variable], varClasses:: EqClasses, factMap:: FactMap, factCache:: Map.Map Variable [Fact], usageCache:: Map.Map Variable [Fact], bareFacts:: [Fact] } instance Show Sense where show sense = Data.List.intercalate "\n" (map show $ LS.removeDups $ map (normalizeFact $ varClasses sense) $ bareFacts sense) instance Eq Sense where s1 == s2 = bareFacts s1 == bareFacts s2 data Frame = Frame { var:: Variable, sense:: Sense } deriving (Eq) instance Show Frame where show frame = "{" ++ (Data.List.intercalate "," (map show $ allFrameFacts frame)) ++ "}" instance Ord Frame where compare s1 s2 = compare (var s1) (var s2) makeSense bareFacts unifications = makeSenseInternal bareFacts allBaseVars varClasses factMap where factVars = LS.removeDups $ map variable bareFacts ++ [v | Fact {value=VarValue _ v} <- bareFacts] singleVarClasses = foldl (\ec var -> fst $ addEqClass ec [var]) (EqClasses Map.empty Map.empty) factVars varClasses = foldl (\ec (var1, var2) -> fst $ addEqClass ec [var1, var2]) singleVarClasses unifications allBaseVars = LS.removeDups $ factVars ++ Map.keys (baseVars varClasses) cacheMap pairs = let base2Facts = Map.fromListWith (flip (++)) [(toBase varClasses v, facts) | (v, facts) <- pairs] in Map.fromList [(cacheKey varClasses v, LS.fromList $ Map.findWithDefault [] v base2Facts) | v <- allBaseVars] factMap = FactMap { knownVariables = Map.keysSet (baseVars varClasses), var2Values = cacheMap [(variable fact, [value fact]) | fact <- bareFacts], var2Usages = cacheMap [(v, [fact]) | fact@(Fact {value=VarValue _ v}) <- bareFacts], children = [] } cacheKey varClasses v = Map.findWithDefault [v] v $ eqClasses varClasses makeSenseInternal bareFacts allFrameVars varClasses factMap = Sense allFrameVars varClasses factMap factCache usageCache bareFacts where factCache = Map.fromList [(var, [Fact var (normalizeValue varClasses value) | value <- LS.elements $ getFactsFromMap var2Values factMap $ cacheKey varClasses var]) | var <- allFrameVars] usageCache = Map.fromList [(var, map (normalizeFact varClasses) $ LS.elements $ getFactsFromMap var2Usages factMap $ cacheKey varClasses var) | var <- allFrameVars] normalizeFact varClasses (Fact var1 value) = Fact (toBase varClasses var1) (normalizeValue varClasses value) toFrame sense var = Frame (toBase (varClasses sense) var) sense normalizeValue varClasses value = case value of StrValue _ _ -> value VarValue prop var2 -> VarValue prop $ toBase varClasses var2 composeSense senses = makeSenseInternal _bareFacts _allFrameVars mergedClasses _factMap where _bareFacts = senses >>= bareFacts baseIndex = snd $ maximum $ zip (map (Map.size . eqClasses . varClasses) senses) [0..] baseSense = senses !! baseIndex addedSenses = take baseIndex senses ++ drop (baseIndex + 1) senses folder (ec1, u1) vars = let (ec2, u2) = addEqClass ec1 vars in (ec2, composeUpdates u1 u2) classesToAdd = addedSenses >>= Map.elems . eqClasses . 
varClasses (mergedClasses, compositeUpdate) = foldl folder (varClasses baseSense, ClassUpdate Set.empty Set.empty) classesToAdd _allFrameVars = LS.removeDups $ map (toBase mergedClasses) $ (senses >>= allFrameVars) _factMap = composeFactMaps (map factMap senses) (factMap baseSense) compositeUpdate toFrames sense vars = map (flip Frame sense) vars allFrames sense = toFrames sense $ allFrameVars sense allFrameFacts frame = Map.findWithDefault [] (var frame) $ factCache (sense frame) singleListElement list = case list of [single] -> Just single _ -> Nothing earlier f1 attr1 f2 attr2 = let allFacts = bareFacts $ sense f1 eqClasses = varClasses $ sense f1 mi1 = findIndex (isStrAssignment (var f1) attr1) allFacts mi2 = findIndex (isStrAssignment (var f2) attr2) allFacts isStrAssignment var attr = \case Fact {variable=v, value=StrValue a _} | a == attr && toBase eqClasses v == var -> True _ -> False in case (mi1, mi2) of (Just i1, Just i2) | i1 < i2 -> True _ -> False sDeclaredValue attr frame = singleListElement $ allFrameFacts frame >>= \case Fact {value=StrValue a s} | a == attr -> [s] _ -> [] fDeclaredValue attr frame = singleListElement $ allFrameFacts frame >>= \case Fact {value=VarValue a v} | a == attr -> [Frame v (sense frame)] _ -> [] usageFacts frame = Map.findWithDefault [] (var frame) $ usageCache (sense frame) allUsages attrs frame = toFrames (sense frame) $ [v | Fact {variable=v, value=VarValue s _} <- usageFacts frame, s `elem` attrs] usages attr frame = toFrames (sense frame) $ [v | Fact {variable=v, value=VarValue s _} <- usageFacts frame, s == attr] usage attr frame = singleListElement $ usages attr frame reachableFrames :: Frame -> Set.Set Frame reachableFrames origin = visitFrame Set.empty origin where visitFrame visited frame = let neighbours = Set.fromList [toFrame (sense frame) v | (Fact {value=(VarValue _ v)}) <- allFrameFacts frame ] goFurther = Set.foldl visitFrame (Set.insert frame visited) neighbours in if Set.member frame visited then visited else goFurther isFrameReachable src dest = Set.member dest (reachableFrames src) framesTo :: Frame -> Frame -> Int -> Maybe (Set.Set Frame) framesTo src dst maxPathLength = inner Set.empty src where sens = sense src inner path current = let allNeighbors = [toFrame sens v | (Fact {value=(VarValue _ v)}) <- allFrameFacts current] newNeighbors = filter (not . flip Set.member path) allNeighbors pathWithCurrent = Set.insert current path in if current == dst then Just path else if Set.size path >= maxPathLength then Nothing else Just $ Set.unions $ mapMaybe (inner pathWithCurrent) newNeighbors
3a48368ed7bd28a6f4a22e14edeb350bb9a5c9a0471f486b7969a3a91991794a
juspay/atlas
RegistrationToken.hs
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}

{- |
  Copyright 2022 Juspay Technologies Pvt Ltd

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at -2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.

  Module      : Domain.Types.RegistrationToken
  Copyright   : (C) Juspay Technologies Pvt Ltd 2019-2022
  License     : Apache 2.0 (see the file LICENSE)
  Maintainer  :
  Stability   : experimental
  Portability : non-portable
-}

module Domain.Types.RegistrationToken where

import Beckn.Types.Id
import Data.Aeson
import Data.Time
import EulerHS.Prelude hiding (id)

data Medium = SMS | EMAIL
  deriving (Generic, FromJSON, ToJSON, Eq, Show, Read)

data RTEntityType = CUSTOMER | USER
  deriving (Generic, FromJSON, ToJSON, Eq, Show, Read)

data LoginType = OTP | PASSWORD
  deriving (Generic, FromJSON, ToJSON, Eq, Show, Read)

data RegistrationToken = RegistrationToken
  { id :: Id RegistrationToken,
    token :: Text,
    attempts :: Int,
    authMedium :: Medium,
    authType :: LoginType,
    authValueHash :: Text,
    verified :: Bool,
    authExpiry :: Int,
    tokenExpiry :: Int,
    entityId :: Text,
    entityType :: RTEntityType,
    createdAt :: UTCTime,
    updatedAt :: UTCTime,
    info :: Maybe Text
  }
  deriving (Generic, Show, Eq)
null
https://raw.githubusercontent.com/juspay/atlas/e64b227dc17887fb01c2554db21c08284d18a806/app/driver-offer-bpp/src/Domain/Types/RegistrationToken.hs
haskell
# LANGUAGE DeriveAnyClass #
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}

{- |
  Copyright 2022 Juspay Technologies Pvt Ltd

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at -2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.

  Module      : Domain.Types.RegistrationToken
  Copyright   : (C) Juspay Technologies Pvt Ltd 2019-2022
  License     : Apache 2.0 (see the file LICENSE)
  Maintainer  :
  Stability   : experimental
  Portability : non-portable
-}

module Domain.Types.RegistrationToken where

import Beckn.Types.Id
import Data.Aeson
import Data.Time
import EulerHS.Prelude hiding (id)

data Medium = SMS | EMAIL
  deriving (Generic, FromJSON, ToJSON, Eq, Show, Read)

data RTEntityType = CUSTOMER | USER
  deriving (Generic, FromJSON, ToJSON, Eq, Show, Read)

data LoginType = OTP | PASSWORD
  deriving (Generic, FromJSON, ToJSON, Eq, Show, Read)

data RegistrationToken = RegistrationToken
  { id :: Id RegistrationToken,
    token :: Text,
    attempts :: Int,
    authMedium :: Medium,
    authType :: LoginType,
    authValueHash :: Text,
    verified :: Bool,
    authExpiry :: Int,
    tokenExpiry :: Int,
    entityId :: Text,
    entityType :: RTEntityType,
    createdAt :: UTCTime,
    updatedAt :: UTCTime,
    info :: Maybe Text
  }
  deriving (Generic, Show, Eq)
0e3c175715e06b7d9eafd8ea52dd4f0a9d8d98d1ffe5c16e16df60cad8e8ccc2
scalar-labs/scalar-jepsen
transfer_test.clj
(ns scalardl.transfer-test (:require [clojure.test :refer :all] [jepsen.client :as client] [jepsen.checker :as checker] [scalardl.cassandra :as cassandra] [scalardl.core :as dl] [scalardl.transfer :as transfer] [spy.core :as spy]) (:import (com.scalar.dl.client.exception ClientException) (com.scalar.dl.client.service ClientService) (com.scalar.dl.ledger.model ContractExecutionResult) (com.scalar.dl.ledger.service StatusCode) (javax.json Json))) (def ^:dynamic contract-count (atom 0)) (def ^:dynamic execute-count (atom 0)) (def ^:dynamic test-records (atom [])) (def mock-client-service (proxy [ClientService] [nil nil nil nil] (registerCertificate []) (registerContract [_ _ _ _] (swap! contract-count inc) nil) (executeContract [& _] (swap! execute-count inc) (ContractExecutionResult. (-> (Json/createObjectBuilder) (.add "balance" 1000) (.add "age" 111) .build) nil nil)))) (def mock-failure-client-service (proxy [ClientService] [nil nil nil nil] (registerCertificate []) (registerContract [_ _ _ _] (swap! contract-count inc) nil) (executeContract [& _] (swap! execute-count inc) (throw (ClientException. "the status is unknown" (Exception.) StatusCode/UNKNOWN_TRANSACTION_STATUS))))) (deftest transfer-client-init-test (binding [contract-count (atom 0) execute-count (atom 0)] (with-redefs [dl/prepare-client-service (spy/stub mock-client-service) cassandra/create-tables (spy/spy)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 5) nil nil)] (client/setup! client nil) (is (spy/called-once? cassandra/create-tables)) (is (= 3 @contract-count)) (is (= 5 @execute-count)) (is (true? @(:initialized? client))) ;; setup isn't executed (client/setup! client nil) (is (spy/called-once? cassandra/create-tables)))))) (deftest transfer-client-transfer-test (binding [execute-count (atom 0)] (with-redefs [dl/prepare-client-service (spy/stub mock-client-service)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 5) nil nil) result (client/invoke! client nil (#'transfer/transfer {:client client} nil))] (is (= 1 @execute-count)) (is (= :ok (:type result))))))) (deftest transfer-client-transfer-unknown-test (with-redefs [dl/prepare-client-service (spy/stub mock-failure-client-service) dl/try-switch-server! (spy/stub mock-client-service)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 5) nil nil) test {:unknown-tx (atom #{})} result (client/invoke! client test (#'transfer/transfer {:client client} nil))] (is (spy/called-once? dl/try-switch-server!)) (is (= 1 (count @(:unknown-tx test)))) (is (= mock-client-service @(:client-service client))) (is (= :fail (:type result)))))) (deftest transfer-client-check-tx-test (with-redefs [dl/prepare-client-service (spy/stub mock-client-service) dl/committed? (spy/stub true)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 5) nil nil) result (client/invoke! client {:unknown-tx (atom #{"tx1" "tx2"})} (#'transfer/check-tx {} nil))] (is (= :ok (:type result))) (is (= 2 (:value result)))))) (deftest transfer-client-check-tx-fail-test (with-redefs [dl/prepare-client-service (spy/stub mock-client-service) dl/committed? (spy/mock (fn [_ _] (throw (ex-info "fail" {}))))] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 5) nil nil)] (is (thrown? clojure.lang.ExceptionInfo (client/invoke! 
client {:unknown-tx (atom #{"tx1"})} (#'transfer/check-tx {} nil))))))) (deftest transfer-client-get-all-test (binding [execute-count (atom 0)] (with-redefs [cassandra/wait-cassandra (spy/spy) dl/prepare-client-service (spy/stub mock-client-service)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 1) nil nil) result (client/invoke! client nil (#'transfer/get-all {} nil))] (is (= 1 @execute-count)) (is (= :ok (:type result))) (is (= [{:balance 1000 :age 111}] (:value result))))))) (deftest transfer-client-get-all-fail-test (with-redefs [cassandra/wait-cassandra (spy/spy) dl/prepare-client-service (spy/stub mock-failure-client-service) dl/exponential-backoff (spy/spy)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 1) nil nil)] (is (thrown? clojure.lang.ExceptionInfo (client/invoke! client nil (#'transfer/get-all {} nil)))) (is (spy/called-n-times? dl/exponential-backoff 8))))) (def correct-history [{:type :ok :f :transfer} {:type :ok :f :transfer} {:type :fail :f :transfer :error {:unknown-tx-status "unknown-state-tx"}} {:type :ok :f :transfer} {:type :ok :f :transfer} {:type :ok :f :transfer} {:type :ok :f :transfer} {:type :ok :f :get-all :value [{:balance 10120 :age 1} {:balance 10140 :age 2} {:balance 9980 :age 1} {:balance 9760 :age 2} {:balance 10000 :age 0} {:balance 10500 :age 1} {:balance 9820 :age 1} {:balance 8700 :age 3} {:balance 10620 :age 1} {:balance 10360 :age 2}]} {:type :ok :f :check-tx :value 1}]) (deftest asset-checker-test (with-redefs [dl/prepare-client-service (spy/stub mock-client-service)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 10) nil nil) checker (#'transfer/asset-checker) result (checker/check checker {:client client} correct-history nil)] (is (true? (:valid? result))) (is (= 14 (:total-age result))) (is (= 1 (:committed-unknown-tx result))) (is (nil? (:bad-balance result))) (is (nil? (:bad-age result)))))) (def bad-history [{:type :ok :f :transfer} {:type :ok :f :transfer} {:type :fail :f :transfer :error {:unknown-tx-status "unknown-state-tx"}} {:type :ok :f :transfer} {:type :ok :f :transfer} {:type :ok :f :transfer} {:type :ok :f :transfer} {:type :ok :f :get-all :value [{:balance 10120 :age 1} {:balance 10140 :age 2} {:balance 9980 :age 1} {:balance 9760 :age 2} {:balance 10001 :age 0} {:balance 10500 :age 1} {:balance 9820 :age 1} {:balance 8700 :age 3} {:balance 10620 :age 1} {:balance 10360 :age 2}]} {:type :fail :f :check-tx}]) (deftest asset-checker-fail-test (with-redefs [dl/prepare-client-service (spy/stub mock-client-service)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 10) nil nil) checker (#'transfer/asset-checker) result (checker/check checker {:client client} bad-history nil)] (is (false? (:valid? result))) (is (= 14 (:total-age result))) (is (= 0 (:committed-unknown-tx result))) (is (= {:type :wrong-balance :expected 100000 :actual 100001} (:bad-balance result))) (is (= {:type :wrong-age :expected 12 :actual 14} (:bad-age result))))))
null
https://raw.githubusercontent.com/scalar-labs/scalar-jepsen/1a21df4c3b5b5cd069d71161bda6d6b989469fe8/scalardl/test/scalardl/transfer_test.clj
clojure
setup isn't executed
(ns scalardl.transfer-test (:require [clojure.test :refer :all] [jepsen.client :as client] [jepsen.checker :as checker] [scalardl.cassandra :as cassandra] [scalardl.core :as dl] [scalardl.transfer :as transfer] [spy.core :as spy]) (:import (com.scalar.dl.client.exception ClientException) (com.scalar.dl.client.service ClientService) (com.scalar.dl.ledger.model ContractExecutionResult) (com.scalar.dl.ledger.service StatusCode) (javax.json Json))) (def ^:dynamic contract-count (atom 0)) (def ^:dynamic execute-count (atom 0)) (def ^:dynamic test-records (atom [])) (def mock-client-service (proxy [ClientService] [nil nil nil nil] (registerCertificate []) (registerContract [_ _ _ _] (swap! contract-count inc) nil) (executeContract [& _] (swap! execute-count inc) (ContractExecutionResult. (-> (Json/createObjectBuilder) (.add "balance" 1000) (.add "age" 111) .build) nil nil)))) (def mock-failure-client-service (proxy [ClientService] [nil nil nil nil] (registerCertificate []) (registerContract [_ _ _ _] (swap! contract-count inc) nil) (executeContract [& _] (swap! execute-count inc) (throw (ClientException. "the status is unknown" (Exception.) StatusCode/UNKNOWN_TRANSACTION_STATUS))))) (deftest transfer-client-init-test (binding [contract-count (atom 0) execute-count (atom 0)] (with-redefs [dl/prepare-client-service (spy/stub mock-client-service) cassandra/create-tables (spy/spy)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 5) nil nil)] (client/setup! client nil) (is (spy/called-once? cassandra/create-tables)) (is (= 3 @contract-count)) (is (= 5 @execute-count)) (is (true? @(:initialized? client))) (client/setup! client nil) (is (spy/called-once? cassandra/create-tables)))))) (deftest transfer-client-transfer-test (binding [execute-count (atom 0)] (with-redefs [dl/prepare-client-service (spy/stub mock-client-service)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 5) nil nil) result (client/invoke! client nil (#'transfer/transfer {:client client} nil))] (is (= 1 @execute-count)) (is (= :ok (:type result))))))) (deftest transfer-client-transfer-unknown-test (with-redefs [dl/prepare-client-service (spy/stub mock-failure-client-service) dl/try-switch-server! (spy/stub mock-client-service)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 5) nil nil) test {:unknown-tx (atom #{})} result (client/invoke! client test (#'transfer/transfer {:client client} nil))] (is (spy/called-once? dl/try-switch-server!)) (is (= 1 (count @(:unknown-tx test)))) (is (= mock-client-service @(:client-service client))) (is (= :fail (:type result)))))) (deftest transfer-client-check-tx-test (with-redefs [dl/prepare-client-service (spy/stub mock-client-service) dl/committed? (spy/stub true)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 5) nil nil) result (client/invoke! client {:unknown-tx (atom #{"tx1" "tx2"})} (#'transfer/check-tx {} nil))] (is (= :ok (:type result))) (is (= 2 (:value result)))))) (deftest transfer-client-check-tx-fail-test (with-redefs [dl/prepare-client-service (spy/stub mock-client-service) dl/committed? (spy/mock (fn [_ _] (throw (ex-info "fail" {}))))] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 5) nil nil)] (is (thrown? clojure.lang.ExceptionInfo (client/invoke! 
client {:unknown-tx (atom #{"tx1"})} (#'transfer/check-tx {} nil))))))) (deftest transfer-client-get-all-test (binding [execute-count (atom 0)] (with-redefs [cassandra/wait-cassandra (spy/spy) dl/prepare-client-service (spy/stub mock-client-service)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 1) nil nil) result (client/invoke! client nil (#'transfer/get-all {} nil))] (is (= 1 @execute-count)) (is (= :ok (:type result))) (is (= [{:balance 1000 :age 111}] (:value result))))))) (deftest transfer-client-get-all-fail-test (with-redefs [cassandra/wait-cassandra (spy/spy) dl/prepare-client-service (spy/stub mock-failure-client-service) dl/exponential-backoff (spy/spy)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 1) nil nil)] (is (thrown? clojure.lang.ExceptionInfo (client/invoke! client nil (#'transfer/get-all {} nil)))) (is (spy/called-n-times? dl/exponential-backoff 8))))) (def correct-history [{:type :ok :f :transfer} {:type :ok :f :transfer} {:type :fail :f :transfer :error {:unknown-tx-status "unknown-state-tx"}} {:type :ok :f :transfer} {:type :ok :f :transfer} {:type :ok :f :transfer} {:type :ok :f :transfer} {:type :ok :f :get-all :value [{:balance 10120 :age 1} {:balance 10140 :age 2} {:balance 9980 :age 1} {:balance 9760 :age 2} {:balance 10000 :age 0} {:balance 10500 :age 1} {:balance 9820 :age 1} {:balance 8700 :age 3} {:balance 10620 :age 1} {:balance 10360 :age 2}]} {:type :ok :f :check-tx :value 1}]) (deftest asset-checker-test (with-redefs [dl/prepare-client-service (spy/stub mock-client-service)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 10) nil nil) checker (#'transfer/asset-checker) result (checker/check checker {:client client} correct-history nil)] (is (true? (:valid? result))) (is (= 14 (:total-age result))) (is (= 1 (:committed-unknown-tx result))) (is (nil? (:bad-balance result))) (is (nil? (:bad-age result)))))) (def bad-history [{:type :ok :f :transfer} {:type :ok :f :transfer} {:type :fail :f :transfer :error {:unknown-tx-status "unknown-state-tx"}} {:type :ok :f :transfer} {:type :ok :f :transfer} {:type :ok :f :transfer} {:type :ok :f :transfer} {:type :ok :f :get-all :value [{:balance 10120 :age 1} {:balance 10140 :age 2} {:balance 9980 :age 1} {:balance 9760 :age 2} {:balance 10001 :age 0} {:balance 10500 :age 1} {:balance 9820 :age 1} {:balance 8700 :age 3} {:balance 10620 :age 1} {:balance 10360 :age 2}]} {:type :fail :f :check-tx}]) (deftest asset-checker-fail-test (with-redefs [dl/prepare-client-service (spy/stub mock-client-service)] (let [client (client/open! (transfer/->TransferClient (atom false) (atom nil) 10) nil nil) checker (#'transfer/asset-checker) result (checker/check checker {:client client} bad-history nil)] (is (false? (:valid? result))) (is (= 14 (:total-age result))) (is (= 0 (:committed-unknown-tx result))) (is (= {:type :wrong-balance :expected 100000 :actual 100001} (:bad-balance result))) (is (= {:type :wrong-age :expected 12 :actual 14} (:bad-age result))))))
0851ec1ec357f339a7fc69d8f72d527fcf770b94bfc8f5c2e93822fb77baf311
jarofghosts/clojure-koans
06_functions.clj
(defn multiply-by-ten [n] (* 10 n)) (defn square [n] (* n n)) (meditations "Calling a function is like giving it a hug with parentheses" (= 81 (square 9)) "Functions are usually defined before they are used" (= 20 (multiply-by-ten 2)) "But they can also be defined inline" (= 10 ((fn [n] (* 5 n)) 2)) "Or using an even shorter syntax" (= 60 (#(* 15 %) 4)) "Even anonymous functions may take multiple arguments" (= 15 (#(+ %1 %2 %3) 4 5 6)) "Arguments can also be skipped" (= 30 (#(* 15 %2) 1 2)) "One function can beget another" (= 9 (((fn [] +)) 4 5)) "Functions can also take other functions as input" (= 20 ((fn [f] (f 4 5)) *)) "Higher-order functions take function arguments" (= 25 ((fn [f] (f 5)) (fn [n] (* n n)))) "But they are often better written using the names of functions" (= 25 ((fn [f] (f 5)) square)))
null
https://raw.githubusercontent.com/jarofghosts/clojure-koans/9bc2a46414f479021ff27ac8744d36dce507ad7f/06_functions.clj
clojure
(defn multiply-by-ten [n] (* 10 n)) (defn square [n] (* n n)) (meditations "Calling a function is like giving it a hug with parentheses" (= 81 (square 9)) "Functions are usually defined before they are used" (= 20 (multiply-by-ten 2)) "But they can also be defined inline" (= 10 ((fn [n] (* 5 n)) 2)) "Or using an even shorter syntax" (= 60 (#(* 15 %) 4)) "Even anonymous functions may take multiple arguments" (= 15 (#(+ %1 %2 %3) 4 5 6)) "Arguments can also be skipped" (= 30 (#(* 15 %2) 1 2)) "One function can beget another" (= 9 (((fn [] +)) 4 5)) "Functions can also take other functions as input" (= 20 ((fn [f] (f 4 5)) *)) "Higher-order functions take function arguments" (= 25 ((fn [f] (f 5)) (fn [n] (* n n)))) "But they are often better written using the names of functions" (= 25 ((fn [f] (f 5)) square)))
a6981f102b3ce08b6a2bbf9b234b629c44912048bcb6b26106efe32c9adfeeae
inhabitedtype/ocaml-aws
purchaseReservedInstancesOffering.mli
open Types type input = PurchaseReservedInstancesOfferingRequest.t type output = PurchaseReservedInstancesOfferingResult.t type error = Errors_internal.t include Aws.Call with type input := input and type output := output and type error := error
null
https://raw.githubusercontent.com/inhabitedtype/ocaml-aws/b6d5554c5d201202b5de8d0b0253871f7b66dab6/libraries/ec2/lib/purchaseReservedInstancesOffering.mli
ocaml
open Types type input = PurchaseReservedInstancesOfferingRequest.t type output = PurchaseReservedInstancesOfferingResult.t type error = Errors_internal.t include Aws.Call with type input := input and type output := output and type error := error
2cf4cb52d8aa5c50d73b013205539445d2049827b49d10e119f952d293f2af8e
haskell-mafia/projector
QQ.hs
# LANGUAGE NoImplicitPrelude # {-# LANGUAGE OverloadedStrings #-} module Projector.Html.Syntax.QQ ( template ) where import qualified Data.Text as T import P import qualified Prelude import Projector.Html.Syntax (templateFromText) import Language.Haskell.TH (Loc (..), location) import Language.Haskell.TH.Quote (QuasiQuoter) import X.Language.Haskell.TH (dataExp, qparse) template :: QuasiQuoter template = qparse $ \s -> do loc <- location case templateFromText (loc_filename loc) (T.pack s) of Left e -> Prelude.error $ "Failed to parse quasi quoter: " <> show e Right t -> dataExp t
null
https://raw.githubusercontent.com/haskell-mafia/projector/6af7c7f1e8a428b14c2c5a508f7d4a3ac2decd52/projector-html/src/Projector/Html/Syntax/QQ.hs
haskell
# LANGUAGE OverloadedStrings #
# LANGUAGE NoImplicitPrelude # module Projector.Html.Syntax.QQ ( template ) where import qualified Data.Text as T import P import qualified Prelude import Projector.Html.Syntax (templateFromText) import Language.Haskell.TH (Loc (..), location) import Language.Haskell.TH.Quote (QuasiQuoter) import X.Language.Haskell.TH (dataExp, qparse) template :: QuasiQuoter template = qparse $ \s -> do loc <- location case templateFromText (loc_filename loc) (T.pack s) of Left e -> Prelude.error $ "Failed to parse quasi quoter: " <> show e Right t -> dataExp t
4e1693dedd1cd48ee9a5f00dfbe65ba3088ed350ea955f17f9143be4abcb9fb9
gregtatcam/imaplet-lwt
imaplet_irmin_read.ml
* Copyright ( c ) 2013 - 2014 < > * * Permission to use , copy , modify , and distribute this software for any * purpose with or without fee is hereby granted , provided that the above * copyright notice and this permission notice appear in all copies . * * THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN * ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE . * Copyright (c) 2013-2014 Gregory Tsipenyuk <> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *) open Lwt open Sexplib open Irmin_storage open Irmin_core open Imaplet_types open Utils open Regex open Dates open Lazy_message open Server_config open Parsemail exception InvalidCmd exception Quit let uinput = ref [] let arg n = if List.length !uinput > n then List.nth !uinput n else raise InvalidCmd let in_line () = Lwt_io.read_line Lwt_io.stdin let out_line str = Lwt_io.write Lwt_io.stdout str >>= fun () -> Lwt_io.flush Lwt_io.stdout let prompt str = out_line str >>= fun () -> in_line () >>= fun msg -> uinput := (Str.split (Str.regexp " ") msg); return (arg 0) let get_user_pswd user = if Regex.match_regex ~regx:"^\\([^:]+\\):\\(.+\\)$" user then (Str.matched_group 1 user, Some (Str.matched_group 2 user)) else (user,None) let get_user user = let (user,_) = get_user_pswd user in user let get_keys srv_config user = let (user,pswd) = get_user_pswd user in Ssl_.get_user_keys ~user ?pswd srv_config let rec tree user key indent = IrminIntf.create ~user:(get_user user) srv_config >>= fun store -> IrminIntf.list store key >>= fun l -> Lwt_list.iter_s (fun i -> Printf.printf "%s%s%!" indent ("/" ^ i); IrminIntf.mem store (Key_.t_of_path i) >>= fun res -> if res then ( IrminIntf.read_exn store (Key_.t_of_path i) >>= fun v -> Printf.printf "%s\n%!" 
v; return () ) else ( Printf.printf "\n%!"; tree user (Key_.t_of_path i) (indent ^ " ") ) ) l let message_template from_ to_ subject_ email_ = let postmark = replace ~regx:"DATE" ~tmpl:(postmark_date_time()) "From FROM DATE" in let postmark = replace ~regx:"FROM" ~tmpl:from_ postmark in let id_ = Pervasives.string_of_float (Unix.time()) in let message = ("From: FROM\r\n" ^ "Content-Type: text/plain; charset=us-ascii\r\n" ^ "Content-Transfer-Encoding: 7bit\r\n" ^ "Subject: SUBJECT\r\n" ^ "Message-Id: <ID@localhost>\r\n" ^ "Date: DATE\r\n" ^ "To: TO\r\n" ^ "\r\n" ^ "EMAIL\r\n\r\n") in let message = replace ~regx:"FROM" ~tmpl:from_ message in let message = replace ~regx:"SUBJECT" ~tmpl:subject_ message in let message = replace ~regx:"TO" ~tmpl:to_ message in let message = replace ~regx:"EMAIL" ~tmpl:email_ message in let message = replace ~regx:"ID" ~tmpl:id_ message in postmark ^ "\r\n" ^ message let append user mailbox = let open Storage_meta in prompt "from: " >>= fun from_ -> prompt "to: " >>= fun to_ -> prompt "subject: " >>= fun subject_ -> prompt "email: " >>= fun email_ -> get_keys srv_config user >>= fun keys -> let message = message_template from_ to_ subject_ email_ in IrminStorage.create srv_config (get_user user) mailbox keys >>= fun str -> IrminStorage.append str message (empty_mailbox_message_metadata()) let rec selected user mailbox mbox = let open Storage_meta in try prompt ((get_user user) ^ ":" ^ mailbox ^ ": ") >>= function | "help" -> Printf.printf "all\nexists\nhelp\nlist\nmeta\nappend\nmessage #\ntree \nclose\nremove uid\nstore # +-| flags-list\nquit\n%!"; selected user mailbox mbox | "quit" -> raise Quit | "append" -> append user mailbox >>= fun () -> selected user mailbox mbox | "all" -> GitMailbox.show_all mbox >>= fun () -> selected user mailbox mbox | "tree" -> let (_,key) = Key_.mailbox_of_path mailbox in let key = "imaplet" :: ((get_user user) :: key) in tree user key "" >>= fun () -> selected user mailbox mbox | "exists" -> GitMailbox.exists mbox >>= fun res -> ( match res with | `No -> Printf.printf "no\n%!" | `Folder -> Printf.printf "folder\n%!" | `Mailbox -> Printf.printf "storage\n%!" ); selected user mailbox mbox | "meta" -> GitMailbox.read_mailbox_metadata mbox >>= fun meta -> Printf.printf "%s\n%!" (Sexp.to_string (sexp_of_mailbox_metadata meta)); selected user mailbox mbox | "message" -> let pos = arg 1 in ( let pos = int_of_string pos in GitMailbox.read_message mbox (`Sequence pos) >>= function | `Ok (module LM:LazyMessage_inst) -> LM.LazyMessage.get_message_metadata LM.this >>= fun meta -> LM.LazyMessage.get_email LM.this >>= fun (module LE:LazyEmail_inst) -> LE.LazyEmail.to_string LE.this >>= fun email -> Printf.printf "%s\n%!" (Sexp.to_string (sexp_of_mailbox_message_metadata meta)); Printf.printf "%s\n%!" email; return () | `NotFound -> Printf.printf "not found\n%!"; return () | `Eof -> Printf.printf "eof\n%!"; return () ) >>= fun() -> selected user mailbox mbox | "store" -> let pos = arg 1 in ( let pos = int_of_string pos in GitMailbox.read_message_metadata mbox (`Sequence pos) >>= function | `Ok (meta) -> let (_,flags) = List.fold_left (fun (i,acc) el -> Printf.printf "%s\n%!" 
el;if i < 3 then (i+1,acc) else (i+1,(str_to_fl ("\\" ^ el)) :: acc)) (0,[]) !uinput in let find l i = try let _ = (List.find (fun el -> el = i) l) in true with _ -> false in let meta = ( match (arg 2) with | "+" -> let flags = List.fold_left (fun acc i -> if find acc i then acc else i :: acc) meta.flags flags in {meta with flags} | "-" -> let flags = List.fold_left (fun acc i -> if find flags i then acc else i :: acc) [] meta.flags in {meta with flags} | "|" -> {meta with flags} | _ -> raise InvalidCmd ) in GitMailbox.update_message_metadata mbox (`Sequence pos) meta >>= fun res -> ( match res with | `Ok -> Printf.printf "updated\n%!" | `Eof -> Printf.printf "eof\n%!" | `NotFound -> Printf.printf "not found\n%!" ); return () | `NotFound -> Printf.printf "not found\n%!"; return () | `Eof -> Printf.printf "eof\n%!"; return () ) >>= fun () -> selected user mailbox mbox | "remove" -> let uid = arg 1 in GitMailbox.delete_message mbox (`UID (int_of_string uid)) >>= fun () -> selected user mailbox mbox (* | "expunge" -> GitMailbox.expunge mbox >>= fun deleted -> List.iter deleted ~f:(fun i -> Printf.printf "deleted %d\n%!" i); selected user mailbox mbox *) | "list" -> GitMailbox.list ~subscribed:false ~access:(fun _ -> true) mbox ~init:[] ~f:( fun acc item -> return ((item::acc)) ) >>= fun l -> List.iter (fun i -> match i with | `Folder (f,i) -> Printf.printf "folder/%d %s\n%!" i f; | `Mailbox (m,i) -> Printf.printf "mailbox %s\n%!" m; ) l; selected user mailbox mbox | "close" -> return () | _ -> Printf.printf "unknown command\n%!"; selected user mailbox mbox with InvalidCmd -> Printf.printf "unknown command\n%!"; selected user mailbox mbox let main () = out_line "type help for commands\n" >>= fun () -> let rec request user = catch (fun () -> prompt ((get_user user) ^ ": ") >>= function | "help" -> Printf.printf "help\nselect mbox\ncrtmailbox mailbox\nlist\ntree\ndelete\ncreate\nuser\nquit\n%!"; request user | "user" -> prompt "user? " >>= fun user -> request user | "delete" -> get_keys srv_config user >>= fun keys -> IrminStorage.create srv_config (get_user user) "" keys >>= fun str -> IrminStorage.delete_account str | "crtmailbox" -> let mailbox = arg 1 in get_keys srv_config user >>= fun keys -> IrminStorage.create srv_config (get_user user) mailbox keys >>= fun str -> IrminStorage.create_mailbox str >>= fun () -> request user | "create" -> get_keys srv_config user >>= fun keys -> IrminStorage.create srv_config (get_user user) "" keys >>= fun str -> IrminStorage.create_account str >>= fun _ -> return () | "tree" -> let key = Key_.create_account (get_user user) in tree user key "" >> request user | "select" -> let mailbox = Str.replace_first (Str.regexp "+") " " (arg 1) in get_keys srv_config user >>= fun keys -> GitMailbox.create srv_config (get_user user) mailbox keys >>= fun mbox -> selected user mailbox mbox >>= fun () -> request user | "list" -> get_keys srv_config user >>= fun keys -> GitMailbox.create srv_config (get_user user) "" keys >>= fun mbox -> GitMailbox.list ~subscribed:false ~access:(fun _ -> true) mbox ~init:[] ~f:( fun acc item -> return ((item::acc)) ) >>= fun l -> List.iter (fun i -> match i with | `Folder (i,c) -> Printf.printf "folder children:%d %s\n%!" c i | `Mailbox (i,c) -> Printf.printf "storage %d %s\n%!" c i) l; request user | "quit" -> return () | _ -> Printf.printf "unknown command\n%!"; request user ) (fun ex -> match ex with | InvalidCmd -> Printf.printf "unknown command\n%!"; request user | Quit -> return () | _ -> Printf.printf "exception %s\n%!" 
(Printexc.to_string ex); return () ) in prompt "user? " >>= fun user -> request user let () = Lwt_main.run (main())
null
https://raw.githubusercontent.com/gregtatcam/imaplet-lwt/d7b51253e79cffa97e98ab899ed833cd7cb44bb6/servers/imaplet_irmin_read.ml
ocaml
| "expunge" -> GitMailbox.expunge mbox >>= fun deleted -> List.iter deleted ~f:(fun i -> Printf.printf "deleted %d\n%!" i); selected user mailbox mbox
* Copyright ( c ) 2013 - 2014 < > * * Permission to use , copy , modify , and distribute this software for any * purpose with or without fee is hereby granted , provided that the above * copyright notice and this permission notice appear in all copies . * * THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN * ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE . * Copyright (c) 2013-2014 Gregory Tsipenyuk <> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *) open Lwt open Sexplib open Irmin_storage open Irmin_core open Imaplet_types open Utils open Regex open Dates open Lazy_message open Server_config open Parsemail exception InvalidCmd exception Quit let uinput = ref [] let arg n = if List.length !uinput > n then List.nth !uinput n else raise InvalidCmd let in_line () = Lwt_io.read_line Lwt_io.stdin let out_line str = Lwt_io.write Lwt_io.stdout str >>= fun () -> Lwt_io.flush Lwt_io.stdout let prompt str = out_line str >>= fun () -> in_line () >>= fun msg -> uinput := (Str.split (Str.regexp " ") msg); return (arg 0) let get_user_pswd user = if Regex.match_regex ~regx:"^\\([^:]+\\):\\(.+\\)$" user then (Str.matched_group 1 user, Some (Str.matched_group 2 user)) else (user,None) let get_user user = let (user,_) = get_user_pswd user in user let get_keys srv_config user = let (user,pswd) = get_user_pswd user in Ssl_.get_user_keys ~user ?pswd srv_config let rec tree user key indent = IrminIntf.create ~user:(get_user user) srv_config >>= fun store -> IrminIntf.list store key >>= fun l -> Lwt_list.iter_s (fun i -> Printf.printf "%s%s%!" indent ("/" ^ i); IrminIntf.mem store (Key_.t_of_path i) >>= fun res -> if res then ( IrminIntf.read_exn store (Key_.t_of_path i) >>= fun v -> Printf.printf "%s\n%!" 
v; return () ) else ( Printf.printf "\n%!"; tree user (Key_.t_of_path i) (indent ^ " ") ) ) l let message_template from_ to_ subject_ email_ = let postmark = replace ~regx:"DATE" ~tmpl:(postmark_date_time()) "From FROM DATE" in let postmark = replace ~regx:"FROM" ~tmpl:from_ postmark in let id_ = Pervasives.string_of_float (Unix.time()) in let message = ("From: FROM\r\n" ^ "Content-Type: text/plain; charset=us-ascii\r\n" ^ "Content-Transfer-Encoding: 7bit\r\n" ^ "Subject: SUBJECT\r\n" ^ "Message-Id: <ID@localhost>\r\n" ^ "Date: DATE\r\n" ^ "To: TO\r\n" ^ "\r\n" ^ "EMAIL\r\n\r\n") in let message = replace ~regx:"FROM" ~tmpl:from_ message in let message = replace ~regx:"SUBJECT" ~tmpl:subject_ message in let message = replace ~regx:"TO" ~tmpl:to_ message in let message = replace ~regx:"EMAIL" ~tmpl:email_ message in let message = replace ~regx:"ID" ~tmpl:id_ message in postmark ^ "\r\n" ^ message let append user mailbox = let open Storage_meta in prompt "from: " >>= fun from_ -> prompt "to: " >>= fun to_ -> prompt "subject: " >>= fun subject_ -> prompt "email: " >>= fun email_ -> get_keys srv_config user >>= fun keys -> let message = message_template from_ to_ subject_ email_ in IrminStorage.create srv_config (get_user user) mailbox keys >>= fun str -> IrminStorage.append str message (empty_mailbox_message_metadata()) let rec selected user mailbox mbox = let open Storage_meta in try prompt ((get_user user) ^ ":" ^ mailbox ^ ": ") >>= function | "help" -> Printf.printf "all\nexists\nhelp\nlist\nmeta\nappend\nmessage #\ntree \nclose\nremove uid\nstore # +-| flags-list\nquit\n%!"; selected user mailbox mbox | "quit" -> raise Quit | "append" -> append user mailbox >>= fun () -> selected user mailbox mbox | "all" -> GitMailbox.show_all mbox >>= fun () -> selected user mailbox mbox | "tree" -> let (_,key) = Key_.mailbox_of_path mailbox in let key = "imaplet" :: ((get_user user) :: key) in tree user key "" >>= fun () -> selected user mailbox mbox | "exists" -> GitMailbox.exists mbox >>= fun res -> ( match res with | `No -> Printf.printf "no\n%!" | `Folder -> Printf.printf "folder\n%!" | `Mailbox -> Printf.printf "storage\n%!" ); selected user mailbox mbox | "meta" -> GitMailbox.read_mailbox_metadata mbox >>= fun meta -> Printf.printf "%s\n%!" (Sexp.to_string (sexp_of_mailbox_metadata meta)); selected user mailbox mbox | "message" -> let pos = arg 1 in ( let pos = int_of_string pos in GitMailbox.read_message mbox (`Sequence pos) >>= function | `Ok (module LM:LazyMessage_inst) -> LM.LazyMessage.get_message_metadata LM.this >>= fun meta -> LM.LazyMessage.get_email LM.this >>= fun (module LE:LazyEmail_inst) -> LE.LazyEmail.to_string LE.this >>= fun email -> Printf.printf "%s\n%!" (Sexp.to_string (sexp_of_mailbox_message_metadata meta)); Printf.printf "%s\n%!" email; return () | `NotFound -> Printf.printf "not found\n%!"; return () | `Eof -> Printf.printf "eof\n%!"; return () ) >>= fun() -> selected user mailbox mbox | "store" -> let pos = arg 1 in ( let pos = int_of_string pos in GitMailbox.read_message_metadata mbox (`Sequence pos) >>= function | `Ok (meta) -> let (_,flags) = List.fold_left (fun (i,acc) el -> Printf.printf "%s\n%!" 
el;if i < 3 then (i+1,acc) else (i+1,(str_to_fl ("\\" ^ el)) :: acc)) (0,[]) !uinput in let find l i = try let _ = (List.find (fun el -> el = i) l) in true with _ -> false in let meta = ( match (arg 2) with | "+" -> let flags = List.fold_left (fun acc i -> if find acc i then acc else i :: acc) meta.flags flags in {meta with flags} | "-" -> let flags = List.fold_left (fun acc i -> if find flags i then acc else i :: acc) [] meta.flags in {meta with flags} | "|" -> {meta with flags} | _ -> raise InvalidCmd ) in GitMailbox.update_message_metadata mbox (`Sequence pos) meta >>= fun res -> ( match res with | `Ok -> Printf.printf "updated\n%!" | `Eof -> Printf.printf "eof\n%!" | `NotFound -> Printf.printf "not found\n%!" ); return () | `NotFound -> Printf.printf "not found\n%!"; return () | `Eof -> Printf.printf "eof\n%!"; return () ) >>= fun () -> selected user mailbox mbox | "remove" -> let uid = arg 1 in GitMailbox.delete_message mbox (`UID (int_of_string uid)) >>= fun () -> selected user mailbox mbox | "list" -> GitMailbox.list ~subscribed:false ~access:(fun _ -> true) mbox ~init:[] ~f:( fun acc item -> return ((item::acc)) ) >>= fun l -> List.iter (fun i -> match i with | `Folder (f,i) -> Printf.printf "folder/%d %s\n%!" i f; | `Mailbox (m,i) -> Printf.printf "mailbox %s\n%!" m; ) l; selected user mailbox mbox | "close" -> return () | _ -> Printf.printf "unknown command\n%!"; selected user mailbox mbox with InvalidCmd -> Printf.printf "unknown command\n%!"; selected user mailbox mbox let main () = out_line "type help for commands\n" >>= fun () -> let rec request user = catch (fun () -> prompt ((get_user user) ^ ": ") >>= function | "help" -> Printf.printf "help\nselect mbox\ncrtmailbox mailbox\nlist\ntree\ndelete\ncreate\nuser\nquit\n%!"; request user | "user" -> prompt "user? " >>= fun user -> request user | "delete" -> get_keys srv_config user >>= fun keys -> IrminStorage.create srv_config (get_user user) "" keys >>= fun str -> IrminStorage.delete_account str | "crtmailbox" -> let mailbox = arg 1 in get_keys srv_config user >>= fun keys -> IrminStorage.create srv_config (get_user user) mailbox keys >>= fun str -> IrminStorage.create_mailbox str >>= fun () -> request user | "create" -> get_keys srv_config user >>= fun keys -> IrminStorage.create srv_config (get_user user) "" keys >>= fun str -> IrminStorage.create_account str >>= fun _ -> return () | "tree" -> let key = Key_.create_account (get_user user) in tree user key "" >> request user | "select" -> let mailbox = Str.replace_first (Str.regexp "+") " " (arg 1) in get_keys srv_config user >>= fun keys -> GitMailbox.create srv_config (get_user user) mailbox keys >>= fun mbox -> selected user mailbox mbox >>= fun () -> request user | "list" -> get_keys srv_config user >>= fun keys -> GitMailbox.create srv_config (get_user user) "" keys >>= fun mbox -> GitMailbox.list ~subscribed:false ~access:(fun _ -> true) mbox ~init:[] ~f:( fun acc item -> return ((item::acc)) ) >>= fun l -> List.iter (fun i -> match i with | `Folder (i,c) -> Printf.printf "folder children:%d %s\n%!" c i | `Mailbox (i,c) -> Printf.printf "storage %d %s\n%!" c i) l; request user | "quit" -> return () | _ -> Printf.printf "unknown command\n%!"; request user ) (fun ex -> match ex with | InvalidCmd -> Printf.printf "unknown command\n%!"; request user | Quit -> return () | _ -> Printf.printf "exception %s\n%!" (Printexc.to_string ex); return () ) in prompt "user? " >>= fun user -> request user let () = Lwt_main.run (main())
1dffb1aa09356f28800c548305c8f004ea9a086f697a17bdf4ea2b81f203656c
scymtym/clim.flamegraph
package.lisp
(cl:defpackage #:clim.flamegraph.view.flat (:use #:cl) (:local-nicknames (#:model #:clim.flamegraph.model) (#:view #:clim.flamegraph.view)))
null
https://raw.githubusercontent.com/scymtym/clim.flamegraph/03b5e4f08b53af86a98afa975a8e7a29d0ddd3a7/src/view/flat/package.lisp
lisp
(cl:defpackage #:clim.flamegraph.view.flat (:use #:cl) (:local-nicknames (#:model #:clim.flamegraph.model) (#:view #:clim.flamegraph.view)))
c3d652f7cec74eaac23d3285c008b57149f4083fef04d75f1c15bc35284ad54b
rwmjones/guestfs-tools
cache.mli
virt - builder * Copyright ( C ) 2013 - 2023 Red Hat Inc. * * This program is free software ; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation ; either version 2 of the License , or * ( at your option ) any later version . * * This program is distributed in the hope that it will be useful , * but WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the * GNU General Public License for more details . * * You should have received a copy of the GNU General Public License along * with this program ; if not , write to the Free Software Foundation , Inc. , * 51 Franklin Street , Fifth Floor , Boston , USA . * Copyright (C) 2013-2023 Red Hat Inc. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. *) (** This module represents a local cache. *) val clean_cachedir : string -> unit (** [clean_cachedir dir] clean the specified cache directory. *) type t (** The abstract data type. *) val create : directory:string -> t (** Create the abstract type. *) val cache_of_name : t -> string -> Index.arch -> Utils.revision -> string (** [cache_of_name t name arch revision] return the filename of the cached file. (Note: It doesn't check if the filename exists, this is just a simple string transformation). *) val is_cached : t -> string -> Index.arch -> Utils.revision -> bool (** [is_cached t name arch revision] return whether the file with specified name, architecture and revision is cached. *) val print_item_status : t -> header:bool -> (string * Index.arch * Utils.revision) list -> unit (** [print_item_status t header items] print the status in the cache of the specified items (which are tuples of name, architecture, and revision). If [~header:true] then display a header with the path of the cache. *)
null
https://raw.githubusercontent.com/rwmjones/guestfs-tools/57423d907270526ea664ff15601cce956353820e/builder/cache.mli
ocaml
* This module represents a local cache. * [clean_cachedir dir] clean the specified cache directory. * The abstract data type. * Create the abstract type. * [cache_of_name t name arch revision] return the filename of the cached file. (Note: It doesn't check if the filename exists, this is just a simple string transformation). * [is_cached t name arch revision] return whether the file with specified name, architecture and revision is cached. * [print_item_status t header items] print the status in the cache of the specified items (which are tuples of name, architecture, and revision). If [~header:true] then display a header with the path of the cache.
virt - builder * Copyright ( C ) 2013 - 2023 Red Hat Inc. * * This program is free software ; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation ; either version 2 of the License , or * ( at your option ) any later version . * * This program is distributed in the hope that it will be useful , * but WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the * GNU General Public License for more details . * * You should have received a copy of the GNU General Public License along * with this program ; if not , write to the Free Software Foundation , Inc. , * 51 Franklin Street , Fifth Floor , Boston , USA . * Copyright (C) 2013-2023 Red Hat Inc. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. *) val clean_cachedir : string -> unit type t val create : directory:string -> t val cache_of_name : t -> string -> Index.arch -> Utils.revision -> string val is_cached : t -> string -> Index.arch -> Utils.revision -> bool val print_item_status : t -> header:bool -> (string * Index.arch * Utils.revision) list -> unit
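The record above only declares the cache interface, so here is a minimal OCaml sketch of how a caller might drive it. It is an illustration, not code from the repository: the assumption that the compilation unit is visible as a module named Cache, the directory path, and the helper name check_and_report are all made up for the example; the arch and revision values are taken as parameters because their constructors live in Index and Utils, which are not part of this record.

(* Hypothetical driver over the Cache interface above. *)
let check_and_report ~directory name arch revision =
  let cache = Cache.create ~directory in
  (* Print the cache status of the single item we care about. *)
  Cache.print_item_status cache ~header:true [ (name, arch, revision) ];
  if Cache.is_cached cache name arch revision then
    (* cache_of_name is a plain string transformation; it does not check
       that the file really exists on disk. *)
    print_endline ("cached at " ^ Cache.cache_of_name cache name arch revision)
  else
    print_endline (name ^ " is not cached yet")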
2ec13434942dfd0782fcd5339adcf85e467646af37f279dce67f89681d1aaccc
apauley/hledger-flow
Types.hs
# LANGUAGE FlexibleInstances # module Hledger.Flow.Types where import qualified Turtle (ExitCode, NominalDiffTime, Shell, Line) import qualified Data.Text as T import Hledger.Flow.PathHelpers type BaseDir = AbsDir type RunDir = RelDir data LogMessage = StdOut T.Text | StdErr T.Text | Terminate deriving (Show) type FullOutput = (Turtle.ExitCode, T.Text, T.Text) type FullTimedOutput = (FullOutput, Turtle.NominalDiffTime) type ProcFun = T.Text -> [T.Text] -> Turtle.Shell Turtle.Line -> IO FullOutput type ProcInput = (T.Text, [T.Text], Turtle.Shell Turtle.Line) data HledgerInfo = HledgerInfo { hlPath :: AbsFile , hlVersion :: T.Text } deriving (Show) class HasVerbosity a where verbose :: a -> Bool class HasBaseDir a where baseDir :: a -> BaseDir class HasRunDir a where importRunDir :: a -> RunDir class HasSequential a where sequential :: a -> Bool class HasBatchSize a where batchSize :: a -> Int
null
https://raw.githubusercontent.com/apauley/hledger-flow/bec3ec0ac223d097f6f784297314ae2e108f1016/src/Hledger/Flow/Types.hs
haskell
# LANGUAGE FlexibleInstances # module Hledger.Flow.Types where import qualified Turtle (ExitCode, NominalDiffTime, Shell, Line) import qualified Data.Text as T import Hledger.Flow.PathHelpers type BaseDir = AbsDir type RunDir = RelDir data LogMessage = StdOut T.Text | StdErr T.Text | Terminate deriving (Show) type FullOutput = (Turtle.ExitCode, T.Text, T.Text) type FullTimedOutput = (FullOutput, Turtle.NominalDiffTime) type ProcFun = T.Text -> [T.Text] -> Turtle.Shell Turtle.Line -> IO FullOutput type ProcInput = (T.Text, [T.Text], Turtle.Shell Turtle.Line) data HledgerInfo = HledgerInfo { hlPath :: AbsFile , hlVersion :: T.Text } deriving (Show) class HasVerbosity a where verbose :: a -> Bool class HasBaseDir a where baseDir :: a -> BaseDir class HasRunDir a where importRunDir :: a -> RunDir class HasSequential a where sequential :: a -> Bool class HasBatchSize a where batchSize :: a -> Int
ec01995c7940b1947e7641e7e1c41c3d4ec8633d6bb44c961bdbfcbe87a1394e
Kappa-Dev/KappaTools
run_cli_args.ml
(******************************************************************************) (* _ __ * The Kappa Language *) | |/ / * Copyright 2010 - 2020 CNRS - Harvard Medical School - INRIA - IRIF (* | ' / *********************************************************************) (* | . \ * This file is distributed under the terms of the *) (* |_|\_\ * GNU Lesser General Public License Version 3 *) (******************************************************************************) type t = { mutable inputKappaFileNames : string list; mutable minValue : float option; mutable maxValue : float option; mutable plotPeriod : float option; mutable outputDataFile : string option; mutable outputDirectory : string; mutable batchmode : bool; mutable interactive : bool; mutable syntaxVersion : Ast.syntax_version; } type t_gui = { inputKappaFileNames_gui : string list ref; minValue_gui : float option ref; maxValue_gui : float option ref; plotPeriod_gui : float option ref; outputDataFile_gui : string option ref; outputDirectory_gui : string ref; syntaxVersion_gui : string ref; batchmode_gui : string ref; } let default : t = { inputKappaFileNames = []; minValue = None ; maxValue = None; plotPeriod = None; outputDataFile = None; outputDirectory = "."; syntaxVersion = Ast.V4; batchmode = false; interactive = false; } let default_gui = { inputKappaFileNames_gui = ref []; minValue_gui = ref (Some 0.); maxValue_gui = ref (Some 1.); plotPeriod_gui = ref (Some 0.01); outputDataFile_gui = ref (Some "data.csv"); outputDirectory_gui = ref "."; syntaxVersion_gui = ref "4"; batchmode_gui = ref "interactive"; } let rec aux l accu = match l with | (v,var_val)::tail -> aux tail ((v, (try Nbr.of_string var_val with Failure _ -> raise (Arg.Bad ("\""^var_val^"\" is not a valid value")))) ::accu) | [] -> accu let get_from_gui t_gui = { minValue = !(t_gui.minValue_gui); maxValue = !(t_gui.maxValue_gui); plotPeriod = !(t_gui.plotPeriod_gui); inputKappaFileNames = !(t_gui.inputKappaFileNames_gui); outputDataFile = !(t_gui.outputDataFile_gui); outputDirectory = !(t_gui.outputDirectory_gui); syntaxVersion = (match !(t_gui.syntaxVersion_gui) with | "3" | "v3" | "V3" -> Ast.V3 | "4" | "v4" | "V4" -> Ast.V4 | _s -> Ast.V4); batchmode = (Tools.lowercase (!(t_gui.batchmode_gui)))="batch" ; interactive = (Tools.lowercase (!(t_gui.batchmode_gui)))="interactive"; } let copy_from_gui t_gui t = let t_tmp = get_from_gui t_gui in t.minValue <- t_tmp.minValue; t.maxValue <- t_tmp.maxValue; t.plotPeriod <- t_tmp.plotPeriod; t.inputKappaFileNames <- t_tmp.inputKappaFileNames; t.outputDataFile <- t_tmp.outputDataFile; t.outputDirectory <- t_tmp.outputDirectory; t.syntaxVersion <- t_tmp.syntaxVersion ; t.batchmode <- t_tmp.batchmode ; t.interactive <- t_tmp.interactive let options_gen (t :t) (t_gui :t_gui) : (string * Arg.spec * Superarg.spec * string * (Superarg.category * Superarg.position) list * Superarg.level) list = [ ("-i", Arg.String (fun fic -> t.inputKappaFileNames <- fic::t.inputKappaFileNames), Superarg.String_list t_gui.inputKappaFileNames_gui, "name of a kappa file to use as input (can be used multiple times for multiple input files)", [],Superarg.Hidden); ("-initial", Arg.Float (fun time -> t.minValue <- Some time), (Superarg.Float_opt t_gui.minValue_gui), "Min time of simulation (arbitrary time unit)", [Common_args.data_set,0; Common_args.integration_settings,0], Superarg.Normal); ("-l", Arg.Float(fun time -> t.maxValue <- Some time), (Superarg.Float_opt t_gui.maxValue_gui), "Limit of the simulation", [ Common_args.data_set,1; 
Common_args.integration_settings,1],Superarg.Normal); ("-t", Arg.Float (fun f -> raise (Arg.Bad ("Option '-t' has been replace by '[-u time] -l "^ string_of_float f^"'"))), (Superarg.Float_opt t_gui.maxValue_gui), "Deprecated option", [],Superarg.Hidden); ("-p", Arg.Float (fun pointNumberValue -> t.plotPeriod <- Some pointNumberValue), Superarg.Float_opt t_gui.plotPeriod_gui, "plot period: time interval between points in plot (default: 1.0)", [Common_args.data_set,2;Common_args.integration_settings,2],Superarg.Normal); ("-o", Arg.String (fun outputDataFile -> t.outputDataFile <- Some outputDataFile), Superarg.String_opt t_gui.outputDataFile_gui, "file name for data output", [ Common_args.data_set,3; Common_args.output,3; Common_args.integration_settings,3], Superarg.Hidden) ; ("-d", Arg.String (fun outputDirectory -> t.outputDirectory <- outputDirectory), Superarg.String t_gui.outputDirectory_gui, "Specifies directory name where output file(s) should be stored", [ Common_args.data_set,100; Common_args.output,100; Common_args.semantics,100; Common_args.integration_settings,100; Common_args.model_reduction,100; Common_args.static_analysis,100; Common_args.debug_mode,100 ], Superarg.Normal) ; ("-mode", Arg.String (fun m -> if m = "batch" then t.batchmode <- true else if m = "interactive" then t.interactive <- true), Superarg.Choice (["batch","batch mode";"interactive","interactive mode"],[],t_gui.batchmode_gui), "either \"batch\" to never ask anything to the user or \"interactive\" to ask something before doing anything", [Common_args.output,7;Common_args.debug_mode,7], Superarg.Expert) ; ("-syntax", Arg.String (function | "3" | "v3" | "V3" -> t.syntaxVersion <- Ast.V3 | "4" | "v4" | "V4" -> t.syntaxVersion <- Ast.V4 | s -> raise (Arg.Bad ("\""^s^"\" is not a valid syntax version")) ), Superarg.Choice (["3","old";"v3","old";"V3","old";"4","new";"v4","new";"V4","new"],[],t_gui.syntaxVersion_gui), "Use explicit notation for free site", [Common_args.semantics,8], Superarg.Normal); ] let options t = List.rev_map (fun (a,b,_,c,_,_) -> a,b,c) (List.rev (options_gen t default_gui)) let options_gui t_gui = (List.rev_map (fun (a,_,b,c,d,e) -> a,b,c,d,e) (List.rev (options_gen default t_gui))) @[ "--output-plot", Superarg.String_opt t_gui.outputDataFile_gui, "file name for data output", [ Common_args.output,1; Common_args.semantics,1; Common_args.integration_settings,1 ],Superarg.Normal; "--data-file", Superarg.String_opt t_gui.outputDataFile_gui, "file name for data output", [ Common_args.output,1; Common_args.semantics,2; Common_args.integration_settings,3 ],Superarg.Hidden;]
null
https://raw.githubusercontent.com/Kappa-Dev/KappaTools/eef2337e8688018eda47ccc838aea809cae68de7/core/cli/run_cli_args.ml
ocaml
**************************************************************************** _ __ * The Kappa Language | ' / ******************************************************************** | . \ * This file is distributed under the terms of the |_|\_\ * GNU Lesser General Public License Version 3 ****************************************************************************
| |/ / * Copyright 2010 - 2020 CNRS - Harvard Medical School - INRIA - IRIF type t = { mutable inputKappaFileNames : string list; mutable minValue : float option; mutable maxValue : float option; mutable plotPeriod : float option; mutable outputDataFile : string option; mutable outputDirectory : string; mutable batchmode : bool; mutable interactive : bool; mutable syntaxVersion : Ast.syntax_version; } type t_gui = { inputKappaFileNames_gui : string list ref; minValue_gui : float option ref; maxValue_gui : float option ref; plotPeriod_gui : float option ref; outputDataFile_gui : string option ref; outputDirectory_gui : string ref; syntaxVersion_gui : string ref; batchmode_gui : string ref; } let default : t = { inputKappaFileNames = []; minValue = None ; maxValue = None; plotPeriod = None; outputDataFile = None; outputDirectory = "."; syntaxVersion = Ast.V4; batchmode = false; interactive = false; } let default_gui = { inputKappaFileNames_gui = ref []; minValue_gui = ref (Some 0.); maxValue_gui = ref (Some 1.); plotPeriod_gui = ref (Some 0.01); outputDataFile_gui = ref (Some "data.csv"); outputDirectory_gui = ref "."; syntaxVersion_gui = ref "4"; batchmode_gui = ref "interactive"; } let rec aux l accu = match l with | (v,var_val)::tail -> aux tail ((v, (try Nbr.of_string var_val with Failure _ -> raise (Arg.Bad ("\""^var_val^"\" is not a valid value")))) ::accu) | [] -> accu let get_from_gui t_gui = { minValue = !(t_gui.minValue_gui); maxValue = !(t_gui.maxValue_gui); plotPeriod = !(t_gui.plotPeriod_gui); inputKappaFileNames = !(t_gui.inputKappaFileNames_gui); outputDataFile = !(t_gui.outputDataFile_gui); outputDirectory = !(t_gui.outputDirectory_gui); syntaxVersion = (match !(t_gui.syntaxVersion_gui) with | "3" | "v3" | "V3" -> Ast.V3 | "4" | "v4" | "V4" -> Ast.V4 | _s -> Ast.V4); batchmode = (Tools.lowercase (!(t_gui.batchmode_gui)))="batch" ; interactive = (Tools.lowercase (!(t_gui.batchmode_gui)))="interactive"; } let copy_from_gui t_gui t = let t_tmp = get_from_gui t_gui in t.minValue <- t_tmp.minValue; t.maxValue <- t_tmp.maxValue; t.plotPeriod <- t_tmp.plotPeriod; t.inputKappaFileNames <- t_tmp.inputKappaFileNames; t.outputDataFile <- t_tmp.outputDataFile; t.outputDirectory <- t_tmp.outputDirectory; t.syntaxVersion <- t_tmp.syntaxVersion ; t.batchmode <- t_tmp.batchmode ; t.interactive <- t_tmp.interactive let options_gen (t :t) (t_gui :t_gui) : (string * Arg.spec * Superarg.spec * string * (Superarg.category * Superarg.position) list * Superarg.level) list = [ ("-i", Arg.String (fun fic -> t.inputKappaFileNames <- fic::t.inputKappaFileNames), Superarg.String_list t_gui.inputKappaFileNames_gui, "name of a kappa file to use as input (can be used multiple times for multiple input files)", [],Superarg.Hidden); ("-initial", Arg.Float (fun time -> t.minValue <- Some time), (Superarg.Float_opt t_gui.minValue_gui), "Min time of simulation (arbitrary time unit)", [Common_args.data_set,0; Common_args.integration_settings,0], Superarg.Normal); ("-l", Arg.Float(fun time -> t.maxValue <- Some time), (Superarg.Float_opt t_gui.maxValue_gui), "Limit of the simulation", [ Common_args.data_set,1; Common_args.integration_settings,1],Superarg.Normal); ("-t", Arg.Float (fun f -> raise (Arg.Bad ("Option '-t' has been replace by '[-u time] -l "^ string_of_float f^"'"))), (Superarg.Float_opt t_gui.maxValue_gui), "Deprecated option", [],Superarg.Hidden); ("-p", Arg.Float (fun pointNumberValue -> t.plotPeriod <- Some pointNumberValue), Superarg.Float_opt t_gui.plotPeriod_gui, "plot period: time interval 
between points in plot (default: 1.0)", [Common_args.data_set,2;Common_args.integration_settings,2],Superarg.Normal); ("-o", Arg.String (fun outputDataFile -> t.outputDataFile <- Some outputDataFile), Superarg.String_opt t_gui.outputDataFile_gui, "file name for data output", [ Common_args.data_set,3; Common_args.output,3; Common_args.integration_settings,3], Superarg.Hidden) ; ("-d", Arg.String (fun outputDirectory -> t.outputDirectory <- outputDirectory), Superarg.String t_gui.outputDirectory_gui, "Specifies directory name where output file(s) should be stored", [ Common_args.data_set,100; Common_args.output,100; Common_args.semantics,100; Common_args.integration_settings,100; Common_args.model_reduction,100; Common_args.static_analysis,100; Common_args.debug_mode,100 ], Superarg.Normal) ; ("-mode", Arg.String (fun m -> if m = "batch" then t.batchmode <- true else if m = "interactive" then t.interactive <- true), Superarg.Choice (["batch","batch mode";"interactive","interactive mode"],[],t_gui.batchmode_gui), "either \"batch\" to never ask anything to the user or \"interactive\" to ask something before doing anything", [Common_args.output,7;Common_args.debug_mode,7], Superarg.Expert) ; ("-syntax", Arg.String (function | "3" | "v3" | "V3" -> t.syntaxVersion <- Ast.V3 | "4" | "v4" | "V4" -> t.syntaxVersion <- Ast.V4 | s -> raise (Arg.Bad ("\""^s^"\" is not a valid syntax version")) ), Superarg.Choice (["3","old";"v3","old";"V3","old";"4","new";"v4","new";"V4","new"],[],t_gui.syntaxVersion_gui), "Use explicit notation for free site", [Common_args.semantics,8], Superarg.Normal); ] let options t = List.rev_map (fun (a,b,_,c,_,_) -> a,b,c) (List.rev (options_gen t default_gui)) let options_gui t_gui = (List.rev_map (fun (a,_,b,c,d,e) -> a,b,c,d,e) (List.rev (options_gen default t_gui))) @[ "--output-plot", Superarg.String_opt t_gui.outputDataFile_gui, "file name for data output", [ Common_args.output,1; Common_args.semantics,1; Common_args.integration_settings,1 ],Superarg.Normal; "--data-file", Superarg.String_opt t_gui.outputDataFile_gui, "file name for data output", [ Common_args.output,1; Common_args.semantics,2; Common_args.integration_settings,3 ],Superarg.Hidden;]
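Because options t returns a plain (string * Arg.spec * string) list, it can be handed directly to the standard library's Arg.parse. The sketch below is an assumption-laden illustration: the module name Run_cli_args and the choice to treat anonymous arguments as input Kappa files (mirroring the -i option) are guesses, not something stated in this record.

(* Hypothetical driver: parse the command line into the mutable record
   Run_cli_args.default, then read a field back. *)
let () =
  let cli = Run_cli_args.default in
  Arg.parse
    (Run_cli_args.options cli)
    (* Treating bare arguments as input Kappa files is an assumption. *)
    (fun f ->
      cli.Run_cli_args.inputKappaFileNames <-
        f :: cli.Run_cli_args.inputKappaFileNames)
    "Usage: sim [options] <kappa files>";
  List.iter print_endline cli.Run_cli_args.inputKappaFileNames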
d8aecc1708c8036fa547b0bae7650d92e26718a2717630071737445f8748a224
Perry961002/SICP
exe3.69-triples.scm
;First we describe (triples S T U); as before, we split it into three parts:
;the triple (S0, T0, U0); all the other triples (S0, Ti, Uj) of the first plane, with 0 <= i <= j and j >= 1; and the remaining triples (Sr, Tr, Ur)
(define (triples s t u)
  (stream-cons (list (stream-car s) (stream-car t) (stream-car u))
               (interleave
                (stream-map (lambda (x) (cons (stream-car s) x))
                            (pairs t (stream-cdr u)))
                (triples (stream-cdr s) (stream-cdr t) (stream-cdr u)))))
;Pythagorean triples
(define (square x) (* x x))
(define pythagoras
  (stream-filter
   (lambda (x) (= (+ (square (car x)) (square (cadr x)))
                  (square (caddr x))))
   (triples integers integers integers)))
(display-top10 pythagoras)
;We can only compute this many; the later elements overflow
;(3 4 5) (6 8 10) (5 12 13) (9 12 15) (8 15 17) (12 16 20)
null
https://raw.githubusercontent.com/Perry961002/SICP/89d539e600a73bec42d350592f0ac626e041bf16/Chap3/exercise/exe3.69-triples.scm
scheme
First we describe (triples S T U); as before, we split it into three parts Pythagorean triples We can only compute this many; the later elements overflow
the triple (S0, T0, U0); all the other triples (S0, Ti, Uj) of the first plane, with 0 <= i <= j and j >= 1; and the remaining triples (Sr, Tr, Ur)
(define (triples s t u)
  (stream-cons (list (stream-car s) (stream-car t) (stream-car u))
               (interleave
                (stream-map (lambda (x) (cons (stream-car s) x))
                            (pairs t (stream-cdr u)))
                (triples (stream-cdr s) (stream-cdr t) (stream-cdr u)))))
(define (square x) (* x x))
(define pythagoras
  (stream-filter
   (lambda (x) (= (+ (square (car x)) (square (cadr x)))
                  (square (caddr x))))
   (triples integers integers integers)))
(display-top10 pythagoras)
(3 4 5) (6 8 10) (5 12 13) (9 12 15) (8 15 17) (12 16 20)
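As a quick cross-check of the output quoted in this record, here is a small eager sketch that enumerates bounded triples and filters for the Pythagorean property. For consistency with the other sketches in this document it is written in OCaml rather than Scheme, and it does not reproduce the stream-interleaving technique of (triples integers integers integers); the bound 20 and the function name pythagorean are choices made only for this illustration.

(* Eager, bounded enumeration of i <= j <= k with i^2 + j^2 = k^2.
   This only confirms the six triples listed above; the Scheme version
   produces the same values lazily via stream interleaving. *)
let pythagorean bound =
  let out = ref [] in
  for k = 1 to bound do
    for j = 1 to k do
      for i = 1 to j do
        if (i * i) + (j * j) = k * k then out := (i, j, k) :: !out
      done
    done
  done;
  List.rev !out

let () =
  pythagorean 20
  |> List.iter (fun (i, j, k) -> Printf.printf "(%d %d %d)\n" i j k)

With bound 20 this prints exactly (3 4 5), (6 8 10), (5 12 13), (9 12 15), (8 15 17) and (12 16 20).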
851155fb0aad836d5b01499cda1a10a5391b28803cc749eebd065035ddc256a5
falgon/htcc
Spec.hs
{-# LANGUAGE OverloadedStrings #-} module Main where import Codec.Binary.UTF8.String (decodeString) import Control.Exception (finally) import qualified Data.ByteString.Char8 as B import qualified Data.Text as T import qualified Data.Text.IO as T import Dhall.JSON (omitNull) import Dhall.Yaml (Options (..), defaultOptions, dhallToYaml) import qualified Options.Applicative as OA import System.Directory (createDirectoryIfMissing) import System.FilePath ((</>)) import System.Process (readCreateProcess, shell) import qualified Tests.SubProcTests as SubProcTests import Tests.Utils workDir :: FilePath workDir = "/tmp" </> "htcc" specPath :: FilePath specPath = workDir </> "spec.s" dockerComposePath :: FilePath dockerComposePath = "./docker" </> "test.dhall" data Command = WithSubProc | WithDocker | WithSelf data Opts = Opts { optClean :: !Bool , optCmd :: !Command } subProcCmd :: OA.Mod OA.CommandFields Command subProcCmd = OA.command "subp" $ OA.info (pure WithSubProc) $ OA.progDesc "run tests with subprocess" dockerCmd :: OA.Mod OA.CommandFields Command dockerCmd = OA.command "docker" $ OA.info (pure WithDocker) $ OA.progDesc "run tests in docker container" selfCmd :: OA.Mod OA.CommandFields Command selfCmd = OA.command "self" $ OA.info (pure WithSelf) $ OA.progDesc "run the test using htcc's processing power" cleanOpt :: OA.Parser Bool cleanOpt = OA.switch $ mconcat [ OA.long "clean" , OA.help "clean the docker container" ] programOptions :: OA.Parser Opts programOptions = Opts <$> cleanOpt <*> OA.hsubparser (mconcat [ subProcCmd , dockerCmd , selfCmd ]) optsParser :: OA.ParserInfo Opts optsParser = OA.info (OA.helper <*> programOptions) $ mconcat [ OA.fullDesc , OA.progDesc $ "The htcc unit tester" ] genTestAsm :: IO () genTestAsm = do createDirectoryIfMissing False workDir execErrFin $ "stack exec htcc -- " <> T.pack testCoreFile <> " > " <> T.pack specPath where testCoreFile = "./test" </> "Tests" </> "csrc" </> "test_core.c" createProcessDhallDocker :: FilePath -> String -> IO () createProcessDhallDocker fp cmd = T.readFile fp >>= dhallToYaml (defaultOptions { explain = True, omission = omitNull }) (Just fp) >>= readCreateProcess (shell $ "docker-compose -f - " <> cmd) . decodeString . B.unpack >>= putStrLn main :: IO () main = do opts <- OA.execParser optsParser case optCmd opts of WithSubProc -> SubProcTests.exec WithDocker -> let runDhallDocker = createProcessDhallDocker dockerComposePath in if optClean opts then runDhallDocker "down --rmi all" else flip finally (clean [workDir]) $ genTestAsm >> runDhallDocker "up --build" WithSelf -> flip finally (clean [workDir, "spec"]) $ do genTestAsm execErrFin $ "gcc -no-pie -o spec " <> T.pack specPath execErrFin "./spec"
null
https://raw.githubusercontent.com/falgon/htcc/3cef6fc362b00d4bc0ae261cba567bfd9c69b3c5/test/Spec.hs
haskell
# LANGUAGE OverloadedStrings #
module Main where import Codec.Binary.UTF8.String (decodeString) import Control.Exception (finally) import qualified Data.ByteString.Char8 as B import qualified Data.Text as T import qualified Data.Text.IO as T import Dhall.JSON (omitNull) import Dhall.Yaml (Options (..), defaultOptions, dhallToYaml) import qualified Options.Applicative as OA import System.Directory (createDirectoryIfMissing) import System.FilePath ((</>)) import System.Process (readCreateProcess, shell) import qualified Tests.SubProcTests as SubProcTests import Tests.Utils workDir :: FilePath workDir = "/tmp" </> "htcc" specPath :: FilePath specPath = workDir </> "spec.s" dockerComposePath :: FilePath dockerComposePath = "./docker" </> "test.dhall" data Command = WithSubProc | WithDocker | WithSelf data Opts = Opts { optClean :: !Bool , optCmd :: !Command } subProcCmd :: OA.Mod OA.CommandFields Command subProcCmd = OA.command "subp" $ OA.info (pure WithSubProc) $ OA.progDesc "run tests with subprocess" dockerCmd :: OA.Mod OA.CommandFields Command dockerCmd = OA.command "docker" $ OA.info (pure WithDocker) $ OA.progDesc "run tests in docker container" selfCmd :: OA.Mod OA.CommandFields Command selfCmd = OA.command "self" $ OA.info (pure WithSelf) $ OA.progDesc "run the test using htcc's processing power" cleanOpt :: OA.Parser Bool cleanOpt = OA.switch $ mconcat [ OA.long "clean" , OA.help "clean the docker container" ] programOptions :: OA.Parser Opts programOptions = Opts <$> cleanOpt <*> OA.hsubparser (mconcat [ subProcCmd , dockerCmd , selfCmd ]) optsParser :: OA.ParserInfo Opts optsParser = OA.info (OA.helper <*> programOptions) $ mconcat [ OA.fullDesc , OA.progDesc $ "The htcc unit tester" ] genTestAsm :: IO () genTestAsm = do createDirectoryIfMissing False workDir execErrFin $ "stack exec htcc -- " <> T.pack testCoreFile <> " > " <> T.pack specPath where testCoreFile = "./test" </> "Tests" </> "csrc" </> "test_core.c" createProcessDhallDocker :: FilePath -> String -> IO () createProcessDhallDocker fp cmd = T.readFile fp >>= dhallToYaml (defaultOptions { explain = True, omission = omitNull }) (Just fp) >>= readCreateProcess (shell $ "docker-compose -f - " <> cmd) . decodeString . B.unpack >>= putStrLn main :: IO () main = do opts <- OA.execParser optsParser case optCmd opts of WithSubProc -> SubProcTests.exec WithDocker -> let runDhallDocker = createProcessDhallDocker dockerComposePath in if optClean opts then runDhallDocker "down --rmi all" else flip finally (clean [workDir]) $ genTestAsm >> runDhallDocker "up --build" WithSelf -> flip finally (clean [workDir, "spec"]) $ do genTestAsm execErrFin $ "gcc -no-pie -o spec " <> T.pack specPath execErrFin "./spec"
ae3a06cf5d1063fea309a2380b9c60f8bbe5f5955b805eeadafccf30e400089b
tanakh/ICFP2011
Random.hs
{-# OPTIONS -Wall #-} import Control.Monad import Data.Vector ((!)) import qualified Data.Vector as V import LTG import Prelude hiding (putStrLn) import System.Environment import System.Random slotRange :: Int slotRange = 8 main :: IO () main = do -- uncomment next line to fix the seed -- setStdGen $ mkStdGen 0x9b (side:_) <- fmap (map read) getArgs when (side==(1::Int)) skip play slotRange skip :: IO () skip = do _ <- getLine _ <- getLine _ <- getLine return () play :: Int -> IO () play slotRange' = do ci <- randomRIO (0, V.length cards-1) s <- randomRIO (0, slotRange'-1) lr <- randomRIO (0, 1::Int) let c = cards ! ci if lr == 0 then (s $< c) else (c $> s) skip play slotRange
null
https://raw.githubusercontent.com/tanakh/ICFP2011/db0d670cdbe12e9cef4242d6ab202a98c254412e/nushio/Random.hs
haskell
# OPTIONS -Wall # uncomment next line to fix the seed setStdGen $ mkStdGen 0x9b
import Control.Monad import Data.Vector ((!)) import qualified Data.Vector as V import LTG import Prelude hiding (putStrLn) import System.Environment import System.Random slotRange :: Int slotRange = 8 main :: IO () main = do (side:_) <- fmap (map read) getArgs when (side==(1::Int)) skip play slotRange skip :: IO () skip = do _ <- getLine _ <- getLine _ <- getLine return () play :: Int -> IO () play slotRange' = do ci <- randomRIO (0, V.length cards-1) s <- randomRIO (0, slotRange'-1) lr <- randomRIO (0, 1::Int) let c = cards ! ci if lr == 0 then (s $< c) else (c $> s) skip play slotRange
f94e1867b383978204f400d242cde5116acaca335f95c847b046248b573b2e53
clj-commons/camel-snake-kebab
core.cljc
(ns camel-snake-kebab.core (:require [clojure.string] [camel-snake-kebab.internals.misc :as misc] #?(:clj [camel-snake-kebab.internals.macros :refer [defconversion]] :cljs [camel-snake-kebab.internals.alter-name])) ;; Needed for expansion of defconversion #?(:cljs (:require-macros [camel-snake-kebab.internals.macros :refer [defconversion]]))) (declare ->PascalCase ->Camel_Snake_Case ->camelCase ->SCREAMING_SNAKE_CASE ->snake_case ->kebab-case ->HTTP-Header-Case ->PascalCaseKeyword ->camelCaseKeyword ->SCREAMING_SNAKE_CASE_KEYWORD ->snake_case_keyword ->kebab-case-keyword ->Camel_Snake_Case_Keyword ->HTTP-Header-Case-Keyword ->PascalCaseString ->camelCaseString ->SCREAMING_SNAKE_CASE_STRING ->snake_case_string ->kebab-case-string ->Camel_Snake_Case_String ->HTTP-Header-Case-String ->PascalCaseSymbol ->camelCaseSymbol ->SCREAMING_SNAKE_CASE_SYMBOL ->snake_case_symbol ->kebab-case-symbol ->Camel_Snake_Case_Symbol ->HTTP-Header-Case-Symbol) (defn convert-case "Converts the case of a string according to the rule for the first word, remaining words, and the separator." [first-fn rest-fn sep s & rest] (apply misc/convert-case first-fn rest-fn sep s rest)) ;; These are fully qualified to workaround some issue with ClojureScript: (defconversion "PascalCase" clojure.string/capitalize clojure.string/capitalize "") (defconversion "Camel_Snake_Case" clojure.string/capitalize clojure.string/capitalize "_") (defconversion "camelCase" clojure.string/lower-case clojure.string/capitalize "" ) (defconversion "SCREAMING_SNAKE_CASE" clojure.string/upper-case clojure.string/upper-case "_") (defconversion "snake_case" clojure.string/lower-case clojure.string/lower-case "_") (defconversion "kebab-case" clojure.string/lower-case clojure.string/lower-case "-") (defconversion "HTTP-Header-Case" camel-snake-kebab.internals.misc/capitalize-http-header camel-snake-kebab.internals.misc/capitalize-http-header "-")
null
https://raw.githubusercontent.com/clj-commons/camel-snake-kebab/5f1b76358fac9eb71f0d870c892106cdd430f2a8/src/camel_snake_kebab/core.cljc
clojure
Needed for expansion of defconversion These are fully qualified to workaround some issue with ClojureScript:
(ns camel-snake-kebab.core (:require [clojure.string] [camel-snake-kebab.internals.misc :as misc] #?(:clj [camel-snake-kebab.internals.macros :refer [defconversion]] :cljs [camel-snake-kebab.internals.alter-name])) #?(:cljs (:require-macros [camel-snake-kebab.internals.macros :refer [defconversion]]))) (declare ->PascalCase ->Camel_Snake_Case ->camelCase ->SCREAMING_SNAKE_CASE ->snake_case ->kebab-case ->HTTP-Header-Case ->PascalCaseKeyword ->camelCaseKeyword ->SCREAMING_SNAKE_CASE_KEYWORD ->snake_case_keyword ->kebab-case-keyword ->Camel_Snake_Case_Keyword ->HTTP-Header-Case-Keyword ->PascalCaseString ->camelCaseString ->SCREAMING_SNAKE_CASE_STRING ->snake_case_string ->kebab-case-string ->Camel_Snake_Case_String ->HTTP-Header-Case-String ->PascalCaseSymbol ->camelCaseSymbol ->SCREAMING_SNAKE_CASE_SYMBOL ->snake_case_symbol ->kebab-case-symbol ->Camel_Snake_Case_Symbol ->HTTP-Header-Case-Symbol) (defn convert-case "Converts the case of a string according to the rule for the first word, remaining words, and the separator." [first-fn rest-fn sep s & rest] (apply misc/convert-case first-fn rest-fn sep s rest)) (defconversion "PascalCase" clojure.string/capitalize clojure.string/capitalize "") (defconversion "Camel_Snake_Case" clojure.string/capitalize clojure.string/capitalize "_") (defconversion "camelCase" clojure.string/lower-case clojure.string/capitalize "" ) (defconversion "SCREAMING_SNAKE_CASE" clojure.string/upper-case clojure.string/upper-case "_") (defconversion "snake_case" clojure.string/lower-case clojure.string/lower-case "_") (defconversion "kebab-case" clojure.string/lower-case clojure.string/lower-case "-") (defconversion "HTTP-Header-Case" camel-snake-kebab.internals.misc/capitalize-http-header camel-snake-kebab.internals.misc/capitalize-http-header "-")
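The convert-case docstring in the record above describes every conversion as three pieces: a function for the first word, a function for the remaining words, and a separator. The following Haskell sketch (hypothetical helper names, and a much cruder word splitter than the real library, which also handles camelCase boundaries) illustrates that decomposition:

import Data.Char (toLower, toUpper)

-- convertCase firstF restF sep: transform the first word with firstF,
-- the remaining words with restF, and join everything with sep.
convertCase :: (String -> String) -> (String -> String) -> String -> String -> String
convertCase firstF restF sep s =
  case splitWords s of
    []       -> ""
    (w : ws) -> concat (firstF w : map (\x -> sep ++ restF x) ws)
  where
    -- naive splitter: treats '-' and '_' as the only word breaks
    splitWords = words . map (\c -> if c `elem` "-_" then ' ' else c)

capitalize :: String -> String
capitalize []       = []
capitalize (c : cs) = toUpper c : map toLower cs

toKebab, toPascal :: String -> String
toKebab  = convertCase (map toLower) (map toLower) "-"
toPascal = convertCase capitalize capitalize ""

main :: IO ()
main = do
  putStrLn (toKebab  "snake_case_example")   -- snake-case-example
  putStrLn (toPascal "kebab-case-example")   -- KebabCaseExample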
944944f8c3f36d400a6917bebf38a96d2b591e79a83d68f071c5f594eda001ad
malcolmreynolds/GSLL
monte-carlo.lisp
Monte Carlo Integration Sat Feb 3 2007 - 17:42 Time - stamp : < 2009 - 06 - 06 09:27:29EDT monte-carlo.lisp > $ Id$ (in-package :gsl) ;;; /usr/include/gsl/gsl_monte.h ;;; /usr/include/gsl/gsl_monte_plain.h /usr / include / gsl / gsl_monte_miser.h /usr / include / gsl / gsl_monte_vegas.h ;;;;**************************************************************************** PLAIN ;;;;**************************************************************************** (defmobject monte-carlo-plain "gsl_monte_plain" ((dim sizet)) "plain Monte Carlo integration" "Make and initialize a workspace for Monte Carlo integration in dimension dim." :initialize-suffix "init" :initialize-args nil) (defparameter *monte-carlo-default-samples-per-dimension* 150000) (defmfun monte-carlo-integrate-plain (function lower-limits upper-limits &optional (number-of-samples (* *monte-carlo-default-samples-per-dimension* (dim0 lower-limits))) (generator (make-random-number-generator +mt19937+ 0)) (state (make-monte-carlo-plain (dim0 lower-limits))) (scalars t)) "gsl_monte_plain_integrate" ((callback :pointer) ((c-pointer lower-limits) :pointer) ((c-pointer upper-limits) :pointer) ((dim0 lower-limits) sizet) (number-of-samples sizet) ((mpointer generator) :pointer) ((mpointer state) :pointer) (result (:pointer :double)) (abserr (:pointer :double))) :inputs (lower-limits upper-limits) :callbacks (callback fnstruct-dimension (dimension) (function :double (:input :double :cvector dim0) :slug)) :callback-dynamic (((dim0 lower-limits)) (function scalars)) "Uses the plain Monte Carlo algorithm to integrate the function f over the hypercubic region defined by the lower and upper limits in the arrays 'lower-limits and 'upper-limits, each a gsl-vector of length dim. The integration uses a fixed number of function calls number-of-samples, and obtains random sampling points using the random number generator 'generator. A previously allocated workspace 'state must be supplied. The result of the integration is returned with an estimated absolute error.") ;;;;**************************************************************************** ;;;; MISER ;;;;**************************************************************************** The MISER algorithm of Press and Farrar is based on recursive ;;; stratified sampling. This technique aims to reduce the overall ;;; integration error by concentrating integration points in the ;;; regions of highest variance. (defmobject monte-carlo-miser "gsl_monte_miser" ((dim sizet)) "miser Monte Carlo integration" "Make and initialize a workspace for Monte Carlo integration in dimension dim. The workspace is used to maintain the state of the integration." :initialize-suffix "init" :initialize-args nil) (export 'miser-parameter) (defmacro miser-parameter (workspace parameter) "Get or set with setf the parameter value for the MISER Monte Carlo integration method." 
;; (miser-parameter ws min-calls) ( setf ( miser - parameter ws min - calls ) 300 ) `(cffi:foreign-slot-value ,workspace 'miser-state ',parameter)) (defmfun monte-carlo-integrate-miser (function lower-limits upper-limits &optional (number-of-samples (* *monte-carlo-default-samples-per-dimension* (dim0 lower-limits))) (generator (make-random-number-generator +mt19937+ 0)) (state (make-monte-carlo-miser (dim0 lower-limits))) (scalars t)) "gsl_monte_miser_integrate" ((callback :pointer) ((c-pointer lower-limits) :pointer) ((c-pointer upper-limits) :pointer) ((dim0 lower-limits) sizet) (number-of-samples sizet) ((mpointer generator) :pointer) ((mpointer state) :pointer) (result (:pointer :double)) (abserr (:pointer :double))) :inputs (lower-limits upper-limits) :callbacks (callback fnstruct-dimension (dimension) (function :double (:input :double :cvector dim0) :slug)) :callback-dynamic (((dim0 lower-limits)) (function scalars)) "Uses the miser Monte Carlo algorithm to integrate the function f over the hypercubic region defined by the lower and upper limits in the arrays 'lower-limits and 'upper-limits, each a gsl-vector of the samelength The integration uses a fixed number of function calls number-of-samples, and obtains random sampling points using the random number generator 'generator. A previously allocated workspace 'state must be supplied. The result of the integration is returned with an estimated absolute error.") ;;;;**************************************************************************** VEGAS ;;;;**************************************************************************** The vegas algorithm of Lepage is based on importance sampling . It ;;; samples points from the probability distribution described by the function |f| , so that the points are concentrated in the regions ;;; that make the largest contribution to the integral. (defmobject monte-carlo-vegas "gsl_monte_vegas" ((dim sizet)) "vegas Monte Carlo integration" "Make and initialize a workspace for Monte Carlo integration in dimension dim. The workspace is used to maintain the state of the integration. Returns a pointer to vegas-state." :initialize-suffix "init" :initialize-args nil) (export 'vegas-parameter) (defmacro vegas-parameter (workspace parameter) "Get or set with setf the parameter value for the VEGAS Monte Carlo integration method." ;; (vegas-parameter ws bins-max) ( setf ( vegas - parameter ws bins - max ) 300 ) `(cffi:foreign-slot-value ,workspace 'vegas-state ',parameter)) (defmfun monte-carlo-integrate-vegas (function lower-limits upper-limits &optional (number-of-samples (* *monte-carlo-default-samples-per-dimension* (dim0 lower-limits))) (generator (make-random-number-generator +mt19937+ 0)) (state (make-monte-carlo-vegas (dim0 lower-limits))) (scalars t)) "gsl_monte_vegas_integrate" ((callback :pointer) ((c-pointer lower-limits) :pointer) ((c-pointer upper-limits) :pointer) ((dim0 lower-limits) sizet) (number-of-samples sizet) ((mpointer generator) :pointer) ((mpointer state) :pointer) (result (:pointer :double)) (abserr (:pointer :double))) :inputs (lower-limits upper-limits) :callbacks (callback fnstruct-dimension (dimension) (function :double (:input :double :cvector dim0) :slug)) :callback-dynamic (((dim0 lower-limits)) (function scalars)) "Uses the vegas Monte Carlo algorithm to integrate the function f over the dim-dimensional hypercubic region defined by the lower and upper limits in the arrays x1 and xu, each of the same length. 
The integration uses a fixed number of function calls number-of-samples, and obtains random sampling points using the random number generator r. A previously allocated workspace s must be supplied. The result of the integration is returned with an estimated absolute error. The result and its error estimate are based on a weighted average of independent samples. The chi-squared per degree of freedom for the weighted average is returned via the state struct component, s->chisq, and must be consistent with 1 for the weighted average to be reliable.") ;;;;**************************************************************************** ;;;; Examples and unit test ;;;;**************************************************************************** Example from Sec . 23.5 (defun mcrw (x y z) "Example function for Monte Carlo used in random walk studies." (* (/ (expt pi 3)) (/ (- 1 (* (cos x) (cos y) (cos z)))))) (defparameter *mc-lower* #m(0.0d0 0.0d0 0.0d0)) (defparameter *mc-upper* (make-marray 'double-float :initial-contents (list pi pi pi))) (defun random-walk-plain-example (&optional (nsamples 500000)) (monte-carlo-integrate-plain 'mcrw *mc-lower* *mc-upper* nsamples)) (defun random-walk-miser-example (&optional (nsamples 500000)) (monte-carlo-integrate-miser 'mcrw *mc-lower* *mc-upper* nsamples)) (defun random-walk-vegas-example (&optional (nsamples 500000)) (monte-carlo-integrate-vegas 'mcrw *mc-lower* *mc-upper* nsamples)) (save-test monte-carlo (random-walk-plain-example) (random-walk-miser-example) (random-walk-vegas-example))
null
https://raw.githubusercontent.com/malcolmreynolds/GSLL/2f722f12f1d08e1b9550a46e2a22adba8e1e52c4/calculus/monte-carlo.lisp
lisp
/usr/include/gsl/gsl_monte.h /usr/include/gsl/gsl_monte_plain.h **************************************************************************** **************************************************************************** **************************************************************************** MISER **************************************************************************** stratified sampling. This technique aims to reduce the overall integration error by concentrating integration points in the regions of highest variance. (miser-parameter ws min-calls) **************************************************************************** **************************************************************************** samples points from the probability distribution described by the that make the largest contribution to the integral. (vegas-parameter ws bins-max) **************************************************************************** Examples and unit test ****************************************************************************
Monte Carlo Integration Sat Feb 3 2007 - 17:42 Time - stamp : < 2009 - 06 - 06 09:27:29EDT monte-carlo.lisp > $ Id$ (in-package :gsl) /usr / include / gsl / gsl_monte_miser.h /usr / include / gsl / gsl_monte_vegas.h PLAIN (defmobject monte-carlo-plain "gsl_monte_plain" ((dim sizet)) "plain Monte Carlo integration" "Make and initialize a workspace for Monte Carlo integration in dimension dim." :initialize-suffix "init" :initialize-args nil) (defparameter *monte-carlo-default-samples-per-dimension* 150000) (defmfun monte-carlo-integrate-plain (function lower-limits upper-limits &optional (number-of-samples (* *monte-carlo-default-samples-per-dimension* (dim0 lower-limits))) (generator (make-random-number-generator +mt19937+ 0)) (state (make-monte-carlo-plain (dim0 lower-limits))) (scalars t)) "gsl_monte_plain_integrate" ((callback :pointer) ((c-pointer lower-limits) :pointer) ((c-pointer upper-limits) :pointer) ((dim0 lower-limits) sizet) (number-of-samples sizet) ((mpointer generator) :pointer) ((mpointer state) :pointer) (result (:pointer :double)) (abserr (:pointer :double))) :inputs (lower-limits upper-limits) :callbacks (callback fnstruct-dimension (dimension) (function :double (:input :double :cvector dim0) :slug)) :callback-dynamic (((dim0 lower-limits)) (function scalars)) "Uses the plain Monte Carlo algorithm to integrate the function f over the hypercubic region defined by the lower and upper limits in the arrays 'lower-limits and 'upper-limits, each a gsl-vector of length dim. The integration uses a fixed number of function calls number-of-samples, and obtains random sampling points using the random number generator 'generator. A previously allocated workspace 'state must be supplied. The result of the integration is returned with an estimated absolute error.") The MISER algorithm of Press and Farrar is based on recursive (defmobject monte-carlo-miser "gsl_monte_miser" ((dim sizet)) "miser Monte Carlo integration" "Make and initialize a workspace for Monte Carlo integration in dimension dim. The workspace is used to maintain the state of the integration." :initialize-suffix "init" :initialize-args nil) (export 'miser-parameter) (defmacro miser-parameter (workspace parameter) "Get or set with setf the parameter value for the MISER Monte Carlo integration method." 
( setf ( miser - parameter ws min - calls ) 300 ) `(cffi:foreign-slot-value ,workspace 'miser-state ',parameter)) (defmfun monte-carlo-integrate-miser (function lower-limits upper-limits &optional (number-of-samples (* *monte-carlo-default-samples-per-dimension* (dim0 lower-limits))) (generator (make-random-number-generator +mt19937+ 0)) (state (make-monte-carlo-miser (dim0 lower-limits))) (scalars t)) "gsl_monte_miser_integrate" ((callback :pointer) ((c-pointer lower-limits) :pointer) ((c-pointer upper-limits) :pointer) ((dim0 lower-limits) sizet) (number-of-samples sizet) ((mpointer generator) :pointer) ((mpointer state) :pointer) (result (:pointer :double)) (abserr (:pointer :double))) :inputs (lower-limits upper-limits) :callbacks (callback fnstruct-dimension (dimension) (function :double (:input :double :cvector dim0) :slug)) :callback-dynamic (((dim0 lower-limits)) (function scalars)) "Uses the miser Monte Carlo algorithm to integrate the function f over the hypercubic region defined by the lower and upper limits in the arrays 'lower-limits and 'upper-limits, each a gsl-vector of the samelength The integration uses a fixed number of function calls number-of-samples, and obtains random sampling points using the random number generator 'generator. A previously allocated workspace 'state must be supplied. The result of the integration is returned with an estimated absolute error.") VEGAS The vegas algorithm of Lepage is based on importance sampling . It function |f| , so that the points are concentrated in the regions (defmobject monte-carlo-vegas "gsl_monte_vegas" ((dim sizet)) "vegas Monte Carlo integration" "Make and initialize a workspace for Monte Carlo integration in dimension dim. The workspace is used to maintain the state of the integration. Returns a pointer to vegas-state." :initialize-suffix "init" :initialize-args nil) (export 'vegas-parameter) (defmacro vegas-parameter (workspace parameter) "Get or set with setf the parameter value for the VEGAS Monte Carlo integration method." ( setf ( vegas - parameter ws bins - max ) 300 ) `(cffi:foreign-slot-value ,workspace 'vegas-state ',parameter)) (defmfun monte-carlo-integrate-vegas (function lower-limits upper-limits &optional (number-of-samples (* *monte-carlo-default-samples-per-dimension* (dim0 lower-limits))) (generator (make-random-number-generator +mt19937+ 0)) (state (make-monte-carlo-vegas (dim0 lower-limits))) (scalars t)) "gsl_monte_vegas_integrate" ((callback :pointer) ((c-pointer lower-limits) :pointer) ((c-pointer upper-limits) :pointer) ((dim0 lower-limits) sizet) (number-of-samples sizet) ((mpointer generator) :pointer) ((mpointer state) :pointer) (result (:pointer :double)) (abserr (:pointer :double))) :inputs (lower-limits upper-limits) :callbacks (callback fnstruct-dimension (dimension) (function :double (:input :double :cvector dim0) :slug)) :callback-dynamic (((dim0 lower-limits)) (function scalars)) "Uses the vegas Monte Carlo algorithm to integrate the function f over the dim-dimensional hypercubic region defined by the lower and upper limits in the arrays x1 and xu, each of the same length. The integration uses a fixed number of function calls number-of-samples, and obtains random sampling points using the random number generator r. A previously allocated workspace s must be supplied. The result of the integration is returned with an estimated absolute error. The result and its error estimate are based on a weighted average of independent samples. 
The chi-squared per degree of freedom for the weighted average is returned via the state struct component, s->chisq, and must be consistent with 1 for the weighted average to be reliable.") Example from Sec . 23.5 (defun mcrw (x y z) "Example function for Monte Carlo used in random walk studies." (* (/ (expt pi 3)) (/ (- 1 (* (cos x) (cos y) (cos z)))))) (defparameter *mc-lower* #m(0.0d0 0.0d0 0.0d0)) (defparameter *mc-upper* (make-marray 'double-float :initial-contents (list pi pi pi))) (defun random-walk-plain-example (&optional (nsamples 500000)) (monte-carlo-integrate-plain 'mcrw *mc-lower* *mc-upper* nsamples)) (defun random-walk-miser-example (&optional (nsamples 500000)) (monte-carlo-integrate-miser 'mcrw *mc-lower* *mc-upper* nsamples)) (defun random-walk-vegas-example (&optional (nsamples 500000)) (monte-carlo-integrate-vegas 'mcrw *mc-lower* *mc-upper* nsamples)) (save-test monte-carlo (random-walk-plain-example) (random-walk-miser-example) (random-walk-vegas-example))
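The GSLL bindings above all share the same shape: integrate a callback over a hypercube with a fixed number of samples, here the example integrand (1/pi^3) * 1/(1 - cos x cos y cos z) over [0,pi]^3. As a sketch of what the plain (uniform-sampling) variant computes — assuming nothing about GSL itself, and using my own function names — a direct Haskell translation of that estimate looks like this; MISER and VEGAS refine the same idea with stratification and importance sampling:

import System.Random (mkStdGen, randoms)

-- The example integrand from the record above.
mcrw :: Double -> Double -> Double -> Double
mcrw x y z = (1 / pi ** 3) * (1 / (1 - cos x * cos y * cos z))

-- Plain Monte Carlo: average the integrand at n uniform points and
-- scale by the volume of the integration region, here pi^3.
plainEstimate :: Int -> Double
plainEstimate n = volume * sum [ mcrw x y z | (x, y, z) <- points ] / fromIntegral n
  where
    volume = pi ** 3
    us     = randoms (mkStdGen 42) :: [Double]   -- uniform draws in [0,1)
    points = take n (group3 (map (* pi) us))     -- scaled to the cube [0,pi]^3
    group3 (a : b : c : rest) = (a, b, c) : group3 rest
    group3 _                  = []

main :: IO ()
main = print (plainEstimate 500000)   -- should land near 1.3932, the known value of this integral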
42a66ff578f029798334db2cf2266bda77521a1993531c01221a3115e84b4082
tisnik/clojure-examples
project.clj
; ( C ) Copyright 2018 , 2020 , 2021 ; ; All rights reserved. This program and the accompanying materials ; are made available under the terms of the Eclipse Public License v1.0 ; which accompanies this distribution, and is available at -v10.html ; ; Contributors: ; (defproject cucumber+expect9 "0.1.0-SNAPSHOT" :description "FIXME: write description" :url "" :license {:name "Eclipse Public License" :url "-v10.html"} :dependencies [[org.clojure/clojure "1.10.1"] [org.clojure/data.json "0.2.5"] [expectations "2.0.9"]] :plugins [[lein-codox "0.10.7"] [test2junit "1.1.0"] [ lein - test - out " 0.3.1 " ] [lein-cloverage "1.0.7-SNAPSHOT"] [lein-kibit "0.1.8"] [lein-clean-m2 "0.1.2"] [lein-project-edn "0.3.0"] [lein-marginalia "0.9.1"] [com.siili/lein-cucumber "1.0.7"] [lein-expectations "0.0.8"]] :cucumber-feature-paths ["features/"] :target-path "target/%s" :profiles {:uberjar {:aot :all} :dev {:dependencies [[com.siili/lein-cucumber "1.0.7"]]}})
null
https://raw.githubusercontent.com/tisnik/clojure-examples/78061b533c0755d0165002961334bbe98d994087/cucumber%2Bexpect9/project.clj
clojure
All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Public License v1.0 which accompanies this distribution, and is available at Contributors:
( C ) Copyright 2018 , 2020 , 2021 -v10.html (defproject cucumber+expect9 "0.1.0-SNAPSHOT" :description "FIXME: write description" :url "" :license {:name "Eclipse Public License" :url "-v10.html"} :dependencies [[org.clojure/clojure "1.10.1"] [org.clojure/data.json "0.2.5"] [expectations "2.0.9"]] :plugins [[lein-codox "0.10.7"] [test2junit "1.1.0"] [ lein - test - out " 0.3.1 " ] [lein-cloverage "1.0.7-SNAPSHOT"] [lein-kibit "0.1.8"] [lein-clean-m2 "0.1.2"] [lein-project-edn "0.3.0"] [lein-marginalia "0.9.1"] [com.siili/lein-cucumber "1.0.7"] [lein-expectations "0.0.8"]] :cucumber-feature-paths ["features/"] :target-path "target/%s" :profiles {:uberjar {:aot :all} :dev {:dependencies [[com.siili/lein-cucumber "1.0.7"]]}})
c46ba4185581120da1d5fbfa7ec23c4771db63f753492fd20882b742322988f3
dcastro/twenty48
Bench.hs
{-# LANGUAGE OverloadedStrings #-} module Bench where import ClassyPrelude import Criterion.Main import qualified Data.Strict.Maybe as M import Game.AlphaBeta import Game.Optimized.Board import Game.Types groups :: [Benchmark] groups = [ bgroup ("alpha-beta: find best move") $ flip map [7 .. 7] $ \depth -> bench ("board4 depth: " <> show depth) $ nf (findBestMove depth) sampleBoard4 ] findBestMove :: Int -> Board -> Int findBestMove depth b = M.fromJust . fmap (fromEnum . unPlayer) $ alphaBeta b depth
null
https://raw.githubusercontent.com/dcastro/twenty48/d4a58bfa389bb247525fd18f80ef428d84ef5fe9/benchmark/Bench.hs
haskell
# LANGUAGE OverloadedStrings #
module Bench where import ClassyPrelude import Criterion.Main import qualified Data.Strict.Maybe as M import Game.AlphaBeta import Game.Optimized.Board import Game.Types groups :: [Benchmark] groups = [ bgroup ("alpha-beta: find best move") $ flip map [7 .. 7] $ \depth -> bench ("board4 depth: " <> show depth) $ nf (findBestMove depth) sampleBoard4 ] findBestMove :: Int -> Board -> Int findBestMove depth b = M.fromJust . fmap (fromEnum . unPlayer) $ alphaBeta b depth
437846b14fd5192d0ebc1b46f49c64f52223f8587c9c4bd2dfae97ee9fc3b265
yav/hobbit
Utils.hs
module Parsing.Utils where import AST import Parsing.Lexer import Parsing.Range import Error import Utils import MonadLib import List type ParseM = ExceptionT String Id run txt p = runId $ runExceptionT $ p $ layout [] $ lexer txt parseError :: String -> ParseM a parseError = raise happyError :: [Lexeme] -> ParseM a happyError ls = raise ("Parse error at " ++ show ( case ls of [] -> end l:_ -> from (lexPos l))) -- Modules --------------------------------------------------------------------- data ParseTop = TopBind ParseBind | TopType DataDecl data ParseBind = ParsePrim PrimDecl | ParseArea AreaDecl | ParseSig Name Schema | ParseBind ImpBind topDecls :: [ParseTop] -> ParseM ([DataDecl], BindBlock) topDecls ps = do b <- bindBlock [ b | TopBind b <- ps ] return ([ b | TopType b <- ps ], b) -- Note: Expects that the argument is reversed. -- This matters when we join the equations that belong to a definition. bindBlock :: [ParseBind] -> ParseM BindBlock bindBlock bs = case dupSigs of [] -> foldM addSig block1 sigs xs -> parseError $ unlines ( "Duplicate signatures" : map show xs ) where act (b,m) (ParsePrim p) = (b { prims = p : prims b }, m) act (b,m) (ParseArea p) = (b { areas = p : areas b }, m) act (b,m) (ParseBind i) = (consBind i b, m) act (b,m) (ParseSig x s) = (b, (x,s) : m) (block1, sigs) = foldl act (emptyBindBlock,[]) bs dupSigs = filter multiple $ groupBy ((==) `on` fst) $ sortBy (compare `on` fst) sigs multiple [_] = False multiple _ = True consBind i@(ImpBind x m) b = case impBinds b of ImpBind y m' : js | x == y -> b { impBinds = ImpBind x (MOr m m') : js } js -> b { impBinds = i : js } addSig b (x,s) = case remove ((x ==) . biName) (impBinds b) of Just (i,is) -> return b { expBinds = ExpBind i s : expBinds b, impBinds = is } Nothing -> parseError ("Signature " ++ show x ++ " has no definition.") qualName f (x,y) = Qual x (f y) notQual (Q n) = case n of Qual _ _ -> parseError ("Qualified name: " ++ show n) n -> return n impSpec (Ent x s) = (`Ent` s) # notQual x -- Expressions ----------------------------------------------------------------- eCase e@(Var _) m = Match (m e) eCase e m = Match (MGrd (QLet b) (m (Var x))) where x = VarName "$case" b = emptyBindBlock { impBinds = [ImpBind x (MIs e)] } eIf e1 e2 e3 = Match (MGrd (QGrd e1) (MIs e2) `MOr` MIs e3) eRecord x fs | isCon x = Con x fs | otherwise = Upd (Var x) fs isCon (Qual _ x) = isCon x isCon (ConName _) = True isCon _ = False intLit n = Lit (Int n) binLit (BinTok n w) = intLit n `Sig` TApp (TCon (ConName "Bit")) (tNat (fromIntegral w)) binLit x = "binLit" `unexpected` show x charLit x = binLit (BinTok (fromIntegral (fromEnum x)) 8) strLit xs = foldr eCons eNil (map charLit xs) where eNil = Var (ConName "Nil") eCons x xs = apps (Var (ConName "Cons")) [x,xs] -- Patterns -------------------------------------------------------------------- pBin (BinTok n w) = pInt n `PSig` tBit (tNat $ fromIntegral w) pBin x = "pBin" `unexpected` show x pInt n = PAbs (PVar it) $ QGrd (apps eEquals [Var it, intLit n]) where it = VarName "it" -- is it safe to reuse the name? -- we could make it unique by using the pos. 
pCon c ps = PApp (BPCon c) [] [] ps pSplit p1 p2 = PApp BPSplit [] [] [p1,p2] pIxUpd op e1 e2 p = PApp (BPUpd op e1 e2) [] [] [p] pDec x k = PApp ( BPUpd Inc ( Lit ( Int k ) ) ( Var ( VarName " minVal " ) ) ) [ ] [ ] [ PVar x ] pInc x k = PApp ( BPUpd Inc ( Lit ( Int k ) ) ( Var ( VarName " minVal " ) ) ) [ ] [ ] [ PVar x ] pDecBd x k e = case e of Infix ( Var x ' ) ( VarName " > = " ) e | x = = x ' - > return ( PDec x k e ) Infix e ( VarName " = < " ) ( Var x ' ) | x = = x ' - > return ( PDec x k e ) _ - > parseError " Invalid decrement pattern " pIncBd = case e of Infix ( Var x ' ) ( VarName " < = " ) e | x = = x ' - > return ( PInc x k e ) Infix e ( VarName " > = " ) ( Var x ' ) | x = = x ' - > return ( PInc x k e ) _ - > parseError " Invalid increment pattern " pDec x k = PApp (BPUpd Inc (Lit (Int k)) (Var (VarName "minVal"))) [] [] [PVar x] pInc x k = PApp (BPUpd Inc (Lit (Int k)) (Var (VarName "minVal"))) [] [] [PVar x] pDecBd x k e = case e of Infix (Var x') (VarName ">=") e | x == x' -> return (PDec x k e) Infix e (VarName "=<") (Var x') | x == x' -> return (PDec x k e) _ -> parseError "Invalid decrement pattern" pIncBd x k e = case e of Infix (Var x') (VarName "<=") e | x == x' -> return (PInc x k e) Infix e (VarName ">=") (Var x') | x == x' -> return (PInc x k e) _ -> parseError "Invalid increment pattern" -} pFields :: Pat -> [FieldP] -> Pat pFields p fs = foldl pUpd p fs pUpd p1 (FieldP l _ p2) = PAbs (PVar it) (QPat p1 (Var it) `QThen` QPat p2 (apps (Var (Select l)) [Var it])) where it = VarName "it" -- is it safe to reuse the name? -- we could make it unique by using the pos. pAs x p = PAbs (PVar x) (QPat p (Var x)) pChar x = pBin (BinTok (fromIntegral (fromEnum x)) 8) -- Types ----------------------------------------------------------------------- prelType :: String -> Type prelType t = TCon (qPrel (ConName t)) tExp2 (TCon (TNat 2)) t1 t2 = return (prelType "Exp2" `TApp` t1 `TApp` t2) tExp2 _ _ _ = parseError "Invalid exponent predicate" tyVar :: Name -> Type tyVar x = TFree (TyUser { tyVarName = x }) typeToCon :: Type -> ParseM DataCon typeToCon (TInfix s c t) = return (DataCon c [s,t]) typeToCon t = case splitTApp t [] of (TCon c, ts) -> return (DataCon c ts) _ -> parseError ("Invalid constructr: " ++ show t) typeToSchema :: Type -> Type -> ParseM Schema typeToSchema ps t = do ps <- preds return (Forall [] ps t) where preds = case splitTApp ps [] of (TCon (Tup _), ps) -> forEach ps typeToPred' _ -> return # typeToPred' ps schemaToRule :: Schema -> ParseM (Poly Pred) schemaToRule s = do c <- typeToPred' (poly s) return (s { poly = c }) typeToPred' :: Type -> ParseM Pred typeToPred' (TParens t) = typeToPred' t typeToPred' t = case splitTApp t [] of (TCon c, ts) -> pred c ts (TInfix t1 c t2, ts) -> pred c (t1:t2:ts) _ -> parseError ("Invalid predicate " ++ show t) where pred name args = return (CIn name args)
null
https://raw.githubusercontent.com/yav/hobbit/31414ba1188f4b39620c2553b45b9e4d4aa40169/src/Parsing/Utils.hs
haskell
Modules --------------------------------------------------------------------- Note: Expects that the argument is reversed. This matters when we join the equations that belong to a definition. Expressions ----------------------------------------------------------------- Patterns -------------------------------------------------------------------- is it safe to reuse the name? we could make it unique by using the pos. is it safe to reuse the name? we could make it unique by using the pos. Types -----------------------------------------------------------------------
module Parsing.Utils where import AST import Parsing.Lexer import Parsing.Range import Error import Utils import MonadLib import List type ParseM = ExceptionT String Id run txt p = runId $ runExceptionT $ p $ layout [] $ lexer txt parseError :: String -> ParseM a parseError = raise happyError :: [Lexeme] -> ParseM a happyError ls = raise ("Parse error at " ++ show ( case ls of [] -> end l:_ -> from (lexPos l))) data ParseTop = TopBind ParseBind | TopType DataDecl data ParseBind = ParsePrim PrimDecl | ParseArea AreaDecl | ParseSig Name Schema | ParseBind ImpBind topDecls :: [ParseTop] -> ParseM ([DataDecl], BindBlock) topDecls ps = do b <- bindBlock [ b | TopBind b <- ps ] return ([ b | TopType b <- ps ], b) bindBlock :: [ParseBind] -> ParseM BindBlock bindBlock bs = case dupSigs of [] -> foldM addSig block1 sigs xs -> parseError $ unlines ( "Duplicate signatures" : map show xs ) where act (b,m) (ParsePrim p) = (b { prims = p : prims b }, m) act (b,m) (ParseArea p) = (b { areas = p : areas b }, m) act (b,m) (ParseBind i) = (consBind i b, m) act (b,m) (ParseSig x s) = (b, (x,s) : m) (block1, sigs) = foldl act (emptyBindBlock,[]) bs dupSigs = filter multiple $ groupBy ((==) `on` fst) $ sortBy (compare `on` fst) sigs multiple [_] = False multiple _ = True consBind i@(ImpBind x m) b = case impBinds b of ImpBind y m' : js | x == y -> b { impBinds = ImpBind x (MOr m m') : js } js -> b { impBinds = i : js } addSig b (x,s) = case remove ((x ==) . biName) (impBinds b) of Just (i,is) -> return b { expBinds = ExpBind i s : expBinds b, impBinds = is } Nothing -> parseError ("Signature " ++ show x ++ " has no definition.") qualName f (x,y) = Qual x (f y) notQual (Q n) = case n of Qual _ _ -> parseError ("Qualified name: " ++ show n) n -> return n impSpec (Ent x s) = (`Ent` s) # notQual x eCase e@(Var _) m = Match (m e) eCase e m = Match (MGrd (QLet b) (m (Var x))) where x = VarName "$case" b = emptyBindBlock { impBinds = [ImpBind x (MIs e)] } eIf e1 e2 e3 = Match (MGrd (QGrd e1) (MIs e2) `MOr` MIs e3) eRecord x fs | isCon x = Con x fs | otherwise = Upd (Var x) fs isCon (Qual _ x) = isCon x isCon (ConName _) = True isCon _ = False intLit n = Lit (Int n) binLit (BinTok n w) = intLit n `Sig` TApp (TCon (ConName "Bit")) (tNat (fromIntegral w)) binLit x = "binLit" `unexpected` show x charLit x = binLit (BinTok (fromIntegral (fromEnum x)) 8) strLit xs = foldr eCons eNil (map charLit xs) where eNil = Var (ConName "Nil") eCons x xs = apps (Var (ConName "Cons")) [x,xs] pBin (BinTok n w) = pInt n `PSig` tBit (tNat $ fromIntegral w) pBin x = "pBin" `unexpected` show x pInt n = PAbs (PVar it) $ QGrd (apps eEquals [Var it, intLit n]) pCon c ps = PApp (BPCon c) [] [] ps pSplit p1 p2 = PApp BPSplit [] [] [p1,p2] pIxUpd op e1 e2 p = PApp (BPUpd op e1 e2) [] [] [p] pDec x k = PApp ( BPUpd Inc ( Lit ( Int k ) ) ( Var ( VarName " minVal " ) ) ) [ ] [ ] [ PVar x ] pInc x k = PApp ( BPUpd Inc ( Lit ( Int k ) ) ( Var ( VarName " minVal " ) ) ) [ ] [ ] [ PVar x ] pDecBd x k e = case e of Infix ( Var x ' ) ( VarName " > = " ) e | x = = x ' - > return ( PDec x k e ) Infix e ( VarName " = < " ) ( Var x ' ) | x = = x ' - > return ( PDec x k e ) _ - > parseError " Invalid decrement pattern " pIncBd = case e of Infix ( Var x ' ) ( VarName " < = " ) e | x = = x ' - > return ( PInc x k e ) Infix e ( VarName " > = " ) ( Var x ' ) | x = = x ' - > return ( PInc x k e ) _ - > parseError " Invalid increment pattern " pDec x k = PApp (BPUpd Inc (Lit (Int k)) (Var (VarName "minVal"))) [] [] [PVar x] pInc x k = PApp (BPUpd Inc (Lit (Int k)) 
(Var (VarName "minVal"))) [] [] [PVar x] pDecBd x k e = case e of Infix (Var x') (VarName ">=") e | x == x' -> return (PDec x k e) Infix e (VarName "=<") (Var x') | x == x' -> return (PDec x k e) _ -> parseError "Invalid decrement pattern" pIncBd x k e = case e of Infix (Var x') (VarName "<=") e | x == x' -> return (PInc x k e) Infix e (VarName ">=") (Var x') | x == x' -> return (PInc x k e) _ -> parseError "Invalid increment pattern" -} pFields :: Pat -> [FieldP] -> Pat pFields p fs = foldl pUpd p fs pUpd p1 (FieldP l _ p2) = PAbs (PVar it) (QPat p1 (Var it) `QThen` QPat p2 (apps (Var (Select l)) [Var it])) pAs x p = PAbs (PVar x) (QPat p (Var x)) pChar x = pBin (BinTok (fromIntegral (fromEnum x)) 8) prelType :: String -> Type prelType t = TCon (qPrel (ConName t)) tExp2 (TCon (TNat 2)) t1 t2 = return (prelType "Exp2" `TApp` t1 `TApp` t2) tExp2 _ _ _ = parseError "Invalid exponent predicate" tyVar :: Name -> Type tyVar x = TFree (TyUser { tyVarName = x }) typeToCon :: Type -> ParseM DataCon typeToCon (TInfix s c t) = return (DataCon c [s,t]) typeToCon t = case splitTApp t [] of (TCon c, ts) -> return (DataCon c ts) _ -> parseError ("Invalid constructr: " ++ show t) typeToSchema :: Type -> Type -> ParseM Schema typeToSchema ps t = do ps <- preds return (Forall [] ps t) where preds = case splitTApp ps [] of (TCon (Tup _), ps) -> forEach ps typeToPred' _ -> return # typeToPred' ps schemaToRule :: Schema -> ParseM (Poly Pred) schemaToRule s = do c <- typeToPred' (poly s) return (s { poly = c }) typeToPred' :: Type -> ParseM Pred typeToPred' (TParens t) = typeToPred' t typeToPred' t = case splitTApp t [] of (TCon c, ts) -> pred c ts (TInfix t1 c t2, ts) -> pred c (t1:t2:ts) _ -> parseError ("Invalid predicate " ++ show t) where pred name args = return (CIn name args)
b707a78fde2f320e70bef4667ef41644d14e88aafb641c90cde7828d3fa23e55
TyOverby/mono
client.ml
open Base open Async_kernel open Async_unix module Request = struct include Cohttp.Request include (Make(Io) : module type of Make(Io) with type t := t) end module Response = struct include Cohttp.Response include (Make(Io) : module type of Make(Io) with type t := t) end module Net = struct let lookup uri = let host = Uri.host_with_default ~default:"localhost" uri in match Uri_services.tcp_port_of_uri ~default:"http" uri with | None -> Deferred.Or_error.error_string "Net.lookup: failed to get TCP port form Uri" | Some port -> let open Unix in Addr_info.get ~host [ Addr_info.AI_FAMILY PF_INET ; Addr_info.AI_SOCKTYPE SOCK_STREAM] >>| function | { Addr_info.ai_addr=ADDR_INET (addr,_); _ }::_ -> Or_error.return (host, Ipaddr_unix.of_inet_addr addr, port) | _ -> Or_error.error "Failed to resolve Uri" uri Uri_sexp.sexp_of_t let connect_uri ?interrupt ?ssl_config uri = (match Uri.scheme uri with | Some "httpunix" -> let host = Uri.host_with_default ~default:"localhost" uri in return @@ `Unix_domain_socket host | _ -> lookup uri |> Deferred.Or_error.ok_exn >>= fun (host, addr, port) -> return @@ match (Uri.scheme uri, ssl_config) with | Some "https", Some config -> `OpenSSL (addr, port, config) | Some "https", None -> let config = Conduit_async.V2.Ssl.Config.create ~hostname:host () in `OpenSSL (addr, port, config) | _ -> `TCP (addr, port)) >>= fun mode -> Conduit_async.V2.connect ?interrupt mode end let read_response ic = Response.read ic >>| function | `Eof -> failwith "Connection closed by remote host" | `Invalid reason -> failwith reason | `Ok res -> begin match Response.has_body res with | `Yes | `Unknown -> (* Build a response pipe for the body *) let reader = Response.make_body_reader res ic in let pipe = Body_raw.pipe_of_body Response.read_body_chunk reader in (res, pipe) | `No -> let pipe = Pipe.of_list [] in (res, pipe) end let request ?interrupt ?ssl_config ?uri ?(body=`Empty) req = Connect to the remote side let uri = match uri with | Some t -> t | None -> Request.uri req in Net.connect_uri ?interrupt ?ssl_config uri >>= fun (ic, oc) -> try_with (fun () -> Request.write (fun writer -> Body_raw.write_body Request.write_body body writer) req oc >>= fun () -> read_response ic >>| fun (resp, body) -> don't_wait_for ( Pipe.closed body >>= fun () -> Deferred.all_unit [Reader.close ic; Writer.close oc]); (resp, `Pipe body)) >>= begin function | Ok res -> return res | Error e -> don't_wait_for (Reader.close ic); don't_wait_for (Writer.close oc); raise e end let callv ?interrupt ?ssl_config uri reqs = let reqs_c = ref 0 in let resp_c = ref 0 in Net.connect_uri ?interrupt ?ssl_config uri >>= fun (ic, oc) -> try_with (fun () -> reqs |> Pipe.iter ~f:(fun (req, body) -> Int.incr reqs_c; Request.write (fun w -> Body_raw.write_body Request.write_body body w) req oc) |> don't_wait_for; let last_body_drained = ref Deferred.unit in let responses = Reader.read_all ic (fun ic -> !last_body_drained >>= fun () -> if Pipe.is_closed reqs && (!resp_c >= !reqs_c) then return `Eof else ic |> read_response >>| fun (resp, body) -> Int.incr resp_c; last_body_drained := Pipe.closed body; `Ok (resp, `Pipe body) ) in don't_wait_for ( Pipe.closed reqs >>= fun () -> Pipe.closed responses >>= fun () -> Writer.close oc ); return responses) >>= begin function | Ok x -> return x | Error e -> don't_wait_for (Reader.close ic); don't_wait_for (Writer.close oc); raise e end let call ?interrupt ?ssl_config ?headers ?(chunked=false) ?(body=`Empty) meth uri = (* Create a request, then make the request. 
Figure out an appropriate transfer encoding *) let req = match chunked with | false -> Body_raw.disable_chunked_encoding body >>| fun (_body, body_length) -> Request.make_for_client ?headers ~chunked ~body_length meth uri | true -> begin Body.is_empty body >>| function | true -> (* Don't used chunked encoding with an empty body *) Request.make_for_client ?headers ~chunked:false ~body_length:0L meth uri | false -> (* Use chunked encoding if there is a body *) Request.make_for_client ?headers ~chunked:true meth uri end in req >>= request ?interrupt ?ssl_config ~body ~uri let get ?interrupt ?ssl_config ?headers uri = call ?interrupt ?ssl_config ?headers ~chunked:false `GET uri let head ?interrupt ?ssl_config ?headers uri = call ?interrupt ?ssl_config ?headers ~chunked:false `HEAD uri >>| fun (res, body) -> (match body with | `Pipe p -> Pipe.close_read p; | _ -> ()); res let post ?interrupt ?ssl_config ?headers ?(chunked=false) ?body uri = call ?interrupt ?ssl_config ?headers ~chunked ?body `POST uri let post_form ?interrupt ?ssl_config ?headers ~params uri = let headers = Cohttp.Header.add_opt_unless_exists headers "content-type" "application/x-www-form-urlencoded" in let body = Body.of_string (Uri.encoded_of_query params) in post ?interrupt ?ssl_config ~headers ~chunked:false ~body uri let put ?interrupt ?ssl_config ?headers ?(chunked=false) ?body uri = call ?interrupt ?ssl_config ?headers ~chunked ?body `PUT uri let patch ?interrupt ?ssl_config ?headers ?(chunked=false) ?body uri = call ?interrupt ?ssl_config ?headers ~chunked ?body `PATCH uri let delete ?interrupt ?ssl_config ?headers ?(chunked=false) ?body uri = call ?interrupt ?ssl_config ?headers ~chunked ?body `DELETE uri
null
https://raw.githubusercontent.com/TyOverby/mono/8d6b3484d5db63f2f5472c7367986ea30290764d/vendor/mirage-ocaml-cohttp/cohttp-async/src/client.ml
ocaml
Build a response pipe for the body Create a request, then make the request. Figure out an appropriate transfer encoding Don't use chunked encoding with an empty body Use chunked encoding if there is a body
open Base open Async_kernel open Async_unix module Request = struct include Cohttp.Request include (Make(Io) : module type of Make(Io) with type t := t) end module Response = struct include Cohttp.Response include (Make(Io) : module type of Make(Io) with type t := t) end module Net = struct let lookup uri = let host = Uri.host_with_default ~default:"localhost" uri in match Uri_services.tcp_port_of_uri ~default:"http" uri with | None -> Deferred.Or_error.error_string "Net.lookup: failed to get TCP port form Uri" | Some port -> let open Unix in Addr_info.get ~host [ Addr_info.AI_FAMILY PF_INET ; Addr_info.AI_SOCKTYPE SOCK_STREAM] >>| function | { Addr_info.ai_addr=ADDR_INET (addr,_); _ }::_ -> Or_error.return (host, Ipaddr_unix.of_inet_addr addr, port) | _ -> Or_error.error "Failed to resolve Uri" uri Uri_sexp.sexp_of_t let connect_uri ?interrupt ?ssl_config uri = (match Uri.scheme uri with | Some "httpunix" -> let host = Uri.host_with_default ~default:"localhost" uri in return @@ `Unix_domain_socket host | _ -> lookup uri |> Deferred.Or_error.ok_exn >>= fun (host, addr, port) -> return @@ match (Uri.scheme uri, ssl_config) with | Some "https", Some config -> `OpenSSL (addr, port, config) | Some "https", None -> let config = Conduit_async.V2.Ssl.Config.create ~hostname:host () in `OpenSSL (addr, port, config) | _ -> `TCP (addr, port)) >>= fun mode -> Conduit_async.V2.connect ?interrupt mode end let read_response ic = Response.read ic >>| function | `Eof -> failwith "Connection closed by remote host" | `Invalid reason -> failwith reason | `Ok res -> begin match Response.has_body res with | `Yes | `Unknown -> let reader = Response.make_body_reader res ic in let pipe = Body_raw.pipe_of_body Response.read_body_chunk reader in (res, pipe) | `No -> let pipe = Pipe.of_list [] in (res, pipe) end let request ?interrupt ?ssl_config ?uri ?(body=`Empty) req = Connect to the remote side let uri = match uri with | Some t -> t | None -> Request.uri req in Net.connect_uri ?interrupt ?ssl_config uri >>= fun (ic, oc) -> try_with (fun () -> Request.write (fun writer -> Body_raw.write_body Request.write_body body writer) req oc >>= fun () -> read_response ic >>| fun (resp, body) -> don't_wait_for ( Pipe.closed body >>= fun () -> Deferred.all_unit [Reader.close ic; Writer.close oc]); (resp, `Pipe body)) >>= begin function | Ok res -> return res | Error e -> don't_wait_for (Reader.close ic); don't_wait_for (Writer.close oc); raise e end let callv ?interrupt ?ssl_config uri reqs = let reqs_c = ref 0 in let resp_c = ref 0 in Net.connect_uri ?interrupt ?ssl_config uri >>= fun (ic, oc) -> try_with (fun () -> reqs |> Pipe.iter ~f:(fun (req, body) -> Int.incr reqs_c; Request.write (fun w -> Body_raw.write_body Request.write_body body w) req oc) |> don't_wait_for; let last_body_drained = ref Deferred.unit in let responses = Reader.read_all ic (fun ic -> !last_body_drained >>= fun () -> if Pipe.is_closed reqs && (!resp_c >= !reqs_c) then return `Eof else ic |> read_response >>| fun (resp, body) -> Int.incr resp_c; last_body_drained := Pipe.closed body; `Ok (resp, `Pipe body) ) in don't_wait_for ( Pipe.closed reqs >>= fun () -> Pipe.closed responses >>= fun () -> Writer.close oc ); return responses) >>= begin function | Ok x -> return x | Error e -> don't_wait_for (Reader.close ic); don't_wait_for (Writer.close oc); raise e end let call ?interrupt ?ssl_config ?headers ?(chunked=false) ?(body=`Empty) meth uri = let req = match chunked with | false -> Body_raw.disable_chunked_encoding body >>| fun (_body, body_length) -> 
Request.make_for_client ?headers ~chunked ~body_length meth uri | true -> begin Body.is_empty body >>| function Request.make_for_client ?headers ~chunked:false ~body_length:0L meth uri Request.make_for_client ?headers ~chunked:true meth uri end in req >>= request ?interrupt ?ssl_config ~body ~uri let get ?interrupt ?ssl_config ?headers uri = call ?interrupt ?ssl_config ?headers ~chunked:false `GET uri let head ?interrupt ?ssl_config ?headers uri = call ?interrupt ?ssl_config ?headers ~chunked:false `HEAD uri >>| fun (res, body) -> (match body with | `Pipe p -> Pipe.close_read p; | _ -> ()); res let post ?interrupt ?ssl_config ?headers ?(chunked=false) ?body uri = call ?interrupt ?ssl_config ?headers ~chunked ?body `POST uri let post_form ?interrupt ?ssl_config ?headers ~params uri = let headers = Cohttp.Header.add_opt_unless_exists headers "content-type" "application/x-www-form-urlencoded" in let body = Body.of_string (Uri.encoded_of_query params) in post ?interrupt ?ssl_config ~headers ~chunked:false ~body uri let put ?interrupt ?ssl_config ?headers ?(chunked=false) ?body uri = call ?interrupt ?ssl_config ?headers ~chunked ?body `PUT uri let patch ?interrupt ?ssl_config ?headers ?(chunked=false) ?body uri = call ?interrupt ?ssl_config ?headers ~chunked ?body `PATCH uri let delete ?interrupt ?ssl_config ?headers ?(chunked=false) ?body uri = call ?interrupt ?ssl_config ?headers ~chunked ?body `DELETE uri
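The call function in the record above chooses the transfer encoding before sending: with ~chunked:false it drains the body to learn a fixed length, and even with ~chunked:true it falls back to a zero-length non-chunked request when the body turns out to be empty. The decision table, sketched in Haskell with made-up types (nothing here is cohttp-async's real API):

-- Whether to send a fixed-length or a chunked request body.
data Encoding
  = ContentLength Int   -- body length is known up front; no chunking
  | Chunked             -- body is streamed with chunked transfer encoding
  deriving Show

-- chooseEncoding chunkedRequested bodyLength
chooseEncoding :: Bool -> Int -> Encoding
chooseEncoding False len = ContentLength len  -- caller forbade chunking: body was read to measure it
chooseEncoding True  0   = ContentLength 0    -- empty body: chunking would be pointless
chooseEncoding True  _   = Chunked            -- non-empty body with chunking allowed

main :: IO ()
main = mapM_ print
  [ chooseEncoding False 12   -- ContentLength 12
  , chooseEncoding True  0    -- ContentLength 0
  , chooseEncoding True  37   -- Chunked
  ]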
b4b01501ca196795f3760ce71e9873fc816162c56df61fab1e1a1b1dcfa7080e
Viasat/halite
test_analysis.clj
Copyright ( c ) 2022 Viasat , Inc. Licensed under the MIT license (ns com.viasat.halite.test-analysis (:require [clojure.test :refer :all] [com.viasat.halite.analysis :as analysis] [com.viasat.halite.envs :as envs] [com.viasat.halite.var-types :as var-types] [schema.core :as s] [schema.test :refer [validate-schemas]])) (set! *warn-on-reflection* true) (clojure.test/use-fixtures :once validate-schemas) (deftest test-schema (s/check analysis/EnumConstraint {:enum #{100}}) (s/check analysis/Range {:min 100 :min-inclusive true}) (s/check analysis/Range {:max 100 :min-inclusive true}) (s/check analysis/Range {:max 100 :max-inclusive true}) (s/check analysis/Range {:min 1 :min-inclusive false :max 100 :max-inclusive true}) (s/check analysis/RangeConstraint {:ranges #{{:min 100 :min-inclusive true}}}) (s/check analysis/CollectionConstraint {:coll-size 5}) (s/check analysis/CollectionConstraint {:coll-elements {:enum #{1 2}}}) (s/check analysis/CollectionConstraint {:coll-size 5 :coll-elements {:enum #{1 2}}})) (deftest test-gather-free-vars (are [v expected] (= expected (analysis/gather-free-vars v)) 1 #{} 'x '#{x} '(+ 1 x y) '#{x y} '(let [x a y x a 1 z a] (+ x y z b)) '#{a b} '(and (or (= (+ 1 z)) b)) '#{z b} '{:$type :my/S$v1 :x 1 :y a :z (and b {:$type :my/T$v1 :q c})} '#{a b c} '[1 2 x] '#{x} '#{a b 3} '#{a b} '(if x y z) '#{x y z} '(let [x a y x a 1 z a p [#{[#{c}]}]] (if (+ x y z b) (every? [x [a a d]] (+ x e)))) '#{a b c d e} '(every? [x [1 2 3]] (+ x 1)) #{})) (deftest test-replace-free-vars (are [v var-map expected] (= expected (analysis/replace-free-vars var-map v)) true {} true 'x {} 'x 'x '{x y} 'y '(+ x 1) {} '(+ x 1) '(+ x (let [x 1] x)) '{x y} '(+ y (let [x 1] x)) '[x #{y}] '{x a y b} '[a #{b}] '(+ x (let [x 1 a y] (+ x a b z))) '{x p y q z r} '(+ p (let [x 1 a q] (+ x a b r))) '(+ x a b z) '{x p y q z r} '(+ p a b r) ;; '(if-value x 1 0) '{x a} '1 '(if-value x 1 0) '{x 99} '1 '(if-value x 1 0) '{x (when-value a a)} '(if-value a 1 0) '(if-value x 1 0) '{x (when-value a (+ a 1))} '(if-value-let [x (when-value a (+ a 1))] 1 0) '(if-value x 1 0) '{x (if-value a (when-value b b) c)} '(if-value-let [x (if-value a (when-value b b) c)] 1 0) ;; '(if-value x x y) '{x a y b} 'a '(if-value x x y) '{x 99 y b} '99 '(if-value x x y) '{x (when-value a a) y b} '(if-value a a b) '(if-value x x y) '{x (when-value a (+ a 1)) y b} '(if-value-let [x (when-value a (+ a 1))] x b) '(if-value x x y) '{x (if-value a (when-value b b) c) y b} '(if-value-let [x (if-value a (when-value b b) c)] x b) ;; '(when-value x y) '{x a y b} 'b '(when-value x y) '{x 99 y b} 'b '(when-value x y) '{x (when-value a a) y b} '(when-value a b) '(when-value x y) '{x (when-value a (+ a 1)) y b} '(when-value-let [x (when-value a (+ a 1))] b) '(when-value x y) '{x (if-value a (when-value b b) c) y b} '(when-value-let [x (if-value a (when-value b b) c)] b) ;; '(when-value x 1) '{x a} '1 '(when-value x 1) '{x 99} '1 '(when-value x 1) '{x (when-value a a)} '(when-value a 1) '(when-value x 1) '{x (when-value a (+ a 1))} '(when-value-let [x (when-value a (+ a 1))] 1) '(when-value x 1) '{x (if-value a (when-value b b) c)} '(when-value-let [x (if-value a (when-value b b) c)] 1))) (deftest test-gather-tlfc (are [v x] (= x [(analysis/gather-tlfc v) (analysis/sort-tlfc (analysis/gather-tlfc v)) (binding [analysis/*max-enum-size* 2] (analysis/compute-tlfc-map v)) (binding [analysis/*max-enum-size* 10] (analysis/compute-tlfc-map v))]) true [true nil {} {}] '(and true ;; this expression references no fields, so no field constraints are 
extracted false) ['true nil {} {}] '(= x 100) ;; field assignments come out as enum values ['(= x 100) '{x (= x 100)} '{x {:enum #{100}}} '{x {:enum #{100}}}] '(= x (+ 200 3)) [true nil {} {}] '(and (= x 300) ;; multiple fields can be teased apart as long as the clauses are independent (= 2 y)) ['(and (= x 300) (= 2 y)) '{x (= x 300) y (= 2 y)} '{x {:enum #{300}} y {:enum #{2}}} '{x {:enum #{300}} y {:enum #{2}}}] '(and ;; multiple ands can be walked through z ;; even if there are constraints that are not extraced (and (= x 400) (= 2 y))) ['(and (= x 400) (= 2 y)) '{x (= x 400) y (= 2 y)} '{x {:enum #{400}} y {:enum #{2}}} '{x {:enum #{400}} y {:enum #{2}}}] '(< x 500) ;; partial ranges are extracted ['(< x 500) '{x (< x 500)} '{x {:ranges #{{:max 500 :max-inclusive false}}}} '{x {:ranges #{{:max 500 :max-inclusive false}}}}] '(< 510 x) ;; arguments can be in either order ['(< 510 x) '{x (< 510 x)} '{x {:ranges #{{:min 510 :min-inclusive true}}}} '{x {:ranges #{{:min 510 :min-inclusive true}}}}] '(< x y) ;; expressions over multiple fields are not extracted [true nil {} {}] '(= x [1 600]) ;; fields of any type are extracted into enums ['(= x [1 600]) '{x (= x [1 600])} '{x {:enum #{[1 600]}}} '{x {:enum #{[1 600]}}}] '(= x {:$type :my/Spec}) ;; instance values are pulled out into enums ['(= x {:$type :my/Spec}) '{x (= x {:$type :my/Spec})} '{x {:enum #{{:$type :my/Spec}}}} '{x {:enum #{{:$type :my/Spec}}}}] '(= x [700 z]) ;; no "destructuring" of collections [true nil {} {}] '(let [y 800] ;; no navigating through locals to find literal values (< x y)) [true nil {} {}] '(< x (let [y 900] y)) [true nil {} {}] '(contains? #{1000 2 3} x) ;; set containment translated into enums ['(contains? #{1000 3 2} x) '{x (contains? #{1000 3 2} x)} '{x {:enum #{1000 3 2}}} '{x {:enum #{1000 3 2}}}] '(contains? #{1050 x 3} 1) ;; no set "deconstruction" [true nil {} {}] '(and (contains? #{1100 2 3} x) ;; many fields can be teased apart as long as the clauses are independent 'and' sub-expressions (= y 1) (and (<= z 20) (> z 10))) ['(and (contains? #{1100 3 2} x) (= y 1) (<= z 20) (> z 10)) '{x (contains? #{1100 3 2} x), y (= y 1), z (and (<= z 20) (> z 10))} '{x {:enum #{1100 3 2}} y {:enum #{1}} z {:ranges #{{:max 20 :max-inclusive true :min 10 :min-inclusive false}}}} '{x {:enum #{1100 3 2}} y {:enum #{1}} z {:ranges #{{:max 20 :max-inclusive true :min 10 :min-inclusive false}}}}] '(and (contains? #{1200 2 3} x) (or a b) (and (= y 1) (and (<= z 20) (> z 10))) q) ['(and (contains? #{1200 3 2} x) (= y 1) (<= z 20) (> z 10)) '{x (contains? #{1200 3 2} x) y (= y 1), z (and (<= z 20) (> z 10))} '{x {:enum #{1200 3 2}} y {:enum #{1}} z {:ranges #{{:max 20 :max-inclusive true :min 10 :min-inclusive false}}}} '{x {:enum #{1200 3 2}} y {:enum #{1}} z {:ranges #{{:max 20 :max-inclusive true :min 10 :min-inclusive false}}}}] '(or (= x 1300) (= x 2)) ['(or (= x 1300) (= x 2)) '{x (or (= x 1300) (= x 2))} '{x {:enum #{2 1300}}} '{x {:enum #{2 1300}}}] '(and (or (= x 1310) (= x 2)) (= y 3)) ['(and (or (= x 1310) (= x 2)) (= y 3)) '{x (or (= x 1310) (= x 2)) y (= y 3)} '{x {:enum #{1310 2}} y {:enum #{3}}} '{x {:enum #{1310 2}} y {:enum #{3}}}] '(and (contains? #{1400 2 3} x) (or a b) (every? [c [1 2 3]] (= c 1)) (= z 1) (= y 1) (<= z 20) (> z 10) q) ['(and (contains? #{1400 3 2} x) (= z 1) (= y 1) (<= z 20) (> z 10)) '{x (contains? 
#{1400 3 2} x) z (and (= z 1) (<= z 20) (> z 10)), y (= y 1)} '{x {:enum #{1400 3 2}} z {:enum #{}} y {:enum #{1}}} '{x {:enum #{1400 3 2}} z {:enum #{}} y {:enum #{1}}}] '(= [1 1500] [x 1500]) [true nil {} {}] '(= #{x 1600} #{1 1600}) [true nil {} {}] '(or (and (>= x 3) (< x 12)) (and (>= x 20) (< x 1700))) ['(or (and (>= x 3) (< x 12)) (and (>= x 20) (< x 1700))) '{x (or (and (>= x 3) (< x 12)) (and (>= x 20) (< x 1700)))} '{x {:ranges #{{:min 3 :min-inclusive true :max 12 :max-inclusive false} {:min 20 :min-inclusive true :max 1700 :max-inclusive false}}}} '{x {:ranges #{{:min 3 :min-inclusive true :max 12 :max-inclusive false} {:min 20 :min-inclusive true :max 1700 :max-inclusive false}}}}] '(or (and (>= x 3) (< x 13) (<= x 9) (< x 12) (> 14 x)) (and (>= x 20) (< x 1800))) ['(or (and (>= x 3) (< x 13) (<= x 9) (< x 12) (> 14 x)) (and (>= x 20) (< x 1800))) '{x (or (and (>= x 3) (< x 13) (<= x 9) (< x 12) (> 14 x)) (and (>= x 20) (< x 1800)))} '{x {:ranges #{{:min 3 :min-inclusive true :max 9 :max-inclusive true} {:min 20 :min-inclusive true :max 1800 :max-inclusive false}}}} '{x {:ranges #{{:min 3 :min-inclusive true :max 9 :max-inclusive true} {:min 20 :min-inclusive true :max 1800 :max-inclusive false}}}}] '(or (< x 1) (= x 2) (< x 1900)) ['(or (< x 1) (= x 2) (< x 1900)) '{x (or (< x 1) (= x 2) (< x 1900))} {} {}] '(or (< x 1) (< x 2000)) ['(or (< x 1) (< x 2000)) '{x (or (< x 1) (< x 2000))} '{x {:ranges #{{:max 2000 :max-inclusive false}}}} '{x {:ranges #{{:max 2000 :max-inclusive false}}}}] '(or (< x 2100) (<= x 2100)) ['(or (< x 2100) (<= x 2100)) '{x (or (< x 2100) (<= x 2100))} '{x {:ranges #{{:max 2100 :max-inclusive true}}}} '{x {:ranges #{{:max 2100 :max-inclusive true}}}}] '(or (< x 2200) (>= 2200 x)) ['(or (< x 2200) (>= 2200 x)) '{x (or (< x 2200) (>= 2200 x))} '{x {:ranges #{{:max 2200 :max-inclusive false}}}} '{x {:ranges #{{:max 2200 :max-inclusive false}}}}] '(and (< x 2300) (>= 2300 x)) ['(and (< x 2300) (>= 2300 x)) '{x (and (< x 2300) (>= 2300 x))} '{x {:ranges #{{:max 2300 :max-inclusive false}}}} '{x {:ranges #{{:max 2300 :max-inclusive false}}}}] '(or (< x 2400) (<= x 2400)) ['(or (< x 2400) (<= x 2400)) '{x (or (< x 2400) (<= x 2400))} '{x {:ranges #{{:max 2400 :max-inclusive true}}}} '{x {:ranges #{{:max 2400 :max-inclusive true}}}}] '(and (< x 2500) (<= x 2500)) ['(and (< x 2500) (<= x 2500)) '{x (and (< x 2500) (<= x 2500))} '{x {:ranges #{{:max 2500 :max-inclusive false}}}} '{x {:ranges #{{:max 2500 :max-inclusive false}}}}] '(or (> x 2600) (>= x 2600)) ['(or (> x 2600) (>= x 2600)) '{x (or (> x 2600) (>= x 2600))} '{x {:ranges #{{:min 2600 :min-inclusive true}}}} '{x {:ranges #{{:min 2600 :min-inclusive true}}}}] '(and (> x 2700) (>= x 2700)) ['(and (> x 2700) (>= x 2700)) '{x (and (> x 2700) (>= x 2700))} '{x {:ranges #{{:min 2700 :min-inclusive false}}}} '{x {:ranges #{{:min 2700 :min-inclusive false}}}}] '(or (and (> x 2800) (<= x 2850)) (and (> x 2840) (<= x 2860))) ['(or (and (> x 2800) (<= x 2850)) (and (> x 2840) (<= x 2860))) '{x (or (and (> x 2800) (<= x 2850)) (and (> x 2840) (<= x 2860)))} '{x {:ranges #{{:min 2800 :min-inclusive false :max 2860 :max-inclusive true}}}} '{x {:ranges #{{:min 2800 :min-inclusive false :max 2860 :max-inclusive true}}}}] '(or (and (> x 2900) (< x 2950)) (and (>= x 2950) (<= x 2960))) ['(or (and (> x 2900) (< x 2950)) (and (>= x 2950) (<= x 2960))) '{x (or (and (> x 2900) (< x 2950)) (and (>= x 2950) (<= x 2960)))} '{x {:ranges #{{:min 2900 :min-inclusive false :max 2960 :max-inclusive true}}}} '{x {:ranges #{{:min 
2900 :min-inclusive false :max 2960 :max-inclusive true}}}}] '(or (and (> x 3000) (<= x 3050)) (and (> x 3040) (<= x 3060)) (> x 1)) ['(or (and (> x 3000) (<= x 3050)) (and (> x 3040) (<= x 3060)) (> x 1)) '{x (or (and (> x 3000) (<= x 3050)) (and (> x 3040) (<= x 3060)) (> x 1))} '{x {:ranges #{{:min 1 :min-inclusive false :max 3060 :max-inclusive true}}}} '{x {:ranges #{{:min 1 :min-inclusive false :max 3060 :max-inclusive true}}}}] '(and (> x 1) (let [x 5] ;; this x does not collide with the outer x, this clause is ignored since it is in 'and' (> x 6)) (or (and (> x 3100) (<= x 3150)) (and (> x 3140) (<= x 3160)))) ['(and (> x 1) (or (and (> x 3100) (<= x 3150)) (and (> x 3140) (<= x 3160)))) '{x (and (> x 1) (or (and (> x 3100) (<= x 3150)) (and (> x 3140) (<= x 3160))))} '{x {:ranges #{{:min 3100 :min-inclusive false :max 3160 :max-inclusive true}}}} '{x {:ranges #{{:min 3100 :min-inclusive false :max 3160 :max-inclusive true}}}}] '(and (> x 3225) ;; this is folded into the rest of the range restriction (= b x) ;; this is ignored, but it is in an 'and', so the other expressions remain (or (and (> x 3200) (<= x 3250)) (and (> x 3240) (<= x 3260)))) ['(and (> x 3225) (or (and (> x 3200) (<= x 3250)) (and (> x 3240) (<= x 3260)))) '{x (and (> x 3225) (or (and (> x 3200) (<= x 3250)) (and (> x 3240) (<= x 3260))))} '{x {:ranges #{{:min 3225 :min-inclusive false :max 3260 :max-inclusive true}}}} '{x {:ranges #{{:min 3225 :min-inclusive false :max 3260 :max-inclusive true}}}}] '(and (> x 3275) (= b x) (or (and (> x 3270) (<= x 3280)) (and (> x 3285) (<= x 3295)) (and (> x 3260) (<= x 3265))) (< x 3290)) ['(and (> x 3275) (or (and (> x 3270) (<= x 3280)) (and (> x 3285) (<= x 3295)) (and (> x 3260) (<= x 3265))) (< x 3290)) '{x (and (> x 3275) (or (and (> x 3270) (<= x 3280)) (and (> x 3285) (<= x 3295)) (and (> x 3260) (<= x 3265))) (< x 3290))} '{x {:ranges #{{:min 3275 :min-inclusive false :max 3280 :max-inclusive true} {:min 3285 :min-inclusive false :max 3290 :max-inclusive false}}}} '{x {:enum #{3276 3277 3278 3279 3280 3286 3287 3288 3289}}}] '(and (> x #d "32.75") (= b x) (or (and (> x #d "32.70") (<= x #d "32.80")) (and (> x #d "32.85") (<= x #d "32.95")) (and (> x #d "32.60") (<= x #d "32.65"))) (< x #d "32.90")) ['(and (> x #d "32.75") (or (and (> x #d "32.70") (<= x #d "32.80")) (and (> x #d "32.85") (<= x #d "32.95")) (and (> x #d "32.60") (<= x #d "32.65"))) (< x #d "32.90")) '{x (and (> x #d "32.75") (or (and (> x #d "32.70") (<= x #d "32.80")) (and (> x #d "32.85") (<= x #d "32.95")) (and (> x #d "32.60") (<= x #d "32.65"))) (< x #d "32.90"))} '{x {:ranges #{{:min #d "32.75" :min-inclusive false :max #d "32.80" :max-inclusive true} {:min #d "32.85" :min-inclusive false :max #d "32.90" :max-inclusive false}}}} '{x {:enum #{#d "32.76" #d "32.77" #d "32.78" #d "32.79" #d "32.80" #d "32.86" #d "32.87" #d "32.88" #d "32.89"}}}] '(and ;; extra 'and' at root makes no difference (and (= x 3325) (or (and (> x 3300) (<= x 3350)) (and (> x 3340) (<= x 3360))))) ['(and (= x 3325) (or (and (> x 3300) (<= x 3350)) (and (> x 3340) (<= x 3360)))) '{x (and (= x 3325) (or (and (> x 3300) (<= x 3350)) (and (> x 3340) (<= x 3360))))} '{x {:enum #{3325}}} '{x {:enum #{3325}}}] '(and (= x 0) (or (and (> x 3400) (<= x 3450)) (and (> x 3440) (<= x 3460)))) ['(and (= x 0) (or (and (> x 3400) (<= x 3450)) (and (> x 3440) (<= x 3460)))) '{x (and (= x 0) (or (and (> x 3400) (<= x 3450)) (and (> x 3440) (<= x 3460))))} '{x {:enum #{}}} ;; empty because of the range '{x {:enum #{}}}] '(and (= x 3500) (= x 
3500) (< x 16) (>= x 10)) ['(and (= x 3500) (= x 3500) (< x 16) (>= x 10)) '{x (and (= x 3500) (= x 3500) (< x 16) (>= x 10))} '{x {:enum #{}}} '{x {:enum #{}}}] '(and (< x 3501) (or (= x 10) (= x 20)) (> x 16)) ['(and (< x 3501) (or (= x 10) (= x 20)) (> x 16)) '{x (and (< x 3501) (or (= x 10) (= x 20)) (> x 16))} '{x {:enum #{20}}} '{x {:enum #{20}}}] '(and (or (= x 10) (= x 20) (= x 0)) (or (and (< x 3550) (> x 16)) (and (< x 15) (> x 5)))) ['(and (or (= x 10) (= x 20) (= x 0)) (or (and (< x 3550) (> x 16)) (and (< x 15) (> x 5)))) '{x (and (or (= x 10) (= x 20) (= x 0)) (or (and (< x 3550) (> x 16)) (and (< x 15) (> x 5))))} '{x {:enum #{20 10}}} ;; if enum values fall in any of the alternate ranges they are included '{x {:enum #{20 10}}}] '(or ;; or at the root with mixed enum and ranges foils the logic to pull out direct field constraints (or (= x 3600) (= x 1)) (and (< x 16) (>= x 10))) [true nil {} {}] '(or (and (> x 3700)) q ;; an extra clause in an 'or' foils attempts to lift out mandatory values for x (and (>= x 3750) (<= x 3760))) [true nil {} {}] '(= x y z 3800) ;; only binary '=' are extracted [true nil {} {}] '(every? [x a] (= x 3900)) ['[(= a 3900)] '{a [(= a 3900)]} '{a {:coll-elements {:enum #{3900}}}} '{a {:coll-elements {:enum #{3900}}}}] '(every? [x a] (and (= x 4000) y)) [true nil {} {}] '(every? [x a] (every? [y x] (= y 4100))) ['[[(= a 4100)]] '{a [[(= a 4100)]]} '{a {:coll-elements {:coll-elements {:enum #{4100}}}}} '{a {:coll-elements {:coll-elements {:enum #{4100}}}}}] '(every? [x a] (every? [y x] (> y 4150))) ['[[(> a 4150)]] '{a [[(> a 4150)]]} '{a {:coll-elements {:coll-elements {:ranges #{{:min 4150 :min-inclusive false}}}}}} '{a {:coll-elements {:coll-elements {:ranges #{{:min 4150 :min-inclusive false}}}}}}] '(every? [x a] (every? [y x] (or (and (> y 4200) (<= y 4210)) (and (< y 5) (> y 0))))) ['[[(or (and (> a 4200) (<= a 4210)) (and (< a 5) (> a 0)))]] '{a [[(or (and (> a 4200) (<= a 4210)) (and (< a 5) (> a 0)))]]} '{a {:coll-elements {:coll-elements {:ranges #{{:max 5 :max-inclusive false :min 0 :min-inclusive false} {:min 4200 :min-inclusive false :max 4210 :max-inclusive true}}}}}} '{a {:coll-elements {:coll-elements {:ranges #{{:max 5 :max-inclusive false :min 0 :min-inclusive false} {:min 4200 :min-inclusive false :max 4210 :max-inclusive true}}}}}}] '(= x "4300") ['(= x "4300") '{x (= x "4300")} '{x {:enum #{"4300"}}} '{x {:enum #{"4300"}}}] '(contains? #{"4400" "a" "b"} x) ['(contains? #{"a" "b" "4400"} x) '{x (contains? #{"a" "b" "4400"} x)} '{x {:enum #{"a" "b" "4400"}}} '{x {:enum #{"a" "b" "4400"}}}] '(= 4500 (count x)) ['(= 4500 (count x)) '{x (= 4500 (count x))} '{x {:coll-size 4500}} '{x {:coll-size 4500}}] '(= (count x) 4510) ['(= (count x) 4510) '{x (= (count x) 4510)} '{x {:coll-size 4510}} '{x {:coll-size 4510}}] '(every? [x a] (= 4600 (count x))) ['[(= 4600 (count a))] '{a [(= 4600 (count a))]} '{a {:coll-elements {:coll-size 4600}}} '{a {:coll-elements {:coll-size 4600}}}] '(and (= (count a) 4700) (every? [x a] (and (= 5 (count x)) (every? [y x] (or (= y "a") (= y "b")))))) ['(and (= (count a) 4700) [(and (= 5 (count a)) [(or (= a "a") (= a "b"))])]) '{a (and (= (count a) 4700) [(and (= 5 (count a)) [(or (= a "a") (= a "b"))])])} '{a {:coll-size 4700 :coll-elements {:coll-size 5 :coll-elements {:enum #{"a" "b"}}}}} '{a {:coll-size 4700 :coll-elements {:coll-size 5 :coll-elements {:enum #{"a" "b"}}}}}] '(or (= (count a) 4800) (every? [x a] (and (= 5 (count x)) (every? [y x] (or (= y "a") (= y "b")))))) [true nil {} {}] '(and (every? 
[v a] (or (= v ["a"]) (= v ["a" "b"]))) (every? [v a] (every? [s v] (or (= s "a") (= s "b") (= s "4895"))))) ['(and [(or (= a ["a"]) (= a ["a" "b"]))] [[(or (= a "a") (= a "b") (= a "4895"))]]) '{a (and [(or (= a ["a"]) (= a ["a" "b"]))] [[(or (= a "a") (= a "b") (= a "4895"))]])} '{a {:coll-elements {:coll-elements {:enum #{"a" "b" "4895"}} :enum #{["a" "b"] ["a"]}}}} '{a {:coll-elements {:coll-elements {:enum #{"a" "b" "4895"}} :enum #{["a" "b"] ["a"]}}}}] '(or (= "a" x) (= x 4900)) ['(or (= "a" x) (= x 4900)) '{x (or (= "a" x) (= x 4900))} '{x {:enum #{"a" 4900}}} '{x {:enum #{"a" 4900}}}] '(or (= {:$type :spec/A :a 5000} x) (contains? #{{:$type :spec/B} {:$type :spec/C}} x)) ['(or (= {:$type :spec/A :a 5000} x) (contains? #{{:$type :spec/C} {:$type :spec/B}} x)) '{x (or (= {:$type :spec/A :a 5000} x) (contains? #{{:$type :spec/C} {:$type :spec/B}} x))} '{x {:enum #{{:$type :spec/C} {:$type :spec/B} {:$type :spec/A :a 5000}}}} '{x {:enum #{{:$type :spec/C} {:$type :spec/B} {:$type :spec/A :a 5000}}}}] '(or (= {:$type :spec/A :a 5100} x) (= x {:$type :spec/B})) ['(or (= {:$type :spec/A :a 5100} x) (= x {:$type :spec/B})) '{x (or (= {:$type :spec/A :a 5100} x) (= x {:$type :spec/B}))} '{x {:enum #{{:$type :spec/A :a 5100} {:$type :spec/B}}}} '{x {:enum #{{:$type :spec/A :a 5100} {:$type :spec/B}}}}] '(and (<= z 10) ;; what to do if the min is greater than the max? (> z 5200)) ['(and (<= z 10) (> z 5200)) '{z (and (<= z 10) (> z 5200))} '{z {:enum #{}}} '{z {:enum #{}}}] '(and (<= z #d "1.1") ;; what to do if the min is greater than the max? (> z #d "5250.1")) ['(and (<= z #d "1.1") (> z #d "5250.1")) '{z (and (<= z #d "1.1") (> z #d "5250.1"))} '{z {:enum #{}}} '{z {:enum #{}}}] '(or (and (<= z 10) ;; sensible ranges are not combined with non-sensical ranges (> z 5300)) (and (<= z 30) (> z 20))) ['(or (and (<= z 10) (> z 5300)) (and (<= z 30) (> z 20))) '{z (or (and (<= z 10) (> z 5300)) (and (<= z 30) (> z 20)))} '{z {:ranges #{{:max 10 :max-inclusive true :min 5300 :min-inclusive false} {:max 30 :max-inclusive true :min 20 :min-inclusive false}}}} '{z {:ranges #{{:max 10 :max-inclusive true :min 5300 :min-inclusive false} {:max 30 :max-inclusive true :min 20 :min-inclusive false}}}}] '(or (and (> #d "1.5400" x) (>= x #d "0.0000")) (and (> x #d "20.0000") (< x #d "30.0000"))) ['(or (and (> #d "1.5400" x) (>= x #d "0.0000")) (and (> x #d "20.0000") (< x #d "30.0000"))) '{x (or (and (> #d "1.5400" x) (>= x #d "0.0000")) (and (> x #d "20.0000") (< x #d "30.0000")))} '{x {:ranges #{{:max #d "1.5400" :max-inclusive true :min #d "0.0000" :min-inclusive true} {:min #d "20.0000" :min-inclusive false :max #d "30.0000" :max-inclusive false}}}} '{x {:ranges #{{:max #d "1.5400" :max-inclusive true :min #d "0.0000" :min-inclusive true} {:min #d "20.0000" :min-inclusive false :max #d "30.0000" :max-inclusive false}}}}] #_'(or (and (> #d "1.5500" x) ;; inconsistent data types throw (>= x 0)) (and (> x 20) (< x 30))) #_['(or (and (> #d "1.5500" x) (>= x 0)) (and (> x 20) (< x 30))) '{x (or (and (> #d "1.5500" x) (>= x 0)) (and (> x 20) (< x 30)))} '{x {:ranges #{{:max #d "1.5500" :max-inclusive true :min 0 :min-inclusive true} {:min 20 :min-inclusive false :max 30 :max-inclusive false}}}}] '(or (= #d "5600.12" x) (= x #d "2.24") (contains? #{#d "1.00" #d "2.00"} x)) ['(or (= #d "5600.12" x) (= x #d "2.24") (contains? #{#d "2.00" #d "1.00"} x)) '{x (or (= #d "5600.12" x) (= x #d "2.24") (contains? 
#{#d "2.00" #d "1.00"} x))} '{x {:enum #{#d "5600.12" #d "2.24" #d "2.00" #d "1.00"}}} '{x {:enum #{#d "5600.12" #d "2.24" #d "2.00" #d "1.00"}}}] '(or (= 5700 x) (= x 2) (contains? #{1 2} x)) ['(or (= 5700 x) (= x 2) (contains? #{1 2} x)) '{x (or (= 5700 x) (= x 2) (contains? #{1 2} x))} '{x {:enum #{1 2 5700}}} '{x {:enum #{1 2 5700}}}] '(and (= #d "5800.12" x) (= x #d "2.24") (contains? #{#d "1.00" #d "2.00"} x)) ['(and (= #d "5800.12" x) (= x #d "2.24") (contains? #{#d "2.00" #d "1.00"} x)) '{x (and (= #d "5800.12" x) (= x #d "2.24") (contains? #{#d "2.00" #d "1.00"} x))} '{x {:enum #{}}} '{x {:enum #{}}}] '(and (= 5900 x) (= x 2) (contains? #{1 2} x)) ['(and (= 5900 x) (= x 2) (contains? #{1 2} x)) '{x (and (= 5900 x) (= x 2) (contains? #{1 2} x))} '{x {:enum #{}}} '{x {:enum #{}}}] '(if-value x (= x 6000) true) ['(if-value x (= x 6000)) '{x (if-value x (= x 6000))} '{x {:enum #{6000} :optional true}} '{x {:enum #{6000} :optional true}}] '(if-value x (or (= x "6100") (= x "no") (= x "yes")) true) ['(if-value x (or (= x "6100") (= x "no") (= x "yes"))) '{x (if-value x (or (= x "6100") (= x "no") (= x "yes")))} '{x {:enum #{"6100" "yes" "no"} :optional true}} '{x {:enum #{"6100" "yes" "no"} :optional true}}] '(if-value y (= z 6200) true) [true nil {} {}] '(if-value x (<= 6300 x) true) ['(if-value x (<= 6300 x)) '{x (if-value x (<= 6300 x))} '{x {:ranges #{{:min 6300 :min-inclusive false :optional true}}}} '{x {:ranges #{{:min 6300 :min-inclusive false :optional true}}}}] '(and (if-value x (<= 6400 x) true) (if-value x (> 0 x) true)) ['(and (if-value x (<= 6400 x)) (if-value x (> 0 x))) '{x (and (if-value x (<= 6400 x)) (if-value x (> 0 x)))} '{x {:enum #{} :optional true}} '{x {:enum #{} :optional true}}] '(or (if-value x (<= 6500 x) true) (if-value x (> 0 x) true)) [true nil {} {}] '(any? [n #{1 6600}] (= x n)) [true nil {} {}] '(and (= x $no-value) (= x 6700)) ['(and (= x $no-value) (= x 6700)) '{x (and (= x $no-value) (= x 6700))} '{x :none} '{x :none}] '(= x $no-value) ['(= x $no-value) '{x (= x $no-value)} '{x :none} '{x :none}] '(not= x $no-value) ['(not= x $no-value) '{x (not= x $no-value)} '{x :some} '{x :some}] '(and (= x $no-value) (> x 6900)) ['(and (= x $no-value) (> x 6900)) '{x (and (= x $no-value) (> x 6900))} '{x :none} '{x :none}] '(or (= x $no-value) (contains? #{7000} x)) ['(or (= x $no-value) (contains? #{7000} x)) '{x (or (= x $no-value) (contains? #{7000} x))} '{x {:enum #{7000} :optional true}} '{x {:enum #{7000} :optional true}}] '(and (not= x $no-value) (if-value x (contains? #{7100} x) true)) ['(and (not= x $no-value) (if-value x (contains? #{7100} x))) '{x (and (not= x $no-value) (if-value x (contains? 
#{7100} x)))} '{x {:enum #{7100}}} '{x {:enum #{7100}}}])) (deftest test-encode-fixed-decimals (is (= 100 (analysis/encode-fixed-decimals #d "1.00"))) (is (= '(let [x 100 y [21 0] z #{1}] 12) (analysis/encode-fixed-decimals '(let [x #d "1.00" y [#d "2.1" #d "0.0"] z #{#d "0.0001"}] #d "1.2"))))) (deftest test-find-spec-refs (is (= #{} (analysis/find-spec-refs '1))) (is (= #{{:tail :my/Spec}} (analysis/find-spec-refs '{:$type :my/Spec}))) (is (= #{:my/Spec} (analysis/find-spec-refs '(= {:$type :my/Spec} {:$type :my/Spec})))) (is (= #{{:tail :my/Other} {:tail :my/Spec}} (analysis/find-spec-refs '{:$type :my/Spec :a {:$type :my/Other}}))) (is (= #{:my/Other {:tail :my/Spec}} (analysis/find-spec-refs '{:$type :my/Spec :a (= {:$type :my/Other} {:$type :my/Other})}))) (is (= #{{:tail :my/Spec}} (analysis/find-spec-refs '(let [x {:$type :my/Spec}] x)))) (is (= #{:my/Spec {:tail :my/Other}} (analysis/find-spec-refs '(when {:$type :my/Spec} {:$type :my/Other})))) (is (= #{{:tail :my/Spec} :my/Spec} (analysis/find-spec-refs '(let [x {:$type :my/Spec}] (when x (if x x x)))))) (is (= #{{:tail :my/Spec} :my/Spec} (analysis/find-spec-refs '(let [x {:$type :my/Spec}] (when x (if x x (get x :a))))))) (is (= #{{:tail :my/Spec} :my/Spec} (analysis/find-spec-refs '(let [x {:$type :my/Spec}] (when x (if x x (get-in x [:a]))))))) (is (= #{:my/Spec} (analysis/find-spec-refs '(any? [x [{:$type :my/Spec}]] true)))) (is (= #{:my/Spec :my/Other} (analysis/find-spec-refs '(any? [x [{:$type :my/Spec}]] {:$type :my/Other})))) (is (= #{:my/Spec {:tail :my/Other}} (analysis/find-spec-refs '(refine-to {:$type :my/Spec} :my/Other)))) (is (= #{:my/Spec :my/Other} (analysis/find-spec-refs '(refines-to? {:$type :my/Spec} :my/Other)))) (is (= #{{:tail :my/Spec} {:tail :my/Other}} (analysis/find-spec-refs '(if true {:$type :my/Spec} {:$type :my/Other})))) (is (= #{{:tail :my/Spec}} (analysis/find-spec-refs '(let [x {:$type :my/Spec} y x] y)))) (is (= #{:tutorials.vending/EventHandler$v1 :tutorials.vending/State$v1 :tutorials.vending/Transition$v1 :spec/Mine :spec/Event} (analysis/find-spec-refs '(let [events [{:$type :spec/Event}]] (reduce [a (refine-to {:$type :spec/Mine} :tutorials.vending/State$v1)] [e events] (get (refine-to {:$type :tutorials.vending/EventHandler$v1 :current a :event e} :tutorials.vending/Transition$v1) :next)))))) (is (= #{:tutorials.vending/EventHandler$v1 :tutorials.vending/State$v1 {:tail :tutorials.vending/Transition$v1} :spec/Mine :spec/Event} (analysis/find-spec-refs '(let [events [{:$type :spec/Event}]] (reduce [a (refine-to {:$type :spec/Mine} :tutorials.vending/State$v1)] [e events] (refine-to {:$type :tutorials.vending/EventHandler$v1 :current a :event e} :tutorials.vending/Transition$v1)))))) (is (= #{{:tail :spec/Event} :spec/Event :tutorials.vending/State$v1 :spec/Mine} (analysis/find-spec-refs '(let [events [{:$type :spec/Event}]] (reduce [a (refine-to {:$type :spec/Mine} :tutorials.vending/State$v1)] [e events] e))))) (is (= #{{:tail :tutorials.vending/State$v1} :spec/Mine :spec/Event} (analysis/find-spec-refs '(let [events [{:$type :spec/Event}]] (reduce [a (refine-to {:$type :spec/Mine} :tutorials.vending/State$v1)] [e events] a))))) (is (= #{:spec/X {:tail :spec/Y} :spec/Z} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z} a y] a))))) (is (= #{:spec/X {:tail :spec/Y} :spec/Z} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y} z {:$type :spec/Z} a y] a)))) (is (= #{:spec/X {:tail :spec/Y} :spec/Y :spec/Z} 
(analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z} a y] y))))) (is (= #{:spec/X :spec/Y {:tail :spec/Z}} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z}] z))))) (is (= #{{:tail :spec/X} {:tail :spec/Y} :spec/Z} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z} a y] (if true a x)))))) (is (= #{{:tail :spec/X} {:tail :spec/Y} :spec/Z} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z} a y q (if true a x)] q))))) (is (= #{:spec/X :spec/Y :spec/Z {:tail :spec/Q}} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z} a y q (if true a x)] (when-value-let [q {:$type :spec/Q}] q)))))) (is (= #{{:tail :spec/X} {:tail :spec/Y} :spec/Z :spec/Q} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z} a y q (if true a x)] (when-value-let [p {:$type :spec/Q}] q)))))) (is (= #{{:tail :my/Spec}} (analysis/find-spec-refs '(if-value-let [o {:$type :my/Spec}] o {:$type :my/Spec})))) (is (= #{{:tail :my/Spec} {:tail :my/Other}} (analysis/find-spec-refs '(if-value-let [o {:$type :my/Spec}] o {:$type :my/Other})))) (is (= #{:my/Spec {:tail :my/X} {:tail :my/Other}} (analysis/find-spec-refs '(if-value-let [o {:$type :my/Spec}] {:$type :my/X} {:$type :my/Other})))) (is (= #{{:tail :my/Spec}} (analysis/find-spec-refs '(when-value-let [o {:$type :my/Spec}] o)))) (is (= #{:my/Spec {:tail :my/Other}} (analysis/find-spec-refs '(when-value-let [o {:$type :my/Spec}] {:$type :my/Other})))) (is (= #{:my/Other} (analysis/find-spec-refs-but-tail :my/Spec '(if true {:$type :my/Spec} {:$type :my/Other}))))) (deftest test-cyclical-dependencies (let [spec-map (var-types/to-halite-spec-env {:spec/Destination {:fields {:d :Integer}} :spec/Path1 {:refines-to {:spec/Destination {:name "refine_to_Destination" :expr '{:$type :spec/Destination :d 1}}}} :spec/Path2 {:refines-to {:spec/Destination {:name "refine_to_Destination" :expr '{:$type :spec/Destination :d 2}}}} :spec/Start {:refines-to {:spec/Path1 {:name "refine_to_Path1" :expr '{:$type :spec/Path1}} :spec/Path2 {:name "refine_to_Path2" :expr '{:$type :spec/Path2}}}}})] (is (= {:spec/Path1 #{:spec/Destination} :spec/Path2 #{:spec/Destination} :spec/Start #{:spec/Path1 :spec/Path2}} (#'analysis/get-spec-map-dependencies spec-map))) (is (nil? (analysis/find-cycle-in-dependencies spec-map)))) (let [spec-map (var-types/to-halite-spec-env {:spec/A {:fields {:b :spec/B}} :spec/B {:fields {:a :spec/A}}})] (is (= {:spec/A #{:spec/B} :spec/B #{:spec/A}} (#'analysis/get-spec-map-dependencies spec-map))) (is (= [:spec/A :spec/B :spec/A] (analysis/find-cycle-in-dependencies spec-map))))) ;; (run-tests)
null
https://raw.githubusercontent.com/Viasat/halite/5a434fa2276f5f2654c4d91045595ae5bbc6580e/test/com/viasat/halite/test_analysis.clj
clojure
this expression references no fields, so no field constraints are extracted field assignments come out as enum values multiple fields can be teased apart as long as the clauses are independent multiple ands can be walked through even if there are constraints that are not extraced partial ranges are extracted arguments can be in either order expressions over multiple fields are not extracted fields of any type are extracted into enums instance values are pulled out into enums no "destructuring" of collections no navigating through locals to find literal values set containment translated into enums no set "deconstruction" many fields can be teased apart as long as the clauses are independent 'and' sub-expressions this x does not collide with the outer x, this clause is ignored since it is in 'and' this is folded into the rest of the range restriction this is ignored, but it is in an 'and', so the other expressions remain extra 'and' at root makes no difference empty because of the range if enum values fall in any of the alternate ranges they are included or at the root with mixed enum and ranges foils the logic to pull out direct field constraints an extra clause in an 'or' foils attempts to lift out mandatory values for x only binary '=' are extracted what to do if the min is greater than the max? what to do if the min is greater than the max? sensible ranges are not combined with non-sensical ranges inconsistent data types throw (run-tests)
Copyright ( c ) 2022 Viasat , Inc. Licensed under the MIT license (ns com.viasat.halite.test-analysis (:require [clojure.test :refer :all] [com.viasat.halite.analysis :as analysis] [com.viasat.halite.envs :as envs] [com.viasat.halite.var-types :as var-types] [schema.core :as s] [schema.test :refer [validate-schemas]])) (set! *warn-on-reflection* true) (clojure.test/use-fixtures :once validate-schemas) (deftest test-schema (s/check analysis/EnumConstraint {:enum #{100}}) (s/check analysis/Range {:min 100 :min-inclusive true}) (s/check analysis/Range {:max 100 :min-inclusive true}) (s/check analysis/Range {:max 100 :max-inclusive true}) (s/check analysis/Range {:min 1 :min-inclusive false :max 100 :max-inclusive true}) (s/check analysis/RangeConstraint {:ranges #{{:min 100 :min-inclusive true}}}) (s/check analysis/CollectionConstraint {:coll-size 5}) (s/check analysis/CollectionConstraint {:coll-elements {:enum #{1 2}}}) (s/check analysis/CollectionConstraint {:coll-size 5 :coll-elements {:enum #{1 2}}})) (deftest test-gather-free-vars (are [v expected] (= expected (analysis/gather-free-vars v)) 1 #{} 'x '#{x} '(+ 1 x y) '#{x y} '(let [x a y x a 1 z a] (+ x y z b)) '#{a b} '(and (or (= (+ 1 z)) b)) '#{z b} '{:$type :my/S$v1 :x 1 :y a :z (and b {:$type :my/T$v1 :q c})} '#{a b c} '[1 2 x] '#{x} '#{a b 3} '#{a b} '(if x y z) '#{x y z} '(let [x a y x a 1 z a p [#{[#{c}]}]] (if (+ x y z b) (every? [x [a a d]] (+ x e)))) '#{a b c d e} '(every? [x [1 2 3]] (+ x 1)) #{})) (deftest test-replace-free-vars (are [v var-map expected] (= expected (analysis/replace-free-vars var-map v)) true {} true 'x {} 'x 'x '{x y} 'y '(+ x 1) {} '(+ x 1) '(+ x (let [x 1] x)) '{x y} '(+ y (let [x 1] x)) '[x #{y}] '{x a y b} '[a #{b}] '(+ x (let [x 1 a y] (+ x a b z))) '{x p y q z r} '(+ p (let [x 1 a q] (+ x a b r))) '(+ x a b z) '{x p y q z r} '(+ p a b r) '(if-value x 1 0) '{x a} '1 '(if-value x 1 0) '{x 99} '1 '(if-value x 1 0) '{x (when-value a a)} '(if-value a 1 0) '(if-value x 1 0) '{x (when-value a (+ a 1))} '(if-value-let [x (when-value a (+ a 1))] 1 0) '(if-value x 1 0) '{x (if-value a (when-value b b) c)} '(if-value-let [x (if-value a (when-value b b) c)] 1 0) '(if-value x x y) '{x a y b} 'a '(if-value x x y) '{x 99 y b} '99 '(if-value x x y) '{x (when-value a a) y b} '(if-value a a b) '(if-value x x y) '{x (when-value a (+ a 1)) y b} '(if-value-let [x (when-value a (+ a 1))] x b) '(if-value x x y) '{x (if-value a (when-value b b) c) y b} '(if-value-let [x (if-value a (when-value b b) c)] x b) '(when-value x y) '{x a y b} 'b '(when-value x y) '{x 99 y b} 'b '(when-value x y) '{x (when-value a a) y b} '(when-value a b) '(when-value x y) '{x (when-value a (+ a 1)) y b} '(when-value-let [x (when-value a (+ a 1))] b) '(when-value x y) '{x (if-value a (when-value b b) c) y b} '(when-value-let [x (if-value a (when-value b b) c)] b) '(when-value x 1) '{x a} '1 '(when-value x 1) '{x 99} '1 '(when-value x 1) '{x (when-value a a)} '(when-value a 1) '(when-value x 1) '{x (when-value a (+ a 1))} '(when-value-let [x (when-value a (+ a 1))] 1) '(when-value x 1) '{x (if-value a (when-value b b) c)} '(when-value-let [x (if-value a (when-value b b) c)] 1))) (deftest test-gather-tlfc (are [v x] (= x [(analysis/gather-tlfc v) (analysis/sort-tlfc (analysis/gather-tlfc v)) (binding [analysis/*max-enum-size* 2] (analysis/compute-tlfc-map v)) (binding [analysis/*max-enum-size* 10] (analysis/compute-tlfc-map v))]) true [true nil {} {}] false) ['true nil {} {}] ['(= x 100) '{x (= x 100)} '{x {:enum #{100}}} '{x {:enum #{100}}}] '(= x 
(+ 200 3)) [true nil {} {}] (= 2 y)) ['(and (= x 300) (= 2 y)) '{x (= x 300) y (= 2 y)} '{x {:enum #{300}} y {:enum #{2}}} '{x {:enum #{300}} y {:enum #{2}}}] (and (= x 400) (= 2 y))) ['(and (= x 400) (= 2 y)) '{x (= x 400) y (= 2 y)} '{x {:enum #{400}} y {:enum #{2}}} '{x {:enum #{400}} y {:enum #{2}}}] ['(< x 500) '{x (< x 500)} '{x {:ranges #{{:max 500 :max-inclusive false}}}} '{x {:ranges #{{:max 500 :max-inclusive false}}}}] ['(< 510 x) '{x (< 510 x)} '{x {:ranges #{{:min 510 :min-inclusive true}}}} '{x {:ranges #{{:min 510 :min-inclusive true}}}}] [true nil {} {}] ['(= x [1 600]) '{x (= x [1 600])} '{x {:enum #{[1 600]}}} '{x {:enum #{[1 600]}}}] ['(= x {:$type :my/Spec}) '{x (= x {:$type :my/Spec})} '{x {:enum #{{:$type :my/Spec}}}} '{x {:enum #{{:$type :my/Spec}}}}] [true nil {} {}] (< x y)) [true nil {} {}] '(< x (let [y 900] y)) [true nil {} {}] ['(contains? #{1000 3 2} x) '{x (contains? #{1000 3 2} x)} '{x {:enum #{1000 3 2}}} '{x {:enum #{1000 3 2}}}] [true nil {} {}] (= y 1) (and (<= z 20) (> z 10))) ['(and (contains? #{1100 3 2} x) (= y 1) (<= z 20) (> z 10)) '{x (contains? #{1100 3 2} x), y (= y 1), z (and (<= z 20) (> z 10))} '{x {:enum #{1100 3 2}} y {:enum #{1}} z {:ranges #{{:max 20 :max-inclusive true :min 10 :min-inclusive false}}}} '{x {:enum #{1100 3 2}} y {:enum #{1}} z {:ranges #{{:max 20 :max-inclusive true :min 10 :min-inclusive false}}}}] '(and (contains? #{1200 2 3} x) (or a b) (and (= y 1) (and (<= z 20) (> z 10))) q) ['(and (contains? #{1200 3 2} x) (= y 1) (<= z 20) (> z 10)) '{x (contains? #{1200 3 2} x) y (= y 1), z (and (<= z 20) (> z 10))} '{x {:enum #{1200 3 2}} y {:enum #{1}} z {:ranges #{{:max 20 :max-inclusive true :min 10 :min-inclusive false}}}} '{x {:enum #{1200 3 2}} y {:enum #{1}} z {:ranges #{{:max 20 :max-inclusive true :min 10 :min-inclusive false}}}}] '(or (= x 1300) (= x 2)) ['(or (= x 1300) (= x 2)) '{x (or (= x 1300) (= x 2))} '{x {:enum #{2 1300}}} '{x {:enum #{2 1300}}}] '(and (or (= x 1310) (= x 2)) (= y 3)) ['(and (or (= x 1310) (= x 2)) (= y 3)) '{x (or (= x 1310) (= x 2)) y (= y 3)} '{x {:enum #{1310 2}} y {:enum #{3}}} '{x {:enum #{1310 2}} y {:enum #{3}}}] '(and (contains? #{1400 2 3} x) (or a b) (every? [c [1 2 3]] (= c 1)) (= z 1) (= y 1) (<= z 20) (> z 10) q) ['(and (contains? #{1400 3 2} x) (= z 1) (= y 1) (<= z 20) (> z 10)) '{x (contains? 
#{1400 3 2} x) z (and (= z 1) (<= z 20) (> z 10)), y (= y 1)} '{x {:enum #{1400 3 2}} z {:enum #{}} y {:enum #{1}}} '{x {:enum #{1400 3 2}} z {:enum #{}} y {:enum #{1}}}] '(= [1 1500] [x 1500]) [true nil {} {}] '(= #{x 1600} #{1 1600}) [true nil {} {}] '(or (and (>= x 3) (< x 12)) (and (>= x 20) (< x 1700))) ['(or (and (>= x 3) (< x 12)) (and (>= x 20) (< x 1700))) '{x (or (and (>= x 3) (< x 12)) (and (>= x 20) (< x 1700)))} '{x {:ranges #{{:min 3 :min-inclusive true :max 12 :max-inclusive false} {:min 20 :min-inclusive true :max 1700 :max-inclusive false}}}} '{x {:ranges #{{:min 3 :min-inclusive true :max 12 :max-inclusive false} {:min 20 :min-inclusive true :max 1700 :max-inclusive false}}}}] '(or (and (>= x 3) (< x 13) (<= x 9) (< x 12) (> 14 x)) (and (>= x 20) (< x 1800))) ['(or (and (>= x 3) (< x 13) (<= x 9) (< x 12) (> 14 x)) (and (>= x 20) (< x 1800))) '{x (or (and (>= x 3) (< x 13) (<= x 9) (< x 12) (> 14 x)) (and (>= x 20) (< x 1800)))} '{x {:ranges #{{:min 3 :min-inclusive true :max 9 :max-inclusive true} {:min 20 :min-inclusive true :max 1800 :max-inclusive false}}}} '{x {:ranges #{{:min 3 :min-inclusive true :max 9 :max-inclusive true} {:min 20 :min-inclusive true :max 1800 :max-inclusive false}}}}] '(or (< x 1) (= x 2) (< x 1900)) ['(or (< x 1) (= x 2) (< x 1900)) '{x (or (< x 1) (= x 2) (< x 1900))} {} {}] '(or (< x 1) (< x 2000)) ['(or (< x 1) (< x 2000)) '{x (or (< x 1) (< x 2000))} '{x {:ranges #{{:max 2000 :max-inclusive false}}}} '{x {:ranges #{{:max 2000 :max-inclusive false}}}}] '(or (< x 2100) (<= x 2100)) ['(or (< x 2100) (<= x 2100)) '{x (or (< x 2100) (<= x 2100))} '{x {:ranges #{{:max 2100 :max-inclusive true}}}} '{x {:ranges #{{:max 2100 :max-inclusive true}}}}] '(or (< x 2200) (>= 2200 x)) ['(or (< x 2200) (>= 2200 x)) '{x (or (< x 2200) (>= 2200 x))} '{x {:ranges #{{:max 2200 :max-inclusive false}}}} '{x {:ranges #{{:max 2200 :max-inclusive false}}}}] '(and (< x 2300) (>= 2300 x)) ['(and (< x 2300) (>= 2300 x)) '{x (and (< x 2300) (>= 2300 x))} '{x {:ranges #{{:max 2300 :max-inclusive false}}}} '{x {:ranges #{{:max 2300 :max-inclusive false}}}}] '(or (< x 2400) (<= x 2400)) ['(or (< x 2400) (<= x 2400)) '{x (or (< x 2400) (<= x 2400))} '{x {:ranges #{{:max 2400 :max-inclusive true}}}} '{x {:ranges #{{:max 2400 :max-inclusive true}}}}] '(and (< x 2500) (<= x 2500)) ['(and (< x 2500) (<= x 2500)) '{x (and (< x 2500) (<= x 2500))} '{x {:ranges #{{:max 2500 :max-inclusive false}}}} '{x {:ranges #{{:max 2500 :max-inclusive false}}}}] '(or (> x 2600) (>= x 2600)) ['(or (> x 2600) (>= x 2600)) '{x (or (> x 2600) (>= x 2600))} '{x {:ranges #{{:min 2600 :min-inclusive true}}}} '{x {:ranges #{{:min 2600 :min-inclusive true}}}}] '(and (> x 2700) (>= x 2700)) ['(and (> x 2700) (>= x 2700)) '{x (and (> x 2700) (>= x 2700))} '{x {:ranges #{{:min 2700 :min-inclusive false}}}} '{x {:ranges #{{:min 2700 :min-inclusive false}}}}] '(or (and (> x 2800) (<= x 2850)) (and (> x 2840) (<= x 2860))) ['(or (and (> x 2800) (<= x 2850)) (and (> x 2840) (<= x 2860))) '{x (or (and (> x 2800) (<= x 2850)) (and (> x 2840) (<= x 2860)))} '{x {:ranges #{{:min 2800 :min-inclusive false :max 2860 :max-inclusive true}}}} '{x {:ranges #{{:min 2800 :min-inclusive false :max 2860 :max-inclusive true}}}}] '(or (and (> x 2900) (< x 2950)) (and (>= x 2950) (<= x 2960))) ['(or (and (> x 2900) (< x 2950)) (and (>= x 2950) (<= x 2960))) '{x (or (and (> x 2900) (< x 2950)) (and (>= x 2950) (<= x 2960)))} '{x {:ranges #{{:min 2900 :min-inclusive false :max 2960 :max-inclusive true}}}} '{x {:ranges #{{:min 
2900 :min-inclusive false :max 2960 :max-inclusive true}}}}] '(or (and (> x 3000) (<= x 3050)) (and (> x 3040) (<= x 3060)) (> x 1)) ['(or (and (> x 3000) (<= x 3050)) (and (> x 3040) (<= x 3060)) (> x 1)) '{x (or (and (> x 3000) (<= x 3050)) (and (> x 3040) (<= x 3060)) (> x 1))} '{x {:ranges #{{:min 1 :min-inclusive false :max 3060 :max-inclusive true}}}} '{x {:ranges #{{:min 1 :min-inclusive false :max 3060 :max-inclusive true}}}}] '(and (> x 1) (> x 6)) (or (and (> x 3100) (<= x 3150)) (and (> x 3140) (<= x 3160)))) ['(and (> x 1) (or (and (> x 3100) (<= x 3150)) (and (> x 3140) (<= x 3160)))) '{x (and (> x 1) (or (and (> x 3100) (<= x 3150)) (and (> x 3140) (<= x 3160))))} '{x {:ranges #{{:min 3100 :min-inclusive false :max 3160 :max-inclusive true}}}} '{x {:ranges #{{:min 3100 :min-inclusive false :max 3160 :max-inclusive true}}}}] (or (and (> x 3200) (<= x 3250)) (and (> x 3240) (<= x 3260)))) ['(and (> x 3225) (or (and (> x 3200) (<= x 3250)) (and (> x 3240) (<= x 3260)))) '{x (and (> x 3225) (or (and (> x 3200) (<= x 3250)) (and (> x 3240) (<= x 3260))))} '{x {:ranges #{{:min 3225 :min-inclusive false :max 3260 :max-inclusive true}}}} '{x {:ranges #{{:min 3225 :min-inclusive false :max 3260 :max-inclusive true}}}}] '(and (> x 3275) (= b x) (or (and (> x 3270) (<= x 3280)) (and (> x 3285) (<= x 3295)) (and (> x 3260) (<= x 3265))) (< x 3290)) ['(and (> x 3275) (or (and (> x 3270) (<= x 3280)) (and (> x 3285) (<= x 3295)) (and (> x 3260) (<= x 3265))) (< x 3290)) '{x (and (> x 3275) (or (and (> x 3270) (<= x 3280)) (and (> x 3285) (<= x 3295)) (and (> x 3260) (<= x 3265))) (< x 3290))} '{x {:ranges #{{:min 3275 :min-inclusive false :max 3280 :max-inclusive true} {:min 3285 :min-inclusive false :max 3290 :max-inclusive false}}}} '{x {:enum #{3276 3277 3278 3279 3280 3286 3287 3288 3289}}}] '(and (> x #d "32.75") (= b x) (or (and (> x #d "32.70") (<= x #d "32.80")) (and (> x #d "32.85") (<= x #d "32.95")) (and (> x #d "32.60") (<= x #d "32.65"))) (< x #d "32.90")) ['(and (> x #d "32.75") (or (and (> x #d "32.70") (<= x #d "32.80")) (and (> x #d "32.85") (<= x #d "32.95")) (and (> x #d "32.60") (<= x #d "32.65"))) (< x #d "32.90")) '{x (and (> x #d "32.75") (or (and (> x #d "32.70") (<= x #d "32.80")) (and (> x #d "32.85") (<= x #d "32.95")) (and (> x #d "32.60") (<= x #d "32.65"))) (< x #d "32.90"))} '{x {:ranges #{{:min #d "32.75" :min-inclusive false :max #d "32.80" :max-inclusive true} {:min #d "32.85" :min-inclusive false :max #d "32.90" :max-inclusive false}}}} '{x {:enum #{#d "32.76" #d "32.77" #d "32.78" #d "32.79" #d "32.80" #d "32.86" #d "32.87" #d "32.88" #d "32.89"}}}] (and (= x 3325) (or (and (> x 3300) (<= x 3350)) (and (> x 3340) (<= x 3360))))) ['(and (= x 3325) (or (and (> x 3300) (<= x 3350)) (and (> x 3340) (<= x 3360)))) '{x (and (= x 3325) (or (and (> x 3300) (<= x 3350)) (and (> x 3340) (<= x 3360))))} '{x {:enum #{3325}}} '{x {:enum #{3325}}}] '(and (= x 0) (or (and (> x 3400) (<= x 3450)) (and (> x 3440) (<= x 3460)))) ['(and (= x 0) (or (and (> x 3400) (<= x 3450)) (and (> x 3440) (<= x 3460)))) '{x (and (= x 0) (or (and (> x 3400) (<= x 3450)) (and (> x 3440) (<= x 3460))))} '{x {:enum #{}}}] '(and (= x 3500) (= x 3500) (< x 16) (>= x 10)) ['(and (= x 3500) (= x 3500) (< x 16) (>= x 10)) '{x (and (= x 3500) (= x 3500) (< x 16) (>= x 10))} '{x {:enum #{}}} '{x {:enum #{}}}] '(and (< x 3501) (or (= x 10) (= x 20)) (> x 16)) ['(and (< x 3501) (or (= x 10) (= x 20)) (> x 16)) '{x (and (< x 3501) (or (= x 10) (= x 20)) (> x 16))} '{x {:enum #{20}}} '{x {:enum 
#{20}}}] '(and (or (= x 10) (= x 20) (= x 0)) (or (and (< x 3550) (> x 16)) (and (< x 15) (> x 5)))) ['(and (or (= x 10) (= x 20) (= x 0)) (or (and (< x 3550) (> x 16)) (and (< x 15) (> x 5)))) '{x (and (or (= x 10) (= x 20) (= x 0)) (or (and (< x 3550) (> x 16)) (and (< x 15) (> x 5))))} '{x {:enum #{20 10}}}] (or (= x 3600) (= x 1)) (and (< x 16) (>= x 10))) [true nil {} {}] '(or (and (> x 3700)) (and (>= x 3750) (<= x 3760))) [true nil {} {}] [true nil {} {}] '(every? [x a] (= x 3900)) ['[(= a 3900)] '{a [(= a 3900)]} '{a {:coll-elements {:enum #{3900}}}} '{a {:coll-elements {:enum #{3900}}}}] '(every? [x a] (and (= x 4000) y)) [true nil {} {}] '(every? [x a] (every? [y x] (= y 4100))) ['[[(= a 4100)]] '{a [[(= a 4100)]]} '{a {:coll-elements {:coll-elements {:enum #{4100}}}}} '{a {:coll-elements {:coll-elements {:enum #{4100}}}}}] '(every? [x a] (every? [y x] (> y 4150))) ['[[(> a 4150)]] '{a [[(> a 4150)]]} '{a {:coll-elements {:coll-elements {:ranges #{{:min 4150 :min-inclusive false}}}}}} '{a {:coll-elements {:coll-elements {:ranges #{{:min 4150 :min-inclusive false}}}}}}] '(every? [x a] (every? [y x] (or (and (> y 4200) (<= y 4210)) (and (< y 5) (> y 0))))) ['[[(or (and (> a 4200) (<= a 4210)) (and (< a 5) (> a 0)))]] '{a [[(or (and (> a 4200) (<= a 4210)) (and (< a 5) (> a 0)))]]} '{a {:coll-elements {:coll-elements {:ranges #{{:max 5 :max-inclusive false :min 0 :min-inclusive false} {:min 4200 :min-inclusive false :max 4210 :max-inclusive true}}}}}} '{a {:coll-elements {:coll-elements {:ranges #{{:max 5 :max-inclusive false :min 0 :min-inclusive false} {:min 4200 :min-inclusive false :max 4210 :max-inclusive true}}}}}}] '(= x "4300") ['(= x "4300") '{x (= x "4300")} '{x {:enum #{"4300"}}} '{x {:enum #{"4300"}}}] '(contains? #{"4400" "a" "b"} x) ['(contains? #{"a" "b" "4400"} x) '{x (contains? #{"a" "b" "4400"} x)} '{x {:enum #{"a" "b" "4400"}}} '{x {:enum #{"a" "b" "4400"}}}] '(= 4500 (count x)) ['(= 4500 (count x)) '{x (= 4500 (count x))} '{x {:coll-size 4500}} '{x {:coll-size 4500}}] '(= (count x) 4510) ['(= (count x) 4510) '{x (= (count x) 4510)} '{x {:coll-size 4510}} '{x {:coll-size 4510}}] '(every? [x a] (= 4600 (count x))) ['[(= 4600 (count a))] '{a [(= 4600 (count a))]} '{a {:coll-elements {:coll-size 4600}}} '{a {:coll-elements {:coll-size 4600}}}] '(and (= (count a) 4700) (every? [x a] (and (= 5 (count x)) (every? [y x] (or (= y "a") (= y "b")))))) ['(and (= (count a) 4700) [(and (= 5 (count a)) [(or (= a "a") (= a "b"))])]) '{a (and (= (count a) 4700) [(and (= 5 (count a)) [(or (= a "a") (= a "b"))])])} '{a {:coll-size 4700 :coll-elements {:coll-size 5 :coll-elements {:enum #{"a" "b"}}}}} '{a {:coll-size 4700 :coll-elements {:coll-size 5 :coll-elements {:enum #{"a" "b"}}}}}] '(or (= (count a) 4800) (every? [x a] (and (= 5 (count x)) (every? [y x] (or (= y "a") (= y "b")))))) [true nil {} {}] '(and (every? [v a] (or (= v ["a"]) (= v ["a" "b"]))) (every? [v a] (every? [s v] (or (= s "a") (= s "b") (= s "4895"))))) ['(and [(or (= a ["a"]) (= a ["a" "b"]))] [[(or (= a "a") (= a "b") (= a "4895"))]]) '{a (and [(or (= a ["a"]) (= a ["a" "b"]))] [[(or (= a "a") (= a "b") (= a "4895"))]])} '{a {:coll-elements {:coll-elements {:enum #{"a" "b" "4895"}} :enum #{["a" "b"] ["a"]}}}} '{a {:coll-elements {:coll-elements {:enum #{"a" "b" "4895"}} :enum #{["a" "b"] ["a"]}}}}] '(or (= "a" x) (= x 4900)) ['(or (= "a" x) (= x 4900)) '{x (or (= "a" x) (= x 4900))} '{x {:enum #{"a" 4900}}} '{x {:enum #{"a" 4900}}}] '(or (= {:$type :spec/A :a 5000} x) (contains? 
#{{:$type :spec/B} {:$type :spec/C}} x)) ['(or (= {:$type :spec/A :a 5000} x) (contains? #{{:$type :spec/C} {:$type :spec/B}} x)) '{x (or (= {:$type :spec/A :a 5000} x) (contains? #{{:$type :spec/C} {:$type :spec/B}} x))} '{x {:enum #{{:$type :spec/C} {:$type :spec/B} {:$type :spec/A :a 5000}}}} '{x {:enum #{{:$type :spec/C} {:$type :spec/B} {:$type :spec/A :a 5000}}}}] '(or (= {:$type :spec/A :a 5100} x) (= x {:$type :spec/B})) ['(or (= {:$type :spec/A :a 5100} x) (= x {:$type :spec/B})) '{x (or (= {:$type :spec/A :a 5100} x) (= x {:$type :spec/B}))} '{x {:enum #{{:$type :spec/A :a 5100} {:$type :spec/B}}}} '{x {:enum #{{:$type :spec/A :a 5100} {:$type :spec/B}}}}] (> z 5200)) ['(and (<= z 10) (> z 5200)) '{z (and (<= z 10) (> z 5200))} '{z {:enum #{}}} '{z {:enum #{}}}] (> z #d "5250.1")) ['(and (<= z #d "1.1") (> z #d "5250.1")) '{z (and (<= z #d "1.1") (> z #d "5250.1"))} '{z {:enum #{}}} '{z {:enum #{}}}] (> z 5300)) (and (<= z 30) (> z 20))) ['(or (and (<= z 10) (> z 5300)) (and (<= z 30) (> z 20))) '{z (or (and (<= z 10) (> z 5300)) (and (<= z 30) (> z 20)))} '{z {:ranges #{{:max 10 :max-inclusive true :min 5300 :min-inclusive false} {:max 30 :max-inclusive true :min 20 :min-inclusive false}}}} '{z {:ranges #{{:max 10 :max-inclusive true :min 5300 :min-inclusive false} {:max 30 :max-inclusive true :min 20 :min-inclusive false}}}}] '(or (and (> #d "1.5400" x) (>= x #d "0.0000")) (and (> x #d "20.0000") (< x #d "30.0000"))) ['(or (and (> #d "1.5400" x) (>= x #d "0.0000")) (and (> x #d "20.0000") (< x #d "30.0000"))) '{x (or (and (> #d "1.5400" x) (>= x #d "0.0000")) (and (> x #d "20.0000") (< x #d "30.0000")))} '{x {:ranges #{{:max #d "1.5400" :max-inclusive true :min #d "0.0000" :min-inclusive true} {:min #d "20.0000" :min-inclusive false :max #d "30.0000" :max-inclusive false}}}} '{x {:ranges #{{:max #d "1.5400" :max-inclusive true :min #d "0.0000" :min-inclusive true} {:min #d "20.0000" :min-inclusive false :max #d "30.0000" :max-inclusive false}}}}] (>= x 0)) (and (> x 20) (< x 30))) #_['(or (and (> #d "1.5500" x) (>= x 0)) (and (> x 20) (< x 30))) '{x (or (and (> #d "1.5500" x) (>= x 0)) (and (> x 20) (< x 30)))} '{x {:ranges #{{:max #d "1.5500" :max-inclusive true :min 0 :min-inclusive true} {:min 20 :min-inclusive false :max 30 :max-inclusive false}}}}] '(or (= #d "5600.12" x) (= x #d "2.24") (contains? #{#d "1.00" #d "2.00"} x)) ['(or (= #d "5600.12" x) (= x #d "2.24") (contains? #{#d "2.00" #d "1.00"} x)) '{x (or (= #d "5600.12" x) (= x #d "2.24") (contains? #{#d "2.00" #d "1.00"} x))} '{x {:enum #{#d "5600.12" #d "2.24" #d "2.00" #d "1.00"}}} '{x {:enum #{#d "5600.12" #d "2.24" #d "2.00" #d "1.00"}}}] '(or (= 5700 x) (= x 2) (contains? #{1 2} x)) ['(or (= 5700 x) (= x 2) (contains? #{1 2} x)) '{x (or (= 5700 x) (= x 2) (contains? #{1 2} x))} '{x {:enum #{1 2 5700}}} '{x {:enum #{1 2 5700}}}] '(and (= #d "5800.12" x) (= x #d "2.24") (contains? #{#d "1.00" #d "2.00"} x)) ['(and (= #d "5800.12" x) (= x #d "2.24") (contains? #{#d "2.00" #d "1.00"} x)) '{x (and (= #d "5800.12" x) (= x #d "2.24") (contains? #{#d "2.00" #d "1.00"} x))} '{x {:enum #{}}} '{x {:enum #{}}}] '(and (= 5900 x) (= x 2) (contains? #{1 2} x)) ['(and (= 5900 x) (= x 2) (contains? #{1 2} x)) '{x (and (= 5900 x) (= x 2) (contains? 
#{1 2} x))} '{x {:enum #{}}} '{x {:enum #{}}}] '(if-value x (= x 6000) true) ['(if-value x (= x 6000)) '{x (if-value x (= x 6000))} '{x {:enum #{6000} :optional true}} '{x {:enum #{6000} :optional true}}] '(if-value x (or (= x "6100") (= x "no") (= x "yes")) true) ['(if-value x (or (= x "6100") (= x "no") (= x "yes"))) '{x (if-value x (or (= x "6100") (= x "no") (= x "yes")))} '{x {:enum #{"6100" "yes" "no"} :optional true}} '{x {:enum #{"6100" "yes" "no"} :optional true}}] '(if-value y (= z 6200) true) [true nil {} {}] '(if-value x (<= 6300 x) true) ['(if-value x (<= 6300 x)) '{x (if-value x (<= 6300 x))} '{x {:ranges #{{:min 6300 :min-inclusive false :optional true}}}} '{x {:ranges #{{:min 6300 :min-inclusive false :optional true}}}}] '(and (if-value x (<= 6400 x) true) (if-value x (> 0 x) true)) ['(and (if-value x (<= 6400 x)) (if-value x (> 0 x))) '{x (and (if-value x (<= 6400 x)) (if-value x (> 0 x)))} '{x {:enum #{} :optional true}} '{x {:enum #{} :optional true}}] '(or (if-value x (<= 6500 x) true) (if-value x (> 0 x) true)) [true nil {} {}] '(any? [n #{1 6600}] (= x n)) [true nil {} {}] '(and (= x $no-value) (= x 6700)) ['(and (= x $no-value) (= x 6700)) '{x (and (= x $no-value) (= x 6700))} '{x :none} '{x :none}] '(= x $no-value) ['(= x $no-value) '{x (= x $no-value)} '{x :none} '{x :none}] '(not= x $no-value) ['(not= x $no-value) '{x (not= x $no-value)} '{x :some} '{x :some}] '(and (= x $no-value) (> x 6900)) ['(and (= x $no-value) (> x 6900)) '{x (and (= x $no-value) (> x 6900))} '{x :none} '{x :none}] '(or (= x $no-value) (contains? #{7000} x)) ['(or (= x $no-value) (contains? #{7000} x)) '{x (or (= x $no-value) (contains? #{7000} x))} '{x {:enum #{7000} :optional true}} '{x {:enum #{7000} :optional true}}] '(and (not= x $no-value) (if-value x (contains? #{7100} x) true)) ['(and (not= x $no-value) (if-value x (contains? #{7100} x))) '{x (and (not= x $no-value) (if-value x (contains? #{7100} x)))} '{x {:enum #{7100}}} '{x {:enum #{7100}}}])) (deftest test-encode-fixed-decimals (is (= 100 (analysis/encode-fixed-decimals #d "1.00"))) (is (= '(let [x 100 y [21 0] z #{1}] 12) (analysis/encode-fixed-decimals '(let [x #d "1.00" y [#d "2.1" #d "0.0"] z #{#d "0.0001"}] #d "1.2"))))) (deftest test-find-spec-refs (is (= #{} (analysis/find-spec-refs '1))) (is (= #{{:tail :my/Spec}} (analysis/find-spec-refs '{:$type :my/Spec}))) (is (= #{:my/Spec} (analysis/find-spec-refs '(= {:$type :my/Spec} {:$type :my/Spec})))) (is (= #{{:tail :my/Other} {:tail :my/Spec}} (analysis/find-spec-refs '{:$type :my/Spec :a {:$type :my/Other}}))) (is (= #{:my/Other {:tail :my/Spec}} (analysis/find-spec-refs '{:$type :my/Spec :a (= {:$type :my/Other} {:$type :my/Other})}))) (is (= #{{:tail :my/Spec}} (analysis/find-spec-refs '(let [x {:$type :my/Spec}] x)))) (is (= #{:my/Spec {:tail :my/Other}} (analysis/find-spec-refs '(when {:$type :my/Spec} {:$type :my/Other})))) (is (= #{{:tail :my/Spec} :my/Spec} (analysis/find-spec-refs '(let [x {:$type :my/Spec}] (when x (if x x x)))))) (is (= #{{:tail :my/Spec} :my/Spec} (analysis/find-spec-refs '(let [x {:$type :my/Spec}] (when x (if x x (get x :a))))))) (is (= #{{:tail :my/Spec} :my/Spec} (analysis/find-spec-refs '(let [x {:$type :my/Spec}] (when x (if x x (get-in x [:a]))))))) (is (= #{:my/Spec} (analysis/find-spec-refs '(any? [x [{:$type :my/Spec}]] true)))) (is (= #{:my/Spec :my/Other} (analysis/find-spec-refs '(any? 
[x [{:$type :my/Spec}]] {:$type :my/Other})))) (is (= #{:my/Spec {:tail :my/Other}} (analysis/find-spec-refs '(refine-to {:$type :my/Spec} :my/Other)))) (is (= #{:my/Spec :my/Other} (analysis/find-spec-refs '(refines-to? {:$type :my/Spec} :my/Other)))) (is (= #{{:tail :my/Spec} {:tail :my/Other}} (analysis/find-spec-refs '(if true {:$type :my/Spec} {:$type :my/Other})))) (is (= #{{:tail :my/Spec}} (analysis/find-spec-refs '(let [x {:$type :my/Spec} y x] y)))) (is (= #{:tutorials.vending/EventHandler$v1 :tutorials.vending/State$v1 :tutorials.vending/Transition$v1 :spec/Mine :spec/Event} (analysis/find-spec-refs '(let [events [{:$type :spec/Event}]] (reduce [a (refine-to {:$type :spec/Mine} :tutorials.vending/State$v1)] [e events] (get (refine-to {:$type :tutorials.vending/EventHandler$v1 :current a :event e} :tutorials.vending/Transition$v1) :next)))))) (is (= #{:tutorials.vending/EventHandler$v1 :tutorials.vending/State$v1 {:tail :tutorials.vending/Transition$v1} :spec/Mine :spec/Event} (analysis/find-spec-refs '(let [events [{:$type :spec/Event}]] (reduce [a (refine-to {:$type :spec/Mine} :tutorials.vending/State$v1)] [e events] (refine-to {:$type :tutorials.vending/EventHandler$v1 :current a :event e} :tutorials.vending/Transition$v1)))))) (is (= #{{:tail :spec/Event} :spec/Event :tutorials.vending/State$v1 :spec/Mine} (analysis/find-spec-refs '(let [events [{:$type :spec/Event}]] (reduce [a (refine-to {:$type :spec/Mine} :tutorials.vending/State$v1)] [e events] e))))) (is (= #{{:tail :tutorials.vending/State$v1} :spec/Mine :spec/Event} (analysis/find-spec-refs '(let [events [{:$type :spec/Event}]] (reduce [a (refine-to {:$type :spec/Mine} :tutorials.vending/State$v1)] [e events] a))))) (is (= #{:spec/X {:tail :spec/Y} :spec/Z} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z} a y] a))))) (is (= #{:spec/X {:tail :spec/Y} :spec/Z} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y} z {:$type :spec/Z} a y] a)))) (is (= #{:spec/X {:tail :spec/Y} :spec/Y :spec/Z} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z} a y] y))))) (is (= #{:spec/X :spec/Y {:tail :spec/Z}} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z}] z))))) (is (= #{{:tail :spec/X} {:tail :spec/Y} :spec/Z} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z} a y] (if true a x)))))) (is (= #{{:tail :spec/X} {:tail :spec/Y} :spec/Z} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z} a y q (if true a x)] q))))) (is (= #{:spec/X :spec/Y :spec/Z {:tail :spec/Q}} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z} a y q (if true a x)] (when-value-let [q {:$type :spec/Q}] q)))))) (is (= #{{:tail :spec/X} {:tail :spec/Y} :spec/Z :spec/Q} (analysis/find-spec-refs '(let [x {:$type :spec/X} y {:$type :spec/Y}] (let [z {:$type :spec/Z} a y q (if true a x)] (when-value-let [p {:$type :spec/Q}] q)))))) (is (= #{{:tail :my/Spec}} (analysis/find-spec-refs '(if-value-let [o {:$type :my/Spec}] o {:$type :my/Spec})))) (is (= #{{:tail :my/Spec} {:tail :my/Other}} (analysis/find-spec-refs '(if-value-let [o {:$type :my/Spec}] o {:$type :my/Other})))) (is (= #{:my/Spec {:tail :my/X} {:tail :my/Other}} (analysis/find-spec-refs '(if-value-let [o {:$type :my/Spec}] {:$type :my/X} {:$type :my/Other})))) (is (= #{{:tail :my/Spec}} (analysis/find-spec-refs '(when-value-let 
[o {:$type :my/Spec}] o)))) (is (= #{:my/Spec {:tail :my/Other}} (analysis/find-spec-refs '(when-value-let [o {:$type :my/Spec}] {:$type :my/Other})))) (is (= #{:my/Other} (analysis/find-spec-refs-but-tail :my/Spec '(if true {:$type :my/Spec} {:$type :my/Other}))))) (deftest test-cyclical-dependencies (let [spec-map (var-types/to-halite-spec-env {:spec/Destination {:fields {:d :Integer}} :spec/Path1 {:refines-to {:spec/Destination {:name "refine_to_Destination" :expr '{:$type :spec/Destination :d 1}}}} :spec/Path2 {:refines-to {:spec/Destination {:name "refine_to_Destination" :expr '{:$type :spec/Destination :d 2}}}} :spec/Start {:refines-to {:spec/Path1 {:name "refine_to_Path1" :expr '{:$type :spec/Path1}} :spec/Path2 {:name "refine_to_Path2" :expr '{:$type :spec/Path2}}}}})] (is (= {:spec/Path1 #{:spec/Destination} :spec/Path2 #{:spec/Destination} :spec/Start #{:spec/Path1 :spec/Path2}} (#'analysis/get-spec-map-dependencies spec-map))) (is (nil? (analysis/find-cycle-in-dependencies spec-map)))) (let [spec-map (var-types/to-halite-spec-env {:spec/A {:fields {:b :spec/B}} :spec/B {:fields {:a :spec/A}}})] (is (= {:spec/A #{:spec/B} :spec/B #{:spec/A}} (#'analysis/get-spec-map-dependencies spec-map))) (is (= [:spec/A :spec/B :spec/A] (analysis/find-cycle-in-dependencies spec-map)))))
fa253141553e789fb088312671c41a920c1a5c117c9c1c32353a3cc2eca07c5f
OCamlPro/operf-micro
equations.ml
(***********************************************************************)
(* OCaml *)
(* projet Cristal, INRIA Rocquencourt *)
(* Copyright 1996 Institut National de Recherche en Informatique et en Automatique. All rights reserved. This file is distributed under the terms of the Q Public License version 1.0. *)
(***********************************************************************)

(****************** Equation manipulations *************)

open Terms

type rule = { number: int; numvars: int; lhs: term; rhs: term }

(* standardizes an equation so its variables are 1,2,... *)
let mk_rule num m n = let all_vars = union (vars m) (vars n) in let counter = ref 0 in let subst = List.map (fun v -> incr counter; (v, Var !counter)) (List.rev all_vars) in { number = num; numvars = !counter; lhs = substitute subst m; rhs = substitute subst n }

(* checks that rules are numbered in sequence and returns their number *)
let check_rules rules = let counter = ref 0 in List.iter (fun r -> incr counter; if r.number <> !counter then failwith "Rule numbers not in sequence") rules; !counter

let pretty_rule b rule = Buffer.add_string b (string_of_int rule.number); Buffer.add_string b " : "; pretty_term b rule.lhs; Buffer.add_string b " = "; pretty_term b rule.rhs; Buffer.add_char b '\n'

let pretty_rules b rules = List.iter (pretty_rule b) rules

(****************** Rewriting **************************)

(* Top-level rewriting. Let eq: L = R be an equation, M be a term such that L<=M. With sigma = matching L M, we define the image of M by eq as sigma(R) *)
let reduce l m r = substitute (matching l m) r

(* Test whether m can be reduced by l, i.e. m contains an instance of l. *)
let can_match l m = try let _ = matching l m in true with Failure _ -> false

let rec reducible l m = can_match l m || (match m with | Term(_,sons) -> List.exists (reducible l) sons | _ -> false)

(* Top-level rewriting with multiple rules. *)
let rec mreduce rules m = match rules with [] -> failwith "mreduce" | rule::rest -> try reduce rule.lhs m rule.rhs with Failure _ -> mreduce rest m

(* One step of rewriting in leftmost-outermost strategy, with multiple rules. Fails if no redex is found *)
let rec mrewrite1 rules m = try mreduce rules m with Failure _ -> match m with Var n -> failwith "mrewrite1" | Term(f, sons) -> Term(f, mrewrite1_sons rules sons) and mrewrite1_sons rules = function [] -> failwith "mrewrite1" | son::rest -> try mrewrite1 rules son :: rest with Failure _ -> son :: mrewrite1_sons rules rest

(* Iterating rewrite1. Returns a normal form. May loop forever *)
let rec mrewrite_all rules m = try mrewrite_all rules (mrewrite1 rules m) with Failure _ -> m
null
https://raw.githubusercontent.com/OCamlPro/operf-micro/d5d2bf2068204b889321b0c5a7bc0d079c0fca80/share/operf-micro/benchmarks/kb/equations.ml
ocaml
OCaml Equation manipulations standardizes an equation so its variables are 1,2,... checks that rules are numbered in sequence and returns their number Rewriting Top-level rewriting with multiple rules. Iterating rewrite1. Returns a normal form. May loop forever
open Terms

type rule = { number: int; numvars: int; lhs: term; rhs: term }

let mk_rule num m n = let all_vars = union (vars m) (vars n) in let counter = ref 0 in let subst = List.map (fun v -> incr counter; (v, Var !counter)) (List.rev all_vars) in { number = num; numvars = !counter; lhs = substitute subst m; rhs = substitute subst n }

let check_rules rules = let counter = ref 0 in List.iter (fun r -> incr counter; if r.number <> !counter then failwith "Rule numbers not in sequence") rules; !counter

let pretty_rule b rule = Buffer.add_string b (string_of_int rule.number); Buffer.add_string b " : "; pretty_term b rule.lhs; Buffer.add_string b " = "; pretty_term b rule.rhs; Buffer.add_char b '\n'

let pretty_rules b rules = List.iter (pretty_rule b) rules

let reduce l m r = substitute (matching l m) r

let can_match l m = try let _ = matching l m in true with Failure _ -> false

let rec reducible l m = can_match l m || (match m with | Term(_,sons) -> List.exists (reducible l) sons | _ -> false)

let rec mreduce rules m = match rules with [] -> failwith "mreduce" | rule::rest -> try reduce rule.lhs m rule.rhs with Failure _ -> mreduce rest m

let rec mrewrite1 rules m = try mreduce rules m with Failure _ -> match m with Var n -> failwith "mrewrite1" | Term(f, sons) -> Term(f, mrewrite1_sons rules sons) and mrewrite1_sons rules = function [] -> failwith "mrewrite1" | son::rest -> try mrewrite1 rules son :: rest with Failure _ -> son :: mrewrite1_sons rules rest

let rec mrewrite_all rules m = try mrewrite_all rules (mrewrite1 rules m) with Failure _ -> m
f69414e1ffeb5fd20baf86cbde5f95638dd7be52dad17f7b6346d80a8c0c3c98
cognitect-labs/aws-api
ec2_metadata_utils.clj
Copyright ( c ) Cognitect , Inc. ;; All rights reserved. (ns ^:skip-wiki cognitect.aws.ec2-metadata-utils "Impl, don't call directly" (:require [clojure.string :as str] [clojure.data.json :as json] [clojure.core.async :as a] [cognitect.aws.http :as http] [cognitect.aws.util :as u] [cognitect.aws.retry :as retry]) (:import (java.net URI))) (set! *warn-on-reflection* true) (def ^:const ec2-metadata-service-override-system-property "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride") (def ^:const dynamic-data-root "/latest/dynamic/") (def ^:const security-credentials-path "/latest/meta-data/iam/security-credentials/") (def ^:const instance-identity-document "instance-identity/document") ECS (def ^:const container-credentials-relative-uri-env-var "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") (def ^:const container-credentials-full-uri-env-var "AWS_CONTAINER_CREDENTIALS_FULL_URI") (def ^:const container-authorization-token-env-var "AWS_CONTAINER_AUTHORIZATION_TOKEN") (def ^:const ec2-metadata-host "") (def ^:const ecs-metadata-host "") (defn in-container? [] (or (u/getenv container-credentials-relative-uri-env-var) (u/getenv container-credentials-full-uri-env-var))) (defn build-path [& components] (str/replace (str/join \/ components) #"\/\/+" (constantly "/"))) (defn- build-uri [host path] (str host "/" (cond-> path (str/starts-with? path "/") (subs 1)))) (defn get-host-address "Gets the EC2 (or ECS) metadata host address" [] (or (u/getProperty ec2-metadata-service-override-system-property) (when (in-container?) ecs-metadata-host) ec2-metadata-host)) (defn- request-map [^URI uri] (let [auth-token (u/getenv container-authorization-token-env-var)] {:scheme (.getScheme uri) :server-name (.getHost uri) :server-port (or (when (pos? (.getPort uri)) (.getPort uri)) (when (#{"https"} (.getScheme uri)) 443) 80) :uri (.getPath uri) :request-method :get :headers (cond-> {"Accept" "*/*"} auth-token (assoc "Authorization" auth-token))})) (defn get-data [uri http-client] (let [response (a/<!! (retry/with-retry #(http/submit http-client (request-map (URI. uri))) (a/promise-chan) retry/default-retriable? retry/default-backoff))] ;; TODO: handle unhappy paths -JS (when (= 200 (:status response)) (u/bbuf->str (:body response))))) (defn get-data-at-path [path http-client] (get-data (build-uri (get-host-address) path) http-client)) (defn get-listing [uri http-client] (some-> (get-data uri http-client) str/split-lines)) (defn get-listing-at-path [path http-client] (get-listing (build-uri (get-host-address) path) http-client)) (defn get-ec2-instance-data [http-client] (some-> (build-path dynamic-data-root instance-identity-document) (get-data-at-path http-client) (json/read-str :key-fn keyword))) (defn get-ec2-instance-region [http-client] (:region (get-ec2-instance-data http-client))) (defn container-credentials [http-client] (let [endpoint (or (when-let [path (u/getenv container-credentials-relative-uri-env-var)] (str (get-host-address) path)) (u/getenv container-credentials-full-uri-env-var))] (some-> endpoint (get-data http-client) (json/read-str :key-fn keyword)))) (defn instance-credentials [http-client] (when (not (in-container?)) (when-let [cred-name (first (get-listing-at-path security-credentials-path http-client))] (some-> (get-data-at-path (str security-credentials-path cred-name) http-client) (json/read-str :key-fn keyword)))))
null
https://raw.githubusercontent.com/cognitect-labs/aws-api/40d5fcf228302de66fee698935ae6ecad8474925/src/cognitect/aws/ec2_metadata_utils.clj
clojure
All rights reserved. TODO: handle unhappy paths -JS
Copyright ( c ) Cognitect , Inc. (ns ^:skip-wiki cognitect.aws.ec2-metadata-utils "Impl, don't call directly" (:require [clojure.string :as str] [clojure.data.json :as json] [clojure.core.async :as a] [cognitect.aws.http :as http] [cognitect.aws.util :as u] [cognitect.aws.retry :as retry]) (:import (java.net URI))) (set! *warn-on-reflection* true) (def ^:const ec2-metadata-service-override-system-property "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride") (def ^:const dynamic-data-root "/latest/dynamic/") (def ^:const security-credentials-path "/latest/meta-data/iam/security-credentials/") (def ^:const instance-identity-document "instance-identity/document") ECS (def ^:const container-credentials-relative-uri-env-var "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") (def ^:const container-credentials-full-uri-env-var "AWS_CONTAINER_CREDENTIALS_FULL_URI") (def ^:const container-authorization-token-env-var "AWS_CONTAINER_AUTHORIZATION_TOKEN") (def ^:const ec2-metadata-host "") (def ^:const ecs-metadata-host "") (defn in-container? [] (or (u/getenv container-credentials-relative-uri-env-var) (u/getenv container-credentials-full-uri-env-var))) (defn build-path [& components] (str/replace (str/join \/ components) #"\/\/+" (constantly "/"))) (defn- build-uri [host path] (str host "/" (cond-> path (str/starts-with? path "/") (subs 1)))) (defn get-host-address "Gets the EC2 (or ECS) metadata host address" [] (or (u/getProperty ec2-metadata-service-override-system-property) (when (in-container?) ecs-metadata-host) ec2-metadata-host)) (defn- request-map [^URI uri] (let [auth-token (u/getenv container-authorization-token-env-var)] {:scheme (.getScheme uri) :server-name (.getHost uri) :server-port (or (when (pos? (.getPort uri)) (.getPort uri)) (when (#{"https"} (.getScheme uri)) 443) 80) :uri (.getPath uri) :request-method :get :headers (cond-> {"Accept" "*/*"} auth-token (assoc "Authorization" auth-token))})) (defn get-data [uri http-client] (let [response (a/<!! (retry/with-retry #(http/submit http-client (request-map (URI. uri))) (a/promise-chan) retry/default-retriable? retry/default-backoff))] (when (= 200 (:status response)) (u/bbuf->str (:body response))))) (defn get-data-at-path [path http-client] (get-data (build-uri (get-host-address) path) http-client)) (defn get-listing [uri http-client] (some-> (get-data uri http-client) str/split-lines)) (defn get-listing-at-path [path http-client] (get-listing (build-uri (get-host-address) path) http-client)) (defn get-ec2-instance-data [http-client] (some-> (build-path dynamic-data-root instance-identity-document) (get-data-at-path http-client) (json/read-str :key-fn keyword))) (defn get-ec2-instance-region [http-client] (:region (get-ec2-instance-data http-client))) (defn container-credentials [http-client] (let [endpoint (or (when-let [path (u/getenv container-credentials-relative-uri-env-var)] (str (get-host-address) path)) (u/getenv container-credentials-full-uri-env-var))] (some-> endpoint (get-data http-client) (json/read-str :key-fn keyword)))) (defn instance-credentials [http-client] (when (not (in-container?)) (when-let [cred-name (first (get-listing-at-path security-credentials-path http-client))] (some-> (get-data-at-path (str security-credentials-path cred-name) http-client) (json/read-str :key-fn keyword)))))
eaebc0a75a7f3f51ac2454cb51cdee3d932c2a187401468674d97ee7672570a2
theobat/massalia
SpecAPI.hs
# LANGUAGE DataKinds # {-# LANGUAGE DeriveAnyClass #-} # LANGUAGE DeriveGeneric # # LANGUAGE FlexibleInstances # # LANGUAGE KindSignatures # {-# LANGUAGE OverloadedStrings #-} # LANGUAGE NoImplicitPrelude # import Data.Morpheus.Types (GQLRequest (..)) import MassaliaMigration (findAndRunAllMigration, defaultMigrationPattern, GlobalMigrationError) import MigrationTypes ( MigrationPattern(..) ) import MassaliaSchema.TestAPI (api, apiWithoutDB) import Protolude import Test.Tasty () import Test.Tasty.HUnit () import Text.Pretty.Simple (pPrint) import Massalia.HasqlExec (poolFromURLString, release) import Massalia.HasqlConnection (URLError) main :: IO () main = do let queryStruct = GQLRequest { query = "query plantList_test { plantListPaginated (first: 10, offset: 0, globalFilter: {id: {isIn: []}}) { id name } }", operationName = Nothing, variables = Nothing } resWihtoutDB <- liftIO $ apiWithoutDB queryStruct pPrint resWihtoutDB res <- runExceptT $ executionScheme case res of Left err -> pPrint err Right _ -> pure () data TestError = InitErrorMigration [GlobalMigrationError] | InitErrorURL URLError deriving (Show) executionScheme :: ExceptT TestError IO () executionScheme = do withExceptT InitErrorMigration $ findAndRunAllMigration migrationConfig dbURL pool <- withExceptT InitErrorURL $ ExceptT $ poolFromURLString 1 10 dbURL let queryStruct = GQLRequest { query = "query plantList_test { plantListPaginated (first: 10, offset: 0) { id name } }", operationName = Nothing, variables = Nothing } res <- liftIO $ api pool queryStruct liftIO $ pPrint res liftIO $ release pool where migrationConfig = defaultMigrationPattern {basePath = "./test"} dbURL = "postgres:p@localhost:5432/massalia_test_industry" dbPoolSize = 1 dbTimeoutInSec = 10
null
https://raw.githubusercontent.com/theobat/massalia/66d65c3431132091107d07e041f33c907f9d08cb/test/integration/SpecAPI.hs
haskell
# LANGUAGE DeriveAnyClass # # LANGUAGE OverloadedStrings #
# LANGUAGE DataKinds # # LANGUAGE DeriveGeneric # # LANGUAGE FlexibleInstances # # LANGUAGE KindSignatures # # LANGUAGE NoImplicitPrelude # import Data.Morpheus.Types (GQLRequest (..)) import MassaliaMigration (findAndRunAllMigration, defaultMigrationPattern, GlobalMigrationError) import MigrationTypes ( MigrationPattern(..) ) import MassaliaSchema.TestAPI (api, apiWithoutDB) import Protolude import Test.Tasty () import Test.Tasty.HUnit () import Text.Pretty.Simple (pPrint) import Massalia.HasqlExec (poolFromURLString, release) import Massalia.HasqlConnection (URLError) main :: IO () main = do let queryStruct = GQLRequest { query = "query plantList_test { plantListPaginated (first: 10, offset: 0, globalFilter: {id: {isIn: []}}) { id name } }", operationName = Nothing, variables = Nothing } resWihtoutDB <- liftIO $ apiWithoutDB queryStruct pPrint resWihtoutDB res <- runExceptT $ executionScheme case res of Left err -> pPrint err Right _ -> pure () data TestError = InitErrorMigration [GlobalMigrationError] | InitErrorURL URLError deriving (Show) executionScheme :: ExceptT TestError IO () executionScheme = do withExceptT InitErrorMigration $ findAndRunAllMigration migrationConfig dbURL pool <- withExceptT InitErrorURL $ ExceptT $ poolFromURLString 1 10 dbURL let queryStruct = GQLRequest { query = "query plantList_test { plantListPaginated (first: 10, offset: 0) { id name } }", operationName = Nothing, variables = Nothing } res <- liftIO $ api pool queryStruct liftIO $ pPrint res liftIO $ release pool where migrationConfig = defaultMigrationPattern {basePath = "./test"} dbURL = "postgres:p@localhost:5432/massalia_test_industry" dbPoolSize = 1 dbTimeoutInSec = 10
9c8e2f6f912809676aaef4eb381443540cc17057fc24cc64266ec59a26a799a3
nkaretnikov/OOHaskell
Circle.hs
( C ) 2004 - 2005 , Oleg Kiselyov & 's overlooked object system module Circle where import Shape -- The composed type of circles data CircleData = CircleData { shapeData :: ShapeData , radiusData :: Int } -- A "closed" constructor circle x y r = CircleData { shapeData = shape x y , radiusData = r } -- A circle is a shape instance Shape CircleData where moveTo x y s = s { shapeData = moveTo' x y (shapeData s) } rMoveTo dx dy s = s { shapeData = rMoveTo' dx dy (shapeData s) } draw s = putStrLn $ concat ["Drawing a Circle at:", show (xData (shapeData s),yData (shapeData s)), ", radius ", show (radiusData s)] -- An interface in case more kinds of circles show up class Shape s => Circle s where getRadius :: s -> Int setRadius :: Int -> s -> s
null
https://raw.githubusercontent.com/nkaretnikov/OOHaskell/ddf42cfa62f8bd27643ff6db136dec6c14466232/repository/shapes/Haskell/Shapes7/Circle.hs
haskell
The composed type of circles A "closed" constructor A circle is a shape An interface in case more kinds of circles show up
( C ) 2004 - 2005 , Oleg Kiselyov & 's overlooked object system module Circle where import Shape data CircleData = CircleData { shapeData :: ShapeData , radiusData :: Int } circle x y r = CircleData { shapeData = shape x y , radiusData = r } instance Shape CircleData where moveTo x y s = s { shapeData = moveTo' x y (shapeData s) } rMoveTo dx dy s = s { shapeData = rMoveTo' dx dy (shapeData s) } draw s = putStrLn $ concat ["Drawing a Circle at:", show (xData (shapeData s),yData (shapeData s)), ", radius ", show (radiusData s)] class Shape s => Circle s where getRadius :: s -> Int setRadius :: Int -> s -> s
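The Circle instance above delegates to helpers from a Shape module that is not included here (shape, moveTo', rMoveTo', xData, yData). As a rough, self-contained sketch of that composed-record pattern — the ShapeData definition and helper bodies below are assumptions for illustration, not the repository's actual Shape module:
-- Minimal sketch of the composed-record "shapes" pattern; ShapeData and its
-- helpers are hypothetical stand-ins for the unshown Shape module.
module ShapesSketch where

data ShapeData = ShapeData { xData :: Int, yData :: Int }

shape :: Int -> Int -> ShapeData
shape x y = ShapeData { xData = x, yData = y }

moveTo' :: Int -> Int -> ShapeData -> ShapeData
moveTo' x y s = s { xData = x, yData = y }

rMoveTo' :: Int -> Int -> ShapeData -> ShapeData
rMoveTo' dx dy s = s { xData = xData s + dx, yData = yData s + dy }

data CircleData = CircleData { shapeData :: ShapeData, radiusData :: Int }

circle :: Int -> Int -> Int -> CircleData
circle x y r = CircleData { shapeData = shape x y, radiusData = r }

drawCircle :: CircleData -> IO ()
drawCircle c =
  putStrLn $ "Drawing a Circle at:" ++ show (xData sd, yData sd)
          ++ ", radius " ++ show (radiusData c)
  where sd = shapeData c

main :: IO ()
main = drawCircle (rMove (circle 10 20 5))   -- Drawing a Circle at:(13,24), radius 5
  where rMove c = c { shapeData = rMoveTo' 3 4 (shapeData c) }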
a3329c1cc72058a2be4f755d0b9356f324e9df3807fdae3bef57e3851cb9ff7c
joearms/elib1
elib1_search.erl
Copyright ( c ) 2006 - 2009 See MIT - LICENSE for licensing information . -module(elib1_search). -compile(export_all). -import(elib1_mysql, [start/4, cmd/2, stop/1]). make_data_base() -> {ok, Pid} = start("localhost", 3306, password:username(mysql), password:password(mysql)), cmd(Pid, "use test"), cmd(Pid, "drop table if exists mods"), cmd(Pid, "create table if not exists mods (" "themod CHAR(32) not null primary key," %% don't call this mod "val mediumtext not null, fulltext(val))"), _V1 = cmd(Pid, "show tables"), V2 = cmd(Pid, "describe mods"), stop(Pid), V2. insert_erl() -> _Files = find:files(".", "*.erl", true), a.
null
https://raw.githubusercontent.com/joearms/elib1/d617d0ec70a058ef102749eadf51c024444c28d9/src/elib1_search.erl
erlang
don't call this mod
Copyright ( c ) 2006 - 2009 See MIT - LICENSE for licensing information . -module(elib1_search). -compile(export_all). -import(elib1_mysql, [start/4, cmd/2, stop/1]). make_data_base() -> {ok, Pid} = start("localhost", 3306, password:username(mysql), password:password(mysql)), cmd(Pid, "use test"), cmd(Pid, "drop table if exists mods"), cmd(Pid, "create table if not exists mods (" "val mediumtext not null, fulltext(val))"), _V1 = cmd(Pid, "show tables"), V2 = cmd(Pid, "describe mods"), stop(Pid), V2. insert_erl() -> _Files = find:files(".", "*.erl", true), a.
c473aeea0fa19202ac79ce9ba1cbe216d834bb15d7cf5c30b053e43cc2b08f83
grin-compiler/grin
NametableSpec.hs
module NametableSpec where import Test.Hspec import Test.QuickCheck import Test.QuickCheck.Monadic import Test.Test import Grin.Nametable import Grin.Pretty runTests :: IO () runTests = hspec spec spec :: Spec spec = do describe "Property" $ do it "restore . convert == id" $ property $ forAll genProg $ \p -> let p' = restore $ convert p in (PP p') `shouldBe` (PP p)
null
https://raw.githubusercontent.com/grin-compiler/grin/44ac2958810ecee969c8028d2d2a082d47fba51b/grin/test/NametableSpec.hs
haskell
module NametableSpec where import Test.Hspec import Test.QuickCheck import Test.QuickCheck.Monadic import Test.Test import Grin.Nametable import Grin.Pretty runTests :: IO () runTests = hspec spec spec :: Spec spec = do describe "Property" $ do it "restore . convert == id" $ property $ forAll genProg $ \p -> let p' = restore $ convert p in (PP p') `shouldBe` (PP p)
a041fb640d2575eca06778f86ddd57104bd0a666a11a3d88afc1afaf5730a360
isovector/transitive-anns
InstanceSpec.hs
# LANGUAGE MultiParamTypeClasses # module InstanceSpec where import qualified Data.Set as S import Data.Set (Set) import TransitiveAnns.Types import Test.Hspec import GHC.TypeLits import InstanceVia test :: forall (comp :: Symbol) (name :: Symbol) x. AddAnnotation 'Remote comp name x => Int test = 5 class Hello where hello :: Int instance Hello where hello = test @"brig" @"hello" ahello :: Set Annotation ahello = annotated hello ahelloVia :: Set Annotation ahelloVia = annotated helloVia spec :: Spec spec = do xit "should propagate via instances" $ do ahello `shouldBe` S.fromList [ Annotation Remote "brig" "hello" ] it "should propagate via instances across modules" $ do ahelloVia `shouldBe` S.fromList [ Annotation Remote "brig" "helloVia" ]
null
https://raw.githubusercontent.com/isovector/transitive-anns/b491b7b264745efd447831c4ce10105461712e6f/test/InstanceSpec.hs
haskell
# LANGUAGE MultiParamTypeClasses # module InstanceSpec where import qualified Data.Set as S import Data.Set (Set) import TransitiveAnns.Types import Test.Hspec import GHC.TypeLits import InstanceVia test :: forall (comp :: Symbol) (name :: Symbol) x. AddAnnotation 'Remote comp name x => Int test = 5 class Hello where hello :: Int instance Hello where hello = test @"brig" @"hello" ahello :: Set Annotation ahello = annotated hello ahelloVia :: Set Annotation ahelloVia = annotated helloVia spec :: Spec spec = do xit "should propagate via instances" $ do ahello `shouldBe` S.fromList [ Annotation Remote "brig" "hello" ] it "should propagate via instances across modules" $ do ahelloVia `shouldBe` S.fromList [ Annotation Remote "brig" "helloVia" ]
18eb4d40b5e04dcd31fd37fa9a14d2880d016a6a1789e7f233b19900c6cdd89c
JHU-PL-Lab/jaylang
main.ml
open Core open Fix open Share module F = Fix.Make (ClauseMap) (IdSet) module FE = Fix . Make ( ExprMap ) ( IdSet ) let id_of_var (Jayil.Ast.Var (id, _)) = id let rec clause_uses clause request = let open Jayil.Ast in let (Clause (_var, cbody)) = clause in match cbody with | Value_body (Value_int _) -> Set.empty (module Id) | Value_body (Value_record (Record_value map)) -> Ident_map.key_list map |> Set.of_list (module Id) | Value_body (Value_function (Function_value (para, fbody))) -> Set.add (expr_uses fbody request) (id_of_var para) | Var_body ( Var ( x , _ ) ) - > | Input_body - > SSet.empty | ( ( x1 , _ ) , ( x2 , _ ) ) - > SSet.of_list [ x1 ; x2 ] | Var_body (Var (x, _)) -> SSet.singleton x | Input_body -> SSet.empty | Appl_body (Var (x1, _), Var (x2, _)) -> SSet.of_list [ x1; x2 ] *) | _ -> Set.singleton (module Id) (Ident "x") (* SSet.(add empty (Ident "_")) *) and expr_uses expr request = let open Jayil.Ast in let (Expr clauses) = expr in List.fold clauses ~init:(Set.empty (module Id)) ~f:(fun acc clause -> let this_answer = clause_uses clause request in Set.union acc this_answer) let compute_closed : F.variable -> F.valuation -> IdSet.property = let open Jayil.Ast in fun clause request -> clause_uses clause request let close_ids program = F.lfp compute_closed program | > Set.to_list | > Fmt.(pr " % a " Id.pp_list ) F.lfp compute_closed program |> Set.to_list |> Fmt.(pr "%a" Id.pp_list) *) let close_ids clause = F.lfp compute_closed clause |> Set.to_list |> Fmt.(pr "%a" Id.pp_list) let run filename = let program = Load.load filename in print_endline @@ Jayil.Ast_pp.show_expr program ; let (Expr clauses) = program in List.iter clauses ~f:close_ids
null
https://raw.githubusercontent.com/JHU-PL-Lab/jaylang/484b3876986a515fb57b11768a1b3b50418cde0c/src/analysis/main.ml
ocaml
SSet.(add empty (Ident "_"))
open Core open Fix open Share module F = Fix.Make (ClauseMap) (IdSet) module FE = Fix . Make ( ExprMap ) ( IdSet ) let id_of_var (Jayil.Ast.Var (id, _)) = id let rec clause_uses clause request = let open Jayil.Ast in let (Clause (_var, cbody)) = clause in match cbody with | Value_body (Value_int _) -> Set.empty (module Id) | Value_body (Value_record (Record_value map)) -> Ident_map.key_list map |> Set.of_list (module Id) | Value_body (Value_function (Function_value (para, fbody))) -> Set.add (expr_uses fbody request) (id_of_var para) | Var_body ( Var ( x , _ ) ) - > | Input_body - > SSet.empty | ( ( x1 , _ ) , ( x2 , _ ) ) - > SSet.of_list [ x1 ; x2 ] | Var_body (Var (x, _)) -> SSet.singleton x | Input_body -> SSet.empty | Appl_body (Var (x1, _), Var (x2, _)) -> SSet.of_list [ x1; x2 ] *) | _ -> Set.singleton (module Id) (Ident "x") and expr_uses expr request = let open Jayil.Ast in let (Expr clauses) = expr in List.fold clauses ~init:(Set.empty (module Id)) ~f:(fun acc clause -> let this_answer = clause_uses clause request in Set.union acc this_answer) let compute_closed : F.variable -> F.valuation -> IdSet.property = let open Jayil.Ast in fun clause request -> clause_uses clause request let close_ids program = F.lfp compute_closed program | > Set.to_list | > Fmt.(pr " % a " Id.pp_list ) F.lfp compute_closed program |> Set.to_list |> Fmt.(pr "%a" Id.pp_list) *) let close_ids clause = F.lfp compute_closed clause |> Set.to_list |> Fmt.(pr "%a" Id.pp_list) let run filename = let program = Load.load filename in print_endline @@ Jayil.Ast_pp.show_expr program ; let (Expr clauses) = program in List.iter clauses ~f:close_ids
41c35c0d6dc2b569c0ddfd0ff73880e43673e07fa9814597c594dc5edd00d4fb
josefs/Gradualizer
exhaustive.erl
-module(exhaustive). -export([integer_0/1,integer_1/1 ,pos_integer/1, neg_integer/1 ,tuple_1/1 ,union_atom/1, union_nil/1, union_mix/1 ,annotated/1 ,char/1]). -spec integer_0(integer()) -> {}. integer_0(0) -> {}. -spec integer_1(integer()) -> {}. integer_1(1) -> {}. -spec pos_integer(pos_integer()) -> {}. pos_integer(1) -> {}. -spec neg_integer(neg_integer()) -> {}. neg_integer(-1) -> {}. -spec tuple_1({integer()}) -> {}. tuple_1({0}) -> {}. -spec union_atom(a | b) -> {}. union_atom(a) -> {}. -spec union_nil([] | a) -> {}. union_nil([]) -> {}. -spec union_mix(a | pos_integer()) -> {}. union_mix(a) -> {}. -spec annotated(X :: integer()) -> {}. annotated(1) -> {}. -spec char(char()) -> {}. char($a) -> {}.
null
https://raw.githubusercontent.com/josefs/Gradualizer/208f5816b0157f282212fc036ba7560f0822f9fc/test/should_fail/exhaustive.erl
erlang
-module(exhaustive). -export([integer_0/1,integer_1/1 ,pos_integer/1, neg_integer/1 ,tuple_1/1 ,union_atom/1, union_nil/1, union_mix/1 ,annotated/1 ,char/1]). -spec integer_0(integer()) -> {}. integer_0(0) -> {}. -spec integer_1(integer()) -> {}. integer_1(1) -> {}. -spec pos_integer(pos_integer()) -> {}. pos_integer(1) -> {}. -spec neg_integer(neg_integer()) -> {}. neg_integer(-1) -> {}. -spec tuple_1({integer()}) -> {}. tuple_1({0}) -> {}. -spec union_atom(a | b) -> {}. union_atom(a) -> {}. -spec union_nil([] | a) -> {}. union_nil([]) -> {}. -spec union_mix(a | pos_integer()) -> {}. union_mix(a) -> {}. -spec annotated(X :: integer()) -> {}. annotated(1) -> {}. -spec char(char()) -> {}. char($a) -> {}.
9166a2bd94fa981417f5dc9faa4ccd46c6224d52ec3858afd8c897e8641c89b8
mortuosplango/frankentone
genetic.clj
(ns frankentone.examples.genetic.genetic (:use [frankentone.genetic analysis simplegp simplegp-functions utils dspgp] [frankentone utils ugens dsp instruments patterns samples]) (:require [incanter core charts stats] [hiphip.double :as dbl] clojure.java.io)) (do (def reference (get-reference-map (let [len (* 1024 4)] (dbl/afill! [[idx _] (double-array (* 1024 4))] (Math/tanh (* 2.05 (Math/sin (* (/ idx *sample-rate*) 220.0 TAU)))))))) (error-fn reference '(Math/tanh (* 2.05 (Math/sin (* x 220.0 TAU)))))) (do (def reference (atom (get-reference-map (load-sample (clojure.java.io/resource "hihat-open.wav"))))) (def best-pg (atom {:program '(pmod (sin (* TAU (sin (* TAU x)))) (sin (* x Math/PI))) :error 1000.0 :changed true}))) (dotimes [i 3] (time (dotimes [i 10] (error-fn @reference [:rms] '(if>0 (+ (* (* *sample-rate* x prev) (+ (* 4.239626725410776 (sin (+ *sample-rate* (* *sample-rate* x prev))) prev) -0.22132726531217406) prev) (mul-sin TAU x)) (+ x x) (- (mul-cos *sample-rate* (mul-sin TAU x)) 0.023044934598853834)))) )) (hiphip.array/amap [x (double-array (range 0 20)) :let [y (< x 7)] :while (= y true)] y) ;; nothing: ;; 500 ms ;; mfcc: 730 ms ;; spf: 650 ms boz : 630 ms ;; rms: 780 ms (defn best-callback [best best-error] (let [value-function (program->fn best)] (reset! best-pg {:program best :error best-error :changed true :samples (mapv (let [prev-samp (atom 0.0)] (fn ^Double [x] (hardclip (swap! prev-samp #(filter-bad-value (value-function x %)))))) (:x @reference))})) (program->dsp! best true)) ;; (future-done? evol) ;; (reset! *evolution* false) ;; (start-dsp) ;; (stop-dsp) ;;;; Run it: (def evol (future (evolve 100 (memoize (partial dynamic-error-fn reference)) :best-callback best-callback :terminals dsp-terminals :functions dsp-functions)))
null
https://raw.githubusercontent.com/mortuosplango/frankentone/6602e9623c23f3543b9f779fea7851a043ad7fca/resources/examples/genetic/genetic.clj
clojure
nothing: 500 ms mfcc: spf: rms: (future-done? evol) (reset! *evolution* false) (start-dsp) (stop-dsp) Run it:
(ns frankentone.examples.genetic.genetic (:use [frankentone.genetic analysis simplegp simplegp-functions utils dspgp] [frankentone utils ugens dsp instruments patterns samples]) (:require [incanter core charts stats] [hiphip.double :as dbl] clojure.java.io)) (do (def reference (get-reference-map (let [len (* 1024 4)] (dbl/afill! [[idx _] (double-array (* 1024 4))] (Math/tanh (* 2.05 (Math/sin (* (/ idx *sample-rate*) 220.0 TAU)))))))) (error-fn reference '(Math/tanh (* 2.05 (Math/sin (* x 220.0 TAU)))))) (do (def reference (atom (get-reference-map (load-sample (clojure.java.io/resource "hihat-open.wav"))))) (def best-pg (atom {:program '(pmod (sin (* TAU (sin (* TAU x)))) (sin (* x Math/PI))) :error 1000.0 :changed true}))) (dotimes [i 3] (time (dotimes [i 10] (error-fn @reference [:rms] '(if>0 (+ (* (* *sample-rate* x prev) (+ (* 4.239626725410776 (sin (+ *sample-rate* (* *sample-rate* x prev))) prev) -0.22132726531217406) prev) (mul-sin TAU x)) (+ x x) (- (mul-cos *sample-rate* (mul-sin TAU x)) 0.023044934598853834)))) )) (hiphip.array/amap [x (double-array (range 0 20)) :let [y (< x 7)] :while (= y true)] y) 730 ms 650 ms boz : 630 ms 780 ms (defn best-callback [best best-error] (let [value-function (program->fn best)] (reset! best-pg {:program best :error best-error :changed true :samples (mapv (let [prev-samp (atom 0.0)] (fn ^Double [x] (hardclip (swap! prev-samp #(filter-bad-value (value-function x %)))))) (:x @reference))})) (program->dsp! best true)) (def evol (future (evolve 100 (memoize (partial dynamic-error-fn reference)) :best-callback best-callback :terminals dsp-terminals :functions dsp-functions)))
93f91317f61478c800db516833ff953abcffd65ef9d62c9a7e5df2a1dcb3c15b
nubank/vessel
data_readers.clj
{lib1/url lib1.url/string->url}
null
https://raw.githubusercontent.com/nubank/vessel/40036928d20cfd07b31b99bb2389d5421c49d26d/test/resources/lib1/data_readers.clj
clojure
{lib1/url lib1.url/string->url}
b077a76ad5d89d71d8618e0d34994310d479bd6412c3b77af519d94240faa0bb
hoytech/antiweb
parser.lisp
-*- Mode : LISP ; Syntax : COMMON - LISP ; Package : CL - PPCRE ; Base : 10 -*- $ Header : /usr / cvs / hcsw / antiweb / bundled / cl - ppcre / parser.lisp , v 1.1 2008/04/26 02:40:56 ;;; The parser will - with the help of the lexer - parse a regex ;;; string and convert it into a "parse tree" (see docs for details ;;; about the syntax of these trees). Note that the lexer might return ;;; illegal parse trees. It is assumed that the conversion process ;;; later on will track them down. Copyright ( c ) 2002 - 2007 , Dr. . All rights reserved . ;;; Redistribution and use in source and binary forms, with or without ;;; modification, are permitted provided that the following conditions ;;; are met: ;;; * Redistributions of source code must retain the above copyright ;;; notice, this list of conditions and the following disclaimer. ;;; * Redistributions in binary form must reproduce the above ;;; copyright notice, this list of conditions and the following ;;; disclaimer in the documentation and/or other materials ;;; provided with the distribution. ;;; THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED ;;; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ;;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ;;; ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL ;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE ;;; GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , ;;; WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING ;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS ;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. (in-package #:cl-ppcre) (defun group (lexer) (declare #.*standard-optimize-settings*) "Parses and consumes a <group>. The productions are: <group> -> \"\(\"<regex>\")\" \"\(?:\"<regex>\")\" \"\(?>\"<regex>\")\" \"\(?<flags>:\"<regex>\")\" \"\(?=\"<regex>\")\" \"\(?!\"<regex>\")\" \"\(?<=\"<regex>\")\" \"\(?<!\"<regex>\")\" \"\(?\(\"<num>\")\"<regex>\")\" \"\(?\(\"<regex>\")\"<regex>\")\" \"\(?<name>\"<regex>\")\" \(when *ALLOW-NAMED-REGISTERS* is T) <legal-token> where <flags> is parsed by the lexer function MAYBE-PARSE-FLAGS. Will return <parse-tree> or \(<grouping-type> <parse-tree>) where <grouping-type> is one of six keywords - see source for details." (multiple-value-bind (open-token flags) (get-token lexer) (cond ((eq open-token :open-paren-paren) ;; special case for conditional regular expressions; note ;; that at this point we accept a couple of illegal ;; combinations which'll be sorted out later by the ;; converter (let* ((open-paren-pos (car (lexer-last-pos lexer))) ;; check if what follows "(?(" is a number (number (try-number lexer :no-whitespace-p t)) ;; make changes to extended-mode-p local (*extended-mode-p* *extended-mode-p*)) (declare (type fixnum open-paren-pos)) (cond (number ;; condition is a number (i.e. 
refers to a ;; back-reference) (let* ((inner-close-token (get-token lexer)) (reg-expr (reg-expr lexer)) (close-token (get-token lexer))) (unless (eq inner-close-token :close-paren) (signal-ppcre-syntax-error* (+ open-paren-pos 2) "Opening paren has no matching closing paren")) (unless (eq close-token :close-paren) (signal-ppcre-syntax-error* open-paren-pos "Opening paren has no matching closing paren")) (list :branch number reg-expr))) (t ;; condition must be a full regex (actually a ;; look-behind or look-ahead); and here comes a ;; terrible kludge: instead of being cleanly ;; separated from the lexer, the parser pushes back the lexer by one position , thereby ;; landing in the middle of the 'token' "(?(" - ;; yuck!! (decf (lexer-pos lexer)) (let* ((inner-reg-expr (group lexer)) (reg-expr (reg-expr lexer)) (close-token (get-token lexer))) (unless (eq close-token :close-paren) (signal-ppcre-syntax-error* open-paren-pos "Opening paren has no matching closing paren")) (list :branch inner-reg-expr reg-expr)))))) ((member open-token '(:open-paren :open-paren-colon :open-paren-greater :open-paren-equal :open-paren-exclamation :open-paren-less-equal :open-paren-less-exclamation :open-paren-less-letter) :test #'eq) ;; make changes to extended-mode-p local (let ((*extended-mode-p* *extended-mode-p*)) we saw one of the six token representing opening ;; parentheses (let* ((open-paren-pos (car (lexer-last-pos lexer))) (register-name (when (eq open-token :open-paren-less-letter) (parse-register-name-aux lexer))) (reg-expr (reg-expr lexer)) (close-token (get-token lexer))) (when (or (eq open-token :open-paren) (eq open-token :open-paren-less-letter)) ;; if this is the "("<regex>")" or "(?"<name>""<regex>")" production we have to ;; increment the register counter of the lexer (incf (lexer-reg lexer))) (unless (eq close-token :close-paren) ;; the token following <regex> must be the closing ;; parenthesis or this is a syntax error (signal-ppcre-syntax-error* open-paren-pos "Opening paren has no matching closing paren")) (if flags ;; if the lexer has returned a list of flags this must ;; have been the "(?:"<regex>")" production (cons :group (nconc flags (list reg-expr))) (if (eq open-token :open-paren-less-letter) (list :named-register ;; every string was reversed, so we have to ;; reverse it back to get the name (nreverse register-name) reg-expr) (list (case open-token ((:open-paren) :register) ((:open-paren-colon) :group) ((:open-paren-greater) :standalone) ((:open-paren-equal) :positive-lookahead) ((:open-paren-exclamation) :negative-lookahead) ((:open-paren-less-equal) :positive-lookbehind) ((:open-paren-less-exclamation) :negative-lookbehind)) reg-expr)))))) (t ;; this is the <legal-token> production; <legal-token> is ;; any token which passes START-OF-SUBEXPR-P (otherwise parsing had already stopped in the SEQ method ) open-token)))) (defun greedy-quant (lexer) (declare #.*standard-optimize-settings*) "Parses and consumes a <greedy-quant>. The productions are: <greedy-quant> -> <group> | <group><quantifier> where <quantifier> is parsed by the lexer function GET-QUANTIFIER. Will return <parse-tree> or (:GREEDY-REPETITION <min> <max> <parse-tree>)." (let* ((group (group lexer)) (token (get-quantifier lexer))) (if token ;; if GET-QUANTIFIER returned a non-NIL value it's the two - element list ( < min > < max > ) (list :greedy-repetition (first token) (second token) group) group))) (defun quant (lexer) (declare #.*standard-optimize-settings*) "Parses and consumes a <quant>. 
The productions are: <quant> -> <greedy-quant> | <greedy-quant>\"?\". Will return the <parse-tree> returned by GREEDY-QUANT and optionally change :GREEDY-REPETITION to :NON-GREEDY-REPETITION." (let* ((greedy-quant (greedy-quant lexer)) (pos (lexer-pos lexer)) (next-char (next-char lexer))) (when next-char (if (char= next-char #\?) (setf (car greedy-quant) :non-greedy-repetition) (setf (lexer-pos lexer) pos))) greedy-quant)) (defun seq (lexer) (declare #.*standard-optimize-settings*) "Parses and consumes a <seq>. The productions are: <seq> -> <quant> | <quant><seq>. Will return <parse-tree> or (:SEQUENCE <parse-tree> <parse-tree>)." (flet ((make-array-from-two-chars (char1 char2) (let ((string (make-array 2 :element-type 'character :fill-pointer t :adjustable t))) (setf (aref string 0) char1) (setf (aref string 1) char2) string))) Note that we 're calling START - OF - SUBEXPR - P before we actually try ;; to parse a <seq> or <quant> in order to catch empty regular ;; expressions (if (start-of-subexpr-p lexer) (let ((quant (quant lexer))) (if (start-of-subexpr-p lexer) (let* ((seq (seq lexer)) (quant-is-char-p (characterp quant)) (seq-is-sequence-p (and (consp seq) (eq (first seq) :sequence)))) (cond ((and quant-is-char-p (characterp seq)) (make-array-from-two-chars seq quant)) ((and quant-is-char-p (stringp seq)) (vector-push-extend quant seq) seq) ((and quant-is-char-p seq-is-sequence-p (characterp (second seq))) (cond ((cddr seq) (setf (cdr seq) (cons (make-array-from-two-chars (second seq) quant) (cddr seq))) seq) (t (make-array-from-two-chars (second seq) quant)))) ((and quant-is-char-p seq-is-sequence-p (stringp (second seq))) (cond ((cddr seq) (setf (cdr seq) (cons (progn (vector-push-extend quant (second seq)) (second seq)) (cddr seq))) seq) (t (vector-push-extend quant (second seq)) (second seq)))) (seq-is-sequence-p ;; if <seq> is also a :SEQUENCE parse tree we merge ;; both lists into one to avoid unnecessary consing (setf (cdr seq) (cons quant (cdr seq))) seq) (t (list :sequence quant seq)))) quant)) :void))) (defun reg-expr (lexer) (declare #.*standard-optimize-settings*) "Parses and consumes a <regex>, a complete regular expression. The productions are: <regex> -> <seq> | <seq>\"|\"<regex>. Will return <parse-tree> or (:ALTERNATION <parse-tree> <parse-tree>)." (let ((pos (lexer-pos lexer))) (case (next-char lexer) ((nil) ;; if we didn't get any token we return :VOID which stands for ;; "empty regular expression" :void) ((#\|) ;; now check whether the expression started with a vertical ;; bar, i.e. 
<seq> - the left alternation - is empty (list :alternation :void (reg-expr lexer))) (otherwise otherwise un - read the character we just saw and parse a ;; <seq> plus the character following it (setf (lexer-pos lexer) pos) (let* ((seq (seq lexer)) (pos (lexer-pos lexer))) (case (next-char lexer) ((nil) ;; no further character, just a <seq> seq) ((#\|) ;; if the character was a vertical bar, this is an alternation and we have the second production (let ((reg-expr (reg-expr lexer))) (cond ((and (consp reg-expr) (eq (first reg-expr) :alternation)) again we try to merge as above in SEQ (setf (cdr reg-expr) (cons seq (cdr reg-expr))) reg-expr) (t (list :alternation seq reg-expr))))) (otherwise ;; a character which is not a vertical bar - this is ;; either a syntax error or we're inside of a group and ;; the next character is a closing parenthesis; so we just un - read the character and let another function ;; take care of it (setf (lexer-pos lexer) pos) seq))))))) (defun reverse-strings (parse-tree) (declare #.*standard-optimize-settings*) (cond ((stringp parse-tree) (nreverse parse-tree)) ((consp parse-tree) (loop for parse-tree-rest on parse-tree while parse-tree-rest do (setf (car parse-tree-rest) (reverse-strings (car parse-tree-rest)))) parse-tree) (t parse-tree))) (defun parse-string (string) (declare #.*standard-optimize-settings*) "Translate the regex string STRING into a parse tree." (let* ((lexer (make-lexer string)) (parse-tree (reverse-strings (reg-expr lexer)))) ;; check whether we've consumed the whole regex string (if (end-of-string-p lexer) parse-tree (signal-ppcre-syntax-error* (lexer-pos lexer) "Expected end of string"))))
null
https://raw.githubusercontent.com/hoytech/antiweb/53c38f78ea01f04f6d1a1ecdca5c012e7a9ae4bb/bundled/cl-ppcre/parser.lisp
lisp
Syntax : COMMON - LISP ; Package : CL - PPCRE ; Base : 10 -*- The parser will - with the help of the lexer - parse a regex string and convert it into a "parse tree" (see docs for details about the syntax of these trees). Note that the lexer might return illegal parse trees. It is assumed that the conversion process later on will track them down. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. special case for conditional regular expressions; note that at this point we accept a couple of illegal combinations which'll be sorted out later by the converter check if what follows "(?(" is a number make changes to extended-mode-p local condition is a number (i.e. refers to a back-reference) condition must be a full regex (actually a look-behind or look-ahead); and here comes a terrible kludge: instead of being cleanly separated from the lexer, the parser pushes landing in the middle of the 'token' "(?(" - yuck!! make changes to extended-mode-p local parentheses if this is the "("<regex>")" or "(?"<name>""<regex>")" production we have to increment the register counter of the lexer the token following <regex> must be the closing parenthesis or this is a syntax error if the lexer has returned a list of flags this must have been the "(?:"<regex>")" production every string was reversed, so we have to reverse it back to get the name this is the <legal-token> production; <legal-token> is any token which passes START-OF-SUBEXPR-P (otherwise if GET-QUANTIFIER returned a non-NIL value it's the to parse a <seq> or <quant> in order to catch empty regular expressions if <seq> is also a :SEQUENCE parse tree we merge both lists into one to avoid unnecessary consing if we didn't get any token we return :VOID which stands for "empty regular expression" now check whether the expression started with a vertical bar, i.e. <seq> - the left alternation - is empty <seq> plus the character following it no further character, just a <seq> if the character was a vertical bar, this is an a character which is not a vertical bar - this is either a syntax error or we're inside of a group and the next character is a closing parenthesis; so we take care of it check whether we've consumed the whole regex string
$ Header : /usr / cvs / hcsw / antiweb / bundled / cl - ppcre / parser.lisp , v 1.1 2008/04/26 02:40:56 Copyright ( c ) 2002 - 2007 , Dr. . All rights reserved . DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , (in-package #:cl-ppcre) (defun group (lexer) (declare #.*standard-optimize-settings*) "Parses and consumes a <group>. The productions are: <group> -> \"\(\"<regex>\")\" \"\(?:\"<regex>\")\" \"\(?>\"<regex>\")\" \"\(?<flags>:\"<regex>\")\" \"\(?=\"<regex>\")\" \"\(?!\"<regex>\")\" \"\(?<=\"<regex>\")\" \"\(?<!\"<regex>\")\" \"\(?\(\"<num>\")\"<regex>\")\" \"\(?\(\"<regex>\")\"<regex>\")\" \"\(?<name>\"<regex>\")\" \(when *ALLOW-NAMED-REGISTERS* is T) <legal-token> where <flags> is parsed by the lexer function MAYBE-PARSE-FLAGS. Will return <parse-tree> or \(<grouping-type> <parse-tree>) where <grouping-type> is one of six keywords - see source for details." (multiple-value-bind (open-token flags) (get-token lexer) (cond ((eq open-token :open-paren-paren) (let* ((open-paren-pos (car (lexer-last-pos lexer))) (number (try-number lexer :no-whitespace-p t)) (*extended-mode-p* *extended-mode-p*)) (declare (type fixnum open-paren-pos)) (cond (number (let* ((inner-close-token (get-token lexer)) (reg-expr (reg-expr lexer)) (close-token (get-token lexer))) (unless (eq inner-close-token :close-paren) (signal-ppcre-syntax-error* (+ open-paren-pos 2) "Opening paren has no matching closing paren")) (unless (eq close-token :close-paren) (signal-ppcre-syntax-error* open-paren-pos "Opening paren has no matching closing paren")) (list :branch number reg-expr))) (t back the lexer by one position , thereby (decf (lexer-pos lexer)) (let* ((inner-reg-expr (group lexer)) (reg-expr (reg-expr lexer)) (close-token (get-token lexer))) (unless (eq close-token :close-paren) (signal-ppcre-syntax-error* open-paren-pos "Opening paren has no matching closing paren")) (list :branch inner-reg-expr reg-expr)))))) ((member open-token '(:open-paren :open-paren-colon :open-paren-greater :open-paren-equal :open-paren-exclamation :open-paren-less-equal :open-paren-less-exclamation :open-paren-less-letter) :test #'eq) (let ((*extended-mode-p* *extended-mode-p*)) we saw one of the six token representing opening (let* ((open-paren-pos (car (lexer-last-pos lexer))) (register-name (when (eq open-token :open-paren-less-letter) (parse-register-name-aux lexer))) (reg-expr (reg-expr lexer)) (close-token (get-token lexer))) (when (or (eq open-token :open-paren) (eq open-token :open-paren-less-letter)) (incf (lexer-reg lexer))) (unless (eq close-token :close-paren) (signal-ppcre-syntax-error* open-paren-pos "Opening paren has no matching closing paren")) (if flags (cons :group (nconc flags (list reg-expr))) (if (eq open-token :open-paren-less-letter) (list :named-register (nreverse register-name) reg-expr) (list (case open-token ((:open-paren) :register) ((:open-paren-colon) :group) ((:open-paren-greater) :standalone) ((:open-paren-equal) :positive-lookahead) ((:open-paren-exclamation) :negative-lookahead) ((:open-paren-less-equal) :positive-lookbehind) ((:open-paren-less-exclamation) :negative-lookbehind)) reg-expr)))))) (t parsing had already stopped in the SEQ method ) open-token)))) (defun greedy-quant (lexer) (declare #.*standard-optimize-settings*) "Parses and consumes a <greedy-quant>. The productions are: <greedy-quant> -> <group> | <group><quantifier> where <quantifier> is parsed by the lexer function GET-QUANTIFIER. 
Will return <parse-tree> or (:GREEDY-REPETITION <min> <max> <parse-tree>)." (let* ((group (group lexer)) (token (get-quantifier lexer))) (if token two - element list ( < min > < max > ) (list :greedy-repetition (first token) (second token) group) group))) (defun quant (lexer) (declare #.*standard-optimize-settings*) "Parses and consumes a <quant>. The productions are: <quant> -> <greedy-quant> | <greedy-quant>\"?\". Will return the <parse-tree> returned by GREEDY-QUANT and optionally change :GREEDY-REPETITION to :NON-GREEDY-REPETITION." (let* ((greedy-quant (greedy-quant lexer)) (pos (lexer-pos lexer)) (next-char (next-char lexer))) (when next-char (if (char= next-char #\?) (setf (car greedy-quant) :non-greedy-repetition) (setf (lexer-pos lexer) pos))) greedy-quant)) (defun seq (lexer) (declare #.*standard-optimize-settings*) "Parses and consumes a <seq>. The productions are: <seq> -> <quant> | <quant><seq>. Will return <parse-tree> or (:SEQUENCE <parse-tree> <parse-tree>)." (flet ((make-array-from-two-chars (char1 char2) (let ((string (make-array 2 :element-type 'character :fill-pointer t :adjustable t))) (setf (aref string 0) char1) (setf (aref string 1) char2) string))) Note that we 're calling START - OF - SUBEXPR - P before we actually try (if (start-of-subexpr-p lexer) (let ((quant (quant lexer))) (if (start-of-subexpr-p lexer) (let* ((seq (seq lexer)) (quant-is-char-p (characterp quant)) (seq-is-sequence-p (and (consp seq) (eq (first seq) :sequence)))) (cond ((and quant-is-char-p (characterp seq)) (make-array-from-two-chars seq quant)) ((and quant-is-char-p (stringp seq)) (vector-push-extend quant seq) seq) ((and quant-is-char-p seq-is-sequence-p (characterp (second seq))) (cond ((cddr seq) (setf (cdr seq) (cons (make-array-from-two-chars (second seq) quant) (cddr seq))) seq) (t (make-array-from-two-chars (second seq) quant)))) ((and quant-is-char-p seq-is-sequence-p (stringp (second seq))) (cond ((cddr seq) (setf (cdr seq) (cons (progn (vector-push-extend quant (second seq)) (second seq)) (cddr seq))) seq) (t (vector-push-extend quant (second seq)) (second seq)))) (seq-is-sequence-p (setf (cdr seq) (cons quant (cdr seq))) seq) (t (list :sequence quant seq)))) quant)) :void))) (defun reg-expr (lexer) (declare #.*standard-optimize-settings*) "Parses and consumes a <regex>, a complete regular expression. The productions are: <regex> -> <seq> | <seq>\"|\"<regex>. Will return <parse-tree> or (:ALTERNATION <parse-tree> <parse-tree>)." 
(let ((pos (lexer-pos lexer))) (case (next-char lexer) ((nil) :void) ((#\|) (list :alternation :void (reg-expr lexer))) (otherwise otherwise un - read the character we just saw and parse a (setf (lexer-pos lexer) pos) (let* ((seq (seq lexer)) (pos (lexer-pos lexer))) (case (next-char lexer) ((nil) seq) ((#\|) alternation and we have the second production (let ((reg-expr (reg-expr lexer))) (cond ((and (consp reg-expr) (eq (first reg-expr) :alternation)) again we try to merge as above in SEQ (setf (cdr reg-expr) (cons seq (cdr reg-expr))) reg-expr) (t (list :alternation seq reg-expr))))) (otherwise just un - read the character and let another function (setf (lexer-pos lexer) pos) seq))))))) (defun reverse-strings (parse-tree) (declare #.*standard-optimize-settings*) (cond ((stringp parse-tree) (nreverse parse-tree)) ((consp parse-tree) (loop for parse-tree-rest on parse-tree while parse-tree-rest do (setf (car parse-tree-rest) (reverse-strings (car parse-tree-rest)))) parse-tree) (t parse-tree))) (defun parse-string (string) (declare #.*standard-optimize-settings*) "Translate the regex string STRING into a parse tree." (let* ((lexer (make-lexer string)) (parse-tree (reverse-strings (reg-expr lexer)))) (if (end-of-string-p lexer) parse-tree (signal-ppcre-syntax-error* (lexer-pos lexer) "Expected end of string"))))
809a1aef321cd6b7250c0bb5c0e052cb92fe9d8251bb944580950a838dec6124
tolitius/mount
project.clj
(defproject mount "0.1.17" :description "managing Clojure and ClojureScript app state since (reset)" :url "" :license {:name "Eclipse Public License" :url "-v10.html"} :source-paths ["src"] :dependencies [] ;; for visual clarity :tach {:test-runner-ns 'mount.test-self-host :source-paths ["test/core"]} :profiles {:dev {:source-paths ["dev" "dev/clj" "test/clj"] :dependencies [[org.clojure/clojure "1.8.0"] [org.clojure/clojurescript "1.9.946"]; :classifier "aot"] [datascript "0.13.3"] [compojure "1.4.0"] [ring/ring-jetty-adapter "1.1.0"] [cheshire "5.5.0"] [hiccups "0.3.0"] [com.andrewmcveigh/cljs-time "0.3.14"] [ch.qos.logback/logback-classic "1.1.3"] [org.clojure/tools.logging "0.3.1"] [robert/hooke "1.3.0"] [org.clojure/tools.namespace "0.2.11"] [org.clojure/tools.nrepl "0.2.11"] [com.datomic/datomic-free "0.9.5327" :exclusions [joda-time]]] :plugins [[lein-cljsbuild "1.1.1"] [lein-doo "0.1.6"] [lein-figwheel "0.5.0-2"] [test2junit "1.1.3"] [lein-tach "1.0.0"]] :test2junit-output-dir ~(or (System/getenv "CIRCLE_TEST_REPORTS") "target/test2junit") :clean-targets ^{:protect false} [:target-path [:cljsbuild :builds :dev :compiler :output-dir] [:cljsbuild :builds :prod :compiler :output-to]] :cljsbuild { :builds {:dev {:source-paths ["src" "dev/cljs"] :figwheel true :compiler {:main app.example :asset-path "js/compiled/out" :output-to "dev/resources/public/js/compiled/mount.js" :output-dir "dev/resources/public/js/compiled/out" :optimizations :none :source-map true :source-map-timestamp true}} :test {:source-paths ["src" "dev/cljs" "test"] :compiler {:main mount.test ;; :asset-path "js/compiled/out" :output-to "dev/resources/public/js/compiled/mount.js" :output-dir "dev/resources/public/js/compiled/test" :optimizations :none :source-map true :source-map-timestamp true}} :prod {:source-paths ["src" "dev/cljs"] :compiler {:output-to "dev/resources/public/js/compiled/mount.js" :optimizations :advanced :pretty-print false}}}}} :test {:source-paths ["test/core" "test/clj" "test/cljs"]}})
null
https://raw.githubusercontent.com/tolitius/mount/8a3fc385a2824a2f9dc47009628dc1d7506753eb/project.clj
clojure
for visual clarity :classifier "aot"] :asset-path "js/compiled/out"
(defproject mount "0.1.17" :description "managing Clojure and ClojureScript app state since (reset)" :url "" :license {:name "Eclipse Public License" :url "-v10.html"} :source-paths ["src"] :tach {:test-runner-ns 'mount.test-self-host :source-paths ["test/core"]} :profiles {:dev {:source-paths ["dev" "dev/clj" "test/clj"] :dependencies [[org.clojure/clojure "1.8.0"] [datascript "0.13.3"] [compojure "1.4.0"] [ring/ring-jetty-adapter "1.1.0"] [cheshire "5.5.0"] [hiccups "0.3.0"] [com.andrewmcveigh/cljs-time "0.3.14"] [ch.qos.logback/logback-classic "1.1.3"] [org.clojure/tools.logging "0.3.1"] [robert/hooke "1.3.0"] [org.clojure/tools.namespace "0.2.11"] [org.clojure/tools.nrepl "0.2.11"] [com.datomic/datomic-free "0.9.5327" :exclusions [joda-time]]] :plugins [[lein-cljsbuild "1.1.1"] [lein-doo "0.1.6"] [lein-figwheel "0.5.0-2"] [test2junit "1.1.3"] [lein-tach "1.0.0"]] :test2junit-output-dir ~(or (System/getenv "CIRCLE_TEST_REPORTS") "target/test2junit") :clean-targets ^{:protect false} [:target-path [:cljsbuild :builds :dev :compiler :output-dir] [:cljsbuild :builds :prod :compiler :output-to]] :cljsbuild { :builds {:dev {:source-paths ["src" "dev/cljs"] :figwheel true :compiler {:main app.example :asset-path "js/compiled/out" :output-to "dev/resources/public/js/compiled/mount.js" :output-dir "dev/resources/public/js/compiled/out" :optimizations :none :source-map true :source-map-timestamp true}} :test {:source-paths ["src" "dev/cljs" "test"] :compiler {:main mount.test :output-to "dev/resources/public/js/compiled/mount.js" :output-dir "dev/resources/public/js/compiled/test" :optimizations :none :source-map true :source-map-timestamp true}} :prod {:source-paths ["src" "dev/cljs"] :compiler {:output-to "dev/resources/public/js/compiled/mount.js" :optimizations :advanced :pretty-print false}}}}} :test {:source-paths ["test/core" "test/clj" "test/cljs"]}})
0746a092f905de08b7d11490ad6afd2b4205875b6a4bf9f4351fe893c9410e61
sonowz/advent-of-code-haskell
Day02.hs
module Y2021.Day02 where import Lib.IO import Lib.Types import Lib.Vector2D import Relude import Relude.Extra.Bifunctor import Relude.Extra.Foldable1 import Relude.Extra.Map import Relude.Extra.Newtype import Relude.Extra.Tuple ----------------------- -- Type declarations -- ----------------------- data Command = CForward Int | CUp Int | CDown Int newtype Pos = Pos (Int, Int) deriving (Pos2D, Ord, Eq, Show) via (Int, Int) newtype Dir = Dir (Int, Int) deriving (Pos2D, Ord, Eq, Num) via Pos newtype Aim = Aim Int deriving (Ord, Eq, Num) via Int instance Num Pos where Pos (x1, y1) + Pos (x2, y2) = Pos (x1 + x2, y1 + y2) Pos (x1, y1) - Pos (x2, y2) = Pos (x1 - x2, y1 - y2) Pos (x1, y1) * Pos (x2, y2) = Pos (x1 * x2, y1 * y2) abs (Pos (x, y)) = Pos (abs x, abs y) signum (Pos (x, y)) = Pos (signum x, signum y) fromInteger x = Pos (fromInteger x, fromInteger x) ------------ -- Part 1 -- ------------ solve1 :: [Command] -> Int solve1 commands = posToAnswer $ moveByDirs (Pos (0, 0)) dirs where dirs = commandToDir <$> commands posToAnswer :: Pos -> Int posToAnswer (Pos (x, y)) = x * y -- Note that down is positive commandToDir :: Command -> Dir commandToDir (CForward n) = Dir (n, 0) commandToDir (CUp n) = Dir (0, -n) commandToDir (CDown n) = Dir (0, n) moveByDirs :: Pos -> [Dir] -> Pos moveByDirs = foldl' (\pos dir -> pos + coerce dir) ------------ Part 2 -- ------------ solve2 :: [Command] -> Int solve2 = posToAnswer . moveByCommands (Pos (0, 0), Aim 0) moveByCommands :: (Pos, Aim) -> [Command] -> Pos moveByCommands posAim = fst . foldl' move posAim where move (Pos (x, y), Aim a) (CForward n) = (Pos (x + n, y + a * n), Aim a) move (p , Aim a) (CUp n) = (p, Aim (a - n)) move (p , Aim a) (CDown n) = (p, Aim (a + n)) -------------------- Main & Parsing -- -------------------- main' :: IO () main' = do commands <- parseCommand <<$>> readFileLines "inputs/Y2021/Day02.txt" :: IO [Command] print $ solve1 commands print $ solve2 commands replace :: Char -> Char -> String -> String replace a b = map (\c -> if c == a then b else c) parseCommand :: Text -> Command parseCommand line = case words line of ["forward", n] -> CForward (readInt n) ["up" , n] -> CUp (readInt n) ["down" , n] -> CDown (readInt n) _ -> error "unexpected command"
null
https://raw.githubusercontent.com/sonowz/advent-of-code-haskell/af2e439bb7d2d18c769e9c50eef712f92c5508dc/src/Y2021/Day02.hs
haskell
--------------------- Type declarations -- --------------------- ---------- Part 1 -- ---------- Note that down is positive ---------- ---------- ------------------ ------------------
module Y2021.Day02 where import Lib.IO import Lib.Types import Lib.Vector2D import Relude import Relude.Extra.Bifunctor import Relude.Extra.Foldable1 import Relude.Extra.Map import Relude.Extra.Newtype import Relude.Extra.Tuple data Command = CForward Int | CUp Int | CDown Int newtype Pos = Pos (Int, Int) deriving (Pos2D, Ord, Eq, Show) via (Int, Int) newtype Dir = Dir (Int, Int) deriving (Pos2D, Ord, Eq, Num) via Pos newtype Aim = Aim Int deriving (Ord, Eq, Num) via Int instance Num Pos where Pos (x1, y1) + Pos (x2, y2) = Pos (x1 + x2, y1 + y2) Pos (x1, y1) - Pos (x2, y2) = Pos (x1 - x2, y1 - y2) Pos (x1, y1) * Pos (x2, y2) = Pos (x1 * x2, y1 * y2) abs (Pos (x, y)) = Pos (abs x, abs y) signum (Pos (x, y)) = Pos (signum x, signum y) fromInteger x = Pos (fromInteger x, fromInteger x) solve1 :: [Command] -> Int solve1 commands = posToAnswer $ moveByDirs (Pos (0, 0)) dirs where dirs = commandToDir <$> commands posToAnswer :: Pos -> Int posToAnswer (Pos (x, y)) = x * y commandToDir :: Command -> Dir commandToDir (CForward n) = Dir (n, 0) commandToDir (CUp n) = Dir (0, -n) commandToDir (CDown n) = Dir (0, n) moveByDirs :: Pos -> [Dir] -> Pos moveByDirs = foldl' (\pos dir -> pos + coerce dir) solve2 :: [Command] -> Int solve2 = posToAnswer . moveByCommands (Pos (0, 0), Aim 0) moveByCommands :: (Pos, Aim) -> [Command] -> Pos moveByCommands posAim = fst . foldl' move posAim where move (Pos (x, y), Aim a) (CForward n) = (Pos (x + n, y + a * n), Aim a) move (p , Aim a) (CUp n) = (p, Aim (a - n)) move (p , Aim a) (CDown n) = (p, Aim (a + n)) main' :: IO () main' = do commands <- parseCommand <<$>> readFileLines "inputs/Y2021/Day02.txt" :: IO [Command] print $ solve1 commands print $ solve2 commands replace :: Char -> Char -> String -> String replace a b = map (\c -> if c == a then b else c) parseCommand :: Text -> Command parseCommand line = case words line of ["forward", n] -> CForward (readInt n) ["up" , n] -> CUp (readInt n) ["down" , n] -> CDown (readInt n) _ -> error "unexpected command"
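To make the aim-based movement in solve2 above concrete, here is a self-contained sketch that folds a small command list the same way moveByCommands does; the sample course below is assumed for illustration and is not taken from the repository's input file.
-- Self-contained sketch of the aim-based movement from solve2 above.
-- The six sample commands are an assumption for illustration only.
module Day02Sketch where

data Command = CForward Int | CUp Int | CDown Int

-- Fold the commands over (x, depth, aim), mirroring moveByCommands.
run :: [Command] -> (Int, Int)
run = go (0, 0, 0)
  where
    go (x, y, _) []                = (x, y)
    go (x, y, a) (CForward n : cs) = go (x + n, y + a * n, a) cs
    go (x, y, a) (CUp n      : cs) = go (x, y, a - n) cs
    go (x, y, a) (CDown n    : cs) = go (x, y, a + n) cs

sample :: [Command]
sample = [CForward 5, CDown 5, CForward 8, CUp 3, CDown 8, CForward 2]

main :: IO ()
main = do
  let (x, y) = run sample   -- ends at horizontal position 15, depth 60
  print (x * y)             -- prints 900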
321e3c2c32baa108491071f7f7865249afc362960213d1317e237322539de998
quark-lang/quark
Closure.hs
module Core.Closure.Definition.Closure where import Data.Map import Core.Inference.Type.AST data Closure = Closure { name :: String, environment :: Map String Type, arguments :: Map String Type, body :: TypedAST }
null
https://raw.githubusercontent.com/quark-lang/quark/e3dc7fff4e4dfba3e5c9ab71f10ede8bc5a30a44/app/Core/Closure/Definition/Closure.hs
haskell
module Core.Closure.Definition.Closure where import Data.Map import Core.Inference.Type.AST data Closure = Closure { name :: String, environment :: Map String Type, arguments :: Map String Type, body :: TypedAST }
aa8b3a73a426df7600097aafd065a5eb19c72f64dbfb8a62a6b996f6d8d61208
Eonblast/Scalaxis
fd.erl
2007 - 2011 Zuse Institute Berlin 2009 onScale solutions GmbH Licensed under the Apache License , Version 2.0 ( the " License " ) ; % you may not use this file except in compliance with the License. % You may obtain a copy of the License at % % -2.0 % % Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , % WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. % See the License for the specific language governing permissions and % limitations under the License. @author < > @doc Failure detector based on . %% @end %% @version $Id$ -module(fd). -author(''). -vsn('$Id$'). %-define(TRACE(X,Y), io:format(X,Y)). -define(TRACE(_X,_Y), ok). -behaviour(gen_component). -include("scalaris.hrl"). -ifdef(with_export_type_support). -export_type([cookie/0]). -endif. -export([subscribe/1, subscribe/2]). -export([unsubscribe/1, unsubscribe/2]). -export([update_subscriptions/2]). gen_server & gen_component callbacks -export([start_link/1, init/1, on/2]). -type(cookie() :: '$fd_nil' | any()). -type(state() :: ok). %% @doc generates a failure detector for the calling process on the given pid. -spec subscribe(comm:mypid() | [comm:mypid()]) -> ok. subscribe([]) -> ok; subscribe(GlobalPids) -> subscribe(GlobalPids, {self(), '$fd_nil'}). %% @doc generates a failure detector for the calling process and cookie on the %% given pid. -spec subscribe(comm:mypid() | [comm:mypid()], cookie()) -> ok. subscribe([], _Cookie) -> ok; subscribe(GlobalPids, Cookie) when is_list(GlobalPids) -> _ = [begin HBPid = get_hbs(Pid), comm:send_local(HBPid, {add_subscriber, self(), Pid, Cookie}) end || Pid <- GlobalPids], ok; subscribe(GlobalPid, Cookie) -> subscribe([GlobalPid], Cookie). %% @doc deletes the failure detector for the given pid. -spec unsubscribe(comm:mypid() | [comm:mypid()]) -> ok. unsubscribe([])-> ok; unsubscribe(GlobalPids)-> unsubscribe(GlobalPids, {self(), '$fd_nil'}). %% @doc deletes the failure detector for the given pid and cookie. -spec unsubscribe(comm:mypid() | [comm:mypid()], cookie()) -> ok. unsubscribe([], _Cookie) -> ok; unsubscribe(GlobalPids, Cookie) when is_list(GlobalPids) -> _ = [begin HBPid = get_hbs(Pid), comm:send_local(HBPid, {del_subscriber, self(), Pid, Cookie}) end || Pid <- GlobalPids], ok; unsubscribe(GlobalPid, Cookie) -> unsubscribe([GlobalPid], Cookie). @doc from the pids in OldPids but not in and subscribes to the pids in but not in OldPids . -spec update_subscriptions([comm:mypid()], [comm:mypid()]) -> ok. update_subscriptions(OldPids, NewPids) -> {OnlyOldPids, _Same, OnlyNewPids} = util:split_unique(OldPids, NewPids), unsubscribe(OnlyOldPids), subscribe(OnlyNewPids). %% gen_component functions %% @doc Starts the failure detector server -spec start_link(pid_groups:groupname()) -> {ok, pid()}. start_link(ServiceGroup) -> gen_component:start_link(?MODULE, [], [wait_for_init, {erlang_register, ?MODULE}, {pid_groups_join_as, ServiceGroup, ?MODULE}]). %% @doc Initialises the module with an empty state. -spec init([]) -> state(). init([]) -> % local heartbeat processes _ = ets:new(fd_hbs, [set, protected, named_table]), ok. @private -spec on(comm:message(), state()) -> state(). 
on({create_hbs, Pid, ReplyTo}, State) -> NewHBS = start_and_register_hbs(Pid), comm:send_local(ReplyTo, {create_hbs_reply, NewHBS}), State; on({hbs_finished, RemoteWatchedPid}, State) -> ets:delete(fd_hbs, comm:get(fd, RemoteWatchedPid)), State; on({subscribe_heartbeats, Subscriber, TargetPid}, State) -> %% we establish the back-direction here, so we subscribe to the %% subscriber and add the TargetPid to the local monitoring. ?TRACE("FD: subscribe_heartbeats~n", []), HBPid = case ets:lookup(fd_hbs, comm:get(fd, Subscriber)) of [] -> % synchronously create new hb process start_and_register_hbs(Subscriber); [Res] -> element(2, Res) end, comm:send_local(HBPid, {add_watching_of, TargetPid}), comm:send(Subscriber, {update_remote_hbs_to, comm:make_global(HBPid)}), State; on({pong, RemHBSSubscriber, RemoteDelay}, State) -> ?TRACE("FD: pong, ~p~n", [RemHBSSubscriber]), forward_to_hbs(RemHBSSubscriber, {pong_via_fd, RemHBSSubscriber, RemoteDelay}), State; on({add_watching_of_via_fd, Subscriber, Pid}, State) -> ?TRACE("FD: add_watching_of ~p~n", [Pid]), forward_to_hbs(Subscriber, {add_watching_of, Pid}), State; on({del_watching_of_via_fd, Subscriber, Pid}, State) -> ?TRACE("FD: del_watching_of ~p~n", [Pid]), forward_to_hbs(Subscriber, {del_watching_of, Pid}), State; on({web_debug_info , _ Requestor } , State ) - > %% ?TRACE("FD: web_debug_info~n", []), %% TODO: reimplement for new fd. Subscriptions = fd_db : ( ) , %% % resolve (local and remote) pids to names: %% S2 = [begin %% case comm:is_local(TargetPid) of %% true -> {Subscriber, %% {pid_groups:pid_to_name(comm:make_local(TargetPid)), Cookie}}; %% _ -> comm : send(comm : get(pid_groups , TargetPid ) , { group_and_name_of , TargetPid , comm : this ( ) } ) , %% receive %% {group_and_name_of_response, Name} -> %% {Subscriber, {pid_groups:pid_to_name2(Name), Cookie}} after 2000 - > X %% end %% end end || X = { Subscriber , { TargetPid , Cookie } } < - Subscriptions ] , KeyValueList = %% [{"subscriptions", length(Subscriptions)}, %% {"subscriptions (subscriber, {target, cookie}):", ""} | %% [{pid_groups:pid_to_name(Pid), lists : flatten(io_lib : format("~p " , [ X ] ) ) } || { Pid , X } < - S2 ] ] , comm : send_local(Requestor , { web_debug_info_reply , } ) , State ; on({unittest_report_down, Pid}, State) -> ?TRACE("FD: unittest_report_down p~n", [Pid]), forward_to_hbs( Pid, {'DOWN', no_ref, process, comm:make_local(Pid), unittest_down}), State. Internal functions -spec my_fd_pid() -> pid() | failed. my_fd_pid() -> case whereis(?MODULE) of undefined -> log:log(error, "[ FD ] call of my_fd_pid undefined"), failed; PID -> PID end. %@doc get hbs process in the context of a client process -spec get_hbs(comm:mypid()) -> pid(). get_hbs(Pid) -> %% normalize for the table entry (just distinguish nodes) FDPid = comm:get(fd, Pid), case ets:lookup(fd_hbs, FDPid) of [] -> % synchronously create new hb process comm:send_local(my_fd_pid(), {create_hbs, Pid, self()}), receive {create_hbs_reply, NewHBS} -> NewHBS end; [Res] -> element(2, Res) end. @doc start a new hbs process inside the fd process context ( ets owner ) -spec start_and_register_hbs(comm:mypid()) -> pid(). start_and_register_hbs(Pid) -> FDPid = comm:get(fd, Pid), case ets:lookup(fd_hbs, FDPid) of [] -> NewHBS = element(2, fd_hbs:start_link(pid_groups:my_groupname(), Pid)), ets:insert(fd_hbs, {FDPid, NewHBS}), NewHBS; [Res] -> element(2, Res) end. -spec forward_to_hbs(comm:mypid(), comm:message()) -> ok. 
forward_to_hbs(Pid, Msg) -> case ets:lookup(fd_hbs, comm:get(fd,Pid)) of %% [] -> hbs not yet started. This should not happen, as all %% execution paths before this call should invoke the hbs %% synchronously. Let gen_component report the error. [Entry] -> HBSPid = element(2, Entry), comm:send_local(HBSPid, Msg) end, ok.
null
https://raw.githubusercontent.com/Eonblast/Scalaxis/10287d11428e627dca8c41c818745763b9f7e8d4/src/fd.erl
erlang
you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. @end @version $Id$ -define(TRACE(X,Y), io:format(X,Y)). @doc generates a failure detector for the calling process on the given pid. @doc generates a failure detector for the calling process and cookie on the given pid. @doc deletes the failure detector for the given pid. @doc deletes the failure detector for the given pid and cookie. gen_component functions @doc Starts the failure detector server @doc Initialises the module with an empty state. local heartbeat processes we establish the back-direction here, so we subscribe to the subscriber and add the TargetPid to the local monitoring. synchronously create new hb process ?TRACE("FD: web_debug_info~n", []), TODO: reimplement for new fd. % resolve (local and remote) pids to names: S2 = [begin case comm:is_local(TargetPid) of true -> {Subscriber, {pid_groups:pid_to_name(comm:make_local(TargetPid)), Cookie}}; _ -> receive {group_and_name_of_response, Name} -> {Subscriber, {pid_groups:pid_to_name2(Name), Cookie}} end end [{"subscriptions", length(Subscriptions)}, {"subscriptions (subscriber, {target, cookie}):", ""} | [{pid_groups:pid_to_name(Pid), @doc get hbs process in the context of a client process normalize for the table entry (just distinguish nodes) synchronously create new hb process [] -> hbs not yet started. This should not happen, as all execution paths before this call should invoke the hbs synchronously. Let gen_component report the error.
2007 - 2011 Zuse Institute Berlin 2009 onScale solutions GmbH Licensed under the Apache License , Version 2.0 ( the " License " ) ; distributed under the License is distributed on an " AS IS " BASIS , @author < > @doc Failure detector based on . -module(fd). -author(''). -vsn('$Id$'). -define(TRACE(_X,_Y), ok). -behaviour(gen_component). -include("scalaris.hrl"). -ifdef(with_export_type_support). -export_type([cookie/0]). -endif. -export([subscribe/1, subscribe/2]). -export([unsubscribe/1, unsubscribe/2]). -export([update_subscriptions/2]). gen_server & gen_component callbacks -export([start_link/1, init/1, on/2]). -type(cookie() :: '$fd_nil' | any()). -type(state() :: ok). -spec subscribe(comm:mypid() | [comm:mypid()]) -> ok. subscribe([]) -> ok; subscribe(GlobalPids) -> subscribe(GlobalPids, {self(), '$fd_nil'}). -spec subscribe(comm:mypid() | [comm:mypid()], cookie()) -> ok. subscribe([], _Cookie) -> ok; subscribe(GlobalPids, Cookie) when is_list(GlobalPids) -> _ = [begin HBPid = get_hbs(Pid), comm:send_local(HBPid, {add_subscriber, self(), Pid, Cookie}) end || Pid <- GlobalPids], ok; subscribe(GlobalPid, Cookie) -> subscribe([GlobalPid], Cookie). -spec unsubscribe(comm:mypid() | [comm:mypid()]) -> ok. unsubscribe([])-> ok; unsubscribe(GlobalPids)-> unsubscribe(GlobalPids, {self(), '$fd_nil'}). -spec unsubscribe(comm:mypid() | [comm:mypid()], cookie()) -> ok. unsubscribe([], _Cookie) -> ok; unsubscribe(GlobalPids, Cookie) when is_list(GlobalPids) -> _ = [begin HBPid = get_hbs(Pid), comm:send_local(HBPid, {del_subscriber, self(), Pid, Cookie}) end || Pid <- GlobalPids], ok; unsubscribe(GlobalPid, Cookie) -> unsubscribe([GlobalPid], Cookie). @doc from the pids in OldPids but not in and subscribes to the pids in but not in OldPids . -spec update_subscriptions([comm:mypid()], [comm:mypid()]) -> ok. update_subscriptions(OldPids, NewPids) -> {OnlyOldPids, _Same, OnlyNewPids} = util:split_unique(OldPids, NewPids), unsubscribe(OnlyOldPids), subscribe(OnlyNewPids). -spec start_link(pid_groups:groupname()) -> {ok, pid()}. start_link(ServiceGroup) -> gen_component:start_link(?MODULE, [], [wait_for_init, {erlang_register, ?MODULE}, {pid_groups_join_as, ServiceGroup, ?MODULE}]). -spec init([]) -> state(). init([]) -> _ = ets:new(fd_hbs, [set, protected, named_table]), ok. @private -spec on(comm:message(), state()) -> state(). 
on({create_hbs, Pid, ReplyTo}, State) -> NewHBS = start_and_register_hbs(Pid), comm:send_local(ReplyTo, {create_hbs_reply, NewHBS}), State; on({hbs_finished, RemoteWatchedPid}, State) -> ets:delete(fd_hbs, comm:get(fd, RemoteWatchedPid)), State; on({subscribe_heartbeats, Subscriber, TargetPid}, State) -> ?TRACE("FD: subscribe_heartbeats~n", []), HBPid = case ets:lookup(fd_hbs, comm:get(fd, Subscriber)) of start_and_register_hbs(Subscriber); [Res] -> element(2, Res) end, comm:send_local(HBPid, {add_watching_of, TargetPid}), comm:send(Subscriber, {update_remote_hbs_to, comm:make_global(HBPid)}), State; on({pong, RemHBSSubscriber, RemoteDelay}, State) -> ?TRACE("FD: pong, ~p~n", [RemHBSSubscriber]), forward_to_hbs(RemHBSSubscriber, {pong_via_fd, RemHBSSubscriber, RemoteDelay}), State; on({add_watching_of_via_fd, Subscriber, Pid}, State) -> ?TRACE("FD: add_watching_of ~p~n", [Pid]), forward_to_hbs(Subscriber, {add_watching_of, Pid}), State; on({del_watching_of_via_fd, Subscriber, Pid}, State) -> ?TRACE("FD: del_watching_of ~p~n", [Pid]), forward_to_hbs(Subscriber, {del_watching_of, Pid}), State; on({web_debug_info , _ Requestor } , State ) - > Subscriptions = fd_db : ( ) , comm : send(comm : get(pid_groups , TargetPid ) , { group_and_name_of , TargetPid , comm : this ( ) } ) , after 2000 - > X end || X = { Subscriber , { TargetPid , Cookie } } < - Subscriptions ] , KeyValueList = lists : flatten(io_lib : format("~p " , [ X ] ) ) } || { Pid , X } < - S2 ] ] , comm : send_local(Requestor , { web_debug_info_reply , } ) , State ; on({unittest_report_down, Pid}, State) -> ?TRACE("FD: unittest_report_down p~n", [Pid]), forward_to_hbs( Pid, {'DOWN', no_ref, process, comm:make_local(Pid), unittest_down}), State. Internal functions -spec my_fd_pid() -> pid() | failed. my_fd_pid() -> case whereis(?MODULE) of undefined -> log:log(error, "[ FD ] call of my_fd_pid undefined"), failed; PID -> PID end. -spec get_hbs(comm:mypid()) -> pid(). get_hbs(Pid) -> FDPid = comm:get(fd, Pid), case ets:lookup(fd_hbs, FDPid) of comm:send_local(my_fd_pid(), {create_hbs, Pid, self()}), receive {create_hbs_reply, NewHBS} -> NewHBS end; [Res] -> element(2, Res) end. @doc start a new hbs process inside the fd process context ( ets owner ) -spec start_and_register_hbs(comm:mypid()) -> pid(). start_and_register_hbs(Pid) -> FDPid = comm:get(fd, Pid), case ets:lookup(fd_hbs, FDPid) of [] -> NewHBS = element(2, fd_hbs:start_link(pid_groups:my_groupname(), Pid)), ets:insert(fd_hbs, {FDPid, NewHBS}), NewHBS; [Res] -> element(2, Res) end. -spec forward_to_hbs(comm:mypid(), comm:message()) -> ok. forward_to_hbs(Pid, Msg) -> case ets:lookup(fd_hbs, comm:get(fd,Pid)) of [Entry] -> HBSPid = element(2, Entry), comm:send_local(HBSPid, Msg) end, ok.
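update_subscriptions/2 above relies on util:split_unique/2 from Scalaris, which this record does not include. The following standalone Erlang sketch shows the same three-way split using only the stdlib lists module; the module and function names are made up for illustration and are not part of Scalaris.

%% Standalone sketch of the set split behind update_subscriptions/2.
-module(fd_sketch).
-export([split_pids/2]).

%% Returns {OnlyOld, Both, OnlyNew}: callers would unsubscribe OnlyOld and
%% subscribe OnlyNew, leaving the pids present in both lists untouched.
split_pids(OldPids, NewPids) ->
    OnlyOld = [P || P <- OldPids, not lists:member(P, NewPids)],
    Both    = [P || P <- OldPids, lists:member(P, NewPids)],
    OnlyNew = [P || P <- NewPids, not lists:member(P, OldPids)],
    {OnlyOld, Both, OnlyNew}.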
4967f5a803a09d5d3bb77e50e6d9357d6323ed0620bfd4bca584b1aec9f834b5
simonmar/parconc-examples
FiniteMap.hs
-- -- Adapted from the program "infer", believed to have been originally authored by , and used in the nofib benchmark suite since at least the late 90s . -- module FiniteMap (FM, emptyFM, unitFM, extendFM, makeFM, unmakeFM, thenFM, plusFM, lookupFM, lookupElseFM, mapFM, domFM, ranFM, disjointFM) where data FM a b = MkFM [(a,b)] emptyFM :: FM a b emptyFM = MkFM [] unitFM :: a -> b -> FM a b unitFM a b = MkFM [(a,b)] extendFM :: FM a b -> a -> b -> FM a b extendFM (MkFM abs) a b = MkFM ((a,b) : abs) makeFM :: [(a,b)] -> FM a b makeFM abs = MkFM abs unmakeFM :: FM a b -> [(a,b)] unmakeFM (MkFM abs) = abs thenFM :: FM a b -> FM a b -> FM a b (MkFM abs1) `thenFM` (MkFM abs2) = MkFM (abs2 ++ abs1) plusFM :: (Eq a) => FM a b -> FM a b -> FM a b f `plusFM` g | f `disjointFM` g = f `thenFM` g lookupFM :: (Eq a) => FM a b -> a -> b lookupFM f a = lookupElseFM (error "lookup") f a lookupElseFM :: (Eq a) => b -> FM a b -> a -> b lookupElseFM b (MkFM abs) a = head ( [ b' | (a',b') <- abs, a==a' ] ++ [ b ] ) mapFM :: (b -> c) -> FM a b -> FM a c mapFM h (MkFM abs) = MkFM [ (a, h b) | (a,b) <- abs ] domFM :: FM a b -> [a] domFM (MkFM abs) = [ a | (a,b) <- abs ] ranFM :: FM a b -> [b] ranFM (MkFM abs) = [ b | (a,b) <- abs ] disjointFM :: (Eq a) => FM a b -> FM a b -> Bool f `disjointFM` g = domFM f `disjoint` domFM g disjoint :: (Eq a) => [a] -> [a] -> Bool xs `disjoint` ys = and [ not (x `elem` ys) | x <- xs ]
null
https://raw.githubusercontent.com/simonmar/parconc-examples/840a3f508f9bb6e03961e1b90311a1edd945adba/parinfer/FiniteMap.hs
haskell
Adapted from the program "infer", believed to have been originally
authored by , and used in the nofib benchmark suite since at least the late 90s . module FiniteMap (FM, emptyFM, unitFM, extendFM, makeFM, unmakeFM, thenFM, plusFM, lookupFM, lookupElseFM, mapFM, domFM, ranFM, disjointFM) where data FM a b = MkFM [(a,b)] emptyFM :: FM a b emptyFM = MkFM [] unitFM :: a -> b -> FM a b unitFM a b = MkFM [(a,b)] extendFM :: FM a b -> a -> b -> FM a b extendFM (MkFM abs) a b = MkFM ((a,b) : abs) makeFM :: [(a,b)] -> FM a b makeFM abs = MkFM abs unmakeFM :: FM a b -> [(a,b)] unmakeFM (MkFM abs) = abs thenFM :: FM a b -> FM a b -> FM a b (MkFM abs1) `thenFM` (MkFM abs2) = MkFM (abs2 ++ abs1) plusFM :: (Eq a) => FM a b -> FM a b -> FM a b f `plusFM` g | f `disjointFM` g = f `thenFM` g lookupFM :: (Eq a) => FM a b -> a -> b lookupFM f a = lookupElseFM (error "lookup") f a lookupElseFM :: (Eq a) => b -> FM a b -> a -> b lookupElseFM b (MkFM abs) a = head ( [ b' | (a',b') <- abs, a==a' ] ++ [ b ] ) mapFM :: (b -> c) -> FM a b -> FM a c mapFM h (MkFM abs) = MkFM [ (a, h b) | (a,b) <- abs ] domFM :: FM a b -> [a] domFM (MkFM abs) = [ a | (a,b) <- abs ] ranFM :: FM a b -> [b] ranFM (MkFM abs) = [ b | (a,b) <- abs ] disjointFM :: (Eq a) => FM a b -> FM a b -> Bool f `disjointFM` g = domFM f `disjoint` domFM g disjoint :: (Eq a) => [a] -> [a] -> Bool xs `disjoint` ys = and [ not (x `elem` ys) | x <- xs ]
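FiniteMap is a small association-list map keyed on Eq; the sketch below exercises its exported API. It assumes the module above is importable as FiniteMap; the keys and values are illustrative.

import FiniteMap

main :: IO ()
main = do
  let ages  = makeFM [("ann", 34), ("bob", 27)] :: FM String Int
      ages' = ages `plusFM` unitFM "cid" 41   -- plusFM is only defined for disjoint domains
  print (lookupFM ages' "bob")                -- 27
  print (lookupElseFM 0 ages' "dan")          -- 0: the fallback for a missing key
  print (domFM ages')                         -- ["cid","ann","bob"]: thenFM puts the newer bindings first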
0b7b68e414d095fcb69e0e2e2e4b490a4a5c7ee87f6b32b8f53c787134ef7448
rcherrueau/rastache
inverted.rkt
#lang racket/base (require rastache) (define template #<<HERESTRING {{#admin}}Admin.{{/admin}} {{^admin}}Not Admin.{{/admin}} {{#person}}Hi {{name}}!{{/person}} HERESTRING ) (rast-compile/render (open-input-string template) #hash{ (admin . #f) (person . #hash{(name . "Jim")}) } (current-output-port))
null
https://raw.githubusercontent.com/rcherrueau/rastache/059d00c83416f8ba27cc38fa7f8321b075756d14/examples/inverted.rkt
racket
#lang racket/base (require rastache) (define template #<<HERESTRING {{#admin}}Admin.{{/admin}} {{^admin}}Not Admin.{{/admin}} {{#person}}Hi {{name}}!{{/person}} HERESTRING ) (rast-compile/render (open-input-string template) #hash{ (admin . #f) (person . #hash{(name . "Jim")}) } (current-output-port))
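The example above renders the template with admin bound to #f, so the inverted {{^admin}} section fires. A companion sketch, assuming the same rastache API, flips only that flag to show the {{#admin}} section rendering instead:

#lang racket/base
(require rastache)

(define template #<<HERESTRING
{{#admin}}Admin.{{/admin}}
{{^admin}}Not Admin.{{/admin}}
{{#person}}Hi {{name}}!{{/person}}
HERESTRING
)

;; With admin = #t the {{#admin}} section renders and {{^admin}} is skipped.
(rast-compile/render (open-input-string template)
                     #hash{ (admin . #t)
                            (person . #hash{(name . "Jim")}) }
                     (current-output-port))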
5bed7717d3e4b16611e2e9ce9588f43e653ef02352f9d3de97744db60a923d10
bpr/kd_tree
two_d.ml
module type Location = sig type point = float * float type elt val to_point : elt -> point end module Make(L : Location) : ( Multidim.S with type real = float with type range = (float * float) array with type elt = L.elt with type point = float * float) = struct type real = float type point = float * float type elt = L.elt type interval = float * float type range = (float * float) array let noninterval (x0, x1) = x1 > x0 let add_loc ((x0, x1) as interval) x = if x0 < x1 then if x < x0 then (x,x1) else if x > x1 then (x0,x) else interval else if x0 = x1 then if x > x0 then (x0,x) else if x < x0 then (x,x0) else interval else (* x0 > x1, invalid range *) (x,x) let dim = 2 let null_interval = (Pervasives.max_float, Pervasives.min_float) let null_range = Array.create dim null_interval let range_maker (r : range) (e : elt) : range = let (f0,f1) = L.to_point e in let mapf i interval = if i = 0 then add_loc interval f0 else add_loc interval f1 in Array.mapi mapf r let intersect_intervals (x00,x01) (x10,x11) = if x01 < x10 || x11 < x00 then null_interval else (max x00 x10, min x01 x11) let is_valid_interval (x0,x1) = x0 <= x1 let intersect_ranges r0 r1 = let ((x00,x01) as x0_interval) = Array.get r0 0 in let ((y00,y01) as y0_interval) = Array.get r0 1 in let ((x10,x11) as x1_interval) = Array.get r1 0 in let ((y10,y11) as y1_interval) = Array.get r1 1 in let ivi = is_valid_interval in if ivi x0_interval && ivi x1_interval && ivi y0_interval && ivi y1_interval then let x_interval = intersect_intervals x0_interval x1_interval in let y_interval = intersect_intervals y0_interval y1_interval in if ivi x_interval && ivi y_interval then [|x_interval; y_interval|] else null_range else null_range let point_in_range r (x,y) = let (x0,x1) = Array.get r 0 in let (y0,y1) = Array.get r 1 in x >= x0 && x <= x1 && y >= y0 && y <= y1 let to_point = L.to_point let square x = x *. x let axial_compare n (x0,y0) (x1,y1) = if n = 0 then Pervasives.compare x0 x1 else if n = 1 then Pervasives.compare y0 y1 else invalid_arg ("TwoD.select: " ^ (Pervasives.string_of_int n)) let squared_distance (x0,y0) (x1,y1) = let dx = x1 -. x0 in let dy = y1 -. y0 in dx *. dx +. dy *. dy let squared_axial_distance n (x0,y0) (x1,y1) = if n = 0 then square (x0 -. x1) else if n = 1 then square (y0 -. y1) else invalid_arg ("TwoD.select: " ^ (Pervasives.string_of_int n)) end;;
null
https://raw.githubusercontent.com/bpr/kd_tree/d0450aadd4617c635fd0c1c1c759137207ebb8d7/two_d.ml
ocaml
x0 > x1, invalid range
module type Location = sig type point = float * float type elt val to_point : elt -> point end module Make(L : Location) : ( Multidim.S with type real = float with type range = (float * float) array with type elt = L.elt with type point = float * float) = struct type real = float type point = float * float type elt = L.elt type interval = float * float type range = (float * float) array let noninterval (x0, x1) = x1 > x0 let add_loc ((x0, x1) as interval) x = if x0 < x1 then if x < x0 then (x,x1) else if x > x1 then (x0,x) else interval else if x0 = x1 then if x > x0 then (x0,x) else if x < x0 then (x,x0) else interval (x,x) let dim = 2 let null_interval = (Pervasives.max_float, Pervasives.min_float) let null_range = Array.create dim null_interval let range_maker (r : range) (e : elt) : range = let (f0,f1) = L.to_point e in let mapf i interval = if i = 0 then add_loc interval f0 else add_loc interval f1 in Array.mapi mapf r let intersect_intervals (x00,x01) (x10,x11) = if x01 < x10 || x11 < x00 then null_interval else (max x00 x10, min x01 x11) let is_valid_interval (x0,x1) = x0 <= x1 let intersect_ranges r0 r1 = let ((x00,x01) as x0_interval) = Array.get r0 0 in let ((y00,y01) as y0_interval) = Array.get r0 1 in let ((x10,x11) as x1_interval) = Array.get r1 0 in let ((y10,y11) as y1_interval) = Array.get r1 1 in let ivi = is_valid_interval in if ivi x0_interval && ivi x1_interval && ivi y0_interval && ivi y1_interval then let x_interval = intersect_intervals x0_interval x1_interval in let y_interval = intersect_intervals y0_interval y1_interval in if ivi x_interval && ivi y_interval then [|x_interval; y_interval|] else null_range else null_range let point_in_range r (x,y) = let (x0,x1) = Array.get r 0 in let (y0,y1) = Array.get r 1 in x >= x0 && x <= x1 && y >= y0 && y <= y1 let to_point = L.to_point let square x = x *. x let axial_compare n (x0,y0) (x1,y1) = if n = 0 then Pervasives.compare x0 x1 else if n = 1 then Pervasives.compare y0 y1 else invalid_arg ("TwoD.select: " ^ (Pervasives.string_of_int n)) let squared_distance (x0,y0) (x1,y1) = let dx = x1 -. x0 in let dy = y1 -. y0 in dx *. dx +. dy *. dy let squared_axial_distance n (x0,y0) (x1,y1) = if n = 0 then square (x0 -. x1) else if n = 1 then square (y0 -. y1) else invalid_arg ("TwoD.select: " ^ (Pervasives.string_of_int n)) end;;
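Make above only needs a Location module that can project an element to a 2-D point. A minimal sketch of such an instance follows; the City module is illustrative, and the commented functor application assumes a Two_d module path that this record does not show.

(* A Location instance whose elements are named 2-D points. *)
module City = struct
  type point = float * float
  type elt = string * float * float               (* name, x, y *)
  let to_point ((_, x, y) : elt) : point = (x, y)
end

(* module City_space = Two_d.Make (City) *)       (* module path assumed *)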
71aabd9f9a1766ec6ae7b0d2528c2da76d76528f25fa39e319b1320ba7666725
nuprl/gradual-typing-performance
array-struct-make-array.rkt
#lang racket/base (provide make-array) ;; ----------------------------------------------------------------------------- (require "data-array.rkt" (only-in "array-utils-check-array-shape.rkt" check-array-shape) (only-in "array-struct-unsafe-build-simple-array.rkt" unsafe-build-simple-array)) ;; ============================================================================= (define (make-array ds v) (let ([ds (check-array-shape ds (λ () (raise-argument-error 'make-array "(Vectorof Integer)" 0 ds v)))]) (unsafe-build-simple-array ds (λ (js) v))))
null
https://raw.githubusercontent.com/nuprl/gradual-typing-performance/35442b3221299a9cadba6810573007736b0d65d4/experimental/micro/synth/untyped/array-struct-make-array.rkt
racket
----------------------------------------------------------------------------- =============================================================================
#lang racket/base (provide make-array) (require "data-array.rkt" (only-in "array-utils-check-array-shape.rkt" check-array-shape) (only-in "array-struct-unsafe-build-simple-array.rkt" unsafe-build-simple-array)) (define (make-array ds v) (let ([ds (check-array-shape ds (λ () (raise-argument-error 'make-array "(Vectorof Integer)" 0 ds v)))]) (unsafe-build-simple-array ds (λ (js) v))))
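make-array above builds a simple array whose every element is the constant v, after validating the shape vector. A usage sketch, assuming the sibling modules of this benchmark are on the load path:

#lang racket/base
(require "array-struct-make-array.rkt")

;; A 2x3 array with every element 0.0; the shape must be a vector of
;; integers, which check-array-shape enforces before building.
(define zeros (make-array (vector 2 3) 0.0))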
781be7a4fa64057535c5c7db7b9b8971fee5e138d8feee3517688530bdcb0eac
dbuenzli/vz
pkg.ml
#!/usr/bin/env ocaml #use "topfind" #require "topkg" open Topkg let () = Pkg.describe "vz" @@ fun c -> Ok [ Pkg.mllib "src/vz.mllib"; Pkg.mllib "src/vz_plot.mllib" ~dst_dir:"plot"; Pkg.mllib "src/vz_doc.mllib" ~dst_dir:"doc"; Pkg.mllib "src/evidence.mllib" ~dst_dir:"evidence"; Pkg.lib "src/evidence_top_init.ml" ~dst:"evidence/"; Pkg.mllib "samples/evidence_samples.mllib" ~dst_dir:"samples"; Pkg.doc "doc/index.mld" ~dst:"odoc-pages/index.mld"; ]
null
https://raw.githubusercontent.com/dbuenzli/vz/f4e62392d84d9b0dd86622063f9b935e7bc64810/pkg/pkg.ml
ocaml
#!/usr/bin/env ocaml #use "topfind" #require "topkg" open Topkg let () = Pkg.describe "vz" @@ fun c -> Ok [ Pkg.mllib "src/vz.mllib"; Pkg.mllib "src/vz_plot.mllib" ~dst_dir:"plot"; Pkg.mllib "src/vz_doc.mllib" ~dst_dir:"doc"; Pkg.mllib "src/evidence.mllib" ~dst_dir:"evidence"; Pkg.lib "src/evidence_top_init.ml" ~dst:"evidence/"; Pkg.mllib "samples/evidence_samples.mllib" ~dst_dir:"samples"; Pkg.doc "doc/index.mld" ~dst:"odoc-pages/index.mld"; ]
8470a1d1ad46a6c94366ebde2504651d8892811763e2c2f5afb172320052f7be
dmitryvk/sbcl-win32-threads
insts.lisp
the instruction set definition for MIPS This software is part of the SBCL system . See the README file for ;;;; more information. ;;;; This software is derived from the CMU CL system , which was written at Carnegie Mellon University and released into the ;;;; public domain. The software is in the public domain and is ;;;; provided with absolutely no warranty. See the COPYING and CREDITS ;;;; files for more information. (in-package "SB!VM") (setf *assem-scheduler-p* t) (setf *assem-max-locations* 68) ;;;; Constants, types, conversion functions, some disassembler stuff. (defun reg-tn-encoding (tn) (declare (type tn tn)) (sc-case tn (zero zero-offset) (null null-offset) (t (if (eq (sb-name (sc-sb (tn-sc tn))) 'registers) (tn-offset tn) (error "~S isn't a register." tn))))) (defun fp-reg-tn-encoding (tn) (declare (type tn tn)) (unless (eq (sb-name (sc-sb (tn-sc tn))) 'float-registers) (error "~S isn't a floating-point register." tn)) (tn-offset tn)) ( sb!disassem : set - disassem - params : instruction - alignment 32 ) (defvar *disassem-use-lisp-reg-names* t) (!def-vm-support-routine location-number (loc) (etypecase loc (null) (number) (label) (fixup) (tn (ecase (sb-name (sc-sb (tn-sc loc))) (immediate-constant Can happen if $ ZERO or $ NULL are passed in . nil) (registers (unless (zerop (tn-offset loc)) (tn-offset loc))) (float-registers (+ (tn-offset loc) 32)))) (symbol (ecase loc (:memory 0) (:hi-reg 64) (:low-reg 65) (:float-status 66) (:ctrl-stat-reg 67))))) (defparameter reg-symbols (map 'vector #'(lambda (name) (cond ((null name) nil) (t (make-symbol (concatenate 'string "$" name))))) *register-names*)) (sb!disassem:define-arg-type reg :printer #'(lambda (value stream dstate) (declare (stream stream) (fixnum value)) (let ((regname (aref reg-symbols value))) (princ regname stream) (sb!disassem:maybe-note-associated-storage-ref value 'registers regname dstate)))) (defparameter float-reg-symbols #.(coerce (loop for n from 0 to 31 collect (make-symbol (format nil "$F~d" n))) 'vector)) (sb!disassem:define-arg-type fp-reg :printer #'(lambda (value stream dstate) (declare (stream stream) (fixnum value)) (let ((regname (aref float-reg-symbols value))) (princ regname stream) (sb!disassem:maybe-note-associated-storage-ref value 'float-registers regname dstate)))) (sb!disassem:define-arg-type control-reg :printer "(CR:#x~X)") (sb!disassem:define-arg-type relative-label :sign-extend t :use-label #'(lambda (value dstate) (declare (type (signed-byte 16) value) (type sb!disassem:disassem-state dstate)) (+ (ash (1+ value) 2) (sb!disassem:dstate-cur-addr dstate)))) (deftype float-format () '(member :s :single :d :double :w :word)) (defun float-format-value (format) (ecase format ((:s :single) 0) ((:d :double) 1) ((:w :word) 4))) (sb!disassem:define-arg-type float-format :printer #'(lambda (value stream dstate) (declare (ignore dstate) (stream stream) (fixnum value)) (princ (case value (0 's) (1 'd) (4 'w) (t '?)) stream))) (defconstant-eqx compare-kinds '(:f :un :eq :ueq :olt :ult :ole :ule :sf :ngle :seq :ngl :lt :nge :le :ngt) #'equalp) (defconstant-eqx compare-kinds-vec (apply #'vector compare-kinds) #'equalp) (deftype compare-kind () `(member ,@compare-kinds)) (defun compare-kind (kind) (or (position kind compare-kinds) (error "Unknown floating point compare kind: ~S~%Must be one of: ~S" kind compare-kinds))) (sb!disassem:define-arg-type compare-kind :printer compare-kinds-vec) (defconstant-eqx float-operations '(+ - * /) #'equalp) (deftype float-operation () `(member ,@float-operations)) 
(defconstant-eqx float-operation-names ;; this gets used for output only #(add sub mul div) #'equalp) (defun float-operation (op) (or (position op float-operations) (error "Unknown floating point operation: ~S~%Must be one of: ~S" op float-operations))) (sb!disassem:define-arg-type float-operation :printer float-operation-names) ;;;; Constants used by instruction emitters. (def!constant special-op #b000000) (def!constant bcond-op #b000001) (def!constant cop0-op #b010000) (def!constant cop1-op #b010001) (def!constant cop2-op #b010010) (def!constant cop3-op #b010011) ;;;; dissassem:define-instruction-formats (defconstant-eqx immed-printer '(:name :tab rt (:unless (:same-as rt) ", " rs) ", " immediate) #'equalp) for things that use rt=0 as a nop (defconstant-eqx immed-zero-printer '(:name :tab rt (:unless (:constant 0) ", " rs) ", " immediate) #'equalp) (sb!disassem:define-instruction-format (immediate 32 :default-printer immed-printer) (op :field (byte 6 26)) (rs :field (byte 5 21) :type 'reg) (rt :field (byte 5 16) :type 'reg) (immediate :field (byte 16 0) :sign-extend t)) (eval-when (:compile-toplevel :load-toplevel :execute) (defparameter jump-printer #'(lambda (value stream dstate) (let ((addr (ash value 2))) (sb!disassem:maybe-note-assembler-routine addr t dstate) (write addr :base 16 :radix t :stream stream))))) (sb!disassem:define-instruction-format (jump 32 :default-printer '(:name :tab target)) (op :field (byte 6 26)) (target :field (byte 26 0) :printer jump-printer)) (defconstant-eqx reg-printer '(:name :tab rd (:unless (:same-as rd) ", " rs) ", " rt) #'equalp) (sb!disassem:define-instruction-format (register 32 :default-printer reg-printer) (op :field (byte 6 26)) (rs :field (byte 5 21) :type 'reg) (rt :field (byte 5 16) :type 'reg) (rd :field (byte 5 11) :type 'reg) (shamt :field (byte 5 6) :value 0) (funct :field (byte 6 0))) (sb!disassem:define-instruction-format (break 32 :default-printer '(:name :tab code (:unless (:constant 0) ", " subcode))) (op :field (byte 6 26) :value special-op) (code :field (byte 10 16)) (subcode :field (byte 10 6)) (funct :field (byte 6 0) :value #b001101)) (sb!disassem:define-instruction-format (coproc-branch 32 :default-printer '(:name :tab offset)) (op :field (byte 6 26)) (funct :field (byte 10 16)) (offset :field (byte 16 0))) (defconstant-eqx float-fmt-printer '((:unless :constant funct) (:choose (:unless :constant sub-funct) nil) "." format) #'equalp) (defconstant-eqx float-printer `(:name ,@float-fmt-printer :tab fd (:unless (:same-as fd) ", " fs) ", " ft) #'equalp) (sb!disassem:define-instruction-format (float 32 :default-printer float-printer) (op :field (byte 6 26) :value cop1-op) (filler :field (byte 1 25) :value 1) (format :field (byte 4 21) :type 'float-format) (ft :field (byte 5 16) :value 0) (fs :field (byte 5 11) :type 'fp-reg) (fd :field (byte 5 6) :type 'fp-reg) (funct :field (byte 6 0))) (sb!disassem:define-instruction-format (float-aux 32 :default-printer float-printer) (op :field (byte 6 26) :value cop1-op) (filler-1 :field (byte 1 25) :value 1) (format :field (byte 4 21) :type 'float-format) (ft :field (byte 5 16) :type 'fp-reg) (fs :field (byte 5 11) :type 'fp-reg) (fd :field (byte 5 6) :type 'fp-reg) (funct :field (byte 2 4)) (sub-funct :field (byte 4 0))) (sb!disassem:define-instruction-format (float-op 32 :include 'float :default-printer '('f funct "." 
format :tab fd (:unless (:same-as fd) ", " fs) ", " ft)) (funct :field (byte 2 0) :type 'float-operation) (funct-filler :field (byte 4 2) :value 0) (ft :value nil :type 'fp-reg)) ;;;; Primitive emitters. (define-bitfield-emitter emit-word 32 (byte 32 0)) (define-bitfield-emitter emit-short 16 (byte 16 0)) (define-bitfield-emitter emit-immediate-inst 32 (byte 6 26) (byte 5 21) (byte 5 16) (byte 16 0)) (define-bitfield-emitter emit-jump-inst 32 (byte 6 26) (byte 26 0)) (define-bitfield-emitter emit-register-inst 32 (byte 6 26) (byte 5 21) (byte 5 16) (byte 5 11) (byte 5 6) (byte 6 0)) (define-bitfield-emitter emit-break-inst 32 (byte 6 26) (byte 10 16) (byte 10 6) (byte 6 0)) (define-bitfield-emitter emit-float-inst 32 (byte 6 26) (byte 1 25) (byte 4 21) (byte 5 16) (byte 5 11) (byte 5 6) (byte 6 0)) ;;;; Math instructions. (defun emit-math-inst (segment dst src1 src2 reg-opcode immed-opcode &optional allow-fixups) (unless src2 (setf src2 src1) (setf src1 dst)) (etypecase src2 (tn (emit-register-inst segment special-op (reg-tn-encoding src1) (reg-tn-encoding src2) (reg-tn-encoding dst) 0 reg-opcode)) (integer (emit-immediate-inst segment immed-opcode (reg-tn-encoding src1) (reg-tn-encoding dst) src2)) (fixup (unless allow-fixups (error "Fixups aren't allowed.")) (note-fixup segment :addi src2) (emit-immediate-inst segment immed-opcode (reg-tn-encoding src1) (reg-tn-encoding dst) 0)))) (define-instruction add (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (signed-byte 16) null) src1 src2)) (:printer register ((op special-op) (funct #b100000))) (:printer immediate ((op #b001000))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b100000 #b001000))) (define-instruction addu (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (signed-byte 16) fixup null) src1 src2)) (:printer register ((op special-op) (funct #b100001))) (:printer immediate ((op #b001001))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b100001 #b001001 t))) (define-instruction sub (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (integer #.(- 1 (ash 1 15)) #.(ash 1 15)) null) src1 src2)) (:printer register ((op special-op) (funct #b100010))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (unless src2 (setf src2 src1) (setf src1 dst)) (emit-math-inst segment dst src1 (if (integerp src2) (- src2) src2) #b100010 #b001000))) (define-instruction subu (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (integer #.(- 1 (ash 1 15)) #.(ash 1 15)) fixup null) src1 src2)) (:printer register ((op special-op) (funct #b100011))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (unless src2 (setf src2 src1) (setf src1 dst)) (emit-math-inst segment dst src1 (if (integerp src2) (- src2) src2) #b100011 #b001001 t))) (define-instruction and (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (unsigned-byte 16) null) src1 src2)) (:printer register ((op special-op) (funct #b100100))) (:printer immediate ((op #b001100) (immediate nil :sign-extend nil))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b100100 #b001100))) (define-instruction or (segment dst src1 &optional src2) (:declare 
(type tn dst) (type (or tn (unsigned-byte 16) null) src1 src2)) (:printer register ((op special-op) (funct #b100101))) (:printer immediate ((op #b001101))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b100101 #b001101))) (define-instruction xor (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (unsigned-byte 16) null) src1 src2)) (:printer register ((op special-op) (funct #b100110))) (:printer immediate ((op #b001110))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b100110 #b001110))) (define-instruction nor (segment dst src1 &optional src2) (:declare (type tn dst src1) (type (or tn null) src2)) (:printer register ((op special-op) (funct #b100111))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b100111 #b000000))) (define-instruction slt (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (signed-byte 16) null) src1 src2)) (:printer register ((op special-op) (funct #b101010))) (:printer immediate ((op #b001010))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b101010 #b001010))) (define-instruction sltu (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (signed-byte 16) null) src1 src2)) (:printer register ((op special-op) (funct #b101011))) (:printer immediate ((op #b001011))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b101011 #b001011))) (defconstant-eqx divmul-printer '(:name :tab rs ", " rt) #'equalp) (define-instruction div (segment src1 src2) (:declare (type tn src1 src2)) (:printer register ((op special-op) (rd 0) (funct #b011010)) divmul-printer) (:dependencies (reads src1) (reads src2) (writes :hi-reg) (writes :low-reg)) (:delay 1) (:emitter (emit-register-inst segment special-op (reg-tn-encoding src1) (reg-tn-encoding src2) 0 0 #b011010))) (define-instruction divu (segment src1 src2) (:declare (type tn src1 src2)) (:printer register ((op special-op) (rd 0) (funct #b011011)) divmul-printer) (:dependencies (reads src1) (reads src2) (writes :hi-reg) (writes :low-reg)) (:delay 1) (:emitter (emit-register-inst segment special-op (reg-tn-encoding src1) (reg-tn-encoding src2) 0 0 #b011011))) (define-instruction mult (segment src1 src2) (:declare (type tn src1 src2)) (:printer register ((op special-op) (rd 0) (funct #b011000)) divmul-printer) (:dependencies (reads src1) (reads src2) (writes :hi-reg) (writes :low-reg)) (:delay 1) (:emitter (emit-register-inst segment special-op (reg-tn-encoding src1) (reg-tn-encoding src2) 0 0 #b011000))) (define-instruction multu (segment src1 src2) (:declare (type tn src1 src2)) (:printer register ((op special-op) (rd 0) (funct #b011001))) (:dependencies (reads src1) (reads src2) (writes :hi-reg) (writes :low-reg)) (:delay 1) (:emitter (emit-register-inst segment special-op (reg-tn-encoding src1) (reg-tn-encoding src2) 0 0 #b011001))) (defun emit-shift-inst (segment opcode dst src1 src2) (unless src2 (setf src2 src1) (setf src1 dst)) (etypecase src2 (tn (emit-register-inst segment special-op (reg-tn-encoding src2) (reg-tn-encoding src1) (reg-tn-encoding dst) 0 (logior #b000100 opcode))) ((unsigned-byte 5) (emit-register-inst segment special-op 0 
(reg-tn-encoding src1) (reg-tn-encoding dst) src2 opcode)))) (defconstant-eqx shift-printer '(:name :tab rd (:unless (:same-as rd) ", " rt) ", " (:cond ((rs :constant 0) shamt) (t rs))) #'equalp) (define-instruction sll (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (unsigned-byte 5) null) src1 src2)) (:printer register ((op special-op) (rs 0) (shamt nil) (funct #b000000)) shift-printer) (:printer register ((op special-op) (funct #b000100)) shift-printer) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-shift-inst segment #b00 dst src1 src2))) (define-instruction sra (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (unsigned-byte 5) null) src1 src2)) (:printer register ((op special-op) (rs 0) (shamt nil) (funct #b000011)) shift-printer) (:printer register ((op special-op) (funct #b000111)) shift-printer) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-shift-inst segment #b11 dst src1 src2))) (define-instruction srl (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (unsigned-byte 5) null) src1 src2)) (:printer register ((op special-op) (rs 0) (shamt nil) (funct #b000010)) shift-printer) (:printer register ((op special-op) (funct #b000110)) shift-printer) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-shift-inst segment #b10 dst src1 src2))) ;;;; Floating point math. (define-instruction float-op (segment operation format dst src1 src2) (:declare (type float-operation operation) (type float-format format) (type tn dst src1 src2)) (:printer float-op ()) (:dependencies (reads src1) (reads src2) (writes dst)) (:delay 0) (:emitter (emit-float-inst segment cop1-op 1 (float-format-value format) (fp-reg-tn-encoding src2) (fp-reg-tn-encoding src1) (fp-reg-tn-encoding dst) (float-operation operation)))) (defconstant-eqx float-unop-printer `(:name ,@float-fmt-printer :tab fd (:unless (:same-as fd) ", " fs)) #'equalp) (define-instruction fabs (segment format dst &optional (src dst)) (:declare (type float-format format) (type tn dst src)) (:printer float ((funct #b000101)) float-unop-printer) (:dependencies (reads src) (writes dst)) (:delay 0) (:emitter (emit-float-inst segment cop1-op 1 (float-format-value format) 0 (fp-reg-tn-encoding src) (fp-reg-tn-encoding dst) #b000101))) (define-instruction fneg (segment format dst &optional (src dst)) (:declare (type float-format format) (type tn dst src)) (:printer float ((funct #b000111)) float-unop-printer) (:dependencies (reads src) (writes dst)) (:delay 0) (:emitter (emit-float-inst segment cop1-op 1 (float-format-value format) 0 (fp-reg-tn-encoding src) (fp-reg-tn-encoding dst) #b000111))) (define-instruction fcvt (segment format1 format2 dst src) (:declare (type float-format format1 format2) (type tn dst src)) (:printer float-aux ((funct #b10) (sub-funct nil :type 'float-format)) `(:name "." sub-funct "." format :tab fd ", " fs)) (:dependencies (reads src) (writes dst)) (:delay 0) (:emitter (emit-float-inst segment cop1-op 1 (float-format-value format2) 0 (fp-reg-tn-encoding src) (fp-reg-tn-encoding dst) (logior #b100000 (float-format-value format1))))) (define-instruction fcmp (segment operation format fs ft) (:declare (type compare-kind operation) (type float-format format) (type tn fs ft)) (:printer float-aux ((fd 0) (funct #b11) (sub-funct nil :type 'compare-kind)) `(:name "-" sub-funct "." 
format :tab fs ", " ft)) (:dependencies (reads fs) (reads ft) (writes :float-status)) (:delay 1) (:emitter (emit-float-inst segment cop1-op 1 (float-format-value format) (fp-reg-tn-encoding ft) (fp-reg-tn-encoding fs) 0 (logior #b110000 (compare-kind operation))))) Branch / Jump instructions . (defun emit-relative-branch (segment opcode r1 r2 target) (emit-chooser segment 20 2 #'(lambda (segment posn magic-value) (declare (ignore magic-value)) (let ((delta (ash (- (label-position target) (+ posn 4)) -2))) (when (typep delta '(signed-byte 16)) (emit-back-patch segment 4 #'(lambda (segment posn) (emit-immediate-inst segment opcode (if (fixnump r1) r1 (reg-tn-encoding r1)) (if (fixnump r2) r2 (reg-tn-encoding r2)) (ash (- (label-position target) (+ posn 4)) -2)))) t))) #'(lambda (segment posn) (declare (ignore posn)) (let ((linked)) ;; invert branch condition (if (or (= opcode bcond-op) (= opcode cop1-op)) (setf r2 (logxor r2 #b00001)) (setf opcode (logxor opcode #b00001))) ;; check link flag (if (= opcode bcond-op) (if (logand r2 #b10000) (progn (setf r2 (logand r2 #b01111)) (setf linked t)))) (emit-immediate-inst segment opcode (if (fixnump r1) r1 (reg-tn-encoding r1)) (if (fixnump r2) r2 (reg-tn-encoding r2)) 4) (emit-nop segment) (emit-back-patch segment 8 #'(lambda (segment posn) (declare (ignore posn)) (emit-immediate-inst segment #b001111 0 (reg-tn-encoding lip-tn) (ldb (byte 16 16) (label-position target))) (emit-immediate-inst segment #b001101 0 (reg-tn-encoding lip-tn) (ldb (byte 16 0) (label-position target))))) (emit-register-inst segment special-op (reg-tn-encoding lip-tn) 0 (if linked 31 0) 0 (if linked #b001001 #b001000)))))) (define-instruction b (segment target) (:declare (type label target)) (:printer immediate ((op #b000100) (rs 0) (rt 0) (immediate nil :type 'relative-label)) '(:name :tab immediate)) (:attributes branch) (:delay 1) (:emitter (emit-relative-branch segment #b000100 0 0 target))) (define-instruction bal (segment target) (:declare (type label target)) (:printer immediate ((op bcond-op) (rs 0) (rt #b01001) (immediate nil :type 'relative-label)) '(:name :tab immediate)) (:attributes branch) (:dependencies (writes lip-tn)) (:delay 1) (:emitter (emit-relative-branch segment bcond-op 0 #b10001 target))) (define-instruction beq (segment r1 r2-or-target &optional target) (:declare (type tn r1) (type (or tn fixnum label) r2-or-target) (type (or label null) target)) (:printer immediate ((op #b000100) (immediate nil :type 'relative-label))) (:attributes branch) (:dependencies (reads r1) (if target (reads r2-or-target))) (:delay 1) (:emitter (unless target (setf target r2-or-target) (setf r2-or-target 0)) (emit-relative-branch segment #b000100 r1 r2-or-target target))) (define-instruction bne (segment r1 r2-or-target &optional target) (:declare (type tn r1) (type (or tn fixnum label) r2-or-target) (type (or label null) target)) (:printer immediate ((op #b000101) (immediate nil :type 'relative-label))) (:attributes branch) (:dependencies (reads r1) (if target (reads r2-or-target))) (:delay 1) (:emitter (unless target (setf target r2-or-target) (setf r2-or-target 0)) (emit-relative-branch segment #b000101 r1 r2-or-target target))) (defconstant-eqx cond-branch-printer '(:name :tab rs ", " immediate) #'equalp) (define-instruction blez (segment reg target) (:declare (type label target) (type tn reg)) (:printer immediate ((op #b000110) (rt 0) (immediate nil :type 'relative-label)) cond-branch-printer) (:attributes branch) (:dependencies (reads reg)) (:delay 1) (:emitter 
(emit-relative-branch segment #b000110 reg 0 target))) (define-instruction bgtz (segment reg target) (:declare (type label target) (type tn reg)) (:printer immediate ((op #b000111) (rt 0) (immediate nil :type 'relative-label)) cond-branch-printer) (:attributes branch) (:dependencies (reads reg)) (:delay 1) (:emitter (emit-relative-branch segment #b000111 reg 0 target))) (define-instruction bltz (segment reg target) (:declare (type label target) (type tn reg)) (:printer immediate ((op bcond-op) (rt 0) (immediate nil :type 'relative-label)) cond-branch-printer) (:attributes branch) (:dependencies (reads reg)) (:delay 1) (:emitter (emit-relative-branch segment bcond-op reg #b00000 target))) (define-instruction bgez (segment reg target) (:declare (type label target) (type tn reg)) (:printer immediate ((op bcond-op) (rt 1) (immediate nil :type 'relative-label)) cond-branch-printer) (:attributes branch) (:dependencies (reads reg)) (:delay 1) (:emitter (emit-relative-branch segment bcond-op reg #b00001 target))) (define-instruction bltzal (segment reg target) (:declare (type label target) (type tn reg)) (:printer immediate ((op bcond-op) (rt #b01000) (immediate nil :type 'relative-label)) cond-branch-printer) (:attributes branch) (:dependencies (reads reg) (writes lip-tn)) (:delay 1) (:emitter (emit-relative-branch segment bcond-op reg #b10000 target))) (define-instruction bgezal (segment reg target) (:declare (type label target) (type tn reg)) (:printer immediate ((op bcond-op) (rt #b01001) (immediate nil :type 'relative-label)) cond-branch-printer) (:attributes branch) (:delay 1) (:dependencies (reads reg) (writes lip-tn)) (:emitter (emit-relative-branch segment bcond-op reg #b10001 target))) (defconstant-eqx j-printer '(:name :tab (:choose rs target)) #'equalp) (define-instruction j (segment target) (:declare (type (or tn fixup) target)) (:printer register ((op special-op) (rt 0) (rd 0) (funct #b001000)) j-printer) (:printer jump ((op #b000010)) j-printer) (:attributes branch) (:dependencies (reads target)) (:delay 1) (:emitter (etypecase target (tn (emit-register-inst segment special-op (reg-tn-encoding target) 0 0 0 #b001000)) (fixup (note-fixup segment :lui target) (emit-immediate-inst segment #b001111 0 28 0) (note-fixup segment :addi target) (emit-immediate-inst segment #b001001 28 28 0) (emit-register-inst segment special-op 28 0 0 0 #b001000))))) (define-instruction jal (segment reg-or-target &optional target) (:declare (type (or null tn fixup) target) (type (or tn fixup) reg-or-target)) (:printer register ((op special-op) (rt 0) (funct #b001001)) j-printer) (:printer jump ((op #b000011)) j-printer) (:attributes branch) (:dependencies (cond (target (writes reg-or-target) (reads target)) (t (writes lip-tn) (when (tn-p reg-or-target) (reads reg-or-target))))) (:delay 1) (:emitter (unless target (setf target reg-or-target reg-or-target lip-tn)) (etypecase target (tn (emit-register-inst segment special-op (reg-tn-encoding target) 0 (reg-tn-encoding reg-or-target) 0 #b001001)) (fixup (note-fixup segment :lui target) (emit-immediate-inst segment #b001111 0 28 0) (note-fixup segment :addi target) (emit-immediate-inst segment #b001001 28 28 0) (emit-register-inst segment special-op 28 0 (reg-tn-encoding reg-or-target) 0 #b001001))))) (define-instruction bc1f (segment target) (:declare (type label target)) (:printer coproc-branch ((op cop1-op) (funct #x100) (offset nil :type 'relative-label))) (:attributes branch) (:dependencies (reads :float-status)) (:delay 1) (:emitter (emit-relative-branch 
segment cop1-op #b01000 #b00000 target))) (define-instruction bc1t (segment target) (:declare (type label target)) (:printer coproc-branch ((op cop1-op) (funct #x101) (offset nil :type 'relative-label))) (:attributes branch) (:dependencies (reads :float-status)) (:delay 1) (:emitter (emit-relative-branch segment cop1-op #b01000 #b00001 target))) ;;;; Random movement instructions. (define-instruction lui (segment reg value) (:declare (type tn reg) (type (or fixup (signed-byte 16) (unsigned-byte 16)) value)) (:printer immediate ((op #b001111) (immediate nil :sign-extend nil :printer "#x~4,'0X"))) (:dependencies (writes reg)) (:delay 0) (:emitter (when (fixup-p value) (note-fixup segment :lui value) (setf value 0)) (emit-immediate-inst segment #b001111 0 (reg-tn-encoding reg) value))) (defconstant-eqx mvsreg-printer '(:name :tab rd) #'equalp) (define-instruction mfhi (segment reg) (:declare (type tn reg)) (:printer register ((op special-op) (rs 0) (rt 0) (funct #b010000)) mvsreg-printer) (:dependencies (reads :hi-reg) (writes reg)) (:delay 2) (:emitter (emit-register-inst segment special-op 0 0 (reg-tn-encoding reg) 0 #b010000))) (define-instruction mthi (segment reg) (:declare (type tn reg)) (:printer register ((op special-op) (rs 0) (rt 0) (funct #b010001)) mvsreg-printer) (:dependencies (reads reg) (writes :hi-reg)) (:delay 0) (:emitter (emit-register-inst segment special-op 0 0 (reg-tn-encoding reg) 0 #b010001))) (define-instruction mflo (segment reg) (:declare (type tn reg)) (:printer register ((op special-op) (rs 0) (rt 0) (funct #b010010)) mvsreg-printer) (:dependencies (reads :low-reg) (writes reg)) (:delay 2) (:emitter (emit-register-inst segment special-op 0 0 (reg-tn-encoding reg) 0 #b010010))) (define-instruction mtlo (segment reg) (:declare (type tn reg)) (:printer register ((op special-op) (rs 0) (rt 0) (funct #b010011)) mvsreg-printer) (:dependencies (reads reg) (writes :low-reg)) (:delay 0) (:emitter (emit-register-inst segment special-op 0 0 (reg-tn-encoding reg) 0 #b010011))) (define-instruction move (segment dst src) (:declare (type tn dst src)) (:printer register ((op special-op) (rt 0) (funct #b100001)) '(:name :tab rd ", " rs)) (:attributes flushable) (:dependencies (reads src) (writes dst)) (:delay 0) (:emitter (emit-register-inst segment special-op (reg-tn-encoding src) 0 (reg-tn-encoding dst) 0 #b100001))) (define-instruction fmove (segment format dst src) (:declare (type float-format format) (type tn dst src)) (:printer float ((funct #b000110)) '(:name "." 
format :tab fd ", " fs)) (:attributes flushable) (:dependencies (reads src) (writes dst)) (:delay 0) (:emitter (emit-float-inst segment cop1-op 1 (float-format-value format) 0 (fp-reg-tn-encoding src) (fp-reg-tn-encoding dst) #b000110))) (defun %li (reg value) (etypecase value ((unsigned-byte 16) (inst or reg zero-tn value)) ((signed-byte 16) (inst addu reg zero-tn value)) ((or (signed-byte 32) (unsigned-byte 32)) (inst lui reg (ldb (byte 16 16) value)) (inst or reg (ldb (byte 16 0) value))) (fixup (inst lui reg value) (inst addu reg value)))) (define-instruction-macro li (reg value) `(%li ,reg ,value)) (defconstant-eqx sub-op-printer '(:name :tab rd ", " rt) #'equalp) (define-instruction mtc1 (segment to from) (:declare (type tn to from)) (:printer register ((op cop1-op) (rs #b00100) (funct 0)) sub-op-printer) (:dependencies (reads from) (writes to)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00100 (reg-tn-encoding from) (fp-reg-tn-encoding to) 0 0))) (define-instruction mtc1-odd (segment to from) (:declare (type tn to from)) (:dependencies (reads from) (writes to)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00100 (reg-tn-encoding from) (1+ (fp-reg-tn-encoding to)) 0 0))) (define-instruction mfc1 (segment to from) (:declare (type tn to from)) (:printer register ((op cop1-op) (rs 0) (rd nil :type 'fp-reg) (funct 0)) sub-op-printer) (:dependencies (reads from) (writes to)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00000 (reg-tn-encoding to) (fp-reg-tn-encoding from) 0 0))) (define-instruction mfc1-odd (segment to from) (:declare (type tn to from)) (:dependencies (reads from) (writes to)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00000 (reg-tn-encoding to) (1+ (fp-reg-tn-encoding from)) 0 0))) (define-instruction mfc1-odd2 (segment to from) (:declare (type tn to from)) (:dependencies (reads from) (writes to)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00000 (1+ (reg-tn-encoding to)) (fp-reg-tn-encoding from) 0 0))) (define-instruction mfc1-odd3 (segment to from) (:declare (type tn to from)) (:dependencies (reads from) (writes to)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00000 (1+ (reg-tn-encoding to)) (1+ (fp-reg-tn-encoding from)) 0 0))) (define-instruction cfc1 (segment reg cr) (:declare (type tn reg) (type (unsigned-byte 5) cr)) (:printer register ((op cop1-op) (rs #b00010) (rd nil :type 'control-reg) (funct 0)) sub-op-printer) (:dependencies (reads :ctrl-stat-reg) (writes reg)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00010 (reg-tn-encoding reg) cr 0 0))) (define-instruction ctc1 (segment reg cr) (:declare (type tn reg) (type (unsigned-byte 5) cr)) (:printer register ((op cop1-op) (rs #b00110) (rd nil :type 'control-reg) (funct 0)) sub-op-printer) (:dependencies (reads reg) (writes :ctrl-stat-reg)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00110 (reg-tn-encoding reg) cr 0 0))) ;;;; Random system hackery and other noise (define-instruction-macro entry-point () nil) (defun snarf-error-junk (sap offset &optional length-only) (let* ((length (sap-ref-8 sap offset)) (vector (make-array length :element-type '(unsigned-byte 8)))) (declare (type system-area-pointer sap) (type (unsigned-byte 8) length) (type (simple-array (unsigned-byte 8) (*)) vector)) (cond (length-only (values 0 (1+ length) nil nil)) (t (copy-ub8-from-system-area sap (1+ offset) vector 0 length) (collect ((sc-offsets) (lengths)) (lengths 1) ; the length byte (let* ((index 0) (error-number 
(sb!c:read-var-integer vector index))) (lengths index) (loop (when (>= index length) (return)) (let ((old-index index)) (sc-offsets (sb!c:read-var-integer vector index)) (lengths (- index old-index)))) (values error-number (1+ length) (sc-offsets) (lengths)))))))) (defmacro break-cases (breaknum &body cases) (let ((bn-temp (gensym))) (collect ((clauses)) (dolist (case cases) (clauses `((= ,bn-temp ,(car case)) ,@(cdr case)))) `(let ((,bn-temp ,breaknum)) (cond ,@(clauses)))))) (defun break-control (chunk inst stream dstate) (declare (ignore inst)) (flet ((nt (x) (if stream (sb!disassem:note x dstate)))) (when (= (break-code chunk dstate) 0) (case (break-subcode chunk dstate) (#.halt-trap (nt "Halt trap")) (#.pending-interrupt-trap (nt "Pending interrupt trap")) (#.error-trap (nt "Error trap") (sb!disassem:handle-break-args #'snarf-error-junk stream dstate)) (#.cerror-trap (nt "Cerror trap") (sb!disassem:handle-break-args #'snarf-error-junk stream dstate)) (#.breakpoint-trap (nt "Breakpoint trap")) (#.fun-end-breakpoint-trap (nt "Function end breakpoint trap")) (#.after-breakpoint-trap (nt "After breakpoint trap")) (#.pseudo-atomic-trap (nt "Pseudo atomic trap")) (#.object-not-list-trap (nt "Object not list trap")) (#.object-not-instance-trap (nt "Object not instance trap")) (#.single-step-around-trap (nt "Single step around trap")) (#.single-step-before-trap (nt "Single step before trap")))))) (define-instruction break (segment code &optional (subcode 0)) (:declare (type (unsigned-byte 10) code subcode)) (:printer break ((op special-op) (funct #b001101)) '(:name :tab code (:unless (:constant 0) ", " subcode)) :control #'break-control) :pinned (:cost 0) (:delay 0) (:emitter (emit-break-inst segment special-op code subcode #b001101))) (define-instruction syscall (segment) (:printer register ((op special-op) (rd 0) (rt 0) (rs 0) (funct #b001110)) '(:name)) :pinned (:delay 0) (:emitter (emit-register-inst segment special-op 0 0 0 0 #b001110))) (define-instruction nop (segment) (:printer register ((op 0) (rd 0) (rd 0) (rs 0) (funct 0)) '(:name)) (:attributes flushable) (:delay 0) (:emitter (emit-word segment 0))) (!def-vm-support-routine emit-nop (segment) (emit-word segment 0)) (define-instruction word (segment word) (:declare (type (or (unsigned-byte 32) (signed-byte 32)) word)) :pinned (:cost 0) (:delay 0) (:emitter (emit-word segment word))) (define-instruction short (segment short) (:declare (type (or (unsigned-byte 16) (signed-byte 16)) short)) :pinned (:cost 0) (:delay 0) (:emitter (emit-short segment short))) (define-instruction byte (segment byte) (:declare (type (or (unsigned-byte 8) (signed-byte 8)) byte)) :pinned (:cost 0) (:delay 0) (:emitter (emit-byte segment byte))) (defun emit-header-data (segment type) (emit-back-patch segment 4 #'(lambda (segment posn) (emit-word segment (logior type (ash (+ posn (component-header-length)) (- n-widetag-bits word-shift))))))) (define-instruction simple-fun-header-word (segment) :pinned (:cost 0) (:delay 0) (:emitter (emit-header-data segment simple-fun-header-widetag))) (define-instruction lra-header-word (segment) :pinned (:cost 0) (:delay 0) (:emitter (emit-header-data segment return-pc-header-widetag))) (defun emit-compute-inst (segment vop dst src label temp calc) (emit-chooser We emit either 12 or 4 bytes , so we maintain 8 byte alignments . 
segment 12 3 #'(lambda (segment posn delta-if-after) (let ((delta (funcall calc label posn delta-if-after))) (when (typep delta '(signed-byte 16)) (emit-back-patch segment 4 #'(lambda (segment posn) (assemble (segment vop) (inst addu dst src (funcall calc label posn 0))))) t))) #'(lambda (segment posn) (let ((delta (funcall calc label posn 0))) (assemble (segment vop) (inst lui temp (ldb (byte 16 16) delta)) (inst or temp (ldb (byte 16 0) delta)) (inst addu dst src temp)))))) ;; code = lip - header - label-offset + other-pointer-lowtag (define-instruction compute-code-from-lip (segment dst src label temp) (:declare (type tn dst src temp) (type label label)) (:attributes variable-length) (:dependencies (reads src) (writes dst) (writes temp)) (:delay 0) (:vop-var vop) (:emitter (emit-compute-inst segment vop dst src label temp #'(lambda (label posn delta-if-after) (- other-pointer-lowtag (label-position label posn delta-if-after) (component-header-length)))))) ;; code = lra - other-pointer-tag - header - label-offset + other-pointer-tag = lra - ( header + label - offset ) (define-instruction compute-code-from-lra (segment dst src label temp) (:declare (type tn dst src temp) (type label label)) (:attributes variable-length) (:dependencies (reads src) (writes dst) (writes temp)) (:delay 0) (:vop-var vop) (:emitter (emit-compute-inst segment vop dst src label temp #'(lambda (label posn delta-if-after) (- (+ (label-position label posn delta-if-after) (component-header-length))))))) lra = code + other - pointer - tag + header + label - offset - other - pointer - tag ;; = code + header + label-offset (define-instruction compute-lra-from-code (segment dst src label temp) (:declare (type tn dst src temp) (type label label)) (:attributes variable-length) (:dependencies (reads src) (writes dst) (writes temp)) (:delay 0) (:vop-var vop) (:emitter (emit-compute-inst segment vop dst src label temp #'(lambda (label posn delta-if-after) (+ (label-position label posn delta-if-after) (component-header-length)))))) ;;;; Loads and Stores (defun emit-load/store-inst (segment opcode reg base index &optional (oddhack 0)) (when (fixup-p index) (note-fixup segment :addi index) (setf index 0)) (emit-immediate-inst segment opcode (reg-tn-encoding reg) (+ (reg-tn-encoding base) oddhack) index)) (defconstant-eqx load-store-printer '(:name :tab rt ", " rs (:unless (:constant 0) "[" immediate "]")) #'equalp) (define-instruction lb (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b100000)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100000 base reg index))) (define-instruction lh (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b100001)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100001 base reg index))) (define-instruction lwl (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b100010)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100010 base reg index))) (define-instruction lw (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op 
#b100011)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100011 base reg index))) ;; next is just for ease of coding double-in-int c-call convention (define-instruction lw-odd (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100011 base reg index 1))) (define-instruction lbu (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b100100)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100100 base reg index))) (define-instruction lhu (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b100101)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100101 base reg index))) (define-instruction lwr (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b100110)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100110 base reg index))) (define-instruction sb (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b101000)) load-store-printer) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-load/store-inst segment #b101000 base reg index))) (define-instruction sh (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b101001)) load-store-printer) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-load/store-inst segment #b101001 base reg index))) (define-instruction swl (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b101010)) load-store-printer) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-load/store-inst segment #b101010 base reg index))) (define-instruction sw (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b101011)) load-store-printer) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-load/store-inst segment #b101011 base reg index))) (define-instruction swr (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b101110)) load-store-printer) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-load/store-inst segment #b101110 base reg index))) (defun emit-fp-load/store-inst (segment opcode reg odd base index) (when (fixup-p index) (note-fixup segment :addi index) (setf index 0)) (emit-immediate-inst segment opcode (reg-tn-encoding base) (+ (fp-reg-tn-encoding reg) odd) index)) (define-instruction lwc1 (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b110001) (rt nil :type 'fp-reg)) 
load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-fp-load/store-inst segment #b110001 reg 0 base index))) (define-instruction lwc1-odd (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-fp-load/store-inst segment #b110001 reg 1 base index))) (define-instruction swc1 (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b111001) (rt nil :type 'fp-reg)) load-store-printer) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-fp-load/store-inst segment #b111001 reg 0 base index))) (define-instruction swc1-odd (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-fp-load/store-inst segment #b111001 reg 1 base index)))
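A minimal standalone sketch (plain Common Lisp, independent of the SBCL assembler; the LI-PLAN name and the keyword mnemonics are illustrative assumptions, not emitter calls) of the constant-splitting logic that %LI above implements: a value that fits in 16 bits is loaded with a single OR/ADDU immediate against $zero, while a full 32-bit value is built from a LUI of the high halfword followed by an OR of the low halfword.

(defun li-plan (value)
  "Return a list of (mnemonic . operand) pairs describing how %LI
would materialize VALUE in a register."
  (etypecase value
    ;; small unsigned constants: one OR-immediate against $zero
    ((unsigned-byte 16) (list (cons :ori value)))
    ;; small signed constants: one ADDU-immediate against $zero
    ((signed-byte 16) (list (cons :addiu value)))
    ;; full 32-bit constants: LUI the high half, then OR in the low half
    ((or (signed-byte 32) (unsigned-byte 32))
     (list (cons :lui (ldb (byte 16 16) value))
           (cons :ori (ldb (byte 16 0) value))))))

;; Example: (li-plan #x12345678) => ((:LUI . #x1234) (:ORI . #x5678))

The clause order mirrors %LI itself: the unsigned 16-bit case is tried before the signed one, so small non-negative constants take the single OR form.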
null
https://raw.githubusercontent.com/dmitryvk/sbcl-win32-threads/5abfd64b00a0937ba2df2919f177697d1d91bde4/src/compiler/mips/insts.lisp
lisp
more information. public domain. The software is in the public domain and is provided with absolutely no warranty. See the COPYING and CREDITS files for more information. Constants, types, conversion functions, some disassembler stuff. this gets used for output only Constants used by instruction emitters. disassem:define-instruction-formats Primitive emitters. Math instructions. Floating point math. invert branch condition check link flag Random movement instructions. Random system hackery and other noise the length byte code = lip - header - label-offset + other-pointer-lowtag code = lra - other-pointer-tag - header - label-offset + other-pointer-tag = lra - (header + label-offset) lra = code + other-pointer-tag + header + label-offset - other-pointer-tag = code + header + label-offset Loads and Stores next is just for ease of coding double-in-int c-call convention
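The three address-arithmetic comments above describe the COMPUTE-CODE-FROM-LIP, COMPUTE-CODE-FROM-LRA and COMPUTE-LRA-FROM-CODE instructions defined later in the file. A hedged worked example of that arithmetic follows; the function names and the sample numbers are assumptions made for illustration, not SBCL runtime values.

(defun code-from-lra (lra header-length label-offset)
  ;; code = lra - other-pointer-tag - header - label-offset + other-pointer-tag
  ;;      = lra - (header + label-offset); the tag terms cancel.
  (- lra (+ header-length label-offset)))

(defun lra-from-code (code header-length label-offset)
  ;; lra = code + header + label-offset
  (+ code header-length label-offset))

;; Round trip with made-up numbers:
;; (lra-from-code (code-from-lra #x10001000 64 8) 64 8) => #x10001000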
the instruction set definition for MIPS This software is part of the SBCL system . See the README file for This software is derived from the CMU CL system , which was written at Carnegie Mellon University and released into the (in-package "SB!VM") (setf *assem-scheduler-p* t) (setf *assem-max-locations* 68) (defun reg-tn-encoding (tn) (declare (type tn tn)) (sc-case tn (zero zero-offset) (null null-offset) (t (if (eq (sb-name (sc-sb (tn-sc tn))) 'registers) (tn-offset tn) (error "~S isn't a register." tn))))) (defun fp-reg-tn-encoding (tn) (declare (type tn tn)) (unless (eq (sb-name (sc-sb (tn-sc tn))) 'float-registers) (error "~S isn't a floating-point register." tn)) (tn-offset tn)) ( sb!disassem : set - disassem - params : instruction - alignment 32 ) (defvar *disassem-use-lisp-reg-names* t) (!def-vm-support-routine location-number (loc) (etypecase loc (null) (number) (label) (fixup) (tn (ecase (sb-name (sc-sb (tn-sc loc))) (immediate-constant Can happen if $ ZERO or $ NULL are passed in . nil) (registers (unless (zerop (tn-offset loc)) (tn-offset loc))) (float-registers (+ (tn-offset loc) 32)))) (symbol (ecase loc (:memory 0) (:hi-reg 64) (:low-reg 65) (:float-status 66) (:ctrl-stat-reg 67))))) (defparameter reg-symbols (map 'vector #'(lambda (name) (cond ((null name) nil) (t (make-symbol (concatenate 'string "$" name))))) *register-names*)) (sb!disassem:define-arg-type reg :printer #'(lambda (value stream dstate) (declare (stream stream) (fixnum value)) (let ((regname (aref reg-symbols value))) (princ regname stream) (sb!disassem:maybe-note-associated-storage-ref value 'registers regname dstate)))) (defparameter float-reg-symbols #.(coerce (loop for n from 0 to 31 collect (make-symbol (format nil "$F~d" n))) 'vector)) (sb!disassem:define-arg-type fp-reg :printer #'(lambda (value stream dstate) (declare (stream stream) (fixnum value)) (let ((regname (aref float-reg-symbols value))) (princ regname stream) (sb!disassem:maybe-note-associated-storage-ref value 'float-registers regname dstate)))) (sb!disassem:define-arg-type control-reg :printer "(CR:#x~X)") (sb!disassem:define-arg-type relative-label :sign-extend t :use-label #'(lambda (value dstate) (declare (type (signed-byte 16) value) (type sb!disassem:disassem-state dstate)) (+ (ash (1+ value) 2) (sb!disassem:dstate-cur-addr dstate)))) (deftype float-format () '(member :s :single :d :double :w :word)) (defun float-format-value (format) (ecase format ((:s :single) 0) ((:d :double) 1) ((:w :word) 4))) (sb!disassem:define-arg-type float-format :printer #'(lambda (value stream dstate) (declare (ignore dstate) (stream stream) (fixnum value)) (princ (case value (0 's) (1 'd) (4 'w) (t '?)) stream))) (defconstant-eqx compare-kinds '(:f :un :eq :ueq :olt :ult :ole :ule :sf :ngle :seq :ngl :lt :nge :le :ngt) #'equalp) (defconstant-eqx compare-kinds-vec (apply #'vector compare-kinds) #'equalp) (deftype compare-kind () `(member ,@compare-kinds)) (defun compare-kind (kind) (or (position kind compare-kinds) (error "Unknown floating point compare kind: ~S~%Must be one of: ~S" kind compare-kinds))) (sb!disassem:define-arg-type compare-kind :printer compare-kinds-vec) (defconstant-eqx float-operations '(+ - * /) #'equalp) (deftype float-operation () `(member ,@float-operations)) (defconstant-eqx float-operation-names #(add sub mul div) #'equalp) (defun float-operation (op) (or (position op float-operations) (error "Unknown floating point operation: ~S~%Must be one of: ~S" op float-operations))) (sb!disassem:define-arg-type float-operation :printer 
float-operation-names) (def!constant special-op #b000000) (def!constant bcond-op #b000001) (def!constant cop0-op #b010000) (def!constant cop1-op #b010001) (def!constant cop2-op #b010010) (def!constant cop3-op #b010011) (defconstant-eqx immed-printer '(:name :tab rt (:unless (:same-as rt) ", " rs) ", " immediate) #'equalp) for things that use rt=0 as a nop (defconstant-eqx immed-zero-printer '(:name :tab rt (:unless (:constant 0) ", " rs) ", " immediate) #'equalp) (sb!disassem:define-instruction-format (immediate 32 :default-printer immed-printer) (op :field (byte 6 26)) (rs :field (byte 5 21) :type 'reg) (rt :field (byte 5 16) :type 'reg) (immediate :field (byte 16 0) :sign-extend t)) (eval-when (:compile-toplevel :load-toplevel :execute) (defparameter jump-printer #'(lambda (value stream dstate) (let ((addr (ash value 2))) (sb!disassem:maybe-note-assembler-routine addr t dstate) (write addr :base 16 :radix t :stream stream))))) (sb!disassem:define-instruction-format (jump 32 :default-printer '(:name :tab target)) (op :field (byte 6 26)) (target :field (byte 26 0) :printer jump-printer)) (defconstant-eqx reg-printer '(:name :tab rd (:unless (:same-as rd) ", " rs) ", " rt) #'equalp) (sb!disassem:define-instruction-format (register 32 :default-printer reg-printer) (op :field (byte 6 26)) (rs :field (byte 5 21) :type 'reg) (rt :field (byte 5 16) :type 'reg) (rd :field (byte 5 11) :type 'reg) (shamt :field (byte 5 6) :value 0) (funct :field (byte 6 0))) (sb!disassem:define-instruction-format (break 32 :default-printer '(:name :tab code (:unless (:constant 0) ", " subcode))) (op :field (byte 6 26) :value special-op) (code :field (byte 10 16)) (subcode :field (byte 10 6)) (funct :field (byte 6 0) :value #b001101)) (sb!disassem:define-instruction-format (coproc-branch 32 :default-printer '(:name :tab offset)) (op :field (byte 6 26)) (funct :field (byte 10 16)) (offset :field (byte 16 0))) (defconstant-eqx float-fmt-printer '((:unless :constant funct) (:choose (:unless :constant sub-funct) nil) "." format) #'equalp) (defconstant-eqx float-printer `(:name ,@float-fmt-printer :tab fd (:unless (:same-as fd) ", " fs) ", " ft) #'equalp) (sb!disassem:define-instruction-format (float 32 :default-printer float-printer) (op :field (byte 6 26) :value cop1-op) (filler :field (byte 1 25) :value 1) (format :field (byte 4 21) :type 'float-format) (ft :field (byte 5 16) :value 0) (fs :field (byte 5 11) :type 'fp-reg) (fd :field (byte 5 6) :type 'fp-reg) (funct :field (byte 6 0))) (sb!disassem:define-instruction-format (float-aux 32 :default-printer float-printer) (op :field (byte 6 26) :value cop1-op) (filler-1 :field (byte 1 25) :value 1) (format :field (byte 4 21) :type 'float-format) (ft :field (byte 5 16) :type 'fp-reg) (fs :field (byte 5 11) :type 'fp-reg) (fd :field (byte 5 6) :type 'fp-reg) (funct :field (byte 2 4)) (sub-funct :field (byte 4 0))) (sb!disassem:define-instruction-format (float-op 32 :include 'float :default-printer '('f funct "." 
format :tab fd (:unless (:same-as fd) ", " fs) ", " ft)) (funct :field (byte 2 0) :type 'float-operation) (funct-filler :field (byte 4 2) :value 0) (ft :value nil :type 'fp-reg)) (define-bitfield-emitter emit-word 32 (byte 32 0)) (define-bitfield-emitter emit-short 16 (byte 16 0)) (define-bitfield-emitter emit-immediate-inst 32 (byte 6 26) (byte 5 21) (byte 5 16) (byte 16 0)) (define-bitfield-emitter emit-jump-inst 32 (byte 6 26) (byte 26 0)) (define-bitfield-emitter emit-register-inst 32 (byte 6 26) (byte 5 21) (byte 5 16) (byte 5 11) (byte 5 6) (byte 6 0)) (define-bitfield-emitter emit-break-inst 32 (byte 6 26) (byte 10 16) (byte 10 6) (byte 6 0)) (define-bitfield-emitter emit-float-inst 32 (byte 6 26) (byte 1 25) (byte 4 21) (byte 5 16) (byte 5 11) (byte 5 6) (byte 6 0)) (defun emit-math-inst (segment dst src1 src2 reg-opcode immed-opcode &optional allow-fixups) (unless src2 (setf src2 src1) (setf src1 dst)) (etypecase src2 (tn (emit-register-inst segment special-op (reg-tn-encoding src1) (reg-tn-encoding src2) (reg-tn-encoding dst) 0 reg-opcode)) (integer (emit-immediate-inst segment immed-opcode (reg-tn-encoding src1) (reg-tn-encoding dst) src2)) (fixup (unless allow-fixups (error "Fixups aren't allowed.")) (note-fixup segment :addi src2) (emit-immediate-inst segment immed-opcode (reg-tn-encoding src1) (reg-tn-encoding dst) 0)))) (define-instruction add (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (signed-byte 16) null) src1 src2)) (:printer register ((op special-op) (funct #b100000))) (:printer immediate ((op #b001000))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b100000 #b001000))) (define-instruction addu (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (signed-byte 16) fixup null) src1 src2)) (:printer register ((op special-op) (funct #b100001))) (:printer immediate ((op #b001001))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b100001 #b001001 t))) (define-instruction sub (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (integer #.(- 1 (ash 1 15)) #.(ash 1 15)) null) src1 src2)) (:printer register ((op special-op) (funct #b100010))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (unless src2 (setf src2 src1) (setf src1 dst)) (emit-math-inst segment dst src1 (if (integerp src2) (- src2) src2) #b100010 #b001000))) (define-instruction subu (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (integer #.(- 1 (ash 1 15)) #.(ash 1 15)) fixup null) src1 src2)) (:printer register ((op special-op) (funct #b100011))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (unless src2 (setf src2 src1) (setf src1 dst)) (emit-math-inst segment dst src1 (if (integerp src2) (- src2) src2) #b100011 #b001001 t))) (define-instruction and (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (unsigned-byte 16) null) src1 src2)) (:printer register ((op special-op) (funct #b100100))) (:printer immediate ((op #b001100) (immediate nil :sign-extend nil))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b100100 #b001100))) (define-instruction or (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (unsigned-byte 16) null) 
src1 src2)) (:printer register ((op special-op) (funct #b100101))) (:printer immediate ((op #b001101))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b100101 #b001101))) (define-instruction xor (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (unsigned-byte 16) null) src1 src2)) (:printer register ((op special-op) (funct #b100110))) (:printer immediate ((op #b001110))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b100110 #b001110))) (define-instruction nor (segment dst src1 &optional src2) (:declare (type tn dst src1) (type (or tn null) src2)) (:printer register ((op special-op) (funct #b100111))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b100111 #b000000))) (define-instruction slt (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (signed-byte 16) null) src1 src2)) (:printer register ((op special-op) (funct #b101010))) (:printer immediate ((op #b001010))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b101010 #b001010))) (define-instruction sltu (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (signed-byte 16) null) src1 src2)) (:printer register ((op special-op) (funct #b101011))) (:printer immediate ((op #b001011))) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-math-inst segment dst src1 src2 #b101011 #b001011))) (defconstant-eqx divmul-printer '(:name :tab rs ", " rt) #'equalp) (define-instruction div (segment src1 src2) (:declare (type tn src1 src2)) (:printer register ((op special-op) (rd 0) (funct #b011010)) divmul-printer) (:dependencies (reads src1) (reads src2) (writes :hi-reg) (writes :low-reg)) (:delay 1) (:emitter (emit-register-inst segment special-op (reg-tn-encoding src1) (reg-tn-encoding src2) 0 0 #b011010))) (define-instruction divu (segment src1 src2) (:declare (type tn src1 src2)) (:printer register ((op special-op) (rd 0) (funct #b011011)) divmul-printer) (:dependencies (reads src1) (reads src2) (writes :hi-reg) (writes :low-reg)) (:delay 1) (:emitter (emit-register-inst segment special-op (reg-tn-encoding src1) (reg-tn-encoding src2) 0 0 #b011011))) (define-instruction mult (segment src1 src2) (:declare (type tn src1 src2)) (:printer register ((op special-op) (rd 0) (funct #b011000)) divmul-printer) (:dependencies (reads src1) (reads src2) (writes :hi-reg) (writes :low-reg)) (:delay 1) (:emitter (emit-register-inst segment special-op (reg-tn-encoding src1) (reg-tn-encoding src2) 0 0 #b011000))) (define-instruction multu (segment src1 src2) (:declare (type tn src1 src2)) (:printer register ((op special-op) (rd 0) (funct #b011001))) (:dependencies (reads src1) (reads src2) (writes :hi-reg) (writes :low-reg)) (:delay 1) (:emitter (emit-register-inst segment special-op (reg-tn-encoding src1) (reg-tn-encoding src2) 0 0 #b011001))) (defun emit-shift-inst (segment opcode dst src1 src2) (unless src2 (setf src2 src1) (setf src1 dst)) (etypecase src2 (tn (emit-register-inst segment special-op (reg-tn-encoding src2) (reg-tn-encoding src1) (reg-tn-encoding dst) 0 (logior #b000100 opcode))) ((unsigned-byte 5) (emit-register-inst segment special-op 0 (reg-tn-encoding src1) (reg-tn-encoding dst) src2 
opcode)))) (defconstant-eqx shift-printer '(:name :tab rd (:unless (:same-as rd) ", " rt) ", " (:cond ((rs :constant 0) shamt) (t rs))) #'equalp) (define-instruction sll (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (unsigned-byte 5) null) src1 src2)) (:printer register ((op special-op) (rs 0) (shamt nil) (funct #b000000)) shift-printer) (:printer register ((op special-op) (funct #b000100)) shift-printer) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-shift-inst segment #b00 dst src1 src2))) (define-instruction sra (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (unsigned-byte 5) null) src1 src2)) (:printer register ((op special-op) (rs 0) (shamt nil) (funct #b000011)) shift-printer) (:printer register ((op special-op) (funct #b000111)) shift-printer) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-shift-inst segment #b11 dst src1 src2))) (define-instruction srl (segment dst src1 &optional src2) (:declare (type tn dst) (type (or tn (unsigned-byte 5) null) src1 src2)) (:printer register ((op special-op) (rs 0) (shamt nil) (funct #b000010)) shift-printer) (:printer register ((op special-op) (funct #b000110)) shift-printer) (:dependencies (reads src1) (if src2 (reads src2) (reads dst)) (writes dst)) (:delay 0) (:emitter (emit-shift-inst segment #b10 dst src1 src2))) (define-instruction float-op (segment operation format dst src1 src2) (:declare (type float-operation operation) (type float-format format) (type tn dst src1 src2)) (:printer float-op ()) (:dependencies (reads src1) (reads src2) (writes dst)) (:delay 0) (:emitter (emit-float-inst segment cop1-op 1 (float-format-value format) (fp-reg-tn-encoding src2) (fp-reg-tn-encoding src1) (fp-reg-tn-encoding dst) (float-operation operation)))) (defconstant-eqx float-unop-printer `(:name ,@float-fmt-printer :tab fd (:unless (:same-as fd) ", " fs)) #'equalp) (define-instruction fabs (segment format dst &optional (src dst)) (:declare (type float-format format) (type tn dst src)) (:printer float ((funct #b000101)) float-unop-printer) (:dependencies (reads src) (writes dst)) (:delay 0) (:emitter (emit-float-inst segment cop1-op 1 (float-format-value format) 0 (fp-reg-tn-encoding src) (fp-reg-tn-encoding dst) #b000101))) (define-instruction fneg (segment format dst &optional (src dst)) (:declare (type float-format format) (type tn dst src)) (:printer float ((funct #b000111)) float-unop-printer) (:dependencies (reads src) (writes dst)) (:delay 0) (:emitter (emit-float-inst segment cop1-op 1 (float-format-value format) 0 (fp-reg-tn-encoding src) (fp-reg-tn-encoding dst) #b000111))) (define-instruction fcvt (segment format1 format2 dst src) (:declare (type float-format format1 format2) (type tn dst src)) (:printer float-aux ((funct #b10) (sub-funct nil :type 'float-format)) `(:name "." sub-funct "." format :tab fd ", " fs)) (:dependencies (reads src) (writes dst)) (:delay 0) (:emitter (emit-float-inst segment cop1-op 1 (float-format-value format2) 0 (fp-reg-tn-encoding src) (fp-reg-tn-encoding dst) (logior #b100000 (float-format-value format1))))) (define-instruction fcmp (segment operation format fs ft) (:declare (type compare-kind operation) (type float-format format) (type tn fs ft)) (:printer float-aux ((fd 0) (funct #b11) (sub-funct nil :type 'compare-kind)) `(:name "-" sub-funct "." 
format :tab fs ", " ft)) (:dependencies (reads fs) (reads ft) (writes :float-status)) (:delay 1) (:emitter (emit-float-inst segment cop1-op 1 (float-format-value format) (fp-reg-tn-encoding ft) (fp-reg-tn-encoding fs) 0 (logior #b110000 (compare-kind operation))))) Branch / Jump instructions . (defun emit-relative-branch (segment opcode r1 r2 target) (emit-chooser segment 20 2 #'(lambda (segment posn magic-value) (declare (ignore magic-value)) (let ((delta (ash (- (label-position target) (+ posn 4)) -2))) (when (typep delta '(signed-byte 16)) (emit-back-patch segment 4 #'(lambda (segment posn) (emit-immediate-inst segment opcode (if (fixnump r1) r1 (reg-tn-encoding r1)) (if (fixnump r2) r2 (reg-tn-encoding r2)) (ash (- (label-position target) (+ posn 4)) -2)))) t))) #'(lambda (segment posn) (declare (ignore posn)) (let ((linked)) (if (or (= opcode bcond-op) (= opcode cop1-op)) (setf r2 (logxor r2 #b00001)) (setf opcode (logxor opcode #b00001))) (if (= opcode bcond-op) (if (logand r2 #b10000) (progn (setf r2 (logand r2 #b01111)) (setf linked t)))) (emit-immediate-inst segment opcode (if (fixnump r1) r1 (reg-tn-encoding r1)) (if (fixnump r2) r2 (reg-tn-encoding r2)) 4) (emit-nop segment) (emit-back-patch segment 8 #'(lambda (segment posn) (declare (ignore posn)) (emit-immediate-inst segment #b001111 0 (reg-tn-encoding lip-tn) (ldb (byte 16 16) (label-position target))) (emit-immediate-inst segment #b001101 0 (reg-tn-encoding lip-tn) (ldb (byte 16 0) (label-position target))))) (emit-register-inst segment special-op (reg-tn-encoding lip-tn) 0 (if linked 31 0) 0 (if linked #b001001 #b001000)))))) (define-instruction b (segment target) (:declare (type label target)) (:printer immediate ((op #b000100) (rs 0) (rt 0) (immediate nil :type 'relative-label)) '(:name :tab immediate)) (:attributes branch) (:delay 1) (:emitter (emit-relative-branch segment #b000100 0 0 target))) (define-instruction bal (segment target) (:declare (type label target)) (:printer immediate ((op bcond-op) (rs 0) (rt #b01001) (immediate nil :type 'relative-label)) '(:name :tab immediate)) (:attributes branch) (:dependencies (writes lip-tn)) (:delay 1) (:emitter (emit-relative-branch segment bcond-op 0 #b10001 target))) (define-instruction beq (segment r1 r2-or-target &optional target) (:declare (type tn r1) (type (or tn fixnum label) r2-or-target) (type (or label null) target)) (:printer immediate ((op #b000100) (immediate nil :type 'relative-label))) (:attributes branch) (:dependencies (reads r1) (if target (reads r2-or-target))) (:delay 1) (:emitter (unless target (setf target r2-or-target) (setf r2-or-target 0)) (emit-relative-branch segment #b000100 r1 r2-or-target target))) (define-instruction bne (segment r1 r2-or-target &optional target) (:declare (type tn r1) (type (or tn fixnum label) r2-or-target) (type (or label null) target)) (:printer immediate ((op #b000101) (immediate nil :type 'relative-label))) (:attributes branch) (:dependencies (reads r1) (if target (reads r2-or-target))) (:delay 1) (:emitter (unless target (setf target r2-or-target) (setf r2-or-target 0)) (emit-relative-branch segment #b000101 r1 r2-or-target target))) (defconstant-eqx cond-branch-printer '(:name :tab rs ", " immediate) #'equalp) (define-instruction blez (segment reg target) (:declare (type label target) (type tn reg)) (:printer immediate ((op #b000110) (rt 0) (immediate nil :type 'relative-label)) cond-branch-printer) (:attributes branch) (:dependencies (reads reg)) (:delay 1) (:emitter (emit-relative-branch segment #b000110 reg 0 
target))) (define-instruction bgtz (segment reg target) (:declare (type label target) (type tn reg)) (:printer immediate ((op #b000111) (rt 0) (immediate nil :type 'relative-label)) cond-branch-printer) (:attributes branch) (:dependencies (reads reg)) (:delay 1) (:emitter (emit-relative-branch segment #b000111 reg 0 target))) (define-instruction bltz (segment reg target) (:declare (type label target) (type tn reg)) (:printer immediate ((op bcond-op) (rt 0) (immediate nil :type 'relative-label)) cond-branch-printer) (:attributes branch) (:dependencies (reads reg)) (:delay 1) (:emitter (emit-relative-branch segment bcond-op reg #b00000 target))) (define-instruction bgez (segment reg target) (:declare (type label target) (type tn reg)) (:printer immediate ((op bcond-op) (rt 1) (immediate nil :type 'relative-label)) cond-branch-printer) (:attributes branch) (:dependencies (reads reg)) (:delay 1) (:emitter (emit-relative-branch segment bcond-op reg #b00001 target))) (define-instruction bltzal (segment reg target) (:declare (type label target) (type tn reg)) (:printer immediate ((op bcond-op) (rt #b01000) (immediate nil :type 'relative-label)) cond-branch-printer) (:attributes branch) (:dependencies (reads reg) (writes lip-tn)) (:delay 1) (:emitter (emit-relative-branch segment bcond-op reg #b10000 target))) (define-instruction bgezal (segment reg target) (:declare (type label target) (type tn reg)) (:printer immediate ((op bcond-op) (rt #b01001) (immediate nil :type 'relative-label)) cond-branch-printer) (:attributes branch) (:delay 1) (:dependencies (reads reg) (writes lip-tn)) (:emitter (emit-relative-branch segment bcond-op reg #b10001 target))) (defconstant-eqx j-printer '(:name :tab (:choose rs target)) #'equalp) (define-instruction j (segment target) (:declare (type (or tn fixup) target)) (:printer register ((op special-op) (rt 0) (rd 0) (funct #b001000)) j-printer) (:printer jump ((op #b000010)) j-printer) (:attributes branch) (:dependencies (reads target)) (:delay 1) (:emitter (etypecase target (tn (emit-register-inst segment special-op (reg-tn-encoding target) 0 0 0 #b001000)) (fixup (note-fixup segment :lui target) (emit-immediate-inst segment #b001111 0 28 0) (note-fixup segment :addi target) (emit-immediate-inst segment #b001001 28 28 0) (emit-register-inst segment special-op 28 0 0 0 #b001000))))) (define-instruction jal (segment reg-or-target &optional target) (:declare (type (or null tn fixup) target) (type (or tn fixup) reg-or-target)) (:printer register ((op special-op) (rt 0) (funct #b001001)) j-printer) (:printer jump ((op #b000011)) j-printer) (:attributes branch) (:dependencies (cond (target (writes reg-or-target) (reads target)) (t (writes lip-tn) (when (tn-p reg-or-target) (reads reg-or-target))))) (:delay 1) (:emitter (unless target (setf target reg-or-target reg-or-target lip-tn)) (etypecase target (tn (emit-register-inst segment special-op (reg-tn-encoding target) 0 (reg-tn-encoding reg-or-target) 0 #b001001)) (fixup (note-fixup segment :lui target) (emit-immediate-inst segment #b001111 0 28 0) (note-fixup segment :addi target) (emit-immediate-inst segment #b001001 28 28 0) (emit-register-inst segment special-op 28 0 (reg-tn-encoding reg-or-target) 0 #b001001))))) (define-instruction bc1f (segment target) (:declare (type label target)) (:printer coproc-branch ((op cop1-op) (funct #x100) (offset nil :type 'relative-label))) (:attributes branch) (:dependencies (reads :float-status)) (:delay 1) (:emitter (emit-relative-branch segment cop1-op #b01000 #b00000 target))) 
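;; Illustrative aside (not part of the original file): when a conditional
;; branch cannot reach its target, EMIT-RELATIVE-BRANCH above inverts the
;; branch sense and branches over a long jump sequence instead.  For the
;; COP1 branches that inversion is just the low condition bit, e.g. BC1F
;; (rt field #b00000) becomes BC1T (rt field #b00001).  A tiny sketch of
;; that bit flip, using LOGXOR exactly as the emitter does:
(defun invert-cop1-branch-condition (rt-field)
  ;; #b00000 (branch on FP condition false) <-> #b00001 (branch on true)
  (logxor rt-field #b00001))
;; (invert-cop1-branch-condition #b00000) => 1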
(define-instruction bc1t (segment target) (:declare (type label target)) (:printer coproc-branch ((op cop1-op) (funct #x101) (offset nil :type 'relative-label))) (:attributes branch) (:dependencies (reads :float-status)) (:delay 1) (:emitter (emit-relative-branch segment cop1-op #b01000 #b00001 target))) (define-instruction lui (segment reg value) (:declare (type tn reg) (type (or fixup (signed-byte 16) (unsigned-byte 16)) value)) (:printer immediate ((op #b001111) (immediate nil :sign-extend nil :printer "#x~4,'0X"))) (:dependencies (writes reg)) (:delay 0) (:emitter (when (fixup-p value) (note-fixup segment :lui value) (setf value 0)) (emit-immediate-inst segment #b001111 0 (reg-tn-encoding reg) value))) (defconstant-eqx mvsreg-printer '(:name :tab rd) #'equalp) (define-instruction mfhi (segment reg) (:declare (type tn reg)) (:printer register ((op special-op) (rs 0) (rt 0) (funct #b010000)) mvsreg-printer) (:dependencies (reads :hi-reg) (writes reg)) (:delay 2) (:emitter (emit-register-inst segment special-op 0 0 (reg-tn-encoding reg) 0 #b010000))) (define-instruction mthi (segment reg) (:declare (type tn reg)) (:printer register ((op special-op) (rs 0) (rt 0) (funct #b010001)) mvsreg-printer) (:dependencies (reads reg) (writes :hi-reg)) (:delay 0) (:emitter (emit-register-inst segment special-op 0 0 (reg-tn-encoding reg) 0 #b010001))) (define-instruction mflo (segment reg) (:declare (type tn reg)) (:printer register ((op special-op) (rs 0) (rt 0) (funct #b010010)) mvsreg-printer) (:dependencies (reads :low-reg) (writes reg)) (:delay 2) (:emitter (emit-register-inst segment special-op 0 0 (reg-tn-encoding reg) 0 #b010010))) (define-instruction mtlo (segment reg) (:declare (type tn reg)) (:printer register ((op special-op) (rs 0) (rt 0) (funct #b010011)) mvsreg-printer) (:dependencies (reads reg) (writes :low-reg)) (:delay 0) (:emitter (emit-register-inst segment special-op 0 0 (reg-tn-encoding reg) 0 #b010011))) (define-instruction move (segment dst src) (:declare (type tn dst src)) (:printer register ((op special-op) (rt 0) (funct #b100001)) '(:name :tab rd ", " rs)) (:attributes flushable) (:dependencies (reads src) (writes dst)) (:delay 0) (:emitter (emit-register-inst segment special-op (reg-tn-encoding src) 0 (reg-tn-encoding dst) 0 #b100001))) (define-instruction fmove (segment format dst src) (:declare (type float-format format) (type tn dst src)) (:printer float ((funct #b000110)) '(:name "." 
format :tab fd ", " fs)) (:attributes flushable) (:dependencies (reads src) (writes dst)) (:delay 0) (:emitter (emit-float-inst segment cop1-op 1 (float-format-value format) 0 (fp-reg-tn-encoding src) (fp-reg-tn-encoding dst) #b000110))) (defun %li (reg value) (etypecase value ((unsigned-byte 16) (inst or reg zero-tn value)) ((signed-byte 16) (inst addu reg zero-tn value)) ((or (signed-byte 32) (unsigned-byte 32)) (inst lui reg (ldb (byte 16 16) value)) (inst or reg (ldb (byte 16 0) value))) (fixup (inst lui reg value) (inst addu reg value)))) (define-instruction-macro li (reg value) `(%li ,reg ,value)) (defconstant-eqx sub-op-printer '(:name :tab rd ", " rt) #'equalp) (define-instruction mtc1 (segment to from) (:declare (type tn to from)) (:printer register ((op cop1-op) (rs #b00100) (funct 0)) sub-op-printer) (:dependencies (reads from) (writes to)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00100 (reg-tn-encoding from) (fp-reg-tn-encoding to) 0 0))) (define-instruction mtc1-odd (segment to from) (:declare (type tn to from)) (:dependencies (reads from) (writes to)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00100 (reg-tn-encoding from) (1+ (fp-reg-tn-encoding to)) 0 0))) (define-instruction mfc1 (segment to from) (:declare (type tn to from)) (:printer register ((op cop1-op) (rs 0) (rd nil :type 'fp-reg) (funct 0)) sub-op-printer) (:dependencies (reads from) (writes to)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00000 (reg-tn-encoding to) (fp-reg-tn-encoding from) 0 0))) (define-instruction mfc1-odd (segment to from) (:declare (type tn to from)) (:dependencies (reads from) (writes to)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00000 (reg-tn-encoding to) (1+ (fp-reg-tn-encoding from)) 0 0))) (define-instruction mfc1-odd2 (segment to from) (:declare (type tn to from)) (:dependencies (reads from) (writes to)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00000 (1+ (reg-tn-encoding to)) (fp-reg-tn-encoding from) 0 0))) (define-instruction mfc1-odd3 (segment to from) (:declare (type tn to from)) (:dependencies (reads from) (writes to)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00000 (1+ (reg-tn-encoding to)) (1+ (fp-reg-tn-encoding from)) 0 0))) (define-instruction cfc1 (segment reg cr) (:declare (type tn reg) (type (unsigned-byte 5) cr)) (:printer register ((op cop1-op) (rs #b00010) (rd nil :type 'control-reg) (funct 0)) sub-op-printer) (:dependencies (reads :ctrl-stat-reg) (writes reg)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00010 (reg-tn-encoding reg) cr 0 0))) (define-instruction ctc1 (segment reg cr) (:declare (type tn reg) (type (unsigned-byte 5) cr)) (:printer register ((op cop1-op) (rs #b00110) (rd nil :type 'control-reg) (funct 0)) sub-op-printer) (:dependencies (reads reg) (writes :ctrl-stat-reg)) (:delay 1) (:emitter (emit-register-inst segment cop1-op #b00110 (reg-tn-encoding reg) cr 0 0))) (define-instruction-macro entry-point () nil) (defun snarf-error-junk (sap offset &optional length-only) (let* ((length (sap-ref-8 sap offset)) (vector (make-array length :element-type '(unsigned-byte 8)))) (declare (type system-area-pointer sap) (type (unsigned-byte 8) length) (type (simple-array (unsigned-byte 8) (*)) vector)) (cond (length-only (values 0 (1+ length) nil nil)) (t (copy-ub8-from-system-area sap (1+ offset) vector 0 length) (collect ((sc-offsets) (lengths)) (let* ((index 0) (error-number (sb!c:read-var-integer vector index))) (lengths index) (loop (when (>= index 
length) (return)) (let ((old-index index)) (sc-offsets (sb!c:read-var-integer vector index)) (lengths (- index old-index)))) (values error-number (1+ length) (sc-offsets) (lengths)))))))) (defmacro break-cases (breaknum &body cases) (let ((bn-temp (gensym))) (collect ((clauses)) (dolist (case cases) (clauses `((= ,bn-temp ,(car case)) ,@(cdr case)))) `(let ((,bn-temp ,breaknum)) (cond ,@(clauses)))))) (defun break-control (chunk inst stream dstate) (declare (ignore inst)) (flet ((nt (x) (if stream (sb!disassem:note x dstate)))) (when (= (break-code chunk dstate) 0) (case (break-subcode chunk dstate) (#.halt-trap (nt "Halt trap")) (#.pending-interrupt-trap (nt "Pending interrupt trap")) (#.error-trap (nt "Error trap") (sb!disassem:handle-break-args #'snarf-error-junk stream dstate)) (#.cerror-trap (nt "Cerror trap") (sb!disassem:handle-break-args #'snarf-error-junk stream dstate)) (#.breakpoint-trap (nt "Breakpoint trap")) (#.fun-end-breakpoint-trap (nt "Function end breakpoint trap")) (#.after-breakpoint-trap (nt "After breakpoint trap")) (#.pseudo-atomic-trap (nt "Pseudo atomic trap")) (#.object-not-list-trap (nt "Object not list trap")) (#.object-not-instance-trap (nt "Object not instance trap")) (#.single-step-around-trap (nt "Single step around trap")) (#.single-step-before-trap (nt "Single step before trap")))))) (define-instruction break (segment code &optional (subcode 0)) (:declare (type (unsigned-byte 10) code subcode)) (:printer break ((op special-op) (funct #b001101)) '(:name :tab code (:unless (:constant 0) ", " subcode)) :control #'break-control) :pinned (:cost 0) (:delay 0) (:emitter (emit-break-inst segment special-op code subcode #b001101))) (define-instruction syscall (segment) (:printer register ((op special-op) (rd 0) (rt 0) (rs 0) (funct #b001110)) '(:name)) :pinned (:delay 0) (:emitter (emit-register-inst segment special-op 0 0 0 0 #b001110))) (define-instruction nop (segment) (:printer register ((op 0) (rd 0) (rd 0) (rs 0) (funct 0)) '(:name)) (:attributes flushable) (:delay 0) (:emitter (emit-word segment 0))) (!def-vm-support-routine emit-nop (segment) (emit-word segment 0)) (define-instruction word (segment word) (:declare (type (or (unsigned-byte 32) (signed-byte 32)) word)) :pinned (:cost 0) (:delay 0) (:emitter (emit-word segment word))) (define-instruction short (segment short) (:declare (type (or (unsigned-byte 16) (signed-byte 16)) short)) :pinned (:cost 0) (:delay 0) (:emitter (emit-short segment short))) (define-instruction byte (segment byte) (:declare (type (or (unsigned-byte 8) (signed-byte 8)) byte)) :pinned (:cost 0) (:delay 0) (:emitter (emit-byte segment byte))) (defun emit-header-data (segment type) (emit-back-patch segment 4 #'(lambda (segment posn) (emit-word segment (logior type (ash (+ posn (component-header-length)) (- n-widetag-bits word-shift))))))) (define-instruction simple-fun-header-word (segment) :pinned (:cost 0) (:delay 0) (:emitter (emit-header-data segment simple-fun-header-widetag))) (define-instruction lra-header-word (segment) :pinned (:cost 0) (:delay 0) (:emitter (emit-header-data segment return-pc-header-widetag))) (defun emit-compute-inst (segment vop dst src label temp calc) (emit-chooser We emit either 12 or 4 bytes , so we maintain 8 byte alignments . 
segment 12 3 #'(lambda (segment posn delta-if-after) (let ((delta (funcall calc label posn delta-if-after))) (when (typep delta '(signed-byte 16)) (emit-back-patch segment 4 #'(lambda (segment posn) (assemble (segment vop) (inst addu dst src (funcall calc label posn 0))))) t))) #'(lambda (segment posn) (let ((delta (funcall calc label posn 0))) (assemble (segment vop) (inst lui temp (ldb (byte 16 16) delta)) (inst or temp (ldb (byte 16 0) delta)) (inst addu dst src temp)))))) (define-instruction compute-code-from-lip (segment dst src label temp) (:declare (type tn dst src temp) (type label label)) (:attributes variable-length) (:dependencies (reads src) (writes dst) (writes temp)) (:delay 0) (:vop-var vop) (:emitter (emit-compute-inst segment vop dst src label temp #'(lambda (label posn delta-if-after) (- other-pointer-lowtag (label-position label posn delta-if-after) (component-header-length)))))) = lra - ( header + label - offset ) (define-instruction compute-code-from-lra (segment dst src label temp) (:declare (type tn dst src temp) (type label label)) (:attributes variable-length) (:dependencies (reads src) (writes dst) (writes temp)) (:delay 0) (:vop-var vop) (:emitter (emit-compute-inst segment vop dst src label temp #'(lambda (label posn delta-if-after) (- (+ (label-position label posn delta-if-after) (component-header-length))))))) lra = code + other - pointer - tag + header + label - offset - other - pointer - tag (define-instruction compute-lra-from-code (segment dst src label temp) (:declare (type tn dst src temp) (type label label)) (:attributes variable-length) (:dependencies (reads src) (writes dst) (writes temp)) (:delay 0) (:vop-var vop) (:emitter (emit-compute-inst segment vop dst src label temp #'(lambda (label posn delta-if-after) (+ (label-position label posn delta-if-after) (component-header-length)))))) (defun emit-load/store-inst (segment opcode reg base index &optional (oddhack 0)) (when (fixup-p index) (note-fixup segment :addi index) (setf index 0)) (emit-immediate-inst segment opcode (reg-tn-encoding reg) (+ (reg-tn-encoding base) oddhack) index)) (defconstant-eqx load-store-printer '(:name :tab rt ", " rs (:unless (:constant 0) "[" immediate "]")) #'equalp) (define-instruction lb (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b100000)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100000 base reg index))) (define-instruction lh (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b100001)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100001 base reg index))) (define-instruction lwl (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b100010)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100010 base reg index))) (define-instruction lw (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b100011)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100011 base reg index))) (define-instruction lw-odd 
(segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100011 base reg index 1))) (define-instruction lbu (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b100100)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100100 base reg index))) (define-instruction lhu (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b100101)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100101 base reg index))) (define-instruction lwr (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b100110)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-load/store-inst segment #b100110 base reg index))) (define-instruction sb (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b101000)) load-store-printer) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-load/store-inst segment #b101000 base reg index))) (define-instruction sh (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b101001)) load-store-printer) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-load/store-inst segment #b101001 base reg index))) (define-instruction swl (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b101010)) load-store-printer) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-load/store-inst segment #b101010 base reg index))) (define-instruction sw (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b101011)) load-store-printer) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-load/store-inst segment #b101011 base reg index))) (define-instruction swr (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b101110)) load-store-printer) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-load/store-inst segment #b101110 base reg index))) (defun emit-fp-load/store-inst (segment opcode reg odd base index) (when (fixup-p index) (note-fixup segment :addi index) (setf index 0)) (emit-immediate-inst segment opcode (reg-tn-encoding base) (+ (fp-reg-tn-encoding reg) odd) index)) (define-instruction lwc1 (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b110001) (rt nil :type 'fp-reg)) load-store-printer) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-fp-load/store-inst segment #b110001 reg 0 base index))) (define-instruction lwc1-odd (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or 
(signed-byte 16) fixup) index)) (:dependencies (reads base) (reads :memory) (writes reg)) (:delay 1) (:emitter (emit-fp-load/store-inst segment #b110001 reg 1 base index))) (define-instruction swc1 (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:printer immediate ((op #b111001) (rt nil :type 'fp-reg)) load-store-printer) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-fp-load/store-inst segment #b111001 reg 0 base index))) (define-instruction swc1-odd (segment reg base &optional (index 0)) (:declare (type tn reg base) (type (or (signed-byte 16) fixup) index)) (:dependencies (reads base) (reads reg) (writes :memory)) (:delay 0) (:emitter (emit-fp-load/store-inst segment #b111001 reg 1 base index)))
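A minimal sketch of the reach test that EMIT-RELATIVE-BRANCH above performs before choosing between the one-word PC-relative branch and the long LUI/ORI/JR fallback; BRANCH-OFFSET-IF-SHORT is an assumed helper name introduced here for illustration, not part of the file.

(defun branch-offset-if-short (branch-posn target-posn)
  "Return the signed 16-bit word offset encoded by a short branch at
BRANCH-POSN reaching TARGET-POSN, or NIL when the long form is needed."
  ;; Offsets are measured in words from the instruction after the branch
  ;; (the delay slot), hence the +4 and the shift by -2.
  (let ((delta (ash (- target-posn (+ branch-posn 4)) -2)))
    (when (typep delta '(signed-byte 16))
      delta)))

;; (branch-offset-if-short 0 8)          => 1
;; (branch-offset-if-short 0 (ash 1 20)) => NIL   ; out of range, use long form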
67bdac9d5bd4cfabb7c5bd61b07d1e0169975e77e209bf456e5a5f290c662965
acl2/acl2
(RP::CASESPLITTER-AUX (705 13 (:REWRITE RP::RP-TERMP-IMPLIES-SUBTERMS)) (682 11 (:DEFINITION QUOTEP)) (650 4 (:LINEAR APPLY$-BADGEP-PROPERTIES . 1)) (646 2 (:DEFINITION APPLY$-BADGEP)) (394 2 (:DEFINITION SUBSETP-EQUAL)) (368 28 (:DEFINITION MEMBER-EQUAL)) (234 14 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-1)) (223 223 (:REWRITE DEFAULT-CDR)) (182 13 (:REWRITE RP::RP-TERMP-IMPLIES-CDR-LISTP)) (101 101 (:REWRITE DEFAULT-CAR)) (63 63 (:META RP::BINARY-OR**/AND**-GUARD-META-CORRECT)) (62 62 (:TYPE-PRESCRIPTION MEMBER-EQUAL)) (58 18 (:REWRITE RP::IS-IF-RP-TERMP)) (50 2 (:DEFINITION TRUE-LISTP)) (42 42 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-3)) (41 41 (:REWRITE RP::RP-TERMP-SHOULD-TERM-BE-IN-CONS-LHS)) (40 4 (:REWRITE RP::RP-TERM-LISTP-IS-TRUE-LISTP)) (38 4 (:DEFINITION NATP)) (32 10 (:REWRITE RP::RP-TERMP-CADR)) (32 4 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP)) (28 28 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-2)) (26 8 (:REWRITE RP::RP-TERMP-CADDR)) (26 8 (:REWRITE RP::IS-RP-PSEUDO-TERMP)) (25 8 (:REWRITE RP::RP-TERMP-SINGLE-STEP-3)) (22 22 (:TYPE-PRESCRIPTION APPLY$-BADGEP)) (16 8 (:REWRITE APPLY$-BADGEP-PROPERTIES . 3)) (14 14 (:TYPE-PRESCRIPTION LEN)) (14 2 (:DEFINITION LEN)) (13 13 (:TYPE-PRESCRIPTION QUOTEP)) (13 6 (:REWRITE RP::ATOM-RP-TERMP-IS-SYMBOLP)) (12 6 (:REWRITE APPLY$-BADGEP-PROPERTIES . 1)) (12 2 (:REWRITE RP::NOT-INCLUDE-RP)) (12 2 (:DEFINITION ALL-NILS)) (10 10 (:TYPE-PRESCRIPTION RP::EX-FROM-SYNP)) (10 10 (:TYPE-PRESCRIPTION ALL-NILS)) (10 2 (:DEFINITION WEAK-APPLY$-BADGE-P)) (9 9 (:REWRITE FN-CHECK-DEF-NOT-QUOTE)) (8 8 (:TYPE-PRESCRIPTION TRUE-LISTP)) (8 8 (:TYPE-PRESCRIPTION SUBSETP-EQUAL)) (8 8 (:TYPE-PRESCRIPTION SET::SETP-TYPE)) (8 4 (:REWRITE OMAP::SETP-WHEN-MAPP)) (8 4 (:REWRITE SET::NONEMPTY-MEANS-SET)) (8 2 (:DEFINITION RP::INCLUDE-FNC)) (6 6 (:REWRITE SYMBOL-LISTP-IMPLIES-SYMBOLP)) (6 6 (:REWRITE INTEGER-LISTP-IMPLIES-INTEGERP)) (4 4 (:TYPE-PRESCRIPTION OMAP::MAPP)) (4 4 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE)) (4 4 (:REWRITE SET::IN-SET)) (4 4 (:LINEAR LOWER-BOUND-OF-LEN-WHEN-SUBLISTP)) (4 4 (:LINEAR LISTPOS-UPPER-BOUND-STRONG-2)) (4 4 (:LINEAR LEN-WHEN-PREFIXP)) (4 2 (:REWRITE DEFAULT-+-2)) (4 2 (:REWRITE APPLY$-BADGEP-PROPERTIES . 2)) (4 2 (:LINEAR APPLY$-BADGEP-PROPERTIES . 2)) (2 2 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC)) (2 2 (:REWRITE LEN-MEMBER-EQUAL-LOOP$-AS)) (2 2 (:REWRITE DEFAULT-<-2)) (2 2 (:REWRITE DEFAULT-<-1)) (2 2 (:REWRITE DEFAULT-+-1)) ) (RP::RP-TERMP-OF-CASESPLITTER-AUX (1625 10 (:LINEAR APPLY$-BADGEP-PROPERTIES . 
1)) (1615 5 (:DEFINITION APPLY$-BADGEP)) (985 5 (:DEFINITION SUBSETP-EQUAL)) (920 70 (:DEFINITION MEMBER-EQUAL)) (802 746 (:REWRITE DEFAULT-CDR)) (585 35 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-1)) (554 483 (:REWRITE DEFAULT-CAR)) (550 54 (:REWRITE RP::RP-TERMP-IMPLIES-CDR-LISTP)) (516 137 (:REWRITE RP::IS-IF-RP-TERMP)) (273 273 (:REWRITE RP::RP-TERMP-SHOULD-TERM-BE-IN-CONS-LHS)) (264 71 (:REWRITE RP::RP-TERMP-CADR)) (263 66 (:REWRITE RP::RP-TERMP-CADDR)) (229 229 (:META RP::BINARY-OR**/AND**-GUARD-META-CORRECT)) (225 66 (:REWRITE RP::RP-TERMP-SINGLE-STEP-3)) (196 31 (:REWRITE RP::NOT-INCLUDE-RP)) (155 155 (:TYPE-PRESCRIPTION MEMBER-EQUAL)) (136 29 (:DEFINITION RP::INCLUDE-FNC)) (125 5 (:DEFINITION TRUE-LISTP)) (105 105 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-3)) (100 10 (:REWRITE RP::RP-TERM-LISTP-IS-TRUE-LISTP)) (95 10 (:DEFINITION NATP)) (80 10 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP)) (70 70 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-2)) (57 57 (:REWRITE SYMBOL-LISTP-IMPLIES-SYMBOLP)) (55 55 (:TYPE-PRESCRIPTION APPLY$-BADGEP)) (46 46 (:REWRITE FN-CHECK-DEF-NOT-QUOTE)) (40 20 (:REWRITE APPLY$-BADGEP-PROPERTIES . 3)) (35 35 (:TYPE-PRESCRIPTION LEN)) (35 5 (:DEFINITION LEN)) (30 15 (:REWRITE APPLY$-BADGEP-PROPERTIES . 1)) (30 5 (:DEFINITION ALL-NILS)) (29 29 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC)) (25 25 (:TYPE-PRESCRIPTION ALL-NILS)) (25 5 (:DEFINITION WEAK-APPLY$-BADGE-P)) (20 20 (:TYPE-PRESCRIPTION TRUE-LISTP)) (20 20 (:TYPE-PRESCRIPTION SUBSETP-EQUAL)) (20 20 (:TYPE-PRESCRIPTION SET::SETP-TYPE)) (20 10 (:REWRITE OMAP::SETP-WHEN-MAPP)) (20 10 (:REWRITE SET::NONEMPTY-MEANS-SET)) (15 15 (:REWRITE INTEGER-LISTP-IMPLIES-INTEGERP)) (10 10 (:TYPE-PRESCRIPTION OMAP::MAPP)) (10 10 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE)) (10 10 (:REWRITE SET::IN-SET)) (10 10 (:LINEAR LOWER-BOUND-OF-LEN-WHEN-SUBLISTP)) (10 10 (:LINEAR LISTPOS-UPPER-BOUND-STRONG-2)) (10 10 (:LINEAR LEN-WHEN-PREFIXP)) (10 5 (:REWRITE DEFAULT-+-2)) (10 5 (:REWRITE APPLY$-BADGEP-PROPERTIES . 2)) (10 5 (:LINEAR APPLY$-BADGEP-PROPERTIES . 2)) (5 5 (:REWRITE LEN-MEMBER-EQUAL-LOOP$-AS)) (5 5 (:REWRITE DEFAULT-<-2)) (5 5 (:REWRITE DEFAULT-<-1)) (5 5 (:REWRITE DEFAULT-+-1)) (4 4 (:TYPE-PRESCRIPTION RP::IS-RP-LOOSE$INLINE)) (4 4 (:TYPE-PRESCRIPTION RP::IS-IF$INLINE)) ) (RP::CASESPLITTER (414 1 (:DEFINITION TRUE-LISTP)) (357 3 (:DEFINITION RP::RP-TERM-LISTP)) (293 2 (:LINEAR APPLY$-BADGEP-PROPERTIES . 
1)) (291 1 (:DEFINITION APPLY$-BADGEP)) (197 1 (:DEFINITION SUBSETP-EQUAL)) (184 14 (:DEFINITION MEMBER-EQUAL)) (141 141 (:REWRITE DEFAULT-CDR)) (117 7 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-1)) (89 6 (:REWRITE RP::RP-TERMP-IMPLIES-CDR-LISTP)) (61 61 (:REWRITE DEFAULT-CAR)) (52 4 (:REWRITE OMAP::ALISTP-WHEN-MAPP)) (38 19 (:DEFINITION NTH)) (37 37 (:META RP::BINARY-OR**/AND**-GUARD-META-CORRECT)) (35 11 (:REWRITE RP::IS-IF-RP-TERMP)) (32 2 (:DEFINITION ALISTP)) (31 31 (:TYPE-PRESCRIPTION MEMBER-EQUAL)) (29 29 (:TYPE-PRESCRIPTION RP::EX-FROM-SYNP)) (24 3 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP)) (22 22 (:REWRITE RP::RP-TERMP-SHOULD-TERM-BE-IN-CONS-LHS)) (21 21 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-3)) (20 6 (:REWRITE RP::RP-TERMP-CADR)) (20 4 (:REWRITE OMAP::MFIX-IMPLIES-MAPP)) (20 4 (:REWRITE OMAP::MAPP-WHEN-NOT-EMPTY)) (19 19 (:TYPE-PRESCRIPTION OMAP::MAPP)) (19 19 (:REWRITE NTH-WHEN-PREFIXP)) (19 2 (:DEFINITION NATP)) (17 5 (:REWRITE RP::RP-TERMP-CADDR)) (15 5 (:REWRITE RP::RP-TERMP-SINGLE-STEP-3)) (14 14 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-2)) (13 3 (:REWRITE RP::NOT-INCLUDE-RP)) (12 2 (:DEFINITION LEN)) (10 10 (:TYPE-PRESCRIPTION APPLY$-BADGEP)) (8 8 (:TYPE-PRESCRIPTION OMAP::MFIX)) (8 8 (:TYPE-PRESCRIPTION OMAP::EMPTY)) (8 4 (:REWRITE OMAP::MFIX-WHEN-MAPP)) (8 4 (:REWRITE OMAP::MAPP-NON-NIL-IMPLIES-NON-EMPTY)) (8 2 (:DEFINITION RP::INCLUDE-FNC)) (7 7 (:REWRITE FN-CHECK-DEF-NOT-QUOTE)) (6 6 (:TYPE-PRESCRIPTION SET::SETP-TYPE)) (6 6 (:REWRITE INTEGER-LISTP-IMPLIES-INTEGERP)) (6 3 (:REWRITE OMAP::SETP-WHEN-MAPP)) (6 3 (:REWRITE SET::NONEMPTY-MEANS-SET)) (6 3 (:REWRITE APPLY$-BADGEP-PROPERTIES . 3)) (6 3 (:REWRITE APPLY$-BADGEP-PROPERTIES . 1)) (6 1 (:DEFINITION ALL-NILS)) (5 5 (:TYPE-PRESCRIPTION ALL-NILS)) (5 5 (:REWRITE SYMBOL-LISTP-IMPLIES-SYMBOLP)) (5 5 (:REWRITE DEFAULT-<-2)) (5 5 (:REWRITE DEFAULT-<-1)) (5 1 (:DEFINITION WEAK-APPLY$-BADGE-P)) (4 4 (:TYPE-PRESCRIPTION SUBSETP-EQUAL)) (4 2 (:REWRITE DEFAULT-+-2)) (4 1 (:REWRITE REV-WHEN-NOT-CONSP)) (3 3 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE)) (3 3 (:REWRITE SET::IN-SET)) (2 2 (:TYPE-PRESCRIPTION RP::IS-RP-LOOSE$INLINE)) (2 2 (:TYPE-PRESCRIPTION RP::IS-IF$INLINE)) (2 2 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC)) (2 2 (:REWRITE LEN-MEMBER-EQUAL-LOOP$-AS)) (2 2 (:REWRITE DEFAULT-+-1)) (2 2 (:LINEAR LOWER-BOUND-OF-LEN-WHEN-SUBLISTP)) (2 2 (:LINEAR LISTPOS-UPPER-BOUND-STRONG-2)) (2 2 (:LINEAR LEN-WHEN-PREFIXP)) (2 1 (:REWRITE APPLY$-BADGEP-PROPERTIES . 2)) (2 1 (:LINEAR APPLY$-BADGEP-PROPERTIES . 2)) ) (RP::CASESPLITTER-AUX-CORRECT (4279 3984 (:REWRITE DEFAULT-CDR)) (3610 19 (:DEFINITION RP::EVAL-AND-ALL)) (3224 18 (:DEFINITION RP::RP-TERMP)) (3029 2611 (:REWRITE DEFAULT-CAR)) (2437 4 (:REWRITE RP::RP-EVL-OF-RP-EQUAL2)) (2228 288 (:LINEAR APPLY$-BADGEP-PROPERTIES . 
1)) (2204 38 (:REWRITE RP::EVL-OF-EXTRACT-FROM-RP-2)) (1719 291 (:DEFINITION APPLY$-BADGEP)) (1607 8 (:REWRITE RP::RP-TERMP-OF-RP-TRANS)) (1479 33 (:DEFINITION RP::EX-FROM-RP)) (1257 53 (:REWRITE RP::VALID-SC-CADR)) (1119 1119 (:META RP::BINARY-OR**/AND**-GUARD-META-CORRECT)) (1119 8 (:REWRITE RP::RP-EQUAL-IS-SYMMETRIC)) (1072 59 (:DEFINITION RP::TRANS-LIST)) (1071 8 (:DEFINITION RP::RP-EQUAL)) (1032 100 (:DEFINITION RP::INCLUDE-FNC)) (876 4 (:REWRITE RP::RP-TERMP-OF-CASESPLITTER-AUX)) (838 4 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-LOOSE)) (757 53 (:REWRITE RP::EX-FROM-SYNP-LEMMA1)) (743 52 (:REWRITE RP::RP-TRANS-IS-TERM-WHEN-LIST-IS-ABSENT)) (672 291 (:DEFINITION WEAK-APPLY$-BADGE-P)) (653 51 (:DEFINITION RP::IS-SYNP$INLINE)) (603 4 (:REWRITE RP::RP-EVLT-OF-RP-EQUAL)) (574 50 (:REWRITE RP::NOT-INCLUDE-RP)) (553 553 (:TYPE-PRESCRIPTION RP::RP-TRANS-LST)) (544 34 (:REWRITE RP::RP-TERMP-SINGLE-STEP-3)) (532 4 (:REWRITE RP::RP-EVL-OF-RP-EQUAL)) (442 34 (:DEFINITION NATP)) (435 435 (:TYPE-PRESCRIPTION APPLY$-BADGEP)) (400 3 (:DEFINITION RP::RP-TERM-LISTP)) (266 266 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC)) (242 236 (:REWRITE RP::CONSP-RP-TRANS-LST)) (230 230 (:REWRITE FN-CHECK-DEF-NOT-QUOTE)) (195 171 (:REWRITE RP::ATOM-RP-TERMP-IS-SYMBOLP)) (171 171 (:REWRITE SYMBOL-LISTP-IMPLIES-SYMBOLP)) (170 34 (:REWRITE RP::VALID-SC-CADDR)) (166 166 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC-SUBTERMS)) (154 68 (:REWRITE RP::IS-IF-RP-TERMP)) (152 52 (:REWRITE RP::RP-EVL-OF-VARIABLE)) (138 51 (:REWRITE APPLY$-BADGEP-PROPERTIES . 2)) (131 34 (:LINEAR APPLY$-BADGEP-PROPERTIES . 2)) (114 114 (:REWRITE RP::RP-TERMP-SHOULD-TERM-BE-IN-CONS-LHS)) (108 51 (:REWRITE APPLY$-BADGEP-PROPERTIES . 1)) (93 17 (:REWRITE RP::RP-TERMP-IMPLIES-SUBTERMS)) (78 34 (:REWRITE RP::RP-TERMP-CADDR)) (78 34 (:REWRITE RP::IS-RP-PSEUDO-TERMP)) (76 76 (:TYPE-PRESCRIPTION RP::IS-RP-LOOSE$INLINE)) (76 76 (:TYPE-PRESCRIPTION RP::CONTEXT-FROM-RP)) (76 34 (:REWRITE RP::RP-TERMP-CADR)) (60 15 (:REWRITE RP::VALID-SC-OF-EX-FROM-RP)) (55 55 (:TYPE-PRESCRIPTION BOOLEANP)) (55 53 (:REWRITE RP::RP-EVL-OF-ZP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-UNARY-/-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-UNARY---CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-TYPESPEC-CHECK-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-SYNP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-SYMBOLP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-SYMBOL-PACKAGE-NAME-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-SYMBOL-NAME-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-STRINGP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-SUBTERMS-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-CNT-SUBTERMS-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-CNT-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RETURN-LAST-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-REALPART-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RATIONALP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-NUMERATOR-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-NATP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-INTERN-IN-PACKAGE-OF-SYMBOL-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-INTEGERP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-IMPLIES-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-IMAGPART-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-IFF-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-HIDE-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-FORCE-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-FORCE$-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-DONT-RW-CONTEXT-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-DENOMINATOR-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CONSP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CONS-CALL)) (55 53 (:REWRITE 
RP::RP-EVL-OF-COMPLEX-RATIONALP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-COERCE-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CODE-CHAR-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CHARACTERP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CHAR-CODE-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CDR-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CASESPLIT-FROM-CONTEXT-TRIG-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CAR-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-BITP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-BINARY-+-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-BINARY-*-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-BADGE-USERFN-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-BAD-ATOM<=-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-APPLY$-USERFN-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-APPLY$-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-ACL2-NUMBERP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-<-CALL)) (52 52 (:REWRITE RP::RP-EVL-OF-QUOTE)) (52 52 (:REWRITE RP::RP-EVL-OF-NOT-CALL)) (52 52 (:REWRITE RP::RP-EVL-OF-LAMBDA)) (52 52 (:REWRITE RP::RP-EVL-OF-FALIST-CALL)) (52 52 (:REWRITE RP::RP-EVL-OF-EQUAL-CALL)) (51 51 (:TYPE-PRESCRIPTION RP::IS-SYNP$INLINE)) (51 51 (:REWRITE INTEGER-LISTP-IMPLIES-INTEGERP)) (40 40 (:TYPE-PRESCRIPTION RP::RP-TERM-LISTP)) (40 17 (:REWRITE RP::RP-TERMP-IMPLIES-CDR-LISTP)) (34 34 (:TYPE-PRESCRIPTION RP::FALIST-CONSISTENT)) (32 8 (:REWRITE RP::RP-EQUAL-REFLEXIVE)) (24 24 (:TYPE-PRESCRIPTION RP::RP-EQUAL)) (19 17 (:REWRITE RP::VALID-SC-CADDDR)) (17 17 (:TYPE-PRESCRIPTION QUOTEP)) (17 17 (:REWRITE DEFAULT-<-2)) (17 17 (:REWRITE DEFAULT-<-1)) (16 16 (:TYPE-PRESCRIPTION RP::RP-EQUAL-SUBTERMS)) (16 8 (:REWRITE RP::RP-EQUAL-SUBTERMS-REFLEXIVE)) (1 1 (:REWRITE RP::RP-EVL-META-EXTRACT-FN-CHECK-DEF)) ) (RP::CASESPLITTER_VALID (15657 6 (:DEFINITION RP::VALID-RULESP)) (14070 6 (:DEFINITION RP::VALID-RULEP)) (14052 6 (:DEFINITION RP::VALID-RULEP-SK)) (14046 6 (:DEFINITION RP::VALID-RULEP-SK-BODY)) (9544 4 (:REWRITE RP::VALID-RULESP-IMPLIES-RULE-LIST-SYNTAXP)) (5952 14 (:DEFINITION APPLY$-BADGEP)) (5682 46 (:LINEAR APPLY$-BADGEP-PROPERTIES . 1)) (4798 2 (:DEFINITION RP::RULE-LIST-SYNTAXP)) (4792 120 (:DEFINITION RP::INCLUDE-FNC)) (3984 6 (:REWRITE RP::RP-EVL-OF-RP-EQUAL)) (3954 6 (:DEFINITION RP::RP-EQUAL)) (3880 25 (:DEFINITION RP::EX-FROM-RP)) (3865 12 (:DEFINITION TRUE-LISTP)) (3816 12 (:DEFINITION RP::VALID-SC-NT)) (3804 40 (:REWRITE RP::NOT-INCLUDE-RP)) (3464 36 (:DEFINITION RP::EVAL-AND-ALL-NT)) (3140 3110 (:REWRITE DEFAULT-CDR)) (3140 25 (:DEFINITION RP::RP-TERM-LISTP)) (3077 211 (:DEFINITION QUOTEP)) (3028 14 (:DEFINITION SUBSETP-EQUAL)) (2880 69 (:REWRITE RP::RP-TERMP-IMPLIES-SUBTERMS)) (2846 196 (:DEFINITION MEMBER-EQUAL)) (2374 38 (:DEFINITION RP::RP-TERMP)) (1959 12 (:REWRITE RP::VALID-RULES-SUBSETP)) (1818 98 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-1)) (1696 1636 (:REWRITE DEFAULT-CAR)) (1227 81 (:REWRITE RP::EX-FROM-SYNP-LEMMA1)) (1210 69 (:REWRITE RP::RP-TERMP-IMPLIES-CDR-LISTP)) (1065 81 (:DEFINITION RP::IS-SYNP$INLINE)) (972 60 (:REWRITE RP::VALID-SC-NT-IS-VALID-SC)) (900 72 (:REWRITE RP::EVAL-AND-ALL-NT-IS-EVAL-AND-ALL)) (826 826 (:META RP::BINARY-OR**/AND**-GUARD-META-CORRECT)) (792 84 (:DEFINITION RP::INCLUDE-FNC-SUBTERMS)) (660 660 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC-SUBTERMS)) (612 612 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC)) (578 185 (:REWRITE RP::IS-IF-RP-TERMP)) (576 6 (:REWRITE RP::RP-EVL-OF-RP-EQUAL2)) (576 6 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-LOOSE)) (434 434 (:TYPE-PRESCRIPTION MEMBER-EQUAL)) (414 69 (:REWRITE RP::EVL-OF-EXTRACT-FROM-RP-2)) (404 10 (:DEFINITION RP::RP-TRANS)) (364 32 (:LINEAR APPLY$-BADGEP-PROPERTIES . 
2)) (337 1 (:DEFINITION RP::VALID-SC)) (294 294 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-3)) (278 278 (:REWRITE RP::RP-TERMP-SHOULD-TERM-BE-IN-CONS-LHS)) (271 28 (:DEFINITION NATP)) (248 31 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP)) (243 81 (:REWRITE RP::RP-EVL-OF-VARIABLE)) (230 50 (:REWRITE RP::RP-TERMP-CADDDR)) (196 196 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-2)) (187 187 (:TYPE-PRESCRIPTION APPLY$-BADGEP)) (184 70 (:REWRITE RP::RP-TERMP-CADR)) (182 14 (:REWRITE OMAP::ALISTP-WHEN-MAPP)) (180 10 (:DEFINITION RP::TRANS-LIST)) (173 65 (:REWRITE RP::RP-TERMP-SINGLE-STEP-3)) (164 65 (:REWRITE RP::RP-TERMP-CADDR)) (164 65 (:REWRITE RP::IS-RP-PSEUDO-TERMP)) (162 162 (:TYPE-PRESCRIPTION RP::IS-RP-LOOSE$INLINE)) (156 156 (:TYPE-PRESCRIPTION RP::EVAL-AND-ALL-NT)) (156 12 (:REWRITE RP::EVAL-AND-ALL-NT-OF-CONTEXT-FROM-RP)) (144 126 (:REWRITE RP::ATOM-RP-TERMP-IS-SYMBOLP)) (144 36 (:DEFINITION RP::RP-RHS$INLINE)) (144 12 (:REWRITE RP::VALID-SC-NT-SUBTERMS-VALID-SC-SUBTERMS)) (144 12 (:REWRITE RP::NOT-INCLUDE-EX-FROM-RP)) (138 1 (:DEFINITION RP::EVAL-AND-ALL)) (126 126 (:REWRITE SYMBOL-LISTP-IMPLIES-SYMBOLP)) (116 58 (:DEFINITION NTH)) (113 17 (:DEFINITION LEN)) (112 7 (:DEFINITION ALISTP)) (110 110 (:TYPE-PRESCRIPTION RP::CASESPLITTER-AUX)) (106 8 (:REWRITE RP::RP-TRANS-IS-TERM-WHEN-LIST-IS-ABSENT)) (104 104 (:TYPE-PRESCRIPTION RP::VALID-SC-NT)) (98 49 (:REWRITE APPLY$-BADGEP-PROPERTIES . 3)) (96 24 (:DEFINITION RP::RP-HYP$INLINE)) (94 94 (:REWRITE FN-CHECK-DEF-NOT-QUOTE)) (90 90 (:TYPE-PRESCRIPTION RP::RP-TRANS-LST)) (87 87 (:TYPE-PRESCRIPTION OMAP::MAPP)) (84 84 (:TYPE-PRESCRIPTION RP::CONTEXT-FROM-RP)) (84 42 (:REWRITE APPLY$-BADGEP-PROPERTIES . 1)) (84 14 (:DEFINITION ALL-NILS)) (81 81 (:TYPE-PRESCRIPTION QUOTEP)) (81 81 (:TYPE-PRESCRIPTION RP::IS-SYNP$INLINE)) (81 81 (:REWRITE RP::RP-EVL-OF-ZP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-UNARY-/-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-UNARY---CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-TYPESPEC-CHECK-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-SYNP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-SYMBOLP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-SYMBOL-PACKAGE-NAME-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-SYMBOL-NAME-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-STRINGP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-SUBTERMS-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-CNT-SUBTERMS-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-CNT-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RETURN-LAST-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-REALPART-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RATIONALP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-QUOTE)) (81 81 (:REWRITE RP::RP-EVL-OF-NUMERATOR-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-NOT-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-NATP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-LAMBDA)) (81 81 (:REWRITE RP::RP-EVL-OF-INTERN-IN-PACKAGE-OF-SYMBOL-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-INTEGERP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-IMPLIES-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-IMAGPART-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-IFF-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-IF-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-HIDE-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-FORCE-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-FORCE$-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-FALIST-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-EQUAL-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-DONT-RW-CONTEXT-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-DENOMINATOR-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CONSP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CONS-CALL)) (81 81 (:REWRITE 
RP::RP-EVL-OF-COMPLEX-RATIONALP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-COERCE-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CODE-CHAR-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CHARACTERP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CHAR-CODE-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CDR-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CASESPLIT-FROM-CONTEXT-TRIG-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CAR-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-BITP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-BINARY-+-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-BINARY-*-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-BADGE-USERFN-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-BAD-ATOM<=-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-APPLY$-USERFN-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-APPLY$-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-ACL2-NUMBERP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-<-CALL)) (78 18 (:REWRITE RP::RP-TERMP-CADDDDR)) (77 77 (:TYPE-PRESCRIPTION RP::EX-FROM-SYNP)) (72 12 (:REWRITE RP::EX-FROM-RP-LEMMA1)) (71 14 (:DEFINITION WEAK-APPLY$-BADGE-P)) (70 70 (:TYPE-PRESCRIPTION ALL-NILS)) (70 14 (:REWRITE OMAP::MFIX-IMPLIES-MAPP)) (70 14 (:REWRITE OMAP::MAPP-WHEN-NOT-EMPTY)) (62 62 (:TYPE-PRESCRIPTION SET::SETP-TYPE)) (62 31 (:REWRITE OMAP::SETP-WHEN-MAPP)) (62 31 (:REWRITE SET::NONEMPTY-MEANS-SET)) (58 58 (:REWRITE NTH-WHEN-PREFIXP)) (56 56 (:TYPE-PRESCRIPTION SUBSETP-EQUAL)) (51 51 (:REWRITE INTEGER-LISTP-IMPLIES-INTEGERP)) (48 12 (:REWRITE RP::QUOTEP-TERM-WITH-EX-FROM-RP)) (40 40 (:REWRITE RP::CONSP-RP-TRANS-LST)) (36 12 (:DEFINITION RP::RP-LHS$INLINE)) (35 17 (:REWRITE APPLY$-BADGEP-PROPERTIES . 2)) (34 17 (:REWRITE DEFAULT-+-2)) (31 31 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE)) (31 31 (:REWRITE SET::IN-SET)) (30 30 (:LINEAR LOWER-BOUND-OF-LEN-WHEN-SUBLISTP)) (30 30 (:LINEAR LISTPOS-UPPER-BOUND-STRONG-2)) (30 30 (:LINEAR LEN-WHEN-PREFIXP)) (28 28 (:TYPE-PRESCRIPTION OMAP::MFIX)) (28 28 (:TYPE-PRESCRIPTION OMAP::EMPTY)) (28 14 (:REWRITE OMAP::MFIX-WHEN-MAPP)) (28 14 (:REWRITE OMAP::MAPP-NON-NIL-IMPLIES-NON-EMPTY)) (26 26 (:REWRITE DEFAULT-<-2)) (26 26 (:REWRITE DEFAULT-<-1)) (24 6 (:DEFINITION RP::RP-IFF-FLAG$INLINE)) (20 20 (:TYPE-PRESCRIPTION RP::EX-FROM-RP-ALL2-LST)) (18 18 (:TYPE-PRESCRIPTION RP::RULE-SYNTAXP-FN)) (18 18 (:TYPE-PRESCRIPTION RP::RP-EQUAL)) (17 17 (:REWRITE LEN-MEMBER-EQUAL-LOOP$-AS)) (17 17 (:REWRITE DEFAULT-+-1)) (16 16 (:TYPE-PRESCRIPTION RP::VALID-SC-NT-SUBTERMS)) (16 4 (:REWRITE REV-WHEN-NOT-CONSP)) (13 13 (:TYPE-PRESCRIPTION RP::IS-IF$INLINE)) (12 12 (:TYPE-PRESCRIPTION RP::RP-EQUAL-SUBTERMS)) (12 6 (:REWRITE RP::RP-EQUAL-SUBTERMS-REFLEXIVE)) (12 6 (:REWRITE RP::RP-EQUAL-REFLEXIVE)) (5 3 (:REWRITE RP::VALID-SC-CADR)) (4 2 (:REWRITE RP::VALID-SC-CADDR)) (4 1 (:REWRITE RP::VALID-SC-OF-EX-FROM-RP)) (4 1 (:REWRITE RP::VALID-SC-EX-FROM-RP)) (3 1 (:REWRITE RP::VALID-SC-CADDDR)) (2 2 (:TYPE-PRESCRIPTION RP::CASESPLITTER)) (2 2 (:REWRITE RP::VALID-RULEP-SK-NECC)) (1 1 (:TYPE-PRESCRIPTION RP::EVAL-AND-ALL)) (1 1 (:REWRITE RP::VALID-RP-STATEP-NECC)) (1 1 (:REWRITE RP::VALID-RP-STATE-SYNTAXP-AUX-NECC)) (1 1 (:REWRITE RP::RP-STATE-PRESERVEDP-IMPLIES-VALID-RP-STATEP)) )
null
https://raw.githubusercontent.com/acl2/acl2/f64742cc6d41c35f9d3f94e154cd5fd409105d34/books/projects/rp-rewriter/meta/.sys/casesplitter%40useless-runes.lsp
lisp
(RP::CASESPLITTER-AUX (705 13 (:REWRITE RP::RP-TERMP-IMPLIES-SUBTERMS)) (682 11 (:DEFINITION QUOTEP)) (650 4 (:LINEAR APPLY$-BADGEP-PROPERTIES . 1)) (646 2 (:DEFINITION APPLY$-BADGEP)) (394 2 (:DEFINITION SUBSETP-EQUAL)) (368 28 (:DEFINITION MEMBER-EQUAL)) (234 14 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-1)) (223 223 (:REWRITE DEFAULT-CDR)) (182 13 (:REWRITE RP::RP-TERMP-IMPLIES-CDR-LISTP)) (101 101 (:REWRITE DEFAULT-CAR)) (63 63 (:META RP::BINARY-OR**/AND**-GUARD-META-CORRECT)) (62 62 (:TYPE-PRESCRIPTION MEMBER-EQUAL)) (58 18 (:REWRITE RP::IS-IF-RP-TERMP)) (50 2 (:DEFINITION TRUE-LISTP)) (42 42 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-3)) (41 41 (:REWRITE RP::RP-TERMP-SHOULD-TERM-BE-IN-CONS-LHS)) (40 4 (:REWRITE RP::RP-TERM-LISTP-IS-TRUE-LISTP)) (38 4 (:DEFINITION NATP)) (32 10 (:REWRITE RP::RP-TERMP-CADR)) (32 4 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP)) (28 28 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-2)) (26 8 (:REWRITE RP::RP-TERMP-CADDR)) (26 8 (:REWRITE RP::IS-RP-PSEUDO-TERMP)) (25 8 (:REWRITE RP::RP-TERMP-SINGLE-STEP-3)) (22 22 (:TYPE-PRESCRIPTION APPLY$-BADGEP)) (16 8 (:REWRITE APPLY$-BADGEP-PROPERTIES . 3)) (14 14 (:TYPE-PRESCRIPTION LEN)) (14 2 (:DEFINITION LEN)) (13 13 (:TYPE-PRESCRIPTION QUOTEP)) (13 6 (:REWRITE RP::ATOM-RP-TERMP-IS-SYMBOLP)) (12 6 (:REWRITE APPLY$-BADGEP-PROPERTIES . 1)) (12 2 (:REWRITE RP::NOT-INCLUDE-RP)) (12 2 (:DEFINITION ALL-NILS)) (10 10 (:TYPE-PRESCRIPTION RP::EX-FROM-SYNP)) (10 10 (:TYPE-PRESCRIPTION ALL-NILS)) (10 2 (:DEFINITION WEAK-APPLY$-BADGE-P)) (9 9 (:REWRITE FN-CHECK-DEF-NOT-QUOTE)) (8 8 (:TYPE-PRESCRIPTION TRUE-LISTP)) (8 8 (:TYPE-PRESCRIPTION SUBSETP-EQUAL)) (8 8 (:TYPE-PRESCRIPTION SET::SETP-TYPE)) (8 4 (:REWRITE OMAP::SETP-WHEN-MAPP)) (8 4 (:REWRITE SET::NONEMPTY-MEANS-SET)) (8 2 (:DEFINITION RP::INCLUDE-FNC)) (6 6 (:REWRITE SYMBOL-LISTP-IMPLIES-SYMBOLP)) (6 6 (:REWRITE INTEGER-LISTP-IMPLIES-INTEGERP)) (4 4 (:TYPE-PRESCRIPTION OMAP::MAPP)) (4 4 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE)) (4 4 (:REWRITE SET::IN-SET)) (4 4 (:LINEAR LOWER-BOUND-OF-LEN-WHEN-SUBLISTP)) (4 4 (:LINEAR LISTPOS-UPPER-BOUND-STRONG-2)) (4 4 (:LINEAR LEN-WHEN-PREFIXP)) (4 2 (:REWRITE DEFAULT-+-2)) (4 2 (:REWRITE APPLY$-BADGEP-PROPERTIES . 2)) (4 2 (:LINEAR APPLY$-BADGEP-PROPERTIES . 2)) (2 2 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC)) (2 2 (:REWRITE LEN-MEMBER-EQUAL-LOOP$-AS)) (2 2 (:REWRITE DEFAULT-<-2)) (2 2 (:REWRITE DEFAULT-<-1)) (2 2 (:REWRITE DEFAULT-+-1)) ) (RP::RP-TERMP-OF-CASESPLITTER-AUX (1625 10 (:LINEAR APPLY$-BADGEP-PROPERTIES . 
1)) (1615 5 (:DEFINITION APPLY$-BADGEP)) (985 5 (:DEFINITION SUBSETP-EQUAL)) (920 70 (:DEFINITION MEMBER-EQUAL)) (802 746 (:REWRITE DEFAULT-CDR)) (585 35 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-1)) (554 483 (:REWRITE DEFAULT-CAR)) (550 54 (:REWRITE RP::RP-TERMP-IMPLIES-CDR-LISTP)) (516 137 (:REWRITE RP::IS-IF-RP-TERMP)) (273 273 (:REWRITE RP::RP-TERMP-SHOULD-TERM-BE-IN-CONS-LHS)) (264 71 (:REWRITE RP::RP-TERMP-CADR)) (263 66 (:REWRITE RP::RP-TERMP-CADDR)) (229 229 (:META RP::BINARY-OR**/AND**-GUARD-META-CORRECT)) (225 66 (:REWRITE RP::RP-TERMP-SINGLE-STEP-3)) (196 31 (:REWRITE RP::NOT-INCLUDE-RP)) (155 155 (:TYPE-PRESCRIPTION MEMBER-EQUAL)) (136 29 (:DEFINITION RP::INCLUDE-FNC)) (125 5 (:DEFINITION TRUE-LISTP)) (105 105 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-3)) (100 10 (:REWRITE RP::RP-TERM-LISTP-IS-TRUE-LISTP)) (95 10 (:DEFINITION NATP)) (80 10 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP)) (70 70 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-2)) (57 57 (:REWRITE SYMBOL-LISTP-IMPLIES-SYMBOLP)) (55 55 (:TYPE-PRESCRIPTION APPLY$-BADGEP)) (46 46 (:REWRITE FN-CHECK-DEF-NOT-QUOTE)) (40 20 (:REWRITE APPLY$-BADGEP-PROPERTIES . 3)) (35 35 (:TYPE-PRESCRIPTION LEN)) (35 5 (:DEFINITION LEN)) (30 15 (:REWRITE APPLY$-BADGEP-PROPERTIES . 1)) (30 5 (:DEFINITION ALL-NILS)) (29 29 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC)) (25 25 (:TYPE-PRESCRIPTION ALL-NILS)) (25 5 (:DEFINITION WEAK-APPLY$-BADGE-P)) (20 20 (:TYPE-PRESCRIPTION TRUE-LISTP)) (20 20 (:TYPE-PRESCRIPTION SUBSETP-EQUAL)) (20 20 (:TYPE-PRESCRIPTION SET::SETP-TYPE)) (20 10 (:REWRITE OMAP::SETP-WHEN-MAPP)) (20 10 (:REWRITE SET::NONEMPTY-MEANS-SET)) (15 15 (:REWRITE INTEGER-LISTP-IMPLIES-INTEGERP)) (10 10 (:TYPE-PRESCRIPTION OMAP::MAPP)) (10 10 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE)) (10 10 (:REWRITE SET::IN-SET)) (10 10 (:LINEAR LOWER-BOUND-OF-LEN-WHEN-SUBLISTP)) (10 10 (:LINEAR LISTPOS-UPPER-BOUND-STRONG-2)) (10 10 (:LINEAR LEN-WHEN-PREFIXP)) (10 5 (:REWRITE DEFAULT-+-2)) (10 5 (:REWRITE APPLY$-BADGEP-PROPERTIES . 2)) (10 5 (:LINEAR APPLY$-BADGEP-PROPERTIES . 2)) (5 5 (:REWRITE LEN-MEMBER-EQUAL-LOOP$-AS)) (5 5 (:REWRITE DEFAULT-<-2)) (5 5 (:REWRITE DEFAULT-<-1)) (5 5 (:REWRITE DEFAULT-+-1)) (4 4 (:TYPE-PRESCRIPTION RP::IS-RP-LOOSE$INLINE)) (4 4 (:TYPE-PRESCRIPTION RP::IS-IF$INLINE)) ) (RP::CASESPLITTER (414 1 (:DEFINITION TRUE-LISTP)) (357 3 (:DEFINITION RP::RP-TERM-LISTP)) (293 2 (:LINEAR APPLY$-BADGEP-PROPERTIES . 
1)) (291 1 (:DEFINITION APPLY$-BADGEP)) (197 1 (:DEFINITION SUBSETP-EQUAL)) (184 14 (:DEFINITION MEMBER-EQUAL)) (141 141 (:REWRITE DEFAULT-CDR)) (117 7 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-1)) (89 6 (:REWRITE RP::RP-TERMP-IMPLIES-CDR-LISTP)) (61 61 (:REWRITE DEFAULT-CAR)) (52 4 (:REWRITE OMAP::ALISTP-WHEN-MAPP)) (38 19 (:DEFINITION NTH)) (37 37 (:META RP::BINARY-OR**/AND**-GUARD-META-CORRECT)) (35 11 (:REWRITE RP::IS-IF-RP-TERMP)) (32 2 (:DEFINITION ALISTP)) (31 31 (:TYPE-PRESCRIPTION MEMBER-EQUAL)) (29 29 (:TYPE-PRESCRIPTION RP::EX-FROM-SYNP)) (24 3 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP)) (22 22 (:REWRITE RP::RP-TERMP-SHOULD-TERM-BE-IN-CONS-LHS)) (21 21 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-3)) (20 6 (:REWRITE RP::RP-TERMP-CADR)) (20 4 (:REWRITE OMAP::MFIX-IMPLIES-MAPP)) (20 4 (:REWRITE OMAP::MAPP-WHEN-NOT-EMPTY)) (19 19 (:TYPE-PRESCRIPTION OMAP::MAPP)) (19 19 (:REWRITE NTH-WHEN-PREFIXP)) (19 2 (:DEFINITION NATP)) (17 5 (:REWRITE RP::RP-TERMP-CADDR)) (15 5 (:REWRITE RP::RP-TERMP-SINGLE-STEP-3)) (14 14 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-2)) (13 3 (:REWRITE RP::NOT-INCLUDE-RP)) (12 2 (:DEFINITION LEN)) (10 10 (:TYPE-PRESCRIPTION APPLY$-BADGEP)) (8 8 (:TYPE-PRESCRIPTION OMAP::MFIX)) (8 8 (:TYPE-PRESCRIPTION OMAP::EMPTY)) (8 4 (:REWRITE OMAP::MFIX-WHEN-MAPP)) (8 4 (:REWRITE OMAP::MAPP-NON-NIL-IMPLIES-NON-EMPTY)) (8 2 (:DEFINITION RP::INCLUDE-FNC)) (7 7 (:REWRITE FN-CHECK-DEF-NOT-QUOTE)) (6 6 (:TYPE-PRESCRIPTION SET::SETP-TYPE)) (6 6 (:REWRITE INTEGER-LISTP-IMPLIES-INTEGERP)) (6 3 (:REWRITE OMAP::SETP-WHEN-MAPP)) (6 3 (:REWRITE SET::NONEMPTY-MEANS-SET)) (6 3 (:REWRITE APPLY$-BADGEP-PROPERTIES . 3)) (6 3 (:REWRITE APPLY$-BADGEP-PROPERTIES . 1)) (6 1 (:DEFINITION ALL-NILS)) (5 5 (:TYPE-PRESCRIPTION ALL-NILS)) (5 5 (:REWRITE SYMBOL-LISTP-IMPLIES-SYMBOLP)) (5 5 (:REWRITE DEFAULT-<-2)) (5 5 (:REWRITE DEFAULT-<-1)) (5 1 (:DEFINITION WEAK-APPLY$-BADGE-P)) (4 4 (:TYPE-PRESCRIPTION SUBSETP-EQUAL)) (4 2 (:REWRITE DEFAULT-+-2)) (4 1 (:REWRITE REV-WHEN-NOT-CONSP)) (3 3 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE)) (3 3 (:REWRITE SET::IN-SET)) (2 2 (:TYPE-PRESCRIPTION RP::IS-RP-LOOSE$INLINE)) (2 2 (:TYPE-PRESCRIPTION RP::IS-IF$INLINE)) (2 2 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC)) (2 2 (:REWRITE LEN-MEMBER-EQUAL-LOOP$-AS)) (2 2 (:REWRITE DEFAULT-+-1)) (2 2 (:LINEAR LOWER-BOUND-OF-LEN-WHEN-SUBLISTP)) (2 2 (:LINEAR LISTPOS-UPPER-BOUND-STRONG-2)) (2 2 (:LINEAR LEN-WHEN-PREFIXP)) (2 1 (:REWRITE APPLY$-BADGEP-PROPERTIES . 2)) (2 1 (:LINEAR APPLY$-BADGEP-PROPERTIES . 2)) ) (RP::CASESPLITTER-AUX-CORRECT (4279 3984 (:REWRITE DEFAULT-CDR)) (3610 19 (:DEFINITION RP::EVAL-AND-ALL)) (3224 18 (:DEFINITION RP::RP-TERMP)) (3029 2611 (:REWRITE DEFAULT-CAR)) (2437 4 (:REWRITE RP::RP-EVL-OF-RP-EQUAL2)) (2228 288 (:LINEAR APPLY$-BADGEP-PROPERTIES . 
1)) (2204 38 (:REWRITE RP::EVL-OF-EXTRACT-FROM-RP-2)) (1719 291 (:DEFINITION APPLY$-BADGEP)) (1607 8 (:REWRITE RP::RP-TERMP-OF-RP-TRANS)) (1479 33 (:DEFINITION RP::EX-FROM-RP)) (1257 53 (:REWRITE RP::VALID-SC-CADR)) (1119 1119 (:META RP::BINARY-OR**/AND**-GUARD-META-CORRECT)) (1119 8 (:REWRITE RP::RP-EQUAL-IS-SYMMETRIC)) (1072 59 (:DEFINITION RP::TRANS-LIST)) (1071 8 (:DEFINITION RP::RP-EQUAL)) (1032 100 (:DEFINITION RP::INCLUDE-FNC)) (876 4 (:REWRITE RP::RP-TERMP-OF-CASESPLITTER-AUX)) (838 4 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-LOOSE)) (757 53 (:REWRITE RP::EX-FROM-SYNP-LEMMA1)) (743 52 (:REWRITE RP::RP-TRANS-IS-TERM-WHEN-LIST-IS-ABSENT)) (672 291 (:DEFINITION WEAK-APPLY$-BADGE-P)) (653 51 (:DEFINITION RP::IS-SYNP$INLINE)) (603 4 (:REWRITE RP::RP-EVLT-OF-RP-EQUAL)) (574 50 (:REWRITE RP::NOT-INCLUDE-RP)) (553 553 (:TYPE-PRESCRIPTION RP::RP-TRANS-LST)) (544 34 (:REWRITE RP::RP-TERMP-SINGLE-STEP-3)) (532 4 (:REWRITE RP::RP-EVL-OF-RP-EQUAL)) (442 34 (:DEFINITION NATP)) (435 435 (:TYPE-PRESCRIPTION APPLY$-BADGEP)) (400 3 (:DEFINITION RP::RP-TERM-LISTP)) (266 266 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC)) (242 236 (:REWRITE RP::CONSP-RP-TRANS-LST)) (230 230 (:REWRITE FN-CHECK-DEF-NOT-QUOTE)) (195 171 (:REWRITE RP::ATOM-RP-TERMP-IS-SYMBOLP)) (171 171 (:REWRITE SYMBOL-LISTP-IMPLIES-SYMBOLP)) (170 34 (:REWRITE RP::VALID-SC-CADDR)) (166 166 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC-SUBTERMS)) (154 68 (:REWRITE RP::IS-IF-RP-TERMP)) (152 52 (:REWRITE RP::RP-EVL-OF-VARIABLE)) (138 51 (:REWRITE APPLY$-BADGEP-PROPERTIES . 2)) (131 34 (:LINEAR APPLY$-BADGEP-PROPERTIES . 2)) (114 114 (:REWRITE RP::RP-TERMP-SHOULD-TERM-BE-IN-CONS-LHS)) (108 51 (:REWRITE APPLY$-BADGEP-PROPERTIES . 1)) (93 17 (:REWRITE RP::RP-TERMP-IMPLIES-SUBTERMS)) (78 34 (:REWRITE RP::RP-TERMP-CADDR)) (78 34 (:REWRITE RP::IS-RP-PSEUDO-TERMP)) (76 76 (:TYPE-PRESCRIPTION RP::IS-RP-LOOSE$INLINE)) (76 76 (:TYPE-PRESCRIPTION RP::CONTEXT-FROM-RP)) (76 34 (:REWRITE RP::RP-TERMP-CADR)) (60 15 (:REWRITE RP::VALID-SC-OF-EX-FROM-RP)) (55 55 (:TYPE-PRESCRIPTION BOOLEANP)) (55 53 (:REWRITE RP::RP-EVL-OF-ZP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-UNARY-/-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-UNARY---CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-TYPESPEC-CHECK-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-SYNP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-SYMBOLP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-SYMBOL-PACKAGE-NAME-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-SYMBOL-NAME-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-STRINGP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-SUBTERMS-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-CNT-SUBTERMS-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-CNT-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RETURN-LAST-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-REALPART-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-RATIONALP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-NUMERATOR-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-NATP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-INTERN-IN-PACKAGE-OF-SYMBOL-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-INTEGERP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-IMPLIES-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-IMAGPART-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-IFF-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-HIDE-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-FORCE-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-FORCE$-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-DONT-RW-CONTEXT-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-DENOMINATOR-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CONSP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CONS-CALL)) (55 53 (:REWRITE 
RP::RP-EVL-OF-COMPLEX-RATIONALP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-COERCE-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CODE-CHAR-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CHARACTERP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CHAR-CODE-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CDR-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CASESPLIT-FROM-CONTEXT-TRIG-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-CAR-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-BITP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-BINARY-+-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-BINARY-*-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-BADGE-USERFN-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-BAD-ATOM<=-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-APPLY$-USERFN-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-APPLY$-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-ACL2-NUMBERP-CALL)) (55 53 (:REWRITE RP::RP-EVL-OF-<-CALL)) (52 52 (:REWRITE RP::RP-EVL-OF-QUOTE)) (52 52 (:REWRITE RP::RP-EVL-OF-NOT-CALL)) (52 52 (:REWRITE RP::RP-EVL-OF-LAMBDA)) (52 52 (:REWRITE RP::RP-EVL-OF-FALIST-CALL)) (52 52 (:REWRITE RP::RP-EVL-OF-EQUAL-CALL)) (51 51 (:TYPE-PRESCRIPTION RP::IS-SYNP$INLINE)) (51 51 (:REWRITE INTEGER-LISTP-IMPLIES-INTEGERP)) (40 40 (:TYPE-PRESCRIPTION RP::RP-TERM-LISTP)) (40 17 (:REWRITE RP::RP-TERMP-IMPLIES-CDR-LISTP)) (34 34 (:TYPE-PRESCRIPTION RP::FALIST-CONSISTENT)) (32 8 (:REWRITE RP::RP-EQUAL-REFLEXIVE)) (24 24 (:TYPE-PRESCRIPTION RP::RP-EQUAL)) (19 17 (:REWRITE RP::VALID-SC-CADDDR)) (17 17 (:TYPE-PRESCRIPTION QUOTEP)) (17 17 (:REWRITE DEFAULT-<-2)) (17 17 (:REWRITE DEFAULT-<-1)) (16 16 (:TYPE-PRESCRIPTION RP::RP-EQUAL-SUBTERMS)) (16 8 (:REWRITE RP::RP-EQUAL-SUBTERMS-REFLEXIVE)) (1 1 (:REWRITE RP::RP-EVL-META-EXTRACT-FN-CHECK-DEF)) ) (RP::CASESPLITTER_VALID (15657 6 (:DEFINITION RP::VALID-RULESP)) (14070 6 (:DEFINITION RP::VALID-RULEP)) (14052 6 (:DEFINITION RP::VALID-RULEP-SK)) (14046 6 (:DEFINITION RP::VALID-RULEP-SK-BODY)) (9544 4 (:REWRITE RP::VALID-RULESP-IMPLIES-RULE-LIST-SYNTAXP)) (5952 14 (:DEFINITION APPLY$-BADGEP)) (5682 46 (:LINEAR APPLY$-BADGEP-PROPERTIES . 1)) (4798 2 (:DEFINITION RP::RULE-LIST-SYNTAXP)) (4792 120 (:DEFINITION RP::INCLUDE-FNC)) (3984 6 (:REWRITE RP::RP-EVL-OF-RP-EQUAL)) (3954 6 (:DEFINITION RP::RP-EQUAL)) (3880 25 (:DEFINITION RP::EX-FROM-RP)) (3865 12 (:DEFINITION TRUE-LISTP)) (3816 12 (:DEFINITION RP::VALID-SC-NT)) (3804 40 (:REWRITE RP::NOT-INCLUDE-RP)) (3464 36 (:DEFINITION RP::EVAL-AND-ALL-NT)) (3140 3110 (:REWRITE DEFAULT-CDR)) (3140 25 (:DEFINITION RP::RP-TERM-LISTP)) (3077 211 (:DEFINITION QUOTEP)) (3028 14 (:DEFINITION SUBSETP-EQUAL)) (2880 69 (:REWRITE RP::RP-TERMP-IMPLIES-SUBTERMS)) (2846 196 (:DEFINITION MEMBER-EQUAL)) (2374 38 (:DEFINITION RP::RP-TERMP)) (1959 12 (:REWRITE RP::VALID-RULES-SUBSETP)) (1818 98 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-1)) (1696 1636 (:REWRITE DEFAULT-CAR)) (1227 81 (:REWRITE RP::EX-FROM-SYNP-LEMMA1)) (1210 69 (:REWRITE RP::RP-TERMP-IMPLIES-CDR-LISTP)) (1065 81 (:DEFINITION RP::IS-SYNP$INLINE)) (972 60 (:REWRITE RP::VALID-SC-NT-IS-VALID-SC)) (900 72 (:REWRITE RP::EVAL-AND-ALL-NT-IS-EVAL-AND-ALL)) (826 826 (:META RP::BINARY-OR**/AND**-GUARD-META-CORRECT)) (792 84 (:DEFINITION RP::INCLUDE-FNC-SUBTERMS)) (660 660 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC-SUBTERMS)) (612 612 (:TYPE-PRESCRIPTION RP::INCLUDE-FNC)) (578 185 (:REWRITE RP::IS-IF-RP-TERMP)) (576 6 (:REWRITE RP::RP-EVL-OF-RP-EQUAL2)) (576 6 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-LOOSE)) (434 434 (:TYPE-PRESCRIPTION MEMBER-EQUAL)) (414 69 (:REWRITE RP::EVL-OF-EXTRACT-FROM-RP-2)) (404 10 (:DEFINITION RP::RP-TRANS)) (364 32 (:LINEAR APPLY$-BADGEP-PROPERTIES . 
2)) (337 1 (:DEFINITION RP::VALID-SC)) (294 294 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-3)) (278 278 (:REWRITE RP::RP-TERMP-SHOULD-TERM-BE-IN-CONS-LHS)) (271 28 (:DEFINITION NATP)) (248 31 (:REWRITE SET::SETS-ARE-TRUE-LISTS-CHEAP)) (243 81 (:REWRITE RP::RP-EVL-OF-VARIABLE)) (230 50 (:REWRITE RP::RP-TERMP-CADDDR)) (196 196 (:REWRITE MEMBER-EQUAL-NEWVAR-COMPONENTS-2)) (187 187 (:TYPE-PRESCRIPTION APPLY$-BADGEP)) (184 70 (:REWRITE RP::RP-TERMP-CADR)) (182 14 (:REWRITE OMAP::ALISTP-WHEN-MAPP)) (180 10 (:DEFINITION RP::TRANS-LIST)) (173 65 (:REWRITE RP::RP-TERMP-SINGLE-STEP-3)) (164 65 (:REWRITE RP::RP-TERMP-CADDR)) (164 65 (:REWRITE RP::IS-RP-PSEUDO-TERMP)) (162 162 (:TYPE-PRESCRIPTION RP::IS-RP-LOOSE$INLINE)) (156 156 (:TYPE-PRESCRIPTION RP::EVAL-AND-ALL-NT)) (156 12 (:REWRITE RP::EVAL-AND-ALL-NT-OF-CONTEXT-FROM-RP)) (144 126 (:REWRITE RP::ATOM-RP-TERMP-IS-SYMBOLP)) (144 36 (:DEFINITION RP::RP-RHS$INLINE)) (144 12 (:REWRITE RP::VALID-SC-NT-SUBTERMS-VALID-SC-SUBTERMS)) (144 12 (:REWRITE RP::NOT-INCLUDE-EX-FROM-RP)) (138 1 (:DEFINITION RP::EVAL-AND-ALL)) (126 126 (:REWRITE SYMBOL-LISTP-IMPLIES-SYMBOLP)) (116 58 (:DEFINITION NTH)) (113 17 (:DEFINITION LEN)) (112 7 (:DEFINITION ALISTP)) (110 110 (:TYPE-PRESCRIPTION RP::CASESPLITTER-AUX)) (106 8 (:REWRITE RP::RP-TRANS-IS-TERM-WHEN-LIST-IS-ABSENT)) (104 104 (:TYPE-PRESCRIPTION RP::VALID-SC-NT)) (98 49 (:REWRITE APPLY$-BADGEP-PROPERTIES . 3)) (96 24 (:DEFINITION RP::RP-HYP$INLINE)) (94 94 (:REWRITE FN-CHECK-DEF-NOT-QUOTE)) (90 90 (:TYPE-PRESCRIPTION RP::RP-TRANS-LST)) (87 87 (:TYPE-PRESCRIPTION OMAP::MAPP)) (84 84 (:TYPE-PRESCRIPTION RP::CONTEXT-FROM-RP)) (84 42 (:REWRITE APPLY$-BADGEP-PROPERTIES . 1)) (84 14 (:DEFINITION ALL-NILS)) (81 81 (:TYPE-PRESCRIPTION QUOTEP)) (81 81 (:TYPE-PRESCRIPTION RP::IS-SYNP$INLINE)) (81 81 (:REWRITE RP::RP-EVL-OF-ZP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-UNARY-/-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-UNARY---CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-TYPESPEC-CHECK-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-SYNP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-SYMBOLP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-SYMBOL-PACKAGE-NAME-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-SYMBOL-NAME-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-STRINGP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-SUBTERMS-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-CNT-SUBTERMS-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-CNT-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RP-EQUAL-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RETURN-LAST-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-REALPART-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-RATIONALP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-QUOTE)) (81 81 (:REWRITE RP::RP-EVL-OF-NUMERATOR-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-NOT-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-NATP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-LAMBDA)) (81 81 (:REWRITE RP::RP-EVL-OF-INTERN-IN-PACKAGE-OF-SYMBOL-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-INTEGERP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-IMPLIES-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-IMAGPART-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-IFF-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-IF-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-HIDE-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-FORCE-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-FORCE$-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-FALIST-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-EQUAL-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-DONT-RW-CONTEXT-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-DENOMINATOR-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CONSP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CONS-CALL)) (81 81 (:REWRITE 
RP::RP-EVL-OF-COMPLEX-RATIONALP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-COERCE-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CODE-CHAR-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CHARACTERP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CHAR-CODE-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CDR-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CASESPLIT-FROM-CONTEXT-TRIG-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-CAR-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-BITP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-BINARY-+-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-BINARY-*-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-BADGE-USERFN-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-BAD-ATOM<=-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-APPLY$-USERFN-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-APPLY$-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-ACL2-NUMBERP-CALL)) (81 81 (:REWRITE RP::RP-EVL-OF-<-CALL)) (78 18 (:REWRITE RP::RP-TERMP-CADDDDR)) (77 77 (:TYPE-PRESCRIPTION RP::EX-FROM-SYNP)) (72 12 (:REWRITE RP::EX-FROM-RP-LEMMA1)) (71 14 (:DEFINITION WEAK-APPLY$-BADGE-P)) (70 70 (:TYPE-PRESCRIPTION ALL-NILS)) (70 14 (:REWRITE OMAP::MFIX-IMPLIES-MAPP)) (70 14 (:REWRITE OMAP::MAPP-WHEN-NOT-EMPTY)) (62 62 (:TYPE-PRESCRIPTION SET::SETP-TYPE)) (62 31 (:REWRITE OMAP::SETP-WHEN-MAPP)) (62 31 (:REWRITE SET::NONEMPTY-MEANS-SET)) (58 58 (:REWRITE NTH-WHEN-PREFIXP)) (56 56 (:TYPE-PRESCRIPTION SUBSETP-EQUAL)) (51 51 (:REWRITE INTEGER-LISTP-IMPLIES-INTEGERP)) (48 12 (:REWRITE RP::QUOTEP-TERM-WITH-EX-FROM-RP)) (40 40 (:REWRITE RP::CONSP-RP-TRANS-LST)) (36 12 (:DEFINITION RP::RP-LHS$INLINE)) (35 17 (:REWRITE APPLY$-BADGEP-PROPERTIES . 2)) (34 17 (:REWRITE DEFAULT-+-2)) (31 31 (:TYPE-PRESCRIPTION SET::EMPTY-TYPE)) (31 31 (:REWRITE SET::IN-SET)) (30 30 (:LINEAR LOWER-BOUND-OF-LEN-WHEN-SUBLISTP)) (30 30 (:LINEAR LISTPOS-UPPER-BOUND-STRONG-2)) (30 30 (:LINEAR LEN-WHEN-PREFIXP)) (28 28 (:TYPE-PRESCRIPTION OMAP::MFIX)) (28 28 (:TYPE-PRESCRIPTION OMAP::EMPTY)) (28 14 (:REWRITE OMAP::MFIX-WHEN-MAPP)) (28 14 (:REWRITE OMAP::MAPP-NON-NIL-IMPLIES-NON-EMPTY)) (26 26 (:REWRITE DEFAULT-<-2)) (26 26 (:REWRITE DEFAULT-<-1)) (24 6 (:DEFINITION RP::RP-IFF-FLAG$INLINE)) (20 20 (:TYPE-PRESCRIPTION RP::EX-FROM-RP-ALL2-LST)) (18 18 (:TYPE-PRESCRIPTION RP::RULE-SYNTAXP-FN)) (18 18 (:TYPE-PRESCRIPTION RP::RP-EQUAL)) (17 17 (:REWRITE LEN-MEMBER-EQUAL-LOOP$-AS)) (17 17 (:REWRITE DEFAULT-+-1)) (16 16 (:TYPE-PRESCRIPTION RP::VALID-SC-NT-SUBTERMS)) (16 4 (:REWRITE REV-WHEN-NOT-CONSP)) (13 13 (:TYPE-PRESCRIPTION RP::IS-IF$INLINE)) (12 12 (:TYPE-PRESCRIPTION RP::RP-EQUAL-SUBTERMS)) (12 6 (:REWRITE RP::RP-EQUAL-SUBTERMS-REFLEXIVE)) (12 6 (:REWRITE RP::RP-EQUAL-REFLEXIVE)) (5 3 (:REWRITE RP::VALID-SC-CADR)) (4 2 (:REWRITE RP::VALID-SC-CADDR)) (4 1 (:REWRITE RP::VALID-SC-OF-EX-FROM-RP)) (4 1 (:REWRITE RP::VALID-SC-EX-FROM-RP)) (3 1 (:REWRITE RP::VALID-SC-CADDDR)) (2 2 (:TYPE-PRESCRIPTION RP::CASESPLITTER)) (2 2 (:REWRITE RP::VALID-RULEP-SK-NECC)) (1 1 (:TYPE-PRESCRIPTION RP::EVAL-AND-ALL)) (1 1 (:REWRITE RP::VALID-RP-STATEP-NECC)) (1 1 (:REWRITE RP::VALID-RP-STATE-SYNTAXP-AUX-NECC)) (1 1 (:REWRITE RP::RP-STATE-PRESERVEDP-IMPLIES-VALID-RP-STATEP)) )
4e56b341e42c011b47983e229d80677a98a51413336853452fac83fd31f63c5d
skanev/playground
08-tests.scm
(require rackunit rackunit/text-ui) (load "helpers/simulator.scm") (load "../08.scm") (define sicp-5.08-tests (test-suite "Tests for SICP exercise 5.08" (check-exn (regexp "Duplicate label: here") (lambda () (make-machine '(a) '() '( (goto (label here)) here (assign a (const 3)) (goto (label there)) here (assign a (const 4)) (goto (label there)) there)))) )) (run-tests sicp-5.08-tests)
null
https://raw.githubusercontent.com/skanev/playground/d88e53a7f277b35041c2f709771a0b96f993b310/scheme/sicp/05/tests/08-tests.scm
scheme
(require rackunit rackunit/text-ui) (load "helpers/simulator.scm") (load "../08.scm") (define sicp-5.08-tests (test-suite "Tests for SICP exercise 5.08" (check-exn (regexp "Duplicate label: here") (lambda () (make-machine '(a) '() '( (goto (label here)) here (assign a (const 3)) (goto (label there)) here (assign a (const 4)) (goto (label there)) there)))) )) (run-tests sicp-5.08-tests)
e8dcfa19de89ce96d3eb1e94918d6dcbe57b17a181b7a585f22d7766ef8e431c
Zulu-Inuoe/raw-bindings-sdl2
sdl-temp-string.lisp
raw - bindings - sdl2 - FFI bindings to Written in 2017 by < > ;;; ;;;To the extent possible under law, the author(s) have dedicated all copyright ;;;and related and neighboring rights to this software to the public domain ;;;worldwide. This software is distributed without any warranty. You should have received a copy of the CC0 Public Domain Dedication along ;;;with this software. If not, see ;;;</>. (in-package #:raw-bindings-sdl2) (cffi:define-foreign-type sdl-temp-string-type () () (:documentation "Type for certain SDL functions which return a char* string meant to be freed via SDL_free(). Performs translation into a lisp string and immediately SDL_free()s the pointer, when not null.") (:actual-type :string) (:simple-parser sdl-temp-string)) (defmethod cffi:translate-from-foreign (pointer (type sdl-temp-string-type)) (if (cffi:null-pointer-p pointer) "" (prog1 (cffi:foreign-string-to-lisp pointer :encoding :utf-8) (sdl-free pointer))))
null
https://raw.githubusercontent.com/Zulu-Inuoe/raw-bindings-sdl2/fd219b896f3a0a9daef83cd1b303d806eeffa75a/src/sdl-temp-string.lisp
lisp
To the extent possible under law, the author(s) have dedicated all copyright and related and neighboring rights to this software to the public domain worldwide. This software is distributed without any warranty. You should have received a copy of the CC0 Public Domain Dedication along with this software. If not, see </>.
(in-package #:raw-bindings-sdl2) (cffi:define-foreign-type sdl-temp-string-type () () (:documentation "Type for certain SDL functions which return a char* string meant to be freed via SDL_free(). Performs translation into a lisp string and immediately SDL_free()s the pointer, when not null.") (:actual-type :string) (:simple-parser sdl-temp-string)) (defmethod cffi:translate-from-foreign (pointer (type sdl-temp-string-type)) (if (cffi:null-pointer-p pointer) "" (prog1 (cffi:foreign-string-to-lisp pointer :encoding :utf-8) (sdl-free pointer))))
da9fd90a5d0ffbc55859ad83b507c238ebb09eb0539c0568f6b706045de61e42
haskell/haskell-platform
Templates.hs
# LANGUAGE RecordWildCards # module Templates ( ctxEmpty, ctxAppend, ctxConcat , errorCtx , assocListContext, mapListContext, mapListStrContext , releaseContext, buildConfigContext, platformContext , copyExpandedFile, copyExpandedDir ) where import Control.Monad (foldM, forM_, unless) import qualified Data.Text.Lazy.IO as TL import Data.Version (showVersion) import Development.Shake import Development.Shake.FilePath import System.Directory (copyPermissions) import Text.Hastache import Text.Hastache.Context import Config import PlatformDB import Types import Utils ctxEmpty :: (Monad m) => MuContext m ctxEmpty = const $ return MuNothing ctxAppend :: (Monad m) => MuContext m -> MuContext m -> MuContext m a `ctxAppend` b = ctxConcat [a, b] ctxConcat :: (Monad m) => [MuContext m] -> MuContext m ctxConcat cs t = foldM mix MuNothing cs where mix MuNothing ctx = ctx t mix r _ = return r errorCtx :: (Monad m) => MuContext m errorCtx t = return $ MuLambda $ const msg where msg = "### unknown tag: " ++ decodeStr t ++ " ###" -- | Create a context from an association list. assocListContext :: (Monad m) => [(String, String)] -> MuContext m assocListContext pairs = mkStrContext ex where ex x = case lookup x pairs of Just y -> MuVariable y Nothing -> MuNothing | Create a ` MuList ` by mapping a context creating function over a list . In addition , the context will be augmented to support tags " first " and " last " that are ` MuBool ` values , and " index " which is a ` MuVariable ` ` Int ` . mapListContext :: (Monad m) => (a -> MuContext m) -> [a] -> MuType m mapListContext fCtx vs = MuList . map ctx' . zip [1..] $ vs where ctx' (i,v) t = do r <- fCtx v t case r of MuNothing -> (mkStrContext $ lCtx i) t _ -> return r n = length vs lCtx :: Int -> String -> MuType m lCtx i "first" = MuBool $ i == 1 lCtx i "last" = MuBool $ i == n lCtx i "index" = MuVariable i lCtx _ _ = MuNothing mapListStrContext :: (Monad m) => (a -> String -> MuType m) -> [a] -> MuType m mapListStrContext fCtx = mapListContext (mkStrContext . fCtx) releaseContext :: Action (MuContext Action) releaseContext = askHpRelease >>= return . expandRelease fullReleaseContext :: Action (MuContext Action) fullReleaseContext = askHpRelease >>= return . expandFullRelease expandRelease :: (Monad m) => Release -> MuContext m expandRelease rel = mkStrContext ex where ex "hpVersion" = MuVariable . showVersion . hpVersion . relVersion $ rel ex "ghcVersion" = case pkgsThat [isGhc, not . isLib, not . isTool] of [] -> error "No ghc version spec'd in release." [ghcPkg] -> MuVariable . showVersion . pkgVersion $ ghcPkg _ -> error "More than one ghc version spec'd in release." ex "ghcLibs" = exPkgs $ pkgsThat [isGhc, isLib] ex "nonWindowsLibs" = exPkgs $ pkgsThat [isNotWindows] ex "onlyWindowsLibs" = exPkgs $ pkgsThat [isWindows] ex "platformLibs" = exPkgs $ pkgsThat [not . 
isGhc, isLib] ex "tools" = exPkgs $ pkgsThat [isTool] ex _ = MuNothing pkgsThat tests = packagesByIncludeFilter (\i -> all ($i) tests) False rel exPkgs = mapListStrContext exPkg exPkg p "name" = MuVariable $ pad 30 $ pkgName p exPkg p "version" = MuVariable $ showVersion $ pkgVersion p exPkg _ _ = MuNothing pad n s = s ++ replicate (n - length s) ' ' expandFullRelease :: (Monad m) => Release -> MuContext m expandFullRelease rel = mkStrContext ex where ex "fullLibs" = mapListStrContext exPkg $ map snd (relIncludes rel) ex _ = MuNothing exPkg p "name" = MuVariable $ pad 30 $ pkgName p exPkg p "version" = MuVariable $ showVersion $ pkgVersion p exPkg _ _ = MuNothing pad n s = s ++ replicate (n - length s) ' ' buildConfigContext :: Action (MuContext Action) buildConfigContext = askBuildConfig >>= return . expandBuildConfig expandBuildConfig :: (Monad m) => BuildConfig -> MuContext m expandBuildConfig BuildConfig{..} = mkStrContext ex where ex "hpVersion" = MuVariable . showVersion . hpVersion $ bcHpVersion ex "ghcVersion" = MuVariable . showVersion . ghcVersion $ bcGhcVersion ex "arch" = MuVariable bcArch ex _ = MuNothing platformContext :: Action (MuContext Action) platformContext = do rlsCtx <- releaseContext fullRlsCtx <- fullReleaseContext bcCtx <- buildConfigContext return $ ctxConcat [bcCtx, rlsCtx, fullRlsCtx, errorCtx] templateDirname :: String templateDirname = "templates" copyExpandedFile :: MuContext Action -> FilePath -> FilePath -> Action () copyExpandedFile = copyExpanded' False copyExpandedDir :: MuContext Action -> FilePath -> FilePath -> Action () copyExpandedDir = copyExpanded' True copyExpanded' :: Bool -> MuContext Action -> FilePath -> FilePath -> Action () copyExpanded' isDir ctx srcTop dstTop = copyTop srcTop dstTop where copyTop = if isDir then copyTree (== templateDirname) else expandFile copyTree skipDir src dst = do putLoud $ " dir " ++ src makeDirectory dst srcFiles <- getDirectoryFiles src ["*"] forM_ srcFiles $ \f -> copyOne (src </> f) (dst </> f) srcDirs <- getDirectoryDirs src forM_ srcDirs $ \d -> unless (skipDir d) $ copyTree (const False) (src </> d) (dst </> d) copyOne src dst = do putLoud $ " file " ++ src case takeExtension src of ".mu" -> expandFile src (dropExtension dst) _ -> copyFile' src dst expandFile from to = do let conf = case takeExtension to of ".html" -> muHtmlConf _ -> muPlainConf need [from] hastacheFile conf from ctx >>= liftIO . TL.writeFile to liftIO $ copyPermissions from to muPlainConf = MuConfig { muEscapeFunc = emptyEscape , muTemplateFileDir = if isDir then Just $ srcTop </> templateDirname else Nothing , muTemplateFileExt = Just ".mu" , muTemplateRead = \f -> need [f] >> muTemplateRead defaultConfig f } muHtmlConf = muPlainConf { muEscapeFunc = htmlEscape }
null
https://raw.githubusercontent.com/haskell/haskell-platform/6357fb6645782278f43fc8340bf46771f1c2768d/hptool/src/Templates.hs
haskell
| Create a context from an association list.
# LANGUAGE RecordWildCards # module Templates ( ctxEmpty, ctxAppend, ctxConcat , errorCtx , assocListContext, mapListContext, mapListStrContext , releaseContext, buildConfigContext, platformContext , copyExpandedFile, copyExpandedDir ) where import Control.Monad (foldM, forM_, unless) import qualified Data.Text.Lazy.IO as TL import Data.Version (showVersion) import Development.Shake import Development.Shake.FilePath import System.Directory (copyPermissions) import Text.Hastache import Text.Hastache.Context import Config import PlatformDB import Types import Utils ctxEmpty :: (Monad m) => MuContext m ctxEmpty = const $ return MuNothing ctxAppend :: (Monad m) => MuContext m -> MuContext m -> MuContext m a `ctxAppend` b = ctxConcat [a, b] ctxConcat :: (Monad m) => [MuContext m] -> MuContext m ctxConcat cs t = foldM mix MuNothing cs where mix MuNothing ctx = ctx t mix r _ = return r errorCtx :: (Monad m) => MuContext m errorCtx t = return $ MuLambda $ const msg where msg = "### unknown tag: " ++ decodeStr t ++ " ###" assocListContext :: (Monad m) => [(String, String)] -> MuContext m assocListContext pairs = mkStrContext ex where ex x = case lookup x pairs of Just y -> MuVariable y Nothing -> MuNothing | Create a ` MuList ` by mapping a context creating function over a list . In addition , the context will be augmented to support tags " first " and " last " that are ` MuBool ` values , and " index " which is a ` MuVariable ` ` Int ` . mapListContext :: (Monad m) => (a -> MuContext m) -> [a] -> MuType m mapListContext fCtx vs = MuList . map ctx' . zip [1..] $ vs where ctx' (i,v) t = do r <- fCtx v t case r of MuNothing -> (mkStrContext $ lCtx i) t _ -> return r n = length vs lCtx :: Int -> String -> MuType m lCtx i "first" = MuBool $ i == 1 lCtx i "last" = MuBool $ i == n lCtx i "index" = MuVariable i lCtx _ _ = MuNothing mapListStrContext :: (Monad m) => (a -> String -> MuType m) -> [a] -> MuType m mapListStrContext fCtx = mapListContext (mkStrContext . fCtx) releaseContext :: Action (MuContext Action) releaseContext = askHpRelease >>= return . expandRelease fullReleaseContext :: Action (MuContext Action) fullReleaseContext = askHpRelease >>= return . expandFullRelease expandRelease :: (Monad m) => Release -> MuContext m expandRelease rel = mkStrContext ex where ex "hpVersion" = MuVariable . showVersion . hpVersion . relVersion $ rel ex "ghcVersion" = case pkgsThat [isGhc, not . isLib, not . isTool] of [] -> error "No ghc version spec'd in release." [ghcPkg] -> MuVariable . showVersion . pkgVersion $ ghcPkg _ -> error "More than one ghc version spec'd in release." ex "ghcLibs" = exPkgs $ pkgsThat [isGhc, isLib] ex "nonWindowsLibs" = exPkgs $ pkgsThat [isNotWindows] ex "onlyWindowsLibs" = exPkgs $ pkgsThat [isWindows] ex "platformLibs" = exPkgs $ pkgsThat [not . 
isGhc, isLib] ex "tools" = exPkgs $ pkgsThat [isTool] ex _ = MuNothing pkgsThat tests = packagesByIncludeFilter (\i -> all ($i) tests) False rel exPkgs = mapListStrContext exPkg exPkg p "name" = MuVariable $ pad 30 $ pkgName p exPkg p "version" = MuVariable $ showVersion $ pkgVersion p exPkg _ _ = MuNothing pad n s = s ++ replicate (n - length s) ' ' expandFullRelease :: (Monad m) => Release -> MuContext m expandFullRelease rel = mkStrContext ex where ex "fullLibs" = mapListStrContext exPkg $ map snd (relIncludes rel) ex _ = MuNothing exPkg p "name" = MuVariable $ pad 30 $ pkgName p exPkg p "version" = MuVariable $ showVersion $ pkgVersion p exPkg _ _ = MuNothing pad n s = s ++ replicate (n - length s) ' ' buildConfigContext :: Action (MuContext Action) buildConfigContext = askBuildConfig >>= return . expandBuildConfig expandBuildConfig :: (Monad m) => BuildConfig -> MuContext m expandBuildConfig BuildConfig{..} = mkStrContext ex where ex "hpVersion" = MuVariable . showVersion . hpVersion $ bcHpVersion ex "ghcVersion" = MuVariable . showVersion . ghcVersion $ bcGhcVersion ex "arch" = MuVariable bcArch ex _ = MuNothing platformContext :: Action (MuContext Action) platformContext = do rlsCtx <- releaseContext fullRlsCtx <- fullReleaseContext bcCtx <- buildConfigContext return $ ctxConcat [bcCtx, rlsCtx, fullRlsCtx, errorCtx] templateDirname :: String templateDirname = "templates" copyExpandedFile :: MuContext Action -> FilePath -> FilePath -> Action () copyExpandedFile = copyExpanded' False copyExpandedDir :: MuContext Action -> FilePath -> FilePath -> Action () copyExpandedDir = copyExpanded' True copyExpanded' :: Bool -> MuContext Action -> FilePath -> FilePath -> Action () copyExpanded' isDir ctx srcTop dstTop = copyTop srcTop dstTop where copyTop = if isDir then copyTree (== templateDirname) else expandFile copyTree skipDir src dst = do putLoud $ " dir " ++ src makeDirectory dst srcFiles <- getDirectoryFiles src ["*"] forM_ srcFiles $ \f -> copyOne (src </> f) (dst </> f) srcDirs <- getDirectoryDirs src forM_ srcDirs $ \d -> unless (skipDir d) $ copyTree (const False) (src </> d) (dst </> d) copyOne src dst = do putLoud $ " file " ++ src case takeExtension src of ".mu" -> expandFile src (dropExtension dst) _ -> copyFile' src dst expandFile from to = do let conf = case takeExtension to of ".html" -> muHtmlConf _ -> muPlainConf need [from] hastacheFile conf from ctx >>= liftIO . TL.writeFile to liftIO $ copyPermissions from to muPlainConf = MuConfig { muEscapeFunc = emptyEscape , muTemplateFileDir = if isDir then Just $ srcTop </> templateDirname else Nothing , muTemplateFileExt = Just ".mu" , muTemplateRead = \f -> need [f] >> muTemplateRead defaultConfig f } muHtmlConf = muPlainConf { muEscapeFunc = htmlEscape }
4ab616e45caa513ae110d237ddef8d82cf7ebc0721698558c596f52096606ece
homegrownlabs/sim-template
sim.clj
(ns {{namespace}}.sim "This namespace contains the logic for creating and running a simulation." (:require [clojure.tools.logging :as l] [simulant.sim :as sim] [simulant.util :refer [e tx-ent]] [datomic.api :as d] [cheshire.core :as json] [clj-http.client :as http] [{{namespace}}.test :as test] [{{namespace}}.util :as util])) (defmethod sim/create-sim :test.type/sample [conn test sim] (-> @(d/transact conn (sim/construct-basic-sim test sim)) (tx-ent (e sim)))) (defn find-test [context] (let [{:keys [conn test-name]} context test (test/find-by-name (d/db conn) test-name)] (assert test (str "Test with name " test-name " must exist.")) (assoc context :test test))) (defn- retrieve-codebase-ent [host] (l/info (str "Retrieving codebase info for " host)) (let [resp (http/get (str host "/version")) body (json/parse-string (:body resp) keyword)] (assoc (util/codebase-ent :sim (:repo body) (:sha body)) :codebase/host host))) (defn- setup-sim "Create and setup a sim, capturing metadata about the system under test and creating associated internal entities." [context] (l/info "Setting up sim") (let [{:keys [conn test process-count clock-multiplier]} context sim-def {:db/id (d/tempid :sim) :source/codebase (util/codebase-ent :sim) :sim/codebases [(retrieve-codebase-ent (:test/host test))] :sim/processCount process-count} sim (sim/create-sim conn test sim-def)] (sim/create-action-log conn sim) (sim/create-process-state conn sim) (sim/create-fixed-clock conn sim {:clock/multiplier clock-multiplier}) (assoc context :sim sim))) (defn- setup-system "Perform miscellaneous setup actions on the target system" [context] (l/info "Setting up target system") ;; Create a simulation.target-system namespace, and perform any necessary ;; actions on it. Make sure to return an original or modified context from ;; those functions. ;; ;; For example: ;; (-> context ;; target-system/create-accounts) context) (defn- setup-actions "Require action namespaces, so their multimethod definitions get loaded." [context] (require '{{namespace}}.actions.sample) context) (defn- start-sim "Initiate processes for sim, effectively starting the sim." [context] (let [{:keys [uri sim]} context run-process-fn #(sim/run-sim-process uri (e sim)) _ (l/info (str "Launching " (:sim/processCount sim) " sim processes")) processes (repeatedly (:sim/processCount sim) run-process-fn)] (doall processes) (assoc context :processes processes))) (defn- await-sim "Wait for all sim processes to complete." [context] (l/info "Awaiting sim processes...") (doseq [p (:processes context)] @(:runner p)) (l/info "Sim complete") context) (defn run-sim! "Setup and run a sim. Awaits completion of each agent process." [uri test-name process-count clock-multiplier] (-> {:uri uri :conn (d/connect uri) :test-name test-name :process-count process-count :clock-multiplier clock-multiplier} find-test setup-sim setup-system setup-actions start-sim await-sim)) ;; == Sim retrieval ======================================== (defn all-sims [db] (->> (d/q '[:find ?sim ?test-name ?created :where [?test :test/sims ?sim ?tx] [?test :test/name ?test-name] [?tx :db/txInstant ?created]] db) (map #(zipmap [:sim :test-name :created] %)))) (defn sims-for-test [db test-name] (->> (d/q '[:find ?sim ?test-name ?created :in $ ?test :where [?test :test/sims ?sim ?tx] [?test :test/name ?test-name] [?tx :db/txInstant ?created]] db [:test/name test-name]) (map #(zipmap [:sim :test-name :created] %)))) (defn list-sims "Return listing of sims in database." 
[db test-name] (if test-name (sims-for-test db test-name) (all-sims db))) (defn latest-sim [db test-name] (->> (sims-for-test db test-name) (sort-by :created) last :sim))
null
https://raw.githubusercontent.com/homegrownlabs/sim-template/2ddba7c1a3c2a17aff1e1ed30bada941bcb938a3/src/leiningen/new/sim_test/src/sim.clj
clojure
Create a simulation.target-system namespace, and perform any necessary actions on it. Make sure to return an original or modified context from those functions. For example: (-> context target-system/create-accounts) == Sim retrieval ========================================
(ns {{namespace}}.sim "This namespace contains the logic for creating and running a simulation." (:require [clojure.tools.logging :as l] [simulant.sim :as sim] [simulant.util :refer [e tx-ent]] [datomic.api :as d] [cheshire.core :as json] [clj-http.client :as http] [{{namespace}}.test :as test] [{{namespace}}.util :as util])) (defmethod sim/create-sim :test.type/sample [conn test sim] (-> @(d/transact conn (sim/construct-basic-sim test sim)) (tx-ent (e sim)))) (defn find-test [context] (let [{:keys [conn test-name]} context test (test/find-by-name (d/db conn) test-name)] (assert test (str "Test with name " test-name " must exist.")) (assoc context :test test))) (defn- retrieve-codebase-ent [host] (l/info (str "Retrieving codebase info for " host)) (let [resp (http/get (str host "/version")) body (json/parse-string (:body resp) keyword)] (assoc (util/codebase-ent :sim (:repo body) (:sha body)) :codebase/host host))) (defn- setup-sim "Create and setup a sim, capturing metadata about the system under test and creating associated internal entities." [context] (l/info "Setting up sim") (let [{:keys [conn test process-count clock-multiplier]} context sim-def {:db/id (d/tempid :sim) :source/codebase (util/codebase-ent :sim) :sim/codebases [(retrieve-codebase-ent (:test/host test))] :sim/processCount process-count} sim (sim/create-sim conn test sim-def)] (sim/create-action-log conn sim) (sim/create-process-state conn sim) (sim/create-fixed-clock conn sim {:clock/multiplier clock-multiplier}) (assoc context :sim sim))) (defn- setup-system "Perform miscellaneous setup actions on the target system" [context] (l/info "Setting up target system") context) (defn- setup-actions "Require action namespaces, so their multimethod definitions get loaded." [context] (require '{{namespace}}.actions.sample) context) (defn- start-sim "Initiate processes for sim, effectively starting the sim." [context] (let [{:keys [uri sim]} context run-process-fn #(sim/run-sim-process uri (e sim)) _ (l/info (str "Launching " (:sim/processCount sim) " sim processes")) processes (repeatedly (:sim/processCount sim) run-process-fn)] (doall processes) (assoc context :processes processes))) (defn- await-sim "Wait for all sim processes to complete." [context] (l/info "Awaiting sim processes...") (doseq [p (:processes context)] @(:runner p)) (l/info "Sim complete") context) (defn run-sim! "Setup and run a sim. Awaits completion of each agent process." [uri test-name process-count clock-multiplier] (-> {:uri uri :conn (d/connect uri) :test-name test-name :process-count process-count :clock-multiplier clock-multiplier} find-test setup-sim setup-system setup-actions start-sim await-sim)) (defn all-sims [db] (->> (d/q '[:find ?sim ?test-name ?created :where [?test :test/sims ?sim ?tx] [?test :test/name ?test-name] [?tx :db/txInstant ?created]] db) (map #(zipmap [:sim :test-name :created] %)))) (defn sims-for-test [db test-name] (->> (d/q '[:find ?sim ?test-name ?created :in $ ?test :where [?test :test/sims ?sim ?tx] [?test :test/name ?test-name] [?tx :db/txInstant ?created]] db [:test/name test-name]) (map #(zipmap [:sim :test-name :created] %)))) (defn list-sims "Return listing of sims in database." [db test-name] (if test-name (sims-for-test db test-name) (all-sims db))) (defn latest-sim [db test-name] (->> (sims-for-test db test-name) (sort-by :created) last :sim))
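A minimal usage sketch for the sim.clj template record above (not part of the original repository): it shows how run-sim! and the sim-retrieval helpers might be called once the {{namespace}} placeholder has been filled in by the Leiningen template. The Datomic URI, the test name "sample", the process count, and the clock multiplier are illustrative assumptions only; a test with that name must already have been created, as find-test asserts its existence.

(comment
  ;; Hypothetical values: an in-memory Datomic URI and a previously created test.
  (def uri "datomic:mem://sim-example")
  ;; Run the "sample" test with 2 agent processes at 10x simulated clock speed;
  ;; run-sim! blocks until every sim process has completed.
  (run-sim! uri "sample" 2 10)
  ;; Inspect the sims recorded for that test.
  (require '[datomic.api :as d])
  (list-sims (d/db (d/connect uri)) "sample")
  (latest-sim (d/db (d/connect uri)) "sample"))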
9de72f9264e5e88e1c96116aadb299f1eaa0a00c0aaa4306f6f018e1554ee650
killme2008/clj-rate-limiter
core.clj
(ns clj-rate-limiter.core (:import (java.util TimerTask Timer)) (:require [taoensso.carmine :as car] [clojure.core.cache :as cache])) (defn- ttl-cache [interval] (atom (cache/ttl-cache-factory {} :ttl interval))) (defprotocol RateLimiter "Rate limiter for clojure." (allow? [this id] "Return true if the request can be allowd by rate limiter.") (permit? [this id] "Return {:result true} if the request can be permited by rate limiter, Otherwise returns {:result false :current requests}.") (remove-permit [this id ts] "Remove the permit by id and permit timestamp.")) (defprotocol RateLimiterFactory "A factory to create RateLimiter" (create [this] "Return an RateLimiter instance.")) (defn- set-timeout [f interval] (let [task (proxy [TimerTask] [] (run [] (f))) timer (new Timer)] (.schedule timer task (long interval)) timer)) (defn- clear-timeout [^Timer timer] (try (.cancel timer) (catch Throwable _))) (definline mills->nanos [m] `(* 1000000 ~m)) (definline nanos->mills [n] `(long (/ ~n 1000000))) (defn- calc-result [now first-req too-many-in-interval? time-since-last-req min-difference interval] (if (or too-many-in-interval? (when (and min-difference time-since-last-req) (< time-since-last-req (mills->nanos min-difference)))) (long (Math/floor (min (nanos->mills (+ (- first-req now) (mills->nanos interval))) (if min-difference (- min-difference (nanos->mills time-since-last-req)) (Double/MAX_VALUE))))) 0)) (deftype MemoryRateLimiterFactory [opts] RateLimiterFactory (create [this] (let [timeouts (atom {}) {:keys [interval min-difference max-in-interval namespace flood-threshold] :or {namespace "clj-rate"}} opts flood-cache (ttl-cache interval) lock (Object.) storage (atom {})] (reify RateLimiter (allow? [this id] (:result (permit? this id))) (permit? [_ id] ;;It must not be in flood cache (when-not (and flood-threshold (cache/lookup @flood-cache (or id ""))) (let [id (or id "") now (System/nanoTime) key (format "%s-%s" namespace id) before (- now (mills->nanos interval))] (when-let [t (get @timeouts key)] (clear-timeout t)) (let [user-set (locking lock (let [new-set (into {} (->> key (get @storage) (filter #(-> % first (> before)))))] (swap! storage assoc key new-set) new-set)) total (count user-set) user-set (filter #(-> % second not) user-set) current (count user-set) too-many-in-interval? (>= current max-in-interval) flood-req? (and flood-threshold too-many-in-interval? (>= current (* flood-threshold max-in-interval))) time-since-last-req (when (and min-difference (first (last user-set))) (- now (first (last user-set))))] (when flood-req? (swap! flood-cache assoc id true)) (let [ret (calc-result now (ffirst user-set) too-many-in-interval? time-since-last-req min-difference interval)] (swap! storage update-in [key] (fn [s] (assoc s now false))) (swap! timeouts assoc key (set-timeout (fn [] (swap! storage dissoc key)) (:interval opts))) (let [ret ((complement pos?) ret)] (if ret {:result ret :ts now :current current :total total} {:result ret :ts now :current current :total total}))))))) (remove-permit [_ id ts] (let [id (or id "") key (format "%s-%s" namespace id)] (when ts (swap! 
storage update-in [key] (fn [s] (assoc s ts true)))))))))) (defn- release-key [key] (format "%s-rs" key)) (defn- exec-batch [redis pool key stamp before now expire-secs min-difference] (car/wcar {:spec redis :pool pool} (car/multi) (car/zremrangebyscore key 0 before) (car/zcard key) (car/zcard (release-key key)) (car/zrangebyscore key "-inf" "+inf" "LIMIT" 0 1) (when min-difference (car/zrevrangebyscore key "+inf" "-inf" "LIMIT" 0 1)) (car/zadd key now stamp) (car/expire key expire-secs) (car/exec))) (defn- match-exec-ret [ret min-difference] (if min-difference (let [[_ total rs-total [first-req] [last-req] _ _] (last ret)] [total rs-total first-req last-req]) (let [[_ total rs-total [first-req] _ _] (last ret)] [total rs-total first-req]))) (defn- calc-result-in-millis [now first-req too-many-in-interval? time-since-last-req min-difference interval] (if (or too-many-in-interval? (when (and min-difference time-since-last-req) (< time-since-last-req min-difference))) (long (Math/floor (min (+ (- first-req now) interval) (if min-difference (- min-difference time-since-last-req) (Double/MAX_VALUE))))) 0)) (deftype RedisRateLimiterFactory [opts] RateLimiterFactory (create [this] (let [{:keys [interval min-difference max-in-interval namespace redis flood-threshold pool] :or {namespace "clj-rate"}} opts flood-cache (ttl-cache interval) expire-secs (long (Math/ceil (/ interval 1000)))] (reify RateLimiter (allow? [this id] (:result (permit? this id))) (permit? [_ id] (when-not (and flood-threshold (cache/lookup @flood-cache (or id ""))) (let [id (or id "") stamp (System/nanoTime) now (System/currentTimeMillis) key (format "%s-%s" namespace id) before (- now interval)] (let [exec-ret (exec-batch redis pool key stamp before now expire-secs min-difference) [total rs-total first-req last-req] (match-exec-ret exec-ret min-difference) too-many-in-interval? (>= total max-in-interval) flood-req? (and flood-threshold too-many-in-interval? (>= total (* flood-threshold max-in-interval))) time-since-last-req (when (and min-difference last-req) (- now (Long/valueOf ^String last-req)))] (when flood-req? (swap! flood-cache assoc id true)) (let [ret ((complement pos?) (calc-result-in-millis now (when first-req (Long/valueOf ^String first-req)) too-many-in-interval? time-since-last-req min-difference interval))] (if ret {:result ret :ts stamp :current total :total (+ total rs-total)} {:result ret :ts stamp :current total :total (+ total rs-total)})))))) (remove-permit [_ id ts] (let [id (or id "") key (format "%s-%s" namespace id) now (System/currentTimeMillis) before (- now interval)] (when (and ts (pos? ts)) (car/wcar {:spec redis :pool pool} (car/multi) (car/zrem key ts) (car/zremrangebyscore (release-key key) 0 before) (car/zadd (release-key key) now ts) (car/expire (release-key key) expire-secs) (car/exec))))))))) (defn rate-limiter-factory "Returns a rate limiter factory by type and options. Valid type includes :memory and :redis, for example: 100 requests in 1 seconds . (def rt (rate-limiter-factory :memory :interval 1000 :max-in-interval 100)) " [type & {:as opts}] (case type :memory (MemoryRateLimiterFactory. opts) :redis (RedisRateLimiterFactory. 
opts) (throw (ex-info (format "Unknown rate limiter type:%s" type) {:type type})))) (comment (defn- benchmark [] (let [rf (rate-limiter-factory :redis :redis {:spec {:host "localhost" :port 6379 :timeout 5000} :pool {:max-active (* 3 (.availableProcessors (Runtime/getRuntime))) :min-idle (.availableProcessors (Runtime/getRuntime)) :max-wait 5000}} :flood-threshold 10 :interval 1000 :max-in-interval 100000) r (create rf) cost (atom {:total 0 :current 0 :max_total 0 :max_concurrent 0 :times 0}) ts 100 cl (java.util.concurrent.CountDownLatch. ts)] (time (do (dotimes [n ts] (-> (fn [] (dotimes [m 10000] (let [{:keys [ts result total current]} (permit? r (mod m 1))] (when result (swap! cost (fn [{:keys [total current times max_total max_concurrent]} t c] {:total (+ total t) :current (+ current c) :max_total (if (< max_total t) t max_total) :max_concurrent (if (< max_concurrent c) c max_concurrent) :times (inc times)}) total current)) (remove-permit r (mod m 1) ts))) (.countDown cl)) (Thread.) (.start))) (.await cl) (println @cost))))))
null
https://raw.githubusercontent.com/killme2008/clj-rate-limiter/26f3689b9efa9cce8b3f38a7f679e3e27a4d75f5/src/clj_rate_limiter/core.clj
clojure
It must not be in flood cache
(ns clj-rate-limiter.core (:import (java.util TimerTask Timer)) (:require [taoensso.carmine :as car] [clojure.core.cache :as cache])) (defn- ttl-cache [interval] (atom (cache/ttl-cache-factory {} :ttl interval))) (defprotocol RateLimiter "Rate limiter for clojure." (allow? [this id] "Return true if the request can be allowd by rate limiter.") (permit? [this id] "Return {:result true} if the request can be permited by rate limiter, Otherwise returns {:result false :current requests}.") (remove-permit [this id ts] "Remove the permit by id and permit timestamp.")) (defprotocol RateLimiterFactory "A factory to create RateLimiter" (create [this] "Return an RateLimiter instance.")) (defn- set-timeout [f interval] (let [task (proxy [TimerTask] [] (run [] (f))) timer (new Timer)] (.schedule timer task (long interval)) timer)) (defn- clear-timeout [^Timer timer] (try (.cancel timer) (catch Throwable _))) (definline mills->nanos [m] `(* 1000000 ~m)) (definline nanos->mills [n] `(long (/ ~n 1000000))) (defn- calc-result [now first-req too-many-in-interval? time-since-last-req min-difference interval] (if (or too-many-in-interval? (when (and min-difference time-since-last-req) (< time-since-last-req (mills->nanos min-difference)))) (long (Math/floor (min (nanos->mills (+ (- first-req now) (mills->nanos interval))) (if min-difference (- min-difference (nanos->mills time-since-last-req)) (Double/MAX_VALUE))))) 0)) (deftype MemoryRateLimiterFactory [opts] RateLimiterFactory (create [this] (let [timeouts (atom {}) {:keys [interval min-difference max-in-interval namespace flood-threshold] :or {namespace "clj-rate"}} opts flood-cache (ttl-cache interval) lock (Object.) storage (atom {})] (reify RateLimiter (allow? [this id] (:result (permit? this id))) (permit? [_ id] (when-not (and flood-threshold (cache/lookup @flood-cache (or id ""))) (let [id (or id "") now (System/nanoTime) key (format "%s-%s" namespace id) before (- now (mills->nanos interval))] (when-let [t (get @timeouts key)] (clear-timeout t)) (let [user-set (locking lock (let [new-set (into {} (->> key (get @storage) (filter #(-> % first (> before)))))] (swap! storage assoc key new-set) new-set)) total (count user-set) user-set (filter #(-> % second not) user-set) current (count user-set) too-many-in-interval? (>= current max-in-interval) flood-req? (and flood-threshold too-many-in-interval? (>= current (* flood-threshold max-in-interval))) time-since-last-req (when (and min-difference (first (last user-set))) (- now (first (last user-set))))] (when flood-req? (swap! flood-cache assoc id true)) (let [ret (calc-result now (ffirst user-set) too-many-in-interval? time-since-last-req min-difference interval)] (swap! storage update-in [key] (fn [s] (assoc s now false))) (swap! timeouts assoc key (set-timeout (fn [] (swap! storage dissoc key)) (:interval opts))) (let [ret ((complement pos?) ret)] (if ret {:result ret :ts now :current current :total total} {:result ret :ts now :current current :total total}))))))) (remove-permit [_ id ts] (let [id (or id "") key (format "%s-%s" namespace id)] (when ts (swap! 
storage update-in [key] (fn [s] (assoc s ts true)))))))))) (defn- release-key [key] (format "%s-rs" key)) (defn- exec-batch [redis pool key stamp before now expire-secs min-difference] (car/wcar {:spec redis :pool pool} (car/multi) (car/zremrangebyscore key 0 before) (car/zcard key) (car/zcard (release-key key)) (car/zrangebyscore key "-inf" "+inf" "LIMIT" 0 1) (when min-difference (car/zrevrangebyscore key "+inf" "-inf" "LIMIT" 0 1)) (car/zadd key now stamp) (car/expire key expire-secs) (car/exec))) (defn- match-exec-ret [ret min-difference] (if min-difference (let [[_ total rs-total [first-req] [last-req] _ _] (last ret)] [total rs-total first-req last-req]) (let [[_ total rs-total [first-req] _ _] (last ret)] [total rs-total first-req]))) (defn- calc-result-in-millis [now first-req too-many-in-interval? time-since-last-req min-difference interval] (if (or too-many-in-interval? (when (and min-difference time-since-last-req) (< time-since-last-req min-difference))) (long (Math/floor (min (+ (- first-req now) interval) (if min-difference (- min-difference time-since-last-req) (Double/MAX_VALUE))))) 0)) (deftype RedisRateLimiterFactory [opts] RateLimiterFactory (create [this] (let [{:keys [interval min-difference max-in-interval namespace redis flood-threshold pool] :or {namespace "clj-rate"}} opts flood-cache (ttl-cache interval) expire-secs (long (Math/ceil (/ interval 1000)))] (reify RateLimiter (allow? [this id] (:result (permit? this id))) (permit? [_ id] (when-not (and flood-threshold (cache/lookup @flood-cache (or id ""))) (let [id (or id "") stamp (System/nanoTime) now (System/currentTimeMillis) key (format "%s-%s" namespace id) before (- now interval)] (let [exec-ret (exec-batch redis pool key stamp before now expire-secs min-difference) [total rs-total first-req last-req] (match-exec-ret exec-ret min-difference) too-many-in-interval? (>= total max-in-interval) flood-req? (and flood-threshold too-many-in-interval? (>= total (* flood-threshold max-in-interval))) time-since-last-req (when (and min-difference last-req) (- now (Long/valueOf ^String last-req)))] (when flood-req? (swap! flood-cache assoc id true)) (let [ret ((complement pos?) (calc-result-in-millis now (when first-req (Long/valueOf ^String first-req)) too-many-in-interval? time-since-last-req min-difference interval))] (if ret {:result ret :ts stamp :current total :total (+ total rs-total)} {:result ret :ts stamp :current total :total (+ total rs-total)})))))) (remove-permit [_ id ts] (let [id (or id "") key (format "%s-%s" namespace id) now (System/currentTimeMillis) before (- now interval)] (when (and ts (pos? ts)) (car/wcar {:spec redis :pool pool} (car/multi) (car/zrem key ts) (car/zremrangebyscore (release-key key) 0 before) (car/zadd (release-key key) now ts) (car/expire (release-key key) expire-secs) (car/exec))))))))) (defn rate-limiter-factory "Returns a rate limiter factory by type and options. Valid type includes :memory and :redis, for example: 100 requests in 1 seconds . (def rt (rate-limiter-factory :memory :interval 1000 :max-in-interval 100)) " [type & {:as opts}] (case type :memory (MemoryRateLimiterFactory. opts) :redis (RedisRateLimiterFactory. 
opts) (throw (ex-info (format "Unknown rate limiter type:%s" type) {:type type})))) (comment (defn- benchmark [] (let [rf (rate-limiter-factory :redis :redis {:spec {:host "localhost" :port 6379 :timeout 5000} :pool {:max-active (* 3 (.availableProcessors (Runtime/getRuntime))) :min-idle (.availableProcessors (Runtime/getRuntime)) :max-wait 5000}} :flood-threshold 10 :interval 1000 :max-in-interval 100000) r (create rf) cost (atom {:total 0 :current 0 :max_total 0 :max_concurrent 0 :times 0}) ts 100 cl (java.util.concurrent.CountDownLatch. ts)] (time (do (dotimes [n ts] (-> (fn [] (dotimes [m 10000] (let [{:keys [ts result total current]} (permit? r (mod m 1))] (when result (swap! cost (fn [{:keys [total current times max_total max_concurrent]} t c] {:total (+ total t) :current (+ current c) :max_total (if (< max_total t) t max_total) :max_concurrent (if (< max_concurrent c) c max_concurrent) :times (inc times)}) total current)) (remove-permit r (mod m 1) ts))) (.countDown cl)) (Thread.) (.start))) (.await cl) (println @cost))))))
c160d9242dfbe6464a9049db378034cdc0d45198c47a7c276d203c007ed455c1
larcenists/larceny
macros.body.scm
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;; To simplify interoperability between R7RS and R6RS , this implementation of define - record - type supports all four of ;;; these specifications of define-record-type: ;;; SRFI 9 ;;; ERR5RS ;;; R6RS (deprecated) ;;; R7RS ;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (define-syntax define-record-type (syntax-rules (fields parent protocol sealed opaque nongenerative parent-rtd) ;; R6RS syntax (deprecated) ((_ (<name> <constructor> <predicate>) stuff ...) (r6rs-define-record-type (<name> <constructor> <predicate>) stuff ...)) ((_ <name> (fields <spec> ...) stuff ...) (r6rs-define-record-type <name> (fields <spec> ...) stuff ...)) ((_ <name> (parent <pname>) stuff ...) (r6rs-define-record-type <name> (parent <pname>) stuff ...)) ((_ <name> (protocol <exp>) stuff ...) (r6rs-define-record-type <name> (protocol <exp>) stuff ...)) ((_ <name> (sealed <boolean>) stuff ...) (r6rs-define-record-type <name> (sealed <boolean>) stuff ...)) ((_ <name> (opaque <boolean>) stuff ...) (r6rs-define-record-type <name> (opaque <boolean>) stuff ...)) ((_ <name> (nongenerative) stuff ...) (r6rs-define-record-type <name> (nongenerative) stuff ...)) ((_ <name> (nongenerative <uid>) stuff ...) (r6rs-define-record-type <name> (nongenerative <uid>) stuff ...)) ((_ <name> (parent-rtd <rtd> <cd>) stuff ...) (r6rs-define-record-type <name> (parent-rtd <rtd> <cd>) stuff ...)) ((_ <name>) (r6rs-define-record-type <name>)) R7RS , ERR5RS , and SRFI 9 syntax ((_ <name> <constructor> <pred> <field> ...) (r7rs-define-record-type <name> <constructor> <pred> <field> ...)))) From R7RS 7.3 (define-syntax define-values (syntax-rules () ((define-values () expr) (define dummy (call-with-values (lambda () expr) (lambda args #f)))) ((define-values (var) expr) (define var expr)) ((define-values (var0 var1 ... varn) expr) (begin (define var0 (call-with-values (lambda () expr) list)) (define var1 (let ((v (cadr var0))) (set-cdr! var0 (cddr var0)) v)) ... (define varn (let ((v (cadr var0))) (set! var0 (car var0)) v)))) ((define-values (var0 var1 ... . varn) expr) (begin (define var0 (call-with-values (lambda () expr) list)) (define var1 (let ((v (cadr var0))) (set-cdr! var0 (cddr var0)) v)) ... (define varn (let ((v (cdr var0))) (set! var0 (car var0)) v)))) ((define-values var expr) (define var (call-with-values (lambda () expr) list))))) ;;; From src/Compiler/usual.sch ;;; With the SRFI-39 and semantics , we have to bypass a call to the ;;; conversion procedure, which is done by passing the no-conversion symbol ;;; as an extra argument. That extra argument is recognized only by real ;;; parameters, so we have to be careful. (define-syntax parameterize (syntax-rules () ((parameterize ((p1 e1) ...) b1 b2 ...) (letrec-syntax ((parameterize-aux (... (syntax-rules () ((parameterize-aux (t ...) ((p0 e0) x ...) body1 body2 ...) (let ((tempE e0) (tempP p0) (first-time? #t)) (parameterize-aux ((tempE tempP first-time?) t ...) (x ...) body1 body2 ...))) ((parameterize-aux ((tE tP first-time?) ...) () body1 body2 ...) (let-syntax ((swap! (syntax-rules () ((swap! var param) (let ((tmp var)) (set! var (param)) (param tmp))) ((swap! var param flag) (let ((tmp var)) (set! var (param)) (if (parameter? param) (param tmp flag) (param tmp))))))) (dynamic-wind (lambda () (begin (if first-time? (swap! tE tP) (swap! tE tP 'no-conversion)) (set! first-time? #f)) ...) (lambda () body1 body2 ...) (lambda () (swap! 
tE tP 'no-conversion) ...)))))))) (parameterize-aux () ((p1 e1) ...) b1 b2 ...))))) eof
null
https://raw.githubusercontent.com/larcenists/larceny/fef550c7d3923deb7a5a1ccd5a628e54cf231c75/lib/R6RS/larceny/r7rs/macros.body.scm
scheme
these specifications of define-record-type: ERR5RS R6RS (deprecated) R7RS R6RS syntax (deprecated) From src/Compiler/usual.sch conversion procedure, which is done by passing the no-conversion symbol as an extra argument. That extra argument is recognized only by real parameters, so we have to be careful.
To simplify interoperability between R7RS and R6RS , this implementation of define - record - type supports all four of SRFI 9 (define-syntax define-record-type (syntax-rules (fields parent protocol sealed opaque nongenerative parent-rtd) ((_ (<name> <constructor> <predicate>) stuff ...) (r6rs-define-record-type (<name> <constructor> <predicate>) stuff ...)) ((_ <name> (fields <spec> ...) stuff ...) (r6rs-define-record-type <name> (fields <spec> ...) stuff ...)) ((_ <name> (parent <pname>) stuff ...) (r6rs-define-record-type <name> (parent <pname>) stuff ...)) ((_ <name> (protocol <exp>) stuff ...) (r6rs-define-record-type <name> (protocol <exp>) stuff ...)) ((_ <name> (sealed <boolean>) stuff ...) (r6rs-define-record-type <name> (sealed <boolean>) stuff ...)) ((_ <name> (opaque <boolean>) stuff ...) (r6rs-define-record-type <name> (opaque <boolean>) stuff ...)) ((_ <name> (nongenerative) stuff ...) (r6rs-define-record-type <name> (nongenerative) stuff ...)) ((_ <name> (nongenerative <uid>) stuff ...) (r6rs-define-record-type <name> (nongenerative <uid>) stuff ...)) ((_ <name> (parent-rtd <rtd> <cd>) stuff ...) (r6rs-define-record-type <name> (parent-rtd <rtd> <cd>) stuff ...)) ((_ <name>) (r6rs-define-record-type <name>)) R7RS , ERR5RS , and SRFI 9 syntax ((_ <name> <constructor> <pred> <field> ...) (r7rs-define-record-type <name> <constructor> <pred> <field> ...)))) From R7RS 7.3 (define-syntax define-values (syntax-rules () ((define-values () expr) (define dummy (call-with-values (lambda () expr) (lambda args #f)))) ((define-values (var) expr) (define var expr)) ((define-values (var0 var1 ... varn) expr) (begin (define var0 (call-with-values (lambda () expr) list)) (define var1 (let ((v (cadr var0))) (set-cdr! var0 (cddr var0)) v)) ... (define varn (let ((v (cadr var0))) (set! var0 (car var0)) v)))) ((define-values (var0 var1 ... . varn) expr) (begin (define var0 (call-with-values (lambda () expr) list)) (define var1 (let ((v (cadr var0))) (set-cdr! var0 (cddr var0)) v)) ... (define varn (let ((v (cdr var0))) (set! var0 (car var0)) v)))) ((define-values var expr) (define var (call-with-values (lambda () expr) list))))) With the SRFI-39 and semantics , we have to bypass a call to the (define-syntax parameterize (syntax-rules () ((parameterize ((p1 e1) ...) b1 b2 ...) (letrec-syntax ((parameterize-aux (... (syntax-rules () ((parameterize-aux (t ...) ((p0 e0) x ...) body1 body2 ...) (let ((tempE e0) (tempP p0) (first-time? #t)) (parameterize-aux ((tempE tempP first-time?) t ...) (x ...) body1 body2 ...))) ((parameterize-aux ((tE tP first-time?) ...) () body1 body2 ...) (let-syntax ((swap! (syntax-rules () ((swap! var param) (let ((tmp var)) (set! var (param)) (param tmp))) ((swap! var param flag) (let ((tmp var)) (set! var (param)) (if (parameter? param) (param tmp flag) (param tmp))))))) (dynamic-wind (lambda () (begin (if first-time? (swap! tE tP) (swap! tE tP 'no-conversion)) (set! first-time? #f)) ...) (lambda () body1 body2 ...) (lambda () (swap! tE tP 'no-conversion) ...)))))))) (parameterize-aux () ((p1 e1) ...) b1 b2 ...))))) eof
7eaf04b1089a6888cd036d8f384bd95a42f8a2a1e51d6e66b3fa8e3d3386c7a2
picty/parsifal
struct-0f.ml
struct s = { x : uint8; dump_checkpoint : debug_dump; }
null
https://raw.githubusercontent.com/picty/parsifal/767a1d558ea6da23ada46d8d96a057514b0aa2a8/syntax/unit/struct-0f.ml
ocaml
struct s = { x : uint8; dump_checkpoint : debug_dump; }
d0f8535150bfc345a847dc053ad7b1513dd3850c7020860a832c29d5e0e3645a
malgo-lang/malgo
TypeRep.hs
{-# LANGUAGE DeriveAnyClass #-} # LANGUAGE PatternSynonyms # # LANGUAGE TemplateHaskell # # LANGUAGE UndecidableInstances # module Malgo.Infer.TypeRep ( PrimT (..), Kind, TypeVar, KindCtx, insertKind, Type (..), MetaVar (..), HasType (..), HasKind (..), Scheme (..), TypeDef (..), typeConstructor, typeParameters, valueConstructors, TypeUnifyT (..), runTypeUnifyT, pattern TyConApp, viewTyConApp, buildTyArr, splitTyArr, applySubst, expandTypeSynonym, expandAllTypeSynonym, freevars, ) where import Control.Lens (At (at), Traversal', makeLenses, mapped, over, (^.), _1, _2) import Data.Binary (Binary) import Data.Binary.Instances.UnorderedContainers () import Data.Data (Data) import Data.HashMap.Strict qualified as HashMap import Koriel.Id import Koriel.Pretty import Malgo.Prelude -------------------------------- -- Common tag representations -- -------------------------------- -- | Primitive Types data PrimT = Int32T | Int64T | FloatT | DoubleT | CharT | StringT deriving stock (Eq, Show, Ord, Generic, Data) deriving anyclass (Hashable, Binary) instance Pretty PrimT where pPrint Int32T = "Int32#" pPrint Int64T = "Int64#" pPrint FloatT = "Float#" pPrint DoubleT = "Double#" pPrint CharT = "Char#" pPrint StringT = "String#" -------------------------- -- Type representations -- -------------------------- type Kind = Type type TypeVar = Id () TODO : Add insert function that ignores ` = TYPE ` type KindCtx = HashMap TypeVar Kind insertKind :: TypeVar -> Kind -> KindCtx -> KindCtx insertKind tv k ctx | k == TYPE = ctx | otherwise = HashMap.insert tv k ctx askKind :: TypeVar -> KindCtx -> Kind askKind tv ctx = fromMaybe TYPE (HashMap.lookup tv ctx) -- | Definition of `Type` data Type = -- type level operator -- | application of type constructor TyApp Type Type | -- | type variable (qualified by `Forall`) TyVar TypeVar | -- | type constructor TyCon TypeVar | -- primitive type constructor -- | primitive types TyPrim PrimT | -- | function type TyArr Type Type | -- | tuple type TyTuple Int | -- record type TyRecord (HashMap Text Type) | -- | pointer type TyPtr | -- kind constructor -- | star TYPE | -- unifiable type variable -- | type variable (not qualified) TyMeta MetaVar deriving stock (Eq, Ord, Show, Generic, Data) deriving anyclass (Hashable, Binary) instance Pretty Type where pPrintPrec l _ (TyConApp (TyCon c) ts) = foldl' (<+>) (pPrintPrec l 0 c) (map (pPrintPrec l 11) ts) pPrintPrec l _ (TyConApp (TyTuple _) ts) = parens $ sep $ punctuate "," $ map (pPrintPrec l 0) ts pPrintPrec l _ (TyConApp TyPtr [t]) = "Ptr#" <+> pPrintPrec l 0 t pPrintPrec l d (TyApp t1 t2) = maybeParens (d > 10) $ hsep [pPrintPrec l 10 t1, pPrintPrec l 11 t2] pPrintPrec _ _ (TyVar v) = pPrint v pPrintPrec l _ (TyCon c) = pPrintPrec l 0 c pPrintPrec l _ (TyPrim p) = pPrintPrec l 0 p pPrintPrec l d (TyArr t1 t2) = maybeParens (d > 10) $ pPrintPrec l 11 t1 <+> "->" <+> pPrintPrec l 10 t2 pPrintPrec _ _ (TyTuple n) = parens $ sep $ replicate (max 0 (n - 1)) "," pPrintPrec l _ (TyRecord kvs) = braces $ sep $ punctuate "," $ map (\(k, v) -> pPrintPrec l 0 k <> ":" <+> pPrintPrec l 0 v) $ HashMap.toList kvs pPrintPrec _ _ TyPtr = "Ptr#" pPrintPrec _ _ TYPE = "TYPE" pPrintPrec l _ (TyMeta tv) = pPrintPrec l 0 tv pattern TyConApp :: Type -> [Type] -> Type pattern TyConApp x xs <- (viewTyConApp -> Just (x, xs)) where TyConApp x xs = buildTyApp x xs viewTyConApp :: Type -> Maybe (Type, [Type]) viewTyConApp (TyCon con) = Just (TyCon con, []) viewTyConApp (TyTuple n) = Just (TyTuple n, []) viewTyConApp TyPtr = Just (TyPtr, []) viewTyConApp 
(TyApp t1 t2) = over (mapped . _2) (<> [t2]) $ viewTyConApp t1 viewTyConApp _ = Nothing buildTyApp :: Type -> [Type] -> Type buildTyApp = foldl' TyApp buildTyArr :: Foldable t => t Type -> Type -> Type buildTyArr ps ret = foldr TyArr ret ps -- | split a function type into its parameter types and return type splitTyArr :: Type -> ([Type], Type) splitTyArr (TyArr t1 t2) = over _1 (t1 :) $ splitTyArr t2 splitTyArr t = ([], t) ------------------- -- Type variable -- ------------------- newtype MetaVar = MetaVar {metaVar :: Id ()} deriving newtype (Eq, Ord, Show, Generic, Hashable) deriving stock (Data, Typeable) deriving anyclass (Binary) instance Pretty MetaVar where pPrint (MetaVar v) = "'" <> pPrint v instance HasType MetaVar where typeOf = TyMeta types _ (MetaVar v) = pure $ MetaVar v instance HasKind MetaVar where kindOf ctx MetaVar {metaVar} = askKind metaVar ctx ------------------------- HasType and HasKind -- ------------------------- -- | Types that have a `Type` class HasType a where typeOf :: a -> Type types :: Traversal' a Type class HasKind a where kindOf :: KindCtx -> a -> Kind instance HasKind TypeVar where kindOf ctx v = askKind v ctx instance HasKind PrimT where kindOf _ _ = TYPE instance HasType Type where typeOf = identity types = identity instance HasKind Type where kindOf ctx (TyApp (kindOf ctx -> TyArr _ k) _) = k kindOf _ TyApp {} = error "invalid kind" kindOf ctx (TyVar v) = kindOf ctx v kindOf ctx (TyCon c) = kindOf ctx c kindOf ctx (TyPrim p) = kindOf ctx p kindOf ctx (TyArr _ t2) = kindOf ctx t2 kindOf _ (TyTuple n) = buildTyArr (replicate n TYPE) TYPE kindOf _ (TyRecord _) = TYPE kindOf _ TyPtr = TYPE `TyArr` TYPE kindOf _ TYPE = TYPE -- Type :: Type kindOf ctx (TyMeta tv) = kindOf ctx tv instance HasType Void where typeOf = absurd types _ = absurd instance HasKind Void where kindOf _ = absurd -- | Universally quantified type data Scheme ty = Forall [TypeVar] ty deriving stock (Eq, Ord, Show, Generic, Functor, Foldable, Traversable) deriving anyclass (Hashable, Binary) instance Pretty ty => Pretty (Scheme ty) where pPrint (Forall [] t) = pPrint t pPrint (Forall vs t) = "forall" <+> hsep (map pPrint vs) <> "." <+> pPrint t -- | Definition of Type constructor -- valueConstructorsのSchemeは、typeParametersで全称化されている data TypeDef ty = TypeDef { _typeConstructor :: ty, _typeParameters :: [TypeVar], _valueConstructors :: [(Id (), Scheme ty)] } deriving stock (Show, Generic, Functor, Foldable, Traversable) deriving anyclass (Binary) instance Pretty ty => Pretty (TypeDef ty) where pPrint (TypeDef c q u) = pPrint (c, q, u) instance HasKind ty => HasKind (TypeDef ty) where kindOf ctx TypeDef {_typeConstructor} = kindOf ctx _typeConstructor makeLenses ''TypeDef ----------------------- -- Unification monad -- ----------------------- type TypeMap = HashMap MetaVar Type | Note for The Instance of ' MonadState ' for ' TypeUnifyT ' : -- @MonadState TypeUnifyT@ does not use ' TypeMap ' . -- Instead, it uses inner monad's 'MonadState' instance. 
newtype TypeUnifyT m a = TypeUnifyT {unTypeUnifyT :: StateT TypeMap m a} deriving newtype (Functor, Applicative, Monad, MonadReader r, MonadIO, MonadFail) instance MonadState s m => MonadState s (TypeUnifyT m) where get = TypeUnifyT $ lift get put x = TypeUnifyT $ lift $ put x instance MonadTrans TypeUnifyT where lift m = TypeUnifyT $ lift m runTypeUnifyT :: Monad m => TypeUnifyT m a -> m a runTypeUnifyT (TypeUnifyT m) = evalStateT m mempty --------------- Utilities -- --------------- -- | apply substitution to a type applySubst :: HashMap TypeVar Type -> Type -> Type applySubst subst = \case TyApp ty ty' -> TyApp (applySubst subst ty) (applySubst subst ty') TyVar id -> fromMaybe (TyVar id) $ subst ^. at id TyCon id -> TyCon id TyPrim pt -> TyPrim pt TyArr ty ty' -> TyArr (applySubst subst ty) (applySubst subst ty') TyTuple n -> TyTuple n TyRecord hm -> TyRecord $ fmap (applySubst subst) hm TyPtr -> TyPtr TYPE -> TYPE TyMeta tv -> TyMeta tv -- | expand type synonyms expandTypeSynonym :: HashMap TypeVar ([TypeVar], Type) -> Type -> Maybe Type expandTypeSynonym abbrEnv (TyConApp (TyCon con) ts) = case abbrEnv ^. at con of Nothing -> Nothing Just (ps, orig) -> Just (applySubst (HashMap.fromList $ zip ps ts) orig) expandTypeSynonym _ _ = Nothing expandAllTypeSynonym :: HashMap TypeVar ([TypeVar], Type) -> Type -> Type expandAllTypeSynonym abbrEnv (TyConApp (TyCon con) ts) = case abbrEnv ^. at con of Nothing -> TyConApp (TyCon con) $ map (expandAllTypeSynonym abbrEnv) ts Just (ps, orig) -> -- ネストした型シノニムを展開するため、展開直後の型をもう一度展開する expandAllTypeSynonym abbrEnv $ applySubst (HashMap.fromList $ zip ps ts) $ expandAllTypeSynonym abbrEnv orig expandAllTypeSynonym abbrEnv (TyApp t1 t2) = TyApp (expandAllTypeSynonym abbrEnv t1) (expandAllTypeSynonym abbrEnv t2) expandAllTypeSynonym _ t@TyVar {} = t expandAllTypeSynonym _ t@TyCon {} = t expandAllTypeSynonym _ t@TyPrim {} = t expandAllTypeSynonym abbrEnv (TyArr t1 t2) = TyArr (expandAllTypeSynonym abbrEnv t1) (expandAllTypeSynonym abbrEnv t2) expandAllTypeSynonym _ t@TyTuple {} = t expandAllTypeSynonym abbrEnv (TyRecord kts) = TyRecord $ fmap (expandAllTypeSynonym abbrEnv) kts expandAllTypeSynonym _ TyPtr = TyPtr expandAllTypeSynonym _ TYPE = TYPE expandAllTypeSynonym _ (TyMeta tv) = TyMeta tv -- | get all meta type variables in a type freevars :: Type -> HashSet MetaVar freevars (TyApp t1 t2) = freevars t1 <> freevars t2 freevars TyVar {} = mempty freevars TyCon {} = mempty freevars TyPrim {} = mempty freevars (TyArr t1 t2) = freevars t1 <> freevars t2 freevars TyTuple {} = mempty freevars (TyRecord kts) = foldMap freevars kts freevars TyPtr = mempty freevars TYPE = mempty freevars (TyMeta tv) = one tv
null
https://raw.githubusercontent.com/malgo-lang/malgo/002f522bf6376edf67716cef99033d87b46112f4/src/Malgo/Infer/TypeRep.hs
haskell
# LANGUAGE DeriveAnyClass # ------------------------------ Common tag representations -- ------------------------------ | Primitive Types ------------------------ Type representations -- ------------------------ | Definition of `Type` type level operator | application of type constructor | type variable (qualified by `Forall`) | type constructor primitive type constructor | primitive types | function type | tuple type record type | pointer type kind constructor | star unifiable type variable | type variable (not qualified) | split a function type into its parameter types and return type ----------------- Type variable -- ----------------- ----------------------- ----------------------- | Types that have a `Type` Type :: Type | Universally quantified type | Definition of Type constructor valueConstructorsのSchemeは、typeParametersで全称化されている --------------------- Unification monad -- --------------------- Instead, it uses inner monad's 'MonadState' instance. ------------- ------------- | apply substitution to a type | expand type synonyms ネストした型シノニムを展開するため、展開直後の型をもう一度展開する | get all meta type variables in a type
# LANGUAGE PatternSynonyms # # LANGUAGE TemplateHaskell # # LANGUAGE UndecidableInstances # module Malgo.Infer.TypeRep ( PrimT (..), Kind, TypeVar, KindCtx, insertKind, Type (..), MetaVar (..), HasType (..), HasKind (..), Scheme (..), TypeDef (..), typeConstructor, typeParameters, valueConstructors, TypeUnifyT (..), runTypeUnifyT, pattern TyConApp, viewTyConApp, buildTyArr, splitTyArr, applySubst, expandTypeSynonym, expandAllTypeSynonym, freevars, ) where import Control.Lens (At (at), Traversal', makeLenses, mapped, over, (^.), _1, _2) import Data.Binary (Binary) import Data.Binary.Instances.UnorderedContainers () import Data.Data (Data) import Data.HashMap.Strict qualified as HashMap import Koriel.Id import Koriel.Pretty import Malgo.Prelude data PrimT = Int32T | Int64T | FloatT | DoubleT | CharT | StringT deriving stock (Eq, Show, Ord, Generic, Data) deriving anyclass (Hashable, Binary) instance Pretty PrimT where pPrint Int32T = "Int32#" pPrint Int64T = "Int64#" pPrint FloatT = "Float#" pPrint DoubleT = "Double#" pPrint CharT = "Char#" pPrint StringT = "String#" type Kind = Type type TypeVar = Id () TODO : Add insert function that ignores ` = TYPE ` type KindCtx = HashMap TypeVar Kind insertKind :: TypeVar -> Kind -> KindCtx -> KindCtx insertKind tv k ctx | k == TYPE = ctx | otherwise = HashMap.insert tv k ctx askKind :: TypeVar -> KindCtx -> Kind askKind tv ctx = fromMaybe TYPE (HashMap.lookup tv ctx) data Type TyApp Type Type TyVar TypeVar TyCon TypeVar TyPrim PrimT TyArr Type Type TyTuple Int TyRecord (HashMap Text Type) TyPtr TYPE TyMeta MetaVar deriving stock (Eq, Ord, Show, Generic, Data) deriving anyclass (Hashable, Binary) instance Pretty Type where pPrintPrec l _ (TyConApp (TyCon c) ts) = foldl' (<+>) (pPrintPrec l 0 c) (map (pPrintPrec l 11) ts) pPrintPrec l _ (TyConApp (TyTuple _) ts) = parens $ sep $ punctuate "," $ map (pPrintPrec l 0) ts pPrintPrec l _ (TyConApp TyPtr [t]) = "Ptr#" <+> pPrintPrec l 0 t pPrintPrec l d (TyApp t1 t2) = maybeParens (d > 10) $ hsep [pPrintPrec l 10 t1, pPrintPrec l 11 t2] pPrintPrec _ _ (TyVar v) = pPrint v pPrintPrec l _ (TyCon c) = pPrintPrec l 0 c pPrintPrec l _ (TyPrim p) = pPrintPrec l 0 p pPrintPrec l d (TyArr t1 t2) = maybeParens (d > 10) $ pPrintPrec l 11 t1 <+> "->" <+> pPrintPrec l 10 t2 pPrintPrec _ _ (TyTuple n) = parens $ sep $ replicate (max 0 (n - 1)) "," pPrintPrec l _ (TyRecord kvs) = braces $ sep $ punctuate "," $ map (\(k, v) -> pPrintPrec l 0 k <> ":" <+> pPrintPrec l 0 v) $ HashMap.toList kvs pPrintPrec _ _ TyPtr = "Ptr#" pPrintPrec _ _ TYPE = "TYPE" pPrintPrec l _ (TyMeta tv) = pPrintPrec l 0 tv pattern TyConApp :: Type -> [Type] -> Type pattern TyConApp x xs <- (viewTyConApp -> Just (x, xs)) where TyConApp x xs = buildTyApp x xs viewTyConApp :: Type -> Maybe (Type, [Type]) viewTyConApp (TyCon con) = Just (TyCon con, []) viewTyConApp (TyTuple n) = Just (TyTuple n, []) viewTyConApp TyPtr = Just (TyPtr, []) viewTyConApp (TyApp t1 t2) = over (mapped . 
_2) (<> [t2]) $ viewTyConApp t1 viewTyConApp _ = Nothing buildTyApp :: Type -> [Type] -> Type buildTyApp = foldl' TyApp buildTyArr :: Foldable t => t Type -> Type -> Type buildTyArr ps ret = foldr TyArr ret ps splitTyArr :: Type -> ([Type], Type) splitTyArr (TyArr t1 t2) = over _1 (t1 :) $ splitTyArr t2 splitTyArr t = ([], t) newtype MetaVar = MetaVar {metaVar :: Id ()} deriving newtype (Eq, Ord, Show, Generic, Hashable) deriving stock (Data, Typeable) deriving anyclass (Binary) instance Pretty MetaVar where pPrint (MetaVar v) = "'" <> pPrint v instance HasType MetaVar where typeOf = TyMeta types _ (MetaVar v) = pure $ MetaVar v instance HasKind MetaVar where kindOf ctx MetaVar {metaVar} = askKind metaVar ctx class HasType a where typeOf :: a -> Type types :: Traversal' a Type class HasKind a where kindOf :: KindCtx -> a -> Kind instance HasKind TypeVar where kindOf ctx v = askKind v ctx instance HasKind PrimT where kindOf _ _ = TYPE instance HasType Type where typeOf = identity types = identity instance HasKind Type where kindOf ctx (TyApp (kindOf ctx -> TyArr _ k) _) = k kindOf _ TyApp {} = error "invalid kind" kindOf ctx (TyVar v) = kindOf ctx v kindOf ctx (TyCon c) = kindOf ctx c kindOf ctx (TyPrim p) = kindOf ctx p kindOf ctx (TyArr _ t2) = kindOf ctx t2 kindOf _ (TyTuple n) = buildTyArr (replicate n TYPE) TYPE kindOf _ (TyRecord _) = TYPE kindOf _ TyPtr = TYPE `TyArr` TYPE kindOf ctx (TyMeta tv) = kindOf ctx tv instance HasType Void where typeOf = absurd types _ = absurd instance HasKind Void where kindOf _ = absurd data Scheme ty = Forall [TypeVar] ty deriving stock (Eq, Ord, Show, Generic, Functor, Foldable, Traversable) deriving anyclass (Hashable, Binary) instance Pretty ty => Pretty (Scheme ty) where pPrint (Forall [] t) = pPrint t pPrint (Forall vs t) = "forall" <+> hsep (map pPrint vs) <> "." <+> pPrint t data TypeDef ty = TypeDef { _typeConstructor :: ty, _typeParameters :: [TypeVar], _valueConstructors :: [(Id (), Scheme ty)] } deriving stock (Show, Generic, Functor, Foldable, Traversable) deriving anyclass (Binary) instance Pretty ty => Pretty (TypeDef ty) where pPrint (TypeDef c q u) = pPrint (c, q, u) instance HasKind ty => HasKind (TypeDef ty) where kindOf ctx TypeDef {_typeConstructor} = kindOf ctx _typeConstructor makeLenses ''TypeDef type TypeMap = HashMap MetaVar Type | Note for The Instance of ' MonadState ' for ' TypeUnifyT ' : @MonadState TypeUnifyT@ does not use ' TypeMap ' . newtype TypeUnifyT m a = TypeUnifyT {unTypeUnifyT :: StateT TypeMap m a} deriving newtype (Functor, Applicative, Monad, MonadReader r, MonadIO, MonadFail) instance MonadState s m => MonadState s (TypeUnifyT m) where get = TypeUnifyT $ lift get put x = TypeUnifyT $ lift $ put x instance MonadTrans TypeUnifyT where lift m = TypeUnifyT $ lift m runTypeUnifyT :: Monad m => TypeUnifyT m a -> m a runTypeUnifyT (TypeUnifyT m) = evalStateT m mempty applySubst :: HashMap TypeVar Type -> Type -> Type applySubst subst = \case TyApp ty ty' -> TyApp (applySubst subst ty) (applySubst subst ty') TyVar id -> fromMaybe (TyVar id) $ subst ^. at id TyCon id -> TyCon id TyPrim pt -> TyPrim pt TyArr ty ty' -> TyArr (applySubst subst ty) (applySubst subst ty') TyTuple n -> TyTuple n TyRecord hm -> TyRecord $ fmap (applySubst subst) hm TyPtr -> TyPtr TYPE -> TYPE TyMeta tv -> TyMeta tv expandTypeSynonym :: HashMap TypeVar ([TypeVar], Type) -> Type -> Maybe Type expandTypeSynonym abbrEnv (TyConApp (TyCon con) ts) = case abbrEnv ^. 
at con of Nothing -> Nothing Just (ps, orig) -> Just (applySubst (HashMap.fromList $ zip ps ts) orig) expandTypeSynonym _ _ = Nothing expandAllTypeSynonym :: HashMap TypeVar ([TypeVar], Type) -> Type -> Type expandAllTypeSynonym abbrEnv (TyConApp (TyCon con) ts) = case abbrEnv ^. at con of Nothing -> TyConApp (TyCon con) $ map (expandAllTypeSynonym abbrEnv) ts Just (ps, orig) -> expandAllTypeSynonym abbrEnv $ applySubst (HashMap.fromList $ zip ps ts) $ expandAllTypeSynonym abbrEnv orig expandAllTypeSynonym abbrEnv (TyApp t1 t2) = TyApp (expandAllTypeSynonym abbrEnv t1) (expandAllTypeSynonym abbrEnv t2) expandAllTypeSynonym _ t@TyVar {} = t expandAllTypeSynonym _ t@TyCon {} = t expandAllTypeSynonym _ t@TyPrim {} = t expandAllTypeSynonym abbrEnv (TyArr t1 t2) = TyArr (expandAllTypeSynonym abbrEnv t1) (expandAllTypeSynonym abbrEnv t2) expandAllTypeSynonym _ t@TyTuple {} = t expandAllTypeSynonym abbrEnv (TyRecord kts) = TyRecord $ fmap (expandAllTypeSynonym abbrEnv) kts expandAllTypeSynonym _ TyPtr = TyPtr expandAllTypeSynonym _ TYPE = TYPE expandAllTypeSynonym _ (TyMeta tv) = TyMeta tv freevars :: Type -> HashSet MetaVar freevars (TyApp t1 t2) = freevars t1 <> freevars t2 freevars TyVar {} = mempty freevars TyCon {} = mempty freevars TyPrim {} = mempty freevars (TyArr t1 t2) = freevars t1 <> freevars t2 freevars TyTuple {} = mempty freevars (TyRecord kts) = foldMap freevars kts freevars TyPtr = mempty freevars TYPE = mempty freevars (TyMeta tv) = one tv
93f88e1d13b3894b6cae534c2ffe8d4ff7afcdab7615c2eab98b13787206526e
RyanHope/ACT-R
simple-dynamic-model.lisp
(clear-all) (define-model dynamic-introduction (sgp :v t :trace-detail high :cst t :style-warnings nil) (chunk-type fact context data) (chunk-type step step destination) (chunk-type result data1 data2) (add-dm (first isa chunk) (second isa chunk) (destination isa chunk) (data2 isa chunk) (data3 isa chunk) (data isa chunk) (a isa step step first destination data2) (b isa step step second destination data3)) (goal-focus-fct (car (define-chunks (isa fact context data data 10)))) (p start =goal> isa fact context =context =context =x ?imaginal> state free buffer empty ==> =goal> context destination +imaginal> isa result data1 =x +retrieval> isa step step first) (p retrieve-first-step =goal> isa fact context =slot data =x =imaginal> isa result data1 =x =retrieval> isa step =slot =target step first ==> =goal> data 11 =imaginal> =target =x +retrieval> isa step step second) (p retrieve-second-step =goal> isa fact context =slot data =x =imaginal> =retrieval> isa step =slot =target step second ==> =imaginal> =target =x))
null
https://raw.githubusercontent.com/RyanHope/ACT-R/c65f3fe7057da0476281ad869c7963c84c0ad735/tutorial/unit8/simple-dynamic-model.lisp
lisp
(clear-all) (define-model dynamic-introduction (sgp :v t :trace-detail high :cst t :style-warnings nil) (chunk-type fact context data) (chunk-type step step destination) (chunk-type result data1 data2) (add-dm (first isa chunk) (second isa chunk) (destination isa chunk) (data2 isa chunk) (data3 isa chunk) (data isa chunk) (a isa step step first destination data2) (b isa step step second destination data3)) (goal-focus-fct (car (define-chunks (isa fact context data data 10)))) (p start =goal> isa fact context =context =context =x ?imaginal> state free buffer empty ==> =goal> context destination +imaginal> isa result data1 =x +retrieval> isa step step first) (p retrieve-first-step =goal> isa fact context =slot data =x =imaginal> isa result data1 =x =retrieval> isa step =slot =target step first ==> =goal> data 11 =imaginal> =target =x +retrieval> isa step step second) (p retrieve-second-step =goal> isa fact context =slot data =x =imaginal> =retrieval> isa step =slot =target step second ==> =imaginal> =target =x))
8da80dcdb14b74590f4b82c18ce02a000c33186d9a26e7d5cd0dd010dc16de77
squint-cljs/squint
main.cljs
(ns main) (println "Hello world!") (prn (apply map vector [[1 2 3] [4 5 6] [7 8 9]]))
null
https://raw.githubusercontent.com/squint-cljs/squint/bdbac2a3dd97542abf135d5e07237040a512e008/examples/quickjs/main.cljs
clojure
(ns main) (println "Hello world!") (prn (apply map vector [[1 2 3] [4 5 6] [7 8 9]]))
254df72034174d10ad12e7ea82385aa66ba4554f04c2b5b59df445f08c892ee8
zotonic/zotonic
scomp_survey_poll.erl
@author < > 2011 - 2021 Copyright 2011 - 2021 %% Licensed under the Apache License , Version 2.0 ( the " License " ) ; %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% -2.0 %% %% Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. -module(scomp_survey_poll). -behaviour(zotonic_scomp). -export([vary/2, render/3]). -export([single_result/3]). -include_lib("zotonic_core/include/zotonic.hrl"). -include_lib("zotonic_mod_survey/include/survey.hrl"). vary(_,_) -> nocache. render(Args, _Vars, Context) -> {id, SurveyId} = proplists:lookup(id, Args), ElementId = proplists:get_value(element_id, Args, "survey-question"), AnswerId = proplists:get_value(answer_id, Args), Actions = proplists:get_all_values(action, Args), Actions1 = [ Action || Action <- Actions, Action =/= undefined ], Render = case z_acl:rsc_editable(SurveyId, Context) of true when is_integer(AnswerId) -> {UserId, Answers} = single_result(SurveyId, AnswerId, Context), Editing = {editing, AnswerId, Actions1}, Args1 = [ {answer_user_id, UserId} | Args ], mod_survey:render_next_page(SurveyId, 1, exact, Answers, [], Editing, Args1, Context); _NotEditing -> mod_survey:render_next_page(SurveyId, 1, exact, [], [], undefined, Args, Context) end, {ok, z_template:render(Render#render{vars=[{element_id, ElementId}|Render#render.vars]}, Context)}. single_result(SurveyId, AnswerId, Context) -> case m_survey:single_result(SurveyId, AnswerId, Context) of None when None =:= undefined; None =:= [] -> {undefined, []}; Result -> Answers = proplists:get_value(answers, Result, []), Answers1 = lists:map( fun({QName, Ans}) -> Answer = proplists:get_value(answer, Ans), {QName, Answer} end, Answers), {proplists:get_value(user_id, Result), Answers1} end.
null
https://raw.githubusercontent.com/zotonic/zotonic/ec8c25581afdbc20eb8b755b2b1af81300759e93/apps/zotonic_mod_survey/src/scomps/scomp_survey_poll.erl
erlang
you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
@author < > 2011 - 2021 Copyright 2011 - 2021 Licensed under the Apache License , Version 2.0 ( the " License " ) ; distributed under the License is distributed on an " AS IS " BASIS , -module(scomp_survey_poll). -behaviour(zotonic_scomp). -export([vary/2, render/3]). -export([single_result/3]). -include_lib("zotonic_core/include/zotonic.hrl"). -include_lib("zotonic_mod_survey/include/survey.hrl"). vary(_,_) -> nocache. render(Args, _Vars, Context) -> {id, SurveyId} = proplists:lookup(id, Args), ElementId = proplists:get_value(element_id, Args, "survey-question"), AnswerId = proplists:get_value(answer_id, Args), Actions = proplists:get_all_values(action, Args), Actions1 = [ Action || Action <- Actions, Action =/= undefined ], Render = case z_acl:rsc_editable(SurveyId, Context) of true when is_integer(AnswerId) -> {UserId, Answers} = single_result(SurveyId, AnswerId, Context), Editing = {editing, AnswerId, Actions1}, Args1 = [ {answer_user_id, UserId} | Args ], mod_survey:render_next_page(SurveyId, 1, exact, Answers, [], Editing, Args1, Context); _NotEditing -> mod_survey:render_next_page(SurveyId, 1, exact, [], [], undefined, Args, Context) end, {ok, z_template:render(Render#render{vars=[{element_id, ElementId}|Render#render.vars]}, Context)}. single_result(SurveyId, AnswerId, Context) -> case m_survey:single_result(SurveyId, AnswerId, Context) of None when None =:= undefined; None =:= [] -> {undefined, []}; Result -> Answers = proplists:get_value(answers, Result, []), Answers1 = lists:map( fun({QName, Ans}) -> Answer = proplists:get_value(answer, Ans), {QName, Answer} end, Answers), {proplists:get_value(user_id, Result), Answers1} end.
13f974e9fabccf5c62143d944b784951130b92669970cf1915ec373d3d63dd6b
input-output-hk/project-icarus-importer
Delegation.hs
{-# LANGUAGE RankNTypes #-} -- | Send and receive for delegation. module Pos.Diffusion.Full.Delegation ( delegationListeners , delegationOutSpecs , sendPskHeavy ) where import Universum import qualified Network.Broadcast.OutboundQueue as OQ import Pos.Binary () import Pos.Communication.Limits (mlHeavyDlgIndex, mlProxySecretKey) import Pos.Communication.Message () import Pos.Communication.Protocol (MsgType (..), NodeId, EnqueueMsg, MkListeners, OutSpecs) import Pos.Communication.Relay (DataParams (..), Relay (..), relayListeners, dataFlow, relayPropagateOut) import Pos.Core (ProxySKHeavy) import Pos.Logic.Types (Logic (..)) import Pos.Network.Types (Bucket) import Pos.Util.Trace (Trace, Severity) delegationListeners :: Trace IO (Severity, Text) -> Logic IO -> OQ.OutboundQ pack NodeId Bucket -> EnqueueMsg -> MkListeners delegationListeners logTrace logic oq enqueue = relayListeners logTrace oq enqueue (delegationRelays logic) -- | Listeners for requests related to delegation processing. delegationRelays :: Logic IO -> [Relay] delegationRelays logic = [ pskHeavyRelay logic ] | ' OutSpecs ' for the tx relays , to keep up with the ' InSpecs'/'OutSpecs ' -- motif required for communication. -- The 'Logic m' isn't *really* needed, it's just an artefact of the design. delegationOutSpecs :: Logic IO -> OutSpecs delegationOutSpecs logic = relayPropagateOut (delegationRelays logic) pskHeavyRelay :: Logic IO -> Relay pskHeavyRelay logic = Data $ DataParams MsgTransaction (\_ _ -> postPskHeavy logic) The message size limit for ProxySKHeavy : a ProxySecretKey with an EpochIndex . (pure (mlProxySecretKey mlHeavyDlgIndex)) sendPskHeavy :: Trace IO (Severity, Text) -> EnqueueMsg -> ProxySKHeavy -> IO () sendPskHeavy logTrace enqueue = dataFlow logTrace "pskHeavy" enqueue (MsgTransaction OQ.OriginSender)
null
https://raw.githubusercontent.com/input-output-hk/project-icarus-importer/36342f277bcb7f1902e677a02d1ce93e4cf224f0/lib/src/Pos/Diffusion/Full/Delegation.hs
haskell
# LANGUAGE RankNTypes # | Send and receive for delegation. | Listeners for requests related to delegation processing. motif required for communication. The 'Logic m' isn't *really* needed, it's just an artefact of the design.
module Pos.Diffusion.Full.Delegation ( delegationListeners , delegationOutSpecs , sendPskHeavy ) where import Universum import qualified Network.Broadcast.OutboundQueue as OQ import Pos.Binary () import Pos.Communication.Limits (mlHeavyDlgIndex, mlProxySecretKey) import Pos.Communication.Message () import Pos.Communication.Protocol (MsgType (..), NodeId, EnqueueMsg, MkListeners, OutSpecs) import Pos.Communication.Relay (DataParams (..), Relay (..), relayListeners, dataFlow, relayPropagateOut) import Pos.Core (ProxySKHeavy) import Pos.Logic.Types (Logic (..)) import Pos.Network.Types (Bucket) import Pos.Util.Trace (Trace, Severity) delegationListeners :: Trace IO (Severity, Text) -> Logic IO -> OQ.OutboundQ pack NodeId Bucket -> EnqueueMsg -> MkListeners delegationListeners logTrace logic oq enqueue = relayListeners logTrace oq enqueue (delegationRelays logic) delegationRelays :: Logic IO -> [Relay] delegationRelays logic = [ pskHeavyRelay logic ] | ' OutSpecs ' for the tx relays , to keep up with the ' InSpecs'/'OutSpecs ' delegationOutSpecs :: Logic IO -> OutSpecs delegationOutSpecs logic = relayPropagateOut (delegationRelays logic) pskHeavyRelay :: Logic IO -> Relay pskHeavyRelay logic = Data $ DataParams MsgTransaction (\_ _ -> postPskHeavy logic) The message size limit for ProxySKHeavy : a ProxySecretKey with an EpochIndex . (pure (mlProxySecretKey mlHeavyDlgIndex)) sendPskHeavy :: Trace IO (Severity, Text) -> EnqueueMsg -> ProxySKHeavy -> IO () sendPskHeavy logTrace enqueue = dataFlow logTrace "pskHeavy" enqueue (MsgTransaction OQ.OriginSender)
19ecdda0551d811e562f1fe3f483a21045f6944c3463b2e590f2ecbc0ee5e032
atzedijkstra/chr
VecAlloc.hs
# LANGUAGE ScopedTypeVariables # # LANGUAGE TemplateHaskell # ------------------------------------------------------------------------------------------- | Vector intended for densily filled entries close to 0 , > 0 . -- In situ updates are not supposed to happen often. ------------------------------------------------------------------------------------------- module CHR.Data.VecAlloc ( VecAlloc , empty , alter , lookup , toList , fromList , null , size ) where ------------------------------------------------------------------------------------------- import Prelude hiding (lookup, map, null) import qualified Data.List as List import Control.Monad import qualified Data.Vector as V import qualified Data.Vector.Mutable as MV import CHR.Data.Lens ------------------------------------------------------------------------------------------- ------------------------------------------------------------------------------------------- -- Types ------------------------------------------------------------------------------------------- data Val v = Init | Noth | Val v instance Show v => Show (Val v) where show (Val v) = show v show _ = "" m2v :: Maybe v -> Val v m2v = maybe Noth Val # INLINE m2v # v2m :: Val v -> Maybe v v2m (Val v) = Just v v2m _ = Nothing # INLINE v2 m # | VecAlloc e newtype VecAlloc e = VecAlloc { _vecallocVec :: V.Vector (Val e) -- , _vecallocFree :: {-# UNPACK #-} !Int } deriving Show mkLabel ''VecAlloc ------------------------------------------------------------------------------------------- VecAlloc e utils ------------------------------------------------------------------------------------------- -- | Ensure enough free slots ensure :: Int -> VecAlloc e -> VecAlloc e ensure sz s@(VecAlloc {_vecallocVec=v}) | l >= sz = s | otherwise = s {_vecallocVec = v V.++ V.replicate ((sz `max` ((3 * l) `div` 2)) - l) Init} where l = V.length v # INLINE ensure # empty :: VecAlloc e 0 {-# INLINE empty #-} alter :: (Maybe e -> Maybe e) -> Int -> VecAlloc e -> VecAlloc e alter f k s@(VecAlloc {_vecallocVec=v}) | k >= V.length v = maybe s (\val -> vecallocVec ^$= V.modify (\v -> MV.write v k (Val val)) $ ensure (k+1) s) $ f Nothing | otherwise = let upd vv = case vv V.! k of Init -> V.modify (\v -> MV.write v k (m2v $ f Nothing)) vv Noth -> vv V.// [(k, m2v $ f Nothing)] Val v -> vv V.// [(k, m2v $ f $ Just v)] in vecallocVec ^$= upd $ s lookup :: Int -> VecAlloc e -> Maybe e lookup k (VecAlloc {_vecallocVec=v}) | k >= V.length v = Nothing | otherwise = v2m $ v V.! k toList :: VecAlloc e -> [(Int,e)] toList (VecAlloc {_vecallocVec=v}) = [ (i,v) | (i, Val v) <- zip [0..] $ V.toList v ] fromList :: [(Int,e)] -> VecAlloc e fromList [] = empty fromList l = vecallocVec ^$= V.modify (\v -> forM_ l $ \(k,x) -> MV.write v k (Val x)) $ ensure (mx+1) empty where mx = maximum $ List.map fst l null :: VecAlloc e -> Bool null = List.null . toList unionWith : : ( e - > e - > e ) - > VecAlloc e - > VecAlloc e - > VecAlloc e unionWith f ( VecAlloc { _ vecallocVec = v1 } ) ( VecAlloc { _ vecallocVec = v2 } ) unionWith :: (e -> e -> e) -> VecAlloc e -> VecAlloc e -> VecAlloc e unionWith f (VecAlloc {_vecallocVec=v1}) (VecAlloc {_vecallocVec=v2}) -} size :: VecAlloc e -> Int size = V.length . _vecallocVec
null
https://raw.githubusercontent.com/atzedijkstra/chr/ad465828b1560831e90c4056e12338231872e8db/chr-data/src/CHR/Data/VecAlloc.hs
haskell
----------------------------------------------------------------------------------------- In situ updates are not supposed to happen often. ----------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------- Types ----------------------------------------------------------------------------------------- , _vecallocFree :: {-# UNPACK #-} !Int ----------------------------------------------------------------------------------------- ----------------------------------------------------------------------------------------- | Ensure enough free slots # INLINE empty #
# LANGUAGE ScopedTypeVariables # # LANGUAGE TemplateHaskell # | Vector intended for densily filled entries close to 0 , > 0 . module CHR.Data.VecAlloc ( VecAlloc , empty , alter , lookup , toList , fromList , null , size ) where import Prelude hiding (lookup, map, null) import qualified Data.List as List import Control.Monad import qualified Data.Vector as V import qualified Data.Vector.Mutable as MV import CHR.Data.Lens data Val v = Init | Noth | Val v instance Show v => Show (Val v) where show (Val v) = show v show _ = "" m2v :: Maybe v -> Val v m2v = maybe Noth Val # INLINE m2v # v2m :: Val v -> Maybe v v2m (Val v) = Just v v2m _ = Nothing # INLINE v2 m # | VecAlloc e newtype VecAlloc e = VecAlloc { _vecallocVec :: V.Vector (Val e) } deriving Show mkLabel ''VecAlloc VecAlloc e utils ensure :: Int -> VecAlloc e -> VecAlloc e ensure sz s@(VecAlloc {_vecallocVec=v}) | l >= sz = s | otherwise = s {_vecallocVec = v V.++ V.replicate ((sz `max` ((3 * l) `div` 2)) - l) Init} where l = V.length v # INLINE ensure # empty :: VecAlloc e 0 alter :: (Maybe e -> Maybe e) -> Int -> VecAlloc e -> VecAlloc e alter f k s@(VecAlloc {_vecallocVec=v}) | k >= V.length v = maybe s (\val -> vecallocVec ^$= V.modify (\v -> MV.write v k (Val val)) $ ensure (k+1) s) $ f Nothing | otherwise = let upd vv = case vv V.! k of Init -> V.modify (\v -> MV.write v k (m2v $ f Nothing)) vv Noth -> vv V.// [(k, m2v $ f Nothing)] Val v -> vv V.// [(k, m2v $ f $ Just v)] in vecallocVec ^$= upd $ s lookup :: Int -> VecAlloc e -> Maybe e lookup k (VecAlloc {_vecallocVec=v}) | k >= V.length v = Nothing | otherwise = v2m $ v V.! k toList :: VecAlloc e -> [(Int,e)] toList (VecAlloc {_vecallocVec=v}) = [ (i,v) | (i, Val v) <- zip [0..] $ V.toList v ] fromList :: [(Int,e)] -> VecAlloc e fromList [] = empty fromList l = vecallocVec ^$= V.modify (\v -> forM_ l $ \(k,x) -> MV.write v k (Val x)) $ ensure (mx+1) empty where mx = maximum $ List.map fst l null :: VecAlloc e -> Bool null = List.null . toList unionWith : : ( e - > e - > e ) - > VecAlloc e - > VecAlloc e - > VecAlloc e unionWith f ( VecAlloc { _ vecallocVec = v1 } ) ( VecAlloc { _ vecallocVec = v2 } ) unionWith :: (e -> e -> e) -> VecAlloc e -> VecAlloc e -> VecAlloc e unionWith f (VecAlloc {_vecallocVec=v1}) (VecAlloc {_vecallocVec=v2}) -} size :: VecAlloc e -> Int size = V.length . _vecallocVec
75ea6bc969f9d9c98a1ce37ff68fa7fe075ab3cdf9ba3158fd264f30f47fdaa1
joelburget/lvca
Ast_operations.ml
module Model = struct let initial_model = () end module View = struct open Brr.El let view _model = div [ txt' "substitution" ; txt' "opening" ; txt' "closing" ; txt' "structural induction" ; txt' "folding" ; txt' "is open? (free vars)" ; txt' "renaming" ; txt' "(alpha) equivalence checking" ] ;; end let stateless_view () = View.view Model.initial_model
null
https://raw.githubusercontent.com/joelburget/lvca/0bf86d981cab5d45d1c21918305f0d448ba57ca4/pages/Ast_operations.ml
ocaml
module Model = struct let initial_model = () end module View = struct open Brr.El let view _model = div [ txt' "substitution" ; txt' "opening" ; txt' "closing" ; txt' "structural induction" ; txt' "folding" ; txt' "is open? (free vars)" ; txt' "renaming" ; txt' "(alpha) equivalence checking" ] ;; end let stateless_view () = View.view Model.initial_model
2803106cb6688a03130aebf94fefb86931683e68282434e0b7c3095ff94a72d9
peti/postmaster
Main.hs
| Module : Main Copyright : ( C ) 2004 - 2019 License : GNU AFFERO GPL v3 or later Maintainer : Stability : experimental Portability : non - portable Module: Main Copyright: (C) 2004-2019 Peter Simons License: GNU AFFERO GPL v3 or later Maintainer: Stability: experimental Portability: non-portable -} # LANGUAGE ApplicativeDo # {-# LANGUAGE ConstraintKinds #-} # LANGUAGE DuplicateRecordFields # # LANGUAGE FlexibleContexts # # LANGUAGE FlexibleInstances # # LANGUAGE GeneralizedNewtypeDeriving # # LANGUAGE LambdaCase # {-# LANGUAGE OverloadedStrings #-} # LANGUAGE RecordWildCards # module Main ( main ) where import Paths_postmaster ( version ) import Postmaster TOOD : Add to prelude ? import Data.Version import Data.List import Network.Socket import Network.TLS as TLS hiding ( HostName ) import Network.TLS.Extra.Cipher as TLS import Network.TLS.SessionManager as TLS import Options.Applicative import System.Exit import System.Posix.Syslog as Syslog ( withSyslog, Facility(Mail) ) import System.X509 newtype Postmaster a = Postmaster { runPostmaster :: ReaderT (LogAction Postmaster LogMsg) IO a } deriving ( Applicative, Functor, Monad, MonadIO, MonadUnliftIO , MonadReader (LogAction Postmaster LogMsg) ) data CliOptions = CliOptions { listenAddressStrings :: [String] , tlsCertFile :: Maybe FilePath , tlsKeyFile :: Maybe FilePath , spoolDir :: FilePath } deriving (Show) data Configuration = Configuration { listenAddresses :: [(Maybe HostName, ServiceName)] , tlsServerParams :: Maybe ServerParams , spoolDir :: FilePath } deriving (Show) cliOptions :: Parser CliOptions cliOptions = do listenAddressStrings <- many (strOption $ long "listen" <> metavar "ADDR-SPEC" <> help "Accept incoming connections on this address. Can be specified multiple times.") tlsCertFile <- optional (strOption $ long "tls-cert" <> metavar "PATH" <> help "The server's TLS certificate.") tlsKeyFile <- optional (strOption $ long "tls-key" <> metavar "PATH" <> help "The server's TLS private key.") spoolDir <- strOption (long "spool-dir" <> metavar "PATH" <> help "Path to the mail spool directory." <> value "/var/spool/mqueue") pure CliOptions {..} cli :: ParserInfo CliOptions cli = info ( helper <*> infoOption ("postmaster version " ++ showVersion version) (long "version" <> help "Show version number") <*> cliOptions ) (briefDesc <> header ("Postmaster ESMTP Server version " ++ showVersion version)) main :: IO () main = withSocketsDo $ withSyslog "postmaster" [] Mail $ do cfg <- execParser cli >>= makeConfiguration runReaderT (runPostmaster (postmaster cfg)) (logToHandle stderr) postmaster :: (MonadUnliftIO m, MonadLog env m) => Configuration -> m () postmaster cfg@Configuration {..} = handle (\e -> logError ("fatal error: " <> display (e::SomeException)) >> liftIO exitFailure) $ do logDebug "postmaster starting up ..." 
logInfo $ display cfg mapConcurrently_ (`listener` acceptor (esmtpdAcceptor tlsServerParams spoolDir)) listenAddresses makeConfiguration :: CliOptions -> IO Configuration makeConfiguration CliOptions {..} = do let listenAddresses | null listenAddressStrings = [(Nothing, "25")] | otherwise = map parseListenAddr listenAddressStrings tlsServerParams <- case (tlsCertFile, tlsKeyFile) of (Just cf, Just kf) -> credentialLoadX509 cf kf >>= \case Left err -> Postmaster.fail ("cannot load certificate: " ++ err) Right cred -> Just <$> makeTlsServerParams cred _ -> return Nothing pure Configuration {..} makeTlsServerParams :: Credential -> IO ServerParams makeTlsServerParams cred = do store <- getSystemCertificateStore smgr <- newSessionManager defaultConfig return $ def { serverShared = def { sharedSessionManager = smgr , sharedCAStore = store , sharedCredentials = Credentials [cred] } , serverSupported = def { supportedCiphers = ciphersuite_default , supportedHashSignatures = delete (HashIntrinsic, SignatureRSApssRSAeSHA512) (supportedHashSignatures def) } } ----- Helper functions -- | Parse a listen address specification into a (host name, service name) -- tuple suitable for resolution with 'getAddrInfo'. -- -- >>> parseListenAddr "0.0.0.0:25" -- (Just "0.0.0.0","25") -- -- >>> parseListenAddr ":25" -- (Nothing,"25") -- -- >>> parseListenAddr "localhost:smtp" -- (Just "localhost","smtp") -- -- >>> parseListenAddr ":::25" -- (Just "::","25") -- -- >>> parseListenAddr "25" -- (Nothing,"25") parseListenAddr :: String -> (Maybe HostName, ServiceName) parseListenAddr buf = case break (==':') (reverse buf) of (sn,hn) | hn `elem` ["",":"] -> (Nothing, reverse sn) | otherwise -> (Just (reverse (drop 1 hn)), reverse sn)
null
https://raw.githubusercontent.com/peti/postmaster/65867f01fa5084b2895340e7d98c601a5e0ab1e3/src/Main.hs
haskell
# LANGUAGE ConstraintKinds # # LANGUAGE OverloadedStrings # --- Helper functions | Parse a listen address specification into a (host name, service name) tuple suitable for resolution with 'getAddrInfo'. >>> parseListenAddr "0.0.0.0:25" (Just "0.0.0.0","25") (Nothing,"25") >>> parseListenAddr "localhost:smtp" (Just "localhost","smtp") (Nothing,"25")
| Module : Main Copyright : ( C ) 2004 - 2019 License : GNU AFFERO GPL v3 or later Maintainer : Stability : experimental Portability : non - portable Module: Main Copyright: (C) 2004-2019 Peter Simons License: GNU AFFERO GPL v3 or later Maintainer: Stability: experimental Portability: non-portable -} # LANGUAGE ApplicativeDo # # LANGUAGE DuplicateRecordFields # # LANGUAGE FlexibleContexts # # LANGUAGE FlexibleInstances # # LANGUAGE GeneralizedNewtypeDeriving # # LANGUAGE LambdaCase # # LANGUAGE RecordWildCards # module Main ( main ) where import Paths_postmaster ( version ) import Postmaster TOOD : Add to prelude ? import Data.Version import Data.List import Network.Socket import Network.TLS as TLS hiding ( HostName ) import Network.TLS.Extra.Cipher as TLS import Network.TLS.SessionManager as TLS import Options.Applicative import System.Exit import System.Posix.Syslog as Syslog ( withSyslog, Facility(Mail) ) import System.X509 newtype Postmaster a = Postmaster { runPostmaster :: ReaderT (LogAction Postmaster LogMsg) IO a } deriving ( Applicative, Functor, Monad, MonadIO, MonadUnliftIO , MonadReader (LogAction Postmaster LogMsg) ) data CliOptions = CliOptions { listenAddressStrings :: [String] , tlsCertFile :: Maybe FilePath , tlsKeyFile :: Maybe FilePath , spoolDir :: FilePath } deriving (Show) data Configuration = Configuration { listenAddresses :: [(Maybe HostName, ServiceName)] , tlsServerParams :: Maybe ServerParams , spoolDir :: FilePath } deriving (Show) cliOptions :: Parser CliOptions cliOptions = do listenAddressStrings <- many (strOption $ long "listen" <> metavar "ADDR-SPEC" <> help "Accept incoming connections on this address. Can be specified multiple times.") tlsCertFile <- optional (strOption $ long "tls-cert" <> metavar "PATH" <> help "The server's TLS certificate.") tlsKeyFile <- optional (strOption $ long "tls-key" <> metavar "PATH" <> help "The server's TLS private key.") spoolDir <- strOption (long "spool-dir" <> metavar "PATH" <> help "Path to the mail spool directory." <> value "/var/spool/mqueue") pure CliOptions {..} cli :: ParserInfo CliOptions cli = info ( helper <*> infoOption ("postmaster version " ++ showVersion version) (long "version" <> help "Show version number") <*> cliOptions ) (briefDesc <> header ("Postmaster ESMTP Server version " ++ showVersion version)) main :: IO () main = withSocketsDo $ withSyslog "postmaster" [] Mail $ do cfg <- execParser cli >>= makeConfiguration runReaderT (runPostmaster (postmaster cfg)) (logToHandle stderr) postmaster :: (MonadUnliftIO m, MonadLog env m) => Configuration -> m () postmaster cfg@Configuration {..} = handle (\e -> logError ("fatal error: " <> display (e::SomeException)) >> liftIO exitFailure) $ do logDebug "postmaster starting up ..." 
logInfo $ display cfg mapConcurrently_ (`listener` acceptor (esmtpdAcceptor tlsServerParams spoolDir)) listenAddresses makeConfiguration :: CliOptions -> IO Configuration makeConfiguration CliOptions {..} = do let listenAddresses | null listenAddressStrings = [(Nothing, "25")] | otherwise = map parseListenAddr listenAddressStrings tlsServerParams <- case (tlsCertFile, tlsKeyFile) of (Just cf, Just kf) -> credentialLoadX509 cf kf >>= \case Left err -> Postmaster.fail ("cannot load certificate: " ++ err) Right cred -> Just <$> makeTlsServerParams cred _ -> return Nothing pure Configuration {..} makeTlsServerParams :: Credential -> IO ServerParams makeTlsServerParams cred = do store <- getSystemCertificateStore smgr <- newSessionManager defaultConfig return $ def { serverShared = def { sharedSessionManager = smgr , sharedCAStore = store , sharedCredentials = Credentials [cred] } , serverSupported = def { supportedCiphers = ciphersuite_default , supportedHashSignatures = delete (HashIntrinsic, SignatureRSApssRSAeSHA512) (supportedHashSignatures def) } } > > > parseListenAddr " : 25 " > > > parseListenAddr " : : : 25 " ( Just " : : " , " 25 " ) > > > 25 " parseListenAddr :: String -> (Maybe HostName, ServiceName) parseListenAddr buf = case break (==':') (reverse buf) of (sn,hn) | hn `elem` ["",":"] -> (Nothing, reverse sn) | otherwise -> (Just (reverse (drop 1 hn)), reverse sn)
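The reusable core of the postmaster Main module above is parseListenAddr, which splits "host:port" on the last ':' so colon-heavy hosts such as "::" survive. Restated below as a standalone program (HostName and ServiceName are local String synonyms standing in for the Network.Socket ones) so the doctest expectations can be checked by running it.

module Main (main) where

-- Local stand-ins for Network.Socket's HostName/ServiceName synonyms.
type HostName    = String
type ServiceName = String

-- Split on the *last* ':' (by reversing first) so hosts containing colons,
-- e.g. "::", keep them all; an empty or bare-":" host part means "any host".
parseListenAddr :: String -> (Maybe HostName, ServiceName)
parseListenAddr buf =
  case break (== ':') (reverse buf) of
    (sn, hn) | hn `elem` ["", ":"] -> (Nothing, reverse sn)
             | otherwise           -> (Just (reverse (drop 1 hn)), reverse sn)

main :: IO ()
main = mapM_ (print . parseListenAddr)
  [ "0.0.0.0:25"      -- (Just "0.0.0.0","25")
  , ":25"             -- (Nothing,"25")
  , "localhost:smtp"  -- (Just "localhost","smtp")
  , ":::25"           -- (Just "::","25")
  , "25"              -- (Nothing,"25")
  ]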
e5a59e8944b454b505e18887f27d39b0c61a20fe426f99a5713c5c30fc9a587f
nvim-treesitter/nvim-treesitter
highlights.scm
[ (path_mod) "||" "&&" "=" "<" ">" "<=" ">=" "+" "-" "*" "/" "!" "|" "^" ] @operator [ "_:" (namespace) ] @namespace [ "UNDEF" "a" ] @variable.builtin [ "ADD" "ALL" "AS" "ASC" "ASK" "BIND" "BY" "CLEAR" "CONSTRUCT" "COPY" "CREATE" "DEFAULT" "DELETE" "DELETE DATA" "DELETE WHERE" "DESC" "DESCRIBE" "DISTINCT" "DROP" "EXISTS" "FILTER" "FROM" "GRAPH" "GROUP" "HAVING" "INSERT" "INSERT DATA" "INTO" "LIMIT" "LOAD" "MINUS" "MOVE" "NAMED" "NOT" "OFFSET" "OPTIONAL" "ORDER" "PREFIX" "REDUCED" "SELECT" "SERVICE" "SILENT" "UNION" "USING" "VALUES" "WHERE" "WITH" ] @keyword (string) @string (echar) @string.escape (integer) @number [ (decimal) (double) ] @float (boolean_literal) @boolean [ "BASE" "PREFIX" ] @keyword [ "ABS" "AVG" "BNODE" "BOUND" "CEIL" "CONCAT" "COALESCE" "CONTAINS" "DATATYPE" "DAY" "ENCODE_FOR_URI" "FLOOR" "HOURS" "IF" "IRI" "LANG" "LANGMATCHES" "LCASE" "MD5" "MINUTES" "MONTH" "NOW" "RAND" "REGEX" "ROUND" "SECONDS" "SHA1" "SHA256" "SHA384" "SHA512" "STR" "SUM" "MAX" "MIN" "SAMPLE" "GROUP_CONCAT" "SEPARATOR" "COUNT" "STRAFTER" "STRBEFORE" "STRDT" "STRENDS" "STRLANG" "STRLEN" "STRSTARTS" "STRUUID" "TIMEZONE" "TZ" "UCASE" "URI" "UUID" "YEAR" "isBLANK" "isIRI" "isLITERAL" "isNUMERIC" "isURI" "sameTerm" ] @function.builtin [ "." "," ";" ] @punctuation.delimiter [ "(" ")" "[" "]" "{" "}" (nil) (anon) ] @punctuation.bracket [ "IN" ("NOT" "IN") ] @keyword.operator (comment) @comment ; Could this be summarized? (select_clause [ bound_variable: (var) "*" ] @parameter) (bind bound_variable: (var) @parameter) (data_block bound_variable: (var) @parameter) (group_condition bound_variable: (var) @parameter) (iri_reference ["<" ">"] @namespace) (lang_tag) @type (rdf_literal "^^" @type datatype: (_ ["<" ">" (namespace)] @type) @type) (function_call identifier: (_) @function) (function_call identifier: (iri_reference ["<" ">"] @function)) (function_call identifier: (prefixed_name (namespace) @function)) (base_declaration (iri_reference ["<" ">"] @variable)) (prefix_declaration (iri_reference ["<" ">"] @variable)) [ (var) (blank_node_label) (iri_reference) (prefixed_name) ] @variable
null
https://raw.githubusercontent.com/nvim-treesitter/nvim-treesitter/ddc0f1b606472b6a1ab85ee9becfd4877507627d/queries/sparql/highlights.scm
scheme
Could this be summarized?
[ (path_mod) "||" "&&" "=" "<" ">" "<=" ">=" "+" "-" "*" "/" "!" "|" "^" ] @operator [ "_:" (namespace) ] @namespace [ "UNDEF" "a" ] @variable.builtin [ "ADD" "ALL" "AS" "ASC" "ASK" "BIND" "BY" "CLEAR" "CONSTRUCT" "COPY" "CREATE" "DEFAULT" "DELETE" "DELETE DATA" "DELETE WHERE" "DESC" "DESCRIBE" "DISTINCT" "DROP" "EXISTS" "FILTER" "FROM" "GRAPH" "GROUP" "HAVING" "INSERT" "INSERT DATA" "INTO" "LIMIT" "LOAD" "MINUS" "MOVE" "NAMED" "NOT" "OFFSET" "OPTIONAL" "ORDER" "PREFIX" "REDUCED" "SELECT" "SERVICE" "SILENT" "UNION" "USING" "VALUES" "WHERE" "WITH" ] @keyword (string) @string (echar) @string.escape (integer) @number [ (decimal) (double) ] @float (boolean_literal) @boolean [ "BASE" "PREFIX" ] @keyword [ "ABS" "AVG" "BNODE" "BOUND" "CEIL" "CONCAT" "COALESCE" "CONTAINS" "DATATYPE" "DAY" "ENCODE_FOR_URI" "FLOOR" "HOURS" "IF" "IRI" "LANG" "LANGMATCHES" "LCASE" "MD5" "MINUTES" "MONTH" "NOW" "RAND" "REGEX" "ROUND" "SECONDS" "SHA1" "SHA256" "SHA384" "SHA512" "STR" "SUM" "MAX" "MIN" "SAMPLE" "GROUP_CONCAT" "SEPARATOR" "COUNT" "STRAFTER" "STRBEFORE" "STRDT" "STRENDS" "STRLANG" "STRLEN" "STRSTARTS" "STRUUID" "TIMEZONE" "TZ" "UCASE" "URI" "UUID" "YEAR" "isBLANK" "isIRI" "isLITERAL" "isNUMERIC" "isURI" "sameTerm" ] @function.builtin [ "." "," ";" ] @punctuation.delimiter [ "(" ")" "[" "]" "{" "}" (nil) (anon) ] @punctuation.bracket [ "IN" ("NOT" "IN") ] @keyword.operator (comment) @comment (select_clause [ bound_variable: (var) "*" ] @parameter) (bind bound_variable: (var) @parameter) (data_block bound_variable: (var) @parameter) (group_condition bound_variable: (var) @parameter) (iri_reference ["<" ">"] @namespace) (lang_tag) @type (rdf_literal "^^" @type datatype: (_ ["<" ">" (namespace)] @type) @type) (function_call identifier: (_) @function) (function_call identifier: (iri_reference ["<" ">"] @function)) (function_call identifier: (prefixed_name (namespace) @function)) (base_declaration (iri_reference ["<" ">"] @variable)) (prefix_declaration (iri_reference ["<" ">"] @variable)) [ (var) (blank_node_label) (iri_reference) (prefixed_name) ] @variable
0bc5843abc7a9f4099ab05637bc9667c0280026f7fe6d9a9d377c02073adb603
input-output-hk/cardano-sl
Production.hs
-- | Definition of a 'Reporter IO' which uses log-warper to gather logs and -- uses the HTTP backend to send them to some server(s). module Pos.Reporting.Production ( ProductionReporterParams (..) , productionReporter ) where import Universum import Control.Exception.Safe (catchIO) import Pos.Crypto (ProtocolMagic) import Pos.Infra.Diffusion.Types (Diffusion) import Pos.Infra.Reporting (Reporter (..)) import Pos.Infra.Reporting.Http (reportNode) import Pos.Infra.Reporting.NodeInfo (extendWithNodeInfo) import Pos.Infra.Reporting.Wlog (LoggerConfig, withWlogTempFile) import Pos.Util.CompileInfo (CompileTimeInfo) import Pos.Util.Trace (Severity, Trace) data ProductionReporterParams = ProductionReporterParams { prpServers :: ![Text] , prpLoggerConfig :: !LoggerConfig , prpProtocolMagic :: !ProtocolMagic , prpCompileTimeInfo :: !CompileTimeInfo , prpTrace :: !(Trace IO (Severity, Text)) } productionReporter :: ProductionReporterParams -> Diffusion IO -- ^ Used to get status info, not to do any network stuff. -> Reporter IO productionReporter params diffusion = Reporter $ \rt -> withWlogTempFile logConfig $ \mfp -> do rt' <- extendWithNodeInfo diffusion rt reportNode logTrace protocolMagic compileTimeInfo servers mfp rt' `catchIO` reportExnHandler rt' where servers = prpServers params logConfig = prpLoggerConfig params protocolMagic = prpProtocolMagic params compileTimeInfo = prpCompileTimeInfo params logTrace = prpTrace params -- reportExnHandler _rt _e = pure ()
null
https://raw.githubusercontent.com/input-output-hk/cardano-sl/1499214d93767b703b9599369a431e67d83f10a2/lib/src/Pos/Reporting/Production.hs
haskell
| Definition of a 'Reporter IO' which uses log-warper to gather logs and uses the HTTP backend to send them to some server(s). ^ Used to get status info, not to do any network stuff.
module Pos.Reporting.Production ( ProductionReporterParams (..) , productionReporter ) where import Universum import Control.Exception.Safe (catchIO) import Pos.Crypto (ProtocolMagic) import Pos.Infra.Diffusion.Types (Diffusion) import Pos.Infra.Reporting (Reporter (..)) import Pos.Infra.Reporting.Http (reportNode) import Pos.Infra.Reporting.NodeInfo (extendWithNodeInfo) import Pos.Infra.Reporting.Wlog (LoggerConfig, withWlogTempFile) import Pos.Util.CompileInfo (CompileTimeInfo) import Pos.Util.Trace (Severity, Trace) data ProductionReporterParams = ProductionReporterParams { prpServers :: ![Text] , prpLoggerConfig :: !LoggerConfig , prpProtocolMagic :: !ProtocolMagic , prpCompileTimeInfo :: !CompileTimeInfo , prpTrace :: !(Trace IO (Severity, Text)) } productionReporter :: ProductionReporterParams -> Reporter IO productionReporter params diffusion = Reporter $ \rt -> withWlogTempFile logConfig $ \mfp -> do rt' <- extendWithNodeInfo diffusion rt reportNode logTrace protocolMagic compileTimeInfo servers mfp rt' `catchIO` reportExnHandler rt' where servers = prpServers params logConfig = prpLoggerConfig params protocolMagic = prpProtocolMagic params compileTimeInfo = prpCompileTimeInfo params logTrace = prpTrace params reportExnHandler _rt _e = pure ()
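productionReporter above wraps the HTTP report action in catchIO so a failed upload never takes the node down. Below is a minimal sketch of that wrap-the-Reporter pattern using base's Control.Exception instead of safe-exceptions; Reporter, withFallback and the flaky example are local stand-ins invented here, not the cardano-sl API.

module Main (main) where

import Control.Exception (IOException, catch)

-- A reporter is just an IO action over a report value.
newtype Reporter r = Reporter { runReporter :: r -> IO () }

-- Wrap the send so an I/O failure is logged and swallowed, mirroring the
-- catchIO/reportExnHandler arrangement in productionReporter above.
withFallback :: Reporter r -> Reporter r
withFallback (Reporter send) = Reporter $ \r ->
  send r `catch` \e ->
    putStrLn ("report failed, ignoring: " ++ show (e :: IOException))

main :: IO ()
main = do
  -- A deliberately failing reporter standing in for the HTTP backend.
  let flaky = Reporter (\msg -> ioError (userError ("cannot upload " ++ msg)))
  runReporter (withFallback flaky) "crash report"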
a41efc0940c785f88fa4ee93e77d9fdb2bbd66669634b080f4261677efc85aba
fossas/fossa-cli
PodfileLockSpec.hs
module Cocoapods.PodfileLockSpec ( spec, ) where import Data.ByteString qualified as BS import Data.Map.Strict qualified as Map import Data.Text (Text) import Data.Yaml (decodeEither') import DepTypes ( DepType (GitType, PodType), Dependency (..), VerConstraint (CEq), ) import GraphUtil (expectDeps, expectDirect, expectEdges) import Strategy.Cocoapods.PodfileLock ( Dep (Dep), ExternalGitSource (..), ExternalSource (..), Pod (Pod), PodLock (..), PodsSpecJSONSubSpec (PodsSpecJSONSubSpec), allSubspecs, buildGraphStatic, ) import Test.Hspec qualified as T podDepOf :: Text -> Maybe Text -> Dependency podDepOf name version = Dependency { dependencyType = PodType , dependencyName = name , dependencyVersion = CEq <$> version , dependencyLocations = [] , dependencyEnvironments = mempty , dependencyTags = Map.empty } gitDepOf :: Text -> Maybe Text -> Dependency gitDepOf name version = Dependency { dependencyType = GitType , dependencyName = name , dependencyVersion = CEq <$> version , dependencyLocations = [] , dependencyEnvironments = mempty , dependencyTags = Map.empty } dependencyOne :: Dependency dependencyOne = podDepOf "one" (Just "1.0.0") dependencyTwo :: Dependency dependencyTwo = podDepOf "two" (Just "2.0.0") dependencyThree :: Dependency dependencyThree = podDepOf "three" (Just "3.0.0") dependencyFour :: Dependency dependencyFour = podDepOf "four" (Just "4.0.0") dependencyAbnormalName :: Dependency dependencyAbnormalName = podDepOf "ab-normal/+name" (Just "2.0.0") dependencyGitTagged :: Dependency dependencyGitTagged = gitDepOf ":ab/cap.git" (Just "v1.2.3") dependencyTwoDepA :: Dependency dependencyTwoDepA = podDepOf "two_dep_A" Nothing dependencyTwoDepB :: Dependency dependencyTwoDepB = podDepOf "two-dep-B" Nothing pods :: [Pod] pods = [ Pod "one" "1.0.0" [Dep "two", Dep "three", Dep "ab-normal/+name"] , Pod "two" "2.0.0" [Dep "two_dep_A", Dep "two-dep-B"] , Pod "three" "3.0.0" [Dep "four"] , Pod "ab-normal/+name" "2.0.0" [] , Pod "four" "4.0.0" [] , Pod "depWithTag" "2.0.0" [] ] dependencies :: [Dep] dependencies = [ Dep "one" , Dep "three" , Dep "depWithTag" ] externalSources :: Map.Map Text ExternalSource externalSources = Map.fromList [ ("depWithTag", ExternalGitType $ ExternalGitSource ":ab/cap.git" (Just "v1.2.3") Nothing Nothing) , ("depWithBranch", ExternalGitType $ ExternalGitSource ":ab/cap.git" Nothing Nothing $ Just "main") , ("depWithCommit", ExternalGitType $ ExternalGitSource ":ab/cap.git" Nothing (Just "9a9a9") Nothing) , ("ChatSecure-Push-iOS", ExternalPath "../Submodules/ChatSecure-Push-iOS") , ("ChatSecureCore", ExternalPodSpec "ChatSecureCore.podspec") ] subspecNoNestedSubspecs :: PodsSpecJSONSubSpec subspecNoNestedSubspecs = PodsSpecJSONSubSpec "a" [] subspecSingleNestedSubspec :: PodsSpecJSONSubSpec subspecSingleNestedSubspec = PodsSpecJSONSubSpec "a" [ PodsSpecJSONSubSpec "b" [] ] subspecMultipleNestedSubspec :: PodsSpecJSONSubSpec subspecMultipleNestedSubspec = PodsSpecJSONSubSpec "a" [ PodsSpecJSONSubSpec "b" [ PodsSpecJSONSubSpec "c" mempty ] , PodsSpecJSONSubSpec "d" [] ] spec :: T.Spec spec = do T.describe "podfile lock analyzer" $ T.it "produces the expected output" $ do let graph = buildGraphStatic $ PodLock pods dependencies externalSources expectDeps [ dependencyOne , dependencyTwo , dependencyThree , dependencyAbnormalName , dependencyFour , dependencyGitTagged , dependencyTwoDepA , dependencyTwoDepB ] graph expectDirect [ dependencyOne , dependencyThree , dependencyGitTagged ] graph expectEdges [ (dependencyOne, dependencyAbnormalName) , (dependencyOne, 
dependencyTwo) , (dependencyOne, dependencyThree) , (dependencyTwo, dependencyTwoDepA) , (dependencyTwo, dependencyTwoDepB) , (dependencyThree, dependencyFour) ] graph podLockFile <- T.runIO (BS.readFile "test/Cocoapods/testdata/Podfile.lock") T.describe "Podfile.lock parser" $ do T.it "should parse content" $ case decodeEither' podLockFile of Left err -> T.expectationFailure $ "failed to parse: " <> show err Right result -> result `T.shouldBe` PodLock pods dependencies externalSources T.describe "Podfile.lock utilities" $ do T.describe "allSubspecs" $ T.it "should returns all subspecs (including nested subspecs)" $ do allSubspecs subspecNoNestedSubspecs `T.shouldMatchList` ["a"] allSubspecs subspecSingleNestedSubspec `T.shouldMatchList` ["a", "a/b"] allSubspecs subspecMultipleNestedSubspec `T.shouldMatchList` ["a", "a/b", "a/b/c", "a/d"]
null
https://raw.githubusercontent.com/fossas/fossa-cli/a346525293f7f5dff232371842017c87ac265693/test/Cocoapods/PodfileLockSpec.hs
haskell
module Cocoapods.PodfileLockSpec ( spec, ) where import Data.ByteString qualified as BS import Data.Map.Strict qualified as Map import Data.Text (Text) import Data.Yaml (decodeEither') import DepTypes ( DepType (GitType, PodType), Dependency (..), VerConstraint (CEq), ) import GraphUtil (expectDeps, expectDirect, expectEdges) import Strategy.Cocoapods.PodfileLock ( Dep (Dep), ExternalGitSource (..), ExternalSource (..), Pod (Pod), PodLock (..), PodsSpecJSONSubSpec (PodsSpecJSONSubSpec), allSubspecs, buildGraphStatic, ) import Test.Hspec qualified as T podDepOf :: Text -> Maybe Text -> Dependency podDepOf name version = Dependency { dependencyType = PodType , dependencyName = name , dependencyVersion = CEq <$> version , dependencyLocations = [] , dependencyEnvironments = mempty , dependencyTags = Map.empty } gitDepOf :: Text -> Maybe Text -> Dependency gitDepOf name version = Dependency { dependencyType = GitType , dependencyName = name , dependencyVersion = CEq <$> version , dependencyLocations = [] , dependencyEnvironments = mempty , dependencyTags = Map.empty } dependencyOne :: Dependency dependencyOne = podDepOf "one" (Just "1.0.0") dependencyTwo :: Dependency dependencyTwo = podDepOf "two" (Just "2.0.0") dependencyThree :: Dependency dependencyThree = podDepOf "three" (Just "3.0.0") dependencyFour :: Dependency dependencyFour = podDepOf "four" (Just "4.0.0") dependencyAbnormalName :: Dependency dependencyAbnormalName = podDepOf "ab-normal/+name" (Just "2.0.0") dependencyGitTagged :: Dependency dependencyGitTagged = gitDepOf ":ab/cap.git" (Just "v1.2.3") dependencyTwoDepA :: Dependency dependencyTwoDepA = podDepOf "two_dep_A" Nothing dependencyTwoDepB :: Dependency dependencyTwoDepB = podDepOf "two-dep-B" Nothing pods :: [Pod] pods = [ Pod "one" "1.0.0" [Dep "two", Dep "three", Dep "ab-normal/+name"] , Pod "two" "2.0.0" [Dep "two_dep_A", Dep "two-dep-B"] , Pod "three" "3.0.0" [Dep "four"] , Pod "ab-normal/+name" "2.0.0" [] , Pod "four" "4.0.0" [] , Pod "depWithTag" "2.0.0" [] ] dependencies :: [Dep] dependencies = [ Dep "one" , Dep "three" , Dep "depWithTag" ] externalSources :: Map.Map Text ExternalSource externalSources = Map.fromList [ ("depWithTag", ExternalGitType $ ExternalGitSource ":ab/cap.git" (Just "v1.2.3") Nothing Nothing) , ("depWithBranch", ExternalGitType $ ExternalGitSource ":ab/cap.git" Nothing Nothing $ Just "main") , ("depWithCommit", ExternalGitType $ ExternalGitSource ":ab/cap.git" Nothing (Just "9a9a9") Nothing) , ("ChatSecure-Push-iOS", ExternalPath "../Submodules/ChatSecure-Push-iOS") , ("ChatSecureCore", ExternalPodSpec "ChatSecureCore.podspec") ] subspecNoNestedSubspecs :: PodsSpecJSONSubSpec subspecNoNestedSubspecs = PodsSpecJSONSubSpec "a" [] subspecSingleNestedSubspec :: PodsSpecJSONSubSpec subspecSingleNestedSubspec = PodsSpecJSONSubSpec "a" [ PodsSpecJSONSubSpec "b" [] ] subspecMultipleNestedSubspec :: PodsSpecJSONSubSpec subspecMultipleNestedSubspec = PodsSpecJSONSubSpec "a" [ PodsSpecJSONSubSpec "b" [ PodsSpecJSONSubSpec "c" mempty ] , PodsSpecJSONSubSpec "d" [] ] spec :: T.Spec spec = do T.describe "podfile lock analyzer" $ T.it "produces the expected output" $ do let graph = buildGraphStatic $ PodLock pods dependencies externalSources expectDeps [ dependencyOne , dependencyTwo , dependencyThree , dependencyAbnormalName , dependencyFour , dependencyGitTagged , dependencyTwoDepA , dependencyTwoDepB ] graph expectDirect [ dependencyOne , dependencyThree , dependencyGitTagged ] graph expectEdges [ (dependencyOne, dependencyAbnormalName) , (dependencyOne, 
dependencyTwo) , (dependencyOne, dependencyThree) , (dependencyTwo, dependencyTwoDepA) , (dependencyTwo, dependencyTwoDepB) , (dependencyThree, dependencyFour) ] graph podLockFile <- T.runIO (BS.readFile "test/Cocoapods/testdata/Podfile.lock") T.describe "Podfile.lock parser" $ do T.it "should parse content" $ case decodeEither' podLockFile of Left err -> T.expectationFailure $ "failed to parse: " <> show err Right result -> result `T.shouldBe` PodLock pods dependencies externalSources T.describe "Podfile.lock utilities" $ do T.describe "allSubspecs" $ T.it "should returns all subspecs (including nested subspecs)" $ do allSubspecs subspecNoNestedSubspecs `T.shouldMatchList` ["a"] allSubspecs subspecSingleNestedSubspec `T.shouldMatchList` ["a", "a/b"] allSubspecs subspecMultipleNestedSubspec `T.shouldMatchList` ["a", "a/b", "a/b/c", "a/d"]
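The spec above leans on Data.Yaml's decodeEither' to turn a Podfile.lock into a PodLock value before comparing graphs. Below is a minimal sketch of that decoding step on a toy type; Entry and the inline YAML are invented, it assumes the yaml and aeson packages are available, and the real PodLock/Pod parsers live in fossa-cli rather than here.

{-# LANGUAGE OverloadedStrings #-}
module Main (main) where

import Data.Aeson (FromJSON (..), withObject, (.:))
import Data.Yaml (ParseException, decodeEither')

-- Toy stand-in for a lockfile entry.
data Entry = Entry { entryName :: String, entryDeps :: [String] }
  deriving Show

instance FromJSON Entry where
  parseJSON = withObject "Entry" $ \o ->
    Entry <$> o .: "name" <*> o .: "deps"

main :: IO ()
main =
  case decodeEither' "name: one\ndeps: [two, three]\n"
         :: Either ParseException Entry of
    Left err -> putStrLn ("failed to parse: " ++ show err)
    Right e  -> print e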
2278026c20d2282e837251fdf90988f1443bf67ac3c3cf0c88b3384741ebbcd3
diffusionkinetics/open
AST.hs
# LANGUAGE DeriveAnyClass , DeriveGeneric , FlexibleInstances # module Stan.AST where import Data.Hashable import GHC.Generics (Generic) import Data.String type Var = String data Stan = Data [Decl] | TransformedData [Decl] | Parameters [Decl] | TransformedParameters [Decl] | Model [Decl] | GeneratedQuantities [Decl] deriving (Eq, Show, Generic, Hashable) data Decl = Type ::: (Var,[Expr]) | (Var,[Expr]) := Expr | (Var,[Expr]) :~ (String, [Expr]) | For Var Expr Expr [Decl] | Print String [Expr] deriving (Eq, Show, Generic, Hashable) data Type = Real | Int | Bounded (Maybe Expr) (Maybe Expr) Type deriving (Eq, Show, Generic, Hashable) data Expr = LitInt Int | LitFloat Float | BinOp String Expr Expr | Ix Expr [Expr] | Apply String [Expr] | Var Var deriving (Eq, Show, Generic, Hashable) infixl 1 := infixl 1 ::: class Indexable a where (!) :: a -> [Expr] -> a instance Indexable Expr where (!) = Ix instance Indexable (Var,[Expr]) where (v,exprs) ! es = (v,exprs++es) instance Num Expr where e1 + e2 = BinOp "+" e1 e2 e1 - e2 = BinOp "-" e1 e2 e1 * e2 = BinOp "*" e1 e2 negate e = Apply "-" [e] abs e = Apply "abs" [e] signum _ = error "stan: signum?" fromInteger x = LitInt (fromInteger x) instance Fractional Expr where e1 / e2 = BinOp "/" e1 e2 fromRational x = LitFloat $ fromRational x instance Floating Expr where pi = LitFloat pi exp e = Apply "exp" [e] log e = Apply "log" [e] sqrt e = Apply "sqrt" [e] sin e = Apply "sin" [e] cos e = Apply "cos" [e] tan e = Apply "tan" [e] asin e = Apply "asin" [e] acos e = Apply "acos" [e] atan e = Apply "atan" [e] asinh e = Apply "asinh" [e] acosh e = Apply "acosh" [e] atanh e = Apply "atanh" [e] sinh e = Apply "sinh" [e] cosh e = Apply "cosh" [e] tanh e = Apply "tanh" [e] instance IsString Expr where fromString s = Var s instance IsString (Var,[Expr]) where fromString s = (s, []) normal :: (Expr , Expr) -> (String, [Expr]) normal (mn, sd) = ("normal", [mn,sd]) gamma :: (Expr , Expr) -> (String, [Expr]) gamma (a, b) = ("gamma", [a,b]) exponential :: Expr -> (String, [Expr]) exponential mu = ("exponential", [mu]) dot :: Expr -> Expr -> Expr dot e1 e2 = Apply "dot_product" [e1,e2] lower :: Expr -> Type -> Type lower lo ty = Bounded (Just lo) Nothing ty
null
https://raw.githubusercontent.com/diffusionkinetics/open/673d9a4a099abd9035ccc21e37d8e614a45a1901/stanhs/lib/Stan/AST.hs
haskell
# LANGUAGE DeriveAnyClass , DeriveGeneric , FlexibleInstances # module Stan.AST where import Data.Hashable import GHC.Generics (Generic) import Data.String type Var = String data Stan = Data [Decl] | TransformedData [Decl] | Parameters [Decl] | TransformedParameters [Decl] | Model [Decl] | GeneratedQuantities [Decl] deriving (Eq, Show, Generic, Hashable) data Decl = Type ::: (Var,[Expr]) | (Var,[Expr]) := Expr | (Var,[Expr]) :~ (String, [Expr]) | For Var Expr Expr [Decl] | Print String [Expr] deriving (Eq, Show, Generic, Hashable) data Type = Real | Int | Bounded (Maybe Expr) (Maybe Expr) Type deriving (Eq, Show, Generic, Hashable) data Expr = LitInt Int | LitFloat Float | BinOp String Expr Expr | Ix Expr [Expr] | Apply String [Expr] | Var Var deriving (Eq, Show, Generic, Hashable) infixl 1 := infixl 1 ::: class Indexable a where (!) :: a -> [Expr] -> a instance Indexable Expr where (!) = Ix instance Indexable (Var,[Expr]) where (v,exprs) ! es = (v,exprs++es) instance Num Expr where e1 + e2 = BinOp "+" e1 e2 e1 - e2 = BinOp "-" e1 e2 e1 * e2 = BinOp "*" e1 e2 negate e = Apply "-" [e] abs e = Apply "abs" [e] signum _ = error "stan: signum?" fromInteger x = LitInt (fromInteger x) instance Fractional Expr where e1 / e2 = BinOp "/" e1 e2 fromRational x = LitFloat $ fromRational x instance Floating Expr where pi = LitFloat pi exp e = Apply "exp" [e] log e = Apply "log" [e] sqrt e = Apply "sqrt" [e] sin e = Apply "sin" [e] cos e = Apply "cos" [e] tan e = Apply "tan" [e] asin e = Apply "asin" [e] acos e = Apply "acos" [e] atan e = Apply "atan" [e] asinh e = Apply "asinh" [e] acosh e = Apply "acosh" [e] atanh e = Apply "atanh" [e] sinh e = Apply "sinh" [e] cosh e = Apply "cosh" [e] tanh e = Apply "tanh" [e] instance IsString Expr where fromString s = Var s instance IsString (Var,[Expr]) where fromString s = (s, []) normal :: (Expr , Expr) -> (String, [Expr]) normal (mn, sd) = ("normal", [mn,sd]) gamma :: (Expr , Expr) -> (String, [Expr]) gamma (a, b) = ("gamma", [a,b]) exponential :: Expr -> (String, [Expr]) exponential mu = ("exponential", [mu]) dot :: Expr -> Expr -> Expr dot e1 e2 = Apply "dot_product" [e1,e2] lower :: Expr -> Type -> Type lower lo ty = Bounded (Just lo) Nothing ty
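Because Stan.AST above gives Expr its Num, Fractional and IsString instances, models can be written with bare literals and strings. A usage sketch follows; it assumes the stanhs module shown in this record is importable, and the linReg model with its variable names is made up for the example.

{-# LANGUAGE OverloadedStrings #-}
module Main (main) where

import Stan.AST

-- A tiny linear-regression-style model written with the combinators above:
-- numeric literals become Expr via Num, strings become variables via IsString.
linReg :: [Stan]
linReg =
  [ Parameters
      [ Real ::: ("alpha", [])
      , Real ::: ("beta", [])
      , lower 0 Real ::: ("sigma", [])
      ]
  , Model
      [ ("y", []) :~ normal ("alpha" + "beta" * "x", "sigma") ]
  ]

main :: IO ()
main = print linReg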
28bf36170c76b5dac3823a9e52b2209ee5883789d041cebb4ea28dffa60d6b97
ndmitchell/ghc-make
BootChild.hs
module BootChild where
null
https://raw.githubusercontent.com/ndmitchell/ghc-make/5164c721efa38a02be33d340cc91f5c737c29156/tests/complex/children/BootChild.hs
haskell
module BootChild where
3f17d8b4966b36e8fc4d95107882d516379cf291e96efb74e2b5d3b8cb23f14e
atgeller/WASM-Redex
SimpleOps.rkt
#lang racket (require racket/flonum redex/reduction-semantics "AdministrativeSyntax.rkt" "../Utilities.rkt" "SizedOps.rkt" "ConstUtilities.rkt") (provide (all-defined-out)) to implement all of the WebAssembly operations ; equivalent to unop_t(c) (define-metafunction WASM-Admin eval-unop : unop t c -> c [(eval-unop clz t c) ,(sized-clz (term (bit-width t)) (term c))] [(eval-unop ctz t c) ,(sized-ctz (term (bit-width t)) (term c))] [(eval-unop popcnt t c) ,(sized-popcnt (term (bit-width t)) (term c))] [(eval-unop abs t c) ,(abs (term c))] [(eval-unop neg t c) ,(- (term c))] [(eval-unop sqrt f32 c) ,(if (negative? (term c)) +nan.0 (flsingle (flsqrt (term c))))] [(eval-unop sqrt f64 c) ,(if (negative? (term c)) +nan.0 (flsqrt (term c)))] [(eval-unop ceil t c) ,(ceiling (term c))] [(eval-unop floor t c) ,(floor (term c))] [(eval-unop nearest t c) ,(round (term c))]) ; equivalent to binop_t(c1, c2) (define-metafunction WASM-Admin eval-binop : binop t c c -> (c ...) [(eval-binop add inn c_1 c_2) (,(sized-add (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop sub inn c_1 c_2) (,(sized-sub (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop mul inn c_1 c_2) (,(sized-mul (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop div-s inn c_1 c_2) (,(sized-signed-div (term (bit-width inn)) (term c_1) (term c_2))) (side-condition (not (equal? (term c_2) 0))) or ()] [(eval-binop div-u inn c_1 c_2) (,(sized-unsigned-div (term (bit-width inn)) (term c_1) (term c_2))) (side-condition (not (equal? (term c_2) 0))) or ()] [(eval-binop rem-s inn c_1 c_2) (,(sized-signed-rem (term (bit-width inn)) (term c_1) (term c_2))) (side-condition (not (equal? (term c_2) 0))) or ()] [(eval-binop rem-u inn c_1 c_2) (,(sized-unsigned-rem (term (bit-width inn)) (term c_1) (term c_2))) (side-condition (not (equal? (term c_2) 0))) or ()] [(eval-binop and inn c_1 c_2) (,(bitwise-and (term c_1) (term c_2)))] [(eval-binop or inn c_1 c_2) (,(bitwise-ior (term c_1) (term c_2)))] [(eval-binop xor inn c_1 c_2) (,(bitwise-xor (term c_1) (term c_2)))] [(eval-binop shl inn c_1 c_2) (,(sized-shl (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop shr-s inn c_1 c_2) (,(sized-signed-shr (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop shr-u inn c_1 c_2) (,(sized-unsigned-shr (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop rotl inn c_1 c_2) (,(sized-rotl (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop rotr inn c_1 c_2) (,(sized-rotr (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop add f32 c_1 c_2) (,(flsingle (fl+ (term c_1) (term c_2))))] [(eval-binop sub f32 c_1 c_2) (,(flsingle (fl- (term c_1) (term c_2))))] [(eval-binop mul f32 c_1 c_2) (,(flsingle (fl* (term c_1) (term c_2))))] [(eval-binop div f32 c_1 c_2) (,(flsingle (fl/ (term c_1) (term c_2))))] [(eval-binop add f64 c_1 c_2) (,(fl+ (term c_1) (term c_2)))] [(eval-binop sub f64 c_1 c_2) (,(fl- (term c_1) (term c_2)))] [(eval-binop mul f64 c_1 c_2) (,(fl* (term c_1) (term c_2)))] [(eval-binop div f64 c_1 c_2) (,(fl/ (term c_1) (term c_2)))] [(eval-binop min fnn c_1 c_2) (,(flmin (term c_1) (term c_2)))] [(eval-binop max fnn c_1 c_2) (,(flmax (term c_1) (term c_2)))] [(eval-binop copysign fnn c_1 c_2) (,(if (or (negative? 
(term c_2)) (fl= (term c_2) -0.0)) (fl- (flabs (term c_1))) (flabs (term c_1))))]) (define-metafunction WASM-Admin eval-testop : testop t c -> c [(eval-testop eqz t c) (bool ,(= (term c) 0))]) (define-metafunction WASM-Admin eval-relop : relop t c c -> c [(eval-relop eq t c_1 c_2) (bool ,(= (term c_1) (term c_2)))] [(eval-relop ne t c_1 c_2) (bool ,(not (= (term c_1) (term c_2))))] [(eval-relop lt-u t c_1 c_2) (bool ,(< (term c_1) (term c_2)))] [(eval-relop gt-u t c_1 c_2) (bool ,(> (term c_1) (term c_2)))] [(eval-relop le-u t c_1 c_2) (bool ,(<= (term c_1) (term c_2)))] [(eval-relop ge-u t c_1 c_2) (bool ,(>= (term c_1) (term c_2)))] [(eval-relop lt-s t c_1 c_2) (bool ,(< (term (signed t c_1)) (term (signed t c_2))))] [(eval-relop gt-s t c_1 c_2) (bool ,(> (term (signed t c_1)) (term (signed t c_2))))] [(eval-relop le-s t c_1 c_2) (bool ,(<= (term (signed t c_1)) (term (signed t c_2))))] [(eval-relop ge-s t c_1 c_2) (bool ,(>= (term (signed t c_1)) (term (signed t c_2))))] [(eval-relop lt t c_1 c_2) (bool ,(fl< (term c_1) (term c_2)))] [(eval-relop gt t c_1 c_2) (bool ,(fl> (term c_1) (term c_2)))] [(eval-relop le t c_1 c_2) (bool ,(fl<= (term c_1) (term c_2)))] [(eval-relop ge t c_1 c_2) (bool ,(fl>= (term c_1) (term c_2)))]) (define-metafunction WASM-Admin do-convert : t_1 t_2 (sx ...) c -> (c ...) [(do-convert i64 i32 () c) (,(to-unsigned-sized 32 (term c)))] [(do-convert i32 i64 (signed) c) (,(to-unsigned-sized 64 (to-signed-sized 32 (term c))))] [(do-convert i32 i64 (unsigned) c) (c)] [(do-convert f64 f32 () c) (,(flsingle (term c)))] [(do-convert f32 f64 () c) (c)] [(do-convert inn fnn (signed) c) (,(->fl (to-signed-sized (term (bit-width inn)) (term c))))] [(do-convert inn fnn (unsigned) c) (,(->fl (term c)))] [(do-convert fnn inn (sx) c) () (side-condition (or (nan? (term c)) (infinite? (term c))))] [(do-convert fnn inn (signed) c) (,(to-unsigned-sized (term (bit-width inn)) (fl->exact-integer (truncate (term c))))) (side-condition (< (sub1 (- (expt 2 (sub1 (term (bit-width inn)))))) (truncate (term c)) (expt 2 (sub1 (term (bit-width inn)))))) or ()] [(do-convert fnn inn (unsigned) c) (,(fl->exact-integer (truncate (term c)))) (side-condition (< -1 (truncate (term c)) (expt 2 (term (bit-width inn))))) or ()])
null
https://raw.githubusercontent.com/atgeller/WASM-Redex/81f882f9fd95b645a10baf2565f27efc8f8766e5/Semantics/SimpleOps.rkt
racket
equivalent to unop_t(c) equivalent to binop_t(c1, c2)
#lang racket (require racket/flonum redex/reduction-semantics "AdministrativeSyntax.rkt" "../Utilities.rkt" "SizedOps.rkt" "ConstUtilities.rkt") (provide (all-defined-out)) to implement all of the WebAssembly operations (define-metafunction WASM-Admin eval-unop : unop t c -> c [(eval-unop clz t c) ,(sized-clz (term (bit-width t)) (term c))] [(eval-unop ctz t c) ,(sized-ctz (term (bit-width t)) (term c))] [(eval-unop popcnt t c) ,(sized-popcnt (term (bit-width t)) (term c))] [(eval-unop abs t c) ,(abs (term c))] [(eval-unop neg t c) ,(- (term c))] [(eval-unop sqrt f32 c) ,(if (negative? (term c)) +nan.0 (flsingle (flsqrt (term c))))] [(eval-unop sqrt f64 c) ,(if (negative? (term c)) +nan.0 (flsqrt (term c)))] [(eval-unop ceil t c) ,(ceiling (term c))] [(eval-unop floor t c) ,(floor (term c))] [(eval-unop nearest t c) ,(round (term c))]) (define-metafunction WASM-Admin eval-binop : binop t c c -> (c ...) [(eval-binop add inn c_1 c_2) (,(sized-add (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop sub inn c_1 c_2) (,(sized-sub (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop mul inn c_1 c_2) (,(sized-mul (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop div-s inn c_1 c_2) (,(sized-signed-div (term (bit-width inn)) (term c_1) (term c_2))) (side-condition (not (equal? (term c_2) 0))) or ()] [(eval-binop div-u inn c_1 c_2) (,(sized-unsigned-div (term (bit-width inn)) (term c_1) (term c_2))) (side-condition (not (equal? (term c_2) 0))) or ()] [(eval-binop rem-s inn c_1 c_2) (,(sized-signed-rem (term (bit-width inn)) (term c_1) (term c_2))) (side-condition (not (equal? (term c_2) 0))) or ()] [(eval-binop rem-u inn c_1 c_2) (,(sized-unsigned-rem (term (bit-width inn)) (term c_1) (term c_2))) (side-condition (not (equal? (term c_2) 0))) or ()] [(eval-binop and inn c_1 c_2) (,(bitwise-and (term c_1) (term c_2)))] [(eval-binop or inn c_1 c_2) (,(bitwise-ior (term c_1) (term c_2)))] [(eval-binop xor inn c_1 c_2) (,(bitwise-xor (term c_1) (term c_2)))] [(eval-binop shl inn c_1 c_2) (,(sized-shl (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop shr-s inn c_1 c_2) (,(sized-signed-shr (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop shr-u inn c_1 c_2) (,(sized-unsigned-shr (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop rotl inn c_1 c_2) (,(sized-rotl (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop rotr inn c_1 c_2) (,(sized-rotr (term (bit-width inn)) (term c_1) (term c_2)))] [(eval-binop add f32 c_1 c_2) (,(flsingle (fl+ (term c_1) (term c_2))))] [(eval-binop sub f32 c_1 c_2) (,(flsingle (fl- (term c_1) (term c_2))))] [(eval-binop mul f32 c_1 c_2) (,(flsingle (fl* (term c_1) (term c_2))))] [(eval-binop div f32 c_1 c_2) (,(flsingle (fl/ (term c_1) (term c_2))))] [(eval-binop add f64 c_1 c_2) (,(fl+ (term c_1) (term c_2)))] [(eval-binop sub f64 c_1 c_2) (,(fl- (term c_1) (term c_2)))] [(eval-binop mul f64 c_1 c_2) (,(fl* (term c_1) (term c_2)))] [(eval-binop div f64 c_1 c_2) (,(fl/ (term c_1) (term c_2)))] [(eval-binop min fnn c_1 c_2) (,(flmin (term c_1) (term c_2)))] [(eval-binop max fnn c_1 c_2) (,(flmax (term c_1) (term c_2)))] [(eval-binop copysign fnn c_1 c_2) (,(if (or (negative? 
(term c_2)) (fl= (term c_2) -0.0)) (fl- (flabs (term c_1))) (flabs (term c_1))))]) (define-metafunction WASM-Admin eval-testop : testop t c -> c [(eval-testop eqz t c) (bool ,(= (term c) 0))]) (define-metafunction WASM-Admin eval-relop : relop t c c -> c [(eval-relop eq t c_1 c_2) (bool ,(= (term c_1) (term c_2)))] [(eval-relop ne t c_1 c_2) (bool ,(not (= (term c_1) (term c_2))))] [(eval-relop lt-u t c_1 c_2) (bool ,(< (term c_1) (term c_2)))] [(eval-relop gt-u t c_1 c_2) (bool ,(> (term c_1) (term c_2)))] [(eval-relop le-u t c_1 c_2) (bool ,(<= (term c_1) (term c_2)))] [(eval-relop ge-u t c_1 c_2) (bool ,(>= (term c_1) (term c_2)))] [(eval-relop lt-s t c_1 c_2) (bool ,(< (term (signed t c_1)) (term (signed t c_2))))] [(eval-relop gt-s t c_1 c_2) (bool ,(> (term (signed t c_1)) (term (signed t c_2))))] [(eval-relop le-s t c_1 c_2) (bool ,(<= (term (signed t c_1)) (term (signed t c_2))))] [(eval-relop ge-s t c_1 c_2) (bool ,(>= (term (signed t c_1)) (term (signed t c_2))))] [(eval-relop lt t c_1 c_2) (bool ,(fl< (term c_1) (term c_2)))] [(eval-relop gt t c_1 c_2) (bool ,(fl> (term c_1) (term c_2)))] [(eval-relop le t c_1 c_2) (bool ,(fl<= (term c_1) (term c_2)))] [(eval-relop ge t c_1 c_2) (bool ,(fl>= (term c_1) (term c_2)))]) (define-metafunction WASM-Admin do-convert : t_1 t_2 (sx ...) c -> (c ...) [(do-convert i64 i32 () c) (,(to-unsigned-sized 32 (term c)))] [(do-convert i32 i64 (signed) c) (,(to-unsigned-sized 64 (to-signed-sized 32 (term c))))] [(do-convert i32 i64 (unsigned) c) (c)] [(do-convert f64 f32 () c) (,(flsingle (term c)))] [(do-convert f32 f64 () c) (c)] [(do-convert inn fnn (signed) c) (,(->fl (to-signed-sized (term (bit-width inn)) (term c))))] [(do-convert inn fnn (unsigned) c) (,(->fl (term c)))] [(do-convert fnn inn (sx) c) () (side-condition (or (nan? (term c)) (infinite? (term c))))] [(do-convert fnn inn (signed) c) (,(to-unsigned-sized (term (bit-width inn)) (fl->exact-integer (truncate (term c))))) (side-condition (< (sub1 (- (expt 2 (sub1 (term (bit-width inn)))))) (truncate (term c)) (expt 2 (sub1 (term (bit-width inn)))))) or ()] [(do-convert fnn inn (unsigned) c) (,(fl->exact-integer (truncate (term c)))) (side-condition (< -1 (truncate (term c)) (expt 2 (term (bit-width inn))))) or ()])
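The Redex metafunctions above defer to helpers such as sized-add and sized-signed-div from SizedOps.rkt. As a rough illustration of the semantics those names suggest (wrap-around arithmetic modulo 2^32, and division that yields no result on a zero divisor, which eval-binop encodes as an empty tuple), here is a sketch in Haskell rather than Redex; it is not a translation of SizedOps.rkt, and the helpers' exact behaviour is an assumption.

module Main (main) where

import Data.Word (Word32)

-- i32.add: Word32 arithmetic already wraps modulo 2^32.
i32Add :: Word32 -> Word32 -> Word32
i32Add = (+)

-- i32.div_u: Nothing models the empty result (a trap) on a zero divisor,
-- the case the side-conditions in eval-binop guard against above.
i32DivU :: Word32 -> Word32 -> Maybe Word32
i32DivU _ 0 = Nothing
i32DivU a b = Just (a `div` b)

main :: IO ()
main = do
  print (i32Add maxBound 1)  -- 0, i.e. wrap-around
  print (i32DivU 7 2)        -- Just 3
  print (i32DivU 7 0)        -- Nothing (trap)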
3b6163965ec0a0e6129754cd9d8676e9fa595b6c6a09c6e6e0f560f76bd4274a
bananu7/Hate
Events.hs
module Hate.Events ( initialEventsState , setCallbacks , fetchEvents , allowedEvent , module Hate.Events.Types ) where import qualified Graphics.UI.GLFW as GLFW import Control.Concurrent.STM (TQueue, atomically, newTQueueIO, tryReadTQueue, writeTQueue) import Hate.Events.Types import Hate.Common.Types import Control.Monad.IO.Class (liftIO) import Control.Monad.State.Class (gets) import Control.Applicative import Data.Maybe import GHC.Float (double2Float) initialEventsState :: IO EventsState initialEventsState = newTQueueIO :: IO (TQueue TimedEvent) The code has been borrowed from GLFW - b - demo ; thanks @bsl I assume only one window can be used by the framework time = fromJust <$> GLFW.getTime writeWithTime :: TQueue TimedEvent -> Event -> IO () writeWithTime tc e = time >>= \t -> atomically . writeTQueue tc $ (t, e) errorCallback :: TQueue TimedEvent -> GLFW.Error -> String -> IO () windowPosCallback :: TQueue TimedEvent -> GLFW.Window -> Int -> Int -> IO () windowSizeCallback :: TQueue TimedEvent -> GLFW.Window -> Int -> Int -> IO () windowCloseCallback :: TQueue TimedEvent -> GLFW.Window -> IO () windowRefreshCallback :: TQueue TimedEvent -> GLFW.Window -> IO () windowFocusCallback :: TQueue TimedEvent -> GLFW.Window -> GLFW.FocusState -> IO () windowIconifyCallback :: TQueue TimedEvent -> GLFW.Window -> GLFW.IconifyState -> IO () framebufferSizeCallback :: TQueue TimedEvent -> GLFW.Window -> Int -> Int -> IO () mouseButtonCallback :: TQueue TimedEvent -> GLFW.Window -> GLFW.MouseButton -> GLFW.MouseButtonState -> GLFW.ModifierKeys -> IO () cursorPosCallback :: TQueue TimedEvent -> GLFW.Window -> Double -> Double -> IO () cursorEnterCallback :: TQueue TimedEvent -> GLFW.Window -> GLFW.CursorState -> IO () scrollCallback :: TQueue TimedEvent -> GLFW.Window -> Double -> Double -> IO () keyCallback :: TQueue TimedEvent -> GLFW.Window -> GLFW.Key -> Int -> GLFW.KeyState -> GLFW.ModifierKeys -> IO () charCallback :: TQueue TimedEvent -> GLFW.Window -> Char -> IO () errorCallback tc e s = writeWithTime tc $ EventError e s windowPosCallback tc _ x y = writeWithTime tc $ EventWindowPos x y windowSizeCallback tc _ w h = writeWithTime tc $ EventWindowSize w h windowCloseCallback tc _ = writeWithTime tc $ EventWindowClose windowRefreshCallback tc _ = writeWithTime tc $ EventWindowRefresh windowFocusCallback tc _ fa = writeWithTime tc $ EventWindowFocus fa windowIconifyCallback tc _ ia = writeWithTime tc $ EventWindowIconify ia framebufferSizeCallback tc _ w h = writeWithTime tc $ EventFramebufferSize w h mouseButtonCallback tc _ mb mba mk = writeWithTime tc $ EventMouseButton mb mba mk cursorPosCallback tc _ x y = writeWithTime tc $ EventCursorPos (double2Float x) (double2Float y) cursorEnterCallback tc _ ca = writeWithTime tc $ EventCursorEnter ca scrollCallback tc _ x y = writeWithTime tc $ EventScroll x y keyCallback tc _ k sc ka mk = writeWithTime tc $ EventKey k sc ka mk charCallback tc _ c = writeWithTime tc $ EventChar c setErrorCallback :: TQueue TimedEvent -> IO () setErrorCallback eventsChan = GLFW.setErrorCallback $ Just $ errorCallback eventsChan setCallbacks :: EventsState -> GLFW.Window -> IO () setCallbacks eventsChan win = do GLFW.setWindowPosCallback win $ Just $ windowPosCallback eventsChan GLFW.setWindowSizeCallback win $ Just $ windowSizeCallback eventsChan GLFW.setWindowCloseCallback win $ Just $ windowCloseCallback eventsChan GLFW.setWindowRefreshCallback win $ Just $ windowRefreshCallback eventsChan GLFW.setWindowFocusCallback win $ Just $ windowFocusCallback eventsChan 
GLFW.setWindowIconifyCallback win $ Just $ windowIconifyCallback eventsChan GLFW.setFramebufferSizeCallback win $ Just $ framebufferSizeCallback eventsChan GLFW.setMouseButtonCallback win $ Just $ mouseButtonCallback eventsChan GLFW.setCursorPosCallback win $ Just $ cursorPosCallback eventsChan GLFW.setCursorEnterCallback win $ Just $ cursorEnterCallback eventsChan GLFW.setScrollCallback win $ Just $ scrollCallback eventsChan GLFW.setKeyCallback win $ Just $ keyCallback eventsChan GLFW.setCharCallback win $ Just $ charCallback eventsChan fetchEvents :: HateInner us [TimedEvent] fetchEvents = fetchEvents' [] where fetchEvents' :: [TimedEvent] -> HateInner us [TimedEvent] fetchEvents' xs = do tc <- gets (eventsState . libraryState) me <- liftIO $ atomically $ tryReadTQueue tc case me of Just e -> fetchEvents' (e:xs) Nothing -> return xs -- | Some events aren't meant to impact the user, and should be handled -- internally by framework instead. allowedEvent :: Event -> Bool allowedEvent (EventWindowClose) = True allowedEvent (EventWindowFocus _) = True allowedEvent (EventMouseButton _ _ _) = True allowedEvent (EventCursorPos _ _) = True allowedEvent (EventScroll _ _) = True allowedEvent (EventKey _ _ _ _) = True allowedEvent (EventChar _) = True allowedEvent _ = False
null
https://raw.githubusercontent.com/bananu7/Hate/4aa4ec0cf1e9bcb32d8cc807ab2ee092807c7ddb/src/Hate/Events.hs
haskell
| Some events aren't meant to impact the user, and should be handled internally by framework instead.
module Hate.Events ( initialEventsState , setCallbacks , fetchEvents , allowedEvent , module Hate.Events.Types ) where import qualified Graphics.UI.GLFW as GLFW import Control.Concurrent.STM (TQueue, atomically, newTQueueIO, tryReadTQueue, writeTQueue) import Hate.Events.Types import Hate.Common.Types import Control.Monad.IO.Class (liftIO) import Control.Monad.State.Class (gets) import Control.Applicative import Data.Maybe import GHC.Float (double2Float) initialEventsState :: IO EventsState initialEventsState = newTQueueIO :: IO (TQueue TimedEvent) The code has been borrowed from GLFW - b - demo ; thanks @bsl I assume only one window can be used by the framework time = fromJust <$> GLFW.getTime writeWithTime :: TQueue TimedEvent -> Event -> IO () writeWithTime tc e = time >>= \t -> atomically . writeTQueue tc $ (t, e) errorCallback :: TQueue TimedEvent -> GLFW.Error -> String -> IO () windowPosCallback :: TQueue TimedEvent -> GLFW.Window -> Int -> Int -> IO () windowSizeCallback :: TQueue TimedEvent -> GLFW.Window -> Int -> Int -> IO () windowCloseCallback :: TQueue TimedEvent -> GLFW.Window -> IO () windowRefreshCallback :: TQueue TimedEvent -> GLFW.Window -> IO () windowFocusCallback :: TQueue TimedEvent -> GLFW.Window -> GLFW.FocusState -> IO () windowIconifyCallback :: TQueue TimedEvent -> GLFW.Window -> GLFW.IconifyState -> IO () framebufferSizeCallback :: TQueue TimedEvent -> GLFW.Window -> Int -> Int -> IO () mouseButtonCallback :: TQueue TimedEvent -> GLFW.Window -> GLFW.MouseButton -> GLFW.MouseButtonState -> GLFW.ModifierKeys -> IO () cursorPosCallback :: TQueue TimedEvent -> GLFW.Window -> Double -> Double -> IO () cursorEnterCallback :: TQueue TimedEvent -> GLFW.Window -> GLFW.CursorState -> IO () scrollCallback :: TQueue TimedEvent -> GLFW.Window -> Double -> Double -> IO () keyCallback :: TQueue TimedEvent -> GLFW.Window -> GLFW.Key -> Int -> GLFW.KeyState -> GLFW.ModifierKeys -> IO () charCallback :: TQueue TimedEvent -> GLFW.Window -> Char -> IO () errorCallback tc e s = writeWithTime tc $ EventError e s windowPosCallback tc _ x y = writeWithTime tc $ EventWindowPos x y windowSizeCallback tc _ w h = writeWithTime tc $ EventWindowSize w h windowCloseCallback tc _ = writeWithTime tc $ EventWindowClose windowRefreshCallback tc _ = writeWithTime tc $ EventWindowRefresh windowFocusCallback tc _ fa = writeWithTime tc $ EventWindowFocus fa windowIconifyCallback tc _ ia = writeWithTime tc $ EventWindowIconify ia framebufferSizeCallback tc _ w h = writeWithTime tc $ EventFramebufferSize w h mouseButtonCallback tc _ mb mba mk = writeWithTime tc $ EventMouseButton mb mba mk cursorPosCallback tc _ x y = writeWithTime tc $ EventCursorPos (double2Float x) (double2Float y) cursorEnterCallback tc _ ca = writeWithTime tc $ EventCursorEnter ca scrollCallback tc _ x y = writeWithTime tc $ EventScroll x y keyCallback tc _ k sc ka mk = writeWithTime tc $ EventKey k sc ka mk charCallback tc _ c = writeWithTime tc $ EventChar c setErrorCallback :: TQueue TimedEvent -> IO () setErrorCallback eventsChan = GLFW.setErrorCallback $ Just $ errorCallback eventsChan setCallbacks :: EventsState -> GLFW.Window -> IO () setCallbacks eventsChan win = do GLFW.setWindowPosCallback win $ Just $ windowPosCallback eventsChan GLFW.setWindowSizeCallback win $ Just $ windowSizeCallback eventsChan GLFW.setWindowCloseCallback win $ Just $ windowCloseCallback eventsChan GLFW.setWindowRefreshCallback win $ Just $ windowRefreshCallback eventsChan GLFW.setWindowFocusCallback win $ Just $ windowFocusCallback eventsChan 
GLFW.setWindowIconifyCallback win $ Just $ windowIconifyCallback eventsChan GLFW.setFramebufferSizeCallback win $ Just $ framebufferSizeCallback eventsChan GLFW.setMouseButtonCallback win $ Just $ mouseButtonCallback eventsChan GLFW.setCursorPosCallback win $ Just $ cursorPosCallback eventsChan GLFW.setCursorEnterCallback win $ Just $ cursorEnterCallback eventsChan GLFW.setScrollCallback win $ Just $ scrollCallback eventsChan GLFW.setKeyCallback win $ Just $ keyCallback eventsChan GLFW.setCharCallback win $ Just $ charCallback eventsChan fetchEvents :: HateInner us [TimedEvent] fetchEvents = fetchEvents' [] where fetchEvents' :: [TimedEvent] -> HateInner us [TimedEvent] fetchEvents' xs = do tc <- gets (eventsState . libraryState) me <- liftIO $ atomically $ tryReadTQueue tc case me of Just e -> fetchEvents' (e:xs) Nothing -> return xs allowedEvent :: Event -> Bool allowedEvent (EventWindowClose) = True allowedEvent (EventWindowFocus _) = True allowedEvent (EventMouseButton _ _ _) = True allowedEvent (EventCursorPos _ _) = True allowedEvent (EventScroll _ _) = True allowedEvent (EventKey _ _ _ _) = True allowedEvent (EventChar _) = True allowedEvent _ = False
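fetchEvents above drains a TQueue of timestamped events without blocking, using tryReadTQueue inside a small accumulator loop. Below is a self-contained sketch of that drain pattern; it requires the stm package, the queue contents and the drainQueue helper are invented, and unlike fetchEvents' it returns events in arrival order.

module Main (main) where

import Control.Concurrent.STM
  (TQueue, atomically, newTQueueIO, tryReadTQueue, writeTQueue)

-- Drain whatever is currently queued without blocking, like fetchEvents'.
drainQueue :: TQueue a -> IO [a]
drainQueue q = go []
  where
    go acc = do
      next <- atomically (tryReadTQueue q)
      case next of
        Just x  -> go (x : acc)
        Nothing -> return (reverse acc)

main :: IO ()
main = do
  q <- newTQueueIO :: IO (TQueue String)
  mapM_ (atomically . writeTQueue q) ["key 'W'", "cursor (10,20)", "focus lost"]
  drainQueue q >>= mapM_ putStrLn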
c9b246088d3153ee7b2792098d034f13847b41732b52c420d39851cf762961bd
MercuryTechnologies/ghc-specter
GHC.hs
{- FOURMOLU_DISABLE -} # LANGUAGE CPP # # LANGUAGE LambdaCase # module GHCSpecter.Util.GHC ( -- * pretty print showPpr, printPpr, -- * module name getModuleName, mkModuleNameMap, formatName, formatImportedNames, -- * module graph getTopSortedModules, extractModuleSources, extractModuleGraphInfo, ) where import Control.Monad.IO.Class (MonadIO (liftIO)) import Data.Char (isAlpha) import Data.IntMap (IntMap) import Data.IntMap qualified as IM import Data.List qualified as L import Data.Map (Map) import Data.Map qualified as M import Data.Maybe (catMaybes, mapMaybe) import Data.Text qualified as T import Data.Tuple (swap) import GHC.Data.Graph.Directed qualified as G import GHC.Driver.Make (topSortModuleGraph) import GHC.Driver.Session (DynFlags) import GHC.Plugins ( ModSummary, Name, localiseName, showSDoc, ) import GHC.Types.Name.Reader ( GlobalRdrElt (..), GreName (..), ImpDeclSpec (..), ImportSpec (..), ) import GHC.Types.SourceFile (HscSource (..)) import GHC.Unit.Module.Graph ( ModuleGraph, ModuleGraphNode (..), mgModSummaries, mgModSummaries', ) import GHC.Unit.Module.Location (ModLocation (..)) import GHC.Unit.Module.ModSummary (ModSummary (..)) import GHC.Unit.Module.Name (moduleNameString) import GHC.Unit.Types (GenModule (moduleName)) import GHC.Utils.Outputable (Outputable (ppr)) import GHCSpecter.Channel.Common.Types (type ModuleName) import GHCSpecter.Channel.Outbound.Types (ModuleGraphInfo (..)) import System.Directory (canonicalizePath) GHC - version - dependent imports #if MIN_VERSION_ghc(9, 4, 0) import GHC.Data.Bag (bagToList) import GHC.Unit.Module.Graph (moduleGraphNodes) #elif MIN_VERSION_ghc(9, 2, 0) import GHC.Driver.Make (moduleGraphNodes) import GHC.Unit.Module.ModSummary (ExtendedModSummary (..)) #endif -- -- pretty print -- showPpr :: (Outputable a) => DynFlags -> a -> String showPpr dflags = showSDoc dflags . ppr printPpr :: (Outputable a, MonadIO m) => DynFlags -> a -> m () printPpr dflags = liftIO . putStrLn . showPpr dflags -- -- module name -- | Extract module name from ModSummary . -- For hs-boot and hsig, we rely on stringy suffix ".hs-boot" and ".hsig". TODO : Use HscSource directly ( i.e. ( ModuleName , HscSource ) ) to index a module . getModuleName :: ModSummary -> ModuleName getModuleName s = let sig = ms_hsc_src s name = T.pack . moduleNameString . moduleName . ms_mod $ s in case sig of HsSrcFile -> name HsBootFile -> name <> ".hs-boot" HsigFile -> name <> ".hsig" formatName :: DynFlags -> Name -> String formatName dflags name = let str = showSDoc dflags . ppr . localiseName $ name in case str of (x : _) -> -- NOTE: As we want to have resultant text directly copied and pasted to -- the source code, the operator identifiers should be wrapped with -- parentheses. if isAlpha x then str else "(" ++ str ++ ")" _ -> str formatImportedNames :: [String] -> String formatImportedNames names = case fmap (++ ",\n") $ L.sort names of l0 : ls -> let l0' = " ( " ++ l0 ls' = fmap (" " ++) ls footer = " )" in concat ([l0'] ++ ls' ++ [footer]) _ -> " ()" mkModuleNameMap :: GlobalRdrElt -> [(ModuleName, Name)] mkModuleNameMap gre = do #if MIN_VERSION_ghc(9, 4, 0) spec <- bagToList (gre_imp gre) #elif MIN_VERSION_ghc(9, 2, 0) spec <- gre_imp gre #endif case gre_name gre of NormalGreName name -> do let modName = T.pack . moduleNameString . is_mod . is_decl $ spec pure (modName, name) -- TODO: Handle the record field name case correctly. 
FieldGreName _ -> [] -- -- module graph -- gnode2ModSummary :: ModuleGraphNode -> Maybe ModSummary gnode2ModSummary InstantiationNode {} = Nothing #if MIN_VERSION_ghc(9, 4, 0) gnode2ModSummary (ModuleNode _ modSummary) = Just modSummary gnode2ModSummary LinkNode {} = Nothing #else gnode2ModSummary (ModuleNode emod) = Just (emsModSummary emod) #endif getTopSortedModules :: ModuleGraph -> [ModuleName] getTopSortedModules modGraph = let sccs' = topSortModuleGraph False modGraph Nothing allMods = concatMap G.flattenSCC sccs' maybeModNameFromModSummary = fmap getModuleName . gnode2ModSummary allModNames = mapMaybe maybeModNameFromModSummary allMods in allModNames extractModuleSources :: ModuleGraph -> IO (Map ModuleName FilePath) extractModuleSources modGraph = do M.fromList . catMaybes <$> traverse extract (mgModSummaries modGraph) where extract ms = do let msrcFile = ml_hs_file (ms_location ms) msrcFile' <- traverse canonicalizePath msrcFile pure $ fmap (getModuleName ms,) msrcFile' extractModuleGraphInfo :: ModuleGraph -> ModuleGraphInfo extractModuleGraphInfo modGraph = do let (graph, _) = moduleGraphNodes False (mgModSummaries' modGraph) vtxs = G.verticesG graph modNameFromVertex = fmap getModuleName . gnode2ModSummary . G.node_payload modNameMapLst = mapMaybe (\v -> (G.node_key v,) <$> modNameFromVertex v) vtxs modNameMap :: IntMap ModuleName modNameMap = IM.fromList modNameMapLst modNameRevMap :: Map ModuleName Int modNameRevMap = M.fromList $ fmap swap modNameMapLst topSorted = mapMaybe (\n -> M.lookup n modNameRevMap) $ getTopSortedModules modGraph modDeps = IM.fromList $ fmap (\v -> (G.node_key v, G.node_dependencies v)) vtxs in ModuleGraphInfo modNameMap modDeps topSorted
null
https://raw.githubusercontent.com/MercuryTechnologies/ghc-specter/5fbabc11a78011095298cebdfff1b53d431ece73/plugin/src/GHCSpecter/Util/GHC.hs
haskell
FOURMOLU_DISABLE * pretty print * module name * module graph pretty print module name For hs-boot and hsig, we rely on stringy suffix ".hs-boot" and ".hsig". NOTE: As we want to have resultant text directly copied and pasted to the source code, the operator identifiers should be wrapped with parentheses. TODO: Handle the record field name case correctly. module graph
# LANGUAGE CPP # # LANGUAGE LambdaCase # module GHCSpecter.Util.GHC ( showPpr, printPpr, getModuleName, mkModuleNameMap, formatName, formatImportedNames, getTopSortedModules, extractModuleSources, extractModuleGraphInfo, ) where import Control.Monad.IO.Class (MonadIO (liftIO)) import Data.Char (isAlpha) import Data.IntMap (IntMap) import Data.IntMap qualified as IM import Data.List qualified as L import Data.Map (Map) import Data.Map qualified as M import Data.Maybe (catMaybes, mapMaybe) import Data.Text qualified as T import Data.Tuple (swap) import GHC.Data.Graph.Directed qualified as G import GHC.Driver.Make (topSortModuleGraph) import GHC.Driver.Session (DynFlags) import GHC.Plugins ( ModSummary, Name, localiseName, showSDoc, ) import GHC.Types.Name.Reader ( GlobalRdrElt (..), GreName (..), ImpDeclSpec (..), ImportSpec (..), ) import GHC.Types.SourceFile (HscSource (..)) import GHC.Unit.Module.Graph ( ModuleGraph, ModuleGraphNode (..), mgModSummaries, mgModSummaries', ) import GHC.Unit.Module.Location (ModLocation (..)) import GHC.Unit.Module.ModSummary (ModSummary (..)) import GHC.Unit.Module.Name (moduleNameString) import GHC.Unit.Types (GenModule (moduleName)) import GHC.Utils.Outputable (Outputable (ppr)) import GHCSpecter.Channel.Common.Types (type ModuleName) import GHCSpecter.Channel.Outbound.Types (ModuleGraphInfo (..)) import System.Directory (canonicalizePath) GHC - version - dependent imports #if MIN_VERSION_ghc(9, 4, 0) import GHC.Data.Bag (bagToList) import GHC.Unit.Module.Graph (moduleGraphNodes) #elif MIN_VERSION_ghc(9, 2, 0) import GHC.Driver.Make (moduleGraphNodes) import GHC.Unit.Module.ModSummary (ExtendedModSummary (..)) #endif showPpr :: (Outputable a) => DynFlags -> a -> String showPpr dflags = showSDoc dflags . ppr printPpr :: (Outputable a, MonadIO m) => DynFlags -> a -> m () printPpr dflags = liftIO . putStrLn . showPpr dflags | Extract module name from ModSummary . TODO : Use HscSource directly ( i.e. ( ModuleName , HscSource ) ) to index a module . getModuleName :: ModSummary -> ModuleName getModuleName s = let sig = ms_hsc_src s name = T.pack . moduleNameString . moduleName . ms_mod $ s in case sig of HsSrcFile -> name HsBootFile -> name <> ".hs-boot" HsigFile -> name <> ".hsig" formatName :: DynFlags -> Name -> String formatName dflags name = let str = showSDoc dflags . ppr . localiseName $ name in case str of (x : _) -> if isAlpha x then str else "(" ++ str ++ ")" _ -> str formatImportedNames :: [String] -> String formatImportedNames names = case fmap (++ ",\n") $ L.sort names of l0 : ls -> let l0' = " ( " ++ l0 ls' = fmap (" " ++) ls footer = " )" in concat ([l0'] ++ ls' ++ [footer]) _ -> " ()" mkModuleNameMap :: GlobalRdrElt -> [(ModuleName, Name)] mkModuleNameMap gre = do #if MIN_VERSION_ghc(9, 4, 0) spec <- bagToList (gre_imp gre) #elif MIN_VERSION_ghc(9, 2, 0) spec <- gre_imp gre #endif case gre_name gre of NormalGreName name -> do let modName = T.pack . moduleNameString . is_mod . 
is_decl $ spec pure (modName, name) FieldGreName _ -> [] gnode2ModSummary :: ModuleGraphNode -> Maybe ModSummary gnode2ModSummary InstantiationNode {} = Nothing #if MIN_VERSION_ghc(9, 4, 0) gnode2ModSummary (ModuleNode _ modSummary) = Just modSummary gnode2ModSummary LinkNode {} = Nothing #else gnode2ModSummary (ModuleNode emod) = Just (emsModSummary emod) #endif getTopSortedModules :: ModuleGraph -> [ModuleName] getTopSortedModules modGraph = let sccs' = topSortModuleGraph False modGraph Nothing allMods = concatMap G.flattenSCC sccs' maybeModNameFromModSummary = fmap getModuleName . gnode2ModSummary allModNames = mapMaybe maybeModNameFromModSummary allMods in allModNames extractModuleSources :: ModuleGraph -> IO (Map ModuleName FilePath) extractModuleSources modGraph = do M.fromList . catMaybes <$> traverse extract (mgModSummaries modGraph) where extract ms = do let msrcFile = ml_hs_file (ms_location ms) msrcFile' <- traverse canonicalizePath msrcFile pure $ fmap (getModuleName ms,) msrcFile' extractModuleGraphInfo :: ModuleGraph -> ModuleGraphInfo extractModuleGraphInfo modGraph = do let (graph, _) = moduleGraphNodes False (mgModSummaries' modGraph) vtxs = G.verticesG graph modNameFromVertex = fmap getModuleName . gnode2ModSummary . G.node_payload modNameMapLst = mapMaybe (\v -> (G.node_key v,) <$> modNameFromVertex v) vtxs modNameMap :: IntMap ModuleName modNameMap = IM.fromList modNameMapLst modNameRevMap :: Map ModuleName Int modNameRevMap = M.fromList $ fmap swap modNameMapLst topSorted = mapMaybe (\n -> M.lookup n modNameRevMap) $ getTopSortedModules modGraph modDeps = IM.fromList $ fmap (\v -> (G.node_key v, G.node_dependencies v)) vtxs in ModuleGraphInfo modNameMap modDeps topSorted
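A minimal usage sketch of the formatImportedNames helper defined above, to make its output layout concrete. The driver module, the identifier list, and the exact indentation shown in the expected-output comment are illustrative only (the spacing constants are hard to read in the flattened source), so treat the output as approximate rather than exact.

-- Hypothetical driver; assumes the ghc-specter plugin library providing
-- GHCSpecter.Util.GHC is on the build path. Inputs are made-up identifiers.
module Main where

import GHCSpecter.Util.GHC (formatImportedNames)

main :: IO ()
main = putStr (formatImportedNames ["runST", "mapMaybe", "<|>"])
-- Expected shape: names sorted, one per line, each followed by a comma,
-- the whole list wrapped in parentheses, roughly:
--   ( <|>,
--     mapMaybe,
--     runST,
--   )

Note that wrapping operator names in parentheses is handled separately by formatName; formatImportedNames only sorts and lays out whatever strings it is given.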
29db957277a46bcba2999fe585bd07b85888919ee1359b489b35c5e2b5065642
janestreet/merlin-jst
old_command.ml
{ { { COPYING * ( This file is part of Merlin , an helper for ocaml editors Copyright ( C ) 2013 - 2015 < frederic.bour(_)lakaban.net > refis.thomas(_)gmail.com > < simon.castellan(_)iuwt.fr > Permission is hereby granted , free of charge , to any person obtaining a copy of this software and associated documentation files ( the " Software " ) , to deal in the Software without restriction , including without limitation the rights to use , copy , modify , merge , publish , distribute , sublicense , and/or sell copies of the Software , and to permit persons to whom the Software is furnished to do so , subject to the following conditions : The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software . The Software is provided " as is " , without warranty of any kind , express or implied , including but not limited to the warranties of merchantability , fitness for a particular purpose and noninfringement . In no event shall the authors or copyright holders be liable for any claim , damages or other liability , whether in an action of contract , tort or otherwise , arising from , out of or in connection with the software or the use or other dealings in the Software . ) * } } } This file is part of Merlin, an helper for ocaml editors Copyright (C) 2013 - 2015 Frédéric Bour <frederic.bour(_)lakaban.net> Thomas Refis <refis.thomas(_)gmail.com> Simon Castellan <simon.castellan(_)iuwt.fr> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. The Software is provided "as is", without warranty of any kind, express or implied, including but not limited to the warranties of merchantability, fitness for a particular purpose and noninfringement. In no event shall the authors or copyright holders be liable for any claim, damages or other liability, whether in an action of contract, tort or otherwise, arising from, out of or in connection with the software or the use or other dealings in the Software. 
)* }}} *) open Std open Old_protocol module Printtyp = Type_utils.Printtyp type customization = [ | `Ext of [`Enabled | `Disabled] * string | `Flags of string list | `Use of string list | `Path of [`Build | `Source] * [`Add | `Rem] * string list ] let customize config = let open Mconfig in function | `Ext (`Enabled, ext) -> let extensions = ext :: config.merlin.extensions in {config with merlin = {config.merlin with extensions}}; | `Ext (`Disabled, ext) -> let extensions = List.remove_all ext config.merlin.extensions in {config with merlin = {config.merlin with extensions}}; | `Flags flags -> let flags_to_apply = [{workdir = config.query.directory; workval = flags}] in {config with merlin = {config.merlin with flags_to_apply}} | `Use _pkgs -> config | `Path (var, action, paths) -> let f l = match action with | `Add -> List.filter_dup (paths @ l) | `Rem -> List.filter l ~f:(fun x -> not (List.mem x ~set:paths)) in let merlin = config.merlin in let merlin = match var with | `Build -> {merlin with build_path = f merlin.build_path} | `Source -> {merlin with source_path = f merlin.source_path} in {config with merlin} type buffer = { path: string option; dot_merlins: string list option; mutable customization : customization list; mutable source : Msource.t; } type state = { mutable buffer : buffer; } let normalize_document doc = doc.Context.path, doc.Context.dot_merlins let new_buffer (path, dot_merlins) = { path; dot_merlins; customization = []; source = Msource.make "" } let default_config = ref Mconfig.initial let configure (state : buffer) = let config = !default_config in let config = {config with Mconfig.query = match state.path with | None -> config.Mconfig.query | Some path -> { config.Mconfig.query with Mconfig. filename = Filename.basename path; directory = Misc.canonicalize_filename (Filename.dirname path); } } in let config = match state.dot_merlins with ignore anything but the first one ... 
Mconfig.get_external_config first config | None | Some [] -> match state.path with | None -> config | Some p -> Mconfig.get_external_config p config in List.fold_left ~f:customize ~init:config state.customization let new_state document = { buffer = new_buffer document } let checkout_buffer_cache = ref [] let checkout_buffer = let cache_size = 8 in fun document -> let document = normalize_document document in try List.assoc document !checkout_buffer_cache with Not_found -> let buffer = new_buffer document in begin match document with | Some _, _ -> checkout_buffer_cache := (document, buffer) :: List.take_n cache_size !checkout_buffer_cache | None, _ -> () end; buffer let make_pipeline config buffer = Mpipeline.make config buffer.source let dispatch_sync config state (type a) : a sync_command -> a = function | Idle_job -> false | Tell (pos_start, pos_end, text) -> let source = Msource.substitute state.source pos_start pos_end text in state.source <- source | Refresh -> checkout_buffer_cache := []; Cmi_cache.flush () | Flags_set flags -> state.customization <- (`Flags flags) :: List.filter ~f:(function `Flags _ -> false | _ -> true) state.customization; `Ok | Findlib_use packages -> state.customization <- (`Use packages) :: List.filter ~f:(function `Use _ -> false | _ -> true) state.customization; `Ok | Extension_set (action,exts) -> state.customization <- List.map ~f:(fun ext -> `Ext (action, ext)) exts @ List.filter ~f:(function | `Ext (_, ext) when List.mem ext ~set:exts -> false | _ -> true ) state.customization; `Ok | Path (var,_,paths) -> state.customization <- List.filter_map ~f:(function | `Path (var', action', paths') when var = var' -> let paths' = List.filter paths' ~f:(fun path -> not (List.mem path ~set:paths)) in if paths' = [] then None else Some (`Path (var', action', paths')) | x -> Some x ) state.customization | Path_reset -> state.customization <- List.filter ~f:(function | `Path _ -> false | _ -> true ) state.customization; | Protocol_version version -> begin match version with | None -> () | Some 2 -> Old_IO.current_version := `V2 | Some 3 -> Old_IO.current_version := `V3 | Some _ -> () end; (`Selected !Old_IO.current_version, `Latest Old_IO.latest_version, Printf.sprintf "The Merlin toolkit version %s, for Ocaml %s\n" Merlin_config.version Sys.ocaml_version) | Flags_get -> let pipeline = make_pipeline config state in let config = Mpipeline.final_config pipeline in List.concat_map ~f:(fun f -> f.workval) Mconfig.(config.merlin.flags_to_apply) | Project_get -> let failures = match Mconfig.(config.merlin.failures) with | [] -> `Ok | failures -> `Failures failures in (Option.cons Mconfig.(config.merlin.config_path) [], failures) | Checkout _ -> failwith "invalid arguments" let default_state = lazy (new_state (None, None)) let document_states : (string option * string list option, state) Hashtbl.t = Hashtbl.create 7 let dispatch (type a) (context : Context.t) (cmd : a command) : a = let open Context in (* Document selection *) let state = match context.document with | None -> Lazy.force default_state | Some document -> let document = normalize_document document in try Hashtbl.find document_states document with Not_found -> let state = new_state document in Hashtbl.add document_states document state; state in let config = configure state.buffer in (* Printer verbosity *) let config = match context.printer_verbosity with | None -> config | Some verbosity -> let verbosity = Mconfig.Verbosity.of_string verbosity in Mconfig.({config with query = {config.query with verbosity}}) in 
let config = match context.printer_width with | None -> config | Some printer_width -> Mconfig.({config with query = {config.query with printer_width}}) in (* Printer width *) Format.default_width := Option.value ~default:0 context.printer_width; (* Actual dispatch *) match cmd with | Query q -> let pipeline = make_pipeline config state.buffer in Mpipeline.with_pipeline pipeline @@ fun () -> Query_commands.dispatch pipeline q | Sync (Checkout context) when state == Lazy.force default_state -> let buffer = checkout_buffer context in state.buffer <- buffer | Sync s -> dispatch_sync config state.buffer s
null
https://raw.githubusercontent.com/janestreet/merlin-jst/9c3b60c98d80b56af18ea95c27a0902f0244659a/src/frontend/ocamlmerlin/old/old_command.ml
ocaml
Document selection Printer verbosity Printer width Actual dispatch
{ { { COPYING * ( This file is part of Merlin , an helper for ocaml editors Copyright ( C ) 2013 - 2015 < frederic.bour(_)lakaban.net > refis.thomas(_)gmail.com > < simon.castellan(_)iuwt.fr > Permission is hereby granted , free of charge , to any person obtaining a copy of this software and associated documentation files ( the " Software " ) , to deal in the Software without restriction , including without limitation the rights to use , copy , modify , merge , publish , distribute , sublicense , and/or sell copies of the Software , and to permit persons to whom the Software is furnished to do so , subject to the following conditions : The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software . The Software is provided " as is " , without warranty of any kind , express or implied , including but not limited to the warranties of merchantability , fitness for a particular purpose and noninfringement . In no event shall the authors or copyright holders be liable for any claim , damages or other liability , whether in an action of contract , tort or otherwise , arising from , out of or in connection with the software or the use or other dealings in the Software . ) * } } } This file is part of Merlin, an helper for ocaml editors Copyright (C) 2013 - 2015 Frédéric Bour <frederic.bour(_)lakaban.net> Thomas Refis <refis.thomas(_)gmail.com> Simon Castellan <simon.castellan(_)iuwt.fr> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. The Software is provided "as is", without warranty of any kind, express or implied, including but not limited to the warranties of merchantability, fitness for a particular purpose and noninfringement. In no event shall the authors or copyright holders be liable for any claim, damages or other liability, whether in an action of contract, tort or otherwise, arising from, out of or in connection with the software or the use or other dealings in the Software. 
)* }}} *) open Std open Old_protocol module Printtyp = Type_utils.Printtyp type customization = [ | `Ext of [`Enabled | `Disabled] * string | `Flags of string list | `Use of string list | `Path of [`Build | `Source] * [`Add | `Rem] * string list ] let customize config = let open Mconfig in function | `Ext (`Enabled, ext) -> let extensions = ext :: config.merlin.extensions in {config with merlin = {config.merlin with extensions}}; | `Ext (`Disabled, ext) -> let extensions = List.remove_all ext config.merlin.extensions in {config with merlin = {config.merlin with extensions}}; | `Flags flags -> let flags_to_apply = [{workdir = config.query.directory; workval = flags}] in {config with merlin = {config.merlin with flags_to_apply}} | `Use _pkgs -> config | `Path (var, action, paths) -> let f l = match action with | `Add -> List.filter_dup (paths @ l) | `Rem -> List.filter l ~f:(fun x -> not (List.mem x ~set:paths)) in let merlin = config.merlin in let merlin = match var with | `Build -> {merlin with build_path = f merlin.build_path} | `Source -> {merlin with source_path = f merlin.source_path} in {config with merlin} type buffer = { path: string option; dot_merlins: string list option; mutable customization : customization list; mutable source : Msource.t; } type state = { mutable buffer : buffer; } let normalize_document doc = doc.Context.path, doc.Context.dot_merlins let new_buffer (path, dot_merlins) = { path; dot_merlins; customization = []; source = Msource.make "" } let default_config = ref Mconfig.initial let configure (state : buffer) = let config = !default_config in let config = {config with Mconfig.query = match state.path with | None -> config.Mconfig.query | Some path -> { config.Mconfig.query with Mconfig. filename = Filename.basename path; directory = Misc.canonicalize_filename (Filename.dirname path); } } in let config = match state.dot_merlins with ignore anything but the first one ... 
Mconfig.get_external_config first config | None | Some [] -> match state.path with | None -> config | Some p -> Mconfig.get_external_config p config in List.fold_left ~f:customize ~init:config state.customization let new_state document = { buffer = new_buffer document } let checkout_buffer_cache = ref [] let checkout_buffer = let cache_size = 8 in fun document -> let document = normalize_document document in try List.assoc document !checkout_buffer_cache with Not_found -> let buffer = new_buffer document in begin match document with | Some _, _ -> checkout_buffer_cache := (document, buffer) :: List.take_n cache_size !checkout_buffer_cache | None, _ -> () end; buffer let make_pipeline config buffer = Mpipeline.make config buffer.source let dispatch_sync config state (type a) : a sync_command -> a = function | Idle_job -> false | Tell (pos_start, pos_end, text) -> let source = Msource.substitute state.source pos_start pos_end text in state.source <- source | Refresh -> checkout_buffer_cache := []; Cmi_cache.flush () | Flags_set flags -> state.customization <- (`Flags flags) :: List.filter ~f:(function `Flags _ -> false | _ -> true) state.customization; `Ok | Findlib_use packages -> state.customization <- (`Use packages) :: List.filter ~f:(function `Use _ -> false | _ -> true) state.customization; `Ok | Extension_set (action,exts) -> state.customization <- List.map ~f:(fun ext -> `Ext (action, ext)) exts @ List.filter ~f:(function | `Ext (_, ext) when List.mem ext ~set:exts -> false | _ -> true ) state.customization; `Ok | Path (var,_,paths) -> state.customization <- List.filter_map ~f:(function | `Path (var', action', paths') when var = var' -> let paths' = List.filter paths' ~f:(fun path -> not (List.mem path ~set:paths)) in if paths' = [] then None else Some (`Path (var', action', paths')) | x -> Some x ) state.customization | Path_reset -> state.customization <- List.filter ~f:(function | `Path _ -> false | _ -> true ) state.customization; | Protocol_version version -> begin match version with | None -> () | Some 2 -> Old_IO.current_version := `V2 | Some 3 -> Old_IO.current_version := `V3 | Some _ -> () end; (`Selected !Old_IO.current_version, `Latest Old_IO.latest_version, Printf.sprintf "The Merlin toolkit version %s, for Ocaml %s\n" Merlin_config.version Sys.ocaml_version) | Flags_get -> let pipeline = make_pipeline config state in let config = Mpipeline.final_config pipeline in List.concat_map ~f:(fun f -> f.workval) Mconfig.(config.merlin.flags_to_apply) | Project_get -> let failures = match Mconfig.(config.merlin.failures) with | [] -> `Ok | failures -> `Failures failures in (Option.cons Mconfig.(config.merlin.config_path) [], failures) | Checkout _ -> failwith "invalid arguments" let default_state = lazy (new_state (None, None)) let document_states : (string option * string list option, state) Hashtbl.t = Hashtbl.create 7 let dispatch (type a) (context : Context.t) (cmd : a command) : a = let open Context in let state = match context.document with | None -> Lazy.force default_state | Some document -> let document = normalize_document document in try Hashtbl.find document_states document with Not_found -> let state = new_state document in Hashtbl.add document_states document state; state in let config = configure state.buffer in let config = match context.printer_verbosity with | None -> config | Some verbosity -> let verbosity = Mconfig.Verbosity.of_string verbosity in Mconfig.({config with query = {config.query with verbosity}}) in let config = match context.printer_width with | 
None -> config | Some printer_width -> Mconfig.({config with query = {config.query with printer_width}}) in Format.default_width := Option.value ~default:0 context.printer_width; match cmd with | Query q -> let pipeline = make_pipeline config state.buffer in Mpipeline.with_pipeline pipeline @@ fun () -> Query_commands.dispatch pipeline q | Sync (Checkout context) when state == Lazy.force default_state -> let buffer = checkout_buffer context in state.buffer <- buffer | Sync s -> dispatch_sync config state.buffer s
93966e7c82e270e9ed52d95cd88da71935a65d30abe0e1000b2fd3a2d4665b9e
tweag/sparkle
Main.hs
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LinearTypes #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StaticPointers #-}
{-# LANGUAGE QualifiedDo #-}

module Main where

import Control.Distributed.Closure
import qualified Control.Distributed.Spark as Spark
import Control.Distributed.Spark.Safe.RDD as RDD
import Control.Distributed.Spark.Safe.Context

import qualified Prelude as P
import Prelude.Linear hiding (IO, filter, zero, sqrt)
import qualified Prelude.Linear as PL
import System.IO.Linear as LIO

import Control.Functor.Linear as Linear
import Control.Monad.IO.Class.Linear
import qualified Data.Functor.Linear as D

import qualified Data.Text as Text
import Data.Coerce as Coerce

import Foreign.JNI.Safe
import qualified Foreign.JNI.Types
import Language.Java.Safe

newLocalRef3 :: (MonadIO m, Coerce.Coercible o (J ty)) => o %1-> m (o, o, o)
newLocalRef3 j = Linear.do
  (j0, j1) <- newLocalRef j
  (j2, j3) <- newLocalRef j0
  pure $ (j1, j2, j3)

newLocalRef4 :: (MonadIO m, Coerce.Coercible o (J ty)) => o %1-> m (o, o, o, o)
newLocalRef4 j = Linear.do
  (j0, j1, j2) <- newLocalRef3 j
  (j3, j4) <- newLocalRef j0
  pure $ (j1, j2, j3, j4)

newLocalRef5 :: (MonadIO m, Coerce.Coercible o (J ty)) => o %1-> m (o, o, o, o, o)
newLocalRef5 j = Linear.do
  (j0, j1, j2, j3) <- newLocalRef4 j
  (j4, j5) <- newLocalRef j0
  pure $ (j1, j2, j3, j4, j5)

newLocalRef6 :: (MonadIO m, Coerce.Coercible o (J ty)) => o %1-> m (o, o, o, o, o, o)
newLocalRef6 j = Linear.do
  (j0, j1, j2, j3, j4) <- newLocalRef5 j
  (j5, j6) <- newLocalRef j0
  pure $ (j1, j2, j3, j4, j5, j6)

(=<<) = flip (Linear.>>=)
infixr 1 =<<

main :: P.IO ()
main = Spark.forwardUnhandledExceptionsToSpark $ do
    withLocalFrame $ Linear.do
      conf <- newSparkConf "RDD operations demo"
      sc <- getOrCreateSparkContext conf
      rdd <- parallelize sc $ Text.words "The quick brown fox jumps over the lazy dog"
      (rdd0, rdd1, rdd2, rdd3, rdd4, rdd5) <- newLocalRef6 rdd
      print =<< collect rdd0
      -- Does not work, because we don't have reify and reflect instances for
      -- streams - this might require jvm - streaming - safe
      print =<< RDD.reduce (closure P.$ static (\a b -> b P.<> " " P.<> a)) rdd1
      print =<< collect =<< RDD.map (closure $ static Text.reverse) rdd2
      print =<< RDD.take 3 rdd3
      print =<< collect =<< RDD.distinct rdd4
      printU =<< RDD.fold (closure P.$ static (P.||)) False =<< RDD.map (closure P.$ static (P.=="dog")) rdd5
  where
    print :: Show a => Ur a %1 -> IO ()
    print (Ur a) = LIO.fromSystemIO (P.print a)
    printU :: Show a => Ur a %1 -> IO (Ur ())
    printU (Ur a) = LIO.fromSystemIOU (P.print a)
null
https://raw.githubusercontent.com/tweag/sparkle/7ad678c6830cfe689b3bed03e008728e59c40cfc/apps/rdd-ops-safe/Main.hs
haskell
# LANGUAGE LinearTypes # # LANGUAGE OverloadedStrings # Does not work, because we don't have reify and reflect instances for
# LANGUAGE FlexibleContexts # # LANGUAGE NoImplicitPrelude # # LANGUAGE StaticPointers # # LANGUAGE QualifiedDo # module Main where import Control.Distributed.Closure import qualified Control.Distributed.Spark as Spark import Control.Distributed.Spark.Safe.RDD as RDD import Control.Distributed.Spark.Safe.Context import qualified Prelude as P import Prelude.Linear hiding (IO, filter, zero, sqrt) import qualified Prelude.Linear as PL import System.IO.Linear as LIO import Control.Functor.Linear as Linear import Control.Monad.IO.Class.Linear import qualified Data.Functor.Linear as D import qualified Data.Text as Text import Data.Coerce as Coerce import Foreign.JNI.Safe import qualified Foreign.JNI.Types import Language.Java.Safe newLocalRef3 :: (MonadIO m, Coerce.Coercible o (J ty)) => o %1-> m (o, o, o) newLocalRef3 j = Linear.do (j0, j1) <- newLocalRef j (j2, j3) <- newLocalRef j0 pure $ (j1, j2, j3) newLocalRef4 :: (MonadIO m, Coerce.Coercible o (J ty)) => o %1-> m (o, o, o, o) newLocalRef4 j = Linear.do (j0, j1, j2) <- newLocalRef3 j (j3, j4) <- newLocalRef j0 pure $ (j1, j2, j3, j4) newLocalRef5 :: (MonadIO m, Coerce.Coercible o (J ty)) => o %1-> m (o, o, o, o, o) newLocalRef5 j = Linear.do (j0, j1, j2, j3) <- newLocalRef4 j (j4, j5) <- newLocalRef j0 pure $ (j1, j2, j3, j4, j5) newLocalRef6 :: (MonadIO m, Coerce.Coercible o (J ty)) => o %1-> m (o, o, o, o, o, o) newLocalRef6 j = Linear.do (j0, j1, j2, j3, j4) <- newLocalRef5 j (j5, j6) <- newLocalRef j0 pure $ (j1, j2, j3, j4, j5, j6) (=<<) = flip (Linear.>>=) infixr 1 =<< main :: P.IO () main = Spark.forwardUnhandledExceptionsToSpark $ do withLocalFrame $ Linear.do conf <- newSparkConf "RDD operations demo" sc <- getOrCreateSparkContext conf rdd <- parallelize sc $ Text.words "The quick brown fox jumps over the lazy dog" (rdd0, rdd1, rdd2, rdd3, rdd4, rdd5) <- newLocalRef6 rdd print =<< collect rdd0 streams - this might require jvm - streaming - safe print =<< RDD.reduce (closure P.$ static (\a b -> b P.<> " " P.<> a)) rdd1 print =<< collect =<< RDD.map (closure $ static Text.reverse) rdd2 print =<< RDD.take 3 rdd3 print =<< collect =<< RDD.distinct rdd4 printU =<< RDD.fold (closure P.$ static (P.||)) False =<< RDD.map (closure P.$ static (P.=="dog")) rdd5 where print :: Show a => Ur a %1 -> IO () print (Ur a) = LIO.fromSystemIO (P.print a) printU :: Show a => Ur a %1 -> IO (Ur ()) printU (Ur a) = LIO.fromSystemIOU (P.print a)
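The newLocalRefN helpers in this module all follow one mechanical pattern: call the next-smaller helper to obtain its references, split two more off the first of them with newLocalRef, and return every reference except the one newLocalRef consumed. As an illustration of how the family extends, here is a hypothetical newLocalRef7 in the same style; it is not part of the original file and assumes it is added to the module above, with the same pragmas and imports in scope.

-- Hypothetical next member of the newLocalRefN family above (not in the source).
newLocalRef7 :: (MonadIO m, Coerce.Coercible o (J ty)) => o %1-> m (o, o, o, o, o, o, o)
newLocalRef7 j = Linear.do
  -- six refs from the helper above; j0 is then consumed to mint two more
  (j0, j1, j2, j3, j4, j5) <- newLocalRef6 j
  (j6, j7) <- newLocalRef j0
  pure $ (j1, j2, j3, j4, j5, j6, j7)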
792af1f682dab95c2119909fd70e495cd41a9495c4567e4134ab93be5fedbeab
mflatt/shrubbery-rhombus-0
transformer-result.rkt
#lang racket/base
(require "private/transform.rkt")
(provide check-transformer-result)
null
https://raw.githubusercontent.com/mflatt/shrubbery-rhombus-0/39886de4660e8d303e5345680524389cd1dcf3cc/enforest/transformer-result.rkt
racket
#lang racket/base
(require "private/transform.rkt")
(provide check-transformer-result)
fe75e48249d64bd10fc44e28bf697513b004c0c10cf000afb8adb81980b40ed8
electric-sql/vaxine
clocksi_interactive_coord.erl
%% ------------------------------------------------------------------- %% Copyright < 2013 - 2018 > < Technische Universität Kaiserslautern , Germany , France Universidade NOVA de Lisboa , Portugal Université catholique de Louvain ( UCL ) , Belgique , Portugal %% > %% This file is provided to you under the Apache License , %% Version 2.0 (the "License"); you may not use this file except in compliance with the License . You may obtain %% a copy of the License at %% %% -2.0 %% %% Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY %% KIND, either expressed or implied. See the License for the %% specific language governing permissions and limitations %% under the License. %% List of the contributors to the development of Antidote : see file . %% Description and complete License: see LICENSE file. %% ------------------------------------------------------------------- %% @doc The coordinator for a given Clock SI interactive transaction. It handles the state of the tx and executes the operations sequentially %% by sending each operation to the responsible clockSI_vnode of the involved key . When a tx is finalized ( committed or aborted , the fsm %% also finishes. -module(clocksi_interactive_coord). -behavior(gen_statem). -include("antidote.hrl"). -include_lib("kernel/include/logger.hrl"). %% API -export([ start_link/0, perform_singleitem_operation/4, perform_singleitem_update/5, finish_op/3 ]). %% gen_statem callbacks -export([ init/1, code_change/4, callback_mode/0, terminate/3, stop/1, wait_for_start_transaction/3 ]). %% states -export([ receive_committed/3, receive_logging_responses/3, receive_read_objects_result/3, receive_validate_or_read_objects_result/3, receive_aborted/3, single_committing/3, receive_prepared/3, execute_op/3, committing/3, committing_single/3 ]). %%%=================================================================== %%% API %%%=================================================================== called by clocksi_interactive_coord_sup : start_fm -spec start_link() -> {ok, pid()}. start_link() -> gen_statem:start_link(?MODULE, [], []). TODO spec stop(Pid) -> gen_statem:stop(Pid). %% @doc This is a standalone function for directly contacting the read %% server located at the vnode of the key being read. This read %% is supposed to be light weight because it is done outside of a transaction fsm and directly in the calling thread . %% It either returns the object value or the object state. -spec perform_singleitem_operation( snapshot_time() | ignore, key(), type(), clocksi_readitem:read_property_list() ) -> {ok, val() | term(), snapshot_time()} | {error, reason()}. perform_singleitem_operation(Clock, Key, Type, Properties) -> Transaction = create_transaction_record(Clock, true, Properties), : { Transaction , _ TransactionId } = create_transaction_record(ignore , update_clock , false , undefined , true ) , Preflist = log_utilities:get_preflist_from_key(Key), IndexNode = hd(Preflist), case clocksi_readitem:read_data_item(IndexNode, Key, Type, Transaction, []) of {error, Reason} -> {error, Reason}; {ok, Snapshot} -> %% Read only transaction has no commit, hence return the snapshot time CommitTime = Transaction#transaction.vec_snapshot_time, {ok, Snapshot, CommitTime} end. %% @doc This is a standalone function for directly contacting the update %% server vnode. 
This is lighter than creating a transaction because the update / prepare / commit are all done at one time -spec perform_singleitem_update(snapshot_time() | ignore, key(), type(), {atom(), term()}, list()) -> {ok, {txid(), [], snapshot_time()}} | {error, term()}. perform_singleitem_update(Clock, Key, Type, Params, Properties) -> Transaction = create_transaction_record(Clock, true, Properties), Partition = log_utilities:get_key_partition(Key), %% Execute pre_commit_hook if any case antidote_hooks:execute_pre_commit_hook(Key, Type, Params) of {Key, Type, Params1} -> case clocksi_downstream:generate_downstream_op( Transaction, Partition, Key, Type, Params1, [] ) of {ok, DownstreamRecord} -> UpdatedPartitions = [{Partition, [{Key, Type, DownstreamRecord}]}], TxId = Transaction#transaction.txn_id, LogRecord = #log_operation{ tx_id = TxId, op_type = update_start, log_payload = #update_log_payload{ key = Key, type = Type, op = DownstreamRecord } }, LogId = log_utilities:get_logid_from_key(Key), case logging_vnode:append(Partition, LogId, LogRecord) of {ok, _} -> case clocksi_vnode:single_commit_sync(UpdatedPartitions, Transaction) of {committed, CommitTime} -> %% Execute post commit hook case antidote_hooks:execute_post_commit_hook(Key, Type, Params1) of {error, Reason} -> ?LOG_INFO("Post commit hook failed. Reason ~p", [Reason]); _ -> ok end, TxId = Transaction#transaction.txn_id, DcId = dc_utilities:get_my_dc_id(), CausalClock = vectorclock:set( DcId, CommitTime, Transaction#transaction.vec_snapshot_time ), {ok, {TxId, [], CausalClock}}; abort -> TODO increment aborted transaction metrics ? {error, aborted}; {error, Reason} -> {error, Reason} end; Error -> {error, Error} end; {error, Reason} -> {error, Reason} end; {error, Reason} -> {error, Reason} end. TODO spec finish_op(From, Key, Result) -> gen_statem:cast(From, {Key, Result}). %%%=================================================================== Internal State %%%=================================================================== %%--------------------------------------------------------------------- %% @doc Data Type: state %% where: %% from: the pid of the calling process. txid : transaction i d handled by this fsm , as defined in src / antidote.hrl . %% updated_partitions: the partitions where update operations take place. : when sending prepare_commit , %% number of partitions that have acknowledged. : when sending read requests %% number of partitions that are asked. prepare_time : transaction prepare time . commit_time : transaction commit time . %% state: state of the transaction: {active|prepared|committing|committed} %%---------------------------------------------------------------------- -record(state, { from :: undefined | gen_statem:from(), transaction :: undefined | tx(), updated_partitions :: list(), % list of upstream updates, used for post commit hooks client_ops :: list(), num_to_ack :: non_neg_integer(), num_to_read :: non_neg_integer(), prepare_time :: undefined | clock_time(), commit_time :: undefined | clock_time(), commit_protocol :: term(), state :: active | prepared | committing | committed | committed_read_only | undefined | aborted, operations :: undefined | list() | {update_objects, list()}, return_accumulator :: list() | ok | {error, reason()}, is_static :: boolean(), full_commit :: boolean(), properties :: txn_properties() }). -type state() :: #state{}. 
%%%=================================================================== States %%%=================================================================== %%%== init %% @doc Initialize the state. init([]) -> {ok, wait_for_start_transaction, ignore}. wait_for_start_transaction({call, Sender}, {start_tx, ClientClock, Properties}, _State) -> BaseState = init_state(false, false, Properties), {ok, State} = start_tx_internal(ClientClock, Properties, BaseState), TxnId = (State#state.transaction)#transaction.txn_id, {next_state, execute_op, State, {reply, Sender, {ok, TxnId}}}. = = execute_op %% @doc Contact the leader computed in the prepare state for it to execute the %% operation, wait for it to finish (synchronous) and go to the prepareOP %% to execute the next operation. %% internal state timeout -spec execute_op( {call, gen_statem:from()}, {update | update_objects | read_objects | read | abort | prepare, list()}, state() ) -> gen_statem:event_handler_result(state()). %% update kept for backwards compatibility with tests. execute_op({call, Sender}, {update, Args}, State) -> execute_op({call, Sender}, {update_objects, [Args]}, State); execute_op({call, Sender}, {OpType, Args}, State) -> execute_command(OpType, Args, Sender, State). %%%== receive_prepared @doc in this state , the fsm waits for prepare_time from each updated partitions in order to compute the final tx timestamp ( the maximum of the received prepare_time ) . receive_prepared(cast, {prepared, ReceivedPrepareTime}, State) -> process_prepared(ReceivedPrepareTime, State); receive_prepared(cast, abort, State) -> receive_prepared(cast, timeout, State); receive_prepared(cast, timeout, State) -> abort(State); %% capture regular events (e.g. logging_vnode responses) receive_prepared(info, {_EventType, EventValue}, State) -> receive_prepared(cast, EventValue, State). %%%== committing %% @doc after receiving all prepare_times, send the commit message to all %% updated partitions, and go to the "receive_committed" state. %% This state is used when no commit message from the client is %% expected committing( {call, Sender}, commit, State = #state{ transaction = Transaction, updated_partitions = UpdatedPartitions, commit_time = Commit_time } ) -> NumToAck = length(UpdatedPartitions), case NumToAck of 0 -> reply_to_client(State#state{state = committed_read_only, from = Sender}); _ -> ok = clocksi_vnode:commit(UpdatedPartitions, Transaction, Commit_time), {next_state, receive_committed, State#state{ num_to_ack = NumToAck, from = Sender, state = committing }} end. %%%== single_committing @doc TODO -spec single_committing (cast, {committed | clock_time()} | abort | timeout, state()) -> gen_statem:event_handler_result(state()); (info, {any(), any()}, state()) -> gen_statem:event_handler_result(state()). single_committing( cast, {committed, CommitTime}, State = #state{from = From, full_commit = FullCommit} ) -> case FullCommit of false -> {next_state, committing_single, State#state{commit_time = CommitTime, state = committing}, [ {reply, From, {ok, CommitTime}} ]}; true -> reply_to_client(State#state{ prepare_time = CommitTime, commit_time = CommitTime, state = committed }) end; single_committing(cast, abort, State) -> single_committing(cast, timeout, State); single_committing(cast, timeout, State) -> abort(State); %% capture regular events (e.g. logging_vnode responses) single_committing(info, {_EventType, EventValue}, State) -> single_committing(cast, EventValue, State). 
%%%== receive_aborted @doc the fsm waits for indicating that each partition has successfully %% aborted the tx and finishes operation. %% Should we retry sending the aborted message if we don't receive a %% reply from every partition? %% What delivery guarantees does sending messages provide? receive_aborted(cast, ack_abort, State = #state{num_to_ack = NumToAck}) -> case NumToAck of 1 -> reply_to_client(State#state{state = aborted}); _ -> {next_state, receive_aborted, State#state{num_to_ack = NumToAck - 1}} end; receive_aborted(cast, _, State) -> {next_state, receive_aborted, State}; %% capture regular events (e.g. logging_vnode responses) receive_aborted(info, {_EventType, EventValue}, State) -> receive_aborted(cast, EventValue, State). %%%== receive_read_objects_result %% @doc After asynchronously reading a batch of keys, collect the responses here receive_read_objects_result( cast, {ok, {Key, Type, Snapshot}}, CoordState = #state{ num_to_read = NumToRead, return_accumulator = ReadKeys } ) -> %% Apply local updates to the read snapshot UpdatedSnapshot = apply_tx_updates_to_snapshot(Key, CoordState, Type, Snapshot), %% Swap keys with their appropriate read values ReadValues = replace_first(ReadKeys, Key, UpdatedSnapshot), %% Loop back to the same state until we process all the replies case NumToRead > 1 of true -> {next_state, receive_read_objects_result, CoordState#state{ num_to_read = NumToRead - 1, return_accumulator = ReadValues }}; false -> {next_state, execute_op, CoordState#state{num_to_read = 0}, [ {reply, CoordState#state.from, {ok, lists:reverse(ReadValues)}} ]} end; %% capture regular events (e.g. logging_vnode responses) receive_read_objects_result(info, {_EventType, EventValue}, State) -> receive_read_objects_result(cast, EventValue, State). receive_validate_or_read_objects_result( cast, {ok, {invalid, Key, Type, ReadSnapshot, Token}}, CoordState = #state{ num_to_read = NumToRead, return_accumulator = ReadKeys, updated_partitions = UpdatedPartitions } ) -> {Snapshot, EffectiveToken} = case length(UpdatedPartitions) > 0 of true -> % There are some local updates that must be applied. % It is not safe to apply the update and to return the token returned by the vnode noor it is safe to return % the updated state with a token specific to this uncommitted % transaction. % % In such case, always return a token that will be invalid so % the client can know that returned value shouldn't be cached for % later revalidation. UpdatedSnapshot = apply_tx_updates_to_snapshot(Key, CoordState, Type, ReadSnapshot), {UpdatedSnapshot, ?INVALID_OBJECT_TOKEN}; false -> {ReadSnapshot, Token} end, ReadValues = replace_first(ReadKeys, Key, {invalid, Snapshot, EffectiveToken}), case NumToRead > 1 of true -> {next_state, receive_validate_or_read_objects_result, CoordState#state{ num_to_read = NumToRead - 1, return_accumulator = ReadValues }}; false -> {next_state, execute_op, CoordState#state{num_to_read = 0}, [ {reply, CoordState#state.from, {ok, lists:reverse(ReadValues)}} ]} end; receive_validate_or_read_objects_result( cast, {ok, {valid, Key, _Type}}, CoordState = #state{ num_to_read = NumToRead, return_accumulator = ReadKeys } ) -> ReadValues = replace_first(ReadKeys, Key, valid), case NumToRead > 1 of true -> {next_state, receive_validate_or_read_objects_result, CoordState#state{ num_to_read = NumToRead - 1, return_accumulator = ReadValues }}; false -> {next_state, execute_op, CoordState#state{num_to_read = 0}, [ {reply, CoordState#state.from, {ok, lists:reverse(ReadValues)}} ]} end. 
%%%== receive_logging_responses %% internal state timeout receive_logging_responses(state_timeout, timeout, State) -> receive_logging_responses(cast, timeout, State); %% @doc This state reached after an execute_op(update_objects[Params]). %% update_objects calls the perform_update function, which asynchronously %% sends a log operation per update, to the vnode responsible of the updated %% key. After sending all those messages, the coordinator reaches this state %% to receive the responses of the vnodes. receive_logging_responses( cast, Response, State = #state{ is_static = IsStatic, num_to_read = NumToReply, return_accumulator = ReturnAcc } ) -> NewAcc = case Response of {error, _r} = Err -> Err; {ok, _OpId} -> ReturnAcc; timeout -> ReturnAcc end, %% Loop back to the same state until we process all the replies case NumToReply > 1 of true -> {next_state, receive_logging_responses, State#state{ num_to_read = NumToReply - 1, return_accumulator = NewAcc }}; false -> case NewAcc of ok -> case IsStatic of true -> prepare(State); false -> {next_state, execute_op, State#state{num_to_read = 0, return_accumulator = []}, [ {reply, State#state.from, NewAcc} ]} end; _ -> abort(State) end end; %% capture regular events (e.g. logging_vnode responses) receive_logging_responses(info, {_EventType, EventValue}, State) -> receive_logging_responses(cast, EventValue, State). %%%== receive_committed @doc the fsm waits for indicating that each partition has successfully %% committed the tx and finishes operation. %% Should we retry sending the committed message if we don't receive a %% reply from every partition? %% What delivery guarantees does sending messages provide? receive_committed(cast, committed, State = #state{num_to_ack = NumToAck}) -> case NumToAck of 1 -> reply_to_client(State#state{state = committed}); _ -> {next_state, receive_committed, State#state{num_to_ack = NumToAck - 1}} end; %% capture regular events (e.g. logging_vnode responses) receive_committed(info, {_EventType, EventValue}, State) -> receive_committed(cast, EventValue, State). %%%== committing_single %% @doc There was only a single partition with an update in this transaction %% so the transaction has already been committed %% so just wait for the commit message from the client committing_single({call, Sender}, commit, State = #state{commit_time = Commit_time}) -> reply_to_client(State#state{ prepare_time = Commit_time, from = Sender, commit_time = Commit_time, state = committed }). %% ============================================================================= TODO add to all state functions %%handle_sync_event(stop, _From, _StateName, StateData) -> {stop, normal, ok, StateData}. %%handle_call(From, stop, Data) -> %% {stop_and_reply, normal, {reply, From, ok}, Data}. %% %%handle_info(Info, StateName, Data) -> { stop , { shutdown , { unexpected , Info , StateName } } , , Data } . code_change(_OldVsn, StateName, State, _Extra) -> {ok, StateName, State}. terminate(_Reason, _SN, _SD) -> ok. callback_mode() -> state_functions. %%%=================================================================== %%% Internal Functions %%%=================================================================== @doc TODO -spec init_state(boolean(), boolean(), proplists:proplist()) -> state(). 
init_state(FullCommit, IsStatic, Properties) -> #state{ from = undefined, transaction = undefined, updated_partitions = [], client_ops = [], num_to_ack = 0, num_to_read = 0, prepare_time = 0, operations = undefined, return_accumulator = [], is_static = IsStatic, full_commit = FullCommit, properties = Properties }. @doc TODO -spec start_tx_internal(snapshot_time(), proplists:proplist(), state()) -> {ok, state()} | {error, any()}. start_tx_internal(ClientClock, Properties, State = #state{}) -> TransactionRecord = create_transaction_record(ClientClock, false, Properties), % a new transaction was started, increment metrics ?STATS(open_transaction), {ok, State#state{transaction = TransactionRecord, num_to_read = 0, properties = Properties}}. @doc TODO -spec create_transaction_record(snapshot_time() | ignore, boolean(), txn_properties()) -> tx(). noinspection ErlangUnresolvedFunction create_transaction_record(ClientClock, _IsStatic, Properties) -> %% Seed the random because you pick a random read server, this is stored in the process state _Res = rand:seed(exsplus, { erlang:phash2([node()]), erlang:monotonic_time(), erlang:unique_integer() }), {ok, SnapshotTime} = case ClientClock of ignore -> get_snapshot_time(); _ -> case antidote:get_txn_property(update_clock, Properties) of update_clock -> get_snapshot_time(ClientClock); no_update_clock -> {ok, ClientClock} end end, DcId = dc_utilities:get_my_dc_id(), LocalClock = vectorclock:get(DcId, SnapshotTime), TransactionId = #tx_id{local_start_time = LocalClock, server_pid = self()}, #transaction{ snapshot_time_local = LocalClock, vec_snapshot_time = SnapshotTime, txn_id = TransactionId, properties = Properties }. %% @doc Execute the commit protocol -spec execute_command(atom(), term(), gen_statem:from(), state()) -> gen_statem:event_handler_result(state()). 
execute_command(prepare, Protocol, Sender, State0) -> State = State0#state{from = Sender, commit_protocol = Protocol}, prepare(State); %% @doc Abort the current transaction execute_command(abort, _Protocol, Sender, State) -> abort(State#state{from = Sender}); %% @doc Perform a single read, synchronous execute_command( read, {Key, Type}, Sender, State = #state{ transaction = Transaction, updated_partitions = UpdatedPartitions } ) -> case perform_read({Key, Type}, UpdatedPartitions, Transaction, Sender) of {error, _} -> abort(State); ReadResult -> {next_state, execute_op, State, {reply, Sender, {ok, ReadResult}}} end; %% @doc Read a batch of objects, asynchronous execute_command(read_objects, Objects, Sender, State = #state{transaction = Transaction}) -> ExecuteReads = fun({Key, Type}, AccState) -> ?STATS(operation_read_async), Partition = log_utilities:get_key_partition(Key), ok = clocksi_vnode:async_read_data_item(Partition, Transaction, Key, Type), ReadKeys = AccState#state.return_accumulator, AccState#state{return_accumulator = [Key | ReadKeys]} end, NewCoordState = lists:foldl( ExecuteReads, State#state{num_to_read = length(Objects), return_accumulator = []}, Objects ), {next_state, receive_read_objects_result, NewCoordState#state{from = Sender}}; execute_command( validate_or_read, {Key, Type, Token}, Sender, State = #state{ transaction = Transaction, updated_partitions = UpdatedPartitions } ) -> Partition = log_utilities:get_key_partition(Key), EffectiveToken = case has_tx_updates(Partition, State) of true -> ?INVALID_OBJECT_TOKEN; false -> Token end, case perform_validate_or_read( {Key, Type, EffectiveToken}, UpdatedPartitions, Transaction, Sender ) of {error, _} -> abort(State); ReadResult -> {next_state, execute_op, State, {reply, Sender, {ok, ReadResult}}} end; execute_command( validate_or_read_objects, {Objects, Tokens}, Sender, State = #state{transaction = Transaction} ) -> ExecuteReads = fun({{Key, Type}, Token}, AccState) -> Partition = log_utilities:get_key_partition(Key), % If there are some local update, there is not point on trying to % validate the value as it will be invalidated here anyway. % % Send an invalid token and expect a full result. 
EffectiveToken = case has_tx_updates(Partition, State) of true -> ?INVALID_OBJECT_TOKEN; false -> Token end, ok = clocksi_vnode:async_validate_or_read_data_item( Partition, Transaction, Key, Type, EffectiveToken ), ReadKeys = AccState#state.return_accumulator, AccState#state{return_accumulator = [Key | ReadKeys]} end, NewCoordState = lists:foldl( ExecuteReads, State#state{num_to_read = length(Objects), return_accumulator = []}, lists:zip(Objects, Tokens) ), {next_state, receive_validate_or_read_objects_result, NewCoordState#state{from = Sender}}; %% @doc Perform update operations on a batch of Objects execute_command(update_objects, UpdateOps, Sender, State = #state{transaction = Transaction}) -> ExecuteUpdates = fun( Op, AccState = #state{ client_ops = ClientOps0, updated_partitions = UpdatedPartitions0 } ) -> case perform_update(Op, UpdatedPartitions0, Transaction, Sender, ClientOps0) of {error, _} = Err -> AccState#state{return_accumulator = Err}; {UpdatedPartitions, ClientOps} -> NumToRead = AccState#state.num_to_read, AccState#state{ client_ops = ClientOps, num_to_read = NumToRead + 1, updated_partitions = UpdatedPartitions } end end, NewCoordState = lists:foldl( ExecuteUpdates, State#state{num_to_read = 0, return_accumulator = ok}, UpdateOps ), LoggingState = NewCoordState#state{from = Sender}, case LoggingState#state.num_to_read > 0 of true -> {next_state, receive_logging_responses, LoggingState}; false -> {next_state, receive_logging_responses, LoggingState, [{state_timeout, 0, timeout}]} end. %% @doc when the transaction has committed or aborted, %% a reply is sent to the client that started the transaction. reply_to_client( State = #state{ from = From, state = TxState, is_static = IsStatic, client_ops = ClientOps, commit_time = CommitTime, transaction = Transaction, return_accumulator = ReturnAcc } ) -> TxId = Transaction#transaction.txn_id, _ = case From of undefined -> ok; {_Pid, _Tag} -> Reply = case TxState of committed_read_only -> case IsStatic of false -> {ok, {TxId, Transaction#transaction.vec_snapshot_time}}; true -> {ok, {TxId, ReturnAcc, Transaction#transaction.vec_snapshot_time}} end; committed -> %% Execute post_commit_hooks _Result = execute_post_commit_hooks(ClientOps), %% TODO: What happens if commit hook fails? DcId = dc_utilities:get_my_dc_id(), CausalClock = vectorclock:set( DcId, CommitTime, Transaction#transaction.vec_snapshot_time ), case IsStatic of false -> {ok, {TxId, CausalClock}}; true -> {ok, CausalClock} end; aborted -> ?STATS(transaction_aborted), case ReturnAcc of {error, Reason} -> {error, Reason}; _ -> {error, aborted} end %% can never match (dialyzer) %% Reason -> %% {TxId, Reason} end, gen_statem:reply(From, Reply) end, % transaction is finished, decrement count ?STATS(transaction_finished), {stop, normal, State}. %% @doc The following function is used to apply the updates that were performed by the running %% transaction, to the result returned by a read. -spec apply_tx_updates_to_snapshot(key(), state(), type(), snapshot()) -> snapshot(). apply_tx_updates_to_snapshot(Key, CoordState, Type, Snapshot) -> Partition = log_utilities:get_key_partition(Key), Found = lists:keyfind(Partition, 1, CoordState#state.updated_partitions), case Found of false -> Snapshot; {Partition, WS} -> FilteredAndReversedUpdates = clocksi_vnode:reverse_and_filter_updates_per_key(WS, Key), clocksi_materializer:materialize_eager(Type, Snapshot, FilteredAndReversedUpdates) end. -spec has_tx_updates(index_node(), state()) -> boolean(). 
has_tx_updates(Partition, CoordState) -> case lists:keyfind(Partition, 1, CoordState#state.updated_partitions) of false -> false; _ -> true end. @doc Set the transaction Snapshot Time to the maximum value of : %% 1.ClientClock, which is the last clock of the system the client %% starting this transaction has seen, and 2.machine 's local time , as returned by : now ( ) . -spec get_snapshot_time(snapshot_time()) -> {ok, snapshot_time()}. get_snapshot_time(ClientClock) -> wait_for_clock(ClientClock). -spec get_snapshot_time() -> {ok, snapshot_time()}. get_snapshot_time() -> Now = dc_utilities:now_microsec() - ?OLD_SS_MICROSEC, {ok, VecSnapshotTime} = dc_utilities:get_stable_snapshot(), DcId = dc_utilities:get_my_dc_id(), SnapshotTime = vectorclock:set(DcId, Now, VecSnapshotTime), {ok, SnapshotTime}. -spec wait_for_clock(snapshot_time()) -> {ok, snapshot_time()}. wait_for_clock(Clock) -> {ok, VecSnapshotTime} = get_snapshot_time(), case vectorclock:ge(VecSnapshotTime, Clock) of true -> %% No need to wait {ok, VecSnapshotTime}; false -> %% wait for snapshot time to catch up with Client Clock %TODO Refactor into constant timer:sleep(10), wait_for_clock(Clock) end. Replaces the first occurrence of an entry ; %% yields error if there the element to be replaced is not in the list replace_first([], _, _) -> error; replace_first([Key | Rest], Key, NewKey) -> [NewKey | Rest]; replace_first([NotMyKey | Rest], Key, NewKey) -> [NotMyKey | replace_first(Rest, Key, NewKey)]. perform_read({Key, Type}, UpdatedPartitions, Transaction, Sender) -> ?STATS(operation_read), Partition = log_utilities:get_key_partition(Key), WriteSet = case lists:keyfind(Partition, 1, UpdatedPartitions) of false -> []; {Partition, WS} -> WS end, case clocksi_vnode:read_data_item(Partition, Transaction, Key, Type, WriteSet) of {ok, Snapshot} -> Snapshot; {error, Reason} -> gen_statem:reply(Sender, {error, Reason}), {error, Reason} end. perform_validate_or_read({Key, Type, Token}, UpdatedPartitions, Transaction, Sender) -> Partition = log_utilities:get_key_partition(Key), WriteSet = case lists:keyfind(Partition, 1, UpdatedPartitions) of false -> []; {Partition, WS} -> ?INVALID_OBJECT_TOKEN = Token, WS end, case clocksi_vnode:validate_or_read_data_item(Partition, Transaction, Key, Type, Token, WriteSet) of {ok, Result} -> Result; {error, Reason} -> gen_statem:reply(Sender, {error, Reason}), {error, Reason} end. 
perform_update(Op, UpdatedPartitions, Transaction, _Sender, ClientOps) -> ?STATS(operation_update), {Key, Type, Update} = Op, Partition = log_utilities:get_key_partition(Key), {FirstOp, WriteSet} = case lists:keyfind(Partition, 1, UpdatedPartitions) of false -> {true, []}; {Partition, WS} -> {false, WS} end, %% Execute pre_commit_hook if any case antidote_hooks:execute_pre_commit_hook(Key, Type, Update) of {error, Reason} -> ?LOG_DEBUG("Execute pre-commit hook failed ~p", [Reason]), {error, Reason}; {Key, Type, PostHookUpdate} -> %% Generate the appropriate state operations based on older snapshots GenerateResult = clocksi_downstream:generate_downstream_op( Transaction, Partition, Key, Type, PostHookUpdate, WriteSet ), case GenerateResult of {error, Reason} -> {error, Reason}; {ok, DownstreamOp} -> ok = async_log_propagation( Partition, Transaction#transaction.txn_id, Key, Type, DownstreamOp, FirstOp ), %% Append to the write set of the updated partition GeneratedUpdate = {Key, Type, DownstreamOp}, NewUpdatedPartitions = append_updated_partitions( UpdatedPartitions, WriteSet, Partition, GeneratedUpdate ), UpdatedOps = [{Key, Type, PostHookUpdate} | ClientOps], {NewUpdatedPartitions, UpdatedOps} end end. %% @doc Add new updates to the write set of the given partition. %% %% If there's no write set, create a new one. %% -type write_set_item() :: { {key(), bucket()}, type(), effect() }. -type write_set() :: [ write_set_item() ]. -type updated_partitions() :: [{ index_node(), write_set() }]. -spec append_updated_partitions( updated_partitions(), write_set(), index_node(), write_set_item() ) -> updated_partitions(). append_updated_partitions(UpdatedPartitions, [], Partition, Update) -> [{Partition, [Update]} | UpdatedPartitions]; append_updated_partitions(UpdatedPartitions, WriteSet, Partition, Update) -> %% Update the write set entry with the new record AllUpdates = {Partition, [Update | WriteSet]}, lists:keyreplace(Partition, 1, UpdatedPartitions, AllUpdates). -spec async_log_propagation(index_node(), txid(), key(), type(), effect(), boolean()) -> ok. async_log_propagation(Partition, TxId, Key, Type, Record, FirstOp) -> LogRecord = #log_operation{ op_type = if FirstOp -> update_start; true -> update end, tx_id = TxId, log_payload = #update_log_payload{key = Key, type = Type, op = Record} }, LogId = log_utilities:get_logid_from_key(Key), logging_vnode:asyn_append(Partition, LogId, LogRecord, {fsm, undefined, self()}). %% @doc this function sends a prepare message to all updated partitions and goes %% to the "receive_prepared"state. -spec prepare(state()) -> gen_statem:event_handler_result(state()). 
prepare( State = #state{ num_to_read = NumToRead, full_commit = FullCommit, transaction = Transaction, updated_partitions = UpdatedPartitions, commit_protocol = CommitProtocol } ) -> case UpdatedPartitions of [] -> if %TODO explain this condition, it makes no sense CommitProtocol == two_phase orelse NumToRead == 0 -> case FullCommit of true -> prepare_done(State, commit_read_only); false -> Transaction = State#state.transaction, SnapshotTimeLocal = Transaction#transaction.snapshot_time_local, prepare_done(State, {reply_and_then_commit, SnapshotTimeLocal}) end; true -> {next_state, receive_prepared, State#state{state = prepared}} end; [_] when CommitProtocol /= two_phase -> prepare_done(State, single_committing); [_ | _] -> ok = clocksi_vnode:prepare(UpdatedPartitions, Transaction), Num_to_ack = length(UpdatedPartitions), {next_state, receive_prepared, State#state{num_to_ack = Num_to_ack, state = prepared}} end. %% This function is called when we are done with the prepare phase. %% There are different options to continue the commit phase: %% single_committing: special case for when we just touched a single partition %% commit_read_only: special case for when we have not updated anything %% {reply_and_then_commit, clock_time()}: first reply that we have successfully committed and then try to commit TODO rly? %% {normal_commit, clock_time()}: wait until all participants have acknowledged the commit and then reply to the client -spec prepare_done(state(), Action) -> gen_statem:event_handler_result(state()) when Action :: single_committing | commit_read_only | {reply_and_then_commit, clock_time()} | {normal_commit, clock_time()}. prepare_done(State, Action) -> case Action of single_committing -> UpdatedPartitions = State#state.updated_partitions, Transaction = State#state.transaction, ok = clocksi_vnode:single_commit(UpdatedPartitions, Transaction), {next_state, single_committing, State#state{state = committing, num_to_ack = 1}}; commit_read_only -> reply_to_client(State#state{state = committed_read_only}); {reply_and_then_commit, CommitSnapshotTime} -> From = State#state.from, {next_state, committing, State#state{ state = committing, commit_time = CommitSnapshotTime }, [{reply, From, {ok, CommitSnapshotTime}}]}; {normal_commit, MaxPrepareTime} -> UpdatedPartitions = State#state.updated_partitions, Transaction = State#state.transaction, ok = clocksi_vnode:commit(UpdatedPartitions, Transaction, MaxPrepareTime), {next_state, receive_committed, State#state{ num_to_ack = length(UpdatedPartitions), commit_time = MaxPrepareTime, state = committing }} end. process_prepared( ReceivedPrepareTime, State = #state{ num_to_ack = NumToAck, full_commit = FullCommit, prepare_time = PrepareTime } ) -> MaxPrepareTime = max(PrepareTime, ReceivedPrepareTime), case NumToAck of 1 -> % this is the last ack we expected case FullCommit of true -> prepare_done(State, {normal_commit, MaxPrepareTime}); false -> prepare_done(State, {reply_and_then_commit, MaxPrepareTime}) end; _ -> {next_state, receive_prepared, State#state{ num_to_ack = NumToAck - 1, prepare_time = MaxPrepareTime }} end. %% @doc when an error occurs or an updated partition %% does not pass the certification check, the transaction aborts.
abort( State = #state{ transaction = Transaction, updated_partitions = UpdatedPartitions } ) -> NumToAck = length(UpdatedPartitions), case NumToAck of 0 -> reply_to_client(State#state{state = aborted}); _ -> ok = clocksi_vnode:abort(UpdatedPartitions, Transaction), {next_state, receive_aborted, State#state{num_to_ack = NumToAck, state = aborted}} end. execute_post_commit_hooks(Ops) -> lists:foreach( fun({Key, Type, Update}) -> case antidote_hooks:execute_post_commit_hook(Key, Type, Update) of {error, Reason} -> ?LOG_INFO("Post commit hook failed. Reason ~p", [Reason]); _ -> ok end end, lists:reverse(Ops) ). %%%=================================================================== %%% Unit Tests %%%=================================================================== -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). meck_load() -> meck:new(dc_utilities, [passthrough]), meck:new(vectorclock, [passthrough]), meck:new(log_utilities), meck:new(logging_vnode), meck:new(clocksi_downstream), meck:new(clocksi_vnode), meck:expect(dc_utilities, get_my_dc_id, fun() -> mock_dc end), meck:expect(dc_utilities, get_stable_snapshot, fun() -> {ok, vectorclock:new()} end), meck:expect(vectorclock, get, fun(_, _) -> 0 end), meck:expect(log_utilities, get_key_partition, fun(A) -> mock_partition:get_key_partition(A) end), meck:expect(log_utilities, get_logid_from_key, fun(A) -> mock_partition:get_logid_from_key(A) end), % this is not implemented in mock_partition! %% meck:expect(clocksi_vnode, single_commit_sync, fun(_, _) -> 0 end), meck:expect(clocksi_vnode, commit, fun(_, _, _) -> ok end), meck:expect(clocksi_vnode, read_data_item, fun(A, B, K, C, D) -> mock_partition:read_data_item(A, B, K, C, D) end), meck:expect(clocksi_vnode, validate_or_read_data_item, fun( Node, TxId, Key, Type, Token, Updates ) -> mock_partition:validate_or_read_data_item(Node, TxId, Key, Type, Token, Updates) end), meck:expect(clocksi_vnode, prepare, fun(UpdatedPartition, A) -> mock_partition:prepare(UpdatedPartition, A) end), meck:expect(clocksi_vnode, single_commit, fun(UpdatedPartition, A) -> mock_partition:single_commit(UpdatedPartition, A) end), meck:expect(clocksi_vnode, abort, fun(UpdatedPartition, A) -> mock_partition:abort(UpdatedPartition, A) end), meck:expect(clocksi_downstream, generate_downstream_op, fun(A, B, Key, C, D, E) -> mock_partition:generate_downstream_op(A, B, Key, C, D, E) end), meck:expect(logging_vnode, append, fun(_, _, _) -> {ok, {0, node}} end), meck:expect(logging_vnode, asyn_append, fun(A, B, C, ReplyTo) -> mock_partition:asyn_append(A, B, C, ReplyTo) end). meck_unload() -> meck:unload(dc_utilities), meck:unload(vectorclock), meck:unload(log_utilities), meck:unload(logging_vnode), meck:unload(clocksi_downstream), meck:unload(clocksi_vnode). top_setup() -> meck_load(), {ok, Pid} = clocksi_interactive_coord:start_link(), {ok, _Tx} = gen_statem:call(Pid, {start_tx, ignore, []}), register(srv, Pid), Pid. top_cleanup(Pid) -> case process_info(Pid) of undefined -> io:format("Already crashed"); _ -> clocksi_interactive_coord:stop(Pid) end, meck_unload().
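%% Illustrative sketch (write_set_example/0 and the sample update tuples are
%% not part of the original module) of the per-partition write-set bookkeeping
%% done by perform_update/5 and append_updated_partitions/4 above: the first
%% update on a partition creates its {Partition, WriteSet} entry, and every
%% later update on that partition is prepended to the existing write set.
write_set_example() ->
    U1 = {key1, antidote_crdt_counter_pn, {increment, 1}},
    U2 = {key2, antidote_crdt_counter_pn, {increment, 2}},
    %% first update on partition p1 adds a fresh entry
    [{p1, [U1]}] = append_updated_partitions([], [], p1, U1),
    %% a later update on p1 is prepended to that partition's write set
    [{p1, [U2, U1]}] = append_updated_partitions([{p1, [U1]}], [U1], p1, U2),
    ok.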
t_test_() -> {foreach, fun top_setup/0, fun top_cleanup/1, [ fun empty_prepare_/0, fun timeout_/0, fun update_single_abort_/0, fun update_single_success_/0, fun update_multi_abort1_/0, fun update_multi_abort2_/0, fun update_multi_success_/0, fun read_single_fail_/0, fun read_success_/0, fun validate_or_read_single_fail_no_token_/0, fun validate_or_read_single_fail_with_token_/0, fun validate_or_read_single_fail_invalid_token_/0, fun validate_or_read_success_/0, fun downstream_fail_/0, fun get_snapshot_time_/0, fun wait_for_clock_/0 ]}. empty_prepare_() -> Pid = whereis(srv), ?assertMatch({ok, _}, gen_statem:call(Pid, {prepare, empty}, infinity)). timeout_() -> Pid = whereis(srv), ?assertEqual(ok, gen_statem:call(Pid, {update, {timeout, nothing, nothing}}, infinity)), ?assertMatch({error, aborted}, gen_statem:call(Pid, {prepare, empty}, infinity)). update_single_abort_() -> Pid = whereis(srv), ?assertEqual(ok, gen_statem:call(Pid, {update, {fail, nothing, nothing}}, infinity)), ?assertMatch({error, aborted}, gen_statem:call(Pid, {prepare, empty}, infinity)). update_single_success_() -> Pid = whereis(srv), ?assertEqual(ok, gen_statem:call(Pid, {update, {single_commit, nothing, nothing}}, infinity)), ?assertMatch({ok, _}, gen_statem:call(Pid, {prepare, empty}, infinity)). update_multi_abort1_() -> Pid = whereis(srv), ?assertEqual(ok, gen_statem:call(Pid, {update, {success, nothing, nothing}}, infinity)), ?assertEqual(ok, gen_statem:call(Pid, {update, {success, nothing, nothing}}, infinity)), ?assertEqual(ok, gen_statem:call(Pid, {update, {fail, nothing, nothing}}, infinity)), ?assertMatch({error, aborted}, gen_statem:call(Pid, {prepare, empty}, infinity)). update_multi_abort2_() -> Pid = whereis(srv), ?assertEqual(ok, gen_statem:call(Pid, {update, {success, nothing, nothing}}, infinity)), ?assertEqual(ok, gen_statem:call(Pid, {update, {fail, nothing, nothing}}, infinity)), ?assertEqual(ok, gen_statem:call(Pid, {update, {fail, nothing, nothing}}, infinity)), ?assertMatch({error, aborted}, gen_statem:call(Pid, {prepare, empty}, infinity)). update_multi_success_() -> Pid = whereis(srv), ?assertEqual(ok, gen_statem:call(Pid, {update, {success, nothing, nothing}}, infinity)), ?assertEqual(ok, gen_statem:call(Pid, {update, {success, nothing, nothing}}, infinity)), ?assertMatch({ok, _}, gen_statem:call(Pid, {prepare, empty}, infinity)). read_single_fail_() -> Pid = whereis(srv), ?assertEqual( {error, mock_read_fail}, gen_statem:call(Pid, {read, {read_fail, nothing}}, infinity) ). read_success_() -> Pid = whereis(srv), {ok, State} = gen_statem:call(Pid, {read, {counter, antidote_crdt_counter_pn}}, infinity), ?assertEqual( {ok, 2}, {ok, antidote_crdt_counter_pn:value(State)} ), ?assertEqual( {ok, [a]}, gen_statem:call(Pid, {read, {set, antidote_crdt_set_go}}, infinity) ), ?assertEqual( {ok, mock_value}, gen_statem:call(Pid, {read, {mock_type, mock_partition_fsm}}, infinity) ), ?assertMatch({ok, _}, gen_statem:call(Pid, {prepare, empty}, infinity)). validate_or_read_single_fail_no_token_() -> Pid = whereis(srv), ?assertEqual( {error, mock_read_fail}, gen_statem:call( Pid, {validate_or_read, {read_fail, nothing, ?INVALID_OBJECT_TOKEN}}, infinity ) ). validate_or_read_single_fail_with_token_() -> Pid = whereis(srv), ?assertEqual( {error, mock_read_fail}, gen_statem:call(Pid, {validate_or_read, {read_fail, nothing, <<"valid">>}}, infinity) ). 
validate_or_read_single_fail_invalid_token_() -> Pid = whereis(srv), ?assertEqual( {error, mock_read_fail}, gen_statem:call(Pid, {validate_or_read, {read_fail, nothing, <<"invalid">>}}, infinity) ). validate_or_read_success_() -> Pid = whereis(srv), {ok, {invalid, State, Token}} = gen_statem:call( Pid, {validate_or_read, {counter, antidote_crdt_counter_pn, ?INVALID_OBJECT_TOKEN}}, infinity ), ?assertEqual(2, State), {ok, valid} = gen_statem:call( Pid, {validate_or_read, {counter, antidote_crdt_counter_pn, Token}}, infinity ), {ok, {invalid, State, Token}} = gen_statem:call( Pid, {validate_or_read, {counter, antidote_crdt_counter_pn, <<"past_token">>}}, infinity ). downstream_fail_() -> Pid = whereis(srv), ?assertMatch( {error, _}, gen_statem:call(Pid, {update, {downstream_fail, nothing, nothing}}, infinity) ). get_snapshot_time_() -> {ok, SnapshotTime} = get_snapshot_time(), ?assertMatch([{mock_dc, _}], vectorclock:to_list(SnapshotTime)). wait_for_clock_() -> {ok, SnapshotTime} = wait_for_clock(vectorclock:from_list([{mock_dc, 10}])), ?assertMatch([{mock_dc, _}], vectorclock:to_list(SnapshotTime)), VecClock = dc_utilities:now_microsec(), {ok, SnapshotTime2} = wait_for_clock(vectorclock:from_list([{mock_dc, VecClock}])), ?assertMatch([{mock_dc, _}], vectorclock:to_list(SnapshotTime2)). -endif.
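%% Condensed usage sketch assembled from the tests above: one interactive
%% round-trip against a coordinator Pid obtained as in top_setup/0 (so the
%% mocked vnodes are in place). example_round_trip/1 is an illustrative
%% wrapper; the call shapes are exactly the ones the tests use.
example_round_trip(Pid) ->
    %% stage an update; the coordinator logs it asynchronously and replies ok
    ok = gen_statem:call(Pid, {update, {success, nothing, nothing}}, infinity),
    %% read through the same coordinator so pending local updates are applied
    {ok, _Value} = gen_statem:call(Pid, {read, {counter, antidote_crdt_counter_pn}}, infinity),
    %% prepare/commit; on success the commit information is returned to the caller
    {ok, _CommitInfo} = gen_statem:call(Pid, {prepare, empty}, infinity).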
null
https://raw.githubusercontent.com/electric-sql/vaxine/9dcf353fffb8fb98979a3046e13454ce2c6fc343/apps/antidote/src/clocksi_interactive_coord.erl
erlang
------------------------------------------------------------------- > Version 2.0 (the "License"); you may not use this file a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, KIND, either expressed or implied. See the License for the specific language governing permissions and limitations under the License. Description and complete License: see LICENSE file. ------------------------------------------------------------------- @doc The coordinator for a given Clock SI interactive transaction. by sending each operation to the responsible clockSI_vnode of the also finishes. API gen_statem callbacks states =================================================================== API =================================================================== @doc This is a standalone function for directly contacting the read server located at the vnode of the key being read. This read is supposed to be light weight because it is done outside of a It either returns the object value or the object state. Read only transaction has no commit, hence return the snapshot time @doc This is a standalone function for directly contacting the update server vnode. This is lighter than creating a transaction Execute pre_commit_hook if any Execute post commit hook =================================================================== =================================================================== --------------------------------------------------------------------- @doc Data Type: state where: from: the pid of the calling process. updated_partitions: the partitions where update operations take place. number of partitions that have acknowledged. number of partitions that are asked. state: state of the transaction: {active|prepared|committing|committed} ---------------------------------------------------------------------- list of upstream updates, used for post commit hooks =================================================================== =================================================================== == init @doc Initialize the state. @doc Contact the leader computed in the prepare state for it to execute the operation, wait for it to finish (synchronous) and go to the prepareOP to execute the next operation. internal state timeout update kept for backwards compatibility with tests. == receive_prepared capture regular events (e.g. logging_vnode responses) == committing @doc after receiving all prepare_times, send the commit message to all updated partitions, and go to the "receive_committed" state. This state is used when no commit message from the client is expected == single_committing capture regular events (e.g. logging_vnode responses) == receive_aborted aborted the tx and finishes operation. Should we retry sending the aborted message if we don't receive a reply from every partition? What delivery guarantees does sending messages provide? capture regular events (e.g. logging_vnode responses) == receive_read_objects_result @doc After asynchronously reading a batch of keys, collect the responses here Apply local updates to the read snapshot Swap keys with their appropriate read values Loop back to the same state until we process all the replies capture regular events (e.g. logging_vnode responses) There are some local updates that must be applied. It is not safe to apply the update and to return the updated state with a token specific to this uncommitted transaction. 
In such case, always return a token that will be invalid so the client can know that returned value shouldn't be cached for later revalidation. == receive_logging_responses internal state timeout @doc This state reached after an execute_op(update_objects[Params]). update_objects calls the perform_update function, which asynchronously sends a log operation per update, to the vnode responsible of the updated key. After sending all those messages, the coordinator reaches this state to receive the responses of the vnodes. Loop back to the same state until we process all the replies capture regular events (e.g. logging_vnode responses) == receive_committed committed the tx and finishes operation. Should we retry sending the committed message if we don't receive a reply from every partition? What delivery guarantees does sending messages provide? capture regular events (e.g. logging_vnode responses) == committing_single @doc There was only a single partition with an update in this transaction so the transaction has already been committed so just wait for the commit message from the client ============================================================================= handle_sync_event(stop, _From, _StateName, StateData) -> {stop, normal, ok, StateData}. handle_call(From, stop, Data) -> {stop_and_reply, normal, {reply, From, ok}, Data}. handle_info(Info, StateName, Data) -> =================================================================== Internal Functions =================================================================== a new transaction was started, increment metrics Seed the random because you pick a random read server, this is stored in the process state @doc Execute the commit protocol @doc Abort the current transaction @doc Perform a single read, synchronous @doc Read a batch of objects, asynchronous If there are some local update, there is not point on trying to validate the value as it will be invalidated here anyway. Send an invalid token and expect a full result. @doc Perform update operations on a batch of Objects @doc when the transaction has committed or aborted, a reply is sent to the client that started the transaction. Execute post_commit_hooks TODO: What happens if commit hook fails? can never match (dialyzer) Reason -> {TxId, Reason} transaction is finished, decrement count @doc The following function is used to apply the updates that were performed by the running transaction, to the result returned by a read. 1.ClientClock, which is the last clock of the system the client starting this transaction has seen, and No need to wait wait for snapshot time to catch up with Client Clock TODO Refactor into constant yields error if there the element to be replaced is not in the list Execute pre_commit_hook if any Generate the appropriate state operations based on older snapshots Append to the write set of the updated partition @doc Add new updates to the write set of the given partition. If there's no write set, create a new one. Update the write set entry with the new record @doc this function sends a prepare message to all updated partitions and goes to the "receive_prepared"state. TODO explain this condition, it makes no sense This function is called when we are done with the prepare phase. 
There are different options to continue the commit phase: single_committing: special case for when we just touched a single partition commit_read_only: special case for when we have not updated anything this is the last ack we expected @doc when an error occurs or an updated partition does not pass the certification check, the transaction aborts. =================================================================== Unit Tests =================================================================== this is not implemented in mock_partition!
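%% The snapshot-time handling described above boils down to a vector-clock
%% dominance check: the coordinator only proceeds once its stable snapshot is
%% greater than or equal to the client's clock. A small sketch using
%% vectorclock:from_list/1 and vectorclock:ge/2 as they are used elsewhere in
%% this file (clock_check_example/0, dc1 and the timestamps are illustrative).
clock_check_example() ->
    ClientClock = vectorclock:from_list([{dc1, 10}]),
    Behind = vectorclock:from_list([{dc1, 5}]),
    Ahead = vectorclock:from_list([{dc1, 12}]),
    false = vectorclock:ge(Behind, ClientClock), %% wait_for_clock would sleep and retry
    true = vectorclock:ge(Ahead, ClientClock),   %% safe to start the transaction
    ok.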
Copyright < 2013 - 2018 > < Technische Universität Kaiserslautern , Germany , France Universidade NOVA de Lisboa , Portugal Université catholique de Louvain ( UCL ) , Belgique , Portugal This file is provided to you under the Apache License , except in compliance with the License . You may obtain software distributed under the License is distributed on an " AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY List of the contributors to the development of Antidote : see file . It handles the state of the tx and executes the operations sequentially involved key . When a tx is finalized ( committed or aborted , the fsm -module(clocksi_interactive_coord). -behavior(gen_statem). -include("antidote.hrl"). -include_lib("kernel/include/logger.hrl"). -export([ start_link/0, perform_singleitem_operation/4, perform_singleitem_update/5, finish_op/3 ]). -export([ init/1, code_change/4, callback_mode/0, terminate/3, stop/1, wait_for_start_transaction/3 ]). -export([ receive_committed/3, receive_logging_responses/3, receive_read_objects_result/3, receive_validate_or_read_objects_result/3, receive_aborted/3, single_committing/3, receive_prepared/3, execute_op/3, committing/3, committing_single/3 ]). called by clocksi_interactive_coord_sup : start_fm -spec start_link() -> {ok, pid()}. start_link() -> gen_statem:start_link(?MODULE, [], []). TODO spec stop(Pid) -> gen_statem:stop(Pid). transaction fsm and directly in the calling thread . -spec perform_singleitem_operation( snapshot_time() | ignore, key(), type(), clocksi_readitem:read_property_list() ) -> {ok, val() | term(), snapshot_time()} | {error, reason()}. perform_singleitem_operation(Clock, Key, Type, Properties) -> Transaction = create_transaction_record(Clock, true, Properties), : { Transaction , _ TransactionId } = create_transaction_record(ignore , update_clock , false , undefined , true ) , Preflist = log_utilities:get_preflist_from_key(Key), IndexNode = hd(Preflist), case clocksi_readitem:read_data_item(IndexNode, Key, Type, Transaction, []) of {error, Reason} -> {error, Reason}; {ok, Snapshot} -> CommitTime = Transaction#transaction.vec_snapshot_time, {ok, Snapshot, CommitTime} end. because the update / prepare / commit are all done at one time -spec perform_singleitem_update(snapshot_time() | ignore, key(), type(), {atom(), term()}, list()) -> {ok, {txid(), [], snapshot_time()}} | {error, term()}. perform_singleitem_update(Clock, Key, Type, Params, Properties) -> Transaction = create_transaction_record(Clock, true, Properties), Partition = log_utilities:get_key_partition(Key), case antidote_hooks:execute_pre_commit_hook(Key, Type, Params) of {Key, Type, Params1} -> case clocksi_downstream:generate_downstream_op( Transaction, Partition, Key, Type, Params1, [] ) of {ok, DownstreamRecord} -> UpdatedPartitions = [{Partition, [{Key, Type, DownstreamRecord}]}], TxId = Transaction#transaction.txn_id, LogRecord = #log_operation{ tx_id = TxId, op_type = update_start, log_payload = #update_log_payload{ key = Key, type = Type, op = DownstreamRecord } }, LogId = log_utilities:get_logid_from_key(Key), case logging_vnode:append(Partition, LogId, LogRecord) of {ok, _} -> case clocksi_vnode:single_commit_sync(UpdatedPartitions, Transaction) of {committed, CommitTime} -> case antidote_hooks:execute_post_commit_hook(Key, Type, Params1) of {error, Reason} -> ?LOG_INFO("Post commit hook failed. 
Reason ~p", [Reason]); _ -> ok end, TxId = Transaction#transaction.txn_id, DcId = dc_utilities:get_my_dc_id(), CausalClock = vectorclock:set( DcId, CommitTime, Transaction#transaction.vec_snapshot_time ), {ok, {TxId, [], CausalClock}}; abort -> TODO increment aborted transaction metrics ? {error, aborted}; {error, Reason} -> {error, Reason} end; Error -> {error, Error} end; {error, Reason} -> {error, Reason} end; {error, Reason} -> {error, Reason} end. TODO spec finish_op(From, Key, Result) -> gen_statem:cast(From, {Key, Result}). Internal State txid : transaction i d handled by this fsm , as defined in src / antidote.hrl . : when sending prepare_commit , : when sending read requests prepare_time : transaction prepare time . commit_time : transaction commit time . -record(state, { from :: undefined | gen_statem:from(), transaction :: undefined | tx(), updated_partitions :: list(), client_ops :: list(), num_to_ack :: non_neg_integer(), num_to_read :: non_neg_integer(), prepare_time :: undefined | clock_time(), commit_time :: undefined | clock_time(), commit_protocol :: term(), state :: active | prepared | committing | committed | committed_read_only | undefined | aborted, operations :: undefined | list() | {update_objects, list()}, return_accumulator :: list() | ok | {error, reason()}, is_static :: boolean(), full_commit :: boolean(), properties :: txn_properties() }). -type state() :: #state{}. States init([]) -> {ok, wait_for_start_transaction, ignore}. wait_for_start_transaction({call, Sender}, {start_tx, ClientClock, Properties}, _State) -> BaseState = init_state(false, false, Properties), {ok, State} = start_tx_internal(ClientClock, Properties, BaseState), TxnId = (State#state.transaction)#transaction.txn_id, {next_state, execute_op, State, {reply, Sender, {ok, TxnId}}}. = = execute_op -spec execute_op( {call, gen_statem:from()}, {update | update_objects | read_objects | read | abort | prepare, list()}, state() ) -> gen_statem:event_handler_result(state()). execute_op({call, Sender}, {update, Args}, State) -> execute_op({call, Sender}, {update_objects, [Args]}, State); execute_op({call, Sender}, {OpType, Args}, State) -> execute_command(OpType, Args, Sender, State). @doc in this state , the fsm waits for prepare_time from each updated partitions in order to compute the final tx timestamp ( the maximum of the received prepare_time ) . receive_prepared(cast, {prepared, ReceivedPrepareTime}, State) -> process_prepared(ReceivedPrepareTime, State); receive_prepared(cast, abort, State) -> receive_prepared(cast, timeout, State); receive_prepared(cast, timeout, State) -> abort(State); receive_prepared(info, {_EventType, EventValue}, State) -> receive_prepared(cast, EventValue, State). committing( {call, Sender}, commit, State = #state{ transaction = Transaction, updated_partitions = UpdatedPartitions, commit_time = Commit_time } ) -> NumToAck = length(UpdatedPartitions), case NumToAck of 0 -> reply_to_client(State#state{state = committed_read_only, from = Sender}); _ -> ok = clocksi_vnode:commit(UpdatedPartitions, Transaction, Commit_time), {next_state, receive_committed, State#state{ num_to_ack = NumToAck, from = Sender, state = committing }} end. @doc TODO -spec single_committing (cast, {committed | clock_time()} | abort | timeout, state()) -> gen_statem:event_handler_result(state()); (info, {any(), any()}, state()) -> gen_statem:event_handler_result(state()). 
single_committing( cast, {committed, CommitTime}, State = #state{from = From, full_commit = FullCommit} ) -> case FullCommit of false -> {next_state, committing_single, State#state{commit_time = CommitTime, state = committing}, [ {reply, From, {ok, CommitTime}} ]}; true -> reply_to_client(State#state{ prepare_time = CommitTime, commit_time = CommitTime, state = committed }) end; single_committing(cast, abort, State) -> single_committing(cast, timeout, State); single_committing(cast, timeout, State) -> abort(State); single_committing(info, {_EventType, EventValue}, State) -> single_committing(cast, EventValue, State). @doc the fsm waits for indicating that each partition has successfully receive_aborted(cast, ack_abort, State = #state{num_to_ack = NumToAck}) -> case NumToAck of 1 -> reply_to_client(State#state{state = aborted}); _ -> {next_state, receive_aborted, State#state{num_to_ack = NumToAck - 1}} end; receive_aborted(cast, _, State) -> {next_state, receive_aborted, State}; receive_aborted(info, {_EventType, EventValue}, State) -> receive_aborted(cast, EventValue, State). receive_read_objects_result( cast, {ok, {Key, Type, Snapshot}}, CoordState = #state{ num_to_read = NumToRead, return_accumulator = ReadKeys } ) -> UpdatedSnapshot = apply_tx_updates_to_snapshot(Key, CoordState, Type, Snapshot), ReadValues = replace_first(ReadKeys, Key, UpdatedSnapshot), case NumToRead > 1 of true -> {next_state, receive_read_objects_result, CoordState#state{ num_to_read = NumToRead - 1, return_accumulator = ReadValues }}; false -> {next_state, execute_op, CoordState#state{num_to_read = 0}, [ {reply, CoordState#state.from, {ok, lists:reverse(ReadValues)}} ]} end; receive_read_objects_result(info, {_EventType, EventValue}, State) -> receive_read_objects_result(cast, EventValue, State). receive_validate_or_read_objects_result( cast, {ok, {invalid, Key, Type, ReadSnapshot, Token}}, CoordState = #state{ num_to_read = NumToRead, return_accumulator = ReadKeys, updated_partitions = UpdatedPartitions } ) -> {Snapshot, EffectiveToken} = case length(UpdatedPartitions) > 0 of true -> the token returned by the vnode noor it is safe to return UpdatedSnapshot = apply_tx_updates_to_snapshot(Key, CoordState, Type, ReadSnapshot), {UpdatedSnapshot, ?INVALID_OBJECT_TOKEN}; false -> {ReadSnapshot, Token} end, ReadValues = replace_first(ReadKeys, Key, {invalid, Snapshot, EffectiveToken}), case NumToRead > 1 of true -> {next_state, receive_validate_or_read_objects_result, CoordState#state{ num_to_read = NumToRead - 1, return_accumulator = ReadValues }}; false -> {next_state, execute_op, CoordState#state{num_to_read = 0}, [ {reply, CoordState#state.from, {ok, lists:reverse(ReadValues)}} ]} end; receive_validate_or_read_objects_result( cast, {ok, {valid, Key, _Type}}, CoordState = #state{ num_to_read = NumToRead, return_accumulator = ReadKeys } ) -> ReadValues = replace_first(ReadKeys, Key, valid), case NumToRead > 1 of true -> {next_state, receive_validate_or_read_objects_result, CoordState#state{ num_to_read = NumToRead - 1, return_accumulator = ReadValues }}; false -> {next_state, execute_op, CoordState#state{num_to_read = 0}, [ {reply, CoordState#state.from, {ok, lists:reverse(ReadValues)}} ]} end. 
receive_logging_responses(state_timeout, timeout, State) -> receive_logging_responses(cast, timeout, State); receive_logging_responses( cast, Response, State = #state{ is_static = IsStatic, num_to_read = NumToReply, return_accumulator = ReturnAcc } ) -> NewAcc = case Response of {error, _r} = Err -> Err; {ok, _OpId} -> ReturnAcc; timeout -> ReturnAcc end, case NumToReply > 1 of true -> {next_state, receive_logging_responses, State#state{ num_to_read = NumToReply - 1, return_accumulator = NewAcc }}; false -> case NewAcc of ok -> case IsStatic of true -> prepare(State); false -> {next_state, execute_op, State#state{num_to_read = 0, return_accumulator = []}, [ {reply, State#state.from, NewAcc} ]} end; _ -> abort(State) end end; receive_logging_responses(info, {_EventType, EventValue}, State) -> receive_logging_responses(cast, EventValue, State). @doc the fsm waits for indicating that each partition has successfully receive_committed(cast, committed, State = #state{num_to_ack = NumToAck}) -> case NumToAck of 1 -> reply_to_client(State#state{state = committed}); _ -> {next_state, receive_committed, State#state{num_to_ack = NumToAck - 1}} end; receive_committed(info, {_EventType, EventValue}, State) -> receive_committed(cast, EventValue, State). committing_single({call, Sender}, commit, State = #state{commit_time = Commit_time}) -> reply_to_client(State#state{ prepare_time = Commit_time, from = Sender, commit_time = Commit_time, state = committed }). TODO add to all state functions { stop , { shutdown , { unexpected , Info , StateName } } , , Data } . code_change(_OldVsn, StateName, State, _Extra) -> {ok, StateName, State}. terminate(_Reason, _SN, _SD) -> ok. callback_mode() -> state_functions. @doc TODO -spec init_state(boolean(), boolean(), proplists:proplist()) -> state(). init_state(FullCommit, IsStatic, Properties) -> #state{ from = undefined, transaction = undefined, updated_partitions = [], client_ops = [], num_to_ack = 0, num_to_read = 0, prepare_time = 0, operations = undefined, return_accumulator = [], is_static = IsStatic, full_commit = FullCommit, properties = Properties }. @doc TODO -spec start_tx_internal(snapshot_time(), proplists:proplist(), state()) -> {ok, state()} | {error, any()}. start_tx_internal(ClientClock, Properties, State = #state{}) -> TransactionRecord = create_transaction_record(ClientClock, false, Properties), ?STATS(open_transaction), {ok, State#state{transaction = TransactionRecord, num_to_read = 0, properties = Properties}}. @doc TODO -spec create_transaction_record(snapshot_time() | ignore, boolean(), txn_properties()) -> tx(). noinspection ErlangUnresolvedFunction create_transaction_record(ClientClock, _IsStatic, Properties) -> _Res = rand:seed(exsplus, { erlang:phash2([node()]), erlang:monotonic_time(), erlang:unique_integer() }), {ok, SnapshotTime} = case ClientClock of ignore -> get_snapshot_time(); _ -> case antidote:get_txn_property(update_clock, Properties) of update_clock -> get_snapshot_time(ClientClock); no_update_clock -> {ok, ClientClock} end end, DcId = dc_utilities:get_my_dc_id(), LocalClock = vectorclock:get(DcId, SnapshotTime), TransactionId = #tx_id{local_start_time = LocalClock, server_pid = self()}, #transaction{ snapshot_time_local = LocalClock, vec_snapshot_time = SnapshotTime, txn_id = TransactionId, properties = Properties }. -spec execute_command(atom(), term(), gen_statem:from(), state()) -> gen_statem:event_handler_result(state()). 
execute_command(prepare, Protocol, Sender, State0) -> State = State0#state{from = Sender, commit_protocol = Protocol}, prepare(State); execute_command(abort, _Protocol, Sender, State) -> abort(State#state{from = Sender}); execute_command( read, {Key, Type}, Sender, State = #state{ transaction = Transaction, updated_partitions = UpdatedPartitions } ) -> case perform_read({Key, Type}, UpdatedPartitions, Transaction, Sender) of {error, _} -> abort(State); ReadResult -> {next_state, execute_op, State, {reply, Sender, {ok, ReadResult}}} end; execute_command(read_objects, Objects, Sender, State = #state{transaction = Transaction}) -> ExecuteReads = fun({Key, Type}, AccState) -> ?STATS(operation_read_async), Partition = log_utilities:get_key_partition(Key), ok = clocksi_vnode:async_read_data_item(Partition, Transaction, Key, Type), ReadKeys = AccState#state.return_accumulator, AccState#state{return_accumulator = [Key | ReadKeys]} end, NewCoordState = lists:foldl( ExecuteReads, State#state{num_to_read = length(Objects), return_accumulator = []}, Objects ), {next_state, receive_read_objects_result, NewCoordState#state{from = Sender}}; execute_command( validate_or_read, {Key, Type, Token}, Sender, State = #state{ transaction = Transaction, updated_partitions = UpdatedPartitions } ) -> Partition = log_utilities:get_key_partition(Key), EffectiveToken = case has_tx_updates(Partition, State) of true -> ?INVALID_OBJECT_TOKEN; false -> Token end, case perform_validate_or_read( {Key, Type, EffectiveToken}, UpdatedPartitions, Transaction, Sender ) of {error, _} -> abort(State); ReadResult -> {next_state, execute_op, State, {reply, Sender, {ok, ReadResult}}} end; execute_command( validate_or_read_objects, {Objects, Tokens}, Sender, State = #state{transaction = Transaction} ) -> ExecuteReads = fun({{Key, Type}, Token}, AccState) -> Partition = log_utilities:get_key_partition(Key), EffectiveToken = case has_tx_updates(Partition, State) of true -> ?INVALID_OBJECT_TOKEN; false -> Token end, ok = clocksi_vnode:async_validate_or_read_data_item( Partition, Transaction, Key, Type, EffectiveToken ), ReadKeys = AccState#state.return_accumulator, AccState#state{return_accumulator = [Key | ReadKeys]} end, NewCoordState = lists:foldl( ExecuteReads, State#state{num_to_read = length(Objects), return_accumulator = []}, lists:zip(Objects, Tokens) ), {next_state, receive_validate_or_read_objects_result, NewCoordState#state{from = Sender}}; execute_command(update_objects, UpdateOps, Sender, State = #state{transaction = Transaction}) -> ExecuteUpdates = fun( Op, AccState = #state{ client_ops = ClientOps0, updated_partitions = UpdatedPartitions0 } ) -> case perform_update(Op, UpdatedPartitions0, Transaction, Sender, ClientOps0) of {error, _} = Err -> AccState#state{return_accumulator = Err}; {UpdatedPartitions, ClientOps} -> NumToRead = AccState#state.num_to_read, AccState#state{ client_ops = ClientOps, num_to_read = NumToRead + 1, updated_partitions = UpdatedPartitions } end end, NewCoordState = lists:foldl( ExecuteUpdates, State#state{num_to_read = 0, return_accumulator = ok}, UpdateOps ), LoggingState = NewCoordState#state{from = Sender}, case LoggingState#state.num_to_read > 0 of true -> {next_state, receive_logging_responses, LoggingState}; false -> {next_state, receive_logging_responses, LoggingState, [{state_timeout, 0, timeout}]} end. 
reply_to_client( State = #state{ from = From, state = TxState, is_static = IsStatic, client_ops = ClientOps, commit_time = CommitTime, transaction = Transaction, return_accumulator = ReturnAcc } ) -> TxId = Transaction#transaction.txn_id, _ = case From of undefined -> ok; {_Pid, _Tag} -> Reply = case TxState of committed_read_only -> case IsStatic of false -> {ok, {TxId, Transaction#transaction.vec_snapshot_time}}; true -> {ok, {TxId, ReturnAcc, Transaction#transaction.vec_snapshot_time}} end; committed -> _Result = execute_post_commit_hooks(ClientOps), DcId = dc_utilities:get_my_dc_id(), CausalClock = vectorclock:set( DcId, CommitTime, Transaction#transaction.vec_snapshot_time ), case IsStatic of false -> {ok, {TxId, CausalClock}}; true -> {ok, CausalClock} end; aborted -> ?STATS(transaction_aborted), case ReturnAcc of {error, Reason} -> {error, Reason}; _ -> {error, aborted} end end, gen_statem:reply(From, Reply) end, ?STATS(transaction_finished), {stop, normal, State}. -spec apply_tx_updates_to_snapshot(key(), state(), type(), snapshot()) -> snapshot(). apply_tx_updates_to_snapshot(Key, CoordState, Type, Snapshot) -> Partition = log_utilities:get_key_partition(Key), Found = lists:keyfind(Partition, 1, CoordState#state.updated_partitions), case Found of false -> Snapshot; {Partition, WS} -> FilteredAndReversedUpdates = clocksi_vnode:reverse_and_filter_updates_per_key(WS, Key), clocksi_materializer:materialize_eager(Type, Snapshot, FilteredAndReversedUpdates) end. -spec has_tx_updates(index_node(), state()) -> boolean(). has_tx_updates(Partition, CoordState) -> case lists:keyfind(Partition, 1, CoordState#state.updated_partitions) of false -> false; _ -> true end. @doc Set the transaction Snapshot Time to the maximum value of : 2.machine 's local time , as returned by : now ( ) . -spec get_snapshot_time(snapshot_time()) -> {ok, snapshot_time()}. get_snapshot_time(ClientClock) -> wait_for_clock(ClientClock). -spec get_snapshot_time() -> {ok, snapshot_time()}. get_snapshot_time() -> Now = dc_utilities:now_microsec() - ?OLD_SS_MICROSEC, {ok, VecSnapshotTime} = dc_utilities:get_stable_snapshot(), DcId = dc_utilities:get_my_dc_id(), SnapshotTime = vectorclock:set(DcId, Now, VecSnapshotTime), {ok, SnapshotTime}. -spec wait_for_clock(snapshot_time()) -> {ok, snapshot_time()}. wait_for_clock(Clock) -> {ok, VecSnapshotTime} = get_snapshot_time(), case vectorclock:ge(VecSnapshotTime, Clock) of true -> {ok, VecSnapshotTime}; false -> timer:sleep(10), wait_for_clock(Clock) end. Replaces the first occurrence of an entry ; replace_first([], _, _) -> error; replace_first([Key | Rest], Key, NewKey) -> [NewKey | Rest]; replace_first([NotMyKey | Rest], Key, NewKey) -> [NotMyKey | replace_first(Rest, Key, NewKey)]. perform_read({Key, Type}, UpdatedPartitions, Transaction, Sender) -> ?STATS(operation_read), Partition = log_utilities:get_key_partition(Key), WriteSet = case lists:keyfind(Partition, 1, UpdatedPartitions) of false -> []; {Partition, WS} -> WS end, case clocksi_vnode:read_data_item(Partition, Transaction, Key, Type, WriteSet) of {ok, Snapshot} -> Snapshot; {error, Reason} -> gen_statem:reply(Sender, {error, Reason}), {error, Reason} end. 
perform_validate_or_read({Key, Type, Token}, UpdatedPartitions, Transaction, Sender) -> Partition = log_utilities:get_key_partition(Key), WriteSet = case lists:keyfind(Partition, 1, UpdatedPartitions) of false -> []; {Partition, WS} -> ?INVALID_OBJECT_TOKEN = Token, WS end, case clocksi_vnode:validate_or_read_data_item(Partition, Transaction, Key, Type, Token, WriteSet) of {ok, Result} -> Result; {error, Reason} -> gen_statem:reply(Sender, {error, Reason}), {error, Reason} end. perform_update(Op, UpdatedPartitions, Transaction, _Sender, ClientOps) -> ?STATS(operation_update), {Key, Type, Update} = Op, Partition = log_utilities:get_key_partition(Key), {FirstOp, WriteSet} = case lists:keyfind(Partition, 1, UpdatedPartitions) of false -> {true, []}; {Partition, WS} -> {false, WS} end, case antidote_hooks:execute_pre_commit_hook(Key, Type, Update) of {error, Reason} -> ?LOG_DEBUG("Execute pre-commit hook failed ~p", [Reason]), {error, Reason}; {Key, Type, PostHookUpdate} -> GenerateResult = clocksi_downstream:generate_downstream_op( Transaction, Partition, Key, Type, PostHookUpdate, WriteSet ), case GenerateResult of {error, Reason} -> {error, Reason}; {ok, DownstreamOp} -> ok = async_log_propagation( Partition, Transaction#transaction.txn_id, Key, Type, DownstreamOp, FirstOp ), GeneratedUpdate = {Key, Type, DownstreamOp}, NewUpdatedPartitions = append_updated_partitions( UpdatedPartitions, WriteSet, Partition, GeneratedUpdate ), UpdatedOps = [{Key, Type, PostHookUpdate} | ClientOps], {NewUpdatedPartitions, UpdatedOps} end end. -type write_set_item() :: { {key(), bucket()}, type(), effect() }. -type write_set() :: [ write_set_item() ]. -type updated_partitions() :: [{ index_node(), write_set() }]. -spec append_updated_partitions( updated_partitions(), write_set(), index_node(), write_set_item() ) -> updated_partitions(). append_updated_partitions(UpdatedPartitions, [], Partition, Update) -> [{Partition, [Update]} | UpdatedPartitions]; append_updated_partitions(UpdatedPartitions, WriteSet, Partition, Update) -> AllUpdates = {Partition, [Update | WriteSet]}, lists:keyreplace(Partition, 1, UpdatedPartitions, AllUpdates). -spec async_log_propagation(index_node(), txid(), key(), type(), effect(), boolean()) -> ok. async_log_propagation(Partition, TxId, Key, Type, Record, FirstOp) -> LogRecord = #log_operation{ op_type = if FirstOp -> update_start; true -> update end, tx_id = TxId, log_payload = #update_log_payload{key = Key, type = Type, op = Record} }, LogId = log_utilities:get_logid_from_key(Key), logging_vnode:asyn_append(Partition, LogId, LogRecord, {fsm, undefined, self()}). -spec prepare(state()) -> gen_statem:event_handler_result(state()). 
prepare( State = #state{ num_to_read = NumToRead, full_commit = FullCommit, transaction = Transaction, updated_partitions = UpdatedPartitions, commit_protocol = CommitProtocol } ) -> case UpdatedPartitions of [] -> if CommitProtocol == two_phase orelse NumToRead == 0 -> case FullCommit of true -> prepare_done(State, commit_read_only); false -> Transaction = State#state.transaction, SnapshotTimeLocal = Transaction#transaction.snapshot_time_local, prepare_done(State, {reply_and_then_commit, SnapshotTimeLocal}) end; true -> {next_state, receive_prepared, State#state{state = prepared}} end; [_] when CommitProtocol /= two_phase -> prepare_done(State, single_committing); [_ | _] -> ok = clocksi_vnode:prepare(UpdatedPartitions, Transaction), Num_to_ack = length(UpdatedPartitions), {next_state, receive_prepared, State#state{num_to_ack = Num_to_ack, state = prepared}} end. { reply_and_then_commit , ( ) } : first reply that we have successfully committed and then try to commit TODO rly ? { normal_commit , ( ): wait until all participants have acknowledged the commit and then reply to the client -spec prepare_done(state(), Action) -> gen_statem:event_handler_result(state()) when Action :: single_committing | commit_read_only | {reply_and_then_commit, clock_time()} | {normal_commit, clock_time()}. prepare_done(State, Action) -> case Action of single_committing -> UpdatedPartitions = State#state.updated_partitions, Transaction = State#state.transaction, ok = clocksi_vnode:single_commit(UpdatedPartitions, Transaction), {next_state, single_committing, State#state{state = committing, num_to_ack = 1}}; commit_read_only -> reply_to_client(State#state{state = committed_read_only}); {reply_and_then_commit, CommitSnapshotTime} -> From = State#state.from, {next_state, committing, State#state{ state = committing, commit_time = CommitSnapshotTime }, [{reply, From, {ok, CommitSnapshotTime}}]}; {normal_commit, MaxPrepareTime} -> UpdatedPartitions = State#state.updated_partitions, Transaction = State#state.transaction, ok = clocksi_vnode:commit(UpdatedPartitions, Transaction, MaxPrepareTime), {next_state, receive_committed, State#state{ num_to_ack = length(UpdatedPartitions), commit_time = MaxPrepareTime, state = committing }} end. process_prepared( ReceivedPrepareTime, State = #state{ num_to_ack = NumToAck, full_commit = FullCommit, prepare_time = PrepareTime } ) -> MaxPrepareTime = max(PrepareTime, ReceivedPrepareTime), case NumToAck of 1 -> case FullCommit of true -> prepare_done(State, {normal_commit, MaxPrepareTime}); false -> prepare_done(State, {reply_and_then_commit, MaxPrepareTime}) end; _ -> {next_state, receive_prepared, State#state{ num_to_ack = NumToAck - 1, prepare_time = MaxPrepareTime }} end. abort( State = #state{ transaction = Transaction, updated_partitions = UpdatedPartitions } ) -> NumToAck = length(UpdatedPartitions), case NumToAck of 0 -> reply_to_client(State#state{state = aborted}); _ -> ok = clocksi_vnode:abort(UpdatedPartitions, Transaction), {next_state, receive_aborted, State#state{num_to_ack = NumToAck, state = aborted}} end. execute_post_commit_hooks(Ops) -> lists:foreach( fun({Key, Type, Update}) -> case antidote_hooks:execute_post_commit_hook(Key, Type, Update) of {error, Reason} -> ?LOG_INFO("Post commit hook failed. Reason ~p", [Reason]); _ -> ok end end, lists:reverse(Ops) ). -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). 
meck_load() -> meck:new(dc_utilities, [passthrough]), meck:new(vectorclock, [passthrough]), meck:new(log_utilities), meck:new(logging_vnode), meck:new(clocksi_downstream), meck:new(clocksi_vnode), meck:expect(dc_utilities, get_my_dc_id, fun() -> mock_dc end), meck:expect(dc_utilities, get_stable_snapshot, fun() -> {ok, vectorclock:new()} end), meck:expect(vectorclock, get, fun(_, _) -> 0 end), meck:expect(log_utilities, get_key_partition, fun(A) -> mock_partition:get_key_partition(A) end), meck:expect(log_utilities, get_logid_from_key, fun(A) -> mock_partition:get_logid_from_key(A) end), meck : expect(clocksi_vnode , single_commit_sync , fun ( _ , _ ) - > 0 end ) , meck:expect(clocksi_vnode, commit, fun(_, _, _) -> ok end), meck:expect(clocksi_vnode, read_data_item, fun(A, B, K, C, D) -> mock_partition:read_data_item(A, B, K, C, D) end), meck:expect(clocksi_vnode, validate_or_read_data_item, fun( Node, TxId, Key, Type, Token, Updates ) -> mock_partition:validate_or_read_data_item(Node, TxId, Key, Type, Token, Updates) end), meck:expect(clocksi_vnode, prepare, fun(UpdatedPartition, A) -> mock_partition:prepare(UpdatedPartition, A) end), meck:expect(clocksi_vnode, single_commit, fun(UpdatedPartition, A) -> mock_partition:single_commit(UpdatedPartition, A) end), meck:expect(clocksi_vnode, abort, fun(UpdatedPartition, A) -> mock_partition:abort(UpdatedPartition, A) end), meck:expect(clocksi_downstream, generate_downstream_op, fun(A, B, Key, C, D, E) -> mock_partition:generate_downstream_op(A, B, Key, C, D, E) end), meck:expect(logging_vnode, append, fun(_, _, _) -> {ok, {0, node}} end), meck:expect(logging_vnode, asyn_append, fun(A, B, C, ReplyTo) -> mock_partition:asyn_append(A, B, C, ReplyTo) end). meck_unload() -> meck:unload(dc_utilities), meck:unload(vectorclock), meck:unload(log_utilities), meck:unload(logging_vnode), meck:unload(clocksi_downstream), meck:unload(clocksi_vnode). top_setup() -> meck_load(), {ok, Pid} = clocksi_interactive_coord:start_link(), {ok, _Tx} = gen_statem:call(Pid, {start_tx, ignore, []}), register(srv, Pid), Pid. top_cleanup(Pid) -> case process_info(Pid) of undefined -> io:format("Already crashed"); _ -> clocksi_interactive_coord:stop(Pid) end, meck_unload(). t_test_() -> {foreach, fun top_setup/0, fun top_cleanup/1, [ fun empty_prepare_/0, fun timeout_/0, fun update_single_abort_/0, fun update_single_success_/0, fun update_multi_abort1_/0, fun update_multi_abort2_/0, fun update_multi_success_/0, fun read_single_fail_/0, fun read_success_/0, fun validate_or_read_single_fail_no_token_/0, fun validate_or_read_single_fail_with_token_/0, fun validate_or_read_single_fail_invalid_token_/0, fun validate_or_read_success_/0, fun downstream_fail_/0, fun get_snapshot_time_/0, fun wait_for_clock_/0 ]}. empty_prepare_() -> Pid = whereis(srv), ?assertMatch({ok, _}, gen_statem:call(Pid, {prepare, empty}, infinity)). timeout_() -> Pid = whereis(srv), ?assertEqual(ok, gen_statem:call(Pid, {update, {timeout, nothing, nothing}}, infinity)), ?assertMatch({error, aborted}, gen_statem:call(Pid, {prepare, empty}, infinity)). update_single_abort_() -> Pid = whereis(srv), ?assertEqual(ok, gen_statem:call(Pid, {update, {fail, nothing, nothing}}, infinity)), ?assertMatch({error, aborted}, gen_statem:call(Pid, {prepare, empty}, infinity)). update_single_success_() -> Pid = whereis(srv), ?assertEqual(ok, gen_statem:call(Pid, {update, {single_commit, nothing, nothing}}, infinity)), ?assertMatch({ok, _}, gen_statem:call(Pid, {prepare, empty}, infinity)). 
update_multi_abort1_() -> Pid = whereis(srv), ?assertEqual(ok, gen_statem:call(Pid, {update, {success, nothing, nothing}}, infinity)), ?assertEqual(ok, gen_statem:call(Pid, {update, {success, nothing, nothing}}, infinity)), ?assertEqual(ok, gen_statem:call(Pid, {update, {fail, nothing, nothing}}, infinity)), ?assertMatch({error, aborted}, gen_statem:call(Pid, {prepare, empty}, infinity)). update_multi_abort2_() -> Pid = whereis(srv), ?assertEqual(ok, gen_statem:call(Pid, {update, {success, nothing, nothing}}, infinity)), ?assertEqual(ok, gen_statem:call(Pid, {update, {fail, nothing, nothing}}, infinity)), ?assertEqual(ok, gen_statem:call(Pid, {update, {fail, nothing, nothing}}, infinity)), ?assertMatch({error, aborted}, gen_statem:call(Pid, {prepare, empty}, infinity)). update_multi_success_() -> Pid = whereis(srv), ?assertEqual(ok, gen_statem:call(Pid, {update, {success, nothing, nothing}}, infinity)), ?assertEqual(ok, gen_statem:call(Pid, {update, {success, nothing, nothing}}, infinity)), ?assertMatch({ok, _}, gen_statem:call(Pid, {prepare, empty}, infinity)). read_single_fail_() -> Pid = whereis(srv), ?assertEqual( {error, mock_read_fail}, gen_statem:call(Pid, {read, {read_fail, nothing}}, infinity) ). read_success_() -> Pid = whereis(srv), {ok, State} = gen_statem:call(Pid, {read, {counter, antidote_crdt_counter_pn}}, infinity), ?assertEqual( {ok, 2}, {ok, antidote_crdt_counter_pn:value(State)} ), ?assertEqual( {ok, [a]}, gen_statem:call(Pid, {read, {set, antidote_crdt_set_go}}, infinity) ), ?assertEqual( {ok, mock_value}, gen_statem:call(Pid, {read, {mock_type, mock_partition_fsm}}, infinity) ), ?assertMatch({ok, _}, gen_statem:call(Pid, {prepare, empty}, infinity)). validate_or_read_single_fail_no_token_() -> Pid = whereis(srv), ?assertEqual( {error, mock_read_fail}, gen_statem:call( Pid, {validate_or_read, {read_fail, nothing, ?INVALID_OBJECT_TOKEN}}, infinity ) ). validate_or_read_single_fail_with_token_() -> Pid = whereis(srv), ?assertEqual( {error, mock_read_fail}, gen_statem:call(Pid, {validate_or_read, {read_fail, nothing, <<"valid">>}}, infinity) ). validate_or_read_single_fail_invalid_token_() -> Pid = whereis(srv), ?assertEqual( {error, mock_read_fail}, gen_statem:call(Pid, {validate_or_read, {read_fail, nothing, <<"invalid">>}}, infinity) ). validate_or_read_success_() -> Pid = whereis(srv), {ok, {invalid, State, Token}} = gen_statem:call( Pid, {validate_or_read, {counter, antidote_crdt_counter_pn, ?INVALID_OBJECT_TOKEN}}, infinity ), ?assertEqual(2, State), {ok, valid} = gen_statem:call( Pid, {validate_or_read, {counter, antidote_crdt_counter_pn, Token}}, infinity ), {ok, {invalid, State, Token}} = gen_statem:call( Pid, {validate_or_read, {counter, antidote_crdt_counter_pn, <<"past_token">>}}, infinity ). downstream_fail_() -> Pid = whereis(srv), ?assertMatch( {error, _}, gen_statem:call(Pid, {update, {downstream_fail, nothing, nothing}}, infinity) ). get_snapshot_time_() -> {ok, SnapshotTime} = get_snapshot_time(), ?assertMatch([{mock_dc, _}], vectorclock:to_list(SnapshotTime)). wait_for_clock_() -> {ok, SnapshotTime} = wait_for_clock(vectorclock:from_list([{mock_dc, 10}])), ?assertMatch([{mock_dc, _}], vectorclock:to_list(SnapshotTime)), VecClock = dc_utilities:now_microsec(), {ok, SnapshotTime2} = wait_for_clock(vectorclock:from_list([{mock_dc, VecClock}])), ?assertMatch([{mock_dc, _}], vectorclock:to_list(SnapshotTime2)). -endif.
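%% Sketch of the client-side caching pattern that the validate_or_read calls
%% above support: keep the last value and its token, then revalidate instead
%% of re-reading. cached_read/4 is an illustrative wrapper; the call and reply
%% shapes ({ok, valid} and {ok, {invalid, Value, Token}}) are the ones
%% exercised by the validate_or_read tests above.
cached_read(Pid, Key, Type, {CachedValue, CachedToken}) ->
    case gen_statem:call(Pid, {validate_or_read, {Key, Type, CachedToken}}, infinity) of
        {ok, valid} ->
            %% the cached value is still current; keep it and its token
            {CachedValue, CachedToken};
        {ok, {invalid, NewValue, NewToken}} ->
            %% the cache was stale; adopt the fresh value and token
            {NewValue, NewToken}
    end.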
a0a6a67bffc083524ff6d337fcc4a1413bcaa819d72775adbbc0e86be12cf49a
janestreet/memtrace_viewer_with_deps
ipaddr.ml
* Copyright ( c ) 2013 - 2015 < > * * Permission to use , copy , modify , and distribute this software for any * purpose with or without fee is hereby granted , provided that the above * copyright notice and this permission notice appear in all copies . * * THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN * ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE . * * Copyright (c) 2013-2015 David Sheets <> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. * *) exception Parse_error of string * string type scope = | Point | Interface | Link | Admin | Site | Organization | Global let try_with_result fn a = try Ok (fn a) with Parse_error (msg, _) -> Error (`Msg ("Ipaddr: " ^ msg)) let failwith_msg = function | Ok x -> x | Error (`Msg m) -> failwith m let map_result v f = match v with Ok v -> Ok (f v) | Error _ as e -> e let string_of_scope = function | Point -> "point" | Interface -> "interface" | Link -> "link" | Admin -> "admin" | Site -> "site" | Organization -> "organization" | Global -> "global" let scope_of_string = function | "point" -> Ok Point | "interface" -> Ok Interface | "link" -> Ok Link | "admin" -> Ok Admin | "site" -> Ok Site | "organization" -> Ok Organization | "global" -> Ok Global | s -> Error (`Msg ("unknown scope: " ^ s)) let pp_scope fmt s = Format.pp_print_string fmt (string_of_scope s) let (~|) = Int32.of_int let (|~) = Int32.to_int let (&&&) x y = Int32.logand x y let (|||) x y = Int32.logor x y let (<|<) x y = Int32.shift_left x y let (>|>) x y = Int32.shift_right_logical x y let (>!) x y = (x >|> y) &&& 0xFF_l let (<!) 
x y = (x &&& 0xFF_l) <|< y let need_more x = Parse_error ("not enough data", x) let char_0 = int_of_char '0' let char_a = int_of_char 'a' let char_A = int_of_char 'A' let int_of_char c = match c with | '0'..'9' -> Stdlib.int_of_char c - char_0 | 'a'..'f' -> 10 + Stdlib.int_of_char c - char_a | 'A'..'F' -> 10 + Stdlib.int_of_char c - char_A | _ -> -1 let bad_char i s = let msg = Printf.sprintf "invalid character '%c' at %d" s.[i] i in Parse_error (msg, s) let is_number base n = n >=0 && n < base let parse_int base s i = let len = String.length s in let rec next prev = let j = !i in if j >= len then prev else let c = s.[j] in let k = int_of_char c in if is_number base k then (incr i; next (prev*base + k)) else prev in let i = !i in if i < len then if is_number base (int_of_char s.[i]) then next 0 else raise (bad_char i s) else raise (need_more s) let parse_dec_int s i = parse_int 10 s i let parse_hex_int s i = parse_int 16 s i let expect_char s i c = if !i < String.length s then if s.[!i] <> c then raise (bad_char !i s) else incr i else raise (need_more s) let expect_end s i = if String.length s <= !i then () else raise (bad_char !i s) let hex_char_of_int = function | 0 -> '0' | 1 -> '1' | 2 -> '2' | 3 -> '3' | 4 -> '4' | 5 -> '5' | 6 -> '6' | 7 -> '7' | 8 -> '8' | 9 -> '9' | 10 -> 'a' | 11 -> 'b' | 12 -> 'c' | 13 -> 'd' | 14 -> 'e' | 15 -> 'f' | _ -> raise (Invalid_argument "not a hex int") let hex_string_of_int32 i = String.make 1 (hex_char_of_int (Int32.to_int i)) module V4 = struct type t = int32 let compare a b = (* ignore the sign *) let c = Int32.compare (a >|> 1) (b >|> 1) in if c = 0 then Int32.compare (a &&& 1l) (b &&& 1l) else c let make a b c d = ((~| a <! 24) ||| (~| b <! 16)) ||| ((~| c <! 8) ||| (~| d <! 0)) (* parsing *) let parse_dotted_quad s i = let a = parse_dec_int s i in expect_char s i '.'; let b = parse_dec_int s i in expect_char s i '.'; let c = parse_dec_int s i in expect_char s i '.'; let d = parse_dec_int s i in let valid a = a land 0xff <> a in if valid a then raise (Parse_error ("first octet out of bounds", s)) else if valid b then raise (Parse_error ("second octet out of bounds", s)) else if valid c then raise (Parse_error ("third octet out of bounds", s)) else if valid d then raise (Parse_error ("fourth octet out of bounds", s)) else make a b c d (* string conversion *) let of_string_raw = parse_dotted_quad let of_string_exn s = let o = ref 0 in let x = of_string_raw s o in expect_end s o; x let of_string s = try_with_result of_string_exn s let to_buffer b i = Printf.bprintf b "%ld.%ld.%ld.%ld" (i >! 24) (i >! 16) (i >! 8) (i >! 0) let to_string i = let b = Buffer.create 15 in to_buffer b i; Buffer.contents b let pp ppf i = Format.fprintf ppf "%s" (to_string i) Octets conversion let of_octets_exn ?(off=0) bs = try make (Char.code bs.[0 + off]) (Char.code bs.[1 + off]) (Char.code bs.[2 + off]) (Char.code bs.[3 + off]) with _ -> raise (need_more bs) let of_octets ?off bs = try_with_result (of_octets_exn ?off) bs let write_octets_exn ?(off=0) i b = try Bytes.set b (0 + off) (Char.chr ((|~) (i >! 24))); Bytes.set b (1 + off) (Char.chr ((|~) (i >! 16))); Bytes.set b (2 + off) (Char.chr ((|~) (i >! 8))); Bytes.set b (3 + off) (Char.chr ((|~) (i >! 0))) with _ -> raise (need_more (Bytes.to_string b)) let write_octets ?off i bs = try_with_result (write_octets_exn ?off i) bs let to_octets i = String.init 4 (function | 0 -> Char.chr ((|~) (i >! 24)) | 1 -> Char.chr ((|~) (i >! 16)) | 2 -> Char.chr ((|~) (i >! 8)) | 3 -> Char.chr ((|~) (i >! 
0)) | _ -> assert false) Int32 let of_int32 i = i let to_int32 i = i (* Int16 *) let of_int16 (a,b) = (~| a <|< 16) ||| (~| b) let to_int16 a = ((|~) (a >|> 16), (|~) (a &&& 0xFF_FF_l)) MAC { { : #section-6.2}RFC 1112 } . let multicast_to_mac i = let macb = Bytes.create 6 in Bytes.set macb 0 (Char.chr 0x01); Bytes.set macb 1 (Char.chr 0x00); Bytes.set macb 2 (Char.chr 0x5E); Bytes.set macb 3 (Char.chr ((|~) (i >|> 16 &&& 0x7F_l))); Bytes.set macb 4 (Char.chr ((|~) (i >! 8))); Bytes.set macb 5 (Char.chr ((|~) (i >! 0))); Macaddr.of_octets_exn (Bytes.to_string macb) (* Host *) let to_domain_name i = let name = [ Int32.to_string (i >! 0); Int32.to_string (i >! 8); Int32.to_string (i >! 16); Int32.to_string (i >! 24); "in-addr"; "arpa" ] in Domain_name.(host_exn (of_strings_exn name)) let of_domain_name n = match Domain_name.to_strings n with | [ a ; b ; c ; d ; in_addr ; arpa ] when Domain_name.(equal_label arpa "arpa" && equal_label in_addr "in-addr") -> begin let conv bits data = let i = Int32.of_int (parse_dec_int data (ref 0)) in if i > 0xFFl then raise (Parse_error ("label with a too big number", data)) else i <! bits in try let ( + ) = Int32.add in Some ((conv 0 a) + (conv 8 b) + (conv 16 c) + (conv 24 d)) with | Parse_error _ -> None end | _ -> None let succ t = if Int32.equal t 0xFF_FF_FF_FFl then Error (`Msg "Ipaddr: highest address has been reached") else Ok (Int32.succ t) let pred t = if Int32.equal t 0x00_00_00_00l then Error (`Msg "Ipaddr: lowest address has been reached") else Ok (Int32.pred t) (* constant *) let any = make 0 0 0 0 let unspecified = make 0 0 0 0 let broadcast = make 255 255 255 255 let localhost = make 127 0 0 1 let nodes = make 224 0 0 1 let routers = make 224 0 0 2 module Prefix = struct type addr = t type t = addr * int let compare (pre,sz) (pre',sz') = let c = compare pre pre' in if c = 0 then Stdlib.compare sz sz' else c let ip = make let mask sz = if sz <= 0 then 0_l else if sz >= 32 then 0x0_FF_FF_FF_FF_l else 0x0_FF_FF_FF_FF_l <|< (32 - sz) let prefix (pre,sz) = (pre &&& (mask sz), sz) let make sz pre = (pre,sz) let network_address (pre,sz) addr = (pre &&& (mask sz)) ||| (addr &&& Int32.lognot (mask sz)) (* string conversion *) let _of_string_raw s i = let quad = of_string_raw s i in expect_char s i '/'; let p = parse_dec_int s i in if p > 32 || p < 0 then raise (Parse_error ("invalid prefix size", s)); (p,quad) let of_string_raw s i = let (p,quad) = _of_string_raw s i in make p quad let _of_string_exn s = let i = ref 0 in let res = _of_string_raw s i in expect_end s i; res let of_string_exn s = let (p,quad) = _of_string_exn s in make p quad let of_string s = try_with_result of_string_exn s let _of_netmask_exn ~netmask address = let rec find_greatest_one bits i = if bits = 0_l then i-1 else find_greatest_one (bits >|> 1) (i+1) in let one = netmask &&& (Int32.neg netmask) in let sz = 32 - (find_greatest_one one (if one = 0_l then 33 else 0)) in if netmask <> (mask sz) then raise (Parse_error ("invalid netmask",to_string netmask)) else make sz address let of_netmask_exn ~netmask ~address = _of_netmask_exn ~netmask address let of_netmask ~netmask ~address = try_with_result (_of_netmask_exn ~netmask) address let to_buffer buf (pre,sz) = Printf.bprintf buf "%a/%d" to_buffer pre sz let to_string subnet = let b = Buffer.create 18 in to_buffer b subnet; Buffer.contents b let pp ppf i = Format.fprintf ppf "%s" (to_string i) let mem ip (pre,sz) = let m = mask sz in (ip &&& m) = (pre &&& m) let subset ~subnet:(pre1,sz1) ~network:(pre2,sz2) = sz1 >= sz2 && mem pre1 
(pre2,sz2) let of_addr ip = make 32 ip let global = make 0 (ip 0 0 0 0) let relative = make 8 (ip 0 0 0 0) let loopback = make 8 (ip 127 0 0 0) let link = make 16 (ip 169 254 0 0) let multicast = make 4 (ip 224 0 0 0) let multicast_org = make 14 (ip 239 192 0 0) let multicast_admin = make 16 (ip 239 255 0 0) let multicast_link = make 24 (ip 224 0 0 0) let private_10 = make 8 (ip 10 0 0 0) let private_172 = make 12 (ip 172 16 0 0) let private_192 = make 16 (ip 192 168 0 0) let private_blocks = [ loopback ; link ; private_10 ; private_172 ; private_192 ] let broadcast (pre,sz) = pre ||| (0x0_FF_FF_FF_FF_l >|> sz) let network (pre,sz) = pre &&& mask sz let address (addr,_) = addr let bits (_,sz) = sz let netmask subnet = mask (bits subnet) let first (_,sz as cidr) = if sz > 30 then network cidr else network cidr |> succ |> failwith_msg let last (_,sz as cidr) = if sz > 30 then broadcast cidr else broadcast cidr |> pred |> failwith_msg end (* TODO: this could be optimized with something trie-like *) let scope i = let mem = Prefix.mem i in if mem Prefix.loopback then Interface else if mem Prefix.link then Link else if List.exists mem Prefix.private_blocks then Organization else if i = unspecified then Point else if i = broadcast then Admin else if mem Prefix.relative then Admin else if mem Prefix.multicast then (if mem Prefix.multicast_org then Organization else if mem Prefix.multicast_admin then Admin else if mem Prefix.multicast_link then Link else Global) else Global let is_global i = (scope i) = Global let is_multicast i = Prefix.(mem i multicast) let is_private i = (scope i) <> Global end module B128 = struct type t = int32 * int32 * int32 * int32 let of_int64 (a, b) = Int64.( to_int32 (shift_right_logical a 32), to_int32 a, to_int32 (shift_right_logical b 32), to_int32 b) let to_int64 (a,b,c,d) = Int64.( logor (shift_left (of_int32 a) 32) (of_int32 b), logor (shift_left (of_int32 c) 32) (of_int32 d)) let of_int32 x = x let to_int32 x = x let of_int16 (a, b, c, d, e, f, g, h) = V4.of_int16 (a,b), V4.of_int16 (c,d), V4.of_int16 (e,f), V4.of_int16 (g,h) let to_int16 (x,y,z,t) = let a,b = V4.to_int16 x and c,d = V4.to_int16 y and e,f = V4.to_int16 z and g,h = V4.to_int16 t in (a,b,c,d,e,f,g,h) let write_octets_exn ?(off=0) (a,b,c,d) byte = V4.write_octets_exn ~off a byte; V4.write_octets_exn ~off:(off+4) b byte; V4.write_octets_exn ~off:(off+8) c byte; V4.write_octets_exn ~off:(off+12) d byte let compare (a1,b1,c1,d1) (a2,b2,c2,d2) = match V4.compare a1 a2 with | 0 -> begin match V4.compare b1 b2 with | 0 -> begin match V4.compare c1 c2 with | 0 -> V4.compare d1 d2 | n -> n end | n -> n end | n -> n let logand (a1,b1,c1,d1) (a2,b2,c2,d2) = (a1 &&& a2, b1 &&& b2, c1 &&& c2, d1 &&& d2) let logor (a1,b1,c1,d1) (a2,b2,c2,d2) = (a1 ||| a2, b1 ||| b2, c1 ||| c2, d1 ||| d2) let lognot (a,b,c,d) = Int32.(lognot a, lognot b, lognot c, lognot d) let succ (a,b,c,d) = let cb (n,tl) v = match n with | 0l -> (0l,v::tl) | n -> let n = if Int32.equal v 0xFF_FF_FF_FFl then n else 0l in (n,Int32.succ v::tl) in match List.fold_left cb (1l,[]) [d;c;b;a] with | 0l, [a;b;c;d] -> Ok (of_int32 (a,b,c,d)) | n, [_;_;_;_] when n > 0l -> Error (`Msg "Ipaddr: highest address has been reached") | _ -> Error (`Msg "Ipaddr: unexpected error with B128") let pred (a,b,c,d) = let cb (n,tl) v = match n with | 0l -> (0l,v::tl) | n -> let n = if v = 0x00_00_00_00l then n else 0l in (n,Int32.pred v::tl) in match List.fold_left cb (-1l,[]) [d;c;b;a] with | 0l, [a;b;c;d] -> Ok (of_int32 (a,b,c,d)) | n, [_;_;_;_] when n < 0l -> Error 
(`Msg "Ipaddr: lowest address has been reached") | _ -> Error (`Msg "Ipaddr: unexpected error with B128") let shift_right (a,b,c,d) sz = let rec loop (a,b,c,d) sz = if sz < 32 then (sz, (a,b,c,d)) else loop (0l,a,b,c) (sz - 32) in let (sz, (a,b,c,d)) = loop (a,b,c,d) sz in let fn (saved,tl) part = let new_saved = Int32.logand part (0xFF_FF_FF_FFl >|> sz) in let new_part = (part >|> sz) ||| (saved <|< 32 - sz) in (new_saved, new_part::tl) in match List.fold_left fn (0l,[]) [a;b;c;d] with | _, [d;c;b;a] -> Ok (of_int32 (a, b, c, d)) | _ -> Error (`Msg "Ipaddr: unexpected error with B128.shift_right") end module V6 = struct include B128 (* TODO: Perhaps represent with bytestring? *) let make a b c d e f g h = of_int16 (a,b,c,d,e,f,g,h) (* parsing *) let parse_ipv6 s i = let compressed = ref false in (* :: *) let len = String.length s in if len < !i + 1 then (raise (need_more s)); let use_bracket = s.[!i] = '['; in if use_bracket then incr i; if len < !i + 2 then (raise (need_more s)); (* check if it starts with :: *) let l = if s.[!i] = ':' then begin incr i; if s.[!i] = ':' then begin compressed := true; incr i; [-1] end else raise (bad_char !i s); end else [] in let rec loop nb acc = if nb >= 8 then acc else if !i >= len then acc else let pos = !i in let x = try parse_hex_int s i with _ -> -1 in if x < 0 then acc else if nb = 7 then x::acc else if !i < len && s.[!i] = ':' then begin incr i; if !i < len then if s.[!i] = ':' then if !compressed then (decr i; x::acc) (* trailing :: *) else begin compressed:=true; incr i; loop (nb + 2) (-1::x::acc) end else begin if is_number 16 (int_of_char s.[!i]) then loop (nb+1) (x::acc) else raise (bad_char !i s) end else raise (need_more s) end else if !i < len && s.[!i] = '.' then begin i:= pos; let v4 = V4.of_string_raw s i in let (hi,lo) = V4.to_int16 v4 in lo :: hi :: acc end else x::acc in let res = loop (List.length l) l in let res_len = List.length res in if res_len > 8 then raise (Parse_error ("too many components",s)) else if res_len = 0 then raise (need_more s) else let a = Array.make 8 0 in let missing = if !compressed then 8 - (res_len - 1) else if res_len <> 8 then if !i < len then raise (bad_char !i s) else raise (need_more s) else 0 in let _ = List.fold_left (fun i x -> if x = -1 then i - missing else begin if x land 0xffff <> x then raise (Parse_error (Printf.sprintf "component %d out of bounds" i, s)); a.(i) <- x; i - 1 end ) 7 res in (if use_bracket then expect_char s i ']'); a (* string conversion *) let of_string_raw s offset = let a = parse_ipv6 s offset in make a.(0) a.(1) a.(2) a.(3) a.(4) a.(5) a.(6) a.(7) let of_string_exn s = let o = ref 0 in let x = of_string_raw s o in expect_end s o; x let of_string s = try_with_result of_string_exn s (* *) let to_buffer buf addr = let (a,b,c,d,e,f,g,h) as comp = to_int16 addr in let v4 = match comp with | (0,0,0,0,0,0xffff,_,_) -> true | _ -> false in let rec loop elide zeros acc = function | 0 :: xs -> loop elide (zeros - 1) acc xs | n :: xs when zeros = 0 -> loop elide 0 (n::acc) xs | n :: xs -> loop (min elide zeros) 0 (n::zeros::acc) xs | [] -> let elide = min elide zeros in (if elide < -1 then Some elide else None), (if zeros = 0 then acc else zeros::acc) in let elide,l = loop 0 0 [] [h;g;f;e;d;c;b;a] in assert(match elide with Some x when x < -8 -> false | _ -> true); let rec cons_zeros l x = if x >= 0 then l else cons_zeros (Some 0::l) (x+1) in let _,lrev = List.fold_left (fun (patt, l) x -> if Some x = patt then (None, (None::l)) else if x < 0 then (patt, (cons_zeros l x)) else (patt, 
((Some x)::l)) ) (elide, []) l in let rec fill = function | [Some hi;Some lo] when v4 -> let addr = V4.of_int16 (hi, lo) in V4.to_buffer buf addr | None::xs -> Buffer.add_string buf "::"; fill xs | [Some n] -> Printf.bprintf buf "%x" n | (Some n)::None::xs -> Printf.bprintf buf "%x::" n; fill xs | (Some n)::xs -> Printf.bprintf buf "%x:" n; fill xs | [] -> () in fill (List.rev lrev) let to_string l = let buf = Buffer.create 39 in to_buffer buf l; Buffer.contents buf let pp ppf i = Format.fprintf ppf "%s" (to_string i) (* byte conversion *) let of_octets_exn ?(off=0) bs = (* TODO : from cstruct *) let hihi = V4.of_octets_exn ~off bs in let hilo = V4.of_octets_exn ~off:(off+4) bs in let lohi = V4.of_octets_exn ~off:(off+8) bs in let lolo = V4.of_octets_exn ~off:(off+12) bs in of_int32 (hihi, hilo, lohi, lolo) let of_octets ?off bs = try_with_result (of_octets_exn ?off) bs let write_octets ?off i bs = try_with_result (write_octets_exn ?off i) bs let to_octets i = let b = Bytes.create 16 in write_octets_exn i b; Bytes.to_string b MAC (* {{:#section-7}RFC 2464}. *) let multicast_to_mac i = let (_,_,_,i) = to_int32 i in let macb = Bytes.create 6 in Bytes.set macb 0 (Char.chr 0x33); Bytes.set macb 1 (Char.chr 0x33); Bytes.set macb 2 (Char.chr ((|~) (i >! 24))); Bytes.set macb 3 (Char.chr ((|~) (i >! 16))); Bytes.set macb 4 (Char.chr ((|~) (i >! 8))); Bytes.set macb 5 (Char.chr ((|~) (i >! 0))); Macaddr.of_octets_exn (Bytes.to_string macb) (* Host *) let to_domain_name (a,b,c,d) = let name = [ hex_string_of_int32 ((d >|> 0) &&& 0xF_l); hex_string_of_int32 ((d >|> 4) &&& 0xF_l); hex_string_of_int32 ((d >|> 8) &&& 0xF_l); hex_string_of_int32 ((d >|> 12) &&& 0xF_l); hex_string_of_int32 ((d >|> 16) &&& 0xF_l); hex_string_of_int32 ((d >|> 20) &&& 0xF_l); hex_string_of_int32 ((d >|> 24) &&& 0xF_l); hex_string_of_int32 ((d >|> 28) &&& 0xF_l); hex_string_of_int32 ((c >|> 0) &&& 0xF_l); hex_string_of_int32 ((c >|> 4) &&& 0xF_l); hex_string_of_int32 ((c >|> 8) &&& 0xF_l); hex_string_of_int32 ((c >|> 12) &&& 0xF_l); hex_string_of_int32 ((c >|> 16) &&& 0xF_l); hex_string_of_int32 ((c >|> 20) &&& 0xF_l); hex_string_of_int32 ((c >|> 24) &&& 0xF_l); hex_string_of_int32 ((c >|> 28) &&& 0xF_l); hex_string_of_int32 ((b >|> 0) &&& 0xF_l); hex_string_of_int32 ((b >|> 4) &&& 0xF_l); hex_string_of_int32 ((b >|> 8) &&& 0xF_l); hex_string_of_int32 ((b >|> 12) &&& 0xF_l); hex_string_of_int32 ((b >|> 16) &&& 0xF_l); hex_string_of_int32 ((b >|> 20) &&& 0xF_l); hex_string_of_int32 ((b >|> 24) &&& 0xF_l); hex_string_of_int32 ((b >|> 28) &&& 0xF_l); hex_string_of_int32 ((a >|> 0) &&& 0xF_l); hex_string_of_int32 ((a >|> 4) &&& 0xF_l); hex_string_of_int32 ((a >|> 8) &&& 0xF_l); hex_string_of_int32 ((a >|> 12) &&& 0xF_l); hex_string_of_int32 ((a >|> 16) &&& 0xF_l); hex_string_of_int32 ((a >|> 20) &&& 0xF_l); hex_string_of_int32 ((a >|> 24) &&& 0xF_l); hex_string_of_int32 ((a >|> 28) &&& 0xF_l); "ip6"; "arpa" ] in Domain_name.(host_exn (of_strings_exn name)) let of_domain_name n = let open Domain_name in if count_labels n = 34 then let ip6 = get_label_exn n 32 and arpa = get_label_exn n 33 in if equal_label ip6 "ip6" && equal_label arpa "arpa" then let rev = true in let n' = drop_label_exn ~rev ~amount:2 n in let d = drop_label_exn ~rev ~amount:24 n' and c = drop_label_exn ~amount:8 (drop_label_exn ~rev ~amount:16 n') and b = drop_label_exn ~amount:16 (drop_label_exn ~rev ~amount:8 n') and a = drop_label_exn ~amount:24 n' in let t b d = let v = Int32.of_int (parse_hex_int d (ref 0)) in if v > 0xFl then raise (Parse_error ("number 
in label too big", d)) else v <|< b in let f d = List.fold_left (fun (acc, b) d -> Int32.add acc (t b d), b + 4) (0l, 0) (to_strings d) in try let a', _ = f a and b', _ = f b and c', _ = f c and d', _ = f d in Some (a', b', c', d') with | Parse_error _ -> None else None else None (* constant *) let unspecified = make 0 0 0 0 0 0 0 0 let localhost = make 0 0 0 0 0 0 0 1 let interface_nodes = make 0xff01 0 0 0 0 0 0 1 let link_nodes = make 0xff02 0 0 0 0 0 0 1 let interface_routers = make 0xff01 0 0 0 0 0 0 2 let link_routers = make 0xff02 0 0 0 0 0 0 2 let site_routers = make 0xff05 0 0 0 0 0 0 2 module Prefix = struct type addr = t type t = addr * int let compare (pre,sz) (pre',sz') = let c = compare pre pre' in if c = 0 then Stdlib.compare sz sz' else c let ip = make let _full = let f = 0x0_FFFF_FFFF_l in f,f,f,f let mask sz = V4.Prefix.( mask (sz - 0), mask (sz - 32), mask (sz - 64), mask (sz - 96)) let prefix (pre,sz) = (logand pre (mask sz),sz) let make sz pre = (pre,sz) let network_address (pre,sz) addr = logor (logand pre (mask sz)) (logand addr (lognot (mask sz))) let _of_string_raw s i = let v6 = of_string_raw s i in expect_char s i '/'; let p = parse_dec_int s i in if p > 128 || p < 0 then raise (Parse_error ("invalid prefix size", s)); (p, v6) let of_string_raw s i = let (p,v6) = _of_string_raw s i in make p v6 let _of_string_exn s = let i = ref 0 in let res = _of_string_raw s i in expect_end s i; res let of_string_exn s = let (p,v6) = _of_string_exn s in make p v6 let of_string s = try_with_result of_string_exn s let _of_netmask_exn ~netmask address = let nm = let bits netmask = V4.Prefix.bits (V4.Prefix.of_netmask_exn ~netmask ~address:V4.any) in match netmask with | (0_l,0_l,0_l,0_l) -> 0 | (lsw ,0_l ,0_l ,0_l) -> bits lsw | (-1_l,lsw ,0_l ,0_l) -> bits lsw + 32 | (-1_l,-1_l,lsw ,0_l) -> bits lsw + 64 | (-1_l,-1_l,-1_l,lsw) -> bits lsw + 96 | _ -> raise (Parse_error ("invalid netmask", to_string netmask)) in make nm address let of_netmask_exn ~netmask ~address = _of_netmask_exn ~netmask address let of_netmask ~netmask ~address = try_with_result (_of_netmask_exn ~netmask) address let to_buffer buf (pre,sz) = Printf.bprintf buf "%a/%d" to_buffer pre sz let to_string subnet = let buf = Buffer.create 43 in to_buffer buf subnet; Buffer.contents buf let pp ppf i = Format.fprintf ppf "%s" (to_string i) let mem ip (pre,sz) = let m = mask sz in logand ip m = logand pre m let subset ~subnet:(pre1,sz1) ~network:(pre2,sz2) = sz1 >= sz2 && mem pre1 (pre2,sz2) let of_addr ip = make 128 ip let global_unicast_001 = make 3 (ip 0x2000 0 0 0 0 0 0 0) let link = make 64 (ip 0xfe80 0 0 0 0 0 0 0) let unique_local = make 7 (ip 0xfc00 0 0 0 0 0 0 0) let multicast = make 8 (ip 0xff00 0 0 0 0 0 0 0) let ipv4_mapped = make 96 (ip 0 0 0 0 0 0xffff 0 0) let noneui64_interface = make 3 (ip 0x0000 0 0 0 0 0 0 0) let solicited_node = make 104 (ip 0xff02 0 0 0 0 1 0xff00 0) let network (pre,sz) = logand pre (mask sz) let address (addr,_) = addr let bits (_,sz) = sz let netmask subnet = mask (bits subnet) let first (_,sz as cidr) = if sz > 126 then network cidr else network cidr |> succ |> failwith_msg let last (_,sz as cidr) = let ffff = ip 0xffff 0xffff 0xffff 0xffff 0xffff 0xffff 0xffff 0xffff in logor (network cidr) (shift_right ffff sz |> failwith_msg) end (* TODO: This could be optimized with something trie-like *) let scope i = let mem = Prefix.mem i in if mem Prefix.global_unicast_001 then Global else if mem Prefix.ipv4_mapped (* rfc says they are technically global but... 
*) then V4.scope (let (_,_,_,v4) = to_int32 i in V4.of_int32 v4) else if mem Prefix.multicast then let (x,_,_,_,_,_,_,_) = to_int16 i in match x land 0xf with | 0 -> Point | 1 -> Interface | 2 | 3 -> Link | 4 -> Admin | 5 | 6 | 7 -> Site | 8 | 9 | 10 | 11 | 12 | 13 -> Organization | 14 | 15 -> Global | _ -> assert false else if mem Prefix.link then Link else if mem Prefix.unique_local then Global else if i = localhost then Interface else if i = unspecified then Point else Global let link_address_of_mac = let c b i = Char.code (String.get b i) in fun mac -> let bmac = Macaddr.to_octets mac in let c_0 = c bmac 0 lxor 2 in let addr = make 0 0 0 0 (c_0 lsl 8 + c bmac 1) (c bmac 2 lsl 8 + 0xff ) (0xfe00 + c bmac 3) (c bmac 4 lsl 8 + c bmac 5) in Prefix.(network_address link addr) let is_global i = (scope i) = Global let is_multicast i = Prefix.(mem i multicast) let is_private i = (scope i) <> Global end type ('v4,'v6) v4v6 = V4 of 'v4 | V6 of 'v6 type t = (V4.t,V6.t) v4v6 let compare a b = match a,b with | V4 a, V4 b -> V4.compare a b | V6 a, V6 b -> V6.compare a b | V4 _, V6 _ -> -1 | V6 _, V4 _ -> 1 let to_string = function | V4 x -> V4.to_string x | V6 x -> V6.to_string x let to_buffer buf = function | V4 x -> V4.to_buffer buf x | V6 x -> V6.to_buffer buf x let pp ppf i = Format.fprintf ppf "%s" (to_string i) let of_string_raw s offset = let len = String.length s in if len < !offset + 1 then raise (need_more s); match s.[0] with | '[' -> V6 (V6.of_string_raw s offset) | _ -> let pos = !offset in try V4 (V4.of_string_raw s offset) with Parse_error (v4_msg,_) -> offset := pos; try V6 (V6.of_string_raw s offset) with Parse_error(v6_msg,s) -> let msg = Printf.sprintf "not an IPv4 address: %s\nnot an IPv6 address: %s" v4_msg v6_msg in raise (Parse_error (msg,s)) let of_string_exn s = of_string_raw s (ref 0) let of_string s = try_with_result of_string_exn s let v6_of_v4 v4 = V6.(Prefix.(network_address ipv4_mapped (of_int32 (0l,0l,0l,v4)))) let v4_of_v6 v6 = if V6.Prefix.(mem v6 ipv4_mapped) then let (_,_,_,v4) = V6.to_int32 v6 in Some V4.(of_int32 v4) else None let to_v4 = function V4 v4 -> Some v4 | V6 v6 -> v4_of_v6 v6 let to_v6 = function V4 v4 -> v6_of_v4 v4 | V6 v6 -> v6 let scope = function V4 v4 -> V4.scope v4 | V6 v6 -> V6.scope v6 let is_global = function | V4 v4 -> V4.is_global v4 | V6 v6 -> V6.is_global v6 let is_multicast = function | V4 v4 -> V4.is_multicast v4 | V6 v6 -> V6.is_multicast v6 let is_private = function | V4 v4 -> V4.is_private v4 | V6 v6 -> V6.is_private v6 let multicast_to_mac = function | V4 v4 -> V4.multicast_to_mac v4 | V6 v6 -> V6.multicast_to_mac v6 let to_domain_name = function | V4 v4 -> V4.to_domain_name v4 | V6 v6 -> V6.to_domain_name v6 let of_domain_name n = match Domain_name.count_labels n with | 6 -> begin match V4.of_domain_name n with | None -> None | Some x -> Some (V4 x) end | 34 -> begin match V6.of_domain_name n with | None -> None | Some x -> Some (V6 x) end | _ -> None let succ = function | V4 addr -> map_result (V4.succ addr) (fun v -> V4 v) | V6 addr -> map_result (V6.succ addr) (fun v -> V6 v) let pred = function | V4 addr -> map_result (V4.pred addr) (fun v -> V4 v) | V6 addr -> map_result (V6.pred addr) (fun v -> V6 v) module Prefix = struct module Addr = struct let to_v6 = to_v6 end type addr = t type t = (V4.Prefix.t,V6.Prefix.t) v4v6 let compare a b = match a,b with | V4 a , V4 b -> V4.Prefix.compare a b | V6 a , V6 b -> V6.Prefix.compare a b | V4 _ , V6 _ -> -1 | V6 _ , V4 _ -> 1 let of_string_raw s offset = let len = String.length s in if 
len < !offset + 1 then raise (need_more s); match s.[0] with | '[' -> V6 (V6.Prefix.of_string_raw s offset) | _ -> let pos = !offset in try V4 (V4.Prefix.of_string_raw s offset) with Parse_error (v4_msg,_) -> offset := pos; try V6 (V6.Prefix.of_string_raw s offset) with Parse_error(v6_msg,s) -> let msg = Printf.sprintf "not an IPv4 prefix: %s\nnot an IPv6 prefix: %s" v4_msg v6_msg in raise (Parse_error (msg,s)) let of_string_exn s = of_string_raw s (ref 0) let of_string s = try_with_result of_string_exn s let v6_of_v4 v4 = V6.Prefix.make (96 + V4.Prefix.bits v4) (v6_of_v4 (V4.Prefix.network v4)) let v4_of_v6 v6 = match v4_of_v6 (V6.Prefix.network v6) with | Some v4 -> Some (V4.Prefix.make (V6.Prefix.bits v6 - 96) v4) | None -> None let to_v4 = function V4 v4 -> Some v4 | V6 v6 -> v4_of_v6 v6 let to_v6 = function V4 v4 -> v6_of_v4 v4 | V6 v6 -> v6 let mem ip prefix = V6.Prefix.mem (Addr.to_v6 ip) (to_v6 prefix) let subset ~subnet ~network = V6.Prefix.subset ~subnet:(to_v6 subnet) ~network:(to_v6 network) let of_addr = function | V4 p -> V4 (V4.Prefix.of_addr p) | V6 p -> V6 (V6.Prefix.of_addr p) let to_string = function | V4 p -> V4.Prefix.to_string p | V6 p -> V6.Prefix.to_string p let to_buffer buf = function | V4 p -> V4.Prefix.to_buffer buf p | V6 p -> V6.Prefix.to_buffer buf p let network = function | V4 p -> V4 (V4.Prefix.network p) | V6 p -> V6 (V6.Prefix.network p) let netmask = function | V4 p -> V4 (V4.Prefix.netmask p) | V6 p -> V6 (V6.Prefix.netmask p) let pp ppf i = Format.fprintf ppf "%s" (to_string i) let first = function | V4 p -> V4 (V4.Prefix.first p) | V6 p -> V6 (V6.Prefix.first p) let last = function | V4 p -> V4 (V4.Prefix.last p) | V6 p -> V6 (V6.Prefix.last p) end
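The V4 and V6 submodules in the file above expose exception-raising and result-returning parsers plus CIDR prefix arithmetic. A minimal usage sketch, not part of the source, assuming the file compiles as a module named Ipaddr and that these values are exported by its interface; the concrete addresses are illustrative only:

(* IPv4: parse an address and a CIDR prefix, then test membership. *)
let () =
  let addr = Ipaddr.V4.of_string_exn "192.168.1.10" in
  let net = Ipaddr.V4.Prefix.of_string_exn "192.168.0.0/16" in
  Printf.printf "%s in %s: %b (netmask %s)\n"
    (Ipaddr.V4.to_string addr)
    (Ipaddr.V4.Prefix.to_string net)
    (Ipaddr.V4.Prefix.mem addr net)
    (Ipaddr.V4.to_string (Ipaddr.V4.Prefix.netmask net));
  (* IPv6: to_string emits the zero-compressed form, e.g. "2001:db8::1". *)
  let a6 = Ipaddr.V6.of_string_exn "2001:0db8:0000:0000:0000:0000:0000:0001" in
  print_endline (Ipaddr.V6.to_string a6)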
null
https://raw.githubusercontent.com/janestreet/memtrace_viewer_with_deps/5a9e1f927f5f8333e2d71c8d3ca03a45587422c4/vendor/ipaddr/lib/ipaddr.ml
ocaml
ignore the sign parsing string conversion Int16 Host constant string conversion TODO: this could be optimized with something trie-like TODO: Perhaps represent with bytestring? parsing :: check if it starts with :: trailing :: string conversion byte conversion TODO : from cstruct {{:#section-7}RFC 2464}. Host constant TODO: This could be optimized with something trie-like rfc says they are technically global but...
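The multicast_to_mac functions defined above implement the standard group-to-link-layer mappings (01:00:5e plus the low 23 bits for IPv4, 33:33 plus the low 32 bits for IPv6). A hedged sketch of how they might be called, assuming the same Ipaddr module name as before and Macaddr.to_string from the macaddr library; the group addresses are illustrative only:

let () =
  (* 224.0.0.251 (the mDNS group) maps to 01:00:5e:00:00:fb. *)
  let g4 = Ipaddr.V4.of_string_exn "224.0.0.251" in
  print_endline (Macaddr.to_string (Ipaddr.V4.multicast_to_mac g4));
  (* ff02::fb maps to 33:33:00:00:00:fb. *)
  let g6 = Ipaddr.V6.of_string_exn "ff02::fb" in
  print_endline (Macaddr.to_string (Ipaddr.V6.multicast_to_mac g6))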
* Copyright ( c ) 2013 - 2015 < > * * Permission to use , copy , modify , and distribute this software for any * purpose with or without fee is hereby granted , provided that the above * copyright notice and this permission notice appear in all copies . * * THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN * ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE . * * Copyright (c) 2013-2015 David Sheets <> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. * *) exception Parse_error of string * string type scope = | Point | Interface | Link | Admin | Site | Organization | Global let try_with_result fn a = try Ok (fn a) with Parse_error (msg, _) -> Error (`Msg ("Ipaddr: " ^ msg)) let failwith_msg = function | Ok x -> x | Error (`Msg m) -> failwith m let map_result v f = match v with Ok v -> Ok (f v) | Error _ as e -> e let string_of_scope = function | Point -> "point" | Interface -> "interface" | Link -> "link" | Admin -> "admin" | Site -> "site" | Organization -> "organization" | Global -> "global" let scope_of_string = function | "point" -> Ok Point | "interface" -> Ok Interface | "link" -> Ok Link | "admin" -> Ok Admin | "site" -> Ok Site | "organization" -> Ok Organization | "global" -> Ok Global | s -> Error (`Msg ("unknown scope: " ^ s)) let pp_scope fmt s = Format.pp_print_string fmt (string_of_scope s) let (~|) = Int32.of_int let (|~) = Int32.to_int let (&&&) x y = Int32.logand x y let (|||) x y = Int32.logor x y let (<|<) x y = Int32.shift_left x y let (>|>) x y = Int32.shift_right_logical x y let (>!) x y = (x >|> y) &&& 0xFF_l let (<!) 
x y = (x &&& 0xFF_l) <|< y let need_more x = Parse_error ("not enough data", x) let char_0 = int_of_char '0' let char_a = int_of_char 'a' let char_A = int_of_char 'A' let int_of_char c = match c with | '0'..'9' -> Stdlib.int_of_char c - char_0 | 'a'..'f' -> 10 + Stdlib.int_of_char c - char_a | 'A'..'F' -> 10 + Stdlib.int_of_char c - char_A | _ -> -1 let bad_char i s = let msg = Printf.sprintf "invalid character '%c' at %d" s.[i] i in Parse_error (msg, s) let is_number base n = n >=0 && n < base let parse_int base s i = let len = String.length s in let rec next prev = let j = !i in if j >= len then prev else let c = s.[j] in let k = int_of_char c in if is_number base k then (incr i; next (prev*base + k)) else prev in let i = !i in if i < len then if is_number base (int_of_char s.[i]) then next 0 else raise (bad_char i s) else raise (need_more s) let parse_dec_int s i = parse_int 10 s i let parse_hex_int s i = parse_int 16 s i let expect_char s i c = if !i < String.length s then if s.[!i] <> c then raise (bad_char !i s) else incr i else raise (need_more s) let expect_end s i = if String.length s <= !i then () else raise (bad_char !i s) let hex_char_of_int = function | 0 -> '0' | 1 -> '1' | 2 -> '2' | 3 -> '3' | 4 -> '4' | 5 -> '5' | 6 -> '6' | 7 -> '7' | 8 -> '8' | 9 -> '9' | 10 -> 'a' | 11 -> 'b' | 12 -> 'c' | 13 -> 'd' | 14 -> 'e' | 15 -> 'f' | _ -> raise (Invalid_argument "not a hex int") let hex_string_of_int32 i = String.make 1 (hex_char_of_int (Int32.to_int i)) module V4 = struct type t = int32 let c = Int32.compare (a >|> 1) (b >|> 1) in if c = 0 then Int32.compare (a &&& 1l) (b &&& 1l) else c let make a b c d = ((~| a <! 24) ||| (~| b <! 16)) ||| ((~| c <! 8) ||| (~| d <! 0)) let parse_dotted_quad s i = let a = parse_dec_int s i in expect_char s i '.'; let b = parse_dec_int s i in expect_char s i '.'; let c = parse_dec_int s i in expect_char s i '.'; let d = parse_dec_int s i in let valid a = a land 0xff <> a in if valid a then raise (Parse_error ("first octet out of bounds", s)) else if valid b then raise (Parse_error ("second octet out of bounds", s)) else if valid c then raise (Parse_error ("third octet out of bounds", s)) else if valid d then raise (Parse_error ("fourth octet out of bounds", s)) else make a b c d let of_string_raw = parse_dotted_quad let of_string_exn s = let o = ref 0 in let x = of_string_raw s o in expect_end s o; x let of_string s = try_with_result of_string_exn s let to_buffer b i = Printf.bprintf b "%ld.%ld.%ld.%ld" (i >! 24) (i >! 16) (i >! 8) (i >! 0) let to_string i = let b = Buffer.create 15 in to_buffer b i; Buffer.contents b let pp ppf i = Format.fprintf ppf "%s" (to_string i) Octets conversion let of_octets_exn ?(off=0) bs = try make (Char.code bs.[0 + off]) (Char.code bs.[1 + off]) (Char.code bs.[2 + off]) (Char.code bs.[3 + off]) with _ -> raise (need_more bs) let of_octets ?off bs = try_with_result (of_octets_exn ?off) bs let write_octets_exn ?(off=0) i b = try Bytes.set b (0 + off) (Char.chr ((|~) (i >! 24))); Bytes.set b (1 + off) (Char.chr ((|~) (i >! 16))); Bytes.set b (2 + off) (Char.chr ((|~) (i >! 8))); Bytes.set b (3 + off) (Char.chr ((|~) (i >! 0))) with _ -> raise (need_more (Bytes.to_string b)) let write_octets ?off i bs = try_with_result (write_octets_exn ?off i) bs let to_octets i = String.init 4 (function | 0 -> Char.chr ((|~) (i >! 24)) | 1 -> Char.chr ((|~) (i >! 16)) | 2 -> Char.chr ((|~) (i >! 8)) | 3 -> Char.chr ((|~) (i >! 
0)) | _ -> assert false) Int32 let of_int32 i = i let to_int32 i = i let of_int16 (a,b) = (~| a <|< 16) ||| (~| b) let to_int16 a = ((|~) (a >|> 16), (|~) (a &&& 0xFF_FF_l)) MAC { { : #section-6.2}RFC 1112 } . let multicast_to_mac i = let macb = Bytes.create 6 in Bytes.set macb 0 (Char.chr 0x01); Bytes.set macb 1 (Char.chr 0x00); Bytes.set macb 2 (Char.chr 0x5E); Bytes.set macb 3 (Char.chr ((|~) (i >|> 16 &&& 0x7F_l))); Bytes.set macb 4 (Char.chr ((|~) (i >! 8))); Bytes.set macb 5 (Char.chr ((|~) (i >! 0))); Macaddr.of_octets_exn (Bytes.to_string macb) let to_domain_name i = let name = [ Int32.to_string (i >! 0); Int32.to_string (i >! 8); Int32.to_string (i >! 16); Int32.to_string (i >! 24); "in-addr"; "arpa" ] in Domain_name.(host_exn (of_strings_exn name)) let of_domain_name n = match Domain_name.to_strings n with | [ a ; b ; c ; d ; in_addr ; arpa ] when Domain_name.(equal_label arpa "arpa" && equal_label in_addr "in-addr") -> begin let conv bits data = let i = Int32.of_int (parse_dec_int data (ref 0)) in if i > 0xFFl then raise (Parse_error ("label with a too big number", data)) else i <! bits in try let ( + ) = Int32.add in Some ((conv 0 a) + (conv 8 b) + (conv 16 c) + (conv 24 d)) with | Parse_error _ -> None end | _ -> None let succ t = if Int32.equal t 0xFF_FF_FF_FFl then Error (`Msg "Ipaddr: highest address has been reached") else Ok (Int32.succ t) let pred t = if Int32.equal t 0x00_00_00_00l then Error (`Msg "Ipaddr: lowest address has been reached") else Ok (Int32.pred t) let any = make 0 0 0 0 let unspecified = make 0 0 0 0 let broadcast = make 255 255 255 255 let localhost = make 127 0 0 1 let nodes = make 224 0 0 1 let routers = make 224 0 0 2 module Prefix = struct type addr = t type t = addr * int let compare (pre,sz) (pre',sz') = let c = compare pre pre' in if c = 0 then Stdlib.compare sz sz' else c let ip = make let mask sz = if sz <= 0 then 0_l else if sz >= 32 then 0x0_FF_FF_FF_FF_l else 0x0_FF_FF_FF_FF_l <|< (32 - sz) let prefix (pre,sz) = (pre &&& (mask sz), sz) let make sz pre = (pre,sz) let network_address (pre,sz) addr = (pre &&& (mask sz)) ||| (addr &&& Int32.lognot (mask sz)) let _of_string_raw s i = let quad = of_string_raw s i in expect_char s i '/'; let p = parse_dec_int s i in if p > 32 || p < 0 then raise (Parse_error ("invalid prefix size", s)); (p,quad) let of_string_raw s i = let (p,quad) = _of_string_raw s i in make p quad let _of_string_exn s = let i = ref 0 in let res = _of_string_raw s i in expect_end s i; res let of_string_exn s = let (p,quad) = _of_string_exn s in make p quad let of_string s = try_with_result of_string_exn s let _of_netmask_exn ~netmask address = let rec find_greatest_one bits i = if bits = 0_l then i-1 else find_greatest_one (bits >|> 1) (i+1) in let one = netmask &&& (Int32.neg netmask) in let sz = 32 - (find_greatest_one one (if one = 0_l then 33 else 0)) in if netmask <> (mask sz) then raise (Parse_error ("invalid netmask",to_string netmask)) else make sz address let of_netmask_exn ~netmask ~address = _of_netmask_exn ~netmask address let of_netmask ~netmask ~address = try_with_result (_of_netmask_exn ~netmask) address let to_buffer buf (pre,sz) = Printf.bprintf buf "%a/%d" to_buffer pre sz let to_string subnet = let b = Buffer.create 18 in to_buffer b subnet; Buffer.contents b let pp ppf i = Format.fprintf ppf "%s" (to_string i) let mem ip (pre,sz) = let m = mask sz in (ip &&& m) = (pre &&& m) let subset ~subnet:(pre1,sz1) ~network:(pre2,sz2) = sz1 >= sz2 && mem pre1 (pre2,sz2) let of_addr ip = make 32 ip let global = make 0 (ip 
0 0 0 0) let relative = make 8 (ip 0 0 0 0) let loopback = make 8 (ip 127 0 0 0) let link = make 16 (ip 169 254 0 0) let multicast = make 4 (ip 224 0 0 0) let multicast_org = make 14 (ip 239 192 0 0) let multicast_admin = make 16 (ip 239 255 0 0) let multicast_link = make 24 (ip 224 0 0 0) let private_10 = make 8 (ip 10 0 0 0) let private_172 = make 12 (ip 172 16 0 0) let private_192 = make 16 (ip 192 168 0 0) let private_blocks = [ loopback ; link ; private_10 ; private_172 ; private_192 ] let broadcast (pre,sz) = pre ||| (0x0_FF_FF_FF_FF_l >|> sz) let network (pre,sz) = pre &&& mask sz let address (addr,_) = addr let bits (_,sz) = sz let netmask subnet = mask (bits subnet) let first (_,sz as cidr) = if sz > 30 then network cidr else network cidr |> succ |> failwith_msg let last (_,sz as cidr) = if sz > 30 then broadcast cidr else broadcast cidr |> pred |> failwith_msg end let scope i = let mem = Prefix.mem i in if mem Prefix.loopback then Interface else if mem Prefix.link then Link else if List.exists mem Prefix.private_blocks then Organization else if i = unspecified then Point else if i = broadcast then Admin else if mem Prefix.relative then Admin else if mem Prefix.multicast then (if mem Prefix.multicast_org then Organization else if mem Prefix.multicast_admin then Admin else if mem Prefix.multicast_link then Link else Global) else Global let is_global i = (scope i) = Global let is_multicast i = Prefix.(mem i multicast) let is_private i = (scope i) <> Global end module B128 = struct type t = int32 * int32 * int32 * int32 let of_int64 (a, b) = Int64.( to_int32 (shift_right_logical a 32), to_int32 a, to_int32 (shift_right_logical b 32), to_int32 b) let to_int64 (a,b,c,d) = Int64.( logor (shift_left (of_int32 a) 32) (of_int32 b), logor (shift_left (of_int32 c) 32) (of_int32 d)) let of_int32 x = x let to_int32 x = x let of_int16 (a, b, c, d, e, f, g, h) = V4.of_int16 (a,b), V4.of_int16 (c,d), V4.of_int16 (e,f), V4.of_int16 (g,h) let to_int16 (x,y,z,t) = let a,b = V4.to_int16 x and c,d = V4.to_int16 y and e,f = V4.to_int16 z and g,h = V4.to_int16 t in (a,b,c,d,e,f,g,h) let write_octets_exn ?(off=0) (a,b,c,d) byte = V4.write_octets_exn ~off a byte; V4.write_octets_exn ~off:(off+4) b byte; V4.write_octets_exn ~off:(off+8) c byte; V4.write_octets_exn ~off:(off+12) d byte let compare (a1,b1,c1,d1) (a2,b2,c2,d2) = match V4.compare a1 a2 with | 0 -> begin match V4.compare b1 b2 with | 0 -> begin match V4.compare c1 c2 with | 0 -> V4.compare d1 d2 | n -> n end | n -> n end | n -> n let logand (a1,b1,c1,d1) (a2,b2,c2,d2) = (a1 &&& a2, b1 &&& b2, c1 &&& c2, d1 &&& d2) let logor (a1,b1,c1,d1) (a2,b2,c2,d2) = (a1 ||| a2, b1 ||| b2, c1 ||| c2, d1 ||| d2) let lognot (a,b,c,d) = Int32.(lognot a, lognot b, lognot c, lognot d) let succ (a,b,c,d) = let cb (n,tl) v = match n with | 0l -> (0l,v::tl) | n -> let n = if Int32.equal v 0xFF_FF_FF_FFl then n else 0l in (n,Int32.succ v::tl) in match List.fold_left cb (1l,[]) [d;c;b;a] with | 0l, [a;b;c;d] -> Ok (of_int32 (a,b,c,d)) | n, [_;_;_;_] when n > 0l -> Error (`Msg "Ipaddr: highest address has been reached") | _ -> Error (`Msg "Ipaddr: unexpected error with B128") let pred (a,b,c,d) = let cb (n,tl) v = match n with | 0l -> (0l,v::tl) | n -> let n = if v = 0x00_00_00_00l then n else 0l in (n,Int32.pred v::tl) in match List.fold_left cb (-1l,[]) [d;c;b;a] with | 0l, [a;b;c;d] -> Ok (of_int32 (a,b,c,d)) | n, [_;_;_;_] when n < 0l -> Error (`Msg "Ipaddr: lowest address has been reached") | _ -> Error (`Msg "Ipaddr: unexpected error with B128") let shift_right 
(a,b,c,d) sz = let rec loop (a,b,c,d) sz = if sz < 32 then (sz, (a,b,c,d)) else loop (0l,a,b,c) (sz - 32) in let (sz, (a,b,c,d)) = loop (a,b,c,d) sz in let fn (saved,tl) part = let new_saved = Int32.logand part (0xFF_FF_FF_FFl >|> sz) in let new_part = (part >|> sz) ||| (saved <|< 32 - sz) in (new_saved, new_part::tl) in match List.fold_left fn (0l,[]) [a;b;c;d] with | _, [d;c;b;a] -> Ok (of_int32 (a, b, c, d)) | _ -> Error (`Msg "Ipaddr: unexpected error with B128.shift_right") end module V6 = struct include B128 let make a b c d e f g h = of_int16 (a,b,c,d,e,f,g,h) let parse_ipv6 s i = let len = String.length s in if len < !i + 1 then (raise (need_more s)); let use_bracket = s.[!i] = '['; in if use_bracket then incr i; if len < !i + 2 then (raise (need_more s)); let l = if s.[!i] = ':' then begin incr i; if s.[!i] = ':' then begin compressed := true; incr i; [-1] end else raise (bad_char !i s); end else [] in let rec loop nb acc = if nb >= 8 then acc else if !i >= len then acc else let pos = !i in let x = try parse_hex_int s i with _ -> -1 in if x < 0 then acc else if nb = 7 then x::acc else if !i < len && s.[!i] = ':' then begin incr i; if !i < len then if s.[!i] = ':' then else begin compressed:=true; incr i; loop (nb + 2) (-1::x::acc) end else begin if is_number 16 (int_of_char s.[!i]) then loop (nb+1) (x::acc) else raise (bad_char !i s) end else raise (need_more s) end else if !i < len && s.[!i] = '.' then begin i:= pos; let v4 = V4.of_string_raw s i in let (hi,lo) = V4.to_int16 v4 in lo :: hi :: acc end else x::acc in let res = loop (List.length l) l in let res_len = List.length res in if res_len > 8 then raise (Parse_error ("too many components",s)) else if res_len = 0 then raise (need_more s) else let a = Array.make 8 0 in let missing = if !compressed then 8 - (res_len - 1) else if res_len <> 8 then if !i < len then raise (bad_char !i s) else raise (need_more s) else 0 in let _ = List.fold_left (fun i x -> if x = -1 then i - missing else begin if x land 0xffff <> x then raise (Parse_error (Printf.sprintf "component %d out of bounds" i, s)); a.(i) <- x; i - 1 end ) 7 res in (if use_bracket then expect_char s i ']'); a let of_string_raw s offset = let a = parse_ipv6 s offset in make a.(0) a.(1) a.(2) a.(3) a.(4) a.(5) a.(6) a.(7) let of_string_exn s = let o = ref 0 in let x = of_string_raw s o in expect_end s o; x let of_string s = try_with_result of_string_exn s let to_buffer buf addr = let (a,b,c,d,e,f,g,h) as comp = to_int16 addr in let v4 = match comp with | (0,0,0,0,0,0xffff,_,_) -> true | _ -> false in let rec loop elide zeros acc = function | 0 :: xs -> loop elide (zeros - 1) acc xs | n :: xs when zeros = 0 -> loop elide 0 (n::acc) xs | n :: xs -> loop (min elide zeros) 0 (n::zeros::acc) xs | [] -> let elide = min elide zeros in (if elide < -1 then Some elide else None), (if zeros = 0 then acc else zeros::acc) in let elide,l = loop 0 0 [] [h;g;f;e;d;c;b;a] in assert(match elide with Some x when x < -8 -> false | _ -> true); let rec cons_zeros l x = if x >= 0 then l else cons_zeros (Some 0::l) (x+1) in let _,lrev = List.fold_left (fun (patt, l) x -> if Some x = patt then (None, (None::l)) else if x < 0 then (patt, (cons_zeros l x)) else (patt, ((Some x)::l)) ) (elide, []) l in let rec fill = function | [Some hi;Some lo] when v4 -> let addr = V4.of_int16 (hi, lo) in V4.to_buffer buf addr | None::xs -> Buffer.add_string buf "::"; fill xs | [Some n] -> Printf.bprintf buf "%x" n | (Some n)::None::xs -> Printf.bprintf buf "%x::" n; fill xs | (Some n)::xs -> Printf.bprintf buf "%x:" 
n; fill xs | [] -> () in fill (List.rev lrev) let to_string l = let buf = Buffer.create 39 in to_buffer buf l; Buffer.contents buf let pp ppf i = Format.fprintf ppf "%s" (to_string i) let hihi = V4.of_octets_exn ~off bs in let hilo = V4.of_octets_exn ~off:(off+4) bs in let lohi = V4.of_octets_exn ~off:(off+8) bs in let lolo = V4.of_octets_exn ~off:(off+12) bs in of_int32 (hihi, hilo, lohi, lolo) let of_octets ?off bs = try_with_result (of_octets_exn ?off) bs let write_octets ?off i bs = try_with_result (write_octets_exn ?off i) bs let to_octets i = let b = Bytes.create 16 in write_octets_exn i b; Bytes.to_string b MAC let multicast_to_mac i = let (_,_,_,i) = to_int32 i in let macb = Bytes.create 6 in Bytes.set macb 0 (Char.chr 0x33); Bytes.set macb 1 (Char.chr 0x33); Bytes.set macb 2 (Char.chr ((|~) (i >! 24))); Bytes.set macb 3 (Char.chr ((|~) (i >! 16))); Bytes.set macb 4 (Char.chr ((|~) (i >! 8))); Bytes.set macb 5 (Char.chr ((|~) (i >! 0))); Macaddr.of_octets_exn (Bytes.to_string macb) let to_domain_name (a,b,c,d) = let name = [ hex_string_of_int32 ((d >|> 0) &&& 0xF_l); hex_string_of_int32 ((d >|> 4) &&& 0xF_l); hex_string_of_int32 ((d >|> 8) &&& 0xF_l); hex_string_of_int32 ((d >|> 12) &&& 0xF_l); hex_string_of_int32 ((d >|> 16) &&& 0xF_l); hex_string_of_int32 ((d >|> 20) &&& 0xF_l); hex_string_of_int32 ((d >|> 24) &&& 0xF_l); hex_string_of_int32 ((d >|> 28) &&& 0xF_l); hex_string_of_int32 ((c >|> 0) &&& 0xF_l); hex_string_of_int32 ((c >|> 4) &&& 0xF_l); hex_string_of_int32 ((c >|> 8) &&& 0xF_l); hex_string_of_int32 ((c >|> 12) &&& 0xF_l); hex_string_of_int32 ((c >|> 16) &&& 0xF_l); hex_string_of_int32 ((c >|> 20) &&& 0xF_l); hex_string_of_int32 ((c >|> 24) &&& 0xF_l); hex_string_of_int32 ((c >|> 28) &&& 0xF_l); hex_string_of_int32 ((b >|> 0) &&& 0xF_l); hex_string_of_int32 ((b >|> 4) &&& 0xF_l); hex_string_of_int32 ((b >|> 8) &&& 0xF_l); hex_string_of_int32 ((b >|> 12) &&& 0xF_l); hex_string_of_int32 ((b >|> 16) &&& 0xF_l); hex_string_of_int32 ((b >|> 20) &&& 0xF_l); hex_string_of_int32 ((b >|> 24) &&& 0xF_l); hex_string_of_int32 ((b >|> 28) &&& 0xF_l); hex_string_of_int32 ((a >|> 0) &&& 0xF_l); hex_string_of_int32 ((a >|> 4) &&& 0xF_l); hex_string_of_int32 ((a >|> 8) &&& 0xF_l); hex_string_of_int32 ((a >|> 12) &&& 0xF_l); hex_string_of_int32 ((a >|> 16) &&& 0xF_l); hex_string_of_int32 ((a >|> 20) &&& 0xF_l); hex_string_of_int32 ((a >|> 24) &&& 0xF_l); hex_string_of_int32 ((a >|> 28) &&& 0xF_l); "ip6"; "arpa" ] in Domain_name.(host_exn (of_strings_exn name)) let of_domain_name n = let open Domain_name in if count_labels n = 34 then let ip6 = get_label_exn n 32 and arpa = get_label_exn n 33 in if equal_label ip6 "ip6" && equal_label arpa "arpa" then let rev = true in let n' = drop_label_exn ~rev ~amount:2 n in let d = drop_label_exn ~rev ~amount:24 n' and c = drop_label_exn ~amount:8 (drop_label_exn ~rev ~amount:16 n') and b = drop_label_exn ~amount:16 (drop_label_exn ~rev ~amount:8 n') and a = drop_label_exn ~amount:24 n' in let t b d = let v = Int32.of_int (parse_hex_int d (ref 0)) in if v > 0xFl then raise (Parse_error ("number in label too big", d)) else v <|< b in let f d = List.fold_left (fun (acc, b) d -> Int32.add acc (t b d), b + 4) (0l, 0) (to_strings d) in try let a', _ = f a and b', _ = f b and c', _ = f c and d', _ = f d in Some (a', b', c', d') with | Parse_error _ -> None else None else None let unspecified = make 0 0 0 0 0 0 0 0 let localhost = make 0 0 0 0 0 0 0 1 let interface_nodes = make 0xff01 0 0 0 0 0 0 1 let link_nodes = make 0xff02 0 0 0 0 0 0 1 let 
interface_routers = make 0xff01 0 0 0 0 0 0 2 let link_routers = make 0xff02 0 0 0 0 0 0 2 let site_routers = make 0xff05 0 0 0 0 0 0 2 module Prefix = struct type addr = t type t = addr * int let compare (pre,sz) (pre',sz') = let c = compare pre pre' in if c = 0 then Stdlib.compare sz sz' else c let ip = make let _full = let f = 0x0_FFFF_FFFF_l in f,f,f,f let mask sz = V4.Prefix.( mask (sz - 0), mask (sz - 32), mask (sz - 64), mask (sz - 96)) let prefix (pre,sz) = (logand pre (mask sz),sz) let make sz pre = (pre,sz) let network_address (pre,sz) addr = logor (logand pre (mask sz)) (logand addr (lognot (mask sz))) let _of_string_raw s i = let v6 = of_string_raw s i in expect_char s i '/'; let p = parse_dec_int s i in if p > 128 || p < 0 then raise (Parse_error ("invalid prefix size", s)); (p, v6) let of_string_raw s i = let (p,v6) = _of_string_raw s i in make p v6 let _of_string_exn s = let i = ref 0 in let res = _of_string_raw s i in expect_end s i; res let of_string_exn s = let (p,v6) = _of_string_exn s in make p v6 let of_string s = try_with_result of_string_exn s let _of_netmask_exn ~netmask address = let nm = let bits netmask = V4.Prefix.bits (V4.Prefix.of_netmask_exn ~netmask ~address:V4.any) in match netmask with | (0_l,0_l,0_l,0_l) -> 0 | (lsw ,0_l ,0_l ,0_l) -> bits lsw | (-1_l,lsw ,0_l ,0_l) -> bits lsw + 32 | (-1_l,-1_l,lsw ,0_l) -> bits lsw + 64 | (-1_l,-1_l,-1_l,lsw) -> bits lsw + 96 | _ -> raise (Parse_error ("invalid netmask", to_string netmask)) in make nm address let of_netmask_exn ~netmask ~address = _of_netmask_exn ~netmask address let of_netmask ~netmask ~address = try_with_result (_of_netmask_exn ~netmask) address let to_buffer buf (pre,sz) = Printf.bprintf buf "%a/%d" to_buffer pre sz let to_string subnet = let buf = Buffer.create 43 in to_buffer buf subnet; Buffer.contents buf let pp ppf i = Format.fprintf ppf "%s" (to_string i) let mem ip (pre,sz) = let m = mask sz in logand ip m = logand pre m let subset ~subnet:(pre1,sz1) ~network:(pre2,sz2) = sz1 >= sz2 && mem pre1 (pre2,sz2) let of_addr ip = make 128 ip let global_unicast_001 = make 3 (ip 0x2000 0 0 0 0 0 0 0) let link = make 64 (ip 0xfe80 0 0 0 0 0 0 0) let unique_local = make 7 (ip 0xfc00 0 0 0 0 0 0 0) let multicast = make 8 (ip 0xff00 0 0 0 0 0 0 0) let ipv4_mapped = make 96 (ip 0 0 0 0 0 0xffff 0 0) let noneui64_interface = make 3 (ip 0x0000 0 0 0 0 0 0 0) let solicited_node = make 104 (ip 0xff02 0 0 0 0 1 0xff00 0) let network (pre,sz) = logand pre (mask sz) let address (addr,_) = addr let bits (_,sz) = sz let netmask subnet = mask (bits subnet) let first (_,sz as cidr) = if sz > 126 then network cidr else network cidr |> succ |> failwith_msg let last (_,sz as cidr) = let ffff = ip 0xffff 0xffff 0xffff 0xffff 0xffff 0xffff 0xffff 0xffff in logor (network cidr) (shift_right ffff sz |> failwith_msg) end let scope i = let mem = Prefix.mem i in if mem Prefix.global_unicast_001 then Global else if mem Prefix.ipv4_mapped then V4.scope (let (_,_,_,v4) = to_int32 i in V4.of_int32 v4) else if mem Prefix.multicast then let (x,_,_,_,_,_,_,_) = to_int16 i in match x land 0xf with | 0 -> Point | 1 -> Interface | 2 | 3 -> Link | 4 -> Admin | 5 | 6 | 7 -> Site | 8 | 9 | 10 | 11 | 12 | 13 -> Organization | 14 | 15 -> Global | _ -> assert false else if mem Prefix.link then Link else if mem Prefix.unique_local then Global else if i = localhost then Interface else if i = unspecified then Point else Global let link_address_of_mac = let c b i = Char.code (String.get b i) in fun mac -> let bmac = Macaddr.to_octets mac in let c_0 
= c bmac 0 lxor 2 in let addr = make 0 0 0 0 (c_0 lsl 8 + c bmac 1) (c bmac 2 lsl 8 + 0xff ) (0xfe00 + c bmac 3) (c bmac 4 lsl 8 + c bmac 5) in Prefix.(network_address link addr) let is_global i = (scope i) = Global let is_multicast i = Prefix.(mem i multicast) let is_private i = (scope i) <> Global end type ('v4,'v6) v4v6 = V4 of 'v4 | V6 of 'v6 type t = (V4.t,V6.t) v4v6 let compare a b = match a,b with | V4 a, V4 b -> V4.compare a b | V6 a, V6 b -> V6.compare a b | V4 _, V6 _ -> -1 | V6 _, V4 _ -> 1 let to_string = function | V4 x -> V4.to_string x | V6 x -> V6.to_string x let to_buffer buf = function | V4 x -> V4.to_buffer buf x | V6 x -> V6.to_buffer buf x let pp ppf i = Format.fprintf ppf "%s" (to_string i) let of_string_raw s offset = let len = String.length s in if len < !offset + 1 then raise (need_more s); match s.[0] with | '[' -> V6 (V6.of_string_raw s offset) | _ -> let pos = !offset in try V4 (V4.of_string_raw s offset) with Parse_error (v4_msg,_) -> offset := pos; try V6 (V6.of_string_raw s offset) with Parse_error(v6_msg,s) -> let msg = Printf.sprintf "not an IPv4 address: %s\nnot an IPv6 address: %s" v4_msg v6_msg in raise (Parse_error (msg,s)) let of_string_exn s = of_string_raw s (ref 0) let of_string s = try_with_result of_string_exn s let v6_of_v4 v4 = V6.(Prefix.(network_address ipv4_mapped (of_int32 (0l,0l,0l,v4)))) let v4_of_v6 v6 = if V6.Prefix.(mem v6 ipv4_mapped) then let (_,_,_,v4) = V6.to_int32 v6 in Some V4.(of_int32 v4) else None let to_v4 = function V4 v4 -> Some v4 | V6 v6 -> v4_of_v6 v6 let to_v6 = function V4 v4 -> v6_of_v4 v4 | V6 v6 -> v6 let scope = function V4 v4 -> V4.scope v4 | V6 v6 -> V6.scope v6 let is_global = function | V4 v4 -> V4.is_global v4 | V6 v6 -> V6.is_global v6 let is_multicast = function | V4 v4 -> V4.is_multicast v4 | V6 v6 -> V6.is_multicast v6 let is_private = function | V4 v4 -> V4.is_private v4 | V6 v6 -> V6.is_private v6 let multicast_to_mac = function | V4 v4 -> V4.multicast_to_mac v4 | V6 v6 -> V6.multicast_to_mac v6 let to_domain_name = function | V4 v4 -> V4.to_domain_name v4 | V6 v6 -> V6.to_domain_name v6 let of_domain_name n = match Domain_name.count_labels n with | 6 -> begin match V4.of_domain_name n with | None -> None | Some x -> Some (V4 x) end | 34 -> begin match V6.of_domain_name n with | None -> None | Some x -> Some (V6 x) end | _ -> None let succ = function | V4 addr -> map_result (V4.succ addr) (fun v -> V4 v) | V6 addr -> map_result (V6.succ addr) (fun v -> V6 v) let pred = function | V4 addr -> map_result (V4.pred addr) (fun v -> V4 v) | V6 addr -> map_result (V6.pred addr) (fun v -> V6 v) module Prefix = struct module Addr = struct let to_v6 = to_v6 end type addr = t type t = (V4.Prefix.t,V6.Prefix.t) v4v6 let compare a b = match a,b with | V4 a , V4 b -> V4.Prefix.compare a b | V6 a , V6 b -> V6.Prefix.compare a b | V4 _ , V6 _ -> -1 | V6 _ , V4 _ -> 1 let of_string_raw s offset = let len = String.length s in if len < !offset + 1 then raise (need_more s); match s.[0] with | '[' -> V6 (V6.Prefix.of_string_raw s offset) | _ -> let pos = !offset in try V4 (V4.Prefix.of_string_raw s offset) with Parse_error (v4_msg,_) -> offset := pos; try V6 (V6.Prefix.of_string_raw s offset) with Parse_error(v6_msg,s) -> let msg = Printf.sprintf "not an IPv4 prefix: %s\nnot an IPv6 prefix: %s" v4_msg v6_msg in raise (Parse_error (msg,s)) let of_string_exn s = of_string_raw s (ref 0) let of_string s = try_with_result of_string_exn s let v6_of_v4 v4 = V6.Prefix.make (96 + V4.Prefix.bits v4) (v6_of_v4 (V4.Prefix.network v4)) let 
v4_of_v6 v6 = match v4_of_v6 (V6.Prefix.network v6) with | Some v4 -> Some (V4.Prefix.make (V6.Prefix.bits v6 - 96) v4) | None -> None let to_v4 = function V4 v4 -> Some v4 | V6 v6 -> v4_of_v6 v6 let to_v6 = function V4 v4 -> v6_of_v4 v4 | V6 v6 -> v6 let mem ip prefix = V6.Prefix.mem (Addr.to_v6 ip) (to_v6 prefix) let subset ~subnet ~network = V6.Prefix.subset ~subnet:(to_v6 subnet) ~network:(to_v6 network) let of_addr = function | V4 p -> V4 (V4.Prefix.of_addr p) | V6 p -> V6 (V6.Prefix.of_addr p) let to_string = function | V4 p -> V4.Prefix.to_string p | V6 p -> V6.Prefix.to_string p let to_buffer buf = function | V4 p -> V4.Prefix.to_buffer buf p | V6 p -> V6.Prefix.to_buffer buf p let network = function | V4 p -> V4 (V4.Prefix.network p) | V6 p -> V6 (V6.Prefix.network p) let netmask = function | V4 p -> V4 (V4.Prefix.netmask p) | V6 p -> V6 (V6.Prefix.netmask p) let pp ppf i = Format.fprintf ppf "%s" (to_string i) let first = function | V4 p -> V4 (V4.Prefix.first p) | V6 p -> V6 (V6.Prefix.first p) let last = function | V4 p -> V4 (V4.Prefix.last p) | V6 p -> V6 (V6.Prefix.last p) end
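Beyond the per-family modules, the unified v4v6 type at the end of the file handles mixed traffic: of_string falls back from IPv4 to IPv6, IPv4-mapped addresses convert back with to_v4, and addresses map to and from reverse-DNS names. A sketch under the same assumptions as above (module name Ipaddr; Domain_name.to_string from the domain-name library); addresses are illustrative only:

let () =
  (* Unified parsing and scope classification; ::ffff:10.0.0.1 is an
     IPv4-mapped address inside the 10.0.0.0/8 private block. *)
  (match Ipaddr.of_string "::ffff:10.0.0.1" with
   | Error (`Msg m) -> prerr_endline m
   | Ok ip ->
     Format.printf "scope: %a@." Ipaddr.pp_scope (Ipaddr.scope ip);
     (match Ipaddr.to_v4 ip with
      | Some v4 -> Printf.printf "as v4: %s\n" (Ipaddr.V4.to_string v4)
      | None -> print_endline "not representable as v4"));
  (* Reverse-DNS round trip: 8.8.8.8 <-> 8.8.8.8.in-addr.arpa. *)
  let ip = Ipaddr.V4.of_string_exn "8.8.8.8" in
  let name = Ipaddr.V4.to_domain_name ip in
  print_endline (Domain_name.to_string name);
  match Ipaddr.V4.of_domain_name name with
  | Some ip' -> assert (Ipaddr.V4.compare ip ip' = 0)
  | None -> prerr_endline "not an in-addr.arpa name"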
6fce07728f8dd45cc284922ba847230f310c2dfc2bf60ebe2ec6a07ab3190b28
ndmitchell/rattle
Types.hs
# LANGUAGE RecordWildCards # # LANGUAGE FlexibleInstances # # LANGUAGE GeneralizedNewtypeDeriving , DeriveTraversable # {-# LANGUAGE StandaloneDeriving, DeriveGeneric #-} # OPTIONS_GHC -Wno - orphans # module Development.Rattle.Types( Trace(..), Touch(..), fsaTrace, normalizeTouch, TouchSet, tsRead, tsWrite, newTouchSet, addTouchSet, Cmd(..), mkCmd, RunIndex, runIndex0, nextRunIndex, ) where import Data.Hashable import Data.List.Extra import System.Directory import System.Info.Extra import Control.Monad import General.Binary import Data.Word import Development.Shake.Command import Data.Semigroup import qualified Data.ByteString.UTF8 as UTF8 import qualified Data.ByteString as BS import qualified Data.HashSet as Set import GHC.Generics import Prelude import System.Time.Extra import General.FileName record the hash as the first field data Cmd = Cmd Int [CmdOption] [String] deriving Eq instance Show Cmd where show (Cmd _ a b) = "Cmd " ++ show a ++ " " ++ show b instance Hashable Cmd where hashWithSalt _ = hash hash (Cmd x _ _) = x mkCmd :: [CmdOption] -> [String] -> Cmd mkCmd a b = Cmd (hash (a,b)) a b instance BinaryEx Cmd where getEx x = mkCmd (getEx a) (getEx b) where (a,b) = getExPair x putEx (Cmd _ a b) = putExPair (putEx a) (putEx b) The common values for CmdOption are [ ] , [ Shell ] and a few others - optimise those instance BinaryEx [CmdOption] where getEx x | BS.null x = [] | BS.length x == 1 = case getEx x :: Word8 of 0 -> [Shell] 1 -> [EchoStderr False] 2 -> [Shell,EchoStderr False] | otherwise = map read $ getEx x putEx [] = mempty putEx [Shell] = putEx (0 :: Word8) putEx [EchoStderr False] = putEx (1 :: Word8) putEx [Shell,EchoStderr False] = putEx (2 :: Word8) putEx xs = putEx $ map show xs deriving instance Generic CmdOption deriving instance Read CmdOption instance Hashable CmdOption data Trace a = Trace {tRun :: {-# UNPACK #-} !RunIndex ,tStart :: {-# UNPACK #-} !Seconds ,tStop :: {-# UNPACK #-} !Seconds ,tTouch :: Touch a } deriving (Show, Functor, Foldable, Traversable, Eq) instance BinaryEx a => BinaryEx (Trace a) where getEx x = Trace a b c $ getEx d where (a,b,c,d) = binarySplit3 x putEx (Trace a b c d) = putExStorable a <> putExStorable b <> putExStorable c <> putEx d data Touch a = Touch {tRead :: [a] ,tWrite :: [a] } deriving (Show, Functor, Foldable, Traversable, Eq) instance BinaryEx a => BinaryEx (Touch a) where getEx x = Touch (map getEx $ getExList a) (map getEx $ getExList b) where [a,b] = getExList x putEx (Touch a b) = putExList [putExList $ map putEx a, putExList $ map putEx b] instance Semigroup (Touch a) where Touch r1 w1 <> Touch r2 w2 = Touch (r1++r2) (w1++w2) instance Monoid (Touch a) where mempty = Touch [] [] mappend = (<>) mconcat xs = Touch (concatMap tRead xs) (concatMap tWrite xs) instance Hashable a => Hashable (Trace a) where hashWithSalt s (Trace a b c d) = hashWithSalt s (a,b,c,d) instance Hashable a => Hashable (Touch a) where hashWithSalt s (Touch r w) = hashWithSalt s (r,w) fsaTrace :: [FSATrace BS.ByteString] -> IO (Touch FileName) -- We want to get normalized traces. On Linux, things come out normalized, and we just want to dedupe them On Windows things come out as C:\windows\system32\KERNELBASE.dll instead of C:\Windows\System32\KernelBase.dll so important to call ( expensive ) normalizeTouch fsaTrace fs | isWindows = normalize twice because normalisation is cheap , but might be expensive fmap (normalizeTouch . fmap (byteStringToFileName . 
UTF8.fromString)) $ canonicalizeTouch $ fmap UTF8.toString $ normalizeTouch $ mconcatMap f fs | otherwise = -- We know the file names are already normalized from Shake so avoid a redundant conversion pure $ normalizeTouch $ byteStringToFileName <$> mconcatMap f fs where f (FSAWrite x) = Touch [] [x] f (FSARead x) = Touch [x] [] f (FSADelete x) = Touch [] [x] f (FSAMove x y) = Touch [] [x,y] f (FSAQuery x) = Touch [x] [] f (FSATouch x) = Touch [] [x] normalizeTouch :: (Ord a, Hashable a) => Touch a -> Touch a added ' sort ' because HashSet uses the ordering of the hashes , which is confusing -- and since we are sorting, try and avoid doing too much hash manipulation of the reads normalizeTouch (Touch a b) = Touch (f $ sort a) (sort $ Set.toList b2) where b2 = Set.fromList b f (x1:x2:xs) | x1 == x2 = f (x1:xs) f (x:xs) | x `Set.member` b2 = f xs | otherwise = x : f xs f [] = [] canonicalizeTouch :: Touch FilePath -> IO (Touch FilePath) canonicalizeTouch (Touch a b) = Touch <$> mapM canonicalizePath a <*> mapM canonicalizePath b For sets , Set.fromList is fastest if there are no dupes Otherwise a Set.member/Set.insert is fastest data TouchSet = TouchSet {tsRead :: Set.HashSet FileName, tsWrite :: Set.HashSet FileName} newTouchSet :: [Touch FileName] -> TouchSet newTouchSet [] = TouchSet Set.empty Set.empty newTouchSet (Touch{..}:xs) = foldl' addTouchSet (TouchSet (Set.fromList tRead) (Set.fromList tWrite)) xs addTouchSet :: TouchSet -> Touch FileName -> TouchSet addTouchSet TouchSet{..} Touch{..} = TouchSet (f tsRead tRead) (f tsWrite tWrite) where f = foldl' (\mp k -> if Set.member k mp then mp else Set.insert k mp) -- | Which run we are in, monotonically increasing newtype RunIndex = RunIndex Int deriving (Eq,Ord,Show,Storable,BinaryEx,Hashable) runIndex0 :: RunIndex runIndex0 = RunIndex 0 nextRunIndex :: RunIndex -> RunIndex nextRunIndex (RunIndex i) = RunIndex $ i + 1
null
https://raw.githubusercontent.com/ndmitchell/rattle/f1f10504ef175dd005c8affdddfc1fb615c040f2/src/Development/Rattle/Types.hs
haskell
# LANGUAGE StandaloneDeriving, DeriveGeneric # # UNPACK # # UNPACK # # UNPACK # We want to get normalized traces. On Linux, things come out normalized, and we just want to dedupe them We know the file names are already normalized from Shake so avoid a redundant conversion and since we are sorting, try and avoid doing too much hash manipulation of the reads | Which run we are in, monotonically increasing
# LANGUAGE RecordWildCards # # LANGUAGE FlexibleInstances # # LANGUAGE GeneralizedNewtypeDeriving , DeriveTraversable # # OPTIONS_GHC -Wno - orphans # module Development.Rattle.Types( Trace(..), Touch(..), fsaTrace, normalizeTouch, TouchSet, tsRead, tsWrite, newTouchSet, addTouchSet, Cmd(..), mkCmd, RunIndex, runIndex0, nextRunIndex, ) where import Data.Hashable import Data.List.Extra import System.Directory import System.Info.Extra import Control.Monad import General.Binary import Data.Word import Development.Shake.Command import Data.Semigroup import qualified Data.ByteString.UTF8 as UTF8 import qualified Data.ByteString as BS import qualified Data.HashSet as Set import GHC.Generics import Prelude import System.Time.Extra import General.FileName record the hash as the first field data Cmd = Cmd Int [CmdOption] [String] deriving Eq instance Show Cmd where show (Cmd _ a b) = "Cmd " ++ show a ++ " " ++ show b instance Hashable Cmd where hashWithSalt _ = hash hash (Cmd x _ _) = x mkCmd :: [CmdOption] -> [String] -> Cmd mkCmd a b = Cmd (hash (a,b)) a b instance BinaryEx Cmd where getEx x = mkCmd (getEx a) (getEx b) where (a,b) = getExPair x putEx (Cmd _ a b) = putExPair (putEx a) (putEx b) The common values for CmdOption are [ ] , [ Shell ] and a few others - optimise those instance BinaryEx [CmdOption] where getEx x | BS.null x = [] | BS.length x == 1 = case getEx x :: Word8 of 0 -> [Shell] 1 -> [EchoStderr False] 2 -> [Shell,EchoStderr False] | otherwise = map read $ getEx x putEx [] = mempty putEx [Shell] = putEx (0 :: Word8) putEx [EchoStderr False] = putEx (1 :: Word8) putEx [Shell,EchoStderr False] = putEx (2 :: Word8) putEx xs = putEx $ map show xs deriving instance Generic CmdOption deriving instance Read CmdOption instance Hashable CmdOption data Trace a = Trace ,tTouch :: Touch a } deriving (Show, Functor, Foldable, Traversable, Eq) instance BinaryEx a => BinaryEx (Trace a) where getEx x = Trace a b c $ getEx d where (a,b,c,d) = binarySplit3 x putEx (Trace a b c d) = putExStorable a <> putExStorable b <> putExStorable c <> putEx d data Touch a = Touch {tRead :: [a] ,tWrite :: [a] } deriving (Show, Functor, Foldable, Traversable, Eq) instance BinaryEx a => BinaryEx (Touch a) where getEx x = Touch (map getEx $ getExList a) (map getEx $ getExList b) where [a,b] = getExList x putEx (Touch a b) = putExList [putExList $ map putEx a, putExList $ map putEx b] instance Semigroup (Touch a) where Touch r1 w1 <> Touch r2 w2 = Touch (r1++r2) (w1++w2) instance Monoid (Touch a) where mempty = Touch [] [] mappend = (<>) mconcat xs = Touch (concatMap tRead xs) (concatMap tWrite xs) instance Hashable a => Hashable (Trace a) where hashWithSalt s (Trace a b c d) = hashWithSalt s (a,b,c,d) instance Hashable a => Hashable (Touch a) where hashWithSalt s (Touch r w) = hashWithSalt s (r,w) fsaTrace :: [FSATrace BS.ByteString] -> IO (Touch FileName) On Windows things come out as C:\windows\system32\KERNELBASE.dll instead of C:\Windows\System32\KernelBase.dll so important to call ( expensive ) normalizeTouch fsaTrace fs | isWindows = normalize twice because normalisation is cheap , but might be expensive fmap (normalizeTouch . fmap (byteStringToFileName . 
UTF8.fromString)) $ canonicalizeTouch $ fmap UTF8.toString $ normalizeTouch $ mconcatMap f fs | otherwise = pure $ normalizeTouch $ byteStringToFileName <$> mconcatMap f fs where f (FSAWrite x) = Touch [] [x] f (FSARead x) = Touch [x] [] f (FSADelete x) = Touch [] [x] f (FSAMove x y) = Touch [] [x,y] f (FSAQuery x) = Touch [x] [] f (FSATouch x) = Touch [] [x] normalizeTouch :: (Ord a, Hashable a) => Touch a -> Touch a added ' sort ' because HashSet uses the ordering of the hashes , which is confusing normalizeTouch (Touch a b) = Touch (f $ sort a) (sort $ Set.toList b2) where b2 = Set.fromList b f (x1:x2:xs) | x1 == x2 = f (x1:xs) f (x:xs) | x `Set.member` b2 = f xs | otherwise = x : f xs f [] = [] canonicalizeTouch :: Touch FilePath -> IO (Touch FilePath) canonicalizeTouch (Touch a b) = Touch <$> mapM canonicalizePath a <*> mapM canonicalizePath b For sets , Set.fromList is fastest if there are no dupes Otherwise a Set.member/Set.insert is fastest data TouchSet = TouchSet {tsRead :: Set.HashSet FileName, tsWrite :: Set.HashSet FileName} newTouchSet :: [Touch FileName] -> TouchSet newTouchSet [] = TouchSet Set.empty Set.empty newTouchSet (Touch{..}:xs) = foldl' addTouchSet (TouchSet (Set.fromList tRead) (Set.fromList tWrite)) xs addTouchSet :: TouchSet -> Touch FileName -> TouchSet addTouchSet TouchSet{..} Touch{..} = TouchSet (f tsRead tRead) (f tsWrite tWrite) where f = foldl' (\mp k -> if Set.member k mp then mp else Set.insert k mp) newtype RunIndex = RunIndex Int deriving (Eq,Ord,Show,Storable,BinaryEx,Hashable) runIndex0 :: RunIndex runIndex0 = RunIndex 0 nextRunIndex :: RunIndex -> RunIndex nextRunIndex (RunIndex i) = RunIndex $ i + 1
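The comments above describe the normalization applied to traces: sort the reads, drop duplicates, and drop any read that is also written, since the write already accounts for it. A small self-contained sketch of that idea, using Data.Set from containers rather than Data.HashSet, so it is an illustration of the logic rather than rattle's implementation:

import Data.List (sort)
import qualified Data.Set as Set

data Touch a = Touch { tRead :: [a], tWrite :: [a] } deriving Show

normalize :: Ord a => Touch a -> Touch a
normalize (Touch rs ws) = Touch (go (sort rs)) (Set.toAscList wset)
  where
    wset = Set.fromList ws
    go (x1:x2:xs) | x1 == x2 = go (x1 : xs)   -- drop adjacent duplicates
    go (x:xs)
      | x `Set.member` wset = go xs           -- a read that is also a write is redundant
      | otherwise = x : go xs
    go [] = []

main :: IO ()
main = print (normalize (Touch ["b", "a", "b", "c"] ["c", "d"]))
-- Touch {tRead = ["a","b"], tWrite = ["c","d"]}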
5a128b24167860ec5b8bf5d147f279ccb4672df1d83858a3f3e88945d12a1074
input-output-hk/cardano-sl
Cluster.hs
module Test.Integration.Framework.Cluster ( startCluster , waitForNode ) where import Universum hiding (init) import Control.Concurrent (threadDelay) import Control.Concurrent.Async (race) import Data.Map.Strict ((!)) import qualified Data.Map.Strict as Map import qualified Data.Text as T import Options.Applicative (handleParseResult, info) import qualified Prelude import System.Environment (getEnvironment) import System.FilePath ((</>)) import Cardano.Cluster (MaxWaitingTime (..), NodeName (..), NodeType (..), startNode) import Cardano.Cluster.Environment (Artifact (..), Env, prepareEnvironment, withSystemStart) import Cardano.Cluster.Util (execParserEnv, oneSecond, runAsync, stripFilterPrefix, varFromParser) import Cardano.Wallet.Action (actionWithWallet) import Cardano.Wallet.Client.Http (ClientError (..), Manager, ServantError (..), WalletClient (getNodeInfo), WalletHttpClient) import Cardano.Wallet.Server.CLI (walletBackendParamsParser) import Pos.Chain.Genesis (GeneratedSecrets (..), configGeneratedSecretsThrow) import Pos.Client.CLI.NodeOptions (commonNodeArgsParser, nodeArgsParser) import Pos.Client.CLI.Params (loggingParams) import Pos.Launcher (LoggingParams (..), launchNode) import Pos.Launcher.Configuration (ConfigurationOptions (..), withConfigurations) import Pos.Node.API (ForceNtpCheck (..)) import Pos.Util.CompileInfo (withCompileInfo) import Pos.Util.Trace (noTrace) import Pos.Util.Wlog.Compatibility (usingNamedPureLogger) prefix :: String prefix = "INTEGRATION_" -- | All those can be overriden by environment variables. These values -- correspond to command line arguments that would be passed to underlying -- processes. -- -- As an example, if you wanted to enable the @--wallet-debug@ option for the -- underlying node, you would add an entry in this list: -- -- > ("WALLET_DEBUG", "True") -- Underscores ( @_@ ) are converted to hyphens ( ) , the text is lowercased , and -- a leading @--@ is added. defaultIntegrationEnv :: Env defaultIntegrationEnv = Map.fromList [ ("CONFIGURATION_FILE", "./test/integration/configuration.yaml") , ("CONFIGURATION_KEY", "default") , ("STATE_DIR", "./state-integration") , ("REBUILD_DB", "True") , ("WALLET_ADDRESS", "127.0.0.1:8090") , ("WALLET_DOC_ADDRESS", "127.0.0.1:8190") , ("WALLET_DB_PATH", "./state-integration/wallet-db/edge") , ("WALLET_REBUILD_DB", "True") , ("WALLET_NODE_API_ADDRESS", "127.0.0.1:8089") , ("NODE_API_ADDRESS", "127.0.0.1:8086") , ("NODE_DOC_ADDRESS", "127.0.0.1:3186") , ("NODE_TLS_CLIENT_CERT", "./state-integration/tls/relay/client.crt") , ("NODE_TLS_KEY", "./state-integration/tls/relay/client.key") , ("NODE_TLS_CA_CERT", "./state-integration/tls/relay/ca.crt") ] -- | Start an integration cluster. Quite identical to the original "start cluster". -- The main difference here is that we start a wallet node instead of the edge -- node. This will go as soon as decoupling is done; at this point we will need -- this edge node and the wallet will simply boil down to a webserver, started -- independently. startCluster :: [(NodeName, NodeType)] -> IO (Env, [FilePath], Manager) startCluster nodes = do env0 <- getEnvironment >>= withSystemStart . Map.union defaultIntegrationEnv . Map.fromList . stripFilterPrefix prefix let stateDir = env0 ! "STATE_DIR" -- Safe, we just defaulted it above let configFile = env0 ! "CONFIGURATION_FILE" -- Safe, we just defaulted it above let configKey = env0 ! 
"CONFIGURATION_KEY" -- Safe, we just defaulted it above handles <- forM nodes $ \node@(_, nodeType) -> runAsync $ \yield -> do let (artifacts, nodeEnv) = prepareEnvironment node nodes stateDir env0 let (genesis, topology, logger, tls) = artifacts case nodeType of NodeCore -> do void (init genesis >> init topology >> init logger >> init tls) yield (nodeEnv, Nothing) >> startNode node nodeEnv NodeRelay -> do void (init topology >> init logger >> init tls) yield (nodeEnv, Nothing) >> startNode node nodeEnv NodeEdge -> do manager <- init topology >> init logger >> init tls yield (nodeEnv, Just manager) >> startWallet node nodeEnv (env, manager) <- fmap (Prelude.head . catMaybes) $ forM handles $ \(_, (env, manager)) -> do printCartouche env >> return ((env,) <$> manager) let configOpts = ConfigurationOptions { cfoFilePath = configFile , cfoKey = toText configKey , cfoSystemStart = Just 0 , cfoSeed = Nothing } (env,,manager) <$> getGenesisKeys stateDir configOpts where init :: Artifact a b -> IO b init = initializeArtifact -- | Start a wallet, which is still a node (decoupling incoming!) startWallet :: (NodeName, NodeType) -- ^ The actual node name -> Env -- ^ A "simulation" of the system ENV as a 'Map String String' -> IO () startWallet (NodeName nodeIdT, _) env = do nArgs <- parseNodeArgs cArgs <- parseCommonNodeArgs wArgs <- parseWalletArgs let lArgs = getLoggingArgs cArgs withCompileInfo $ launchNode nArgs cArgs lArgs (actionWithWallet wArgs) where parseNodeArgs = do let nVars = varFromParser nodeArgsParser let nInfo = info nodeArgsParser mempty handleParseResult $ execParserEnv env nVars nInfo parseCommonNodeArgs = do let cVars = varFromParser commonNodeArgsParser let cInfo = info commonNodeArgsParser mempty handleParseResult $ execParserEnv env cVars cInfo parseWalletArgs = do let wVars = varFromParser walletBackendParamsParser let wInfo = info walletBackendParamsParser mempty handleParseResult (execParserEnv env wVars wInfo) getLoggingArgs cArgs = (loggingParams (fromString $ T.unpack nodeIdT) cArgs) { lpConsoleLog = Just False } -- | Make HttpRequest continuously for a while to wait after the node. -- This is a temporary, simplified version of what in 'Cardano.Cluster' that -- works with WalletHttpClient. 
waitForNode ^ An Http Client configured against a given node ^ Maximum waiting time , in seconds -> IO () waitForNode client (MaxWaitingTime s) = do res <- race (threadDelay $ s * oneSecond) retry case res of Left _ -> fail $ "Giving up waiting for node to start: it takes too long" Right _ -> return () where retry :: IO () retry = threadDelay oneSecond >> waitForNode' waitForNode' :: IO () waitForNode' = getNodeInfo client NoNtpCheck >>= \case Right _ -> return () Left (ClientHttpError ConnectionError{}) -> retry Left err -> fail $ "Failed to wait for node to start: " <> show err -- | Get poor keys getGenesisKeys :: FilePath -> ConfigurationOptions -> IO [FilePath] getGenesisKeys stateDir configOpts = do gs <- getGeneratedSecrets configOpts let genesisKeys = [ stateDir </> "generated-keys" </> "poor" </> (show i <> ".key") | i <- iterate (+1) (0 :: Int) ] return $ take (length $ gsPoorSecrets gs) genesisKeys where getGeneratedSecrets :: ConfigurationOptions -> IO GeneratedSecrets getGeneratedSecrets opts = fst <$> ( usingNamedPureLogger "_" $ withConfigurations noTrace Nothing Nothing False opts $ \config _ _ _ -> configGeneratedSecretsThrow config ) -- | Some debugging output upon starting a cluster printCartouche :: Env -> IO () printCartouche env = do let colSize = 35 putTextLn $ toText (env ! "NODE_ID") <> T.replicate (colSize - length (env ! "NODE_ID")) "-" when (Map.member "LISTEN" env) $ putTextLn $ "|.....listen: " <> toText (env ! "LISTEN") putTextLn $ "|.....api address: " <> toText (env ! "NODE_API_ADDRESS") putTextLn $ "|.....doc address: " <> toText (env ! "NODE_DOC_ADDRESS") putTextLn $ "|.....system start: " <> toText (env ! "SYSTEM_START") putTextLn $ T.replicate colSize "-" <> "\n"
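The defaultIntegrationEnv comment above states the rule for mapping an environment variable name to the command-line argument it overrides: underscores become hyphens, the text is lowercased, and a leading -- is added. A tiny hypothetical helper that applies just that naming rule (it is not the real varFromParser/execParserEnv machinery from Cardano.Cluster.Util):

import Data.Char (toLower)

envVarToFlag :: String -> String
envVarToFlag = ("--" ++) . map convert
  where
    convert '_' = '-'
    convert c   = toLower c

main :: IO ()
main = mapM_ (putStrLn . envVarToFlag) ["WALLET_DEBUG", "CONFIGURATION_FILE"]
-- prints --wallet-debug and --configuration-file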
null
https://raw.githubusercontent.com/input-output-hk/cardano-sl/1499214d93767b703b9599369a431e67d83f10a2/wallet/test/integration/Test/Integration/Framework/Cluster.hs
haskell
| All those can be overriden by environment variables. These values correspond to command line arguments that would be passed to underlying processes. As an example, if you wanted to enable the @--wallet-debug@ option for the underlying node, you would add an entry in this list: > ("WALLET_DEBUG", "True") a leading @--@ is added. | Start an integration cluster. Quite identical to the original "start cluster". The main difference here is that we start a wallet node instead of the edge node. This will go as soon as decoupling is done; at this point we will need this edge node and the wallet will simply boil down to a webserver, started independently. Safe, we just defaulted it above Safe, we just defaulted it above Safe, we just defaulted it above | Start a wallet, which is still a node (decoupling incoming!) ^ The actual node name ^ A "simulation" of the system ENV as a 'Map String String' | Make HttpRequest continuously for a while to wait after the node. This is a temporary, simplified version of what in 'Cardano.Cluster' that works with WalletHttpClient. | Get poor keys | Some debugging output upon starting a cluster
module Test.Integration.Framework.Cluster ( startCluster , waitForNode ) where import Universum hiding (init) import Control.Concurrent (threadDelay) import Control.Concurrent.Async (race) import Data.Map.Strict ((!)) import qualified Data.Map.Strict as Map import qualified Data.Text as T import Options.Applicative (handleParseResult, info) import qualified Prelude import System.Environment (getEnvironment) import System.FilePath ((</>)) import Cardano.Cluster (MaxWaitingTime (..), NodeName (..), NodeType (..), startNode) import Cardano.Cluster.Environment (Artifact (..), Env, prepareEnvironment, withSystemStart) import Cardano.Cluster.Util (execParserEnv, oneSecond, runAsync, stripFilterPrefix, varFromParser) import Cardano.Wallet.Action (actionWithWallet) import Cardano.Wallet.Client.Http (ClientError (..), Manager, ServantError (..), WalletClient (getNodeInfo), WalletHttpClient) import Cardano.Wallet.Server.CLI (walletBackendParamsParser) import Pos.Chain.Genesis (GeneratedSecrets (..), configGeneratedSecretsThrow) import Pos.Client.CLI.NodeOptions (commonNodeArgsParser, nodeArgsParser) import Pos.Client.CLI.Params (loggingParams) import Pos.Launcher (LoggingParams (..), launchNode) import Pos.Launcher.Configuration (ConfigurationOptions (..), withConfigurations) import Pos.Node.API (ForceNtpCheck (..)) import Pos.Util.CompileInfo (withCompileInfo) import Pos.Util.Trace (noTrace) import Pos.Util.Wlog.Compatibility (usingNamedPureLogger) prefix :: String prefix = "INTEGRATION_" Underscores ( @_@ ) are converted to hyphens ( ) , the text is lowercased , and defaultIntegrationEnv :: Env defaultIntegrationEnv = Map.fromList [ ("CONFIGURATION_FILE", "./test/integration/configuration.yaml") , ("CONFIGURATION_KEY", "default") , ("STATE_DIR", "./state-integration") , ("REBUILD_DB", "True") , ("WALLET_ADDRESS", "127.0.0.1:8090") , ("WALLET_DOC_ADDRESS", "127.0.0.1:8190") , ("WALLET_DB_PATH", "./state-integration/wallet-db/edge") , ("WALLET_REBUILD_DB", "True") , ("WALLET_NODE_API_ADDRESS", "127.0.0.1:8089") , ("NODE_API_ADDRESS", "127.0.0.1:8086") , ("NODE_DOC_ADDRESS", "127.0.0.1:3186") , ("NODE_TLS_CLIENT_CERT", "./state-integration/tls/relay/client.crt") , ("NODE_TLS_KEY", "./state-integration/tls/relay/client.key") , ("NODE_TLS_CA_CERT", "./state-integration/tls/relay/ca.crt") ] startCluster :: [(NodeName, NodeType)] -> IO (Env, [FilePath], Manager) startCluster nodes = do env0 <- getEnvironment >>= withSystemStart . Map.union defaultIntegrationEnv . Map.fromList . stripFilterPrefix prefix handles <- forM nodes $ \node@(_, nodeType) -> runAsync $ \yield -> do let (artifacts, nodeEnv) = prepareEnvironment node nodes stateDir env0 let (genesis, topology, logger, tls) = artifacts case nodeType of NodeCore -> do void (init genesis >> init topology >> init logger >> init tls) yield (nodeEnv, Nothing) >> startNode node nodeEnv NodeRelay -> do void (init topology >> init logger >> init tls) yield (nodeEnv, Nothing) >> startNode node nodeEnv NodeEdge -> do manager <- init topology >> init logger >> init tls yield (nodeEnv, Just manager) >> startWallet node nodeEnv (env, manager) <- fmap (Prelude.head . 
catMaybes) $ forM handles $ \(_, (env, manager)) -> do printCartouche env >> return ((env,) <$> manager) let configOpts = ConfigurationOptions { cfoFilePath = configFile , cfoKey = toText configKey , cfoSystemStart = Just 0 , cfoSeed = Nothing } (env,,manager) <$> getGenesisKeys stateDir configOpts where init :: Artifact a b -> IO b init = initializeArtifact startWallet -> IO () startWallet (NodeName nodeIdT, _) env = do nArgs <- parseNodeArgs cArgs <- parseCommonNodeArgs wArgs <- parseWalletArgs let lArgs = getLoggingArgs cArgs withCompileInfo $ launchNode nArgs cArgs lArgs (actionWithWallet wArgs) where parseNodeArgs = do let nVars = varFromParser nodeArgsParser let nInfo = info nodeArgsParser mempty handleParseResult $ execParserEnv env nVars nInfo parseCommonNodeArgs = do let cVars = varFromParser commonNodeArgsParser let cInfo = info commonNodeArgsParser mempty handleParseResult $ execParserEnv env cVars cInfo parseWalletArgs = do let wVars = varFromParser walletBackendParamsParser let wInfo = info walletBackendParamsParser mempty handleParseResult (execParserEnv env wVars wInfo) getLoggingArgs cArgs = (loggingParams (fromString $ T.unpack nodeIdT) cArgs) { lpConsoleLog = Just False } waitForNode ^ An Http Client configured against a given node ^ Maximum waiting time , in seconds -> IO () waitForNode client (MaxWaitingTime s) = do res <- race (threadDelay $ s * oneSecond) retry case res of Left _ -> fail $ "Giving up waiting for node to start: it takes too long" Right _ -> return () where retry :: IO () retry = threadDelay oneSecond >> waitForNode' waitForNode' :: IO () waitForNode' = getNodeInfo client NoNtpCheck >>= \case Right _ -> return () Left (ClientHttpError ConnectionError{}) -> retry Left err -> fail $ "Failed to wait for node to start: " <> show err getGenesisKeys :: FilePath -> ConfigurationOptions -> IO [FilePath] getGenesisKeys stateDir configOpts = do gs <- getGeneratedSecrets configOpts let genesisKeys = [ stateDir </> "generated-keys" </> "poor" </> (show i <> ".key") | i <- iterate (+1) (0 :: Int) ] return $ take (length $ gsPoorSecrets gs) genesisKeys where getGeneratedSecrets :: ConfigurationOptions -> IO GeneratedSecrets getGeneratedSecrets opts = fst <$> ( usingNamedPureLogger "_" $ withConfigurations noTrace Nothing Nothing False opts $ \config _ _ _ -> configGeneratedSecretsThrow config ) printCartouche :: Env -> IO () printCartouche env = do let colSize = 35 putTextLn $ toText (env ! "NODE_ID") <> T.replicate (colSize - length (env ! "NODE_ID")) "-" when (Map.member "LISTEN" env) $ putTextLn $ "|.....listen: " <> toText (env ! "LISTEN") putTextLn $ "|.....api address: " <> toText (env ! "NODE_API_ADDRESS") putTextLn $ "|.....doc address: " <> toText (env ! "NODE_DOC_ADDRESS") putTextLn $ "|.....system start: " <> toText (env ! "SYSTEM_START") putTextLn $ T.replicate colSize "-" <> "\n"
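waitForNode above races a fixed timeout against a once-per-second polling loop and gives up if the timeout wins. The same pattern in isolation, assuming the async package (which the module itself imports); the Bool-returning check stands in for the getNodeInfo call:

import Control.Concurrent (threadDelay)
import Control.Concurrent.Async (race)

-- Poll 'check' once per second until it succeeds or 'seconds' have elapsed.
waitUntil :: Int -> IO Bool -> IO Bool
waitUntil seconds check =
    either (const False) (const True) <$> race timeout poll
  where
    timeout = threadDelay (seconds * 1000000)
    poll = do
      ok <- check
      if ok then pure () else threadDelay 1000000 >> poll

main :: IO ()
main = waitUntil 3 (pure True) >>= print   -- True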
4066667b7226981220e8eb4bb252f8bf485bde704f105a30f383025944bf3a44
Eventuria/demonstration-gsd
OverEventStore.hs
# LANGUAGE FlexibleContexts # # LANGUAGE DuplicateRecordFields # # LANGUAGE NamedFieldPuns # # LANGUAGE RecordWildCards # module Eventuria.GSD.Monitoring.Service.OverEventStore where import Control.Exception import Streamly hiding (Streaming) import qualified Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.Client.Dependencies as EventStoreClient import Eventuria.Libraries.PersistedStreamEngine.Interface.PersistedItem import Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.Read.CqrsInstance import Eventuria.Libraries.CQRS.Write.StreamRepository import Eventuria.Libraries.CQRS.Write.Aggregate.Commands.Responses.CommandResponse import Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.Read.ReadProjections import Eventuria.GSD.Write.CommandConsumer.Handling.ProjectGSDWriteModel import Eventuria.GSD.Write.Model.Commands.Command import Eventuria.GSD.Write.Model.Events.Event import Eventuria.GSD.Write.Model.Core import Eventuria.GSD.Write.Model.WriteModel import Eventuria.GSD.Write.Repository.EventStoreStreams import Eventuria.Libraries.CQRS.Write.Serialization.Command () import qualified Eventuria.GSD.Monitoring.Service.Generic as GenericGSDMonitoring streamWorkspaceId :: EventStoreClient.Dependencies -> SerialT IO (Either SomeException (Persisted WorkspaceId)) streamWorkspaceId eventStoreClientDependencies = GenericGSDMonitoring.streamWorkspaceId (streamAllAggregateId (aggregateIdStream $ getEventStoreStreamRepository eventStoreClientDependencies)) streamCommand :: EventStoreClient.Dependencies -> WorkspaceId -> SerialT IO (Either SomeException (Persisted GSDCommand)) streamCommand eventStoreClientDependencies workspaceId = GenericGSDMonitoring.streamCommand (getCommandStream $ getEventStoreStreamRepository eventStoreClientDependencies) getEventStoreStreaming workspaceId streamCommandResponse :: EventStoreClient.Dependencies -> WorkspaceId -> SerialT IO (Either SomeException (Persisted CommandResponse)) streamCommandResponse eventStoreClientDependencies workspaceId = GenericGSDMonitoring.streamCommandResponse (getStreamAllCommandResponseByAggregateId (getCommandTransactionStream $ getEventStoreStreamRepository eventStoreClientDependencies)) workspaceId streamEvent :: EventStoreClient.Dependencies -> WorkspaceId -> SerialT IO (Either SomeException (Persisted GsdEvent)) streamEvent eventStoreClientDependencies workspaceId = GenericGSDMonitoring.streamEvent (getStreamAllEventsByAggregateId (getCommandTransactionStream $ getEventStoreStreamRepository eventStoreClientDependencies)) workspaceId streamWriteModelHistory :: EventStoreClient.Dependencies -> WorkspaceId -> SerialT IO (Either SomeException (Persisted (Maybe GsdWriteModel))) streamWriteModelHistory eventStoreClientDependencies workspaceId = GenericGSDMonitoring.streamWriteModelHistory (getStreamAllWriteModelByAggregateId (getCommandTransactionStream $ getEventStoreStreamRepository eventStoreClientDependencies) projectGSDWriteModel) workspaceId
null
https://raw.githubusercontent.com/Eventuria/demonstration-gsd/5c7692b310086bc172d3fd4e1eaf09ae51ea468f/src/Eventuria/GSD/Monitoring/Service/OverEventStore.hs
haskell
# LANGUAGE FlexibleContexts # # LANGUAGE DuplicateRecordFields # # LANGUAGE NamedFieldPuns # # LANGUAGE RecordWildCards # module Eventuria.GSD.Monitoring.Service.OverEventStore where import Control.Exception import Streamly hiding (Streaming) import qualified Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.Client.Dependencies as EventStoreClient import Eventuria.Libraries.PersistedStreamEngine.Interface.PersistedItem import Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.Read.CqrsInstance import Eventuria.Libraries.CQRS.Write.StreamRepository import Eventuria.Libraries.CQRS.Write.Aggregate.Commands.Responses.CommandResponse import Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.Read.ReadProjections import Eventuria.GSD.Write.CommandConsumer.Handling.ProjectGSDWriteModel import Eventuria.GSD.Write.Model.Commands.Command import Eventuria.GSD.Write.Model.Events.Event import Eventuria.GSD.Write.Model.Core import Eventuria.GSD.Write.Model.WriteModel import Eventuria.GSD.Write.Repository.EventStoreStreams import Eventuria.Libraries.CQRS.Write.Serialization.Command () import qualified Eventuria.GSD.Monitoring.Service.Generic as GenericGSDMonitoring streamWorkspaceId :: EventStoreClient.Dependencies -> SerialT IO (Either SomeException (Persisted WorkspaceId)) streamWorkspaceId eventStoreClientDependencies = GenericGSDMonitoring.streamWorkspaceId (streamAllAggregateId (aggregateIdStream $ getEventStoreStreamRepository eventStoreClientDependencies)) streamCommand :: EventStoreClient.Dependencies -> WorkspaceId -> SerialT IO (Either SomeException (Persisted GSDCommand)) streamCommand eventStoreClientDependencies workspaceId = GenericGSDMonitoring.streamCommand (getCommandStream $ getEventStoreStreamRepository eventStoreClientDependencies) getEventStoreStreaming workspaceId streamCommandResponse :: EventStoreClient.Dependencies -> WorkspaceId -> SerialT IO (Either SomeException (Persisted CommandResponse)) streamCommandResponse eventStoreClientDependencies workspaceId = GenericGSDMonitoring.streamCommandResponse (getStreamAllCommandResponseByAggregateId (getCommandTransactionStream $ getEventStoreStreamRepository eventStoreClientDependencies)) workspaceId streamEvent :: EventStoreClient.Dependencies -> WorkspaceId -> SerialT IO (Either SomeException (Persisted GsdEvent)) streamEvent eventStoreClientDependencies workspaceId = GenericGSDMonitoring.streamEvent (getStreamAllEventsByAggregateId (getCommandTransactionStream $ getEventStoreStreamRepository eventStoreClientDependencies)) workspaceId streamWriteModelHistory :: EventStoreClient.Dependencies -> WorkspaceId -> SerialT IO (Either SomeException (Persisted (Maybe GsdWriteModel))) streamWriteModelHistory eventStoreClientDependencies workspaceId = GenericGSDMonitoring.streamWriteModelHistory (getStreamAllWriteModelByAggregateId (getCommandTransactionStream $ getEventStoreStreamRepository eventStoreClientDependencies) projectGSDWriteModel) workspaceId
9e22057d3e180b68fd4d57c19eac6677be29de60e0c54441431927f1e40ea022
alanmarazzi/perfect
generics.clj
(ns perfect.reader.generics (:require [clojure.spec.alpha :as s] [expound.alpha :as expound] [expound.specs :as exspec]) (:import (java.lang IllegalArgumentException) (org.apache.poi.ss.util WorkbookUtil) (org.apache.poi.ss.usermodel Workbook Sheet Row Cell CellType DateUtil))) (set! s/*explain-out* expound/printer) (s/check-asserts true) (defn valid-name? [nm] (try (do (WorkbookUtil/validateSheetName nm) true) (catch IllegalArgumentException e false))) (expound/def ::valid-name? (s/and string? valid-name?) "should be an XLSX valid name: #createSafeSheetName-java.lang.String-") (s/def ::sheet-identity (s/or :idx ::exspec/nat-int :name ::valid-name?)) (defn sheets [^Workbook wb] (map #(.getSheetAt wb %) (range (.getNumberOfSheets wb)))) (defn rows [^Sheet sheet] (seq sheet)) (defn cells [row] (seq row)) (defn columnar [d header?] (if header? (zipmap (first d) (apply mapv vector (rest d))) (apply mapv vector (rest d)))) (defn blank? [cell] (identical? :blank (:type cell)))
null
https://raw.githubusercontent.com/alanmarazzi/perfect/c3caecbf64383a1befd0dc1fef8f414e55d3dfa2/src/perfect/reader/generics.clj
clojure
(ns perfect.reader.generics (:require [clojure.spec.alpha :as s] [expound.alpha :as expound] [expound.specs :as exspec]) (:import (java.lang IllegalArgumentException) (org.apache.poi.ss.util WorkbookUtil) (org.apache.poi.ss.usermodel Workbook Sheet Row Cell CellType DateUtil))) (set! s/*explain-out* expound/printer) (s/check-asserts true) (defn valid-name? [nm] (try (do (WorkbookUtil/validateSheetName nm) true) (catch IllegalArgumentException e false))) (expound/def ::valid-name? (s/and string? valid-name?) "should be an XLSX valid name: #createSafeSheetName-java.lang.String-") (s/def ::sheet-identity (s/or :idx ::exspec/nat-int :name ::valid-name?)) (defn sheets [^Workbook wb] (map #(.getSheetAt wb %) (range (.getNumberOfSheets wb)))) (defn rows [^Sheet sheet] (seq sheet)) (defn cells [row] (seq row)) (defn columnar [d header?] (if header? (zipmap (first d) (apply mapv vector (rest d))) (apply mapv vector (rest d)))) (defn blank? [cell] (identical? :blank (:type cell)))
c70fbc69995dfc94004e0673914366103fe89ef5f727c17bb031b1db3e9e5574
naoto-ogawa/h-xproto-mysql
Example03_data.hs
module Example.Example03_data where -- +----+----------+-------------+----------+-------------------------+ -- | ID | Name | CountryCode | District | Info | -- +----+----------+-------------+----------+-------------------------+ -- | 1 | Kabul | AFG | Kabol | {"Population": 1780000} | -- | 2 | Qandahar | AFG | Qandahar | {"Population": 237500} | -- +----+----------+-------------+----------+-------------------------+ data MyRecord = MyRecord {id :: Int, name :: String, country_code :: String, district :: String, info :: String} deriving (Show, Eq)
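The comment table above documents the rows MyRecord is meant to hold. Purely to illustrate how the columns line up with the record fields, here is a value built from the first row, with the JSON info column kept as a raw string exactly as the type declares:

import Prelude hiding (id)

data MyRecord = MyRecord {id :: Int, name :: String, country_code :: String, district :: String, info :: String} deriving (Show, Eq)

kabul :: MyRecord
kabul = MyRecord 1 "Kabul" "AFG" "Kabol" "{\"Population\": 1780000}"

main :: IO ()
main = print kabul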
null
https://raw.githubusercontent.com/naoto-ogawa/h-xproto-mysql/1eacd6486c99b849016bf088788cb8d8b166f964/src/Example/Example03_data.hs
haskell
+----+----------+-------------+----------+-------------------------+ | ID | Name | CountryCode | District | Info | +----+----------+-------------+----------+-------------------------+ | 1 | Kabul | AFG | Kabol | {"Population": 1780000} | | 2 | Qandahar | AFG | Qandahar | {"Population": 237500} | +----+----------+-------------+----------+-------------------------+
module Example.Example03_data where data MyRecord = MyRecord {id :: Int, name :: String, country_code :: String, district :: String, info :: String} deriving (Show, Eq)
1a5826a430fe59cd58be30af295bf5de894f06d5904856f76c996341dd65e657
wfnuser/sicp-solutions
e3-24.scm
(define (assoc key records same-key?) (cond ((null? records) false) ((same-key? key (caar records)) (car records)) (else (assoc key (cdr records))) ) ) (define (make-table same-key?) (let ((local-table (list '*table*))) (define (assoc key records) (cond ((null? records) false) ((same-key? key (caar records)) (car records)) (else (assoc key (cdr records))) ) ) (define (lookup key-1 key-2) (let ((subtable (assoc key-1 (cdr local-table)))) (if subtable (let ((record (assoc key-2 (cdr subtable)))) (if record (cdr record) false)) false))) (define (insert! key-1 key-2 value) (let ((subtable (assoc key-1 (cdr local-table)))) (if subtable (let ((record (assoc key-2 (cdr subtable)))) (if record (set-cdr! record value) (set-cdr! subtable (cons (cons key-2 value) (cdr subtable))))) (set-cdr! local-table (cons (list key-1 (cons key-2 value)) (cdr local-table))))) 'ok ) (define (dispatch m) (cond ((eq? m 'lookup-proc) lookup) ((eq? m 'insert-proc!) insert!) (else (error "Unknown operation: TABLE" m)))) dispatch ) ) (define number-table (make-table =)) ((number-table 'insert-proc!) 10086 10086 'hello-moto) ((number-table 'lookup-proc) 10086 10086)
null
https://raw.githubusercontent.com/wfnuser/sicp-solutions/2c94b28d8ee004dcbfe7311f866e5a346ee01d12/ch3/e3-24.scm
scheme
(define (assoc key records same-key?) (cond ((null? records) false) ((same-key? key (caar records)) (car records)) (else (assoc key (cdr records))) ) ) (define (make-table same-key?) (let ((local-table (list '*table*))) (define (assoc key records) (cond ((null? records) false) ((same-key? key (caar records)) (car records)) (else (assoc key (cdr records))) ) ) (define (lookup key-1 key-2) (let ((subtable (assoc key-1 (cdr local-table)))) (if subtable (let ((record (assoc key-2 (cdr subtable)))) (if record (cdr record) false)) false))) (define (insert! key-1 key-2 value) (let ((subtable (assoc key-1 (cdr local-table)))) (if subtable (let ((record (assoc key-2 (cdr subtable)))) (if record (set-cdr! record value) (set-cdr! subtable (cons (cons key-2 value) (cdr subtable))))) (set-cdr! local-table (cons (list key-1 (cons key-2 value)) (cdr local-table))))) 'ok ) (define (dispatch m) (cond ((eq? m 'lookup-proc) lookup) ((eq? m 'insert-proc!) insert!) (else (error "Unknown operation: TABLE" m)))) dispatch ) ) (define number-table (make-table =)) ((number-table 'insert-proc!) 10086 10086 'hello-moto) ((number-table 'lookup-proc) 10086 10086)
082998f2668f1de6a079f062e6f47fea6d3a0f1a4356097addedce4b0a8e6431
pgujjula/hilbert
Partition.hs
| Module : Math . . Digit Description : Counting partitions of an integer . Copyright : ( c ) , 2020 License : BSD-3 - Clause Maintainer : Stability : experimental Counting partitions of an integer . Description : Counting partitions of an integer. Copyright : (c) Preetham Gujjula, 2020 License : BSD-3-Clause Maintainer : Stability : experimental Counting partitions of an integer. -} module Math.Combinatorics.Partition (numPartitions, partitions) where import Data.Chimera (memoizeFix) import Data.List (foldl') | The number of partitions of n. For example , since we can write 4 as 1 + 1 + 1 + 1 , 1 + 1 + 2 , 1 + 3 , 2 + 2 , and 4 , we have > > > numPartitions 4 5 Some special cases : > > > numPartitions 0 1 > > > numPartitions ( -1 ) -- or any negative number 0 1 + 1 + 1 + 1, 1 + 1 + 2, 1 + 3, 2 + 2, and 4, we have >>> numPartitions 4 5 Some special cases: >>> numPartitions 0 1 >>> numPartitions (-1) -- or any negative number 0 -} numPartitions :: (Integral a) => a -> a numPartitions n | n < 0 = 0 | n == 0 = 1 | otherwise = fromIntegral $ part (fromIntegral n) part :: Word -> Integer part = memoizeFix partFix -- use the partition function formula -- see Wikipedia partFix :: (Word -> Integer) -> Word -> Integer partFix p n | n == 0 = 1 | otherwise = foldl' (+) 0 $ zipWith (*) weights $ map p recursiveArgs where weights :: [Integer] weights = cycle [1, 1, -1, -1] -- recursively call the partition function with these arguments recursiveArgs :: [Word] recursiveArgs = map fromIntegral $ takeWhile (>= 0) $ map (fromIntegral n -) offsets -- alternate positive and negative pentagonal numbers offsets :: [Int] offsets = map pentagonal $ alternate [1..] [-1, -2..] where pentagonal :: (Integral a) => Int -> a pentagonal k = k' * (3*k' - 1) `div` 2 where k' = fromIntegral k alternate :: [a] -> [a] -> [a] alternate (x:xs) (y:ys) = x : y : alternate xs ys alternate _ _ = error "finite list" {-| Enumerate the partitions of n. >>> partitions 4 [[4],[3,1],[2,2],[2,1,1],[1,1,1,1]] -} partitions :: Integral a => a -> [[a]] partitions n = partitionsWithMax n n partitionsWithMax :: Integral a => a -> a -> [[a]] partitionsWithMax m n | n < 0 = [] | n == 0 = [[]] | otherwise = [m, m-1..1] >>= \i -> fmap (i:) (partitionsWithMax i (n - i))
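The module above evaluates p(n) with the pentagonal-number recurrence its comments point to: p(n) = sum over k of (-1)^(k+1) * p(n - g_k), where g_k = k(3k-1)/2 ranges over the generalized pentagonal numbers for k = 1, -1, 2, -2, ... That is exactly why the weights cycle through +, +, -, - and why the offsets are pentagonal numbers. A small unmemoized sketch of the same recurrence, for illustration only (the real module memoizes through Data.Chimera):

p :: Int -> Integer
p n
  | n < 0     = 0
  | n == 0    = 1
  | otherwise = sum [ w * p (n - g)
                    | (w, g) <- takeWhile ((<= n) . snd)
                                          (zip (cycle [1, 1, -1, -1]) pentagonals) ]
  where
    -- generalized pentagonal numbers: 1, 2, 5, 7, 12, 15, 22, 26, ...
    pentagonals = [ k * (3 * k - 1) `div` 2 | k <- concatMap (\i -> [i, -i]) [1 ..] ]

main :: IO ()
main = print (map p [0 .. 10])   -- [1,1,2,3,5,7,11,15,22,30,42]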
null
https://raw.githubusercontent.com/pgujjula/hilbert/ea64f2d06ceeb96da11fe6f2b868cef1cd817e28/src/Math/Combinatorics/Partition.hs
haskell
or any negative number or any negative number use the partition function formula -- see Wikipedia recursively call the partition function with these arguments alternate positive and negative pentagonal numbers | Enumerate the partitions of n. >>> partitions 4 [[4],[3,1],[2,2],[2,1,1],[1,1,1,1]]
| Module : Math . . Digit Description : Counting partitions of an integer . Copyright : ( c ) , 2020 License : BSD-3 - Clause Maintainer : Stability : experimental Counting partitions of an integer . Description : Counting partitions of an integer. Copyright : (c) Preetham Gujjula, 2020 License : BSD-3-Clause Maintainer : Stability : experimental Counting partitions of an integer. -} module Math.Combinatorics.Partition (numPartitions, partitions) where import Data.Chimera (memoizeFix) import Data.List (foldl') | The number of partitions of n. For example , since we can write 4 as 1 + 1 + 1 + 1 , 1 + 1 + 2 , 1 + 3 , 2 + 2 , and 4 , we have > > > numPartitions 4 5 Some special cases : > > > numPartitions 0 1 0 1 + 1 + 1 + 1, 1 + 1 + 2, 1 + 3, 2 + 2, and 4, we have >>> numPartitions 4 5 Some special cases: >>> numPartitions 0 1 0 -} numPartitions :: (Integral a) => a -> a numPartitions n | n < 0 = 0 | n == 0 = 1 | otherwise = fromIntegral $ part (fromIntegral n) part :: Word -> Integer part = memoizeFix partFix partFix :: (Word -> Integer) -> Word -> Integer partFix p n | n == 0 = 1 | otherwise = foldl' (+) 0 $ zipWith (*) weights $ map p recursiveArgs where weights :: [Integer] weights = cycle [1, 1, -1, -1] recursiveArgs :: [Word] recursiveArgs = map fromIntegral $ takeWhile (>= 0) $ map (fromIntegral n -) offsets offsets :: [Int] offsets = map pentagonal $ alternate [1..] [-1, -2..] where pentagonal :: (Integral a) => Int -> a pentagonal k = k' * (3*k' - 1) `div` 2 where k' = fromIntegral k alternate :: [a] -> [a] -> [a] alternate (x:xs) (y:ys) = x : y : alternate xs ys alternate _ _ = error "finite list" partitions :: Integral a => a -> [[a]] partitions n = partitionsWithMax n n partitionsWithMax :: Integral a => a -> a -> [[a]] partitionsWithMax m n | n < 0 = [] | n == 0 = [[]] | otherwise = [m, m-1..1] >>= \i -> fmap (i:) (partitionsWithMax i (n - i))
12ee9e2c5137dc68e74381a86121fedf324f87c4c5e07c13ea703a57551aa58b
screenshotbot/screenshotbot-oss
promoter.lisp
;;;; Copyright 2018-Present Modern Interpreters Inc. ;;;; This Source Code Form is subject to the terms of the Mozilla Public License , v. 2.0 . If a copy of the MPL was not distributed with this file , You can obtain one at /. (defpackage :screenshotbot/pro/bitbucket/promoter (:nicknames :screenshotbot/bitbucket/promoter) (:use #:cl #:screenshotbot/abstract-pr-promoter) (:import-from #:screenshotbot/promote-api #:maybe-send-tasks #:plugin-promoter) (:import-from #:screenshotbot/pro/bitbucket/plugin #:bitbucket-repo #:bitbucket-plugin) (:import-from #:screenshotbot/abstract-pr-promoter #:promoter-pull-id #:make-promoter-for-acceptable #:abstract-pr-acceptable #:push-remote-check #:format-updated-summary #:check-title #:check-summary #:make-acceptable #:details-url #:send-task-args #:check-status #:valid-repo? #:plugin-installed?) (:import-from #:screenshotbot/pro/bitbucket/settings #:get-access-token-from-refresh-token #:refresh-token #:bitbucket-settings-for-company) (:import-from #:screenshotbot/model/channel #:github-get-canonical-repo) (:import-from #:screenshotbot/model/recorder-run #:override-commit-hash #:recorder-run-company) (:import-from #:screenshotbot/user-api #:pull-request-url #:channel-repo #:current-user #:recorder-run-channel #:channel-name #:recorder-run-commit) (:import-from #:screenshotbot/model/report #:base-acceptable #:acceptable-state) (:import-from #:bknr.datastore #:persistent-class) (:import-from #:screenshotbot/model/company #:company) (:import-from #:screenshotbot/report-api #:report-run) (:import-from #:bknr.datastore #:with-transaction) (:import-from #:screenshotbot/dashboard/run-page #:run-page) (:import-from #:util/object-id #:oid) (:import-from #:screenshotbot/installation #:installation #:installation-domain) (:import-from #:screenshotbot/pro/bitbucket/audit-log #:with-audit-log #:parse-error-response #:http-result-code #:audit-log-error-response #:audit-log-error #:build-status-audit-log) (:import-from #:util/misc #:not-empty! #:not-null!) (:import-from #:screenshotbot/pro/bitbucket/core #:http-success-response? #:bitbucket-error) (:import-from #:screenshotbot/events #:push-event) (:import-from #:util/store #:with-class-validation) (:import-from #:screenshotbot/abstract-pr-promoter #:abstract-pr-promoter) (:local-nicknames (#:a #:alexandria))) (in-package :screenshotbot/bitbucket/promoter) (with-class-validation (defclass bitbucket-acceptable (abstract-pr-acceptable) ((send-task-args :initarg :report :accessor send-task-args) (%company :initarg :company :reader company)) (:metaclass persistent-class))) (defclass bitbucket-promoter (abstract-pr-promoter) ((plugin :initarg :plugin :reader plugin))) (defmethod make-promoter-for-acceptable ((self bitbucket-acceptable)) (make-instance 'bitbucket-promoter)) (defmethod plugin-installed? ((promoter bitbucket-promoter) company repo-url) (bitbucket-settings-for-company company)) (defmethod make-acceptable ((promoter bitbucket-promoter) report &rest args) (apply #'make-instance 'bitbucket-acceptable :company (recorder-run-company (report-run report)) :report report args)) (defmethod valid-repo? ((promoter bitbucket-promoter) repo) (typep repo 'bitbucket-repo)) (defun build-status-url (full-name commit) "See -group-commit-statuses/" (format nil "/~a/commit/~a/statuses/build/" full-name commit)) (defmethod push-remote-check ((promoter bitbucket-promoter) run check) "Send the build status. 
Log any error message, but don't propagate the errors" (handler-case (let* ((company (recorder-run-company run)) (bitbucket-token (not-null! (car (bitbucket-settings-for-company company)))) (token (get-access-token-from-refresh-token company (refresh-token bitbucket-token))) (args (make-build-status-args run check))) (assert token) (let* ((commit (not-empty! (a:assoc-value args :commit))) (full-name (not-empty! (a:assoc-value args :full-name)))) (with-audit-log (audit-log (make-instance 'build-status-audit-log :company company :commit commit :full-name full-name)) (let* ((url (build-status-url full-name commit))) (multiple-value-bind (stream result-code) (util/request:http-request url :method :post :content-type "application/json" :want-stream t :additional-headers `(("Authorization" . ,(Format nil "Bearer ~a" token))) :force-binary nil :content (json:encode-json-to-string args)) (let ((ret (uiop:slurp-input-stream 'string stream))) (cond ((http-success-response? result-code) (push-event :bitbucket.update-success) (log:info "Got bitbucket result: ~a" ret)) (t ;; error (push-event :bitbucket.update-failure) (log:info "Got BitBucket response code: ~a" result-code) (parse-error-response ret result-code audit-log))))))))) (bitbucket-error (e) (values)))) (auto-restart:with-auto-restart () (defmethod maybe-send-tasks ((promoter bitbucket-promoter) run) (values))) (defmethod plugin-promoter ((plugin bitbucket-plugin)) (make-instance 'bitbucket-promoter :plugin plugin)) (defun nullify (str) (if (str:emptyp str) nil str)) (defun make-key (channel-name) (let ((old-key (format nil "screenshotbot--~a" channel-name))) (cond ((<= (length old-key) 40) old-key) (t (ironclad:byte-array-to-hex-string (md5:md5sum-string old-key)))))) (defmethod promoter-pull-id ((promoter bitbucket-promoter) run) (pull-request-url run)) (defun make-build-status-args (run check) (let* ((channel (recorder-run-channel run)) (repo (channel-repo channel)) (channel-name (channel-name channel))) (flet ((make-details-url (&rest args) (format nil "~a~a" (installation-domain (installation)) (apply #'hex:make-url args)))) `((:key . ,(make-key channel-name)) TODO : refactor repo - full - name to not use GitHub specific code . (:full-name . ,(screenshotbot/github/pull-request-promoter::repo-full-name repo)) (:commit . ,(or (nullify (override-commit-hash run)) (recorder-run-commit run))) (:state . ,(ecase (check-status check) (:success "SUCCESSFUL") (:failure "FAILED") (:accepted "SUCCESSFUL") (:rejected "FAILED") (:pending "INPROGRESS") (:action_required "FAILED") (:action-required "FAILED"))) (:name . ,(format nil "Screenshots for ~a" channel-name)) (:url . ,(or (details-url check) (make-details-url 'run-page :id (oid run)))) (:description . ,(check-title check))))))
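make-build-status-args above collapses Screenshotbot's internal check states into the three build states the Bitbucket commit-status endpoint accepts (SUCCESSFUL, FAILED, INPROGRESS). The same mapping written out as a small sketch; the constructor names are hypothetical stand-ins for the promoter's check-status keywords:

data CheckStatus
  = Success | Failure | Accepted | Rejected | Pending | ActionRequired
  deriving (Show, Eq)

bitbucketState :: CheckStatus -> String
bitbucketState s = case s of
  Success        -> "SUCCESSFUL"
  Accepted       -> "SUCCESSFUL"   -- an accepted report counts as a passing build
  Failure        -> "FAILED"
  Rejected       -> "FAILED"
  ActionRequired -> "FAILED"
  Pending        -> "INPROGRESS"

main :: IO ()
main = mapM_ (putStrLn . bitbucketState) [Pending, Accepted, Rejected]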
null
https://raw.githubusercontent.com/screenshotbot/screenshotbot-oss/182e8c70f2a428859b42e592f1281d988f4c41d1/src/screenshotbot/bitbucket/promoter.lisp
lisp
Copyright 2018-Present Modern Interpreters Inc. error
This Source Code Form is subject to the terms of the Mozilla Public License , v. 2.0 . If a copy of the MPL was not distributed with this file , You can obtain one at /. (defpackage :screenshotbot/pro/bitbucket/promoter (:nicknames :screenshotbot/bitbucket/promoter) (:use #:cl #:screenshotbot/abstract-pr-promoter) (:import-from #:screenshotbot/promote-api #:maybe-send-tasks #:plugin-promoter) (:import-from #:screenshotbot/pro/bitbucket/plugin #:bitbucket-repo #:bitbucket-plugin) (:import-from #:screenshotbot/abstract-pr-promoter #:promoter-pull-id #:make-promoter-for-acceptable #:abstract-pr-acceptable #:push-remote-check #:format-updated-summary #:check-title #:check-summary #:make-acceptable #:details-url #:send-task-args #:check-status #:valid-repo? #:plugin-installed?) (:import-from #:screenshotbot/pro/bitbucket/settings #:get-access-token-from-refresh-token #:refresh-token #:bitbucket-settings-for-company) (:import-from #:screenshotbot/model/channel #:github-get-canonical-repo) (:import-from #:screenshotbot/model/recorder-run #:override-commit-hash #:recorder-run-company) (:import-from #:screenshotbot/user-api #:pull-request-url #:channel-repo #:current-user #:recorder-run-channel #:channel-name #:recorder-run-commit) (:import-from #:screenshotbot/model/report #:base-acceptable #:acceptable-state) (:import-from #:bknr.datastore #:persistent-class) (:import-from #:screenshotbot/model/company #:company) (:import-from #:screenshotbot/report-api #:report-run) (:import-from #:bknr.datastore #:with-transaction) (:import-from #:screenshotbot/dashboard/run-page #:run-page) (:import-from #:util/object-id #:oid) (:import-from #:screenshotbot/installation #:installation #:installation-domain) (:import-from #:screenshotbot/pro/bitbucket/audit-log #:with-audit-log #:parse-error-response #:http-result-code #:audit-log-error-response #:audit-log-error #:build-status-audit-log) (:import-from #:util/misc #:not-empty! #:not-null!) (:import-from #:screenshotbot/pro/bitbucket/core #:http-success-response? #:bitbucket-error) (:import-from #:screenshotbot/events #:push-event) (:import-from #:util/store #:with-class-validation) (:import-from #:screenshotbot/abstract-pr-promoter #:abstract-pr-promoter) (:local-nicknames (#:a #:alexandria))) (in-package :screenshotbot/bitbucket/promoter) (with-class-validation (defclass bitbucket-acceptable (abstract-pr-acceptable) ((send-task-args :initarg :report :accessor send-task-args) (%company :initarg :company :reader company)) (:metaclass persistent-class))) (defclass bitbucket-promoter (abstract-pr-promoter) ((plugin :initarg :plugin :reader plugin))) (defmethod make-promoter-for-acceptable ((self bitbucket-acceptable)) (make-instance 'bitbucket-promoter)) (defmethod plugin-installed? ((promoter bitbucket-promoter) company repo-url) (bitbucket-settings-for-company company)) (defmethod make-acceptable ((promoter bitbucket-promoter) report &rest args) (apply #'make-instance 'bitbucket-acceptable :company (recorder-run-company (report-run report)) :report report args)) (defmethod valid-repo? ((promoter bitbucket-promoter) repo) (typep repo 'bitbucket-repo)) (defun build-status-url (full-name commit) "See -group-commit-statuses/" (format nil "/~a/commit/~a/statuses/build/" full-name commit)) (defmethod push-remote-check ((promoter bitbucket-promoter) run check) "Send the build status. Log any error message, but don't propagate the errors" (handler-case (let* ((company (recorder-run-company run)) (bitbucket-token (not-null! 
(car (bitbucket-settings-for-company company)))) (token (get-access-token-from-refresh-token company (refresh-token bitbucket-token))) (args (make-build-status-args run check))) (assert token) (let* ((commit (not-empty! (a:assoc-value args :commit))) (full-name (not-empty! (a:assoc-value args :full-name)))) (with-audit-log (audit-log (make-instance 'build-status-audit-log :company company :commit commit :full-name full-name)) (let* ((url (build-status-url full-name commit))) (multiple-value-bind (stream result-code) (util/request:http-request url :method :post :content-type "application/json" :want-stream t :additional-headers `(("Authorization" . ,(Format nil "Bearer ~a" token))) :force-binary nil :content (json:encode-json-to-string args)) (let ((ret (uiop:slurp-input-stream 'string stream))) (cond ((http-success-response? result-code) (push-event :bitbucket.update-success) (log:info "Got bitbucket result: ~a" ret)) (push-event :bitbucket.update-failure) (log:info "Got BitBucket response code: ~a" result-code) (parse-error-response ret result-code audit-log))))))))) (bitbucket-error (e) (values)))) (auto-restart:with-auto-restart () (defmethod maybe-send-tasks ((promoter bitbucket-promoter) run) (values))) (defmethod plugin-promoter ((plugin bitbucket-plugin)) (make-instance 'bitbucket-promoter :plugin plugin)) (defun nullify (str) (if (str:emptyp str) nil str)) (defun make-key (channel-name) (let ((old-key (format nil "screenshotbot--~a" channel-name))) (cond ((<= (length old-key) 40) old-key) (t (ironclad:byte-array-to-hex-string (md5:md5sum-string old-key)))))) (defmethod promoter-pull-id ((promoter bitbucket-promoter) run) (pull-request-url run)) (defun make-build-status-args (run check) (let* ((channel (recorder-run-channel run)) (repo (channel-repo channel)) (channel-name (channel-name channel))) (flet ((make-details-url (&rest args) (format nil "~a~a" (installation-domain (installation)) (apply #'hex:make-url args)))) `((:key . ,(make-key channel-name)) TODO : refactor repo - full - name to not use GitHub specific code . (:full-name . ,(screenshotbot/github/pull-request-promoter::repo-full-name repo)) (:commit . ,(or (nullify (override-commit-hash run)) (recorder-run-commit run))) (:state . ,(ecase (check-status check) (:success "SUCCESSFUL") (:failure "FAILED") (:accepted "SUCCESSFUL") (:rejected "FAILED") (:pending "INPROGRESS") (:action_required "FAILED") (:action-required "FAILED"))) (:name . ,(format nil "Screenshots for ~a" channel-name)) (:url . ,(or (details-url check) (make-details-url 'run-page :id (oid run)))) (:description . ,(check-title check))))))
d3818b2d0f1c958485d77cc380cd456cc41cbe6004a7aff422a5120fa968bda8
JonyEpsilon/darwin
reproduction.clj
; This file is part of . ; Copyright ( C ) 2014- , Imperial College , London , All rights reserved . ; Contributors : ; Released under the MIT license .. ; (ns darwin.evolution.reproduction "The purpose of a reproduction step is to take a mating pool - a set of individuals that have somehow been selected from the population - and generate a new generation of the population. In the simplest case the mating pool is just the previous population, but in more complex algorithms it may also be made up from members of an archive etc. This implementation tracks the age of each individual. Individuals are represented by maps, which must have a :genotype key that contains the genetic material. The age will be tracked as an :age key on this map. It is permissible to store any other information you like on the individual maps, such as score information etc, but this will be destroyed in the reproduction step.") (defn- unary-genotype-op-with-age-tracking "Applies an operation to the genotype of an individual, generating a new individual. The :age key of the individual is carried through and incremented." [op individual] (let [new-genotype (op (:genotype individual)) new-age (inc (or (:age individual) 0))] {:genotype new-genotype :age new-age})) (defn- binary-genotype-op-with-age-tracking "Applies an operation to the genotypes of two individuals, generating a pair of new individuals. The :age key of the new individual is the age of the eldest parent plus one." [op i1 i2] (let [new-genotypes (op (:genotype i1) (:genotype i2)) new-age (inc (max (or (:age i1) 0) (or (:age i2) 0)))] [{:genotype (first new-genotypes) :age new-age} {:genotype (second new-genotypes) :age new-age}])) (defn- apply-unary-operation "Takes a unary operation, the operation the requested number of times. Gathers all of the generated children into a list which it returns." [op reps pool selector] (repeatedly reps #(unary-genotype-op-with-age-tracking op (selector pool)))) (defn- apply-binary-operation "Takes a unary operation, the operation the requested number of times. Gathers all of the generated children into a list which it returns." [op reps pool selector] (reduce into [] (repeatedly reps #(binary-genotype-op-with-age-tracking op (selector pool) (selector pool))))) (defn reproduce "Generates a population from a mating pool. The config contains a selector function which will be used to pull individuals from the pool. The list of operations that will be applied are also in the config, in the keys :unary-ops for ops that act on one individual and :binary-ops that act on two individuals. Each operation is specified as a function :op, a :count of how many times to apply this operation. Unary operations are expected to return one individual, and binary operations a sequence of two individuals. It is up to the user to make sure that the total number of individuals returned gives the correct population size. The operations should be functions that operate directly on genetic material: this function will take care of extracting the genetic material from individuals and rebuilding new individuals after reproduction. During this process it will keep track of the age of each individual." [config pool] (let [{:keys [selector unary-ops binary-ops]} config unary-results (map #(apply-unary-operation (:op %) (:repeat %) pool selector) unary-ops) binary-results (map #(apply-binary-operation (:op %) (:repeat %) pool selector) binary-ops)] (doall (reduce into [(reduce into [] unary-results) (reduce into [] binary-results)]))))
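The docstrings above spell out how ages propagate during reproduction: a child produced by a unary operator gets its parent's age plus one, and both children of a binary operator get one plus the elder parent's age. A sketch of just that bookkeeping, written in Haskell for consistency with the other examples in this collection (the names are illustrative, not darwin's API):

data Individual g = Individual { genotype :: g, age :: Int } deriving (Show, Eq)

-- Unary operator: one parent; the child is one generation older.
unaryChild :: (g -> g) -> Individual g -> Individual g
unaryChild op parent = Individual (op (genotype parent)) (age parent + 1)

-- Binary operator: two parents; both children take 1 + the elder parent's age.
binaryChildren :: (g -> g -> (g, g)) -> Individual g -> Individual g -> (Individual g, Individual g)
binaryChildren op p1 p2 = (Individual g1 a, Individual g2 a)
  where
    (g1, g2) = op (genotype p1) (genotype p2)
    a = 1 + max (age p1) (age p2)

main :: IO ()
main = do
  let mum = Individual "abcde" 2
      dad = Individual "vwxyz" 5
  print (unaryChild reverse mum)                              -- age 3
  print (binaryChildren (\x y -> (x ++ y, y ++ x)) mum dad)   -- both children age 6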
null
https://raw.githubusercontent.com/JonyEpsilon/darwin/2b27aa83ec0b7bbc37effed243bf92673de586ea/src/darwin/evolution/reproduction.clj
clojure
This file is part of . Copyright ( C ) 2014- , Imperial College , London , All rights reserved . Contributors : Released under the MIT license .. (ns darwin.evolution.reproduction "The purpose of a reproduction step is to take a mating pool - a set of individuals that have somehow been selected from the population - and generate a new generation of the population. In the simplest case the mating pool is just the previous population, but in more complex algorithms it may also be made up from members of an archive etc. This implementation tracks the age of each individual. Individuals are represented by maps, which must have a :genotype key that contains the genetic material. The age will be tracked as an :age key on this map. It is permissible to store any other information you like on the individual maps, such as score information etc, but this will be destroyed in the reproduction step.") (defn- unary-genotype-op-with-age-tracking "Applies an operation to the genotype of an individual, generating a new individual. The :age key of the individual is carried through and incremented." [op individual] (let [new-genotype (op (:genotype individual)) new-age (inc (or (:age individual) 0))] {:genotype new-genotype :age new-age})) (defn- binary-genotype-op-with-age-tracking "Applies an operation to the genotypes of two individuals, generating a pair of new individuals. The :age key of the new individual is the age of the eldest parent plus one." [op i1 i2] (let [new-genotypes (op (:genotype i1) (:genotype i2)) new-age (inc (max (or (:age i1) 0) (or (:age i2) 0)))] [{:genotype (first new-genotypes) :age new-age} {:genotype (second new-genotypes) :age new-age}])) (defn- apply-unary-operation "Takes a unary operation, the operation the requested number of times. Gathers all of the generated children into a list which it returns." [op reps pool selector] (repeatedly reps #(unary-genotype-op-with-age-tracking op (selector pool)))) (defn- apply-binary-operation "Takes a unary operation, the operation the requested number of times. Gathers all of the generated children into a list which it returns." [op reps pool selector] (reduce into [] (repeatedly reps #(binary-genotype-op-with-age-tracking op (selector pool) (selector pool))))) (defn reproduce "Generates a population from a mating pool. The config contains a selector function which will be used to pull individuals from the pool. The list of operations that will be applied are also in the config, in the keys :unary-ops for ops that act on one individual and :binary-ops that act on two individuals. Each operation is specified as a function :op, a :count of how many times to apply this operation. Unary operations are expected to return one individual, and binary operations a sequence of two individuals. It is up to the user to make sure that the total number of individuals returned gives the correct population size. The operations should be functions that operate directly on genetic material: this function will take care of extracting the genetic material from individuals and rebuilding new individuals after reproduction. During this process it will keep track of the age of each individual." [config pool] (let [{:keys [selector unary-ops binary-ops]} config unary-results (map #(apply-unary-operation (:op %) (:repeat %) pool selector) unary-ops) binary-results (map #(apply-binary-operation (:op %) (:repeat %) pool selector) binary-ops)] (doall (reduce into [(reduce into [] unary-results) (reduce into [] binary-results)]))))
dde36b71d58f88253afd75dd2dca8dfebada7030913c1bd589f12f8c7c6ccf6a
Decentralized-Pictures/T4L3NT
roll_storage_legacy.mli
(*****************************************************************************) (* *) (* Open Source License *) Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < > Copyright ( c ) 2019 AG < > (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) to deal in the Software without restriction , including without limitation (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) and/or sell copies of the Software , and to permit persons to whom the (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************) (** Basic roll manipulation. The storage related to roll (i.e. `Storage.Roll`) is not used outside of this module. And, this interface enforces the invariant that a roll is always either in the limbo list or owned by a delegate. *) type error += | (* `Permanent *) Consume_roll_change | (* `Permanent *) No_roll_for_delegate | (* `Permanent *) No_stake_snapshot_for_cycle of Cycle_repr.t | (* `Permanent *) Unregistered_delegate of Signature.Public_key_hash.t * [ fold ctxt f init ] folds [ f ] on the list of all rolls from [ Roll_repr.first ] to [ Storage . Next . Roll ] of the context [ ctxt ] . Only rolls which have owners are considered , rolls without owners are skipped . The first parameter of [ f ] is a roll [ r ] , the second parameter of [ f ] is the owner of [ r ] , and the last parameter is the initial value of the accumulator . [fold ctxt f init] folds [f] on the list of all rolls from [Roll_repr.first] to [Storage.Next.Roll] of the context [ctxt]. Only rolls which have owners are considered, rolls without owners are skipped. The first parameter of [f] is a roll [r], the second parameter of [f] is the owner of [r], and the last parameter is the initial value of the accumulator. *) val fold : Raw_context.t -> f:(Roll_repr_legacy.roll -> Signature.Public_key.t -> 'a -> 'a tzresult Lwt.t) -> 'a -> 'a tzresult Lwt.t module Delegate : sig val is_inactive : Raw_context.t -> Signature.Public_key_hash.t -> bool tzresult Lwt.t * [ add_amount am ] performs the following actions : 1 . if the delegate [ ] is inactive , increase its change [ chg ] by [ am ] , 2 . if the [ dlg ] is active , update [ ] 's number of rolls [ nr ] , and change [ chg ] so that [ ] 's number of tokens is increased by [ am ] , and equal to [ nr * tokens_per_roll + chg ] , where [ chg < tokens_per_roll ] . [add_amount ctxt dlg am] performs the following actions: 1. if the delegate [dlg] is inactive, increase its change [chg] by [am], 2. if the [dlg] is active, update [dlg]'s number of rolls [nr], and change [chg] so that [dlg]'s number of tokens is increased by [am], and equal to [nr * tokens_per_roll + chg], where [chg < tokens_per_roll]. 
*) val add_amount : Raw_context.t -> Signature.Public_key_hash.t -> Tez_repr.t -> Raw_context.t tzresult Lwt.t * [ remove_amount am ] performs the following actions : 1 . if the delegate [ ] is inactive , decrease its change [ chg ] by [ am ] , 2 . if the [ dlg ] is active , update [ ] 's number of rolls [ nr ] , and change [ chg ] so that [ ] 's number of tokens is decreased by [ am ] , and equal to [ nr * tokens_per_roll + chg ] , where [ chg < tokens_per_roll ] . [remove_amount ctxt dlg am] performs the following actions: 1. if the delegate [dlg] is inactive, decrease its change [chg] by [am], 2. if the [dlg] is active, update [dlg]'s number of rolls [nr], and change [chg] so that [dlg]'s number of tokens is decreased by [am], and equal to [nr * tokens_per_roll + chg], where [chg < tokens_per_roll]. *) val remove_amount : Raw_context.t -> Signature.Public_key_hash.t -> Tez_repr.t -> Raw_context.t tzresult Lwt.t * [ ] renders delegate [ ] inactive and performs the following actions : 1 . empty the list of rolls of [ ] , 2 . increase the change of [ ] by [ nr * tokens_per_roll ] , where [ nr ] is [ ] 's number of rolls prior to inactivation . [set_inactive ctxt dlg] renders delegate [dlg] inactive and performs the following actions: 1. empty the list of rolls of [dlg], 2. increase the change of [dlg] by [nr * tokens_per_roll], where [nr] is [dlg]'s number of rolls prior to inactivation. *) val set_inactive : Raw_context.t -> Signature.Public_key_hash.t -> Raw_context.t tzresult Lwt.t * If the delegate [ ] is already active then [ ] performs the following sequence of actions : 1 . if the delegate is not scheduled to become inactive , then schedule the delegate to become inactive after [ ( preserved_cycles * 2 ) + 1 ] cycles , 2 . if the delegate is already scheduled to become inactive at cycle [ ic ] , then re - schedule it to become inactive at cycle [ max ic ( cc + preserved_cycles + 1 ) ] , where [ cc ] is the current cycle . If [ ] is inactive then this function puts [ ] in active state and performs the following actions : 1 . if [ ] is not scheduled to become inactive , schedule [ ] to become inactive after [ ( preserved_cycles * 2 ) + 1 ] cycles , 2 . if the [ dlg ] is already scheduled to become inactive at cycle [ ic ] , then re - schedule it to become inactive at cycle [ max ic ( cc + ( preserved_cycles * 2 ) + 1 ) ] , where [ cc ] is the current cycle , 3 . dispatch [ ] 's change [ chg ] into [ nr ] rolls of size [ tokens_per_roll ] so that the total amount managed by [ ] is unchanged and equal to [ ( nr * tokens_per_roll ) + chg ] , where [ chg < tokens_per_roll ] . If the delegate [dlg] is already active then [set_active ctxt dlg] performs the following sequence of actions: 1. if the delegate is not scheduled to become inactive, then schedule the delegate to become inactive after [(preserved_cycles * 2) + 1] cycles, 2. if the delegate is already scheduled to become inactive at cycle [ic], then re-schedule it to become inactive at cycle [max ic (cc + preserved_cycles + 1)], where [cc] is the current cycle. If [dlg] is inactive then this function puts [dlg] in active state and performs the following actions: 1. if [dlg] is not scheduled to become inactive, schedule [dlg] to become inactive after [(preserved_cycles * 2) + 1] cycles, 2. if the [dlg] is already scheduled to become inactive at cycle [ic], then re-schedule it to become inactive at cycle [max ic (cc + (preserved_cycles * 2) + 1)], where [cc] is the current cycle, 3. 
dispatch [dlg]'s change [chg] into [nr] rolls of size [tokens_per_roll] so that the total amount managed by [dlg] is unchanged and equal to [(nr * tokens_per_roll) + chg], where [chg < tokens_per_roll]. *) val set_active : Raw_context.t -> Signature.Public_key_hash.t -> Raw_context.t tzresult Lwt.t end module Contract : sig * Calls [ contract am ] if a delegate is associated to [ contract ] , or returns unchanged [ ctxt ] otherwise . Calls [Delegate.add_amount ctxt contract am] if a delegate is associated to [contract], or returns unchanged [ctxt] otherwise. *) val add_amount : Raw_context.t -> Contract_repr.t -> Tez_repr.t -> Raw_context.t tzresult Lwt.t * Calls [ contract am ] if a delegate is associated to [ contract ] , or returns unchanged [ ctxt ] otherwise . Calls [Delegate.remove_amount ctxt contract am] if a delegate is associated to [contract], or returns unchanged [ctxt] otherwise. *) val remove_amount : Raw_context.t -> Contract_repr.t -> Tez_repr.t -> Raw_context.t tzresult Lwt.t end * [ delegate_pubkey delegate ] returns the public key of [ delegate ] found in context [ ctxt ] if there exists a registered contract . [delegate_pubkey ctxt delegate] returns the public key of [delegate] found in context [ctxt] if there exists a registered contract. *) val delegate_pubkey : Raw_context.t -> Signature.Public_key_hash.t -> Signature.Public_key.t tzresult Lwt.t * [ get_change ctxt delegate ] returns the amount of change held by [ delegate ] in context [ ctxt ] . The change is the part of the staking balance of a delegate that is not part of a roll , i.e. , the amount of staking balance ( smaller than the value of a roll ) not being taken into account for baking rights computation . [get_change ctxt delegate] returns the amount of change held by [delegate] in context [ctxt]. The change is the part of the staking balance of a delegate that is not part of a roll, i.e., the amount of staking balance (smaller than the value of a roll) not being taken into account for baking rights computation. *) val get_change : Raw_context.t -> Signature.Public_key_hash.t -> Tez_repr.t tzresult Lwt.t * [ get_contract_delegate contract ] returns the public key hash of the delegate whose contract is [ contract ] in context [ ctxt ] . [get_contract_delegate ctxt contract] returns the public key hash of the delegate whose contract is [contract] in context [ctxt]. *) val get_contract_delegate : Raw_context.t -> Contract_repr.t -> Signature.Public_key_hash.t option tzresult Lwt.t
null
https://raw.githubusercontent.com/Decentralized-Pictures/T4L3NT/6d4d3edb2d73575384282ad5a633518cba3d29e3/src/proto_012_Psithaca/lib_protocol/roll_storage_legacy.mli
ocaml
*************************************************************************** Open Source License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), the rights to use, copy, modify, merge, publish, distribute, sublicense, Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *************************************************************************** * Basic roll manipulation. The storage related to roll (i.e. `Storage.Roll`) is not used outside of this module. And, this interface enforces the invariant that a roll is always either in the limbo list or owned by a delegate. `Permanent `Permanent `Permanent `Permanent
Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < > Copyright ( c ) 2019 AG < > to deal in the Software without restriction , including without limitation and/or sell copies of the Software , and to permit persons to whom the THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING type error += * [ fold ctxt f init ] folds [ f ] on the list of all rolls from [ Roll_repr.first ] to [ Storage . Next . Roll ] of the context [ ctxt ] . Only rolls which have owners are considered , rolls without owners are skipped . The first parameter of [ f ] is a roll [ r ] , the second parameter of [ f ] is the owner of [ r ] , and the last parameter is the initial value of the accumulator . [fold ctxt f init] folds [f] on the list of all rolls from [Roll_repr.first] to [Storage.Next.Roll] of the context [ctxt]. Only rolls which have owners are considered, rolls without owners are skipped. The first parameter of [f] is a roll [r], the second parameter of [f] is the owner of [r], and the last parameter is the initial value of the accumulator. *) val fold : Raw_context.t -> f:(Roll_repr_legacy.roll -> Signature.Public_key.t -> 'a -> 'a tzresult Lwt.t) -> 'a -> 'a tzresult Lwt.t module Delegate : sig val is_inactive : Raw_context.t -> Signature.Public_key_hash.t -> bool tzresult Lwt.t * [ add_amount am ] performs the following actions : 1 . if the delegate [ ] is inactive , increase its change [ chg ] by [ am ] , 2 . if the [ dlg ] is active , update [ ] 's number of rolls [ nr ] , and change [ chg ] so that [ ] 's number of tokens is increased by [ am ] , and equal to [ nr * tokens_per_roll + chg ] , where [ chg < tokens_per_roll ] . [add_amount ctxt dlg am] performs the following actions: 1. if the delegate [dlg] is inactive, increase its change [chg] by [am], 2. if the [dlg] is active, update [dlg]'s number of rolls [nr], and change [chg] so that [dlg]'s number of tokens is increased by [am], and equal to [nr * tokens_per_roll + chg], where [chg < tokens_per_roll]. *) val add_amount : Raw_context.t -> Signature.Public_key_hash.t -> Tez_repr.t -> Raw_context.t tzresult Lwt.t * [ remove_amount am ] performs the following actions : 1 . if the delegate [ ] is inactive , decrease its change [ chg ] by [ am ] , 2 . if the [ dlg ] is active , update [ ] 's number of rolls [ nr ] , and change [ chg ] so that [ ] 's number of tokens is decreased by [ am ] , and equal to [ nr * tokens_per_roll + chg ] , where [ chg < tokens_per_roll ] . [remove_amount ctxt dlg am] performs the following actions: 1. if the delegate [dlg] is inactive, decrease its change [chg] by [am], 2. if the [dlg] is active, update [dlg]'s number of rolls [nr], and change [chg] so that [dlg]'s number of tokens is decreased by [am], and equal to [nr * tokens_per_roll + chg], where [chg < tokens_per_roll]. *) val remove_amount : Raw_context.t -> Signature.Public_key_hash.t -> Tez_repr.t -> Raw_context.t tzresult Lwt.t * [ ] renders delegate [ ] inactive and performs the following actions : 1 . empty the list of rolls of [ ] , 2 . increase the change of [ ] by [ nr * tokens_per_roll ] , where [ nr ] is [ ] 's number of rolls prior to inactivation . [set_inactive ctxt dlg] renders delegate [dlg] inactive and performs the following actions: 1. empty the list of rolls of [dlg], 2. increase the change of [dlg] by [nr * tokens_per_roll], where [nr] is [dlg]'s number of rolls prior to inactivation. 
*) val set_inactive : Raw_context.t -> Signature.Public_key_hash.t -> Raw_context.t tzresult Lwt.t * If the delegate [ ] is already active then [ ] performs the following sequence of actions : 1 . if the delegate is not scheduled to become inactive , then schedule the delegate to become inactive after [ ( preserved_cycles * 2 ) + 1 ] cycles , 2 . if the delegate is already scheduled to become inactive at cycle [ ic ] , then re - schedule it to become inactive at cycle [ max ic ( cc + preserved_cycles + 1 ) ] , where [ cc ] is the current cycle . If [ ] is inactive then this function puts [ ] in active state and performs the following actions : 1 . if [ ] is not scheduled to become inactive , schedule [ ] to become inactive after [ ( preserved_cycles * 2 ) + 1 ] cycles , 2 . if the [ dlg ] is already scheduled to become inactive at cycle [ ic ] , then re - schedule it to become inactive at cycle [ max ic ( cc + ( preserved_cycles * 2 ) + 1 ) ] , where [ cc ] is the current cycle , 3 . dispatch [ ] 's change [ chg ] into [ nr ] rolls of size [ tokens_per_roll ] so that the total amount managed by [ ] is unchanged and equal to [ ( nr * tokens_per_roll ) + chg ] , where [ chg < tokens_per_roll ] . If the delegate [dlg] is already active then [set_active ctxt dlg] performs the following sequence of actions: 1. if the delegate is not scheduled to become inactive, then schedule the delegate to become inactive after [(preserved_cycles * 2) + 1] cycles, 2. if the delegate is already scheduled to become inactive at cycle [ic], then re-schedule it to become inactive at cycle [max ic (cc + preserved_cycles + 1)], where [cc] is the current cycle. If [dlg] is inactive then this function puts [dlg] in active state and performs the following actions: 1. if [dlg] is not scheduled to become inactive, schedule [dlg] to become inactive after [(preserved_cycles * 2) + 1] cycles, 2. if the [dlg] is already scheduled to become inactive at cycle [ic], then re-schedule it to become inactive at cycle [max ic (cc + (preserved_cycles * 2) + 1)], where [cc] is the current cycle, 3. dispatch [dlg]'s change [chg] into [nr] rolls of size [tokens_per_roll] so that the total amount managed by [dlg] is unchanged and equal to [(nr * tokens_per_roll) + chg], where [chg < tokens_per_roll]. *) val set_active : Raw_context.t -> Signature.Public_key_hash.t -> Raw_context.t tzresult Lwt.t end module Contract : sig * Calls [ contract am ] if a delegate is associated to [ contract ] , or returns unchanged [ ctxt ] otherwise . Calls [Delegate.add_amount ctxt contract am] if a delegate is associated to [contract], or returns unchanged [ctxt] otherwise. *) val add_amount : Raw_context.t -> Contract_repr.t -> Tez_repr.t -> Raw_context.t tzresult Lwt.t * Calls [ contract am ] if a delegate is associated to [ contract ] , or returns unchanged [ ctxt ] otherwise . Calls [Delegate.remove_amount ctxt contract am] if a delegate is associated to [contract], or returns unchanged [ctxt] otherwise. *) val remove_amount : Raw_context.t -> Contract_repr.t -> Tez_repr.t -> Raw_context.t tzresult Lwt.t end * [ delegate_pubkey delegate ] returns the public key of [ delegate ] found in context [ ctxt ] if there exists a registered contract . [delegate_pubkey ctxt delegate] returns the public key of [delegate] found in context [ctxt] if there exists a registered contract. 
*) val delegate_pubkey : Raw_context.t -> Signature.Public_key_hash.t -> Signature.Public_key.t tzresult Lwt.t * [ get_change ctxt delegate ] returns the amount of change held by [ delegate ] in context [ ctxt ] . The change is the part of the staking balance of a delegate that is not part of a roll , i.e. , the amount of staking balance ( smaller than the value of a roll ) not being taken into account for baking rights computation . [get_change ctxt delegate] returns the amount of change held by [delegate] in context [ctxt]. The change is the part of the staking balance of a delegate that is not part of a roll, i.e., the amount of staking balance (smaller than the value of a roll) not being taken into account for baking rights computation. *) val get_change : Raw_context.t -> Signature.Public_key_hash.t -> Tez_repr.t tzresult Lwt.t * [ get_contract_delegate contract ] returns the public key hash of the delegate whose contract is [ contract ] in context [ ctxt ] . [get_contract_delegate ctxt contract] returns the public key hash of the delegate whose contract is [contract] in context [ctxt]. *) val get_contract_delegate : Raw_context.t -> Contract_repr.t -> Signature.Public_key_hash.t option tzresult Lwt.t
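The add_amount, remove_amount and set_active documentation in this record all revolves around one invariant: an active delegate's tokens are kept as nr * tokens_per_roll + chg with chg < tokens_per_roll. A quick sketch of that arithmetic, written in Haskell rather than the protocol's OCaml, with a made-up tokens_per_roll value:

tokensPerRoll :: Integer
tokensPerRoll = 8000   -- hypothetical value, for illustration only

-- split a token balance into (number of rolls, change), so that
-- tokens == nr * tokensPerRoll + chg and chg < tokensPerRoll
splitIntoRolls :: Integer -> (Integer, Integer)
splitIntoRolls tokens = tokens `divMod` tokensPerRoll

main :: IO ()
main = do
  let (nr, chg) = splitIntoRolls 25000
  print (nr, chg)                             -- (3,1000)
  print (nr * tokensPerRoll + chg == 25000)   -- True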
b60c4b6132ccd305676d7ed3ead583f20e7d695a707ad973c3da2a7935cf20c0
haskell-foundation/foundation
Collection.hs
-- | -- Module : Foundation.Collection.Collection -- License : BSD-style -- Maintainer : Foundation -- Stability : experimental -- Portability : portable -- -- Provide basic collection information. It's difficult to provide a -- unified interface to all sorts of collection, but when creating this -- API we had the following types in mind: -- -- * List (e.g [a]) -- * Array -- * Collection of collection (e.g. deque) -- * Hashtables, Trees -- -- an API to rules them all, and in the darkness bind them. -- # LANGUAGE FlexibleContexts # # LANGUAGE ExistentialQuantification # # LANGUAGE StandaloneDeriving # # LANGUAGE GeneralizedNewtypeDeriving # # LANGUAGE TypeOperators # module Foundation.Collection.Collection ( Collection(..) * NonEmpty Property , NonEmpty , getNonEmpty , nonEmpty , nonEmpty_ , nonEmptyFmap , and , or ) where import Basement.Compat.Base hiding (and) import Basement.Types.OffsetSize import Basement.Types.AsciiString import Basement.Exception (NonEmptyCollectionIsEmpty(..)) import Foundation.Collection.Element import Basement.NonEmpty import qualified Data.List import qualified Basement.Block as BLK import qualified Basement.UArray as UV import qualified Basement.BoxedArray as BA import qualified Basement.String as S | Smart constructor to create a NonEmpty collection -- -- If the collection is empty, then Nothing is returned Otherwise , the collection is wrapped in the NonEmpty property nonEmpty :: Collection c => c -> Maybe (NonEmpty c) nonEmpty c | null c = Nothing | otherwise = Just (NonEmpty c) | same as ' nonEmpty ' , but assume that the collection is non empty , -- and return an asynchronous error if it is. nonEmpty_ :: Collection c => c -> NonEmpty c nonEmpty_ c | null c = throw NonEmptyCollectionIsEmpty | otherwise = NonEmpty c nonEmptyFmap :: Functor f => (a -> b) -> NonEmpty (f a) -> NonEmpty (f b) nonEmptyFmap f (NonEmpty l) = NonEmpty (fmap f l) -- | A set of methods for ordered colection class (IsList c, Item c ~ Element c) => Collection c where # MINIMAL null , length , ( elem | notElem ) , minimum , maximum , all , any # -- | Check if a collection is empty null :: c -> Bool -- | Length of a collection (number of Element c) length :: c -> CountOf (Element c) -- | Check if a collection contains a specific element -- -- This is the inverse of `notElem`. elem :: forall a . (Eq a, a ~ Element c) => Element c -> c -> Bool elem e col = not $ e `notElem` col -- | Check if a collection does *not* contain a specific element -- -- This is the inverse of `elem`. notElem :: forall a . (Eq a, a ~ Element c) => Element c -> c -> Bool notElem e col = not $ e `elem` col -- | Get the maximum element of a collection maximum :: forall a . (Ord a, a ~ Element c) => NonEmpty c -> Element c -- | Get the minimum element of a collection minimum :: forall a . (Ord a, a ~ Element c) => NonEmpty c -> Element c -- | Determine is any elements of the collection satisfy the predicate any :: (Element c -> Bool) -> c -> Bool -- | Determine is all elements of the collection satisfy the predicate all :: (Element c -> Bool) -> c -> Bool instance Collection [a] where null = Data.List.null length = CountOf . Data.List.length elem = Data.List.elem notElem = Data.List.notElem minimum = Data.List.minimum . getNonEmpty maximum = Data.List.maximum . getNonEmpty any = Data.List.any all = Data.List.all instance UV.PrimType ty => Collection (BLK.Block ty) where null = (==) 0 . 
BLK.length length = BLK.length elem = BLK.elem minimum = BLK.foldl1' min maximum = BLK.foldl1' max all = BLK.all any = BLK.any instance UV.PrimType ty => Collection (UV.UArray ty) where null = UV.null length = UV.length elem = UV.elem minimum = UV.foldl1' min maximum = UV.foldl1' max all = UV.all any = UV.any instance Collection (BA.Array ty) where null = BA.null length = BA.length elem = BA.elem minimum = BA.foldl1' min maximum = BA.foldl1' max all = BA.all any = BA.any deriving instance Collection AsciiString instance Collection S.String where null = S.null length = S.length elem = S.elem TODO faster implementation TODO faster implementation all = S.all any = S.any instance Collection c => Collection (NonEmpty c) where null _ = False length = length . getNonEmpty elem e = elem e . getNonEmpty maximum = maximum . getNonEmpty minimum = minimum . getNonEmpty all p = all p . getNonEmpty any p = any p . getNonEmpty -- | Return True if all the elements in the collection are True and :: (Collection col, Element col ~ Bool) => col -> Bool and = all (== True) | Return True if at least one element in the collection is True or :: (Collection col, Element col ~ Bool) => col -> Bool or = any (== True)
null
https://raw.githubusercontent.com/haskell-foundation/foundation/39985b94b4de4d02e8decb5e378b53ad3f72c0cc/foundation/Foundation/Collection/Collection.hs
haskell
| Module : Foundation.Collection.Collection License : BSD-style Maintainer : Foundation Stability : experimental Portability : portable Provide basic collection information. It's difficult to provide a unified interface to all sorts of collection, but when creating this API we had the following types in mind: * List (e.g [a]) * Array * Collection of collection (e.g. deque) * Hashtables, Trees an API to rules them all, and in the darkness bind them. If the collection is empty, then Nothing is returned and return an asynchronous error if it is. | A set of methods for ordered colection | Check if a collection is empty | Length of a collection (number of Element c) | Check if a collection contains a specific element This is the inverse of `notElem`. | Check if a collection does *not* contain a specific element This is the inverse of `elem`. | Get the maximum element of a collection | Get the minimum element of a collection | Determine is any elements of the collection satisfy the predicate | Determine is all elements of the collection satisfy the predicate | Return True if all the elements in the collection are True
# LANGUAGE FlexibleContexts # # LANGUAGE ExistentialQuantification # # LANGUAGE StandaloneDeriving # # LANGUAGE GeneralizedNewtypeDeriving # # LANGUAGE TypeOperators # module Foundation.Collection.Collection ( Collection(..) * NonEmpty Property , NonEmpty , getNonEmpty , nonEmpty , nonEmpty_ , nonEmptyFmap , and , or ) where import Basement.Compat.Base hiding (and) import Basement.Types.OffsetSize import Basement.Types.AsciiString import Basement.Exception (NonEmptyCollectionIsEmpty(..)) import Foundation.Collection.Element import Basement.NonEmpty import qualified Data.List import qualified Basement.Block as BLK import qualified Basement.UArray as UV import qualified Basement.BoxedArray as BA import qualified Basement.String as S | Smart constructor to create a NonEmpty collection Otherwise , the collection is wrapped in the NonEmpty property nonEmpty :: Collection c => c -> Maybe (NonEmpty c) nonEmpty c | null c = Nothing | otherwise = Just (NonEmpty c) | same as ' nonEmpty ' , but assume that the collection is non empty , nonEmpty_ :: Collection c => c -> NonEmpty c nonEmpty_ c | null c = throw NonEmptyCollectionIsEmpty | otherwise = NonEmpty c nonEmptyFmap :: Functor f => (a -> b) -> NonEmpty (f a) -> NonEmpty (f b) nonEmptyFmap f (NonEmpty l) = NonEmpty (fmap f l) class (IsList c, Item c ~ Element c) => Collection c where # MINIMAL null , length , ( elem | notElem ) , minimum , maximum , all , any # null :: c -> Bool length :: c -> CountOf (Element c) elem :: forall a . (Eq a, a ~ Element c) => Element c -> c -> Bool elem e col = not $ e `notElem` col notElem :: forall a . (Eq a, a ~ Element c) => Element c -> c -> Bool notElem e col = not $ e `elem` col maximum :: forall a . (Ord a, a ~ Element c) => NonEmpty c -> Element c minimum :: forall a . (Ord a, a ~ Element c) => NonEmpty c -> Element c any :: (Element c -> Bool) -> c -> Bool all :: (Element c -> Bool) -> c -> Bool instance Collection [a] where null = Data.List.null length = CountOf . Data.List.length elem = Data.List.elem notElem = Data.List.notElem minimum = Data.List.minimum . getNonEmpty maximum = Data.List.maximum . getNonEmpty any = Data.List.any all = Data.List.all instance UV.PrimType ty => Collection (BLK.Block ty) where null = (==) 0 . BLK.length length = BLK.length elem = BLK.elem minimum = BLK.foldl1' min maximum = BLK.foldl1' max all = BLK.all any = BLK.any instance UV.PrimType ty => Collection (UV.UArray ty) where null = UV.null length = UV.length elem = UV.elem minimum = UV.foldl1' min maximum = UV.foldl1' max all = UV.all any = UV.any instance Collection (BA.Array ty) where null = BA.null length = BA.length elem = BA.elem minimum = BA.foldl1' min maximum = BA.foldl1' max all = BA.all any = BA.any deriving instance Collection AsciiString instance Collection S.String where null = S.null length = S.length elem = S.elem TODO faster implementation TODO faster implementation all = S.all any = S.any instance Collection c => Collection (NonEmpty c) where null _ = False length = length . getNonEmpty elem e = elem e . getNonEmpty maximum = maximum . getNonEmpty minimum = minimum . getNonEmpty all p = all p . getNonEmpty any p = any p . getNonEmpty and :: (Collection col, Element col ~ Bool) => col -> Bool and = all (== True) | Return True if at least one element in the collection is True or :: (Collection col, Element col ~ Bool) => col -> Bool or = any (== True)
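A small usage sketch for the Collection class shown in this record, assuming the foundation package (which provides Foundation.Collection.Collection) is available as a dependency; the test values are made up.

import qualified Foundation.Collection.Collection as C

main :: IO ()
main = do
  print (C.null ([] :: [Int]))               -- True
  print (C.elem 3 [1, 2, 3 :: Int])          -- True
  print (C.and [True, True], C.or [False])   -- (True,False)
  case C.nonEmpty [1, 2, 3 :: Int] of
    Nothing -> putStrLn "empty"
    Just ne -> print (C.maximum ne, C.minimum ne)  -- (3,1)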
935c4fdd1c475191bd2830124039088fdbebecbb54bbfa6b51f7ecc57e382acd
hyperfiddle/electric
missionary_contrib.cljc
(ns contrib.missionary-contrib "staging area, to be considered for missionary inclusion?" (:require [clojure.core.async :as a] [missionary.core :as m] [hyperfiddle.rcf :refer [tests]]) (:import (missionary Cancelled))) (defn iterator-consumer "blocking iterable pattern" [^java.util.Iterator it] why not one thread tied to the iterator extent ? ( future ( while ( .hasNext it ) ( ! ( .next it ) ) ) ) (m/ap (loop [] (if (m/? (m/via m/blk (.hasNext it))) (m/amb (m/? (m/via m/blk (.next it))) (recur)) (m/amb))))) (defn seq-consumer [xs] ; xs is iterable (m/ap (loop [xs xs] (if (m/? (m/via m/blk (seq xs))) (m/amb (m/? (m/via m/blk (first xs))) (recur (rest xs))) (m/amb))))) #?(:clj (tests (def !it (.iterator (.keySet (java.lang.System/getProperties)))) (->> (iterator-consumer !it) (m/eduction (take 3)) (m/reduce conj []) m/?) := ["java.specification.version" "sun.jnu.encoding" "java.class.path"] careful , Java iterator is stateful (def xs (iterator-seq (.iterator (.keySet (java.lang.System/getProperties))))) (take 3 xs) := ["java.specification.version" "sun.jnu.encoding" "java.class.path"] (->> (seq-consumer xs) (m/eduction (take 3)) (m/reduce conj []) m/?) := ["java.specification.version" "sun.jnu.encoding" "java.class.path"])) ; Core.async interop (defn poll-task "run task (or mbox) repeatedly, producing a stream of results" [task] (m/ap (loop [v (m/? task)] (m/amb v (recur (m/? task)))))) (defn chan-read! "Return a task taking one value from `chan`. Return nil if chan is closed. Does not close chan, and when cancelled stops waiting for chan." ([chan] (chan-read! chan (Cancelled.))) ([chan cancelled-value] a task is a 2 - args function , success and failure are callbacks . (let [cancel-chan (a/chan)] ; we will put a value on this chan to cancel reading from `chan` race two chans (if (= port cancel-chan) ; if the winning chan is the cancelation one, then task has been cancelled (failure cancelled-value) ; task has been cancelled, must produce a failure state (success v)))) ; complete task with value from chan (fn cancel [] ;; if this task is cancelled by its parent process, close the cancel-chan ;; which will make cancel-chan produce `nil` and cause cancellation of read on `chan`. (a/close! cancel-chan)))))) (defn chan->ap "Adapt a core.async channel to a discrete flow" [ch] (m/ap (loop [] wait for one value , nil means channel closed . We successfully read a non - nil value , we use ` m / amb ` with two branches . m / amb will fork the current process ( ap ) and do two things sequentially , in two branches : - return x , meaning ` loop ` ends and return x , will produce x ;; - recur to read the next value from chan (m/amb x (recur)) ;; nil means the channel has been closed, so terminate this flow without producing any value ;; (not even nil). We use (m/amb) which produces nothing and terminates immediately. The ;; parent m/ap block has nothing to produce anymore and will also terminate. (m/amb))))) (defn chan->task [ch] ; for streaming database results into a vector at the repl (which is not great) (->> (chan->ap ch) (m/reduce into []))) ( defn chan->cp [ ch ] ( - > > ( chan->ap ch ) ( m / reductions into [ ] ) ) ) ; is this useful ? Channels are discrete
null
https://raw.githubusercontent.com/hyperfiddle/electric/1c6c3891cbf13123fef8d33e6555d300f0dac134/src/contrib/missionary_contrib.cljc
clojure
xs is iterable Core.async interop we will put a value on this chan to cancel reading from `chan` if the winning chan is the cancelation one, then task has been cancelled task has been cancelled, must produce a failure state complete task with value from chan if this task is cancelled by its parent process, close the cancel-chan which will make cancel-chan produce `nil` and cause cancellation of read on `chan`. - recur to read the next value from chan nil means the channel has been closed, so terminate this flow without producing any value (not even nil). We use (m/amb) which produces nothing and terminates immediately. The parent m/ap block has nothing to produce anymore and will also terminate. for streaming database results into a vector at the repl (which is not great) is this useful ? Channels are discrete
(ns contrib.missionary-contrib "staging area, to be considered for missionary inclusion?" (:require [clojure.core.async :as a] [missionary.core :as m] [hyperfiddle.rcf :refer [tests]]) (:import (missionary Cancelled))) (defn iterator-consumer "blocking iterable pattern" [^java.util.Iterator it] why not one thread tied to the iterator extent ? ( future ( while ( .hasNext it ) ( ! ( .next it ) ) ) ) (m/ap (loop [] (if (m/? (m/via m/blk (.hasNext it))) (m/amb (m/? (m/via m/blk (.next it))) (recur)) (m/amb))))) (m/ap (loop [xs xs] (if (m/? (m/via m/blk (seq xs))) (m/amb (m/? (m/via m/blk (first xs))) (recur (rest xs))) (m/amb))))) #?(:clj (tests (def !it (.iterator (.keySet (java.lang.System/getProperties)))) (->> (iterator-consumer !it) (m/eduction (take 3)) (m/reduce conj []) m/?) := ["java.specification.version" "sun.jnu.encoding" "java.class.path"] careful , Java iterator is stateful (def xs (iterator-seq (.iterator (.keySet (java.lang.System/getProperties))))) (take 3 xs) := ["java.specification.version" "sun.jnu.encoding" "java.class.path"] (->> (seq-consumer xs) (m/eduction (take 3)) (m/reduce conj []) m/?) := ["java.specification.version" "sun.jnu.encoding" "java.class.path"])) (defn poll-task "run task (or mbox) repeatedly, producing a stream of results" [task] (m/ap (loop [v (m/? task)] (m/amb v (recur (m/? task)))))) (defn chan-read! "Return a task taking one value from `chan`. Return nil if chan is closed. Does not close chan, and when cancelled stops waiting for chan." ([chan] (chan-read! chan (Cancelled.))) ([chan cancelled-value] a task is a 2 - args function , success and failure are callbacks . race two chans (fn cancel [] (a/close! cancel-chan)))))) (defn chan->ap "Adapt a core.async channel to a discrete flow" [ch] (m/ap (loop [] wait for one value , nil means channel closed . We successfully read a non - nil value , we use ` m / amb ` with two branches . m / amb will fork the current process ( ap ) and do two things sequentially , in two branches : - return x , meaning ` loop ` ends and return x , will produce x (m/amb x (recur)) (m/amb))))) (defn chan->task [ch] (->> (chan->ap ch) (m/reduce into [])))
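chan-read! in this record implements cancellation by racing the data channel against a dedicated cancel channel. A rough analogue of that racing idea, written in Haskell with STM (illustration only, not part of this library): orElse takes whichever of the two reads can currently complete, trying the data queue first.

import Control.Concurrent.STM

-- try the data queue first; if a cancellation token is available instead,
-- return Nothing rather than keep waiting for data
readOrCancel :: TQueue a -> TQueue () -> STM (Maybe a)
readOrCancel dataQ cancelQ =
  (Just <$> readTQueue dataQ) `orElse` (Nothing <$ readTQueue cancelQ)

main :: IO ()
main = do
  dataQ   <- newTQueueIO :: IO (TQueue Int)
  cancelQ <- newTQueueIO
  atomically (writeTQueue cancelQ ())   -- simulate the cancellation side winning
  r <- atomically (readOrCancel dataQ cancelQ)
  print r                               -- Nothing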
2e18378f04fea460c414822bed62f3d10e6acee010fc9b280c84ca6dd084580a
senapk/funcional_arcade
main.hs
import Data.List pertence x xs = not $ null [ y | y <- xs , y == x ] -- pertence x xs = not $ null $ filter (== x) xs -- pertence _ [] = False pertence x ( y : xs ) = x == y || pertence x xs -- pertence x xs = foldl fn False xs -- where fn y z = y || z == x
null
https://raw.githubusercontent.com/senapk/funcional_arcade/89625b4559b3e590d88dd70a27b6cbdc07cc2988/base/015/main.hs
haskell
pertence x xs = not $ null $ filter (== x) xs pertence _ [] = False pertence x xs = foldl fn False xs where fn y z = y || z == x
import Data.List pertence x xs = not $ null [ y | y <- xs , y == x ] pertence x ( y : xs ) = x == y || pertence x xs
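The file in this record keeps several equivalent definitions of pertence (list membership). The following self-contained Haskell module restates three of them, renamed so they can coexist, plus a quick check:

pertence1, pertence2, pertence3 :: Eq a => a -> [a] -> Bool
pertence1 x xs = not (null [y | y <- xs, y == x])          -- list comprehension
pertence2 x xs = not (null (filter (== x) xs))             -- filter
pertence3 x xs = foldl (\acc y -> acc || y == x) False xs  -- fold

main :: IO ()
main = print (pertence1 3 [1, 2, 3], pertence2 0 [1, 2, 3], pertence3 2 [1, 2, 3])
-- (True,False,True)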
19988e3773ccc35cccb5ad24168692b5bb150b96a46fe484a711446a7b9d7fd4
dimitri/pgloader
retry.lisp
;;; Test cases for issue ;;; ;;; #| CREATE TABLE `retry` ( `id` int(10) unsigned NOT NULL AUTO_INCREMENT, `content` text, PRIMARY KEY (`id`) ); |# (defpackage #:pgloader.test.retry (:use #:cl #:pgloader.params #:pgloader.mysql) (:export #:produce-data)) (in-package #:pgloader.test.retry) (defvar *inject-null-bytes* (coerce (loop for previous = 0 then (+ previous offset) for offset in '(15769 54 7 270 8752) collect (+ previous offset)) 'vector) "Line numbers in the batch where to inject erroneous data.") (defvar *string-with-null-byte* (concatenate 'string "Hello" (list #\Nul) "World!")) (defvar *random-string* (make-string (random 42) :initial-element #\a) "A random string.") (defvar *query* "INSERT INTO `~a`(`content`) VALUES ('~a')") (defun produce-data (&key (*myconn-host* *myconn-host*) (*myconn-port* *myconn-port*) (*myconn-user* *myconn-user*) (*myconn-pass* *myconn-pass*) (dbname "retry") (table-name "retry") (rows 150000)) "Produce a data set that looks like the one in issue #22." (with-mysql-connection (dbname) (let ((next-error-pos 0)) (loop for n from 1 to rows for str = (if (and (< next-error-pos (length *inject-null-bytes*)) (= n (aref *inject-null-bytes* next-error-pos))) (progn (incf next-error-pos) *string-with-null-byte*) *random-string*) do (pgloader.mysql::mysql-query (format nil *query* table-name str))))))
null
https://raw.githubusercontent.com/dimitri/pgloader/3047c9afe141763e9e7ec05b7f2a6aa97cf06801/test/data/retry.lisp
lisp
Test cases for issue CREATE TABLE `retry` ( `id` int(10) unsigned NOT NULL AUTO_INCREMENT, `content` text, PRIMARY KEY (`id`) );
(defpackage #:pgloader.test.retry (:use #:cl #:pgloader.params #:pgloader.mysql) (:export #:produce-data)) (in-package #:pgloader.test.retry) (defvar *inject-null-bytes* (coerce (loop for previous = 0 then (+ previous offset) for offset in '(15769 54 7 270 8752) collect (+ previous offset)) 'vector) "Line numbers in the batch where to inject erroneous data.") (defvar *string-with-null-byte* (concatenate 'string "Hello" (list #\Nul) "World!")) (defvar *random-string* (make-string (random 42) :initial-element #\a) "A random string.") (defvar *query* "INSERT INTO `~a`(`content`) VALUES ('~a')") (defun produce-data (&key (*myconn-host* *myconn-host*) (*myconn-port* *myconn-port*) (*myconn-user* *myconn-user*) (*myconn-pass* *myconn-pass*) (dbname "retry") (table-name "retry") (rows 150000)) "Produce a data set that looks like the one in issue #22." (with-mysql-connection (dbname) (let ((next-error-pos 0)) (loop for n from 1 to rows for str = (if (and (< next-error-pos (length *inject-null-bytes*)) (= n (aref *inject-null-bytes* next-error-pos))) (progn (incf next-error-pos) *string-with-null-byte*) *random-string*) do (pgloader.mysql::mysql-query (format nil *query* table-name str))))))
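The loop in *inject-null-bytes* above turns a list of relative offsets into absolute row numbers by keeping a running sum. The same computation as a one-liner, in Haskell purely as a cross-check of the arithmetic (the test data itself stays in Common Lisp):

main :: IO ()
main = print (scanl1 (+) [15769, 54, 7, 270, 8752])
-- [15769,15823,15830,16100,24852], i.e. the rows that get a NUL byte injected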
bad2fcdf27b918b83a128925a84da7073f80888fc120454ebb7794510207bad6
RyanMcG/lein-npm
plugin.clj
(ns lein-npm.plugin (:require [leiningen.npm :as npm] [leiningen.npm.node-exec :as exec])) (defn hooks [] (npm/install-hooks) (exec/install-hooks))
null
https://raw.githubusercontent.com/RyanMcG/lein-npm/aca88a27a7eace2f0f4fc19a095b9c51bd19bd46/src/lein_npm/plugin.clj
clojure
(ns lein-npm.plugin (:require [leiningen.npm :as npm] [leiningen.npm.node-exec :as exec])) (defn hooks [] (npm/install-hooks) (exec/install-hooks))
cfa26c641cc6cef2733ce2c6b43bd98426ac7a5d77ec7f3f8408fe1de61d58ed
disco-lang/disco
Graph.hs
{-# LANGUAGE OverloadedStrings #-} ----------------------------------------------------------------------------- -- | Module : Disco . . Graph -- Copyright : disco team and contributors -- Maintainer : -- SPDX - License - Identifier : BSD-3 - Clause -- -- A thin layer on top of graphs from the @fgl@ package, which -- allows dealing with vertices by label instead of by integer -- @Node@ values. ----------------------------------------------------------------------------- module Disco.Typecheck.Graph where import Prelude hiding (map, (<>)) import qualified Prelude as P import Control.Arrow ((&&&)) import Data.Map (Map) import qualified Data.Map as M import Data.Maybe (fromJust, isJust, mapMaybe) import Data.Set (Set) import qualified Data.Set as S import Data.Tuple (swap) import qualified Data.Graph.Inductive.Graph as G import Data.Graph.Inductive.PatriciaTree (Gr) import qualified Data.Graph.Inductive.Query.DFS as G (components, condensation, topsort') import Disco.Pretty import Disco.Util ((!)) -- | Directed graphs, with vertices labelled by @a@ and unlabelled -- edges. data Graph a = G (Gr a ()) (Map a G.Node) (Map G.Node a) deriving Show instance Pretty a => Pretty (Graph a) where pretty (G g _ _) = parens (prettyVertices <> ", " <> prettyEdges) ( V = { ( 0 , x ) , ( 1 , N ) } , E = { 0 - > 1 , 2 - > 3 } ) where vs = G.labNodes g es = G.labEdges g prettyVertex (n,a) = parens (text (show n) <> ", " <> pretty a) prettyVertices = "V = " <> braces (intercalate "," (P.map prettyVertex vs)) prettyEdge (v1,v2,_) = text (show v1) <+> "->" <+> text (show v2) prettyEdges = "E = " <> braces (intercalate "," (P.map prettyEdge es)) -- | Create a graph with the given set of vertices and directed edges. -- If any edges refer to vertices that are not in the given vertex -- set, they will simply be dropped. mkGraph :: (Show a, Ord a) => Set a -> Set (a,a) -> Graph a mkGraph vs es = G (G.mkGraph vs' es') a2n n2a where vs' = zip [0..] (S.toList vs) n2a = M.fromList vs' a2n = M.fromList . P.map swap $ vs' es' = mapMaybe mkEdge (S.toList es) mkEdge (a1,a2) = (,,) <$> M.lookup a1 a2n <*> M.lookup a2 a2n <*> pure () -- | Return the set of vertices (nodes) of a graph. nodes :: Graph a -> Set a nodes (G _ m _) = M.keysSet m -- | Return the set of directed edges of a graph. edges :: Ord a => Graph a -> Set (a,a) edges (G g _ m) = S.fromList $ P.map (\(n1,n2,()) -> (m ! n1, m ! n2)) (G.labEdges g) | Map a function over all the vertices of a graph . @Graph@ is not a @Functor@ instance because of the @Ord@ constraint on @b@. map :: Ord b => (a -> b) -> Graph a -> Graph b map f (G g m1 m2) = G (G.nmap f g) (M.mapKeys f m1) (M.map f m2) -- | Delete a vertex. delete :: (Show a, Ord a) => a -> Graph a -> Graph a delete a (G g a2n n2a) = G (G.delNode n g) (M.delete a a2n) (M.delete n n2a) where n = a2n ! a -- | The @condensation@ of a graph is the graph of its strongly -- connected components, /i.e./ each strongly connected component is -- compressed to a single node, labelled by the set of vertices in -- the component. There is an edge from component A to component B -- in the condensed graph iff there is an edge from any vertex in -- component A to any vertex in component B in the original graph. condensation :: Ord a => Graph a -> Graph (Set a) condensation (G g _ n2a) = G g' as2n n2as where g' = G.nmap (S.fromList . P.map (n2a !)) (G.condensation g) vs' = G.labNodes g' n2as = M.fromList vs' as2n = M.fromList . 
P.map swap $ vs' -- | Get a list of the weakly connected components of a graph, -- providing the set of vertices in each. Equivalently, return the -- strongly connected components of the graph when considered as an -- undirected graph. wcc :: Ord a => Graph a -> [Set a] wcc = P.map (S.map snd) . wccIDs wccIDs :: Ord a => Graph a -> [Set (G.Node, a)] wccIDs (G g _a2n n2a) = P.map (S.fromList . P.map (id &&& (n2a !))) (G.components g) | Do a topological sort on a DAG . topsort :: Graph a -> [a] topsort (G g _a2n _n2a) = G.topsort' g -- | A miscellaneous utility function to turn a @Graph Maybe@ into a @Maybe Graph@ : the result is @Just@ iff all the vertices in the -- input graph are. sequenceGraph :: Ord a => Graph (Maybe a) -> Maybe (Graph a) sequenceGraph g = case all isJust (nodes g) of False -> Nothing True -> Just $ map fromJust g -- | Get a list of all the /successors/ of a given node in the graph, -- /i.e./ all the nodes reachable from the given node by a directed -- path. Does not include the given node itself. suc :: (Show a, Ord a) => Graph a -> a -> [a] suc (G g a2n n2a) = P.map (n2a !) . G.suc g . (a2n !) -- | Get a list of all the /predecessors/ of a given node in the -- graph, /i.e./ all the nodes from which from the given node is -- reachable by a directed path. Does not include the given node -- itself. pre :: (Show a, Ord a) => Graph a -> a -> [a] pre (G g a2n n2a) = P.map (n2a !) . G.pre g . (a2n !) | Given a graph , return two mappings : the first maps each vertex to its set of successors ; the second maps each vertex to its set of -- predecessors. Equivalent to -- > ( M.fromList * * * M.fromList ) . unzip . map ( \a - > ( ( a , suc g a ) , ( a , pre g a ) ) ) . nodes $ g -- -- but much more efficient. cessors :: (Show a, Ord a) => Graph a -> (Map a (Set a), Map a (Set a)) cessors g@(G gg _ _) = (succs, preds) where as = G.topsort' gg succs = foldr collectSuccs M.empty as -- build successors map collectSuccs a m = M.insert a succsSet m where ss = suc g a succsSet = S.fromList ss `S.union` S.unions (P.map (m !) ss) preds = foldr collectPreds M.empty (reverse as) -- build predecessors map collectPreds a m = M.insert a predsSet m where ss = pre g a predsSet = S.fromList ss `S.union` S.unions (P.map (m !) ss)
null
https://raw.githubusercontent.com/disco-lang/disco/300195cdb385ca1178a01d85a72f91b2c4e98dd2/src/Disco/Typecheck/Graph.hs
haskell
# LANGUAGE OverloadedStrings # --------------------------------------------------------------------------- | Copyright : disco team and contributors Maintainer : A thin layer on top of graphs from the @fgl@ package, which allows dealing with vertices by label instead of by integer @Node@ values. --------------------------------------------------------------------------- | Directed graphs, with vertices labelled by @a@ and unlabelled edges. | Create a graph with the given set of vertices and directed edges. If any edges refer to vertices that are not in the given vertex set, they will simply be dropped. | Return the set of vertices (nodes) of a graph. | Return the set of directed edges of a graph. | Delete a vertex. | The @condensation@ of a graph is the graph of its strongly connected components, /i.e./ each strongly connected component is compressed to a single node, labelled by the set of vertices in the component. There is an edge from component A to component B in the condensed graph iff there is an edge from any vertex in component A to any vertex in component B in the original graph. | Get a list of the weakly connected components of a graph, providing the set of vertices in each. Equivalently, return the strongly connected components of the graph when considered as an undirected graph. | A miscellaneous utility function to turn a @Graph Maybe@ into a input graph are. | Get a list of all the /successors/ of a given node in the graph, /i.e./ all the nodes reachable from the given node by a directed path. Does not include the given node itself. | Get a list of all the /predecessors/ of a given node in the graph, /i.e./ all the nodes from which from the given node is reachable by a directed path. Does not include the given node itself. predecessors. Equivalent to but much more efficient. build successors map build predecessors map
Module : Disco . . Graph SPDX - License - Identifier : BSD-3 - Clause module Disco.Typecheck.Graph where import Prelude hiding (map, (<>)) import qualified Prelude as P import Control.Arrow ((&&&)) import Data.Map (Map) import qualified Data.Map as M import Data.Maybe (fromJust, isJust, mapMaybe) import Data.Set (Set) import qualified Data.Set as S import Data.Tuple (swap) import qualified Data.Graph.Inductive.Graph as G import Data.Graph.Inductive.PatriciaTree (Gr) import qualified Data.Graph.Inductive.Query.DFS as G (components, condensation, topsort') import Disco.Pretty import Disco.Util ((!)) data Graph a = G (Gr a ()) (Map a G.Node) (Map G.Node a) deriving Show instance Pretty a => Pretty (Graph a) where pretty (G g _ _) = parens (prettyVertices <> ", " <> prettyEdges) ( V = { ( 0 , x ) , ( 1 , N ) } , E = { 0 - > 1 , 2 - > 3 } ) where vs = G.labNodes g es = G.labEdges g prettyVertex (n,a) = parens (text (show n) <> ", " <> pretty a) prettyVertices = "V = " <> braces (intercalate "," (P.map prettyVertex vs)) prettyEdge (v1,v2,_) = text (show v1) <+> "->" <+> text (show v2) prettyEdges = "E = " <> braces (intercalate "," (P.map prettyEdge es)) mkGraph :: (Show a, Ord a) => Set a -> Set (a,a) -> Graph a mkGraph vs es = G (G.mkGraph vs' es') a2n n2a where vs' = zip [0..] (S.toList vs) n2a = M.fromList vs' a2n = M.fromList . P.map swap $ vs' es' = mapMaybe mkEdge (S.toList es) mkEdge (a1,a2) = (,,) <$> M.lookup a1 a2n <*> M.lookup a2 a2n <*> pure () nodes :: Graph a -> Set a nodes (G _ m _) = M.keysSet m edges :: Ord a => Graph a -> Set (a,a) edges (G g _ m) = S.fromList $ P.map (\(n1,n2,()) -> (m ! n1, m ! n2)) (G.labEdges g) | Map a function over all the vertices of a graph . @Graph@ is not a @Functor@ instance because of the @Ord@ constraint on @b@. map :: Ord b => (a -> b) -> Graph a -> Graph b map f (G g m1 m2) = G (G.nmap f g) (M.mapKeys f m1) (M.map f m2) delete :: (Show a, Ord a) => a -> Graph a -> Graph a delete a (G g a2n n2a) = G (G.delNode n g) (M.delete a a2n) (M.delete n n2a) where n = a2n ! a condensation :: Ord a => Graph a -> Graph (Set a) condensation (G g _ n2a) = G g' as2n n2as where g' = G.nmap (S.fromList . P.map (n2a !)) (G.condensation g) vs' = G.labNodes g' n2as = M.fromList vs' as2n = M.fromList . P.map swap $ vs' wcc :: Ord a => Graph a -> [Set a] wcc = P.map (S.map snd) . wccIDs wccIDs :: Ord a => Graph a -> [Set (G.Node, a)] wccIDs (G g _a2n n2a) = P.map (S.fromList . P.map (id &&& (n2a !))) (G.components g) | Do a topological sort on a DAG . topsort :: Graph a -> [a] topsort (G g _a2n _n2a) = G.topsort' g @Maybe Graph@ : the result is @Just@ iff all the vertices in the sequenceGraph :: Ord a => Graph (Maybe a) -> Maybe (Graph a) sequenceGraph g = case all isJust (nodes g) of False -> Nothing True -> Just $ map fromJust g suc :: (Show a, Ord a) => Graph a -> a -> [a] suc (G g a2n n2a) = P.map (n2a !) . G.suc g . (a2n !) pre :: (Show a, Ord a) => Graph a -> a -> [a] pre (G g a2n n2a) = P.map (n2a !) . G.pre g . (a2n !) | Given a graph , return two mappings : the first maps each vertex to its set of successors ; the second maps each vertex to its set of > ( M.fromList * * * M.fromList ) . unzip . map ( \a - > ( ( a , suc g a ) , ( a , pre g a ) ) ) . nodes $ g cessors :: (Show a, Ord a) => Graph a -> (Map a (Set a), Map a (Set a)) cessors g@(G gg _ _) = (succs, preds) where as = G.topsort' gg collectSuccs a m = M.insert a succsSet m where ss = suc g a succsSet = S.fromList ss `S.union` S.unions (P.map (m !) 
ss) collectPreds a m = M.insert a predsSet m where ss = pre g a predsSet = S.fromList ss `S.union` S.unions (P.map (m !) ss)
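A small usage sketch for the graph wrapper in this record, assuming the disco package (which provides Disco.Typecheck.Graph) is on the build path; the three-vertex graph is made up.

import qualified Data.Set as S
import qualified Disco.Typecheck.Graph as G

main :: IO ()
main = do
  let g = G.mkGraph (S.fromList "abc") (S.fromList [('a', 'b'), ('b', 'c')])
  print (S.toList (G.nodes g))   -- "abc"
  print (G.suc g 'a')            -- successors of 'a', here "b"
  print (G.topsort g)            -- a topological order, e.g. "abc"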
c7d8934568dec50ace5b79ff8c7201522142f3a7277f5a4c7a1e3e983f505da2
agda/agda
Common.hs
| Common syntax highlighting functions for Emacs and JSON module Agda.Interaction.Highlighting.Common ( toAtoms , chooseHighlightingMethod ) where import Agda.Interaction.Highlighting.Precise import Agda.Syntax.Common import Agda.TypeChecking.Monad (HighlightingMethod(..)) import Data.Maybe (maybeToList) import Data.Char (toLower) import qualified Data.Set as Set -- | Converts the 'aspect' and 'otherAspects' fields to strings that are -- friendly to editors. toAtoms :: Aspects -> [String] toAtoms m = map toAtom (Set.toList $ otherAspects m) ++ toAtoms' (aspect m) where toAtom :: Show a => a -> String toAtom = map toLower . show kindToAtom (Constructor Inductive) = "inductiveconstructor" kindToAtom (Constructor CoInductive) = "coinductiveconstructor" kindToAtom k = toAtom k toAtoms' Nothing = [] toAtoms' (Just (Name mKind op)) = map kindToAtom (maybeToList mKind) ++ opAtom where opAtom | op = ["operator"] | otherwise = [] toAtoms' (Just a) = [toAtom a] | Choose which method to use based on HighlightingInfo and HighlightingMethod chooseHighlightingMethod :: HighlightingInfo -> HighlightingMethod -> HighlightingMethod chooseHighlightingMethod info method = case toList info of _ | method == Direct -> Direct ((_, mi) : _) | check mi -> Direct _ -> Indirect where check mi = otherAspects mi == Set.singleton TypeChecks || mi == mempty
null
https://raw.githubusercontent.com/agda/agda/cbba4e4c6f658e8d9d332ed6173caf98ea075904/src/full/Agda/Interaction/Highlighting/Common.hs
haskell
| Converts the 'aspect' and 'otherAspects' fields to strings that are friendly to editors.
| Common syntax highlighting functions for Emacs and JSON module Agda.Interaction.Highlighting.Common ( toAtoms , chooseHighlightingMethod ) where import Agda.Interaction.Highlighting.Precise import Agda.Syntax.Common import Agda.TypeChecking.Monad (HighlightingMethod(..)) import Data.Maybe (maybeToList) import Data.Char (toLower) import qualified Data.Set as Set toAtoms :: Aspects -> [String] toAtoms m = map toAtom (Set.toList $ otherAspects m) ++ toAtoms' (aspect m) where toAtom :: Show a => a -> String toAtom = map toLower . show kindToAtom (Constructor Inductive) = "inductiveconstructor" kindToAtom (Constructor CoInductive) = "coinductiveconstructor" kindToAtom k = toAtom k toAtoms' Nothing = [] toAtoms' (Just (Name mKind op)) = map kindToAtom (maybeToList mKind) ++ opAtom where opAtom | op = ["operator"] | otherwise = [] toAtoms' (Just a) = [toAtom a] | Choose which method to use based on HighlightingInfo and HighlightingMethod chooseHighlightingMethod :: HighlightingInfo -> HighlightingMethod -> HighlightingMethod chooseHighlightingMethod info method = case toList info of _ | method == Direct -> Direct ((_, mi) : _) | check mi -> Direct _ -> Indirect where check mi = otherAspects mi == Set.singleton TypeChecks || mi == mempty
1a4372f71dd73c168606394b9b40b360983e7c151e83bd981fe9a2c760965bab
clojure-interop/google-cloud-clients
CloudRedisStub.clj
(ns com.google.cloud.redis.v1beta1.stub.CloudRedisStub "Base stub class for Google Cloud Memorystore for Redis API. This class is for advanced usage and reflects the underlying API directly." (:refer-clojure :only [require comment defn ->]) (:import [com.google.cloud.redis.v1beta1.stub CloudRedisStub])) (defn ->cloud-redis-stub "Constructor." (^CloudRedisStub [] (new CloudRedisStub ))) (defn delete-instance-operation-callable "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.DeleteInstanceRequest,com.google.protobuf.Empty,com.google.protobuf.Any>`" ([^CloudRedisStub this] (-> this (.deleteInstanceOperationCallable)))) (defn failover-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.FailoverInstanceRequest,com.google.longrunning.Operation>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.failoverInstanceCallable)))) (defn export-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.ExportInstanceRequest,com.google.longrunning.Operation>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.exportInstanceCallable)))) (defn get-operations-stub "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.longrunning.stub.OperationsStub`" ([^CloudRedisStub this] (-> this (.getOperationsStub)))) (defn list-instances-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.ListInstancesRequest,com.google.cloud.redis.v1beta1.ListInstancesResponse>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.listInstancesCallable)))) (defn export-instance-operation-callable "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.ExportInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`" ([^CloudRedisStub this] (-> this (.exportInstanceOperationCallable)))) (defn list-instances-paged-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.ListInstancesRequest,com.google.cloud.redis.v1beta1.CloudRedisClient$ListInstancesPagedResponse>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.listInstancesPagedCallable)))) (defn failover-instance-operation-callable "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.FailoverInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`" ([^CloudRedisStub this] (-> this (.failoverInstanceOperationCallable)))) (defn import-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.ImportInstanceRequest,com.google.longrunning.Operation>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.importInstanceCallable)))) (defn create-instance-operation-callable "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.CreateInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`" ([^CloudRedisStub this] (-> this (.createInstanceOperationCallable)))) (defn 
update-instance-operation-callable "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.UpdateInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`" ([^CloudRedisStub this] (-> this (.updateInstanceOperationCallable)))) (defn close "" ([^CloudRedisStub this] (-> this (.close)))) (defn delete-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.DeleteInstanceRequest,com.google.longrunning.Operation>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.deleteInstanceCallable)))) (defn create-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.CreateInstanceRequest,com.google.longrunning.Operation>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.createInstanceCallable)))) (defn import-instance-operation-callable "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.ImportInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`" ([^CloudRedisStub this] (-> this (.importInstanceOperationCallable)))) (defn update-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.UpdateInstanceRequest,com.google.longrunning.Operation>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.updateInstanceCallable)))) (defn get-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.GetInstanceRequest,com.google.cloud.redis.v1beta1.Instance>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.getInstanceCallable))))
null
https://raw.githubusercontent.com/clojure-interop/google-cloud-clients/80852d0496057c22f9cdc86d6f9ffc0fa3cd7904/com.google.cloud.redis/src/com/google/cloud/redis/v1beta1/stub/CloudRedisStub.clj
clojure
(ns com.google.cloud.redis.v1beta1.stub.CloudRedisStub "Base stub class for Google Cloud Memorystore for Redis API. This class is for advanced usage and reflects the underlying API directly." (:refer-clojure :only [require comment defn ->]) (:import [com.google.cloud.redis.v1beta1.stub CloudRedisStub])) (defn ->cloud-redis-stub "Constructor." (^CloudRedisStub [] (new CloudRedisStub ))) (defn delete-instance-operation-callable "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.DeleteInstanceRequest,com.google.protobuf.Empty,com.google.protobuf.Any>`" ([^CloudRedisStub this] (-> this (.deleteInstanceOperationCallable)))) (defn failover-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.FailoverInstanceRequest,com.google.longrunning.Operation>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.failoverInstanceCallable)))) (defn export-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.ExportInstanceRequest,com.google.longrunning.Operation>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.exportInstanceCallable)))) (defn get-operations-stub "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.longrunning.stub.OperationsStub`" ([^CloudRedisStub this] (-> this (.getOperationsStub)))) (defn list-instances-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.ListInstancesRequest,com.google.cloud.redis.v1beta1.ListInstancesResponse>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.listInstancesCallable)))) (defn export-instance-operation-callable "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.ExportInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`" ([^CloudRedisStub this] (-> this (.exportInstanceOperationCallable)))) (defn list-instances-paged-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.ListInstancesRequest,com.google.cloud.redis.v1beta1.CloudRedisClient$ListInstancesPagedResponse>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.listInstancesPagedCallable)))) (defn failover-instance-operation-callable "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.FailoverInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`" ([^CloudRedisStub this] (-> this (.failoverInstanceOperationCallable)))) (defn import-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.ImportInstanceRequest,com.google.longrunning.Operation>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.importInstanceCallable)))) (defn create-instance-operation-callable "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.CreateInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`" ([^CloudRedisStub this] (-> this (.createInstanceOperationCallable)))) (defn 
update-instance-operation-callable "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.UpdateInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`" ([^CloudRedisStub this] (-> this (.updateInstanceOperationCallable)))) (defn close "" ([^CloudRedisStub this] (-> this (.close)))) (defn delete-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.DeleteInstanceRequest,com.google.longrunning.Operation>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.deleteInstanceCallable)))) (defn create-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.CreateInstanceRequest,com.google.longrunning.Operation>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.createInstanceCallable)))) (defn import-instance-operation-callable "returns: `(value="The surface for use by generated code is not stable yet and may change in the future.") com.google.api.gax.rpc.OperationCallable<com.google.cloud.redis.v1beta1.ImportInstanceRequest,com.google.cloud.redis.v1beta1.Instance,com.google.protobuf.Any>`" ([^CloudRedisStub this] (-> this (.importInstanceOperationCallable)))) (defn update-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.UpdateInstanceRequest,com.google.longrunning.Operation>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.updateInstanceCallable)))) (defn get-instance-callable "returns: `com.google.api.gax.rpc.UnaryCallable<com.google.cloud.redis.v1beta1.GetInstanceRequest,com.google.cloud.redis.v1beta1.Instance>`" (^com.google.api.gax.rpc.UnaryCallable [^CloudRedisStub this] (-> this (.getInstanceCallable))))
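A minimal usage sketch for the Clojure wrapper above (not part of the record). CloudRedisStub is abstract in the underlying Java client, so the stub instance below is assumed to be obtained elsewhere (for example from the client's generated stub settings); only list-instances-callable and the gax UnaryCallable call method are taken from the code above.

(require '[com.google.cloud.redis.v1beta1.stub.CloudRedisStub :as redis])

;; Invoke the ListInstances RPC synchronously through the stub's UnaryCallable.
;; `stub` and `request` are assumed to be built by the caller.
(defn list-instances [stub request]
  (.call (redis/list-instances-callable stub) request))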
12a9c300ea084c6377a53feeb65882a59cb50d77d863bbee1b4aa4b4c1d87477
anurudhp/CPHaskell
c.hs
# LANGUAGE LambdaCase # import Control.Applicative (liftA2) import Control.Arrow ((>>>)) import Control.Monad.State import qualified Data.ByteString.Lazy.Char8 as C import Data.List (group) import Data.Maybe (fromJust, fromMaybe) chunksOf _ [] = [] chunksOf k xs = let (hs, ts) = splitAt k xs in hs : chunksOf k ts main :: IO () main = C.interact $ runScanner (numberOf testCase) >>> C.unlines testCase :: Scanner C.ByteString testCase = do n <- int xs <- n >< int return . C.pack . show $ solve n xs solve n xs = 2 * n - 1 + compute xs where compute [] = 0 compute [_] = 0 compute [_, _] = 0 compute (x : y : z : xs) = good3 x y z + (case xs of [] -> 0; (w:_) -> good4 x y z w) + compute (y : z : xs) good p q r = q < min p r || q > max p r good3 p q r | good p q r = 1 | otherwise = 0 good4 p q r s | good p q r && good p r s && good p q s && good q r s = 1 | otherwise = 0 --- Template --- type Scanner = State [C.ByteString] runScanner :: Scanner a -> C.ByteString -> a runScanner = runScannerWith C.words runScannerWith :: (C.ByteString -> [C.ByteString]) -> Scanner a -> C.ByteString -> a runScannerWith t s = evalState s . t peek :: Scanner C.ByteString peek = gets head str :: Scanner C.ByteString str = get >>= \case s : ss -> put ss >> return s int :: Scanner Int int = fst . fromJust . C.readInt <$> str integer :: Scanner Integer integer = read . C.unpack <$> str double :: Scanner Double double = read . C.unpack <$> str decimal :: Int -> Scanner Int decimal p = round . ((10 ^ p) *) <$> double numberOf :: Scanner a -> Scanner [a] numberOf s = int >>= flip replicateM s many :: Scanner a -> Scanner [a] many s = get >>= \case [] -> return []; _ -> (:) <$> s <*> many s till :: (C.ByteString -> Bool) -> Scanner a -> Scanner [a] till p s = do t <- peek if p t then return [] else (:) <$> s <*> till p s times :: Int -> Scanner a -> Scanner [a] times = replicateM (><) = times two, three, four :: Scanner a -> Scanner [a] [two, three, four] = map times [2 .. 4] pair :: Scanner a -> Scanner b -> Scanner (a, b) pair = liftA2 (,)
null
https://raw.githubusercontent.com/anurudhp/CPHaskell/67a55ab34c0365d42fb092c2378bc3972186970c/contests/codeforces/1550/c.hs
haskell
- Template ---
# LANGUAGE LambdaCase # import Control.Applicative (liftA2) import Control.Arrow ((>>>)) import Control.Monad.State import qualified Data.ByteString.Lazy.Char8 as C import Data.List (group) import Data.Maybe (fromJust, fromMaybe) chunksOf _ [] = [] chunksOf k xs = let (hs, ts) = splitAt k xs in hs : chunksOf k ts main :: IO () main = C.interact $ runScanner (numberOf testCase) >>> C.unlines testCase :: Scanner C.ByteString testCase = do n <- int xs <- n >< int return . C.pack . show $ solve n xs solve n xs = 2 * n - 1 + compute xs where compute [] = 0 compute [_] = 0 compute [_, _] = 0 compute (x : y : z : xs) = good3 x y z + (case xs of [] -> 0; (w:_) -> good4 x y z w) + compute (y : z : xs) good p q r = q < min p r || q > max p r good3 p q r | good p q r = 1 | otherwise = 0 good4 p q r s | good p q r && good p r s && good p q s && good q r s = 1 | otherwise = 0 type Scanner = State [C.ByteString] runScanner :: Scanner a -> C.ByteString -> a runScanner = runScannerWith C.words runScannerWith :: (C.ByteString -> [C.ByteString]) -> Scanner a -> C.ByteString -> a runScannerWith t s = evalState s . t peek :: Scanner C.ByteString peek = gets head str :: Scanner C.ByteString str = get >>= \case s : ss -> put ss >> return s int :: Scanner Int int = fst . fromJust . C.readInt <$> str integer :: Scanner Integer integer = read . C.unpack <$> str double :: Scanner Double double = read . C.unpack <$> str decimal :: Int -> Scanner Int decimal p = round . ((10 ^ p) *) <$> double numberOf :: Scanner a -> Scanner [a] numberOf s = int >>= flip replicateM s many :: Scanner a -> Scanner [a] many s = get >>= \case [] -> return []; _ -> (:) <$> s <*> many s till :: (C.ByteString -> Bool) -> Scanner a -> Scanner [a] till p s = do t <- peek if p t then return [] else (:) <$> s <*> till p s times :: Int -> Scanner a -> Scanner [a] times = replicateM (><) = times two, three, four :: Scanner a -> Scanner [a] [two, three, four] = map times [2 .. 4] pair :: Scanner a -> Scanner b -> Scanner (a, b) pair = liftA2 (,)
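A small sketch (not part of the record) showing how the Scanner combinators in this solution compose; it assumes the definitions above (runScanner, numberOf, int and the C alias for Data.ByteString.Lazy.Char8) are in scope.

-- Read a count followed by that many ints from a ByteString.
exampleParse :: [Int]
exampleParse = runScanner (numberOf int) (C.pack "3 10 20 30")
-- exampleParse evaluates to [10,20,30]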
a4c8dc0eccbad687ae8df3029f183061ae64fd48b292e8a628b76b132e179f7f
alexanderjamesking/clj-wiremock
core.clj
(ns clj-wiremock.core (:require [cheshire.core :refer [generate-string parse-string]] [clj-http.client :as client]) (:import com.github.tomakehurst.wiremock.WireMockServer com.github.tomakehurst.wiremock.core.WireMockConfiguration)) (defn config "Creates a new instance of WireMockConfiguration" ([] (new WireMockConfiguration)) ([config-map] (let [config (new WireMockConfiguration)] (when-let [v (:port config-map)] (.port config (int v))) (when-let [v (:https-port config-map)] (.httpsPort config (int v))) (when-let [v (:keystore-path config-map)] (.keystorePath config v)) (when-let [v (:keystore-password config-map)] (.keystorePassword config v)) (when-let [v (:trust-store-path config-map)] (.trustStorePath config v)) (when-let [v (:trust-store-password config-map)] (.trustStorePassword config v)) (when-let [v (:need-client-auth config-map)] (.needClientAuth config v)) config))) (defn server "Create a new instance of WireMockServer" ([] (new WireMockServer (config))) ([config] (new WireMockServer config))) (defn start "Starts the WireMockServer" [server] (.start server)) (defn stop "Stops the WireMockServer" [server] (.stop server)) (defn reset "Removes all stub mappings and deletes the request log" [server] (.resetAll server)) (defn- admin-post [endpoint body & [base-url]] (let [base-url (or base-url ":8080") admin-url (str base-url "/__admin/" endpoint) content-to-post {:body (generate-string body)} response (client/post admin-url content-to-post)] (parse-string (:body response) true))) (defn count-requests [body & [base-url]] (:count (admin-post "requests/count" body base-url))) (defn find-requests [body & [base-url]] (:requests (admin-post "requests/find" body base-url))) (defn stub [body & [base-url]] (admin-post "mappings/new" body base-url))
null
https://raw.githubusercontent.com/alexanderjamesking/clj-wiremock/9384bdfada3181002db31043c361b5efe69489e3/src/clj_wiremock/core.clj
clojure
(ns clj-wiremock.core (:require [cheshire.core :refer [generate-string parse-string]] [clj-http.client :as client]) (:import com.github.tomakehurst.wiremock.WireMockServer com.github.tomakehurst.wiremock.core.WireMockConfiguration)) (defn config "Creates a new instance of WireMockConfiguration" ([] (new WireMockConfiguration)) ([config-map] (let [config (new WireMockConfiguration)] (when-let [v (:port config-map)] (.port config (int v))) (when-let [v (:https-port config-map)] (.httpsPort config (int v))) (when-let [v (:keystore-path config-map)] (.keystorePath config v)) (when-let [v (:keystore-password config-map)] (.keystorePassword config v)) (when-let [v (:trust-store-path config-map)] (.trustStorePath config v)) (when-let [v (:trust-store-password config-map)] (.trustStorePassword config v)) (when-let [v (:need-client-auth config-map)] (.needClientAuth config v)) config))) (defn server "Create a new instance of WireMockServer" ([] (new WireMockServer (config))) ([config] (new WireMockServer config))) (defn start "Starts the WireMockServer" [server] (.start server)) (defn stop "Stops the WireMockServer" [server] (.stop server)) (defn reset "Removes all stub mappings and deletes the request log" [server] (.resetAll server)) (defn- admin-post [endpoint body & [base-url]] (let [base-url (or base-url ":8080") admin-url (str base-url "/__admin/" endpoint) content-to-post {:body (generate-string body)} response (client/post admin-url content-to-post)] (parse-string (:body response) true))) (defn count-requests [body & [base-url]] (:count (admin-post "requests/count" body base-url))) (defn find-requests [body & [base-url]] (:requests (admin-post "requests/find" body base-url))) (defn stub [body & [base-url]] (admin-post "mappings/new" body base-url))
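A minimal usage sketch of the functions above (not part of the record), assuming the clj-wiremock.core namespace is loaded, WireMock's admin endpoint is reachable at http://localhost:8080, and the stub body follows WireMock's standard request/response mapping format.

(def wm (server (config {:port 8080})))
(start wm)

;; Register a stub mapping, then count the requests that matched it.
(stub {:request {:method "GET" :url "/ping"}
       :response {:status 200 :body "pong"}}
      "http://localhost:8080")
(count-requests {:method "GET" :url "/ping"} "http://localhost:8080")

(stop wm)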
3e945dc8f56c933ac20faae39a74db3cafb998746ce1ce441c232789bc3d346f
yutopp/rill
codegen_context.ml
* Copyright yutopp 2015 - . * * Distributed under the Boost Software License , Version 1.0 . * ( See accompanying file LICENSE_1_0.txt or copy at * ) * Copyright yutopp 2015 - . * * Distributed under the Boost Software License, Version 1.0. * (See accompanying file LICENSE_1_0.txt or copy at * ) *) open Batteries module type CONTEXT_TYPE = sig type ir_context_t type ir_builder_t type ir_module_t type ir_value_t type ir_intrinsics type 'ty ir_cache_value_t type ('ty, 'ctx) value_t end module Make (Cgt : CONTEXT_TYPE) = struct module EnvIdOrd = struct type t = Env_system.EnvId.t let compare = Env_system.EnvId.compare end module EnvIdSet = Set.Make(EnvIdOrd) module NodeOrderedType = struct type t = Env_system.EnvId.t let compare = Env_system.EnvId.compare end type ('env, 'c_id, 'ty, 'v) t = { mutable ir_context : Cgt.ir_context_t; mutable ir_builder : Cgt.ir_builder_t; mutable ir_module : Cgt.ir_module_t; intrinsics : Cgt.ir_intrinsics; env_to_record_tbl : (EnvIdOrd.t, ('env, 'c_id, 'ty, 'v) value_t) Hashtbl.t; name_to_record_tbl : (string, ('env, 'c_id, 'ty, 'v) value_t) Hashtbl.t; env_to_meta_record_tbl : (EnvIdOrd.t, ('env, 'c_id, 'ty, 'v) value_t) Hashtbl.t; cache_id_to_cache_tbl : ('c_id, 'ty Cgt.ir_cache_value_t) Hashtbl.t; mutable defined_env : EnvIdSet.t; type_sets : 'env Type_sets.type_sets_t; uni_map : ('ty, 'v) Unification.t; target_module : 'env option; external_functions : (string, Cgt.ir_value_t) Hashtbl.t; processing_functions : Cgt.ir_value_t Stack.t; places_for_sto_array_elem : ('env, 'c_id, 'ty, 'v) value_t Stack.t; } and ('env, 'c_id, 'ty, 'v) value_t = ('ty, (('env, 'c_id, 'ty, 'v) t)) Cgt.value_t let init ~ir_context ~ir_builder ~ir_module ~ir_intrinsics ~type_sets ~uni_map ~target_module = { ir_context = ir_context; ir_builder = ir_builder; ir_module = ir_module; intrinsics = ir_intrinsics; env_to_record_tbl = Hashtbl.create 32; name_to_record_tbl = Hashtbl.create 32; env_to_meta_record_tbl = Hashtbl.create 32; cache_id_to_cache_tbl = Hashtbl.create 32; defined_env = EnvIdSet.empty; type_sets = type_sets; uni_map = uni_map; target_module = target_module; external_functions = Hashtbl.create 32; processing_functions = Stack.create (); places_for_sto_array_elem = Stack.create (); } let target_module ctx = ctx.target_module (**) let mark_env_as_defined ctx env = ctx.defined_env <- (EnvIdSet.add env.Env.env_id ctx.defined_env) let is_env_defined ctx env = EnvIdSet.mem env.Env.env_id ctx.defined_env (**) let bind_val_to_env ctx value env = Hashtbl.add ctx.env_to_record_tbl env.Env.env_id value let find_val_by_env ctx env = Hashtbl.find ctx.env_to_record_tbl env.Env.env_id (**) let bind_val_to_name ctx value name = Hashtbl.add ctx.name_to_record_tbl name value let find_val_by_name ctx name = Hashtbl.find ctx.name_to_record_tbl name (**) let bind_external_function ctx name f = Hashtbl.add ctx.external_functions name f let find_external_function_by_name ctx name = Hashtbl.find ctx.external_functions name let enum_of_external_function_names ctx = Hashtbl.keys ctx.external_functions (**) let bind_metaval_to_env ctx value env = Hashtbl.add ctx.env_to_meta_record_tbl env.Env.env_id value let find_metaval_by_env ctx env = Hashtbl.find ctx.env_to_meta_record_tbl env.Env.env_id (**) let bind_values_to_cache_id ctx values cache_id = Hashtbl.add ctx.cache_id_to_cache_tbl cache_id values let find_values_by_cache_id ctx cache_id = Hashtbl.find ctx.cache_id_to_cache_tbl cache_id (**) let push_processing_function ctx f = Stack.push f ctx.processing_functions let pop_processing_function ctx = 
Stack.pop ctx.processing_functions let current_processing_function ctx = Stack.top ctx.processing_functions (**) let push_array_storage ctx array_value = Stack.push array_value ctx.places_for_sto_array_elem let pop_array_storage ctx = Stack.pop ctx.places_for_sto_array_elem let current_array_storage ctx = Stack.top ctx.places_for_sto_array_elem end
null
https://raw.githubusercontent.com/yutopp/rill/375b67c03ab2087d0a2a833bd9e80f3e51e2694f/rillc/_migrating/codegen_context.ml
ocaml
* Copyright yutopp 2015 - . * * Distributed under the Boost Software License , Version 1.0 . * ( See accompanying file LICENSE_1_0.txt or copy at * ) * Copyright yutopp 2015 - . * * Distributed under the Boost Software License, Version 1.0. * (See accompanying file LICENSE_1_0.txt or copy at * ) *) open Batteries module type CONTEXT_TYPE = sig type ir_context_t type ir_builder_t type ir_module_t type ir_value_t type ir_intrinsics type 'ty ir_cache_value_t type ('ty, 'ctx) value_t end module Make (Cgt : CONTEXT_TYPE) = struct module EnvIdOrd = struct type t = Env_system.EnvId.t let compare = Env_system.EnvId.compare end module EnvIdSet = Set.Make(EnvIdOrd) module NodeOrderedType = struct type t = Env_system.EnvId.t let compare = Env_system.EnvId.compare end type ('env, 'c_id, 'ty, 'v) t = { mutable ir_context : Cgt.ir_context_t; mutable ir_builder : Cgt.ir_builder_t; mutable ir_module : Cgt.ir_module_t; intrinsics : Cgt.ir_intrinsics; env_to_record_tbl : (EnvIdOrd.t, ('env, 'c_id, 'ty, 'v) value_t) Hashtbl.t; name_to_record_tbl : (string, ('env, 'c_id, 'ty, 'v) value_t) Hashtbl.t; env_to_meta_record_tbl : (EnvIdOrd.t, ('env, 'c_id, 'ty, 'v) value_t) Hashtbl.t; cache_id_to_cache_tbl : ('c_id, 'ty Cgt.ir_cache_value_t) Hashtbl.t; mutable defined_env : EnvIdSet.t; type_sets : 'env Type_sets.type_sets_t; uni_map : ('ty, 'v) Unification.t; target_module : 'env option; external_functions : (string, Cgt.ir_value_t) Hashtbl.t; processing_functions : Cgt.ir_value_t Stack.t; places_for_sto_array_elem : ('env, 'c_id, 'ty, 'v) value_t Stack.t; } and ('env, 'c_id, 'ty, 'v) value_t = ('ty, (('env, 'c_id, 'ty, 'v) t)) Cgt.value_t let init ~ir_context ~ir_builder ~ir_module ~ir_intrinsics ~type_sets ~uni_map ~target_module = { ir_context = ir_context; ir_builder = ir_builder; ir_module = ir_module; intrinsics = ir_intrinsics; env_to_record_tbl = Hashtbl.create 32; name_to_record_tbl = Hashtbl.create 32; env_to_meta_record_tbl = Hashtbl.create 32; cache_id_to_cache_tbl = Hashtbl.create 32; defined_env = EnvIdSet.empty; type_sets = type_sets; uni_map = uni_map; target_module = target_module; external_functions = Hashtbl.create 32; processing_functions = Stack.create (); places_for_sto_array_elem = Stack.create (); } let target_module ctx = ctx.target_module let mark_env_as_defined ctx env = ctx.defined_env <- (EnvIdSet.add env.Env.env_id ctx.defined_env) let is_env_defined ctx env = EnvIdSet.mem env.Env.env_id ctx.defined_env let bind_val_to_env ctx value env = Hashtbl.add ctx.env_to_record_tbl env.Env.env_id value let find_val_by_env ctx env = Hashtbl.find ctx.env_to_record_tbl env.Env.env_id let bind_val_to_name ctx value name = Hashtbl.add ctx.name_to_record_tbl name value let find_val_by_name ctx name = Hashtbl.find ctx.name_to_record_tbl name let bind_external_function ctx name f = Hashtbl.add ctx.external_functions name f let find_external_function_by_name ctx name = Hashtbl.find ctx.external_functions name let enum_of_external_function_names ctx = Hashtbl.keys ctx.external_functions let bind_metaval_to_env ctx value env = Hashtbl.add ctx.env_to_meta_record_tbl env.Env.env_id value let find_metaval_by_env ctx env = Hashtbl.find ctx.env_to_meta_record_tbl env.Env.env_id let bind_values_to_cache_id ctx values cache_id = Hashtbl.add ctx.cache_id_to_cache_tbl cache_id values let find_values_by_cache_id ctx cache_id = Hashtbl.find ctx.cache_id_to_cache_tbl cache_id let push_processing_function ctx f = Stack.push f ctx.processing_functions let pop_processing_function ctx = Stack.pop ctx.processing_functions 
let current_processing_function ctx = Stack.top ctx.processing_functions let push_array_storage ctx array_value = Stack.push array_value ctx.places_for_sto_array_elem let pop_array_storage ctx = Stack.pop ctx.places_for_sto_array_elem let current_array_storage ctx = Stack.top ctx.places_for_sto_array_elem end
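A sketch (not from the repository) of how the Make functor above would be instantiated. The placeholder module only satisfies the CONTEXT_TYPE signature with dummy types; the module name Codegen_context is inferred from the file name, and the functor body still needs the surrounding rillc modules (Env, Env_system, Type_sets, Unification) to compile.

(* Placeholder backend types; a real instantiation would supply LLVM types here. *)
module Placeholder_cgt = struct
  type ir_context_t = unit
  type ir_builder_t = unit
  type ir_module_t = unit
  type ir_value_t = unit
  type ir_intrinsics = unit
  type 'ty ir_cache_value_t = 'ty list
  type ('ty, 'ctx) value_t = 'ty * 'ctx
end

module Placeholder_ctx = Codegen_context.Make (Placeholder_cgt)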
8e494962ac9e74fc5394f07cc1450d2ac535811392f98d31edd01e25904bc9b2
LdBeth/star-lisp
no-bang-bang.lisp
-*- SYNTAX : COMMON - LISP ; MODE : LISP ; BASE : 10 ; PACKAGE : * LISP - I ; MUSER : YES -*- ;;;> ************************************************************************************ > Copyright 1990 Thinking Machines Corporation , Inc. of Cambridge , Massachusetts . ;;;> All rights reserved ;;;> ************************************************************************************ (in-package :*lisp-i) ;;; No-Bang-Bang.lisp By , with pointers from JP Massar Created : 3/19/90 Revised : 4/3/90 - I decided to do things more generically . WRS 8/90 - Added simple - pvar - argument ! ! for simulator functions . WRS 9/5 - Added & front - end keyword WRS ;;; The global variables and macros defined in this file allow the *Lisp user to ;;; provide scalar arguments to *Lisp functions that expect pvar arguments. These scalars are automatically converted to pvars ( ala ! ! ) , thus obviating ;;; the need to use the !! operator in most expressions. ;;; The main purpose of this feature is to eliminate the need for !! in supplying arguments to * Lisp functions where some arguments may be constant pvar expressions ( i.e. , ( + ! ! ( ! ! 1 ) ( ! ! 2 ) ( ! ! 23) ... &ad - nauseum ) ) ;;; Conversion is performed by any function or macro in which the forms below are ;;; applied to arguments. ;;; Conversion is NOT performed for operators such as *funcall and *apply, where ;;; it is obvious that some arguments MUST be scalars. Also, conversion is not ;;; performed for any argument to a *Lisp operator that obviously MUST be a " real " pvar ( i.e. , pvar - name , pvar - field , etc . ) , or to an operation where such ;;; conversion would obviously defeat the intended purpose of the operation ( tests such as booleanp ! ! , characterp ! ! , etc . ) ;;; This variable controls whether the feature is included when *Lisp ;;; is compiled. (defvar *convert-scalar-args-to-pvars* t "Whether to automatically convert scalar arguments that should be pvars into pvars") ;;; This variable controls whether the feature is active, and may be ;;; altered at run-time. It is inactive by default. (defvar *convert-scalar-args-p* t "Whether to automatically convert scalar arguments (conditional at run-time)") ;;; A simple function to make turning it on and off easy: (defun no-bang-bang (&optional (enable-feature-p t)) (setq *convert-scalar-args-to-pvars* enable-feature-p) (setq *convert-scalar-args-p* enable-feature-p)) ;;; Functions to toggle scalar promotion (defun disable-scalar-promotion () (setq *lisp-i::*convert-scalar-args-p* nil)) (defun enable-scalar-promotion () (setq *lisp-i::*convert-scalar-args-p* t)) ;;; Scalar to Pvar Argument Conversion Macros: ;;; A call to these macros may be included within any *Lisp operator where ;;; conversion should be applied to an argument. If argument conversion is ;;; disabled, these macros do nothing. (Expand to NIL or pass args unchanged) ;;; Pvar-argument is called in the following way: ( pvar - argument ! ! < arg - desc1 > < > < arg - desc3 > ... ) where < arg - descn > is one of : ;;; argument-name data-type -- useful for single-arg functions ;;; (argname1 argname2 ...) data-type -- useful for multi-arg functions ;;; &opt argument-name data-type -- makes handling optionals simpler ;;; &opt (argname1 argname2 ...) data-type -- useful for multi-arg optionals ;;; &front-end argument-name data-type -- for front-end arguments ;;; &front-end (argname1 argname2 ...) 
data-type -- for multiple fe arguments ;;; &rest argument-name data-type -- &rest args _must_ be handled seperately ;;; A sample call to pvar-argument!! might look like: ;;; (pvar-argument!! elephantp boolean (ears trunk tail integer) &rest wrinkle-types complex) (defmacro pvar-argument!! (&rest argument-descriptors) (when *convert-scalar-args-to-pvars* (let (result class type) (do* ((nextarg (when argument-descriptors (pop argument-descriptors)) (when argument-descriptors (pop argument-descriptors)))) ((null nextarg)) (if (symbolp nextarg) (case nextarg (&rest (setq class :&rest nextarg (pop argument-descriptors))) (&opt (setq class :&opt nextarg (pop argument-descriptors)) (when (listp nextarg) (setq class :&opt-mult))) (&front-end (setq class :&front-end nextarg (pop argument-descriptors)) (when (listp nextarg) (setq class :&front-end-mult))) (otherwise (setq class :single))) (setq class :multiple)) (setq type (pop argument-descriptors)) (multiple-value-bind (declaration test conversion optimization) (conversion-for-type type) (when test ;;; if the type is one we know how to convert (case class (:single (push (conversion-form-for nextarg test declaration conversion optimization) result)) (:&opt (push `(when ,nextarg ,(conversion-form-for nextarg test declaration conversion optimization)) result)) (:&opt-mult (dolist (argname nextarg) (push `(when ,argname ,(conversion-form-for argname test declaration conversion optimization)) result))) (:&front-end (push (conversion-form-for nextarg test declaration conversion optimization t) result)) (:&front-end-mult (dolist (argname nextarg) (push (conversion-form-for argname test declaration conversion optimization t) result))) (:&rest (push (rest-conversion-form nextarg test declaration conversion optimization) result)) (:multiple (dolist (argname nextarg) (push (conversion-form-for argname test declaration conversion optimization) result))) )))) (when result `(*lisp-i::*nocompile (when *convert-scalar-args-p* ,@(nreverse result))))))) ;;; A macro for simple cases: (defmacro simple-pvar-argument!! (&rest arglist) (labels ((expandify!! (arglist) (if (null arglist) NIL (if (member (car arglist) '(&opt &rest)) (if (null (cdr arglist)) (progn (warn "&opt or &rest pvar-argument expression missing argument.") nil) `(,(car arglist) ,(cadr arglist) legal-pvar-value ,@(expandify!! (cddr arglist)))) `(,(car arglist) legal-pvar-value ,@(expandify!! (cdr arglist))))))) (unless (null arglist) `(pvar-argument!! ,@(expandify!! arglist))))) ;;; These functions produce conversion forms for the above macros (defun bang-bang-form-for (argname &optional (declaration nil) (conversion nil) (front-end-p nil)) (let ((form argname)) (when conversion (setq form `(,conversion ,form))) (when declaration (setq form `(the ,declaration ,form))) (if front-end-p (setq form `(front-end!! ,form)) (setq form `(!! 
,form))))) (defun conversion-form-for (argname test &optional (declaration nil) (conversion nil) (optimization nil) (front-end-p nil)) (let ((form `(if (,test ,argname) (setq ,argname ,(bang-bang-form-for argname declaration conversion front-end-p))))) (when optimization (setq form `(progn (setq ,argname (,optimization ,argname)) ,form))) form)) (defun rest-conversion-form (argname test &optional (declaration nil) (conversion nil) (optimization nil)) `(do ((%arglist% ,argname (cdr %arglist%))) ((null %arglist%)) ,@(when optimization `((rplaca %arglist% (,optimization (car %arglist%))))) (if (,test (car %arglist%)) (rplaca %arglist% ,(bang-bang-form-for '(car %arglist%) declaration conversion))))) ;;; This variable contains conversion information for all types ;;; that need to be converted (thus far). (defvar *conversion-list* nil "Scalar-to-pvar conversion info for types that can be converted.") (setq *conversion-list* ;;; Elements of the form: (nametag declaration test [conversion optimization]) ;;; listed in order of relative frequency of use of each data type. ;;; nametag is conversion tag used in pvar-argument!! to determine test needed ;;; declaration is how scalar value being converted should be declared ;;; test is a boolean test that checks for that scalar type ;;; conversion is an optional function that gets called on the scalar value ;;; immediately prior to testing, to possibly convert it into a nicer form. ;;; optimization is an optional function that gets called on the scalar value ;;; before (and independant of) testing, to allow possible optimizations that ;;; make test unnecessary. '((float float floatp) (integer integer integerp) (boolean boolean booleanp nil boolean-optimize) (boolarg nil boolargp nil boolean-optimize) (complex complex complexp) (non-complex number non-complexp) (character character characterp) (number number numberp) (legal-pvar-value nil legal-pvar-valuep) (vector vector front-end-vector-p) (array array front-end-array-p) (*defstruct nil *defstructp) (*sequence vector *sequencep) (sf-vector vector sf-vectorp) (bit-array array bit-arrayp) (charint nil charintp) (char-bitspec integer char-bitspecp bitspec-to-integer) Bytespecs are a pain . (*bytespec #-(AND LUCID *LISP-SIMULATOR) integer #+(AND LUCID *LISP-SIMULATOR) nil *bytespecp nil bytespec-optimize) (address-object nil address-object-p) (segment-set-object nil segment-set-objectp) )) (defun conversion-for-type (type) (let* ((known (member type *conversion-list* :key 'car))) (if known (setq known (cdr (car known))) (warn "Not one of known conversion types: ~A." type)) (values-list known))) This used to be in , but it is used further ;;; down in this file so now its here. (defmacro fast-pvarp (x) `(internal-pvarp ,x)) ;;; For now, only the following types are important: ;;; >>> Boolean: T or NIL (defun booleanp (thingy) (or (eq thingy t) (eq thingy nil))) Boolean argument may be either a boolean value , or any non - NIL value . (defun boolargp (thingy) (legal-pvar-valuep thingy)) We can optimize by changing to a specific pvar in two cases : (defun boolean-optimize (thingy) (cond ((eq thingy t) t!!) ((eq thingy nil) nil!!) 
(t thingy))) > > > Number : numberp exists > > > Integer : integerp exists ;;; >>> Float: floatp exists > > > Complex : exists ;;; >>> Non-complex: (defun non-complexp (thingy) (and (numberp thingy) (not (complexp thingy)))) ;;; >>> Character: characterp exists ;;; >>> Array: arrayp exists (defun front-end-array-p (x) (and (not (fast-pvarp x)) (arrayp x)) ) (defun front-end-vector-p (x) (and (not (fast-pvarp x)) (vectorp x)) ) > > > Bit - array : use arrayp , and let bit array functions catch non - bit array pvars (defun bit-arrayp (thingy) (front-end-array-p thingy)) > > > Vector : exists > > > Sf - vector : ( single - float vector ) use , let ( d)sf functions catch non - sf vectors (defun sf-vectorp (thingy) (and (not (fast-pvarp thingy)) (vectorp thingy)) ) > > > * sequence : ( * Lisp sequence ) use , since * Lisp sequences can only be vectors (defun *sequencep (thingy) (and (not (fast-pvarp thingy)) (vectorp thingy)) ) ;;; >>> *defstruct: (structure object defined by *defstruct) (defun *defstructp (thingy) (let ((thing-type (type-of thingy))) (and (symbolp thing-type) (structure-pvar-type-known-*defstruct-type thing-type)))) ;;; >>> legal-pvar-value: (pvar value) Any legal argument to !! (defun legal-pvar-valuep (thing) (or (booleanp thing) ; t or nil (numberp thing) ; integer, float, complex (characterp thing) ; string-char and character (*defstructp thing) ; *defstructs (and (not (fast-pvarp thing)) (arrayp thing)) ; arrays )) ;;;(arrayp thing) ; arrays, vectors/sequences ( * thing ) ) ) ; * defstruct objects ;;; <<< The following are specialized types >>> > > > : ( character or integer ) (defun charintp (thingy) (or (integerp thingy) (characterp thingy))) ;;; >>> char-bitspec: (bit specifier for char-bit!!) (defun char-bitspecp (thingy) (member thingy '(0 :control 1 :meta 2 :super 3 :hyper))) (defun bitspec-to-integer (thingy) (if (integerp thingy) thingy (position thingy '(:control :meta :super :hyper)))) > > > * : ( * Lisp byte - specifier ) ;;; This is really ridiculous. On , bytespecs are always integers , and * bytespecs are integer pvars In Lucid , bytespecs are an ugly internal data type , and * bytespecs are : ;;; on the hardware, integer pvars, since that's the only way to represent them in the simulator , " pvars " of the ugly data type (defconstant *bytespec-type* #+symbolics 'fixnum 0 0 is illegal in Allegro this gives the nasty type in Lucid (defun *bytespecp (thingy) (or (typep thingy *bytespec-type*) allow integer bytespecs under Lucid )) ;;; This stub keeps the call to constant-byte!! below from returning a warning ;;; at compile-time, since I don't think it really makes sense to move constant-byte!! ;;; into this file. #+(AND LUCID *LISP-HARDWARE) (defun constant-byte!! (size position) (ignore size position) (error "CONSTANT-BYTE!!, defined in no-bang-bang.lisp, was not redefined by *Lisp interpreter.")) Turn Lucid 's nasty bytespec structure into a friendly integer ( except in simulator ! ) , so operators requiring bytespec operators will eat calls to CL byte function . In simulator under Lucid , are always the nasty type , so ;;; take care of 'em by turning them into the right byte!! call! (defun bytespec-optimize (thingy) only need to convert Lucid bytespecs on hardware (if (eql (type-of thingy) *bytespec-type*) (setq thingy (constant-byte!! (byte-size thingy) (byte-position thingy)))) #+(AND LUCID *LISP-SIMULATOR) (if (eql (type-of thingy) *bytespec-type*) (setq thingy (byte!! (!! (byte-size thingy)) (!! 
(byte-position thingy))))) thingy) < < < The following two are both * defstructs , so these tests may not be needed > > > ;;; >>> Address-Object: *Lisp-I:address-object-p exists ;;; >>> Segment-Set-Object: (defun segment-set-objectp (thingy) (typep thingy 'segment-set)) ;;;; Tests for the above ; ( ( val type ) ` ( let ( ( arg , ) ) ; (format t "~S " arg) ; (pvar-argument!! arg ,type) ; (format t "~S~%" arg))) ; ( defun nbtest ( ) ; (*let () ; to cleanup stack afterwards ( nbtestarm 3.14159 float ) ( nbtestarm 3 integer ) ( nbtestarm t boolean ) ( nbtestarm nil boolean ) ( nbtestarm 3 boolarg ) ( nbtestarm # C(3.0 4.0 ) complex ) ( nbtestarm 5 non - complex ) ( nbtestarm # \G character ) ( nbtestarm 0 number ) ( nbtestarm 23 legal - pvar - value ) ( nbtestarm # ( 1 2 3 ) vector ) ( nbtestarm # ( 1 2 3 ) array ) ( nbtestarm # ( 1 2 3 ) * sequence ) ( nbtestarm # ( T NIL T ) bit - array ) ( nbtestarm 23 charint ) ( nbtestarm : control char - bitspec ) ( nbtestarm 0 char - bitspec ) ( nbtestarm ( grid 2 3 ) address - object ) ; )) ;
null
https://raw.githubusercontent.com/LdBeth/star-lisp/034fb97fe8780d6e9fbff7c1d8c4a6b8c331797b/source/no-bang-bang.lisp
lisp
MODE : LISP ; BASE : 10 ; PACKAGE : * LISP - I ; MUSER : YES -*- > ************************************************************************************ > All rights reserved > ************************************************************************************ No-Bang-Bang.lisp The global variables and macros defined in this file allow the *Lisp user to provide scalar arguments to *Lisp functions that expect pvar arguments. the need to use the !! operator in most expressions. The main purpose of this feature is to eliminate the need for !! in supplying Conversion is performed by any function or macro in which the forms below are applied to arguments. Conversion is NOT performed for operators such as *funcall and *apply, where it is obvious that some arguments MUST be scalars. Also, conversion is not performed for any argument to a *Lisp operator that obviously MUST be a conversion would obviously defeat the intended purpose of the operation This variable controls whether the feature is included when *Lisp is compiled. This variable controls whether the feature is active, and may be altered at run-time. It is inactive by default. A simple function to make turning it on and off easy: Functions to toggle scalar promotion Scalar to Pvar Argument Conversion Macros: A call to these macros may be included within any *Lisp operator where conversion should be applied to an argument. If argument conversion is disabled, these macros do nothing. (Expand to NIL or pass args unchanged) Pvar-argument is called in the following way: argument-name data-type -- useful for single-arg functions (argname1 argname2 ...) data-type -- useful for multi-arg functions &opt argument-name data-type -- makes handling optionals simpler &opt (argname1 argname2 ...) data-type -- useful for multi-arg optionals &front-end argument-name data-type -- for front-end arguments &front-end (argname1 argname2 ...) data-type -- for multiple fe arguments &rest argument-name data-type -- &rest args _must_ be handled seperately A sample call to pvar-argument!! might look like: (pvar-argument!! elephantp boolean (ears trunk tail integer) &rest wrinkle-types complex) if the type is one we know how to convert A macro for simple cases: These functions produce conversion forms for the above macros This variable contains conversion information for all types that need to be converted (thus far). Elements of the form: (nametag declaration test [conversion optimization]) listed in order of relative frequency of use of each data type. nametag is conversion tag used in pvar-argument!! to determine test needed declaration is how scalar value being converted should be declared test is a boolean test that checks for that scalar type conversion is an optional function that gets called on the scalar value immediately prior to testing, to possibly convert it into a nicer form. optimization is an optional function that gets called on the scalar value before (and independant of) testing, to allow possible optimizations that make test unnecessary. down in this file so now its here. For now, only the following types are important: >>> Boolean: T or NIL >>> Float: floatp exists >>> Non-complex: >>> Character: characterp exists >>> Array: arrayp exists >>> *defstruct: (structure object defined by *defstruct) >>> legal-pvar-value: (pvar value) Any legal argument to !! 
t or nil integer, float, complex string-char and character *defstructs arrays (arrayp thing) ; arrays, vectors/sequences * defstruct objects <<< The following are specialized types >>> >>> char-bitspec: (bit specifier for char-bit!!) This is really ridiculous. on the hardware, integer pvars, since that's the only way to represent them This stub keeps the call to constant-byte!! below from returning a warning at compile-time, since I don't think it really makes sense to move constant-byte!! into this file. take care of 'em by turning them into the right byte!! call! >>> Address-Object: *Lisp-I:address-object-p exists >>> Segment-Set-Object: Tests for the above (format t "~S " arg) (pvar-argument!! arg ,type) (format t "~S~%" arg))) (*let () ; to cleanup stack afterwards ))
> Copyright 1990 Thinking Machines Corporation , Inc. of Cambridge , Massachusetts . (in-package :*lisp-i) By , with pointers from JP Massar Created : 3/19/90 Revised : 4/3/90 - I decided to do things more generically . WRS 8/90 - Added simple - pvar - argument ! ! for simulator functions . WRS 9/5 - Added & front - end keyword WRS These scalars are automatically converted to pvars ( ala ! ! ) , thus obviating arguments to * Lisp functions where some arguments may be constant pvar expressions ( i.e. , ( + ! ! ( ! ! 1 ) ( ! ! 2 ) ( ! ! 23) ... &ad - nauseum ) ) " real " pvar ( i.e. , pvar - name , pvar - field , etc . ) , or to an operation where such ( tests such as booleanp ! ! , characterp ! ! , etc . ) (defvar *convert-scalar-args-to-pvars* t "Whether to automatically convert scalar arguments that should be pvars into pvars") (defvar *convert-scalar-args-p* t "Whether to automatically convert scalar arguments (conditional at run-time)") (defun no-bang-bang (&optional (enable-feature-p t)) (setq *convert-scalar-args-to-pvars* enable-feature-p) (setq *convert-scalar-args-p* enable-feature-p)) (defun disable-scalar-promotion () (setq *lisp-i::*convert-scalar-args-p* nil)) (defun enable-scalar-promotion () (setq *lisp-i::*convert-scalar-args-p* t)) ( pvar - argument ! ! < arg - desc1 > < > < arg - desc3 > ... ) where < arg - descn > is one of : (defmacro pvar-argument!! (&rest argument-descriptors) (when *convert-scalar-args-to-pvars* (let (result class type) (do* ((nextarg (when argument-descriptors (pop argument-descriptors)) (when argument-descriptors (pop argument-descriptors)))) ((null nextarg)) (if (symbolp nextarg) (case nextarg (&rest (setq class :&rest nextarg (pop argument-descriptors))) (&opt (setq class :&opt nextarg (pop argument-descriptors)) (when (listp nextarg) (setq class :&opt-mult))) (&front-end (setq class :&front-end nextarg (pop argument-descriptors)) (when (listp nextarg) (setq class :&front-end-mult))) (otherwise (setq class :single))) (setq class :multiple)) (setq type (pop argument-descriptors)) (multiple-value-bind (declaration test conversion optimization) (conversion-for-type type) (case class (:single (push (conversion-form-for nextarg test declaration conversion optimization) result)) (:&opt (push `(when ,nextarg ,(conversion-form-for nextarg test declaration conversion optimization)) result)) (:&opt-mult (dolist (argname nextarg) (push `(when ,argname ,(conversion-form-for argname test declaration conversion optimization)) result))) (:&front-end (push (conversion-form-for nextarg test declaration conversion optimization t) result)) (:&front-end-mult (dolist (argname nextarg) (push (conversion-form-for argname test declaration conversion optimization t) result))) (:&rest (push (rest-conversion-form nextarg test declaration conversion optimization) result)) (:multiple (dolist (argname nextarg) (push (conversion-form-for argname test declaration conversion optimization) result))) )))) (when result `(*lisp-i::*nocompile (when *convert-scalar-args-p* ,@(nreverse result))))))) (defmacro simple-pvar-argument!! (&rest arglist) (labels ((expandify!! (arglist) (if (null arglist) NIL (if (member (car arglist) '(&opt &rest)) (if (null (cdr arglist)) (progn (warn "&opt or &rest pvar-argument expression missing argument.") nil) `(,(car arglist) ,(cadr arglist) legal-pvar-value ,@(expandify!! (cddr arglist)))) `(,(car arglist) legal-pvar-value ,@(expandify!! (cdr arglist))))))) (unless (null arglist) `(pvar-argument!! ,@(expandify!! 
arglist))))) (defun bang-bang-form-for (argname &optional (declaration nil) (conversion nil) (front-end-p nil)) (let ((form argname)) (when conversion (setq form `(,conversion ,form))) (when declaration (setq form `(the ,declaration ,form))) (if front-end-p (setq form `(front-end!! ,form)) (setq form `(!! ,form))))) (defun conversion-form-for (argname test &optional (declaration nil) (conversion nil) (optimization nil) (front-end-p nil)) (let ((form `(if (,test ,argname) (setq ,argname ,(bang-bang-form-for argname declaration conversion front-end-p))))) (when optimization (setq form `(progn (setq ,argname (,optimization ,argname)) ,form))) form)) (defun rest-conversion-form (argname test &optional (declaration nil) (conversion nil) (optimization nil)) `(do ((%arglist% ,argname (cdr %arglist%))) ((null %arglist%)) ,@(when optimization `((rplaca %arglist% (,optimization (car %arglist%))))) (if (,test (car %arglist%)) (rplaca %arglist% ,(bang-bang-form-for '(car %arglist%) declaration conversion))))) (defvar *conversion-list* nil "Scalar-to-pvar conversion info for types that can be converted.") (setq *conversion-list* '((float float floatp) (integer integer integerp) (boolean boolean booleanp nil boolean-optimize) (boolarg nil boolargp nil boolean-optimize) (complex complex complexp) (non-complex number non-complexp) (character character characterp) (number number numberp) (legal-pvar-value nil legal-pvar-valuep) (vector vector front-end-vector-p) (array array front-end-array-p) (*defstruct nil *defstructp) (*sequence vector *sequencep) (sf-vector vector sf-vectorp) (bit-array array bit-arrayp) (charint nil charintp) (char-bitspec integer char-bitspecp bitspec-to-integer) Bytespecs are a pain . (*bytespec #-(AND LUCID *LISP-SIMULATOR) integer #+(AND LUCID *LISP-SIMULATOR) nil *bytespecp nil bytespec-optimize) (address-object nil address-object-p) (segment-set-object nil segment-set-objectp) )) (defun conversion-for-type (type) (let* ((known (member type *conversion-list* :key 'car))) (if known (setq known (cdr (car known))) (warn "Not one of known conversion types: ~A." type)) (values-list known))) This used to be in , but it is used further (defmacro fast-pvarp (x) `(internal-pvarp ,x)) (defun booleanp (thingy) (or (eq thingy t) (eq thingy nil))) Boolean argument may be either a boolean value , or any non - NIL value . (defun boolargp (thingy) (legal-pvar-valuep thingy)) We can optimize by changing to a specific pvar in two cases : (defun boolean-optimize (thingy) (cond ((eq thingy t) t!!) ((eq thingy nil) nil!!) 
(t thingy))) > > > Number : numberp exists > > > Integer : integerp exists > > > Complex : exists (defun non-complexp (thingy) (and (numberp thingy) (not (complexp thingy)))) (defun front-end-array-p (x) (and (not (fast-pvarp x)) (arrayp x)) ) (defun front-end-vector-p (x) (and (not (fast-pvarp x)) (vectorp x)) ) > > > Bit - array : use arrayp , and let bit array functions catch non - bit array pvars (defun bit-arrayp (thingy) (front-end-array-p thingy)) > > > Vector : exists > > > Sf - vector : ( single - float vector ) use , let ( d)sf functions catch non - sf vectors (defun sf-vectorp (thingy) (and (not (fast-pvarp thingy)) (vectorp thingy)) ) > > > * sequence : ( * Lisp sequence ) use , since * Lisp sequences can only be vectors (defun *sequencep (thingy) (and (not (fast-pvarp thingy)) (vectorp thingy)) ) (defun *defstructp (thingy) (let ((thing-type (type-of thingy))) (and (symbolp thing-type) (structure-pvar-type-known-*defstruct-type thing-type)))) (defun legal-pvar-valuep (thing) )) > > > : ( character or integer ) (defun charintp (thingy) (or (integerp thingy) (characterp thingy))) (defun char-bitspecp (thingy) (member thingy '(0 :control 1 :meta 2 :super 3 :hyper))) (defun bitspec-to-integer (thingy) (if (integerp thingy) thingy (position thingy '(:control :meta :super :hyper)))) > > > * : ( * Lisp byte - specifier ) On , bytespecs are always integers , and * bytespecs are integer pvars In Lucid , bytespecs are an ugly internal data type , and * bytespecs are : in the simulator , " pvars " of the ugly data type (defconstant *bytespec-type* #+symbolics 'fixnum 0 0 is illegal in Allegro this gives the nasty type in Lucid (defun *bytespecp (thingy) (or (typep thingy *bytespec-type*) allow integer bytespecs under Lucid )) #+(AND LUCID *LISP-HARDWARE) (defun constant-byte!! (size position) (ignore size position) (error "CONSTANT-BYTE!!, defined in no-bang-bang.lisp, was not redefined by *Lisp interpreter.")) Turn Lucid 's nasty bytespec structure into a friendly integer ( except in simulator ! ) , so operators requiring bytespec operators will eat calls to CL byte function . In simulator under Lucid , are always the nasty type , so (defun bytespec-optimize (thingy) only need to convert Lucid bytespecs on hardware (if (eql (type-of thingy) *bytespec-type*) (setq thingy (constant-byte!! (byte-size thingy) (byte-position thingy)))) #+(AND LUCID *LISP-SIMULATOR) (if (eql (type-of thingy) *bytespec-type*) (setq thingy (byte!! (!! (byte-size thingy)) (!! (byte-position thingy))))) thingy) < < < The following two are both * defstructs , so these tests may not be needed > > > (defun segment-set-objectp (thingy) (typep thingy 'segment-set)) ( ( val type ) ` ( let ( ( arg , ) ) ( defun nbtest ( ) ( nbtestarm 3.14159 float ) ( nbtestarm 3 integer ) ( nbtestarm t boolean ) ( nbtestarm nil boolean ) ( nbtestarm 3 boolarg ) ( nbtestarm # C(3.0 4.0 ) complex ) ( nbtestarm 5 non - complex ) ( nbtestarm # \G character ) ( nbtestarm 0 number ) ( nbtestarm 23 legal - pvar - value ) ( nbtestarm # ( 1 2 3 ) vector ) ( nbtestarm # ( 1 2 3 ) array ) ( nbtestarm # ( 1 2 3 ) * sequence ) ( nbtestarm # ( T NIL T ) bit - array ) ( nbtestarm 23 charint ) ( nbtestarm : control char - bitspec ) ( nbtestarm 0 char - bitspec ) ( nbtestarm ( grid 2 3 ) address - object )
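A hypothetical sketch (not in the original file) of how a *Lisp operator could use PVAR-ARGUMENT!! from this file, following the multi-argument descriptor form documented above; MY-ADD!! is a made-up operator name and +!! is the standard *Lisp addition operator.

;;; Promote scalar X and Y to pvars (when scalar conversion is enabled)
;;; before handing them to +!!.
(defun my-add!! (x y)
  (pvar-argument!! (x y) number)
  (+!! x y))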
a449895350f2bd0b2ce6ccca4d761ff9b0f29505cb051e6e67e64e55f891612d
Tener/deeplearning-thesis
Agent.hs
module Agent where import Board import CairoRender import CommonDatatypes import Control.Monad (when) import Text.Printf import System.Random.MWC import Data.Ord import Data.List (sortBy) import qualified Data.Tree.Game_tree.Negascout as GTreeAlgo -- the board and the current player's color data AgentRandom = AgentRandom { gen :: GenIO, color :: Color } data AgentSimple = AgentSimple { gen's :: GenIO, color's :: Color } data AgentGameTree = AgentGameTree GenIO Color instance Agent AgentGameTree where mkAgent col = do g <- withSystemRandom $ asGenIO $ return return (AgentGameTree g col) makeMove _agent@(AgentGameTree _gen col) brd = do let gst = GameState brd (\ g -> evalBoardB (gtColorNow g) (gtBoard g)) col col depth = 4 (princ, score) = GTreeAlgo.negascout gst depth when (score /= 0) (print ("gtree",score,col)) return (morph $ gtBoard $ head $ tail $ princ) instance Agent AgentRandom where mkAgent col = do g <- withSystemRandom $ asGenIO $ return return (AgentRandom g col) makeMove agent brd = do let moves = getMovesFixColor (color agent) brd case moves of [] -> do print "Stuck, cant do anything." print (unwrap brd) -- saveBoard (unwrap brd) "board-stuck.svg" return (morph brd :: WBoard) _ -> do pick <- uniformR (0, length moves - 1) (gen agent) let chosen = (moves !! pick) -- print ("random",chosen) return (WBoard chosen) instance Agent AgentSimple where mkAgent col = do g <- withSystemRandom $ asGenIO $ return return (AgentSimple g col) makeMove agent brd = do let moves = getMovesFixColor (color's agent) brd case moves of [] -> do print "Stuck, cant do anything." print (unwrap brd) -- saveBoard (unwrap brd) "board-stuck.svg" return (morph brd) _ -> do let maxdepth = 2 print (length moves) bare <- mapM (negmax maxdepth (color's agent)) moves let weighted = sortBy (comparing fst) $ zip bare moves bestScore = fst $ last $ weighted best = map snd $ filter ((==bestScore).fst) weighted putStrLn (printf "Color: %s, score: %f" (show (color's agent)) bestScore) pick <- uniformR (0, length best - 1) (gen's agent) return (WBoard (best !! pick)) negmax :: Int -> Color -> Board -> IO Double negmax 0 col brd = do let val = (evalBoard col brd) putStr " . " -- print ( " negmax finish " , ) return val negmax n col brd -- | isFinished brd = evalBoard col brd -- | otherwise = do negmax n col brd = do vals <- mapM (negmax (n-1) (negColor col)) (getMoves col brd) let val = (maximum ((-1/0) : map negate vals)) return $! val according to the heuristic evalBoard :: Color -> Board -> Double evalBoard col brd = case getWinner brd of Just col'win -> if col'win == col then 1000 else -1000 Nothing -> fromIntegral $ marbleCount col brd - marbleCount (negColor col) brd evalBoardI :: Color -> Board -> Int evalBoardI col brd = case getWinner brd of Just col'win -> if col'win == col then 1000 else -1000 Nothing -> fromIntegral $ marbleCount col brd - marbleCount (negColor col) brd evalBoardB :: Color -> BBoard -> Int evalBoardB Black (BBoard brd) = evalBoardI Black brd evalBoardB White (BBoard brd) = evalBoardI White (negateBoard brd) --------------------------
null
https://raw.githubusercontent.com/Tener/deeplearning-thesis/c56866bf6f48db3185b4b62348d292bf39a7a2af/lib/Agent.hs
haskell
the board and the current player's color saveBoard (unwrap brd) "board-stuck.svg" print ("random",chosen) saveBoard (unwrap brd) "board-stuck.svg" print ( " negmax finish " , ) | isFinished brd = evalBoard col brd | otherwise = do ------------------------
module Agent where import Board import CairoRender import CommonDatatypes import Control.Monad (when) import Text.Printf import System.Random.MWC import Data.Ord import Data.List (sortBy) import qualified Data.Tree.Game_tree.Negascout as GTreeAlgo data AgentRandom = AgentRandom { gen :: GenIO, color :: Color } data AgentSimple = AgentSimple { gen's :: GenIO, color's :: Color } data AgentGameTree = AgentGameTree GenIO Color instance Agent AgentGameTree where mkAgent col = do g <- withSystemRandom $ asGenIO $ return return (AgentGameTree g col) makeMove _agent@(AgentGameTree _gen col) brd = do let gst = GameState brd (\ g -> evalBoardB (gtColorNow g) (gtBoard g)) col col depth = 4 (princ, score) = GTreeAlgo.negascout gst depth when (score /= 0) (print ("gtree",score,col)) return (morph $ gtBoard $ head $ tail $ princ) instance Agent AgentRandom where mkAgent col = do g <- withSystemRandom $ asGenIO $ return return (AgentRandom g col) makeMove agent brd = do let moves = getMovesFixColor (color agent) brd case moves of [] -> do print "Stuck, cant do anything." print (unwrap brd) return (morph brd :: WBoard) _ -> do pick <- uniformR (0, length moves - 1) (gen agent) let chosen = (moves !! pick) return (WBoard chosen) instance Agent AgentSimple where mkAgent col = do g <- withSystemRandom $ asGenIO $ return return (AgentSimple g col) makeMove agent brd = do let moves = getMovesFixColor (color's agent) brd case moves of [] -> do print "Stuck, cant do anything." print (unwrap brd) return (morph brd) _ -> do let maxdepth = 2 print (length moves) bare <- mapM (negmax maxdepth (color's agent)) moves let weighted = sortBy (comparing fst) $ zip bare moves bestScore = fst $ last $ weighted best = map snd $ filter ((==bestScore).fst) weighted putStrLn (printf "Color: %s, score: %f" (show (color's agent)) bestScore) pick <- uniformR (0, length best - 1) (gen's agent) return (WBoard (best !! pick)) negmax :: Int -> Color -> Board -> IO Double negmax 0 col brd = do let val = (evalBoard col brd) return val negmax n col brd = do vals <- mapM (negmax (n-1) (negColor col)) (getMoves col brd) let val = (maximum ((-1/0) : map negate vals)) return $! val według heurystyki evalBoard :: Color -> Board -> Double evalBoard col brd = case getWinner brd of Just col'win -> if col'win == col then 1000 else -1000 Nothing -> fromIntegral $ marbleCount col brd - marbleCount (negColor col) brd evalBoardI :: Color -> Board -> Int evalBoardI col brd = case getWinner brd of Just col'win -> if col'win == col then 1000 else -1000 Nothing -> fromIntegral $ marbleCount col brd - marbleCount (negColor col) brd evalBoardB :: Color -> BBoard -> Int evalBoardB Black (BBoard brd) = evalBoardI Black brd evalBoardB White (BBoard brd) = evalBoardI White (negateBoard brd)
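A hypothetical usage sketch (not part of the module). The Agent class itself is defined in CommonDatatypes and is not shown here, so the signatures below are assumptions inferred from the instances above (mkAgent taking a Color, makeMove taking and returning a wrapped board).

-- Assumed signatures: mkAgent :: Color -> IO AgentRandom, makeMove :: AgentRandom -> WBoard -> IO WBoard.
pickRandomMove :: WBoard -> IO WBoard
pickRandomMove brd = do
  agent <- (mkAgent Black :: IO AgentRandom)
  makeMove agent brd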