column        type            range
_id           stringlengths   64 .. 64
repository    stringlengths   6 .. 84
name          stringlengths   4 .. 110
content       stringlengths   0 .. 248k
license       null
download_url  stringlengths   89 .. 454
language      stringclasses   7 values
comments      stringlengths   0 .. 74.6k
code          stringlengths   0 .. 248k
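Each record below follows this schema: content holds the raw source file, comments and code hold the extracted comment text and the comment-stripped source, language is one of seven classes, and _id is a 64-character hexadecimal digest. As a minimal illustration of how one row could be checked against these bounds (a sketch only: it assumes each row has been parsed into a Clojure map keyed by the column names as strings, and valid-row? is a hypothetical helper, not part of any dataset tooling):

;; Illustrative schema check for a single row; keys follow the columns above.
(defn valid-row?
  [{:strs [_id repository name content download_url language comments code]}]
  (and (some? (re-matches #"[0-9a-f]{64}" _id))                     ; fixed-width hex id
       (<= 6 (count repository) 84)
       (<= 4 (count name) 110)
       (string? content)
       (some? (re-find #"^https://raw\.githubusercontent\.com/" download_url))
       (string? language)                                           ; one of 7 classes
       (string? comments)
       (string? code)))

;; Example call on a row shaped like the entries below:
(valid-row? {"_id" (apply str (repeat 64 "a"))
             "repository" "pokepay/cl-fluent-logger"
             "name" "broadcast.lisp"
             "content" "(defpackage ...)"
             "download_url" "https://raw.githubusercontent.com/pokepay/..."
             "language" "lisp"
             "comments" ""
             "code" "(defpackage ...)"})
;; => true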
ca3bbe5e4a461285a434b3c03c10d3fb91d38743ef03f9bf40eeafce3cf68c32
pokepay/cl-fluent-logger
broadcast.lisp
(defpackage #:cl-fluent-logger/logger/broadcast (:use #:cl #:cl-fluent-logger/logger/base) (:export #:broadcast-logger #:make-broadcast-logger)) (in-package #:cl-fluent-logger/logger/broadcast) (defclass broadcast-logger (base-logger) ((children :initarg :children :initform nil :accessor broadcast-logger-children))) (defun make-broadcast-logger (&rest loggers) (flet ((loggerp (obj) (typep obj 'base-logger))) (assert (every #'loggerp loggers))) (make-instance 'broadcast-logger :children loggers)) (defmethod open-logger ((logger broadcast-logger)) (mapc #'open-logger (broadcast-logger-children logger)) (values)) (defmethod close-logger ((logger broadcast-logger)) (mapc #'close-logger (broadcast-logger-children logger)) (values)) (defmethod post-with-time ((logger broadcast-logger) tag data time) (let ((successed t)) (dolist (logger (broadcast-logger-children logger) successed) (unless (post-with-time logger tag data time) (setf successed nil)))))
null
https://raw.githubusercontent.com/pokepay/cl-fluent-logger/688e4fbf8762b9fa7c4d2f72fb524a9f1c3236ac/logger/broadcast.lisp
lisp
(defpackage #:cl-fluent-logger/logger/broadcast (:use #:cl #:cl-fluent-logger/logger/base) (:export #:broadcast-logger #:make-broadcast-logger)) (in-package #:cl-fluent-logger/logger/broadcast) (defclass broadcast-logger (base-logger) ((children :initarg :children :initform nil :accessor broadcast-logger-children))) (defun make-broadcast-logger (&rest loggers) (flet ((loggerp (obj) (typep obj 'base-logger))) (assert (every #'loggerp loggers))) (make-instance 'broadcast-logger :children loggers)) (defmethod open-logger ((logger broadcast-logger)) (mapc #'open-logger (broadcast-logger-children logger)) (values)) (defmethod close-logger ((logger broadcast-logger)) (mapc #'close-logger (broadcast-logger-children logger)) (values)) (defmethod post-with-time ((logger broadcast-logger) tag data time) (let ((successed t)) (dolist (logger (broadcast-logger-children logger) successed) (unless (post-with-time logger tag data time) (setf successed nil)))))
bd0c723c9030649699a417613f192888dc623fc7d63cf2adfcbbd81482620f52
play-co/hermes
gremlin.clj
(ns hermes.gremlin (:import (com.tinkerpop.gremlin.java GremlinPipeline))) (defn gremlin-eval [ob] (if (instance? GremlinPipeline ob) (seq (.toList ob)) ob)) (defmacro query [vertex & body] `(gremlin-eval (.. (GremlinPipeline. ~vertex) ~@body)))
null
https://raw.githubusercontent.com/play-co/hermes/9d203ec7b239e7800c89e6ee161486f35dabd110/src/hermes/gremlin.clj
clojure
(ns hermes.gremlin (:import (com.tinkerpop.gremlin.java GremlinPipeline))) (defn gremlin-eval [ob] (if (instance? GremlinPipeline ob) (seq (.toList ob)) ob)) (defmacro query [vertex & body] `(gremlin-eval (.. (GremlinPipeline. ~vertex) ~@body)))
2c64c4ab44a959f7a896b3ad89275fe09ba3c588336042c78e2e04c37aaa0466
shentufoundation/deepsea
StmtExpressionless.ml
open AST open Cop open Ctypes open Datatypes open Globalenvs open Integers open LowValues open MachineModel type function_kind = | Coq_normalFunction | Coq_constructorFunction type ret_type = | Tvoid_method | Tconstructor | Tfun | Tsome_method type statement = | Spush of (coq_val, label) sum | Sdup of nat | Ssload | Smload | Sunop of unary_operation | Sbinop of binary_operation * bool | Scall0 of builtin0 | Scall1 of builtin1 | Sskip | Spop | Ssstore | Smstore | Sswap of nat | Sdone of ret_type | Slabel of label | Sjump | Sjumpi | Shash | Stransfer | Scallargs of Int.int * nat * nat | Scallmethod of bool | Slog of nat * nat | Srevert | Scalldataload | Sconstructordataload of nat type code = statement list type coq_function = code (* singleton inductive, whose constructor was mkfunction *) (** val fn_code : coq_function -> code **) let fn_code f = f type genv = (coq_function, coq_type) Genv.t type program = (genv, label) prod
null
https://raw.githubusercontent.com/shentufoundation/deepsea/970576a97c8992655ed2f173f576502d73b827e1/src/backend/extraction/StmtExpressionless.ml
ocaml
singleton inductive, whose constructor was mkfunction * val fn_code : coq_function -> code *
open AST open Cop open Ctypes open Datatypes open Globalenvs open Integers open LowValues open MachineModel type function_kind = | Coq_normalFunction | Coq_constructorFunction type ret_type = | Tvoid_method | Tconstructor | Tfun | Tsome_method type statement = | Spush of (coq_val, label) sum | Sdup of nat | Ssload | Smload | Sunop of unary_operation | Sbinop of binary_operation * bool | Scall0 of builtin0 | Scall1 of builtin1 | Sskip | Spop | Ssstore | Smstore | Sswap of nat | Sdone of ret_type | Slabel of label | Sjump | Sjumpi | Shash | Stransfer | Scallargs of Int.int * nat * nat | Scallmethod of bool | Slog of nat * nat | Srevert | Scalldataload | Sconstructordataload of nat type code = statement list type coq_function = code let fn_code f = f type genv = (coq_function, coq_type) Genv.t type program = (genv, label) prod
c52c76e22b86143388027aaad33d314d23fabf9a2d3186520f978181b16691b7
onyx-platform/onyx
ready_encoder.clj
(ns ^{:no-doc true} onyx.messaging.serializers.ready-encoder (:require [onyx.messaging.serializers.helpers :refer [type->byte]]) (:import [org.agrona.concurrent UnsafeBuffer])) (defprotocol PEncoder (set-src-peer-id [this peer-id]) (offset [this]) (length [this]) (wrap-impl [this buf offset])) (deftype Encoder [^:unsynchronized-mutable ^UnsafeBuffer buffer ^:unsynchronized-mutable offset] PEncoder (set-src-peer-id [this [peer-type peer-id]] (.putByte buffer offset (type->byte peer-type)) (.putLong buffer (unchecked-add-int offset 1) (.getMostSignificantBits ^java.util.UUID peer-id)) (.putLong buffer (unchecked-add-int offset 9) (.getLeastSignificantBits ^java.util.UUID peer-id)) this) (offset [this] offset) (length [this] 17) (wrap-impl [this buf new-offset] (set! offset new-offset) (set! buffer buf) this)) (defn wrap [buffer offset] (-> (->Encoder nil nil) (wrap-impl buffer offset)))
null
https://raw.githubusercontent.com/onyx-platform/onyx/74f9ae58cdbcfcb1163464595f1e6ae6444c9782/src/onyx/messaging/serializers/ready_encoder.clj
clojure
(ns ^{:no-doc true} onyx.messaging.serializers.ready-encoder (:require [onyx.messaging.serializers.helpers :refer [type->byte]]) (:import [org.agrona.concurrent UnsafeBuffer])) (defprotocol PEncoder (set-src-peer-id [this peer-id]) (offset [this]) (length [this]) (wrap-impl [this buf offset])) (deftype Encoder [^:unsynchronized-mutable ^UnsafeBuffer buffer ^:unsynchronized-mutable offset] PEncoder (set-src-peer-id [this [peer-type peer-id]] (.putByte buffer offset (type->byte peer-type)) (.putLong buffer (unchecked-add-int offset 1) (.getMostSignificantBits ^java.util.UUID peer-id)) (.putLong buffer (unchecked-add-int offset 9) (.getLeastSignificantBits ^java.util.UUID peer-id)) this) (offset [this] offset) (length [this] 17) (wrap-impl [this buf new-offset] (set! offset new-offset) (set! buffer buf) this)) (defn wrap [buffer offset] (-> (->Encoder nil nil) (wrap-impl buffer offset)))
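The ready_encoder.clj entry above defines a fixed-layout encoder: one peer-type byte followed by the two 64-bit halves of a UUID, which is why length reports 17. A minimal usage sketch, assuming onyx and its Agrona dependency are on the classpath; the 32-byte backing array and offset 0 are arbitrary values chosen for illustration:

(require '[onyx.messaging.serializers.ready-encoder :as ready-encoder])
(import '(org.agrona.concurrent UnsafeBuffer))

;; Wrap an Agrona buffer at offset 0; the encoder allocates nothing itself.
(let [buf (UnsafeBuffer. (byte-array 32))
      enc (ready-encoder/wrap buf 0)]
  ;; offset and length come straight from the PEncoder protocol defined above
  [(ready-encoder/offset enc) (ready-encoder/length enc)])
;; => [0 17]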
4a202cd0a7e53c58d411c911826d140ea0536d6b35324a925b2decfd1b86d5df
earl-ducaine/cl-garnet
accelerators.lisp
-*- Mode : LISP ; Syntax : Common - Lisp ; Package : INTERACTORS ; Base : 10 -*- ;;*******************************************************************;; The Garnet User Interface Development Environment . ; ; ;;*******************************************************************;; ;; This code was written as part of the Garnet project at ;; Carnegie Mellon University , and has been placed in the public ; ; ;; domain. ;; ;;*******************************************************************;; ;;; $Id:: $ ;; This file contains the mouse and keyboard ACCELERATORS code ;; Designed and implemented by ;; ;; The punchline for those who hate to read is that if you type the ;; following keys into a Garnet window, you get the associated action: ;; : SHIFT - F1 - raise window ;; :SHIFT-F2 - lower window ;; :SHIFT-F3 - iconify window : SHIFT - F4 - zoom window ;; :SHIFT-F5 - fullzoom window ;; :SHIFT-F6 - refresh window : SHIFT - F7 - destroy window ;; ;; :HELP - INSPECT object ;; :SHIFT-HELP - print out object under the mouse (also in inspector.lisp) ;; In interactors.lisp , the function " process - event " first checks the " first " ;; accelerators, as follows ;; 1) Try to match event (using assoc) against one in window's ;; :first-Accelerators slot, which should contain an alist ;; of the form ( (char1 . fun1) (char2 . fun2) ... ) [ Note : these functions take 1 arg , the low - level event struct ] ;; 2) If that succeeds, invoke the found function, else repeat the same process using the global variable * global - first - accelerators * ;; Then each low-level event to see if any interactors (or priority-levels ;; with :stop-when of :always) claim the event. If not, then it does the following: ;; 1) Try to match event (using assoc) against one in window's ;; :Accelerators slot, which should contain an alist of the form ;; ( (char1 . fun1) (char2 . fun2) ... ) [ Note : these functions take 1 arg , the low - level event struct ] ;; ;; 2) If that succeeds, invoke the found function, else repeat the ;; same process using the global variable *global-accelerators* ;; The variables * global - accelerators * and * global - first - accelerators * are defvar'd ;; in interactors.lisp, so they can be properly referenced. ;; This file first defines the * default - global - accelerators * and all the ;; functions it references. At the end, it invokes this function. 
;; ;; This file also defines a programmatic interface to accelerators ;; ;; ;;; Change log: 1/18/93 - supply accelerators that go FIRST before the inters ;; - removed ident as accelerator, since garnet-debug might ;; not be loaded 10/22/92 created (in-package "INTERACTORS") ;;; the exported functions and variables (eval-when (:execute :load-toplevel :compile-toplevel) (export '( *global-accelerators* ; defined in interactors.lisp *global-first-accelerators* ; defined in interactors.lisp *default-global-accelerators* add-global-accelerator add-window-accelerator remove-global-accelerator remove-window-accelerator clear-global-accelerators clear-window-accelerators default-global-accelerators ))) ;;; Default Support Fns ;; ;; args should be a list: (variable event) (defmacro with-event-win (args &rest body) `(let ((,(first args) (event-window ,(second args)))) (when ,(first args) ,@body))) (defun raise-acc (event) (with-event-win (win event) (opal:raise-window win))) (defun lower-acc (event) (with-event-win (win event) (opal:lower-window win))) (defun iconify-acc (event)(with-event-win (win event) (opal:iconify-window win))) (defun zoom-acc (event) (with-event-win (win event) (opal:zoom-window win))) (defun fullzoom-acc (event) (with-event-win (win event) (opal:fullzoom-window win))) (defun refresh-acc (event)(with-event-win (win event) (opal:update win T))) (defun destroy-acc (event)(with-event-win (win event) (opal:destroy win))) (defvar *default-global-accelerators* '( (:SHIFT-F1 . raise-acc) (:SHIFT-F2 . lower-acc) (:SHIFT-F3 . iconify-acc) (:SHIFT-F4 . zoom-acc) (:SHIFT-F5 . fullzoom-acc) (:SHIFT-F6 . refresh-acc) (:SHIFT-F7 . destroy-acc))) ;;;;;;;;;;;;;;;;;;;;;;;; Basic Interface ; ; ; ;;;;;;;;;;;;;;;;;;;;;;;; ;;; internal (defun add-accelerator (alist key fn replace-existing?) (let ((alist-entry (assoc key alist))) (if (and replace-existing? alist-entry) (setf (cdr alist-entry) fn) (push (cons key fn) alist)) alist)) (defun remove-accelerator (alist key remove-all?) (delete key alist :count (if remove-all? NIL 1) :key #'car)) ;;; exported (defun add-global-accelerator (key fn &key replace-existing? first?) (if first? (setq *global-first-accelerators* (add-accelerator *global-first-accelerators* key fn replace-existing?)) (setq *global-accelerators* (add-accelerator *global-accelerators* key fn replace-existing?)))) (defun add-window-accelerator (win key fn &key replace-existing? first?) (if first? (s-value win :first-accelerators (add-accelerator (g-value win :first-accelerators) key fn replace-existing?)) (s-value win :accelerators (add-accelerator (g-value win :accelerators) key fn replace-existing?)))) (defun remove-global-accelerator (key &key remove-all? first?) (if first? (setq *global-first-accelerators* (remove-accelerator *global-first-accelerators* key remove-all?)) (setq *global-accelerators* (remove-accelerator *global-accelerators* key remove-all?)))) (defun remove-window-accelerator (win key &key remove-all? first?) (if first? 
(s-value win :first-accelerators (remove-accelerator (g-value win :first-accelerators) key remove-all?)) (s-value win :accelerators (remove-accelerator (g-value win :accelerators) key remove-all?)))) (defun clear-global-accelerators () (setf *global-accelerators* NIL) (setf *global-first-accelerators* NIL)) (defun clear-window-accelerators (win) (s-value win :accelerators NIL) (s-value win :first-accelerators NIL)) (defun default-global-accelerators () (setf *global-accelerators* (copy-tree *default-global-accelerators*)) (setf *global-first-accelerators* NIL)) ;; Finally, invoke function to actually set the accelerators (default-global-accelerators)
null
https://raw.githubusercontent.com/earl-ducaine/cl-garnet/f0095848513ba69c370ed1dc51ee01f0bb4dd108/src/inter/accelerators.lisp
lisp
Syntax : Common - Lisp ; Package : INTERACTORS ; Base : 10 -*- *******************************************************************;; ; *******************************************************************;; This code was written as part of the Garnet project at ;; ; domain. ;; *******************************************************************;; $Id:: $ The punchline for those who hate to read is that if you type the following keys into a Garnet window, you get the associated action: :SHIFT-F2 - lower window :SHIFT-F3 - iconify window :SHIFT-F5 - fullzoom window :SHIFT-F6 - refresh window :HELP - INSPECT object :SHIFT-HELP - print out object under the mouse (also in inspector.lisp) accelerators, as follows 1) Try to match event (using assoc) against one in window's :first-Accelerators slot, which should contain an alist of the form ( (char1 . fun1) (char2 . fun2) ... ) 2) If that succeeds, invoke the found function, else repeat the Then each low-level event to see if any interactors (or priority-levels with :stop-when of :always) claim the event. If not, then it does the following: 1) Try to match event (using assoc) against one in window's :Accelerators slot, which should contain an alist of the form ( (char1 . fun1) (char2 . fun2) ... ) 2) If that succeeds, invoke the found function, else repeat the same process using the global variable *global-accelerators* in interactors.lisp, so they can be properly referenced. functions it references. At the end, it invokes this function. This file also defines a programmatic interface to accelerators Change log: - removed ident as accelerator, since garnet-debug might not be loaded the exported functions and variables defined in interactors.lisp defined in interactors.lisp Default Support Fns args should be a list: (variable event) ; ; internal exported Finally, invoke function to actually set the accelerators
This file contains the mouse and keyboard ACCELERATORS code Designed and implemented by : SHIFT - F1 - raise window : SHIFT - F4 - zoom window : SHIFT - F7 - destroy window In interactors.lisp , the function " process - event " first checks the " first " [ Note : these functions take 1 arg , the low - level event struct ] same process using the global variable * global - first - accelerators * [ Note : these functions take 1 arg , the low - level event struct ] The variables * global - accelerators * and * global - first - accelerators * are defvar'd This file first defines the * default - global - accelerators * and all the 1/18/93 - supply accelerators that go FIRST before the inters 10/22/92 created (in-package "INTERACTORS") (eval-when (:execute :load-toplevel :compile-toplevel) (export '( *default-global-accelerators* add-global-accelerator add-window-accelerator remove-global-accelerator remove-window-accelerator clear-global-accelerators clear-window-accelerators default-global-accelerators ))) (defmacro with-event-win (args &rest body) `(let ((,(first args) (event-window ,(second args)))) (when ,(first args) ,@body))) (defun raise-acc (event) (with-event-win (win event) (opal:raise-window win))) (defun lower-acc (event) (with-event-win (win event) (opal:lower-window win))) (defun iconify-acc (event)(with-event-win (win event) (opal:iconify-window win))) (defun zoom-acc (event) (with-event-win (win event) (opal:zoom-window win))) (defun fullzoom-acc (event) (with-event-win (win event) (opal:fullzoom-window win))) (defun refresh-acc (event)(with-event-win (win event) (opal:update win T))) (defun destroy-acc (event)(with-event-win (win event) (opal:destroy win))) (defvar *default-global-accelerators* '( (:SHIFT-F1 . raise-acc) (:SHIFT-F2 . lower-acc) (:SHIFT-F3 . iconify-acc) (:SHIFT-F4 . zoom-acc) (:SHIFT-F5 . fullzoom-acc) (:SHIFT-F6 . refresh-acc) (:SHIFT-F7 . destroy-acc))) (defun add-accelerator (alist key fn replace-existing?) (let ((alist-entry (assoc key alist))) (if (and replace-existing? alist-entry) (setf (cdr alist-entry) fn) (push (cons key fn) alist)) alist)) (defun remove-accelerator (alist key remove-all?) (delete key alist :count (if remove-all? NIL 1) :key #'car)) (defun add-global-accelerator (key fn &key replace-existing? first?) (if first? (setq *global-first-accelerators* (add-accelerator *global-first-accelerators* key fn replace-existing?)) (setq *global-accelerators* (add-accelerator *global-accelerators* key fn replace-existing?)))) (defun add-window-accelerator (win key fn &key replace-existing? first?) (if first? (s-value win :first-accelerators (add-accelerator (g-value win :first-accelerators) key fn replace-existing?)) (s-value win :accelerators (add-accelerator (g-value win :accelerators) key fn replace-existing?)))) (defun remove-global-accelerator (key &key remove-all? first?) (if first? (setq *global-first-accelerators* (remove-accelerator *global-first-accelerators* key remove-all?)) (setq *global-accelerators* (remove-accelerator *global-accelerators* key remove-all?)))) (defun remove-window-accelerator (win key &key remove-all? first?) (if first? 
(s-value win :first-accelerators (remove-accelerator (g-value win :first-accelerators) key remove-all?)) (s-value win :accelerators (remove-accelerator (g-value win :accelerators) key remove-all?)))) (defun clear-global-accelerators () (setf *global-accelerators* NIL) (setf *global-first-accelerators* NIL)) (defun clear-window-accelerators (win) (s-value win :accelerators NIL) (s-value win :first-accelerators NIL)) (defun default-global-accelerators () (setf *global-accelerators* (copy-tree *default-global-accelerators*)) (setf *global-first-accelerators* NIL)) (default-global-accelerators)
5ed402aca29be65f83d5c4d70abefac1ebf6d44fe6945c6ee7c8ca25163524c2
zcaudate-me/lein-repack
data.clj
(ns repack.util.data (:use repack.util.array clojure.core))
null
https://raw.githubusercontent.com/zcaudate-me/lein-repack/1eb542d66a77f55c4b5625783027c31fd2dddfe5/example/repack.advance/src/clj/repack/util/data.clj
clojure
(ns repack.util.data (:use repack.util.array clojure.core))
642a5a2cb34c0f265a6eb31d6c1bf5d4ef8b73beb9fb19b64e9a7da2d95347ed
mrmcc3/tailwind-clj
config.clj
(ns tailwind.config (:require [clojure.edn :as edn] [clojure.java.io :as io] [meta-merge.core :as mm])) (defn negate [m] (reduce-kv #(assoc %1 (str "-" %2) (str "-" %3)) {} m)) (defn final [v f & args] (if (:final (meta v)) v (apply f v args))) (defn border-color-default [cfg] (let [default (get-in cfg ["border-color" "default"])] (if (vector? default) (assoc-in cfg ["border-color" "default"] (get-in cfg default "currentColor")) cfg))) (defn expand [{:strs [colors spacing] :as cfg}] (-> (border-color-default cfg) (update "background-color" final merge colors) (update "border-color" final merge colors) (update "text-color" final merge colors) (update "height" final merge spacing) (update "padding" final merge spacing) (update "width" final merge spacing) (update "margin" final merge spacing (negate spacing)))) (def init-defaults (-> "tailwind/defaults.edn" io/resource slurp edn/read-string)) (def init-user (some-> "tailwind.edn" io/resource slurp edn/read-string)) (def default-config (expand init-defaults)) (def config (expand (mm/meta-merge init-defaults init-user))) (defn cfg-> [& paths] (get-in config (map name paths)))
null
https://raw.githubusercontent.com/mrmcc3/tailwind-clj/4c5e25c2502bcc49a065e06f6919241686d11eab/src/tailwind/config.clj
clojure
(ns tailwind.config (:require [clojure.edn :as edn] [clojure.java.io :as io] [meta-merge.core :as mm])) (defn negate [m] (reduce-kv #(assoc %1 (str "-" %2) (str "-" %3)) {} m)) (defn final [v f & args] (if (:final (meta v)) v (apply f v args))) (defn border-color-default [cfg] (let [default (get-in cfg ["border-color" "default"])] (if (vector? default) (assoc-in cfg ["border-color" "default"] (get-in cfg default "currentColor")) cfg))) (defn expand [{:strs [colors spacing] :as cfg}] (-> (border-color-default cfg) (update "background-color" final merge colors) (update "border-color" final merge colors) (update "text-color" final merge colors) (update "height" final merge spacing) (update "padding" final merge spacing) (update "width" final merge spacing) (update "margin" final merge spacing (negate spacing)))) (def init-defaults (-> "tailwind/defaults.edn" io/resource slurp edn/read-string)) (def init-user (some-> "tailwind.edn" io/resource slurp edn/read-string)) (def default-config (expand init-defaults)) (def config (expand (mm/meta-merge init-defaults init-user))) (defn cfg-> [& paths] (get-in config (map name paths)))
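config.clj above exposes the merged Tailwind configuration through cfg->, which walks string keys, and negate, which builds negative counterparts of a spacing scale. A small usage sketch, assuming tailwind-clj (and therefore its bundled tailwind/defaults.edn resource) is on the classpath; the particular keys queried are assumptions about the default theme, and cfg-> simply returns nil for keys that are absent:

(require '[tailwind.config :as tw])

;; negate is pure: build "-4" -> "-1rem" style entries from a spacing map.
(tw/negate {"4" "1rem" "8" "2rem"})
;; => {"-4" "-1rem", "-8" "-2rem"}

;; cfg-> looks up string-keyed paths in the merged config (keywords are
;; converted with `name`). Exact values depend on tailwind/defaults.edn
;; and any tailwind.edn overrides found on the classpath.
(tw/cfg-> :spacing "4")
(tw/cfg-> :colors :blue "500")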
2096cc76a80de54bdb63ddea383a96b0cad8879a9efd7faa9553057d03d39146
ulrikstrid/ocaml-oidc
Jwks.ml
open Helpers let () = Mirage_crypto_rng_unix.initialize () let rsa = Mirage_crypto_pk.Rsa.generate ~bits:1024 () let jwk = Jose.Jwk.make_priv_rsa rsa let jwks : Jose.Jwks.t = { keys = [Jose.Jwk.make_pub_rsa (rsa |> Mirage_crypto_pk.Rsa.pub_of_priv)] } let header = Jose.Header.make_header jwk let jwt_with_kid = Jose.Jwt.sign jwk ~header ~payload:(`Assoc [("sub", `String "sub")]) |> Result.get_ok let jwt_without_kid = Jose.Jwt.sign jwk ~header:{ header with kid = None } ~payload:(`Assoc [("sub", `String "sub")]) |> Result.get_ok let find_jwk_with_kid () = let found_jwk = Oidc.Jwks.find_jwk ~jwt:jwt_with_kid jwks in match found_jwk with | Some found_jwk -> check_result_string "thumbprint" (Jose.Jwk.get_thumbprint `SHA1 jwk) (Jose.Jwk.get_thumbprint `SHA1 found_jwk) | None -> print_endline "Did not find jwk"; raise Not_found let find_jwk_without_kid () = let found_jwk = Oidc.Jwks.find_jwk ~jwt:jwt_without_kid jwks in match found_jwk with | Some found_jwk -> check_result_string "thumbprint" (Jose.Jwk.get_thumbprint `SHA1 jwk) (Jose.Jwk.get_thumbprint `SHA1 found_jwk) | None -> print_endline "Did not find jwk"; raise Not_found let tests = List.map make_test_case [("With kid", find_jwk_with_kid); ("Without kid", find_jwk_without_kid)] let suite, _ = Junit_alcotest.run_and_report ~package:"oidc" "Jwks" [("OIDC - JWKs", tests)]
null
https://raw.githubusercontent.com/ulrikstrid/ocaml-oidc/281e7578c08f73c1c555b06b405ca31d16e041ca/test/Jwks.ml
ocaml
open Helpers let () = Mirage_crypto_rng_unix.initialize () let rsa = Mirage_crypto_pk.Rsa.generate ~bits:1024 () let jwk = Jose.Jwk.make_priv_rsa rsa let jwks : Jose.Jwks.t = { keys = [Jose.Jwk.make_pub_rsa (rsa |> Mirage_crypto_pk.Rsa.pub_of_priv)] } let header = Jose.Header.make_header jwk let jwt_with_kid = Jose.Jwt.sign jwk ~header ~payload:(`Assoc [("sub", `String "sub")]) |> Result.get_ok let jwt_without_kid = Jose.Jwt.sign jwk ~header:{ header with kid = None } ~payload:(`Assoc [("sub", `String "sub")]) |> Result.get_ok let find_jwk_with_kid () = let found_jwk = Oidc.Jwks.find_jwk ~jwt:jwt_with_kid jwks in match found_jwk with | Some found_jwk -> check_result_string "thumbprint" (Jose.Jwk.get_thumbprint `SHA1 jwk) (Jose.Jwk.get_thumbprint `SHA1 found_jwk) | None -> print_endline "Did not find jwk"; raise Not_found let find_jwk_without_kid () = let found_jwk = Oidc.Jwks.find_jwk ~jwt:jwt_without_kid jwks in match found_jwk with | Some found_jwk -> check_result_string "thumbprint" (Jose.Jwk.get_thumbprint `SHA1 jwk) (Jose.Jwk.get_thumbprint `SHA1 found_jwk) | None -> print_endline "Did not find jwk"; raise Not_found let tests = List.map make_test_case [("With kid", find_jwk_with_kid); ("Without kid", find_jwk_without_kid)] let suite, _ = Junit_alcotest.run_and_report ~package:"oidc" "Jwks" [("OIDC - JWKs", tests)]
1ca3cf8318bdd02aa29a243dd0fa6495845a59f0e4a7bcbed5e6fd4b37f91182
re-xyr/avail
Internal.hs
| This module defines the ' M ' wrapper monad and the ' Eff ' phantom constraint . All safe functionalities in this -- module are reexported in the "Avail" module, so you wouldn't need to import this module most of the times. # LANGUAGE AllowAmbiguousTypes # # LANGUAGE CPP # # OPTIONS_HADDOCK not - home # module Avail.Internal where import Control.Monad.Fix (MonadFix) import Control.Monad.Zip (MonadZip) import Data.Kind (Constraint, Type) import Data.Proxy (Proxy (Proxy)) import Unsafe.Coerce (unsafeCoerce) #ifdef AVAIL_semigroupoids import Data.Functor.Apply (Apply) import Data.Functor.Bind (Bind (join, (>>-))) #endif | The ' M ' monad transformer acts as a /barrier of effects/. For example , for a monad type @App@ and any effect that @App@ has an instance of , the constraint is required to perform the methods of @MonadOvO@ in the monad @'M ' App@ as defined for the @App@ monad . -- -- In particular, 'M' is expected to be used on a __concrete__ monad instead of a /polymorphic/ one. This is -- particularly good in terms of program performance, and generally means instead of writing this: -- -- @ f : : ' Control . Monad . State . MonadState ' ' Int ' m = > m ( ) -- @ -- -- You should write -- -- @ f : : ' Eff ' ( ' Control . Monad . State . MonadState ' ' Int ' ) = > ' M ' App ( ) -- @ -- where @App@ is a monad stack of your choice that has support of @'Control . Monad . State . MonadState ' ' Int'@. This also means there is no ' Control . . Trans . Class . MonadTrans ' instance for ' M ' . -- -- Note: __you should not define instances of 'M' for effect typeclasses directly by hand__ as that is error-prone -- and may create holes in effect management. For defining instances of effect typeclasses for 'M', check out -- the "Avail.Derive" module and specifically the 'Avail.Derive.avail' and 'Avail.Derive.avail'' TH functions. -- Also keep in mind that typeclasses inside @mtl@ , @exceptions@ , @unliftio@ , @monad - control@ and @capability@ work -- with 'M' out-of-the-box so no instance for them is needed to be defined on 'M' /by you/. newtype M m a = UnsafeLift (m a) -- ^ Unsafely lift an @m@ action into @'M' m@. This completely sidesteps the -- effect management mechanism; __You should not use this.__ deriving newtype (Functor, Applicative, Monad, MonadFix, MonadZip) #ifdef AVAIL_semigroupoids deriving newtype instance Apply m => Apply (M m) instance Bind m => Bind (M m) where UnsafeLift m >>- f = UnsafeLift $ m >>- (unM . f) join (UnsafeLift m) = UnsafeLift $ join $ unM <$> m #endif -- | The kind of /effect typeclasses/, i.e. those that define a set of operations on a monad. Examples include ' Control . Monad . IO.Class . MonadIO ' and ' Control . . Reader . MonadReader ' . -- This type is the same as the ' Capability . Constraints . Capability ' type in @capability@. type Effect = (Type -> Type) -> Constraint -- | Any 'Effect' being used with @avail@ should have an instance of this class. Specifically, this class stores the /superclasses/ of effect typeclasses . For example , ' Control . . IO.Unlift . MonadUnliftIO ' has a superclass ' Control . Monad . IO.Class . MonadIO ' . -- -- You won't need to define instances of this by hand; instead, use the 'Avail.Derive.avail'' Template Haskell function. class KnownList (Superclasses e) => IsEff (e :: Effect) where -- | The superclasses of this typeclass. type Superclasses e :: [Effect] | The /primitive/ phantom effect constraint that does not take superclasses into account . You should not use this directly ; use ' Eff ' or ' Effs ' instead . 
Additionally , you definitely should n't define instances for this class . class Eff' (e :: Effect) where -- | The dummy method of the phantom typeclass, to be instantiated via the reflection trick in 'rip''. instEffect :: Proxy e instEffect = error "unimplemented" -- | The constraint that indicates an effect is available for use, i.e. you can perform methods defined by instances of the effect in a ' M ' monad . type Eff (e :: Effect) = (Eff' e, Effs (Superclasses e)) | Convenient alias for @('Eff ' e1 , ' Eff ' e2 , ... , ' ' type family Effs (es :: [Effect]) :: Constraint where Effs '[] = () Effs (e ': es) = (Eff e, Effs es) | The newtype wrapper used to circumvent the impredicative problem of GHC and perform the reflection trick in -- 'rip''. You have no reason to use this directly. newtype InstEff e a = InstEff (Eff' e => a) | Brutally rip off an ' Eff '' constraint , a la -- [the reflection trick](-4.16.0.0/docs/Unsafe-Coerce.html#v:unsafeCoerce). -- __This is highly unsafe__ in terms of effect management. rip' :: forall e a. (Eff' e => a) -> a rip' x = unsafeCoerce (InstEff @e x) Proxy | Brutally rip off an ' Eff ' constraint . This means ' rip''ing off the ' Eff '' constraint of the current ' Effect ' and then ' rips ' off constraints of all ' Superclasses ' recursively . _ _ This is highly unsafe _ _ in terms of effect -- management. rip :: forall e a. IsEff e => (Eff e => a) -> a rip x = rips @(Superclasses e) $ rip' @e x | The list of effect is known at compile time . This is required for functions like ' runM ' . class KnownList (es :: [Effect]) where | Brutally rip off many ' Eff ' constraints . _ _ This is highly unsafe _ _ in terms of effect management . rips :: (Effs es => a) -> a rips _ = error "unimplemented" instance KnownList '[] where rips x = x instance (IsEff e, KnownList es) => KnownList (e ': es) where rips x = rips @es $ rip @e x -- | Unwrap the 'M' monad into the underlying concrete monad. This is rarely needed as most of the time you would also want to eliminate ' Eff ' constraints at the same time ; for that see ' runM ' . unM :: M m a -> m a unM (UnsafeLift m) = m | Unwrap the ' M ' monad into the underlying concrete monad and also eliminating ' Eff ' constraints . You need @TypeApplications@ in order to specify the list of ' Effect 's you want to eliminate ' Eff ' constraints for : -- -- @ ' runM ' @'[MonadReader Env , MonadState Store , MonadError MyErr ] app -- @ -- -- Note that functions like '(Data.Function.&)' generally does not work with this function; either apply directly or -- use '($)' only. runM :: forall es m a. KnownList es => (Effs es => M m a) -> m a runM m = rips @es $ unM m
null
https://raw.githubusercontent.com/re-xyr/avail/1ecb0518f5e05a409213f6f1901423bd7b45786c/src/Avail/Internal.hs
haskell
module are reexported in the "Avail" module, so you wouldn't need to import this module most of the times. In particular, 'M' is expected to be used on a __concrete__ monad instead of a /polymorphic/ one. This is particularly good in terms of program performance, and generally means instead of writing this: @ @ You should write @ @ Note: __you should not define instances of 'M' for effect typeclasses directly by hand__ as that is error-prone and may create holes in effect management. For defining instances of effect typeclasses for 'M', check out the "Avail.Derive" module and specifically the 'Avail.Derive.avail' and 'Avail.Derive.avail'' TH functions. with 'M' out-of-the-box so no instance for them is needed to be defined on 'M' /by you/. ^ Unsafely lift an @m@ action into @'M' m@. This completely sidesteps the effect management mechanism; __You should not use this.__ | The kind of /effect typeclasses/, i.e. those that define a set of operations on a monad. Examples include | Any 'Effect' being used with @avail@ should have an instance of this class. Specifically, this class stores You won't need to define instances of this by hand; instead, use the 'Avail.Derive.avail'' Template Haskell function. | The superclasses of this typeclass. | The dummy method of the phantom typeclass, to be instantiated via the reflection trick in 'rip''. | The constraint that indicates an effect is available for use, i.e. you can perform methods defined by instances 'rip''. You have no reason to use this directly. [the reflection trick](-4.16.0.0/docs/Unsafe-Coerce.html#v:unsafeCoerce). __This is highly unsafe__ in terms of effect management. management. | Unwrap the 'M' monad into the underlying concrete monad. This is rarely needed as most of the time you would also @ @ Note that functions like '(Data.Function.&)' generally does not work with this function; either apply directly or use '($)' only.
| This module defines the ' M ' wrapper monad and the ' Eff ' phantom constraint . All safe functionalities in this # LANGUAGE AllowAmbiguousTypes # # LANGUAGE CPP # # OPTIONS_HADDOCK not - home # module Avail.Internal where import Control.Monad.Fix (MonadFix) import Control.Monad.Zip (MonadZip) import Data.Kind (Constraint, Type) import Data.Proxy (Proxy (Proxy)) import Unsafe.Coerce (unsafeCoerce) #ifdef AVAIL_semigroupoids import Data.Functor.Apply (Apply) import Data.Functor.Bind (Bind (join, (>>-))) #endif | The ' M ' monad transformer acts as a /barrier of effects/. For example , for a monad type @App@ and any effect that @App@ has an instance of , the constraint is required to perform the methods of @MonadOvO@ in the monad @'M ' App@ as defined for the @App@ monad . f : : ' Control . Monad . State . MonadState ' ' Int ' m = > m ( ) f : : ' Eff ' ( ' Control . Monad . State . MonadState ' ' Int ' ) = > ' M ' App ( ) where @App@ is a monad stack of your choice that has support of @'Control . Monad . State . MonadState ' ' Int'@. This also means there is no ' Control . . Trans . Class . MonadTrans ' instance for ' M ' . Also keep in mind that typeclasses inside @mtl@ , @exceptions@ , @unliftio@ , @monad - control@ and @capability@ work deriving newtype (Functor, Applicative, Monad, MonadFix, MonadZip) #ifdef AVAIL_semigroupoids deriving newtype instance Apply m => Apply (M m) instance Bind m => Bind (M m) where UnsafeLift m >>- f = UnsafeLift $ m >>- (unM . f) join (UnsafeLift m) = UnsafeLift $ join $ unM <$> m #endif ' Control . Monad . IO.Class . MonadIO ' and ' Control . . Reader . MonadReader ' . This type is the same as the ' Capability . Constraints . Capability ' type in @capability@. type Effect = (Type -> Type) -> Constraint the /superclasses/ of effect typeclasses . For example , ' Control . . IO.Unlift . MonadUnliftIO ' has a superclass ' Control . Monad . IO.Class . MonadIO ' . class KnownList (Superclasses e) => IsEff (e :: Effect) where type Superclasses e :: [Effect] | The /primitive/ phantom effect constraint that does not take superclasses into account . You should not use this directly ; use ' Eff ' or ' Effs ' instead . Additionally , you definitely should n't define instances for this class . class Eff' (e :: Effect) where instEffect :: Proxy e instEffect = error "unimplemented" of the effect in a ' M ' monad . type Eff (e :: Effect) = (Eff' e, Effs (Superclasses e)) | Convenient alias for @('Eff ' e1 , ' Eff ' e2 , ... , ' ' type family Effs (es :: [Effect]) :: Constraint where Effs '[] = () Effs (e ': es) = (Eff e, Effs es) | The newtype wrapper used to circumvent the impredicative problem of GHC and perform the reflection trick in newtype InstEff e a = InstEff (Eff' e => a) | Brutally rip off an ' Eff '' constraint , a la rip' :: forall e a. (Eff' e => a) -> a rip' x = unsafeCoerce (InstEff @e x) Proxy | Brutally rip off an ' Eff ' constraint . This means ' rip''ing off the ' Eff '' constraint of the current ' Effect ' and then ' rips ' off constraints of all ' Superclasses ' recursively . _ _ This is highly unsafe _ _ in terms of effect rip :: forall e a. IsEff e => (Eff e => a) -> a rip x = rips @(Superclasses e) $ rip' @e x | The list of effect is known at compile time . This is required for functions like ' runM ' . class KnownList (es :: [Effect]) where | Brutally rip off many ' Eff ' constraints . _ _ This is highly unsafe _ _ in terms of effect management . 
rips :: (Effs es => a) -> a rips _ = error "unimplemented" instance KnownList '[] where rips x = x instance (IsEff e, KnownList es) => KnownList (e ': es) where rips x = rips @es $ rip @e x want to eliminate ' Eff ' constraints at the same time ; for that see ' runM ' . unM :: M m a -> m a unM (UnsafeLift m) = m | Unwrap the ' M ' monad into the underlying concrete monad and also eliminating ' Eff ' constraints . You need @TypeApplications@ in order to specify the list of ' Effect 's you want to eliminate ' Eff ' constraints for : ' runM ' @'[MonadReader Env , MonadState Store , MonadError MyErr ] app runM :: forall es m a. KnownList es => (Effs es => M m a) -> m a runM m = rips @es $ unM m
dbdd33869cdf887b9d300362d86cfc29953d344a81494f0f7fcad0f4eae7b6a8
vikram/lisplibraries
address.lisp
;;;; -*- lisp -*- (in-package :it.bese.ucw-user) (defentry-point "address.ucw" (:application *example-application*) () (call 'show-address :address (call 'get-address))) (defclass address () ((name :accessor name :initarg :name :initform "") (email :accessor email :initarg :email :initform ""))) (defcomponent address-manipulator-mixin () ((address :accessor address :initarg :address :backtrack t :initform (make-instance 'address)))) (defcomponent show-address (simple-window-component address-manipulator-mixin) ()) (defmethod render ((s show-address)) (<:p "Your address:") (<:table (<:tr (<:td "Name:") (<:td (<:as-html (name (address s))))) (<:tr (<:td "Email:") (<:td (<:as-html (email (address s))))))) (defcomponent get-address (simple-window-component address-manipulator-mixin) ((message :accessor message :initarg :message :initform nil))) (defmethod render ((g get-address)) (<:p "Please submit an address:") (when (message g) (<:p :style "color: #ff0000; font-weight: bold" (<:as-html (message g)))) (<ucw:form :action (print-info g) (<:table (<:tr (<:td "Name:") (<:td (<ucw:text :accessor (name (address g))))) (<:tr (<:td "Email:") (<:td (<ucw:text :accessor (email (address g))))) (<:td (<:td :colspan 2 :align "center" (<:input :type "submit")))))) (defaction print-info ((g get-address)) (if (and (name (address g)) (string/= "" (name (address g))) (email (address g)) (string/= "" (email (address g)))) (answer (make-instance 'address :name (name (address g)) :email (email (address g)))) (setf (message g) "Sorry, you must supply both a name and an email.")))
null
https://raw.githubusercontent.com/vikram/lisplibraries/105e3ef2d165275eb78f36f5090c9e2cdd0754dd/site/ucw-boxset/ucw_dev/examples/address.lisp
lisp
-*- lisp -*-
(in-package :it.bese.ucw-user) (defentry-point "address.ucw" (:application *example-application*) () (call 'show-address :address (call 'get-address))) (defclass address () ((name :accessor name :initarg :name :initform "") (email :accessor email :initarg :email :initform ""))) (defcomponent address-manipulator-mixin () ((address :accessor address :initarg :address :backtrack t :initform (make-instance 'address)))) (defcomponent show-address (simple-window-component address-manipulator-mixin) ()) (defmethod render ((s show-address)) (<:p "Your address:") (<:table (<:tr (<:td "Name:") (<:td (<:as-html (name (address s))))) (<:tr (<:td "Email:") (<:td (<:as-html (email (address s))))))) (defcomponent get-address (simple-window-component address-manipulator-mixin) ((message :accessor message :initarg :message :initform nil))) (defmethod render ((g get-address)) (<:p "Please submit an address:") (when (message g) (<:p :style "color: #ff0000; font-weight: bold" (<:as-html (message g)))) (<ucw:form :action (print-info g) (<:table (<:tr (<:td "Name:") (<:td (<ucw:text :accessor (name (address g))))) (<:tr (<:td "Email:") (<:td (<ucw:text :accessor (email (address g))))) (<:td (<:td :colspan 2 :align "center" (<:input :type "submit")))))) (defaction print-info ((g get-address)) (if (and (name (address g)) (string/= "" (name (address g))) (email (address g)) (string/= "" (email (address g)))) (answer (make-instance 'address :name (name (address g)) :email (email (address g)))) (setf (message g) "Sorry, you must supply both a name and an email.")))
85b9a41ee5e67f224aa5c72be3e43e44987589b3905b03284769cd89a13d9a3f
REMath/mit_16.399
cbexp.mli
(* cbexp.mli *) open Abstract_Syntax open Cvalues open Cenv (* evaluation of boolean operations *) val c_bexp : bexp -> Cenv.t -> Cenv.t
null
https://raw.githubusercontent.com/REMath/mit_16.399/3f395d6a9dfa1ed232d307c3c542df3dbd5b614a/project/Collecting-Interpreter/cbexp.mli
ocaml
cbexp.mli evaluation of boolean operations
open Abstract_Syntax open Cvalues open Cenv val c_bexp : bexp -> Cenv.t -> Cenv.t
9e3dcd600ce71ec50284e7075e068e53c594e13372db6b68ebabd327b8c5940f
yetibot/core
yeti.clj
(ns yetibot.core.commands.yeti (:require [yetibot.core.hooks :refer [cmd-hook]])) (defn self-cmd "yeti # show known info about Yetibot" [_] "TODO") (cmd-hook #"yeti" #"self" self-cmd)
null
https://raw.githubusercontent.com/yetibot/core/e35cc772622e91aec3ad7f411a99fff09acbd3f9/src/yetibot/core/commands/yeti.clj
clojure
(ns yetibot.core.commands.yeti (:require [yetibot.core.hooks :refer [cmd-hook]])) (defn self-cmd "yeti # show known info about Yetibot" [_] "TODO") (cmd-hook #"yeti" #"self" self-cmd)
2a10ddfac6223fd8e89c01005bdb359a85c8dbf16232c5bd0bdb71ac41529228
discoproject/odisco
protocol.mli
(** The Disco worker protocol. *) (** Messages from Disco to worker. *) type taskinfo = { (* info from protocol *) task_jobname : string; task_jobfile : string; task_stage : Pipeline.stage; task_grouping : Pipeline.grouping; task_group_label : Pipeline.label; task_group_node : string option; task_id : int; task_host : string; task_master : string; task_disco_port : int; task_put_port : int; task_disco_root : string; task_ddfs_root : string; (* runtime state *) task_rootpath : string; } type scheme = | Dir | Disco | File | Raw | Http | Other of string val string_of_scheme : scheme -> string type task_input_status = | Task_input_more | Task_input_done type input_status = | Input_ok | Input_failed type input_label = | Input_label_all | Input_label of Pipeline.label type input_id = int type replica_id = int type replica = replica_id * Uri.t type input = input_id * input_status * input_label * replica list type master_msg = | M_ok | M_die | M_taskinfo of taskinfo | M_task_input of task_input_status * input list | M_retry of replica list | M_fail val master_msg_name : master_msg -> string (** Messages from worker to Disco. *) type output = { label : int; filename : string; } type worker_msg = | W_worker of (* version *) string * (* pid *) int | W_taskinfo | W_input_exclude of input_id list | W_input_include of input_id list | W_input_failure of input_id * replica_id list | W_message of string | W_error of string | W_fatal of string | W_output of output * (* size *) int | W_done val protocol_version : string * One message exchange using the request - response protocol . val send_request : worker_msg -> in_channel -> out_channel -> master_msg (** parse an index payload *) val parse_index : string -> (int * (string * int)) list
null
https://raw.githubusercontent.com/discoproject/odisco/1dda9b921625a7c6af442a33938279afdc2a8600/lib/protocol.mli
ocaml
* The Disco worker protocol. * Messages from Disco to worker. info from protocol runtime state * Messages from worker to Disco. version pid size * parse an index payload
type taskinfo = { task_jobname : string; task_jobfile : string; task_stage : Pipeline.stage; task_grouping : Pipeline.grouping; task_group_label : Pipeline.label; task_group_node : string option; task_id : int; task_host : string; task_master : string; task_disco_port : int; task_put_port : int; task_disco_root : string; task_ddfs_root : string; task_rootpath : string; } type scheme = | Dir | Disco | File | Raw | Http | Other of string val string_of_scheme : scheme -> string type task_input_status = | Task_input_more | Task_input_done type input_status = | Input_ok | Input_failed type input_label = | Input_label_all | Input_label of Pipeline.label type input_id = int type replica_id = int type replica = replica_id * Uri.t type input = input_id * input_status * input_label * replica list type master_msg = | M_ok | M_die | M_taskinfo of taskinfo | M_task_input of task_input_status * input list | M_retry of replica list | M_fail val master_msg_name : master_msg -> string type output = { label : int; filename : string; } type worker_msg = | W_taskinfo | W_input_exclude of input_id list | W_input_include of input_id list | W_input_failure of input_id * replica_id list | W_message of string | W_error of string | W_fatal of string | W_done val protocol_version : string * One message exchange using the request - response protocol . val send_request : worker_msg -> in_channel -> out_channel -> master_msg val parse_index : string -> (int * (string * int)) list
d97be94568f07b9a7af48c24a3d6139844afabe4bf02fcaed9b2f57aab544935
well-typed-lightbulbs/ocaml-esp32
escape_error.ml
(* TEST * toplevel *) (* Nothing to see here, parse.ml dictates that these be printed as regular "Syntax error". *) try foo () with ;; (3 : );; (3 :> );;
null
https://raw.githubusercontent.com/well-typed-lightbulbs/ocaml-esp32/c24fcbfbee0e3aa6bb71c9b467c60c6bac326cc7/testsuite/tests/parse-errors/escape_error.ml
ocaml
TEST * toplevel
(* Nothing to see here, parse.ml dictates that these be printed as regular "Syntax error". *) try foo () with ;; (3 : );; (3 :> );;
94c440c7886d98aca74a51822a600476973ee959a1f58afdc4b6eef042600c41
BillHallahan/G2
Helpers.hs
{-# LANGUAGE BangPatterns #-} # LANGUAGE CPP # module G2.Liquid.Helpers ( MeasureSymbols (..) , getGHCInfos , funcSpecs , getTySigs , putTySigs , getAssumedSigs , putAssumedSigs , getQualifiers , putQualifiers , findFuncSpec , measureSpecs , measureSymbols , measureNames , varToName , varEqName , namesEq , fillLHDictArgs ) where import G2.Language as G2 import G2.Liquid.Types import G2.Translation.Haskell import qualified Language.Haskell.Liquid.GHC.Interface as LHI import Language.Fixpoint.Types.Names #if MIN_VERSION_liquidhaskell(0,8,10) import Language.Haskell.Liquid.Types hiding (Config, TargetInfo (..), TargetSpec (..), GhcSpec (..), cls, names) #else import Language.Haskell.Liquid.Types #endif import qualified Language.Haskell.Liquid.UX.Config as LHC import Language.Fixpoint.Types (Qualifier (..)) import Data.List import qualified Data.Map as M import qualified Data.Text as T import GHC as GHC import Name import Var as V -- | Interface with LH getGHCInfos :: LHC.Config -> [FilePath] -> [FilePath] -> IO [GhcInfo] getGHCInfos config proj fp = do let config' = config {idirs = idirs config ++ proj , files = files config , ghcOptions = ["-v"]} GhcInfo #if MIN_VERSION_liquidhaskell(0,8,10) (ghci, _) <- LHI.getTargetInfos Nothing config' fp #else (ghci, _) <- LHI.getGhcInfos Nothing config' fp #endif return ghci funcSpecs :: [GhcInfo] -> [(Var, LocSpecType)] funcSpecs fs = let asserted = concatMap getTySigs fs assumed = concatMap getAssumedSigs fs in asserted ++ assumed | Functions asserted in LH getTySigs :: GhcInfo -> [(Var, LocSpecType)] #if MIN_VERSION_liquidhaskell(0,8,6) getTySigs = gsTySigs . gsSig . giSpec #else getTySigs = gsTySigs . spec #endif putTySigs :: GhcInfo -> [(Var, LocSpecType)] -> GhcInfo #if MIN_VERSION_liquidhaskell(0,8,6) putTySigs gi@(GI { giSpec = sp@(SP { gsSig = sp_sig }) } ) new_ty_sigs = gi { giSpec = sp { gsSig = sp_sig { gsTySigs = new_ty_sigs } } } #else putTySigs gi@(GI { spec = sp }) new_ty_sigs = gi { spec = sp { gsTySigs = new_ty_sigs }} #endif | Functions assumed in LH getAssumedSigs :: GhcInfo -> [(Var, LocSpecType)] #if MIN_VERSION_liquidhaskell(0,8,6) getAssumedSigs = gsAsmSigs . gsSig . giSpec #else getAssumedSigs = gsAsmSigs . spec #endif putAssumedSigs :: GhcInfo -> [(Var, LocSpecType)] -> GhcInfo #if MIN_VERSION_liquidhaskell(0,8,6) putAssumedSigs gi@(GI { giSpec = sp@(SP { gsSig = sp_sig }) } ) new_ty_sigs = gi { giSpec = sp { gsSig = sp_sig { gsTySigs = new_ty_sigs } } } #else putAssumedSigs gi@(GI { spec = sp }) new_ty_sigs = gi { spec = sp { gsTySigs = new_ty_sigs }} #endif getQualifiers :: GhcInfo -> [Qualifier] #if MIN_VERSION_liquidhaskell(0,8,6) getQualifiers = gsQualifiers . gsQual . giSpec #else getQualifiers = gsQualifiers . spec #endif putQualifiers :: GhcInfo -> [Qualifier] -> GhcInfo #if MIN_VERSION_liquidhaskell(0,8,6) putQualifiers gi@(GI { giSpec = sp@(SP { gsQual = quals }) } ) new_quals = gi { giSpec = sp { gsQual = quals { gsQualifiers = new_quals } } } #else putQualifiers gi@(GI { spec = sp }) new_quals = gi { spec = sp { gsQualifiers = new_quals }} #endif findFuncSpec :: [GhcInfo] -> G2.Name -> Maybe SpecType findFuncSpec ghci g2_n = let fs = funcSpecs ghci fs' = map (\(v, lst) -> (V.varName v, lst)) fs in case find (\(n, _) -> namesEq n g2_n) fs' of Just st -> Just . val . snd $ st Nothing -> Nothing varToName :: V.Var -> G2.Name varToName = mkName . 
V.varName varEqName :: V.Var -> G2.Name -> Bool varEqName v = namesEq (V.varName v) namesEq :: GHC.Name -> G2.Name -> Bool namesEq ghc_n (Name n m _ _) = T.pack (occNameString $ nameOccName ghc_n) == n && (case nameModule_maybe ghc_n of Just m' -> Just (T.pack . moduleNameString . moduleName $ m') == m Nothing -> m == Nothing) measureSpecs :: [GhcInfo] -> [Measure SpecType GHC.DataCon] #if MIN_VERSION_liquidhaskell(0,8,6) measureSpecs = concatMap (gsMeasures . gsData . giSpec) #else measureSpecs = concatMap (gsMeasures . spec) #endif newtype MeasureSymbols = MeasureSymbols { symbols :: [Symbol] } measureSymbols :: [GhcInfo] -> MeasureSymbols measureSymbols = MeasureSymbols . measureNames measureNames :: [GhcInfo] -> [Symbol] #if MIN_VERSION_liquidhaskell(0,8,6) measureNames = map (val . msName) . measureSpecs #else measureNames = map (val . name) . measureSpecs #endif -- The walk function takes lhDict arguments that are not correctly accounted for by mkStrict. -- The arguments are not actually used, so, here, we fill them in with undefined. fillLHDictArgs :: Walkers -> Expr -> Expr fillLHDictArgs w = modifyAppTop (fillLHDictArgs' w) fillLHDictArgs' :: Walkers -> Expr -> Expr fillLHDictArgs' w e | f@(Var i):xs <- unApp e , any (\(_, i') -> i == i') (M.toList w) = mkApp $ f:fillLHDictArgs'' 0 xs | otherwise = e fillLHDictArgs'' :: Int -> [Expr] -> [Expr] fillLHDictArgs'' !n [] = replicate n (Prim Undefined TyBottom) fillLHDictArgs'' !n (t@(Type _):xs) = t:fillLHDictArgs'' (n + 1) xs fillLHDictArgs'' !n xs = replicate n (Prim Undefined TyBottom) ++ xs
null
https://raw.githubusercontent.com/BillHallahan/G2/21c648d38c380041a9036d0e375ec1d54120f6b4/src/G2/Liquid/Helpers.hs
haskell
# LANGUAGE BangPatterns # | Interface with LH The walk function takes lhDict arguments that are not correctly accounted for by mkStrict. The arguments are not actually used, so, here, we fill them in with undefined.
# LANGUAGE CPP # module G2.Liquid.Helpers ( MeasureSymbols (..) , getGHCInfos , funcSpecs , getTySigs , putTySigs , getAssumedSigs , putAssumedSigs , getQualifiers , putQualifiers , findFuncSpec , measureSpecs , measureSymbols , measureNames , varToName , varEqName , namesEq , fillLHDictArgs ) where import G2.Language as G2 import G2.Liquid.Types import G2.Translation.Haskell import qualified Language.Haskell.Liquid.GHC.Interface as LHI import Language.Fixpoint.Types.Names #if MIN_VERSION_liquidhaskell(0,8,10) import Language.Haskell.Liquid.Types hiding (Config, TargetInfo (..), TargetSpec (..), GhcSpec (..), cls, names) #else import Language.Haskell.Liquid.Types #endif import qualified Language.Haskell.Liquid.UX.Config as LHC import Language.Fixpoint.Types (Qualifier (..)) import Data.List import qualified Data.Map as M import qualified Data.Text as T import GHC as GHC import Name import Var as V getGHCInfos :: LHC.Config -> [FilePath] -> [FilePath] -> IO [GhcInfo] getGHCInfos config proj fp = do let config' = config {idirs = idirs config ++ proj , files = files config , ghcOptions = ["-v"]} GhcInfo #if MIN_VERSION_liquidhaskell(0,8,10) (ghci, _) <- LHI.getTargetInfos Nothing config' fp #else (ghci, _) <- LHI.getGhcInfos Nothing config' fp #endif return ghci funcSpecs :: [GhcInfo] -> [(Var, LocSpecType)] funcSpecs fs = let asserted = concatMap getTySigs fs assumed = concatMap getAssumedSigs fs in asserted ++ assumed | Functions asserted in LH getTySigs :: GhcInfo -> [(Var, LocSpecType)] #if MIN_VERSION_liquidhaskell(0,8,6) getTySigs = gsTySigs . gsSig . giSpec #else getTySigs = gsTySigs . spec #endif putTySigs :: GhcInfo -> [(Var, LocSpecType)] -> GhcInfo #if MIN_VERSION_liquidhaskell(0,8,6) putTySigs gi@(GI { giSpec = sp@(SP { gsSig = sp_sig }) } ) new_ty_sigs = gi { giSpec = sp { gsSig = sp_sig { gsTySigs = new_ty_sigs } } } #else putTySigs gi@(GI { spec = sp }) new_ty_sigs = gi { spec = sp { gsTySigs = new_ty_sigs }} #endif | Functions assumed in LH getAssumedSigs :: GhcInfo -> [(Var, LocSpecType)] #if MIN_VERSION_liquidhaskell(0,8,6) getAssumedSigs = gsAsmSigs . gsSig . giSpec #else getAssumedSigs = gsAsmSigs . spec #endif putAssumedSigs :: GhcInfo -> [(Var, LocSpecType)] -> GhcInfo #if MIN_VERSION_liquidhaskell(0,8,6) putAssumedSigs gi@(GI { giSpec = sp@(SP { gsSig = sp_sig }) } ) new_ty_sigs = gi { giSpec = sp { gsSig = sp_sig { gsTySigs = new_ty_sigs } } } #else putAssumedSigs gi@(GI { spec = sp }) new_ty_sigs = gi { spec = sp { gsTySigs = new_ty_sigs }} #endif getQualifiers :: GhcInfo -> [Qualifier] #if MIN_VERSION_liquidhaskell(0,8,6) getQualifiers = gsQualifiers . gsQual . giSpec #else getQualifiers = gsQualifiers . spec #endif putQualifiers :: GhcInfo -> [Qualifier] -> GhcInfo #if MIN_VERSION_liquidhaskell(0,8,6) putQualifiers gi@(GI { giSpec = sp@(SP { gsQual = quals }) } ) new_quals = gi { giSpec = sp { gsQual = quals { gsQualifiers = new_quals } } } #else putQualifiers gi@(GI { spec = sp }) new_quals = gi { spec = sp { gsQualifiers = new_quals }} #endif findFuncSpec :: [GhcInfo] -> G2.Name -> Maybe SpecType findFuncSpec ghci g2_n = let fs = funcSpecs ghci fs' = map (\(v, lst) -> (V.varName v, lst)) fs in case find (\(n, _) -> namesEq n g2_n) fs' of Just st -> Just . val . snd $ st Nothing -> Nothing varToName :: V.Var -> G2.Name varToName = mkName . 
V.varName varEqName :: V.Var -> G2.Name -> Bool varEqName v = namesEq (V.varName v) namesEq :: GHC.Name -> G2.Name -> Bool namesEq ghc_n (Name n m _ _) = T.pack (occNameString $ nameOccName ghc_n) == n && (case nameModule_maybe ghc_n of Just m' -> Just (T.pack . moduleNameString . moduleName $ m') == m Nothing -> m == Nothing) measureSpecs :: [GhcInfo] -> [Measure SpecType GHC.DataCon] #if MIN_VERSION_liquidhaskell(0,8,6) measureSpecs = concatMap (gsMeasures . gsData . giSpec) #else measureSpecs = concatMap (gsMeasures . spec) #endif newtype MeasureSymbols = MeasureSymbols { symbols :: [Symbol] } measureSymbols :: [GhcInfo] -> MeasureSymbols measureSymbols = MeasureSymbols . measureNames measureNames :: [GhcInfo] -> [Symbol] #if MIN_VERSION_liquidhaskell(0,8,6) measureNames = map (val . msName) . measureSpecs #else measureNames = map (val . name) . measureSpecs #endif fillLHDictArgs :: Walkers -> Expr -> Expr fillLHDictArgs w = modifyAppTop (fillLHDictArgs' w) fillLHDictArgs' :: Walkers -> Expr -> Expr fillLHDictArgs' w e | f@(Var i):xs <- unApp e , any (\(_, i') -> i == i') (M.toList w) = mkApp $ f:fillLHDictArgs'' 0 xs | otherwise = e fillLHDictArgs'' :: Int -> [Expr] -> [Expr] fillLHDictArgs'' !n [] = replicate n (Prim Undefined TyBottom) fillLHDictArgs'' !n (t@(Type _):xs) = t:fillLHDictArgs'' (n + 1) xs fillLHDictArgs'' !n xs = replicate n (Prim Undefined TyBottom) ++ xs
c072e739e2e4c78401082d5f645eac8969d6ce8b93bc45f082b0c936c25de215
bagucode/clj-native
test_lib.clj
(ns clj-native.test.test-lib (:use [clj-native.direct :only [defclib loadlib typeof]] [clj-native.structs :only [byref byval]] [clj-native.callbacks :only [callback]] [clojure.test])) ;; added test cases to help resolve an issue that was plaguing the Overtone project. ;; See discussion here: =#!topic/overtone/wrlzi3dNhr0 ;; Feel free to add new functions/tests as issues pop up. (defclib test_lib (:libname "test_lib") (:structs (n-buf :n int :buf void*) (point :x int :y int :name constchar*)) (:functions (mul [int int] int) (and2 [byte byte] byte) (and3 [byte byte byte*] void) (and3_buf [byte byte byte* int n-buf*] void) (static_point [int int] point*))) (println "NOTE: Testing assumes a built test/clj_native/test/test_lib library") (System/setProperty "jna.library.path" "./test/clj_native/test") (loadlib test_lib) (deftest test-mul (are [a b z] (= z (mul a b)) 1 1 (* 1 1) 100 -10 (* 100 -10) 65535 10 (* 65535 10) 10000000 10 (* 10000000 10) )) (deftest test-and2 (are [a b z] (= z (and2 a b)) 0 0 0 0 1 0 1 0 0 1 1 1 )) (deftest test-and3 (are [a b z] (= z (let [r (java.nio.ByteBuffer/allocate 1)] (and3 a b r) (.get r 0))) 0 0 0 0 1 0 1 0 0 1 1 1 )) ;; This is trying to be a close match for ;; overtone/src/overtone/sc/machinery/server/native.clj:scsynth-get-buffer-data (deftest test-and3-buf (are [a b z n] (= [(apply vector (range n)) n z] (let [r (java.nio.ByteBuffer/allocate 1) nbr (byref n-buf)] (and3_buf a b r n nbr) (vector (apply vector (for [i (range n)] (.getInt (.buf nbr) (* 4 i)))) (.n nbr) (.get r 0)))) 1 1 1 4 0 0 0 3 )) This was motivated by the function from Portmidi , ;; which has the signature: ;; PMEXPORT const PmDeviceInfo * ( PmDeviceID i d ) ; ;; The memory pointed to is owned by Portmidi and is not to be ;; freed. A static variable has this property, so is a similar ;; use-case. (deftest test-point (are [x y x' y'] (let [thepoint (static_point x y)] (and (= x' (.x thepoint)) (= y' (.y thepoint)) (= "foo" (.name thepoint)))) 1 2 1 2 -1 -4 -1 -4))
null
https://raw.githubusercontent.com/bagucode/clj-native/31f3862c00cb5160360195c5c679553bcaf99e13/test/clj_native/test/test_lib.clj
clojure
added test cases to help resolve an issue that was plaguing the Overtone project. See discussion here: =#!topic/overtone/wrlzi3dNhr0 Feel free to add new functions/tests as issues pop up. This is trying to be a close match for overtone/src/overtone/sc/machinery/server/native.clj:scsynth-get-buffer-data which has the signature: freed. A static variable has this property, so is a similar use-case.
(ns clj-native.test.test-lib (:use [clj-native.direct :only [defclib loadlib typeof]] [clj-native.structs :only [byref byval]] [clj-native.callbacks :only [callback]] [clojure.test])) (defclib test_lib (:libname "test_lib") (:structs (n-buf :n int :buf void*) (point :x int :y int :name constchar*)) (:functions (mul [int int] int) (and2 [byte byte] byte) (and3 [byte byte byte*] void) (and3_buf [byte byte byte* int n-buf*] void) (static_point [int int] point*))) (println "NOTE: Testing assumes a built test/clj_native/test/test_lib library") (System/setProperty "jna.library.path" "./test/clj_native/test") (loadlib test_lib) (deftest test-mul (are [a b z] (= z (mul a b)) 1 1 (* 1 1) 100 -10 (* 100 -10) 65535 10 (* 65535 10) 10000000 10 (* 10000000 10) )) (deftest test-and2 (are [a b z] (= z (and2 a b)) 0 0 0 0 1 0 1 0 0 1 1 1 )) (deftest test-and3 (are [a b z] (= z (let [r (java.nio.ByteBuffer/allocate 1)] (and3 a b r) (.get r 0))) 0 0 0 0 1 0 1 0 0 1 1 1 )) (deftest test-and3-buf (are [a b z n] (= [(apply vector (range n)) n z] (let [r (java.nio.ByteBuffer/allocate 1) nbr (byref n-buf)] (and3_buf a b r n nbr) (vector (apply vector (for [i (range n)] (.getInt (.buf nbr) (* 4 i)))) (.n nbr) (.get r 0)))) 1 1 1 4 0 0 0 3 )) This was motivated by the function from Portmidi , The memory pointed to is owned by Portmidi and is not to be (deftest test-point (are [x y x' y'] (let [thepoint (static_point x y)] (and (= x' (.x thepoint)) (= y' (.y thepoint)) (= "foo" (.name thepoint)))) 1 2 1 2 -1 -4 -1 -4))
2f89c16269cf41089fec90da5111f95028828e03aee34f9201ebb3ef2f7efaf1
g000001/MacLISP-compat
MLSUB.lisp
MLSUB -*-Mode : Lisp;Package : SI;Lowercase : T-*- ;;; ************************************************************************* * * * * * * * * * * * * MACLISP - ONLY SUBR 's used by * * * * * * * * * * * * * * * * ;;; ************************************************************************* * * ( c ) Copyright 1981 Massachusetts Institute of Technology * * * * * * * * * * * * * ;;; ************************************************************************* (herald MLSUB /17) Contains the open - codings , as SUBRs , of some common ;;; macros. Also has some "helper" functions needed by macro output. (include ((lisp) subload lsp)) (eval-when (compile) (let ((OBARRAY COBARRAY) (x 'SI:ARRAY-HEADERP) (y 'P1BOOL1ABLE)) (unwind-protect (progn (remob 'SI:ARRAY-HEADERP) (remob 'P1BOOL1ABLE) (setq OBARRAY SOBARRAY x (intern 'SI:ARRAY-HEADERP) y (intern 'P1BOOL1ABLE))) (setq OBARRAY COBARRAY) (intern x) (intern y)) (putprop x (get 'TYPEP 'SUBR) 'SUBR)) ) (eval-when (compile) (mapc '(lambda (x) (putprop x 'T 'SKIP-WARNING)) '(<= >= FIXNUMP FLONUMP EVENP LISTP ARRAYP LOGAND LOGIOR LOGXOR LOGNOT SI:CHECK-MULTIPLICITIES MULTIPLE-VALUE-LIST/| VALUES-LIST )) (setq MUZZLED 'T STRT7 'T MACROS () ) (and (alphalessp (symeval ((lambda (OBARRAY) (intern 'INITIAVERNO)) SOBARRAY)) "112") (+internal-lossage 'INITIAVERNO 'COMPILE INITIAVERNO)) ) (declare (own-symbol HERALD) (mapex () )) (declare (genprefix |mlsb|) ) Simple open - coded like LOGAND etc as 's , (eval-when (compile) (defmacro GEN-OPENS (&rest l) `(PROGN 'COMPILE ,.(mapcar #'(lambda (x) (or (getl x '(MACRO SOURCE-TRANS)) (get x 'P1BOOL1ABLE) (+internal-lossage '|Not open-codeable| 'gen-opens x)) `(DEFUN ,x (Y) (AND (,x Y) *:TRUTH))) l))) (defmacro GEN-LOGS (&rest l &aux i n nargs) (si:gen-local-var i "i") (si:gen-local-var n "n") (si:gen-local-var nargs "Nargs") `(PROGN 'COMPILE ,.(mapcan #'(lambda (x) (or (getl x '(MACRO SOURCE-TRANS)) (+internal-lossage '|Not open-codeable| 'gen-logs x)) `(PROGN 'COMPILE (DEFUN ,x ,NARGS (DO ((,I 2 (1+ ,I)) (,N (ARG 1))) ((> ,I ,NARGS) ,N) (DECLARE (FIXNUM ,I ,N)) (SETQ ,N (,x (ARG ,I) ,N)))) (ARGS ',x '(2 . 510.)))) l))) () ) (eval-when (eval) (defun lose-opens (l) (princ '|/ Warning! | msgfiles) (princ (car l) msgfiles) (princ '| can't do these functions interpretively:/ / | msgfiles) (prin1 (cdr l) msgfiles) (terpri msgfiles)) (defprop gen-opens lose-opens macro) (defprop gen-logs lose-opens macro) () ) (gen-opens FIXNUMP FLONUMP EVENP LISTP) (defun ARRAYP (x) (and (si:array-headerp x) (memq (array-type x) '(NIL T FIXNUM FLONUM)) *:TRUTH)) (gen-logs LOGAND LOGIOR LOGXOR) (defun LOGNOT (x) (boole 10. 
x -1)) ;;;; Multi-arg <= and <=, and SI:CHECK-MULTIPLICITIES (defun <= nargs (si:<=>-aux nargs '<=)) (defun >= nargs (si:<=>-aux nargs '>=)) (defun SI:<=>-AUX (nargs fun &aux inverter x y type-tester) (or (> nargs 1) (error '|Too few args| (cons fun (listify nargs)))) (or (setq inverter (cond ((eq fun '<=) '>) ((eq fun '>=) '<))) (memq fun '(< >)) (error 'SI:<=>-AUX fun)) (setq x (arg 1)) (do () ((memq (setq type-tester (typep x)) '(FIXNUM FLONUM))) (check-type x #'NUMBERP fun)) (do ((i 2 (1+ i)) ) ((> i nargs) *:TRUTH) (declare (fixnum i)) (setq y (arg i) ) (if (or *RSET (not (eq type-tester (typep y)))) (check-type y (if (eq type-tester 'FIXNUM) #'FIXNUMP #'FLONUMP) fun)) (and (cond (inverter (if (eq inverter '>) (> x y) (< x y))) ((eq fun '>) (not (> x y))) ('T (not (< x y)))) (return () )) (setq x y))) (eval-when (eval compile) (setq retvec-vars '(*:AR2 *:AR3 *:AR4 *:AR5 *:AR6 *:AR7 *:AR8) max-retvec (length retvec-vars)) ) (let ((x '#.`(*:ARlist *:ARn ,.retvec-vars))) (if (boundp '+INTERNAL-INTERRUPT-BOUND-VARIABLES) (if (and (not (memq '*:AR2 +INTERNAL-INTERRUPT-BOUND-VARIABLES)) (not (memq '*:ARlist +INTERNAL-INTERRUPT-BOUND-VARIABLES))) (setq +INTERNAL-INTERRUPT-BOUND-VARIABLES (append x +INTERNAL-INTERRUPT-BOUND-VARIABLES))) (setq +INTERNAL-INTERRUPT-BOUND-VARIABLES x))) (defvar SI:CHECK-MULTIPLICITIES () " () means pad out unsupplied multiple-return-values with nulls; CERROR means run an error if not enough values supplied; any thing else means to funcall that function.") (defun SI:CHECK-MULTIPLICITIES (n) ;; What if the desired number of extra-return-values is greater than the ;; actual number (of "extra-return-values")? Well, then get some more! (cond ((not (> n *:ARn)) () ) ((null SI:CHECK-MULTIPLICITIES) ;; Just supply ()'s for the missing return values (do ((x (nthcdr *:ARn '#.retvec-vars) (cdr x)) (i *:ARn (1+ i))) ((not (< i n)) ) (set (car x) () ))) ((eq SI:CHECK-MULTIPLICITIES 'CERROR) (prog (l) (setq l (cdr (multiple-value-list/| () ))) ;; Here, "l" is a list of the values actually returned, except for the first . B (setq l (error '|Too few (extra) values returned for MULTIPLE-VALUE| l 'WRNG-TYPE-ARG)) (if (< (length l) n) (go B)) ;; Get some more, and spread them out. (values-list (cons () l)))) ('T (funcall SI:CHECK-MULTIPLICITIES n))) () ) VALUES - LIST , MULTIPLE - VALUE - LIST/| (defun VALUES-LIST (l) "Set up the multiple-values vector from a list." (let (first-val (n 0)) (declare (fixnum n)) (do () ((and (not (atom l)) (not (< (setq n (1- (length l))) 0)))) (setq l (error "Atomic arg to VALUES-LIST?" l 'WRNG-TYPE-ARG))) (pop l first-val) (setq *:ARlist () ) (cond ((< n 4) Do the case of 1 to 4 ret vals quickly ! (cond ((< n 2) (if (= n 1) (setq *:AR2 (car l)))) ('T (pop l *:AR2) (pop l *:AR3) (if (= n 3) (setq *:AR4 (car l)))))) ('T (mapc #'SET '#.retvec-vars l) (if (> n #.max-retvec) (setq *:ARlist (nthcdr #.max-retvec l))))) (setq *:ARn n) first-val)) (defun MULTIPLE-VALUE-LIST/| (x) "Listify the elements of the multiple-values vector. *:ARn holds the number of 'extra' return values, and the arg to this fun is first val." 
(let ((n *:ARN)) (declare (fixnum n)) (prog1 (cons x (and (> n 0) (cons *:AR2 (and (> n 1) (cons *:AR3 (and (> n 2) (cons *:AR4 (and (> n 3) (cons *:AR5 (and (> n 4) (cons *:AR6 (and (> n 5) (cons *:AR7 (and (> n 6) (cons *:AR8 (and (> n 7) (append *:ARLIST () ))))))))))))))))) (setq *:ARn 0)))) (or (fboundp 'MULTIPLE-VALUE-LIST) (equal (get 'MULTIPLE-VALUE-LIST 'AUTOLOAD) #%(autoload-filename MLMAC)) (defun MULTIPLE-VALUE-LIST macro (X) (remprop 'MULTIPLE-VALUE-LIST 'MACRO) #%(subload MLMAC) (eval x))) 
null
https://raw.githubusercontent.com/g000001/MacLISP-compat/a147d09b98dca4d7c089424c3cbaf832d2fd857a/MLSUB.lisp
lisp
Package : SI;Lowercase : T-*- ************************************************************************* ************************************************************************* ************************************************************************* macros. Also has some "helper" functions needed by macro output. Multi-arg <= and <=, and SI:CHECK-MULTIPLICITIES What if the desired number of extra-return-values is greater than the actual number (of "extra-return-values")? Well, then get some more! Just supply ()'s for the missing return values Here, "l" is a list of the values actually returned, Get some more, and spread them out.
* * * * * * * * * * * * MACLISP - ONLY SUBR 's used by * * * * * * * * * * * * * * * * * * ( c ) Copyright 1981 Massachusetts Institute of Technology * * * * * * * * * * * * * (herald MLSUB /17) Contains the open - codings , as SUBRs , of some common (include ((lisp) subload lsp)) (eval-when (compile) (let ((OBARRAY COBARRAY) (x 'SI:ARRAY-HEADERP) (y 'P1BOOL1ABLE)) (unwind-protect (progn (remob 'SI:ARRAY-HEADERP) (remob 'P1BOOL1ABLE) (setq OBARRAY SOBARRAY x (intern 'SI:ARRAY-HEADERP) y (intern 'P1BOOL1ABLE))) (setq OBARRAY COBARRAY) (intern x) (intern y)) (putprop x (get 'TYPEP 'SUBR) 'SUBR)) ) (eval-when (compile) (mapc '(lambda (x) (putprop x 'T 'SKIP-WARNING)) '(<= >= FIXNUMP FLONUMP EVENP LISTP ARRAYP LOGAND LOGIOR LOGXOR LOGNOT SI:CHECK-MULTIPLICITIES MULTIPLE-VALUE-LIST/| VALUES-LIST )) (setq MUZZLED 'T STRT7 'T MACROS () ) (and (alphalessp (symeval ((lambda (OBARRAY) (intern 'INITIAVERNO)) SOBARRAY)) "112") (+internal-lossage 'INITIAVERNO 'COMPILE INITIAVERNO)) ) (declare (own-symbol HERALD) (mapex () )) (declare (genprefix |mlsb|) ) Simple open - coded like LOGAND etc as 's , (eval-when (compile) (defmacro GEN-OPENS (&rest l) `(PROGN 'COMPILE ,.(mapcar #'(lambda (x) (or (getl x '(MACRO SOURCE-TRANS)) (get x 'P1BOOL1ABLE) (+internal-lossage '|Not open-codeable| 'gen-opens x)) `(DEFUN ,x (Y) (AND (,x Y) *:TRUTH))) l))) (defmacro GEN-LOGS (&rest l &aux i n nargs) (si:gen-local-var i "i") (si:gen-local-var n "n") (si:gen-local-var nargs "Nargs") `(PROGN 'COMPILE ,.(mapcan #'(lambda (x) (or (getl x '(MACRO SOURCE-TRANS)) (+internal-lossage '|Not open-codeable| 'gen-logs x)) `(PROGN 'COMPILE (DEFUN ,x ,NARGS (DO ((,I 2 (1+ ,I)) (,N (ARG 1))) ((> ,I ,NARGS) ,N) (DECLARE (FIXNUM ,I ,N)) (SETQ ,N (,x (ARG ,I) ,N)))) (ARGS ',x '(2 . 510.)))) l))) () ) (eval-when (eval) (defun lose-opens (l) (princ '|/ Warning! | msgfiles) (princ (car l) msgfiles) (princ '| can't do these functions interpretively:/ / | msgfiles) (prin1 (cdr l) msgfiles) (terpri msgfiles)) (defprop gen-opens lose-opens macro) (defprop gen-logs lose-opens macro) () ) (gen-opens FIXNUMP FLONUMP EVENP LISTP) (defun ARRAYP (x) (and (si:array-headerp x) (memq (array-type x) '(NIL T FIXNUM FLONUM)) *:TRUTH)) (gen-logs LOGAND LOGIOR LOGXOR) (defun LOGNOT (x) (boole 10. 
x -1)) (defun <= nargs (si:<=>-aux nargs '<=)) (defun >= nargs (si:<=>-aux nargs '>=)) (defun SI:<=>-AUX (nargs fun &aux inverter x y type-tester) (or (> nargs 1) (error '|Too few args| (cons fun (listify nargs)))) (or (setq inverter (cond ((eq fun '<=) '>) ((eq fun '>=) '<))) (memq fun '(< >)) (error 'SI:<=>-AUX fun)) (setq x (arg 1)) (do () ((memq (setq type-tester (typep x)) '(FIXNUM FLONUM))) (check-type x #'NUMBERP fun)) (do ((i 2 (1+ i)) ) ((> i nargs) *:TRUTH) (declare (fixnum i)) (setq y (arg i) ) (if (or *RSET (not (eq type-tester (typep y)))) (check-type y (if (eq type-tester 'FIXNUM) #'FIXNUMP #'FLONUMP) fun)) (and (cond (inverter (if (eq inverter '>) (> x y) (< x y))) ((eq fun '>) (not (> x y))) ('T (not (< x y)))) (return () )) (setq x y))) (eval-when (eval compile) (setq retvec-vars '(*:AR2 *:AR3 *:AR4 *:AR5 *:AR6 *:AR7 *:AR8) max-retvec (length retvec-vars)) ) (let ((x '#.`(*:ARlist *:ARn ,.retvec-vars))) (if (boundp '+INTERNAL-INTERRUPT-BOUND-VARIABLES) (if (and (not (memq '*:AR2 +INTERNAL-INTERRUPT-BOUND-VARIABLES)) (not (memq '*:ARlist +INTERNAL-INTERRUPT-BOUND-VARIABLES))) (setq +INTERNAL-INTERRUPT-BOUND-VARIABLES (append x +INTERNAL-INTERRUPT-BOUND-VARIABLES))) (setq +INTERNAL-INTERRUPT-BOUND-VARIABLES x))) (defvar SI:CHECK-MULTIPLICITIES () any thing else means to funcall that function.") (defun SI:CHECK-MULTIPLICITIES (n) (cond ((not (> n *:ARn)) () ) ((null SI:CHECK-MULTIPLICITIES) (do ((x (nthcdr *:ARn '#.retvec-vars) (cdr x)) (i *:ARn (1+ i))) ((not (< i n)) ) (set (car x) () ))) ((eq SI:CHECK-MULTIPLICITIES 'CERROR) (prog (l) (setq l (cdr (multiple-value-list/| () ))) except for the first . B (setq l (error '|Too few (extra) values returned for MULTIPLE-VALUE| l 'WRNG-TYPE-ARG)) (if (< (length l) n) (go B)) (values-list (cons () l)))) ('T (funcall SI:CHECK-MULTIPLICITIES n))) () ) VALUES - LIST , MULTIPLE - VALUE - LIST/| (defun VALUES-LIST (l) "Set up the multiple-values vector from a list." (let (first-val (n 0)) (declare (fixnum n)) (do () ((and (not (atom l)) (not (< (setq n (1- (length l))) 0)))) (setq l (error "Atomic arg to VALUES-LIST?" l 'WRNG-TYPE-ARG))) (pop l first-val) (setq *:ARlist () ) (cond ((< n 4) Do the case of 1 to 4 ret vals quickly ! (cond ((< n 2) (if (= n 1) (setq *:AR2 (car l)))) ('T (pop l *:AR2) (pop l *:AR3) (if (= n 3) (setq *:AR4 (car l)))))) ('T (mapc #'SET '#.retvec-vars l) (if (> n #.max-retvec) (setq *:ARlist (nthcdr #.max-retvec l))))) (setq *:ARn n) first-val)) (defun MULTIPLE-VALUE-LIST/| (x) "Listify the elements of the multiple-values vector. *:ARn holds the number of 'extra' return values, and the arg to this fun is first val." (let ((n *:ARN)) (declare (fixnum n)) (prog1 (cons x (and (> n 0) (cons *:AR2 (and (> n 1) (cons *:AR3 (and (> n 2) (cons *:AR4 (and (> n 3) (cons *:AR5 (and (> n 4) (cons *:AR6 (and (> n 5) (cons *:AR7 (and (> n 6) (cons *:AR8 (and (> n 7) (append *:ARLIST () ))))))))))))))))) (setq *:ARn 0)))) (or (fboundp 'MULTIPLE-VALUE-LIST) (equal (get 'MULTIPLE-VALUE-LIST 'AUTOLOAD) #%(autoload-filename MLMAC)) (defun MULTIPLE-VALUE-LIST macro (X) (remprop 'MULTIPLE-VALUE-LIST 'MACRO) #%(subload MLMAC) (eval x))) 
88ffeefaac04459299eedbcb279bad6199fe4495bf5d5e611e41aa96854c3368
ocaml/opam
opamTypesBase.ml
(**************************************************************************) (* *) Copyright 2012 - 2019 OCamlPro Copyright 2012 INRIA (* *) (* All rights reserved. This file is distributed under the terms of the *) GNU Lesser General Public License version 2.1 , with the special (* exception on linking described in the file LICENSE. *) (* *) (**************************************************************************) open OpamParserTypes.FullPos open OpamTypes include OpamCompat let std_path_of_string = function | "prefix" -> Prefix | "lib" -> Lib | "bin" -> Bin | "sbin" -> Sbin | "share" -> Share | "doc" -> Doc | "etc" -> Etc | "man" -> Man | "toplevel" -> Toplevel | "stublibs" -> Stublibs | _ -> failwith "Wrong standard path" let string_of_std_path = function | Prefix -> "prefix" | Lib -> "lib" | Bin -> "bin" | Sbin -> "sbin" | Share -> "share" | Doc -> "doc" | Etc -> "etc" | Man -> "man" | Toplevel -> "toplevel" | Stublibs -> "stublibs" let all_std_paths = [ Prefix; Lib; Bin; Sbin; Share; Doc; Etc; Man; Toplevel; Stublibs ] let string_of_shell = function | SH_fish -> "fish" | SH_csh -> "csh" | SH_zsh -> "zsh" | SH_sh -> "sh" | SH_bash -> "bash" | SH_pwsh Powershell_pwsh -> "pwsh" | SH_pwsh Powershell -> "powershell" | SH_win_cmd -> "cmd" let file_null = "" let pos_file filename = { filename = OpamFilename.to_string filename; start = -1, -1; stop = -1, -1; } let pos_null = { filename = file_null; start = -1, -1; stop = -1, -1; } let nullify_pos pelem = {pelem; pos = pos_null} (* XXX update *) let pos_best pos1 pos2 = match pos1, pos2 with | { filename = ""; _ }, _ -> pos2 | _, { filename = ""; _ } -> pos1 | { start = (-1,_) ; _ }, _ -> pos2 | _, { start = (-1,_) ; _ } -> pos1 | _, _ -> pos1 if f1 = file_null then pos2 else if f2 = file_null then pos1 else if col1 = -1 then pos2 else pos1 if f1 = file_null then pos2 else if f2 = file_null then pos1 else if col1 = -1 then pos2 else pos1 *) let string_of_pos pos = let check x = if x >= 0 then string_of_int x else "-" in Printf.sprintf "%s:%s:%s-%s:%s:" pos.filename (check (fst pos.start)) (check (snd pos.start)) (check (fst pos.stop)) (check (snd pos.stop)) let string_of_user_action = function | Query -> "query" | Install -> "install" | Upgrade -> "upgrade" | Reinstall -> "reinstall" | Remove -> "remove" | Switch -> "switch" | Import -> "import" (* Command line arguments *) let env_array l = (* The env list may contain successive bindings of the same variable, make sure to keep only the last *) let bindings = List.fold_left (fun acc (k,v,_) -> OpamStd.Env.Name.Map.add k v acc) OpamStd.Env.Name.Map.empty l in let a = Array.make (OpamStd.Env.Name.Map.cardinal bindings) "" in OpamStd.Env.Name.Map.fold (fun k v i -> a.(i) <- (k :> string) ^ "=" ^ v; succ i) bindings 0 |> ignore; a let string_of_filter_ident (pkgs,var,converter) = OpamStd.List.concat_map ~nil:"" "+" ~right:":" (function None -> "_" | Some n -> OpamPackage.Name.to_string n) pkgs ^ OpamVariable.to_string var ^ (match converter with | Some (it,ifu) -> "?"^it^":"^ifu | None -> "") let filter_ident_of_string s = match OpamStd.String.rcut_at s ':' with | None -> [], OpamVariable.of_string s, None | Some (p,last) -> let get_names s = List.map (function "_" -> None | s -> Some (OpamPackage.Name.of_string s)) (OpamStd.String.split s '+') in match OpamStd.String.rcut_at p '?' 
with | None -> get_names p, OpamVariable.of_string last, None | Some (p,val_if_true) -> let converter = Some (val_if_true, last) in match OpamStd.String.rcut_at p ':' with | None -> [], OpamVariable.of_string p, converter | Some (packages,var) -> get_names packages, OpamVariable.of_string var, converter let all_package_flags = [ Pkgflag_LightUninstall; (* Pkgflag_AllSwitches; This has no "official" existence yet and does nothing *) Pkgflag_Verbose; Pkgflag_Plugin; Pkgflag_Compiler; Pkgflag_Conf; ] let string_of_pkg_flag = function | Pkgflag_LightUninstall -> "light-uninstall" | Pkgflag_Verbose -> "verbose" | Pkgflag_Plugin -> "plugin" | Pkgflag_Compiler -> "compiler" | Pkgflag_Conf -> "conf" | Pkgflag_AvoidVersion -> "avoid-version" | Pkgflag_Deprecated -> "deprecated" | Pkgflag_Unknown s -> s let pkg_flag_of_string = function | "light-uninstall" -> Pkgflag_LightUninstall | "verbose" -> Pkgflag_Verbose | "plugin" -> Pkgflag_Plugin | "compiler" -> Pkgflag_Compiler | "conf" -> Pkgflag_Conf | "avoid-version" -> Pkgflag_AvoidVersion | "deprecated" -> Pkgflag_Deprecated | s -> Pkgflag_Unknown s let action_contents = function | `Remove p | `Install p | `Reinstall p | `Build p | `Change (_,_,p) -> [p] | `Fetch pl -> pl let full_action_contents = function | `Change (_,p1,p2) -> [p1; p2] | a -> action_contents a let map_atomic_action f = function | `Remove p -> `Remove (f p) | `Install p -> `Install (f p) let map_highlevel_action f = function | #atomic_action as a -> map_atomic_action f a | `Change (direction, p1, p2) -> `Change (direction, f p1, f p2) | `Reinstall p -> `Reinstall (f p) let map_concrete_action f = function | #atomic_action as a -> map_atomic_action f a | `Build p -> `Build (f p) | `Fetch pl -> `Fetch (List.map f pl) let map_action f = function | #highlevel_action as a -> map_highlevel_action f a | #concrete_action as a -> map_concrete_action f a let string_of_cause to_string = let list_to_string l = match List.map to_string l with | a::b::c::_::_::_ -> Printf.sprintf "%s, %s, %s, etc." a b c | l -> String.concat ", " l in function | Upstream_changes -> OpamConsole.colorise `bold "upstream or system changes" | Use pkgs -> Printf.sprintf "uses %s" (list_to_string pkgs) | Required_by pkgs -> Printf.sprintf "required by %s" (list_to_string pkgs) | Conflicts_with pkgs -> Printf.sprintf "conflicts with %s" (list_to_string pkgs) | Requested -> "" | Unavailable -> OpamConsole.colorise `bold "no longer available" | Unknown -> "" let map_success f = function | Success x -> Success (f x) | Conflicts c -> Conflicts c let iter_success f = function | Success x -> f x | Conflicts _ -> ()
null
https://raw.githubusercontent.com/ocaml/opam/c53b1a83d73d05e0abd361f9bc8361dcd121b6e8/src/format/opamTypesBase.ml
ocaml
************************************************************************ All rights reserved. This file is distributed under the terms of the exception on linking described in the file LICENSE. ************************************************************************ XXX update Command line arguments The env list may contain successive bindings of the same variable, make sure to keep only the last Pkgflag_AllSwitches; This has no "official" existence yet and does nothing
Copyright 2012 - 2019 OCamlPro Copyright 2012 INRIA GNU Lesser General Public License version 2.1 , with the special open OpamParserTypes.FullPos open OpamTypes include OpamCompat let std_path_of_string = function | "prefix" -> Prefix | "lib" -> Lib | "bin" -> Bin | "sbin" -> Sbin | "share" -> Share | "doc" -> Doc | "etc" -> Etc | "man" -> Man | "toplevel" -> Toplevel | "stublibs" -> Stublibs | _ -> failwith "Wrong standard path" let string_of_std_path = function | Prefix -> "prefix" | Lib -> "lib" | Bin -> "bin" | Sbin -> "sbin" | Share -> "share" | Doc -> "doc" | Etc -> "etc" | Man -> "man" | Toplevel -> "toplevel" | Stublibs -> "stublibs" let all_std_paths = [ Prefix; Lib; Bin; Sbin; Share; Doc; Etc; Man; Toplevel; Stublibs ] let string_of_shell = function | SH_fish -> "fish" | SH_csh -> "csh" | SH_zsh -> "zsh" | SH_sh -> "sh" | SH_bash -> "bash" | SH_pwsh Powershell_pwsh -> "pwsh" | SH_pwsh Powershell -> "powershell" | SH_win_cmd -> "cmd" let file_null = "" let pos_file filename = { filename = OpamFilename.to_string filename; start = -1, -1; stop = -1, -1; } let pos_null = { filename = file_null; start = -1, -1; stop = -1, -1; } let nullify_pos pelem = {pelem; pos = pos_null} let pos_best pos1 pos2 = match pos1, pos2 with | { filename = ""; _ }, _ -> pos2 | _, { filename = ""; _ } -> pos1 | { start = (-1,_) ; _ }, _ -> pos2 | _, { start = (-1,_) ; _ } -> pos1 | _, _ -> pos1 if f1 = file_null then pos2 else if f2 = file_null then pos1 else if col1 = -1 then pos2 else pos1 if f1 = file_null then pos2 else if f2 = file_null then pos1 else if col1 = -1 then pos2 else pos1 *) let string_of_pos pos = let check x = if x >= 0 then string_of_int x else "-" in Printf.sprintf "%s:%s:%s-%s:%s:" pos.filename (check (fst pos.start)) (check (snd pos.start)) (check (fst pos.stop)) (check (snd pos.stop)) let string_of_user_action = function | Query -> "query" | Install -> "install" | Upgrade -> "upgrade" | Reinstall -> "reinstall" | Remove -> "remove" | Switch -> "switch" | Import -> "import" let env_array l = let bindings = List.fold_left (fun acc (k,v,_) -> OpamStd.Env.Name.Map.add k v acc) OpamStd.Env.Name.Map.empty l in let a = Array.make (OpamStd.Env.Name.Map.cardinal bindings) "" in OpamStd.Env.Name.Map.fold (fun k v i -> a.(i) <- (k :> string) ^ "=" ^ v; succ i) bindings 0 |> ignore; a let string_of_filter_ident (pkgs,var,converter) = OpamStd.List.concat_map ~nil:"" "+" ~right:":" (function None -> "_" | Some n -> OpamPackage.Name.to_string n) pkgs ^ OpamVariable.to_string var ^ (match converter with | Some (it,ifu) -> "?"^it^":"^ifu | None -> "") let filter_ident_of_string s = match OpamStd.String.rcut_at s ':' with | None -> [], OpamVariable.of_string s, None | Some (p,last) -> let get_names s = List.map (function "_" -> None | s -> Some (OpamPackage.Name.of_string s)) (OpamStd.String.split s '+') in match OpamStd.String.rcut_at p '?' 
with | None -> get_names p, OpamVariable.of_string last, None | Some (p,val_if_true) -> let converter = Some (val_if_true, last) in match OpamStd.String.rcut_at p ':' with | None -> [], OpamVariable.of_string p, converter | Some (packages,var) -> get_names packages, OpamVariable.of_string var, converter let all_package_flags = [ Pkgflag_LightUninstall; Pkgflag_Verbose; Pkgflag_Plugin; Pkgflag_Compiler; Pkgflag_Conf; ] let string_of_pkg_flag = function | Pkgflag_LightUninstall -> "light-uninstall" | Pkgflag_Verbose -> "verbose" | Pkgflag_Plugin -> "plugin" | Pkgflag_Compiler -> "compiler" | Pkgflag_Conf -> "conf" | Pkgflag_AvoidVersion -> "avoid-version" | Pkgflag_Deprecated -> "deprecated" | Pkgflag_Unknown s -> s let pkg_flag_of_string = function | "light-uninstall" -> Pkgflag_LightUninstall | "verbose" -> Pkgflag_Verbose | "plugin" -> Pkgflag_Plugin | "compiler" -> Pkgflag_Compiler | "conf" -> Pkgflag_Conf | "avoid-version" -> Pkgflag_AvoidVersion | "deprecated" -> Pkgflag_Deprecated | s -> Pkgflag_Unknown s let action_contents = function | `Remove p | `Install p | `Reinstall p | `Build p | `Change (_,_,p) -> [p] | `Fetch pl -> pl let full_action_contents = function | `Change (_,p1,p2) -> [p1; p2] | a -> action_contents a let map_atomic_action f = function | `Remove p -> `Remove (f p) | `Install p -> `Install (f p) let map_highlevel_action f = function | #atomic_action as a -> map_atomic_action f a | `Change (direction, p1, p2) -> `Change (direction, f p1, f p2) | `Reinstall p -> `Reinstall (f p) let map_concrete_action f = function | #atomic_action as a -> map_atomic_action f a | `Build p -> `Build (f p) | `Fetch pl -> `Fetch (List.map f pl) let map_action f = function | #highlevel_action as a -> map_highlevel_action f a | #concrete_action as a -> map_concrete_action f a let string_of_cause to_string = let list_to_string l = match List.map to_string l with | a::b::c::_::_::_ -> Printf.sprintf "%s, %s, %s, etc." a b c | l -> String.concat ", " l in function | Upstream_changes -> OpamConsole.colorise `bold "upstream or system changes" | Use pkgs -> Printf.sprintf "uses %s" (list_to_string pkgs) | Required_by pkgs -> Printf.sprintf "required by %s" (list_to_string pkgs) | Conflicts_with pkgs -> Printf.sprintf "conflicts with %s" (list_to_string pkgs) | Requested -> "" | Unavailable -> OpamConsole.colorise `bold "no longer available" | Unknown -> "" let map_success f = function | Success x -> Success (f x) | Conflicts c -> Conflicts c let iter_success f = function | Success x -> f x | Conflicts _ -> ()
c79b6eb30bff2926825253123b5b1225b985b13ee599cedad68dfcce7c31310a
audreyt/interpolatedstring-perl6
TestRewrite.hs
# LANGUAGE OverloadedStrings , QuasiQuotes # module Main where import Text.InterpolatedString.Perl6 import Test.HUnit import Data.ByteString.Char8 as BS(ByteString, pack) import Data.Text as T(Text, pack) -- the primary purpose of these tests is to ensure that -- the Text and ByteString rewrite rules are firing, to avoid -- needlessly converting string types testByteString = assertBool "" $ [$qc|{"a" :: ByteString} {"b" :: ByteString}|] == BS.pack ("a b") testText = assertBool "" $ [$qc|{"a" :: Text} {"b" :: Text}|] == T.pack ("a b") tests = TestList [TestLabel "ByteString Test" $ TestCase testByteString ,TestLabel "Text Test" $ TestCase testText ] main = runTestTT tests
null
https://raw.githubusercontent.com/audreyt/interpolatedstring-perl6/81841be3828a5ed5bbe0c9aa4e3335e12befdd01/tests/TestRewrite.hs
haskell
the primary purpose of these tests is to ensure that the Text and ByteString rewrite rules are firing, to avoid needlessly converting string types
# LANGUAGE OverloadedStrings , QuasiQuotes # module Main where import Text.InterpolatedString.Perl6 import Test.HUnit import Data.ByteString.Char8 as BS(ByteString, pack) import Data.Text as T(Text, pack) testByteString = assertBool "" $ [$qc|{"a" :: ByteString} {"b" :: ByteString}|] == BS.pack ("a b") testText = assertBool "" $ [$qc|{"a" :: Text} {"b" :: Text}|] == T.pack ("a b") tests = TestList [TestLabel "ByteString Test" $ TestCase testByteString ,TestLabel "Text Test" $ TestCase testText ] main = runTestTT tests
4601f1b44aeae24346a0f5856cb5e2c3f2823c0379dcc14d0616a3c4bea6e0fe
ThoughtWorksInc/DeepDarkFantasy
TestPoly.hs
module Main where import DDF.Sam.Poly hiding (main) import Control.Monad import System.Exit (exitFailure) main :: IO () main = do x <- solve (const $ return ()) (const . const $ return ()) unless (x - 4 < 0.1) exitFailure return ()
null
https://raw.githubusercontent.com/ThoughtWorksInc/DeepDarkFantasy/4c569aefc03a2bcfb6113b65367201d30077f2b6/test/TestPoly.hs
haskell
module Main where import DDF.Sam.Poly hiding (main) import Control.Monad import System.Exit (exitFailure) main :: IO () main = do x <- solve (const $ return ()) (const . const $ return ()) unless (x - 4 < 0.1) exitFailure return ()
0c99eee63010e371e88943d63d24f91ccf941be8acc9053754c6afd0dbbe6bbd
ocaml/merlin
a.ml
module T = struct type t = X of int end module Y = struct let y = T.X 1 end let z = Y.y let z2 = B.x
null
https://raw.githubusercontent.com/ocaml/merlin/8f9dba1cc8885700145d6d1c33141ec0dc8544f5/tests/test-dirs/locate-type.t/a.ml
ocaml
module T = struct type t = X of int end module Y = struct let y = T.X 1 end let z = Y.y let z2 = B.x
7172caa789ff59bfc82781199da5d51cc8b885b0d86ed48c7e05d086560f5e55
returntocorp/semgrep
Rule_fetching.mli
(* input *) type rules_source = -e/-l/--replacement . In theory we could even parse the string to get * a * a XPattern.t *) | Pattern of string * Xlang.t * string option (* replacement *) (* --config. In theory we could even parse the string to get * some Semgrep_dashdash_config.config_kind list *) | Configs of Semgrep_dashdash_config.config_str list TODO ? | ProjectUrl of Uri.t ? or just use for it ? [@@deriving show] (* output *) type rules_and_origin = { origin : origin; rules : Rule.rules; errors : Rule.invalid_rule_error list; } and origin = Common.filename option (* None for remote files *) [@@deriving show] val partition_rules_and_errors : rules_and_origin list -> Rule.rules * Rule.invalid_rule_error list (* [rules_from_rules_source] returns rules from --config or -e * TODO: does it rewrite the rule_id? *) val rules_from_rules_source : rules_source -> rules_and_origin list (* internals *) [ rules_from_dashdash_config config ] returns a list of rules_and_origin * because the string can correspond to a folder , in which case we return * one rules_and_origin per files in this folder . * because the string can correspond to a folder, in which case we return * one rules_and_origin per files in this folder. *) val rules_from_dashdash_config : Semgrep_dashdash_config.config_kind -> rules_and_origin list (* low-level API *) val load_rules_from_file : Common.filename -> rules_and_origin val load_rules_from_url : Uri.t -> rules_and_origin
null
https://raw.githubusercontent.com/returntocorp/semgrep/855abad9ada6ea5fd72d437fd69ff2e5fa42c1f1/src/osemgrep/cli_scan/Rule_fetching.mli
ocaml
input replacement --config. In theory we could even parse the string to get * some Semgrep_dashdash_config.config_kind list output None for remote files [rules_from_rules_source] returns rules from --config or -e * TODO: does it rewrite the rule_id? internals low-level API
type rules_source = -e/-l/--replacement . In theory we could even parse the string to get * a * a XPattern.t *) | Configs of Semgrep_dashdash_config.config_str list TODO ? | ProjectUrl of Uri.t ? or just use for it ? [@@deriving show] type rules_and_origin = { origin : origin; rules : Rule.rules; errors : Rule.invalid_rule_error list; } [@@deriving show] val partition_rules_and_errors : rules_and_origin list -> Rule.rules * Rule.invalid_rule_error list val rules_from_rules_source : rules_source -> rules_and_origin list [ rules_from_dashdash_config config ] returns a list of rules_and_origin * because the string can correspond to a folder , in which case we return * one rules_and_origin per files in this folder . * because the string can correspond to a folder, in which case we return * one rules_and_origin per files in this folder. *) val rules_from_dashdash_config : Semgrep_dashdash_config.config_kind -> rules_and_origin list val load_rules_from_file : Common.filename -> rules_and_origin val load_rules_from_url : Uri.t -> rules_and_origin
cc933d7a8166b46b4806cb49128a8254d8ad56a67b4339eda217aa6b0cab4153
rm-hull/project-euler
euler028.clj
EULER # 028 ;; ========== Starting with the number 1 and moving to the right in a clockwise direction a 5 by 5 spiral is formed as follows : ;; ;; 21 22 23 24 25 20 7 8 9 10 19 6 1 2 11 18 5 4 3 12 ;; 17 16 15 14 13 ;; It can be verified that the sum of the numbers on the diagonals is 101 . ;; What is the sum of the numbers on the diagonals in a 1001 by 1001 spiral ;; formed in the same way?) ;; (ns euler028 (:use [util.misc])) (def spiral-seq (mapcat #(repeat 4 %) (map #(* % 2) integers))) (defn accumulate [coll n] (concat coll [(+ (last coll) n)])) (defn solve [n] (reduce + (reduce accumulate (list 1) (take-while #(<= % n) spiral-seq)))) (time (solve 1001))
null
https://raw.githubusercontent.com/rm-hull/project-euler/04e689e87a1844cfd83229bb4628051e3ac6a325/src/euler028.clj
clojure
========== 21 22 23 24 25 17 16 15 14 13 formed in the same way?)
EULER # 028 Starting with the number 1 and moving to the right in a clockwise direction a 5 by 5 spiral is formed as follows : 20 7 8 9 10 19 6 1 2 11 18 5 4 3 12 It can be verified that the sum of the numbers on the diagonals is 101 . What is the sum of the numbers on the diagonals in a 1001 by 1001 spiral (ns euler028 (:use [util.misc])) (def spiral-seq (mapcat #(repeat 4 %) (map #(* % 2) integers))) (defn accumulate [coll n] (concat coll [(+ (last coll) n)])) (defn solve [n] (reduce + (reduce accumulate (list 1) (take-while #(<= % n) spiral-seq)))) (time (solve 1001))
f2a88005f21871f4dec1ffe06f0340c13504b5d15da356efba4ab1a130f48da1
mindreframer/clojure-stuff
04_sets.clj
(ns koans.04-sets (:require [koan-engine.core :refer :all])) (meditations "You can create a set by converting another collection" (= #{3} (set __)) "Counting them is like counting other collections" (= __ (count #{1 2 3})) "Remember that a set is a *mathematical* set" (= __ (set '(1 1 2 2 3 3 4 4 5 5))) "You can ask clojure for the union of two sets" (= __ (clojure.set/union #{1 2 3 4} #{2 3 5})) "And also the intersection" (= __ (clojure.set/intersection #{1 2 3 4} #{2 3 5})) "But don't forget about the difference" (= __ (clojure.set/difference #{1 2 3 4 5} #{2 3 5})))
null
https://raw.githubusercontent.com/mindreframer/clojure-stuff/1e761b2dacbbfbeec6f20530f136767e788e0fe3/github.com/functional-koans/clojure-koans/src/koans/04_sets.clj
clojure
(ns koans.04-sets (:require [koan-engine.core :refer :all])) (meditations "You can create a set by converting another collection" (= #{3} (set __)) "Counting them is like counting other collections" (= __ (count #{1 2 3})) "Remember that a set is a *mathematical* set" (= __ (set '(1 1 2 2 3 3 4 4 5 5))) "You can ask clojure for the union of two sets" (= __ (clojure.set/union #{1 2 3 4} #{2 3 5})) "And also the intersection" (= __ (clojure.set/intersection #{1 2 3 4} #{2 3 5})) "But don't forget about the difference" (= __ (clojure.set/difference #{1 2 3 4 5} #{2 3 5})))
80cdb66ee31edc58336cd7f08c097406188ac345e0254ac44137791e1173db98
david-vanderson/warp
combined.rkt
#lang racket/base (require racket/gui) (require "defs.rkt" "client.rkt" "server.rkt") (define b (box #f)) (thread (lambda () (start-server))) (start-client PORT) (yield 'wait)
null
https://raw.githubusercontent.com/david-vanderson/warp/cdc1d0bd942780fb5360dc6a34a2a06cf9518408/combined.rkt
racket
#lang racket/base (require racket/gui) (require "defs.rkt" "client.rkt" "server.rkt") (define b (box #f)) (thread (lambda () (start-server))) (start-client PORT) (yield 'wait)
992854176757e7c82c0644f26b5c6fcd4f185e9dabda1bd82c0e722ff66e661e
oreillymedia/etudes-for-erlang
dates.erl
%% @author J D Eisenberg <> %% @doc Functions for splitting a date into a list of year - month - day and finding date . 2013 J D Eisenberg %% @version 0.1 -module(dates). -export([date_parts/1, julian/1, is_leap_year/1]). @doc Takes a string in ISO date format ( yyyy - mm - dd ) and %% returns a list of integers in form [year, month, day]. -spec(date_parts(string()) -> list(integer())). date_parts(DateStr) -> [YStr, MStr, DStr] = re:split(DateStr, "-", [{return, list}]), [element(1, string:to_integer(YStr)), element(1, string:to_integer(MStr)), element(1, string:to_integer(DStr))]. @doc Takes a string in ISO date format ( yyyy - mm - dd ) and returns the day of the year ( Julian date ) . -spec(julian(string()) -> pos_integer()). julian(IsoDate) -> DaysPerMonth = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31], [Y, M, D] = date_parts(IsoDate), julian(Y, M, D, DaysPerMonth, 0). %% @doc Helper function that recursively accumulates the number of days %% up to the specified date. -spec(julian(integer(), integer(), integer(), [integer()], integer) -> integer()). julian(Y, M, D, MonthList, Total) when M > 13 - length(MonthList) -> [ThisMonth|RemainingMonths] = MonthList, julian(Y, M, D, RemainingMonths, Total + ThisMonth); julian(Y, M, D, _MonthList, Total) -> case M > 2 andalso is_leap_year(Y) of true -> Total + D + 1; false -> Total + D end. @doc Given a year , return true or false depending on whether the year is a leap year . -spec(is_leap_year(pos_integer()) -> boolean()). is_leap_year(Year) -> (Year rem 4 == 0 andalso Year rem 100 /= 0) orelse (Year rem 400 == 0).
null
https://raw.githubusercontent.com/oreillymedia/etudes-for-erlang/07200372503a8819f9fcc2856f8cb82451be7b48/code/ch06-03/dates.erl
erlang
@author J D Eisenberg <> @doc Functions for splitting a date into a list of @version 0.1 returns a list of integers in form [year, month, day]. @doc Helper function that recursively accumulates the number of days up to the specified date.
year - month - day and finding date . 2013 J D Eisenberg -module(dates). -export([date_parts/1, julian/1, is_leap_year/1]). @doc Takes a string in ISO date format ( yyyy - mm - dd ) and -spec(date_parts(string()) -> list(integer())). date_parts(DateStr) -> [YStr, MStr, DStr] = re:split(DateStr, "-", [{return, list}]), [element(1, string:to_integer(YStr)), element(1, string:to_integer(MStr)), element(1, string:to_integer(DStr))]. @doc Takes a string in ISO date format ( yyyy - mm - dd ) and returns the day of the year ( Julian date ) . -spec(julian(string()) -> pos_integer()). julian(IsoDate) -> DaysPerMonth = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31], [Y, M, D] = date_parts(IsoDate), julian(Y, M, D, DaysPerMonth, 0). -spec(julian(integer(), integer(), integer(), [integer()], integer) -> integer()). julian(Y, M, D, MonthList, Total) when M > 13 - length(MonthList) -> [ThisMonth|RemainingMonths] = MonthList, julian(Y, M, D, RemainingMonths, Total + ThisMonth); julian(Y, M, D, _MonthList, Total) -> case M > 2 andalso is_leap_year(Y) of true -> Total + D + 1; false -> Total + D end. @doc Given a year , return true or false depending on whether the year is a leap year . -spec(is_leap_year(pos_integer()) -> boolean()). is_leap_year(Year) -> (Year rem 4 == 0 andalso Year rem 100 /= 0) orelse (Year rem 400 == 0).
25d4c4060d26d6a86b1ace82b8ddfb129d55ad55cecb8ae3c62a0c00fb7905d6
lspitzner/brittany
Test214.hs
{-# language TypeFamilies #-} module M where data family F a data instance F Int = D Int
null
https://raw.githubusercontent.com/lspitzner/brittany/a15eed5f3608bf1fa7084fcf008c6ecb79542562/data/Test214.hs
haskell
# language TypeFamilies #
module M where data family F a data instance F Int = D Int
b7b31f783b8d393f36d5eb9842583304258171d3420f39a7c3d344130cd98d87
fogfish/hash
hash_pbkdf2.erl
%% Copyright 2012 , All Rights Reserved %% Licensed under the Apache License , Version 2.0 ( the " License " ) ; %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% -2.0 %% %% Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %% %% @doc %% -module(hash_pbkdf2). -export([pbkdf2/5]). -compile({no_auto_import,[ceil/1]}). pbkdf2(PRF, Pass, Salt, C, DkLen) -> Init = crypto:hmac(PRF, Pass, Salt), N = ceil(DkLen / (byte_size(Init) * 8)), binary:part( erlang:iolist_to_binary( [fpbkdf2(C, PRF, Pass, <<Salt/binary, I:32/integer>>) || I <- lists:seq(1, N)] ), 0, DkLen div 8 ). fpbkdf2(C, PRF, Pass, Data) -> Init = crypto:hmac(PRF, Pass, Data), fpbkdf2(C - 1, PRF, Pass, Init, Init). fpbkdf2(0, _PRF, _Pass, _Data, Acc) -> Acc; fpbkdf2(C, PRF, Pass, Data, Acc) -> Next = crypto:hmac(PRF, Pass, Data), fpbkdf2(C - 1, PRF, Pass, Next, crypto:exor(Acc, Next)). ceil(X) -> case trunc(X) of Y when Y < X -> Y + 1; Y -> Y end.
null
https://raw.githubusercontent.com/fogfish/hash/a1b9101189e115b4eabbe941639f3c626614e986/src/hash_pbkdf2.erl
erlang
you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. @doc
Copyright 2012 , All Rights Reserved Licensed under the Apache License , Version 2.0 ( the " License " ) ; distributed under the License is distributed on an " AS IS " BASIS , -module(hash_pbkdf2). -export([pbkdf2/5]). -compile({no_auto_import,[ceil/1]}). pbkdf2(PRF, Pass, Salt, C, DkLen) -> Init = crypto:hmac(PRF, Pass, Salt), N = ceil(DkLen / (byte_size(Init) * 8)), binary:part( erlang:iolist_to_binary( [fpbkdf2(C, PRF, Pass, <<Salt/binary, I:32/integer>>) || I <- lists:seq(1, N)] ), 0, DkLen div 8 ). fpbkdf2(C, PRF, Pass, Data) -> Init = crypto:hmac(PRF, Pass, Data), fpbkdf2(C - 1, PRF, Pass, Init, Init). fpbkdf2(0, _PRF, _Pass, _Data, Acc) -> Acc; fpbkdf2(C, PRF, Pass, Data, Acc) -> Next = crypto:hmac(PRF, Pass, Data), fpbkdf2(C - 1, PRF, Pass, Next, crypto:exor(Acc, Next)). ceil(X) -> case trunc(X) of Y when Y < X -> Y + 1; Y -> Y end.
ada0c2c7521bf3d7dc877b19029d02835766ea1bddf74487478bc91d76146033
igorhvr/bedlam
quartz.scm
Code by - this is licensed under GNU GPL v2 . (require-extension (lib iasylum/jcode)) (module iasylum/quartz (; high level: schedule-job-easier ; lower level: create-scheduler create-quartz-job-from-closure create-quartz-cron-trigger schedule-job) (include "quartz/quartz-code.scm"))
null
https://raw.githubusercontent.com/igorhvr/bedlam/b62e0d047105bb0473bdb47c58b23f6ca0f79a4e/iasylum/quartz.scm
scheme
high level: lower level:
Code by - this is licensed under GNU GPL v2 . (require-extension (lib iasylum/jcode)) (module iasylum/quartz schedule-job-easier create-scheduler create-quartz-job-from-closure create-quartz-cron-trigger schedule-job) (include "quartz/quartz-code.scm"))
8c970886b44e0aefaac33751bb0c4a7af05ac46f4f7915693d1cb5fb9efcb707
tcsprojects/ocaml-sat-solvers
zchaffwrapper.mli
open Satwrapper;; open Zchaff;; class zchaffSolverFactory: object inherit solverFactory method description: string method identifier: string method short_identifier: string method copyright: string method url: string method new_instance: abstractSolver end
null
https://raw.githubusercontent.com/tcsprojects/ocaml-sat-solvers/2c36605fb3e38a1bee41e079031ab5b173794910/deprecated/zchaff/zchaffwrapper.mli
ocaml
open Satwrapper;; open Zchaff;; class zchaffSolverFactory: object inherit solverFactory method description: string method identifier: string method short_identifier: string method copyright: string method url: string method new_instance: abstractSolver end
fa945f416960363ad07cdf4edb57428ca77dc2313de176305e5eab804e385fdd
containium/containium
project.clj
This Source Code Form is subject to the terms of the Mozilla Public License , v. 2.0 . If a copy of the MPL was not distributed with this file , You can obtain one at /. (defproject containium.systems/core "0.1.0" :description "Containium System Protocol and utility functions" :url "" :scm {:dir "../../"} :license {:name "Mozilla Public License 2.0" :url "/"} :dependencies [[org.clojure/clojure "1.8.0"]] :java-source-paths ["src-java"] :global-vars {*warn-on-reflection* true} :pom-plugins [[com.theoryinpractise/clojure-maven-plugin "1.7.1" {:extensions "true" :configuration ([:sourceDirectories [:sourceDirectory "src"]]) :executions ([:execution [:id "non-aot-compile"] [:phase "compile"] [:configuration [:temporaryOutputDirectory "true"] [:copyDeclaredNamespaceOnly "false"] [:compileDeclaredNamespaceOnly "false"]] [:goals [:goal "compile"]]] [:execution [:id "test-clojure"] [:phase "test"] [:goals [:goal "test"]]])}] [org.apache.maven.plugins/maven-compiler-plugin "3.1" {:configuration ([:source "1.7"] [:target "1.7"])}] [org.codehaus.mojo/buildnumber-maven-plugin "1.2" {:executions [:execution [:phase "validate"] [:goals [:goal "create"]]] :configuration ([:doCheck "false"] ; Set to true to prevent packaging with local changes. [:doUpdate "false"] [:shortRevisionLength "8"])}] [org.apache.maven.plugins/maven-jar-plugin "2.1" {:configuration [:archive [:manifest [:addDefaultImplementationEntries "true"]] [:manifestEntries [:Containium-Version "${buildNumber}"]]]}]] :pom-addition [:properties [:project.build.sourceEncoding "UTF-8"]] )
null
https://raw.githubusercontent.com/containium/containium/dede4098de928bed9ce8fccfc0a3891655ee162e/systems/core/project.clj
clojure
Set to true to prevent packaging with local changes.
This Source Code Form is subject to the terms of the Mozilla Public License , v. 2.0 . If a copy of the MPL was not distributed with this file , You can obtain one at /. (defproject containium.systems/core "0.1.0" :description "Containium System Protocol and utility functions" :url "" :scm {:dir "../../"} :license {:name "Mozilla Public License 2.0" :url "/"} :dependencies [[org.clojure/clojure "1.8.0"]] :java-source-paths ["src-java"] :global-vars {*warn-on-reflection* true} :pom-plugins [[com.theoryinpractise/clojure-maven-plugin "1.7.1" {:extensions "true" :configuration ([:sourceDirectories [:sourceDirectory "src"]]) :executions ([:execution [:id "non-aot-compile"] [:phase "compile"] [:configuration [:temporaryOutputDirectory "true"] [:copyDeclaredNamespaceOnly "false"] [:compileDeclaredNamespaceOnly "false"]] [:goals [:goal "compile"]]] [:execution [:id "test-clojure"] [:phase "test"] [:goals [:goal "test"]]])}] [org.apache.maven.plugins/maven-compiler-plugin "3.1" {:configuration ([:source "1.7"] [:target "1.7"])}] [org.codehaus.mojo/buildnumber-maven-plugin "1.2" {:executions [:execution [:phase "validate"] [:goals [:goal "create"]]] [:doUpdate "false"] [:shortRevisionLength "8"])}] [org.apache.maven.plugins/maven-jar-plugin "2.1" {:configuration [:archive [:manifest [:addDefaultImplementationEntries "true"]] [:manifestEntries [:Containium-Version "${buildNumber}"]]]}]] :pom-addition [:properties [:project.build.sourceEncoding "UTF-8"]] )
54fe4e06779c2b83911a0301e656d700b800e7ffa25931272ce089e54d889e41
dongcarl/guix
pam.scm
;;; GNU Guix --- Functional package management for GNU Copyright © 2013 , 2014 , 2015 , 2016 , 2017 , 2019 , 2020 < > ;;; ;;; This file is part of GNU Guix. ;;; GNU is free software ; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation ; either version 3 of the License , or ( at ;;; your option) any later version. ;;; ;;; GNU Guix is distributed in the hope that it will be useful, but ;;; WITHOUT ANY WARRANTY; without even the implied warranty of ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ;;; GNU General Public License for more details. ;;; You should have received a copy of the GNU General Public License along with GNU . If not , see < / > . (define-module (gnu system pam) #:use-module (guix records) #:use-module (guix derivations) #:use-module (guix gexp) #:use-module (gnu services) #:use-module (ice-9 match) #:use-module (srfi srfi-1) #:use-module (srfi srfi-9) #:use-module (srfi srfi-11) #:use-module (srfi srfi-26) #:use-module ((guix utils) #:select (%current-system)) #:export (pam-service pam-service-name pam-service-account pam-service-auth pam-service-password pam-service-session pam-entry pam-entry-control pam-entry-module pam-entry-arguments pam-limits-entry pam-limits-entry-domain pam-limits-entry-type pam-limits-entry-item pam-limits-entry-value pam-limits-entry->string pam-services->directory unix-pam-service base-pam-services session-environment-service session-environment-service-type pam-root-service-type pam-root-service)) ;;; Commentary: ;;; ;;; Configuration of the pluggable authentication modules (PAM). ;;; ;;; Code: ;; PAM services (see ;; <-pam.org/Linux-PAM-html/sag-configuration-file.html>.) (define-record-type* <pam-service> pam-service make-pam-service pam-service? (name pam-service-name) ; string The four " management groups " . list of > (default '())) (auth pam-service-auth (default '())) (password pam-service-password (default '())) (session pam-service-session (default '()))) (define-record-type* <pam-entry> pam-entry make-pam-entry pam-entry? (control pam-entry-control) ; string (module pam-entry-module) ; file name (arguments pam-entry-arguments ; list of string-valued g-expressions (default '()))) PAM limits entries are used by the pam_limits PAM module to set or override ;; limits on system resources for user sessions. The format is specified here : (define-record-type <pam-limits-entry> (make-pam-limits-entry domain type item value) pam-limits-entry? (domain pam-limits-entry-domain) ; string (type pam-limits-entry-type) ; symbol (item pam-limits-entry-item) ; symbol (value pam-limits-entry-value)) ; symbol or number (define (pam-limits-entry domain type item value) "Construct a pam-limits-entry ensuring that the provided values are valid." (define (valid? value) (case item ((priority) (number? value)) ((nice) (and (number? value) (>= value -20) (<= value 19))) (else (or (and (number? value) (>= value -1)) (member value '(unlimited infinity)))))) (define items (list 'core 'data 'fsize 'memlock 'nofile 'rss 'stack 'cpu 'nproc 'as 'maxlogins 'maxsyslogins 'priority 'locks 'sigpending 'msgqueue 'nice 'rtprio)) (when (not (member type '(hard soft both))) (error "invalid limit type" type)) (when (not (member item items)) (error "invalid limit item" item)) (when (not (valid? value)) (error "invalid limit value" value)) (make-pam-limits-entry domain type item value)) (define (pam-limits-entry->string entry) "Convert a pam-limits-entry record to a string." 
(match entry (($ <pam-limits-entry> domain type item value) (string-join (list domain (if (eq? type 'both) "-" (symbol->string type)) (symbol->string item) (cond ((symbol? value) (symbol->string value)) (else (number->string value)))) " ")))) (define (pam-service->configuration service) "Return the derivation building the configuration file for SERVICE, to be dumped in /etc/pam.d/NAME, where NAME is the name of SERVICE." (define (entry->gexp type entry) (match entry (($ <pam-entry> control module (arguments ...)) #~(format #t "~a ~a ~a ~a~%" #$type #$control #$module (string-join (list #$@arguments)))))) (match service (($ <pam-service> name account auth password session) (define builder #~(begin (with-output-to-file #$output (lambda () #$@(append (map (cut entry->gexp "account" <>) account) (map (cut entry->gexp "auth" <>) auth) (map (cut entry->gexp "password" <>) password) (map (cut entry->gexp "session" <>) session)) #t)))) (computed-file name builder)))) (define (pam-services->directory services) "Return the derivation to build the configuration directory to be used as /etc/pam.d for SERVICES." (let ((names (map pam-service-name services)) (files (map pam-service->configuration services))) (define builder #~(begin (use-modules (ice-9 match) (srfi srfi-1)) (mkdir #$output) (for-each (match-lambda ((name file) (symlink file (string-append #$output "/" name)))) ;; Since <pam-service> objects cannot be compared with ' equal ? ' since they contain gexps , which contain ;; closures, use 'delete-duplicates' on the build-side ;; instead. See <>. (delete-duplicates '#$(zip names files))))) (computed-file "pam.d" builder))) (define %pam-other-services ;; The "other" PAM configuration, which denies everything (see ;; <-pam.org/Linux-PAM-html/sag-configuration-example.html>.) (let ((deny (pam-entry (control "required") (module "pam_deny.so")))) (pam-service (name "other") (account (list deny)) (auth (list deny)) (password (list deny)) (session (list deny))))) (define unix-pam-service (let ((unix (pam-entry (control "required") (module "pam_unix.so"))) (env (pam-entry ; to honor /etc/environment. (control "required") (module "pam_env.so")))) (lambda* (name #:key allow-empty-passwords? (allow-root? #f) motd login-uid?) "Return a standard Unix-style PAM service for NAME. When ALLOW-EMPTY-PASSWORDS? is true, allow empty passwords. When ALLOW-ROOT? is true, allow root to run the command without authentication. When MOTD is true, it should be a file-like object used as the message-of-the-day. When LOGIN-UID? is true, require the 'pam_loginuid' module; that module sets /proc/self/loginuid, which the libc 'getlogin' function relies on." ;; See <-pam.org/Linux-PAM-html/sag-configuration-example.html>. (pam-service (name name) (account (list unix)) (auth (append (if allow-root? (list (pam-entry (control "sufficient") (module "pam_rootok.so"))) '()) (list (if allow-empty-passwords? (pam-entry (control "required") (module "pam_unix.so") (arguments '("nullok"))) unix)))) (password (list (pam-entry (control "required") (module "pam_unix.so") ;; Store SHA-512 encrypted passwords in /etc/shadow. (arguments '("sha512" "shadow"))))) (session `(,@(if motd (list (pam-entry (control "optional") (module "pam_motd.so") (arguments (list #~(string-append "motd=" #$motd))))) '()) ,@(if login-uid? 
(list (pam-entry ;to fill in /proc/self/loginuid (control "required") (module "pam_loginuid.so"))) '()) ,env ,unix)))))) (define (rootok-pam-service command) "Return a PAM service for COMMAND such that 'root' does not need to authenticate to run COMMAND." (let ((unix (pam-entry (control "required") (module "pam_unix.so")))) (pam-service (name command) (account (list unix)) (auth (list (pam-entry (control "sufficient") (module "pam_rootok.so")))) (password (list unix)) (session (list unix))))) (define* (base-pam-services #:key allow-empty-passwords?) "Return the list of basic PAM services everyone would want." TODO : Add other Shadow programs ? (append (list %pam-other-services) ;; These programs are setuid-root. (map (cut unix-pam-service <> #:allow-empty-passwords? allow-empty-passwords?) '("passwd" "sudo")) ;; This is setuid-root, as well. Allow root to run "su" without ;; authenticating. (list (unix-pam-service "su" #:allow-empty-passwords? allow-empty-passwords? #:allow-root? #t)) ;; These programs are not setuid-root, and we want root to be able ;; to run them without having to authenticate (notably because ;; 'useradd' and 'groupadd' are run during system activation.) (map rootok-pam-service '("useradd" "userdel" "usermod" "groupadd" "groupdel" "groupmod")))) ;;; ;;; System-wide environment variables. ;;; (define (environment-variables->environment-file vars) "Return a file for pam_env(8) that contains environment variables VARS." (apply mixed-text-file "environment" (append-map (match-lambda ((key . value) (list key "=" value "\n"))) vars))) (define session-environment-service-type (service-type (name 'session-environment) (extensions (list (service-extension etc-service-type (lambda (vars) (list `("environment" ,(environment-variables->environment-file vars))))))) (compose concatenate) (extend append) (description "Populate @file{/etc/environment}, which is honored by @code{pam_env}, with the specified environment variables. The value of this service is a list of name/value pairs for environments variables, such as: @example '((\"TZ\" . \"Canada/Pacific\")) @end example\n"))) (define (session-environment-service vars) "Return a service that builds the @file{/etc/environment}, which can be read by PAM-aware applications to set environment variables for sessions. VARS should be an association list in which both the keys and the values are strings or string-valued gexps." (service session-environment-service-type vars)) ;;; ;;; PAM root service. ;;; ;; Overall PAM configuration: a list of services, plus a procedure that takes one < pam - service > and returns a < pam - service > . The procedure is used to ;; implement cross-cutting concerns such as the use of the 'elogind.so' ;; session module that keeps track of logged-in users. (define-record-type* <pam-configuration> pam-configuration make-pam-configuration? pam-configuration? (services pam-configuration-services) ;list of <pam-service> (transform pam-configuration-transform)) ;procedure (define (/etc-entry config) "Return the /etc/pam.d entry corresponding to CONFIG." (match config (($ <pam-configuration> services transform) (let ((services (map transform services))) `(("pam.d" ,(pam-services->directory services))))))) (define (extend-configuration initial extensions) "Extend INITIAL with NEW." (let-values (((services procs) (partition pam-service? 
extensions))) (pam-configuration (services (append (pam-configuration-services initial) services)) (transform (apply compose (pam-configuration-transform initial) procs))))) (define pam-root-service-type (service-type (name 'pam) (extensions (list (service-extension etc-service-type /etc-entry))) ;; Arguments include <pam-service> as well as procedures. (compose concatenate) (extend extend-configuration) (description "Configure the Pluggable Authentication Modules (PAM) for all the specified @dfn{PAM services}. Each PAM service corresponds to a program, such as @command{login} or @command{sshd}, and specifies for instance how the program may authenticate users or what it should do when opening a new session."))) (define* (pam-root-service base #:key (transform identity)) "The \"root\" PAM service, which collects <pam-service> instance and turns them into a /etc/pam.d directory, including the <pam-service> listed in BASE. TRANSFORM is a procedure that takes a <pam-service> and returns a <pam-service>. It can be used to implement cross-cutting concerns that affect all the PAM services." (service pam-root-service-type (pam-configuration (services base) (transform transform))))
null
https://raw.githubusercontent.com/dongcarl/guix/82543e9649da2da9a5285ede4ec4f718fd740fcb/gnu/system/pam.scm
scheme
GNU Guix --- Functional package management for GNU This file is part of GNU Guix. you can redistribute it and/or modify it either version 3 of the License , or ( at your option) any later version. GNU Guix is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. Commentary: Configuration of the pluggable authentication modules (PAM). Code: PAM services (see <-pam.org/Linux-PAM-html/sag-configuration-file.html>.) string string file name list of string-valued g-expressions limits on system resources for user sessions. The format is specified string symbol symbol symbol or number Since <pam-service> objects cannot be compared with closures, use 'delete-duplicates' on the build-side instead. See <>. The "other" PAM configuration, which denies everything (see <-pam.org/Linux-PAM-html/sag-configuration-example.html>.) to honor /etc/environment. that module sets See <-pam.org/Linux-PAM-html/sag-configuration-example.html>. Store SHA-512 encrypted passwords in /etc/shadow. to fill in /proc/self/loginuid These programs are setuid-root. This is setuid-root, as well. Allow root to run "su" without authenticating. These programs are not setuid-root, and we want root to be able to run them without having to authenticate (notably because 'useradd' and 'groupadd' are run during system activation.) System-wide environment variables. PAM root service. Overall PAM configuration: a list of services, plus a procedure that takes implement cross-cutting concerns such as the use of the 'elogind.so' session module that keeps track of logged-in users. list of <pam-service> procedure Arguments include <pam-service> as well as procedures.
Copyright © 2013 , 2014 , 2015 , 2016 , 2017 , 2019 , 2020 < > under the terms of the GNU General Public License as published by You should have received a copy of the GNU General Public License along with GNU . If not , see < / > . (define-module (gnu system pam) #:use-module (guix records) #:use-module (guix derivations) #:use-module (guix gexp) #:use-module (gnu services) #:use-module (ice-9 match) #:use-module (srfi srfi-1) #:use-module (srfi srfi-9) #:use-module (srfi srfi-11) #:use-module (srfi srfi-26) #:use-module ((guix utils) #:select (%current-system)) #:export (pam-service pam-service-name pam-service-account pam-service-auth pam-service-password pam-service-session pam-entry pam-entry-control pam-entry-module pam-entry-arguments pam-limits-entry pam-limits-entry-domain pam-limits-entry-type pam-limits-entry-item pam-limits-entry-value pam-limits-entry->string pam-services->directory unix-pam-service base-pam-services session-environment-service session-environment-service-type pam-root-service-type pam-root-service)) (define-record-type* <pam-service> pam-service make-pam-service pam-service? The four " management groups " . list of > (default '())) (auth pam-service-auth (default '())) (password pam-service-password (default '())) (session pam-service-session (default '()))) (define-record-type* <pam-entry> pam-entry make-pam-entry pam-entry? (default '()))) PAM limits entries are used by the pam_limits PAM module to set or override here : (define-record-type <pam-limits-entry> (make-pam-limits-entry domain type item value) pam-limits-entry? (define (pam-limits-entry domain type item value) "Construct a pam-limits-entry ensuring that the provided values are valid." (define (valid? value) (case item ((priority) (number? value)) ((nice) (and (number? value) (>= value -20) (<= value 19))) (else (or (and (number? value) (>= value -1)) (member value '(unlimited infinity)))))) (define items (list 'core 'data 'fsize 'memlock 'nofile 'rss 'stack 'cpu 'nproc 'as 'maxlogins 'maxsyslogins 'priority 'locks 'sigpending 'msgqueue 'nice 'rtprio)) (when (not (member type '(hard soft both))) (error "invalid limit type" type)) (when (not (member item items)) (error "invalid limit item" item)) (when (not (valid? value)) (error "invalid limit value" value)) (make-pam-limits-entry domain type item value)) (define (pam-limits-entry->string entry) "Convert a pam-limits-entry record to a string." (match entry (($ <pam-limits-entry> domain type item value) (string-join (list domain (if (eq? type 'both) "-" (symbol->string type)) (symbol->string item) (cond ((symbol? value) (symbol->string value)) (else (number->string value)))) " ")))) (define (pam-service->configuration service) "Return the derivation building the configuration file for SERVICE, to be dumped in /etc/pam.d/NAME, where NAME is the name of SERVICE." (define (entry->gexp type entry) (match entry (($ <pam-entry> control module (arguments ...)) #~(format #t "~a ~a ~a ~a~%" #$type #$control #$module (string-join (list #$@arguments)))))) (match service (($ <pam-service> name account auth password session) (define builder #~(begin (with-output-to-file #$output (lambda () #$@(append (map (cut entry->gexp "account" <>) account) (map (cut entry->gexp "auth" <>) auth) (map (cut entry->gexp "password" <>) password) (map (cut entry->gexp "session" <>) session)) #t)))) (computed-file name builder)))) (define (pam-services->directory services) "Return the derivation to build the configuration directory to be used as /etc/pam.d for SERVICES." 
(let ((names (map pam-service-name services)) (files (map pam-service->configuration services))) (define builder #~(begin (use-modules (ice-9 match) (srfi srfi-1)) (mkdir #$output) (for-each (match-lambda ((name file) (symlink file (string-append #$output "/" name)))) ' equal ? ' since they contain gexps , which contain (delete-duplicates '#$(zip names files))))) (computed-file "pam.d" builder))) (define %pam-other-services (let ((deny (pam-entry (control "required") (module "pam_deny.so")))) (pam-service (name "other") (account (list deny)) (auth (list deny)) (password (list deny)) (session (list deny))))) (define unix-pam-service (let ((unix (pam-entry (control "required") (module "pam_unix.so"))) (control "required") (module "pam_env.so")))) (lambda* (name #:key allow-empty-passwords? (allow-root? #f) motd login-uid?) "Return a standard Unix-style PAM service for NAME. When ALLOW-EMPTY-PASSWORDS? is true, allow empty passwords. When ALLOW-ROOT? is true, allow root to run the command without authentication. When MOTD is true, it should be a file-like object used as the message-of-the-day. /proc/self/loginuid, which the libc 'getlogin' function relies on." (pam-service (name name) (account (list unix)) (auth (append (if allow-root? (list (pam-entry (control "sufficient") (module "pam_rootok.so"))) '()) (list (if allow-empty-passwords? (pam-entry (control "required") (module "pam_unix.so") (arguments '("nullok"))) unix)))) (password (list (pam-entry (control "required") (module "pam_unix.so") (arguments '("sha512" "shadow"))))) (session `(,@(if motd (list (pam-entry (control "optional") (module "pam_motd.so") (arguments (list #~(string-append "motd=" #$motd))))) '()) ,@(if login-uid? (control "required") (module "pam_loginuid.so"))) '()) ,env ,unix)))))) (define (rootok-pam-service command) "Return a PAM service for COMMAND such that 'root' does not need to authenticate to run COMMAND." (let ((unix (pam-entry (control "required") (module "pam_unix.so")))) (pam-service (name command) (account (list unix)) (auth (list (pam-entry (control "sufficient") (module "pam_rootok.so")))) (password (list unix)) (session (list unix))))) (define* (base-pam-services #:key allow-empty-passwords?) "Return the list of basic PAM services everyone would want." TODO : Add other Shadow programs ? (append (list %pam-other-services) (map (cut unix-pam-service <> #:allow-empty-passwords? allow-empty-passwords?) '("passwd" "sudo")) (list (unix-pam-service "su" #:allow-empty-passwords? allow-empty-passwords? #:allow-root? #t)) (map rootok-pam-service '("useradd" "userdel" "usermod" "groupadd" "groupdel" "groupmod")))) (define (environment-variables->environment-file vars) "Return a file for pam_env(8) that contains environment variables VARS." (apply mixed-text-file "environment" (append-map (match-lambda ((key . value) (list key "=" value "\n"))) vars))) (define session-environment-service-type (service-type (name 'session-environment) (extensions (list (service-extension etc-service-type (lambda (vars) (list `("environment" ,(environment-variables->environment-file vars))))))) (compose concatenate) (extend append) (description "Populate @file{/etc/environment}, which is honored by @code{pam_env}, with the specified environment variables. The value of this service is a list of name/value pairs for environments variables, such as: @example '((\"TZ\" . 
\"Canada/Pacific\")) @end example\n"))) (define (session-environment-service vars) "Return a service that builds the @file{/etc/environment}, which can be read by PAM-aware applications to set environment variables for sessions. VARS should be an association list in which both the keys and the values are strings or string-valued gexps." (service session-environment-service-type vars)) one < pam - service > and returns a < pam - service > . The procedure is used to (define-record-type* <pam-configuration> pam-configuration make-pam-configuration? pam-configuration? (define (/etc-entry config) "Return the /etc/pam.d entry corresponding to CONFIG." (match config (($ <pam-configuration> services transform) (let ((services (map transform services))) `(("pam.d" ,(pam-services->directory services))))))) (define (extend-configuration initial extensions) "Extend INITIAL with NEW." (let-values (((services procs) (partition pam-service? extensions))) (pam-configuration (services (append (pam-configuration-services initial) services)) (transform (apply compose (pam-configuration-transform initial) procs))))) (define pam-root-service-type (service-type (name 'pam) (extensions (list (service-extension etc-service-type /etc-entry))) (compose concatenate) (extend extend-configuration) (description "Configure the Pluggable Authentication Modules (PAM) for all the specified @dfn{PAM services}. Each PAM service corresponds to a program, such as @command{login} or @command{sshd}, and specifies for instance how the program may authenticate users or what it should do when opening a new session."))) (define* (pam-root-service base #:key (transform identity)) "The \"root\" PAM service, which collects <pam-service> instance and turns them into a /etc/pam.d directory, including the <pam-service> listed in BASE. TRANSFORM is a procedure that takes a <pam-service> and returns a <pam-service>. It can be used to implement cross-cutting concerns that affect all the PAM services." (service pam-root-service-type (pam-configuration (services base) (transform transform))))
5032a8874bda2ef8aff3fa4746de4edb7744b4f792bfd28148bfb801d1623726
nanocaml/nanocaml
test_main.ml
open OUnit2 let () = [ Parsing_tests.tt; Lang_codegen_tests.tt; Expand_tests.tt; Pass_typeck_tests.tt; Pass_codegen_tests.tt; Docs_example.tt ] |> test_list |> run_test_tt_main
null
https://raw.githubusercontent.com/nanocaml/nanocaml/c856268dc8986bd9fb739c8cd6b972052997a30c/test/test_main.ml
ocaml
open OUnit2 let () = [ Parsing_tests.tt; Lang_codegen_tests.tt; Expand_tests.tt; Pass_typeck_tests.tt; Pass_codegen_tests.tt; Docs_example.tt ] |> test_list |> run_test_tt_main
472b8aea0066c1ba2f3ee531eec8ae68acab8743c6d16e2b167f24f785cd5353
antono/guix-debian
python-build-system.scm
;;; GNU Guix --- Functional package management for GNU Copyright © 2013 < > Copyright © 2013 < > Copyright © 2013 < > ;;; ;;; This file is part of GNU Guix. ;;; GNU is free software ; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation ; either version 3 of the License , or ( at ;;; your option) any later version. ;;; ;;; GNU Guix is distributed in the hope that it will be useful, but ;;; WITHOUT ANY WARRANTY; without even the implied warranty of ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ;;; GNU General Public License for more details. ;;; You should have received a copy of the GNU General Public License along with GNU . If not , see < / > . (define-module (guix build python-build-system) #:use-module ((guix build gnu-build-system) #:renamer (symbol-prefix-proc 'gnu:)) #:use-module (guix build utils) #:use-module (ice-9 match) #:use-module (ice-9 ftw) #:use-module (srfi srfi-1) #:use-module (srfi srfi-26) #:export (%standard-phases python-build)) ;; Commentary: ;; ;; Builder-side code of the standard Python package build procedure. ;; ;; Code: (define (call-setuppy command params) (if (file-exists? "setup.py") (begin (format #t "running \"python setup.py\" with command ~s and parameters ~s~%" command params) (zero? (apply system* "python" "setup.py" command params))) (error "no setup.py found"))) (define* (build #:rest empty) "Build a given Python package." (call-setuppy "build" '())) (define* (check #:key tests? test-target #:allow-other-keys) "Run the test suite of a given Python package." (if tests? (call-setuppy test-target '()) #t)) (define (get-python-version python) (string-take (string-take-right python 5) 3)) (define* (install #:key outputs inputs (configure-flags '()) #:allow-other-keys) "Install a given Python package." (let* ((out (assoc-ref outputs "out")) (params (append (list (string-append "--prefix=" out)) configure-flags)) (python-version (get-python-version (assoc-ref inputs "python"))) (old-path (getenv "PYTHONPATH")) (add-path (string-append out "/lib/python" python-version "/site-packages/"))) ;; create the module installation directory and add it to PYTHONPATH ;; to make setuptools happy (mkdir-p add-path) (setenv "PYTHONPATH" (string-append (if old-path (string-append old-path ":") "") add-path)) (call-setuppy "install" params))) (define* (wrap #:key inputs outputs #:allow-other-keys) (define (list-of-files dir) (map (cut string-append dir "/" <>) (or (scandir dir (lambda (f) (let ((s (stat (string-append dir "/" f)))) (eq? 'regular (stat:type s))))) '()))) (define bindirs (append-map (match-lambda ((_ . dir) (list (string-append dir "/bin") (string-append dir "/sbin")))) outputs)) (let* ((out (assoc-ref outputs "out")) (python (assoc-ref inputs "python")) (var `("PYTHONPATH" prefix ,(cons (string-append out "/lib/python" (get-python-version python) "/site-packages") (search-path-as-string->list (or (getenv "PYTHONPATH") "")))))) (for-each (lambda (dir) (let ((files (list-of-files dir))) (for-each (cut wrap-program <> var) files))) bindirs))) (define %standard-phases ;; 'configure' and 'build' phases are not needed. Everything is done during ;; 'install'. 
(alist-cons-after 'install 'wrap wrap (alist-replace 'build build (alist-replace 'check check (alist-replace 'install install (alist-delete 'configure gnu:%standard-phases)))))) (define* (python-build #:key inputs (phases %standard-phases) #:allow-other-keys #:rest args) "Build the given Python package, applying all of PHASES in order." (apply gnu:gnu-build #:inputs inputs #:phases phases args)) ;;; python-build-system.scm ends here
null
https://raw.githubusercontent.com/antono/guix-debian/85ef443788f0788a62010a942973d4f7714d10b4/guix/build/python-build-system.scm
scheme
GNU Guix --- Functional package management for GNU This file is part of GNU Guix. you can redistribute it and/or modify it either version 3 of the License , or ( at your option) any later version. GNU Guix is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. Commentary: Builder-side code of the standard Python package build procedure. Code: create the module installation directory and add it to PYTHONPATH to make setuptools happy 'configure' and 'build' phases are not needed. Everything is done during 'install'. python-build-system.scm ends here
Copyright © 2013 < > Copyright © 2013 < > Copyright © 2013 < > under the terms of the GNU General Public License as published by You should have received a copy of the GNU General Public License along with GNU . If not , see < / > . (define-module (guix build python-build-system) #:use-module ((guix build gnu-build-system) #:renamer (symbol-prefix-proc 'gnu:)) #:use-module (guix build utils) #:use-module (ice-9 match) #:use-module (ice-9 ftw) #:use-module (srfi srfi-1) #:use-module (srfi srfi-26) #:export (%standard-phases python-build)) (define (call-setuppy command params) (if (file-exists? "setup.py") (begin (format #t "running \"python setup.py\" with command ~s and parameters ~s~%" command params) (zero? (apply system* "python" "setup.py" command params))) (error "no setup.py found"))) (define* (build #:rest empty) "Build a given Python package." (call-setuppy "build" '())) (define* (check #:key tests? test-target #:allow-other-keys) "Run the test suite of a given Python package." (if tests? (call-setuppy test-target '()) #t)) (define (get-python-version python) (string-take (string-take-right python 5) 3)) (define* (install #:key outputs inputs (configure-flags '()) #:allow-other-keys) "Install a given Python package." (let* ((out (assoc-ref outputs "out")) (params (append (list (string-append "--prefix=" out)) configure-flags)) (python-version (get-python-version (assoc-ref inputs "python"))) (old-path (getenv "PYTHONPATH")) (add-path (string-append out "/lib/python" python-version "/site-packages/"))) (mkdir-p add-path) (setenv "PYTHONPATH" (string-append (if old-path (string-append old-path ":") "") add-path)) (call-setuppy "install" params))) (define* (wrap #:key inputs outputs #:allow-other-keys) (define (list-of-files dir) (map (cut string-append dir "/" <>) (or (scandir dir (lambda (f) (let ((s (stat (string-append dir "/" f)))) (eq? 'regular (stat:type s))))) '()))) (define bindirs (append-map (match-lambda ((_ . dir) (list (string-append dir "/bin") (string-append dir "/sbin")))) outputs)) (let* ((out (assoc-ref outputs "out")) (python (assoc-ref inputs "python")) (var `("PYTHONPATH" prefix ,(cons (string-append out "/lib/python" (get-python-version python) "/site-packages") (search-path-as-string->list (or (getenv "PYTHONPATH") "")))))) (for-each (lambda (dir) (let ((files (list-of-files dir))) (for-each (cut wrap-program <> var) files))) bindirs))) (define %standard-phases (alist-cons-after 'install 'wrap wrap (alist-replace 'build build (alist-replace 'check check (alist-replace 'install install (alist-delete 'configure gnu:%standard-phases)))))) (define* (python-build #:key inputs (phases %standard-phases) #:allow-other-keys #:rest args) "Build the given Python package, applying all of PHASES in order." (apply gnu:gnu-build #:inputs inputs #:phases phases args))
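A brief sketch of how the helpers in the record above behave, assuming evaluation inside the (guix build python-build-system) module (get-python-version is an internal definition, not exported, and the store path below is hypothetical).

;; Only the trailing "2.7.6" matters: the procedure keeps the last five
;; characters of the path and then drops the patch level.
(get-python-version "/gnu/store/xxxxxxxxxxxx-python-2.7.6")  ; => "2.7"

;; 'configure is deleted and a 'wrap phase is added right after 'install,
;; so listing the phase names of the alist shows the rewired pipeline:
(map car %standard-phases)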
30bbbd415d1b7751a07981bf5b97775b83bb14522e4d3ef1b8b4893a094c0897
informatimago/lisp
make.lisp
(eval-when (:compile-toplevel :load-toplevel :execute) (setf *readtable* (copy-readtable nil))) (defvar *modules* '(common-lisp clext clmisc sbcl clisp susv3)) (defmacro define-implementation (name &key feature executable default-options load-option eval-option quit-option ; to quit once arguments processing is done quit-expression) `'(,name ,feature ,executable ,default-options ,load-option ,eval-option ,quit-option ,quit-expression)) (define-implementation abcl :feature :abcl :executable "abcl" :default-options () :load-option ("--load" <arg>) :eval-option ("--eval" <arg>) :quit-option ("--eval" "(quit)") :quit-expression "(extensions:quit)") (define-implementation allegro :feature :allegro :executable "alisp" :default-options ("-batch" "-q") :load-option ("-L" <arg>) :eval-option () :quit-option "-kill" :quit-expression "(excl:exit)") ; dumps an "; Exiting" message... (define-implementation ccl :feature :ccl :executable "ccl" :default-options ("--batch" "--o-init") :load-option ("--load" <arg>) :eval-option ("--eval" <arg>) :quit-option "--quit" :quit-expression "(ccl:quit)") (define-implementation clisp :feature :clisp :executable "clisp" :default-options ("-ansi" "-q" "-norc" "-Kfull" "-Efile" "UTF-8" #|"-on-error" "debug"|#) :load-option (<arg>) :eval-option ("-x" <arg>) :quit-option () :quit-expression "(ext:quit)") (define-implementation ecl :feature :ecl :executable "ecl" :default-options ("-norc") :load-option ("-shell" <arg>) :eval-option ("-eval" <arg>) :quit-option ("-quit") :quit-expression "(ext:quit)") sbcl := sbcl cmucl := cmucl openmcl := openmcl abcl_flags := allegro_flags := ccl_flags := clisp_flags := -ansi -q -norc -kfull -e iso-8859-1 -efile utf-8 -eterminal utf-8 -on-error debug cmucl_flags := -noinit -nositeinit ecl_flags := -norc openmcl_flags := sbcl_flags := --noinform --sysinit /dev/null --userinit /dev/null
null
https://raw.githubusercontent.com/informatimago/lisp/571af24c06ba466e01b4c9483f8bb7690bc46d03/tools/make.lisp
lisp
to quit once arguments processing is done dumps an "; Exiting" message... "-on-error" "debug"
(eval-when (:compile-toplevel :load-toplevel :execute) (setf *readtable* (copy-readtable nil))) (defvar *modules* '(common-lisp clext clmisc sbcl clisp susv3)) (defmacro define-implementation (name &key feature executable default-options load-option eval-option quit-expression) `'(,name ,feature ,executable ,default-options ,load-option ,eval-option ,quit-option ,quit-expression)) (define-implementation abcl :feature :abcl :executable "abcl" :default-options () :load-option ("--load" <arg>) :eval-option ("--eval" <arg>) :quit-option ("--eval" "(quit)") :quit-expression "(extensions:quit)") (define-implementation allegro :feature :allegro :executable "alisp" :default-options ("-batch" "-q") :load-option ("-L" <arg>) :eval-option () :quit-option "-kill" (define-implementation ccl :feature :ccl :executable "ccl" :default-options ("--batch" "--o-init") :load-option ("--load" <arg>) :eval-option ("--eval" <arg>) :quit-option "--quit" :quit-expression "(ccl:quit)") (define-implementation clisp :feature :clisp :executable "clisp" :default-options ("-ansi" "-q" "-norc" "-Kfull" "-Efile" "UTF-8" :load-option (<arg>) :eval-option ("-x" <arg>) :quit-option () :quit-expression "(ext:quit)") (define-implementation ecl :feature :ecl :executable "ecl" :default-options ("-norc") :load-option ("-shell" <arg>) :eval-option ("-eval" <arg>) :quit-option ("-quit") :quit-expression "(ext:quit)") sbcl := sbcl cmucl := cmucl openmcl := openmcl abcl_flags := allegro_flags := ccl_flags := clisp_flags := -ansi -q -norc -kfull -e iso-8859-1 -efile utf-8 -eterminal utf-8 -on-error debug cmucl_flags := -noinit -nositeinit ecl_flags := -norc openmcl_flags := sbcl_flags := --noinform --sysinit /dev/null --userinit /dev/null
2ebce672781e45eefde12b112bd3ae9ac4507fb7705f5ec1fdfddef88bed04e7
bobzhang/fan
compile_stream.mli
* [ stream ] DDSL compiler , original 's stream extension open Astf type spat_comp = | When of Locf.t * pat * exp option | Match of Locf.t * pat * exp | Str of Locf.t * pat type sexp_comp = | Trm of Locf.t * exp | Ntr of Locf.t * exp type stream_pat = (spat_comp * exp option) type stream_pats = stream_pat list type stream_case = (stream_pats * pat option * exp) type stream_cases = stream_case list val grammar_module_name : string ref val gm : unit -> string val strm_n : string val peek_fun : loc -> exp val junk_fun : loc -> exp val empty : loc -> exp val handle_failure : exp -> bool val is_constr_apply : exp -> bool val stream_pattern_component : exp -> exp -> spat_comp -> exp val stream_pattern : loc -> stream_case -> (exp option -> exp) -> exp val stream_patterns_term : loc -> (unit -> exp) -> (pat * exp option * loc * stream_pats * pat option * exp) list -> exp val parser_cases : loc -> stream_cases -> exp val cparser : loc -> stream_cases -> exp (* val cparser_match : loc -> exp -> pat option -> stream_cases -> exp *) val not_computing : exp -> bool val slazy : loc -> exp -> exp val cstream : loc -> sexp_comp list -> exp
null
https://raw.githubusercontent.com/bobzhang/fan/7ed527d96c5a006da43d3813f32ad8a5baa31b7f/src/cold/compile_stream.mli
ocaml
val cparser_match : loc -> exp -> pat option -> stream_cases -> exp
* [ stream ] DDSL compiler , original 's stream extension open Astf type spat_comp = | When of Locf.t * pat * exp option | Match of Locf.t * pat * exp | Str of Locf.t * pat type sexp_comp = | Trm of Locf.t * exp | Ntr of Locf.t * exp type stream_pat = (spat_comp * exp option) type stream_pats = stream_pat list type stream_case = (stream_pats * pat option * exp) type stream_cases = stream_case list val grammar_module_name : string ref val gm : unit -> string val strm_n : string val peek_fun : loc -> exp val junk_fun : loc -> exp val empty : loc -> exp val handle_failure : exp -> bool val is_constr_apply : exp -> bool val stream_pattern_component : exp -> exp -> spat_comp -> exp val stream_pattern : loc -> stream_case -> (exp option -> exp) -> exp val stream_patterns_term : loc -> (unit -> exp) -> (pat * exp option * loc * stream_pats * pat option * exp) list -> exp val parser_cases : loc -> stream_cases -> exp val cparser : loc -> stream_cases -> exp val not_computing : exp -> bool val slazy : loc -> exp -> exp val cstream : loc -> sexp_comp list -> exp
f63641a1d9420cfb422b9036b3c8ddf725060c670fb018fc9434b44afdabab73
WhatsApp/erlt
struct_mod03.erl
-file("dev_struct/src/struct_mod03.erlt", 1). -module(struct_mod03). -eqwalizer_unchecked([{index, 1}]). -export([expr/0, pattern/3, guard/3, field/1, update/1, index/1]). -spec expr() -> {struct_mod02:foo(), struct_mod02:bar()}. expr() -> {{'$#struct_mod02:foo'}, {'$#struct_mod02:bar', 1, 2}}. -spec pattern(struct_mod02:foo(), struct_mod02:bar(), integer()) -> integer(). pattern({'$#struct_mod02:foo'}, {'$#struct_mod02:bar', 1, B}, B) -> B. -spec guard(struct_mod02:foo(), struct_mod02:bar(), integer()) -> atom(). guard(Value1, Value2, B) when Value1 =:= {'$#struct_mod02:foo'}, Value2 =:= {'$#struct_mod02:bar', 1, B} -> ok. -spec field(struct_mod02:bar()) -> integer(). field(Value) when erlang:is_record(Value, '$#struct_mod02:bar', 3) orelse fail, erlang:element(2, Value) =:= 1 -> case Value of {'$#struct_mod02:bar', _, StructGenVar@0@b} -> StructGenVar@0@b; _ -> erlang:error({badstruct, '$#struct_mod02:bar'}) end. -spec update(struct_mod02:bar()) -> struct_mod02:bar(). update(Value) -> case Value of {'$#struct_mod02:bar', _, StructGenVar@1@b} -> {'$#struct_mod02:bar', 2, StructGenVar@1@b}; _ -> erlang:error({badstruct, '$#struct_mod02:bar'}) end. index(2) when 2 =:= 2 -> 3.
null
https://raw.githubusercontent.com/WhatsApp/erlt/616a4adc628ca8754112e659701e57f1cd7fecd1/tests/dev_struct/ir-spec/struct_mod03.erl
erlang
-file("dev_struct/src/struct_mod03.erlt", 1). -module(struct_mod03). -eqwalizer_unchecked([{index, 1}]). -export([expr/0, pattern/3, guard/3, field/1, update/1, index/1]). -spec expr() -> {struct_mod02:foo(), struct_mod02:bar()}. expr() -> {{'$#struct_mod02:foo'}, {'$#struct_mod02:bar', 1, 2}}. -spec pattern(struct_mod02:foo(), struct_mod02:bar(), integer()) -> integer(). pattern({'$#struct_mod02:foo'}, {'$#struct_mod02:bar', 1, B}, B) -> B. -spec guard(struct_mod02:foo(), struct_mod02:bar(), integer()) -> atom(). guard(Value1, Value2, B) when Value1 =:= {'$#struct_mod02:foo'}, Value2 =:= {'$#struct_mod02:bar', 1, B} -> ok. -spec field(struct_mod02:bar()) -> integer(). field(Value) when erlang:is_record(Value, '$#struct_mod02:bar', 3) orelse fail, erlang:element(2, Value) =:= 1 -> case Value of {'$#struct_mod02:bar', _, StructGenVar@0@b} -> StructGenVar@0@b; _ -> erlang:error({badstruct, '$#struct_mod02:bar'}) end. -spec update(struct_mod02:bar()) -> struct_mod02:bar(). update(Value) -> case Value of {'$#struct_mod02:bar', _, StructGenVar@1@b} -> {'$#struct_mod02:bar', 2, StructGenVar@1@b}; _ -> erlang:error({badstruct, '$#struct_mod02:bar'}) end. index(2) when 2 =:= 2 -> 3.
c4bb9dc3685b84547ce42dcfe5949bb534a8b3d2ee1b3573680d105f8aa693ae
mfikes/coal-mine
problem_51.cljc
(ns coal-mine.problem-51 (:require [coal-mine.checks :refer [defcheck-51] :rename {defcheck-51 defcheck}] [clojure.test])) (defcheck solution-12f0614e [1 2 3 4 5]) (defcheck solution-1d566772 [ 1 2 3 4 5]) (defcheck solution-310469e3 '( 1 2 3 4 5 )) (defcheck solution-41b1a75 ;The :as keyword can be used to retain access to ;the entire collection that is being destructured. [1 2 3 4 5]) (defcheck solution-576d2cb9 (take 5 (iterate inc 1))) (defcheck solution-5b27165e (range 1 6)) (defcheck solution-6bad7c34 (list 1 2 3 4 5)) (defcheck solution-78017b6 '[1 2 3 4 5]) (defcheck solution-78653f05 '(1 2 3 4 5)) (defcheck solution-7c6cd60f [1 2 3 4 5]) (defcheck solution-871e1dc5 (range 1 (inc 5))) (defcheck solution-87ce8e9b (take 5 (drop 1 (range)))) (defcheck solution-a7496ea4 (apply vector (range 1 6))) (defcheck solution-c26faa8a [1 2 3 4 5 ]) (defcheck solution-cdb888be [ 1 2 3 4 5 ]) (defcheck solution-d7d0d3f2 (drop 1 (range 6))) (defcheck solution-ea904f03 (map inc (range 5))) (defn run-tests [] (clojure.test/run-tests 'coal-mine.problem-51)) (defn -main [] (run-tests)) #?(:cljs (set! *main-cli-fn* -main))
null
https://raw.githubusercontent.com/mfikes/coal-mine/0961d085b37f4e59489a8cf6a2b8fef0a698d8fb/src/coal_mine/problem_51.cljc
clojure
The :as keyword can be used to retain access to the entire collection that is being destructured.
(ns coal-mine.problem-51 (:require [coal-mine.checks :refer [defcheck-51] :rename {defcheck-51 defcheck}] [clojure.test])) (defcheck solution-12f0614e [1 2 3 4 5]) (defcheck solution-1d566772 [ 1 2 3 4 5]) (defcheck solution-310469e3 '( 1 2 3 4 5 )) (defcheck solution-41b1a75 [1 2 3 4 5]) (defcheck solution-576d2cb9 (take 5 (iterate inc 1))) (defcheck solution-5b27165e (range 1 6)) (defcheck solution-6bad7c34 (list 1 2 3 4 5)) (defcheck solution-78017b6 '[1 2 3 4 5]) (defcheck solution-78653f05 '(1 2 3 4 5)) (defcheck solution-7c6cd60f [1 2 3 4 5]) (defcheck solution-871e1dc5 (range 1 (inc 5))) (defcheck solution-87ce8e9b (take 5 (drop 1 (range)))) (defcheck solution-a7496ea4 (apply vector (range 1 6))) (defcheck solution-c26faa8a [1 2 3 4 5 ]) (defcheck solution-cdb888be [ 1 2 3 4 5 ]) (defcheck solution-d7d0d3f2 (drop 1 (range 6))) (defcheck solution-ea904f03 (map inc (range 5))) (defn run-tests [] (clojure.test/run-tests 'coal-mine.problem-51)) (defn -main [] (run-tests)) #?(:cljs (set! *main-cli-fn* -main))
348a2ac22d1696212f7fb65d33364884dcf2017b16d64fccd6fe3943ef7938c7
emqx/mria
mria_schema.erl
%%-------------------------------------------------------------------- Copyright ( c ) 2021 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved . %% Licensed under the Apache License , Version 2.0 ( the " License " ) ; %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at %% %% -2.0 %% %% Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. %%-------------------------------------------------------------------- %% Functions related to the management and synchronization of the mria %% schema. %% %% This server serializes all schema operations on a local node %% (effectively it means that all `mria:create_table' calls are %% executed sequentially. Not a big deal since we don't expect this to %% be a hotspot). -module(mria_schema). -behaviour(gen_server). %% API: -export([ create_table/2 , subscribe_to_shard_schema_updates/1 , ensure_local_table/1 , tables_of_shard/1 , shard_of_table/1 , table_specs_of_shard/1 , shards/0 , start_link/0 , wait_for_tables/1 ]). %% gen_server callbacks -export([ init/1 , handle_call/3 , handle_cast/2 , handle_info/2 ]). -include("mria_rlog.hrl"). -include_lib("snabbkaffe/include/trace.hrl"). -type entry() :: #?schema{ mnesia_table :: mria:table() , shard :: mria_rlog:shard() , storage :: mria:storage() , config :: list() }. -type subscribers() :: #{mria_rlog:shard() => [pid()]}. -type event() :: {schema_event, subscription(), {new_table, mria_rlog:shard(), entry()}}. -opaque subscription() :: pid(). -define(SERVER, ?MODULE). -export_type([entry/0, subscription/0, event/0]). %%================================================================================ %% Type declarations %%================================================================================ %%================================================================================ %% API %%================================================================================ @private Add a table to the shard . Warning : table may not be ready for the writes after this function returns . One should wait for it %% using `mria_schema:ensure_local_table/1' %% %% Note: currently it's the only schema operation that we support. No %% removal and no handover of the table between the shards is %% possible. %% %% These operations are too rare and expensive to implement, because %% they require precise coordination of the shard processes across the %% entire cluster. %% %% Adding an API to remove or modify schema would open possibility to move a table from one shard to another . This requires restarting %% both shards in a synchronized manner to avoid a race condition when %% the replicant processes from the old shard import in-flight %% transactions while the new shard is bootstrapping the table. %% %% This is further complicated by the fact that the replicant nodes %% may consume shard transactions from different core nodes. %% %% So the operation of removing a table from the shard would look like %% this: %% 1 . Do an RPC call to all core nodes to stop the shard 2 . Each core node synchronously stops all the attached replicant %% processes 3 . 
Only then we are sure that we can avoid data corruption %% %% Currently there is no requirement to implement this, so we can get %% away with managing each shard separately -spec create_table(mria:table(), _Properties :: list()) -> mria:t_result(ok). create_table(Table, TabDef) -> core = mria_config:role(), % assert gen_server:call(?MODULE, {create_table, Table, TabDef}, infinity). %% @private Return the list of tables that belong to the shard and their %% properties: -spec table_specs_of_shard(mria_rlog:shard()) -> [mria_schema:entry()]. table_specs_of_shard(Shard) -> Pattern = #?schema{mnesia_table = '_', shard = Shard, storage = '_', config = '_'}, {atomic, Result} = mnesia:transaction(fun mnesia:match_object/1, [Pattern]), Result. %% @private Return the list of tables that belong to the shard. -spec tables_of_shard(mria_rlog:shard()) -> [mria:table()]. tables_of_shard(Shard) -> [Tab || #?schema{mnesia_table = Tab} <- table_specs_of_shard(Shard)]. %% @private Subscribe to the schema events %% %% The subscriber will get events of type `event()' every time a new %% table is added to the shard. -spec subscribe_to_shard_schema_updates(mria_rlog:shard()) -> {ok, subscription()}. subscribe_to_shard_schema_updates(Shard) -> gen_server:call(?SERVER, {subscribe_to_shard_schema_updates, Shard, self()}). @private Ensure the local mnesia table is ready to accept writes -spec ensure_local_table(mria:table()) -> true. ensure_local_table(Table) -> ?tp_span(debug, ?FUNCTION_NAME, #{table => Table}, mria_status:local_table_present(Table)). @private Get the shard of a table -spec shard_of_table(mria:table()) -> {ok, mria_rlog:shard()} | undefined. shard_of_table(Table) -> case mnesia:dirty_read(?schema, Table) of [#?schema{shard = Shard}] -> {ok, Shard}; [] -> undefined end. %% @private Return the list of known shards -spec shards() -> [mria_rlog:shard()]. shards() -> MS = {#?schema{mnesia_table = '_', shard = '$1', config = '_', storage = '_'}, [], ['$1']}, lists:usort(ets:select(?schema, [MS])). -spec wait_for_tables([mria:table()]) -> ok | {error, _Reason}. wait_for_tables(Tables) -> [mria_status:local_table_present(T) || T <- Tables], mria_mnesia:wait_for_tables(Tables). -spec start_link() -> {ok, pid()}. start_link() -> gen_server:start_link({local, ?SERVER}, ?MODULE, [], []). %%================================================================================ %% gen_server callbacks %%================================================================================ -record(s, { %% We cache the contents of the schema in the process state to notify the local %% processes about the schema updates. Note that we cannot %% simply use the `?schema' table for it: it can be updated %% remotely at unpredicatable time specs :: [entry()] , subscribers = #{} :: subscribers() }). init([]) -> logger:set_process_metadata(#{domain => [mria, rlog, schema]}), ?tp(debug, rlog_schema_init, #{}), State0 = boostrap(), {ok, _} = mnesia:subscribe({table, ?schema, simple}), Recreate all the known tables : ?tp(notice, "Converging schema", #{}), Specs = table_specs_of_shard('_'), State = converge_schema(Specs, State0), {ok, State}. 
handle_call({subscribe_to_shard_schema_updates, Shard, Pid}, _From, State0 = #s{subscribers = Subs0}) -> Pids0 = maps:get(Shard, Subs0, []), Pids = case lists:member(Pid, Pids0) of true -> Pids0; false -> _MRef = monitor(process, Pid), [Pid|Pids0] end, State = State0#s{subscribers = Subs0#{Shard => Pids}}, {reply, {ok, self()}, State}; handle_call({create_table, Table, TabDef}, _From, State) -> {reply, do_create_table(Table, TabDef), State}; handle_call(Call, From, State) -> ?unexpected_event_tp(#{call => Call, from => From, state => State}), {reply, {error, unknown_call}, State}. handle_cast(Cast, State) -> ?unexpected_event_tp(#{cast => Cast, state => State}), {noreply, State}. handle_info({mnesia_table_event, Event}, State0) -> case Event of {write, Entry = #?schema{}, _ActivityId} -> ?tp(mria_schema_apply_schema_op, #{entry => Entry, activity_id => _ActivityId}), {noreply, apply_schema_op(Entry, State0)}; _SchemaEvent -> {noreply, State0} end; handle_info({'DOWN', _MRef, process, Pid, _Info}, State = #s{subscribers = Subs0}) -> Subs = maps:map(fun(_Shard, Pids) -> lists:delete(Pid, Pids) end, Subs0), {noreply, State#s{subscribers = Subs}}; handle_info(Info, State) -> ?unexpected_event_tp(#{info => Info, state => State}), {noreply, State}. %%================================================================================ Internal functions %%================================================================================ -spec do_create_table(mria:table(), list()) -> mria:t_result(ok). do_create_table(Table, TabDef) -> case make_entry(Table, TabDef) of {ok, Entry} -> case create_table(Entry) of ok -> add_entry(Entry); Err -> {aborted, Err} end; {error, Err} -> {aborted, Err} end. -spec add_entry(entry()) -> mria:t_result(ok). add_entry(Entry) -> core = mria_config:role(), %% assert mnesia:transaction( fun() -> #?schema{ mnesia_table = Table , shard = Shard } = Entry, case mnesia:wread({?schema, Table}) of [] -> ?tp(info, "Adding table to a shard", #{ shard => Shard , table => Table }), mnesia:write(?schema, Entry, write), ok; [#?schema{shard = Shard}] -> ok; Prev -> %% We already have the definition of the table and %% it's incompatible with the new one (changed %% shard) Info = #{ reason => incompatible_schema , shard => Shard , table => Table , new_spec => Entry , prev_spec => Prev }, mnesia:abort(Info) end end). -spec make_entry(mria:table(), _Properties :: list()) -> {ok, entry()} | {error, map()}. make_entry(Table, TabDef) -> Storage = proplists:get_value(storage, TabDef, ram_copies), Options = lists:filter(fun({Key, _}) -> not lists:member(Key, [ ram_copies, disc_copies, disc_only_copies , rocksdb_copies, storage, rlog_shard ]); (_) -> true end, TabDef), case {proplists:get_value(rlog_shard, TabDef, ?LOCAL_CONTENT_SHARD), proplists:get_value(local_content, TabDef, false)} of {?LOCAL_CONTENT_SHARD, false} -> {error, #{ reason => missing_shard , table => Table }}; {Shard, _} -> {ok, #?schema{ mnesia_table = Table , shard = Shard , storage = Storage , config = Options }} end. Mnesia schema initialization at the startup @private Init mnesia tables . -spec converge_schema([entry()], #s{}) -> #s{}. converge_schema(Entries, InitialState) -> lists:foldl(fun apply_schema_op/2, InitialState, Entries). @private Create schema of the schema table and the meta shard . This %% is needed so we can replicate schema updates just like regular %% transactions. 
boostrap() -> Storage = ram_copies, Opts = [{type, ordered_set}, {record_name, ?schema}, {attributes, record_info(fields, ?schema)} ], MetaSpec = #?schema{ mnesia_table = ?schema , shard = ?mria_meta_shard , storage = Storage , config = Opts }, %% Create (or copy) the mnesia table and wait for it: ok = create_table(MetaSpec), ok = mria_mnesia:copy_table(?schema, Storage), mria_mnesia:wait_for_tables([?schema]), %% Seed the table with the metadata: {atomic, _} = mnesia:transaction(fun mnesia:write/3, [?schema, MetaSpec, write], infinity), apply_schema_op(MetaSpec, #s{specs = []}). %%%%% Handling of the online schema updates -spec apply_schema_op(entry(), #s{}) -> #s{}. apply_schema_op( #?schema{mnesia_table = Table, storage = Storage, shard = Shard} = Entry , #s{specs = OldEntries, subscribers = Subscribers} = State ) -> case lists:keyfind(Table, #?schema.mnesia_table, OldEntries) of false -> % new entry Ret = case mria_rlog:role() of core -> mria_lib:ensure_ok(mria_mnesia:copy_table(Table, Storage)); replicant -> create_table(Entry) end, ok = Ret, %% TODO: print an error message under some conditions? Tables = tables_of_shard(Shard), mria_config:load_shard_config(Shard, Tables), mria_status:notify_local_table(Table), notify_change(Shard, Entry, Subscribers), State#s{specs = [Entry|OldEntries]}; _CachedEntry -> State end. -spec notify_change(mria_rlog:shard(), entry(), subscribers()) -> ok. notify_change(Shard, Entry, Subscribers) -> Pids = maps:get(Shard, Subscribers, []), [Pid ! {schema_event, self(), {new_table, Shard, Entry}} || Pid <- Pids], ok. %% @doc Try to create a mnesia table according to the spec -spec create_table(entry()) -> ok | _. create_table(#?schema{mnesia_table = Table, storage = Storage, config = Config}) -> mria_lib:ensure_tab(mnesia:create_table(Table, [{Storage, [node()]} | Config])).
null
https://raw.githubusercontent.com/emqx/mria/2cec4de3ee718ddae22dc0dc75708103f29bb768/src/mria_schema.erl
erlang
-------------------------------------------------------------------- you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -------------------------------------------------------------------- Functions related to the management and synchronization of the mria schema. This server serializes all schema operations on a local node (effectively it means that all `mria:create_table' calls are executed sequentially. Not a big deal since we don't expect this to be a hotspot). API: gen_server callbacks ================================================================================ Type declarations ================================================================================ ================================================================================ API ================================================================================ using `mria_schema:ensure_local_table/1' Note: currently it's the only schema operation that we support. No removal and no handover of the table between the shards is possible. These operations are too rare and expensive to implement, because they require precise coordination of the shard processes across the entire cluster. Adding an API to remove or modify schema would open possibility to both shards in a synchronized manner to avoid a race condition when the replicant processes from the old shard import in-flight transactions while the new shard is bootstrapping the table. This is further complicated by the fact that the replicant nodes may consume shard transactions from different core nodes. So the operation of removing a table from the shard would look like this: processes Currently there is no requirement to implement this, so we can get away with managing each shard separately assert @private Return the list of tables that belong to the shard and their properties: @private Return the list of tables that belong to the shard. @private Subscribe to the schema events The subscriber will get events of type `event()' every time a new table is added to the shard. @private Return the list of known shards ================================================================================ gen_server callbacks ================================================================================ We cache the contents of the schema in the process state to notify the local processes about the schema updates. Note that we cannot simply use the `?schema' table for it: it can be updated remotely at unpredicatable time ================================================================================ ================================================================================ assert We already have the definition of the table and it's incompatible with the new one (changed shard) is needed so we can replicate schema updates just like regular transactions. Create (or copy) the mnesia table and wait for it: Seed the table with the metadata: Handling of the online schema updates new entry TODO: print an error message under some conditions? @doc Try to create a mnesia table according to the spec
Copyright ( c ) 2021 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved . Licensed under the Apache License , Version 2.0 ( the " License " ) ; distributed under the License is distributed on an " AS IS " BASIS , -module(mria_schema). -behaviour(gen_server). -export([ create_table/2 , subscribe_to_shard_schema_updates/1 , ensure_local_table/1 , tables_of_shard/1 , shard_of_table/1 , table_specs_of_shard/1 , shards/0 , start_link/0 , wait_for_tables/1 ]). -export([ init/1 , handle_call/3 , handle_cast/2 , handle_info/2 ]). -include("mria_rlog.hrl"). -include_lib("snabbkaffe/include/trace.hrl"). -type entry() :: #?schema{ mnesia_table :: mria:table() , shard :: mria_rlog:shard() , storage :: mria:storage() , config :: list() }. -type subscribers() :: #{mria_rlog:shard() => [pid()]}. -type event() :: {schema_event, subscription(), {new_table, mria_rlog:shard(), entry()}}. -opaque subscription() :: pid(). -define(SERVER, ?MODULE). -export_type([entry/0, subscription/0, event/0]). @private Add a table to the shard . Warning : table may not be ready for the writes after this function returns . One should wait for it move a table from one shard to another . This requires restarting 1 . Do an RPC call to all core nodes to stop the shard 2 . Each core node synchronously stops all the attached replicant 3 . Only then we are sure that we can avoid data corruption -spec create_table(mria:table(), _Properties :: list()) -> mria:t_result(ok). create_table(Table, TabDef) -> gen_server:call(?MODULE, {create_table, Table, TabDef}, infinity). -spec table_specs_of_shard(mria_rlog:shard()) -> [mria_schema:entry()]. table_specs_of_shard(Shard) -> Pattern = #?schema{mnesia_table = '_', shard = Shard, storage = '_', config = '_'}, {atomic, Result} = mnesia:transaction(fun mnesia:match_object/1, [Pattern]), Result. -spec tables_of_shard(mria_rlog:shard()) -> [mria:table()]. tables_of_shard(Shard) -> [Tab || #?schema{mnesia_table = Tab} <- table_specs_of_shard(Shard)]. -spec subscribe_to_shard_schema_updates(mria_rlog:shard()) -> {ok, subscription()}. subscribe_to_shard_schema_updates(Shard) -> gen_server:call(?SERVER, {subscribe_to_shard_schema_updates, Shard, self()}). @private Ensure the local mnesia table is ready to accept writes -spec ensure_local_table(mria:table()) -> true. ensure_local_table(Table) -> ?tp_span(debug, ?FUNCTION_NAME, #{table => Table}, mria_status:local_table_present(Table)). @private Get the shard of a table -spec shard_of_table(mria:table()) -> {ok, mria_rlog:shard()} | undefined. shard_of_table(Table) -> case mnesia:dirty_read(?schema, Table) of [#?schema{shard = Shard}] -> {ok, Shard}; [] -> undefined end. -spec shards() -> [mria_rlog:shard()]. shards() -> MS = {#?schema{mnesia_table = '_', shard = '$1', config = '_', storage = '_'}, [], ['$1']}, lists:usort(ets:select(?schema, [MS])). -spec wait_for_tables([mria:table()]) -> ok | {error, _Reason}. wait_for_tables(Tables) -> [mria_status:local_table_present(T) || T <- Tables], mria_mnesia:wait_for_tables(Tables). -spec start_link() -> {ok, pid()}. start_link() -> gen_server:start_link({local, ?SERVER}, ?MODULE, [], []). -record(s, specs :: [entry()] , subscribers = #{} :: subscribers() }). init([]) -> logger:set_process_metadata(#{domain => [mria, rlog, schema]}), ?tp(debug, rlog_schema_init, #{}), State0 = boostrap(), {ok, _} = mnesia:subscribe({table, ?schema, simple}), Recreate all the known tables : ?tp(notice, "Converging schema", #{}), Specs = table_specs_of_shard('_'), State = converge_schema(Specs, State0), {ok, State}. 
handle_call({subscribe_to_shard_schema_updates, Shard, Pid}, _From, State0 = #s{subscribers = Subs0}) -> Pids0 = maps:get(Shard, Subs0, []), Pids = case lists:member(Pid, Pids0) of true -> Pids0; false -> _MRef = monitor(process, Pid), [Pid|Pids0] end, State = State0#s{subscribers = Subs0#{Shard => Pids}}, {reply, {ok, self()}, State}; handle_call({create_table, Table, TabDef}, _From, State) -> {reply, do_create_table(Table, TabDef), State}; handle_call(Call, From, State) -> ?unexpected_event_tp(#{call => Call, from => From, state => State}), {reply, {error, unknown_call}, State}. handle_cast(Cast, State) -> ?unexpected_event_tp(#{cast => Cast, state => State}), {noreply, State}. handle_info({mnesia_table_event, Event}, State0) -> case Event of {write, Entry = #?schema{}, _ActivityId} -> ?tp(mria_schema_apply_schema_op, #{entry => Entry, activity_id => _ActivityId}), {noreply, apply_schema_op(Entry, State0)}; _SchemaEvent -> {noreply, State0} end; handle_info({'DOWN', _MRef, process, Pid, _Info}, State = #s{subscribers = Subs0}) -> Subs = maps:map(fun(_Shard, Pids) -> lists:delete(Pid, Pids) end, Subs0), {noreply, State#s{subscribers = Subs}}; handle_info(Info, State) -> ?unexpected_event_tp(#{info => Info, state => State}), {noreply, State}. Internal functions -spec do_create_table(mria:table(), list()) -> mria:t_result(ok). do_create_table(Table, TabDef) -> case make_entry(Table, TabDef) of {ok, Entry} -> case create_table(Entry) of ok -> add_entry(Entry); Err -> {aborted, Err} end; {error, Err} -> {aborted, Err} end. -spec add_entry(entry()) -> mria:t_result(ok). add_entry(Entry) -> mnesia:transaction( fun() -> #?schema{ mnesia_table = Table , shard = Shard } = Entry, case mnesia:wread({?schema, Table}) of [] -> ?tp(info, "Adding table to a shard", #{ shard => Shard , table => Table }), mnesia:write(?schema, Entry, write), ok; [#?schema{shard = Shard}] -> ok; Prev -> Info = #{ reason => incompatible_schema , shard => Shard , table => Table , new_spec => Entry , prev_spec => Prev }, mnesia:abort(Info) end end). -spec make_entry(mria:table(), _Properties :: list()) -> {ok, entry()} | {error, map()}. make_entry(Table, TabDef) -> Storage = proplists:get_value(storage, TabDef, ram_copies), Options = lists:filter(fun({Key, _}) -> not lists:member(Key, [ ram_copies, disc_copies, disc_only_copies , rocksdb_copies, storage, rlog_shard ]); (_) -> true end, TabDef), case {proplists:get_value(rlog_shard, TabDef, ?LOCAL_CONTENT_SHARD), proplists:get_value(local_content, TabDef, false)} of {?LOCAL_CONTENT_SHARD, false} -> {error, #{ reason => missing_shard , table => Table }}; {Shard, _} -> {ok, #?schema{ mnesia_table = Table , shard = Shard , storage = Storage , config = Options }} end. Mnesia schema initialization at the startup @private Init mnesia tables . -spec converge_schema([entry()], #s{}) -> #s{}. converge_schema(Entries, InitialState) -> lists:foldl(fun apply_schema_op/2, InitialState, Entries). @private Create schema of the schema table and the meta shard . This boostrap() -> Storage = ram_copies, Opts = [{type, ordered_set}, {record_name, ?schema}, {attributes, record_info(fields, ?schema)} ], MetaSpec = #?schema{ mnesia_table = ?schema , shard = ?mria_meta_shard , storage = Storage , config = Opts }, ok = create_table(MetaSpec), ok = mria_mnesia:copy_table(?schema, Storage), mria_mnesia:wait_for_tables([?schema]), {atomic, _} = mnesia:transaction(fun mnesia:write/3, [?schema, MetaSpec, write], infinity), apply_schema_op(MetaSpec, #s{specs = []}). 
-spec apply_schema_op(entry(), #s{}) -> #s{}. apply_schema_op( #?schema{mnesia_table = Table, storage = Storage, shard = Shard} = Entry , #s{specs = OldEntries, subscribers = Subscribers} = State ) -> case lists:keyfind(Table, #?schema.mnesia_table, OldEntries) of Ret = case mria_rlog:role() of core -> mria_lib:ensure_ok(mria_mnesia:copy_table(Table, Storage)); replicant -> create_table(Entry) end, Tables = tables_of_shard(Shard), mria_config:load_shard_config(Shard, Tables), mria_status:notify_local_table(Table), notify_change(Shard, Entry, Subscribers), State#s{specs = [Entry|OldEntries]}; _CachedEntry -> State end. -spec notify_change(mria_rlog:shard(), entry(), subscribers()) -> ok. notify_change(Shard, Entry, Subscribers) -> Pids = maps:get(Shard, Subscribers, []), [Pid ! {schema_event, self(), {new_table, Shard, Entry}} || Pid <- Pids], ok. -spec create_table(entry()) -> ok | _. create_table(#?schema{mnesia_table = Table, storage = Storage, config = Config}) -> mria_lib:ensure_tab(mnesia:create_table(Table, [{Storage, [node()]} | Config])).
1b2fa1f9b54852a6fea1950bbcd422ac27d5ffac17aafcf59067b3357ece0807
Perry961002/SICP
exa2.2.1-list.scm
(define one-through-four (list 1 2 3 4))
(car one-through-four)
(cdr one-through-four)
(cons 0 one-through-four)
(cons one-through-four 5)
;----------------------------------------
; Return the nth item of a list, Scheme already defines this procedure
(define (list-ref items n)
  (cond ((null? items) (display "error: list is null"))
        ((= n 0) (car items))
        (else (list-ref (cdr items) (- n 1)))))
; Return the length of a list, already defined in Scheme
(define (length items)
  (if (null? items)
      0
      (+ 1 (length (cdr items)))))
; already defined in Scheme
(define (append list1 list2)
  (if (null? list1)
      list2
      (cons (car list1) (append (cdr list1) list2))))
; Map over a list, already defined in Scheme
(define (map proc items)
  (if (null? items)
      '()
      (cons (proc (car items)) (map proc (cdr items)))))
null
https://raw.githubusercontent.com/Perry961002/SICP/89d539e600a73bec42d350592f0ac626e041bf16/Chap2/example/exa2.2.1-list.scm
scheme
---------------------------------------- Return the nth item of a list, Scheme already defines this procedure
(define one-through-four (list 1 2 3 4))
(car one-through-four)
(cdr one-through-four)
(cons 0 one-through-four)
(cons one-through-four 5)
(define (list-ref items n)
  (cond ((null? items) (display "error: list is null"))
        ((= n 0) (car items))
        (else (list-ref (cdr items) (- n 1)))))
; Return the length of a list, already defined in Scheme
(define (length items)
  (if (null? items)
      0
      (+ 1 (length (cdr items)))))
; already defined in Scheme
(define (append list1 list2)
  (if (null? list1)
      list2
      (cons (car list1) (append (cdr list1) list2))))
; Map over a list, already defined in Scheme
(define (map proc items)
  (if (null? items)
      '()
      (cons (proc (car items)) (map proc (cdr items)))))
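A short usage sketch for the list procedures defined in the record above; these shadow the standard Scheme procedures of the same names and behave the same way on proper lists.

(list-ref (list 1 4 9 16 25) 3)            ; => 16
(length (list 1 4 9 16 25))                ; => 5
(append (list 1 2 3) (list 4 5))           ; => (1 2 3 4 5)
(map (lambda (x) (* x x)) (list 1 2 3 4))  ; => (1 4 9 16)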
e137ffac695079cf9457d217335798968923fa605c7f464b2711e8d4df570df8
camsaul/toucan2
after_update_test.clj
(ns toucan2.tools.after-update-test (:require [clojure.string :as str] [clojure.test :refer :all] [clojure.walk :as walk] [toucan2.instance :as instance] [toucan2.select :as select] [toucan2.test :as test] [toucan2.test.track-realized-columns :as test.track-realized] [toucan2.tools.after-update :as after-update] [toucan2.update :as update]) (:import (java.time LocalDateTime))) (set! *warn-on-reflection* true) (def ^:private ^:dynamic *venues-awaiting-moderation* nil) (derive ::venues.after-update ::test.track-realized/venues) (defn- ensure-persistent! [x] (if (instance? clojure.lang.ITransientCollection x) (persistent! x) x)) (after-update/define-after-update ::venues.after-update [venue] (assert (map? venue)) (when *venues-awaiting-moderation* (swap! *venues-awaiting-moderation* conj (ensure-persistent! (select-keys venue [:id :name :category])))) nil) (deftest ^:synchronized after-update-test (doseq [f [#'update/update! #'update/update-returning-pks!]] (testing f (test/with-discarded-table-changes :venues (binding [*venues-awaiting-moderation* (atom [])] (test.track-realized/with-realized-columns [realized-columns] (is (= (condp = f #'update/update! 2 #'update/update-returning-pks! [1 2]) (f ::venues.after-update :category "bar" {:category "BARRR"}))) (is (= #{:venues/id :venues/name :venues/category} (realized-columns)))) (testing "rows should be updated in DB" (is (= [(instance/instance ::venues.after-update {:id 1, :name "Tempest", :category "BARRR"}) (instance/instance ::venues.after-update {:id 2, :name "Ho's Tavern", :category "BARRR"})] (select/select [::venues.after-update :id :name :category] :category "BARRR" {:order-by [[:id :asc]]})))) (testing (str "rows should have been added to " `*venues-awaiting-moderation*) (is (= [(instance/instance ::venues.after-update {:id 1, :name "Tempest", :category "BARRR"}) (instance/instance ::venues.after-update {:id 2, :name "Ho's Tavern", :category "BARRR"})] @*venues-awaiting-moderation*)))))))) (derive ::venues.after-update.composed ::venues.after-update) (def ^:private ^:dynamic *recently-updated-venues* (atom [])) (after-update/define-after-update ::venues.after-update.composed [venue] (when *recently-updated-venues* (swap! *recently-updated-venues* conj (ensure-persistent! (select-keys venue [:id :name])))) venue) (deftest ^:synchronized compose-test (testing "after-update should compose" (test/with-discarded-table-changes :venues (binding [*venues-awaiting-moderation* (atom []) *recently-updated-venues* (atom [])] (test.track-realized/with-realized-columns [realized-columns] (is (= 2 (update/update! ::venues.after-update.composed :category "bar" {:category "BARRR"}))) (is (= #{:venues/name :venues/id :venues/category} (realized-columns)))) (testing (str '*venues-awaiting-moderation* " from " ::venues.after-update) (is (= [(instance/instance ::venues.after-update.composed {:id 1, :name "Tempest", :category "BARRR"}) (instance/instance ::venues.after-update.composed {:id 2, :name "Ho's Tavern", :category "BARRR"})] @*venues-awaiting-moderation*))) (testing (str '*recently-updated-venues* " from " ::venues.after-update.composed) (is (= [(instance/instance ::venues.after-update.composed {:id 1, :name "Tempest"}) (instance/instance ::venues.after-update.composed {:id 2, :name "Ho's Tavern"})] @*recently-updated-venues*))))))) (derive ::people.record-updates ::test/people) (def ^:private ^:dynamic *updated-people*) (after-update/define-after-update ::people.record-updates [person] (when *updated-people* (swap! 
*updated-people* conj (:id person))) person) (deftest ^:synchronized only-call-once-test (test/with-discarded-table-changes :people (testing "after-update method should be applied exactly once" (binding [*updated-people* (atom [])] (is (= 1 (update/update! ::people.record-updates 1 {:name "CAM"}))) (is (= [1] @*updated-people*)) (is (= {:id 1, :name "CAM"} (select/select-one [::people.record-updates :id :name] 1))))))) (derive ::venues.exception.clojure-land ::test/venues) (after-update/define-after-update ::venues.exception.clojure-land [venue] (update/update! ::test/venues 1 {:category "place"}) trigger a Clojure - land error (when (= (:category venue) "store") (throw (ex-info "Don't update a store!" {:venue venue}))) venue) (derive ::venues.exception.db-land ::test/venues) (after-update/define-after-update ::venues.exception.db-land [venue] (update/update! ::test/venues 1 {:category "place"}) ;; trigger a DB-land error (when (= (:category venue) "store") (update/update! ::test/venues 1 {:venue_name "this column doesn't exist"})) venue) (deftest ^:synchronized exception-test (doseq [model [::venues.exception.clojure-land ::venues.exception.db-land]] (testing (format "Model = %s" model) (testing "\nexception in after-update" (test/with-discarded-table-changes :venues (is (thrown-with-msg? clojure.lang.ExceptionInfo (case model ::venues.exception.clojure-land #"Don't update a store" ::venues.exception.db-land (case (test/current-db-type) :postgres #"ERROR: column \"venue_name\" of relation \"venues\" does not exist" :h2 #"Column \"VENUE_NAME\" not found" :mariadb #"Unknown column 'venue_name' in 'field list'")) (update/update! model 2 {:category "store", :name "My Store"}))) (testing "\nShould be done inside a transaction" (is (= [(instance/instance model {:id 1 :name "Tempest" :updated-at (LocalDateTime/parse "2017-01-01T00:00")}) (instance/instance model {:id 2 :name "Ho's Tavern" :updated-at (LocalDateTime/parse "2017-01-01T00:00")}) (instance/instance model {:id 3 :name "BevMo" :updated-at (LocalDateTime/parse "2017-01-01T00:00")})] (select/select [model :id :name :updated-at] {:order-by [[:id :asc]]}))))))))) (deftest ^:parallel macroexpansion-test (testing "define-after-update should define vars with different names based on the model." (letfn [(generated-name* [form] (cond (sequential? form) (some generated-name* form) (and (symbol? form) (str/starts-with? (name form) "each-row-fn-primary-method-toucan-query-type-update")) form)) (generated-name [form] (let [expanded (walk/macroexpand-all form)] (or (generated-name* expanded) ['no-match expanded])))] (is (= 'each-row-fn-primary-method-toucan-query-type-update-*-model-1 (generated-name `(after-update/define-after-update :model-1 [~'venue] ~'venue)))) (is (= 'each-row-fn-primary-method-toucan-query-type-update-*-model-2 (generated-name `(after-update/define-after-update :model-2 [~'venue] ~'venue)))))))
null
https://raw.githubusercontent.com/camsaul/toucan2/5204b34d46f5adb3e52b022218049abe9b336928/test/toucan2/tools/after_update_test.clj
clojure
trigger a DB-land error
(ns toucan2.tools.after-update-test (:require [clojure.string :as str] [clojure.test :refer :all] [clojure.walk :as walk] [toucan2.instance :as instance] [toucan2.select :as select] [toucan2.test :as test] [toucan2.test.track-realized-columns :as test.track-realized] [toucan2.tools.after-update :as after-update] [toucan2.update :as update]) (:import (java.time LocalDateTime))) (set! *warn-on-reflection* true) (def ^:private ^:dynamic *venues-awaiting-moderation* nil) (derive ::venues.after-update ::test.track-realized/venues) (defn- ensure-persistent! [x] (if (instance? clojure.lang.ITransientCollection x) (persistent! x) x)) (after-update/define-after-update ::venues.after-update [venue] (assert (map? venue)) (when *venues-awaiting-moderation* (swap! *venues-awaiting-moderation* conj (ensure-persistent! (select-keys venue [:id :name :category])))) nil) (deftest ^:synchronized after-update-test (doseq [f [#'update/update! #'update/update-returning-pks!]] (testing f (test/with-discarded-table-changes :venues (binding [*venues-awaiting-moderation* (atom [])] (test.track-realized/with-realized-columns [realized-columns] (is (= (condp = f #'update/update! 2 #'update/update-returning-pks! [1 2]) (f ::venues.after-update :category "bar" {:category "BARRR"}))) (is (= #{:venues/id :venues/name :venues/category} (realized-columns)))) (testing "rows should be updated in DB" (is (= [(instance/instance ::venues.after-update {:id 1, :name "Tempest", :category "BARRR"}) (instance/instance ::venues.after-update {:id 2, :name "Ho's Tavern", :category "BARRR"})] (select/select [::venues.after-update :id :name :category] :category "BARRR" {:order-by [[:id :asc]]})))) (testing (str "rows should have been added to " `*venues-awaiting-moderation*) (is (= [(instance/instance ::venues.after-update {:id 1, :name "Tempest", :category "BARRR"}) (instance/instance ::venues.after-update {:id 2, :name "Ho's Tavern", :category "BARRR"})] @*venues-awaiting-moderation*)))))))) (derive ::venues.after-update.composed ::venues.after-update) (def ^:private ^:dynamic *recently-updated-venues* (atom [])) (after-update/define-after-update ::venues.after-update.composed [venue] (when *recently-updated-venues* (swap! *recently-updated-venues* conj (ensure-persistent! (select-keys venue [:id :name])))) venue) (deftest ^:synchronized compose-test (testing "after-update should compose" (test/with-discarded-table-changes :venues (binding [*venues-awaiting-moderation* (atom []) *recently-updated-venues* (atom [])] (test.track-realized/with-realized-columns [realized-columns] (is (= 2 (update/update! ::venues.after-update.composed :category "bar" {:category "BARRR"}))) (is (= #{:venues/name :venues/id :venues/category} (realized-columns)))) (testing (str '*venues-awaiting-moderation* " from " ::venues.after-update) (is (= [(instance/instance ::venues.after-update.composed {:id 1, :name "Tempest", :category "BARRR"}) (instance/instance ::venues.after-update.composed {:id 2, :name "Ho's Tavern", :category "BARRR"})] @*venues-awaiting-moderation*))) (testing (str '*recently-updated-venues* " from " ::venues.after-update.composed) (is (= [(instance/instance ::venues.after-update.composed {:id 1, :name "Tempest"}) (instance/instance ::venues.after-update.composed {:id 2, :name "Ho's Tavern"})] @*recently-updated-venues*))))))) (derive ::people.record-updates ::test/people) (def ^:private ^:dynamic *updated-people*) (after-update/define-after-update ::people.record-updates [person] (when *updated-people* (swap! 
*updated-people* conj (:id person))) person) (deftest ^:synchronized only-call-once-test (test/with-discarded-table-changes :people (testing "after-update method should be applied exactly once" (binding [*updated-people* (atom [])] (is (= 1 (update/update! ::people.record-updates 1 {:name "CAM"}))) (is (= [1] @*updated-people*)) (is (= {:id 1, :name "CAM"} (select/select-one [::people.record-updates :id :name] 1))))))) (derive ::venues.exception.clojure-land ::test/venues) (after-update/define-after-update ::venues.exception.clojure-land [venue] (update/update! ::test/venues 1 {:category "place"}) trigger a Clojure - land error (when (= (:category venue) "store") (throw (ex-info "Don't update a store!" {:venue venue}))) venue) (derive ::venues.exception.db-land ::test/venues) (after-update/define-after-update ::venues.exception.db-land [venue] (update/update! ::test/venues 1 {:category "place"}) (when (= (:category venue) "store") (update/update! ::test/venues 1 {:venue_name "this column doesn't exist"})) venue) (deftest ^:synchronized exception-test (doseq [model [::venues.exception.clojure-land ::venues.exception.db-land]] (testing (format "Model = %s" model) (testing "\nexception in after-update" (test/with-discarded-table-changes :venues (is (thrown-with-msg? clojure.lang.ExceptionInfo (case model ::venues.exception.clojure-land #"Don't update a store" ::venues.exception.db-land (case (test/current-db-type) :postgres #"ERROR: column \"venue_name\" of relation \"venues\" does not exist" :h2 #"Column \"VENUE_NAME\" not found" :mariadb #"Unknown column 'venue_name' in 'field list'")) (update/update! model 2 {:category "store", :name "My Store"}))) (testing "\nShould be done inside a transaction" (is (= [(instance/instance model {:id 1 :name "Tempest" :updated-at (LocalDateTime/parse "2017-01-01T00:00")}) (instance/instance model {:id 2 :name "Ho's Tavern" :updated-at (LocalDateTime/parse "2017-01-01T00:00")}) (instance/instance model {:id 3 :name "BevMo" :updated-at (LocalDateTime/parse "2017-01-01T00:00")})] (select/select [model :id :name :updated-at] {:order-by [[:id :asc]]}))))))))) (deftest ^:parallel macroexpansion-test (testing "define-after-update should define vars with different names based on the model." (letfn [(generated-name* [form] (cond (sequential? form) (some generated-name* form) (and (symbol? form) (str/starts-with? (name form) "each-row-fn-primary-method-toucan-query-type-update")) form)) (generated-name [form] (let [expanded (walk/macroexpand-all form)] (or (generated-name* expanded) ['no-match expanded])))] (is (= 'each-row-fn-primary-method-toucan-query-type-update-*-model-1 (generated-name `(after-update/define-after-update :model-1 [~'venue] ~'venue)))) (is (= 'each-row-fn-primary-method-toucan-query-type-update-*-model-2 (generated-name `(after-update/define-after-update :model-2 [~'venue] ~'venue)))))))
4c3fa2ecb3da4afc156363fd8e7580086d8ab2bab1fa44ad65690d8dae96827b
chiroptical/book-of-monads
Inspecting.hs
{-# LANGUAGE TupleSections #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE LambdaCase #-} module Inspecting where import RPNInFreer (Freer (..), RPNInstruction, IStack (..), push, pop) data LastOp = Return | LastPop | LastPush Integer class MonadIStack m where pop' :: m Integer push' :: Integer -> m () newtype WithContext c m a = C { unC :: c -> m (a, c) } -- Exercise 13.19 -- Implement Functor and Applicative for ` WithContext LastOp m ` instance Monad m => Functor (WithContext LastOp m) where fmap f x = C $ \context -> do (x', context') <- unC x context pure (f x', context') instance Monad m => Applicative (WithContext LastOp m) where pure x = C $ \_ -> return (x, Return) f <*> x = C $ \context -> do (f', context') <- unC f context (x', context_) <- unC x context' pure (f' x', context_) instance Monad m => Monad (WithContext LastOp m) where C x >>= f = C $ \context -> do (x', context') <- x context unC (f x') context' instance (Monad m, MonadIStack m) => MonadIStack (WithContext LastOp m) where pop' = C $ \case LastPush n -> return (n, Return) _ -> (, LastPop) <$> pop' push' v = C $ \_ -> (, LastPush v) <$> push' v optimize :: (Monad m, MonadIStack m) => WithContext LastOp m a -> m a optimize p = fst <$> unC p Return -- Exercise 13.20 - Convince yourself the above code works
null
https://raw.githubusercontent.com/chiroptical/book-of-monads/c2eff1c67a8958b28cfd2001d652f8b68e7c84df/chapter13/src/Inspecting.hs
haskell
Implement Functor and Applicative for ` WithContext LastOp m ` Exercise 13.20 - Convince yourself the above code works
# LANGUAGE TupleSections # # LANGUAGE FlexibleInstances # # LANGUAGE LambdaCase # module Inspecting where import RPNInFreer (Freer (..), RPNInstruction, IStack (..), push, pop) data LastOp = Return | LastPop | LastPush Integer class MonadIStack m where pop' :: m Integer push' :: Integer -> m () newtype WithContext c m a = C { unC :: c -> m (a, c) } instance Monad m => Functor (WithContext LastOp m) where fmap f x = C $ \context -> do (x', context') <- unC x context pure (f x', context') instance Monad m => Applicative (WithContext LastOp m) where pure x = C $ \_ -> return (x, Return) f <*> x = C $ \context -> do (f', context') <- unC f context (x', context_) <- unC x context' pure (f' x', context_) instance Monad m => Monad (WithContext LastOp m) where C x >>= f = C $ \context -> do (x', context') <- x context unC (f x') context' instance (Monad m, MonadIStack m) => MonadIStack (WithContext LastOp m) where pop' = C $ \case LastPush n -> return (n, Return) _ -> (, LastPop) <$> pop' push' v = C $ \_ -> (, LastPush v) <$> push' v optimize :: (Monad m, MonadIStack m) => WithContext LastOp m a -> m a optimize p = fst <$> unC p Return
c4bc5897e7b2160a4610925e17be642de02bdfb664cc7516c4ec0f901f3f80d1
racket/racket7
renumber.rkt
#lang racket/base (define filename "stypes.h") (define lines (with-input-from-file filename (lambda () (let loop () (let ([l (read-line)]) (if (eof-object? l) null (cons l (loop)))))))) (define n 0) (with-output-to-file filename #:exists 'truncate (lambda () (for-each (lambda (l) (cond [(regexp-match #rx"^( +[a-z_A-Z][a-z_A-Z0-9]*,) *(?:/[*] [0-9]* [*]/)? *$" l) => (lambda (m) (let ([s (cadr m)]) (printf "~a~a\n" s (format "~a/* ~a */" (make-string (max 0 (- 40 (string-length s))) #\space) n))) (set! n (add1 n)))] [(regexp-match #rx"^ +[a-zA-Z_][a-z_A-Z0-9]*," l) (set! n (add1 n)) (printf "~a\n" l)] [else (printf "~a\n" l)])) lines)))
null
https://raw.githubusercontent.com/racket/racket7/5dbb62c6bbec198b4a790f1dc08fef0c45c2e32b/racket/src/racket/src/renumber.rkt
racket
#lang racket/base (define filename "stypes.h") (define lines (with-input-from-file filename (lambda () (let loop () (let ([l (read-line)]) (if (eof-object? l) null (cons l (loop)))))))) (define n 0) (with-output-to-file filename #:exists 'truncate (lambda () (for-each (lambda (l) (cond [(regexp-match #rx"^( +[a-z_A-Z][a-z_A-Z0-9]*,) *(?:/[*] [0-9]* [*]/)? *$" l) => (lambda (m) (let ([s (cadr m)]) (printf "~a~a\n" s (format "~a/* ~a */" (make-string (max 0 (- 40 (string-length s))) #\space) n))) (set! n (add1 n)))] [(regexp-match #rx"^ +[a-zA-Z_][a-z_A-Z0-9]*," l) (set! n (add1 n)) (printf "~a\n" l)] [else (printf "~a\n" l)])) lines)))
c3df4be10754a33c61b8d5660f41f5c3b752004383809999a765224c8c29a905
racket/rackunit
base.rkt
#lang racket/base (require racket/contract/base "check-info.rkt" "location.rkt") ;; struct test : (define-struct test ()) ;; struct (rackunit-test-case test) : (U string #f) thunk (define-struct (rackunit-test-case test) (name action) #:transparent) struct ( rackunit - test - suite test ) : string ( fdown fup fhere seed - > ( listof test - result ) ) thunk thunk (define-struct (rackunit-test-suite test) (name tests before after) #:transparent) ;; struct exn:test exn:fail : () ;; ;; The exception throw by test failures (define-struct (exn:test exn:fail) ()) ;; struct (exn:test:check struct:exn:test) : (list-of check-info) ;; ;; The exception thrown to indicate a check has failed (define-struct (exn:test:check exn:test) (stack) #:property prop:exn:srclocs (lambda (self) ;; Try to get a location from the stack. (define maybe-location (for/or ([check-info (exn:test:check-stack self)]) (and (check-location? check-info) check-info))) (cond [maybe-location (define loc (location-info-value (check-info-value maybe-location))) (list (location->srcloc loc))] [else (list)]))) ;; struct (exn:test:check:internal exn:test:check) : () ;; ;; Exception thrown to indicate an internal failure in an ;; check, distinguished from a failure in user code. (define-struct (exn:test:check:internal exn:test:check) ()) ;; struct test-result : (U string #f) (define-struct test-result (test-case-name)) ;; struct (test-failure test-result) : exn:test (define-struct (test-failure test-result) (result)) ;; struct (test-error test-result) : any (define-struct (test-error test-result) (result)) ;; struct (test-success test-result) : any (define-struct (test-success test-result) (result)) (provide/contract (struct (rackunit-test-case test) ((name (or/c string? false/c)) (action (-> any)))) (struct (rackunit-test-suite test) ((name string?) (tests procedure?) (before (-> any)) (after (-> any))))) (provide (struct-out test) (struct-out exn:test) (struct-out exn:test:check) (struct-out exn:test:check:internal) (struct-out test-result) (struct-out test-failure) (struct-out test-error) (struct-out test-success))
null
https://raw.githubusercontent.com/racket/rackunit/478e1d07d5a8eb2a60aad4a0a050a115b752ba69/rackunit-lib/rackunit/private/base.rkt
racket
struct test : struct (rackunit-test-case test) : (U string #f) thunk struct exn:test exn:fail : () The exception throw by test failures struct (exn:test:check struct:exn:test) : (list-of check-info) The exception thrown to indicate a check has failed Try to get a location from the stack. struct (exn:test:check:internal exn:test:check) : () Exception thrown to indicate an internal failure in an check, distinguished from a failure in user code. struct test-result : (U string #f) struct (test-failure test-result) : exn:test struct (test-error test-result) : any struct (test-success test-result) : any
#lang racket/base (require racket/contract/base "check-info.rkt" "location.rkt") (define-struct test ()) (define-struct (rackunit-test-case test) (name action) #:transparent) struct ( rackunit - test - suite test ) : string ( fdown fup fhere seed - > ( listof test - result ) ) thunk thunk (define-struct (rackunit-test-suite test) (name tests before after) #:transparent) (define-struct (exn:test exn:fail) ()) (define-struct (exn:test:check exn:test) (stack) #:property prop:exn:srclocs (lambda (self) (define maybe-location (for/or ([check-info (exn:test:check-stack self)]) (and (check-location? check-info) check-info))) (cond [maybe-location (define loc (location-info-value (check-info-value maybe-location))) (list (location->srcloc loc))] [else (list)]))) (define-struct (exn:test:check:internal exn:test:check) ()) (define-struct test-result (test-case-name)) (define-struct (test-failure test-result) (result)) (define-struct (test-error test-result) (result)) (define-struct (test-success test-result) (result)) (provide/contract (struct (rackunit-test-case test) ((name (or/c string? false/c)) (action (-> any)))) (struct (rackunit-test-suite test) ((name string?) (tests procedure?) (before (-> any)) (after (-> any))))) (provide (struct-out test) (struct-out exn:test) (struct-out exn:test:check) (struct-out exn:test:check:internal) (struct-out test-result) (struct-out test-failure) (struct-out test-error) (struct-out test-success))
84efa454770d05961aab5a479564a0393eb100d6cdc08280506ac12d8c74a28d
vbmithr/ocaml-libbitcoin
mnemonic.mli
type dict = | English | Spanish | Japanese | Chinese_simplified | Chinese_traditional val of_entropy : ?dict:dict -> string -> string list val to_seed : ?passphrase:string -> string list -> string option val to_seed_exn : ?passphrase:string -> string list -> string
null
https://raw.githubusercontent.com/vbmithr/ocaml-libbitcoin/b93a5cca1d430c38dd822b45e47fc5132fb28ab9/src/mnemonic.mli
ocaml
type dict = | English | Spanish | Japanese | Chinese_simplified | Chinese_traditional val of_entropy : ?dict:dict -> string -> string list val to_seed : ?passphrase:string -> string list -> string option val to_seed_exn : ?passphrase:string -> string list -> string
24a347851fdd11d64580fc18f31ee39d98a26c35955479f279129c3f63b0fc61
jafingerhut/clojure-benchmarks
knucleotide.clj-14.clj
The Computer Language Benchmarks Game ;; / contributed by (ns knucleotide (:gen-class)) (set! *warn-on-reflection* true) (definterface ITallyCounter (^int get_count []) (inc_BANG_ [])) (deftype TallyCounter [^{:unsynchronized-mutable true :tag int} cnt] ITallyCounter (get-count [this] cnt) (inc! [this] (set! cnt (unchecked-inc cnt)))) (defn my-lazy-map [f coll] (lazy-seq (when-let [s (seq coll)] (cons (f (first s)) (my-lazy-map f (rest s)))))) modified - pmap is like pmap from Clojure 1.1 , but with only as much ;; parallelism as specified by the parameter num-threads. Uses ;; my-lazy-map instead of map from core.clj, since that version of map ;; can use unwanted additional parallelism for chunked collections, ;; like ranges. (defn modified-pmap ([num-threads f coll] (if (== num-threads 1) (map f coll) (let [n (if (>= num-threads 2) (dec num-threads) 1) rets (my-lazy-map #(future (f %)) coll) step (fn step [[x & xs :as vs] fs] (lazy-seq (if-let [s (seq fs)] (cons (deref x) (step xs (rest s))) (map deref vs))))] (step rets (drop n rets))))) ([num-threads f coll & colls] (let [step (fn step [cs] (lazy-seq (let [ss (my-lazy-map seq cs)] (when (every? identity ss) (cons (my-lazy-map first ss) (step (my-lazy-map rest ss)))))))] (modified-pmap num-threads #(apply f %) (step (cons coll colls)))))) ;; Return true when the line l is a FASTA description line (defn fasta-description-line [l] (= \> (first (seq l)))) ;; Return true when the line l is a FASTA description line that begins ;; with the string desc-str. (defn fasta-description-line-beginning [desc-str l] (and (fasta-description-line l) (= desc-str (subs l 1 (min (count l) (inc (count desc-str))))))) Take a sequence of lines from a FASTA format file , and a string ;; desc-str. Look for a FASTA record with a description that begins ;; with desc-str, and if one is found, return its DNA sequence as a ;; single (potentially quite long) string. If input file is big, ;; you'll save lots of memory if you call this function in a with-open ;; for the file, and don't hold on to the head of the lines parameter. (defn fasta-dna-str-with-desc-beginning [desc-str lines] (when-let [x (drop-while (fn [l] (not (fasta-description-line-beginning desc-str l))) lines)] (when-let [x (seq x)] (let [y (take-while (fn [l] (not (fasta-description-line l))) (map (fn [#^java.lang.String s] (.toUpperCase s)) (rest x)))] (apply str y))))) (def dna-char-to-code-val {\A 0, \C 1, \T 2, \G 3}) (def code-val-to-dna-char {0 \A, 1 \C, 2 \T, 3 \G}) ;; In the hash map 'tally' in tally-dna-subs-with-len, it is more straightforward to use a Clojure string ( same as a Java string ) as ;; the key, but such a key is significantly bigger than it needs to ;; be, increasing memory and time required to hash the value. By ;; converting a string of A, C, T, and G characters down to an integer ;; that contains only 2 bits for each character, we make a value that ;; is significantly smaller and faster to use as a key in the map. ;; most least ;; significant significant ;; bits of int bits of int ;; | | ;; V V ;; code code code .... code code ;; ^ ^ ;; | | ;; code for code for ;; *latest* *earliest* ;; char in char in ;; sequence sequence ;; Note: Given Clojure 1.2's implementation of bit-shift-left/right operations , when the value being shifted is larger than a 32 - bit ;; int, they are faster when the shift amount is a compile time ;; constant. 
(defn dna-str-to-key [s] ;; Accessing a local let binding is much faster than accessing a var (let [dna-char-to-code-val dna-char-to-code-val] (loop [key 0 offset (int (dec (count s)))] (if (neg? offset) key (let [c (nth s offset) code (int (dna-char-to-code-val c)) new-key (+ (bit-shift-left key 2) code)] (recur new-key (dec offset))))))) (defn key-to-dna-str [k len] (apply str (map code-val-to-dna-char (map (fn [pos] (bit-and 3 (bit-shift-right k pos))) (range 0 (* 2 len) 2))))) ;; Handle slight difference in function name between Clojure 1.2.0 and 1.3.0 - alpha1 ability to use type hints to infer fast bit ;; operations. (defmacro key-type [num] (if (and (== (*clojure-version* :major) 1) (== (*clojure-version* :minor) 2)) num `(long ~num))) (defmacro my-int [num] (if (and (== (*clojure-version* :major) 1) (== (*clojure-version* :minor) 2)) num `(int ~num))) (defn tally-dna-subs-with-len [len dna-str] (let [mask-width (key-type (* 2 len)) mask (key-type (dec (bit-shift-left 1 mask-width))) dna-char-to-code-val dna-char-to-code-val] (loop [offset (int (- (count dna-str) len)) key (key-type (dna-str-to-key (subs dna-str offset (+ offset len)))) tally (let [h (java.util.HashMap.) one (TallyCounter. (int 1))] (.put h key one) h)] (if (zero? offset) tally (let [new-offset (dec offset) new-first-char-code (my-int (dna-char-to-code-val (nth dna-str new-offset))) new-key (key-type (bit-and mask (+ (bit-shift-left key 2) new-first-char-code)))] (if-let [^TallyCounter cur-count (get tally new-key)] (.inc! cur-count) (let [one (TallyCounter. (int 1))] (.put tally new-key one))) (recur new-offset new-key tally)))))) (defn getcnt [^TallyCounter tc] (.get-count tc)) (defn all-tally-to-str [tally fn-key-to-str] (with-out-str (let [total (reduce + (map getcnt (vals tally))) cmp-keys (fn [k1 k2] ;; Return negative integer if k1 should come earlier ;; in the sort order than k2, 0 if they are equal, ;; otherwise a positive integer. (let [cnt1 (int (getcnt (get tally k1))) cnt2 (int (getcnt (get tally k2)))] (if (not= cnt1 cnt2) (- cnt2 cnt1) (let [^String s1 (fn-key-to-str k1) ^String s2 (fn-key-to-str k2)] (.compareTo s1 s2)))))] (doseq [k (sort cmp-keys (keys tally))] (printf "%s %.3f\n" (fn-key-to-str k) (double (* 100 (/ (getcnt (get tally k)) total)))))))) (defn one-tally-to-str [dna-str tally] (let [zerotc (TallyCounter. 0)] (format "%d\t%s" (getcnt (get tally (dna-str-to-key dna-str) zerotc)) dna-str))) (defn compute-one-part [dna-str part] [part (condp = part 0 (all-tally-to-str (tally-dna-subs-with-len 1 dna-str) (fn [k] (key-to-dna-str k 1))) 1 (all-tally-to-str (tally-dna-subs-with-len 2 dna-str) (fn [k] (key-to-dna-str k 2))) 2 (one-tally-to-str "GGT" (tally-dna-subs-with-len 3 dna-str)) 3 (one-tally-to-str "GGTA" (tally-dna-subs-with-len 4 dna-str)) 4 (one-tally-to-str "GGTATT" (tally-dna-subs-with-len 6 dna-str)) 5 (one-tally-to-str "GGTATTTTAATT" (tally-dna-subs-with-len 12 dna-str)) 6 (one-tally-to-str "GGTATTTTAATTTATAGT" (tally-dna-subs-with-len 18 dna-str)))]) (def *default-modified-pmap-num-threads* (+ 2 (.. Runtime getRuntime availableProcessors))) (defn -main [& args] (def num-threads (if (and (>= (count args) 1) (re-matches #"^\d+$" (nth args 0))) (let [n (. Integer valueOf (nth args 0) 10)] (if (== n 0) *default-modified-pmap-num-threads* n)) *default-modified-pmap-num-threads*)) (with-open [br (java.io.BufferedReader. 
*in*)] (let [dna-str (fasta-dna-str-with-desc-beginning "THREE" (line-seq br)) ;; Select the order of computing parts such that it is unlikely that parts 5 and 6 will be computed concurrently . Those are the two that take the most memory . It would be nice if we could specify a DAG for which jobs should finish before others begin -- then we could prevent those two ;; parts from running simultaneously. results (map second (sort #(< (first %1) (first %2)) (modified-pmap num-threads #(compute-one-part dna-str %) '(0 5 6 1 2 3 4) )))] (doseq [r results] (println r) (flush)))) (shutdown-agents))
null
https://raw.githubusercontent.com/jafingerhut/clojure-benchmarks/474a8a4823727dd371f1baa9809517f9e0b508d4/knucleotide/knucleotide.clj-14.clj
clojure
/ parallelism as specified by the parameter num-threads. Uses my-lazy-map instead of map from core.clj, since that version of map can use unwanted additional parallelism for chunked collections, like ranges. Return true when the line l is a FASTA description line Return true when the line l is a FASTA description line that begins with the string desc-str. desc-str. Look for a FASTA record with a description that begins with desc-str, and if one is found, return its DNA sequence as a single (potentially quite long) string. If input file is big, you'll save lots of memory if you call this function in a with-open for the file, and don't hold on to the head of the lines parameter. In the hash map 'tally' in tally-dna-subs-with-len, it is more the key, but such a key is significantly bigger than it needs to be, increasing memory and time required to hash the value. By converting a string of A, C, T, and G characters down to an integer that contains only 2 bits for each character, we make a value that is significantly smaller and faster to use as a key in the map. most least significant significant bits of int bits of int | | V V code code code .... code code ^ ^ | | code for code for *latest* *earliest* char in char in sequence sequence Note: Given Clojure 1.2's implementation of bit-shift-left/right int, they are faster when the shift amount is a compile time constant. Accessing a local let binding is much faster than accessing a var Handle slight difference in function name between Clojure 1.2.0 and operations. Return negative integer if k1 should come earlier in the sort order than k2, 0 if they are equal, otherwise a positive integer. Select the order of computing parts such that it is parts from running simultaneously.
The Computer Language Benchmarks Game contributed by (ns knucleotide (:gen-class)) (set! *warn-on-reflection* true) (definterface ITallyCounter (^int get_count []) (inc_BANG_ [])) (deftype TallyCounter [^{:unsynchronized-mutable true :tag int} cnt] ITallyCounter (get-count [this] cnt) (inc! [this] (set! cnt (unchecked-inc cnt)))) (defn my-lazy-map [f coll] (lazy-seq (when-let [s (seq coll)] (cons (f (first s)) (my-lazy-map f (rest s)))))) modified - pmap is like pmap from Clojure 1.1 , but with only as much (defn modified-pmap ([num-threads f coll] (if (== num-threads 1) (map f coll) (let [n (if (>= num-threads 2) (dec num-threads) 1) rets (my-lazy-map #(future (f %)) coll) step (fn step [[x & xs :as vs] fs] (lazy-seq (if-let [s (seq fs)] (cons (deref x) (step xs (rest s))) (map deref vs))))] (step rets (drop n rets))))) ([num-threads f coll & colls] (let [step (fn step [cs] (lazy-seq (let [ss (my-lazy-map seq cs)] (when (every? identity ss) (cons (my-lazy-map first ss) (step (my-lazy-map rest ss)))))))] (modified-pmap num-threads #(apply f %) (step (cons coll colls)))))) (defn fasta-description-line [l] (= \> (first (seq l)))) (defn fasta-description-line-beginning [desc-str l] (and (fasta-description-line l) (= desc-str (subs l 1 (min (count l) (inc (count desc-str))))))) Take a sequence of lines from a FASTA format file , and a string (defn fasta-dna-str-with-desc-beginning [desc-str lines] (when-let [x (drop-while (fn [l] (not (fasta-description-line-beginning desc-str l))) lines)] (when-let [x (seq x)] (let [y (take-while (fn [l] (not (fasta-description-line l))) (map (fn [#^java.lang.String s] (.toUpperCase s)) (rest x)))] (apply str y))))) (def dna-char-to-code-val {\A 0, \C 1, \T 2, \G 3}) (def code-val-to-dna-char {0 \A, 1 \C, 2 \T, 3 \G}) straightforward to use a Clojure string ( same as a Java string ) as operations , when the value being shifted is larger than a 32 - bit (defn dna-str-to-key [s] (let [dna-char-to-code-val dna-char-to-code-val] (loop [key 0 offset (int (dec (count s)))] (if (neg? offset) key (let [c (nth s offset) code (int (dna-char-to-code-val c)) new-key (+ (bit-shift-left key 2) code)] (recur new-key (dec offset))))))) (defn key-to-dna-str [k len] (apply str (map code-val-to-dna-char (map (fn [pos] (bit-and 3 (bit-shift-right k pos))) (range 0 (* 2 len) 2))))) 1.3.0 - alpha1 ability to use type hints to infer fast bit (defmacro key-type [num] (if (and (== (*clojure-version* :major) 1) (== (*clojure-version* :minor) 2)) num `(long ~num))) (defmacro my-int [num] (if (and (== (*clojure-version* :major) 1) (== (*clojure-version* :minor) 2)) num `(int ~num))) (defn tally-dna-subs-with-len [len dna-str] (let [mask-width (key-type (* 2 len)) mask (key-type (dec (bit-shift-left 1 mask-width))) dna-char-to-code-val dna-char-to-code-val] (loop [offset (int (- (count dna-str) len)) key (key-type (dna-str-to-key (subs dna-str offset (+ offset len)))) tally (let [h (java.util.HashMap.) one (TallyCounter. (int 1))] (.put h key one) h)] (if (zero? offset) tally (let [new-offset (dec offset) new-first-char-code (my-int (dna-char-to-code-val (nth dna-str new-offset))) new-key (key-type (bit-and mask (+ (bit-shift-left key 2) new-first-char-code)))] (if-let [^TallyCounter cur-count (get tally new-key)] (.inc! cur-count) (let [one (TallyCounter. 
(int 1))] (.put tally new-key one))) (recur new-offset new-key tally)))))) (defn getcnt [^TallyCounter tc] (.get-count tc)) (defn all-tally-to-str [tally fn-key-to-str] (with-out-str (let [total (reduce + (map getcnt (vals tally))) cmp-keys (fn [k1 k2] (let [cnt1 (int (getcnt (get tally k1))) cnt2 (int (getcnt (get tally k2)))] (if (not= cnt1 cnt2) (- cnt2 cnt1) (let [^String s1 (fn-key-to-str k1) ^String s2 (fn-key-to-str k2)] (.compareTo s1 s2)))))] (doseq [k (sort cmp-keys (keys tally))] (printf "%s %.3f\n" (fn-key-to-str k) (double (* 100 (/ (getcnt (get tally k)) total)))))))) (defn one-tally-to-str [dna-str tally] (let [zerotc (TallyCounter. 0)] (format "%d\t%s" (getcnt (get tally (dna-str-to-key dna-str) zerotc)) dna-str))) (defn compute-one-part [dna-str part] [part (condp = part 0 (all-tally-to-str (tally-dna-subs-with-len 1 dna-str) (fn [k] (key-to-dna-str k 1))) 1 (all-tally-to-str (tally-dna-subs-with-len 2 dna-str) (fn [k] (key-to-dna-str k 2))) 2 (one-tally-to-str "GGT" (tally-dna-subs-with-len 3 dna-str)) 3 (one-tally-to-str "GGTA" (tally-dna-subs-with-len 4 dna-str)) 4 (one-tally-to-str "GGTATT" (tally-dna-subs-with-len 6 dna-str)) 5 (one-tally-to-str "GGTATTTTAATT" (tally-dna-subs-with-len 12 dna-str)) 6 (one-tally-to-str "GGTATTTTAATTTATAGT" (tally-dna-subs-with-len 18 dna-str)))]) (def *default-modified-pmap-num-threads* (+ 2 (.. Runtime getRuntime availableProcessors))) (defn -main [& args] (def num-threads (if (and (>= (count args) 1) (re-matches #"^\d+$" (nth args 0))) (let [n (. Integer valueOf (nth args 0) 10)] (if (== n 0) *default-modified-pmap-num-threads* n)) *default-modified-pmap-num-threads*)) (with-open [br (java.io.BufferedReader. *in*)] (let [dna-str (fasta-dna-str-with-desc-beginning "THREE" (line-seq br)) unlikely that parts 5 and 6 will be computed concurrently . Those are the two that take the most memory . It would be nice if we could specify a DAG for which jobs should finish before others begin -- then we could prevent those two results (map second (sort #(< (first %1) (first %2)) (modified-pmap num-threads #(compute-one-part dna-str %) '(0 5 6 1 2 3 4) )))] (doseq [r results] (println r) (flush)))) (shutdown-agents))
62b29b071520b7eb008ca9b0915c0079ac42e9193e9d0c2346434bb9187a04f2
BoeingX/haskell-programming-from-first-principles
FixingDividedBy.hs
module Recursion.ChapterExercises.FixingDividedBy where data DividedResult = Result Integer | DividedByZero deriving (Eq, Show) div' :: Integer -> Integer -> DividedResult div' _ 0 = DividedByZero div' 0 _ = Result 0 div' x y | x < 0 && y < 0 = div' (-x) (-y) div' x y | x < 0 = let Result z = div' (x + y) y in Result $ -1 + z | y < 0 = let Result z = div' (x + y) y in Result $ -1 + z div' x y | x < y = Result 0 | otherwise = let Result z = div' (x - y) y in Result $ 1 + z
null
https://raw.githubusercontent.com/BoeingX/haskell-programming-from-first-principles/ffb637f536597f552a4e4567fee848ed27f3ba74/src/Recursion/ChapterExercises/FixingDividedBy.hs
haskell
module Recursion.ChapterExercises.FixingDividedBy where data DividedResult = Result Integer | DividedByZero deriving (Eq, Show) div' :: Integer -> Integer -> DividedResult div' _ 0 = DividedByZero div' 0 _ = Result 0 div' x y | x < 0 && y < 0 = div' (-x) (-y) div' x y | x < 0 = let Result z = div' (x + y) y in Result $ -1 + z | y < 0 = let Result z = div' (x + y) y in Result $ -1 + z div' x y | x < y = Result 0 | otherwise = let Result z = div' (x - y) y in Result $ 1 + z
bef9605e3cbb4d6eef470e444d06f68aaab2d68dc785896544f1de81a867c855
OlivierSohn/hamazed
Show.hs
# LANGUAGE NoImplicitPrelude # # LANGUAGE DeriveGeneric # # LANGUAGE GeneralizedNewtypeDeriving # {-# LANGUAGE OverloadedStrings #-} # LANGUAGE FlexibleContexts # # LANGUAGE UndecidableInstances # module Imj.Game.Show ( showPlayerName , getPlayerUIName' , getPlayerUIName'' , welcome ) where import Imj.Prelude import qualified Data.Map.Strict as Map import Data.Map.Strict(Map) import Data.String(IsString) import Data.Text(unpack, pack) import Imj.ClientView.Types import Imj.Game.Class import Imj.Game.Modify import Imj.Graphics.Color import Imj.Graphics.Font import Imj.Graphics.Text.ColorString(ColorString) import qualified Imj.Graphics.Text.ColorString as ColorString(colored, intercalate) import Imj.Graphics.Text.ColoredGlyphList(ColoredGlyphList) import qualified Imj.Graphics.Text.ColoredGlyphList as ColoredGlyphList(colored) import Imj.Graphics.UI.Chat import Imj.Network showPlayerName :: MonadState (AppState g) m => ClientId -> m ColorString TODO unify with getPlayerUIName (ColorString.colored (pack $ show x) white) (\(Player (ClientName name) _ (PlayerColors c _)) -> ColorString.colored name c) <$> getPlayer x getPlayerUIName' :: Maybe (Player g) -> ColorString getPlayerUIName' = getPlayerUIName (ColorString.colored . unClientName) getPlayerUIName'' :: Maybe (Player g) -> ColoredGlyphList getPlayerUIName'' = getPlayerUIName (ColoredGlyphList.colored . map textGlyph . unpack . unClientName) getPlayerUIName :: (IsString a, Semigroup a) => (ClientName Approved -> Color8 Foreground -> a) -> Maybe (Player g) -> a ' Nothing ' happens when 2 players disconnect while playing : the first one to reconnect will not know about the name of the other disconnected player , until the other player reconnects ( TODO is it still the case ? ) . getPlayerUIName _ Nothing = "? (away)" getPlayerUIName f (Just (Player name status (PlayerColors c _))) = case status of Present -> n Absent -> n <> f " (away)" chatMsgColor where n = f name c welcome :: Map ClientId (Player g) -> ColorString welcome l = text "Welcome! Players are: " <> ColorString.intercalate (text ", ") (map (getPlayerUIName' . Just) $ Map.elems l) where text x = ColorString.colored x chatMsgColor
null
https://raw.githubusercontent.com/OlivierSohn/hamazed/6c2b20d839ede7b8651fb7b425cb27ea93808a4a/imj-game/src/Imj/Game/Show.hs
haskell
# LANGUAGE OverloadedStrings #
# LANGUAGE NoImplicitPrelude # # LANGUAGE DeriveGeneric # # LANGUAGE GeneralizedNewtypeDeriving # # LANGUAGE FlexibleContexts # # LANGUAGE UndecidableInstances # module Imj.Game.Show ( showPlayerName , getPlayerUIName' , getPlayerUIName'' , welcome ) where import Imj.Prelude import qualified Data.Map.Strict as Map import Data.Map.Strict(Map) import Data.String(IsString) import Data.Text(unpack, pack) import Imj.ClientView.Types import Imj.Game.Class import Imj.Game.Modify import Imj.Graphics.Color import Imj.Graphics.Font import Imj.Graphics.Text.ColorString(ColorString) import qualified Imj.Graphics.Text.ColorString as ColorString(colored, intercalate) import Imj.Graphics.Text.ColoredGlyphList(ColoredGlyphList) import qualified Imj.Graphics.Text.ColoredGlyphList as ColoredGlyphList(colored) import Imj.Graphics.UI.Chat import Imj.Network showPlayerName :: MonadState (AppState g) m => ClientId -> m ColorString TODO unify with getPlayerUIName (ColorString.colored (pack $ show x) white) (\(Player (ClientName name) _ (PlayerColors c _)) -> ColorString.colored name c) <$> getPlayer x getPlayerUIName' :: Maybe (Player g) -> ColorString getPlayerUIName' = getPlayerUIName (ColorString.colored . unClientName) getPlayerUIName'' :: Maybe (Player g) -> ColoredGlyphList getPlayerUIName'' = getPlayerUIName (ColoredGlyphList.colored . map textGlyph . unpack . unClientName) getPlayerUIName :: (IsString a, Semigroup a) => (ClientName Approved -> Color8 Foreground -> a) -> Maybe (Player g) -> a ' Nothing ' happens when 2 players disconnect while playing : the first one to reconnect will not know about the name of the other disconnected player , until the other player reconnects ( TODO is it still the case ? ) . getPlayerUIName _ Nothing = "? (away)" getPlayerUIName f (Just (Player name status (PlayerColors c _))) = case status of Present -> n Absent -> n <> f " (away)" chatMsgColor where n = f name c welcome :: Map ClientId (Player g) -> ColorString welcome l = text "Welcome! Players are: " <> ColorString.intercalate (text ", ") (map (getPlayerUIName' . Just) $ Map.elems l) where text x = ColorString.colored x chatMsgColor
5d9f1f728b2910780f0934113a4961de8cd88903f2cc03042d28bd7c64254f57
testedminds/edgewise
reader.clj
(in-ns 'edgewise.tgf) (import '[java.io BufferedReader StringReader]) (require '[edgewise.core :refer :all]) (defn- line->vertex [g line] (let [[x label] (rest (re-matches #"(\d+) (.+)$" line))] (add-vertex g label {:_id (Integer. x)}))) (defn- line->edge [g line] (let [[x y lbl] (rest (re-matches #"(\d+) (\d+)( .+)?$" line)) label (if lbl (clojure.string/trim lbl) "")] (add-edge g (Integer. x) (Integer. y) {:label label}))) (defn- next-line [[line & rest] {:keys [g line-fn] :as config}] #(cond (nil? line) g (= line "#") (next-line rest (assoc config :line-fn line->edge)) :else (next-line rest (assoc config :g (line-fn g line))))) Parsing TGF amounts to a simple FSM with two states , : vertex and : edge . ;; Mutually recursive functions work well for this. (defn- parse-tgf ([reader] (parse-tgf reader (empty-graph))) ([reader g] (with-open [rdr reader] (trampoline next-line (line-seq rdr) {:g g :line-fn line->vertex})))) (defmulti read-tgf "Parses TGF strings of the form: 1 Mike Ditka\n2 DA BEARS\n3 Chicago\n#\n1 2 coaches\n1 3 lives in" type) (defmethod read-tgf java.io.File [f] (parse-tgf (clojure.java.io/reader f))) (defmethod read-tgf java.lang.String [str] (parse-tgf (BufferedReader. (StringReader. str))))
null
https://raw.githubusercontent.com/testedminds/edgewise/0fb64c718e6a8e70eda87b77677a6679770569d1/src/edgewise/tgf/reader.clj
clojure
Mutually recursive functions work well for this.
(in-ns 'edgewise.tgf) (import '[java.io BufferedReader StringReader]) (require '[edgewise.core :refer :all]) (defn- line->vertex [g line] (let [[x label] (rest (re-matches #"(\d+) (.+)$" line))] (add-vertex g label {:_id (Integer. x)}))) (defn- line->edge [g line] (let [[x y lbl] (rest (re-matches #"(\d+) (\d+)( .+)?$" line)) label (if lbl (clojure.string/trim lbl) "")] (add-edge g (Integer. x) (Integer. y) {:label label}))) (defn- next-line [[line & rest] {:keys [g line-fn] :as config}] #(cond (nil? line) g (= line "#") (next-line rest (assoc config :line-fn line->edge)) :else (next-line rest (assoc config :g (line-fn g line))))) Parsing TGF amounts to a simple FSM with two states , : vertex and : edge . (defn- parse-tgf ([reader] (parse-tgf reader (empty-graph))) ([reader g] (with-open [rdr reader] (trampoline next-line (line-seq rdr) {:g g :line-fn line->vertex})))) (defmulti read-tgf "Parses TGF strings of the form: 1 Mike Ditka\n2 DA BEARS\n3 Chicago\n#\n1 2 coaches\n1 3 lives in" type) (defmethod read-tgf java.io.File [f] (parse-tgf (clojure.java.io/reader f))) (defmethod read-tgf java.lang.String [str] (parse-tgf (BufferedReader. (StringReader. str))))
bb947f7ba159c0cbcfb433fb6bd063860f3f7147467eb5ed7db0b3400d258684
Gbury/dolmen
parse.ml
(* This file is free software, part of dolmen. See file "LICENSE" for more information *) (** Interface for Dolmen parsers. *) module type S = sig (** {2 Main interface} *) type token (** The type of token consumed by the parser. *) type statement (** The type of top-level declarations returned by the parser. *) exception Error of int (** Exception raised by the parser when it encounters an error. *) val file : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> statement list (** A function that parses an entire file, i.e until the end-of-file token, and return the list of parsed statements. *) val input : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> statement option (** A function to parse a single statement. Returns [None] if it encounters the end-of-file token. Used for incremental parsing. *) end
null
https://raw.githubusercontent.com/Gbury/dolmen/58903349a3c422402f25ec479ce90d944f44f148/src/interface/parse.ml
ocaml
This file is free software, part of dolmen. See file "LICENSE" for more information * Interface for Dolmen parsers. * The type of token consumed by the parser. * The type of top-level declarations returned by the parser. * Exception raised by the parser when it encounters an error. * A function that parses an entire file, i.e until the end-of-file token, and return the list of parsed statements. * A function to parse a single statement. Returns [None] if it encounters the end-of-file token. Used for incremental parsing.
module type S = sig type token type statement exception Error of int val file : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> statement list val input : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> statement option end
8fef6ca8a329ab26d49b2dc154da16474f8bc060c56fd61b4fa3e4794bfa580d
jordwalke/rehp
commonArg.ml
Js_of_ocaml compiler * / * Copyright ( C ) 2014 Hugo Heuzard * * This program is free software ; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation , with linking exception ; * either version 2.1 of the License , or ( at your option ) any later version . * * This program is distributed in the hope that it will be useful , * but WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the * GNU Lesser General Public License for more details . * * You should have received a copy of the GNU Lesser General Public License * along with this program ; if not , write to the Free Software * Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA . * / * Copyright (C) 2014 Hugo Heuzard * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, with linking exception; * either version 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) open Js_of_ocaml_compiler open Js_of_ocaml_compiler.Stdlib open Cmdliner type 'a on_off = { enable : 'a ; disable : 'a } type t = { debug : string list on_off ; optim : string list on_off ; quiet : bool ; implicit_ext : string option ; custom_header : string option ; hide_compilation_summary : bool ; async_compilation_summary : bool ; use_hashing : bool } let debug = let doc = "enable debug [$(docv)]." in let all = List.map (Debug.available ()) ~f:(fun s -> s, s) in let arg = Arg.(value & opt_all (list (enum all)) [] & info ["debug"] ~docv:"SECTION" ~doc) in Term.(pure List.flatten $ arg) let enable = let doc = "Enable optimization [$(docv)]." in let all = List.map (Config.Flag.available ()) ~f:(fun s -> s, s) in let arg = Arg.(value & opt_all (list (enum all)) [] & info ["enable"] ~docv:"OPT" ~doc) in Term.(pure List.flatten $ arg) let disable = let doc = "Disable optimization [$(docv)]." in let all = List.map (Config.Flag.available ()) ~f:(fun s -> s, s) in let arg = Arg.(value & opt_all (list (enum all)) [] & info ["disable"] ~docv:"OPT" ~doc) in Term.(pure List.flatten $ arg) let pretty = let doc = "Pretty print the output." in Arg.(value & flag & info ["pretty"] ~doc) let prettiestJs = let doc = "Whether or not the pretty printing of Js should be extra pretty." in Arg.(value & flag & info ["prettiest-js"] ~doc) let debuginfo = let doc = "Output debug information." in Arg.(value & flag & info ["debuginfo"; "debug-info"] ~doc) let noinline = let doc = "Disable inlining." in Arg.(value & flag & info ["noinline"; "no-inline"] ~doc) let is_quiet = let doc = "suppress non-error messages." in Arg.(value & flag & info ["quiet"; "q"] ~doc) let custom_header = let doc = "Provide a custom header for the generated compiler output, useful for making the \ script an executable file with #!/usr/bin/env node or integrating with module \ loaders. 
Certain strings will be replaced with the names of relevant compilation \ units.____CompilationUnitName____compilationUnitName \ /*____CompilationOutput*/____ForEachDependencyCompilationUnitName____forEachDependencyCompilationUnitNameThe \ comment /*____hashes*/ will be replaced with a commentlike /*____hashes flags:y \ bytecode:hash2 debug-data:hash3 primitives:hash4*/This is used to speed up \ incremental recompilation times for .cmasand requires that you supply the \ --keep-unit-names flag.The comment /*____CompilationSummary*/ will be replaced \ withbackend specific named 'exports'" in Arg.(value & opt (some string) None & info ["custom-header"] ~doc) let hide_compilation_summary = let doc = "Enables hiding of /*____CompilationSummary*/ replacement inthe custom header (if \ /*____CompilationSummary*/ is present in the first place)." in Arg.(value & flag & info ["hide-compilation-summary"] ~doc) let async_compilation_summary = let doc = "Enables backend specific async transforms of /*____CompilationSummary*/ in the custom header (if \ /*____CompilationSummary*/ is present in the first place)." in Arg.(value & flag & info ["async-compilation-summary"] ~doc) let use_hashing = let doc = "If enabled, then avoids rebuilds via hashing of inputs." in Arg.(value & flag & info ["use-hashing"; "u"] ~doc) let implicit_ext = let doc = "File extension to use if it isn't explicitly specified and would otherwise be \ inferred (such as when compiling .cma libraries) into multiple files." in Arg.(value & opt (some string) None & info ["implicit-ext"; "e"] ~doc) let t = Term.( pure (fun debug enable disable pretty prettiestJs debuginfo noinline quiet implicit_ext use_hashing c_header hide_compilation_summary async_compilation_summary -> let enable = if pretty then "pretty" :: enable else enable in let enable = if prettiestJs then "prettiest-js" :: enable else enable in let enable = if debuginfo then "debuginfo" :: enable else enable in let disable = if noinline then "inline" :: disable else disable in let disable_if_pretty name disable = if pretty && not (List.mem name ~set:enable) then name :: disable else disable in let disable = disable_if_pretty "shortvar" disable in let disable = disable_if_pretty "share" disable in { debug = {enable = debug; disable = []} ; optim = {enable; disable} ; quiet ; implicit_ext ; custom_header = c_header ; hide_compilation_summary ; async_compilation_summary ; use_hashing }) $ debug $ enable $ disable $ pretty $ prettiestJs $ debuginfo $ noinline $ is_quiet $ implicit_ext $ use_hashing $ custom_header $ hide_compilation_summary $ async_compilation_summary) let on_off on off t = List.iter ~f:on t.enable; List.iter ~f:off t.disable let eval t = Config.Flag.(on_off enable disable t.optim); Debug.(on_off enable disable t.debug); quiet := t.quiet
null
https://raw.githubusercontent.com/jordwalke/rehp/f122b94f0a3f06410ddba59e3c9c603b33aadabf/compiler/commonArg.ml
ocaml
Js_of_ocaml compiler * / * Copyright ( C ) 2014 Hugo Heuzard * * This program is free software ; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation , with linking exception ; * either version 2.1 of the License , or ( at your option ) any later version . * * This program is distributed in the hope that it will be useful , * but WITHOUT ANY WARRANTY ; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the * GNU Lesser General Public License for more details . * * You should have received a copy of the GNU Lesser General Public License * along with this program ; if not , write to the Free Software * Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA . * / * Copyright (C) 2014 Hugo Heuzard * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, with linking exception; * either version 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) open Js_of_ocaml_compiler open Js_of_ocaml_compiler.Stdlib open Cmdliner type 'a on_off = { enable : 'a ; disable : 'a } type t = { debug : string list on_off ; optim : string list on_off ; quiet : bool ; implicit_ext : string option ; custom_header : string option ; hide_compilation_summary : bool ; async_compilation_summary : bool ; use_hashing : bool } let debug = let doc = "enable debug [$(docv)]." in let all = List.map (Debug.available ()) ~f:(fun s -> s, s) in let arg = Arg.(value & opt_all (list (enum all)) [] & info ["debug"] ~docv:"SECTION" ~doc) in Term.(pure List.flatten $ arg) let enable = let doc = "Enable optimization [$(docv)]." in let all = List.map (Config.Flag.available ()) ~f:(fun s -> s, s) in let arg = Arg.(value & opt_all (list (enum all)) [] & info ["enable"] ~docv:"OPT" ~doc) in Term.(pure List.flatten $ arg) let disable = let doc = "Disable optimization [$(docv)]." in let all = List.map (Config.Flag.available ()) ~f:(fun s -> s, s) in let arg = Arg.(value & opt_all (list (enum all)) [] & info ["disable"] ~docv:"OPT" ~doc) in Term.(pure List.flatten $ arg) let pretty = let doc = "Pretty print the output." in Arg.(value & flag & info ["pretty"] ~doc) let prettiestJs = let doc = "Whether or not the pretty printing of Js should be extra pretty." in Arg.(value & flag & info ["prettiest-js"] ~doc) let debuginfo = let doc = "Output debug information." in Arg.(value & flag & info ["debuginfo"; "debug-info"] ~doc) let noinline = let doc = "Disable inlining." in Arg.(value & flag & info ["noinline"; "no-inline"] ~doc) let is_quiet = let doc = "suppress non-error messages." in Arg.(value & flag & info ["quiet"; "q"] ~doc) let custom_header = let doc = "Provide a custom header for the generated compiler output, useful for making the \ script an executable file with #!/usr/bin/env node or integrating with module \ loaders. 
Certain strings will be replaced with the names of relevant compilation \ units.____CompilationUnitName____compilationUnitName \ /*____CompilationOutput*/____ForEachDependencyCompilationUnitName____forEachDependencyCompilationUnitNameThe \ comment /*____hashes*/ will be replaced with a commentlike /*____hashes flags:y \ bytecode:hash2 debug-data:hash3 primitives:hash4*/This is used to speed up \ incremental recompilation times for .cmasand requires that you supply the \ --keep-unit-names flag.The comment /*____CompilationSummary*/ will be replaced \ withbackend specific named 'exports'" in Arg.(value & opt (some string) None & info ["custom-header"] ~doc) let hide_compilation_summary = let doc = "Enables hiding of /*____CompilationSummary*/ replacement inthe custom header (if \ /*____CompilationSummary*/ is present in the first place)." in Arg.(value & flag & info ["hide-compilation-summary"] ~doc) let async_compilation_summary = let doc = "Enables backend specific async transforms of /*____CompilationSummary*/ in the custom header (if \ /*____CompilationSummary*/ is present in the first place)." in Arg.(value & flag & info ["async-compilation-summary"] ~doc) let use_hashing = let doc = "If enabled, then avoids rebuilds via hashing of inputs." in Arg.(value & flag & info ["use-hashing"; "u"] ~doc) let implicit_ext = let doc = "File extension to use if it isn't explicitly specified and would otherwise be \ inferred (such as when compiling .cma libraries) into multiple files." in Arg.(value & opt (some string) None & info ["implicit-ext"; "e"] ~doc) let t = Term.( pure (fun debug enable disable pretty prettiestJs debuginfo noinline quiet implicit_ext use_hashing c_header hide_compilation_summary async_compilation_summary -> let enable = if pretty then "pretty" :: enable else enable in let enable = if prettiestJs then "prettiest-js" :: enable else enable in let enable = if debuginfo then "debuginfo" :: enable else enable in let disable = if noinline then "inline" :: disable else disable in let disable_if_pretty name disable = if pretty && not (List.mem name ~set:enable) then name :: disable else disable in let disable = disable_if_pretty "shortvar" disable in let disable = disable_if_pretty "share" disable in { debug = {enable = debug; disable = []} ; optim = {enable; disable} ; quiet ; implicit_ext ; custom_header = c_header ; hide_compilation_summary ; async_compilation_summary ; use_hashing }) $ debug $ enable $ disable $ pretty $ prettiestJs $ debuginfo $ noinline $ is_quiet $ implicit_ext $ use_hashing $ custom_header $ hide_compilation_summary $ async_compilation_summary) let on_off on off t = List.iter ~f:on t.enable; List.iter ~f:off t.disable let eval t = Config.Flag.(on_off enable disable t.optim); Debug.(on_off enable disable t.debug); quiet := t.quiet
bd6b45f5d63dcf2807056434666c1d9d4f9d92adfb6c1a20fbddf3cfdc6dc3fc
igstan/programming-in-haskell
higher-order-functions.hs
import Data.Char (chr, ord) squaresOfEvens :: Integral a => [a] -> [a] squaresOfEvens = map (^ 2) . filter even all even [ 2,4,6,8 ] -- True all' :: (a -> Bool) -> [a] -> Bool all' f = and . map f any odd [ 2,4,6,8 ] -- False any' :: (a -> Bool) -> [a] -> Bool any' f = or . map f takeWhile' :: (a -> Bool) -> [a] -> [a] takeWhile' _ [] = [] takeWhile' f (x:xs) | f x = x : takeWhile' f xs | otherwise = [] dropWhile' :: (a -> Bool) -> [a] -> [a] dropWhile' _ [] = [] dropWhile' f (x:xs) | f x = dropWhile' f xs | otherwise = x:xs map' :: (a -> b) -> [a] -> [b] map' f = foldr (\ x ys -> f x : ys) [] filter' :: (a -> Bool) -> [a] -> [a] filter' f = foldr (\ x ys -> if f x then x:ys else ys) [] dec2int [ 2,3,4,5 ] 2345 2 * 1000 + 3 * 100 + 4 * 10 + 5 * 1 ( 2 * 100 + 3 * 10 + 4 * 1)*10 + 5 * 1 ( ( 2 * 10 + 3)*10 + 4)*10 + 5 dec2int :: [Int] -> Int dec2int = foldl (\a b -> a * 10 + b) 0 compose :: [(a -> a)] -> (a -> a) compose = foldr (.) id curry' :: ((a, b) -> c) -> (a -> b -> c) curry' f = \a b -> f (a, b) uncurry' :: (a -> b -> c) -> ((a, b) -> c) uncurry' f = \(a, b) -> f a b unfold :: (t -> Bool) -> (t -> a) -> (t -> t) -> t -> [a] unfold p h t x | p x = [] | otherwise = h x : unfold p h t (t x) int2bin :: Int -> [Int] int2bin = unfold (== 0) (`mod` 2) (`div` 2) type Bit = Int bin2int :: [Bit] -> Int -- bin2int bits = sum [w * b | (w, b) <- zip weights bits] where weights = iterate ( * 2 ) 1 -- bin2int bits = sum $ zipWith (*) weights bits where weights = iterate ( * 2 ) 1 bin2int = foldr (\x y -> x + 2 * y) 0 make8 :: [Bit] -> [Bit] make8 bits = take 8 (bits ++ repeat 0) make9 :: [Bit] -> [Bit] make9 bits = bits' ++ [parity] where bits' = make8 bits parity = if even (sum bits') then 0 else 1 chop8 :: [Bit] -> [[Bit]] chop8 [] = [] chop8 bits = first8 : chop8 (drop 9 bits) where parity = take 1 (drop 8 bits) first8 = if parity == [0] && even (sum (take 8 bits)) then take 8 bits else error "Parity check failed" chop8' :: [Bit] -> [[Bit]] chop8' = unfold null (take 8) (drop 8) map'' :: (a -> b) -> [a] -> [b] map'' f = unfold null (f . head) tail iterate' :: (a -> a) -> a -> [a] iterate' f = unfold (const False) id f encode :: String -> [Bit] encode = concat . map (make9 . int2bin . ord) decode :: [Bit] -> String decode = map (chr . bin2int) . chop8 transmit :: String -> String transmit = decode . id . encode
null
https://raw.githubusercontent.com/igstan/programming-in-haskell/6d5df9ff2e0ba7e1699fdc79caa943ef9937d2e5/higher-order-functions.hs
haskell
-- True -- False -- bin2int bits = sum [w * b | (w, b) <- zip weights bits] -- bin2int bits = sum $ zipWith (*) weights bits
import Data.Char (chr, ord) squaresOfEvens :: Integral a => [a] -> [a] squaresOfEvens = map (^ 2) . filter even all even [ 2,4,6,8 ] all' :: (a -> Bool) -> [a] -> Bool all' f = and . map f any odd [ 2,4,6,8 ] any' :: (a -> Bool) -> [a] -> Bool any' f = or . map f takeWhile' :: (a -> Bool) -> [a] -> [a] takeWhile' _ [] = [] takeWhile' f (x:xs) | f x = x : takeWhile' f xs | otherwise = [] dropWhile' :: (a -> Bool) -> [a] -> [a] dropWhile' _ [] = [] dropWhile' f (x:xs) | f x = dropWhile' f xs | otherwise = x:xs map' :: (a -> b) -> [a] -> [b] map' f = foldr (\ x ys -> f x : ys) [] filter' :: (a -> Bool) -> [a] -> [a] filter' f = foldr (\ x ys -> if f x then x:ys else ys) [] dec2int [ 2,3,4,5 ] 2345 2 * 1000 + 3 * 100 + 4 * 10 + 5 * 1 ( 2 * 100 + 3 * 10 + 4 * 1)*10 + 5 * 1 ( ( 2 * 10 + 3)*10 + 4)*10 + 5 dec2int :: [Int] -> Int dec2int = foldl (\a b -> a * 10 + b) 0 compose :: [(a -> a)] -> (a -> a) compose = foldr (.) id curry' :: ((a, b) -> c) -> (a -> b -> c) curry' f = \a b -> f (a, b) uncurry' :: (a -> b -> c) -> ((a, b) -> c) uncurry' f = \(a, b) -> f a b unfold :: (t -> Bool) -> (t -> a) -> (t -> t) -> t -> [a] unfold p h t x | p x = [] | otherwise = h x : unfold p h t (t x) int2bin :: Int -> [Int] int2bin = unfold (== 0) (`mod` 2) (`div` 2) type Bit = Int bin2int :: [Bit] -> Int where weights = iterate ( * 2 ) 1 where weights = iterate ( * 2 ) 1 bin2int = foldr (\x y -> x + 2 * y) 0 make8 :: [Bit] -> [Bit] make8 bits = take 8 (bits ++ repeat 0) make9 :: [Bit] -> [Bit] make9 bits = bits' ++ [parity] where bits' = make8 bits parity = if even (sum bits') then 0 else 1 chop8 :: [Bit] -> [[Bit]] chop8 [] = [] chop8 bits = first8 : chop8 (drop 9 bits) where parity = take 1 (drop 8 bits) first8 = if parity == [0] && even (sum (take 8 bits)) then take 8 bits else error "Parity check failed" chop8' :: [Bit] -> [[Bit]] chop8' = unfold null (take 8) (drop 8) map'' :: (a -> b) -> [a] -> [b] map'' f = unfold null (f . head) tail iterate' :: (a -> a) -> a -> [a] iterate' f = unfold (const False) id f encode :: String -> [Bit] encode = concat . map (make9 . int2bin . ord) decode :: [Bit] -> String decode = map (chr . bin2int) . chop8 transmit :: String -> String transmit = decode . id . encode
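The dec2int comment in higher-order-functions.hs above expands 2345 as ((2*10 + 3)*10 + 4)*10 + 5, which is exactly the accumulation the left fold performs, and the bit-level helpers (int2bin, bin2int, make9) feed the parity-checked transmit pipeline. A brief GHCi-style sketch against those definitions; the numeric inputs are illustrative and are not part of the original file:

```haskell
-- Sketch only: assumes higher-order-functions.hs is loaded in GHCi.
ghci> dec2int [2,3,4,5]       -- foldl: (((0*10+2)*10+3)*10+4)*10+5
2345
ghci> int2bin 101             -- least-significant bit first
[1,0,1,0,0,1,1]
ghci> bin2int (int2bin 101)   -- the unfold and the fold invert each other
101
ghci> make9 (int2bin 101)     -- padded to 8 data bits, plus one parity bit
[1,0,1,0,0,1,1,0,0]
```

Note that chop8 as written only accepts 9-bit groups whose parity bit is 0 and whose data bits already sum to an even number, so transmit round-trips only characters whose 8-bit encoding has an even number of 1 bits (for those, e.g. transmit "fee" evaluates to "fee").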
0f52c87a1a77874f92069ff9dbb95c2b51fb5ff3ccefe7ae467466302f2fe9ba
RunOrg/RunOrg
request.mli
(* © 2013 RunOrg *) open Std (** An error that can occur while parsing a request. *) type error = | HeaderTooLong | BodyTooLong | SyntaxError of string | NotImplemented of string | Timeout (** All the information required to send back a compatible CORS response: - the extracted 'Origin:' header - a map of all headers in the request *) type cors = string * (string, string) Map.t type t = < host : string ; client_ip : IpAddress.t ; path : string list ; verb : [ `GET | `PUT | `POST | `DELETE | `OPTIONS ] ; body : [ `JSON of Json.t | `Raw of string ] option ; headers : (string, string) Map.t ; params : (string, string) Map.t ; token : Token.I.t option ; at : Cqrs.Clock.t option ; as_ : PId.t option ; limit : int option ; offset : int option ; content_type : string option ; accept: [ `JSON | `MSGPACK ] ; origin : string option ; > ;; (** Parses a request. Attempts to determine the CORS context even if other aspects of parsing fail (so that a correct response may be sent to the right place). Does not raise exceptions. *) val parse : Common.config -> Ssl.socket -> ('any, cors option * (t,error) result) Run.t val to_string : t -> string
null
https://raw.githubusercontent.com/RunOrg/RunOrg/b53ee2357f4bcb919ac48577426d632dffc25062/server/httpdLib/request.mli
ocaml
© 2013 RunOrg * An error that can occur while parsing a request. * All the information required to send back a compatible CORS response: - the extracted 'Origin:' header - a map of all headers in the request * Parses a request. Attempts to determine the CORS context even if other aspects of parsing fail (so that a correct response may be sent to the right place). Does not raise exceptions.
open Std type error = | HeaderTooLong | BodyTooLong | SyntaxError of string | NotImplemented of string | Timeout type cors = string * (string, string) Map.t type t = < host : string ; client_ip : IpAddress.t ; path : string list ; verb : [ `GET | `PUT | `POST | `DELETE | `OPTIONS ] ; body : [ `JSON of Json.t | `Raw of string ] option ; headers : (string, string) Map.t ; params : (string, string) Map.t ; token : Token.I.t option ; at : Cqrs.Clock.t option ; as_ : PId.t option ; limit : int option ; offset : int option ; content_type : string option ; accept: [ `JSON | `MSGPACK ] ; origin : string option ; > ;; val parse : Common.config -> Ssl.socket -> ('any, cors option * (t,error) result) Run.t val to_string : t -> string
ab32b25cb3232413d083b54e622619891e63c083839748622981118c78e4c453
skrah/minicaml
Symtable.mli
* Copyright ( c ) 2015 . All rights reserved . * * This file is distributed under the terms of the Q Public License * version 1.0 . * Copyright (c) 2015 Stefan Krah. All rights reserved. * * This file is distributed under the terms of the Q Public License * version 1.0. *) module Make : functor (ModuleState : ModuleState.S) -> sig val trans_main : ParseTree.module_expr * ParseTree.module_expr -> Ast.module_expr * Ast.module_expr end
null
https://raw.githubusercontent.com/skrah/minicaml/e5f5cad7fdbcfc11561f717042fae73fa743823f/Symtable.mli
ocaml
* Copyright ( c ) 2015 . All rights reserved . * * This file is distributed under the terms of the Q Public License * version 1.0 . * Copyright (c) 2015 Stefan Krah. All rights reserved. * * This file is distributed under the terms of the Q Public License * version 1.0. *) module Make : functor (ModuleState : ModuleState.S) -> sig val trans_main : ParseTree.module_expr * ParseTree.module_expr -> Ast.module_expr * Ast.module_expr end
4cb7de8bd0a766a144da352b3adf501afc9ec9b2c650161bc9bacadff6950795
Millak/my-guix
minitube.scm
Copyright © 2022 Efraim < > ;;; ;;; This file is an addendum to GNU Guix. ;;; GNU is free software ; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation ; either version 3 of the License , or ( at ;;; your option) any later version. ;;; ;;; GNU Guix is distributed in the hope that it will be useful, but ;;; WITHOUT ANY WARRANTY; without even the implied warranty of ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ;;; GNU General Public License for more details. ;;; You should have received a copy of the GNU General Public License along with GNU . If not , see < / > . (define-module (dfsg main minitube) #:use-module ((guix licenses) #:prefix license:) #:use-module (guix download) #:use-module (guix packages) #:use-module (guix utils) #:use-module (guix build-system qt) #:use-module (gnu packages qt) #:use-module (gnu packages video)) (define-public minitube (package (name "minitube") (version "3.9.3") (source (origin (method url-fetch) (uri (string-append "" "/releases/download/" version "/minitube-" version ".tar.bz2")) (sha256 (base32 "13349a8ap3cgj7f8a9088w559vsxqqfgnj2s2hzka6326vzp0bhf")))) (build-system qt-build-system) (arguments `(#:tests? #f ; No tests? #:phases (modify-phases %standard-phases (replace 'configure (lambda* (#:key outputs #:allow-other-keys) (invoke "qmake" (string-append "PREFIX=" (assoc-ref outputs "out")) "QMAKE_LRELEASE=lrelease" "QMAKE_LUPDATE=lupdate")))))) (native-inputs (list qttools-5)) (inputs (list mpv qtbase-5 qtdeclarative-5 qtx11extras)) (home-page "") (synopsis "Native YouTube client") (description "Minitube is a native YouTube client. With it you can watch YouTube videos in a new way: you type a keyword, Minitube gives you an endless video stream.") (license license:gpl3+)))
null
https://raw.githubusercontent.com/Millak/my-guix/508bdc01f794275a54130fb49765a1f779e02e76/dfsg/main/minitube.scm
scheme
This file is an addendum to GNU Guix. you can redistribute it and/or modify it either version 3 of the License , or ( at your option) any later version. GNU Guix is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. No tests?
Copyright © 2022 Efraim < > under the terms of the GNU General Public License as published by You should have received a copy of the GNU General Public License along with GNU . If not , see < / > . (define-module (dfsg main minitube) #:use-module ((guix licenses) #:prefix license:) #:use-module (guix download) #:use-module (guix packages) #:use-module (guix utils) #:use-module (guix build-system qt) #:use-module (gnu packages qt) #:use-module (gnu packages video)) (define-public minitube (package (name "minitube") (version "3.9.3") (source (origin (method url-fetch) (uri (string-append "" "/releases/download/" version "/minitube-" version ".tar.bz2")) (sha256 (base32 "13349a8ap3cgj7f8a9088w559vsxqqfgnj2s2hzka6326vzp0bhf")))) (build-system qt-build-system) (arguments #:phases (modify-phases %standard-phases (replace 'configure (lambda* (#:key outputs #:allow-other-keys) (invoke "qmake" (string-append "PREFIX=" (assoc-ref outputs "out")) "QMAKE_LRELEASE=lrelease" "QMAKE_LUPDATE=lupdate")))))) (native-inputs (list qttools-5)) (inputs (list mpv qtbase-5 qtdeclarative-5 qtx11extras)) (home-page "") (synopsis "Native YouTube client") (description "Minitube is a native YouTube client. With it you can watch YouTube videos in a new way: you type a keyword, Minitube gives you an endless video stream.") (license license:gpl3+)))
19db706365ec0511bc5f463ae4885aae758b7536c5efba0dac3c847df6037622
cbaggers/nineveh
docs.lisp
(in-package :nineveh.random) (docs:define-docs (defun rand " -- Arg -- seed :vec2 -- Purpose -- Returns a 'random' float. -- Explaination -- Based on the fact that sin(<huge multiplier>*x) modulates extremely quickly. So quickly that sampling the sin function at every fragment location effectively gives you “random” numbers -- Notes -- This could have issues on some ES/WebGL implementations. Some implementations might not be preconditioning sin to a reasonable 2PI range. This has been shown to cause issues before. At the time of writing Varjo does not support float precision declarations but when it does we can provide a safer implementation of this -- Credit -- Impementation - Unknown but see this possible answer: Notes - -to-the-canonical-one-liner-glsl-rand-for-opengl-es-2-0/"))
null
https://raw.githubusercontent.com/cbaggers/nineveh/0a10a84669cd9d1c584f54b9eab062986a5f1c47/random/docs.lisp
lisp
(in-package :nineveh.random) (docs:define-docs (defun rand " -- Arg -- seed :vec2 -- Purpose -- Returns a 'random' float. -- Explaination -- Based on the fact that sin(<huge multiplier>*x) modulates extremely quickly. So quickly that sampling the sin function at every fragment location effectively gives you “random” numbers -- Notes -- This could have issues on some ES/WebGL implementations. Some implementations might not be preconditioning sin to a reasonable 2PI range. This has been shown to cause issues before. At the time of writing Varjo does not support float precision declarations but when it does we can provide a safer implementation of this -- Credit -- Impementation - Unknown but see this possible answer: Notes - -to-the-canonical-one-liner-glsl-rand-for-opengl-es-2-0/"))
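The rand docstring in the nineveh docs.lisp record above describes the familiar trick of sampling sin at an extremely high frequency so that the fractional part looks random per fragment, and warns that GPUs which do not precondition sin's argument into a sane range can break it. A rough numeric sketch of that idea follows, written in Haskell rather than the GLSL/Varjo code the docstring refers to; the dot-product weights and the 43758.5453 multiplier are the commonly quoted folklore constants for this one-liner, not values taken from the record:

```haskell
-- Sketch of the "fast sine, keep the fractional part" pseudo-random idiom.
-- The constants are the usual folklore values, assumed here for illustration.
fract :: Double -> Double
fract x = x - fromIntegral (floor x :: Integer)

rand2 :: (Double, Double) -> Double
rand2 (x, y) = fract (sin (x * 12.9898 + y * 78.233) * 43758.5453)

-- Nearby seeds give unrelated results, which is the "random" behaviour the
-- docstring relies on, e.g. rand2 (0.5, 0.25) versus rand2 (0.5001, 0.25).
```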
703c59e6c7bd8bf91df590d97083cd6207e906aed5bdc4816d4918a80485e575
Naupio/pitcp
piweb_app.erl
%%%------------------------------------------------------------------- %% @doc piweb public API %% @end %%%------------------------------------------------------------------- -module(piweb_app). -behaviour(application). %% Application callbacks -export([start/2, stop/1]). %%==================================================================== %% API %%==================================================================== start(_StartType, _StartArgs) -> piweb_sup:start_link(). %%-------------------------------------------------------------------- stop(_State) -> ok. %%==================================================================== Internal functions %%====================================================================
null
https://raw.githubusercontent.com/Naupio/pitcp/59a7da3a8980695f8efdb3697b026ab0f42754cb/apps/piweb/src/piweb_app.erl
erlang
------------------------------------------------------------------- @doc piweb public API @end ------------------------------------------------------------------- Application callbacks ==================================================================== API ==================================================================== -------------------------------------------------------------------- ==================================================================== ====================================================================
-module(piweb_app). -behaviour(application). -export([start/2, stop/1]). start(_StartType, _StartArgs) -> piweb_sup:start_link(). stop(_State) -> ok. Internal functions
38355337aeca0d84d57cdeadfd2277a9e5df749b4dfbcd0cfe7ab5067ba85719
brendanhay/gogol
Product.hs
# LANGUAGE DataKinds # # LANGUAGE DeriveGeneric # # LANGUAGE DerivingStrategies # # LANGUAGE DuplicateRecordFields # # LANGUAGE FlexibleInstances # # LANGUAGE GeneralizedNewtypeDeriving # # LANGUAGE LambdaCase # {-# LANGUAGE OverloadedStrings #-} # LANGUAGE PatternSynonyms # # LANGUAGE RecordWildCards # {-# LANGUAGE StrictData #-} # LANGUAGE TypeFamilies # # LANGUAGE TypeOperators # # LANGUAGE NoImplicitPrelude # # OPTIONS_GHC -fno - warn - duplicate - exports # # OPTIONS_GHC -fno - warn - name - shadowing # # OPTIONS_GHC -fno - warn - unused - binds # # OPTIONS_GHC -fno - warn - unused - imports # # OPTIONS_GHC -fno - warn - unused - matches # -- | Module : . . Internal . Product Copyright : ( c ) 2015 - 2022 License : Mozilla Public License , v. 2.0 . Maintainer : < brendan.g.hay+ > -- Stability : auto-generated Portability : non - portable ( GHC extensions ) module Gogol.DoubleClickSearch.Internal.Product ( -- * Availability Availability (..), newAvailability, -- * Conversion Conversion (..), newConversion, * ConversionList ConversionList (..), newConversionList, -- * CustomDimension CustomDimension (..), newCustomDimension, -- * CustomMetric CustomMetric (..), newCustomMetric, -- * Report Report (..), newReport, * Report_FilesItem (..), newReport_FilesItem, * ReportApiColumnSpec (..), newReportApiColumnSpec, -- * ReportRequest ReportRequest (..), newReportRequest, -- * ReportRequest_FiltersItem ReportRequest_FiltersItem (..), newReportRequest_FiltersItem, * ReportRequest_OrderByItem (..), newReportRequest_OrderByItem, * ReportRequest_ReportScope (..), newReportRequest_ReportScope, -- * ReportRequest_TimeRange ReportRequest_TimeRange (..), newReportRequest_TimeRange, * ReportRow (..), newReportRow, -- * SavedColumn SavedColumn (..), newSavedColumn, -- * SavedColumnList SavedColumnList (..), newSavedColumnList, -- * UpdateAvailabilityRequest UpdateAvailabilityRequest (..), newUpdateAvailabilityRequest, -- * UpdateAvailabilityResponse UpdateAvailabilityResponse (..), newUpdateAvailabilityResponse, ) where import Gogol.DoubleClickSearch.Internal.Sum import qualified Gogol.Prelude as Core | A message containing availability data relevant to DoubleClick Search . -- -- /See:/ 'newAvailability' smart constructor. data Availability = Availability { -- | DS advertiser ID. advertiserId :: (Core.Maybe Core.Int64), -- | DS agency ID. agencyId :: (Core.Maybe Core.Int64), -- | The time by which all conversions have been uploaded, in epoch millis UTC. availabilityTimestamp :: (Core.Maybe Core.Int64), | The numeric segmentation identifier ( for example , DoubleClick Search Floodlight activity ID ) . segmentationId :: (Core.Maybe Core.Int64), | The friendly segmentation identifier ( for example , DoubleClick Search Floodlight activity name ) . segmentationName :: (Core.Maybe Core.Text), | The segmentation type that this availability is for ( its default value is ) . segmentationType :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) -- | Creates a value of 'Availability' with the minimum fields required to make a request. newAvailability :: Availability newAvailability = Availability { advertiserId = Core.Nothing, agencyId = Core.Nothing, availabilityTimestamp = Core.Nothing, segmentationId = Core.Nothing, segmentationName = Core.Nothing, segmentationType = Core.Nothing } instance Core.FromJSON Availability where parseJSON = Core.withObject "Availability" ( \o -> Availability Core.<$> ( o Core..:? "advertiserId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? 
"agencyId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "availabilityTimestamp" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "segmentationId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "segmentationName") Core.<*> (o Core..:? "segmentationType") ) instance Core.ToJSON Availability where toJSON Availability {..} = Core.object ( Core.catMaybes [ ("advertiserId" Core..=) Core.. Core.AsText Core.<$> advertiserId, ("agencyId" Core..=) Core.. Core.AsText Core.<$> agencyId, ("availabilityTimestamp" Core..=) Core.. Core.AsText Core.<$> availabilityTimestamp, ("segmentationId" Core..=) Core.. Core.AsText Core.<$> segmentationId, ("segmentationName" Core..=) Core.<$> segmentationName, ("segmentationType" Core..=) Core.<$> segmentationType ] ) | A conversion containing data relevant to DoubleClick Search . -- -- /See:/ 'newConversion' smart constructor. data Conversion = Conversion { -- | DS ad group ID. adGroupId :: (Core.Maybe Core.Int64), -- | DS ad ID. adId :: (Core.Maybe Core.Int64), -- | DS advertiser ID. advertiserId :: (Core.Maybe Core.Int64), -- | DS agency ID. agencyId :: (Core.Maybe Core.Int64), | Available to advertisers only after contacting DoubleClick Search customer support . attributionModel :: (Core.Maybe Core.Text), -- | DS campaign ID. campaignId :: (Core.Maybe Core.Int64), -- | Sales channel for the product. Acceptable values are: - \"@local@\": a physical store - \"@online@\": an online store channel :: (Core.Maybe Core.Text), -- | DS click ID for the conversion. clickId :: (Core.Maybe Core.Text), | For offline conversions , advertisers provide this ID . Advertisers can specify any ID that is meaningful to them . Each conversion in a request must specify a unique ID , and the combination of ID and timestamp must be unique amongst all conversions within the advertiser . For online conversions , DS copies the @dsConversionId@ or @floodlightOrderId@ into this property depending on the advertiser\ 's Floodlight instructions . conversionId :: (Core.Maybe Core.Text), -- | The time at which the conversion was last modified, in epoch millis UTC. conversionModifiedTimestamp :: (Core.Maybe Core.Int64), -- | The time at which the conversion took place, in epoch millis UTC. conversionTimestamp :: (Core.Maybe Core.Text), | Available to advertisers only after contacting DoubleClick Search customer support . countMillis :: (Core.Maybe Core.Int64), -- | DS criterion (keyword) ID. criterionId :: (Core.Maybe Core.Int64), | The currency code for the conversion\ 's revenue . Should be in ISO 4217 alphabetic ( 3 - char ) format . currencyCode :: (Core.Maybe Core.Text), -- | Custom dimensions for the conversion, which can be used to filter data in a report. customDimension :: (Core.Maybe [CustomDimension]), -- | Custom metrics for the conversion. customMetric :: (Core.Maybe [CustomMetric]), -- | The type of device on which the conversion occurred. deviceType :: (Core.Maybe Core.Text), | ID that DoubleClick Search generates for each conversion . dsConversionId :: (Core.Maybe Core.Int64), -- | DS engine account ID. engineAccountId :: (Core.Maybe Core.Int64), -- | The Floodlight order ID provided by the advertiser for the conversion. floodlightOrderId :: (Core.Maybe Core.Text), -- | ID that DS generates and uses to uniquely identify the inventory account that contains the product. inventoryAccountId :: (Core.Maybe Core.Int64), | The country registered for the Merchant Center feed that contains the product . Use an ISO 3166 code to specify a country . 
productCountry :: (Core.Maybe Core.Text), -- | DS product group ID. productGroupId :: (Core.Maybe Core.Int64), -- | The product ID (SKU). productId :: (Core.Maybe Core.Text), | The language registered for the Merchant Center feed that contains the product . Use an ISO 639 code to specify a language . productLanguage :: (Core.Maybe Core.Text), | The quantity of this conversion , in millis . quantityMillis :: (Core.Maybe Core.Int64), | The revenue amount of this @TRANSACTION@ conversion , in micros ( value multiplied by 1000000 , no decimal ) . For example , to specify a revenue value of \"10\ " enter \"10000000\ " ( 10 million ) in your request . revenueMicros :: (Core.Maybe Core.Text), | The numeric segmentation identifier ( for example , DoubleClick Search Floodlight activity ID ) . segmentationId :: (Core.Maybe Core.Int64), | The friendly segmentation identifier ( for example , DoubleClick Search Floodlight activity name ) . segmentationName :: (Core.Maybe Core.Text), | The segmentation type of this conversion ( for example , ) . segmentationType :: (Core.Maybe Core.Text), -- | The state of the conversion, that is, either @ACTIVE@ or @REMOVED@. Note: state DELETED is deprecated. state :: (Core.Maybe Core.Text), -- | The ID of the local store for which the product was advertised. Applicable only when the channel is \"@local@\". storeId :: (Core.Maybe Core.Text), | The type of the conversion , that is , either @ACTION@ or @TRANSACTION@. An @ACTION@ conversion is an action by the user that has no monetarily quantifiable value , while a @TRANSACTION@ conversion is an action that does have a monetarily quantifiable value . Examples are email list signups ( ) versus ecommerce purchases ( @TRANSACTION@ ) . type' :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) -- | Creates a value of 'Conversion' with the minimum fields required to make a request. newConversion :: Conversion newConversion = Conversion { adGroupId = Core.Nothing, adId = Core.Nothing, advertiserId = Core.Nothing, agencyId = Core.Nothing, attributionModel = Core.Nothing, campaignId = Core.Nothing, channel = Core.Nothing, clickId = Core.Nothing, conversionId = Core.Nothing, conversionModifiedTimestamp = Core.Nothing, conversionTimestamp = Core.Nothing, countMillis = Core.Nothing, criterionId = Core.Nothing, currencyCode = Core.Nothing, customDimension = Core.Nothing, customMetric = Core.Nothing, deviceType = Core.Nothing, dsConversionId = Core.Nothing, engineAccountId = Core.Nothing, floodlightOrderId = Core.Nothing, inventoryAccountId = Core.Nothing, productCountry = Core.Nothing, productGroupId = Core.Nothing, productId = Core.Nothing, productLanguage = Core.Nothing, quantityMillis = Core.Nothing, revenueMicros = Core.Nothing, segmentationId = Core.Nothing, segmentationName = Core.Nothing, segmentationType = Core.Nothing, state = Core.Nothing, storeId = Core.Nothing, type' = Core.Nothing } instance Core.FromJSON Conversion where parseJSON = Core.withObject "Conversion" ( \o -> Conversion Core.<$> ( o Core..:? "adGroupId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "adId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "advertiserId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "agencyId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "attributionModel") Core.<*> ( o Core..:? "campaignId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "channel") Core.<*> (o Core..:? "clickId") Core.<*> (o Core..:? "conversionId") Core.<*> ( o Core..:? 
"conversionModifiedTimestamp" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "conversionTimestamp") Core.<*> ( o Core..:? "countMillis" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "criterionId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "currencyCode") Core.<*> (o Core..:? "customDimension") Core.<*> (o Core..:? "customMetric") Core.<*> (o Core..:? "deviceType") Core.<*> ( o Core..:? "dsConversionId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "engineAccountId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "floodlightOrderId") Core.<*> ( o Core..:? "inventoryAccountId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "productCountry") Core.<*> ( o Core..:? "productGroupId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "productId") Core.<*> (o Core..:? "productLanguage") Core.<*> ( o Core..:? "quantityMillis" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "revenueMicros") Core.<*> ( o Core..:? "segmentationId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "segmentationName") Core.<*> (o Core..:? "segmentationType") Core.<*> (o Core..:? "state") Core.<*> (o Core..:? "storeId") Core.<*> (o Core..:? "type") ) instance Core.ToJSON Conversion where toJSON Conversion {..} = Core.object ( Core.catMaybes [ ("adGroupId" Core..=) Core.. Core.AsText Core.<$> adGroupId, ("adId" Core..=) Core.. Core.AsText Core.<$> adId, ("advertiserId" Core..=) Core.. Core.AsText Core.<$> advertiserId, ("agencyId" Core..=) Core.. Core.AsText Core.<$> agencyId, ("attributionModel" Core..=) Core.<$> attributionModel, ("campaignId" Core..=) Core.. Core.AsText Core.<$> campaignId, ("channel" Core..=) Core.<$> channel, ("clickId" Core..=) Core.<$> clickId, ("conversionId" Core..=) Core.<$> conversionId, ("conversionModifiedTimestamp" Core..=) Core.. Core.AsText Core.<$> conversionModifiedTimestamp, ("conversionTimestamp" Core..=) Core.<$> conversionTimestamp, ("countMillis" Core..=) Core.. Core.AsText Core.<$> countMillis, ("criterionId" Core..=) Core.. Core.AsText Core.<$> criterionId, ("currencyCode" Core..=) Core.<$> currencyCode, ("customDimension" Core..=) Core.<$> customDimension, ("customMetric" Core..=) Core.<$> customMetric, ("deviceType" Core..=) Core.<$> deviceType, ("dsConversionId" Core..=) Core.. Core.AsText Core.<$> dsConversionId, ("engineAccountId" Core..=) Core.. Core.AsText Core.<$> engineAccountId, ("floodlightOrderId" Core..=) Core.<$> floodlightOrderId, ("inventoryAccountId" Core..=) Core.. Core.AsText Core.<$> inventoryAccountId, ("productCountry" Core..=) Core.<$> productCountry, ("productGroupId" Core..=) Core.. Core.AsText Core.<$> productGroupId, ("productId" Core..=) Core.<$> productId, ("productLanguage" Core..=) Core.<$> productLanguage, ("quantityMillis" Core..=) Core.. Core.AsText Core.<$> quantityMillis, ("revenueMicros" Core..=) Core.<$> revenueMicros, ("segmentationId" Core..=) Core.. Core.AsText Core.<$> segmentationId, ("segmentationName" Core..=) Core.<$> segmentationName, ("segmentationType" Core..=) Core.<$> segmentationType, ("state" Core..=) Core.<$> state, ("storeId" Core..=) Core.<$> storeId, ("type" Core..=) Core.<$> type' ] ) -- | A list of conversions. -- -- /See:/ 'newConversionList' smart constructor. data ConversionList = ConversionList { -- | The conversions being requested. conversion :: (Core.Maybe [Conversion]), | Identifies this as a ConversionList resource . Value : the fixed string doubleclicksearch#conversionList . 
kind :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' ConversionList ' with the minimum fields required to make a request . newConversionList :: ConversionList newConversionList = ConversionList {conversion = Core.Nothing, kind = Core.Nothing} instance Core.FromJSON ConversionList where parseJSON = Core.withObject "ConversionList" ( \o -> ConversionList Core.<$> (o Core..:? "conversion") Core.<*> (o Core..:? "kind") ) instance Core.ToJSON ConversionList where toJSON ConversionList {..} = Core.object ( Core.catMaybes [ ("conversion" Core..=) Core.<$> conversion, ("kind" Core..=) Core.<$> kind ] ) -- | A message containing the custom dimension. -- -- /See:/ 'newCustomDimension' smart constructor. data CustomDimension = CustomDimension { -- | Custom dimension name. name :: (Core.Maybe Core.Text), -- | Custom dimension value. value :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) -- | Creates a value of 'CustomDimension' with the minimum fields required to make a request. newCustomDimension :: CustomDimension newCustomDimension = CustomDimension {name = Core.Nothing, value = Core.Nothing} instance Core.FromJSON CustomDimension where parseJSON = Core.withObject "CustomDimension" ( \o -> CustomDimension Core.<$> (o Core..:? "name") Core.<*> (o Core..:? "value") ) instance Core.ToJSON CustomDimension where toJSON CustomDimension {..} = Core.object ( Core.catMaybes [ ("name" Core..=) Core.<$> name, ("value" Core..=) Core.<$> value ] ) -- | A message containing the custom metric. -- -- /See:/ 'newCustomMetric' smart constructor. data CustomMetric = CustomMetric { -- | Custom metric name. name :: (Core.Maybe Core.Text), -- | Custom metric numeric value. value :: (Core.Maybe Core.Double) } deriving (Core.Eq, Core.Show, Core.Generic) -- | Creates a value of 'CustomMetric' with the minimum fields required to make a request. newCustomMetric :: CustomMetric newCustomMetric = CustomMetric {name = Core.Nothing, value = Core.Nothing} instance Core.FromJSON CustomMetric where parseJSON = Core.withObject "CustomMetric" ( \o -> CustomMetric Core.<$> (o Core..:? "name") Core.<*> (o Core..:? "value") ) instance Core.ToJSON CustomMetric where toJSON CustomMetric {..} = Core.object ( Core.catMaybes [ ("name" Core..=) Core.<$> name, ("value" Core..=) Core.<$> value ] ) | A DoubleClick Search report . This object contains the report request , some report metadata such as currency code , and the generated report rows or report files . -- /See:/ ' newReport ' smart constructor . data Report = Report { -- | Asynchronous report only. Contains a list of generated report files once the report has successfully completed. files :: (Core.Maybe [Report_FilesItem]), -- | Asynchronous report only. Id of the report. id :: (Core.Maybe Core.Text), -- | Asynchronous report only. True if and only if the report has completed successfully and the report files are ready to be downloaded. isReportReady :: (Core.Maybe Core.Bool), | Identifies this as a Report resource . Value : the fixed string @doubleclicksearch#report@. kind :: (Core.Maybe Core.Text), -- | The request that created the report. Optional fields not specified in the original request are filled with default values. request' :: (Core.Maybe ReportRequest), -- | The number of report rows generated by the report, not including headers. rowCount :: (Core.Maybe Core.Int32), -- | Synchronous report only. Generated report rows. 
rows :: (Core.Maybe [ReportRow]), -- | The currency code of all monetary values produced in the report, including values that are set by users (e.g., keyword bid settings) and metrics (e.g., cost and revenue). The currency code of a report is determined by the @statisticsCurrency@ field of the report request. statisticsCurrencyCode :: (Core.Maybe Core.Text), -- | If all statistics of the report are sourced from the same time zone, this would be it. Otherwise the field is unset. statisticsTimeZone :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) -- | Creates a value of 'Report' with the minimum fields required to make a request. newReport :: Report newReport = Report { files = Core.Nothing, id = Core.Nothing, isReportReady = Core.Nothing, kind = Core.Nothing, request' = Core.Nothing, rowCount = Core.Nothing, rows = Core.Nothing, statisticsCurrencyCode = Core.Nothing, statisticsTimeZone = Core.Nothing } instance Core.FromJSON Report where parseJSON = Core.withObject "Report" ( \o -> Report Core.<$> (o Core..:? "files") Core.<*> (o Core..:? "id") Core.<*> (o Core..:? "isReportReady") Core.<*> (o Core..:? "kind") Core.<*> (o Core..:? "request") Core.<*> (o Core..:? "rowCount") Core.<*> (o Core..:? "rows") Core.<*> (o Core..:? "statisticsCurrencyCode") Core.<*> (o Core..:? "statisticsTimeZone") ) instance Core.ToJSON Report where toJSON Report {..} = Core.object ( Core.catMaybes [ ("files" Core..=) Core.<$> files, ("id" Core..=) Core.<$> id, ("isReportReady" Core..=) Core.<$> isReportReady, ("kind" Core..=) Core.<$> kind, ("request" Core..=) Core.<$> request', ("rowCount" Core..=) Core.<$> rowCount, ("rows" Core..=) Core.<$> rows, ("statisticsCurrencyCode" Core..=) Core.<$> statisticsCurrencyCode, ("statisticsTimeZone" Core..=) Core.<$> statisticsTimeZone ] ) -- -- /See:/ 'newReport_FilesItem' smart constructor. data Report_FilesItem = Report_FilesItem { -- | The size of this report file in bytes. byteCount :: (Core.Maybe Core.Int64), -- | Use this url to download the report file. url :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' ' with the minimum fields required to make a request . newReport_FilesItem :: Report_FilesItem newReport_FilesItem = Report_FilesItem {byteCount = Core.Nothing, url = Core.Nothing} instance Core.FromJSON Report_FilesItem where parseJSON = Core.withObject "Report_FilesItem" ( \o -> Report_FilesItem Core.<$> ( o Core..:? "byteCount" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "url") ) instance Core.ToJSON Report_FilesItem where toJSON Report_FilesItem {..} = Core.object ( Core.catMaybes [ ("byteCount" Core..=) Core.. Core.AsText Core.<$> byteCount, ("url" Core..=) Core.<$> url ] ) | A request object used to create a DoubleClick Search report . -- -- /See:/ 'newReportApiColumnSpec' smart constructor. data ReportApiColumnSpec = ReportApiColumnSpec | Name of a DoubleClick Search column to include in the report . columnName :: (Core.Maybe Core.Text), | Segments a report by a custom dimension . The report must be scoped to an advertiser or lower , and the custom dimension must already be set up in DoubleClick Search . The custom dimension name , which appears in DoubleClick Search , is case sensitive.\\ If used in a conversion report , returns the value of the specified custom dimension for the given conversion , if set . This column does not segment the conversion report . customDimensionName :: (Core.Maybe Core.Text), | Name of a custom metric to include in the report . 
The report must be scoped to an advertiser or lower , and the custom metric must already be set up in DoubleClick Search . The custom metric name , which appears in DoubleClick Search , is case sensitive . customMetricName :: (Core.Maybe Core.Text), -- | Inclusive day in YYYY-MM-DD format. When provided, this overrides the overall time range of the report for this column only. Must be provided together with @startDate@. endDate :: (Core.Maybe Core.Text), -- | Synchronous report only. Set to @true@ to group by this column. Defaults to @false@. groupByColumn :: (Core.Maybe Core.Bool), | Text used to identify this column in the report output ; defaults to @columnName@ or @savedColumnName@ when not specified . This can be used to prevent collisions between DoubleClick Search columns and saved columns with the same name . headerText :: (Core.Maybe Core.Text), -- | The platform that is used to provide data for the custom dimension. Acceptable values are \"floodlight\". platformSource :: (Core.Maybe Core.Text), -- | Returns metrics only for a specific type of product activity. Accepted values are: - \"@sold@\": returns metrics only for products that were sold - \"@advertised@\": returns metrics only for products that were advertised in a Shopping campaign, and that might or might not have been sold productReportPerspective :: (Core.Maybe Core.Text), | Name of a saved column to include in the report . The report must be scoped at advertiser or lower , and this saved column must already be created in the DoubleClick Search UI . savedColumnName :: (Core.Maybe Core.Text), -- | Inclusive date in YYYY-MM-DD format. When provided, this overrides the overall time range of the report for this column only. Must be provided together with @endDate@. startDate :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) -- | Creates a value of 'ReportApiColumnSpec' with the minimum fields required to make a request. newReportApiColumnSpec :: ReportApiColumnSpec newReportApiColumnSpec = ReportApiColumnSpec { columnName = Core.Nothing, customDimensionName = Core.Nothing, customMetricName = Core.Nothing, endDate = Core.Nothing, groupByColumn = Core.Nothing, headerText = Core.Nothing, platformSource = Core.Nothing, productReportPerspective = Core.Nothing, savedColumnName = Core.Nothing, startDate = Core.Nothing } instance Core.FromJSON ReportApiColumnSpec where parseJSON = Core.withObject "ReportApiColumnSpec" ( \o -> ReportApiColumnSpec Core.<$> (o Core..:? "columnName") Core.<*> (o Core..:? "customDimensionName") Core.<*> (o Core..:? "customMetricName") Core.<*> (o Core..:? "endDate") Core.<*> (o Core..:? "groupByColumn") Core.<*> (o Core..:? "headerText") Core.<*> (o Core..:? "platformSource") Core.<*> (o Core..:? "productReportPerspective") Core.<*> (o Core..:? "savedColumnName") Core.<*> (o Core..:? "startDate") ) instance Core.ToJSON ReportApiColumnSpec where toJSON ReportApiColumnSpec {..} = Core.object ( Core.catMaybes [ ("columnName" Core..=) Core.<$> columnName, ("customDimensionName" Core..=) Core.<$> customDimensionName, ("customMetricName" Core..=) Core.<$> customMetricName, ("endDate" Core..=) Core.<$> endDate, ("groupByColumn" Core..=) Core.<$> groupByColumn, ("headerText" Core..=) Core.<$> headerText, ("platformSource" Core..=) Core.<$> platformSource, ("productReportPerspective" Core..=) Core.<$> productReportPerspective, ("savedColumnName" Core..=) Core.<$> savedColumnName, ("startDate" Core..=) Core.<$> startDate ] ) | A request object used to create a DoubleClick Search report . 
-- -- /See:/ 'newReportRequest' smart constructor. data ReportRequest = ReportRequest | The columns to include in the report . This includes both DoubleClick Search columns and saved columns . For DoubleClick Search columns , only the @columnName@ parameter is required . For saved columns only the @savedColumnName@ parameter is required . Both @columnName@ and @savedColumnName@ can not be set in the same stanza.\\ The maximum number of columns per request is 300 . columns :: (Core.Maybe [ReportApiColumnSpec]), | Format that the report should be returned in . Currently @csv@ or @tsv@ is supported . downloadFormat :: (Core.Maybe Core.Text), | A list of filters to be applied to the report.\\ The maximum number of filters per request is 300 . filters :: (Core.Maybe [ReportRequest_FiltersItem]), | Determines if removed entities should be included in the report . Defaults to @false@. Deprecated , please use @includeRemovedEntities@ instead . includeDeletedEntities :: (Core.Maybe Core.Bool), -- | Determines if removed entities should be included in the report. Defaults to @false@. includeRemovedEntities :: (Core.Maybe Core.Bool), -- | Asynchronous report only. The maximum number of rows per report file. A large report is split into many files based on this field. Acceptable values are @1000000@ to @100000000@, inclusive. maxRowsPerFile :: (Core.Maybe Core.Int32), | Synchronous report only . A list of columns and directions defining sorting to be performed on the report rows.\\ The maximum number of orderings per request is 300 . orderBy :: (Core.Maybe [ReportRequest_OrderByItem]), -- | The reportScope is a set of IDs that are used to determine which subset of entities will be returned in the report. The full lineage of IDs from the lowest scoped level desired up through agency is required. reportScope :: (Core.Maybe ReportRequest_ReportScope), -- | Determines the type of rows that are returned in the report. For example, if you specify @reportType: keyword@, each row in the report will contain data about a keyword. See the </search-ads/v2/report-types/ Types of Reports> reference for the columns that are available for each type. reportType :: (Core.Maybe Core.Text), | Synchronous report only . The maximum number of rows to return ; additional rows are dropped . Acceptable values are @0@ to @10000@ , inclusive . Defaults to @10000@. rowCount :: (Core.Maybe Core.Int32), | Synchronous report only . Zero - based index of the first row to return . Acceptable values are @0@ to @50000@ , inclusive . Defaults to @0@. startRow :: (Core.Maybe Core.Int32), | Specifies the currency in which monetary will be returned . Possible values are : @usd@ , @agency@ ( valid if the report is scoped to agency or lower ) , @advertiser@ ( valid if the report is scoped to * advertiser or lower ) , or @account@ ( valid if the report is scoped to engine account or lower ) . statisticsCurrency :: (Core.Maybe Core.Text), -- | If metrics are requested in a report, this argument will be used to restrict the metrics to a specific time range. timeRange :: (Core.Maybe ReportRequest_TimeRange), | If @true@ , the report would only be created if all the requested stat data are sourced from a single timezone . Defaults to @false@. verifySingleTimeZone :: (Core.Maybe Core.Bool) } deriving (Core.Eq, Core.Show, Core.Generic) -- | Creates a value of 'ReportRequest' with the minimum fields required to make a request. 
newReportRequest :: ReportRequest newReportRequest = ReportRequest { columns = Core.Nothing, downloadFormat = Core.Nothing, filters = Core.Nothing, includeDeletedEntities = Core.Nothing, includeRemovedEntities = Core.Nothing, maxRowsPerFile = Core.Nothing, orderBy = Core.Nothing, reportScope = Core.Nothing, reportType = Core.Nothing, rowCount = Core.Nothing, startRow = Core.Nothing, statisticsCurrency = Core.Nothing, timeRange = Core.Nothing, verifySingleTimeZone = Core.Nothing } instance Core.FromJSON ReportRequest where parseJSON = Core.withObject "ReportRequest" ( \o -> ReportRequest Core.<$> (o Core..:? "columns") Core.<*> (o Core..:? "downloadFormat") Core.<*> (o Core..:? "filters") Core.<*> (o Core..:? "includeDeletedEntities") Core.<*> (o Core..:? "includeRemovedEntities") Core.<*> (o Core..:? "maxRowsPerFile") Core.<*> (o Core..:? "orderBy") Core.<*> (o Core..:? "reportScope") Core.<*> (o Core..:? "reportType") Core.<*> (o Core..:? "rowCount") Core.<*> (o Core..:? "startRow") Core.<*> (o Core..:? "statisticsCurrency") Core.<*> (o Core..:? "timeRange") Core.<*> (o Core..:? "verifySingleTimeZone") ) instance Core.ToJSON ReportRequest where toJSON ReportRequest {..} = Core.object ( Core.catMaybes [ ("columns" Core..=) Core.<$> columns, ("downloadFormat" Core..=) Core.<$> downloadFormat, ("filters" Core..=) Core.<$> filters, ("includeDeletedEntities" Core..=) Core.<$> includeDeletedEntities, ("includeRemovedEntities" Core..=) Core.<$> includeRemovedEntities, ("maxRowsPerFile" Core..=) Core.<$> maxRowsPerFile, ("orderBy" Core..=) Core.<$> orderBy, ("reportScope" Core..=) Core.<$> reportScope, ("reportType" Core..=) Core.<$> reportType, ("rowCount" Core..=) Core.<$> rowCount, ("startRow" Core..=) Core.<$> startRow, ("statisticsCurrency" Core..=) Core.<$> statisticsCurrency, ("timeRange" Core..=) Core.<$> timeRange, ("verifySingleTimeZone" Core..=) Core.<$> verifySingleTimeZone ] ) -- -- /See:/ 'newReportRequest_FiltersItem' smart constructor. data ReportRequest_FiltersItem = ReportRequest_FiltersItem | Column to perform the filter on . This can be a DoubleClick Search column or a saved column . column :: (Core.Maybe ReportApiColumnSpec), -- | Operator to use in the filter. See the filter reference for a list of available operators. operator :: (Core.Maybe Core.Text), | A list of values to filter the column value against.\\ The maximum number of filter values per request is 300 . values :: (Core.Maybe [Core.Value]) } deriving (Core.Eq, Core.Show, Core.Generic) -- | Creates a value of 'ReportRequest_FiltersItem' with the minimum fields required to make a request. newReportRequest_FiltersItem :: ReportRequest_FiltersItem newReportRequest_FiltersItem = ReportRequest_FiltersItem { column = Core.Nothing, operator = Core.Nothing, values = Core.Nothing } instance Core.FromJSON ReportRequest_FiltersItem where parseJSON = Core.withObject "ReportRequest_FiltersItem" ( \o -> ReportRequest_FiltersItem Core.<$> (o Core..:? "column") Core.<*> (o Core..:? "operator") Core.<*> (o Core..:? "values") ) instance Core.ToJSON ReportRequest_FiltersItem where toJSON ReportRequest_FiltersItem {..} = Core.object ( Core.catMaybes [ ("column" Core..=) Core.<$> column, ("operator" Core..=) Core.<$> operator, ("values" Core..=) Core.<$> values ] ) -- -- /See:/ 'newReportRequest_OrderByItem' smart constructor. data ReportRequest_OrderByItem = ReportRequest_OrderByItem | Column to perform the sort on . This can be a DoubleClick Search - defined column or a saved column . 
column :: (Core.Maybe ReportApiColumnSpec), | The sort direction , which is either @ascending@ or sortOrder :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) -- | Creates a value of 'ReportRequest_OrderByItem' with the minimum fields required to make a request. newReportRequest_OrderByItem :: ReportRequest_OrderByItem newReportRequest_OrderByItem = ReportRequest_OrderByItem {column = Core.Nothing, sortOrder = Core.Nothing} instance Core.FromJSON ReportRequest_OrderByItem where parseJSON = Core.withObject "ReportRequest_OrderByItem" ( \o -> ReportRequest_OrderByItem Core.<$> (o Core..:? "column") Core.<*> (o Core..:? "sortOrder") ) instance Core.ToJSON ReportRequest_OrderByItem where toJSON ReportRequest_OrderByItem {..} = Core.object ( Core.catMaybes [ ("column" Core..=) Core.<$> column, ("sortOrder" Core..=) Core.<$> sortOrder ] ) -- | The reportScope is a set of IDs that are used to determine which subset of entities will be returned in the report. The full lineage of IDs from the lowest scoped level desired up through agency is required. -- -- /See:/ 'newReportRequest_ReportScope' smart constructor. data ReportRequest_ReportScope = ReportRequest_ReportScope { -- | DS ad group ID. adGroupId :: (Core.Maybe Core.Int64), -- | DS ad ID. adId :: (Core.Maybe Core.Int64), -- | DS advertiser ID. advertiserId :: (Core.Maybe Core.Int64), -- | DS agency ID. agencyId :: (Core.Maybe Core.Int64), -- | DS campaign ID. campaignId :: (Core.Maybe Core.Int64), -- | DS engine account ID. engineAccountId :: (Core.Maybe Core.Int64), -- | DS keyword ID. keywordId :: (Core.Maybe Core.Int64) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' ReportRequest_ReportScope ' with the minimum fields required to make a request . newReportRequest_ReportScope :: ReportRequest_ReportScope newReportRequest_ReportScope = ReportRequest_ReportScope { adGroupId = Core.Nothing, adId = Core.Nothing, advertiserId = Core.Nothing, agencyId = Core.Nothing, campaignId = Core.Nothing, engineAccountId = Core.Nothing, keywordId = Core.Nothing } instance Core.FromJSON ReportRequest_ReportScope where parseJSON = Core.withObject "ReportRequest_ReportScope" ( \o -> ReportRequest_ReportScope Core.<$> ( o Core..:? "adGroupId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "adId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "advertiserId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "agencyId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "campaignId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "engineAccountId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "keywordId" Core.<&> Core.fmap Core.fromAsText ) ) instance Core.ToJSON ReportRequest_ReportScope where toJSON ReportRequest_ReportScope {..} = Core.object ( Core.catMaybes [ ("adGroupId" Core..=) Core.. Core.AsText Core.<$> adGroupId, ("adId" Core..=) Core.. Core.AsText Core.<$> adId, ("advertiserId" Core..=) Core.. Core.AsText Core.<$> advertiserId, ("agencyId" Core..=) Core.. Core.AsText Core.<$> agencyId, ("campaignId" Core..=) Core.. Core.AsText Core.<$> campaignId, ("engineAccountId" Core..=) Core.. Core.AsText Core.<$> engineAccountId, ("keywordId" Core..=) Core.. Core.AsText Core.<$> keywordId ] ) -- | If metrics are requested in a report, this argument will be used to restrict the metrics to a specific time range. -- -- /See:/ 'newReportRequest_TimeRange' smart constructor. 
data ReportRequest_TimeRange = ReportRequest_TimeRange | Inclusive UTC timestamp in RFC format , e.g. , @2013 - 07 - 16T10:16:23.555Z@. See additional references on how changed attribute reports work . changedAttributesSinceTimestamp :: (Core.Maybe Core.Text), | Inclusive UTC timestamp in RFC format , e.g. , @2013 - 07 - 16T10:16:23.555Z@. See additional references on how changed metrics reports work . changedMetricsSinceTimestamp :: (Core.Maybe Core.Text), -- | Inclusive date in YYYY-MM-DD format. endDate :: (Core.Maybe Core.Text), -- | Inclusive date in YYYY-MM-DD format. startDate :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) -- | Creates a value of 'ReportRequest_TimeRange' with the minimum fields required to make a request. newReportRequest_TimeRange :: ReportRequest_TimeRange newReportRequest_TimeRange = ReportRequest_TimeRange { changedAttributesSinceTimestamp = Core.Nothing, changedMetricsSinceTimestamp = Core.Nothing, endDate = Core.Nothing, startDate = Core.Nothing } instance Core.FromJSON ReportRequest_TimeRange where parseJSON = Core.withObject "ReportRequest_TimeRange" ( \o -> ReportRequest_TimeRange Core.<$> (o Core..:? "changedAttributesSinceTimestamp") Core.<*> (o Core..:? "changedMetricsSinceTimestamp") Core.<*> (o Core..:? "endDate") Core.<*> (o Core..:? "startDate") ) instance Core.ToJSON ReportRequest_TimeRange where toJSON ReportRequest_TimeRange {..} = Core.object ( Core.catMaybes [ ("changedAttributesSinceTimestamp" Core..=) Core.<$> changedAttributesSinceTimestamp, ("changedMetricsSinceTimestamp" Core..=) Core.<$> changedMetricsSinceTimestamp, ("endDate" Core..=) Core.<$> endDate, ("startDate" Core..=) Core.<$> startDate ] ) | A row in a DoubleClick Search report . -- -- /See:/ 'newReportRow' smart constructor. newtype ReportRow = ReportRow | Indicates the columns that are represented in this row . That is , each key corresponds to a column with a non - empty cell in this row . additional :: (Core.HashMap Core.Text Core.Value) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' ' with the minimum fields required to make a request . newReportRow :: | Indicates the columns that are represented in this row . That is , each key corresponds to a column with a non - empty cell in this row . See ' additional ' . Core.HashMap Core.Text Core.Value -> ReportRow newReportRow additional = ReportRow {additional = additional} instance Core.FromJSON ReportRow where parseJSON = Core.withObject "ReportRow" (\o -> ReportRow Core.<$> (Core.parseJSONObject o)) instance Core.ToJSON ReportRow where toJSON ReportRow {..} = Core.toJSON additional -- | A saved column -- -- /See:/ 'newSavedColumn' smart constructor. data SavedColumn = SavedColumn | Identifies this as a SavedColumn resource . Value : the fixed string doubleclicksearch#savedColumn . kind :: (Core.Maybe Core.Text), -- | The name of the saved column. savedColumnName :: (Core.Maybe Core.Text), -- | The type of data this saved column will produce. type' :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' SavedColumn ' with the minimum fields required to make a request . newSavedColumn :: SavedColumn newSavedColumn = SavedColumn { kind = Core.Nothing, savedColumnName = Core.Nothing, type' = Core.Nothing } instance Core.FromJSON SavedColumn where parseJSON = Core.withObject "SavedColumn" ( \o -> SavedColumn Core.<$> (o Core..:? "kind") Core.<*> (o Core..:? "savedColumnName") Core.<*> (o Core..:? 
"type") ) instance Core.ToJSON SavedColumn where toJSON SavedColumn {..} = Core.object ( Core.catMaybes [ ("kind" Core..=) Core.<$> kind, ("savedColumnName" Core..=) Core.<$> savedColumnName, ("type" Core..=) Core.<$> type' ] ) | A list of saved columns . Advertisers create saved columns to report on Floodlight activities , Google Analytics goals , or custom KPIs . To request reports with saved columns , you\'ll need the saved column names that are available from this list . -- -- /See:/ 'newSavedColumnList' smart constructor. data SavedColumnList = SavedColumnList { -- | The saved columns being requested. items :: (Core.Maybe [SavedColumn]), | Identifies this as a SavedColumnList resource . Value : the fixed string doubleclicksearch#savedColumnList . kind :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' SavedColumnList ' with the minimum fields required to make a request . newSavedColumnList :: SavedColumnList newSavedColumnList = SavedColumnList {items = Core.Nothing, kind = Core.Nothing} instance Core.FromJSON SavedColumnList where parseJSON = Core.withObject "SavedColumnList" ( \o -> SavedColumnList Core.<$> (o Core..:? "items") Core.<*> (o Core..:? "kind") ) instance Core.ToJSON SavedColumnList where toJSON SavedColumnList {..} = Core.object ( Core.catMaybes [ ("items" Core..=) Core.<$> items, ("kind" Core..=) Core.<$> kind ] ) -- | The request to update availability. -- -- /See:/ 'newUpdateAvailabilityRequest' smart constructor. newtype UpdateAvailabilityRequest = UpdateAvailabilityRequest { -- | The availabilities being requested. availabilities :: (Core.Maybe [Availability]) } deriving (Core.Eq, Core.Show, Core.Generic) -- | Creates a value of 'UpdateAvailabilityRequest' with the minimum fields required to make a request. newUpdateAvailabilityRequest :: UpdateAvailabilityRequest newUpdateAvailabilityRequest = UpdateAvailabilityRequest {availabilities = Core.Nothing} instance Core.FromJSON UpdateAvailabilityRequest where parseJSON = Core.withObject "UpdateAvailabilityRequest" ( \o -> UpdateAvailabilityRequest Core.<$> (o Core..:? "availabilities") ) instance Core.ToJSON UpdateAvailabilityRequest where toJSON UpdateAvailabilityRequest {..} = Core.object ( Core.catMaybes [("availabilities" Core..=) Core.<$> availabilities] ) -- | The response to a update availability request. -- -- /See:/ 'newUpdateAvailabilityResponse' smart constructor. newtype UpdateAvailabilityResponse = UpdateAvailabilityResponse { -- | The availabilities being returned. availabilities :: (Core.Maybe [Availability]) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' UpdateAvailabilityResponse ' with the minimum fields required to make a request . newUpdateAvailabilityResponse :: UpdateAvailabilityResponse newUpdateAvailabilityResponse = UpdateAvailabilityResponse {availabilities = Core.Nothing} instance Core.FromJSON UpdateAvailabilityResponse where parseJSON = Core.withObject "UpdateAvailabilityResponse" ( \o -> UpdateAvailabilityResponse Core.<$> (o Core..:? "availabilities") ) instance Core.ToJSON UpdateAvailabilityResponse where toJSON UpdateAvailabilityResponse {..} = Core.object ( Core.catMaybes [("availabilities" Core..=) Core.<$> availabilities] )
null
https://raw.githubusercontent.com/brendanhay/gogol/8cbceeaaba36a3c08712b2e272606161500fbe91/lib/services/gogol-doubleclick-search/gen/Gogol/DoubleClickSearch/Internal/Product.hs
haskell
# LANGUAGE OverloadedStrings # # LANGUAGE StrictData # | Stability : auto-generated * Availability * Conversion * CustomDimension * CustomMetric * Report * ReportRequest * ReportRequest_FiltersItem * ReportRequest_TimeRange * SavedColumn * SavedColumnList * UpdateAvailabilityRequest * UpdateAvailabilityResponse /See:/ 'newAvailability' smart constructor. | DS advertiser ID. | DS agency ID. | The time by which all conversions have been uploaded, in epoch millis UTC. | Creates a value of 'Availability' with the minimum fields required to make a request. /See:/ 'newConversion' smart constructor. | DS ad group ID. | DS ad ID. | DS advertiser ID. | DS agency ID. | DS campaign ID. | Sales channel for the product. Acceptable values are: - \"@local@\": a physical store - \"@online@\": an online store | DS click ID for the conversion. | The time at which the conversion was last modified, in epoch millis UTC. | The time at which the conversion took place, in epoch millis UTC. | DS criterion (keyword) ID. | Custom dimensions for the conversion, which can be used to filter data in a report. | Custom metrics for the conversion. | The type of device on which the conversion occurred. | DS engine account ID. | The Floodlight order ID provided by the advertiser for the conversion. | ID that DS generates and uses to uniquely identify the inventory account that contains the product. | DS product group ID. | The product ID (SKU). | The state of the conversion, that is, either @ACTIVE@ or @REMOVED@. Note: state DELETED is deprecated. | The ID of the local store for which the product was advertised. Applicable only when the channel is \"@local@\". | Creates a value of 'Conversion' with the minimum fields required to make a request. | A list of conversions. /See:/ 'newConversionList' smart constructor. | The conversions being requested. | A message containing the custom dimension. /See:/ 'newCustomDimension' smart constructor. | Custom dimension name. | Custom dimension value. | Creates a value of 'CustomDimension' with the minimum fields required to make a request. | A message containing the custom metric. /See:/ 'newCustomMetric' smart constructor. | Custom metric name. | Custom metric numeric value. | Creates a value of 'CustomMetric' with the minimum fields required to make a request. | Asynchronous report only. Contains a list of generated report files once the report has successfully completed. | Asynchronous report only. Id of the report. | Asynchronous report only. True if and only if the report has completed successfully and the report files are ready to be downloaded. | The request that created the report. Optional fields not specified in the original request are filled with default values. | The number of report rows generated by the report, not including headers. | Synchronous report only. Generated report rows. | The currency code of all monetary values produced in the report, including values that are set by users (e.g., keyword bid settings) and metrics (e.g., cost and revenue). The currency code of a report is determined by the @statisticsCurrency@ field of the report request. | If all statistics of the report are sourced from the same time zone, this would be it. Otherwise the field is unset. | Creates a value of 'Report' with the minimum fields required to make a request. /See:/ 'newReport_FilesItem' smart constructor. | The size of this report file in bytes. | Use this url to download the report file. /See:/ 'newReportApiColumnSpec' smart constructor. | Inclusive day in YYYY-MM-DD format. 
When provided, this overrides the overall time range of the report for this column only. Must be provided together with @startDate@. | Synchronous report only. Set to @true@ to group by this column. Defaults to @false@. | The platform that is used to provide data for the custom dimension. Acceptable values are \"floodlight\". | Returns metrics only for a specific type of product activity. Accepted values are: - \"@sold@\": returns metrics only for products that were sold - \"@advertised@\": returns metrics only for products that were advertised in a Shopping campaign, and that might or might not have been sold | Inclusive date in YYYY-MM-DD format. When provided, this overrides the overall time range of the report for this column only. Must be provided together with @endDate@. | Creates a value of 'ReportApiColumnSpec' with the minimum fields required to make a request. /See:/ 'newReportRequest' smart constructor. | Determines if removed entities should be included in the report. Defaults to @false@. | Asynchronous report only. The maximum number of rows per report file. A large report is split into many files based on this field. Acceptable values are @1000000@ to @100000000@, inclusive. | The reportScope is a set of IDs that are used to determine which subset of entities will be returned in the report. The full lineage of IDs from the lowest scoped level desired up through agency is required. | Determines the type of rows that are returned in the report. For example, if you specify @reportType: keyword@, each row in the report will contain data about a keyword. See the </search-ads/v2/report-types/ Types of Reports> reference for the columns that are available for each type. | If metrics are requested in a report, this argument will be used to restrict the metrics to a specific time range. | Creates a value of 'ReportRequest' with the minimum fields required to make a request. /See:/ 'newReportRequest_FiltersItem' smart constructor. | Operator to use in the filter. See the filter reference for a list of available operators. | Creates a value of 'ReportRequest_FiltersItem' with the minimum fields required to make a request. /See:/ 'newReportRequest_OrderByItem' smart constructor. | Creates a value of 'ReportRequest_OrderByItem' with the minimum fields required to make a request. | The reportScope is a set of IDs that are used to determine which subset of entities will be returned in the report. The full lineage of IDs from the lowest scoped level desired up through agency is required. /See:/ 'newReportRequest_ReportScope' smart constructor. | DS ad group ID. | DS ad ID. | DS advertiser ID. | DS agency ID. | DS campaign ID. | DS engine account ID. | DS keyword ID. | If metrics are requested in a report, this argument will be used to restrict the metrics to a specific time range. /See:/ 'newReportRequest_TimeRange' smart constructor. | Inclusive date in YYYY-MM-DD format. | Inclusive date in YYYY-MM-DD format. | Creates a value of 'ReportRequest_TimeRange' with the minimum fields required to make a request. /See:/ 'newReportRow' smart constructor. | A saved column /See:/ 'newSavedColumn' smart constructor. | The name of the saved column. | The type of data this saved column will produce. /See:/ 'newSavedColumnList' smart constructor. | The saved columns being requested. | The request to update availability. /See:/ 'newUpdateAvailabilityRequest' smart constructor. | The availabilities being requested. 
| Creates a value of 'UpdateAvailabilityRequest' with the minimum fields required to make a request. | The response to a update availability request. /See:/ 'newUpdateAvailabilityResponse' smart constructor. | The availabilities being returned.
# LANGUAGE DataKinds # # LANGUAGE DeriveGeneric # # LANGUAGE DerivingStrategies # # LANGUAGE DuplicateRecordFields # # LANGUAGE FlexibleInstances # # LANGUAGE GeneralizedNewtypeDeriving # # LANGUAGE LambdaCase # # LANGUAGE PatternSynonyms # # LANGUAGE RecordWildCards # # LANGUAGE TypeFamilies # # LANGUAGE TypeOperators # # LANGUAGE NoImplicitPrelude # # OPTIONS_GHC -fno - warn - duplicate - exports # # OPTIONS_GHC -fno - warn - name - shadowing # # OPTIONS_GHC -fno - warn - unused - binds # # OPTIONS_GHC -fno - warn - unused - imports # # OPTIONS_GHC -fno - warn - unused - matches # Module : . . Internal . Product Copyright : ( c ) 2015 - 2022 License : Mozilla Public License , v. 2.0 . Maintainer : < brendan.g.hay+ > Portability : non - portable ( GHC extensions ) module Gogol.DoubleClickSearch.Internal.Product Availability (..), newAvailability, Conversion (..), newConversion, * ConversionList ConversionList (..), newConversionList, CustomDimension (..), newCustomDimension, CustomMetric (..), newCustomMetric, Report (..), newReport, * Report_FilesItem (..), newReport_FilesItem, * ReportApiColumnSpec (..), newReportApiColumnSpec, ReportRequest (..), newReportRequest, ReportRequest_FiltersItem (..), newReportRequest_FiltersItem, * ReportRequest_OrderByItem (..), newReportRequest_OrderByItem, * ReportRequest_ReportScope (..), newReportRequest_ReportScope, ReportRequest_TimeRange (..), newReportRequest_TimeRange, * ReportRow (..), newReportRow, SavedColumn (..), newSavedColumn, SavedColumnList (..), newSavedColumnList, UpdateAvailabilityRequest (..), newUpdateAvailabilityRequest, UpdateAvailabilityResponse (..), newUpdateAvailabilityResponse, ) where import Gogol.DoubleClickSearch.Internal.Sum import qualified Gogol.Prelude as Core | A message containing availability data relevant to DoubleClick Search . data Availability = Availability advertiserId :: (Core.Maybe Core.Int64), agencyId :: (Core.Maybe Core.Int64), availabilityTimestamp :: (Core.Maybe Core.Int64), | The numeric segmentation identifier ( for example , DoubleClick Search Floodlight activity ID ) . segmentationId :: (Core.Maybe Core.Int64), | The friendly segmentation identifier ( for example , DoubleClick Search Floodlight activity name ) . segmentationName :: (Core.Maybe Core.Text), | The segmentation type that this availability is for ( its default value is ) . segmentationType :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) newAvailability :: Availability newAvailability = Availability { advertiserId = Core.Nothing, agencyId = Core.Nothing, availabilityTimestamp = Core.Nothing, segmentationId = Core.Nothing, segmentationName = Core.Nothing, segmentationType = Core.Nothing } instance Core.FromJSON Availability where parseJSON = Core.withObject "Availability" ( \o -> Availability Core.<$> ( o Core..:? "advertiserId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "agencyId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "availabilityTimestamp" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "segmentationId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "segmentationName") Core.<*> (o Core..:? "segmentationType") ) instance Core.ToJSON Availability where toJSON Availability {..} = Core.object ( Core.catMaybes [ ("advertiserId" Core..=) Core.. Core.AsText Core.<$> advertiserId, ("agencyId" Core..=) Core.. Core.AsText Core.<$> agencyId, ("availabilityTimestamp" Core..=) Core.. Core.AsText Core.<$> availabilityTimestamp, ("segmentationId" Core..=) Core.. 
Core.AsText Core.<$> segmentationId, ("segmentationName" Core..=) Core.<$> segmentationName, ("segmentationType" Core..=) Core.<$> segmentationType ] ) | A conversion containing data relevant to DoubleClick Search . data Conversion = Conversion adGroupId :: (Core.Maybe Core.Int64), adId :: (Core.Maybe Core.Int64), advertiserId :: (Core.Maybe Core.Int64), agencyId :: (Core.Maybe Core.Int64), | Available to advertisers only after contacting DoubleClick Search customer support . attributionModel :: (Core.Maybe Core.Text), campaignId :: (Core.Maybe Core.Int64), channel :: (Core.Maybe Core.Text), clickId :: (Core.Maybe Core.Text), | For offline conversions , advertisers provide this ID . Advertisers can specify any ID that is meaningful to them . Each conversion in a request must specify a unique ID , and the combination of ID and timestamp must be unique amongst all conversions within the advertiser . For online conversions , DS copies the @dsConversionId@ or @floodlightOrderId@ into this property depending on the advertiser\ 's Floodlight instructions . conversionId :: (Core.Maybe Core.Text), conversionModifiedTimestamp :: (Core.Maybe Core.Int64), conversionTimestamp :: (Core.Maybe Core.Text), | Available to advertisers only after contacting DoubleClick Search customer support . countMillis :: (Core.Maybe Core.Int64), criterionId :: (Core.Maybe Core.Int64), | The currency code for the conversion\ 's revenue . Should be in ISO 4217 alphabetic ( 3 - char ) format . currencyCode :: (Core.Maybe Core.Text), customDimension :: (Core.Maybe [CustomDimension]), customMetric :: (Core.Maybe [CustomMetric]), deviceType :: (Core.Maybe Core.Text), | ID that DoubleClick Search generates for each conversion . dsConversionId :: (Core.Maybe Core.Int64), engineAccountId :: (Core.Maybe Core.Int64), floodlightOrderId :: (Core.Maybe Core.Text), inventoryAccountId :: (Core.Maybe Core.Int64), | The country registered for the Merchant Center feed that contains the product . Use an ISO 3166 code to specify a country . productCountry :: (Core.Maybe Core.Text), productGroupId :: (Core.Maybe Core.Int64), productId :: (Core.Maybe Core.Text), | The language registered for the Merchant Center feed that contains the product . Use an ISO 639 code to specify a language . productLanguage :: (Core.Maybe Core.Text), | The quantity of this conversion , in millis . quantityMillis :: (Core.Maybe Core.Int64), | The revenue amount of this @TRANSACTION@ conversion , in micros ( value multiplied by 1000000 , no decimal ) . For example , to specify a revenue value of \"10\ " enter \"10000000\ " ( 10 million ) in your request . revenueMicros :: (Core.Maybe Core.Text), | The numeric segmentation identifier ( for example , DoubleClick Search Floodlight activity ID ) . segmentationId :: (Core.Maybe Core.Int64), | The friendly segmentation identifier ( for example , DoubleClick Search Floodlight activity name ) . segmentationName :: (Core.Maybe Core.Text), | The segmentation type of this conversion ( for example , ) . segmentationType :: (Core.Maybe Core.Text), state :: (Core.Maybe Core.Text), storeId :: (Core.Maybe Core.Text), | The type of the conversion , that is , either @ACTION@ or @TRANSACTION@. An @ACTION@ conversion is an action by the user that has no monetarily quantifiable value , while a @TRANSACTION@ conversion is an action that does have a monetarily quantifiable value . Examples are email list signups ( ) versus ecommerce purchases ( @TRANSACTION@ ) . 
type' :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) newConversion :: Conversion newConversion = Conversion { adGroupId = Core.Nothing, adId = Core.Nothing, advertiserId = Core.Nothing, agencyId = Core.Nothing, attributionModel = Core.Nothing, campaignId = Core.Nothing, channel = Core.Nothing, clickId = Core.Nothing, conversionId = Core.Nothing, conversionModifiedTimestamp = Core.Nothing, conversionTimestamp = Core.Nothing, countMillis = Core.Nothing, criterionId = Core.Nothing, currencyCode = Core.Nothing, customDimension = Core.Nothing, customMetric = Core.Nothing, deviceType = Core.Nothing, dsConversionId = Core.Nothing, engineAccountId = Core.Nothing, floodlightOrderId = Core.Nothing, inventoryAccountId = Core.Nothing, productCountry = Core.Nothing, productGroupId = Core.Nothing, productId = Core.Nothing, productLanguage = Core.Nothing, quantityMillis = Core.Nothing, revenueMicros = Core.Nothing, segmentationId = Core.Nothing, segmentationName = Core.Nothing, segmentationType = Core.Nothing, state = Core.Nothing, storeId = Core.Nothing, type' = Core.Nothing } instance Core.FromJSON Conversion where parseJSON = Core.withObject "Conversion" ( \o -> Conversion Core.<$> ( o Core..:? "adGroupId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "adId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "advertiserId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "agencyId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "attributionModel") Core.<*> ( o Core..:? "campaignId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "channel") Core.<*> (o Core..:? "clickId") Core.<*> (o Core..:? "conversionId") Core.<*> ( o Core..:? "conversionModifiedTimestamp" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "conversionTimestamp") Core.<*> ( o Core..:? "countMillis" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "criterionId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "currencyCode") Core.<*> (o Core..:? "customDimension") Core.<*> (o Core..:? "customMetric") Core.<*> (o Core..:? "deviceType") Core.<*> ( o Core..:? "dsConversionId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "engineAccountId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "floodlightOrderId") Core.<*> ( o Core..:? "inventoryAccountId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "productCountry") Core.<*> ( o Core..:? "productGroupId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "productId") Core.<*> (o Core..:? "productLanguage") Core.<*> ( o Core..:? "quantityMillis" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "revenueMicros") Core.<*> ( o Core..:? "segmentationId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "segmentationName") Core.<*> (o Core..:? "segmentationType") Core.<*> (o Core..:? "state") Core.<*> (o Core..:? "storeId") Core.<*> (o Core..:? "type") ) instance Core.ToJSON Conversion where toJSON Conversion {..} = Core.object ( Core.catMaybes [ ("adGroupId" Core..=) Core.. Core.AsText Core.<$> adGroupId, ("adId" Core..=) Core.. Core.AsText Core.<$> adId, ("advertiserId" Core..=) Core.. Core.AsText Core.<$> advertiserId, ("agencyId" Core..=) Core.. Core.AsText Core.<$> agencyId, ("attributionModel" Core..=) Core.<$> attributionModel, ("campaignId" Core..=) Core.. 
Core.AsText Core.<$> campaignId, ("channel" Core..=) Core.<$> channel, ("clickId" Core..=) Core.<$> clickId, ("conversionId" Core..=) Core.<$> conversionId, ("conversionModifiedTimestamp" Core..=) Core.. Core.AsText Core.<$> conversionModifiedTimestamp, ("conversionTimestamp" Core..=) Core.<$> conversionTimestamp, ("countMillis" Core..=) Core.. Core.AsText Core.<$> countMillis, ("criterionId" Core..=) Core.. Core.AsText Core.<$> criterionId, ("currencyCode" Core..=) Core.<$> currencyCode, ("customDimension" Core..=) Core.<$> customDimension, ("customMetric" Core..=) Core.<$> customMetric, ("deviceType" Core..=) Core.<$> deviceType, ("dsConversionId" Core..=) Core.. Core.AsText Core.<$> dsConversionId, ("engineAccountId" Core..=) Core.. Core.AsText Core.<$> engineAccountId, ("floodlightOrderId" Core..=) Core.<$> floodlightOrderId, ("inventoryAccountId" Core..=) Core.. Core.AsText Core.<$> inventoryAccountId, ("productCountry" Core..=) Core.<$> productCountry, ("productGroupId" Core..=) Core.. Core.AsText Core.<$> productGroupId, ("productId" Core..=) Core.<$> productId, ("productLanguage" Core..=) Core.<$> productLanguage, ("quantityMillis" Core..=) Core.. Core.AsText Core.<$> quantityMillis, ("revenueMicros" Core..=) Core.<$> revenueMicros, ("segmentationId" Core..=) Core.. Core.AsText Core.<$> segmentationId, ("segmentationName" Core..=) Core.<$> segmentationName, ("segmentationType" Core..=) Core.<$> segmentationType, ("state" Core..=) Core.<$> state, ("storeId" Core..=) Core.<$> storeId, ("type" Core..=) Core.<$> type' ] ) data ConversionList = ConversionList conversion :: (Core.Maybe [Conversion]), | Identifies this as a ConversionList resource . Value : the fixed string doubleclicksearch#conversionList . kind :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' ConversionList ' with the minimum fields required to make a request . newConversionList :: ConversionList newConversionList = ConversionList {conversion = Core.Nothing, kind = Core.Nothing} instance Core.FromJSON ConversionList where parseJSON = Core.withObject "ConversionList" ( \o -> ConversionList Core.<$> (o Core..:? "conversion") Core.<*> (o Core..:? "kind") ) instance Core.ToJSON ConversionList where toJSON ConversionList {..} = Core.object ( Core.catMaybes [ ("conversion" Core..=) Core.<$> conversion, ("kind" Core..=) Core.<$> kind ] ) data CustomDimension = CustomDimension name :: (Core.Maybe Core.Text), value :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) newCustomDimension :: CustomDimension newCustomDimension = CustomDimension {name = Core.Nothing, value = Core.Nothing} instance Core.FromJSON CustomDimension where parseJSON = Core.withObject "CustomDimension" ( \o -> CustomDimension Core.<$> (o Core..:? "name") Core.<*> (o Core..:? "value") ) instance Core.ToJSON CustomDimension where toJSON CustomDimension {..} = Core.object ( Core.catMaybes [ ("name" Core..=) Core.<$> name, ("value" Core..=) Core.<$> value ] ) data CustomMetric = CustomMetric name :: (Core.Maybe Core.Text), value :: (Core.Maybe Core.Double) } deriving (Core.Eq, Core.Show, Core.Generic) newCustomMetric :: CustomMetric newCustomMetric = CustomMetric {name = Core.Nothing, value = Core.Nothing} instance Core.FromJSON CustomMetric where parseJSON = Core.withObject "CustomMetric" ( \o -> CustomMetric Core.<$> (o Core..:? "name") Core.<*> (o Core..:? 
"value") ) instance Core.ToJSON CustomMetric where toJSON CustomMetric {..} = Core.object ( Core.catMaybes [ ("name" Core..=) Core.<$> name, ("value" Core..=) Core.<$> value ] ) | A DoubleClick Search report . This object contains the report request , some report metadata such as currency code , and the generated report rows or report files . /See:/ ' newReport ' smart constructor . data Report = Report files :: (Core.Maybe [Report_FilesItem]), id :: (Core.Maybe Core.Text), isReportReady :: (Core.Maybe Core.Bool), | Identifies this as a Report resource . Value : the fixed string @doubleclicksearch#report@. kind :: (Core.Maybe Core.Text), request' :: (Core.Maybe ReportRequest), rowCount :: (Core.Maybe Core.Int32), rows :: (Core.Maybe [ReportRow]), statisticsCurrencyCode :: (Core.Maybe Core.Text), statisticsTimeZone :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) newReport :: Report newReport = Report { files = Core.Nothing, id = Core.Nothing, isReportReady = Core.Nothing, kind = Core.Nothing, request' = Core.Nothing, rowCount = Core.Nothing, rows = Core.Nothing, statisticsCurrencyCode = Core.Nothing, statisticsTimeZone = Core.Nothing } instance Core.FromJSON Report where parseJSON = Core.withObject "Report" ( \o -> Report Core.<$> (o Core..:? "files") Core.<*> (o Core..:? "id") Core.<*> (o Core..:? "isReportReady") Core.<*> (o Core..:? "kind") Core.<*> (o Core..:? "request") Core.<*> (o Core..:? "rowCount") Core.<*> (o Core..:? "rows") Core.<*> (o Core..:? "statisticsCurrencyCode") Core.<*> (o Core..:? "statisticsTimeZone") ) instance Core.ToJSON Report where toJSON Report {..} = Core.object ( Core.catMaybes [ ("files" Core..=) Core.<$> files, ("id" Core..=) Core.<$> id, ("isReportReady" Core..=) Core.<$> isReportReady, ("kind" Core..=) Core.<$> kind, ("request" Core..=) Core.<$> request', ("rowCount" Core..=) Core.<$> rowCount, ("rows" Core..=) Core.<$> rows, ("statisticsCurrencyCode" Core..=) Core.<$> statisticsCurrencyCode, ("statisticsTimeZone" Core..=) Core.<$> statisticsTimeZone ] ) data Report_FilesItem = Report_FilesItem byteCount :: (Core.Maybe Core.Int64), url :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' ' with the minimum fields required to make a request . newReport_FilesItem :: Report_FilesItem newReport_FilesItem = Report_FilesItem {byteCount = Core.Nothing, url = Core.Nothing} instance Core.FromJSON Report_FilesItem where parseJSON = Core.withObject "Report_FilesItem" ( \o -> Report_FilesItem Core.<$> ( o Core..:? "byteCount" Core.<&> Core.fmap Core.fromAsText ) Core.<*> (o Core..:? "url") ) instance Core.ToJSON Report_FilesItem where toJSON Report_FilesItem {..} = Core.object ( Core.catMaybes [ ("byteCount" Core..=) Core.. Core.AsText Core.<$> byteCount, ("url" Core..=) Core.<$> url ] ) | A request object used to create a DoubleClick Search report . data ReportApiColumnSpec = ReportApiColumnSpec | Name of a DoubleClick Search column to include in the report . columnName :: (Core.Maybe Core.Text), | Segments a report by a custom dimension . The report must be scoped to an advertiser or lower , and the custom dimension must already be set up in DoubleClick Search . The custom dimension name , which appears in DoubleClick Search , is case sensitive.\\ If used in a conversion report , returns the value of the specified custom dimension for the given conversion , if set . This column does not segment the conversion report . 
customDimensionName :: (Core.Maybe Core.Text), | Name of a custom metric to include in the report . The report must be scoped to an advertiser or lower , and the custom metric must already be set up in DoubleClick Search . The custom metric name , which appears in DoubleClick Search , is case sensitive . customMetricName :: (Core.Maybe Core.Text), endDate :: (Core.Maybe Core.Text), groupByColumn :: (Core.Maybe Core.Bool), | Text used to identify this column in the report output ; defaults to @columnName@ or @savedColumnName@ when not specified . This can be used to prevent collisions between DoubleClick Search columns and saved columns with the same name . headerText :: (Core.Maybe Core.Text), platformSource :: (Core.Maybe Core.Text), productReportPerspective :: (Core.Maybe Core.Text), | Name of a saved column to include in the report . The report must be scoped at advertiser or lower , and this saved column must already be created in the DoubleClick Search UI . savedColumnName :: (Core.Maybe Core.Text), startDate :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) newReportApiColumnSpec :: ReportApiColumnSpec newReportApiColumnSpec = ReportApiColumnSpec { columnName = Core.Nothing, customDimensionName = Core.Nothing, customMetricName = Core.Nothing, endDate = Core.Nothing, groupByColumn = Core.Nothing, headerText = Core.Nothing, platformSource = Core.Nothing, productReportPerspective = Core.Nothing, savedColumnName = Core.Nothing, startDate = Core.Nothing } instance Core.FromJSON ReportApiColumnSpec where parseJSON = Core.withObject "ReportApiColumnSpec" ( \o -> ReportApiColumnSpec Core.<$> (o Core..:? "columnName") Core.<*> (o Core..:? "customDimensionName") Core.<*> (o Core..:? "customMetricName") Core.<*> (o Core..:? "endDate") Core.<*> (o Core..:? "groupByColumn") Core.<*> (o Core..:? "headerText") Core.<*> (o Core..:? "platformSource") Core.<*> (o Core..:? "productReportPerspective") Core.<*> (o Core..:? "savedColumnName") Core.<*> (o Core..:? "startDate") ) instance Core.ToJSON ReportApiColumnSpec where toJSON ReportApiColumnSpec {..} = Core.object ( Core.catMaybes [ ("columnName" Core..=) Core.<$> columnName, ("customDimensionName" Core..=) Core.<$> customDimensionName, ("customMetricName" Core..=) Core.<$> customMetricName, ("endDate" Core..=) Core.<$> endDate, ("groupByColumn" Core..=) Core.<$> groupByColumn, ("headerText" Core..=) Core.<$> headerText, ("platformSource" Core..=) Core.<$> platformSource, ("productReportPerspective" Core..=) Core.<$> productReportPerspective, ("savedColumnName" Core..=) Core.<$> savedColumnName, ("startDate" Core..=) Core.<$> startDate ] ) | A request object used to create a DoubleClick Search report . data ReportRequest = ReportRequest | The columns to include in the report . This includes both DoubleClick Search columns and saved columns . For DoubleClick Search columns , only the @columnName@ parameter is required . For saved columns only the @savedColumnName@ parameter is required . Both @columnName@ and @savedColumnName@ can not be set in the same stanza.\\ The maximum number of columns per request is 300 . columns :: (Core.Maybe [ReportApiColumnSpec]), | Format that the report should be returned in . Currently @csv@ or @tsv@ is supported . downloadFormat :: (Core.Maybe Core.Text), | A list of filters to be applied to the report.\\ The maximum number of filters per request is 300 . filters :: (Core.Maybe [ReportRequest_FiltersItem]), | Determines if removed entities should be included in the report . Defaults to @false@. 
Deprecated , please use @includeRemovedEntities@ instead . includeDeletedEntities :: (Core.Maybe Core.Bool), includeRemovedEntities :: (Core.Maybe Core.Bool), maxRowsPerFile :: (Core.Maybe Core.Int32), | Synchronous report only . A list of columns and directions defining sorting to be performed on the report rows.\\ The maximum number of orderings per request is 300 . orderBy :: (Core.Maybe [ReportRequest_OrderByItem]), reportScope :: (Core.Maybe ReportRequest_ReportScope), reportType :: (Core.Maybe Core.Text), | Synchronous report only . The maximum number of rows to return ; additional rows are dropped . Acceptable values are @0@ to @10000@ , inclusive . Defaults to @10000@. rowCount :: (Core.Maybe Core.Int32), | Synchronous report only . Zero - based index of the first row to return . Acceptable values are @0@ to @50000@ , inclusive . Defaults to @0@. startRow :: (Core.Maybe Core.Int32), | Specifies the currency in which monetary will be returned . Possible values are : @usd@ , @agency@ ( valid if the report is scoped to agency or lower ) , @advertiser@ ( valid if the report is scoped to * advertiser or lower ) , or @account@ ( valid if the report is scoped to engine account or lower ) . statisticsCurrency :: (Core.Maybe Core.Text), timeRange :: (Core.Maybe ReportRequest_TimeRange), | If @true@ , the report would only be created if all the requested stat data are sourced from a single timezone . Defaults to @false@. verifySingleTimeZone :: (Core.Maybe Core.Bool) } deriving (Core.Eq, Core.Show, Core.Generic) newReportRequest :: ReportRequest newReportRequest = ReportRequest { columns = Core.Nothing, downloadFormat = Core.Nothing, filters = Core.Nothing, includeDeletedEntities = Core.Nothing, includeRemovedEntities = Core.Nothing, maxRowsPerFile = Core.Nothing, orderBy = Core.Nothing, reportScope = Core.Nothing, reportType = Core.Nothing, rowCount = Core.Nothing, startRow = Core.Nothing, statisticsCurrency = Core.Nothing, timeRange = Core.Nothing, verifySingleTimeZone = Core.Nothing } instance Core.FromJSON ReportRequest where parseJSON = Core.withObject "ReportRequest" ( \o -> ReportRequest Core.<$> (o Core..:? "columns") Core.<*> (o Core..:? "downloadFormat") Core.<*> (o Core..:? "filters") Core.<*> (o Core..:? "includeDeletedEntities") Core.<*> (o Core..:? "includeRemovedEntities") Core.<*> (o Core..:? "maxRowsPerFile") Core.<*> (o Core..:? "orderBy") Core.<*> (o Core..:? "reportScope") Core.<*> (o Core..:? "reportType") Core.<*> (o Core..:? "rowCount") Core.<*> (o Core..:? "startRow") Core.<*> (o Core..:? "statisticsCurrency") Core.<*> (o Core..:? "timeRange") Core.<*> (o Core..:? "verifySingleTimeZone") ) instance Core.ToJSON ReportRequest where toJSON ReportRequest {..} = Core.object ( Core.catMaybes [ ("columns" Core..=) Core.<$> columns, ("downloadFormat" Core..=) Core.<$> downloadFormat, ("filters" Core..=) Core.<$> filters, ("includeDeletedEntities" Core..=) Core.<$> includeDeletedEntities, ("includeRemovedEntities" Core..=) Core.<$> includeRemovedEntities, ("maxRowsPerFile" Core..=) Core.<$> maxRowsPerFile, ("orderBy" Core..=) Core.<$> orderBy, ("reportScope" Core..=) Core.<$> reportScope, ("reportType" Core..=) Core.<$> reportType, ("rowCount" Core..=) Core.<$> rowCount, ("startRow" Core..=) Core.<$> startRow, ("statisticsCurrency" Core..=) Core.<$> statisticsCurrency, ("timeRange" Core..=) Core.<$> timeRange, ("verifySingleTimeZone" Core..=) Core.<$> verifySingleTimeZone ] ) data ReportRequest_FiltersItem = ReportRequest_FiltersItem | Column to perform the filter on . 
This can be a DoubleClick Search column or a saved column . column :: (Core.Maybe ReportApiColumnSpec), operator :: (Core.Maybe Core.Text), | A list of values to filter the column value against.\\ The maximum number of filter values per request is 300 . values :: (Core.Maybe [Core.Value]) } deriving (Core.Eq, Core.Show, Core.Generic) newReportRequest_FiltersItem :: ReportRequest_FiltersItem newReportRequest_FiltersItem = ReportRequest_FiltersItem { column = Core.Nothing, operator = Core.Nothing, values = Core.Nothing } instance Core.FromJSON ReportRequest_FiltersItem where parseJSON = Core.withObject "ReportRequest_FiltersItem" ( \o -> ReportRequest_FiltersItem Core.<$> (o Core..:? "column") Core.<*> (o Core..:? "operator") Core.<*> (o Core..:? "values") ) instance Core.ToJSON ReportRequest_FiltersItem where toJSON ReportRequest_FiltersItem {..} = Core.object ( Core.catMaybes [ ("column" Core..=) Core.<$> column, ("operator" Core..=) Core.<$> operator, ("values" Core..=) Core.<$> values ] ) data ReportRequest_OrderByItem = ReportRequest_OrderByItem | Column to perform the sort on . This can be a DoubleClick Search - defined column or a saved column . column :: (Core.Maybe ReportApiColumnSpec), | The sort direction , which is either @ascending@ or sortOrder :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) newReportRequest_OrderByItem :: ReportRequest_OrderByItem newReportRequest_OrderByItem = ReportRequest_OrderByItem {column = Core.Nothing, sortOrder = Core.Nothing} instance Core.FromJSON ReportRequest_OrderByItem where parseJSON = Core.withObject "ReportRequest_OrderByItem" ( \o -> ReportRequest_OrderByItem Core.<$> (o Core..:? "column") Core.<*> (o Core..:? "sortOrder") ) instance Core.ToJSON ReportRequest_OrderByItem where toJSON ReportRequest_OrderByItem {..} = Core.object ( Core.catMaybes [ ("column" Core..=) Core.<$> column, ("sortOrder" Core..=) Core.<$> sortOrder ] ) data ReportRequest_ReportScope = ReportRequest_ReportScope adGroupId :: (Core.Maybe Core.Int64), adId :: (Core.Maybe Core.Int64), advertiserId :: (Core.Maybe Core.Int64), agencyId :: (Core.Maybe Core.Int64), campaignId :: (Core.Maybe Core.Int64), engineAccountId :: (Core.Maybe Core.Int64), keywordId :: (Core.Maybe Core.Int64) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' ReportRequest_ReportScope ' with the minimum fields required to make a request . newReportRequest_ReportScope :: ReportRequest_ReportScope newReportRequest_ReportScope = ReportRequest_ReportScope { adGroupId = Core.Nothing, adId = Core.Nothing, advertiserId = Core.Nothing, agencyId = Core.Nothing, campaignId = Core.Nothing, engineAccountId = Core.Nothing, keywordId = Core.Nothing } instance Core.FromJSON ReportRequest_ReportScope where parseJSON = Core.withObject "ReportRequest_ReportScope" ( \o -> ReportRequest_ReportScope Core.<$> ( o Core..:? "adGroupId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "adId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "advertiserId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "agencyId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "campaignId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "engineAccountId" Core.<&> Core.fmap Core.fromAsText ) Core.<*> ( o Core..:? "keywordId" Core.<&> Core.fmap Core.fromAsText ) ) instance Core.ToJSON ReportRequest_ReportScope where toJSON ReportRequest_ReportScope {..} = Core.object ( Core.catMaybes [ ("adGroupId" Core..=) Core.. 
Core.AsText Core.<$> adGroupId, ("adId" Core..=) Core.. Core.AsText Core.<$> adId, ("advertiserId" Core..=) Core.. Core.AsText Core.<$> advertiserId, ("agencyId" Core..=) Core.. Core.AsText Core.<$> agencyId, ("campaignId" Core..=) Core.. Core.AsText Core.<$> campaignId, ("engineAccountId" Core..=) Core.. Core.AsText Core.<$> engineAccountId, ("keywordId" Core..=) Core.. Core.AsText Core.<$> keywordId ] ) data ReportRequest_TimeRange = ReportRequest_TimeRange | Inclusive UTC timestamp in RFC format , e.g. , @2013 - 07 - 16T10:16:23.555Z@. See additional references on how changed attribute reports work . changedAttributesSinceTimestamp :: (Core.Maybe Core.Text), | Inclusive UTC timestamp in RFC format , e.g. , @2013 - 07 - 16T10:16:23.555Z@. See additional references on how changed metrics reports work . changedMetricsSinceTimestamp :: (Core.Maybe Core.Text), endDate :: (Core.Maybe Core.Text), startDate :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) newReportRequest_TimeRange :: ReportRequest_TimeRange newReportRequest_TimeRange = ReportRequest_TimeRange { changedAttributesSinceTimestamp = Core.Nothing, changedMetricsSinceTimestamp = Core.Nothing, endDate = Core.Nothing, startDate = Core.Nothing } instance Core.FromJSON ReportRequest_TimeRange where parseJSON = Core.withObject "ReportRequest_TimeRange" ( \o -> ReportRequest_TimeRange Core.<$> (o Core..:? "changedAttributesSinceTimestamp") Core.<*> (o Core..:? "changedMetricsSinceTimestamp") Core.<*> (o Core..:? "endDate") Core.<*> (o Core..:? "startDate") ) instance Core.ToJSON ReportRequest_TimeRange where toJSON ReportRequest_TimeRange {..} = Core.object ( Core.catMaybes [ ("changedAttributesSinceTimestamp" Core..=) Core.<$> changedAttributesSinceTimestamp, ("changedMetricsSinceTimestamp" Core..=) Core.<$> changedMetricsSinceTimestamp, ("endDate" Core..=) Core.<$> endDate, ("startDate" Core..=) Core.<$> startDate ] ) | A row in a DoubleClick Search report . newtype ReportRow = ReportRow | Indicates the columns that are represented in this row . That is , each key corresponds to a column with a non - empty cell in this row . additional :: (Core.HashMap Core.Text Core.Value) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' ' with the minimum fields required to make a request . newReportRow :: | Indicates the columns that are represented in this row . That is , each key corresponds to a column with a non - empty cell in this row . See ' additional ' . Core.HashMap Core.Text Core.Value -> ReportRow newReportRow additional = ReportRow {additional = additional} instance Core.FromJSON ReportRow where parseJSON = Core.withObject "ReportRow" (\o -> ReportRow Core.<$> (Core.parseJSONObject o)) instance Core.ToJSON ReportRow where toJSON ReportRow {..} = Core.toJSON additional data SavedColumn = SavedColumn | Identifies this as a SavedColumn resource . Value : the fixed string doubleclicksearch#savedColumn . kind :: (Core.Maybe Core.Text), savedColumnName :: (Core.Maybe Core.Text), type' :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' SavedColumn ' with the minimum fields required to make a request . newSavedColumn :: SavedColumn newSavedColumn = SavedColumn { kind = Core.Nothing, savedColumnName = Core.Nothing, type' = Core.Nothing } instance Core.FromJSON SavedColumn where parseJSON = Core.withObject "SavedColumn" ( \o -> SavedColumn Core.<$> (o Core..:? "kind") Core.<*> (o Core..:? "savedColumnName") Core.<*> (o Core..:? 
"type") ) instance Core.ToJSON SavedColumn where toJSON SavedColumn {..} = Core.object ( Core.catMaybes [ ("kind" Core..=) Core.<$> kind, ("savedColumnName" Core..=) Core.<$> savedColumnName, ("type" Core..=) Core.<$> type' ] ) | A list of saved columns . Advertisers create saved columns to report on Floodlight activities , Google Analytics goals , or custom KPIs . To request reports with saved columns , you\'ll need the saved column names that are available from this list . data SavedColumnList = SavedColumnList items :: (Core.Maybe [SavedColumn]), | Identifies this as a SavedColumnList resource . Value : the fixed string doubleclicksearch#savedColumnList . kind :: (Core.Maybe Core.Text) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' SavedColumnList ' with the minimum fields required to make a request . newSavedColumnList :: SavedColumnList newSavedColumnList = SavedColumnList {items = Core.Nothing, kind = Core.Nothing} instance Core.FromJSON SavedColumnList where parseJSON = Core.withObject "SavedColumnList" ( \o -> SavedColumnList Core.<$> (o Core..:? "items") Core.<*> (o Core..:? "kind") ) instance Core.ToJSON SavedColumnList where toJSON SavedColumnList {..} = Core.object ( Core.catMaybes [ ("items" Core..=) Core.<$> items, ("kind" Core..=) Core.<$> kind ] ) newtype UpdateAvailabilityRequest = UpdateAvailabilityRequest availabilities :: (Core.Maybe [Availability]) } deriving (Core.Eq, Core.Show, Core.Generic) newUpdateAvailabilityRequest :: UpdateAvailabilityRequest newUpdateAvailabilityRequest = UpdateAvailabilityRequest {availabilities = Core.Nothing} instance Core.FromJSON UpdateAvailabilityRequest where parseJSON = Core.withObject "UpdateAvailabilityRequest" ( \o -> UpdateAvailabilityRequest Core.<$> (o Core..:? "availabilities") ) instance Core.ToJSON UpdateAvailabilityRequest where toJSON UpdateAvailabilityRequest {..} = Core.object ( Core.catMaybes [("availabilities" Core..=) Core.<$> availabilities] ) newtype UpdateAvailabilityResponse = UpdateAvailabilityResponse availabilities :: (Core.Maybe [Availability]) } deriving (Core.Eq, Core.Show, Core.Generic) | Creates a value of ' UpdateAvailabilityResponse ' with the minimum fields required to make a request . newUpdateAvailabilityResponse :: UpdateAvailabilityResponse newUpdateAvailabilityResponse = UpdateAvailabilityResponse {availabilities = Core.Nothing} instance Core.FromJSON UpdateAvailabilityResponse where parseJSON = Core.withObject "UpdateAvailabilityResponse" ( \o -> UpdateAvailabilityResponse Core.<$> (o Core..:? "availabilities") ) instance Core.ToJSON UpdateAvailabilityResponse where toJSON UpdateAvailabilityResponse {..} = Core.object ( Core.catMaybes [("availabilities" Core..=) Core.<$> availabilities] )
3a7f54f36c661df7484543778e05f550cfacd96c6837f1a6f0ed153841f7b524
goldfirere/thesis
Util.hs
-- |
-- Module      : Data.Vector.Fusion.Util
-- Copyright   : (c) Roman Leshchinskiy 2009
-- License     : BSD-style
--
-- Maintainer  : < >
-- Stability   : experimental
-- Portability : portable
--
-- Fusion-related utility types
--

module Data.Vector.Fusion.Util (
  Id(..), Box(..),

  delay_inline, delayed_min
) where

import Control.Applicative

-- | Identity monad
newtype Id a = Id { unId :: a }

instance Functor Id where
  fmap f (Id x) = Id (f x)

instance Applicative Id where
  pure = Id
  Id f <*> Id x = Id (f x)

instance Monad Id where
  return = Id
  Id x >>= f = f x

-- | Box monad
data Box a = Box { unBox :: a }

instance Functor Box where
  fmap f (Box x) = Box (f x)

instance Applicative Box where
  pure = Box
  Box f <*> Box x = Box (f x)

instance Monad Box where
  return = Box
  Box x >>= f = f x

-- | Delay inlining a function until late in the game (simplifier phase 0).
delay_inline :: (a -> b) -> a -> b
{-# INLINE [0] delay_inline #-}
delay_inline f = f

-- | `min` inlined in phase 0
delayed_min :: Int -> Int -> Int
{-# INLINE [0] delayed_min #-}
delayed_min m n = min m n
null
https://raw.githubusercontent.com/goldfirere/thesis/22f066bc26b1147530525aabb3df686416b3e4aa/cab/vector-0.10.12.3/Data/Vector/Fusion/Util.hs
haskell
| Module : Data.Vector.Fusion.Util License : BSD-style Stability : experimental Portability : portable Fusion-related utility types | Identity monad | Box monad | Delay inlining a function until late in the game (simplifier phase 0). # INLINE [0] delay_inline # | `min` inlined in phase 0 # INLINE [0] delayed_min #
Copyright : ( c ) Roman Leshchinskiy 2009 Maintainer : < > module Data.Vector.Fusion.Util ( Id(..), Box(..), delay_inline, delayed_min ) where import Control.Applicative newtype Id a = Id { unId :: a } instance Functor Id where fmap f (Id x) = Id (f x) instance Applicative Id where pure = Id Id f <*> Id x = Id (f x) instance Monad Id where return = Id Id x >>= f = f x data Box a = Box { unBox :: a } instance Functor Box where fmap f (Box x) = Box (f x) instance Applicative Box where pure = Box Box f <*> Box x = Box (f x) instance Monad Box where return = Box Box x >>= f = f x delay_inline :: (a -> b) -> a -> b delay_inline f = f delayed_min :: Int -> Int -> Int delayed_min m n = min m n
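Editorial aside, not part of the dataset record above: a tiny usage sketch of the helpers this module exports. Everything referenced here is defined in the record itself; only the surrounding example bindings are invented for illustration.

import Data.Vector.Fusion.Util

-- unId runs a computation in the Id monad; Box works the same way but is a
-- plain (non-newtype) wrapper, which matters only for strictness and fusion.
three :: Int
three = unId (Id 1 >>= \x -> return (x + 2))

-- delay_inline and delayed_min are semantically just application and min;
-- the INLINE [0] pragmas only postpone inlining to simplifier phase 0.
clamped :: Int
clamped = delay_inline (delayed_min 10) 3   -- evaluates to 3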
de8b9a36ee4654443113ea8142f6a27f9c1c586715008f2a35ee9b31927a1a69
jonase/eq
pp.ml
open Easy_format

type colors = Black | Red | Green | Yellow | Blue | Magenta | Cyan | White

let color_code color =
  match color with
    Black -> 30
  | Red -> 31
  | Green -> 32
  | Yellow -> 33
  | Blue -> 34
  | Magenta -> 35
  | Cyan -> 36
  | White -> 37

let wrap_color color string =
  match color with
    Some color -> Printf.sprintf "\027[%dm%s\027[0m" (color_code color) string
  | None -> string

let with_color color x =
  match x with
    Atom (s, a) -> Atom ((wrap_color color s), a)
  | a -> a

let edn_color_scheme x =
  match x with
    (`String _ | `Char _) -> Some Magenta
  | (`Symbol _ | `Keyword _ | `Tag _) -> Some Green
  | _ -> None

let edn_no_color_scheme x = None

let edn_list = {
  list with
  space_after_opening = false;
  space_before_closing = false;
  align_closing = false;
}

let string_of_symbol (ns, name) =
  match ns with
    None -> name
  | Some s -> s ^ "/" ^ name

let rec format color_scheme (x : Edn.t) =
  let colorize = with_color (color_scheme x) in
  match x with
    `Null -> colorize (Atom ("nil", atom))
  | `Bool b -> colorize (Atom ((if b then "true" else "false"), atom))
  | `String s -> colorize (Atom ("\"" ^ s ^ "\"", atom))
  | `Char b -> colorize (Atom (b, atom))
  | `Symbol sym -> colorize (Atom (string_of_symbol sym, atom))
  | `Keyword kw -> colorize (Atom (":" ^ string_of_symbol kw, atom))
  | `Int i -> colorize (Atom (string_of_int i, atom))
  | `BigInt s -> colorize (Atom (s, atom))
  | `Float f -> colorize (Atom (string_of_float f, atom))
  | `Decimal d -> colorize (Atom (d, atom))
  | `List l -> List (("(", "", ")", edn_list), List.map (format color_scheme) l)
  | `Vector v -> List (("[", "", "]", edn_list), List.map (format color_scheme) v)
  | `Set s -> List (("#{", "", "}", edn_list), List.map (format color_scheme) s)
  | `Assoc kvs -> List (("{", "", "}", edn_list), List.map (format_assoc color_scheme) kvs)
  | `Tag tv -> format_tag color_scheme tv

and format_assoc color_scheme (k, v) =
  Label ((format color_scheme k, label), format color_scheme v)

and format_tag color_scheme (ns, name, value) =
  let color = color_scheme (`Symbol (None, "")) in
  let tag = with_color color (Atom ("#" ^ string_of_symbol (ns, name), atom)) in
  Label ((tag, label), format color_scheme value)
null
https://raw.githubusercontent.com/jonase/eq/08bd514e78a026af077fa5a996d43f0d7ae89795/src/pp.ml
ocaml
open Easy_format type colors = Black | Red | Green | Yellow | Blue | Magenta | Cyan | White let color_code color = match color with Black -> 30 | Red -> 31 | Green -> 32 | Yellow -> 33 | Blue -> 34 | Magenta -> 35 | Cyan -> 36 | White -> 37 let wrap_color color string = match color with Some color -> Printf.sprintf "\027[%dm%s\027[0m" (color_code color) string | None -> string let with_color color x = match x with Atom (s, a) -> Atom ((wrap_color color s), a) | a -> a let edn_color_scheme x = match x with (`String _ | `Char _) -> Some Magenta | (`Symbol _ | `Keyword _ | `Tag _) -> Some Green | _ -> None let edn_no_color_scheme x = None let edn_list = { list with space_after_opening = false; space_before_closing = false; align_closing = false; } let string_of_symbol (ns, name) = match ns with None -> name | Some s -> s ^ "/" ^ name let rec format color_scheme (x : Edn.t) = let colorize = with_color (color_scheme x) in match x with `Null -> colorize (Atom ("nil", atom)) | `Bool b -> colorize (Atom ((if b then "true" else "false"), atom)) | `String s -> colorize (Atom ("\"" ^ s ^ "\"", atom)) | `Char b -> colorize (Atom (b, atom)) | `Symbol sym -> colorize (Atom (string_of_symbol sym, atom)) | `Keyword kw -> colorize (Atom (":" ^ string_of_symbol kw, atom)) | `Int i -> colorize (Atom (string_of_int i, atom)) | `BigInt s -> colorize (Atom (s, atom)) | `Float f -> colorize (Atom(string_of_float f, atom)) | `Decimal d -> colorize (Atom (d, atom)) | `List l -> List (("(", "", ")", edn_list), List.map (format color_scheme) l) | `Vector v -> List (("[", "", "]", edn_list), List.map (format color_scheme) v) | `Set s -> List (("#{", "", "}", edn_list), List.map (format color_scheme) s) | `Assoc kvs -> List (("{", "", "}", edn_list), List.map (format_assoc color_scheme) kvs) | `Tag tv -> format_tag color_scheme tv and format_assoc color_scheme (k, v) = Label ((format color_scheme k, label), format color_scheme v) and format_tag color_scheme (ns, name, value) = let color = color_scheme (`Symbol (None, "")) in let tag = with_color color (Atom ("#" ^ string_of_symbol (ns, name), atom)) in Label ((tag, label), format color_scheme value)
afa7d3ebfcf270a6d3101c9f305aec26d6be4713b5d319670c5317ccd4149b95
camllight/camllight
patch.ml
(* To relocate a block of object bytecode *)

#open "reloc";;
#open "symtable";;

let patch_short buff pos val =
  set_nth_char buff pos (char_of_int val);
  set_nth_char buff (succ pos) (char_of_int (lshift_right val 8))
;;

let patch_object buff offset =
  do_list
    (function
       Reloc_literal sc, pos ->
         patch_short buff (pos + offset) (get_slot_for_literal sc)
     | Reloc_getglobal id, pos ->
         patch_short buff (pos + offset) (get_slot_for_variable id)
     | Reloc_setglobal id, pos ->
         patch_short buff (pos + offset) (get_slot_for_defined_variable id)
     | Reloc_tag(id, stamp), pos ->
         set_nth_char buff (pos + offset) (char_of_int (get_num_of_exn(id,stamp)))
     | Reloc_primitive name, pos ->
         patch_short buff (pos + offset) (get_num_of_prim name))
;;
null
https://raw.githubusercontent.com/camllight/camllight/0cc537de0846393322058dbb26449427bfc76786/sources/src/linker/patch.ml
ocaml
To relocate a block of object bytecode
#open "reloc";; #open "symtable";; let patch_short buff pos val = set_nth_char buff pos (char_of_int val); set_nth_char buff (succ pos) (char_of_int (lshift_right val 8)) ;; let patch_object buff offset = do_list (function Reloc_literal sc, pos -> patch_short buff (pos + offset) (get_slot_for_literal sc) | Reloc_getglobal id, pos -> patch_short buff (pos + offset) (get_slot_for_variable id) | Reloc_setglobal id, pos -> patch_short buff (pos + offset) (get_slot_for_defined_variable id) | Reloc_tag(id, stamp), pos -> set_nth_char buff (pos + offset) (char_of_int (get_num_of_exn(id,stamp))) | Reloc_primitive name, pos -> patch_short buff (pos + offset) (get_num_of_prim name)) ;;
f7f6001c0c41a536eed5130a04b2dcc303e78b7b704d879df738e7bf67f9e448
kelsey-sorrels/robinson
add_all_crafting_items.clj
(require '[robinson.itemgen :as ig]
         '[robinson.monstergen :as mg]
         '[robinson.crafting :as rcrafting])

(let [item-ids (->> ig/items
                    (filter rcrafting/item-satisfies-any-recipe-clause?)
                    (map :item/id))
      rat (mg/id->monster :rat)
      rat-corpse (ig/gen-corpse rat)
      rat-bones (ig/gen-bones rat-corpse)
      rat-hide (ig/gen-hide rat-corpse)]
  (ri/add-to-inventory *state*
    (concat [rat-corpse rat-bones rat-hide]
            (map (fn [id] (assoc (ig/gen-item id) :count 50)) item-ids))))
null
https://raw.githubusercontent.com/kelsey-sorrels/robinson/337fd2646882708331257d1f3db78a3074ccc67a/src/robinson_tools/snippets/add_all_crafting_items.clj
clojure
(require '[robinson.itemgen :as ig] '[robinson.monstergen :as mg] '[robinson.crafting :as rcrafting]) (let [item-ids (->> ig/items (filter rcrafting/item-satisfies-any-recipe-clause?) (map :item/id)) rat (mg/id->monster :rat) rat-corpse (ig/gen-corpse rat) rat-bones (ig/gen-bones rat-corpse) rat-hide (ig/gen-hide rat-corpse)] (ri/add-to-inventory *state* (concat [rat-corpse rat-bones rat-hide] (map (fn [id] (assoc (ig/gen-item id) :count 50)) item-ids))))
3a7d9938909f117e34162eb3c8fca2e9cbf8490d7e8b21253e6b4ddaea22db18
skynet-gh/skylobby
resources.clj
(ns skylobby.main.resources (:require [clojure.tools.cli :as cli] [skylobby.auto-resources :as auto-resources] [skylobby.cli.util :as cu] skylobby.core [skylobby.task :as task] [taoensso.timbre :as log])) (set! *warn-on-reflection* true) (def cli-options [ [nil "--engine ENGINE" "Engine to get"] [nil "--game GAME" "Game to get"] [nil "--map MAP" "Map to get"]]) (def max-tries 10) (defn -main [& args] (let [{:keys [errors options]} (cli/parse-opts args cli-options :in-order true) {engine-version :engine mod-name :game map-name :map} options] (cond errors (apply cu/print-and-exit -1 "Error parsing arguments:\n" errors) (not (or engine-version mod-name map-name)) (cu/print-and-exit -1 "At least one of [--engine, --map, --mod] is required") :else (let [ _ (log/info "Loading initial state") initial-state (skylobby.core/initial-state) state (merge initial-state {:ipc-server-enabled false :use-db-for-rapid false :use-db-for-replays false}) tries (atom 0) resources {:engine-version engine-version :map-name map-name :mod-name mod-name} has-all-resources (fn [state] (let [{:keys [engine-details map-details mod-details]} (auto-resources/resource-details resources state)] (and (when engine-version engine-details) (when map-name map-details) (when mod-name mod-details)))) add-tasks (fn [state] (let [tasks (auto-resources/auto-resources-tasks (assoc resources :battle-changed true) state)] (if (seq tasks) (task/add-tasks! skylobby.core/*state tasks) (log/error "No tasks to auto-get resources")))) exit (fn [code message] (skylobby.core/spit-state-config-to-edn nil @skylobby.core/*state) (cu/print-and-exit code message))] (when (has-all-resources state) (cu/print-and-exit 0 "All resources gotten")) (reset! skylobby.core/*state state) (skylobby.core/init skylobby.core/*state {:initial-task-delay-ms 0 :skip-tasks true}) (add-watch skylobby.core/*state :retry-when-no-tasks (fn [_ _ _ new-state] (let [all-tasks (task/all-tasks new-state)] (if (empty? all-tasks) (cond (has-all-resources new-state) (exit 0 "All resources gotten") (>= @tries max-tries) (exit 1 "Max tries reached") :else (do (log/info "No tasks, trying to get resources") (swap! tries inc) (add-tasks new-state))) (log/info "Waiting for" (count all-tasks) "tasks before retrying"))))) (add-tasks @skylobby.core/*state)))))
null
https://raw.githubusercontent.com/skynet-gh/skylobby/125c3e7542356220f8fc1032f4cc37f189571afe/graal/clj/skylobby/main/resources.clj
clojure
(ns skylobby.main.resources (:require [clojure.tools.cli :as cli] [skylobby.auto-resources :as auto-resources] [skylobby.cli.util :as cu] skylobby.core [skylobby.task :as task] [taoensso.timbre :as log])) (set! *warn-on-reflection* true) (def cli-options [ [nil "--engine ENGINE" "Engine to get"] [nil "--game GAME" "Game to get"] [nil "--map MAP" "Map to get"]]) (def max-tries 10) (defn -main [& args] (let [{:keys [errors options]} (cli/parse-opts args cli-options :in-order true) {engine-version :engine mod-name :game map-name :map} options] (cond errors (apply cu/print-and-exit -1 "Error parsing arguments:\n" errors) (not (or engine-version mod-name map-name)) (cu/print-and-exit -1 "At least one of [--engine, --map, --mod] is required") :else (let [ _ (log/info "Loading initial state") initial-state (skylobby.core/initial-state) state (merge initial-state {:ipc-server-enabled false :use-db-for-rapid false :use-db-for-replays false}) tries (atom 0) resources {:engine-version engine-version :map-name map-name :mod-name mod-name} has-all-resources (fn [state] (let [{:keys [engine-details map-details mod-details]} (auto-resources/resource-details resources state)] (and (when engine-version engine-details) (when map-name map-details) (when mod-name mod-details)))) add-tasks (fn [state] (let [tasks (auto-resources/auto-resources-tasks (assoc resources :battle-changed true) state)] (if (seq tasks) (task/add-tasks! skylobby.core/*state tasks) (log/error "No tasks to auto-get resources")))) exit (fn [code message] (skylobby.core/spit-state-config-to-edn nil @skylobby.core/*state) (cu/print-and-exit code message))] (when (has-all-resources state) (cu/print-and-exit 0 "All resources gotten")) (reset! skylobby.core/*state state) (skylobby.core/init skylobby.core/*state {:initial-task-delay-ms 0 :skip-tasks true}) (add-watch skylobby.core/*state :retry-when-no-tasks (fn [_ _ _ new-state] (let [all-tasks (task/all-tasks new-state)] (if (empty? all-tasks) (cond (has-all-resources new-state) (exit 0 "All resources gotten") (>= @tries max-tries) (exit 1 "Max tries reached") :else (do (log/info "No tasks, trying to get resources") (swap! tries inc) (add-tasks new-state))) (log/info "Waiting for" (count all-tasks) "tasks before retrying"))))) (add-tasks @skylobby.core/*state)))))
9232214b51f77ecd000b837948d87ef37cfe31a57a4c8d83ea5d0e3cc8673645
TrustInSoft/tis-interpreter
GuiPanel.mli
(* Modified by TrustInSoft *)

(**************************************************************************)
(*                                                                        *)
(*  This file is part of WP plug-in of Frama-C.                           *)
(*                                                                        *)
(*  Copyright (C) 2007-2015                                               *)
(*    CEA (Commissariat a l'energie atomique et aux energies              *)
(*         alternatives)                                                  *)
(*                                                                        *)
(*  you can redistribute it and/or modify it under the terms of the GNU   *)
(*  Lesser General Public License as published by the Free Software       *)
(*  Foundation, version 2.1.                                              *)
(*                                                                        *)
(*  It is distributed in the hope that it will be useful,                 *)
(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the          *)
(*  GNU Lesser General Public License for more details.                   *)
(*                                                                        *)
(*  See the GNU Lesser General Public License version 2.1 for more        *)
(*  details (enclosed in the file licenses/LGPLv2.1).                     *)
(*                                                                        *)
(**************************************************************************)

val update : unit -> unit
val on_update : (unit -> unit) -> unit

val reload : unit -> unit
val on_reload : (unit -> unit) -> unit

val run_and_prove :
  Design.main_window_extension_points ->
  GuiSource.selection -> unit

val register :
  main:Design.main_window_extension_points ->
  available_provers:GuiConfig.provers ->
  enabled_provers:GuiConfig.provers ->
  configure_provers:(unit -> unit) ->
  unit
null
https://raw.githubusercontent.com/TrustInSoft/tis-interpreter/33132ce4a825494ea48bf2dd6fd03a56b62cc5c3/src/plugins/wp/GuiPanel.mli
ocaml
************************************************************************ alternatives) you can redistribute it and/or modify it under the terms of the GNU It is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. ************************************************************************
Modified by TrustInSoft This file is part of WP plug - in of Frama - C. Copyright ( C ) 2007 - 2015 CEA ( Commissariat a l'energie atomique et aux energies Lesser General Public License as published by the Free Software Foundation , version 2.1 . See the GNU Lesser General Public License version 2.1 for more details ( enclosed in the file licenses / LGPLv2.1 ) . val update : unit -> unit val on_update : (unit -> unit) -> unit val reload : unit -> unit val on_reload : (unit -> unit) -> unit val run_and_prove : Design.main_window_extension_points -> GuiSource.selection -> unit val register : main:Design.main_window_extension_points -> available_provers:GuiConfig.provers -> enabled_provers:GuiConfig.provers -> configure_provers:(unit -> unit) -> unit
1fdee10dba500d5147e10ad8b3d9a8253606d87b94a9bf98a38dd0422a4a464b
babashka/babashka
alg.cljc
(ns loom.test.alg (:require [loom.graph :refer [graph weighted-graph digraph weighted-digraph nodes successors remove-nodes add-nodes edges add-edges]] [loom.alg :refer [pre-traverse post-traverse pre-span topsort bf-traverse bf-span bf-path bf-path-bi dijkstra-path dijkstra-path-dist dijkstra-traverse dijkstra-span johnson all-pairs-shortest-paths connected-components connected? scc strongly-connected? connect dag? shortest-path loners bellman-ford bipartite-color bipartite? bipartite-sets coloring? greedy-coloring prim-mst-edges prim-mst-edges prim-mst astar-path astar-dist degeneracy-ordering maximal-cliques subgraph? eql? isomorphism?]] [loom.derived :refer [mapped-by]] clojure.walk #?@(:clj [[clojure.test :refer :all]] :cljs [cljs.test])) #?@(:cljs [(:require-macros [cljs.test :refer (deftest testing are is)])])) ;; 's_algorithm (def g1 (weighted-graph [1 2 7] [1 3 9] [1 6 14] [2 3 10] [2 4 15] [3 4 11] [3 6 2] [4 5 6] [5 6 9])) ;; 's_algorithm (def g2 (weighted-graph [:r :g 10] [:r :b 5] [:r :o 8] [:g :b 3] [:b :p 7] [:p :o 2])) ;; (def g4 (weighted-graph [:a :b 85] [:b :f 80] [:f :i 250] [:i :j 84] [:a :c 217] [:c :g 186] [:c :h 103] [:d :h 183] [:h :j 167] [:a :e 173] [:e :j 502])) Algorithm Design Manual , p 179 (def g5 (digraph {:a [:b :c] :b [:c :d] :c [:e :f] :d [] :e [:d] :f [:e] :g [:a :f]})) (def g6 (graph [0 1] [1 2] [1 3] [2 4] [3 4] [0 5])) (def g7 (digraph [1 2] [2 3] [3 1] [5 6] [6 7])) (def g8 (graph {1 [2 3 4] 5 [6 7 8]})) Algorithm Design Manual , p 182 (def g9 (digraph {8 #{6}, 7 #{5}, 6 #{7}, 5 #{6}, 4 #{1 6 8}, 3 #{1}, 2 #{3 4 5}, 1 #{2}})) ;; (def g10 (digraph {:a [:b] :b [:c :e :f] :c [:d :g] :d [:c :h] :e [:a :f] :f [:g] :g [:f] :h [:g :d]})) ;; Weighted directed graph with a negative-weight cycle ;; which is reachable from sources :a, :b, :d, and :e. ;; /~simhaweb/alg/lectures/module9/module9.html (def g11 (weighted-digraph [:a :b 3] [:b :c 4] [:b :d 5] [:d :e 2] [:e :b -8])) ;; Weighted directed graph with a non-negative-weight cycle, similar to , but with the edge [: e : b ] reweighed . (def g12 (weighted-digraph [:a :b 3] [:b :c 4] [:b :d 5] [:d :e 2] [:e :b -7])) Directed graph with 4 strongly connected components . 
(def g13 (digraph [1 5] [2 4] [3 1] [3 2] [3 6] [4 10] [5 3] [6 1] [6 10] [7 8] [8 9] [8 11] [9 3] [9 5] [9 7] [10 2] [11 2] [11 4])) (def g14 (digraph [1 2] [2 3] [2 4])) (def g15 (digraph [1 2] [3 2] [2 4])) (def g16 (digraph [:a :e] [:a :b] [:a :c] [:e :d] [:d :c])) ;; simple directed "triangle" graph (def triangle (digraph [:a :b] [:b :c] [:c :a])) ;; graphs for mst ;; 's_algorithm (def mst_wt_g1 (weighted-graph '(:a, :e , 1) '(:c, :d ,2) '(:a,:b, 3), '(:b,:e,4), '(:b,:c,5) '(:e,:c,6) '(:e,:d,7))) graph with 2 components (def mst_wt_g2 (weighted-graph [:a :b 2] [:a :d 1] [:b :d 2] [:c :d 3] [:b :c 1] [:e :f 1] )) (def mst_unweighted_g3 (graph [:a :b] [:a :c] [:a :d] [:b :d] [:c :d])) (def mst_wt_g4 (weighted-graph [:a :b 1])) (def mst_wt_g5 (weighted-graph [:a :b 5] [:a :c 2] [:b :c 2])) graph from Cormen et all (def mst_wt_g6 (weighted-graph [:a :b 4] [:a :h 8] [:b :c 8] [:b :h 11] [:c :d 7] [:c :f 4] [:c :i 2] [:d :f 14] [:d :e 9] [:e :f 10] [:f :g 2] [:i :h 7] [:i :g 6] [:h :g 1] )) graph with 2 components and 2 isolated nodes (def mst_wt_g7 (weighted-graph [:a :b 2] [:b :d 2] [:e :f 1] :g :h )) (deftest depth-first-test (are [expected got] (= expected got) #{1 2 3 5 6 7} (set (pre-traverse g7)) #{1 2 3} (set (pre-traverse g7 1)) #{1 2 3 4 5 6 7 8} (set (pre-traverse g8)) #{1 2 3 4 5 6 7 8} (set (post-traverse g8)) [:d :e :f :c :b :a :g] (post-traverse g5 :g) false (not (some #{(pre-traverse g16 :a)} [[:a :e :d :c :b] [:a :b :c :e :d] [:a :b :e :d :c] [:a :c :b :e :d] [:a :c :e :d :b]])) false (not (some #{(post-traverse g7 1)} [[3 2 1] [2 3 1]])) #{1 2 3 4 5 6 7 8} (set (nodes (digraph (pre-span g8)))) #{2 3 4} (set (successors (digraph (pre-span g8)) 1)) #{1 5} (set (successors (digraph (pre-span g6)) 0)) true (let [span (digraph (pre-span g6))] (and (or (= #{3} (set (successors span 4))) (= #{2} (set (successors span 4)))) (or (= #{3} (set (successors span 1))) (= #{2} (set (successors span 1)))))) [:g :a :b :c :f :e :d] (topsort g5) nil (topsort g7) [5 6 7] (topsort g7 5) [1 2 4] (topsort g15 1))) (deftest depth-first-test-2 (is (#{[1 2 3 4] [1 2 4 3]} (topsort g14 1)))) (deftest breadth-first-test (are [expected got] (= expected got) #{1 2 3 5 6 7} (set (bf-traverse g7)) #{1 2 3} (set (bf-traverse g7 1)) #{1 2 3 4 5 6 7 8} (set (bf-traverse g8)) #{1 2 3 4 5 6 7 8} (set (nodes (digraph (bf-span g8)))) #{2 3} (set (successors (digraph (bf-span g6)) 1)) false (not (some #{(bf-traverse (remove-nodes g6 5))} [[0 1 2 3 4] [0 1 3 2 4]])) #{:r} (set (bf-traverse g2 :r :when #(< %3 1))) #{:r :o :b :g} (set (bf-traverse g2 :r :when #(< %3 2))) #{:r :o :b :g :p} (set (bf-traverse g2 :r :when #(< %3 3))) [:a :e :j] (bf-path g4 :a :j) [:a :c :h :j] (bf-path g4 :a :j :when (fn [n p d] (not= :e n))) #?@(:bb [] ;; TODO: flaky test sometimes files on some versions of linux, why? 
:clj [[:a :e :j] (bf-path-bi g4 :a :j) true (some #(= % (bf-path-bi g5 :g :d)) [[:g :a :b :d] [:g :f :e :d]])]))) (deftest dijkstra-test (are [expected got] (= expected got) [:a :c :h :j] (dijkstra-path g4 :a :j) [[:a :c :h :j] 487] (dijkstra-path-dist g4 :a :j) [[:r :o :p] 10] (dijkstra-path-dist g2 :r :p) #{:r :g :b :o :p} (set (map first (dijkstra-traverse g2))) {:r {:o 8 :b 5} :b {:g 8} :o {:p 10}} (dijkstra-span g2 :r))) (deftest johnson-test (are [expected got] (= expected got) {:p {:p {:o 2, :b 7} :o {:r 10} :b {:g 10}} :o {:o {:p 2, :r 8} :p {:b 9} :b {:g 12}} :g {:g {:b 3} :b {:r 8, :p 10} :p {:o 12}} :b {:b {:p 7, :g 3, :r 5} :p {:o 9}} :r {:r {:o 8, :b 5} :b {:g 8} :o {:p 10}}} (johnson g2) {1 {1 {5 1}, 5 {3 2}, 3 {2 3, 6 3}, 2 {4 4}, 6 {10 4}} 2 {2 {4 1}, 4 {10 2}} 3 {3 {1 1, 2 1, 6 1}, 1 {5 2}, 2 {4 2}, 6 {10 2}} 4 {4 {10 1}, 10 {2 2}} 5 {5 {3 1}, 3 {1 2, 2 2, 6 2}, 2 {4 3}, 6 {10 3}} 6 {6 {1 1, 10 1}, 1 {5 2}, 10 {2 2}, 2 {4 3}, 5 {3 3}} 7 {4 {10 4}, 8 {11 2, 9 2}, 7 {8 1}, 9 {5 3, 3 3}, 11 {4 3, 2 3}, 3 {6 4, 1 4}} 8 {4 {10 3}, 8 {11 1, 9 1}, 9 {7 2, 5 2, 3 2}, 11 {4 2, 2 2}, 3 {6 3, 1 3}} 9 {8 {11 3}, 6 {10 3}, 7 {8 2}, 2 {4 3}, 9 {7 1, 5 1, 3 1}, 3 {6 2, 2 2, 1 2}} 10 {10 {2 1}, 2 {4 2}} 11 {11 {2 1, 4 1}, 4 {10 2}}} (johnson g13) false (johnson g11) {:e {:e {:b 0} :b {:d 0, :c 0}} :d {:d {:e 0} :e {:b 0} :b {:c 0}} :b {:b {:d 0, :c 0} :d {:e 0}} :c {} :a {:a {:b 10} :b {:d 10, :c 10} :d {:e 10}}} (johnson g12))) (deftest all-pairs-shortest-paths-test (is (= {:p {:p {:o 2, :b 7} :o {:r 10} :b {:g 10}} :o {:o {:p 2, :r 8} :p {:b 9} :b {:g 12}} :g {:g {:b 3} :b {:r 8, :p 10} :p {:o 12}} :b {:b {:p 7, :g 3, :r 5} :p {:o 9}} :r {:r {:o 8, :b 5} :b {:g 8} :o {:p 10}}} (all-pairs-shortest-paths g2))) (let [vecs->sets #(clojure.walk/postwalk (fn [x] (if-not (map? x) x (reduce (fn [m [k v]] (assoc m k (if (vector? v) (set v) v))) {} x))) %)] (is (= (vecs->sets {1 {1 [5], 5 [3], 3 [6 2], 2 [4], 6 [10]} 2 {2 [4], 4 [10]} 3 {3 [1 6 2], 1 [5], 2 [4], 6 [10]} 4 {4 [10], 10 [2]} 5 {5 [3], 3 [1 6 2], 2 [4], 6 [10]} 6 {6 [1 10], 1 [5], 10 [2], 5 [3], 2 [4]} 7 {4 [10], 8 [11 9], 7 [8], 9 [3 5], 11 [4 2], 3 [1 6]} 8 {4 [10], 8 [11 9], 9 [7 3 5], 11 [4 2], 3 [1 6]} 9 {8 [11], 6 [10], 7 [8], 2 [4], 9 [7 3 5], 3 [1 6 2]} 10 {10 [2], 2 [4]} 11 {11 [4 2], 4 [10]}}) (vecs->sets (all-pairs-shortest-paths g13)))))) (deftest connectivity-test (are [expected got] (= expected got) #{#{5 6 7 8} #{1 2 3 4} #{9}} (set (map set (connected-components (add-nodes g8 9)))) [#{:r :g :b :o :p}] (map set (connected-components g2)) [#{1 2 3 4 5 6 8 7}] (map set (connected-components g9)) true (connected? g6) false (connected? g7) true (connected? g9) #{#{2 3 4 1} #{8} #{7 5 6}} (set (map set (scc g9))) #{#{:b :e :a} #{:h :d :c} #{:f :g}} (set (map set (scc g10))) false (strongly-connected? g9) true (strongly-connected? (digraph g2)) #{1 2 3 4 5 6 7 8} (set (nodes (connect g8))) #{:r :g :b :o :p} (set (nodes (connect g2))))) (deftest other-stuff-test (are [expected got] (= expected got) false (dag? g2) true (dag? (digraph (bf-span g2))) true (dag? 
g5) [:a :c :h :j] (shortest-path g4 :a :j) [:a :e :j] (shortest-path (graph g4) :a :j) #{9 10} (set (loners (add-nodes g8 9 10))) ;; TODO: the rest )) (deftest bellman-ford-test (are [expected graph start] (= expected (bellman-ford graph start)) false g11 :a false g11 :b [{:e ##Inf :d ##Inf :b ##Inf :a ##Inf :c 0}{:c [:c]}] g11 :c false g11 :d false g11 :e [{:e 10, :d 8, :b 3, :c 7, :a 0} {:a [:a], :c [:a :b :c], :b [:a :b], :d [:a :b :d], :e [:a :b :d :e]}] g12 :a [{:e 7, :d 5, :c 4, :a ##Inf, :b 0} {:b [:b], :c [:b :c], :d [:b :d], :e [:b :d :e]}] g12 :b [{:e ##Inf :d ##Inf :b ##Inf :a ##Inf :c 0} {:c [:c]}] g12 :c [{:e 2, :b -5, :c -1, :a ##Inf, :d 0} {:d [:d], :c [:d :e :b :c], :b [:d :e :b], :e [:d :e]}] g12 :d [{:d -2, :b -7, :c -3, :a ##Inf, :e 0} {:e [:e], :c [:e :b :c], :b [:e :b], :d [:e :b :d]}] g12 :e)) (deftest bipartite-test (are [expected got] (= expected got) nil (bipartite-color g1) true (bipartite? g6) true (bipartite? g8) false (bipartite? g1)) (are [options result] (contains? options result) #{{0 1, 1 0, 5 0, 2 1, 3 1, 4 0}} (bipartite-color g6) #{{1 1, 2 0, 3 0, 4 0, 5 0, 6 1, 7 1, 8 1} {1 1, 2 0, 3 0, 4 0, 5 1, 6 0, 7 0, 8 0}} (bipartite-color g8) #{#{#{2 3 4 5} #{1 6 7 8}} #{#{2 3 4 6 7 8} #{1 5}}} (set (bipartite-sets g8)))) (deftest coloring?-test (are [expected got] (= expected got) true (coloring? g1 {1 0, 2 1, 3 2, 4 0, 5 2, 6 1}) false (coloring? g1 {1 0, 2 1, 3 2, 4 0, 5 1, 6 1}) true (coloring? g2 {:r 0, :g 1, :b 2, :p 0, :o 1}) true (coloring? g5 {:a 0, :b 1, :c 2, :d 0, :e 1, :f 0, :g 1}) false (coloring? g5 {:a 0 :b 1 :c 2 :d 0 :e 1 :f 0 :g nil}))) (deftest greedy-coloring-test (are [expected got] (= expected got) true (coloring? g1 (greedy-coloring g1)) true (coloring? g2 (greedy-coloring g2)) true (coloring? g4 (greedy-coloring g4)) true (coloring? g5 (greedy-coloring g5)) true (coloring? g6 (greedy-coloring g6)) true (coloring? g13 (greedy-coloring g13)) expected colors are 0 , 1 , and 2 2 (apply max (vals (greedy-coloring triangle))))) (deftest scc-test (are [expected got] (= expected got) #{#{2 4 10} #{1 3 5 6} #{11} #{7 8 9}} (set (map set (scc g13))))) (deftest prim-mst-edges-weighted-test ; edges are described in different orders depending on platform, probably due ; to priority map impl differences -- thus testing edges as sets (letfn [(edge-set [edge] (into [(set (take 2 edge))] (drop 2 edge))) (edge-sets [edges] (set (map edge-set edges)))] (are [expected got] (= (edge-sets expected) (edge-sets got)) [[:e :a 1] [:a :b 3] [:b :c 5] [:c :d 2]] (prim-mst-edges mst_wt_g1) [[:b :a 1]] (prim-mst-edges mst_wt_g4) [[:c :a 2] [:c :b 2]] (prim-mst-edges mst_wt_g5) [[:b :a 4] [:c :b 8] [:c :i 2] [:c :f 4] [:f :g 2] [:g :h 1] [:d :c 7] [:e :d 9]] (prim-mst-edges mst_wt_g6)) (are [solutions result] (contains? 
solutions result) #{(edge-sets [[:d :a 1] [:b :d 2] [:c :b 1] [:e :f 1]]) (edge-sets [[:d :a 1] [:a :b 2] [:c :b 1] [:e :f 1]])} (edge-sets (prim-mst-edges mst_wt_g2)) #{(edge-sets [[:c :a] [:d :b] [:c :d]]) (edge-sets [[:a :b] [:a :c] [:a :d]])} (edge-sets (prim-mst-edges mst_unweighted_g3))))) (deftest prim-mst-test (are [expected got] (= expected got) [#{:a :b :d :e :f :g :h} (set [[:a :b] [:b :d] [:b :a] [:f :e] [:d :b] [:e :f]])] (let [mst (prim-mst mst_wt_g7)] [(nodes mst) (set (edges mst))]) [#{:a :b :c} (set [[:a :c] [:c :b] [:c :a] [:b :c]])] (let [mst (prim-mst mst_wt_g5)] [(nodes mst) (set (edges mst))]))) ;;;;graphs for A* path (def astar-simple-path-g1 (graph [:a :b] [:b :c] [:c :d] [:d :e])) ;;graph, with unreachable node (def astar-with-unreachable-target-g2 (graph [:a :b] [:b :c] [:d :e])) (def astar-with-cycle-g3 (digraph [:a :b] [:b :c] [:c :d] [:d :a])) (def astar-weighted-graph-g4 (weighted-digraph [:a :b 10] [:b :c 20] [:c :d 5] [:a :e 10] [:e :d 100])) (deftest astar-path-test (are [expected got](= expected got) {:e :d :d :c :c :b :b :a :a nil} (astar-path astar-simple-path-g1 :a :e (fn [x y] 0)) {:a nil :b :a :c :b} (astar-path astar-with-cycle-g3 :a :c (fn [x y] 0)) {:a nil :b :a :c :b :d :c} (astar-path astar-with-cycle-g3 :a :d (fn [x y] 0)) {:a nil :b :a :c :b :d :c} (astar-path astar-weighted-graph-g4 :a :d (fn [x y] 0)) all test graphs used for Dijkstra should work for A * as well {:a nil, :c :a, :h :c, :j :h} (astar-path g4 :a :j nil) {:r nil, :o :r, :p :o} (astar-path g2 :r :p nil)) (is (thrown? #?(:clj Exception :cljs js/Error) (astar-path astar-with-unreachable-target-g2 :a :e nil)))) (deftest astar-dist-test (are [expected got](= expected got) 4 (astar-dist astar-simple-path-g1 :a :e (fn [x y] 0)) 2 (astar-dist astar-with-cycle-g3 :a :c (fn [x y] 0)) 3 (astar-dist astar-with-cycle-g3 :a :d (fn [x y] 0)) 35 (astar-dist astar-weighted-graph-g4 :a :d (fn [x y] 0)) ) ) (deftest astar-visit-test (let [g (graph [0 1] [1 2] [2 3] [3 4]) i (atom 0)] (astar-path g 2 4 (fn [x y] (swap! i inc) (if (> x y) (- x y) (- y x)))) (is (= 3 @i) "This implementation of A* is incorrect. It is not optimal."))) (def degeneracy-g1 (graph {:a [:b] :b [:c :d]})) (def degeneracy-g2 (graph {:a [:b] :b [:c :d :e :f] :d [:e :f] :e [:f]})) (deftest degeneracy-ordering-test (let [ns (degeneracy-ordering degeneracy-g1)] (is (= #{:a :c :b :d} (set ns))) (is (contains? (set (drop 2 ns)) :b))) (let [ns (degeneracy-ordering degeneracy-g2)] (is (= #{:a :c :b :d :e :f} (set ns))) (is (= #{:a :c} (set (take 2 ns)))) (is (contains? (set (drop 2 ns)) :b)))) Graph with 4 maximal cliques : [: a : b : c ] , [: c : d ] , [: d : e : f : g ] , [: d : h ] . (def maximal-cliques-g1 (graph {:a [:b :c] :b [:c] :c [:d] :d [:e :f :g :h] :e [:f :g] :f [:g]})) Graph with 3 maximal cliques : # { : a : b : c } # { : b : d : e } # { : e : f } (def maximal-cliques-g2 (weighted-graph [:a :b 1] [:a :c 1] [:b :c 1] [:b :d 1] [:d :e 1] [:b :e 1] [:e :f 1])) (deftest maximal-cliques-test (are [expected got](= expected got) #{#{:a :b :c} #{:c :d} #{:d :e :f :g} #{:d :h}} (set (maximal-cliques maximal-cliques-g1)) #{#{:a :b :c} #{:b :d :e} #{:e :f}} (set (maximal-cliques maximal-cliques-g2)))) (def subgraph-g6 (graph [0 1] [1 2] [1 3])) (def subgraph-g7 (digraph [1 2] [2 3] [3 1])) (deftest subgraph-test (are [expected got] (= expected got) true (subgraph? subgraph-g6 g6) false (subgraph? (add-edges subgraph-g6 [0 3]) g6) true (subgraph? subgraph-g7 g7) false (subgraph? (add-nodes subgraph-g7 0) g7) false (subgraph? 
(digraph [2 1] [2 3] [3 1]) g7))) (deftest eql-test (are [expected got] (= expected got) true (eql? (graph) (graph)) true (eql? (digraph) (digraph)) true (eql? g6 (graph g6)) true (eql? g7 (digraph g7)) false (eql? (digraph) (graph)) false (eql? (graph) (digraph)) false (eql? g6 (graph 1 2)) false (eql? g7 (digraph 1 2)) false (eql? (digraph [1 2]) (graph [1 2])) false (eql? g7 g6))) (deftest isomorphism-test (are [expected got] (= expected got) true (isomorphism? (graph) (graph) identity) true (isomorphism? g6 g6 identity) true (isomorphism? g7 g7 identity) true (isomorphism? (graph) (graph) identity) true (isomorphism? g6 (mapped-by inc g6) inc) true (isomorphism? g7 (mapped-by inc g7) inc) false (isomorphism? g7 (mapped-by inc g7) dec) false (isomorphism? (digraph) (graph) identity) false(isomorphism? (digraph [1 2]) (graph [1 2]) identity)))
null
https://raw.githubusercontent.com/babashka/babashka/cda02cd3a3b0fd745fe80b9349dedf06c6265916/test-resources/lib_tests/loom/test/alg.cljc
clojure
's_algorithm 's_algorithm Weighted directed graph with a negative-weight cycle which is reachable from sources :a, :b, :d, and :e. /~simhaweb/alg/lectures/module9/module9.html Weighted directed graph with a non-negative-weight cycle, simple directed "triangle" graph graphs for mst 's_algorithm TODO: flaky test sometimes files on some versions of linux, why? TODO: the rest edges are described in different orders depending on platform, probably due to priority map impl differences -- thus testing edges as sets graphs for A* path graph, with unreachable node
(ns loom.test.alg (:require [loom.graph :refer [graph weighted-graph digraph weighted-digraph nodes successors remove-nodes add-nodes edges add-edges]] [loom.alg :refer [pre-traverse post-traverse pre-span topsort bf-traverse bf-span bf-path bf-path-bi dijkstra-path dijkstra-path-dist dijkstra-traverse dijkstra-span johnson all-pairs-shortest-paths connected-components connected? scc strongly-connected? connect dag? shortest-path loners bellman-ford bipartite-color bipartite? bipartite-sets coloring? greedy-coloring prim-mst-edges prim-mst-edges prim-mst astar-path astar-dist degeneracy-ordering maximal-cliques subgraph? eql? isomorphism?]] [loom.derived :refer [mapped-by]] clojure.walk #?@(:clj [[clojure.test :refer :all]] :cljs [cljs.test])) #?@(:cljs [(:require-macros [cljs.test :refer (deftest testing are is)])])) (def g1 (weighted-graph [1 2 7] [1 3 9] [1 6 14] [2 3 10] [2 4 15] [3 4 11] [3 6 2] [4 5 6] [5 6 9])) (def g2 (weighted-graph [:r :g 10] [:r :b 5] [:r :o 8] [:g :b 3] [:b :p 7] [:p :o 2])) (def g4 (weighted-graph [:a :b 85] [:b :f 80] [:f :i 250] [:i :j 84] [:a :c 217] [:c :g 186] [:c :h 103] [:d :h 183] [:h :j 167] [:a :e 173] [:e :j 502])) Algorithm Design Manual , p 179 (def g5 (digraph {:a [:b :c] :b [:c :d] :c [:e :f] :d [] :e [:d] :f [:e] :g [:a :f]})) (def g6 (graph [0 1] [1 2] [1 3] [2 4] [3 4] [0 5])) (def g7 (digraph [1 2] [2 3] [3 1] [5 6] [6 7])) (def g8 (graph {1 [2 3 4] 5 [6 7 8]})) Algorithm Design Manual , p 182 (def g9 (digraph {8 #{6}, 7 #{5}, 6 #{7}, 5 #{6}, 4 #{1 6 8}, 3 #{1}, 2 #{3 4 5}, 1 #{2}})) (def g10 (digraph {:a [:b] :b [:c :e :f] :c [:d :g] :d [:c :h] :e [:a :f] :f [:g] :g [:f] :h [:g :d]})) (def g11 (weighted-digraph [:a :b 3] [:b :c 4] [:b :d 5] [:d :e 2] [:e :b -8])) similar to , but with the edge [: e : b ] reweighed . (def g12 (weighted-digraph [:a :b 3] [:b :c 4] [:b :d 5] [:d :e 2] [:e :b -7])) Directed graph with 4 strongly connected components . 
(def g13 (digraph [1 5] [2 4] [3 1] [3 2] [3 6] [4 10] [5 3] [6 1] [6 10] [7 8] [8 9] [8 11] [9 3] [9 5] [9 7] [10 2] [11 2] [11 4])) (def g14 (digraph [1 2] [2 3] [2 4])) (def g15 (digraph [1 2] [3 2] [2 4])) (def g16 (digraph [:a :e] [:a :b] [:a :c] [:e :d] [:d :c])) (def triangle (digraph [:a :b] [:b :c] [:c :a])) (def mst_wt_g1 (weighted-graph '(:a, :e , 1) '(:c, :d ,2) '(:a,:b, 3), '(:b,:e,4), '(:b,:c,5) '(:e,:c,6) '(:e,:d,7))) graph with 2 components (def mst_wt_g2 (weighted-graph [:a :b 2] [:a :d 1] [:b :d 2] [:c :d 3] [:b :c 1] [:e :f 1] )) (def mst_unweighted_g3 (graph [:a :b] [:a :c] [:a :d] [:b :d] [:c :d])) (def mst_wt_g4 (weighted-graph [:a :b 1])) (def mst_wt_g5 (weighted-graph [:a :b 5] [:a :c 2] [:b :c 2])) graph from Cormen et all (def mst_wt_g6 (weighted-graph [:a :b 4] [:a :h 8] [:b :c 8] [:b :h 11] [:c :d 7] [:c :f 4] [:c :i 2] [:d :f 14] [:d :e 9] [:e :f 10] [:f :g 2] [:i :h 7] [:i :g 6] [:h :g 1] )) graph with 2 components and 2 isolated nodes (def mst_wt_g7 (weighted-graph [:a :b 2] [:b :d 2] [:e :f 1] :g :h )) (deftest depth-first-test (are [expected got] (= expected got) #{1 2 3 5 6 7} (set (pre-traverse g7)) #{1 2 3} (set (pre-traverse g7 1)) #{1 2 3 4 5 6 7 8} (set (pre-traverse g8)) #{1 2 3 4 5 6 7 8} (set (post-traverse g8)) [:d :e :f :c :b :a :g] (post-traverse g5 :g) false (not (some #{(pre-traverse g16 :a)} [[:a :e :d :c :b] [:a :b :c :e :d] [:a :b :e :d :c] [:a :c :b :e :d] [:a :c :e :d :b]])) false (not (some #{(post-traverse g7 1)} [[3 2 1] [2 3 1]])) #{1 2 3 4 5 6 7 8} (set (nodes (digraph (pre-span g8)))) #{2 3 4} (set (successors (digraph (pre-span g8)) 1)) #{1 5} (set (successors (digraph (pre-span g6)) 0)) true (let [span (digraph (pre-span g6))] (and (or (= #{3} (set (successors span 4))) (= #{2} (set (successors span 4)))) (or (= #{3} (set (successors span 1))) (= #{2} (set (successors span 1)))))) [:g :a :b :c :f :e :d] (topsort g5) nil (topsort g7) [5 6 7] (topsort g7 5) [1 2 4] (topsort g15 1))) (deftest depth-first-test-2 (is (#{[1 2 3 4] [1 2 4 3]} (topsort g14 1)))) (deftest breadth-first-test (are [expected got] (= expected got) #{1 2 3 5 6 7} (set (bf-traverse g7)) #{1 2 3} (set (bf-traverse g7 1)) #{1 2 3 4 5 6 7 8} (set (bf-traverse g8)) #{1 2 3 4 5 6 7 8} (set (nodes (digraph (bf-span g8)))) #{2 3} (set (successors (digraph (bf-span g6)) 1)) false (not (some #{(bf-traverse (remove-nodes g6 5))} [[0 1 2 3 4] [0 1 3 2 4]])) #{:r} (set (bf-traverse g2 :r :when #(< %3 1))) #{:r :o :b :g} (set (bf-traverse g2 :r :when #(< %3 2))) #{:r :o :b :g :p} (set (bf-traverse g2 :r :when #(< %3 3))) [:a :e :j] (bf-path g4 :a :j) [:a :c :h :j] (bf-path g4 :a :j :when (fn [n p d] (not= :e n))) :clj [[:a :e :j] (bf-path-bi g4 :a :j) true (some #(= % (bf-path-bi g5 :g :d)) [[:g :a :b :d] [:g :f :e :d]])]))) (deftest dijkstra-test (are [expected got] (= expected got) [:a :c :h :j] (dijkstra-path g4 :a :j) [[:a :c :h :j] 487] (dijkstra-path-dist g4 :a :j) [[:r :o :p] 10] (dijkstra-path-dist g2 :r :p) #{:r :g :b :o :p} (set (map first (dijkstra-traverse g2))) {:r {:o 8 :b 5} :b {:g 8} :o {:p 10}} (dijkstra-span g2 :r))) (deftest johnson-test (are [expected got] (= expected got) {:p {:p {:o 2, :b 7} :o {:r 10} :b {:g 10}} :o {:o {:p 2, :r 8} :p {:b 9} :b {:g 12}} :g {:g {:b 3} :b {:r 8, :p 10} :p {:o 12}} :b {:b {:p 7, :g 3, :r 5} :p {:o 9}} :r {:r {:o 8, :b 5} :b {:g 8} :o {:p 10}}} (johnson g2) {1 {1 {5 1}, 5 {3 2}, 3 {2 3, 6 3}, 2 {4 4}, 6 {10 4}} 2 {2 {4 1}, 4 {10 2}} 3 {3 {1 1, 2 1, 6 1}, 1 {5 2}, 2 {4 2}, 6 {10 2}} 4 {4 {10 1}, 10 {2 2}} 5 {5 {3 1}, 3 {1 
2, 2 2, 6 2}, 2 {4 3}, 6 {10 3}} 6 {6 {1 1, 10 1}, 1 {5 2}, 10 {2 2}, 2 {4 3}, 5 {3 3}} 7 {4 {10 4}, 8 {11 2, 9 2}, 7 {8 1}, 9 {5 3, 3 3}, 11 {4 3, 2 3}, 3 {6 4, 1 4}} 8 {4 {10 3}, 8 {11 1, 9 1}, 9 {7 2, 5 2, 3 2}, 11 {4 2, 2 2}, 3 {6 3, 1 3}} 9 {8 {11 3}, 6 {10 3}, 7 {8 2}, 2 {4 3}, 9 {7 1, 5 1, 3 1}, 3 {6 2, 2 2, 1 2}} 10 {10 {2 1}, 2 {4 2}} 11 {11 {2 1, 4 1}, 4 {10 2}}} (johnson g13) false (johnson g11) {:e {:e {:b 0} :b {:d 0, :c 0}} :d {:d {:e 0} :e {:b 0} :b {:c 0}} :b {:b {:d 0, :c 0} :d {:e 0}} :c {} :a {:a {:b 10} :b {:d 10, :c 10} :d {:e 10}}} (johnson g12))) (deftest all-pairs-shortest-paths-test (is (= {:p {:p {:o 2, :b 7} :o {:r 10} :b {:g 10}} :o {:o {:p 2, :r 8} :p {:b 9} :b {:g 12}} :g {:g {:b 3} :b {:r 8, :p 10} :p {:o 12}} :b {:b {:p 7, :g 3, :r 5} :p {:o 9}} :r {:r {:o 8, :b 5} :b {:g 8} :o {:p 10}}} (all-pairs-shortest-paths g2))) (let [vecs->sets #(clojure.walk/postwalk (fn [x] (if-not (map? x) x (reduce (fn [m [k v]] (assoc m k (if (vector? v) (set v) v))) {} x))) %)] (is (= (vecs->sets {1 {1 [5], 5 [3], 3 [6 2], 2 [4], 6 [10]} 2 {2 [4], 4 [10]} 3 {3 [1 6 2], 1 [5], 2 [4], 6 [10]} 4 {4 [10], 10 [2]} 5 {5 [3], 3 [1 6 2], 2 [4], 6 [10]} 6 {6 [1 10], 1 [5], 10 [2], 5 [3], 2 [4]} 7 {4 [10], 8 [11 9], 7 [8], 9 [3 5], 11 [4 2], 3 [1 6]} 8 {4 [10], 8 [11 9], 9 [7 3 5], 11 [4 2], 3 [1 6]} 9 {8 [11], 6 [10], 7 [8], 2 [4], 9 [7 3 5], 3 [1 6 2]} 10 {10 [2], 2 [4]} 11 {11 [4 2], 4 [10]}}) (vecs->sets (all-pairs-shortest-paths g13)))))) (deftest connectivity-test (are [expected got] (= expected got) #{#{5 6 7 8} #{1 2 3 4} #{9}} (set (map set (connected-components (add-nodes g8 9)))) [#{:r :g :b :o :p}] (map set (connected-components g2)) [#{1 2 3 4 5 6 8 7}] (map set (connected-components g9)) true (connected? g6) false (connected? g7) true (connected? g9) #{#{2 3 4 1} #{8} #{7 5 6}} (set (map set (scc g9))) #{#{:b :e :a} #{:h :d :c} #{:f :g}} (set (map set (scc g10))) false (strongly-connected? g9) true (strongly-connected? (digraph g2)) #{1 2 3 4 5 6 7 8} (set (nodes (connect g8))) #{:r :g :b :o :p} (set (nodes (connect g2))))) (deftest other-stuff-test (are [expected got] (= expected got) false (dag? g2) true (dag? (digraph (bf-span g2))) true (dag? g5) [:a :c :h :j] (shortest-path g4 :a :j) [:a :e :j] (shortest-path (graph g4) :a :j) #{9 10} (set (loners (add-nodes g8 9 10))) )) (deftest bellman-ford-test (are [expected graph start] (= expected (bellman-ford graph start)) false g11 :a false g11 :b [{:e ##Inf :d ##Inf :b ##Inf :a ##Inf :c 0}{:c [:c]}] g11 :c false g11 :d false g11 :e [{:e 10, :d 8, :b 3, :c 7, :a 0} {:a [:a], :c [:a :b :c], :b [:a :b], :d [:a :b :d], :e [:a :b :d :e]}] g12 :a [{:e 7, :d 5, :c 4, :a ##Inf, :b 0} {:b [:b], :c [:b :c], :d [:b :d], :e [:b :d :e]}] g12 :b [{:e ##Inf :d ##Inf :b ##Inf :a ##Inf :c 0} {:c [:c]}] g12 :c [{:e 2, :b -5, :c -1, :a ##Inf, :d 0} {:d [:d], :c [:d :e :b :c], :b [:d :e :b], :e [:d :e]}] g12 :d [{:d -2, :b -7, :c -3, :a ##Inf, :e 0} {:e [:e], :c [:e :b :c], :b [:e :b], :d [:e :b :d]}] g12 :e)) (deftest bipartite-test (are [expected got] (= expected got) nil (bipartite-color g1) true (bipartite? g6) true (bipartite? g8) false (bipartite? g1)) (are [options result] (contains? options result) #{{0 1, 1 0, 5 0, 2 1, 3 1, 4 0}} (bipartite-color g6) #{{1 1, 2 0, 3 0, 4 0, 5 0, 6 1, 7 1, 8 1} {1 1, 2 0, 3 0, 4 0, 5 1, 6 0, 7 0, 8 0}} (bipartite-color g8) #{#{#{2 3 4 5} #{1 6 7 8}} #{#{2 3 4 6 7 8} #{1 5}}} (set (bipartite-sets g8)))) (deftest coloring?-test (are [expected got] (= expected got) true (coloring? 
g1 {1 0, 2 1, 3 2, 4 0, 5 2, 6 1}) false (coloring? g1 {1 0, 2 1, 3 2, 4 0, 5 1, 6 1}) true (coloring? g2 {:r 0, :g 1, :b 2, :p 0, :o 1}) true (coloring? g5 {:a 0, :b 1, :c 2, :d 0, :e 1, :f 0, :g 1}) false (coloring? g5 {:a 0 :b 1 :c 2 :d 0 :e 1 :f 0 :g nil}))) (deftest greedy-coloring-test (are [expected got] (= expected got) true (coloring? g1 (greedy-coloring g1)) true (coloring? g2 (greedy-coloring g2)) true (coloring? g4 (greedy-coloring g4)) true (coloring? g5 (greedy-coloring g5)) true (coloring? g6 (greedy-coloring g6)) true (coloring? g13 (greedy-coloring g13)) expected colors are 0 , 1 , and 2 2 (apply max (vals (greedy-coloring triangle))))) (deftest scc-test (are [expected got] (= expected got) #{#{2 4 10} #{1 3 5 6} #{11} #{7 8 9}} (set (map set (scc g13))))) (deftest prim-mst-edges-weighted-test (letfn [(edge-set [edge] (into [(set (take 2 edge))] (drop 2 edge))) (edge-sets [edges] (set (map edge-set edges)))] (are [expected got] (= (edge-sets expected) (edge-sets got)) [[:e :a 1] [:a :b 3] [:b :c 5] [:c :d 2]] (prim-mst-edges mst_wt_g1) [[:b :a 1]] (prim-mst-edges mst_wt_g4) [[:c :a 2] [:c :b 2]] (prim-mst-edges mst_wt_g5) [[:b :a 4] [:c :b 8] [:c :i 2] [:c :f 4] [:f :g 2] [:g :h 1] [:d :c 7] [:e :d 9]] (prim-mst-edges mst_wt_g6)) (are [solutions result] (contains? solutions result) #{(edge-sets [[:d :a 1] [:b :d 2] [:c :b 1] [:e :f 1]]) (edge-sets [[:d :a 1] [:a :b 2] [:c :b 1] [:e :f 1]])} (edge-sets (prim-mst-edges mst_wt_g2)) #{(edge-sets [[:c :a] [:d :b] [:c :d]]) (edge-sets [[:a :b] [:a :c] [:a :d]])} (edge-sets (prim-mst-edges mst_unweighted_g3))))) (deftest prim-mst-test (are [expected got] (= expected got) [#{:a :b :d :e :f :g :h} (set [[:a :b] [:b :d] [:b :a] [:f :e] [:d :b] [:e :f]])] (let [mst (prim-mst mst_wt_g7)] [(nodes mst) (set (edges mst))]) [#{:a :b :c} (set [[:a :c] [:c :b] [:c :a] [:b :c]])] (let [mst (prim-mst mst_wt_g5)] [(nodes mst) (set (edges mst))]))) (def astar-simple-path-g1 (graph [:a :b] [:b :c] [:c :d] [:d :e])) (def astar-with-unreachable-target-g2 (graph [:a :b] [:b :c] [:d :e])) (def astar-with-cycle-g3 (digraph [:a :b] [:b :c] [:c :d] [:d :a])) (def astar-weighted-graph-g4 (weighted-digraph [:a :b 10] [:b :c 20] [:c :d 5] [:a :e 10] [:e :d 100])) (deftest astar-path-test (are [expected got](= expected got) {:e :d :d :c :c :b :b :a :a nil} (astar-path astar-simple-path-g1 :a :e (fn [x y] 0)) {:a nil :b :a :c :b} (astar-path astar-with-cycle-g3 :a :c (fn [x y] 0)) {:a nil :b :a :c :b :d :c} (astar-path astar-with-cycle-g3 :a :d (fn [x y] 0)) {:a nil :b :a :c :b :d :c} (astar-path astar-weighted-graph-g4 :a :d (fn [x y] 0)) all test graphs used for Dijkstra should work for A * as well {:a nil, :c :a, :h :c, :j :h} (astar-path g4 :a :j nil) {:r nil, :o :r, :p :o} (astar-path g2 :r :p nil)) (is (thrown? #?(:clj Exception :cljs js/Error) (astar-path astar-with-unreachable-target-g2 :a :e nil)))) (deftest astar-dist-test (are [expected got](= expected got) 4 (astar-dist astar-simple-path-g1 :a :e (fn [x y] 0)) 2 (astar-dist astar-with-cycle-g3 :a :c (fn [x y] 0)) 3 (astar-dist astar-with-cycle-g3 :a :d (fn [x y] 0)) 35 (astar-dist astar-weighted-graph-g4 :a :d (fn [x y] 0)) ) ) (deftest astar-visit-test (let [g (graph [0 1] [1 2] [2 3] [3 4]) i (atom 0)] (astar-path g 2 4 (fn [x y] (swap! i inc) (if (> x y) (- x y) (- y x)))) (is (= 3 @i) "This implementation of A* is incorrect. 
It is not optimal."))) (def degeneracy-g1 (graph {:a [:b] :b [:c :d]})) (def degeneracy-g2 (graph {:a [:b] :b [:c :d :e :f] :d [:e :f] :e [:f]})) (deftest degeneracy-ordering-test (let [ns (degeneracy-ordering degeneracy-g1)] (is (= #{:a :c :b :d} (set ns))) (is (contains? (set (drop 2 ns)) :b))) (let [ns (degeneracy-ordering degeneracy-g2)] (is (= #{:a :c :b :d :e :f} (set ns))) (is (= #{:a :c} (set (take 2 ns)))) (is (contains? (set (drop 2 ns)) :b)))) Graph with 4 maximal cliques : [: a : b : c ] , [: c : d ] , [: d : e : f : g ] , [: d : h ] . (def maximal-cliques-g1 (graph {:a [:b :c] :b [:c] :c [:d] :d [:e :f :g :h] :e [:f :g] :f [:g]})) Graph with 3 maximal cliques : # { : a : b : c } # { : b : d : e } # { : e : f } (def maximal-cliques-g2 (weighted-graph [:a :b 1] [:a :c 1] [:b :c 1] [:b :d 1] [:d :e 1] [:b :e 1] [:e :f 1])) (deftest maximal-cliques-test (are [expected got](= expected got) #{#{:a :b :c} #{:c :d} #{:d :e :f :g} #{:d :h}} (set (maximal-cliques maximal-cliques-g1)) #{#{:a :b :c} #{:b :d :e} #{:e :f}} (set (maximal-cliques maximal-cliques-g2)))) (def subgraph-g6 (graph [0 1] [1 2] [1 3])) (def subgraph-g7 (digraph [1 2] [2 3] [3 1])) (deftest subgraph-test (are [expected got] (= expected got) true (subgraph? subgraph-g6 g6) false (subgraph? (add-edges subgraph-g6 [0 3]) g6) true (subgraph? subgraph-g7 g7) false (subgraph? (add-nodes subgraph-g7 0) g7) false (subgraph? (digraph [2 1] [2 3] [3 1]) g7))) (deftest eql-test (are [expected got] (= expected got) true (eql? (graph) (graph)) true (eql? (digraph) (digraph)) true (eql? g6 (graph g6)) true (eql? g7 (digraph g7)) false (eql? (digraph) (graph)) false (eql? (graph) (digraph)) false (eql? g6 (graph 1 2)) false (eql? g7 (digraph 1 2)) false (eql? (digraph [1 2]) (graph [1 2])) false (eql? g7 g6))) (deftest isomorphism-test (are [expected got] (= expected got) true (isomorphism? (graph) (graph) identity) true (isomorphism? g6 g6 identity) true (isomorphism? g7 g7 identity) true (isomorphism? (graph) (graph) identity) true (isomorphism? g6 (mapped-by inc g6) inc) true (isomorphism? g7 (mapped-by inc g7) inc) false (isomorphism? g7 (mapped-by inc g7) dec) false (isomorphism? (digraph) (graph) identity) false(isomorphism? (digraph [1 2]) (graph [1 2]) identity)))
c32dd90c8136391f23527b18c5d92f0d0e197ea81886538cf44aa4338fb6c27a
input-output-hk/rscoin-haskell
Updater.hs
-- | Regularly updates the state of the wallet.
module GUI.RSCoin.Updater (runUpdater) where

import Control.Concurrent (threadDelay)
import Control.Concurrent.STM.TBQueue (TBQueue, writeTBQueue)
import Control.Monad.STM (atomically)

import GUI.RSCoin.Action (Action (Update))

-- | Regularly requests to perform Update action.
runUpdater :: TBQueue Action -> IO ()
runUpdater queue = do
    atomically $ writeTBQueue queue Update
    threadDelay 30000000
    runUpdater queue
null
https://raw.githubusercontent.com/input-output-hk/rscoin-haskell/109d8f6f226e9d0b360fcaac14c5a90da112a810/src/User/GUI/RSCoin/Updater.hs
haskell
| Regularly updates the state of the wallet.
| Regularly requests to perform Update action.
module GUI.RSCoin.Updater (runUpdater) where

import Control.Concurrent (threadDelay)
import Control.Concurrent.STM.TBQueue (TBQueue, writeTBQueue)
import Control.Monad.STM (atomically)

import GUI.RSCoin.Action (Action (Update))

runUpdater :: TBQueue Action -> IO ()
runUpdater queue = do
    atomically $ writeTBQueue queue Update
    threadDelay 30000000
    runUpdater queue
28a605159929334be0c16c33dc6d9b51b93013035aa8b243ad1618a8ed639912
gabebw/croniker
Application.hs
# OPTIONS_GHC -fno - warn - orphans # module Application ( getApplicationDev , appMain , develMain , makeFoundation , makeLogWare -- * for DevelMain , getApplicationRepl , shutdownApp -- * for GHCI , handler , db , todaysProfilesTaskMain , allProfilesTaskMain ) where import Control.Monad.Logger (liftLoc, runLoggingT) import Database.Persist.Postgresql (createPostgresqlPool, pgConnStr, pgPoolSize, runSqlPool) import Import import Language.Haskell.TH.Syntax (qLocation) import Network.Wai (Middleware) import Network.Wai.Handler.Warp (Settings, defaultSettings, defaultShouldDisplayException, runSettings, setHost, setOnException, setPort, getPort) import Network.Wai.Middleware.RequestLogger (Destination (Logger, Callback), IPAddrSource (..), OutputFormat (..), destination, mkRequestLogger, outputFormat) import System.Log.FastLogger (defaultBufSize, newStdoutLoggerSet, toLogStr) import LoadEnv (loadEnv) import System.Environment (getEnv) import qualified Data.ByteString.Char8 as BSC -- Import all relevant handler modules here. -- Don't forget to add new modules to your cabal file! import Handler.ChooseTimezone import Handler.Common import Handler.Feed import Handler.Profile import Handler.Root import Handler.Settings import Handler.UpdateProfilesTask (updateTodaysProfiles, updateAllProfiles) import Handler.UpdateUser This line actually creates our YesodDispatch instance . It is the second half of the call to mkYesodData which occurs in Foundation.hs . Please see the -- comments there for more details. mkYesodDispatch "App" resourcesApp -- | This function allocates resources (such as a database connection pool), -- performs initialization and returns a foundation datatype value. This is also -- the place to put your migrate statements to have automatic database migrations handled by Yesod . makeFoundation :: AppSettings -> IO App makeFoundation appSettings = do -- Some basic initializations: HTTP connection manager, logger, and static -- subsite. appHttpManager <- newManager appLogger <- newStdoutLoggerSet defaultBufSize >>= makeYesodLogger appStatic <- (if appMutableStatic appSettings then staticDevel else static) (appStaticDir appSettings) twitterConsumerKey <- BSC.pack <$> getEnv "TWITTER_CONSUMER_KEY" twitterConsumerSecret <- BSC.pack <$> getEnv "TWITTER_CONSUMER_SECRET" googleApiKey <- getEnv "GOOGLE_API_KEY" -- We need a log function to create a connection pool. We need a connection -- pool to create our foundation. And we need our foundation to get a -- logging function. To get out of this loop, we initially create a -- temporary foundation without a real connection pool, get a log function -- from there, and then create the real foundation. let mkFoundation appConnPool = App {..} -- The App {..} syntax is an example of record wild cards. For more -- information, see: -- -12-04-record-wildcards.html tempFoundation = mkFoundation $ error "connPool forced in tempFoundation" logFunc = messageLoggerSource tempFoundation appLogger -- Create the database connection pool pool <- flip runLoggingT logFunc $ createPostgresqlPool (pgConnStr $ appDatabaseConf appSettings) (pgPoolSize $ appDatabaseConf appSettings) -- Perform database migration using our application's logging settings. runLoggingT (runSqlPool (runMigration migrateAll) pool) logFunc -- Return the foundation return $ mkFoundation pool | Convert our foundation to a WAI Application by calling @toWaiAppPlain@ and -- applying some additional middlewares. 
makeApplication :: App -> IO Application makeApplication foundation = do logWare <- makeLogWare foundation -- Create the WAI application and apply middlewares appPlain <- toWaiAppPlain foundation return $ logWare $ defaultMiddlewaresNoLogging appPlain makeLogWare :: App -> IO Middleware makeLogWare foundation = mkRequestLogger def { outputFormat = if appSettings foundation `allowsLevel` LevelDebug then Detailed True else Apache (if appIpFromHeader $ appSettings foundation then FromFallback else FromSocket) , destination = if appSettings foundation `allowsLevel` LevelInfo then Logger $ loggerSet $ appLogger foundation else Callback $ \_ -> return () } -- | Warp settings for the given foundation value. warpSettings :: App -> Settings warpSettings foundation = setPort (appPort $ appSettings foundation) $ setHost (appHost $ appSettings foundation) $ setOnException (\_req e -> when (defaultShouldDisplayException e) $ messageLoggerSource foundation (appLogger foundation) $(qLocation >>= liftLoc) "yesod" LevelError (toLogStr $ "Exception from Warp: " ++ show e)) defaultSettings | For yesod devel , return the Warp settings and WAI Application . getApplicationDev :: IO (Settings, Application) getApplicationDev = do settings <- getAppSettings foundation <- makeFoundation settings wsettings <- getDevSettings $ warpSettings foundation app <- makeApplication foundation return (wsettings, app) getAppSettings :: IO AppSettings getAppSettings = do loadEnv loadYamlSettings [configSettingsYml] [] useEnv | main function for use by yesod devel develMain :: IO () develMain = develMainHelper getApplicationDev -- | The @main@ function for an executable running this site. appMain :: IO () appMain = do -- Get the settings from all relevant sources settings <- loadYamlSettingsArgs -- fall back to compile-time values, set to [] to require values at runtime [configSettingsYmlValue] -- allow environment variables to override useEnv -- Generate the foundation from the settings foundation <- makeFoundation settings -- Generate a WAI Application from the foundation app <- makeApplication foundation -- Run the application with Warp runSettings (warpSettings foundation) app -------------------------------------------------------------- -- Functions for DevelMain.hs (a way to run the app from GHCi) -------------------------------------------------------------- getApplicationRepl :: IO (Int, App, Application) getApplicationRepl = do settings <- getAppSettings foundation <- makeFoundation settings wsettings <- getDevSettings $ warpSettings foundation app1 <- makeApplication foundation return (getPort wsettings, foundation, app1) shutdownApp :: App -> IO () shutdownApp _ = return () todaysProfilesTaskMain :: IO () todaysProfilesTaskMain = handler updateTodaysProfiles allProfilesTaskMain :: IO () allProfilesTaskMain = handler updateAllProfiles --------------------------------------------- -- Functions for use in development with GHCi --------------------------------------------- -- | Run a handler handler :: Handler a -> IO a handler h = getAppSettings >>= makeFoundation >>= flip unsafeHandler h -- | Run DB queries db :: ReaderT SqlBackend (HandlerFor App) a -> IO a db = handler . runDB
null
https://raw.githubusercontent.com/gabebw/croniker/89f081738c229a3302af3b0123fea208bccbda11/src/Application.hs
haskell
* for DevelMain * for GHCI Import all relevant handler modules here. Don't forget to add new modules to your cabal file! comments there for more details. | This function allocates resources (such as a database connection pool), performs initialization and returns a foundation datatype value. This is also the place to put your migrate statements to have automatic database Some basic initializations: HTTP connection manager, logger, and static subsite. We need a log function to create a connection pool. We need a connection pool to create our foundation. And we need our foundation to get a logging function. To get out of this loop, we initially create a temporary foundation without a real connection pool, get a log function from there, and then create the real foundation. The App {..} syntax is an example of record wild cards. For more information, see: -12-04-record-wildcards.html Create the database connection pool Perform database migration using our application's logging settings. Return the foundation applying some additional middlewares. Create the WAI application and apply middlewares | Warp settings for the given foundation value. | The @main@ function for an executable running this site. Get the settings from all relevant sources fall back to compile-time values, set to [] to require values at runtime allow environment variables to override Generate the foundation from the settings Generate a WAI Application from the foundation Run the application with Warp ------------------------------------------------------------ Functions for DevelMain.hs (a way to run the app from GHCi) ------------------------------------------------------------ ------------------------------------------- Functions for use in development with GHCi ------------------------------------------- | Run a handler | Run DB queries
# OPTIONS_GHC -fno - warn - orphans # module Application ( getApplicationDev , appMain , develMain , makeFoundation , makeLogWare , getApplicationRepl , shutdownApp , handler , db , todaysProfilesTaskMain , allProfilesTaskMain ) where import Control.Monad.Logger (liftLoc, runLoggingT) import Database.Persist.Postgresql (createPostgresqlPool, pgConnStr, pgPoolSize, runSqlPool) import Import import Language.Haskell.TH.Syntax (qLocation) import Network.Wai (Middleware) import Network.Wai.Handler.Warp (Settings, defaultSettings, defaultShouldDisplayException, runSettings, setHost, setOnException, setPort, getPort) import Network.Wai.Middleware.RequestLogger (Destination (Logger, Callback), IPAddrSource (..), OutputFormat (..), destination, mkRequestLogger, outputFormat) import System.Log.FastLogger (defaultBufSize, newStdoutLoggerSet, toLogStr) import LoadEnv (loadEnv) import System.Environment (getEnv) import qualified Data.ByteString.Char8 as BSC import Handler.ChooseTimezone import Handler.Common import Handler.Feed import Handler.Profile import Handler.Root import Handler.Settings import Handler.UpdateProfilesTask (updateTodaysProfiles, updateAllProfiles) import Handler.UpdateUser This line actually creates our YesodDispatch instance . It is the second half of the call to mkYesodData which occurs in Foundation.hs . Please see the mkYesodDispatch "App" resourcesApp migrations handled by Yesod . makeFoundation :: AppSettings -> IO App makeFoundation appSettings = do appHttpManager <- newManager appLogger <- newStdoutLoggerSet defaultBufSize >>= makeYesodLogger appStatic <- (if appMutableStatic appSettings then staticDevel else static) (appStaticDir appSettings) twitterConsumerKey <- BSC.pack <$> getEnv "TWITTER_CONSUMER_KEY" twitterConsumerSecret <- BSC.pack <$> getEnv "TWITTER_CONSUMER_SECRET" googleApiKey <- getEnv "GOOGLE_API_KEY" let mkFoundation appConnPool = App {..} tempFoundation = mkFoundation $ error "connPool forced in tempFoundation" logFunc = messageLoggerSource tempFoundation appLogger pool <- flip runLoggingT logFunc $ createPostgresqlPool (pgConnStr $ appDatabaseConf appSettings) (pgPoolSize $ appDatabaseConf appSettings) runLoggingT (runSqlPool (runMigration migrateAll) pool) logFunc return $ mkFoundation pool | Convert our foundation to a WAI Application by calling @toWaiAppPlain@ and makeApplication :: App -> IO Application makeApplication foundation = do logWare <- makeLogWare foundation appPlain <- toWaiAppPlain foundation return $ logWare $ defaultMiddlewaresNoLogging appPlain makeLogWare :: App -> IO Middleware makeLogWare foundation = mkRequestLogger def { outputFormat = if appSettings foundation `allowsLevel` LevelDebug then Detailed True else Apache (if appIpFromHeader $ appSettings foundation then FromFallback else FromSocket) , destination = if appSettings foundation `allowsLevel` LevelInfo then Logger $ loggerSet $ appLogger foundation else Callback $ \_ -> return () } warpSettings :: App -> Settings warpSettings foundation = setPort (appPort $ appSettings foundation) $ setHost (appHost $ appSettings foundation) $ setOnException (\_req e -> when (defaultShouldDisplayException e) $ messageLoggerSource foundation (appLogger foundation) $(qLocation >>= liftLoc) "yesod" LevelError (toLogStr $ "Exception from Warp: " ++ show e)) defaultSettings | For yesod devel , return the Warp settings and WAI Application . 
getApplicationDev :: IO (Settings, Application) getApplicationDev = do settings <- getAppSettings foundation <- makeFoundation settings wsettings <- getDevSettings $ warpSettings foundation app <- makeApplication foundation return (wsettings, app) getAppSettings :: IO AppSettings getAppSettings = do loadEnv loadYamlSettings [configSettingsYml] [] useEnv | main function for use by yesod devel develMain :: IO () develMain = develMainHelper getApplicationDev appMain :: IO () appMain = do settings <- loadYamlSettingsArgs [configSettingsYmlValue] useEnv foundation <- makeFoundation settings app <- makeApplication foundation runSettings (warpSettings foundation) app getApplicationRepl :: IO (Int, App, Application) getApplicationRepl = do settings <- getAppSettings foundation <- makeFoundation settings wsettings <- getDevSettings $ warpSettings foundation app1 <- makeApplication foundation return (getPort wsettings, foundation, app1) shutdownApp :: App -> IO () shutdownApp _ = return () todaysProfilesTaskMain :: IO () todaysProfilesTaskMain = handler updateTodaysProfiles allProfilesTaskMain :: IO () allProfilesTaskMain = handler updateAllProfiles handler :: Handler a -> IO a handler h = getAppSettings >>= makeFoundation >>= flip unsafeHandler h db :: ReaderT SqlBackend (HandlerFor App) a -> IO a db = handler . runDB
a7e51b4c7f11b46c953af271627e5d0e6433a4303949fd78df166a7e3c940ed8
braidchat/braid
predicates.clj
(ns braid.chat.predicates
  (:require [datomic.api :as d]
            [braid.chat.db.thread :as db.thread]))

;; *-exists?

(defn ^:private exists?
  [db id-key entity-id]
  (->> (d/q '[:find ?entity .
              :in $ ?entity-id ?key
              :where
              [?entity ?key ?entity-id]]
            db entity-id id-key)
       boolean))

(defn user-exists?
  [db user-id]
  (exists? db :user/id user-id))

(defn group-exists?
  [db group-id]
  (exists? db :group/id group-id))

(defn thread-exists?
  [db thread-id]
  (exists? db :thread/id thread-id))

(defn tag-exists?
  [db tag-id]
  (exists? db :tag/id tag-id))

(defn message-exists?
  [db message-id]
  (exists? db :message/id message-id))

;; OTHER

(defn tag-in-group-with-name-exists?
  [db group-id tag-name]
  (->> (d/q '[:find ?tag .
              :in $ ?group-id ?tag-name
              :where
              [?group :group/id ?group-id]
              [?tag :tag/name ?tag-name]
              [?tag :tag/group ?group]]
            db group-id tag-name)
       boolean))

(defn group-with-slug-exists?
  [db slug]
  (->> (d/q '[:find ?group .
              :in $ ?slug
              :where
              [?group :group/slug ?slug]]
            db slug)
       boolean))

(defn user-in-group?
  [db user-id group-id]
  (->> (d/q '[:find ?user .
              :in $ ?user-id ?group-id
              :where
              [?group :group/id ?group-id]
              [?user :user/id ?user-id]
              [?group :group/user ?user]]
            db user-id group-id)
       boolean))

(defn user-has-thread-open?
  [db user-id thread-id]
  (->> (d/q '[:find ?user .
              :in $ ?user-id ?thread-id
              :where
              [?thread :thread/id ?thread-id]
              [?user :user/id ?user-id]
              [?user :user/open-thread ?thread]]
            db user-id thread-id)
       boolean))

(defn user-can-access-thread?
  [db user-id thread-id]
  (->> (db.thread/user-can-see-thread? user-id thread-id)
       boolean))

(defn thread-user-same-group?
  [db thread-id user-id]
  (->> (d/q '[:find ?user .
              :in $ ?user-id ?thread-id
              :where
              [?user :user/id ?user-id]
              [?thread :thread/id ?thread-id]
              [?thread :thread/group ?group]
              [?group :group/user ?user]]
            db user-id thread-id)
       boolean))

(defn thread-tag-same-group?
  [db thread-id user-id]
  (->> (d/q '[:find ?tag .
              :in $ ?user-id ?thread-id
              :where
              [?thread :thread/id ?thread-id]
              [?tag :tag/id ?tag-id]
              [?tag :tag/group ?group]
              [?thread :thread/group ?group]]
            db user-id thread-id)
       boolean))
null
https://raw.githubusercontent.com/braidchat/braid/2e44eb6e77f1d203115f9b9c529bd865fa3d7302/src/braid/chat/predicates.clj
clojure
*-exists?
OTHER
(ns braid.chat.predicates
  (:require [datomic.api :as d]
            [braid.chat.db.thread :as db.thread]))

(defn ^:private exists?
  [db id-key entity-id]
  (->> (d/q '[:find ?entity .
              :in $ ?entity-id ?key
              :where
              [?entity ?key ?entity-id]]
            db entity-id id-key)
       boolean))

(defn user-exists?
  [db user-id]
  (exists? db :user/id user-id))

(defn group-exists?
  [db group-id]
  (exists? db :group/id group-id))

(defn thread-exists?
  [db thread-id]
  (exists? db :thread/id thread-id))

(defn tag-exists?
  [db tag-id]
  (exists? db :tag/id tag-id))

(defn message-exists?
  [db message-id]
  (exists? db :message/id message-id))

(defn tag-in-group-with-name-exists?
  [db group-id tag-name]
  (->> (d/q '[:find ?tag .
              :in $ ?group-id ?tag-name
              :where
              [?group :group/id ?group-id]
              [?tag :tag/name ?tag-name]
              [?tag :tag/group ?group]]
            db group-id tag-name)
       boolean))

(defn group-with-slug-exists?
  [db slug]
  (->> (d/q '[:find ?group .
              :in $ ?slug
              :where
              [?group :group/slug ?slug]]
            db slug)
       boolean))

(defn user-in-group?
  [db user-id group-id]
  (->> (d/q '[:find ?user .
              :in $ ?user-id ?group-id
              :where
              [?group :group/id ?group-id]
              [?user :user/id ?user-id]
              [?group :group/user ?user]]
            db user-id group-id)
       boolean))

(defn user-has-thread-open?
  [db user-id thread-id]
  (->> (d/q '[:find ?user .
              :in $ ?user-id ?thread-id
              :where
              [?thread :thread/id ?thread-id]
              [?user :user/id ?user-id]
              [?user :user/open-thread ?thread]]
            db user-id thread-id)
       boolean))

(defn user-can-access-thread?
  [db user-id thread-id]
  (->> (db.thread/user-can-see-thread? user-id thread-id)
       boolean))

(defn thread-user-same-group?
  [db thread-id user-id]
  (->> (d/q '[:find ?user .
              :in $ ?user-id ?thread-id
              :where
              [?user :user/id ?user-id]
              [?thread :thread/id ?thread-id]
              [?thread :thread/group ?group]
              [?group :group/user ?user]]
            db user-id thread-id)
       boolean))

(defn thread-tag-same-group?
  [db thread-id user-id]
  (->> (d/q '[:find ?tag .
              :in $ ?user-id ?thread-id
              :where
              [?thread :thread/id ?thread-id]
              [?tag :tag/id ?tag-id]
              [?tag :tag/group ?group]
              [?thread :thread/group ?group]]
            db user-id thread-id)
       boolean))
cc30f5004c433987f9311ccb5147ba03b3c354d38a580e3d3089691dcea974f8
wedesoft/aiscm
2d_matching.scm
(use-modules (aiscm core)) (arr ((1 2) (3 4)) ((5 6) (7 8))) ;#<multiarray<int<8,unsigned>,3>>: ( ( ( 1 2 ) ( 3 4 ) ) ( ( 5 6 ) ; (7 8)))
null
https://raw.githubusercontent.com/wedesoft/aiscm/2c3db8d00cad6e042150714ada85da19cf4338ad/tests/integration/2d_matching.scm
scheme
#<multiarray<int<8,unsigned>,3>>: (7 8)))
(use-modules (aiscm core)) (arr ((1 2) (3 4)) ((5 6) (7 8))) ( ( ( 1 2 ) ( 3 4 ) ) ( ( 5 6 )
a538795c939c501c12d6e09e0f4f1e5707ec57bdb0430107e74aab72e15d24cf
fizruk/rzk
Decl.hs
module Rzk.Syntax.Decl where

import Rzk.Syntax.Term

data Decl var = Decl
  { declName :: var
  , declType :: Term var
  , declBody :: Term var
  }
null
https://raw.githubusercontent.com/fizruk/rzk/502eb7655655d8fe1be8f4fa2561464141b01e2e/rzk/src/Rzk/Syntax/Decl.hs
haskell
module Rzk.Syntax.Decl where

import Rzk.Syntax.Term

data Decl var = Decl
  { declName :: var
  , declType :: Term var
  , declBody :: Term var
  }
1c9777c75c9de4312540f293ecaadfedec38947ce979385bdd6240a58bb38790
ghc/ghc
Orphans.hs
# OPTIONS_GHC -fno - warn - orphans # # LANGUAGE FlexibleInstances # module Orphans where import Data . Default import GHC hiding (EpaComment) -- --------------------------------------------------------------------- class Default a where def :: a -- --------------------------------------------------------------------- Orphan Default instances . See instance Default [a] where def = [] instance Default NameAnn where def = mempty instance Default AnnList where def = mempty instance Default AnnListItem where def = mempty instance Default AnnPragma where def = AnnPragma def def def instance Semigroup EpAnnImportDecl where (<>) = error "unimplemented" instance Default EpAnnImportDecl where def = EpAnnImportDecl def Nothing Nothing Nothing Nothing Nothing instance Default HsRuleAnn where def = HsRuleAnn Nothing Nothing def instance Default AnnSig where def = AnnSig def def instance Default GrhsAnn where def = GrhsAnn Nothing def instance Default EpAnnUnboundVar where def = EpAnnUnboundVar def def instance (Default a, Default b) => Default (a, b) where def = (def, def) instance Default NoEpAnns where def = NoEpAnns instance Default AnnParen where def = AnnParen AnnParens def def instance Default AnnExplicitSum where def = AnnExplicitSum def def def def instance Default EpAnnHsCase where def = EpAnnHsCase def def def instance Default AnnsIf where def = AnnsIf def def def def def instance Default (Maybe a) where def = Nothing instance Default AnnProjection where def = AnnProjection def def instance Default AnnFieldLabel where def = AnnFieldLabel Nothing instance Default EpaLocation where def = EpaDelta (SameLine 0) [] instance Default AddEpAnn where def = AddEpAnn def def instance Default AnnKeywordId where got ta pick one instance Default AnnContext where def = AnnContext Nothing [] [] instance Default EpAnnSumPat where def = EpAnnSumPat def def def instance Default AnnsModule where def = AnnsModule [] mempty Nothing
null
https://raw.githubusercontent.com/ghc/ghc/14b2e3d3dda104c62c5abafd3353dd0315de71ad/utils/check-exact/Orphans.hs
haskell
--------------------------------------------------------------------- ---------------------------------------------------------------------
# OPTIONS_GHC -fno - warn - orphans # # LANGUAGE FlexibleInstances # module Orphans where import Data . Default import GHC hiding (EpaComment) class Default a where def :: a Orphan Default instances . See instance Default [a] where def = [] instance Default NameAnn where def = mempty instance Default AnnList where def = mempty instance Default AnnListItem where def = mempty instance Default AnnPragma where def = AnnPragma def def def instance Semigroup EpAnnImportDecl where (<>) = error "unimplemented" instance Default EpAnnImportDecl where def = EpAnnImportDecl def Nothing Nothing Nothing Nothing Nothing instance Default HsRuleAnn where def = HsRuleAnn Nothing Nothing def instance Default AnnSig where def = AnnSig def def instance Default GrhsAnn where def = GrhsAnn Nothing def instance Default EpAnnUnboundVar where def = EpAnnUnboundVar def def instance (Default a, Default b) => Default (a, b) where def = (def, def) instance Default NoEpAnns where def = NoEpAnns instance Default AnnParen where def = AnnParen AnnParens def def instance Default AnnExplicitSum where def = AnnExplicitSum def def def def instance Default EpAnnHsCase where def = EpAnnHsCase def def def instance Default AnnsIf where def = AnnsIf def def def def def instance Default (Maybe a) where def = Nothing instance Default AnnProjection where def = AnnProjection def def instance Default AnnFieldLabel where def = AnnFieldLabel Nothing instance Default EpaLocation where def = EpaDelta (SameLine 0) [] instance Default AddEpAnn where def = AddEpAnn def def instance Default AnnKeywordId where got ta pick one instance Default AnnContext where def = AnnContext Nothing [] [] instance Default EpAnnSumPat where def = EpAnnSumPat def def def instance Default AnnsModule where def = AnnsModule [] mempty Nothing
a77b73bc3e789193c20b6f9939ca96866dd2fbfdda2f946f030b54ebb1dcb1f0
open-company/open-company-web
ws_client_ids.cljs
(ns oc.web.utils.ws-client-ids)

(def change-client-id (atom nil))
(def interaction-client-id (atom nil))
(def notify-client-id (atom nil))
null
https://raw.githubusercontent.com/open-company/open-company-web/dfce3dd9bc115df91003179bceb87cca1f84b6cf/src/main/oc/web/utils/ws_client_ids.cljs
clojure
(ns oc.web.utils.ws-client-ids)

(def change-client-id (atom nil))
(def interaction-client-id (atom nil))
(def notify-client-id (atom nil))
7d8bbbe0c89b911ff6deacdab85d2703d3dc20ab74859ca86688915c0071aa5c
stettberger/ispositive
IsPositive.hs
module Integer.IsPositive
  ( is_positive_integer
  , is_not_positive_integer
  ) where

is_positive_integer x = 1 <= x && x == fromInteger (floor x)

is_not_positive_integer = not . is_positive_integer
null
https://raw.githubusercontent.com/stettberger/ispositive/c9ce01c3ea7c505d5e9428f9746aead93c7abebd/src/Integer/IsPositive.hs
haskell
module Integer.IsPositive ( is_positive_integer , is_not_positive_integer ) where is_positive_integer x = 1 <= x && x == fromInteger (floor x) is_not_positive_integer = not . is_positive_integer
82636027b99ce5f4fd53d8cfb3124bc9ac5459738582731d0b8659f14abdd3db
ogaml/ogaml
graphs.ml
open OgamlUtils let () = Printf.printf "Beginning graph tests...\n%!" module G = Graph.Make (struct type t = int let compare (i : int) (j : int) = compare i j end) let graph1 = let open G in empty |> add ~cost:1. 1 2 |> add ~cost:2. 1 3 |> add ~cost:3. 1 4 |> add ~cost:2. 2 5 |> add ~cost:1. 2 6 |> add ~cost:2. 3 7 |> add ~cost:3. 3 8 |> add ~cost:1. 4 9 |> add ~cost:2. 4 10 |> add ~cost:3. 4 11 let graph2 = let open G in empty |> add ~cost:1. 13 14 |> add ~cost:2. 13 15 |> add ~cost:3. 13 16 |> add ~cost:6. 14 17 |> add ~cost:2. 15 17 |> add ~cost:3. 16 17 let graph3 = let open G in empty |> add ~cost:10. 11 12 |> add ~cost:10. 12 13 |> add ~cost:15. 11 13 let graph4 = let open G in empty |> add ~cost:5. 12 2 |> add ~cost:4. 17 1 |> add ~cost:3. 4 1 |> add ~cost:1. 16 3 let cycle = let open G in empty |> add 1 2 |> add 2 3 |> add 3 4 |> add 4 1 |> add 4 5 |> add 5 6 let disjgraph = G.merge graph1 graph2 let biggraph = G.merge (G.merge (G.merge graph1 graph2) graph3) graph4 let testgraph1 () = assert (G.neighbours graph1 11 = []); assert (G.neighbours graph1 1 = [4;3;2]); assert (G.neighbours graph4 17 = [1]); assert (G.neighbours graph2 13 = [16;15;14]) let assert_dijkstra graph s t dist = match G.dijkstra graph s t, dist with |None, None -> true |Some _, None -> false |None, Some _ -> false |Some (d,_), Some d' -> d = d' let assert_dijkstra_path graph s t path = match G.dijkstra graph s t with |None -> false |Some (_,p) -> p = path let testgraph2 () = assert (assert_dijkstra graph1 11 1 None); assert (assert_dijkstra graph1 1 11 (Some 6.)); assert (assert_dijkstra graph1 2 5 (Some 2.)); assert (assert_dijkstra graph1 5 2 None); assert (assert_dijkstra graph3 11 13 (Some 15.)); assert (assert_dijkstra graph2 13 17 (Some 4.)); assert (assert_dijkstra (G.remove_edge graph2 13 15) 13 17 (Some 6.)) let testgraph3 () = assert (assert_dijkstra disjgraph 1 13 None); assert (assert_dijkstra disjgraph 13 1 None); assert (assert_dijkstra disjgraph 1 17 None); assert (assert_dijkstra disjgraph 1 11 (Some 6.)); assert (assert_dijkstra disjgraph 2 5 (Some 2.)); assert (assert_dijkstra disjgraph 13 17 (Some 4.)) let testgraph4 () = assert (assert_dijkstra biggraph 4 1 (Some 3.)); assert (assert_dijkstra biggraph 11 3 (Some 19.)); assert (assert_dijkstra biggraph 12 5 (Some 7.)); assert (assert_dijkstra biggraph 4 5 (Some 6.)) let testgraph5 () = assert (assert_dijkstra_path biggraph 4 1 [4;1]); assert (assert_dijkstra_path biggraph 11 3 [11;13;16;3]); assert (assert_dijkstra_path biggraph 1 17 [1;4;11;13;15;17]); assert (assert_dijkstra_path biggraph 4 5 [4;1;2;5]) let assert_dfs graph s l = let l = ref l in let b = ref true in G.dfs graph s (fun v -> assert (!l <> []); b := !b && (List.hd !l = v); l := List.tl !l ); (!l = []) && !b let assert_bfs graph s l = let l = ref l in let b = ref true in G.bfs graph s (fun v -> assert (!l <> []); b := !b && (List.hd !l = v); l := List.tl !l ); (!l = []) && !b let testgraph6 () = assert (assert_dfs graph1 1 [1;4;11;10;9;3;8;7;2;6;5]); assert (assert_bfs graph1 1 [1;4;3;2;11;10;9;8;7;6;5]); assert (assert_dfs graph1 11 [11]); assert (assert_bfs graph1 11 [11]); assert (assert_dfs graph2 13 [13;16;17;15;14]); assert (assert_bfs graph2 13 [13;16;15;14;17]); assert (assert_dfs graph4 12 [12;2]); assert (assert_bfs graph4 12 [12;2]); assert (assert_dfs cycle 1 [1;2;3;4;5;6]); assert (assert_bfs cycle 1 [1;2;3;4;5;6]); assert (assert_dfs cycle 4 [4;5;6;1;2;3]); assert (assert_bfs cycle 4 [4;5;1;6;2;3]) let () = testgraph1 (); Printf.printf "\tTest 1 passed\n%!"; 
testgraph2 (); Printf.printf "\tTest 2 passed\n%!"; testgraph3 (); Printf.printf "\tTest 3 passed\n%!"; testgraph4 (); Printf.printf "\tTest 4 passed\n%!"; testgraph5 (); Printf.printf "\tTest 5 passed\n%!"; testgraph6 (); Printf.printf "\tTest 6 passed\n%!";
null
https://raw.githubusercontent.com/ogaml/ogaml/5e74597521abf7ba2833a9247e55780eabfbab78/tests/graphs.ml
ocaml
open OgamlUtils let () = Printf.printf "Beginning graph tests...\n%!" module G = Graph.Make (struct type t = int let compare (i : int) (j : int) = compare i j end) let graph1 = let open G in empty |> add ~cost:1. 1 2 |> add ~cost:2. 1 3 |> add ~cost:3. 1 4 |> add ~cost:2. 2 5 |> add ~cost:1. 2 6 |> add ~cost:2. 3 7 |> add ~cost:3. 3 8 |> add ~cost:1. 4 9 |> add ~cost:2. 4 10 |> add ~cost:3. 4 11 let graph2 = let open G in empty |> add ~cost:1. 13 14 |> add ~cost:2. 13 15 |> add ~cost:3. 13 16 |> add ~cost:6. 14 17 |> add ~cost:2. 15 17 |> add ~cost:3. 16 17 let graph3 = let open G in empty |> add ~cost:10. 11 12 |> add ~cost:10. 12 13 |> add ~cost:15. 11 13 let graph4 = let open G in empty |> add ~cost:5. 12 2 |> add ~cost:4. 17 1 |> add ~cost:3. 4 1 |> add ~cost:1. 16 3 let cycle = let open G in empty |> add 1 2 |> add 2 3 |> add 3 4 |> add 4 1 |> add 4 5 |> add 5 6 let disjgraph = G.merge graph1 graph2 let biggraph = G.merge (G.merge (G.merge graph1 graph2) graph3) graph4 let testgraph1 () = assert (G.neighbours graph1 11 = []); assert (G.neighbours graph1 1 = [4;3;2]); assert (G.neighbours graph4 17 = [1]); assert (G.neighbours graph2 13 = [16;15;14]) let assert_dijkstra graph s t dist = match G.dijkstra graph s t, dist with |None, None -> true |Some _, None -> false |None, Some _ -> false |Some (d,_), Some d' -> d = d' let assert_dijkstra_path graph s t path = match G.dijkstra graph s t with |None -> false |Some (_,p) -> p = path let testgraph2 () = assert (assert_dijkstra graph1 11 1 None); assert (assert_dijkstra graph1 1 11 (Some 6.)); assert (assert_dijkstra graph1 2 5 (Some 2.)); assert (assert_dijkstra graph1 5 2 None); assert (assert_dijkstra graph3 11 13 (Some 15.)); assert (assert_dijkstra graph2 13 17 (Some 4.)); assert (assert_dijkstra (G.remove_edge graph2 13 15) 13 17 (Some 6.)) let testgraph3 () = assert (assert_dijkstra disjgraph 1 13 None); assert (assert_dijkstra disjgraph 13 1 None); assert (assert_dijkstra disjgraph 1 17 None); assert (assert_dijkstra disjgraph 1 11 (Some 6.)); assert (assert_dijkstra disjgraph 2 5 (Some 2.)); assert (assert_dijkstra disjgraph 13 17 (Some 4.)) let testgraph4 () = assert (assert_dijkstra biggraph 4 1 (Some 3.)); assert (assert_dijkstra biggraph 11 3 (Some 19.)); assert (assert_dijkstra biggraph 12 5 (Some 7.)); assert (assert_dijkstra biggraph 4 5 (Some 6.)) let testgraph5 () = assert (assert_dijkstra_path biggraph 4 1 [4;1]); assert (assert_dijkstra_path biggraph 11 3 [11;13;16;3]); assert (assert_dijkstra_path biggraph 1 17 [1;4;11;13;15;17]); assert (assert_dijkstra_path biggraph 4 5 [4;1;2;5]) let assert_dfs graph s l = let l = ref l in let b = ref true in G.dfs graph s (fun v -> assert (!l <> []); b := !b && (List.hd !l = v); l := List.tl !l ); (!l = []) && !b let assert_bfs graph s l = let l = ref l in let b = ref true in G.bfs graph s (fun v -> assert (!l <> []); b := !b && (List.hd !l = v); l := List.tl !l ); (!l = []) && !b let testgraph6 () = assert (assert_dfs graph1 1 [1;4;11;10;9;3;8;7;2;6;5]); assert (assert_bfs graph1 1 [1;4;3;2;11;10;9;8;7;6;5]); assert (assert_dfs graph1 11 [11]); assert (assert_bfs graph1 11 [11]); assert (assert_dfs graph2 13 [13;16;17;15;14]); assert (assert_bfs graph2 13 [13;16;15;14;17]); assert (assert_dfs graph4 12 [12;2]); assert (assert_bfs graph4 12 [12;2]); assert (assert_dfs cycle 1 [1;2;3;4;5;6]); assert (assert_bfs cycle 1 [1;2;3;4;5;6]); assert (assert_dfs cycle 4 [4;5;6;1;2;3]); assert (assert_bfs cycle 4 [4;5;1;6;2;3]) let () = testgraph1 (); Printf.printf "\tTest 1 passed\n%!"; 
testgraph2 (); Printf.printf "\tTest 2 passed\n%!"; testgraph3 (); Printf.printf "\tTest 3 passed\n%!"; testgraph4 (); Printf.printf "\tTest 4 passed\n%!"; testgraph5 (); Printf.printf "\tTest 5 passed\n%!"; testgraph6 (); Printf.printf "\tTest 6 passed\n%!";
be767c1843d878f1e9ff584e711b3757fae4b779ce72ba29ec375ece8a367e24
inria-parkas/sundialsml
nvector_pthreads.ml
open Sundials type data = RealArray.t type kind = [`Pthreads|Nvector_serial.kind] type t = (data, kind) Nvector.t (* Selectively enable and disable fused and array operations *) external c_enablefusedops_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablefusedops" external c_enablelinearcombination_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablelinearcombination" external c_enablescaleaddmulti_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablescaleaddmulti" external c_enabledotprodmulti_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enabledotprodmulti" external c_enablelinearsumvectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablelinearsumvectorarray" external c_enablescalevectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablescalevectorarray" external c_enableconstvectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enableconstvectorarray" external c_enablewrmsnormvectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablewrmsnormvectorarray" external c_enablewrmsnormmaskvectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablewrmsnormmaskvectorarray" external c_enablescaleaddmultivectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablescaleaddmultivectorarray" external c_enablelinearcombinationvectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablelinearcombinationvectorarray" let unwrap = Nvector.unwrap external c_wrap : int -> RealArray.t -> (t -> bool) -> (t -> t) -> Context.t -> t = "sunml_nvec_wrap_pthreads" let rec wrap ?context ?(with_fused_ops=false) nthreads v = let len = RealArray.length v in let check nv' = (len = RealArray.length (unwrap nv')) in let ctx = Sundials_impl.Context.get context in let nv = c_wrap nthreads v check (clone nthreads) ctx in if with_fused_ops then c_enablefusedops_pthreads nv true; nv and clone nthreads nv = let nv' = wrap ~context:(Nvector.context nv) nthreads (RealArray.copy (unwrap nv)) in if Sundials_impl.Version.lt400 then () else begin c_enablelinearcombination_pthreads nv' (Nvector.Ops.has_linearcombination nv); c_enablescaleaddmulti_pthreads nv' (Nvector.Ops.has_scaleaddmulti nv); c_enabledotprodmulti_pthreads nv' (Nvector.Ops.has_dotprodmulti nv); c_enablelinearsumvectorarray_pthreads nv' (Nvector.Ops.has_linearsumvectorarray nv); c_enablescalevectorarray_pthreads nv' (Nvector.Ops.has_scalevectorarray nv); c_enableconstvectorarray_pthreads nv' (Nvector.Ops.has_constvectorarray nv); c_enablewrmsnormvectorarray_pthreads nv' (Nvector.Ops.has_wrmsnormvectorarray nv); c_enablewrmsnormmaskvectorarray_pthreads nv' (Nvector.Ops.has_wrmsnormmaskvectorarray nv); c_enablescaleaddmultivectorarray_pthreads nv' (Nvector.Ops.has_scaleaddmultivectorarray nv); c_enablelinearcombinationvectorarray_pthreads nv' (Nvector.Ops.has_linearcombinationvectorarray nv) end; nv' let pp fmt v = RealArray.pp fmt (unwrap v) let make ?context ?with_fused_ops nthreads n iv = wrap ?context ?with_fused_ops nthreads (RealArray.make n iv) external num_threads : t -> int = "sunml_nvec_pthreads_num_threads" let do_enable f nv v = match v with | None -> () | Some v -> f nv v let enable ?with_fused_ops ?with_linear_combination ?with_scale_add_multi ?with_dot_prod_multi ?with_linear_sum_vector_array ?with_scale_vector_array ?with_const_vector_array ?with_wrms_norm_vector_array 
?with_wrms_norm_mask_vector_array ?with_scale_add_multi_vector_array ?with_linear_combination_vector_array nv = do_enable c_enablefusedops_pthreads nv with_fused_ops; do_enable c_enablelinearcombination_pthreads nv with_linear_combination; do_enable c_enablescaleaddmulti_pthreads nv with_scale_add_multi; do_enable c_enabledotprodmulti_pthreads nv with_dot_prod_multi; do_enable c_enablelinearsumvectorarray_pthreads nv with_linear_sum_vector_array; do_enable c_enablescalevectorarray_pthreads nv with_scale_vector_array; do_enable c_enableconstvectorarray_pthreads nv with_const_vector_array; do_enable c_enablewrmsnormvectorarray_pthreads nv with_wrms_norm_vector_array; do_enable c_enablewrmsnormmaskvectorarray_pthreads nv with_wrms_norm_mask_vector_array; do_enable c_enablescaleaddmultivectorarray_pthreads nv with_scale_add_multi_vector_array; do_enable c_enablelinearcombinationvectorarray_pthreads nv with_linear_combination_vector_array module Any = struct (* {{{ *) external c_any_wrap : extension_constructor -> int -> RealArray.t -> (Nvector.any -> bool) -> (Nvector.any -> Nvector.any) -> Context.t -> Nvector.any = "sunml_nvec_anywrap_pthreads_byte" "sunml_nvec_anywrap_pthreads" let rec wrap ?context ?(with_fused_ops=false) ?(with_linear_combination=false) ?(with_scale_add_multi=false) ?(with_dot_prod_multi=false) ?(with_linear_sum_vector_array=false) ?(with_scale_vector_array=false) ?(with_const_vector_array=false) ?(with_wrms_norm_vector_array=false) ?(with_wrms_norm_mask_vector_array=false) ?(with_scale_add_multi_vector_array=false) ?(with_linear_combination_vector_array=false) nthreads v = if not Sundials_impl.Version.has_nvector_get_id then raise Config.NotImplementedBySundialsVersion; let len = RealArray.length v in let check nv = match unwrap nv with | Nvector.RA ra -> len = RealArray.length ra && Nvector.get_id nv = Nvector.Pthreads | _ -> false in let ctx = Sundials_impl.Context.get context in let nv = c_any_wrap [%extension_constructor Nvector.RA] nthreads v check (clone nthreads) ctx in if with_fused_ops then c_enablefusedops_pthreads nv true; if with_fused_ops then c_enablefusedops_pthreads nv true; if with_linear_combination then c_enablelinearcombination_pthreads nv true; if with_scale_add_multi then c_enablescaleaddmulti_pthreads nv true; if with_dot_prod_multi then c_enabledotprodmulti_pthreads nv true; if with_linear_sum_vector_array then c_enablelinearsumvectorarray_pthreads nv true; if with_scale_vector_array then c_enablescalevectorarray_pthreads nv true; if with_const_vector_array then c_enableconstvectorarray_pthreads nv true; if with_wrms_norm_vector_array then c_enablewrmsnormvectorarray_pthreads nv true; if with_wrms_norm_mask_vector_array then c_enablewrmsnormmaskvectorarray_pthreads nv true; if with_scale_add_multi_vector_array then c_enablescaleaddmultivectorarray_pthreads nv true; if with_linear_combination_vector_array then c_enablelinearcombinationvectorarray_pthreads nv true; nv and clone nthreads nv = let v = match unwrap nv with | Nvector.RA v -> v | _ -> assert false in let nv' = wrap ~context:(Nvector.context nv) nthreads (RealArray.copy v) in c_enablelinearcombination_pthreads nv' (Nvector.Ops.has_linearcombination nv); c_enablescaleaddmulti_pthreads nv' (Nvector.Ops.has_scaleaddmulti nv); c_enabledotprodmulti_pthreads nv' (Nvector.Ops.has_dotprodmulti nv); c_enablelinearsumvectorarray_pthreads nv' (Nvector.Ops.has_linearsumvectorarray nv); c_enablescalevectorarray_pthreads nv' (Nvector.Ops.has_scalevectorarray nv); c_enableconstvectorarray_pthreads nv' 
(Nvector.Ops.has_constvectorarray nv); c_enablewrmsnormvectorarray_pthreads nv' (Nvector.Ops.has_wrmsnormvectorarray nv); c_enablewrmsnormmaskvectorarray_pthreads nv' (Nvector.Ops.has_wrmsnormmaskvectorarray nv); c_enablescaleaddmultivectorarray_pthreads nv' (Nvector.Ops.has_scaleaddmultivectorarray nv); c_enablelinearcombinationvectorarray_pthreads nv' (Nvector.Ops.has_linearcombinationvectorarray nv); nv' let make ?context ?with_fused_ops ?with_linear_combination ?with_scale_add_multi ?with_dot_prod_multi ?with_linear_sum_vector_array ?with_scale_vector_array ?with_const_vector_array ?with_wrms_norm_vector_array ?with_wrms_norm_mask_vector_array ?with_scale_add_multi_vector_array ?with_linear_combination_vector_array nthreads n iv = wrap ?context ?with_fused_ops ?with_linear_combination ?with_scale_add_multi ?with_dot_prod_multi ?with_linear_sum_vector_array ?with_scale_vector_array ?with_const_vector_array ?with_wrms_norm_vector_array ?with_wrms_norm_mask_vector_array ?with_scale_add_multi_vector_array ?with_linear_combination_vector_array nthreads (RealArray.make n iv) let unwrap nv = match Nvector.unwrap nv with | Nvector.RA a -> a | _ -> raise Nvector.BadGenericType let enable ?with_fused_ops ?with_linear_combination ?with_scale_add_multi ?with_dot_prod_multi ?with_linear_sum_vector_array ?with_scale_vector_array ?with_const_vector_array ?with_wrms_norm_vector_array ?with_wrms_norm_mask_vector_array ?with_scale_add_multi_vector_array ?with_linear_combination_vector_array nv = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Nvector.get_id nv <> Nvector.Pthreads then raise Nvector.BadGenericType; do_enable c_enablefusedops_pthreads nv with_fused_ops; do_enable c_enablelinearcombination_pthreads nv with_linear_combination; do_enable c_enablescaleaddmulti_pthreads nv with_scale_add_multi; do_enable c_enabledotprodmulti_pthreads nv with_dot_prod_multi; do_enable c_enablelinearsumvectorarray_pthreads nv with_linear_sum_vector_array; do_enable c_enablescalevectorarray_pthreads nv with_scale_vector_array; do_enable c_enableconstvectorarray_pthreads nv with_const_vector_array; do_enable c_enablewrmsnormvectorarray_pthreads nv with_wrms_norm_vector_array; do_enable c_enablewrmsnormmaskvectorarray_pthreads nv with_wrms_norm_mask_vector_array; do_enable c_enablescaleaddmultivectorarray_pthreads nv with_scale_add_multi_vector_array; do_enable c_enablelinearcombinationvectorarray_pthreads nv with_linear_combination_vector_array end (* }}} *) module Ops = struct (* {{{ *) type t = (RealArray.t, kind) Nvector.t let check = Nvector.check let clone nv = let data = Nvector.unwrap nv in wrap (num_threads nv) (RealArray.copy data) external c_linearsum : float -> t -> float -> t -> t -> unit = "sunml_nvec_pthreads_linearsum" [@@noalloc] let linearsum a (x : t) b (y : t) (z : t) = if Sundials_configuration.safe then (check x y; check x z); c_linearsum a x b y z external const : float -> t -> unit = "sunml_nvec_pthreads_const" [@@noalloc] external c_prod : t -> t -> t -> unit = "sunml_nvec_pthreads_prod" [@@noalloc] let prod (x : t) (y : t) (z : t) = if Sundials_configuration.safe then (check x y; check x z); c_prod x y z external c_div : t -> t -> t -> unit = "sunml_nvec_pthreads_div" [@@noalloc] let div (x : t) (y : t) (z : t) = if Sundials_configuration.safe then (check x y; check x z); c_div x y z external c_scale : float -> t -> t -> unit = "sunml_nvec_pthreads_scale" [@@noalloc] let scale c (x : t) (z : t) = if Sundials_configuration.safe then check x z; c_scale c 
x z external c_abs : t -> t -> unit = "sunml_nvec_pthreads_abs" [@@noalloc] let abs (x : t) (z : t) = if Sundials_configuration.safe then check x z; c_abs x z external c_inv : t -> t -> unit = "sunml_nvec_pthreads_inv" [@@noalloc] let inv (x : t) (z : t) = if Sundials_configuration.safe then check x z; c_inv x z external c_addconst : t -> float -> t -> unit = "sunml_nvec_pthreads_addconst" [@@noalloc] let addconst (x : t) b (z : t) = if Sundials_configuration.safe then check x z; c_addconst x b z external c_dotprod : t -> t -> float = "sunml_nvec_pthreads_dotprod" let dotprod (x : t) (y : t) = if Sundials_configuration.safe then check x y; c_dotprod x y external maxnorm : t -> float = "sunml_nvec_pthreads_maxnorm" external c_wrmsnorm : t -> t -> float = "sunml_nvec_pthreads_wrmsnorm" let wrmsnorm (x : t) (w : t) = if Sundials_configuration.safe then check x w; c_wrmsnorm x w external c_wrmsnormmask : t -> t -> t -> float = "sunml_nvec_pthreads_wrmsnormmask" let wrmsnormmask (x : t) (w : t) (id : t) = if Sundials_configuration.safe then (check x w; check x id); c_wrmsnormmask x w id external min : t -> float = "sunml_nvec_pthreads_min" external c_wl2norm : t -> t -> float = "sunml_nvec_pthreads_wl2norm" let wl2norm (x : t) (w : t) = if Sundials_configuration.safe then check x w; c_wl2norm x w external l1norm : t -> float = "sunml_nvec_pthreads_l1norm" external c_compare : float -> t -> t -> unit = "sunml_nvec_pthreads_compare" [@@noalloc] let compare c (x : t) (z : t) = if Sundials_configuration.safe then check x z; c_compare c x z external c_invtest : t -> t -> bool = "sunml_nvec_pthreads_invtest" [@@noalloc] let invtest (x : t) (z : t) = if Sundials_configuration.safe then check x z; c_invtest x z external c_constrmask : t -> t -> t -> bool = "sunml_nvec_pthreads_constrmask" [@@noalloc] let constrmask (c : t) (x : t) (m : t) = if Sundials_configuration.safe then (check c x; check c m); c_constrmask c x m external c_minquotient : t -> t -> float = "sunml_nvec_pthreads_minquotient" let minquotient (n : t) (d : t) = if Sundials_configuration.safe then check n d; c_minquotient n d external space : t -> int * int = "sunml_nvec_pthreads_space" external getlength : t -> int = "sunml_nvec_pthreads_getlength" external c_print_file : t -> Logfile.t option -> unit = "sunml_nvec_pthreads_print_file" let print ?logfile nv = c_print_file nv logfile external c_linearcombination : RealArray.t -> t array -> t -> unit = "sunml_nvec_pthreads_linearcombination" let linearcombination ca (xa : t array) (z : t) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then Array.iter (check z) xa; c_linearcombination ca xa z let same_len' n ya = if n <> Array.length ya then invalid_arg "arrays of unequal length" let same_len xa ya = same_len' (Array.length xa) ya external c_scaleaddmulti : RealArray.t -> t -> t array -> t array -> unit = "sunml_nvec_pthreads_scaleaddmulti" let scaleaddmulti aa (x : t) (ya : t array) (za : t array) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (Array.iter (check x) ya; Array.iter (check x) za; let nv = RealArray.length aa in same_len' nv ya; same_len' nv za); c_scaleaddmulti aa x ya za external c_dotprodmulti : t -> t array -> RealArray.t -> unit = "sunml_nvec_pthreads_dotprodmulti" let dotprodmulti (x : t) (ya : t array) (dp : RealArray.t) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if 
Sundials_configuration.safe then (let nv = RealArray.length dp in same_len' nv ya; Array.iter (check x) ya); c_dotprodmulti x ya dp external c_linearsumvectorarray : float -> t array -> float -> t array -> t array -> unit = "sunml_nvec_pthreads_linearsumvectorarray" let linearsumvectorarray a (xa : t array) b (ya : t array) (za : t array) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (let x = Array.get xa 0 in Array.iter (check x) xa; Array.iter (check x) ya; Array.iter (check x) za; same_len xa ya; same_len xa za); c_linearsumvectorarray a xa b ya za external c_scalevectorarray : RealArray.t -> t array -> t array -> unit = "sunml_nvec_pthreads_scalevectorarray" let scalevectorarray c (xa : t array) (za : t array) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (let x = Array.get xa 0 in Array.iter (check x) xa; Array.iter (check x) za; same_len xa za); c_scalevectorarray c xa za external c_constvectorarray : float -> t array -> unit = "sunml_nvec_pthreads_constvectorarray" let constvectorarray c (za : t array) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (let z = Array.get za 0 in Array.iter (check z) za); c_constvectorarray c za external c_wrmsnormvectorarray : t array -> t array -> RealArray.t -> unit = "sunml_nvec_pthreads_wrmsnormvectorarray" let wrmsnormvectorarray (xa : t array) (wa : t array) nrm = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (let x = Array.get xa 0 in Array.iter (check x) xa; Array.iter (check x) wa; same_len xa wa); c_wrmsnormvectorarray xa wa nrm external c_wrmsnormmaskvectorarray : t array -> t array -> t -> RealArray.t -> unit = "sunml_nvec_pthreads_wrmsnormmaskvectorarray" let wrmsnormmaskvectorarray (xa : t array) (wa : t array) (id : t) nrm = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (Array.iter (check id) xa; Array.iter (check id) wa; same_len xa wa); c_wrmsnormmaskvectorarray xa wa id nrm external c_scaleaddmultivectorarray : RealArray.t -> t array -> t array array -> t array array -> unit = "sunml_nvec_pthreads_scaleaddmultivectorarray" let scaleaddmultivectorarray ra (xa : t array) (yaa : t array array) (zaa : t array array) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (let x = Array.get xa 0 in let ns = RealArray.length ra in let nv = Array.length xa in same_len' ns yaa; same_len' ns zaa; Array.iter (check x) xa; Array.iter (fun ya -> same_len' nv ya; Array.iter (check x) ya) yaa; Array.iter (fun za -> same_len' nv za; Array.iter (check x) za) zaa; same_len yaa zaa); c_scaleaddmultivectorarray ra xa yaa zaa external c_linearcombinationvectorarray : RealArray.t -> t array array -> t array -> unit = "sunml_nvec_pthreads_linearcombinationvectorarray" let linearcombinationvectorarray ca (xaa : t array array) (za : t array) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (let z = Array.get za 0 in let ns = RealArray.length ca in let nv = Array.length za in same_len' ns xaa; Array.iter (check z) za; Array.iter (fun xa -> same_len' nv xa; Array.iter (check z) xa) xaa); c_linearcombinationvectorarray ca xaa za module Local = struct let dotprod = dotprod 
let maxnorm = maxnorm let min = min let l1norm = l1norm let invtest = invtest let constrmask = constrmask let minquotient = minquotient external c_wsqrsum : t -> t -> float = "sunml_nvec_pthreads_wsqrsumlocal" let wsqrsum (x : t) (w : t) = if Sundials_impl.Version.lt500 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then check x w; c_wsqrsum x w external c_wsqrsummask : t -> t -> t -> float = "sunml_nvec_pthreads_wsqrsummasklocal" let wsqrsummask (x : t) (w : t) (id : t) = if Sundials_impl.Version.lt500 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (check x w; check x id); c_wsqrsummask x w id let dotprodmulti = dotprodmulti let dotprodmulti_allreduce _ _ = raise Nvector.OperationNotProvided end end (* }}} *)
null
https://raw.githubusercontent.com/inria-parkas/sundialsml/a72ebfc84b55470ed97fbb0b45d700deebfc1664/src/nvectors/nvector_pthreads.ml
ocaml
Selectively enable and disable fused and array operations {{{ }}} {{{ }}}
open Sundials type data = RealArray.t type kind = [`Pthreads|Nvector_serial.kind] type t = (data, kind) Nvector.t external c_enablefusedops_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablefusedops" external c_enablelinearcombination_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablelinearcombination" external c_enablescaleaddmulti_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablescaleaddmulti" external c_enabledotprodmulti_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enabledotprodmulti" external c_enablelinearsumvectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablelinearsumvectorarray" external c_enablescalevectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablescalevectorarray" external c_enableconstvectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enableconstvectorarray" external c_enablewrmsnormvectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablewrmsnormvectorarray" external c_enablewrmsnormmaskvectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablewrmsnormmaskvectorarray" external c_enablescaleaddmultivectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablescaleaddmultivectorarray" external c_enablelinearcombinationvectorarray_pthreads : ('d, 'k) Nvector.t -> bool -> unit = "sunml_nvec_pthreads_enablelinearcombinationvectorarray" let unwrap = Nvector.unwrap external c_wrap : int -> RealArray.t -> (t -> bool) -> (t -> t) -> Context.t -> t = "sunml_nvec_wrap_pthreads" let rec wrap ?context ?(with_fused_ops=false) nthreads v = let len = RealArray.length v in let check nv' = (len = RealArray.length (unwrap nv')) in let ctx = Sundials_impl.Context.get context in let nv = c_wrap nthreads v check (clone nthreads) ctx in if with_fused_ops then c_enablefusedops_pthreads nv true; nv and clone nthreads nv = let nv' = wrap ~context:(Nvector.context nv) nthreads (RealArray.copy (unwrap nv)) in if Sundials_impl.Version.lt400 then () else begin c_enablelinearcombination_pthreads nv' (Nvector.Ops.has_linearcombination nv); c_enablescaleaddmulti_pthreads nv' (Nvector.Ops.has_scaleaddmulti nv); c_enabledotprodmulti_pthreads nv' (Nvector.Ops.has_dotprodmulti nv); c_enablelinearsumvectorarray_pthreads nv' (Nvector.Ops.has_linearsumvectorarray nv); c_enablescalevectorarray_pthreads nv' (Nvector.Ops.has_scalevectorarray nv); c_enableconstvectorarray_pthreads nv' (Nvector.Ops.has_constvectorarray nv); c_enablewrmsnormvectorarray_pthreads nv' (Nvector.Ops.has_wrmsnormvectorarray nv); c_enablewrmsnormmaskvectorarray_pthreads nv' (Nvector.Ops.has_wrmsnormmaskvectorarray nv); c_enablescaleaddmultivectorarray_pthreads nv' (Nvector.Ops.has_scaleaddmultivectorarray nv); c_enablelinearcombinationvectorarray_pthreads nv' (Nvector.Ops.has_linearcombinationvectorarray nv) end; nv' let pp fmt v = RealArray.pp fmt (unwrap v) let make ?context ?with_fused_ops nthreads n iv = wrap ?context ?with_fused_ops nthreads (RealArray.make n iv) external num_threads : t -> int = "sunml_nvec_pthreads_num_threads" let do_enable f nv v = match v with | None -> () | Some v -> f nv v let enable ?with_fused_ops ?with_linear_combination ?with_scale_add_multi ?with_dot_prod_multi ?with_linear_sum_vector_array ?with_scale_vector_array ?with_const_vector_array ?with_wrms_norm_vector_array ?with_wrms_norm_mask_vector_array 
?with_scale_add_multi_vector_array ?with_linear_combination_vector_array nv = do_enable c_enablefusedops_pthreads nv with_fused_ops; do_enable c_enablelinearcombination_pthreads nv with_linear_combination; do_enable c_enablescaleaddmulti_pthreads nv with_scale_add_multi; do_enable c_enabledotprodmulti_pthreads nv with_dot_prod_multi; do_enable c_enablelinearsumvectorarray_pthreads nv with_linear_sum_vector_array; do_enable c_enablescalevectorarray_pthreads nv with_scale_vector_array; do_enable c_enableconstvectorarray_pthreads nv with_const_vector_array; do_enable c_enablewrmsnormvectorarray_pthreads nv with_wrms_norm_vector_array; do_enable c_enablewrmsnormmaskvectorarray_pthreads nv with_wrms_norm_mask_vector_array; do_enable c_enablescaleaddmultivectorarray_pthreads nv with_scale_add_multi_vector_array; do_enable c_enablelinearcombinationvectorarray_pthreads nv with_linear_combination_vector_array external c_any_wrap : extension_constructor -> int -> RealArray.t -> (Nvector.any -> bool) -> (Nvector.any -> Nvector.any) -> Context.t -> Nvector.any = "sunml_nvec_anywrap_pthreads_byte" "sunml_nvec_anywrap_pthreads" let rec wrap ?context ?(with_fused_ops=false) ?(with_linear_combination=false) ?(with_scale_add_multi=false) ?(with_dot_prod_multi=false) ?(with_linear_sum_vector_array=false) ?(with_scale_vector_array=false) ?(with_const_vector_array=false) ?(with_wrms_norm_vector_array=false) ?(with_wrms_norm_mask_vector_array=false) ?(with_scale_add_multi_vector_array=false) ?(with_linear_combination_vector_array=false) nthreads v = if not Sundials_impl.Version.has_nvector_get_id then raise Config.NotImplementedBySundialsVersion; let len = RealArray.length v in let check nv = match unwrap nv with | Nvector.RA ra -> len = RealArray.length ra && Nvector.get_id nv = Nvector.Pthreads | _ -> false in let ctx = Sundials_impl.Context.get context in let nv = c_any_wrap [%extension_constructor Nvector.RA] nthreads v check (clone nthreads) ctx in if with_fused_ops then c_enablefusedops_pthreads nv true; if with_fused_ops then c_enablefusedops_pthreads nv true; if with_linear_combination then c_enablelinearcombination_pthreads nv true; if with_scale_add_multi then c_enablescaleaddmulti_pthreads nv true; if with_dot_prod_multi then c_enabledotprodmulti_pthreads nv true; if with_linear_sum_vector_array then c_enablelinearsumvectorarray_pthreads nv true; if with_scale_vector_array then c_enablescalevectorarray_pthreads nv true; if with_const_vector_array then c_enableconstvectorarray_pthreads nv true; if with_wrms_norm_vector_array then c_enablewrmsnormvectorarray_pthreads nv true; if with_wrms_norm_mask_vector_array then c_enablewrmsnormmaskvectorarray_pthreads nv true; if with_scale_add_multi_vector_array then c_enablescaleaddmultivectorarray_pthreads nv true; if with_linear_combination_vector_array then c_enablelinearcombinationvectorarray_pthreads nv true; nv and clone nthreads nv = let v = match unwrap nv with | Nvector.RA v -> v | _ -> assert false in let nv' = wrap ~context:(Nvector.context nv) nthreads (RealArray.copy v) in c_enablelinearcombination_pthreads nv' (Nvector.Ops.has_linearcombination nv); c_enablescaleaddmulti_pthreads nv' (Nvector.Ops.has_scaleaddmulti nv); c_enabledotprodmulti_pthreads nv' (Nvector.Ops.has_dotprodmulti nv); c_enablelinearsumvectorarray_pthreads nv' (Nvector.Ops.has_linearsumvectorarray nv); c_enablescalevectorarray_pthreads nv' (Nvector.Ops.has_scalevectorarray nv); c_enableconstvectorarray_pthreads nv' (Nvector.Ops.has_constvectorarray nv); 
c_enablewrmsnormvectorarray_pthreads nv' (Nvector.Ops.has_wrmsnormvectorarray nv); c_enablewrmsnormmaskvectorarray_pthreads nv' (Nvector.Ops.has_wrmsnormmaskvectorarray nv); c_enablescaleaddmultivectorarray_pthreads nv' (Nvector.Ops.has_scaleaddmultivectorarray nv); c_enablelinearcombinationvectorarray_pthreads nv' (Nvector.Ops.has_linearcombinationvectorarray nv); nv' let make ?context ?with_fused_ops ?with_linear_combination ?with_scale_add_multi ?with_dot_prod_multi ?with_linear_sum_vector_array ?with_scale_vector_array ?with_const_vector_array ?with_wrms_norm_vector_array ?with_wrms_norm_mask_vector_array ?with_scale_add_multi_vector_array ?with_linear_combination_vector_array nthreads n iv = wrap ?context ?with_fused_ops ?with_linear_combination ?with_scale_add_multi ?with_dot_prod_multi ?with_linear_sum_vector_array ?with_scale_vector_array ?with_const_vector_array ?with_wrms_norm_vector_array ?with_wrms_norm_mask_vector_array ?with_scale_add_multi_vector_array ?with_linear_combination_vector_array nthreads (RealArray.make n iv) let unwrap nv = match Nvector.unwrap nv with | Nvector.RA a -> a | _ -> raise Nvector.BadGenericType let enable ?with_fused_ops ?with_linear_combination ?with_scale_add_multi ?with_dot_prod_multi ?with_linear_sum_vector_array ?with_scale_vector_array ?with_const_vector_array ?with_wrms_norm_vector_array ?with_wrms_norm_mask_vector_array ?with_scale_add_multi_vector_array ?with_linear_combination_vector_array nv = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Nvector.get_id nv <> Nvector.Pthreads then raise Nvector.BadGenericType; do_enable c_enablefusedops_pthreads nv with_fused_ops; do_enable c_enablelinearcombination_pthreads nv with_linear_combination; do_enable c_enablescaleaddmulti_pthreads nv with_scale_add_multi; do_enable c_enabledotprodmulti_pthreads nv with_dot_prod_multi; do_enable c_enablelinearsumvectorarray_pthreads nv with_linear_sum_vector_array; do_enable c_enablescalevectorarray_pthreads nv with_scale_vector_array; do_enable c_enableconstvectorarray_pthreads nv with_const_vector_array; do_enable c_enablewrmsnormvectorarray_pthreads nv with_wrms_norm_vector_array; do_enable c_enablewrmsnormmaskvectorarray_pthreads nv with_wrms_norm_mask_vector_array; do_enable c_enablescaleaddmultivectorarray_pthreads nv with_scale_add_multi_vector_array; do_enable c_enablelinearcombinationvectorarray_pthreads nv with_linear_combination_vector_array type t = (RealArray.t, kind) Nvector.t let check = Nvector.check let clone nv = let data = Nvector.unwrap nv in wrap (num_threads nv) (RealArray.copy data) external c_linearsum : float -> t -> float -> t -> t -> unit = "sunml_nvec_pthreads_linearsum" [@@noalloc] let linearsum a (x : t) b (y : t) (z : t) = if Sundials_configuration.safe then (check x y; check x z); c_linearsum a x b y z external const : float -> t -> unit = "sunml_nvec_pthreads_const" [@@noalloc] external c_prod : t -> t -> t -> unit = "sunml_nvec_pthreads_prod" [@@noalloc] let prod (x : t) (y : t) (z : t) = if Sundials_configuration.safe then (check x y; check x z); c_prod x y z external c_div : t -> t -> t -> unit = "sunml_nvec_pthreads_div" [@@noalloc] let div (x : t) (y : t) (z : t) = if Sundials_configuration.safe then (check x y; check x z); c_div x y z external c_scale : float -> t -> t -> unit = "sunml_nvec_pthreads_scale" [@@noalloc] let scale c (x : t) (z : t) = if Sundials_configuration.safe then check x z; c_scale c x z external c_abs : t -> t -> unit = "sunml_nvec_pthreads_abs" [@@noalloc] let abs 
(x : t) (z : t) = if Sundials_configuration.safe then check x z; c_abs x z external c_inv : t -> t -> unit = "sunml_nvec_pthreads_inv" [@@noalloc] let inv (x : t) (z : t) = if Sundials_configuration.safe then check x z; c_inv x z external c_addconst : t -> float -> t -> unit = "sunml_nvec_pthreads_addconst" [@@noalloc] let addconst (x : t) b (z : t) = if Sundials_configuration.safe then check x z; c_addconst x b z external c_dotprod : t -> t -> float = "sunml_nvec_pthreads_dotprod" let dotprod (x : t) (y : t) = if Sundials_configuration.safe then check x y; c_dotprod x y external maxnorm : t -> float = "sunml_nvec_pthreads_maxnorm" external c_wrmsnorm : t -> t -> float = "sunml_nvec_pthreads_wrmsnorm" let wrmsnorm (x : t) (w : t) = if Sundials_configuration.safe then check x w; c_wrmsnorm x w external c_wrmsnormmask : t -> t -> t -> float = "sunml_nvec_pthreads_wrmsnormmask" let wrmsnormmask (x : t) (w : t) (id : t) = if Sundials_configuration.safe then (check x w; check x id); c_wrmsnormmask x w id external min : t -> float = "sunml_nvec_pthreads_min" external c_wl2norm : t -> t -> float = "sunml_nvec_pthreads_wl2norm" let wl2norm (x : t) (w : t) = if Sundials_configuration.safe then check x w; c_wl2norm x w external l1norm : t -> float = "sunml_nvec_pthreads_l1norm" external c_compare : float -> t -> t -> unit = "sunml_nvec_pthreads_compare" [@@noalloc] let compare c (x : t) (z : t) = if Sundials_configuration.safe then check x z; c_compare c x z external c_invtest : t -> t -> bool = "sunml_nvec_pthreads_invtest" [@@noalloc] let invtest (x : t) (z : t) = if Sundials_configuration.safe then check x z; c_invtest x z external c_constrmask : t -> t -> t -> bool = "sunml_nvec_pthreads_constrmask" [@@noalloc] let constrmask (c : t) (x : t) (m : t) = if Sundials_configuration.safe then (check c x; check c m); c_constrmask c x m external c_minquotient : t -> t -> float = "sunml_nvec_pthreads_minquotient" let minquotient (n : t) (d : t) = if Sundials_configuration.safe then check n d; c_minquotient n d external space : t -> int * int = "sunml_nvec_pthreads_space" external getlength : t -> int = "sunml_nvec_pthreads_getlength" external c_print_file : t -> Logfile.t option -> unit = "sunml_nvec_pthreads_print_file" let print ?logfile nv = c_print_file nv logfile external c_linearcombination : RealArray.t -> t array -> t -> unit = "sunml_nvec_pthreads_linearcombination" let linearcombination ca (xa : t array) (z : t) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then Array.iter (check z) xa; c_linearcombination ca xa z let same_len' n ya = if n <> Array.length ya then invalid_arg "arrays of unequal length" let same_len xa ya = same_len' (Array.length xa) ya external c_scaleaddmulti : RealArray.t -> t -> t array -> t array -> unit = "sunml_nvec_pthreads_scaleaddmulti" let scaleaddmulti aa (x : t) (ya : t array) (za : t array) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (Array.iter (check x) ya; Array.iter (check x) za; let nv = RealArray.length aa in same_len' nv ya; same_len' nv za); c_scaleaddmulti aa x ya za external c_dotprodmulti : t -> t array -> RealArray.t -> unit = "sunml_nvec_pthreads_dotprodmulti" let dotprodmulti (x : t) (ya : t array) (dp : RealArray.t) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (let nv = RealArray.length dp in same_len' nv ya; Array.iter (check x) ya); 
c_dotprodmulti x ya dp external c_linearsumvectorarray : float -> t array -> float -> t array -> t array -> unit = "sunml_nvec_pthreads_linearsumvectorarray" let linearsumvectorarray a (xa : t array) b (ya : t array) (za : t array) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (let x = Array.get xa 0 in Array.iter (check x) xa; Array.iter (check x) ya; Array.iter (check x) za; same_len xa ya; same_len xa za); c_linearsumvectorarray a xa b ya za external c_scalevectorarray : RealArray.t -> t array -> t array -> unit = "sunml_nvec_pthreads_scalevectorarray" let scalevectorarray c (xa : t array) (za : t array) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (let x = Array.get xa 0 in Array.iter (check x) xa; Array.iter (check x) za; same_len xa za); c_scalevectorarray c xa za external c_constvectorarray : float -> t array -> unit = "sunml_nvec_pthreads_constvectorarray" let constvectorarray c (za : t array) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (let z = Array.get za 0 in Array.iter (check z) za); c_constvectorarray c za external c_wrmsnormvectorarray : t array -> t array -> RealArray.t -> unit = "sunml_nvec_pthreads_wrmsnormvectorarray" let wrmsnormvectorarray (xa : t array) (wa : t array) nrm = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (let x = Array.get xa 0 in Array.iter (check x) xa; Array.iter (check x) wa; same_len xa wa); c_wrmsnormvectorarray xa wa nrm external c_wrmsnormmaskvectorarray : t array -> t array -> t -> RealArray.t -> unit = "sunml_nvec_pthreads_wrmsnormmaskvectorarray" let wrmsnormmaskvectorarray (xa : t array) (wa : t array) (id : t) nrm = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (Array.iter (check id) xa; Array.iter (check id) wa; same_len xa wa); c_wrmsnormmaskvectorarray xa wa id nrm external c_scaleaddmultivectorarray : RealArray.t -> t array -> t array array -> t array array -> unit = "sunml_nvec_pthreads_scaleaddmultivectorarray" let scaleaddmultivectorarray ra (xa : t array) (yaa : t array array) (zaa : t array array) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (let x = Array.get xa 0 in let ns = RealArray.length ra in let nv = Array.length xa in same_len' ns yaa; same_len' ns zaa; Array.iter (check x) xa; Array.iter (fun ya -> same_len' nv ya; Array.iter (check x) ya) yaa; Array.iter (fun za -> same_len' nv za; Array.iter (check x) za) zaa; same_len yaa zaa); c_scaleaddmultivectorarray ra xa yaa zaa external c_linearcombinationvectorarray : RealArray.t -> t array array -> t array -> unit = "sunml_nvec_pthreads_linearcombinationvectorarray" let linearcombinationvectorarray ca (xaa : t array array) (za : t array) = if Sundials_impl.Version.lt400 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (let z = Array.get za 0 in let ns = RealArray.length ca in let nv = Array.length za in same_len' ns xaa; Array.iter (check z) za; Array.iter (fun xa -> same_len' nv xa; Array.iter (check z) xa) xaa); c_linearcombinationvectorarray ca xaa za module Local = struct let dotprod = dotprod let maxnorm = maxnorm let min = min let l1norm = l1norm let invtest = invtest let constrmask = constrmask 
let minquotient = minquotient external c_wsqrsum : t -> t -> float = "sunml_nvec_pthreads_wsqrsumlocal" let wsqrsum (x : t) (w : t) = if Sundials_impl.Version.lt500 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then check x w; c_wsqrsum x w external c_wsqrsummask : t -> t -> t -> float = "sunml_nvec_pthreads_wsqrsummasklocal" let wsqrsummask (x : t) (w : t) (id : t) = if Sundials_impl.Version.lt500 then raise Config.NotImplementedBySundialsVersion; if Sundials_configuration.safe then (check x w; check x id); c_wsqrsummask x w id let dotprodmulti = dotprodmulti let dotprodmulti_allreduce _ _ = raise Nvector.OperationNotProvided end
07107eaa5ebe1858ae97c48b4a0eda3ff089423a84875625826ebe43cc6143b9
Phaetec/pogo-cruncher
rename.cljs
(ns cruncher.rename (:require [om.next :as om :refer-macros [defui]] [om.dom :as dom :include-macros true] [ajax.core :refer [POST]] [goog.dom :as gdom] [cruncher.utils.views :as vlib] [cruncher.utils.lib :as lib] [cruncher.config :as config] [cruncher.communication.progress :as progress] [cruncher.communication.utils :as clib] [cruncher.communication.main :as com])) (defn- dispatch-scheme "Gets a string containing the user's selection. Construct the new nickname from it." [scheme iv at df st t1 t2] (let [iv-int (lib/str->int iv)] (cond (and (= "rename-scheme-1" scheme) (= iv-int 100)) (str iv-int "%") (= "rename-scheme-1" scheme) (str iv-int "% " at "/" df "/" st) (= "rename-scheme-2" scheme) (str at "/" df "/" st) (= "rename-scheme-3" scheme) (str iv-int "%") (= "rename-scheme-4" scheme) (str iv-int "% " t1 "/" t2)))) (defn- create-list-of-new-names "Returns list of maps like this one: {:id 42, :name 33% 15/0/0}, while :name is also a string." [scheme] (let [rows (gdom/getElementsByClass "poketable-row")] (doall (remove nil? (map (fn [row] (let [id (.getAttribute row "data-id") iv (.getAttribute row "data-iv-perfect") at (.getAttribute row "data-at") df (.getAttribute row "data-df") st (.getAttribute row "data-st") t1 (.getAttribute row "data-type-1") t2 (.getAttribute row "data-type-2") checkbox (gdom/getElement (str "poketable-checkbox-" id))] (when (.-checked checkbox) {:id id :name (dispatch-scheme scheme iv at df st t1 t2)}))) rows))))) (defn- do-the-rename-dance! "Rename all selected pokemon according to the selected scheme. Not reversible!" [this scheme] (let [url (:rename-selected-pokemon config/api) new-nicknames (create-list-of-new-names scheme)] (when (pos? (count new-nicknames)) (lib/loading!) (lib/update-progress-status! {:status "ok", :to_delete (count new-nicknames), :deleted 0}) (progress/query-status this) (POST (clib/make-url url) {:body (clib/clj->json new-nicknames) :handler progress/finished-progress-handler :error-handler com/error-handler :response-format :json :headers {"Content-Type" "application/json"} :keywords? true})))) (defui SelectSchemes Object (render [this] (let [scheme (or (om/get-state this :scheme) "rename-scheme-1")] (dom/div nil (dom/div #js {:className "input-group"} (dom/select #js {:className "form-control" :onChange #(vlib/commit-component-state this :scheme %) :value scheme} (dom/option #js {:value "rename-scheme-1"} "IV% AT/DF/ST") (dom/option #js {:value "rename-scheme-2"} "AT/DF/ST") (dom/option #js {:value "rename-scheme-3"} "IV%") (dom/option #js {:value "rename-scheme-4"} "IV% Type1/Type2")) (dom/div #js {:className "input-group-btn"} (vlib/button-primary #(do-the-rename-dance! this scheme) "Rename"))))))) (def select-schemes (om/factory SelectSchemes {})) (defui RenamingControls Object (render [this] (dom/div nil (dom/p #js {:className "lead"} "Renaming " (dom/span #js {:className "badge"} "new")) (dom/label #js {:className "control-label"} "Renames selected Pokemon. Not reversible") (select-schemes (om/props this))))) (def controls (om/factory RenamingControls))
null
https://raw.githubusercontent.com/Phaetec/pogo-cruncher/a93bd16cc4d118d2ec57c2a641bd1b11be0d133d/frontend/src/cruncher/rename.cljs
clojure
(ns cruncher.rename (:require [om.next :as om :refer-macros [defui]] [om.dom :as dom :include-macros true] [ajax.core :refer [POST]] [goog.dom :as gdom] [cruncher.utils.views :as vlib] [cruncher.utils.lib :as lib] [cruncher.config :as config] [cruncher.communication.progress :as progress] [cruncher.communication.utils :as clib] [cruncher.communication.main :as com])) (defn- dispatch-scheme "Gets a string containing the user's selection. Construct the new nickname from it." [scheme iv at df st t1 t2] (let [iv-int (lib/str->int iv)] (cond (and (= "rename-scheme-1" scheme) (= iv-int 100)) (str iv-int "%") (= "rename-scheme-1" scheme) (str iv-int "% " at "/" df "/" st) (= "rename-scheme-2" scheme) (str at "/" df "/" st) (= "rename-scheme-3" scheme) (str iv-int "%") (= "rename-scheme-4" scheme) (str iv-int "% " t1 "/" t2)))) (defn- create-list-of-new-names "Returns list of maps like this one: {:id 42, :name 33% 15/0/0}, while :name is also a string." [scheme] (let [rows (gdom/getElementsByClass "poketable-row")] (doall (remove nil? (map (fn [row] (let [id (.getAttribute row "data-id") iv (.getAttribute row "data-iv-perfect") at (.getAttribute row "data-at") df (.getAttribute row "data-df") st (.getAttribute row "data-st") t1 (.getAttribute row "data-type-1") t2 (.getAttribute row "data-type-2") checkbox (gdom/getElement (str "poketable-checkbox-" id))] (when (.-checked checkbox) {:id id :name (dispatch-scheme scheme iv at df st t1 t2)}))) rows))))) (defn- do-the-rename-dance! "Rename all selected pokemon according to the selected scheme. Not reversible!" [this scheme] (let [url (:rename-selected-pokemon config/api) new-nicknames (create-list-of-new-names scheme)] (when (pos? (count new-nicknames)) (lib/loading!) (lib/update-progress-status! {:status "ok", :to_delete (count new-nicknames), :deleted 0}) (progress/query-status this) (POST (clib/make-url url) {:body (clib/clj->json new-nicknames) :handler progress/finished-progress-handler :error-handler com/error-handler :response-format :json :headers {"Content-Type" "application/json"} :keywords? true})))) (defui SelectSchemes Object (render [this] (let [scheme (or (om/get-state this :scheme) "rename-scheme-1")] (dom/div nil (dom/div #js {:className "input-group"} (dom/select #js {:className "form-control" :onChange #(vlib/commit-component-state this :scheme %) :value scheme} (dom/option #js {:value "rename-scheme-1"} "IV% AT/DF/ST") (dom/option #js {:value "rename-scheme-2"} "AT/DF/ST") (dom/option #js {:value "rename-scheme-3"} "IV%") (dom/option #js {:value "rename-scheme-4"} "IV% Type1/Type2")) (dom/div #js {:className "input-group-btn"} (vlib/button-primary #(do-the-rename-dance! this scheme) "Rename"))))))) (def select-schemes (om/factory SelectSchemes {})) (defui RenamingControls Object (render [this] (dom/div nil (dom/p #js {:className "lead"} "Renaming " (dom/span #js {:className "badge"} "new")) (dom/label #js {:className "control-label"} "Renames selected Pokemon. Not reversible") (select-schemes (om/props this))))) (def controls (om/factory RenamingControls))
5e454194e637babf2b793a909065a602b2c2026c6989f49c9fc7e21c83be983c
MaybeJustJames/zephyr
Options.hs
-- | `zephyr` command line option parser -- module Command.Options ( Options (..) , parseOptions ) where import Data.List (intercalate) import qualified Data.Map as M import qualified Data.Set as S import qualified Data.Text as T import Data.Traversable (for) import qualified Language.PureScript as P import Language.PureScript.DCE.Errors (EntryPoint (..)) import qualified Options.Applicative as Opts -- | @zephyr@ options -- data Options = Options { optEntryPoints :: [EntryPoint] -- ^ List of entry points. , optInputDir :: FilePath -- ^ Input directory, default: @outout@. , optOutputDir :: FilePath -- ^ Output directory, default: @dce-output@. , optVerbose :: Bool -- ^ Verbose output. , optForeign :: Bool -- ^ Dead code eliminate foreign javascript module. , optPureScriptOptions :: P.Options -- ^ PureScription options , optUsePrefix :: Bool , optJsonErrors :: Bool -- ^ Print errors in `JSON` format; default 'False'. , optEvaluate :: Bool -- ^ Rewirite using an evaluation; it can reduce literal expressions; default -- 'False'. } inputDirectoryOpt :: Opts.Parser FilePath inputDirectoryOpt = Opts.strOption $ Opts.short 'i' <> Opts.long "input-directory" <> Opts.value "output" <> Opts.showDefault <> Opts.help "Input directory (purs output directory)." outputDirectoryOpt :: Opts.Parser FilePath outputDirectoryOpt = Opts.strOption $ Opts.short 'o' <> Opts.long "dce-output" <> Opts.value "dce-output" <> Opts.showDefault <> Opts.help "Output directory." entryPointOpt :: Opts.Parser EntryPoint entryPointOpt = Opts.argument (Opts.auto >>= checkIfQualified) $ Opts.metavar "entry-point" <> Opts.help "Qualified identifier or a module name (it may be prefixed with `ident:` or `module:`). All code which is not a transitive dependency of an entry point (or any exported identifier from a give module) will be removed. You can pass multiple entry points." where checkIfQualified (EntryPoint q@(P.Qualified (P.BySourcePos _) _)) = fail $ "not a qualified indentifier: '" ++ T.unpack (P.showQualified P.runIdent q) ++ "'" checkIfQualified e = return e verboseOutputOpt :: Opts.Parser Bool verboseOutputOpt = Opts.switch $ Opts.short 'v' <> Opts.long "verbose" <> Opts.showDefault <> Opts.help "Verbose CoreFn parser errors." dceForeignOpt :: Opts.Parser Bool dceForeignOpt = Opts.switch $ Opts.short 'f' <> Opts.long "dce-foreign" <> Opts.showDefault <> Opts.help "dce foreign modules" comments :: Opts.Parser Bool comments = Opts.switch $ Opts.short 'c' <> Opts.long "comments" <> Opts.help "Include comments in the generated code" verboseErrors :: Opts.Parser Bool verboseErrors = Opts.switch $ Opts.short 'v' <> Opts.long "verbose-errors" <> Opts.help "Display verbose error messages" codegenTargets :: Opts.Parser [P.CodegenTarget] codegenTargets = Opts.option targetParser $ Opts.short 'g' <> Opts.long "codegen" <> Opts.value [P.JS] <> Opts.help ( "Specifies comma-separated codegen targets to include. " <> targetsMessage <> " The default target is 'js', but if this option is used only the targets specified will be used." ) dceEvalOpt :: Opts.Parser Bool dceEvalOpt = Opts.switch $ Opts.short 'e' <> Opts.long "evaluate" <> Opts.showDefault <> Opts.help "rewrite using simple evaluation" targets :: M.Map String P.CodegenTarget targets = M.fromList [ ("js", P.JS) , ("sourcemaps", P.JSSourceMap) , ("corefn", P.CoreFn) ] targetsMessage :: String targetsMessage = "Accepted codegen targets are '" <> intercalate "', '" (M.keys targets) <> "'." 
targetParser :: Opts.ReadM [P.CodegenTarget] targetParser = Opts.str >>= \s -> for (T.split (== ',') s) $ maybe (Opts.readerError targetsMessage) pure . flip M.lookup targets . T.unpack . T.strip noPrefix :: Opts.Parser Bool noPrefix = Opts.switch $ Opts.short 'p' <> Opts.long "no-prefix" <> Opts.help "Do not include comment header" jsonErrors :: Opts.Parser Bool jsonErrors = Opts.switch $ Opts.long "json-errors" <> Opts.help "Print errors to stderr as JSON" pureScriptOptions :: Opts.Parser P.Options pureScriptOptions = P.Options <$> verboseErrors <*> (not <$> comments) <*> (handleTargets <$> codegenTargets) where Ensure that the JS target is included if sourcemaps are handleTargets :: [P.CodegenTarget] -> S.Set P.CodegenTarget handleTargets ts = S.fromList (if P.JSSourceMap `elem` ts then P.JS : ts else ts) parseOptions :: Opts.Parser Options parseOptions = Options <$> Opts.many entryPointOpt <*> inputDirectoryOpt <*> outputDirectoryOpt <*> verboseOutputOpt <*> dceForeignOpt <*> pureScriptOptions <*> (not <$> noPrefix) <*> jsonErrors <*> dceEvalOpt
null
https://raw.githubusercontent.com/MaybeJustJames/zephyr/30b6d25813592123dd4dadc34b4df4eb0d150a0b/app/Command/Options.hs
haskell
| `zephyr` command line option parser | @zephyr@ options ^ List of entry points. ^ Output directory, default: @dce-output@. ^ Verbose output. ^ Dead code eliminate foreign javascript module. ^ Print errors in `JSON` format; default 'False'. ^ Rewirite using an evaluation; it can reduce literal expressions; default 'False'.
module Command.Options ( Options (..) , parseOptions ) where import Data.List (intercalate) import qualified Data.Map as M import qualified Data.Set as S import qualified Data.Text as T import Data.Traversable (for) import qualified Language.PureScript as P import Language.PureScript.DCE.Errors (EntryPoint (..)) import qualified Options.Applicative as Opts data Options = Options { optEntryPoints :: [EntryPoint] , optInputDir :: FilePath ^ Input directory , default : @outout@. , optOutputDir :: FilePath , optVerbose :: Bool , optForeign :: Bool , optPureScriptOptions :: P.Options ^ PureScription options , optUsePrefix :: Bool , optJsonErrors :: Bool , optEvaluate :: Bool } inputDirectoryOpt :: Opts.Parser FilePath inputDirectoryOpt = Opts.strOption $ Opts.short 'i' <> Opts.long "input-directory" <> Opts.value "output" <> Opts.showDefault <> Opts.help "Input directory (purs output directory)." outputDirectoryOpt :: Opts.Parser FilePath outputDirectoryOpt = Opts.strOption $ Opts.short 'o' <> Opts.long "dce-output" <> Opts.value "dce-output" <> Opts.showDefault <> Opts.help "Output directory." entryPointOpt :: Opts.Parser EntryPoint entryPointOpt = Opts.argument (Opts.auto >>= checkIfQualified) $ Opts.metavar "entry-point" <> Opts.help "Qualified identifier or a module name (it may be prefixed with `ident:` or `module:`). All code which is not a transitive dependency of an entry point (or any exported identifier from a give module) will be removed. You can pass multiple entry points." where checkIfQualified (EntryPoint q@(P.Qualified (P.BySourcePos _) _)) = fail $ "not a qualified indentifier: '" ++ T.unpack (P.showQualified P.runIdent q) ++ "'" checkIfQualified e = return e verboseOutputOpt :: Opts.Parser Bool verboseOutputOpt = Opts.switch $ Opts.short 'v' <> Opts.long "verbose" <> Opts.showDefault <> Opts.help "Verbose CoreFn parser errors." dceForeignOpt :: Opts.Parser Bool dceForeignOpt = Opts.switch $ Opts.short 'f' <> Opts.long "dce-foreign" <> Opts.showDefault <> Opts.help "dce foreign modules" comments :: Opts.Parser Bool comments = Opts.switch $ Opts.short 'c' <> Opts.long "comments" <> Opts.help "Include comments in the generated code" verboseErrors :: Opts.Parser Bool verboseErrors = Opts.switch $ Opts.short 'v' <> Opts.long "verbose-errors" <> Opts.help "Display verbose error messages" codegenTargets :: Opts.Parser [P.CodegenTarget] codegenTargets = Opts.option targetParser $ Opts.short 'g' <> Opts.long "codegen" <> Opts.value [P.JS] <> Opts.help ( "Specifies comma-separated codegen targets to include. " <> targetsMessage <> " The default target is 'js', but if this option is used only the targets specified will be used." ) dceEvalOpt :: Opts.Parser Bool dceEvalOpt = Opts.switch $ Opts.short 'e' <> Opts.long "evaluate" <> Opts.showDefault <> Opts.help "rewrite using simple evaluation" targets :: M.Map String P.CodegenTarget targets = M.fromList [ ("js", P.JS) , ("sourcemaps", P.JSSourceMap) , ("corefn", P.CoreFn) ] targetsMessage :: String targetsMessage = "Accepted codegen targets are '" <> intercalate "', '" (M.keys targets) <> "'." targetParser :: Opts.ReadM [P.CodegenTarget] targetParser = Opts.str >>= \s -> for (T.split (== ',') s) $ maybe (Opts.readerError targetsMessage) pure . flip M.lookup targets . T.unpack . 
T.strip noPrefix :: Opts.Parser Bool noPrefix = Opts.switch $ Opts.short 'p' <> Opts.long "no-prefix" <> Opts.help "Do not include comment header" jsonErrors :: Opts.Parser Bool jsonErrors = Opts.switch $ Opts.long "json-errors" <> Opts.help "Print errors to stderr as JSON" pureScriptOptions :: Opts.Parser P.Options pureScriptOptions = P.Options <$> verboseErrors <*> (not <$> comments) <*> (handleTargets <$> codegenTargets) where Ensure that the JS target is included if sourcemaps are handleTargets :: [P.CodegenTarget] -> S.Set P.CodegenTarget handleTargets ts = S.fromList (if P.JSSourceMap `elem` ts then P.JS : ts else ts) parseOptions :: Opts.Parser Options parseOptions = Options <$> Opts.many entryPointOpt <*> inputDirectoryOpt <*> outputDirectoryOpt <*> verboseOutputOpt <*> dceForeignOpt <*> pureScriptOptions <*> (not <$> noPrefix) <*> jsonErrors <*> dceEvalOpt
5ffe24e09eb54ee9588e8f1a6710c01179eb4287f8e83225f12d92f989ff1cd1
mainland/nikola
Main.hs
# LANGUAGE CPP # # LANGUAGE ScopedTypeVariables # # LANGUAGE TemplateHaskell # -- | -- Module : Main Copyright : ( c ) The President and Fellows of Harvard College 2009 - 2010 Copyright : ( c ) 2012 -- License : BSD-style -- Maintainer : < > -- Stability : experimental -- Portability : non-portable module Main where import Prelude hiding (map, zipWith, zipWith3) import qualified Control.Exception as E import Control.Monad (forM_) import qualified Criterion as C import qualified Criterion.Main as C import qualified Data.Vector.Storable as V import System.Environment import Text.Printf #if !MIN_VERSION_vector(0,10,0) import Control.DeepSeq import Foreign (Storable) #endif /* !MIN_VERSION_vector(0,10,0) */ import qualified Data.Array.Nikola.Backend.CUDA as N import qualified Data.Array.Nikola.Backend.CUDA.Haskell as NH import qualified Data.Array.Nikola.Backend.CUDA.TH as NTH import Data.Array.Nikola.Util.Statistics import Data.Array.Nikola.Util.Random import qualified BlackScholes.Nikola as BSN import qualified BlackScholes.Vector as BSV type F = Double rISKFREE :: F rISKFREE = 0.02 vOLATILITY :: F vOLATILITY = 0.30; main :: IO () main = do args <- System.Environment.getArgs N.initializeCUDACtx case args of ["--validate"] -> validate _ -> mapM benchmarksForN [0,2..20] >>= C.defaultMain benchmarksForN :: Double -> IO C.Benchmark benchmarksForN logn = do (ss, xs, ts) <- generateData n return $ C.bgroup (printf "2**%-2.0f" logn) [ C.bench (printf " vector 2**%-2.0f" logn) $ C.nf blackscholesVector (ss, xs, ts) , C.bench (printf "nikola interpreter 2**%-2.0f" logn) $ C.nf blackscholesNikola (ss, xs, ts) , C.bench (printf " nikola compiled 2**%-2.0f" logn) $ C.nf blackscholesNikolaCompiled (ss, xs, ts) ] where n :: Int n = truncate (2**logn) generateData :: Int -> IO (V.Vector F, V.Vector F, V.Vector F) generateData n = do ss <- randomsRange n (5.0, 30.0) xs <- randomsRange n (1.0, 100.0) ts <- randomsRange n (0.25, 10.0) E.evaluate ss E.evaluate xs E.evaluate ts return (ss, xs, ts) blackscholesNikola :: (V.Vector F, V.Vector F, V.Vector F) -> V.Vector F blackscholesNikola (ss, xs, ts) = NH.compile BSN.blackscholes ss xs ts rISKFREE vOLATILITY blackscholesNikolaCompiled :: (V.Vector F, V.Vector F, V.Vector F) -> V.Vector F blackscholesNikolaCompiled (ss, xs, ts) = blackscholes ss xs ts rISKFREE vOLATILITY where blackscholes :: V.Vector F -> V.Vector F -> V.Vector F -> F -> F -> V.Vector F blackscholes = $(NTH.compileSig BSN.blackscholes (undefined :: V.Vector F -> V.Vector F -> V.Vector F -> F -> F -> V.Vector F)) blackscholesVector :: (V.Vector F, V.Vector F, V.Vector F) -> V.Vector F # INLINE blackscholesVector # blackscholesVector (ss, xs, ts) = V.zipWith3 (\s x t -> BSV.blackscholes True s x t rISKFREE vOLATILITY) ss xs ts validate :: IO () validate = forM_ [0,2..16] $ \(logn :: Double) -> do let n = truncate (2**logn) (ss, xs, ts) <- generateData n let v1 = blackscholesNikolaCompiled (ss, xs, ts) let v2 = blackscholesVector (ss, xs, ts) validateL1Norm ePSILON (printf "2**%-2.0f elements" logn) v1 v2 where ePSILON :: F ePSILON = 1e-10 #if !MIN_VERSION_vector(0,10,0) instance Storable a => NFData (V.Vector a) where rnf v = V.length v `seq` () #endif /* !MIN_VERSION_vector(0,10,0) */
null
https://raw.githubusercontent.com/mainland/nikola/d86398888c0a76f8ad1556a269a708de9dd92644/examples/blackscholes/Main.hs
haskell
| Module : Main License : BSD-style Stability : experimental Portability : non-portable
# LANGUAGE CPP # # LANGUAGE ScopedTypeVariables # # LANGUAGE TemplateHaskell # Copyright : ( c ) The President and Fellows of Harvard College 2009 - 2010 Copyright : ( c ) 2012 Maintainer : < > module Main where import Prelude hiding (map, zipWith, zipWith3) import qualified Control.Exception as E import Control.Monad (forM_) import qualified Criterion as C import qualified Criterion.Main as C import qualified Data.Vector.Storable as V import System.Environment import Text.Printf #if !MIN_VERSION_vector(0,10,0) import Control.DeepSeq import Foreign (Storable) #endif /* !MIN_VERSION_vector(0,10,0) */ import qualified Data.Array.Nikola.Backend.CUDA as N import qualified Data.Array.Nikola.Backend.CUDA.Haskell as NH import qualified Data.Array.Nikola.Backend.CUDA.TH as NTH import Data.Array.Nikola.Util.Statistics import Data.Array.Nikola.Util.Random import qualified BlackScholes.Nikola as BSN import qualified BlackScholes.Vector as BSV type F = Double rISKFREE :: F rISKFREE = 0.02 vOLATILITY :: F vOLATILITY = 0.30; main :: IO () main = do args <- System.Environment.getArgs N.initializeCUDACtx case args of ["--validate"] -> validate _ -> mapM benchmarksForN [0,2..20] >>= C.defaultMain benchmarksForN :: Double -> IO C.Benchmark benchmarksForN logn = do (ss, xs, ts) <- generateData n return $ C.bgroup (printf "2**%-2.0f" logn) [ C.bench (printf " vector 2**%-2.0f" logn) $ C.nf blackscholesVector (ss, xs, ts) , C.bench (printf "nikola interpreter 2**%-2.0f" logn) $ C.nf blackscholesNikola (ss, xs, ts) , C.bench (printf " nikola compiled 2**%-2.0f" logn) $ C.nf blackscholesNikolaCompiled (ss, xs, ts) ] where n :: Int n = truncate (2**logn) generateData :: Int -> IO (V.Vector F, V.Vector F, V.Vector F) generateData n = do ss <- randomsRange n (5.0, 30.0) xs <- randomsRange n (1.0, 100.0) ts <- randomsRange n (0.25, 10.0) E.evaluate ss E.evaluate xs E.evaluate ts return (ss, xs, ts) blackscholesNikola :: (V.Vector F, V.Vector F, V.Vector F) -> V.Vector F blackscholesNikola (ss, xs, ts) = NH.compile BSN.blackscholes ss xs ts rISKFREE vOLATILITY blackscholesNikolaCompiled :: (V.Vector F, V.Vector F, V.Vector F) -> V.Vector F blackscholesNikolaCompiled (ss, xs, ts) = blackscholes ss xs ts rISKFREE vOLATILITY where blackscholes :: V.Vector F -> V.Vector F -> V.Vector F -> F -> F -> V.Vector F blackscholes = $(NTH.compileSig BSN.blackscholes (undefined :: V.Vector F -> V.Vector F -> V.Vector F -> F -> F -> V.Vector F)) blackscholesVector :: (V.Vector F, V.Vector F, V.Vector F) -> V.Vector F # INLINE blackscholesVector # blackscholesVector (ss, xs, ts) = V.zipWith3 (\s x t -> BSV.blackscholes True s x t rISKFREE vOLATILITY) ss xs ts validate :: IO () validate = forM_ [0,2..16] $ \(logn :: Double) -> do let n = truncate (2**logn) (ss, xs, ts) <- generateData n let v1 = blackscholesNikolaCompiled (ss, xs, ts) let v2 = blackscholesVector (ss, xs, ts) validateL1Norm ePSILON (printf "2**%-2.0f elements" logn) v1 v2 where ePSILON :: F ePSILON = 1e-10 #if !MIN_VERSION_vector(0,10,0) instance Storable a => NFData (V.Vector a) where rnf v = V.length v `seq` () #endif /* !MIN_VERSION_vector(0,10,0) */
098f6b48d171c78e042227fc93e247e802cd78c2fc1dfffd47c69768867d6a3e
eeng/mercurius
main.cljs
(ns mercurius.core.presentation.main
  (:require [reagent.dom :as rd]
            [re-frame.core :as re-frame :refer [dispatch-sync]]
            [mercurius.core.presentation.util.reframe :refer [>evt]]
            [mercurius.core.presentation.socket :as socket]
            [mercurius.core.presentation.app :refer [app]]))

(defn start []
  (re-frame/clear-subscription-cache!)
  (rd/render [app] (.getElementById js/document "app")))

(defn init []
  (println "Starting app...")
  (dispatch-sync [:core/initialize])
  (socket/connect! :on-connect #(>evt [:core/socket-connected (:uid %)]))
  (start))
null
https://raw.githubusercontent.com/eeng/mercurius/f83778ddde99aa13692e4fe2e70b2e9dc2fd70e9/src/mercurius/core/presentation/main.cljs
clojure
(ns mercurius.core.presentation.main
  (:require [reagent.dom :as rd]
            [re-frame.core :as re-frame :refer [dispatch-sync]]
            [mercurius.core.presentation.util.reframe :refer [>evt]]
            [mercurius.core.presentation.socket :as socket]
            [mercurius.core.presentation.app :refer [app]]))

(defn start []
  (re-frame/clear-subscription-cache!)
  (rd/render [app] (.getElementById js/document "app")))

(defn init []
  (println "Starting app...")
  (dispatch-sync [:core/initialize])
  (socket/connect! :on-connect #(>evt [:core/socket-connected (:uid %)]))
  (start))
fc5e8f3153bee362f5c60f673292708439f70d062a1887d9b4f04c983657b386
hannesm/logs-syslog
logs_syslog_mirage.mli
* Logs reporter via syslog using MirageOS Please read { ! Logs_syslog } first . Please read {!Logs_syslog} first. *) * UDP syslog module Udp (C : Mirage_console.S) (CLOCK : Mirage_clock.PCLOCK) (STACK : Tcpip.Stack.V4V6) : sig * [ create c udp ~hostname ip ~port ~truncate ( ) ] is [ reporter ] , which sends log messages to [ ip , port ] via UDP . Upon failure , a message is emitted to the console [ c ] . Each message can be truncated : [ truncate ] defaults to 65535 bytes . The [ hostname ] is part of each syslog message . The [ port ] defaults to 514 . [ facility ] is the default syslog facility ( see { ! Logs_syslog.message } ) . sends log messages to [ip, port] via UDP. Upon failure, a message is emitted to the console [c]. Each message can be truncated: [truncate] defaults to 65535 bytes. The [hostname] is part of each syslog message. The [port] defaults to 514. [facility] is the default syslog facility (see {!Logs_syslog.message}). *) val create : C.t -> STACK.t -> hostname:string -> STACK.IP.ipaddr -> ?port:int -> ?truncate:int -> ?facility:Syslog_message.facility -> unit -> Logs.reporter end (** TCP syslog *) module Tcp (C : Mirage_console.S) (CLOCK : Mirage_clock.PCLOCK) (STACK : Tcpip.Stack.V4V6) : sig * [ create c tcp ~hostname ip ~port ~truncate ~framing ( ) ] is [ Ok reporter ] or [ Error msg ] . The [ reporter ] sends log messages to [ ip , port ] via TCP . If the initial TCP connection to the [ remote_ip ] fails , an [ Error msg ] is returned instead . If the TCP connection fails , an error is logged to the console [ c ] and attempts are made to re - establish the TCP connection . Each syslog message can be truncated , depending on [ truncate ] ( defaults to no truncating ) . The [ hostname ] is part of each syslog message . The default value of [ port ] is 514 , the default behaviour of [ framing ] is to append a zero byte . [ facility ] is the default syslog facility ( see { ! Logs_syslog.message } ) . [Ok reporter] or [Error msg]. The [reporter] sends log messages to [ip, port] via TCP. If the initial TCP connection to the [remote_ip] fails, an [Error msg] is returned instead. If the TCP connection fails, an error is logged to the console [c] and attempts are made to re-establish the TCP connection. Each syslog message can be truncated, depending on [truncate] (defaults to no truncating). The [hostname] is part of each syslog message. The default value of [port] is 514, the default behaviour of [framing] is to append a zero byte. [facility] is the default syslog facility (see {!Logs_syslog.message}). *) val create : C.t -> STACK.t -> hostname:string -> STACK.IP.ipaddr -> ?port:int -> ?truncate:int -> ?framing:Logs_syslog.framing -> ?facility:Syslog_message.facility -> unit -> (Logs.reporter, string) result Lwt.t end * { 2 : mirage_example Example usage } To install a Mirage syslog reporter , sending via UDP to localhost , use the following snippet : { [ module Main ( C : Mirage_console . S ) ( S : Tcpip . Stack . V4V6 ) ( CLOCK : Mirage_clock . ) module LU = Logs_syslog_mirage . Udp(C)(CLOCK)(S ) let start c s _ = let ip = Ipaddr . V4 ( Ipaddr . V4.of_string_exn " 127.0.0.1 " ) in let r = LU.create c s ip ~hostname:"MirageOS.example " ( ) in Logs.set_reporter r ; Lwt.return_unit end ] } The TCP transport is very similar : { [ module Main ( C : Mirage_console . S ) ( S : Tcpip . Stack . V4V6 ) ( CLOCK : Mirage_clock . ) module LT = Logs_syslog_mirage . Tcp(C)(CLOCK)(S ) let start c s _ = let ip = Ipaddr . V4 ( Ipaddr . 
V4.of_string_exn " 127.0.0.1 " ) in LT.create c s ip ~hostname:"MirageOS.example " ( ) > > = function | Ok r - > Logs.set_reporter r ; Lwt.return_unit | Error e - > Lwt.fail_invalid_arg e end ] } To install a Mirage syslog reporter, sending via UDP to localhost, use the following snippet: {[ module Main (C : Mirage_console.S) (S : Tcpip.Stack.V4V6) (CLOCK : Mirage_clock.PCLOCK) module LU = Logs_syslog_mirage.Udp(C)(CLOCK)(S) let start c s _ = let ip = Ipaddr.V4 (Ipaddr.V4.of_string_exn "127.0.0.1") in let r = LU.create c s ip ~hostname:"MirageOS.example" () in Logs.set_reporter r ; Lwt.return_unit end ]} The TCP transport is very similar: {[ module Main (C : Mirage_console.S) (S : Tcpip.Stack.V4V6) (CLOCK : Mirage_clock.PCLOCK) module LT = Logs_syslog_mirage.Tcp(C)(CLOCK)(S) let start c s _ = let ip = Ipaddr.V4 (Ipaddr.V4.of_string_exn "127.0.0.1") in LT.create c s ip ~hostname:"MirageOS.example" () >>= function | Ok r -> Logs.set_reporter r ; Lwt.return_unit | Error e -> Lwt.fail_invalid_arg e end ]} *)
null
https://raw.githubusercontent.com/hannesm/logs-syslog/aadb766b7b1e239d92b88765720fae6cdd75577f/src/mirage/logs_syslog_mirage.mli
ocaml
* TCP syslog
* Logs reporter via syslog using MirageOS Please read { ! Logs_syslog } first . Please read {!Logs_syslog} first. *) * UDP syslog module Udp (C : Mirage_console.S) (CLOCK : Mirage_clock.PCLOCK) (STACK : Tcpip.Stack.V4V6) : sig * [ create c udp ~hostname ip ~port ~truncate ( ) ] is [ reporter ] , which sends log messages to [ ip , port ] via UDP . Upon failure , a message is emitted to the console [ c ] . Each message can be truncated : [ truncate ] defaults to 65535 bytes . The [ hostname ] is part of each syslog message . The [ port ] defaults to 514 . [ facility ] is the default syslog facility ( see { ! Logs_syslog.message } ) . sends log messages to [ip, port] via UDP. Upon failure, a message is emitted to the console [c]. Each message can be truncated: [truncate] defaults to 65535 bytes. The [hostname] is part of each syslog message. The [port] defaults to 514. [facility] is the default syslog facility (see {!Logs_syslog.message}). *) val create : C.t -> STACK.t -> hostname:string -> STACK.IP.ipaddr -> ?port:int -> ?truncate:int -> ?facility:Syslog_message.facility -> unit -> Logs.reporter end module Tcp (C : Mirage_console.S) (CLOCK : Mirage_clock.PCLOCK) (STACK : Tcpip.Stack.V4V6) : sig * [ create c tcp ~hostname ip ~port ~truncate ~framing ( ) ] is [ Ok reporter ] or [ Error msg ] . The [ reporter ] sends log messages to [ ip , port ] via TCP . If the initial TCP connection to the [ remote_ip ] fails , an [ Error msg ] is returned instead . If the TCP connection fails , an error is logged to the console [ c ] and attempts are made to re - establish the TCP connection . Each syslog message can be truncated , depending on [ truncate ] ( defaults to no truncating ) . The [ hostname ] is part of each syslog message . The default value of [ port ] is 514 , the default behaviour of [ framing ] is to append a zero byte . [ facility ] is the default syslog facility ( see { ! Logs_syslog.message } ) . [Ok reporter] or [Error msg]. The [reporter] sends log messages to [ip, port] via TCP. If the initial TCP connection to the [remote_ip] fails, an [Error msg] is returned instead. If the TCP connection fails, an error is logged to the console [c] and attempts are made to re-establish the TCP connection. Each syslog message can be truncated, depending on [truncate] (defaults to no truncating). The [hostname] is part of each syslog message. The default value of [port] is 514, the default behaviour of [framing] is to append a zero byte. [facility] is the default syslog facility (see {!Logs_syslog.message}). *) val create : C.t -> STACK.t -> hostname:string -> STACK.IP.ipaddr -> ?port:int -> ?truncate:int -> ?framing:Logs_syslog.framing -> ?facility:Syslog_message.facility -> unit -> (Logs.reporter, string) result Lwt.t end * { 2 : mirage_example Example usage } To install a Mirage syslog reporter , sending via UDP to localhost , use the following snippet : { [ module Main ( C : Mirage_console . S ) ( S : Tcpip . Stack . V4V6 ) ( CLOCK : Mirage_clock . ) module LU = Logs_syslog_mirage . Udp(C)(CLOCK)(S ) let start c s _ = let ip = Ipaddr . V4 ( Ipaddr . V4.of_string_exn " 127.0.0.1 " ) in let r = LU.create c s ip ~hostname:"MirageOS.example " ( ) in Logs.set_reporter r ; Lwt.return_unit end ] } The TCP transport is very similar : { [ module Main ( C : Mirage_console . S ) ( S : Tcpip . Stack . V4V6 ) ( CLOCK : Mirage_clock . ) module LT = Logs_syslog_mirage . Tcp(C)(CLOCK)(S ) let start c s _ = let ip = Ipaddr . V4 ( Ipaddr . 
V4.of_string_exn " 127.0.0.1 " ) in LT.create c s ip ~hostname:"MirageOS.example " ( ) > > = function | Ok r - > Logs.set_reporter r ; Lwt.return_unit | Error e - > Lwt.fail_invalid_arg e end ] } To install a Mirage syslog reporter, sending via UDP to localhost, use the following snippet: {[ module Main (C : Mirage_console.S) (S : Tcpip.Stack.V4V6) (CLOCK : Mirage_clock.PCLOCK) module LU = Logs_syslog_mirage.Udp(C)(CLOCK)(S) let start c s _ = let ip = Ipaddr.V4 (Ipaddr.V4.of_string_exn "127.0.0.1") in let r = LU.create c s ip ~hostname:"MirageOS.example" () in Logs.set_reporter r ; Lwt.return_unit end ]} The TCP transport is very similar: {[ module Main (C : Mirage_console.S) (S : Tcpip.Stack.V4V6) (CLOCK : Mirage_clock.PCLOCK) module LT = Logs_syslog_mirage.Tcp(C)(CLOCK)(S) let start c s _ = let ip = Ipaddr.V4 (Ipaddr.V4.of_string_exn "127.0.0.1") in LT.create c s ip ~hostname:"MirageOS.example" () >>= function | Ok r -> Logs.set_reporter r ; Lwt.return_unit | Error e -> Lwt.fail_invalid_arg e end ]} *)
f26fbb80d4a16a32cdf62553c86b43c672c783ad3ceed47d7524fab2b9a4f3d2
e-bigmoon/haskell-blog
Override_proxy.hs
#!/usr/bin/env stack
-- stack script --resolver lts-17.3
{-# LANGUAGE OverloadedStrings #-}

import qualified Data.ByteString.Lazy.Char8 as L8
import Network.HTTP.Simple

main :: IO ()
main = do
  let request = setRequestProxy (Just (Proxy "127.0.0.1" 3128)) $ ""
  response <- httpLBS request

  putStrLn $ "The status code was: " ++ show (getResponseStatusCode response)
  print $ getResponseHeader "Content-Type" response
  L8.putStrLn $ getResponseBody response
null
https://raw.githubusercontent.com/e-bigmoon/haskell-blog/5c9e7c25f31ea6856c5d333e8e991dbceab21c56/sample-code/yesod/appendix/ap4/Override_proxy.hs
haskell
stack script --resolver lts-17.3 # LANGUAGE OverloadedStrings #
#!/usr/bin/env stack

import qualified Data.ByteString.Lazy.Char8 as L8
import Network.HTTP.Simple

main :: IO ()
main = do
  let request = setRequestProxy (Just (Proxy "127.0.0.1" 3128)) $ ""
  response <- httpLBS request

  putStrLn $ "The status code was: " ++ show (getResponseStatusCode response)
  print $ getResponseHeader "Content-Type" response
  L8.putStrLn $ getResponseBody response
4f7c02fd5faff79645c6aa8cea151feca63994f8ab4165bf7c7f3a232be0ec55
cubicle-model-checker/cubicle
approx.ml
(**************************************************************************) (* *) Cubicle (* *) Copyright ( C ) 2011 - 2014 (* *) and Universite Paris - Sud 11 (* *) (* *) This file is distributed under the terms of the Apache Software (* License version 2.0 *) (* *) (**************************************************************************) open Options open Format open Ast open Types module SA = SAtom let bad_candidates = ref Cubetrie.empty let non_cfm_literals = ref SA.empty let contains_non_cfm s = not (SA.is_empty (SA.inter s !non_cfm_literals)) let lit_non_cfm a = SA.mem a !non_cfm_literals let register_bad system cand trace = let cvars = Node.variables cand in assert (cand.kind = Approx); let bads = List.fold_left (fun acc sigma -> Cubetrie.add_array (Cube.subst sigma cand.cube).Cube.array () acc) !bad_candidates (Variable.all_permutations cvars cvars) in bad_candidates := bads; match trace with | [] -> () | _ -> let bads = List.fold_left (fun acc sa -> Cubetrie.add (SAtom.elements sa) () acc) !bad_candidates (Forward.conflicting_from_trace system trace) in bad_candidates := bads let remove_non_cfm_cand system candidates = List.filter (fun sc -> if contains_non_cfm (Node.litterals sc) then false else (register_bad system sc []; true)) candidates let node_same n1 n2 = ArrayAtom.equal (Node.array n1) (Node.array n2) let rec remove_bad_candidates sys faulty candidates = let trace = faulty.from in let cand = Node.origin faulty in let nc = List.fold_left (fun acc c' -> if node_same cand c' then (* raise UNSAFE if we try to remove a candidate which is an unsafe property *) if List.exists (node_same c') sys.t_unsafe then raise (Safety.Unsafe faulty) else (register_bad sys c' trace; acc) else if Forward.spurious_due_to_cfm sys faulty then Find out if bactrack is due to crash failure model , in which case record literals that do not respect CMF model case record literals that do not respect CMF model *) begin non_cfm_literals := SA.union (Node.litterals cand) !non_cfm_literals; if not quiet && verbose > 0 then eprintf "Non CFM literals = %a@." 
SAtom.print !non_cfm_literals; remove_non_cfm_cand sys acc end else (* remove candidates that are reachable on the same trace modulo renaming of parameters *) if Forward.reachable_on_trace_from_init sys c' trace <> Forward.Unreach then (register_bad sys c' []; acc) else begin (* This candidate seems ok, reset its delete flag *) c'.deleted <- false; c'::acc end ) [] candidates in List.rev nc module SSAtoms = Set.Make(SAtom) let nb_arrays_sa sa = SAtom.fold (fun a n -> match a with | Atom.Comp (Elem _, _, Elem _) -> n | Atom.Comp (Elem _, _, Access _) | Atom.Comp (Access _, _, Elem _) -> n + 1 | Atom.Comp (Access _, _, Access _) -> n + 2 | _ -> n ) sa 0 let nb_arrays s = nb_arrays_sa (Node.litterals s) let nb_neq s = SAtom.fold (fun a n -> match a with | Atom.Comp (_, Neq, _) -> n + 1 | _ -> n ) (Node.litterals s) 0 let nb_arith s = SAtom.fold (fun a n -> match a with | Atom.Comp (_, (Le|Lt), _) | Atom.Comp (Arith _, _, _) | Atom.Comp (_, _, Arith _) | Atom.Comp (Const _, _, _) | Atom.Comp (_, _, Const _) -> n + 1 | _ -> n ) (Node.litterals s) 0 let respect_finite_order = SAtom.for_all (function | Atom.Comp (Elem (x, Var), Le, Elem (y, Var)) -> Hstring.compare x y <= 0 | Atom.Comp (Elem (x, Var), Lt, Elem (y, Var)) -> Hstring.compare x y < 0 | _ -> true ) let hsort = Hstring.make "Sort" let hhome = Hstring.make "Home" let sorted_variables sa = let procs = SAtom.variables sa in Variable.Set.for_all (fun p -> SAtom.exists (function | Atom.Comp (Access (s, [x]), _, _) when Hstring.equal s hsort && Hstring.equal x p -> true | _ -> false) sa) procs let isolate_sorts = SAtom.partition (function | Atom.Comp (Access (s, _), _, _) -> Hstring.equal s hsort | Atom.Comp (Elem (h, Glob), _, _) -> Hstring.equal h hhome | _ -> false) let reattach_sorts sorts sa = let procs = Variable.Set.elements (SAtom.variables sa) in SAtom.fold (fun a sa -> match a with | Atom.Comp (Access (s, [x]), _, _) when Hstring.equal s hsort && Hstring.list_mem x procs -> SAtom.add a sa | Atom.Comp (Elem (h, Glob), _, Elem (x, Var)) | Atom.Comp (Elem (x, Var), _, Elem (h, Glob)) when Hstring.equal h hhome && Hstring.list_mem x procs -> SAtom.add a sa | _ -> sa) sorts sa let proc_present p a sa = let rest = SAtom.remove a sa in SAtom.exists (function | Atom.Comp (Elem (h, Var), _, _) | Atom.Comp (_, _, Elem (h, Var)) -> Hstring.equal h p | _ -> false) rest let useless_candidate sa = let open Atom in SAtom.exists (function (* heuristic: remove proc variables *) | (Comp (Elem (p, Var), _, _) as a) | (Comp (_, _, Elem (p, Var)) as a) -> not (proc_present p a sa) | (Comp (Access (s, [p]), _, _) as a) | (Comp (_, _, Access (s, [p])) as a) -> Hstring.equal s hsort && not (proc_present p a sa) | Comp ((Elem (x, _) | Access (x,_)), _, _) | Comp (_, _, (Elem (x, _) | Access (x,_))) -> (* Smt.Symbol.has_type_proc x || *) (enumerative <> -1 && Smt.Symbol.has_abstract_type x) ( Hstring.equal ( snd ( Smt . Symbol.type_of x ) ) Smt . Type.type_real ) || ( Hstring.equal ( snd ( Smt . Symbol.type_of x ) ) Smt . 
Type.type_int ) | _ -> false) sa let arith_atom = function | Atom.Comp ((Arith _), _, _) | Atom.Comp (_, _, (Arith _)) | Atom.Comp ((Const _), _, _) | Atom.Comp (_, _, (Const _)) -> true | _ -> false let cube_likely_bad c = (* heuristic *) Cubetrie.mem_array_poly c.Cube.array !bad_candidates let cube_known_bad c = try Cubetrie.iter_subsumed (fun _ -> raise Exit) (Array.to_list c.Cube.array) !bad_candidates; false with Exit -> true (*****************************************) (* Potential approximations for a node s *) (*****************************************) let approx_arith a = match a with | Atom.Comp (t, Eq, Const c) -> begin match const_sign c with | None | Some 0 -> a | Some n -> let zer = Const (add_constants c (mult_const (-1) c)) in if n < 0 then Atom.Comp (t, Lt, zer) else Atom.Comp (zer, Lt, t) end | _ -> a let approximations s = let args, sa = Node.variables s, Node.litterals s in let sorts_sa, sa = isolate_sorts sa in (* Heuristics for generating candidates *) let max_procs = enumerative in let max_literals = max 2 (candidate_heuristic + 1) in let max_ratio_arrays_after = (3, candidate_heuristic - 1) in let init = SAtom.fold (fun a acc -> if useless_candidate (SAtom.singleton a) || lit_non_cfm a then acc else SSAtoms.add (SAtom.singleton a) acc) sa SSAtoms.empty in All subsets of sa of relevant size let parts = SAtom.fold (fun a acc -> let a = approx_arith a in if useless_candidate (SAtom.singleton a) then acc else if not abstr_num && arith_atom a then acc else if lit_non_cfm a then acc else SSAtoms.fold (fun sa' acc -> let nsa = SAtom.add a sa' in if Variable.Set.cardinal (SAtom.variables nsa) > max_procs then acc else if SAtom.cardinal nsa > max_literals then acc else SSAtoms.add nsa acc ) acc acc ) sa init in (* Filter non interresting candidates *) let parts = SSAtoms.fold (fun sa' acc -> if SAtom.equal sa' sa then acc (* Heuristic : usefull for flash *) else if SAtom.cardinal sa' >= fst max_ratio_arrays_after && nb_arrays_sa sa' > snd max_ratio_arrays_after then acc else if ( Cube.args_of_atoms sa ' ) > SAtom.cardinal sa ' then acc acc *) else let sa' = reattach_sorts sorts_sa sa' in if SAtom.equal sa' sa then acc else let c = Cube.create_normal sa' in if cube_known_bad c || cube_likely_bad c then acc else (Node.create ~kind:Approx c) :: acc ) parts [] in Sorting heuristic of approximations with most general ones first List.fast_sort (fun s1 s2 -> let c = Stdlib.compare (Node.dim s1) (Node.dim s2) in if c <> 0 then c else let c = Stdlib.compare (Node.size s1) (Node.size s2) in if c <> 0 then c else let c = Stdlib.compare (nb_neq s2) (nb_neq s1) in if c <> 0 then c else Stdlib.compare (nb_arrays s1) (nb_arrays s2) (* if c <> 0 then c *) (* else *) (* SAtom.compare (Node.litterals s1) (Node.litterals s1) *) ) parts (* TODO : approx trees or bdds *) let keep n l = let rec aux acc n l = match l,n with | [], _ | _, 0 -> List.rev acc | x::r, _ -> aux (x::acc) (n-1) r in aux [] n l module type S = sig val good : Node.t -> Node.t option end module Make ( O : Oracle.S ) : S = struct let subsuming_candidate s = let approx = approximations s in let approx = if max_cands = -1 then approx else keep max_cands approx in if verbose > 0 && not quiet then eprintf "Checking %d approximations:@." 
(List.length approx); O.first_good_candidate approx let good n = match n.kind with | Approx -> (* It's useless to look for approximations of an approximation *) None | _ -> subsuming_candidate n end module GrumpyOracle : Oracle.S = struct let init _ = () let first_good_candidate _ = failwith "You should not call Grumpy Oracle." end module GrumpyApprox : S = struct let good _ = None end let select_oracle = if do_brab then if murphi then (module Murphi : Oracle.S) else (module Enumerative : Oracle.S) else (module GrumpyOracle : Oracle.S) module SelectedOracle : Oracle.S = (val select_oracle) let select_approx = if do_brab then (module Make(SelectedOracle) : S) else (module GrumpyApprox) module Selected : S = (val select_approx)
null
https://raw.githubusercontent.com/cubicle-model-checker/cubicle/04f045573093353447a5afc586b0b6ced7630acd/approx.ml
ocaml
************************************************************************ License version 2.0 ************************************************************************ raise UNSAFE if we try to remove a candidate which is an unsafe property remove candidates that are reachable on the same trace modulo renaming of parameters This candidate seems ok, reset its delete flag heuristic: remove proc variables Smt.Symbol.has_type_proc x || heuristic *************************************** Potential approximations for a node s *************************************** Heuristics for generating candidates Filter non interresting candidates Heuristic : usefull for flash if c <> 0 then c else SAtom.compare (Node.litterals s1) (Node.litterals s1) TODO : approx trees or bdds It's useless to look for approximations of an approximation
Cubicle Copyright ( C ) 2011 - 2014 and Universite Paris - Sud 11 This file is distributed under the terms of the Apache Software open Options open Format open Ast open Types module SA = SAtom let bad_candidates = ref Cubetrie.empty let non_cfm_literals = ref SA.empty let contains_non_cfm s = not (SA.is_empty (SA.inter s !non_cfm_literals)) let lit_non_cfm a = SA.mem a !non_cfm_literals let register_bad system cand trace = let cvars = Node.variables cand in assert (cand.kind = Approx); let bads = List.fold_left (fun acc sigma -> Cubetrie.add_array (Cube.subst sigma cand.cube).Cube.array () acc) !bad_candidates (Variable.all_permutations cvars cvars) in bad_candidates := bads; match trace with | [] -> () | _ -> let bads = List.fold_left (fun acc sa -> Cubetrie.add (SAtom.elements sa) () acc) !bad_candidates (Forward.conflicting_from_trace system trace) in bad_candidates := bads let remove_non_cfm_cand system candidates = List.filter (fun sc -> if contains_non_cfm (Node.litterals sc) then false else (register_bad system sc []; true)) candidates let node_same n1 n2 = ArrayAtom.equal (Node.array n1) (Node.array n2) let rec remove_bad_candidates sys faulty candidates = let trace = faulty.from in let cand = Node.origin faulty in let nc = List.fold_left (fun acc c' -> if node_same cand c' then if List.exists (node_same c') sys.t_unsafe then raise (Safety.Unsafe faulty) else (register_bad sys c' trace; acc) else if Forward.spurious_due_to_cfm sys faulty then Find out if bactrack is due to crash failure model , in which case record literals that do not respect CMF model case record literals that do not respect CMF model *) begin non_cfm_literals := SA.union (Node.litterals cand) !non_cfm_literals; if not quiet && verbose > 0 then eprintf "Non CFM literals = %a@." 
SAtom.print !non_cfm_literals; remove_non_cfm_cand sys acc end else if Forward.reachable_on_trace_from_init sys c' trace <> Forward.Unreach then (register_bad sys c' []; acc) else begin c'.deleted <- false; c'::acc end ) [] candidates in List.rev nc module SSAtoms = Set.Make(SAtom) let nb_arrays_sa sa = SAtom.fold (fun a n -> match a with | Atom.Comp (Elem _, _, Elem _) -> n | Atom.Comp (Elem _, _, Access _) | Atom.Comp (Access _, _, Elem _) -> n + 1 | Atom.Comp (Access _, _, Access _) -> n + 2 | _ -> n ) sa 0 let nb_arrays s = nb_arrays_sa (Node.litterals s) let nb_neq s = SAtom.fold (fun a n -> match a with | Atom.Comp (_, Neq, _) -> n + 1 | _ -> n ) (Node.litterals s) 0 let nb_arith s = SAtom.fold (fun a n -> match a with | Atom.Comp (_, (Le|Lt), _) | Atom.Comp (Arith _, _, _) | Atom.Comp (_, _, Arith _) | Atom.Comp (Const _, _, _) | Atom.Comp (_, _, Const _) -> n + 1 | _ -> n ) (Node.litterals s) 0 let respect_finite_order = SAtom.for_all (function | Atom.Comp (Elem (x, Var), Le, Elem (y, Var)) -> Hstring.compare x y <= 0 | Atom.Comp (Elem (x, Var), Lt, Elem (y, Var)) -> Hstring.compare x y < 0 | _ -> true ) let hsort = Hstring.make "Sort" let hhome = Hstring.make "Home" let sorted_variables sa = let procs = SAtom.variables sa in Variable.Set.for_all (fun p -> SAtom.exists (function | Atom.Comp (Access (s, [x]), _, _) when Hstring.equal s hsort && Hstring.equal x p -> true | _ -> false) sa) procs let isolate_sorts = SAtom.partition (function | Atom.Comp (Access (s, _), _, _) -> Hstring.equal s hsort | Atom.Comp (Elem (h, Glob), _, _) -> Hstring.equal h hhome | _ -> false) let reattach_sorts sorts sa = let procs = Variable.Set.elements (SAtom.variables sa) in SAtom.fold (fun a sa -> match a with | Atom.Comp (Access (s, [x]), _, _) when Hstring.equal s hsort && Hstring.list_mem x procs -> SAtom.add a sa | Atom.Comp (Elem (h, Glob), _, Elem (x, Var)) | Atom.Comp (Elem (x, Var), _, Elem (h, Glob)) when Hstring.equal h hhome && Hstring.list_mem x procs -> SAtom.add a sa | _ -> sa) sorts sa let proc_present p a sa = let rest = SAtom.remove a sa in SAtom.exists (function | Atom.Comp (Elem (h, Var), _, _) | Atom.Comp (_, _, Elem (h, Var)) -> Hstring.equal h p | _ -> false) rest let useless_candidate sa = let open Atom in SAtom.exists (function | (Comp (Elem (p, Var), _, _) as a) | (Comp (_, _, Elem (p, Var)) as a) -> not (proc_present p a sa) | (Comp (Access (s, [p]), _, _) as a) | (Comp (_, _, Access (s, [p])) as a) -> Hstring.equal s hsort && not (proc_present p a sa) | Comp ((Elem (x, _) | Access (x,_)), _, _) | Comp (_, _, (Elem (x, _) | Access (x,_))) -> (enumerative <> -1 && Smt.Symbol.has_abstract_type x) ( Hstring.equal ( snd ( Smt . Symbol.type_of x ) ) Smt . Type.type_real ) || ( Hstring.equal ( snd ( Smt . Symbol.type_of x ) ) Smt . 
Type.type_int ) | _ -> false) sa let arith_atom = function | Atom.Comp ((Arith _), _, _) | Atom.Comp (_, _, (Arith _)) | Atom.Comp ((Const _), _, _) | Atom.Comp (_, _, (Const _)) -> true | _ -> false Cubetrie.mem_array_poly c.Cube.array !bad_candidates let cube_known_bad c = try Cubetrie.iter_subsumed (fun _ -> raise Exit) (Array.to_list c.Cube.array) !bad_candidates; false with Exit -> true let approx_arith a = match a with | Atom.Comp (t, Eq, Const c) -> begin match const_sign c with | None | Some 0 -> a | Some n -> let zer = Const (add_constants c (mult_const (-1) c)) in if n < 0 then Atom.Comp (t, Lt, zer) else Atom.Comp (zer, Lt, t) end | _ -> a let approximations s = let args, sa = Node.variables s, Node.litterals s in let sorts_sa, sa = isolate_sorts sa in let max_procs = enumerative in let max_literals = max 2 (candidate_heuristic + 1) in let max_ratio_arrays_after = (3, candidate_heuristic - 1) in let init = SAtom.fold (fun a acc -> if useless_candidate (SAtom.singleton a) || lit_non_cfm a then acc else SSAtoms.add (SAtom.singleton a) acc) sa SSAtoms.empty in All subsets of sa of relevant size let parts = SAtom.fold (fun a acc -> let a = approx_arith a in if useless_candidate (SAtom.singleton a) then acc else if not abstr_num && arith_atom a then acc else if lit_non_cfm a then acc else SSAtoms.fold (fun sa' acc -> let nsa = SAtom.add a sa' in if Variable.Set.cardinal (SAtom.variables nsa) > max_procs then acc else if SAtom.cardinal nsa > max_literals then acc else SSAtoms.add nsa acc ) acc acc ) sa init in let parts = SSAtoms.fold (fun sa' acc -> if SAtom.equal sa' sa then acc else if SAtom.cardinal sa' >= fst max_ratio_arrays_after && nb_arrays_sa sa' > snd max_ratio_arrays_after then acc else if ( Cube.args_of_atoms sa ' ) > SAtom.cardinal sa ' then acc acc *) else let sa' = reattach_sorts sorts_sa sa' in if SAtom.equal sa' sa then acc else let c = Cube.create_normal sa' in if cube_known_bad c || cube_likely_bad c then acc else (Node.create ~kind:Approx c) :: acc ) parts [] in Sorting heuristic of approximations with most general ones first List.fast_sort (fun s1 s2 -> let c = Stdlib.compare (Node.dim s1) (Node.dim s2) in if c <> 0 then c else let c = Stdlib.compare (Node.size s1) (Node.size s2) in if c <> 0 then c else let c = Stdlib.compare (nb_neq s2) (nb_neq s1) in if c <> 0 then c else Stdlib.compare (nb_arrays s1) (nb_arrays s2) ) parts let keep n l = let rec aux acc n l = match l,n with | [], _ | _, 0 -> List.rev acc | x::r, _ -> aux (x::acc) (n-1) r in aux [] n l module type S = sig val good : Node.t -> Node.t option end module Make ( O : Oracle.S ) : S = struct let subsuming_candidate s = let approx = approximations s in let approx = if max_cands = -1 then approx else keep max_cands approx in if verbose > 0 && not quiet then eprintf "Checking %d approximations:@." (List.length approx); O.first_good_candidate approx let good n = match n.kind with | Approx -> None | _ -> subsuming_candidate n end module GrumpyOracle : Oracle.S = struct let init _ = () let first_good_candidate _ = failwith "You should not call Grumpy Oracle." end module GrumpyApprox : S = struct let good _ = None end let select_oracle = if do_brab then if murphi then (module Murphi : Oracle.S) else (module Enumerative : Oracle.S) else (module GrumpyOracle : Oracle.S) module SelectedOracle : Oracle.S = (val select_oracle) let select_approx = if do_brab then (module Make(SelectedOracle) : S) else (module GrumpyApprox) module Selected : S = (val select_approx)
da7c74fa466ca05fd195eb00a3303e4234d3a5b1f62b525763efae0e6d6039d7
static-analysis-engineering/codehawk
jCHExprFeatureExtraction.mli
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = CodeHawk Java Analyzer Author : and ------------------------------------------------------------------------------ The MIT License ( MIT ) Copyright ( c ) 2005 - 2020 Kestrel Technology LLC Permission is hereby granted , free of charge , to any person obtaining a copy of this software and associated documentation files ( the " Software " ) , to deal in the Software without restriction , including without limitation the rights to use , copy , modify , merge , publish , distribute , sublicense , and/or sell copies of the Software , and to permit persons to whom the Software is furnished to do so , subject to the following conditions : The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software . THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = CodeHawk Java Analyzer Author: Henny Sipma and Andrew McGraw ------------------------------------------------------------------------------ The MIT License (MIT) Copyright (c) 2005-2020 Kestrel Technology LLC Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ============================================================================= *) (* chutil *) open CHXmlDocument (* jchpre *) open JCHPreAPI val write_xml_class_expr_features: xml_element_int -> class_info_int -> unit
null
https://raw.githubusercontent.com/static-analysis-engineering/codehawk/98ced4d5e6d7989575092df232759afc2cb851f6/CodeHawk/CHJ/jchfeatures/jCHExprFeatureExtraction.mli
ocaml
chutil jchpre
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = CodeHawk Java Analyzer Author : and ------------------------------------------------------------------------------ The MIT License ( MIT ) Copyright ( c ) 2005 - 2020 Kestrel Technology LLC Permission is hereby granted , free of charge , to any person obtaining a copy of this software and associated documentation files ( the " Software " ) , to deal in the Software without restriction , including without limitation the rights to use , copy , modify , merge , publish , distribute , sublicense , and/or sell copies of the Software , and to permit persons to whom the Software is furnished to do so , subject to the following conditions : The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software . THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = CodeHawk Java Analyzer Author: Henny Sipma and Andrew McGraw ------------------------------------------------------------------------------ The MIT License (MIT) Copyright (c) 2005-2020 Kestrel Technology LLC Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ============================================================================= *) open CHXmlDocument open JCHPreAPI val write_xml_class_expr_features: xml_element_int -> class_info_int -> unit
75cf1127ff8575b5011ab7fed36bfd901fe68628ee640f0f6824fc1783e92298
sharplispers/montezuma
bit-vector.lisp
(in-package #:montezuma)

;; There may be no reason not to use a standard bit-vector.

(defun make-bit-vector ()
  (list 0))

(defun bit-set-p (bit-vector index)
  (logbitp index (car bit-vector)))

(defun set-bit (bit-vector index)
  (setf (ldb (byte 1 index) (car bit-vector)) 1))

(defun clear-bit (bit-vector index)
  (setf (ldb (byte 1 index) (car bit-vector)) 0))

(defun bit-vector-count (bit-vector)
  (logcount (car bit-vector)))

(defun write-bit-vector (bit-vector directory filename)
  (let ((output (create-output directory filename)))
    (unwind-protect
         (write-string output (format nil "~S" (car bit-vector)))
      (close output))))

(defun read-bit-vector (directory filename)
  (let ((input (open-input directory filename)))
    (unwind-protect
         (let ((bit-string (read-string input)))
           (list (parse-integer bit-string)))
      (close input))))
null
https://raw.githubusercontent.com/sharplispers/montezuma/ee2129eece7065760de4ebbaeffaadcb27644738/src/util/bit-vector.lisp
lisp
There may be no reason not to use a standard bit-vector.
(in-package #:montezuma)

(defun make-bit-vector ()
  (list 0))

(defun bit-set-p (bit-vector index)
  (logbitp index (car bit-vector)))

(defun set-bit (bit-vector index)
  (setf (ldb (byte 1 index) (car bit-vector)) 1))

(defun clear-bit (bit-vector index)
  (setf (ldb (byte 1 index) (car bit-vector)) 0))

(defun bit-vector-count (bit-vector)
  (logcount (car bit-vector)))

(defun write-bit-vector (bit-vector directory filename)
  (let ((output (create-output directory filename)))
    (unwind-protect
         (write-string output (format nil "~S" (car bit-vector)))
      (close output))))

(defun read-bit-vector (directory filename)
  (let ((input (open-input directory filename)))
    (unwind-protect
         (let ((bit-string (read-string input)))
           (list (parse-integer bit-string)))
      (close input))))
7e6066ac610a5acb55df5630116ffadc7a08ca7b69745488b1afdc4a79e6d329
jebberjeb/fsmviz
project.clj
(defproject demo-cljs "0.1.0-SNAPSHOT" :description "FIXME: write this!" :url "" :license {:name "Eclipse Public License" :url "-v10.html"} :min-lein-version "2.7.1" :dependencies [[org.clojure/clojure "1.9.0-beta4"] [org.clojure/clojurescript "1.9.946"] [org.clojure/core.async "0.3.443"] [fsmviz "0.1.3"]] :plugins [[lein-figwheel "0.5.14"] [lein-cljsbuild "1.1.7" :exclusions [[org.clojure/clojure]]]] :source-paths ["src"] :cljsbuild {:builds [{:id "dev" :source-paths ["src"] ;; The presence of a :figwheel configuration here ;; will cause figwheel to inject the figwheel client ;; into your build :figwheel {:on-jsload "demo-cljs.core/on-js-reload" ;; :open-urls will pop open your application in the default browser once Figwheel has ;; started and compiled your application. ;; Comment this out once it no longer serves you. :open-urls [":3449/index.html"]} :compiler {:main demo-cljs.core :asset-path "js/compiled/out" :output-to "resources/public/js/compiled/demo_cljs.js" :output-dir "resources/public/js/compiled/out" :source-map-timestamp true To console.log CLJS data - structures make sure you enable devtools in Chrome ;; -devtools :preloads [devtools.preload]}} ;; This next build is a compressed minified build for ;; production. You can build this with: once min {:id "min" :source-paths ["src"] :compiler {:output-to "resources/public/js/compiled/demo_cljs.js" :main demo-cljs.core :optimizations :advanced :pretty-print false}}]} :figwheel {;; :http-server-root "public" ;; default and assumes "resources" : server - port 3449 ; ; default : server - ip " 127.0.0.1 " :css-dirs ["resources/public/css"] ;; watch and update CSS Start an nREPL server into the running figwheel process : nrepl - port 7888 Server Ring Handler ( optional ) ;; if you want to embed a ring handler into the figwheel http-kit ;; server, this is for simple ring servers, if this does n't work for you just run your own server :) ( see ) ;; :ring-handler hello_world.server/handler ;; To be able to open files in your editor from the heads up display ;; you will need to put a script on your path. ;; that script will have to take a file path and a line number ;; ie. in ~/bin/myfile-opener ;; #! /bin/sh emacsclient -n + $ 2 $ 1 ;; ;; :open-file-command "myfile-opener" ;; if you are using emacsclient you can just use ;; :open-file-command "emacsclient" ;; if you want to disable the REPL ;; :repl false ;; to configure a different figwheel logfile path ;; :server-logfile "tmp/logs/figwheel-logfile.log" ;; to pipe all the output to the repl ;; :server-logfile false } Setting up nREPL for Figwheel and ClojureScript dev ;; Please see: ;; -figwheel/wiki/Using-the-Figwheel-REPL-within-NRepl :profiles {:dev {:dependencies [[binaryage/devtools "0.9.4"] [figwheel-sidecar "0.5.14"] [com.cemerick/piggieback "0.2.2"]] ;; need to add dev source path here to get user.clj loaded :source-paths ["src" "dev"] for CIDER ;; :plugins [[cider/cider-nrepl "0.12.0"]] :repl-options {:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]} ;; need to add the compliled assets to the :clean-targets :clean-targets ^{:protect false} ["resources/public/js/compiled" :target-path]}})
null
https://raw.githubusercontent.com/jebberjeb/fsmviz/05c89fbc29f8bfe76eda74212a85cb2e30369d29/examples/demo-cljs/project.clj
clojure
The presence of a :figwheel configuration here will cause figwheel to inject the figwheel client into your build :open-urls will pop open your application started and compiled your application. Comment this out once it no longer serves you. -devtools This next build is a compressed minified build for production. You can build this with: :http-server-root "public" ;; default and assumes "resources" ; default watch and update CSS if you want to embed a ring handler into the figwheel http-kit server, this is for simple ring servers, if this :ring-handler hello_world.server/handler To be able to open files in your editor from the heads up display you will need to put a script on your path. that script will have to take a file path and a line number ie. in ~/bin/myfile-opener #! /bin/sh :open-file-command "myfile-opener" if you are using emacsclient you can just use :open-file-command "emacsclient" if you want to disable the REPL :repl false to configure a different figwheel logfile path :server-logfile "tmp/logs/figwheel-logfile.log" to pipe all the output to the repl :server-logfile false Please see: -figwheel/wiki/Using-the-Figwheel-REPL-within-NRepl need to add dev source path here to get user.clj loaded :plugins [[cider/cider-nrepl "0.12.0"]] need to add the compliled assets to the :clean-targets
(defproject demo-cljs "0.1.0-SNAPSHOT" :description "FIXME: write this!" :url "" :license {:name "Eclipse Public License" :url "-v10.html"} :min-lein-version "2.7.1" :dependencies [[org.clojure/clojure "1.9.0-beta4"] [org.clojure/clojurescript "1.9.946"] [org.clojure/core.async "0.3.443"] [fsmviz "0.1.3"]] :plugins [[lein-figwheel "0.5.14"] [lein-cljsbuild "1.1.7" :exclusions [[org.clojure/clojure]]]] :source-paths ["src"] :cljsbuild {:builds [{:id "dev" :source-paths ["src"] :figwheel {:on-jsload "demo-cljs.core/on-js-reload" in the default browser once Figwheel has :open-urls [":3449/index.html"]} :compiler {:main demo-cljs.core :asset-path "js/compiled/out" :output-to "resources/public/js/compiled/demo_cljs.js" :output-dir "resources/public/js/compiled/out" :source-map-timestamp true To console.log CLJS data - structures make sure you enable devtools in Chrome :preloads [devtools.preload]}} once min {:id "min" :source-paths ["src"] :compiler {:output-to "resources/public/js/compiled/demo_cljs.js" :main demo-cljs.core :optimizations :advanced :pretty-print false}}]} : server - ip " 127.0.0.1 " Start an nREPL server into the running figwheel process : nrepl - port 7888 Server Ring Handler ( optional ) does n't work for you just run your own server :) ( see ) emacsclient -n + $ 2 $ 1 } Setting up nREPL for Figwheel and ClojureScript dev :profiles {:dev {:dependencies [[binaryage/devtools "0.9.4"] [figwheel-sidecar "0.5.14"] [com.cemerick/piggieback "0.2.2"]] :source-paths ["src" "dev"] for CIDER :repl-options {:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]} :clean-targets ^{:protect false} ["resources/public/js/compiled" :target-path]}})
cf61ff92fd75fc9ea345225b616c1fe6834a7ac4a3bb257e64f969af3e8e9d8d
konn/dl-first-principle-tutorial
Main.hs
# LANGUAGE AllowAmbiguousTypes # # LANGUAGE ApplicativeDo # {-# LANGUAGE BangPatterns #-} # LANGUAGE DataKinds # # LANGUAGE DeriveGeneric # # LANGUAGE DerivingVia # # LANGUAGE FlexibleContexts # # LANGUAGE FlexibleInstances # {-# LANGUAGE GADTs #-} # LANGUAGE MultiParamTypeClasses # # LANGUAGE PolyKinds # # LANGUAGE QuantifiedConstraints # {-# LANGUAGE RankNTypes #-} # LANGUAGE RecordWildCards # # LANGUAGE ScopedTypeVariables # # LANGUAGE TupleSections # # LANGUAGE TypeApplications # # LANGUAGE TypeOperators # {-# OPTIONS_GHC -funbox-strict-fields #-} module Main (main) where import Control.Applicative (optional, (<**>)) import Control.DeepSeq (force) import Control.Exception (evaluate) import Control.Lens hiding (Snoc) import Control.Monad ((<=<)) import qualified Data.DList as DL import Data.Foldable (foldlM, forM_) import Data.Functor (void) import qualified Data.List as List import Data.List.NonEmpty (NonEmpty (..)) import qualified Data.List.NonEmpty as NE import Data.List.Split (splitOn) import Data.Maybe (fromMaybe) import Data.Monoid (Sum (..)) import Data.Strict (Pair (..)) import Data.Time (defaultTimeLocale, formatTime, getZonedTime) import qualified Data.Vector.Unboxed as U import DeepLearning.Circles import DeepLearning.NeuralNetowrk.Massiv hiding (scale) import Diagrams.Backend.Rasterific import Diagrams.Prelude (Diagram, alignB, alignT, bg, black, blend, centerXY, fc, green, lc, mkHeight, orange, p2, pad, scale, strokeOpacity, white, (===), (|||)) import qualified Diagrams.Prelude as Dia import GHC.TypeNats import Generic.Data import Linear import Linear.Affine import Numeric.Natural (Natural) import qualified Options.Applicative as Opts import System.Directory (createDirectoryIfMissing) import System.FilePath ((</>)) import System.IO (BufferMode (LineBuffering), hSetBuffering, stdout) import System.Random.Stateful (globalStdGen) import Text.Printf (printf) import Text.Read (readMaybe) main :: IO () main = do hSetBuffering stdout LineBuffering cmd <- Opts.execParser cmdP case cmd of Circles opts -> dualCircleTest opts Spirals opts -> dualSpiralTest opts data Cmd = Circles Opts | Spirals Opts deriving (Show, Eq, Ord, Generic) cmdP :: Opts.ParserInfo Cmd cmdP = Opts.info (p <**> Opts.helper) $ mconcat [ Opts.header "circles - hidden layer demo (Day 2)" , Opts.progDesc "Binary point classification with hidden layers" ] where p = Opts.subparser $ mconcat [ Opts.command "circles" $ Opts.info (Circles <$> optsP) (Opts.progDesc "Classifies dual circle datasets") , Opts.command "spirals" $ Opts.info (Spirals <$> optsP) (Opts.progDesc "Classifies dual spiral datasets") ] data Opts = Opts { epochs :: !Int , gamma :: !Double , layers :: NonEmpty (NonEmpty Natural) } deriving (Show, Eq, Ord) optsP :: Opts.Parser Opts optsP = do epochs <- Opts.option Opts.auto $ Opts.short 'n' <> Opts.value 500 <> Opts.showDefault <> Opts.metavar "N" <> Opts.help "# of epochs" gamma <- Opts.option Opts.auto $ Opts.long "gamma" <> Opts.short 'g' <> Opts.value 0.001 <> Opts.metavar "GAMMA" <> Opts.showDefault <> Opts.help "Learning rate" layers <- fmap (fromMaybe $ (128 :| []) :| []) $ optional $ NE.some1 $ Opts.option (Opts.maybeReader intsP) $ Opts.long "layers" <> Opts.short 'L' pure Opts {..} intsP :: String -> Maybe (NonEmpty Natural) intsP = NE.nonEmpty <=< mapM readMaybe . 
splitOn "," workDir, circleWorkDir, spiralWorkDir :: FilePath workDir = "workspace" circleWorkDir = workDir </> "circles" spiralWorkDir = workDir </> "spirals" savePointImage :: FilePath -> U.Vector ClusteredPoint -> IO () savePointImage fp pts = renderRasterific fp (mkHeight 256) $ drawClusteredPoints pts & bg white mkPredictionImage :: (Dia.N b ~ Double, Dia.V b ~ V2, Dia.Renderable (Dia.Path V2 Double) b) => NeuralNetwork 2 ls 1 Double -> Vector ClusteredPoint -> Diagram b mkPredictionImage nn pts = mconcat [ drawClusteredPoints pts & lc black & strokeOpacity 1.0 , pixelateCluster 64 (\α -> blend (min 1.0 $ max 0.0 α) green orange) (p2 (-1.25, -1.25)) (p2 (1.25, 1.25)) nn ] savePredictionComparisonImage :: FilePath -> NeuralNetwork 2 ls 1 Double -> (String, Vector ClusteredPoint) -> (String, Vector ClusteredPoint) -> IO () savePredictionComparisonImage fp nn (lab0, pts0) (lab1, pts1) = renderRasterific fp (mkHeight 256) $ ( ( (mkPredictionImage nn pts0 & centerXY & alignB) === (texterific lab0 & scale 0.2 & fc white & centerXY & pad 1.5 & alignT) ) ||| ( (mkPredictionImage nn pts1 & centerXY & alignB) === (texterific lab1 & scale 0.2 & fc white & centerXY & pad 1.1 & alignT) ) ) & centerXY & pad 1.1 & bg green showDim :: Show a => [a] -> String showDim = List.intercalate "x" . map show putNetworkInfo :: KnownNat i => NeuralNetwork i hs o a -> IO () putNetworkInfo net = let NetworkStat {..} = networkStat net !lays = DL.toList layers in putStrLn $ printf "** Network of %d layers (%s), %d parameters." (length lays) (show lays) (getSum parameters) dualCircleTest :: Opts -> IO () dualCircleTest Opts {..} = do createDirectoryIfMissing True circleWorkDir trainSet <- evaluate . force =<< dualCircles globalStdGen 200 0.6 0.1 testSet <- evaluate . force =<< dualCircles globalStdGen 100 0.6 0.1 savePointImage (circleWorkDir </> "train.png") trainSet savePointImage (circleWorkDir </> "test.png") testSet putStrLn $ replicate 20 '-' putStrLn $ printf "* Circle isolation, Circle isolation, %d epochs, learning rate = %f" epochs gamma forM_ layers $ \lay -> withSimpleNetwork (map (,ReLU) $ NE.toList lay) $ \seeds -> do !net0 <- randomNetwork globalStdGen seeds putNetworkInfo net0 putStrLn $ printf "Initial training accuracy: %f%%" $ predictionAccuracy net0 trainSet * 100 putStrLn $ printf "Initial validation accuracy: %f%%" $ predictionAccuracy net0 testSet * 100 !net' <- evaluate $ trainByGradientDescent gamma epochs trainSet net0 savePredictionComparisonImage ( circleWorkDir </> printf "predict-gd-%s.png" (showDim $ NE.toList lay) ) net' ("Train", trainSet) ("Test", testSet) putStrLn $ printf "Training accuracy (GD): %f" $ predictionAccuracy net' trainSet * 100 putStrLn $ printf "Validation accuracy (GD): %f" $ predictionAccuracy net' testSet * 100 adams :: AdamParams Double adams = AdamParams {beta1 = 0.9, beta2 = 0.999, epsilon = 1e-16} dualSpiralTest :: Opts -> IO () dualSpiralTest Opts {..} = do now <- getZonedTime let stamp = formatTime defaultTimeLocale "%Y%m%d-%H%M%S" now work = spiralWorkDir </> stamp createDirectoryIfMissing True work trainSet <- evaluate . force =<< dualSpirals globalStdGen 400 0.05 testSet <- evaluate . 
force =<< dualSpirals globalStdGen 200 0.05 putStrLn "" putStrLn $ replicate 20 '-' putStrLn $ printf "* Dual spiral classification, %d epochs, learn rate = %f" epochs gamma savePointImage (work </> "train.png") trainSet savePointImage (work </> "test.png") testSet forM_ layers $ \lay -> withSimpleNetwork (map (,ReLU) $ NE.toList lay) $ \seeds -> do let dimStr = showDim $ NE.toList lay layDir = work </> dimStr createDirectoryIfMissing True layDir !net0 <- evaluate =<< randomNetwork globalStdGen seeds putNetworkInfo net0 let (qs, r) = epochs `quotRem` 10 es | qs <= 0 = [epochs] | r == 0 = replicate 10 qs | otherwise = replicate 10 qs ++ [r] putStrLn $ printf "Initial: training accuracy: %f%%" $! predictionAccuracy net0 trainSet * 100 putStrLn $ printf "Initial: Validation accuracy: %f%%" $! predictionAccuracy net0 testSet * 100 savePredictionComparisonImage (layDir </> "initial.png") net0 ("Train", trainSet) ("Test", testSet) void $ foldlM ( \(total :!: (netGD0 :!: netAdam0)) n -> do let !total' = total + n putStrLn $ printf "*** Epoch %d" total' !netGD <- evaluate $ trainByGradientDescent gamma n trainSet netGD0 putStrLn $ printf "[Gradient Descent] Training accuracy: %f%%" $! predictionAccuracy netGD trainSet * 100 putStrLn $ printf "[Gradient Descent] Validation accuracy: %f%%" $! predictionAccuracy netGD testSet * 100 savePredictionComparisonImage (layDir </> printf "predict-gd-%d.png" total') netGD ("Train", trainSet) ("Test", testSet) putStrLn "---" !netAdam <- evaluate $ trainByAdam gamma adams n trainSet netAdam0 putStrLn $ printf "[Adam] Training accuracy: %f%%" $! predictionAccuracy netAdam trainSet * 100 putStrLn $ printf "[Adam] Validation accuracy: %f%%" $! predictionAccuracy netAdam testSet * 100 savePredictionComparisonImage (layDir </> printf "predict-adam-%d.png" total') netAdam ("Train", trainSet) ("Test", testSet) pure $ total' :!: (netGD :!: netAdam) ) (0 :!: (net0 :!: net0)) es
null
https://raw.githubusercontent.com/konn/dl-first-principle-tutorial/c00a37369c44b68736fa39d32a955fddd6844b38/app/circles/Main.hs
haskell
# LANGUAGE BangPatterns # # LANGUAGE GADTs # # LANGUAGE RankNTypes # # OPTIONS_GHC -funbox-strict-fields #
# LANGUAGE AllowAmbiguousTypes # # LANGUAGE ApplicativeDo # # LANGUAGE DataKinds # # LANGUAGE DeriveGeneric # # LANGUAGE DerivingVia # # LANGUAGE FlexibleContexts # # LANGUAGE FlexibleInstances # # LANGUAGE MultiParamTypeClasses # # LANGUAGE PolyKinds # # LANGUAGE QuantifiedConstraints # # LANGUAGE RecordWildCards # # LANGUAGE ScopedTypeVariables # # LANGUAGE TupleSections # # LANGUAGE TypeApplications # # LANGUAGE TypeOperators # module Main (main) where import Control.Applicative (optional, (<**>)) import Control.DeepSeq (force) import Control.Exception (evaluate) import Control.Lens hiding (Snoc) import Control.Monad ((<=<)) import qualified Data.DList as DL import Data.Foldable (foldlM, forM_) import Data.Functor (void) import qualified Data.List as List import Data.List.NonEmpty (NonEmpty (..)) import qualified Data.List.NonEmpty as NE import Data.List.Split (splitOn) import Data.Maybe (fromMaybe) import Data.Monoid (Sum (..)) import Data.Strict (Pair (..)) import Data.Time (defaultTimeLocale, formatTime, getZonedTime) import qualified Data.Vector.Unboxed as U import DeepLearning.Circles import DeepLearning.NeuralNetowrk.Massiv hiding (scale) import Diagrams.Backend.Rasterific import Diagrams.Prelude (Diagram, alignB, alignT, bg, black, blend, centerXY, fc, green, lc, mkHeight, orange, p2, pad, scale, strokeOpacity, white, (===), (|||)) import qualified Diagrams.Prelude as Dia import GHC.TypeNats import Generic.Data import Linear import Linear.Affine import Numeric.Natural (Natural) import qualified Options.Applicative as Opts import System.Directory (createDirectoryIfMissing) import System.FilePath ((</>)) import System.IO (BufferMode (LineBuffering), hSetBuffering, stdout) import System.Random.Stateful (globalStdGen) import Text.Printf (printf) import Text.Read (readMaybe) main :: IO () main = do hSetBuffering stdout LineBuffering cmd <- Opts.execParser cmdP case cmd of Circles opts -> dualCircleTest opts Spirals opts -> dualSpiralTest opts data Cmd = Circles Opts | Spirals Opts deriving (Show, Eq, Ord, Generic) cmdP :: Opts.ParserInfo Cmd cmdP = Opts.info (p <**> Opts.helper) $ mconcat [ Opts.header "circles - hidden layer demo (Day 2)" , Opts.progDesc "Binary point classification with hidden layers" ] where p = Opts.subparser $ mconcat [ Opts.command "circles" $ Opts.info (Circles <$> optsP) (Opts.progDesc "Classifies dual circle datasets") , Opts.command "spirals" $ Opts.info (Spirals <$> optsP) (Opts.progDesc "Classifies dual spiral datasets") ] data Opts = Opts { epochs :: !Int , gamma :: !Double , layers :: NonEmpty (NonEmpty Natural) } deriving (Show, Eq, Ord) optsP :: Opts.Parser Opts optsP = do epochs <- Opts.option Opts.auto $ Opts.short 'n' <> Opts.value 500 <> Opts.showDefault <> Opts.metavar "N" <> Opts.help "# of epochs" gamma <- Opts.option Opts.auto $ Opts.long "gamma" <> Opts.short 'g' <> Opts.value 0.001 <> Opts.metavar "GAMMA" <> Opts.showDefault <> Opts.help "Learning rate" layers <- fmap (fromMaybe $ (128 :| []) :| []) $ optional $ NE.some1 $ Opts.option (Opts.maybeReader intsP) $ Opts.long "layers" <> Opts.short 'L' pure Opts {..} intsP :: String -> Maybe (NonEmpty Natural) intsP = NE.nonEmpty <=< mapM readMaybe . 
splitOn "," workDir, circleWorkDir, spiralWorkDir :: FilePath workDir = "workspace" circleWorkDir = workDir </> "circles" spiralWorkDir = workDir </> "spirals" savePointImage :: FilePath -> U.Vector ClusteredPoint -> IO () savePointImage fp pts = renderRasterific fp (mkHeight 256) $ drawClusteredPoints pts & bg white mkPredictionImage :: (Dia.N b ~ Double, Dia.V b ~ V2, Dia.Renderable (Dia.Path V2 Double) b) => NeuralNetwork 2 ls 1 Double -> Vector ClusteredPoint -> Diagram b mkPredictionImage nn pts = mconcat [ drawClusteredPoints pts & lc black & strokeOpacity 1.0 , pixelateCluster 64 (\α -> blend (min 1.0 $ max 0.0 α) green orange) (p2 (-1.25, -1.25)) (p2 (1.25, 1.25)) nn ] savePredictionComparisonImage :: FilePath -> NeuralNetwork 2 ls 1 Double -> (String, Vector ClusteredPoint) -> (String, Vector ClusteredPoint) -> IO () savePredictionComparisonImage fp nn (lab0, pts0) (lab1, pts1) = renderRasterific fp (mkHeight 256) $ ( ( (mkPredictionImage nn pts0 & centerXY & alignB) === (texterific lab0 & scale 0.2 & fc white & centerXY & pad 1.5 & alignT) ) ||| ( (mkPredictionImage nn pts1 & centerXY & alignB) === (texterific lab1 & scale 0.2 & fc white & centerXY & pad 1.1 & alignT) ) ) & centerXY & pad 1.1 & bg green showDim :: Show a => [a] -> String showDim = List.intercalate "x" . map show putNetworkInfo :: KnownNat i => NeuralNetwork i hs o a -> IO () putNetworkInfo net = let NetworkStat {..} = networkStat net !lays = DL.toList layers in putStrLn $ printf "** Network of %d layers (%s), %d parameters." (length lays) (show lays) (getSum parameters) dualCircleTest :: Opts -> IO () dualCircleTest Opts {..} = do createDirectoryIfMissing True circleWorkDir trainSet <- evaluate . force =<< dualCircles globalStdGen 200 0.6 0.1 testSet <- evaluate . force =<< dualCircles globalStdGen 100 0.6 0.1 savePointImage (circleWorkDir </> "train.png") trainSet savePointImage (circleWorkDir </> "test.png") testSet putStrLn $ replicate 20 '-' putStrLn $ printf "* Circle isolation, Circle isolation, %d epochs, learning rate = %f" epochs gamma forM_ layers $ \lay -> withSimpleNetwork (map (,ReLU) $ NE.toList lay) $ \seeds -> do !net0 <- randomNetwork globalStdGen seeds putNetworkInfo net0 putStrLn $ printf "Initial training accuracy: %f%%" $ predictionAccuracy net0 trainSet * 100 putStrLn $ printf "Initial validation accuracy: %f%%" $ predictionAccuracy net0 testSet * 100 !net' <- evaluate $ trainByGradientDescent gamma epochs trainSet net0 savePredictionComparisonImage ( circleWorkDir </> printf "predict-gd-%s.png" (showDim $ NE.toList lay) ) net' ("Train", trainSet) ("Test", testSet) putStrLn $ printf "Training accuracy (GD): %f" $ predictionAccuracy net' trainSet * 100 putStrLn $ printf "Validation accuracy (GD): %f" $ predictionAccuracy net' testSet * 100 adams :: AdamParams Double adams = AdamParams {beta1 = 0.9, beta2 = 0.999, epsilon = 1e-16} dualSpiralTest :: Opts -> IO () dualSpiralTest Opts {..} = do now <- getZonedTime let stamp = formatTime defaultTimeLocale "%Y%m%d-%H%M%S" now work = spiralWorkDir </> stamp createDirectoryIfMissing True work trainSet <- evaluate . force =<< dualSpirals globalStdGen 400 0.05 testSet <- evaluate . 
force =<< dualSpirals globalStdGen 200 0.05 putStrLn "" putStrLn $ replicate 20 '-' putStrLn $ printf "* Dual spiral classification, %d epochs, learn rate = %f" epochs gamma savePointImage (work </> "train.png") trainSet savePointImage (work </> "test.png") testSet forM_ layers $ \lay -> withSimpleNetwork (map (,ReLU) $ NE.toList lay) $ \seeds -> do let dimStr = showDim $ NE.toList lay layDir = work </> dimStr createDirectoryIfMissing True layDir !net0 <- evaluate =<< randomNetwork globalStdGen seeds putNetworkInfo net0 let (qs, r) = epochs `quotRem` 10 es | qs <= 0 = [epochs] | r == 0 = replicate 10 qs | otherwise = replicate 10 qs ++ [r] putStrLn $ printf "Initial: training accuracy: %f%%" $! predictionAccuracy net0 trainSet * 100 putStrLn $ printf "Initial: Validation accuracy: %f%%" $! predictionAccuracy net0 testSet * 100 savePredictionComparisonImage (layDir </> "initial.png") net0 ("Train", trainSet) ("Test", testSet) void $ foldlM ( \(total :!: (netGD0 :!: netAdam0)) n -> do let !total' = total + n putStrLn $ printf "*** Epoch %d" total' !netGD <- evaluate $ trainByGradientDescent gamma n trainSet netGD0 putStrLn $ printf "[Gradient Descent] Training accuracy: %f%%" $! predictionAccuracy netGD trainSet * 100 putStrLn $ printf "[Gradient Descent] Validation accuracy: %f%%" $! predictionAccuracy netGD testSet * 100 savePredictionComparisonImage (layDir </> printf "predict-gd-%d.png" total') netGD ("Train", trainSet) ("Test", testSet) putStrLn "---" !netAdam <- evaluate $ trainByAdam gamma adams n trainSet netAdam0 putStrLn $ printf "[Adam] Training accuracy: %f%%" $! predictionAccuracy netAdam trainSet * 100 putStrLn $ printf "[Adam] Validation accuracy: %f%%" $! predictionAccuracy netAdam testSet * 100 savePredictionComparisonImage (layDir </> printf "predict-adam-%d.png" total') netAdam ("Train", trainSet) ("Test", testSet) pure $ total' :!: (netGD :!: netAdam) ) (0 :!: (net0 :!: net0)) es
549dc275730956df1ec21fa8c8b48b2dc590e75df4a7b1600709e15318b849c2
nyu-acsys/drift
simple.ml
let rec mapfilter (f:int->int list) (l:int list) = match l with | [] -> [] | h::t -> let r = mapfilter f t in let x = f h in match x with | [] -> r | z::e -> (z::r) let pos y = if 0 < y then y::[] else [] let main (u:unit(*-:{v:Unit | unit}*)) = let xs = [1;2;1] in let ys = mapfilter pos xs in assert(List.length ys = List.length xs) ( ( lambda mapfilter^0 . ( ( lambda pos^1 . ( ( lambda main^2 . ( main^3 prefu^4)^5)^6 ( lambda u^7 . ( ( lambda xs^8 . ( ( lambda ys^9 . ( ( ( List.length^10 ys^11)^12 = ( List.length^13 xs^14)^15)^16 ? ( ) ^17 : ( ) ^18)^19)^20 ( ( xs^24)^25)^26)^27 [ 1;2;1]^28)^29)^30)^31)^32 ( lambda y^33 . ( ( 0 ^ 34 < y^35)^36 ? ( : : [ ] ^38)^39 : [ ] ^40)^41)^42)^43)^44 ( mu . ( lambda l^47 . ( match l^48 with [ ] ^49 - > [ ] ^50 | ( h^51 : : t^52)^53 - > ( ( lambda r^54 . ( ( lambda x^55 . ( match x^56 with [ ] ^57 - > r^58 | ( z^59 : : e^60)^61 - > ( z^62 : : r^63)^64)^65)^66 ( f^67 h^68)^69)^70)^71 ( ( mapfilter^72 f^73)^74 t^75)^76)^77)^78)^79)^80)^81 ((lambda mapfilter^0. ((lambda pos^1. ((lambda main^2. (main^3 prefu^4)^5)^6 (lambda u^7. ((lambda xs^8. ((lambda ys^9. (((List.length^10 ys^11)^12 = (List.length^13 xs^14)^15)^16 ? ()^17 : ()^18)^19)^20 ((mapfilter^21 pos^22)^23 xs^24)^25)^26)^27 [1;2;1]^28)^29)^30)^31)^32 (lambda y^33. ((0^34 < y^35)^36 ? (y^37 :: []^38)^39 : []^40)^41)^42)^43)^44 (mu mapfilter^45 f^46. (lambda l^47. (match l^48 with []^49 -> []^50 | (h^51 :: t^52)^53 -> ((lambda r^54. ((lambda x^55. (match x^56 with []^57 -> r^58 | (z^59 :: e^60)^61 -> (z^62 :: r^63)^64)^65)^66 (f^67 h^68)^69)^70)^71 ((mapfilter^72 f^73)^74 t^75)^76)^77)^78)^79)^80)^81 *) let licons_earray env ( vars : string list ) = let mult_lst = Util.extract 2 vars in let mult_const_lst = Util.arrange ( ThresholdsSetType.elements ! thresholdsSet ) in let size = ( mult_lst * 4 * List.length mult_const_lst ) in let thehold_ary = Lincons1.array_make env size in let = ref 0 in List.iter ( fun lst - > let lvar , rvar = List.nth lst 0 , List.nth lst 1 in List.iter ( fun lst ' - > let lconst , rconst = List.nth lst ' 0 , List.nth lst ' 1 in let eq = ( string_of_int lconst)^ " * " ^lvar^ " < = " ^(string_of_int rconst)^ " * " ^rvar^ " + " ^(string_of_int rconst ) in ( * v < = threshold_const let licons_earray env (vars : string list) = let mult_lst = Util.extract 2 vars in let mult_const_lst = Util.arrange (ThresholdsSetType.elements !thresholdsSet) in let size = (List.length mult_lst * 4 * List.length mult_const_lst) in let thehold_ary = Lincons1.array_make env size in let idx2 = ref 0 in List.iter (fun lst -> let lvar, rvar = List.nth lst 0, List.nth lst 1 in List.iter (fun lst' -> let lconst, rconst = List.nth lst' 0, List.nth lst' 1 in let eq = (string_of_int lconst)^" * "^lvar^" <= "^(string_of_int rconst)^" * "^rvar^" + "^(string_of_int rconst) in (* v <= threshold_const *) Lincons1.array_set thehold_ary (!idx2) (Parser.lincons1_of_string env eq); idx2 := !idx2 + 1; let eq = (string_of_int lconst)^" * "^lvar^" >= "^(string_of_int rconst)^" * "^rvar^" + "^(string_of_int rconst) in (* v >= threshold_const *) Lincons1.array_set thehold_ary (!idx2) (Parser.lincons1_of_string env eq); idx2 := !idx2 + 1; let eq = (string_of_int lconst)^" * "^lvar^" <= "^(string_of_int rconst)^" * "^rvar^" - "^(string_of_int rconst) in (* v <= threshold_const *) Lincons1.array_set thehold_ary (!idx2) (Parser.lincons1_of_string env eq); idx2 := !idx2 + 1; let eq = (string_of_int lconst)^" * "^lvar^" >= "^(string_of_int rconst)^" * "^rvar^" - "^(string_of_int rconst) in (* v >= threshold_const *) 
Lincons1.array_set thehold_ary (!idx2) (Parser.lincons1_of_string env eq); idx2 := !idx2 + 1; ) mult_const_lst ) mult_lst; thehold_ary let generate_threshold_earray env = if Environment.size env = 0 then Lincons1.array_make env 0 (* else if Environment.mem_var env (Var.of_string "cur_v") then !licons_ref *) else let int_vars, _ = Environment.vars env in let lst = Array.fold_left (fun lst item -> let var = Var.to_string item in if var = "cur_v" || String.sub var 0 1 = "l" || String.sub var 0 1 = "e" || String.sub var 0 1 = "z" || String.sub var 0 1 = "n" || String.sub var 0 1 = "m" || String.length var > 4 && String.sub var 0 1 = "pref" then var :: lst else lst (* var :: lst *) ) [] int_vars in if Environment.mem_var env ( Var.of_string " min " ) & & Environment.mem_var env ( Var.of_string " max " ) then ( Environment.print Format.std_formatter env ; licons_earray ary ( Some " max " ) ) else (Environment.print Format.std_formatter env; licons_earray ary (Some "max")) else *) licons_earray env lst *)
null
https://raw.githubusercontent.com/nyu-acsys/drift/51a3160d74b761626180da4f7dd0bb950cfe40c0/tests/benchmarks/DOrder/list/simple.ml
ocaml
-:{v:Unit | unit} v <= threshold_const v >= threshold_const v <= threshold_const v >= threshold_const else if Environment.mem_var env (Var.of_string "cur_v") then !licons_ref var :: lst
let rec mapfilter (f:int->int list) (l:int list) = match l with | [] -> [] | h::t -> let r = mapfilter f t in let x = f h in match x with | [] -> r | z::e -> (z::r) let pos y = if 0 < y then y::[] else [] let xs = [1;2;1] in let ys = mapfilter pos xs in assert(List.length ys = List.length xs) ( ( lambda mapfilter^0 . ( ( lambda pos^1 . ( ( lambda main^2 . ( main^3 prefu^4)^5)^6 ( lambda u^7 . ( ( lambda xs^8 . ( ( lambda ys^9 . ( ( ( List.length^10 ys^11)^12 = ( List.length^13 xs^14)^15)^16 ? ( ) ^17 : ( ) ^18)^19)^20 ( ( xs^24)^25)^26)^27 [ 1;2;1]^28)^29)^30)^31)^32 ( lambda y^33 . ( ( 0 ^ 34 < y^35)^36 ? ( : : [ ] ^38)^39 : [ ] ^40)^41)^42)^43)^44 ( mu . ( lambda l^47 . ( match l^48 with [ ] ^49 - > [ ] ^50 | ( h^51 : : t^52)^53 - > ( ( lambda r^54 . ( ( lambda x^55 . ( match x^56 with [ ] ^57 - > r^58 | ( z^59 : : e^60)^61 - > ( z^62 : : r^63)^64)^65)^66 ( f^67 h^68)^69)^70)^71 ( ( mapfilter^72 f^73)^74 t^75)^76)^77)^78)^79)^80)^81 ((lambda mapfilter^0. ((lambda pos^1. ((lambda main^2. (main^3 prefu^4)^5)^6 (lambda u^7. ((lambda xs^8. ((lambda ys^9. (((List.length^10 ys^11)^12 = (List.length^13 xs^14)^15)^16 ? ()^17 : ()^18)^19)^20 ((mapfilter^21 pos^22)^23 xs^24)^25)^26)^27 [1;2;1]^28)^29)^30)^31)^32 (lambda y^33. ((0^34 < y^35)^36 ? (y^37 :: []^38)^39 : []^40)^41)^42)^43)^44 (mu mapfilter^45 f^46. (lambda l^47. (match l^48 with []^49 -> []^50 | (h^51 :: t^52)^53 -> ((lambda r^54. ((lambda x^55. (match x^56 with []^57 -> r^58 | (z^59 :: e^60)^61 -> (z^62 :: r^63)^64)^65)^66 (f^67 h^68)^69)^70)^71 ((mapfilter^72 f^73)^74 t^75)^76)^77)^78)^79)^80)^81 *) let licons_earray env ( vars : string list ) = let mult_lst = Util.extract 2 vars in let mult_const_lst = Util.arrange ( ThresholdsSetType.elements ! thresholdsSet ) in let size = ( mult_lst * 4 * List.length mult_const_lst ) in let thehold_ary = Lincons1.array_make env size in let = ref 0 in List.iter ( fun lst - > let lvar , rvar = List.nth lst 0 , List.nth lst 1 in List.iter ( fun lst ' - > let lconst , rconst = List.nth lst ' 0 , List.nth lst ' 1 in let eq = ( string_of_int lconst)^ " * " ^lvar^ " < = " ^(string_of_int rconst)^ " * " ^rvar^ " + " ^(string_of_int rconst ) in ( * v < = threshold_const let licons_earray env (vars : string list) = let mult_lst = Util.extract 2 vars in let mult_const_lst = Util.arrange (ThresholdsSetType.elements !thresholdsSet) in let size = (List.length mult_lst * 4 * List.length mult_const_lst) in let thehold_ary = Lincons1.array_make env size in let idx2 = ref 0 in List.iter (fun lst -> let lvar, rvar = List.nth lst 0, List.nth lst 1 in List.iter (fun lst' -> let lconst, rconst = List.nth lst' 0, List.nth lst' 1 in Lincons1.array_set thehold_ary (!idx2) (Parser.lincons1_of_string env eq); idx2 := !idx2 + 1; Lincons1.array_set thehold_ary (!idx2) (Parser.lincons1_of_string env eq); idx2 := !idx2 + 1; Lincons1.array_set thehold_ary (!idx2) (Parser.lincons1_of_string env eq); idx2 := !idx2 + 1; Lincons1.array_set thehold_ary (!idx2) (Parser.lincons1_of_string env eq); idx2 := !idx2 + 1; ) mult_const_lst ) mult_lst; thehold_ary let generate_threshold_earray env = if Environment.size env = 0 then Lincons1.array_make env 0 else let int_vars, _ = Environment.vars env in let lst = Array.fold_left (fun lst item -> let var = Var.to_string item in if var = "cur_v" || String.sub var 0 1 = "l" || String.sub var 0 1 = "e" || String.sub var 0 1 = "z" || String.sub var 0 1 = "n" || String.sub var 0 1 = "m" || String.length var > 4 && String.sub var 0 1 = "pref" then var :: lst else lst ) [] int_vars in if 
Environment.mem_var env ( Var.of_string " min " ) & & Environment.mem_var env ( Var.of_string " max " ) then ( Environment.print Format.std_formatter env ; licons_earray ary ( Some " max " ) ) else (Environment.print Format.std_formatter env; licons_earray ary (Some "max")) else *) licons_earray env lst *)
95c03ad8b09b7a1eb0eda1cea535115f6ceb5f9cc305483fa9ad606a12b0bf8b
amperity/lein-monolith
core.clj
(ns lib-c.core) (defn foo "I don't do a whole lot." [x] (println x "Hello, World!"))
null
https://raw.githubusercontent.com/amperity/lein-monolith/f7d476b3b746498d0d0539e090a1db33a6602280/example/libs/subdir/lib-c/src/lib_c/core.clj
clojure
(ns lib-c.core) (defn foo "I don't do a whole lot." [x] (println x "Hello, World!"))
270ee8b29a3565ba31340c14cff37551e7a0dfe074dd43a4e0eea0b66c8cbb90
finos/metadata-tool
metadata.clj
; Copyright 2017 Fintech Open Source Foundation SPDX - License - Identifier : Apache-2.0 ; Licensed under the Apache License , Version 2.0 ( the " License " ) ; ; you may not use this file except in compliance with the License. ; You may obtain a copy of the License at ; ; -2.0 ; ; Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , ; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ; See the License for the specific language governing permissions and ; limitations under the License. ; (ns metadata-tool.sources.metadata (:require [clojure.string :as str] [clojure.tools.logging :as log] [clojure.java.io :as io] [clojure.pprint :as pp] [mount.core :as mnt :refer [defstate]] [cheshire.core :as ch] [clj-time.core :as tm] [clj-time.format :as tf] [metadata-tool.sources.github :as gh] [metadata-tool.sources.schemas :as sch])) (defstate ^:private organization-metadata-directory :start (str gh/metadata-directory "/organizations")) (defstate ^:private people-metadata-directory :start (str gh/metadata-directory "/people")) (defstate ^:private program-metadata-directory :start (str gh/metadata-directory "/programs")) (defstate ^:private projects-metadata-directory :start (if (some? gh/projects-directory) gh/projects-directory (str gh/metadata-directory "/toplevel"))) (def ^:private organization-filename "organization-metadata.json") (def ^:private person-filename "person-metadata.json") (def ^:private program-filename "program-metadata.json") (def ^:private activity-filename "activity-metadata.json") (def ^:private repository-filename "repository-metadata.json") (defn- list-metadata-files [filename] (doall (sort-by #(.getCanonicalPath ^java.io.File %) (filter #(= filename (.getName ^java.io.File %)) (file-seq (io/file gh/metadata-directory)))))) (defstate ^:private organization-metadata-files :start (list-metadata-files organization-filename)) (defstate ^:private person-metadata-files :start (list-metadata-files person-filename)) (defstate ^:private program-metadata-files :start (list-metadata-files program-filename)) (defstate ^:private activity-metadata-files :start (list-metadata-files activity-filename)) (defstate ^:private repository-metadata-files :start (list-metadata-files repository-filename)) (defstate ^:private metadata-files :start {:organization organization-metadata-files :person person-metadata-files :program program-metadata-files :activity activity-metadata-files :repository repository-metadata-files}) (defn- list-subdirs "Returns a sequence of the immediate subdirectories of dir, as java.io.File objects." [^java.io.File dir] (seq (.listFiles dir (reify java.io.FileFilter (accept [this f] (.isDirectory f)))))) (defstate organizations :start (doall (sort (map #(.getName ^java.io.File %) (list-subdirs (io/file organization-metadata-directory)))))) (defstate people :start (doall (sort (map #(.getName ^java.io.File %) (list-subdirs (io/file people-metadata-directory)))))) (defstate programs :start (concat (doall (sort (map #(.getName ^java.io.File %) (list-subdirs (io/file program-metadata-directory))))) ["toplevel"])) (defn- clojurise-json-key "Converts nasty JSON String keys (e.g. \"fullName\") to nice Clojure keys (e.g. :full-name)." 
[k] (keyword (str/replace (str/join "-" (map str/lower-case (str/split k #"(?<!(^|[A-Z]))(?=[A-Z])|(?<!^)(?=[A-Z][a-z])"))) "git-hub" "github"))) (defn- read-metadata-file-fn [metadata-file] (let [the-file (io/file metadata-file)] (when (.exists the-file) (ch/parse-string (slurp the-file) clojurise-json-key)))) (def ^:private read-metadata-file (memoize read-metadata-file-fn)) (defn- validate-metadata-file "Validates the given metadata file against the given schema-type, automatically determining which version the metadata file is." [schema-type ^java.io.File file] (log/debug "Validating" schema-type "metadata file" (.getCanonicalPath file)) (try (let [json-string (slurp file) json (ch/parse-string json-string clojurise-json-key) metadata-version (:metadata-version json) schema-id [schema-type metadata-version]] (if metadata-version (sch/validate-json schema-id json-string) (throw (Exception. (str "No metadataVersion property."))))) (catch Exception e (throw (Exception. (str (.getCanonicalPath file) " failed to validate, due to " (.getMessage e)) e))))) (defn- validate-metadata-files [schema-type files] (doall (map (partial validate-metadata-file schema-type) files))) (defn validate-metadata "Validates all metadata in the repository." [] (doall (map #(validate-metadata-files (key %) (val %)) metadata-files))) (defn organization-metadata "Organization metadata of the given organization-id, or nil if there is none." [organization-id] (if organization-id (if-let [result (read-metadata-file (str organization-metadata-directory "/" organization-id "/" organization-filename))] (assoc result :organization-id organization-id)))) (defn organizations-metadata "A seq containing the metadata of all organizations, sorted by organization-name." [] (sort-by :organization-name (keep organization-metadata organizations))) (defn person-metadata "Person metadata of the given person-id, or nil if there is none." [person-id] (if person-id (if-let [person-metadata (read-metadata-file (str people-metadata-directory "/" person-id "/" person-filename))] (assoc person-metadata :person-id person-id)))) (defn person-metadata-with-organizations "Person metadata of the given person-id, with affiliations expanded to include full organization metadata." [person-id] (if-let [person (person-metadata person-id)] (if-let [affiliations (:affiliations person)] (assoc person :affiliations (seq (map #(assoc % :organization (organization-metadata (:organization-id %))) affiliations))) person))) (defn people-metadata "A seq containing the metadata of all people, sorted by full-name." [] (sort-by :full-name (keep person-metadata people))) (defn people-metadata-with-organizations "A seq containing the metadata of all people, sorted by full-name." [] (sort-by :full-name (keep person-metadata-with-organizations people))) (defn person-metadata-by-github-login-fn [github-login] (if github-login (first (filter #(some #{github-login} (:github-logins %)) (people-metadata))))) (def person-metadata-by-github-login "Person metadata of the given GitHub login, or nil if there is none." (memoize person-metadata-by-github-login-fn)) (defn matches-person [person ghid name email] (or (and ; (try (not (str/blank? ghid)) ( catch Exception e ( " " name " " email " - caught exception : " ( .getMessage e ) ) ) ) (some #{ghid} (:github-logins person))) (and (not (str/blank? name)) (= name (:full-name person))) (and (not (str/blank? 
email)) (some #{email} (:email-addresses person))))) (defn person-metadata-by-fn [ghid name email] (if (or ghid name email) (first (filter #(matches-person % ghid name email) (people-metadata))))) (def person-metadata-by "Person metadata of either a given GitHub login, name or email address; returns nil if there is none." (memoize person-metadata-by-fn)) (defn lower-emails [item] (map #(str/lower-case %) (:email-addresses item))) (defn person-metadata-by-email-address-fn [email-address] (if email-address (first (filter #(some #{(str/lower-case email-address)} (lower-emails %)) (people-metadata))))) (def person-metadata-by-email-address "Person metadata of the given email address, or nil if there is none." (memoize person-metadata-by-email-address-fn)) (defn person-metadata-by-fullname-fn [full-name] (if full-name (first (filter #(= full-name (:full-name %)) (people-metadata))))) (def person-metadata-by-fullname "Person metadata of the given fullname, or nil if there is none." (memoize person-metadata-by-email-address-fn)) (defn- program-activities "A seq of the ids of all activities in the given program." [program-id] (sort (map #(.getName ^java.io.File %) (list-subdirs (io/file (if (= "toplevel" program-id) (str projects-metadata-directory) (str program-metadata-directory "/" program-id))))))) (defn- get-gh-org [activity-gh-org program] (if activity-gh-org activity-gh-org (:github-org program))) (defn- github-urls [program repos & [activity-gh-org]] (seq (map #(str "/" (get-gh-org activity-gh-org program) "/" %) repos))) (defn gitlab-url [repo] ;; TODO - change to gitlab.com/finos - when migration is completed (str "/" repo)) (defn- program-activity-github-urls [program activity] (github-urls program (:github-repos activity) (:github-org activity))) (defn- pmc-github-urls [program] (github-urls program (:pmc-repos program))) (defn- expand-mailing-list-address [mailing-list-address] (if-not (str/blank? mailing-list-address) {:email-address mailing-list-address :web-archive-url (let [[list-name domain] (str/split mailing-list-address #"@")] (if (and (not (str/blank? list-name)) (not (str/blank? domain)) (or (= domain "finos.org") (= domain "symphony.foundation"))) (str "/" domain "/forum/#!forum/" list-name)))})) (defn- expand-confluence-space-key [confluence-space-key] (if-not (str/blank? confluence-space-key) {:key confluence-space-key :url (str "/" confluence-space-key "/overview")})) (defn- program-activities-metadata "A seq containing the metadata of all activities in the given program." [program] (let [program-id (:program-id program) prj-folder (if (= "toplevel" program-id) projects-metadata-directory (str program-metadata-directory "/" program-id))] (seq (keep #(if-let [activity (read-metadata-file (str prj-folder "/" % "/" activity-filename))] (assoc activity :program-id program-id :program-name (:program-name program) :program-short-name (:program-short-name program) :activity-id % Normalise tags to lower case , de - dupe and sort (seq (sort (distinct (map str/lower-case (remove str/blank? 
current-tags)))))) :lead-or-chair-person-id (:lead-or-chair activity) :lead-or-chair (person-metadata (:lead-or-chair activity)) :github-urls (program-activity-github-urls program activity) :github-org (or (:github-org activity) (:github-org program)) :mailing-lists (map expand-mailing-list-address (:mailing-list-addresses activity)) :confluence-spaces (map expand-confluence-space-key (:confluence-space-keys activity)))) (program-activities program-id))))) (def toplevel-program-metadata {:program-id "toplevel" :program-name "Top Level" :program-short-name "TopLevel" :github-org "finos"}) (defn- program-metadata-fn "Program metadata of the given program-id, or nil if there is none." [program-id] (if (= "toplevel" program-id) (assoc toplevel-program-metadata :activities (program-activities-metadata toplevel-program-metadata)) (if-let [program (read-metadata-file (str program-metadata-directory "/" program-id "/" program-filename))] Note : this assoc has to happen first , since ( program - activities - metadata ) depends on it . (assoc program :github-url (if (:github-org program) (str "/" (:github-org program))) :pmc-github-urls (pmc-github-urls program) :activities (program-activities-metadata program) :pmc-mailing-list (expand-mailing-list-address (:pmc-mailing-list-address program)) :pmc-private-mailing-list (expand-mailing-list-address (:pmc-private-mailing-list-address program)) :program-mailing-list (expand-mailing-list-address (:program-mailing-list-address program)) :confluence-space (expand-confluence-space-key (:confluence-space-key program))))))) (def program-metadata (memoize program-metadata-fn)) (defn programs-metadata "A seq containing the metadata of all programs." [] (keep program-metadata programs)) (defn activities-metadata "A seq containing the metadata of all activities, regardless of program." [] (sort-by :activity-name (remove nil? (mapcat :activities (programs-metadata))))) (defn- to-top-level "Cast a program to top-level, if disbanded" [program] (if (:disbanded program) (assoc (program-metadata "toplevel") :activities (map #(assoc % :program-short-name "TopLevel") (:activities program))) program)) (defn activities-metadata-after-disband "A seq containing the metadata of all activities, regardless of program, after the program disband." [] (sort-by :activity-name (remove nil? (mapcat :activities (map #(to-top-level %) (programs-metadata)))))) (defn project-lead "Returns a project and lead data" [project] (let [lead-raw (:lead-or-chair project) ret (assoc {} :project (:activity-name project) :state (:state project) :full-name (:full-name lead-raw) :email (first (:email-addresses lead-raw)) :github (first (:github-logins lead-raw)))] ret)) (defn project-leads "Returns the list of projects and lead data" [& [use-emails]] (let [projects (activities-metadata-after-disband) with-leads (map #(project-lead %) (remove #(= (:state %) "ARCHIVED") projects))] ;; with-leads)) (if use-emails (map :email with-leads) (map :github with-leads)))) (defn activity-metadata "The metadata for a specific activity." [activity-id] (filter #(= activity-id (:activity-id %)) activities-metadata)) (defn- activity-metadata-by-name-fn [activity-name] (if-not (str/blank? activity-name) (if-let [result (first (filter #(= activity-name (:activity-name %)) (activities-metadata)))] result (log/warn "Could not find metadata for" activity-name)))) (def activity-metadata-by-name "The metadata for a specific activity, identified by name." 
(memoize activity-metadata-by-name-fn)) (defn filter-activity-by-github-coords "Inner code of activity-by-github-coords" [activity repo-name org-name] (let [lower-case-repos (map #(str/lower-case %) (:github-repos activity)) lower-repo-name (str/lower-case repo-name)] (and (some #(= lower-repo-name %) (set lower-case-repos)) (= (str/lower-case org-name) (str/lower-case (:github-org activity)))))) (defn activity-by-github-coords "Returns a metadata project, given a GitHub org and repo names" [org-name repo-name] (if-let [repos (filter #(filter-activity-by-github-coords % repo-name org-name) (activities-metadata))] (first repos))) (defn projects-metadata "A seq containing the metadata of all activities of type PROJECT, regardless of program." [] (filter #(= (:type %) "PROJECT") (activities-metadata))) (defn sigs-metadata "A seq containing the metadata of all activities of type SIG, regardless of program." [] (filter #(= (:type %) "SIG") (activities-metadata))) (defn projects-sigs-metadata "A seq containing the metadata of all activities of type PROJECT or SIG, regardless of program." [] (flatten [(projects-metadata) (sigs-metadata)])) (defn working-groups-metadata "A seq containing the metadata of all activities of type WORKING_GROUP, regardless of program." [] (filter #(= (:type %) "WORKING_GROUP") (activities-metadata))) (defn- current? "True if the given 'date range' map (with a :start-date and/or :end-date key) is current i.e. spans today." [m] (if m (let [today (tf/unparse (tf/formatters :date) (tm/now)) start-date (:start-date m) end-date (:end-date m)] (and (or (nil? start-date) (neg? (compare start-date today))) (or (nil? end-date) (neg? (compare today end-date))))))) (defn current-approved-contributors "A seq of person metadata for the *currently* approved contributors for the given organization-id, or nil if there are none." [organization-id] (if-let [organization-metadata (organization-metadata organization-id)] (if-let [current-contributors (seq (filter current? (:approved-contributors organization-metadata)))] (map #(person-metadata (:person-id %)) current-contributors)))) (defn current-affiliations "A seq of organization metadata the given person-id is *currently* affiliated with, or nil if there are none." [person-id] (if-let [person-metadata (person-metadata person-id)] (if-let [current-affiliations (seq (filter current? (:affiliations person-metadata)))] (map #(organization-metadata (:organization-id %)) current-affiliations)))) (defn assoc-org-name [person] (let [id (:person-id person)] (assoc person :org-name (or (str " (" (:organization-name (first (current-affiliations id))) ")") "")))) (defn orgs-in-pmc [program] (let [pmc-list (:pmc program)] (distinct (remove #(= "Individual Contributor" %) (map #(:organization-name (first (current-affiliations %))) pmc-list))))) (defn activities [program type] (map :activity-name (remove #(= "ARCHIVED" (:state %)) (filter #(= type (:type %)) (:activities program))))) (defn pmc-lead [program] (let [pmc-lead (:pmc-lead program) lead-enriched (person-metadata pmc-lead) full-name (:full-name lead-enriched) org-name (:org-name (assoc-org-name lead-enriched))] (str full-name org-name))) (defn pmc-list [program] (let [pmc-list (:pmc program) people-enriched (map person-metadata pmc-list) orgs-enriched (map assoc-org-name people-enriched)] (map #(str (:full-name %) (:org-name %)) orgs-enriched))) (defn has-icla? [person-id] (boolean (:has-icla (person-metadata person-id)))) (defn has-ccla? 
[person-id] (if-let [current-affiliations-with-cclas (seq (filter :has-ccla (current-affiliations person-id)))] (let [current-approved-contributors (map :person-id (mapcat #(current-approved-contributors (:organization-id %)) current-affiliations-with-cclas))] (or (empty? current-approved-contributors) (boolean (some #{person-id} current-approved-contributors)))) false)) (defn has-cla? [person-id] (or (has-icla? person-id) (has-ccla? person-id))) (defn people-with-clas "A seq of person metadata for all people who currently have CLAs with the Foundation." [] (map person-metadata (filter has-cla? people))) (defn all-activity-tags "A seq of all of the tags in activities, normalised to lower-case." [] (seq (sort (distinct (map str/lower-case (remove str/blank? (mapcat :tags (activities-metadata))))))))
null
https://raw.githubusercontent.com/finos/metadata-tool/61a48a91411f39fc543992f72535fd7ba82959f1/src/metadata_tool/sources/metadata.clj
clojure
you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. (try TODO - change to gitlab.com/finos - when migration is completed with-leads))
Copyright 2017 Fintech Open Source Foundation SPDX - License - Identifier : Apache-2.0 distributed under the License is distributed on an " AS IS " BASIS , (ns metadata-tool.sources.metadata (:require [clojure.string :as str] [clojure.tools.logging :as log] [clojure.java.io :as io] [clojure.pprint :as pp] [mount.core :as mnt :refer [defstate]] [cheshire.core :as ch] [clj-time.core :as tm] [clj-time.format :as tf] [metadata-tool.sources.github :as gh] [metadata-tool.sources.schemas :as sch])) (defstate ^:private organization-metadata-directory :start (str gh/metadata-directory "/organizations")) (defstate ^:private people-metadata-directory :start (str gh/metadata-directory "/people")) (defstate ^:private program-metadata-directory :start (str gh/metadata-directory "/programs")) (defstate ^:private projects-metadata-directory :start (if (some? gh/projects-directory) gh/projects-directory (str gh/metadata-directory "/toplevel"))) (def ^:private organization-filename "organization-metadata.json") (def ^:private person-filename "person-metadata.json") (def ^:private program-filename "program-metadata.json") (def ^:private activity-filename "activity-metadata.json") (def ^:private repository-filename "repository-metadata.json") (defn- list-metadata-files [filename] (doall (sort-by #(.getCanonicalPath ^java.io.File %) (filter #(= filename (.getName ^java.io.File %)) (file-seq (io/file gh/metadata-directory)))))) (defstate ^:private organization-metadata-files :start (list-metadata-files organization-filename)) (defstate ^:private person-metadata-files :start (list-metadata-files person-filename)) (defstate ^:private program-metadata-files :start (list-metadata-files program-filename)) (defstate ^:private activity-metadata-files :start (list-metadata-files activity-filename)) (defstate ^:private repository-metadata-files :start (list-metadata-files repository-filename)) (defstate ^:private metadata-files :start {:organization organization-metadata-files :person person-metadata-files :program program-metadata-files :activity activity-metadata-files :repository repository-metadata-files}) (defn- list-subdirs "Returns a sequence of the immediate subdirectories of dir, as java.io.File objects." [^java.io.File dir] (seq (.listFiles dir (reify java.io.FileFilter (accept [this f] (.isDirectory f)))))) (defstate organizations :start (doall (sort (map #(.getName ^java.io.File %) (list-subdirs (io/file organization-metadata-directory)))))) (defstate people :start (doall (sort (map #(.getName ^java.io.File %) (list-subdirs (io/file people-metadata-directory)))))) (defstate programs :start (concat (doall (sort (map #(.getName ^java.io.File %) (list-subdirs (io/file program-metadata-directory))))) ["toplevel"])) (defn- clojurise-json-key "Converts nasty JSON String keys (e.g. \"fullName\") to nice Clojure keys (e.g. :full-name)." [k] (keyword (str/replace (str/join "-" (map str/lower-case (str/split k #"(?<!(^|[A-Z]))(?=[A-Z])|(?<!^)(?=[A-Z][a-z])"))) "git-hub" "github"))) (defn- read-metadata-file-fn [metadata-file] (let [the-file (io/file metadata-file)] (when (.exists the-file) (ch/parse-string (slurp the-file) clojurise-json-key)))) (def ^:private read-metadata-file (memoize read-metadata-file-fn)) (defn- validate-metadata-file "Validates the given metadata file against the given schema-type, automatically determining which version the metadata file is." 
[schema-type ^java.io.File file] (log/debug "Validating" schema-type "metadata file" (.getCanonicalPath file)) (try (let [json-string (slurp file) json (ch/parse-string json-string clojurise-json-key) metadata-version (:metadata-version json) schema-id [schema-type metadata-version]] (if metadata-version (sch/validate-json schema-id json-string) (throw (Exception. (str "No metadataVersion property."))))) (catch Exception e (throw (Exception. (str (.getCanonicalPath file) " failed to validate, due to " (.getMessage e)) e))))) (defn- validate-metadata-files [schema-type files] (doall (map (partial validate-metadata-file schema-type) files))) (defn validate-metadata "Validates all metadata in the repository." [] (doall (map #(validate-metadata-files (key %) (val %)) metadata-files))) (defn organization-metadata "Organization metadata of the given organization-id, or nil if there is none." [organization-id] (if organization-id (if-let [result (read-metadata-file (str organization-metadata-directory "/" organization-id "/" organization-filename))] (assoc result :organization-id organization-id)))) (defn organizations-metadata "A seq containing the metadata of all organizations, sorted by organization-name." [] (sort-by :organization-name (keep organization-metadata organizations))) (defn person-metadata "Person metadata of the given person-id, or nil if there is none." [person-id] (if person-id (if-let [person-metadata (read-metadata-file (str people-metadata-directory "/" person-id "/" person-filename))] (assoc person-metadata :person-id person-id)))) (defn person-metadata-with-organizations "Person metadata of the given person-id, with affiliations expanded to include full organization metadata." [person-id] (if-let [person (person-metadata person-id)] (if-let [affiliations (:affiliations person)] (assoc person :affiliations (seq (map #(assoc % :organization (organization-metadata (:organization-id %))) affiliations))) person))) (defn people-metadata "A seq containing the metadata of all people, sorted by full-name." [] (sort-by :full-name (keep person-metadata people))) (defn people-metadata-with-organizations "A seq containing the metadata of all people, sorted by full-name." [] (sort-by :full-name (keep person-metadata-with-organizations people))) (defn person-metadata-by-github-login-fn [github-login] (if github-login (first (filter #(some #{github-login} (:github-logins %)) (people-metadata))))) (def person-metadata-by-github-login "Person metadata of the given GitHub login, or nil if there is none." (memoize person-metadata-by-github-login-fn)) (defn matches-person [person ghid name email] (or (and (not (str/blank? ghid)) ( catch Exception e ( " " name " " email " - caught exception : " ( .getMessage e ) ) ) ) (some #{ghid} (:github-logins person))) (and (not (str/blank? name)) (= name (:full-name person))) (and (not (str/blank? email)) (some #{email} (:email-addresses person))))) (defn person-metadata-by-fn [ghid name email] (if (or ghid name email) (first (filter #(matches-person % ghid name email) (people-metadata))))) (def person-metadata-by "Person metadata of either a given GitHub login, name or email address; returns nil if there is none." 
(memoize person-metadata-by-fn)) (defn lower-emails [item] (map #(str/lower-case %) (:email-addresses item))) (defn person-metadata-by-email-address-fn [email-address] (if email-address (first (filter #(some #{(str/lower-case email-address)} (lower-emails %)) (people-metadata))))) (def person-metadata-by-email-address "Person metadata of the given email address, or nil if there is none." (memoize person-metadata-by-email-address-fn)) (defn person-metadata-by-fullname-fn [full-name] (if full-name (first (filter #(= full-name (:full-name %)) (people-metadata))))) (def person-metadata-by-fullname "Person metadata of the given fullname, or nil if there is none." (memoize person-metadata-by-email-address-fn)) (defn- program-activities "A seq of the ids of all activities in the given program." [program-id] (sort (map #(.getName ^java.io.File %) (list-subdirs (io/file (if (= "toplevel" program-id) (str projects-metadata-directory) (str program-metadata-directory "/" program-id))))))) (defn- get-gh-org [activity-gh-org program] (if activity-gh-org activity-gh-org (:github-org program))) (defn- github-urls [program repos & [activity-gh-org]] (seq (map #(str "/" (get-gh-org activity-gh-org program) "/" %) repos))) (defn gitlab-url [repo] (str "/" repo)) (defn- program-activity-github-urls [program activity] (github-urls program (:github-repos activity) (:github-org activity))) (defn- pmc-github-urls [program] (github-urls program (:pmc-repos program))) (defn- expand-mailing-list-address [mailing-list-address] (if-not (str/blank? mailing-list-address) {:email-address mailing-list-address :web-archive-url (let [[list-name domain] (str/split mailing-list-address #"@")] (if (and (not (str/blank? list-name)) (not (str/blank? domain)) (or (= domain "finos.org") (= domain "symphony.foundation"))) (str "/" domain "/forum/#!forum/" list-name)))})) (defn- expand-confluence-space-key [confluence-space-key] (if-not (str/blank? confluence-space-key) {:key confluence-space-key :url (str "/" confluence-space-key "/overview")})) (defn- program-activities-metadata "A seq containing the metadata of all activities in the given program." [program] (let [program-id (:program-id program) prj-folder (if (= "toplevel" program-id) projects-metadata-directory (str program-metadata-directory "/" program-id))] (seq (keep #(if-let [activity (read-metadata-file (str prj-folder "/" % "/" activity-filename))] (assoc activity :program-id program-id :program-name (:program-name program) :program-short-name (:program-short-name program) :activity-id % Normalise tags to lower case , de - dupe and sort (seq (sort (distinct (map str/lower-case (remove str/blank? current-tags)))))) :lead-or-chair-person-id (:lead-or-chair activity) :lead-or-chair (person-metadata (:lead-or-chair activity)) :github-urls (program-activity-github-urls program activity) :github-org (or (:github-org activity) (:github-org program)) :mailing-lists (map expand-mailing-list-address (:mailing-list-addresses activity)) :confluence-spaces (map expand-confluence-space-key (:confluence-space-keys activity)))) (program-activities program-id))))) (def toplevel-program-metadata {:program-id "toplevel" :program-name "Top Level" :program-short-name "TopLevel" :github-org "finos"}) (defn- program-metadata-fn "Program metadata of the given program-id, or nil if there is none." 
[program-id] (if (= "toplevel" program-id) (assoc toplevel-program-metadata :activities (program-activities-metadata toplevel-program-metadata)) (if-let [program (read-metadata-file (str program-metadata-directory "/" program-id "/" program-filename))] Note : this assoc has to happen first , since ( program - activities - metadata ) depends on it . (assoc program :github-url (if (:github-org program) (str "/" (:github-org program))) :pmc-github-urls (pmc-github-urls program) :activities (program-activities-metadata program) :pmc-mailing-list (expand-mailing-list-address (:pmc-mailing-list-address program)) :pmc-private-mailing-list (expand-mailing-list-address (:pmc-private-mailing-list-address program)) :program-mailing-list (expand-mailing-list-address (:program-mailing-list-address program)) :confluence-space (expand-confluence-space-key (:confluence-space-key program))))))) (def program-metadata (memoize program-metadata-fn)) (defn programs-metadata "A seq containing the metadata of all programs." [] (keep program-metadata programs)) (defn activities-metadata "A seq containing the metadata of all activities, regardless of program." [] (sort-by :activity-name (remove nil? (mapcat :activities (programs-metadata))))) (defn- to-top-level "Cast a program to top-level, if disbanded" [program] (if (:disbanded program) (assoc (program-metadata "toplevel") :activities (map #(assoc % :program-short-name "TopLevel") (:activities program))) program)) (defn activities-metadata-after-disband "A seq containing the metadata of all activities, regardless of program, after the program disband." [] (sort-by :activity-name (remove nil? (mapcat :activities (map #(to-top-level %) (programs-metadata)))))) (defn project-lead "Returns a project and lead data" [project] (let [lead-raw (:lead-or-chair project) ret (assoc {} :project (:activity-name project) :state (:state project) :full-name (:full-name lead-raw) :email (first (:email-addresses lead-raw)) :github (first (:github-logins lead-raw)))] ret)) (defn project-leads "Returns the list of projects and lead data" [& [use-emails]] (let [projects (activities-metadata-after-disband) with-leads (map #(project-lead %) (remove #(= (:state %) "ARCHIVED") projects))] (if use-emails (map :email with-leads) (map :github with-leads)))) (defn activity-metadata "The metadata for a specific activity." [activity-id] (filter #(= activity-id (:activity-id %)) activities-metadata)) (defn- activity-metadata-by-name-fn [activity-name] (if-not (str/blank? activity-name) (if-let [result (first (filter #(= activity-name (:activity-name %)) (activities-metadata)))] result (log/warn "Could not find metadata for" activity-name)))) (def activity-metadata-by-name "The metadata for a specific activity, identified by name." (memoize activity-metadata-by-name-fn)) (defn filter-activity-by-github-coords "Inner code of activity-by-github-coords" [activity repo-name org-name] (let [lower-case-repos (map #(str/lower-case %) (:github-repos activity)) lower-repo-name (str/lower-case repo-name)] (and (some #(= lower-repo-name %) (set lower-case-repos)) (= (str/lower-case org-name) (str/lower-case (:github-org activity)))))) (defn activity-by-github-coords "Returns a metadata project, given a GitHub org and repo names" [org-name repo-name] (if-let [repos (filter #(filter-activity-by-github-coords % repo-name org-name) (activities-metadata))] (first repos))) (defn projects-metadata "A seq containing the metadata of all activities of type PROJECT, regardless of program." 
[] (filter #(= (:type %) "PROJECT") (activities-metadata))) (defn sigs-metadata "A seq containing the metadata of all activities of type SIG, regardless of program." [] (filter #(= (:type %) "SIG") (activities-metadata))) (defn projects-sigs-metadata "A seq containing the metadata of all activities of type PROJECT or SIG, regardless of program." [] (flatten [(projects-metadata) (sigs-metadata)])) (defn working-groups-metadata "A seq containing the metadata of all activities of type WORKING_GROUP, regardless of program." [] (filter #(= (:type %) "WORKING_GROUP") (activities-metadata))) (defn- current? "True if the given 'date range' map (with a :start-date and/or :end-date key) is current i.e. spans today." [m] (if m (let [today (tf/unparse (tf/formatters :date) (tm/now)) start-date (:start-date m) end-date (:end-date m)] (and (or (nil? start-date) (neg? (compare start-date today))) (or (nil? end-date) (neg? (compare today end-date))))))) (defn current-approved-contributors "A seq of person metadata for the *currently* approved contributors for the given organization-id, or nil if there are none." [organization-id] (if-let [organization-metadata (organization-metadata organization-id)] (if-let [current-contributors (seq (filter current? (:approved-contributors organization-metadata)))] (map #(person-metadata (:person-id %)) current-contributors)))) (defn current-affiliations "A seq of organization metadata the given person-id is *currently* affiliated with, or nil if there are none." [person-id] (if-let [person-metadata (person-metadata person-id)] (if-let [current-affiliations (seq (filter current? (:affiliations person-metadata)))] (map #(organization-metadata (:organization-id %)) current-affiliations)))) (defn assoc-org-name [person] (let [id (:person-id person)] (assoc person :org-name (or (str " (" (:organization-name (first (current-affiliations id))) ")") "")))) (defn orgs-in-pmc [program] (let [pmc-list (:pmc program)] (distinct (remove #(= "Individual Contributor" %) (map #(:organization-name (first (current-affiliations %))) pmc-list))))) (defn activities [program type] (map :activity-name (remove #(= "ARCHIVED" (:state %)) (filter #(= type (:type %)) (:activities program))))) (defn pmc-lead [program] (let [pmc-lead (:pmc-lead program) lead-enriched (person-metadata pmc-lead) full-name (:full-name lead-enriched) org-name (:org-name (assoc-org-name lead-enriched))] (str full-name org-name))) (defn pmc-list [program] (let [pmc-list (:pmc program) people-enriched (map person-metadata pmc-list) orgs-enriched (map assoc-org-name people-enriched)] (map #(str (:full-name %) (:org-name %)) orgs-enriched))) (defn has-icla? [person-id] (boolean (:has-icla (person-metadata person-id)))) (defn has-ccla? [person-id] (if-let [current-affiliations-with-cclas (seq (filter :has-ccla (current-affiliations person-id)))] (let [current-approved-contributors (map :person-id (mapcat #(current-approved-contributors (:organization-id %)) current-affiliations-with-cclas))] (or (empty? current-approved-contributors) (boolean (some #{person-id} current-approved-contributors)))) false)) (defn has-cla? [person-id] (or (has-icla? person-id) (has-ccla? person-id))) (defn people-with-clas "A seq of person metadata for all people who currently have CLAs with the Foundation." [] (map person-metadata (filter has-cla? people))) (defn all-activity-tags "A seq of all of the tags in activities, normalised to lower-case." [] (seq (sort (distinct (map str/lower-case (remove str/blank? 
(mapcat :tags (activities-metadata))))))))
c316ac22140a3f3b9940bf5246c8efb294cb8ccf17523c4e032002e7c95e572d
Copilot-Language/copilot-libraries
PTLTL.hs
-- |
-- Module: PTLTL
-- Description: Past-Time Linear-Temporal Logic
-- Copyright: (c) 2011 National Institute of Aerospace / Galois, Inc.
-- Provides past-time linear-temporal logic (ptLTL operators).
--
-- /Interface:/ See @Examples/PTLTLExamples.hs@ in the
-- < Copilot repository>.
-- You can embed a ptLTL specification within a Copilot specification using
-- the form:
--
-- @
-- operator stream
-- @

{-# LANGUAGE NoImplicitPrelude #-}

module Copilot.Library.PTLTL
  ( since, alwaysBeen, eventuallyPrev, previous ) where

import Prelude ()
import Copilot.Language

-- | Did @s@ hold in the previous period?
previous :: Stream Bool -> Stream Bool
previous s = [ False ] ++ s

-- | Has @s@ always held (up to and including the current state)?
alwaysBeen :: Stream Bool -> Stream Bool
alwaysBeen s = s && tmp
  where tmp = [ True ] ++ s && tmp

-- | Did @s@ hold at some time in the past (including the current state)?
eventuallyPrev :: Stream Bool -> Stream Bool
eventuallyPrev s = s || tmp
  where tmp = [ False ] ++ s || tmp

-- | Once @s2@ holds, in the following state (period), does @s1@ continuously hold?
since :: Stream Bool -> Stream Bool -> Stream Bool
since s1 s2 = alwaysBeen ( tmp ==> s1 )
  where tmp = eventuallyPrev $ [ False ] ++ s2
null
https://raw.githubusercontent.com/Copilot-Language/copilot-libraries/ae95ad91d5b135769871cb42056908bffba5a009/src/Copilot/Library/PTLTL.hs
haskell
| Module: PTLTL Description: Past-Time Linear-Temporal Logic /Interface:/ See @Examples/PTLTLExamples.hs@ in the < Copilot repository>. the form: @ operator stream @ | Did @s@ hold in the previous period? | Has @s@ always held (up to and including the current state)? | Did @s@ hold at some time in the past (including the current state)? | Once @s2@ holds, in the following state (period), does @s1@ continuously hold?
Copyright : ( c ) 2011 National Institute of Aerospace / Galois , Inc. Provides past - time linear - temporal logic ( ptLTL operators ) . You can embed a ptLTL specification within a Copilot specification using # LANGUAGE NoImplicitPrelude # module Copilot.Library.PTLTL ( since, alwaysBeen, eventuallyPrev, previous ) where import Prelude () import Copilot.Language previous :: Stream Bool -> Stream Bool previous s = [ False ] ++ s alwaysBeen :: Stream Bool -> Stream Bool alwaysBeen s = s && tmp where tmp = [ True ] ++ s && tmp eventuallyPrev :: Stream Bool -> Stream Bool eventuallyPrev s = s || tmp where tmp = [ False ] ++ s || tmp since :: Stream Bool -> Stream Bool -> Stream Bool since s1 s2 = alwaysBeen ( tmp ==> s1 ) where tmp = eventuallyPrev $ [ False ] ++ s2
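A minimal usage sketch, assuming two Boolean input streams named fault and ack (invented names, not part of the library), showing how the operators above combine into a ptLTL property:

{-# LANGUAGE NoImplicitPrelude #-}
import Prelude ()
import Copilot.Language
import Copilot.Library.PTLTL

-- Hypothetical property: if a fault has ever been observed, then an
-- acknowledgement must have held since that fault occurred.
faultAcked :: Stream Bool -> Stream Bool -> Stream Bool
faultAcked fault ack = eventuallyPrev fault ==> (ack `since` fault)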
7f83dab0d64dbb4cd9bdaca31e8cb8dd9eef879dbed7e0adff308c8cbcd31b34
2600hz/kazoo
trunkstore_listener.erl
%%%----------------------------------------------------------------------------- ( C ) 2013 - 2020 , 2600Hz %%% @doc This Source Code Form is subject to the terms of the Mozilla Public License , v. 2.0 . If a copy of the MPL was not distributed with this file , You can obtain one at /. %%% %%% @end %%%----------------------------------------------------------------------------- -module(trunkstore_listener). -behaviour(gen_listener). -export([start_link/0]). -export([init/1 ,handle_call/3 ,handle_cast/2 ,handle_info/2 ,handle_event/2 ,terminate/2 ,code_change/3 ]). -include("ts.hrl"). -define(SERVER, ?MODULE). -record(state, {}). -type state() :: #state{}. %% By convention, we put the options here in macros, but not required. -define(BINDINGS, [{'conf', [{'doc_type', <<"sys_info">>}]}]). -define(RESPONDERS, [{{'trunkstore_handlers', 'handle_config_change'} ,[{<<"configuration">>, <<"*">>}] } ]). -define(QUEUE_NAME, <<>>). -define(QUEUE_OPTIONS, []). -define(CONSUME_OPTIONS, []). %%%============================================================================= %%% API %%%============================================================================= %%------------------------------------------------------------------------------ %% @doc Starts the server. %% @end %%------------------------------------------------------------------------------ -spec start_link() -> kz_types:startlink_ret(). start_link() -> gen_listener:start_link(?SERVER, [{'bindings', ?BINDINGS} ,{'responders', ?RESPONDERS} ,{'queue_name', ?QUEUE_NAME} % optional to include ,{'queue_options', ?QUEUE_OPTIONS} % optional to include ,{'consume_options', ?CONSUME_OPTIONS} % optional to include ], []). %%%============================================================================= %%% gen_server callbacks %%%============================================================================= %%------------------------------------------------------------------------------ %% @doc Initializes the server. %% @end %%------------------------------------------------------------------------------ -spec init([]) -> {'ok', state()}. init([]) -> {'ok', #state{}}. %%------------------------------------------------------------------------------ %% @doc Handling call messages. %% @end %%------------------------------------------------------------------------------ -spec handle_call(any(), kz_term:pid_ref(), state()) -> kz_types:handle_call_ret_state(state()). handle_call(_Request, _From, State) -> {'reply', {'error', 'not_implemented'}, State}. %%------------------------------------------------------------------------------ %% @doc Handling cast messages. %% @end %%------------------------------------------------------------------------------ -spec handle_cast(any(), state()) -> kz_types:handle_cast_ret_state(state()). handle_cast({'gen_listener', {'created_queue', _QueueNAme}}, State) -> {'noreply', State}; handle_cast({'gen_listener', {'is_consuming', _IsConsuming}}, State) -> {'noreply', State}; handle_cast(_Msg, State) -> {'noreply', State}. %%------------------------------------------------------------------------------ %% @doc Handling all non call/cast messages. %% @end %%------------------------------------------------------------------------------ -spec handle_info(any(), state()) -> kz_types:handle_info_ret_state(state()). handle_info(_Info, State) -> {'noreply', State}. %%------------------------------------------------------------------------------ %% @doc Allows listener to pass options to handlers. 
%% @end %%------------------------------------------------------------------------------ -spec handle_event(kz_json:object(), kz_term:proplist()) -> gen_listener:handle_event_return(). handle_event(_JObj, _State) -> {'reply', []}. %%------------------------------------------------------------------------------ %% @doc This function is called by a `gen_server' when it is about to %% terminate. It should be the opposite of `Module:init/1' and do any %% necessary cleaning up. When it returns, the `gen_server' terminates with . The return value is ignored . %% %% @end %%------------------------------------------------------------------------------ -spec terminate(any(), state()) -> 'ok'. terminate(_Reason, _State) -> lager:debug("listener terminating: ~p", [_Reason]). %%------------------------------------------------------------------------------ %% @doc Convert process state when code is changed. %% @end %%------------------------------------------------------------------------------ -spec code_change(any(), state(), any()) -> {'ok', state()}. code_change(_OldVsn, State, _Extra) -> {'ok', State}. %%%============================================================================= Internal functions %%%=============================================================================
null
https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/applications/trunkstore/src/trunkstore_listener.erl
erlang
----------------------------------------------------------------------------- @doc @end ----------------------------------------------------------------------------- By convention, we put the options here in macros, but not required. ============================================================================= API ============================================================================= ------------------------------------------------------------------------------ @doc Starts the server. @end ------------------------------------------------------------------------------ optional to include optional to include optional to include ============================================================================= gen_server callbacks ============================================================================= ------------------------------------------------------------------------------ @doc Initializes the server. @end ------------------------------------------------------------------------------ ------------------------------------------------------------------------------ @doc Handling call messages. @end ------------------------------------------------------------------------------ ------------------------------------------------------------------------------ @doc Handling cast messages. @end ------------------------------------------------------------------------------ ------------------------------------------------------------------------------ @doc Handling all non call/cast messages. @end ------------------------------------------------------------------------------ ------------------------------------------------------------------------------ @doc Allows listener to pass options to handlers. @end ------------------------------------------------------------------------------ ------------------------------------------------------------------------------ @doc This function is called by a `gen_server' when it is about to terminate. It should be the opposite of `Module:init/1' and do any necessary cleaning up. When it returns, the `gen_server' terminates @end ------------------------------------------------------------------------------ ------------------------------------------------------------------------------ @doc Convert process state when code is changed. @end ------------------------------------------------------------------------------ ============================================================================= =============================================================================
( C ) 2013 - 2020 , 2600Hz This Source Code Form is subject to the terms of the Mozilla Public License , v. 2.0 . If a copy of the MPL was not distributed with this file , You can obtain one at /. -module(trunkstore_listener). -behaviour(gen_listener). -export([start_link/0]). -export([init/1 ,handle_call/3 ,handle_cast/2 ,handle_info/2 ,handle_event/2 ,terminate/2 ,code_change/3 ]). -include("ts.hrl"). -define(SERVER, ?MODULE). -record(state, {}). -type state() :: #state{}. -define(BINDINGS, [{'conf', [{'doc_type', <<"sys_info">>}]}]). -define(RESPONDERS, [{{'trunkstore_handlers', 'handle_config_change'} ,[{<<"configuration">>, <<"*">>}] } ]). -define(QUEUE_NAME, <<>>). -define(QUEUE_OPTIONS, []). -define(CONSUME_OPTIONS, []). -spec start_link() -> kz_types:startlink_ret(). start_link() -> gen_listener:start_link(?SERVER, [{'bindings', ?BINDINGS} ,{'responders', ?RESPONDERS} ], []). -spec init([]) -> {'ok', state()}. init([]) -> {'ok', #state{}}. -spec handle_call(any(), kz_term:pid_ref(), state()) -> kz_types:handle_call_ret_state(state()). handle_call(_Request, _From, State) -> {'reply', {'error', 'not_implemented'}, State}. -spec handle_cast(any(), state()) -> kz_types:handle_cast_ret_state(state()). handle_cast({'gen_listener', {'created_queue', _QueueNAme}}, State) -> {'noreply', State}; handle_cast({'gen_listener', {'is_consuming', _IsConsuming}}, State) -> {'noreply', State}; handle_cast(_Msg, State) -> {'noreply', State}. -spec handle_info(any(), state()) -> kz_types:handle_info_ret_state(state()). handle_info(_Info, State) -> {'noreply', State}. -spec handle_event(kz_json:object(), kz_term:proplist()) -> gen_listener:handle_event_return(). handle_event(_JObj, _State) -> {'reply', []}. with . The return value is ignored . -spec terminate(any(), state()) -> 'ok'. terminate(_Reason, _State) -> lager:debug("listener terminating: ~p", [_Reason]). -spec code_change(any(), state(), any()) -> {'ok', state()}. code_change(_OldVsn, State, _Extra) -> {'ok', State}. Internal functions
65273ffabc1792724bfd633f1e0aadd12fb79ffaa634299279ac68c0f0bbf25e
snmsts/cl-langserver
events-signal.lisp
(in-package :ls-base) Signal driven IO (defun install-sigio-handler (connection) (add-sigio-handler (connection-socket-io connection) (lambda () (process-io-interrupt connection))) (handle-requests connection t)) (defvar *io-interupt-level* 0) (defun process-io-interrupt (connection) (log-event "process-io-interrupt ~d ...~%" *io-interupt-level*) (let ((*io-interupt-level* (1+ *io-interupt-level*))) (invoke-or-queue-interrupt (lambda () (handle-requests connection t)))) (log-event "process-io-interrupt ~d ... done ~%" *io-interupt-level*)) (defun deinstall-sigio-handler (connection) (log-event "deinstall-sigio-handler...~%") (remove-sigio-handlers (connection-socket-io connection)) (log-event "deinstall-sigio-handler...done~%"))
null
https://raw.githubusercontent.com/snmsts/cl-langserver/3b1246a5d0bd58459e7a64708f820bf718cf7175/src/helitage/events-signal.lisp
lisp
(in-package :ls-base) Signal driven IO (defun install-sigio-handler (connection) (add-sigio-handler (connection-socket-io connection) (lambda () (process-io-interrupt connection))) (handle-requests connection t)) (defvar *io-interupt-level* 0) (defun process-io-interrupt (connection) (log-event "process-io-interrupt ~d ...~%" *io-interupt-level*) (let ((*io-interupt-level* (1+ *io-interupt-level*))) (invoke-or-queue-interrupt (lambda () (handle-requests connection t)))) (log-event "process-io-interrupt ~d ... done ~%" *io-interupt-level*)) (defun deinstall-sigio-handler (connection) (log-event "deinstall-sigio-handler...~%") (remove-sigio-handlers (connection-socket-io connection)) (log-event "deinstall-sigio-handler...done~%"))
a282327a0ea2e6a8c89e5270f2eb8d7b331b6d709f3ec13f649c968e7a146472
kennknowles/aspcc
mod_aspcc.ml
(** Mod_caml based apache module to interpret ASP/VbScript pages in a 100%
    compatible way, including intrinsic objects and ADO *)

(** The "real" intrinsic ASP objects are defined here, so that they can
    interface with the apache API provided by Mod_caml *)

open Apache
open VbTools
open VbTypes
open Run

(* let scope = Run.create_runtime () ;; *)
let runtime = Run.create () ;;

let _ = List.iter (fun x -> Run.Loader.apply_module runtime x) [] ;;

let invalid_arg_count funcname num li =
    raise (Invalid_arg_count (funcname, num, List.length li))

(** {4 Response Object} *)

exception Response_end

(** The class of response objects *)
class response request = object (self)
    val request = request

    (** Flag to indicate whether any output has been sent to the client *)
    val mutable have_output = false

    (** If [have_output] is true, then it is illegal to change this *)
    val mutable buffer_on = false

    (** Internal buffer *)
    val buffer = Buffer.create 80

    (*=================== properties =======================*)

    (** [Response.buffer] indicates whether output will be buffered *)
    method private buffer_get = function params -> match params with
        | [] -> ref (Bool buffer_on)
        | z -> invalid_arg_count "Response.Buffer" 0 z

    (** [Response.buffer = bool] sets the buffer on/off, but cannot be changed
        once any output has been sent to the client *)
    method private buffer_let = function params -> match params with
        | [x] ->
            if have_output then
                raise (Failure ("Cannot change the value of Response.Buffer " ^
                                "after data has been output."));
            buffer_on <- (get_bool !x);
            ref Null
        | _ -> raise (Invalid_arg_list "Response.Write")

    (*===================== methods =======================*)

    (** [Response.Clear] clears the buffer if buffering is on *)
    method private clear_method = function params -> match params with
        | [] ->
            if buffer_on then (Buffer.clear buffer; ref Null)
            else raise (Failure ("Cannot clear Response buffer when buffering " ^
                                 "is not enabled"))
        | z -> invalid_arg_count "Response.Clear" 0 z

    (** [Response.end] terminates the response, flushing all data to the client *)
    method private end_method = function params -> match params with
        | [] ->
            if buffer_on then print_string request (Buffer.contents buffer);
            raise Response_end
        | z -> invalid_arg_count "Response.End" 0 z

    (** [Response.flush] outputs everything in the buffered response *)
    method private flush_method = function params -> match params with
        | [] ->
            if buffer_on then
                (print_string request (Buffer.contents buffer);
                 Buffer.clear buffer;
                 ref Null)
            else raise (Failure ("Cannot flush Response buffer when buffering " ^
                                 "is not enabled"))
        | z -> invalid_arg_count "Response.Clear" 0 z

    (** [Response.Write( str )] writes [str] to the client request *)
    method private write_method = function params -> match params with
        | [x] ->
            have_output <- true;
            if buffer_on then Buffer.add_string buffer (get_string !x)
            else print_string request (get_string !x);
            ref Null
        | z -> invalid_arg_count "Response.Write" 0 z

    (** There are no accessible fields, so Symbol_not_found is
        immediately raised *)
    method field name = raise (Symbol_not_found name); ref Null

    (** See [field] *)
    method set_field name (value:value_t ref) : unit = raise (Symbol_not_found name)

    (*=========== Constructor/destructor ==============*)
    method private destructor = function params ->
        if buffer_on then print_string request (Buffer.contents buffer);
        ref Null

    method property ?(action = Get) name = match action, String.lowercase name with
        | _, "" -> raise (Symbol_not_found "Default property for 'Response' object")

        (* Destructor *)
        | Get, "class_destroy" -> Builtin self#destructor

        (* Properties *)
        | Get, "buffer" -> Builtin self#buffer_get
        | Let, "buffer" -> Builtin self#buffer_let

        (* Methods *)
        | Get, "clear" -> Builtin self#clear_method
        | Get, "end" -> Builtin self#end_method
        | Get, "flush" -> Builtin self#flush_method
        | Get, "write" -> Builtin self#write_method

        (*
        (* Properties *)
        "CacheControl", not_implemented_member;
        "Charset", not_implemented_member;
        "ContentType", not_implemented_member;
        "Expires", not_implemented_member;
        "ExpiresAbsolute", not_implemented_member;
        "IsClientConnected", not_implemented_member;
        "PICS", not_implemented_member;
        "Status", not_implemented_member;

        (* Collections *)
        "Cookies", not_implemented_member;

        (* Methods *)
        "AddHeader", not_implemented_member;
        "AppendToLog", not_implemented_member;
        "BinaryWrite", not_implemented_member;
        "Redirect", not_implemented_member;
        *)

        | _, name -> raise (Symbol_not_found ("Response." ^ name))
end

(** The class that generates response objects *)
class response_class request = object
    val request = request
    method create_object () = let x = (new response request) in (x :> object_t)
end

(** {4 Apache interface} *)

let handle_page request filename =
    let infile = Unix.in_channel_of_descr (Unix.openfile filename [Unix.O_RDONLY] 0) in
    let lexbuf = Lexing.from_channel infile in
    try
        let ast = AspParser.page AspLexer.token lexbuf in
        let my_runtime = Run.copy runtime in
        (* create a local runtime so that we can point Response and Request to
           the proper places *)
        Run.set_html_func my_runtime (print_string request);
        Run.add_opaque_class my_runtime "Response" (new response_class request);
        Run.add_builtin_object my_runtime "Response" "response";
        (try Run.page my_runtime ast with Response_end -> ())
    with Parsing.Parse_error ->
        print_string request ("<table><tr><td><pre>" ^ (Asp.get_errors ()) ^
                              "</pre></td></tr></table>")

(** The request handler; it checks the filename for .asp and declines if it
    doesn't match *)
let handler request =
    match Request.filename request with
    | None -> failwith "No filename supplied"
    | Some s ->
        if not (try (String.lowercase (String.sub s ((String.length s) - 4) 4)) = ".asp"
                with _ -> false)
        then DECLINED
        else (handle_page request s; DONE)

let _ = Mod_caml.register_handler handler "handler"
null
https://raw.githubusercontent.com/kennknowles/aspcc/951a91cc21e291b1d3c750bbbca7fa79209edd08/frontends/mod_aspcc.ml
ocaml
* The class of response objects * Flag to indicate whether any output has been sent to the client * If [have_output] is true, then it is illegal to change this * Internal buffer =================== properties ======================= * [Response.buffer] indicates whether output will be buffered * [Response.buffer = bool] sets the buffer on/off, but cannot be changed once any output has been sent to the client ===================== methods ======================= * [Response.Clear] clears the buffer if buffering is on * [Response.end] terminates the response, flushing all data to the client * [Response.flush] outputs everything in the buffered response * [Response.Write( str )] writes [str] to the client request * See [field] =========== Constructor/destructior ============== Properties Methods (* Properties Collections Methods * The class that generates response objects create a local runtime so that we can point Response and Request to the proper places * The request handler; it checks the filename for .asp and declines if it doesn't match
* Mod_caml based apache module to interpret ASP / VbScript pages in a 100 % compatible way , including intrinsic objects and ADO compatible way, including intrinsic objects and ADO *) * The " real " intrinsic ASP objects are defined here , so that they can interface with the apache API provided by Mod_caml interface with the apache API provided by Mod_caml *) open Apache open VbTools open VbTypes open Run let scope = Run.create_runtime ( ) ; ; let runtime = Run.create () ;; let _ = List.iter (fun x -> Run.Loader.apply_module runtime x) [] ;; let invalid_arg_count funcname num li = raise (Invalid_arg_count (funcname, num, List.length li)) * { 4 Response Object } exception Response_end class response request = object (self) val request = request val mutable have_output = false val mutable buffer_on = false val buffer = Buffer.create 80 method private buffer_get = function params -> match params with | [] -> ref (Bool buffer_on) | z -> invalid_arg_count "Response.Buffer" 0 z method private buffer_let = function params -> match params with | [x] -> if have_output then raise (Failure ("Cannot change the value of Response.Buffer " ^ "after data has been output.")); buffer_on <- (get_bool !x); ref Null | _ -> raise (Invalid_arg_list "Response.Write") method private clear_method = function params -> match params with | [] -> if buffer_on then (Buffer.clear buffer; ref Null) else raise (Failure ("Cannot clear Response buffer when buffering " ^ "is not enabled")) | z -> invalid_arg_count "Response.Clear" 0 z method private end_method = function params -> match params with | [] -> if buffer_on then print_string request (Buffer.contents buffer); raise Response_end | z -> invalid_arg_count "Response.End" 0 z method private flush_method = function params -> match params with | [] -> if buffer_on then (print_string request (Buffer.contents buffer); Buffer.clear buffer; ref Null) else raise (Failure ("Cannot flush Response buffer when buffering " ^ "is not enabled")) | z -> invalid_arg_count "Response.Clear" 0 z method private write_method = function params -> match params with | [x] -> have_output <- true; if buffer_on then Buffer.add_string buffer (get_string !x) else print_string request (get_string !x); ref Null | z -> invalid_arg_count "Response.Write" 0 z * There are no accessable fields , so Symbol_not_found is immediately raised immediately raised *) method field name = raise (Symbol_not_found name); ref Null method set_field name (value:value_t ref) : unit = raise (Symbol_not_found name) method private destructor = function params -> if buffer_on then print_string request (Buffer.contents buffer); ref Null method property ?(action = Get) name = match action, String.lowercase name with | _, "" -> raise (Symbol_not_found "Default property for 'Response' object") Destructor | Get, "class_destroy" -> Builtin self#destructor | Get, "buffer" -> Builtin self#buffer_get | Let, "buffer" -> Builtin self#buffer_let | Get, "clear" -> Builtin self#clear_method | Get, "end" -> Builtin self#end_method | Get, "flush" -> Builtin self#flush_method | Get, "write" -> Builtin self#write_method "CacheControl", not_implemented_member; "Charset", not_implemented_member; "ContentType", not_implemented_member; "Expires", not_implemented_member; "ExpiresAbsolute", not_implemented_member; "IsClientConnected", not_implemented_member; "PICS", not_implemented_member; "Status", not_implemented_member; "Cookies", not_implemented_member; "AddHeader", not_implemented_member; "AppendToLog", not_implemented_member; "BinaryWrite", 
not_implemented_member; "Redirect", not_implemented_member; *) | _, name -> raise (Symbol_not_found ("Response." ^ name)) end class response_class request = object val request = request method create_object () = let x = (new response request) in (x :> object_t) end * { 4 Apache interface } let handle_page request filename = let infile = Unix.in_channel_of_descr (Unix.openfile filename [Unix.O_RDONLY] 0) in let lexbuf = Lexing.from_channel infile in try let ast = AspParser.page AspLexer.token lexbuf in let my_runtime = Run.copy runtime in Run.set_html_func my_runtime (print_string request); Run.add_opaque_class my_runtime "Response" (new response_class request); Run.add_builtin_object my_runtime "Response" "response"; (try Run.page my_runtime ast with Response_end -> ()) with Parsing.Parse_error -> print_string request ("<table><tr><td><pre>" ^ (Asp.get_errors ()) ^ "</pre></td></tr></table>") let handler request = match Request.filename request with failwith " No filename supplied " | Some s -> if not (try (String.lowercase (String.sub s ((String.length s) - 4) 4)) = ".asp" with _ -> false) then DECLINED else (handle_page request s; DONE) let _ = Mod_caml.register_handler handler "handler"
e87c499c77549d0a844d71dfc48f16e7c8d4a99d507a9089af5ec4eb155b282e
haskell-servant/servant-swagger
TMap.hs
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Servant.Swagger.Internal.TypeLevel.TMap where

import Data.Proxy
import GHC.Exts (Constraint)

-- $setup
-- >>> :set -XDataKinds
-- >>> :set -XFlexibleContexts
-- >>> :set -XGADTs
-- >>> :set -XRankNTypes
-- >>> :set -XScopedTypeVariables
-- >>> import GHC.TypeLits
-- >>> import Data.List

-- | Map a list of constrained types to a list of values.
--
-- >>> tmap (Proxy :: Proxy KnownSymbol) symbolVal (Proxy :: Proxy ["hello", "world"])
-- ["hello","world"]
class TMap (q :: k -> Constraint) (xs :: [k]) where
  tmap :: p q -> (forall x p'. q x => p' x -> a) -> p'' xs -> [a]

instance TMap q '[] where
  tmap _ _ _ = []

instance (q x, TMap q xs) => TMap q (x ': xs) where
  tmap q f _ = f (Proxy :: Proxy x) : tmap q f (Proxy :: Proxy xs)
null
https://raw.githubusercontent.com/haskell-servant/servant-swagger/1909e44e965dca24cb1f5cee4b08c0781dfdbff6/src/Servant/Swagger/Internal/TypeLevel/TMap.hs
haskell
# LANGUAGE ConstraintKinds # # LANGUAGE DataKinds # # LANGUAGE RankNTypes # # LANGUAGE ScopedTypeVariables # # LANGUAGE TypeOperators # $setup >>> :set -XDataKinds >>> :set -XFlexibleContexts >>> :set -XGADTs >>> :set -XRankNTypes >>> :set -XScopedTypeVariables >>> import Data.List | Map a list of constrained types to a list of values. ["hello","world"]
# LANGUAGE FlexibleInstances # # LANGUAGE KindSignatures # # LANGUAGE MultiParamTypeClasses # # LANGUAGE PolyKinds # # LANGUAGE UndecidableInstances # module Servant.Swagger.Internal.TypeLevel.TMap where import Data.Proxy import GHC.Exts (Constraint) > > > import > > > tmap ( Proxy : : Proxy ) symbolVal ( Proxy : : Proxy [ " hello " , " world " ] ) class TMap (q :: k -> Constraint) (xs :: [k]) where tmap :: p q -> (forall x p'. q x => p' x -> a) -> p'' xs -> [a] instance TMap q '[] where tmap _ _ _ = [] instance (q x, TMap q xs) => TMap q (x ': xs) where tmap q f _ = f (Proxy :: Proxy x) : tmap q f (Proxy :: Proxy xs)
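A short usage sketch mirroring the module's own doctest; KnownSymbol and symbolVal are assumed to come from GHC.TypeLits, as in the doctest setup:

{-# LANGUAGE DataKinds, FlexibleContexts, GADTs, RankNTypes, ScopedTypeVariables #-}
import Data.Proxy
import GHC.TypeLits (KnownSymbol, symbolVal)
import Servant.Swagger.Internal.TypeLevel.TMap

-- Maps symbolVal over each type in the promoted list, one result per element.
greetings :: [String]
greetings = tmap (Proxy :: Proxy KnownSymbol) symbolVal (Proxy :: Proxy ["hello", "world"])
-- greetings == ["hello","world"]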
a998f9348767535e4ab8728cfe927bccb21cd0155a75c3cd3a83379e50c67809
bos/stanford-cs240h
DB1.hs
{-# LANGUAGE Rank2Types #-} module DB1 where import DB newtype DB c a = DB { fromDB :: IO a } instance Monad (DB c) where DB a >>= m = DB (a >>= \b -> fromDB (m b)) return a = DB (return a) fail s = DB (fail s) newtype SafeConn c = Safe Connection withConnection :: Pool -> (forall c. SafeConn c -> DB c a) -> IO a withConnection pool act = withConn pool $ \conn -> fromDB (act (Safe conn)) safeQuery :: SafeConn c -> String -> DB c [String] safeQuery (Safe conn) str = DB (query conn str) withConnectio :: Pool -> (forall c. ((->) (SafeConn c) (DB c a))) -> IO a withConnectio = undefined
null
https://raw.githubusercontent.com/bos/stanford-cs240h/ef304e15ae74bb13bdcbb432b18519b9b24a1a14/notes/l7/DB1.hs
haskell
# LANGUAGE Rank2Types #
module DB1 where import DB newtype DB c a = DB { fromDB :: IO a } instance Monad (DB c) where DB a >>= m = DB (a >>= \b -> fromDB (m b)) return a = DB (return a) fail s = DB (fail s) newtype SafeConn c = Safe Connection withConnection :: Pool -> (forall c. SafeConn c -> DB c a) -> IO a withConnection pool act = withConn pool $ \conn -> fromDB (act (Safe conn)) safeQuery :: SafeConn c -> String -> DB c [String] safeQuery (Safe conn) str = DB (query conn str) withConnectio :: Pool -> (forall c. ((->) (SafeConn c) (DB c a))) -> IO a withConnectio = undefined
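A hypothetical caller sketch (the Pool import, the query text, and the table name are assumptions, not part of DB1.hs); the rank-2 type of withConnection keeps the connection from escaping the callback:

import DB (Pool)
import DB1

-- Runs one query against a pooled connection; the result type fixes a = [String].
listNames :: Pool -> IO [String]
listNames pool = withConnection pool (\conn -> safeQuery conn "SELECT name FROM users")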
30e2ace8aaf898c62b36925d1288947b0a13f5f3ce2bda7a910e304ce8292a9b
aantron/luv
progress.ml
let () = let progress = ref 0. in let show_progress () = Printf.printf "%i%%\n%!" (int_of_float (!progress *. 100.)) in let notification = Luv.Async.init (fun _ -> show_progress ()) |> Result.get_ok in let rec do_work total n = if n >= total then () else begin Luv.Time.sleep 1000; progress := float_of_int (n + 1) /. float_of_int total; ignore (Luv.Async.send notification); do_work total (n + 1) end in let finished _ = Luv.Handle.close notification ignore; print_endline "Done" in Luv.Thread_pool.queue_work (fun () -> do_work 3 0) finished; ignore (Luv.Loop.run () : bool)
null
https://raw.githubusercontent.com/aantron/luv/4b49d3edad2179c76d685500edf1b44f61ec4be8/example/progress.ml
ocaml
let () = let progress = ref 0. in let show_progress () = Printf.printf "%i%%\n%!" (int_of_float (!progress *. 100.)) in let notification = Luv.Async.init (fun _ -> show_progress ()) |> Result.get_ok in let rec do_work total n = if n >= total then () else begin Luv.Time.sleep 1000; progress := float_of_int (n + 1) /. float_of_int total; ignore (Luv.Async.send notification); do_work total (n + 1) end in let finished _ = Luv.Handle.close notification ignore; print_endline "Done" in Luv.Thread_pool.queue_work (fun () -> do_work 3 0) finished; ignore (Luv.Loop.run () : bool)
b1ad2bc0267d17d2305cb0835f0586ff7b21789bbdcf2d450e616cb2beef5279
ocaml-ppx/ocamlformat
quoted_strings.ml
let foo = {%foo| foooooooooooooo |} let foo = (* A *) ({%foo| foooooooooooooo |} (* B *) [@attr]) (* C *) let foo = (* A *) {%foo sep| foooooooooooooo |sep} let foo = {%foo| foooooooooooooo |} [@@attr] let foo = {%foo| foooooooooooooo |} (* A *) [@@attr] (* B *) let foo = ({%foo| foooooooooooooo |} [@attr]) [@@attr] let foo = ({%foo| foooooooooooooo |} (* A *) [@attr]) (* B *) [@@attr] let foo = (* A *) ({%foo| foooooooooooooo |} [@attr]) (* B *) [@@attr] let foo = (* A *) {%foo sep| foooooooooooooo |sep} (* B *) [@@attr] {%%foo| foooooooooooooo |} {%%foo| foooooooooooooo |} (* A *) [@@attr] (* B *) {%%foo sep| foooooooooooooo |sep} {%%foo sep| foooooooooooooo |sep} (* A *) [@@attr] (* Structures *) {%%M.foo| <hello>{x} |} {%%M.foo bar| <hello>{|x|} |bar} (* Signatures *) module type S = sig {%%M.foo| <hello>{x} |} {%%M.foo bar| <hello>{|x|} |bar} end Expressions / Pattern / Types let ({%M.foo| <hello>{x} |} : {%M.foo| <hello>{x} |}) = {%M.foo| <hello>{x} |} let ({%M.foo bar| <hello>{|x|} |bar} : {%M.foo bar| <hello>{|x|} |bar}) = {%M.foo bar| <hello>{|x|} |bar} (* Multiline *) {%%M.foo| <hello> {x} </hello> |}
null
https://raw.githubusercontent.com/ocaml-ppx/ocamlformat/3d1c992240f7d30bcb8151285274f44619dae197/test/passing/tests/quoted_strings.ml
ocaml
A B C A A B A B A B A B A B A Structures Signatures Multiline
let foo = {%foo| foooooooooooooo |} let foo = {%foo| foooooooooooooo |} [@@attr] let foo = ({%foo| foooooooooooooo |} [@attr]) [@@attr] {%%foo| foooooooooooooo |} {%%foo sep| foooooooooooooo |sep} {%%M.foo| <hello>{x} |} {%%M.foo bar| <hello>{|x|} |bar} module type S = sig {%%M.foo| <hello>{x} |} {%%M.foo bar| <hello>{|x|} |bar} end Expressions / Pattern / Types let ({%M.foo| <hello>{x} |} : {%M.foo| <hello>{x} |}) = {%M.foo| <hello>{x} |} let ({%M.foo bar| <hello>{|x|} |bar} : {%M.foo bar| <hello>{|x|} |bar}) = {%M.foo bar| <hello>{|x|} |bar} {%%M.foo| <hello> {x} </hello> |}
22518051b6c1f393902808067b5d7945b5332c532ec936390c65c747d5a90742
foshardware/lsc
Logger.hs
-- Copyright 2018 - < >
-- SPDX-License-Identifier: GPL-3.0-or-later

module LSC.Logger where

import Control.Concurrent
import Data.Time.Format
import Data.Time.LocalTime
import System.Console.Concurrent
import System.Console.Pretty
import System.IO

data LogLevel = Silent | Error | Warning | Info | Debug
  deriving (Eq, Ord, Enum, Show)

levelColor :: Bool -> LogLevel -> String -> String
levelColor False _ = id
levelColor _ Info = color Green
levelColor _ Warning = color Yellow
levelColor _ Error = color Magenta
levelColor _ _ = id

timestamp :: IO String
timestamp = formatTime defaultTimeLocale "[%F %X]" <$> getZonedTime

levelString :: LogLevel -> String
levelString = show

withStderrLog :: IO a -> IO a
withStderrLog = withConcurrentOutput

logStderr :: LogLevel -> [String] -> IO ()
logStderr k (x : xs) = do
  time <- timestamp
  pretty <- hIsTerminalDevice stderr
  threadId <- myThreadId
  let indent = replicate (length time + length (levelString k) + 3) ' '
      thread = replicate (maximum (length <$> x : xs) - length x + 4) ' ' ++ show threadId
  errorConcurrent $ unlines $
    unwords ([time, levelColor pretty k (levelString k) ++ ":", x] ++ [thread | k /= Info])
    : map (indent ++) xs
logStderr _ _ = pure ()
null
https://raw.githubusercontent.com/foshardware/lsc/006c245a89b0a0056286205917438c7d031d04b9/src/LSC/Logger.hs
haskell
Copyright 2018 - < > SPDX - License - Identifier : GPL-3.0 - or - later module LSC.Logger where import Control.Concurrent import Data.Time.Format import Data.Time.LocalTime import System.Console.Concurrent import System.Console.Pretty import System.IO data LogLevel = Silent | Error | Warning | Info | Debug deriving (Eq, Ord, Enum, Show) levelColor :: Bool -> LogLevel -> String -> String levelColor False _ = id levelColor _ Info = color Green levelColor _ Warning = color Yellow levelColor _ Error = color Magenta levelColor _ _ = id timestamp :: IO String timestamp = formatTime defaultTimeLocale "[%F %X]" <$> getZonedTime levelString :: LogLevel -> String levelString = show withStderrLog :: IO a -> IO a withStderrLog = withConcurrentOutput logStderr :: LogLevel -> [String] -> IO () logStderr k (x : xs) = do time <- timestamp pretty <- hIsTerminalDevice stderr threadId <- myThreadId let indent = replicate (length time + length (levelString k) + 3) ' ' thread = replicate (maximum (length <$> x : xs) - length x + 4) ' ' ++ show threadId errorConcurrent $ unlines $ unwords ([time, levelColor pretty k (levelString k) ++ ":", x] ++ [thread | k /= Info]) : map (indent ++) xs logStderr _ _ = pure ()
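A minimal usage sketch (the message text is invented): wrap the program in withStderrLog so concurrent output is flushed, then emit leveled lines with logStderr:

import LSC.Logger

main :: IO ()
main = withStderrLog $ do
  -- Info lines omit the thread-id column; other levels append it.
  logStderr Info ["starting place-and-route"]
  logStderr Warning ["row height not set", "falling back to the default"]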
f06f7117bc80bd451f816d9776fbaa8bb8cae35a9844c0dbd8ad649c8028c0fb
promesante/hn-clj-pedestal-re-frame
db.cljs
(ns hn-clj-pedestal-re-frame.db) (def default-db { ; :name "re-frame" :loading? false :error false :new-links [] :search-links [] :top-links [] :link {} :count 0 ; :new? false })
null
https://raw.githubusercontent.com/promesante/hn-clj-pedestal-re-frame/76b62dbbcc1c803c8e233809796eda75893cf7c9/src/cljs/hn_clj_pedestal_re_frame/db.cljs
clojure
:name "re-frame" :new? false
(ns hn-clj-pedestal-re-frame.db) (def default-db { :loading? false :error false :new-links [] :search-links [] :top-links [] :link {} :count 0 })
0c6f45a6fa58fd6208cd7090f152cf324166bc058c041b5b7b21478ea3ab7081
yzh44yzh/practical_erlang
url_parser_tests.erl
-module(url_parser_tests). -include_lib("eunit/include/eunit.hrl"). parse_test_() -> Tests = [ {<<"/">>, {ok, #{protocol => <<"https">>, domain => <<"erlangcentral.org">>, path => [<<"books">>], query => <<>>, date => undefined }}}, %% {<<"">>, {ok, #{protocol => <<"https">>, domain => <<"www.youtube.com">>, path => [<<"user">>, <<"ErlangSolutions">>, <<"playlists">>], query => <<>>, date => undefined }}}, %% {<<"-GPwM">>, {ok, #{protocol => <<"https">>, domain => <<"www.youtube.com">>, path => [<<"playlist">>], query => <<"list=PLWbHc_FXPo2jN8cLhwLg7frCx6fJ-GPwM">>, date => undefined }}}, %% {<<"-rebar-3">>, {ok, #{protocol => <<"http">>, domain => <<"mostlyerlang.com">>, path => [<<"2015">>, <<"05">>, <<"12">>, <<"067-rebar-3">>], query => <<>>, date => {2015, 5, 12} }}}, %% {<<"-with-joe-armstrong/">>, {ok, #{protocol => <<"http">>, domain => <<"mostlyerlang.com">>, path => [<<"2015">>, <<"05">>, <<"21">>, <<"interview-with-joe-armstrong">>], query => <<>>, date => {2015, 5, 21} }}}, %% {<<"-with-guido-van-rossum/">>, {ok, #{protocol => <<"http">>, domain => <<"mostlyerlang.com">>, path => [<<"2015">>, <<"05">>, <<"07">>, <<"interview-with-guido-van-rossum">>], query => <<>>, date => {2015, 5, 7} }}}, %% {<<"-binaries-and-garbage-collection.html">>, {ok, #{protocol => <<"http">>, domain => <<"dieswaytoofast.blogspot.com.by">>, path => [<<"2012">>, <<"12">>, <<"erlang-binaries-and-garbage-collection.html">>], query => <<>>, date => undefined }}}, %% {<<"">>, {ok, #{protocol => <<"http">>, domain => <<"www.erlangpatterns.org">>, path => [<<"patterns.html">>], query => <<>>, date => undefined }}}, %% {<<"www.erlang.org">>, {error, invalid_protocol}}, %% {<<"http://">>, {error, invalid_domain}}, %% {<<"-with-guido-van-rossum/">>, {ok, #{protocol => <<"http">>, domain => <<"mostlyerlang.com">>, path => [<<"2015">>, <<"45">>, <<"07">>, <<"interview-with-guido-van-rossum">>], query => <<>>, date => undefined }}} ], {generator, fun() -> lists:map( fun({Url, Res}) -> ?_assertEqual(Res, url_parser:parse(Url)) end, Tests) end}.
null
https://raw.githubusercontent.com/yzh44yzh/practical_erlang/c9eec8cf44e152bf50d9bc6d5cb87fee4764f609/15_error_processing/solution/test/url_parser_tests.erl
erlang
-module(url_parser_tests). -include_lib("eunit/include/eunit.hrl"). parse_test_() -> Tests = [ {<<"/">>, {ok, #{protocol => <<"https">>, domain => <<"erlangcentral.org">>, path => [<<"books">>], query => <<>>, date => undefined }}}, {<<"">>, {ok, #{protocol => <<"https">>, domain => <<"www.youtube.com">>, path => [<<"user">>, <<"ErlangSolutions">>, <<"playlists">>], query => <<>>, date => undefined }}}, {<<"-GPwM">>, {ok, #{protocol => <<"https">>, domain => <<"www.youtube.com">>, path => [<<"playlist">>], query => <<"list=PLWbHc_FXPo2jN8cLhwLg7frCx6fJ-GPwM">>, date => undefined }}}, {<<"-rebar-3">>, {ok, #{protocol => <<"http">>, domain => <<"mostlyerlang.com">>, path => [<<"2015">>, <<"05">>, <<"12">>, <<"067-rebar-3">>], query => <<>>, date => {2015, 5, 12} }}}, {<<"-with-joe-armstrong/">>, {ok, #{protocol => <<"http">>, domain => <<"mostlyerlang.com">>, path => [<<"2015">>, <<"05">>, <<"21">>, <<"interview-with-joe-armstrong">>], query => <<>>, date => {2015, 5, 21} }}}, {<<"-with-guido-van-rossum/">>, {ok, #{protocol => <<"http">>, domain => <<"mostlyerlang.com">>, path => [<<"2015">>, <<"05">>, <<"07">>, <<"interview-with-guido-van-rossum">>], query => <<>>, date => {2015, 5, 7} }}}, {<<"-binaries-and-garbage-collection.html">>, {ok, #{protocol => <<"http">>, domain => <<"dieswaytoofast.blogspot.com.by">>, path => [<<"2012">>, <<"12">>, <<"erlang-binaries-and-garbage-collection.html">>], query => <<>>, date => undefined }}}, {<<"">>, {ok, #{protocol => <<"http">>, domain => <<"www.erlangpatterns.org">>, path => [<<"patterns.html">>], query => <<>>, date => undefined }}}, {<<"www.erlang.org">>, {error, invalid_protocol}}, {<<"http://">>, {error, invalid_domain}}, {<<"-with-guido-van-rossum/">>, {ok, #{protocol => <<"http">>, domain => <<"mostlyerlang.com">>, path => [<<"2015">>, <<"45">>, <<"07">>, <<"interview-with-guido-van-rossum">>], query => <<>>, date => undefined }}} ], {generator, fun() -> lists:map( fun({Url, Res}) -> ?_assertEqual(Res, url_parser:parse(Url)) end, Tests) end}.