_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
82108cab6ab9323104486f690651d7c059e0fb4e4c3060b9d8da04e5bf918f44 | pmundkur/flowcaml | flowperv.ml | (**************************************************************************)
(* *)
(* *)
, Projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2002 , 2003 Institut National de Recherche en Informatique
(* et en Automatique. All rights reserved. This file is distributed *)
under the terms of the GNU Library General Public License , with the
(* special exception on linking described in file ../LICENSE. *)
(* *)
(* Author contact: *)
(* Software page: /~simonet/soft/flowcaml/ *)
(* *)
(**************************************************************************)
$ I d : flowperv.ml , v 1.6 2003/06/27 13:09:57 simonet Exp $
(***************************************************************************)
(** {2 Strings} *)
type charray = string
let (^^) = (^)
let ($$) s i = s.[i]
let charray_of_string = String.copy
let string_of_charray = String.copy
(***************************************************************************)
* { 2 Exceptions }
(** [_propagate_ (fun () -> e1) exn] is an implementation of the
construct [try ... with exn -> e1 propagate]. *)
let _propagate_ f exn =
begin try ignore (f ()) with _ -> () end;
raise exn
* [ ( fun ( ) - > e1 ) ( fun ( ) - > e2 ) ] is an implementation of the
construct [ try e1 finally e2 ] .
construct [try e1 finally e2]. *)
let _try_finally_ f1 f2 =
let x1 =
try
f1 ()
with
exn ->
begin try ignore (f2 ()) with _ -> () end;
raise exn
in
f2 ();
x1
(** [catchable exn] tests wether the exception [exn] may be catched. *)
let _catchable_ = function
Out_of_memory
| Stack_overflow
| Assert_failure _
| Match_failure _ -> false
| Invalid_argument tag ->
let len = String.length tag in
not ((len > 6 && String.sub tag 0 6 = "Array.")
or (len > 7 && String.sub tag 0 7 = "String."))
| _ -> true
| null | https://raw.githubusercontent.com/pmundkur/flowcaml/ddfa8a37e1cb60f42650bed8030036ac313e048a/src-flowcaml/runlib/flowperv.ml | ocaml | ************************************************************************
et en Automatique. All rights reserved. This file is distributed
special exception on linking described in file ../LICENSE.
Author contact:
Software page: /~simonet/soft/flowcaml/
************************************************************************
*************************************************************************
* {2 Strings}
*************************************************************************
* [_propagate_ (fun () -> e1) exn] is an implementation of the
construct [try ... with exn -> e1 propagate].
* [catchable exn] tests wether the exception [exn] may be catched. |
, Projet Cristal , INRIA Rocquencourt
Copyright 2002 , 2003 Institut National de Recherche en Informatique
under the terms of the GNU Library General Public License , with the
$ I d : flowperv.ml , v 1.6 2003/06/27 13:09:57 simonet Exp $
type charray = string
let (^^) = (^)
let ($$) s i = s.[i]
let charray_of_string = String.copy
let string_of_charray = String.copy
* { 2 Exceptions }
let _propagate_ f exn =
begin try ignore (f ()) with _ -> () end;
raise exn
* [ ( fun ( ) - > e1 ) ( fun ( ) - > e2 ) ] is an implementation of the
construct [ try e1 finally e2 ] .
construct [try e1 finally e2]. *)
let _try_finally_ f1 f2 =
let x1 =
try
f1 ()
with
exn ->
begin try ignore (f2 ()) with _ -> () end;
raise exn
in
f2 ();
x1
let _catchable_ = function
Out_of_memory
| Stack_overflow
| Assert_failure _
| Match_failure _ -> false
| Invalid_argument tag ->
let len = String.length tag in
not ((len > 6 && String.sub tag 0 6 = "Array.")
or (len > 7 && String.sub tag 0 7 = "String."))
| _ -> true
|
934a64f45b8429118c29c9d2f892ab0fa385f328e0fd98376265276b01e5a66b | craigl64/clim-ccl | db-label.lisp | ;; -*- mode: common-lisp; package: silica -*-
;;
;;
;; See the file LICENSE for the full license governing this code.
;;
(in-package :silica)
(defclass generic-label-pane
(label-pane
space-requirement-mixin
leaf-pane)
()
(:default-initargs :align-x :left
:text-style *default-label-text-style*))
(defmethod compose-space ((pane generic-label-pane) &key width height)
(declare (ignore width height))
(multiple-value-bind (width height)
(compute-gadget-label-size pane)
(make-space-requirement :width width :height height)))
(defmethod handle-repaint ((pane generic-label-pane) region)
(declare (ignore region)) ;not worth checking
(with-sheet-medium (medium pane)
(with-bounding-rectangle* (left top right bottom) (sheet-region pane)
(declare (ignore right bottom))
(draw-gadget-label pane medium left top
:align-x (gadget-alignment pane) :align-y :top))))
(defmethod draw-gadget-label ((pane labelled-gadget-mixin) medium x y
&key (align-x (gadget-alignment pane))
(align-y :baseline))
(let ((label (gadget-label pane)))
(etypecase label
(string
(let ((text-style (slot-value pane 'text-style)))
(draw-text* medium label x y
:text-style text-style
:align-x align-x :align-y align-y)))
(null)
(pattern
(let ((width (pattern-width label))
(height (pattern-height label)))
(ecase align-x
(:left)
(:right (decf x width))
(:center (decf x (floor width 2))))
(ecase align-y
((:top :baseline))
(:bottom (decf x height))
(:center (decf x (floor height 2))))
(draw-pattern* medium label x y)))
(pixmap
(let ((width (pixmap-width label))
(height (pixmap-height label)))
(ecase align-x
(:left)
(:right (decf x width))
(:center (decf x (floor width 2))))
(ecase align-y
((:top :baseline))
(:bottom (decf x height))
(:center (decf x (floor height 2))))
(copy-from-pixmap label 0 0 width height
medium x y))))))
| null | https://raw.githubusercontent.com/craigl64/clim-ccl/301efbd770745b429f2b00b4e8ca6624de9d9ea9/homegrown/db-label.lisp | lisp | -*- mode: common-lisp; package: silica -*-
See the file LICENSE for the full license governing this code.
not worth checking |
(in-package :silica)
(defclass generic-label-pane
(label-pane
space-requirement-mixin
leaf-pane)
()
(:default-initargs :align-x :left
:text-style *default-label-text-style*))
(defmethod compose-space ((pane generic-label-pane) &key width height)
(declare (ignore width height))
(multiple-value-bind (width height)
(compute-gadget-label-size pane)
(make-space-requirement :width width :height height)))
(defmethod handle-repaint ((pane generic-label-pane) region)
(with-sheet-medium (medium pane)
(with-bounding-rectangle* (left top right bottom) (sheet-region pane)
(declare (ignore right bottom))
(draw-gadget-label pane medium left top
:align-x (gadget-alignment pane) :align-y :top))))
(defmethod draw-gadget-label ((pane labelled-gadget-mixin) medium x y
&key (align-x (gadget-alignment pane))
(align-y :baseline))
(let ((label (gadget-label pane)))
(etypecase label
(string
(let ((text-style (slot-value pane 'text-style)))
(draw-text* medium label x y
:text-style text-style
:align-x align-x :align-y align-y)))
(null)
(pattern
(let ((width (pattern-width label))
(height (pattern-height label)))
(ecase align-x
(:left)
(:right (decf x width))
(:center (decf x (floor width 2))))
(ecase align-y
((:top :baseline))
(:bottom (decf x height))
(:center (decf x (floor height 2))))
(draw-pattern* medium label x y)))
(pixmap
(let ((width (pixmap-width label))
(height (pixmap-height label)))
(ecase align-x
(:left)
(:right (decf x width))
(:center (decf x (floor width 2))))
(ecase align-y
((:top :baseline))
(:bottom (decf x height))
(:center (decf x (floor height 2))))
(copy-from-pixmap label 0 0 width height
medium x y))))))
|
7098838641ef4e652838ca0a1460dd842474f4bdb00ba4f44c0d5733bf23c261 | r0man/sablono | data_readers.clj | Use # j to read JSValue , om.next redefined # js to read Clojure maps :/
{j cljs.tagged-literals/read-js}
| null | https://raw.githubusercontent.com/r0man/sablono/3221c5bacd1c81f1f79bc1ec86f3d2652b1a12d2/test-resources/data_readers.clj | clojure | Use # j to read JSValue , om.next redefined # js to read Clojure maps :/
{j cljs.tagged-literals/read-js}
|
|
b941e0f94be18d5964706b3285933491d93d9315ea09f975533c0e7eeaa47171 | corecursive/sicp-study-group | install.scm | (load "table.scm")
(load "type.scm")
;; installing polynomials
(define (install-polynomial-package)
(load "polynomial-package/representation.scm")
(load "polynomial-package/operation.scm")
(define (make-polynomial p)
(attach-type 'polynomial p))
(define (+polynomial p1 p2)
(make-polynomial (+poly p1 p2)))
(define (*polynomial p1 p2)
(make-polynomial (*poly p1 p2)))
(put 'polynomial 'add +polynomial)
(put 'polynomial 'mul *polynomial)
(put 'polynomial
'make
(lambda (var term-list)
(make-polynomial (make-poly var term-list))))
'done)
(define (make-polynomial var term-list)
((get 'polynomial 'make) var term-list))
(install-polynomial-package)
| null | https://raw.githubusercontent.com/corecursive/sicp-study-group/82b92a9759ed6c72d15cf955c806ce2a94336f83/wulab/lecture-4b/generic-operation/polynomial-package/install.scm | scheme | installing polynomials | (load "table.scm")
(load "type.scm")
(define (install-polynomial-package)
(load "polynomial-package/representation.scm")
(load "polynomial-package/operation.scm")
(define (make-polynomial p)
(attach-type 'polynomial p))
(define (+polynomial p1 p2)
(make-polynomial (+poly p1 p2)))
(define (*polynomial p1 p2)
(make-polynomial (*poly p1 p2)))
(put 'polynomial 'add +polynomial)
(put 'polynomial 'mul *polynomial)
(put 'polynomial
'make
(lambda (var term-list)
(make-polynomial (make-poly var term-list))))
'done)
(define (make-polynomial var term-list)
((get 'polynomial 'make) var term-list))
(install-polynomial-package)
|
65ee1a7882a6bdc50a32d227ae6ed39a454b6950aecb2c5a8ad973cfacd5f38f | ZeusWPI/contests | ptaal.hs | import Control.Applicative ((<$>))
import Control.Monad (replicateM_)
import Data.Char (toLower)
isVowel :: String -> Bool
isVowel st = (fmap toLower st) `elem` ["a","e","i","o","u","ij"]
decode :: String -> String
decode str = decode' str 0
decode' (x:y:xs) i
| isVowel [x,y] = x : y : decode' xs (i+2)
| isVowel [x] = x : decode' (y:xs) (i+1)
| otherwise = let rest = drop i (y:xs)
in if i == 0
then x : decode' rest 0
else decode' rest 0
decode' x _ = x
main = do
n <- read <$> getLine
replicateM_ n $ do
line <- getLine
putStrLn $ unwords $ fmap decode $ words line
| null | https://raw.githubusercontent.com/ZeusWPI/contests/d78aec91be3ce32a436d160cd7a13825d36bbf3a/2010-vpw/ptaal.hs | haskell | import Control.Applicative ((<$>))
import Control.Monad (replicateM_)
import Data.Char (toLower)
isVowel :: String -> Bool
isVowel st = (fmap toLower st) `elem` ["a","e","i","o","u","ij"]
decode :: String -> String
decode str = decode' str 0
decode' (x:y:xs) i
| isVowel [x,y] = x : y : decode' xs (i+2)
| isVowel [x] = x : decode' (y:xs) (i+1)
| otherwise = let rest = drop i (y:xs)
in if i == 0
then x : decode' rest 0
else decode' rest 0
decode' x _ = x
main = do
n <- read <$> getLine
replicateM_ n $ do
line <- getLine
putStrLn $ unwords $ fmap decode $ words line
|
|
f1516a0ff080cc916dc8a2d578d8d8274f0f8510e16264e958f79ed3d5258433 | uwplse/ferrite | append-chrome.rkt | #lang s-exp rosette
(require "../fs.rkt" "../lang.rkt" "../litmus.rkt" "../verifier.rkt" "../synth.rkt"
"../advfs.rkt" "../seqfs.rkt" "../ext4.rkt"
rackunit rackunit/text-ui)
(provide chrome-tests)
(current-bitwidth 16)
(define small? #f)
(define writes (if small? '(33 2 31) '(2509 13 2500)))
(define block-size (if small? 64 4096))
(define chrome-setup
(list
(creat 0) ; fd 0
(write 0 (for/list ([i (first writes)]) #t))
(fsync 0)))
(define chrome-test
(list
(write 0 (for/list ([i (second writes)]) #t))
(write 0 (for/list ([i (third writes)]) #t))))
SeqFS
(define (chrome-allow oldfs newfs)
file must be a prefix of # ts
(define new-0 (ondisk newfs 0))
(list (apply && new-0)))
(define (chrome-fs-seqfs)
(seq-fs 2))
Ext4
(define (chrome-fs-ext4)
(ext4-fs #:capacity 2 #:blocksize block-size))
(define (chrome-fs-ext4-nodelalloc)
(ext4-fs #:capacity 2 #:blocksize block-size #:nodelalloc? #t))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define (test-seqfs)
(printf "test-seqfs\n")
(define test
(litmus chrome-fs-seqfs chrome-setup chrome-test chrome-allow))
(define-values (cex state) (verify-correctness test))
(check-true (unsat? cex))
(check-false state)
(define all-states (all-outcomes test))
(check equal? (length all-states) 3)
)
(define (test-ext4)
(printf "test-ext4 ~a\n" small?)
(printf " verify-correctness\n")
(define test
(litmus chrome-fs-ext4 chrome-setup chrome-test chrome-allow))
(define-values (cex state) (verify-correctness test))
(check-true (sat? cex))
(printf " all-states\n")
(define all-states (all-outcomes test))
(check equal? (length all-states) 7)
)
(define (test-ext4-nodelalloc)
(printf "test-ext4-nodelalloc ~a\n" small?)
(printf " verify-correctness\n")
(define test
(litmus chrome-fs-ext4-nodelalloc chrome-setup chrome-test chrome-allow))
(define-values (cex state) (verify-correctness test))
(check-true (unsat? cex))
(check-false state)
(printf " all-states\n")
(define all-states (all-outcomes test))
(check equal? (length all-states) 4)
)
(define (test-ext4-synth)
(printf "test-ext4-synth ~a\n" small?)
(define test
(litmus chrome-fs-ext4 chrome-setup chrome-test chrome-allow))
(define prog (synth test))
(check-true (false? prog))) ; no fences will fix this program
(define (test-ext4-synth-nodelalloc)
(printf "test-ext4-synth-nodelalloc ~a\n" small?)
(printf " synth\n")
(define test
(litmus chrome-fs-ext4-nodelalloc chrome-setup chrome-test chrome-allow))
(define prog (synth test))
(check-false (false? prog))
(check-false (term? prog))
(define cost (sync-cost prog))
(check equal? cost 0) ; program is already correct with nodelalloc; no fences needed
(printf " verify-correctness\n")
(define test*
(litmus chrome-fs-ext4-nodelalloc chrome-setup prog chrome-allow))
(define-values (cex state) (verify-correctness test*))
(check-true (unsat? cex)))
(define chrome-tests
(test-suite
"chrome litmus test"
#:before (thunk (printf "-----chrome-----\n"))
(test-seqfs)
(test-ext4)
(test-ext4-nodelalloc)
))
(define chrome-synth-tests
(test-suite
"chrome synth test"
#:before (thunk (printf "-----chrome synth-----\n"))
(test-ext4-synth)
(test-ext4-synth-nodelalloc)
))
small ? = # f : passes in 72s
small ? = # f : passes in 195s
| null | https://raw.githubusercontent.com/uwplse/ferrite/923736167fb00aec979f244787a8c33e48e1c551/rosette/litmus/append-chrome.rkt | racket | fd 0
no fences will fix this program
program is already correct with nodelalloc; no fences needed | #lang s-exp rosette
(require "../fs.rkt" "../lang.rkt" "../litmus.rkt" "../verifier.rkt" "../synth.rkt"
"../advfs.rkt" "../seqfs.rkt" "../ext4.rkt"
rackunit rackunit/text-ui)
(provide chrome-tests)
(current-bitwidth 16)
(define small? #f)
(define writes (if small? '(33 2 31) '(2509 13 2500)))
(define block-size (if small? 64 4096))
(define chrome-setup
(list
(write 0 (for/list ([i (first writes)]) #t))
(fsync 0)))
(define chrome-test
(list
(write 0 (for/list ([i (second writes)]) #t))
(write 0 (for/list ([i (third writes)]) #t))))
SeqFS
(define (chrome-allow oldfs newfs)
file must be a prefix of # ts
(define new-0 (ondisk newfs 0))
(list (apply && new-0)))
(define (chrome-fs-seqfs)
(seq-fs 2))
Ext4
(define (chrome-fs-ext4)
(ext4-fs #:capacity 2 #:blocksize block-size))
(define (chrome-fs-ext4-nodelalloc)
(ext4-fs #:capacity 2 #:blocksize block-size #:nodelalloc? #t))
(define (test-seqfs)
(printf "test-seqfs\n")
(define test
(litmus chrome-fs-seqfs chrome-setup chrome-test chrome-allow))
(define-values (cex state) (verify-correctness test))
(check-true (unsat? cex))
(check-false state)
(define all-states (all-outcomes test))
(check equal? (length all-states) 3)
)
(define (test-ext4)
(printf "test-ext4 ~a\n" small?)
(printf " verify-correctness\n")
(define test
(litmus chrome-fs-ext4 chrome-setup chrome-test chrome-allow))
(define-values (cex state) (verify-correctness test))
(check-true (sat? cex))
(printf " all-states\n")
(define all-states (all-outcomes test))
(check equal? (length all-states) 7)
)
(define (test-ext4-nodelalloc)
(printf "test-ext4-nodelalloc ~a\n" small?)
(printf " verify-correctness\n")
(define test
(litmus chrome-fs-ext4-nodelalloc chrome-setup chrome-test chrome-allow))
(define-values (cex state) (verify-correctness test))
(check-true (unsat? cex))
(check-false state)
(printf " all-states\n")
(define all-states (all-outcomes test))
(check equal? (length all-states) 4)
)
(define (test-ext4-synth)
(printf "test-ext4-synth ~a\n" small?)
(define test
(litmus chrome-fs-ext4 chrome-setup chrome-test chrome-allow))
(define prog (synth test))
(define (test-ext4-synth-nodelalloc)
(printf "test-ext4-synth-nodelalloc ~a\n" small?)
(printf " synth\n")
(define test
(litmus chrome-fs-ext4-nodelalloc chrome-setup chrome-test chrome-allow))
(define prog (synth test))
(check-false (false? prog))
(check-false (term? prog))
(define cost (sync-cost prog))
(printf " verify-correctness\n")
(define test*
(litmus chrome-fs-ext4-nodelalloc chrome-setup prog chrome-allow))
(define-values (cex state) (verify-correctness test*))
(check-true (unsat? cex)))
(define chrome-tests
(test-suite
"chrome litmus test"
#:before (thunk (printf "-----chrome-----\n"))
(test-seqfs)
(test-ext4)
(test-ext4-nodelalloc)
))
(define chrome-synth-tests
(test-suite
"chrome synth test"
#:before (thunk (printf "-----chrome synth-----\n"))
(test-ext4-synth)
(test-ext4-synth-nodelalloc)
))
small ? = # f : passes in 72s
small ? = # f : passes in 195s
|
14e66664ec67da3fad00d8fe2fa19c5fe64b0821c6b4cffe17033f44a3597f8c | fossas/fossa-cli | ListSpec.hs | module Extra.ListSpec (spec) where
import Data.List.Extra (singleton, (!?))
import Test.Hspec (Spec, describe, it, shouldBe)
spec :: Spec
spec = do
describe "singleton" $ do
it "should create a one-item list" $
singleton (3 :: Int) `shouldBe` [3]
describe "(!?)" $ do
it "should return the zero-indexed item at the index" $
[False, True, False, False, False] !? 1 `shouldBe` Just True
it "should return Nothing if the index >= length" $ do
let list = replicate 5 ()
list !? 5 `shouldBe` Nothing -- index == length
index = = length + 1
list !? 20 `shouldBe` Nothing -- index > length
| null | https://raw.githubusercontent.com/fossas/fossa-cli/b2e0f7e08ad555c933ad750359348d54966e7e72/test/Extra/ListSpec.hs | haskell | index == length
index > length | module Extra.ListSpec (spec) where
import Data.List.Extra (singleton, (!?))
import Test.Hspec (Spec, describe, it, shouldBe)
spec :: Spec
spec = do
describe "singleton" $ do
it "should create a one-item list" $
singleton (3 :: Int) `shouldBe` [3]
describe "(!?)" $ do
it "should return the zero-indexed item at the index" $
[False, True, False, False, False] !? 1 `shouldBe` Just True
it "should return Nothing if the index >= length" $ do
let list = replicate 5 ()
index = = length + 1
|
f4208d2ebabe5f476b9e9229c870f7faabb44ff4e9c6774296754e7b02b5d9f0 | biocaml/biocaml | bin_pred.ml | module Bin_pred = Biocaml_unix.Bin_pred
open OUnit
* This is a test against the R library ROCR . The reference result
* ( 0.8341875 ) is obtained as follows :
*
* data(ROCR.simple )
* performance(prediction ( ROCR.simple$predictions , ROCR.simple$labels ) , " auc " )
*
* This is a test against the R library ROCR. The reference result
* (0.8341875) is obtained as follows:
*
* data(ROCR.simple)
* performance(prediction( ROCR.simple$predictions, ROCR.simple$labels), "auc")
*
*)
let rocr_pos =
[|
0.612547843;
0.364270971;
0.244415489;
0.970641299;
0.890172812;
0.781781371;
0.716680598;
0.547983407;
0.628095575;
0.744769966;
0.657732644;
0.890078186;
0.984667270;
0.014823599;
0.543533783;
0.701561487;
0.715459280;
0.714985914;
0.911723615;
0.757325590;
0.529402244;
0.589909284;
0.326672910;
0.879459891;
0.230157183;
0.876086217;
0.353281048;
0.703293499;
0.627012496;
0.665444679;
0.536339509;
0.623494622;
0.885179651;
0.932159806;
0.858876675;
0.694457482;
0.517308606;
0.865639036;
0.005422562;
0.772728821;
0.277656869;
0.133257805;
0.531958184;
0.717845453;
0.537091350;
0.930846938;
0.663367560;
0.844415442;
0.943432189;
0.598162949;
0.834803976;
0.912325837;
0.642933593;
0.586857799;
0.700501359;
0.531464015;
0.938583020;
0.531006532;
0.785213140;
0.905121019;
0.748438143;
0.842974300;
0.835981859;
0.991096434;
0.757364019;
0.773336236;
0.110241034;
0.984599159;
0.253271061;
0.697235328;
0.620501132;
0.814586047;
0.698826511;
0.658692553;
0.501489336;
0.746588080;
0.579511087;
0.770178504;
0.537336015;
0.790240205;
0.883431431;
0.745110673;
0.012653524;
0.868331219;
0.540221346;
0.567043171;
0.806543942;
0.336315317;
0.268138293;
0.728536415;
0.739554341;
0.858970526;
0.606960209;
|]
let rocr_neg =
[|
0.432136142;
0.140291078;
0.384895941;
0.868751832;
0.360168796;
0.385240464;
0.423739359;
0.101699993;
0.490119891;
0.072369921;
0.172741714;
0.105722115;
0.945548941;
0.360180429;
0.448687336;
0.292368449;
0.120604738;
0.319672178;
0.090988280;
0.257402979;
0.708412104;
0.086546283;
0.362693564;
0.779771989;
0.212014560;
0.689075677;
0.240911145;
0.402801992;
0.134794140;
0.120473353;
0.353777439;
0.408939895;
0.265686095;
0.248500489;
0.491735594;
0.151350957;
0.496513160;
0.123504905;
0.499788081;
0.310718619;
0.907651100;
0.340078180;
0.195097957;
0.371936985;
0.419560072;
0.018527600;
0.539086009;
0.703885141;
0.348213542;
0.458674210;
0.059045866;
0.083685883;
0.429650397;
0.212404891;
0.083048377;
0.468610247;
0.393378108;
0.349540913;
0.194398425;
0.959417835;
0.211378771;
0.576836208;
0.380396459;
0.161874325;
0.392173971;
0.122284044;
0.180631658;
0.085993218;
0.060413627;
0.084254795;
0.448484671;
0.605235403;
0.364288579;
0.492596896;
0.488179708;
0.259278968;
0.288258273;
0.040906997;
0.760726142;
0.300973098;
0.378092079;
0.016694412;
0.470206008;
0.239143340;
0.050999138;
0.088450984;
0.107031842;
0.480100183;
0.336592126;
0.118555284;
0.233160827;
0.461150807;
0.370549294;
0.463227453;
0.007746305;
0.439399995;
0.035815400;
0.248707470;
0.696702150;
0.081439129;
0.126480399;
0.636728451;
0.030235062;
0.983494405;
0.522384507;
0.383807972;
0.138387070;
|]
let scores = Array.append rocr_pos rocr_neg
let labels =
Array.append
(Array.map rocr_pos ~f:(fun _ -> true))
(Array.map rocr_neg ~f:(fun _ -> false))
let assert_float_equal ?msg x y =
assert_equal
~cmp:Float.(fun x y -> abs (x -. y) < 0.00001)
~printer:Float.to_string ?msg x y
let p x = BatArray.print ( BatTuple.Tuple2.print BatFloat.print BatFloat.print ) BatIO.stdout x
let test_empty_data () =
let _, auc = Bin_pred.roc_curve ~scores:[||] ~labels:[||] in
assert_bool "Test with empty data" (Float.is_nan auc)
let test_2_points_good () =
let _, auc =
Bin_pred.roc_curve ~scores:[| 0.; 2. |] ~labels:[| false; true |]
in
assert_float_equal ~msg:"Test with two points and a good classifier" 1. auc
let test_2_points_bad () =
let _, auc =
Bin_pred.roc_curve ~scores:[| 0.; 2. |] ~labels:[| true; false |]
in
assert_float_equal ~msg:"Test with two points and bad classifier" 0. auc
let test_against_rocr () =
assert_float_equal ~msg:"Test against ROCR failed"
(snd (Bin_pred.roc_curve ~scores ~labels))
0.8341875
let tests =
"Bin_pred"
>::: [
"Test ROC with empty data" >:: test_empty_data;
"Test ROC with two points (perfect classifier) " >:: test_2_points_good;
"Test ROC with two points (worst classifier)" >:: test_2_points_bad;
"Test against ROCR implementation" >:: test_against_rocr;
]
| null | https://raw.githubusercontent.com/biocaml/biocaml/ac619539fed348747d686b8f628e80c1bb8bfc59/lib/test/bin_pred.ml | ocaml | module Bin_pred = Biocaml_unix.Bin_pred
open OUnit
* This is a test against the R library ROCR . The reference result
* ( 0.8341875 ) is obtained as follows :
*
* data(ROCR.simple )
* performance(prediction ( ROCR.simple$predictions , ROCR.simple$labels ) , " auc " )
*
* This is a test against the R library ROCR. The reference result
* (0.8341875) is obtained as follows:
*
* data(ROCR.simple)
* performance(prediction( ROCR.simple$predictions, ROCR.simple$labels), "auc")
*
*)
let rocr_pos =
[|
0.612547843;
0.364270971;
0.244415489;
0.970641299;
0.890172812;
0.781781371;
0.716680598;
0.547983407;
0.628095575;
0.744769966;
0.657732644;
0.890078186;
0.984667270;
0.014823599;
0.543533783;
0.701561487;
0.715459280;
0.714985914;
0.911723615;
0.757325590;
0.529402244;
0.589909284;
0.326672910;
0.879459891;
0.230157183;
0.876086217;
0.353281048;
0.703293499;
0.627012496;
0.665444679;
0.536339509;
0.623494622;
0.885179651;
0.932159806;
0.858876675;
0.694457482;
0.517308606;
0.865639036;
0.005422562;
0.772728821;
0.277656869;
0.133257805;
0.531958184;
0.717845453;
0.537091350;
0.930846938;
0.663367560;
0.844415442;
0.943432189;
0.598162949;
0.834803976;
0.912325837;
0.642933593;
0.586857799;
0.700501359;
0.531464015;
0.938583020;
0.531006532;
0.785213140;
0.905121019;
0.748438143;
0.842974300;
0.835981859;
0.991096434;
0.757364019;
0.773336236;
0.110241034;
0.984599159;
0.253271061;
0.697235328;
0.620501132;
0.814586047;
0.698826511;
0.658692553;
0.501489336;
0.746588080;
0.579511087;
0.770178504;
0.537336015;
0.790240205;
0.883431431;
0.745110673;
0.012653524;
0.868331219;
0.540221346;
0.567043171;
0.806543942;
0.336315317;
0.268138293;
0.728536415;
0.739554341;
0.858970526;
0.606960209;
|]
let rocr_neg =
[|
0.432136142;
0.140291078;
0.384895941;
0.868751832;
0.360168796;
0.385240464;
0.423739359;
0.101699993;
0.490119891;
0.072369921;
0.172741714;
0.105722115;
0.945548941;
0.360180429;
0.448687336;
0.292368449;
0.120604738;
0.319672178;
0.090988280;
0.257402979;
0.708412104;
0.086546283;
0.362693564;
0.779771989;
0.212014560;
0.689075677;
0.240911145;
0.402801992;
0.134794140;
0.120473353;
0.353777439;
0.408939895;
0.265686095;
0.248500489;
0.491735594;
0.151350957;
0.496513160;
0.123504905;
0.499788081;
0.310718619;
0.907651100;
0.340078180;
0.195097957;
0.371936985;
0.419560072;
0.018527600;
0.539086009;
0.703885141;
0.348213542;
0.458674210;
0.059045866;
0.083685883;
0.429650397;
0.212404891;
0.083048377;
0.468610247;
0.393378108;
0.349540913;
0.194398425;
0.959417835;
0.211378771;
0.576836208;
0.380396459;
0.161874325;
0.392173971;
0.122284044;
0.180631658;
0.085993218;
0.060413627;
0.084254795;
0.448484671;
0.605235403;
0.364288579;
0.492596896;
0.488179708;
0.259278968;
0.288258273;
0.040906997;
0.760726142;
0.300973098;
0.378092079;
0.016694412;
0.470206008;
0.239143340;
0.050999138;
0.088450984;
0.107031842;
0.480100183;
0.336592126;
0.118555284;
0.233160827;
0.461150807;
0.370549294;
0.463227453;
0.007746305;
0.439399995;
0.035815400;
0.248707470;
0.696702150;
0.081439129;
0.126480399;
0.636728451;
0.030235062;
0.983494405;
0.522384507;
0.383807972;
0.138387070;
|]
let scores = Array.append rocr_pos rocr_neg
let labels =
Array.append
(Array.map rocr_pos ~f:(fun _ -> true))
(Array.map rocr_neg ~f:(fun _ -> false))
let assert_float_equal ?msg x y =
assert_equal
~cmp:Float.(fun x y -> abs (x -. y) < 0.00001)
~printer:Float.to_string ?msg x y
let p x = BatArray.print ( BatTuple.Tuple2.print BatFloat.print BatFloat.print ) BatIO.stdout x
let test_empty_data () =
let _, auc = Bin_pred.roc_curve ~scores:[||] ~labels:[||] in
assert_bool "Test with empty data" (Float.is_nan auc)
let test_2_points_good () =
let _, auc =
Bin_pred.roc_curve ~scores:[| 0.; 2. |] ~labels:[| false; true |]
in
assert_float_equal ~msg:"Test with two points and a good classifier" 1. auc
let test_2_points_bad () =
let _, auc =
Bin_pred.roc_curve ~scores:[| 0.; 2. |] ~labels:[| true; false |]
in
assert_float_equal ~msg:"Test with two points and bad classifier" 0. auc
let test_against_rocr () =
assert_float_equal ~msg:"Test against ROCR failed"
(snd (Bin_pred.roc_curve ~scores ~labels))
0.8341875
let tests =
"Bin_pred"
>::: [
"Test ROC with empty data" >:: test_empty_data;
"Test ROC with two points (perfect classifier) " >:: test_2_points_good;
"Test ROC with two points (worst classifier)" >:: test_2_points_bad;
"Test against ROCR implementation" >:: test_against_rocr;
]
|
|
e08d44b3642eaf22ddc83f3deda7616f4d58b17438259436bb2ddc77a1d9b99d | sangkilc/ofuzz | ballmutgen.ml | (* ofuzz - ocaml fuzzing platform *)
* ball - based mutational generator
@author < sangkil.cha\@gmail.com >
@since 2014 - 03 - 19
@author Sang Kil Cha <sangkil.cha\@gmail.com>
@since 2014-03-19
*)
Copyright ( c ) 2014 ,
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions are met :
* Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
* Redistributions in binary form must reproduce the above copyright
notice , this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution .
* Neither the name of the < organization > nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND
ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL SANG KIL CHA BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES
( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS
Copyright (c) 2014, Sang Kil Cha
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL SANG KIL CHA BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
*)
open Fuzztypes
open Testgenlib
open Misc
open Comblib
let mutate_file r buf filesize rseed mratio =
let mratio = get_ratio rseed mratio in
let bits = filesize * 8 in
let bits_to_mod = (float_of_int bits) *. mratio |> int_of_float in
let bits_to_mod = if bits_to_mod = 0 then 1 else bits_to_mod in
let bits_to_mod = get_random_partition r bits bits_to_mod in
let set = floyds_sampling r bits bits_to_mod in
Hashtbl.iter (fun bitpos _ ->
let byte_pos, bit_offset = bitpos / 8, bitpos mod 8 in
let newval = 1 lsl bit_offset |> char_of_int in
Fastlib.mod_file buf byte_pos newval
) set
(** [generate conf knobs rseed] produces one mutated test case.

    Prepares the fuzz target described by [conf] (copying it, per the [true]
    flag passed to [prepare_fuzztarget]), memory-maps the resulting file,
    seeds a random state from [rseed], performs ball-based bit mutation
    according to [conf.mratio], unmaps the buffer, and returns the path of
    the mutated file together with the seed that produced it.

    [knobs] is accepted for interface uniformity with other generators but
    is not consulted here. *)
let generate conf knobs rseed =
  ignore knobs;
  let myfile, mapsize, filesize = prepare_fuzztarget conf true in
  let buf = Fastlib.map_file myfile mapsize in
  let r = init_rseed rseed in
  mutate_file r buf filesize rseed conf.mratio;
  Fastlib.unmap_file buf;
  (myfile, rseed)
| null | https://raw.githubusercontent.com/sangkilc/ofuzz/ba53cc90cc06512eb90459a7159772d75ebe954f/src/testgen/ballmutgen.ml | ocaml | ofuzz - ocaml fuzzing platform |
* ball - based mutational generator
@author < sangkil.cha\@gmail.com >
@since 2014 - 03 - 19
@author Sang Kil Cha <sangkil.cha\@gmail.com>
@since 2014-03-19
*)
Copyright ( c ) 2014 ,
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions are met :
* Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
* Redistributions in binary form must reproduce the above copyright
notice , this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution .
* Neither the name of the < organization > nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND
ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL SANG KIL CHA BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES
( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS
Copyright (c) 2014, Sang Kil Cha
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL SANG KIL CHA BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
*)
open Fuzztypes
open Testgenlib
open Misc
open Comblib
let mutate_file r buf filesize rseed mratio =
let mratio = get_ratio rseed mratio in
let bits = filesize * 8 in
let bits_to_mod = (float_of_int bits) *. mratio |> int_of_float in
let bits_to_mod = if bits_to_mod = 0 then 1 else bits_to_mod in
let bits_to_mod = get_random_partition r bits bits_to_mod in
let set = floyds_sampling r bits bits_to_mod in
Hashtbl.iter (fun bitpos _ ->
let byte_pos, bit_offset = bitpos / 8, bitpos mod 8 in
let newval = 1 lsl bit_offset |> char_of_int in
Fastlib.mod_file buf byte_pos newval
) set
let generate conf knobs rseed =
let myfile, mapsize, filesize = prepare_fuzztarget conf true in
let buf = Fastlib.map_file myfile mapsize in
let r = init_rseed rseed in
let () = mutate_file r buf filesize rseed conf.mratio in
let () = Fastlib.unmap_file buf in
myfile, rseed
|
b22f36f2af6e46f237dd058470610009da6f0e1aee857007e57d613fc0bbcdc5 | ocaml/ocaml | odoc_module.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cambium , INRIA Paris
(* *)
Copyright 2022 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(** Representation and manipulation of modules and module types. *)
module String = Misc.Stdlib.String
module Name = Odoc_name
(** {1 Types} *)
type module_element =
Element_module of t_module
| Element_module_type of t_module_type
| Element_included_module of included_module
| Element_class of Odoc_class.t_class
| Element_class_type of Odoc_class.t_class_type
| Element_value of Odoc_value.t_value
| Element_type_extension of Odoc_extension.t_type_extension
| Element_exception of Odoc_exception.t_exception
| Element_type of Odoc_type.t_type
| Element_module_comment of Odoc_types.text
(** To keep the order of elements in a module. *)
and mmt = Mod of t_module | Modtype of t_module_type
and included_module = {
im_name : Name.t;
mutable im_module : mmt option;
mutable im_info : Odoc_types.info option;
}
and module_alias = { ma_name : Name.t; mutable ma_module : mmt option; }
and module_parameter = {
mp_name : string;
mp_type : Types.module_type option;
mp_type_code : string;
mp_kind : module_type_kind;
}
and module_kind =
Module_struct of module_element list
| Module_alias of module_alias
| Module_functor of module_parameter * module_kind
| Module_apply of module_kind * module_kind
| Module_apply_unit of module_kind
| Module_with of module_type_kind * string
| Module_constraint of module_kind * module_type_kind
| Module_typeof of string
| Module_unpack of string * module_type_alias
and t_module = {
m_name : Name.t;
mutable m_type : Types.module_type;
mutable m_info : Odoc_types.info option;
m_is_interface : bool;
m_file : string;
mutable m_kind : module_kind;
mutable m_loc : Odoc_types.location;
mutable m_top_deps : Name.t list;
mutable m_code : string option;
mutable m_code_intf : string option;
m_text_only : bool;
}
and module_type_alias = {
mta_name : Name.t;
mutable mta_module : t_module_type option;
}
and module_type_kind =
Module_type_struct of module_element list
| Module_type_functor of module_parameter * module_type_kind
| Module_type_alias of module_type_alias
| Module_type_with of module_type_kind * string
| Module_type_typeof of string
and t_module_type = {
mt_name : Name.t;
mutable mt_info : Odoc_types.info option;
mutable mt_type : Types.module_type option;
mt_is_interface : bool;
mt_file : string;
mutable mt_kind : module_type_kind option;
mutable mt_loc : Odoc_types.location;
}
* { 1 Functions }
val values : module_element list -> Odoc_value.t_value list
(** Returns the list of values from a list of module_element. *)
val types : module_element list -> Odoc_type.t_type list
(** Returns the list of types from a list of module_element. *)
val type_extensions :
module_element list -> Odoc_extension.t_type_extension list
(** Returns the list of type extensions from a list of module_element. *)
val exceptions : module_element list -> Odoc_exception.t_exception list
(** Returns the list of exceptions from a list of module_element. *)
val classes : module_element list -> Odoc_class.t_class list
(** Returns the list of classes from a list of module_element. *)
val class_types : module_element list -> Odoc_class.t_class_type list
(** Returns the list of class types from a list of module_element. *)
val modules : module_element list -> t_module list
(** Returns the list of modules from a list of module_element. *)
val mod_types : module_element list -> t_module_type list
(** Returns the list of module types from a list of module_element. *)
val comments : module_element list -> Odoc_types.text list
(** Returns the list of module comment from a list of module_element. *)
val included_modules : module_element list -> included_module list
(** Returns the list of included modules from a list of module_element. *)
val module_type_elements :
?trans:bool -> t_module_type -> module_element list
(** Returns the list of elements of a module type.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_elements : ?trans:bool -> t_module -> module_element list
(** Returns the list of elements of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
*)
val module_values : ?trans:bool -> t_module -> Odoc_value.t_value list
(** Returns the list of values of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_functions : ?trans:bool -> t_module -> Odoc_value.t_value list
(** Returns the list of functional values of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_simple_values : ?trans:bool -> t_module -> Odoc_value.t_value list
(** Returns the list of non-functional values of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_types : ?trans:bool -> t_module -> Odoc_type.t_type list
(** Returns the list of types of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_extensions :
?trans:bool -> t_module -> Odoc_extension.t_type_extension list
(** Returns the list of type extensions of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_exceptions :
?trans:bool -> t_module -> Odoc_exception.t_exception list
(** Returns the list of exceptions of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_classes : ?trans:bool -> t_module -> Odoc_class.t_class list
(** Returns the list of classes of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_class_types :
?trans:bool -> t_module -> Odoc_class.t_class_type list
(** Returns the list of class types of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_modules : ?trans:bool -> t_module -> t_module list
(** Returns the list of modules of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_module_types : ?trans:bool -> t_module -> t_module_type list
(** Returns the list of module types of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_included_modules : ?trans:bool -> t_module -> included_module list
(** Returns the list of included module of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_comments : ?trans:bool -> t_module -> Odoc_types.text list
(** Returns the list of comments of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_parameters :
?trans:bool ->
t_module_type -> (module_parameter * Odoc_types.text option) list
(** Access to the parameters, for a functor type.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_parameters :
?trans:bool -> t_module -> (module_parameter * Odoc_types.text option) list
(** Access to the parameters, for a functor.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_all_submodules : ?trans:bool -> t_module -> t_module list
(** access to all submodules and submodules of submodules ... of the given module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_is_functor : t_module_type -> bool
(** The module type is a functor if it is defined as a functor or if it is an alias for a functor. *)
val module_is_functor : t_module -> bool
(** The module is a functor if it is defined as a functor or if it is an alias for a functor. *)
val module_type_values :
?trans:bool -> t_module_type -> Odoc_value.t_value list
(** Returns the list of values of a module type.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_types : ?trans:bool -> t_module_type -> Odoc_type.t_type list
(** Returns the list of types of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_type_extensions :
?trans:bool -> t_module_type -> Odoc_extension.t_type_extension list
(** Returns the list of type extensions of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_exceptions :
?trans:bool -> t_module_type -> Odoc_exception.t_exception list
(** Returns the list of exceptions of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_classes :
?trans:bool -> t_module_type -> Odoc_class.t_class list
(** Returns the list of classes of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_class_types :
?trans:bool -> t_module_type -> Odoc_class.t_class_type list
(** Returns the list of class types of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_modules : ?trans:bool -> t_module_type -> t_module list
(** Returns the list of modules of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_module_types :
?trans:bool -> t_module_type -> t_module_type list
(** Returns the list of module types of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_included_modules :
?trans:bool -> t_module_type -> included_module list
(** Returns the list of included module of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_comments :
?trans:bool -> t_module_type -> Odoc_types.text list
(** Returns the list of comments of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_functions :
?trans:bool -> t_module_type -> Odoc_value.t_value list
(** Returns the list of functional values of a module type.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_simple_values :
?trans:bool -> t_module_type -> Odoc_value.t_value list
(** Returns the list of non-functional values of a module type.
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
(** {1 Functions for modules and module types} *)
val module_all_classes : ?trans:bool -> t_module -> Odoc_class.t_class list
(** The list of classes defined in this module and all its modules, functors, ....
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
val module_type_all_classes :
?trans:bool -> t_module_type -> Odoc_class.t_class list
(** The list of classes defined in this module type and all its modules, functors, ....
@param trans indicates if, for aliased modules, we must perform a transitive search.*)
| null | https://raw.githubusercontent.com/ocaml/ocaml/8a61778d2716304203974d20ead1b2736c1694a8/ocamldoc/odoc_module.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Representation and manipulation of modules and module types.
* {1 Types}
* To keep the order of elements in a module.
* Returns the list of values from a list of module_element.
* Returns the list of types from a list of module_element.
* Returns the list of type extensions from a list of module_element.
* Returns the list of exceptions from a list of module_element.
* Returns the list of classes from a list of module_element.
* Returns the list of class types from a list of module_element.
* Returns the list of modules from a list of module_element.
* Returns the list of module types from a list of module_element.
* Returns the list of module comment from a list of module_element.
* Returns the list of included modules from a list of module_element.
* Returns the list of elements of a module type.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of elements of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of values of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of functional values of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of non-functional values of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of types of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of type extensions of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of exceptions of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of classes of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of class types of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of modules of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of module types of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of included module of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of comments of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Access to the parameters, for a functor type.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Access to the parameters, for a functor.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* access to all submodules and submodules of submodules ... of the given module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* The module type is a functor if it is defined as a functor or if it is an alias for a functor.
* The module is a functor if it is defined as a functor or if it is an alias for a functor.
* Returns the list of values of a module type.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of types of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of type extensions of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of exceptions of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of classes of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of class types of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of modules of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of module types of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of included module of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of comments of a module.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of functional values of a module type.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* Returns the list of non-functional values of a module type.
@param trans indicates if, for aliased modules, we must perform a transitive search.
* {1 Functions for modules and module types}
* The list of classes defined in this module and all its modules, functors, ....
@param trans indicates if, for aliased modules, we must perform a transitive search.
* The list of classes defined in this module type and all its modules, functors, ....
@param trans indicates if, for aliased modules, we must perform a transitive search. | , projet Cambium , INRIA Paris
Copyright 2022 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
module String = Misc.Stdlib.String
module Name = Odoc_name
type module_element =
Element_module of t_module
| Element_module_type of t_module_type
| Element_included_module of included_module
| Element_class of Odoc_class.t_class
| Element_class_type of Odoc_class.t_class_type
| Element_value of Odoc_value.t_value
| Element_type_extension of Odoc_extension.t_type_extension
| Element_exception of Odoc_exception.t_exception
| Element_type of Odoc_type.t_type
| Element_module_comment of Odoc_types.text
and mmt = Mod of t_module | Modtype of t_module_type
and included_module = {
im_name : Name.t;
mutable im_module : mmt option;
mutable im_info : Odoc_types.info option;
}
and module_alias = { ma_name : Name.t; mutable ma_module : mmt option; }
and module_parameter = {
mp_name : string;
mp_type : Types.module_type option;
mp_type_code : string;
mp_kind : module_type_kind;
}
and module_kind =
Module_struct of module_element list
| Module_alias of module_alias
| Module_functor of module_parameter * module_kind
| Module_apply of module_kind * module_kind
| Module_apply_unit of module_kind
| Module_with of module_type_kind * string
| Module_constraint of module_kind * module_type_kind
| Module_typeof of string
| Module_unpack of string * module_type_alias
and t_module = {
m_name : Name.t;
mutable m_type : Types.module_type;
mutable m_info : Odoc_types.info option;
m_is_interface : bool;
m_file : string;
mutable m_kind : module_kind;
mutable m_loc : Odoc_types.location;
mutable m_top_deps : Name.t list;
mutable m_code : string option;
mutable m_code_intf : string option;
m_text_only : bool;
}
and module_type_alias = {
mta_name : Name.t;
mutable mta_module : t_module_type option;
}
and module_type_kind =
Module_type_struct of module_element list
| Module_type_functor of module_parameter * module_type_kind
| Module_type_alias of module_type_alias
| Module_type_with of module_type_kind * string
| Module_type_typeof of string
and t_module_type = {
mt_name : Name.t;
mutable mt_info : Odoc_types.info option;
mutable mt_type : Types.module_type option;
mt_is_interface : bool;
mt_file : string;
mutable mt_kind : module_type_kind option;
mutable mt_loc : Odoc_types.location;
}
* { 1 Functions }
val values : module_element list -> Odoc_value.t_value list
val types : module_element list -> Odoc_type.t_type list
val type_extensions :
module_element list -> Odoc_extension.t_type_extension list
val exceptions : module_element list -> Odoc_exception.t_exception list
val classes : module_element list -> Odoc_class.t_class list
val class_types : module_element list -> Odoc_class.t_class_type list
val modules : module_element list -> t_module list
val mod_types : module_element list -> t_module_type list
val comments : module_element list -> Odoc_types.text list
val included_modules : module_element list -> included_module list
val module_type_elements :
?trans:bool -> t_module_type -> module_element list
val module_elements : ?trans:bool -> t_module -> module_element list
val module_values : ?trans:bool -> t_module -> Odoc_value.t_value list
val module_functions : ?trans:bool -> t_module -> Odoc_value.t_value list
val module_simple_values : ?trans:bool -> t_module -> Odoc_value.t_value list
val module_types : ?trans:bool -> t_module -> Odoc_type.t_type list
val module_type_extensions :
?trans:bool -> t_module -> Odoc_extension.t_type_extension list
val module_exceptions :
?trans:bool -> t_module -> Odoc_exception.t_exception list
val module_classes : ?trans:bool -> t_module -> Odoc_class.t_class list
val module_class_types :
?trans:bool -> t_module -> Odoc_class.t_class_type list
val module_modules : ?trans:bool -> t_module -> t_module list
val module_module_types : ?trans:bool -> t_module -> t_module_type list
val module_included_modules : ?trans:bool -> t_module -> included_module list
val module_comments : ?trans:bool -> t_module -> Odoc_types.text list
val module_type_parameters :
?trans:bool ->
t_module_type -> (module_parameter * Odoc_types.text option) list
val module_parameters :
?trans:bool -> t_module -> (module_parameter * Odoc_types.text option) list
val module_all_submodules : ?trans:bool -> t_module -> t_module list
val module_type_is_functor : t_module_type -> bool
val module_is_functor : t_module -> bool
val module_type_values :
?trans:bool -> t_module_type -> Odoc_value.t_value list
val module_type_types : ?trans:bool -> t_module_type -> Odoc_type.t_type list
val module_type_type_extensions :
?trans:bool -> t_module_type -> Odoc_extension.t_type_extension list
val module_type_exceptions :
?trans:bool -> t_module_type -> Odoc_exception.t_exception list
val module_type_classes :
?trans:bool -> t_module_type -> Odoc_class.t_class list
val module_type_class_types :
?trans:bool -> t_module_type -> Odoc_class.t_class_type list
val module_type_modules : ?trans:bool -> t_module_type -> t_module list
val module_type_module_types :
?trans:bool -> t_module_type -> t_module_type list
val module_type_included_modules :
?trans:bool -> t_module_type -> included_module list
val module_type_comments :
?trans:bool -> t_module_type -> Odoc_types.text list
val module_type_functions :
?trans:bool -> t_module_type -> Odoc_value.t_value list
val module_type_simple_values :
?trans:bool -> t_module_type -> Odoc_value.t_value list
val module_all_classes : ?trans:bool -> t_module -> Odoc_class.t_class list
val module_type_all_classes :
?trans:bool -> t_module_type -> Odoc_class.t_class list
|
d67ae694470282c7feb8161af7f357d3c51f7e9e03b15ec2204a118e17e4f39d | pedestal/samples | service_test.clj | (ns jboss.service-test
(:require [clojure.test :refer :all]
[io.pedestal.service.test :refer :all]
[io.pedestal.service.http :as bootstrap]
[jboss.service :as service]))
(def service
(::bootstrap/service-fn (bootstrap/create-servlet service/service)))
(deftest home-page-test
(is (=
(:body (response-for service :get "/"))
"Hello World!")))
(deftest about-page-test
(is (.contains
(:body (response-for service :get "/about"))
"Clojure 1.5")))
| null | https://raw.githubusercontent.com/pedestal/samples/caaf04afe255586f8f4e1235deeb0c1904179355/jboss/test/jboss/service_test.clj | clojure | (ns jboss.service-test
(:require [clojure.test :refer :all]
[io.pedestal.service.test :refer :all]
[io.pedestal.service.http :as bootstrap]
[jboss.service :as service]))
(def service
(::bootstrap/service-fn (bootstrap/create-servlet service/service)))
(deftest home-page-test
(is (=
(:body (response-for service :get "/"))
"Hello World!")))
(deftest about-page-test
(is (.contains
(:body (response-for service :get "/about"))
"Clojure 1.5")))
|
|
9967aa3721abfd9e484511cb7442b33ab834bb2cee6799944222bcc2d93b313f | msgpack/msgpack-haskell | devel.hs | {-# LANGUAGE PackageImports #-}
import "mpidl-web" Application (getApplicationDev)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, settingsPort)
import Control.Concurrent (forkIO)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)
main :: IO ()
main = do
putStrLn "Starting devel application"
(port, app) <- getApplicationDev
forkIO $ runSettings defaultSettings
{ settingsPort = port
} app
loop
loop :: IO ()
loop = do
threadDelay 100000
e <- doesFileExist "dist/devel-terminate"
if e then terminateDevel else loop
terminateDevel :: IO ()
terminateDevel = exitSuccess
| null | https://raw.githubusercontent.com/msgpack/msgpack-haskell/f52a5d2db620a7be70810eca648fd152141f8b14/msgpack-idl-web/src/devel.hs | haskell | # LANGUAGE PackageImports # | import "mpidl-web" Application (getApplicationDev)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, settingsPort)
import Control.Concurrent (forkIO)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)
main :: IO ()
main = do
putStrLn "Starting devel application"
(port, app) <- getApplicationDev
forkIO $ runSettings defaultSettings
{ settingsPort = port
} app
loop
loop :: IO ()
loop = do
threadDelay 100000
e <- doesFileExist "dist/devel-terminate"
if e then terminateDevel else loop
terminateDevel :: IO ()
terminateDevel = exitSuccess
|
b22381264087bab6db0e10f1f8997396613393e1be6ded9c5b2c2617483c7da8 | kowainik/hakyll-shortcut-links | ShortcutLinks.hs | # LANGUAGE FlexibleContexts #
|
Copyright : ( c ) 2019 - 2021 Kowainik
License : MPL-2.0
Maintainer : < >
This package allows to use [ shortcut - links]( / package / shortcut - links )
package in websites generated by [ hakyll]( / package / hakyll ) .
The flexible interface allows to use the supported huge collection of shortcuts
along with using custom ones .
Here is a few examples of the ` @github ` shortcut :
- Link to a user :
+ ----------------------------------------+----------------------------------------------------+
| Shortcut | Plain markdown |
+ = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = +
| @[foo]\(\@github)@ | @[foo]\(https:\/\/github.com\/foo)@ |
+ ----------------------------------------+----------------------------------------------------+
| @[foo | @[foo Github profile]\(https:\/\/github.com\/foo)@ |
+ ----------------------------------------+----------------------------------------------------+
- Link to a repository :
+ ---------------------------------------+----------------------------------------------------+
| Shortcut | Plain markdown |
+ = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = +
| @[bar]\(\@github : foo)@ | @[bar]\(https:\/\/github.com\/foo\/bar)@ |
+ ---------------------------------------+----------------------------------------------------+
| @[Github Source]\(\@github(foo):bar)@ | @[Github Source]\(https:\/\/github.com\/foo\/bar)@ |
+ ---------------------------------------+----------------------------------------------------+
Copyright: (c) 2019-2021 Kowainik
License: MPL-2.0
Maintainer: Kowainik <>
This package allows to use [shortcut-links](-links)
package in websites generated by [hakyll]().
The flexible interface allows to use the supported huge collection of shortcuts
along with using custom ones.
Here is a few examples of the `@github` shortcut:
- Link to a user:
+----------------------------------------+----------------------------------------------------+
| Shortcut | Plain markdown |
+========================================+====================================================+
| @[foo]\(\@github)@ | @[foo]\(https:\/\/github.com\/foo)@ |
+----------------------------------------+----------------------------------------------------+
| @[foo Github profile]\(\@github(foo))@ | @[foo Github profile]\(https:\/\/github.com\/foo)@ |
+----------------------------------------+----------------------------------------------------+
- Link to a repository:
+---------------------------------------+----------------------------------------------------+
| Shortcut | Plain markdown |
+=======================================+====================================================+
| @[bar]\(\@github:foo)@ | @[bar]\(https:\/\/github.com\/foo\/bar)@ |
+---------------------------------------+----------------------------------------------------+
| @[Github Source]\(\@github(foo):bar)@ | @[Github Source]\(https:\/\/github.com\/foo\/bar)@ |
+---------------------------------------+----------------------------------------------------+
-}
module Hakyll.ShortcutLinks
( -- * Pandoc functions
-- $pandoc
applyShortcuts
, applyAllShortcuts
-- * Hakyll functions
$ hakyll
, shortcutLinksCompiler
, allShortcutLinksCompiler
-- * Shortcut-links reexports
-- $sh
, module Sh
$ allSh
, module ShortcutLinks.All
) where
import Control.Monad.Except (MonadError (..))
import Data.Text (Text)
import Hakyll (Compiler, Item, defaultHakyllReaderOptions, defaultHakyllWriterOptions,
pandocCompilerWithTransformM)
import ShortcutLinks (Result (..), Shortcut, allShortcuts, useShortcutFrom)
import Text.Pandoc.Generic (bottomUpM)
import Hakyll.ShortcutLinks.Parser (parseShortcut)
-- exports
import ShortcutLinks as Sh
import ShortcutLinks.All
import qualified Text.Pandoc.Definition as Pandoc
$ pandoc
Functions to transform ' Pandoc . Pandoc ' documents . These functions modify
markdown links to the extended links .
These are the most generic functions . They work inside the monad @m@ that has
@'MonadError ' [ ' String']@ instance .
You can use the pure version of these function because there 's ' MonadError '
instance for ' Either ' :
@
applyShorcuts : : [ ( [ ' Text ' ] , ' Shortcut ' ) ] - > ' Pandoc . Pandoc ' - > ' Either ' [ ' String ' ] ' Pandoc . Pandoc '
applyAllShorcuts : : ' Pandoc . Pandoc ' - > ' Either ' [ ' String ' ] ' Pandoc . Pandoc '
@
If you have your own @hakyll@ options for your custom pandoc compiler , you can
use this function like this :
@
' pandocCompilerWithTransformM '
myHakyllReaderOptions
myHakyllWriterOptions
( ' applyShortcuts ' myShortcuts )
@
Functions to transform 'Pandoc.Pandoc' documents. These functions modify
markdown links to the extended links.
These are the most generic functions. They work inside the monad @m@ that has
@'MonadError' ['String']@ instance.
You can use the pure version of these function because there's 'MonadError'
instance for 'Either':
@
applyShorcuts :: [(['Text'], 'Shortcut')] -> 'Pandoc.Pandoc' -> 'Either' ['String'] 'Pandoc.Pandoc'
applyAllShorcuts :: 'Pandoc.Pandoc' -> 'Either' ['String'] 'Pandoc.Pandoc'
@
If you have your own @hakyll@ options for your custom pandoc compiler, you can
use this function like this:
@
'pandocCompilerWithTransformM'
myHakyllReaderOptions
myHakyllWriterOptions
('applyShortcuts' myShortcuts)
@
-}
| Modifies markdown shortcut links to the extended version and returns
' Pandoc . Pandoc ' with the complete links instead .
Unlike ' applyAllShortcuts ' which uses the hardcoded list of the possible
shortcuts ( see ' allShortcuts ' ) , the ' applyShortcuts ' function uses the given
list of custom provided shortcuts .
For your help you can use ' ShortcutLinks . All ' module to see all available
shortcuts .
If you want to add a couple of custom shortcuts to the list of already existing
shortcuts you can do it in the following way :
@
( [ " hk " , " hackage " ] , ' hackage ' ) : ' allShortcuts '
@
'Pandoc.Pandoc' with the complete links instead.
Unlike 'applyAllShortcuts' which uses the hardcoded list of the possible
shortcuts (see 'allShortcuts'), the 'applyShortcuts' function uses the given
list of custom provided shortcuts.
For your help you can use 'ShortcutLinks.All' module to see all available
shortcuts.
If you want to add a couple of custom shortcuts to the list of already existing
shortcuts you can do it in the following way:
@
(["hk", "hackage"], 'hackage') : 'allShortcuts'
@
-}
applyShortcuts
:: forall m . MonadError [String] m
=> [([Text], Shortcut)] -- ^ Shortcuts
-> Pandoc.Pandoc -- ^ Pandoc document that possibly contains shortened links
-> m Pandoc.Pandoc -- ^ Result pandoc document with shorcuts expanded
applyShortcuts shortcuts = bottomUpM applyLink
where
applyLink :: Pandoc.Inline -> m Pandoc.Inline
applyLink l@(Pandoc.Link attr inl (url, title)) = case parseShortcut url of
Right (name, option, text) -> maybe (checkTitle inl) pure text >>= \txtTitle ->
case useShortcutFrom shortcuts name option txtTitle of
Success link -> pure $ Pandoc.Link attr inl (link, title)
Warning ws _ -> throwError ws
Failure msg -> throwError [msg]
Left _ -> pure l -- the link is not shortcut
applyLink other = pure other
checkTitle :: [Pandoc.Inline] -> m Text
checkTitle = \case
[] -> throwError ["Empty shortcut link title arguments"]
[Pandoc.Str s] -> pure s
_ -> throwError ["Shortcut title is not a single string element"]
| Modifies markdown shortcut links to the extended version and returns
' Pandoc . Pandoc ' with the complete links instead .
Similar to ' applyShortcuts ' but uses ' allShortcuts ' as a list of shortcuts to
parse against .
'Pandoc.Pandoc' with the complete links instead.
Similar to 'applyShortcuts' but uses 'allShortcuts' as a list of shortcuts to
parse against.
-}
applyAllShortcuts :: MonadError [String] m => Pandoc.Pandoc -> m Pandoc.Pandoc
applyAllShortcuts = applyShortcuts allShortcuts
$ hakyll
Functions to integrate shortcut links to [ hakyll]( / package / hakyll ) .
@hakyll - shortcut - links@ provides out - of - the - box ' Compiler 's that translate
markdown documents with shortcut links into the documents with extended links .
Usually you would want to use this feature on your blog post markdown files .
Assuming that you already have similar code for it :
@
match " blog/ * " $ do
route $ setExtension " html "
compile $
_ _ pandocCompiler _ _
> > = loadAndApplyTemplate " templates / post.html " defaultContext
> > = relativizeUrls
@
All that you would need to do is to replace ' Hakyll.pandocCompiler ' with
' shortcutLinksCompiler ' or ' allShortcutLinksCompiler ' :
@
match " blog/ * " $ do
route $ setExtension " html "
compile $
_ _ ' allShortcutLinksCompiler ' _ _
> > = loadAndApplyTemplate " templates / post.html " defaultContext
> > = relativizeUrls
@
Functions to integrate shortcut links to [hakyll]().
@hakyll-shortcut-links@ provides out-of-the-box 'Compiler's that translate
markdown documents with shortcut links into the documents with extended links.
Usually you would want to use this feature on your blog post markdown files.
Assuming that you already have similar code for it:
@
match "blog/*" $ do
route $ setExtension "html"
compile $
__pandocCompiler__
>>= loadAndApplyTemplate "templates/post.html" defaultContext
>>= relativizeUrls
@
All that you would need to do is to replace 'Hakyll.pandocCompiler' with
'shortcutLinksCompiler' or 'allShortcutLinksCompiler':
@
match "blog/*" $ do
route $ setExtension "html"
compile $
__'allShortcutLinksCompiler'__
>>= loadAndApplyTemplate "templates/post.html" defaultContext
>>= relativizeUrls
@
-}
{- | Our own pandoc compiler which parses shortcut links automatically.
It takes a custom list of shortcut links to be used in the document.
-}
shortcutLinksCompiler :: [([Text], Shortcut)] -> Compiler (Item String)
shortcutLinksCompiler = pandocCompilerWithTransformM
defaultHakyllReaderOptions
defaultHakyllWriterOptions
. applyShortcuts
| Our own pandoc compiler which parses shortcut links automatically . Same as
' shortcutLinksCompiler ' but passes ' allShortcuts ' as an argument .
'shortcutLinksCompiler' but passes 'allShortcuts' as an argument.
-}
allShortcutLinksCompiler :: Compiler (Item String)
allShortcutLinksCompiler = shortcutLinksCompiler allShortcuts
$ sh
This is the module from @shortcut - links@ library that introduces the functions
that by given shortcuts creates the ' Result'ing URL ( if possible ) .
This is the module from @shortcut-links@ library that introduces the functions
that by given shortcuts creates the 'Result'ing URL (if possible).
-}
$ allSh
This module stores a large number of supported ' Shortcut 's .
It also reexports a useful function ' allShortcuts ' that is a list of all
shortcuts , together with suggested names for them .
In @hakyll - shortcut - links@ we are exporting both functions that work with the
standard list of ' allShortcuts ' , but also we provide the option to use your own
lists of shortcuts ( including self - created ones ) .
For example , if you want to use just ' github ' and ' hackage ' shortcuts you can
create the following list :
@
( [ " github " ] , github ) : ( [ " hackage " ] , hackage ) : [ ]
@
If you want to create your own shortcut that is not included in
" ShortcutLinks . All " module you can achieve that implementing the following
function :
@
kowainik : : ' Shortcut '
kowainik _ text = pure $ " / " < > text
myShortcuts : : [ ( [ ' Text ' ] , ' Shortcut ' ) ]
myShortcuts = [ ( [ " kowainik " ] , ) ]
@
And it would work like this :
@
[ blog post]\(@kowainik:2019 - 02 - 06 - style - guide )
= >
[ blog post]\(https:\/\/kowainik.github.io\/posts\/2019 - 02 - 06 - style - guide )
@
This module stores a large number of supported 'Shortcut's.
It also reexports a useful function 'allShortcuts' that is a list of all
shortcuts, together with suggested names for them.
In @hakyll-shortcut-links@ we are exporting both functions that work with the
standard list of 'allShortcuts', but also we provide the option to use your own
lists of shortcuts (including self-created ones).
For example, if you want to use just 'github' and 'hackage' shortcuts you can
create the following list:
@
(["github"], github) : (["hackage"], hackage) : []
@
If you want to create your own shortcut that is not included in
"ShortcutLinks.All" module you can achieve that implementing the following
function:
@
kowainik :: 'Shortcut'
kowainik _ text = pure $ "/" <> text
myShortcuts :: [(['Text'], 'Shortcut')]
myShortcuts = [(["kowainik"], kowainik)]
@
And it would work like this:
@
[blog post]\(@kowainik:2019-02-06-style-guide)
=>
[blog post]\(https:\/\/kowainik.github.io\/posts\/2019-02-06-style-guide)
@
-}
| null | https://raw.githubusercontent.com/kowainik/hakyll-shortcut-links/5fcd2efad7c8d239039ba52f5e4f6a335f896b68/src/Hakyll/ShortcutLinks.hs | haskell | --------------------------------------+----------------------------------------------------+
--------------------------------------+----------------------------------------------------+
--------------------------------------+----------------------------------------------------+
-------------------------------------+----------------------------------------------------+
-------------------------------------+----------------------------------------------------+
-------------------------------------+----------------------------------------------------+
--------------------------------------+----------------------------------------------------+
--------------------------------------+----------------------------------------------------+
--------------------------------------+----------------------------------------------------+
-------------------------------------+----------------------------------------------------+
-------------------------------------+----------------------------------------------------+
-------------------------------------+----------------------------------------------------+
* Pandoc functions
$pandoc
* Hakyll functions
* Shortcut-links reexports
$sh
exports
^ Shortcuts
^ Pandoc document that possibly contains shortened links
^ Result pandoc document with shorcuts expanded
the link is not shortcut
| Our own pandoc compiler which parses shortcut links automatically.
It takes a custom list of shortcut links to be used in the document.
| # LANGUAGE FlexibleContexts #
|
Copyright : ( c ) 2019 - 2021 Kowainik
License : MPL-2.0
Maintainer : < >
This package allows to use [ shortcut - links]( / package / shortcut - links )
package in websites generated by [ hakyll]( / package / hakyll ) .
The flexible interface allows to use the supported huge collection of shortcuts
along with using custom ones .
Here is a few examples of the ` @github ` shortcut :
- Link to a user :
| Shortcut | Plain markdown |
+ = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = +
| @[foo]\(\@github)@ | @[foo]\(https:\/\/github.com\/foo)@ |
| @[foo | @[foo Github profile]\(https:\/\/github.com\/foo)@ |
- Link to a repository :
| Shortcut | Plain markdown |
+ = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = + = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = +
| @[bar]\(\@github : foo)@ | @[bar]\(https:\/\/github.com\/foo\/bar)@ |
| @[Github Source]\(\@github(foo):bar)@ | @[Github Source]\(https:\/\/github.com\/foo\/bar)@ |
Copyright: (c) 2019-2021 Kowainik
License: MPL-2.0
Maintainer: Kowainik <>
This package allows to use [shortcut-links](-links)
package in websites generated by [hakyll]().
The flexible interface allows to use the supported huge collection of shortcuts
along with using custom ones.
Here is a few examples of the `@github` shortcut:
- Link to a user:
| Shortcut | Plain markdown |
+========================================+====================================================+
| @[foo]\(\@github)@ | @[foo]\(https:\/\/github.com\/foo)@ |
| @[foo Github profile]\(\@github(foo))@ | @[foo Github profile]\(https:\/\/github.com\/foo)@ |
- Link to a repository:
| Shortcut | Plain markdown |
+=======================================+====================================================+
| @[bar]\(\@github:foo)@ | @[bar]\(https:\/\/github.com\/foo\/bar)@ |
| @[Github Source]\(\@github(foo):bar)@ | @[Github Source]\(https:\/\/github.com\/foo\/bar)@ |
-}
module Hakyll.ShortcutLinks
applyShortcuts
, applyAllShortcuts
$ hakyll
, shortcutLinksCompiler
, allShortcutLinksCompiler
, module Sh
$ allSh
, module ShortcutLinks.All
) where
import Control.Monad.Except (MonadError (..))
import Data.Text (Text)
import Hakyll (Compiler, Item, defaultHakyllReaderOptions, defaultHakyllWriterOptions,
pandocCompilerWithTransformM)
import ShortcutLinks (Result (..), Shortcut, allShortcuts, useShortcutFrom)
import Text.Pandoc.Generic (bottomUpM)
import Hakyll.ShortcutLinks.Parser (parseShortcut)
import ShortcutLinks as Sh
import ShortcutLinks.All
import qualified Text.Pandoc.Definition as Pandoc
$ pandoc
Functions to transform ' Pandoc . Pandoc ' documents . These functions modify
markdown links to the extended links .
These are the most generic functions . They work inside the monad @m@ that has
@'MonadError ' [ ' String']@ instance .
You can use the pure version of these function because there 's ' MonadError '
instance for ' Either ' :
@
applyShorcuts : : [ ( [ ' Text ' ] , ' Shortcut ' ) ] - > ' Pandoc . Pandoc ' - > ' Either ' [ ' String ' ] ' Pandoc . Pandoc '
applyAllShorcuts : : ' Pandoc . Pandoc ' - > ' Either ' [ ' String ' ] ' Pandoc . Pandoc '
@
If you have your own @hakyll@ options for your custom pandoc compiler , you can
use this function like this :
@
' pandocCompilerWithTransformM '
myHakyllReaderOptions
myHakyllWriterOptions
( ' applyShortcuts ' myShortcuts )
@
Functions to transform 'Pandoc.Pandoc' documents. These functions modify
markdown links to the extended links.
These are the most generic functions. They work inside the monad @m@ that has
@'MonadError' ['String']@ instance.
You can use the pure version of these function because there's 'MonadError'
instance for 'Either':
@
applyShorcuts :: [(['Text'], 'Shortcut')] -> 'Pandoc.Pandoc' -> 'Either' ['String'] 'Pandoc.Pandoc'
applyAllShorcuts :: 'Pandoc.Pandoc' -> 'Either' ['String'] 'Pandoc.Pandoc'
@
If you have your own @hakyll@ options for your custom pandoc compiler, you can
use this function like this:
@
'pandocCompilerWithTransformM'
myHakyllReaderOptions
myHakyllWriterOptions
('applyShortcuts' myShortcuts)
@
-}
| Modifies markdown shortcut links to the extended version and returns
' Pandoc . Pandoc ' with the complete links instead .
Unlike ' applyAllShortcuts ' which uses the hardcoded list of the possible
shortcuts ( see ' allShortcuts ' ) , the ' applyShortcuts ' function uses the given
list of custom provided shortcuts .
For your help you can use ' ShortcutLinks . All ' module to see all available
shortcuts .
If you want to add a couple of custom shortcuts to the list of already existing
shortcuts you can do it in the following way :
@
( [ " hk " , " hackage " ] , ' hackage ' ) : ' allShortcuts '
@
'Pandoc.Pandoc' with the complete links instead.
Unlike 'applyAllShortcuts' which uses the hardcoded list of the possible
shortcuts (see 'allShortcuts'), the 'applyShortcuts' function uses the given
list of custom provided shortcuts.
For your help you can use 'ShortcutLinks.All' module to see all available
shortcuts.
If you want to add a couple of custom shortcuts to the list of already existing
shortcuts you can do it in the following way:
@
(["hk", "hackage"], 'hackage') : 'allShortcuts'
@
-}
applyShortcuts
:: forall m . MonadError [String] m
applyShortcuts shortcuts = bottomUpM applyLink
where
applyLink :: Pandoc.Inline -> m Pandoc.Inline
applyLink l@(Pandoc.Link attr inl (url, title)) = case parseShortcut url of
Right (name, option, text) -> maybe (checkTitle inl) pure text >>= \txtTitle ->
case useShortcutFrom shortcuts name option txtTitle of
Success link -> pure $ Pandoc.Link attr inl (link, title)
Warning ws _ -> throwError ws
Failure msg -> throwError [msg]
applyLink other = pure other
checkTitle :: [Pandoc.Inline] -> m Text
checkTitle = \case
[] -> throwError ["Empty shortcut link title arguments"]
[Pandoc.Str s] -> pure s
_ -> throwError ["Shortcut title is not a single string element"]
| Modifies markdown shortcut links to the extended version and returns
' Pandoc . Pandoc ' with the complete links instead .
Similar to ' applyShortcuts ' but uses ' allShortcuts ' as a list of shortcuts to
parse against .
'Pandoc.Pandoc' with the complete links instead.
Similar to 'applyShortcuts' but uses 'allShortcuts' as a list of shortcuts to
parse against.
-}
applyAllShortcuts :: MonadError [String] m => Pandoc.Pandoc -> m Pandoc.Pandoc
applyAllShortcuts = applyShortcuts allShortcuts
$ hakyll
Functions to integrate shortcut links to [ hakyll]( / package / hakyll ) .
@hakyll - shortcut - links@ provides out - of - the - box ' Compiler 's that translate
markdown documents with shortcut links into the documents with extended links .
Usually you would want to use this feature on your blog post markdown files .
Assuming that you already have similar code for it :
@
match " blog/ * " $ do
route $ setExtension " html "
compile $
_ _ pandocCompiler _ _
> > = loadAndApplyTemplate " templates / post.html " defaultContext
> > = relativizeUrls
@
All that you would need to do is to replace ' Hakyll.pandocCompiler ' with
' shortcutLinksCompiler ' or ' allShortcutLinksCompiler ' :
@
match " blog/ * " $ do
route $ setExtension " html "
compile $
_ _ ' allShortcutLinksCompiler ' _ _
> > = loadAndApplyTemplate " templates / post.html " defaultContext
> > = relativizeUrls
@
Functions to integrate shortcut links to [hakyll]().
@hakyll-shortcut-links@ provides out-of-the-box 'Compiler's that translate
markdown documents with shortcut links into the documents with extended links.
Usually you would want to use this feature on your blog post markdown files.
Assuming that you already have similar code for it:
@
match "blog/*" $ do
route $ setExtension "html"
compile $
__pandocCompiler__
>>= loadAndApplyTemplate "templates/post.html" defaultContext
>>= relativizeUrls
@
All that you would need to do is to replace 'Hakyll.pandocCompiler' with
'shortcutLinksCompiler' or 'allShortcutLinksCompiler':
@
match "blog/*" $ do
route $ setExtension "html"
compile $
__'allShortcutLinksCompiler'__
>>= loadAndApplyTemplate "templates/post.html" defaultContext
>>= relativizeUrls
@
-}
shortcutLinksCompiler :: [([Text], Shortcut)] -> Compiler (Item String)
shortcutLinksCompiler = pandocCompilerWithTransformM
defaultHakyllReaderOptions
defaultHakyllWriterOptions
. applyShortcuts
| Our own pandoc compiler which parses shortcut links automatically . Same as
' shortcutLinksCompiler ' but passes ' allShortcuts ' as an argument .
'shortcutLinksCompiler' but passes 'allShortcuts' as an argument.
-}
allShortcutLinksCompiler :: Compiler (Item String)
allShortcutLinksCompiler = shortcutLinksCompiler allShortcuts
$ sh
This is the module from @shortcut - links@ library that introduces the functions
that by given shortcuts creates the ' Result'ing URL ( if possible ) .
This is the module from @shortcut-links@ library that introduces the functions
that by given shortcuts creates the 'Result'ing URL (if possible).
-}
$ allSh
This module stores a large number of supported ' Shortcut 's .
It also reexports a useful function ' allShortcuts ' that is a list of all
shortcuts , together with suggested names for them .
In @hakyll - shortcut - links@ we are exporting both functions that work with the
standard list of ' allShortcuts ' , but also we provide the option to use your own
lists of shortcuts ( including self - created ones ) .
For example , if you want to use just ' github ' and ' hackage ' shortcuts you can
create the following list :
@
( [ " github " ] , github ) : ( [ " hackage " ] , hackage ) : [ ]
@
If you want to create your own shortcut that is not included in
" ShortcutLinks . All " module you can achieve that implementing the following
function :
@
kowainik : : ' Shortcut '
kowainik _ text = pure $ " / " < > text
myShortcuts : : [ ( [ ' Text ' ] , ' Shortcut ' ) ]
myShortcuts = [ ( [ " kowainik " ] , ) ]
@
And it would work like this :
@
[ blog post]\(@kowainik:2019 - 02 - 06 - style - guide )
= >
[ blog post]\(https:\/\/kowainik.github.io\/posts\/2019 - 02 - 06 - style - guide )
@
This module stores a large number of supported 'Shortcut's.
It also reexports a useful function 'allShortcuts' that is a list of all
shortcuts, together with suggested names for them.
In @hakyll-shortcut-links@ we are exporting both functions that work with the
standard list of 'allShortcuts', but also we provide the option to use your own
lists of shortcuts (including self-created ones).
For example, if you want to use just 'github' and 'hackage' shortcuts you can
create the following list:
@
(["github"], github) : (["hackage"], hackage) : []
@
If you want to create your own shortcut that is not included in
"ShortcutLinks.All" module you can achieve that implementing the following
function:
@
kowainik :: 'Shortcut'
kowainik _ text = pure $ "/" <> text
myShortcuts :: [(['Text'], 'Shortcut')]
myShortcuts = [(["kowainik"], kowainik)]
@
And it would work like this:
@
[blog post]\(@kowainik:2019-02-06-style-guide)
=>
[blog post]\(https:\/\/kowainik.github.io\/posts\/2019-02-06-style-guide)
@
-}
|
7abb8dd7d9666c91ad25a8bfc6ea5bbf7919ef5d034dd0d4212ae78bf4981da7 | venantius/yagni | graph.clj | (ns yagni.graph
"Functions for dealing with graphs."
(:require [clojure.set :as set]))
(defn- dfs
"Using a depth-first search algorithm, explore the graph from the given
starting node. Return what remains of the graph (should only include
nodes that aren't findable from the starting node.)"
[g n v]
(swap! v conj n)
(let [edges (remove @v (get @g n))]
(swap! g dissoc n)
(doall (map (fn [e] (dfs g e v)) edges))
g))
(defn prune-findable-nodes!
"Repeatedly search the graph from a list of initial endpoints, e. Remove
all findable nodes from the graph, and add them to the set of found nodes."
[g e v]
(doall (map (fn [x] (dfs g x v)) e)))
(defn find-children-and-parents
"Given a graph of 'orphaned' vars, figure out which of these vars have
*something* referring to them (children), and which have nothing referring
to them (parents)."
[g]
(let [edges (reduce into #{} (vals g))
nodes (into #{} (keys g))
children (set/intersection edges nodes)
parents (set/difference nodes children)]
{:children children
:parents parents}))
| null | https://raw.githubusercontent.com/venantius/yagni/54aa78d06279c3258c6bd932e75c9a2d91bb2fc6/src/yagni/graph.clj | clojure | (ns yagni.graph
"Functions for dealing with graphs."
(:require [clojure.set :as set]))
(defn- dfs
"Using a depth-first search algorithm, explore the graph from the given
starting node. Return what remains of the graph (should only include
nodes that aren't findable from the starting node.)"
[g n v]
(swap! v conj n)
(let [edges (remove @v (get @g n))]
(swap! g dissoc n)
(doall (map (fn [e] (dfs g e v)) edges))
g))
(defn prune-findable-nodes!
"Repeatedly search the graph from a list of initial endpoints, e. Remove
all findable nodes from the graph, and add them to the set of found nodes."
[g e v]
(doall (map (fn [x] (dfs g x v)) e)))
(defn find-children-and-parents
"Given a graph of 'orphaned' vars, figure out which of these vars have
*something* referring to them (children), and which have nothing referring
to them (parents)."
[g]
(let [edges (reduce into #{} (vals g))
nodes (into #{} (keys g))
children (set/intersection edges nodes)
parents (set/difference nodes children)]
{:children children
:parents parents}))
|
|
368e21b6df4e3f95f834aca7ca9081a0ec4f40f33b16dc2738f4354b9984e9b7 | bhaskara/programmable-reinforcement-learning | create-sets.lisp | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; data-struct/set/create-sets.lisp
;; Ways of creating new sets
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(in-package set)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; composing a function
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defclass <image-set> (<numbered-set>)
((f :initarg :f
:reader f
:type function)
(f-inv :initarg :f-inv
:reader f-inv
:initform nil
:type function)
(s :initarg :s
:reader s
:type [set])))
(defun make-image-set (s f &optional (f-inv nil))
"make-image-set SET FUNCTION &optional (F-INV nil). Returns a new set which is the image of SET under FUNCTION. The new set will not be stored in memory; rather, its members will be computed as needed by applying FUNCTION to elements of SET. F-INV, if provided, must be the inverse of FUNCTION. It is optional, but the operations may be slower if it is not provided."
(make-instance '<image-set> :s s :f f :f-inv f-inv))
(defmethod member? (item (is <image-set>))
(let ((s (s is)))
(aif (f-inv is)
(member? (funcall it item) s)
(do-elements (x s nil)
(when (same x item)
(return t))))))
(defmethod iterator ((is <image-set>))
(let ((iter (iterator (s is)))
(f (f is)))
(lambda ()
(multiple-value-bind (item done?)
(funcall iter)
(if done?
(values nil t)
(values (funcall f item) nil))))))
(defmethod item-number (item (is <image-set>))
(let ((s (s is)))
(aif (f-inv is)
(item-number (funcall (f-inv is) item) (s is))
(progn
(do-elements (x s nil i)
(when (same x item)
(return-from item-number i)))
(error 'item-not-in-set :item item :set is)))))
(defmethod item (num (is <image-set>))
(funcall (f is) (item num (s is))))
(defmethod size ((is <image-set>))
(size (s is)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; recursive closure
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun recursive-closure (init-set successor-fn &key (test #'equalp) (print-progress nil)
(key-fn (constantly 0)) (max-key 0))
"recursive-closure INIT-SET SUCCESSOR-FN &key (TEST #'equalp) (PRINT-PROGRESS nil) (KEY-FN (constantly 0)) (MAX-KEY 0)
Compute the recursive closure of INIT-SET under SUCCESSOR-FN. Returns the set (as an <indexed-set> over a vector).
PRINT-PROGRESS : print a '.' every so-many new elements.
KEY-FN : function that takes in an element and outputs a nonnegative integer. Used when we have prior knowledge about a good hash function for the items.
MAX-KEY : must be provided if KEY-FN is provided. The max value that KEY-FN can take."
(let ((v (make-array 0 :adjustable t :fill-pointer 0))
(hta (hta:make-hta key-fn max-key :test test))
(ind 0))
(do-elements (x init-set)
(vector-push-extend x v)
(hta:set-val x hta t))
(while (< ind (length v))
(let ((x (aref v ind)))
(do-elements (y (funcall successor-fn x))
(unless (hta:get-val y hta)
(vector-push-extend y v)
(hta:set-val y hta t)
(when (and print-progress (= 0 (mod (length v) print-progress)))
(format t ".")))))
(incf ind))
(when print-progress (format t "."))
(indexed-set:make-indexed-set v :key-fn key-fn :max-key max-key)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; powerset of a set
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defclass <powerset> (<set>)
((base-set :initarg :base-set :reader base-set)))
(defun powerset (s)
"powerset S. Return the set of all subsets of S. The powerset is not stored explicitly. The only supported operations are size, member?, clone, and print-object."
(make-instance '<powerset> :base-set s))
(defmethod size ((s <powerset>))
(expt 2 (size (base-set s))))
(defmethod member? (x (s <powerset>))
(subset x (base-set s)))
(defmethod clone (s)
s)
(defmethod print-object ((s <powerset>) str)
(if *print-readably*
(format str "#.(powerset ~W)" (base-set s))
(print-unreadable-object (s str :type t)
(format str "of ~W" (base-set s))))) | null | https://raw.githubusercontent.com/bhaskara/programmable-reinforcement-learning/8afc98116a8f78163b3f86076498d84b3f596217/lisp/data-struct/set/create-sets.lisp | lisp |
data-struct/set/create-sets.lisp
Ways of creating new sets
composing a function
recursive closure
powerset of a set
|
(in-package set)
(defclass <image-set> (<numbered-set>)
((f :initarg :f
:reader f
:type function)
(f-inv :initarg :f-inv
:reader f-inv
:initform nil
:type function)
(s :initarg :s
:reader s
:type [set])))
(defun make-image-set (s f &optional (f-inv nil))
"make-image-set SET FUNCTION &optional (F-INV nil). Returns a new set which is the image of SET under FUNCTION. The new set will not be stored in memory; rather, its members will be computed as needed by applying FUNCTION to elements of SET. F-INV, if provided, must be the inverse of FUNCTION. It is optional, but the operations may be slower if it is not provided."
(make-instance '<image-set> :s s :f f :f-inv f-inv))
(defmethod member? (item (is <image-set>))
(let ((s (s is)))
(aif (f-inv is)
(member? (funcall it item) s)
(do-elements (x s nil)
(when (same x item)
(return t))))))
(defmethod iterator ((is <image-set>))
(let ((iter (iterator (s is)))
(f (f is)))
(lambda ()
(multiple-value-bind (item done?)
(funcall iter)
(if done?
(values nil t)
(values (funcall f item) nil))))))
(defmethod item-number (item (is <image-set>))
(let ((s (s is)))
(aif (f-inv is)
(item-number (funcall (f-inv is) item) (s is))
(progn
(do-elements (x s nil i)
(when (same x item)
(return-from item-number i)))
(error 'item-not-in-set :item item :set is)))))
(defmethod item (num (is <image-set>))
(funcall (f is) (item num (s is))))
(defmethod size ((is <image-set>))
(size (s is)))
(defun recursive-closure (init-set successor-fn &key (test #'equalp) (print-progress nil)
(key-fn (constantly 0)) (max-key 0))
"recursive-closure INIT-SET SUCCESSOR-FN &key (TEST #'equalp) (PRINT-PROGRESS nil) (KEY-FN (constantly 0)) (MAX-KEY 0)
Compute the recursive closure of INIT-SET under SUCCESSOR-FN. Returns the set (as an <indexed-set> over a vector).
PRINT-PROGRESS : print a '.' every so-many new elements.
KEY-FN : function that takes in an element and outputs a nonnegative integer. Used when we have prior knowledge about a good hash function for the items.
MAX-KEY : must be provided if KEY-FN is provided. The max value that KEY-FN can take."
(let ((v (make-array 0 :adjustable t :fill-pointer 0))
(hta (hta:make-hta key-fn max-key :test test))
(ind 0))
(do-elements (x init-set)
(vector-push-extend x v)
(hta:set-val x hta t))
(while (< ind (length v))
(let ((x (aref v ind)))
(do-elements (y (funcall successor-fn x))
(unless (hta:get-val y hta)
(vector-push-extend y v)
(hta:set-val y hta t)
(when (and print-progress (= 0 (mod (length v) print-progress)))
(format t ".")))))
(incf ind))
(when print-progress (format t "."))
(indexed-set:make-indexed-set v :key-fn key-fn :max-key max-key)))
(defclass <powerset> (<set>)
((base-set :initarg :base-set :reader base-set)))
(defun powerset (s)
"powerset S. Return the set of all subsets of S. The powerset is not stored explicitly. The only supported operations are size, member?, clone, and print-object."
(make-instance '<powerset> :base-set s))
(defmethod size ((s <powerset>))
(expt 2 (size (base-set s))))
(defmethod member? (x (s <powerset>))
(subset x (base-set s)))
(defmethod clone (s)
s)
(defmethod print-object ((s <powerset>) str)
(if *print-readably*
(format str "#.(powerset ~W)" (base-set s))
(print-unreadable-object (s str :type t)
(format str "of ~W" (base-set s))))) |
cf1ddd81711627f0fbcf4eef43a86493601d4e8c5c40600caad8669042e8ba1f | dcastro/haskell-flatbuffers | Examples.hs | module Examples
( module Examples.Generated
) where
import Examples.Generated
| null | https://raw.githubusercontent.com/dcastro/haskell-flatbuffers/cea6a75109de109ae906741ee73cbb0f356a8e0d/test/Examples.hs | haskell | module Examples
( module Examples.Generated
) where
import Examples.Generated
|
|
c20f289d5fcacba0ad76e3db52b09856b63951b5bac4d8362843bb52c38f7ffd | ucsd-progsys/nate | m1.ml | let m1_val = 1 | null | https://raw.githubusercontent.com/ucsd-progsys/nate/8b1267cd8b10283d8bc239d16a28c654a4cb8942/eval/sherrloc/easyocaml%2B%2B/easyocaml-additional/easyocaml-sample-lib/lang-levels/lang-foo/m1.ml | ocaml | let m1_val = 1 |
|
f1252969772bc6f01e30f27c2416717007586ba186a0f61a5f56cd280e62e543 | reanimate/reanimate | vis.hs | #!/usr/bin/env stack
-- stack runghc --package reanimate
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Codec.Picture.Types
import Control.Lens ()
import Control.Monad
import Data.Function
import Data.List
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NE
import Data.Maybe
import Data.Ratio
import qualified Data.Text as T
import Data.Tuple
import qualified Data.Vector as V
import Debug.Trace
import Linear.Matrix hiding (trace)
import Linear.Metric
import Linear.V2
import Linear.V3
import Linear.Vector
import Numeric.LinearAlgebra hiding (polar, scale, (<>))
import qualified Numeric.LinearAlgebra as Matrix
import Numeric.LinearAlgebra.HMatrix hiding (polar, scale, (<>))
import Reanimate
import Reanimate.Animation
import Reanimate.Math.Balloon
import Reanimate.Math.Common
import Reanimate.Math.Triangulate
import Reanimate.Math.Polygon
import Reanimate.Math.EarClip
import Reanimate.Math.SSSP
import Reanimate.Math.Render
import Reanimate.Math.Visibility
import Reanimate.Math.Compatible
import Reanimate.Morph.Common
import Reanimate.PolyShape (svgToPolygons)
p :: Polygon
p = mkPolygon $ V.fromList [V2 (0 % 1) (0 % 1),V2 (1 % 1) (0 % 1),V2 (1 % 1) (1 % 1),V2 (2 % 1) (1 % 1),V2 (2 % 1) ((-1) % 1),V2 (3 % 1) ((-1) % 1),V2 (3 % 1) (2 % 1),V2 (0 % 1) (2 % 1)]
pCuts' :: Polygon -> [(Int,Int)]
pCuts' p =
[ (i, j)
| i <- [0 .. pSize p-1 ]
, j <- [i+2 .. pSize p-1 ]
, (j+1) `mod` pSize p /= i
, trace ("Check: " ++ show (i,j, pSize p)) $ pParent p i j == i ]
p : :
p = pScale 6 $ unsafeSVGToPolygon 0.1 $
lowerTransformations $ pathify $ center $ latex " $ 1 $ "
main :: IO ()
main = reanimate $ scene $ do
bg <- newSpriteSVG $ mkBackground "black"
spriteZ bg (-1)
newSpriteSVG_ $ translate 0 1 $ mkGroup
[ withFillColor "grey" $ polygonShape p
, polygonNumDots p
]
forM_ (pCuts p) $ \(l,r) -> do
play $ mkAnimation (1/60) $ \_ -> mkGroup
[ translate (-3) 0 $ withFillColor "grey" $ polygonShape l
, translate (-3) 0 $ polygonNumDots l
, translate 3 0 $ withFillColor "grey" $ polygonShape r
, translate 3 0 $ polygonNumDots r
]
wait 1
fork $ play $ drawTriangulation shape1 earCut '
-- # mapA (translate (-3) 0)
play $ drawTriangulation shape1 earClip '
# mapA ( translate 3 0 )
| null | https://raw.githubusercontent.com/reanimate/reanimate/5ea023980ff7f488934d40593cc5069f5fd038b0/videos/morph/vis.hs | haskell | stack runghc --package reanimate
# LANGUAGE OverloadedStrings #
# mapA (translate (-3) 0) | #!/usr/bin/env stack
module Main where
import Codec.Picture.Types
import Control.Lens ()
import Control.Monad
import Data.Function
import Data.List
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NE
import Data.Maybe
import Data.Ratio
import qualified Data.Text as T
import Data.Tuple
import qualified Data.Vector as V
import Debug.Trace
import Linear.Matrix hiding (trace)
import Linear.Metric
import Linear.V2
import Linear.V3
import Linear.Vector
import Numeric.LinearAlgebra hiding (polar, scale, (<>))
import qualified Numeric.LinearAlgebra as Matrix
import Numeric.LinearAlgebra.HMatrix hiding (polar, scale, (<>))
import Reanimate
import Reanimate.Animation
import Reanimate.Math.Balloon
import Reanimate.Math.Common
import Reanimate.Math.Triangulate
import Reanimate.Math.Polygon
import Reanimate.Math.EarClip
import Reanimate.Math.SSSP
import Reanimate.Math.Render
import Reanimate.Math.Visibility
import Reanimate.Math.Compatible
import Reanimate.Morph.Common
import Reanimate.PolyShape (svgToPolygons)
p :: Polygon
p = mkPolygon $ V.fromList [V2 (0 % 1) (0 % 1),V2 (1 % 1) (0 % 1),V2 (1 % 1) (1 % 1),V2 (2 % 1) (1 % 1),V2 (2 % 1) ((-1) % 1),V2 (3 % 1) ((-1) % 1),V2 (3 % 1) (2 % 1),V2 (0 % 1) (2 % 1)]
pCuts' :: Polygon -> [(Int,Int)]
pCuts' p =
[ (i, j)
| i <- [0 .. pSize p-1 ]
, j <- [i+2 .. pSize p-1 ]
, (j+1) `mod` pSize p /= i
, trace ("Check: " ++ show (i,j, pSize p)) $ pParent p i j == i ]
p : :
p = pScale 6 $ unsafeSVGToPolygon 0.1 $
lowerTransformations $ pathify $ center $ latex " $ 1 $ "
main :: IO ()
main = reanimate $ scene $ do
bg <- newSpriteSVG $ mkBackground "black"
spriteZ bg (-1)
newSpriteSVG_ $ translate 0 1 $ mkGroup
[ withFillColor "grey" $ polygonShape p
, polygonNumDots p
]
forM_ (pCuts p) $ \(l,r) -> do
play $ mkAnimation (1/60) $ \_ -> mkGroup
[ translate (-3) 0 $ withFillColor "grey" $ polygonShape l
, translate (-3) 0 $ polygonNumDots l
, translate 3 0 $ withFillColor "grey" $ polygonShape r
, translate 3 0 $ polygonNumDots r
]
wait 1
fork $ play $ drawTriangulation shape1 earCut '
play $ drawTriangulation shape1 earClip '
# mapA ( translate 3 0 )
|
1cfe13d96c4ad7beae8861c83b5587b3cdf3e26737b69639943ddc4ddb1e0408 | Apress/haskell-quick-syntax-reference | ch2.hs | plus = \ a -> (\ b -> a + b)
plus :: Int -> Int -> Int
factorial = \ n -> if n==1 then 1 else n*factorial(n-1)
factorial :: Int -> Int
plus = \ a b -> a + b
plus :: int -> int -> int
plus a b = a + b
comp m n = (\ a -> m(n a))
comp :: (b -> c) -> (d -> e) -> d -> e
ff = (\ a -> a*a) `comp` (\ a -> a+a)
ff :: Integer -> Integer
mkpair1 :: forall a b. a -> b -> (a,b)
mkpair1 aa bb = (ida aa, bb)
where
ida :: a -> a -- This refers to a in the function's type signature
ida = id
mkpair2 :: forall a b. a -> b -> (a,b)
mkpair2 aa bb = (ida aa, bb)
where
ida :: b -> b -- Illegal, because refers to b in type signature
ida = id
mkpair3 :: a -> b -> (a,b)
mkpair3 aa bb = (ida aa, bb)
where
ida :: b -> b -- Legal, because b is now a free variable
ida = id
is_even =
let {is_even n = n == 0 || n > 0 && is_odd(n-1);
is_odd n = n == 1 || n > 1 && is_even(n-1)}
in is_even
is_even:: Integer -> Bool
is_even' = is_even where
{is_even n = n == 0 || n > 0 && is_odd(n-1);
is_odd n = n == 1 || n > 1 && is_even(n-1)}
| null | https://raw.githubusercontent.com/Apress/haskell-quick-syntax-reference/8bcb2773532de752d6297a91a3aaf49fd92ed03b/ch2.hs | haskell | This refers to a in the function's type signature
Illegal, because refers to b in type signature
Legal, because b is now a free variable | plus = \ a -> (\ b -> a + b)
plus :: Int -> Int -> Int
factorial = \ n -> if n==1 then 1 else n*factorial(n-1)
factorial :: Int -> Int
plus = \ a b -> a + b
plus :: int -> int -> int
plus a b = a + b
comp m n = (\ a -> m(n a))
comp :: (b -> c) -> (d -> e) -> d -> e
ff = (\ a -> a*a) `comp` (\ a -> a+a)
ff :: Integer -> Integer
mkpair1 :: forall a b. a -> b -> (a,b)
mkpair1 aa bb = (ida aa, bb)
where
ida = id
mkpair2 :: forall a b. a -> b -> (a,b)
mkpair2 aa bb = (ida aa, bb)
where
ida = id
mkpair3 :: a -> b -> (a,b)
mkpair3 aa bb = (ida aa, bb)
where
ida = id
is_even =
let {is_even n = n == 0 || n > 0 && is_odd(n-1);
is_odd n = n == 1 || n > 1 && is_even(n-1)}
in is_even
is_even:: Integer -> Bool
is_even' = is_even where
{is_even n = n == 0 || n > 0 && is_odd(n-1);
is_odd n = n == 1 || n > 1 && is_even(n-1)}
|
c88d8d0ea3e3e7504b16b8a79f80f4b5c13257fc4f9780839ded3c70acfc35f9 | stylewarning/deprecated-coalton-prototype | global-environment.lisp | global-environment.lisp
(in-package #:coalton-impl)
Global Value Bindings ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ;
(defstruct entry
"An entry in the global value database."
(declared-type nil :type (or null ty))
(derived-type nil :type (or null ty))
source-form
node)
(define-global-var **global-value-definitions**
(make-hash-table :test 'eql)
"Database of Coalton global value definitions. This is a map from vars (symbols) to ENTRYs.")
(defun var-knownp (var)
"Have we seen VAR?"
(check-type var symbol)
(nth-value 1 (gethash var **global-value-definitions**)))
(defun var-info (var)
"What do we know about the known variable VAR?"
(check-type var symbol)
(multiple-value-bind (val exists?) (gethash var **global-value-definitions**)
(unless exists?
(error "Could not retrieve the type of ~S because it is unknown." var))
val))
(defun var-definedp (var)
"Is the var actually defined (as opposed to just declared)?"
(and (var-knownp var)
(entry-source-form (var-info var))
t))
(defun (setf var-info) (new-value var)
(check-type new-value entry)
(check-type var symbol)
(when (var-knownp var)
(style-warn "Overwriting info entry for ~S" var))
(setf (gethash var **global-value-definitions**) new-value))
(defun forward-declare-variable (var &optional (declared-type nil declaredp))
(check-type var symbol)
(check-type declared-type (or ty null))
(when (var-knownp var)
(error "Can't forward declare ~S, which is already known." var))
(setf (gethash var **global-value-definitions**)
(make-entry))
(when declaredp
(setf (var-declared-type var) declared-type))
var)
(defun var-declared-type (var)
(let ((info (var-info var)))
(entry-declared-type info)))
(defun (setf var-declared-type) (new-value var)
(check-type new-value ty)
(let ((info (var-info var)))
(alexandria:when-let ((existing-declared-type (entry-declared-type info)))
(when (type= existing-declared-type new-value)
(return-from var-declared-type var))
(style-warn "Overwriting declared type of ~S from ~A to ~A"
var
(unparse-type existing-declared-type)
(unparse-type new-value)))
(alexandria:when-let ((derived (var-derived-type var)))
(unless (more-or-equally-specific-type-p derived new-value)
(error "Cannot declare ~S as ~S because that is ~
inconsistent with its derived type ~S."
var
(unparse-type new-value)
(unparse-type derived))))
(setf (entry-declared-type info) new-value)))
(defun var-derived-type (var)
(let ((info (var-info var)))
(entry-derived-type info)))
(defun (setf var-derived-type) (new-value var)
(check-type new-value ty)
(let ((info (var-info var)))
(alexandria:when-let ((existing-derived-type (entry-derived-type info)))
(when (type= existing-derived-type new-value)
(return-from var-derived-type var))
(style-warn "Overwriting derived type of ~S from ~A to ~A"
var
(unparse-type existing-derived-type)
(unparse-type new-value)))
(alexandria:when-let ((declared (var-declared-type var)))
(unless (more-or-equally-specific-type-p new-value declared)
(error "The derived type of ~S, which is ~S, is incompatible ~
with its previously declared type ~S."
var
(unparse-type new-value)
(unparse-type declared))))
(setf (entry-derived-type info) new-value)))
;;;;;;;;;;;;;;;;;;;;;; Global Type Definitions ;;;;;;;;;;;;;;;;;;;;;;;
See
| null | https://raw.githubusercontent.com/stylewarning/deprecated-coalton-prototype/4a42ffb4222fde3abfd1b50d96e455ff2eef9fe8/src/global-environment.lisp | lisp | ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ; ;
Global Type Definitions ;;;;;;;;;;;;;;;;;;;;;;; | global-environment.lisp
(in-package #:coalton-impl)
(defstruct entry
"An entry in the global value database."
(declared-type nil :type (or null ty))
(derived-type nil :type (or null ty))
source-form
node)
(define-global-var **global-value-definitions**
(make-hash-table :test 'eql)
"Database of Coalton global value definitions. This is a map from vars (symbols) to ENTRYs.")
(defun var-knownp (var)
"Have we seen VAR?"
(check-type var symbol)
(nth-value 1 (gethash var **global-value-definitions**)))
(defun var-info (var)
"What do we know about the known variable VAR?"
(check-type var symbol)
(multiple-value-bind (val exists?) (gethash var **global-value-definitions**)
(unless exists?
(error "Could not retrieve the type of ~S because it is unknown." var))
val))
(defun var-definedp (var)
"Is the var actually defined (as opposed to just declared)?"
(and (var-knownp var)
(entry-source-form (var-info var))
t))
(defun (setf var-info) (new-value var)
(check-type new-value entry)
(check-type var symbol)
(when (var-knownp var)
(style-warn "Overwriting info entry for ~S" var))
(setf (gethash var **global-value-definitions**) new-value))
(defun forward-declare-variable (var &optional (declared-type nil declaredp))
(check-type var symbol)
(check-type declared-type (or ty null))
(when (var-knownp var)
(error "Can't forward declare ~S, which is already known." var))
(setf (gethash var **global-value-definitions**)
(make-entry))
(when declaredp
(setf (var-declared-type var) declared-type))
var)
(defun var-declared-type (var)
(let ((info (var-info var)))
(entry-declared-type info)))
(defun (setf var-declared-type) (new-value var)
(check-type new-value ty)
(let ((info (var-info var)))
(alexandria:when-let ((existing-declared-type (entry-declared-type info)))
(when (type= existing-declared-type new-value)
(return-from var-declared-type var))
(style-warn "Overwriting declared type of ~S from ~A to ~A"
var
(unparse-type existing-declared-type)
(unparse-type new-value)))
(alexandria:when-let ((derived (var-derived-type var)))
(unless (more-or-equally-specific-type-p derived new-value)
(error "Cannot declare ~S as ~S because that is ~
inconsistent with its derived type ~S."
var
(unparse-type new-value)
(unparse-type derived))))
(setf (entry-declared-type info) new-value)))
(defun var-derived-type (var)
(let ((info (var-info var)))
(entry-derived-type info)))
(defun (setf var-derived-type) (new-value var)
(check-type new-value ty)
(let ((info (var-info var)))
(alexandria:when-let ((existing-derived-type (entry-derived-type info)))
(when (type= existing-derived-type new-value)
(return-from var-derived-type var))
(style-warn "Overwriting derived type of ~S from ~A to ~A"
var
(unparse-type existing-derived-type)
(unparse-type new-value)))
(alexandria:when-let ((declared (var-declared-type var)))
(unless (more-or-equally-specific-type-p new-value declared)
(error "The derived type of ~S, which is ~S, is incompatible ~
with its previously declared type ~S."
var
(unparse-type new-value)
(unparse-type declared))))
(setf (entry-derived-type info) new-value)))
See
|
46275f913f12b648d0dcce8429a55bab7b541bcfcdbbc6e0bc7dabf0edfe00b4 | ryanpbrewster/haskell | polynomial.hs | {-# LANGUAGE GADTs #-}
import Data.List (intercalate)
import qualified Data.IntMap as M
import Debug.Trace
data Polynomial where
Polynomial :: M.IntMap Int -> Polynomial
instance Show Polynomial where
show (Polynomial coeffMap)
| M.null coeffMap = "0"
| otherwise = intercalate " + " (map showCoeffPair $ M.toList coeffMap)
showCoeffPair :: (Int, Int) -> String
showCoeffPair (0, c) = show c
showCoeffPair (1, c) = (if c == 1 then "" else show c) ++ "x"
showCoeffPair (n, c) = (if c == 1 then "" else show c) ++ "x^" ++ show n
(Polynomial as) `multiply` (Polynomial bs) =
Polynomial $ M.fromListWith (+) [ (na + nb, ca*cb) | (na, ca) <- M.toList as, (nb, cb) <- M.toList bs ]
pnull (Polynomial ps) = M.null ps
empty = Polynomial (M.empty)
singleton n = Polynomial (M.singleton n 1)
scale (Polynomial ps) c = Polynomial $ M.map (*c) ps
(Polynomial as) `plus` (Polynomial bs) = Polynomial $ M.filter (>0) $ M.unionWith (+) as bs
(Polynomial as) `minus` (Polynomial bs) = Polynomial $ M.filter (>0) $ M.unionWith (-) as bs
a@(Polynomial as) `divmod` b@(Polynomial bs)
| pnull a = (a, empty)
| otherwise =
let
((na, ca), (nb, cb)) = (M.findMax as, M.findMax bs)
(n, c) = (na - nb, ca `div` cb)
p = scale (singleton n) c
a' = a `minus` (b `multiply` p)
b' = b `minus` p
in traceShow (a, b, p, a', b') $ if n == 0 then p else p `plus` (a' `divide` b')
| null | https://raw.githubusercontent.com/ryanpbrewster/haskell/6edd0afe234008a48b4871032dedfd143ca6e412/hello-world/polynomial.hs | haskell | # LANGUAGE GADTs # | import Data.List (intercalate)
import qualified Data.IntMap as M
import Debug.Trace
data Polynomial where
Polynomial :: M.IntMap Int -> Polynomial
instance Show Polynomial where
show (Polynomial coeffMap)
| M.null coeffMap = "0"
| otherwise = intercalate " + " (map showCoeffPair $ M.toList coeffMap)
showCoeffPair :: (Int, Int) -> String
showCoeffPair (0, c) = show c
showCoeffPair (1, c) = (if c == 1 then "" else show c) ++ "x"
showCoeffPair (n, c) = (if c == 1 then "" else show c) ++ "x^" ++ show n
(Polynomial as) `multiply` (Polynomial bs) =
Polynomial $ M.fromListWith (+) [ (na + nb, ca*cb) | (na, ca) <- M.toList as, (nb, cb) <- M.toList bs ]
pnull (Polynomial ps) = M.null ps
empty = Polynomial (M.empty)
singleton n = Polynomial (M.singleton n 1)
scale (Polynomial ps) c = Polynomial $ M.map (*c) ps
(Polynomial as) `plus` (Polynomial bs) = Polynomial $ M.filter (>0) $ M.unionWith (+) as bs
(Polynomial as) `minus` (Polynomial bs) = Polynomial $ M.filter (>0) $ M.unionWith (-) as bs
a@(Polynomial as) `divmod` b@(Polynomial bs)
| pnull a = (a, empty)
| otherwise =
let
((na, ca), (nb, cb)) = (M.findMax as, M.findMax bs)
(n, c) = (na - nb, ca `div` cb)
p = scale (singleton n) c
a' = a `minus` (b `multiply` p)
b' = b `minus` p
in traceShow (a, b, p, a', b') $ if n == 0 then p else p `plus` (a' `divide` b')
|
0539bdbb07f115f86c7dd628a1a055880dd8ae8a339122df0e7d33fd3a742156 | orbitz/web_typed | exception.ml | name : exception.ml
* synopsis : exception monad
* author :
* last revision : Thu Nov 13 09:51:57 UTC 2008
* ocaml version : 3.11
*
* Copyright ( C ) 2006 - 2008 , , O. Kiselyov
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation ; either
* version 2 of the License , or ( at your option ) any later version .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Library General Public License for more details .
*
* You should have received a copy of the GNU Library General Public
* License along with this library ; if not , write to the Free
* Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
* synopsis: exception monad
* author: Lydia E. van Dijk
* last revision: Thu Nov 13 09:51:57 UTC 2008
* ocaml version: 3.11
*
* Copyright (C) 2006-2008 J. Carette, L. E. van Dijk, O. Kiselyov
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the Free
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
(** The values [('left, 'right) t] of generated from this module
represent (mutually exclusive) alternatives. Following the
Haskell folklore we put the correct or "right" result of a
computation into [Right] components and incorrect, or
exceptional values into [Left] components. *)
module Either =
struct
* alternatives
type ('left, 'right) t = Left of 'left | Right of 'right
(** [either f g x]
Apply [f] to [x] for [Left x] and [g] to [x] for [Right x]. *)
let either f g = function
Left x -> f x
| Right y -> g y
end
type ('left, 'right) t = ('left, 'right) Either.t
let bind an_exception_monad a_function =
match an_exception_monad with
Either.Right value -> a_function value
| Either.Left _ as error -> error
let return a_value = Either.Right a_value
let throw an_error = Either.Left an_error
let catch an_exception_monad an_error_handler =
(function
Either.Right _ as value -> value
| Either.Left error -> an_error_handler error)
an_exception_monad
let run a_failure_function a_success_function =
Either.either a_failure_function a_success_function
| null | https://raw.githubusercontent.com/orbitz/web_typed/e224c1be6a2d4fd0013ff9cdb27075c145a4b77e/libs/pa_monad/exception.ml | ocaml | * The values [('left, 'right) t] of generated from this module
represent (mutually exclusive) alternatives. Following the
Haskell folklore we put the correct or "right" result of a
computation into [Right] components and incorrect, or
exceptional values into [Left] components.
* [either f g x]
Apply [f] to [x] for [Left x] and [g] to [x] for [Right x]. | name : exception.ml
* synopsis : exception monad
* author :
* last revision : Thu Nov 13 09:51:57 UTC 2008
* ocaml version : 3.11
*
* Copyright ( C ) 2006 - 2008 , , O. Kiselyov
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation ; either
* version 2 of the License , or ( at your option ) any later version .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Library General Public License for more details .
*
* You should have received a copy of the GNU Library General Public
* License along with this library ; if not , write to the Free
* Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
* synopsis: exception monad
* author: Lydia E. van Dijk
* last revision: Thu Nov 13 09:51:57 UTC 2008
* ocaml version: 3.11
*
* Copyright (C) 2006-2008 J. Carette, L. E. van Dijk, O. Kiselyov
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the Free
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
module Either =
struct
* alternatives
type ('left, 'right) t = Left of 'left | Right of 'right
let either f g = function
Left x -> f x
| Right y -> g y
end
type ('left, 'right) t = ('left, 'right) Either.t
let bind an_exception_monad a_function =
match an_exception_monad with
Either.Right value -> a_function value
| Either.Left _ as error -> error
let return a_value = Either.Right a_value
let throw an_error = Either.Left an_error
let catch an_exception_monad an_error_handler =
(function
Either.Right _ as value -> value
| Either.Left error -> an_error_handler error)
an_exception_monad
let run a_failure_function a_success_function =
Either.either a_failure_function a_success_function
|
b82e993eb49f088c6a3a04c6ff4ad8bb4f89de1c13a6e89b1a7e733b2e686e7d | samrushing/irken-compiler | t_string_split.scm | ;; -*- Mode: Irken -*-
(include "lib/core.scm")
(include "lib/pair.scm")
(include "lib/string.scm")
(include "lib/format.scm")
(printn (string-split "quick brown fox" #\space))
(printn (string-split "%%vcon/list/cons" #\/))
(printn (string-split "thing" #\/))
(printn (string-split "" #\a))
(printn (string-split "..." #\.))
(printn (string-split "%0 %% %1" #\%))
| null | https://raw.githubusercontent.com/samrushing/irken-compiler/690da48852d55497f873738df54f14e8e135d006/tests/t_string_split.scm | scheme | -*- Mode: Irken -*- |
(include "lib/core.scm")
(include "lib/pair.scm")
(include "lib/string.scm")
(include "lib/format.scm")
(printn (string-split "quick brown fox" #\space))
(printn (string-split "%%vcon/list/cons" #\/))
(printn (string-split "thing" #\/))
(printn (string-split "" #\a))
(printn (string-split "..." #\.))
(printn (string-split "%0 %% %1" #\%))
|
99bc353da03c43204cc9950e8f131a4438a87d02bb9d90628dbfbc67390a1918 | vlstill/hsExprTest | Solution.hs | # LANGUAGE Unsafe #
module Solution where
{-# LINE 1 "Solution.hs" #-}
foo = id
| null | https://raw.githubusercontent.com/vlstill/hsExprTest/0c7754979cf837d48f5740674639e2decb96e547/examples/Solution.hs | haskell | # LINE 1 "Solution.hs" # | # LANGUAGE Unsafe #
module Solution where
foo = id
|
7e6aa63c880c5416210c2a1a6622233ef4e5f11ed62bf6b5071fcd2fae53b9c6 | BitGameEN/bitgamex | websocket_client.erl | @author
@doc Erlang websocket client
-module(websocket_client).
-export([start_link/3,
start_link/4,
cast/2,
send/2
]).
-export([ws_client_init/7]).
-type opt() :: {async_start, boolean()}
| {extra_headers, [{string() | binary(), string() | binary()}]}
.
-type opts() :: [opt()].
%% @doc Start the websocket client
-spec start_link(URL :: string() | binary(), Handler :: module(), HandlerArgs :: list()) ->
{ok, pid()} | {error, term()}.
start_link(URL, Handler, HandlerArgs) ->
start_link(URL, Handler, HandlerArgs, []).
start_link(URL, Handler, HandlerArgs, AsyncStart) when is_boolean(AsyncStart) ->
start_link(URL, Handler, HandlerArgs, [{async_start, AsyncStart}]);
start_link(URL, Handler, HandlerArgs, Opts) when is_binary(URL) ->
start_link(erlang:binary_to_list(URL), Handler, HandlerArgs, Opts);
start_link(URL, Handler, HandlerArgs, Opts) when is_list(Opts) ->
case http_uri:parse(URL, [{scheme_defaults, [{ws,80},{wss,443}]}]) of
{ok, {Protocol, _, Host, Port, Path, Query}} ->
proc_lib:start_link(?MODULE, ws_client_init,
[Handler, Protocol, Host, Port, Path ++ Query, HandlerArgs, Opts]);
{error, _} = Error ->
Error
end.
%% Send a frame asynchronously
-spec cast(Client :: pid(), Frame :: websocket_req:frame()) ->
ok.
cast(Client, Frame) ->
Client ! {cast, Frame},
ok.
%% @doc Create socket, execute handshake, and enter loop
-spec ws_client_init(Handler :: module(), Protocol :: websocket_req:protocol(),
Host :: string(), Port :: inet:port_number(), Path :: string(),
Args :: list(), Opts :: opts()) ->
no_return().
ws_client_init(Handler, Protocol, Host, Port, Path, Args, Opts) ->
Transport = case Protocol of
wss ->
ssl;
ws ->
gen_tcp
end,
SockReply = case Transport of
ssl ->
ssl:connect(Host, Port,
[{mode, binary},
{verify, verify_none},
{active, false},
{packet, 0}
], 6000);
gen_tcp ->
gen_tcp:connect(Host, Port,
[binary,
{active, false},
{packet, 0}
], 6000)
end,
{ok, Socket} = case SockReply of
{ok, Sock} -> {ok, Sock};
{error, _} = ConnectError ->
proc_lib:init_ack(ConnectError),
exit(normal)
end,
WSReq = websocket_req:new(
Protocol,
Host,
Port,
Path,
Socket,
Transport,
Handler,
generate_ws_key()
),
ExtraHeaders = proplists:get_value(extra_headers, Opts, []),
case websocket_handshake(WSReq, ExtraHeaders) of
{error, _} = HandshakeError ->
proc_lib:init_ack(HandshakeError),
exit(normal);
{ok, Buffer} ->
AsyncStart = proplists:get_value(async_start, Opts, true),
AsyncStart andalso proc_lib:init_ack({ok, self()}),
{ok, HandlerState, KeepAlive} = case Handler:init(Args, WSReq) of
{ok, HS} ->
{ok, HS, infinity};
{ok, HS, KA} ->
{ok, HS, KA}
end,
AsyncStart orelse proc_lib:init_ack({ok, self()}),
case Socket of
{sslsocket, _, _} ->
ssl:setopts(Socket, [{active, true}]);
_ ->
inet:setopts(Socket, [{active, true}])
end,
%% Since we could have already received some data already, we simulate a Socket message.
case Buffer of
<<>> -> ok;
_ -> self() ! {Transport, Socket, Buffer}
end,
KATimer = case KeepAlive of
infinity ->
undefined;
_ ->
erlang:send_after(KeepAlive, self(), keepalive)
end,
websocket_loop(websocket_req:set([{keepalive,KeepAlive},{keepalive_timer,KATimer}], WSReq), HandlerState, <<>>)
end.
%% @doc Send http upgrade request and validate handshake response challenge
-spec websocket_handshake(WSReq :: websocket_req:req(), [{string(), string()}]) -> {ok, binary()} | {error, term()}.
websocket_handshake(WSReq, ExtraHeaders) ->
[Path, Host, Key, Transport, Socket] =
websocket_req:get([path, host, key, transport, socket], WSReq),
Handshake = ["GET ", Path, " HTTP/1.1\r\n"
"Host: ", Host, "\r\n"
"Connection: Upgrade\r\n"
"Sec-WebSocket-Version: 13\r\n"
"Sec-WebSocket-Key: ", Key, "\r\n"
"Upgrade: websocket\r\n",
[ [Header, ": ", Value, "\r\n"] || {Header, Value} <- ExtraHeaders],
"\r\n"],
Transport:send(Socket, Handshake),
{ok, HandshakeResponse} = receive_handshake(<<>>, Transport, Socket),
validate_handshake(HandshakeResponse, Key).
%% @doc Blocks and waits until handshake response data is received
-spec receive_handshake(Buffer :: binary(),
Transport :: module(),
Socket :: term()) ->
{ok, binary()}.
receive_handshake(Buffer, Transport, Socket) ->
case re:run(Buffer, "\\r\\n\\r\\n") of
{match, _} ->
{ok, Buffer};
_ ->
{ok, Data} = Transport:recv(Socket, 0, 6000),
receive_handshake(<< Buffer/binary, Data/binary >>,
Transport, Socket)
end.
%% @doc Send frame to server
-spec send(websocket_req:frame(), websocket_req:req()) -> ok | {error, term()}.
send(Frame, WSReq) ->
Socket = websocket_req:socket(WSReq),
Transport = websocket_req:transport(WSReq),
Transport:send(Socket, encode_frame(Frame)).
%% @doc Main loop
-spec websocket_loop(WSReq :: websocket_req:req(), HandlerState :: any(),
Buffer :: binary()) ->
ok.
websocket_loop(WSReq, HandlerState, Buffer) ->
receive
Message -> handle_websocket_message(WSReq, HandlerState, Buffer, Message)
end.
handle_websocket_message(WSReq, HandlerState, Buffer, Message) ->
[Handler, Remaining, Socket] =
websocket_req:get([handler, remaining, socket], WSReq),
case Message of
keepalive ->
cancel_keepalive_timer(WSReq),
ok = send({ping, <<>>}, WSReq),
KATimer = erlang:send_after(websocket_req:keepalive(WSReq), self(), keepalive),
websocket_loop(websocket_req:keepalive_timer(KATimer, WSReq), HandlerState, Buffer);
{cast, Frame} ->
ok = send(Frame, WSReq),
websocket_loop(WSReq, HandlerState, Buffer);
{_Closed, Socket} ->
websocket_close(WSReq, HandlerState, remote);
{_TransportType, Socket, Data} ->
case Remaining of
undefined ->
retrieve_frame(WSReq, HandlerState,
<< Buffer/binary, Data/binary >>);
_ ->
retrieve_frame(WSReq, HandlerState,
websocket_req:opcode(WSReq), Remaining, Data, Buffer)
end;
Msg ->
try Handler:websocket_info(Msg, WSReq, HandlerState) of
HandlerResponse ->
handle_response(WSReq, HandlerResponse, Buffer)
catch
_:Reason ->
websocket_close(WSReq, HandlerState, {handler, Reason})
end
end.
-spec cancel_keepalive_timer(websocket_req:req()) -> ok.
cancel_keepalive_timer(WSReq) ->
case websocket_req:keepalive_timer(WSReq) of
undefined ->
ok;
OldTimer ->
erlang:cancel_timer(OldTimer),
ok
end.
-spec websocket_close(WSReq :: websocket_req:req(),
HandlerState :: any(),
Reason :: tuple()) -> ok.
websocket_close(WSReq, HandlerState, Reason) ->
Handler = websocket_req:handler(WSReq),
try Handler:websocket_terminate(Reason, WSReq, HandlerState) of
_ ->
case Reason of
normal -> ok;
_ -> error_info(Handler, Reason, HandlerState)
end,
exit(Reason)
catch
_:Reason2 ->
error_info(Handler, Reason2, HandlerState),
exit(Reason2)
end.
error_info(Handler, Reason, State) ->
error_logger:error_msg(
"** Websocket handler ~p terminating~n"
"** for the reason ~p~n"
"** Handler state was ~p~n"
"** Stacktrace: ~p~n~n",
[Handler, Reason, State, erlang:get_stacktrace()]).
%% @doc Key sent in initial handshake
-spec generate_ws_key() ->
binary().
generate_ws_key() ->
base64:encode(crypto:strong_rand_bytes(16)).
%% @doc Validate handshake response challenge.
%% Checks that the server answered 101 and echoed the expected
%% Sec-WebSocket-Accept value derived from our key.
%% (Fix: the @doc line and the "101" note below had lost their %%
%% comment delimiters, which made the file unparsable.)
-spec validate_handshake(HandshakeResponse :: binary(), Key :: binary()) -> {ok, binary()} | {error, term()}.
validate_handshake(HandshakeResponse, Key) ->
  %% Expected accept value: SHA-1 of Key ++ fixed RFC 6455 GUID, base64-encoded.
  Challenge = base64:encode(
                crypto:hash(sha, << Key/binary, "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" >>)),
  %% Consume the response...
  {ok, Status, Header, Buffer} = consume_response(HandshakeResponse),
  {_Version, Code, Message} = Status,
  case Code of
    %% 101 means Switching Protocols
    101 ->
      %% ...and make sure the challenge is valid (assertive match).
      Challenge = proplists:get_value(<<"Sec-Websocket-Accept">>, Header),
      {ok, Buffer};
    _ -> {error, {Code, Message}}
  end.
%% @doc Consumes the HTTP response and extracts status, header and the body.
%% Relies on the caller having buffered at least up to the blank line
%% that ends the header block (see receive_handshake), so decode_packet
%% is not expected to return {more, _} here.
consume_response(Response) ->
  {ok, {http_response, Version, Code, Message}, Header} = erlang:decode_packet(http_bin, Response, []),
  consume_response({Version, Code, Message}, Header, []).

%% Accumulate {Field, Value} header pairs until end-of-headers, then
%% return the status line, the header list (in reverse receive order,
%% since pairs are consed onto the accumulator) and the leftover bytes.
consume_response(Status, Response, HeaderAcc) ->
  case erlang:decode_packet(httph_bin, Response, []) of
    {ok, {http_header, _Length, Field, _Reserved, Value}, Rest} ->
      consume_response(Status, Rest, [{Field, Value} | HeaderAcc]);
    {ok, http_eoh, Body} ->
      {ok, Status, HeaderAcc, Body}
  end.
%% Frame-header parsing (retrieve_frame/3): each clause matches one
%% combination of FIN bit and payload-length encoding from RFC 6455
%% section 5.2, then delegates to retrieve_frame/6 with the decoded
%% opcode and length.
%% (Fix: the six @doc lines below had lost their %% comment delimiters,
%% which made the file unparsable.)
%%
%% @doc Start or continue continuation payload with length less than 126 bytes
retrieve_frame(WSReq, HandlerWSReq,
               << 0:4, Opcode:4, 0:1, Len:7, Rest/bits >>)
  when Len < 126 ->
    WSReq1 = set_continuation_if_empty(WSReq, Opcode),
    WSReq2 = websocket_req:fin(0, WSReq1),
    retrieve_frame(WSReq2, HandlerWSReq, Opcode, Len, Rest, <<>>);
%% @doc Start or continue continuation payload with length a 2 byte int
retrieve_frame(WSReq, HandlerWSReq,
               << 0:4, Opcode:4, 0:1, 126:7, Len:16, Rest/bits >>)
  when Len > 125, Opcode < 8 ->
    WSReq1 = set_continuation_if_empty(WSReq, Opcode),
    WSReq2 = websocket_req:fin(0, WSReq1),
    retrieve_frame(WSReq2, HandlerWSReq, Opcode, Len, Rest, <<>>);
%% @doc Start or continue continuation payload with length a 64 bit int
retrieve_frame(WSReq, HandlerWSReq,
               << 0:4, Opcode:4, 0:1, 127:7, 0:1, Len:63, Rest/bits >>)
  when Len > 16#ffff, Opcode < 8 ->
    WSReq1 = set_continuation_if_empty(WSReq, Opcode),
    WSReq2 = websocket_req:fin(0, WSReq1),
    retrieve_frame(WSReq2, HandlerWSReq, Opcode, Len, Rest, <<>>);
%% @doc Length is less 126 bytes
retrieve_frame(WSReq, HandlerWSReq,
               << 1:1, 0:3, Opcode:4, 0:1, Len:7, Rest/bits >>)
  when Len < 126 ->
    WSReq1 = websocket_req:fin(1, WSReq),
    retrieve_frame(WSReq1, HandlerWSReq, Opcode, Len, Rest, <<>>);
%% @doc Length is a 2 byte integer
retrieve_frame(WSReq, HandlerWSReq,
               << 1:1, 0:3, Opcode:4, 0:1, 126:7, Len:16, Rest/bits >>)
  when Len > 125, Opcode < 8 ->
    WSReq1 = websocket_req:fin(1, WSReq),
    retrieve_frame(WSReq1, HandlerWSReq, Opcode, Len, Rest, <<>>);
%% @doc Length is a 64 bit integer
retrieve_frame(WSReq, HandlerWSReq,
               << 1:1, 0:3, Opcode:4, 0:1, 127:7, 0:1, Len:63, Rest/bits >>)
  when Len > 16#ffff, Opcode < 8 ->
    WSReq1 = websocket_req:fin(1, WSReq),
    retrieve_frame(WSReq1, HandlerWSReq, Opcode, Len, Rest, <<>>);
%% @doc Need more data to read length properly
retrieve_frame(WSReq, HandlerWSReq, Data) ->
    websocket_loop(WSReq, HandlerWSReq, Data).
%% @doc Length known and still missing data
retrieve_frame(WSReq, HandlerWSReq, Opcode, Len, Data, Buffer)
  when byte_size(Data) < Len ->
    Remaining = Len - byte_size(Data),
    WSReq1 = websocket_req:remaining(Remaining, WSReq),
    WSReq2 = websocket_req:opcode(Opcode, WSReq1),
    websocket_loop(WSReq2, HandlerWSReq, << Buffer/bits, Data/bits >>);
%% @doc Length known and remaining data is appended to the buffer.
%% (Fix: the close-code comment lines inside the case below had lost
%% their leading % delimiters, which made the file unparsable.)
retrieve_frame(WSReq, HandlerState, Opcode, Len, Data, Buffer) ->
    [Handler, Continuation, ContinuationOpcode] =
        websocket_req:get([handler, continuation, continuation_opcode], WSReq),
    Fin = websocket_req:fin(WSReq),
    << Payload:Len/binary, Rest/bits >> = Data,
    FullPayload = << Buffer/binary, Payload/binary >>,
    OpcodeName = websocket_req:opcode_to_name(Opcode),
    case OpcodeName of
        ping ->
            %% If a ping is received, send a pong automatically
            ok = send({pong, FullPayload}, WSReq);
        _ ->
            ok
    end,
    case OpcodeName of
        close when byte_size(FullPayload) >= 2 ->
            << CodeBin:2/binary, _ClosePayload/binary >> = FullPayload,
            Code = binary:decode_unsigned(CodeBin),
            Reason = case Code of
                         % 1000 indicates a normal closure, meaning that the purpose for
                         % which the connection was established has been fulfilled.
                         1000 -> normal;
                         % 1001 indicates that an endpoint is "going away", such as a server
                         % going down or a browser having navigated away from a page.
                         1001 -> normal;
                         % See RFC 6455 section 7.4.1
                         % for error code descriptions.
                         _ -> {remote, Code}
                     end,
            websocket_close(WSReq, HandlerState, Reason);
        close ->
            websocket_close(WSReq, HandlerState, remote);
        %% Non-control continuation frame
        _ when Opcode < 8, Continuation =/= undefined, Fin == 0 ->
            %% Append to previously existing continuation payloads and continue
            Continuation1 = << Continuation/binary, FullPayload/binary >>,
            WSReq1 = websocket_req:continuation(Continuation1, WSReq),
            retrieve_frame(WSReq1, HandlerState, Rest);
        %% Terminate continuation frame sequence with non-control frame
        _ when Opcode < 8, Continuation =/= undefined, Fin == 1 ->
            DefragPayload = << Continuation/binary, FullPayload/binary >>,
            WSReq1 = websocket_req:continuation(undefined, WSReq),
            WSReq2 = websocket_req:continuation_opcode(undefined, WSReq1),
            ContinuationOpcodeName = websocket_req:opcode_to_name(ContinuationOpcode),
            try Handler:websocket_handle(
                  {ContinuationOpcodeName, DefragPayload},
                  WSReq2, HandlerState) of
                HandlerResponse ->
                    %% NOTE(review): WSReq1 (not WSReq2) is used here, so the
                    %% continuation_opcode reset is dropped for subsequent
                    %% frames.  Kept as-is to preserve behavior -- confirm.
                    handle_response(websocket_req:remaining(undefined, WSReq1),
                                    HandlerResponse, Rest)
            catch _:Reason ->
                    websocket_close(WSReq, HandlerState, {handler, Reason})
            end;
        _ ->
            try Handler:websocket_handle(
                  {OpcodeName, FullPayload},
                  WSReq, HandlerState) of
                HandlerResponse ->
                    handle_response(websocket_req:remaining(undefined, WSReq),
                                    HandlerResponse, Rest)
            catch _:Reason ->
                    websocket_close(WSReq, HandlerState, {handler, Reason})
            end
    end.
%% @doc Handles return values from the callback module.
%% {reply, Frame, State}: send Frame, then resume the loop; a transport
%% error closes the connection with a {local, _} reason.
handle_response(WSReq, {reply, Frame, HandlerState}, Buffer) ->
  [Socket, Transport] = websocket_req:get([socket, transport], WSReq),
  case Transport:send(Socket, encode_frame(Frame)) of
    ok ->
      %% We can still have more messages in the buffer.
      case websocket_req:remaining(WSReq) of
        %% remaining == undefined: no partial frame, safe to parse the buffer.
        undefined ->
          retrieve_frame(WSReq, HandlerState, Buffer);
        %% A partial frame is pending; its bytes must not be re-parsed
        %% as a new frame header.
        _ ->
          websocket_loop(WSReq, HandlerState, Buffer)
      end;
    {error, Reason} ->
      websocket_close(WSReq, HandlerState, {local, Reason})
  end;
%% {ok, State}: nothing to send; same buffer-resumption logic as above.
handle_response(WSReq, {ok, HandlerState}, Buffer) ->
  %% We can still have more messages in the buffer.
  case websocket_req:remaining(WSReq) of
    undefined -> retrieve_frame(WSReq, HandlerState, Buffer);
    _ -> websocket_loop(WSReq, HandlerState, Buffer)
  end;
%% {close, Payload, State}: send a close frame (best effort; the result
%% of send is deliberately ignored) and terminate normally.
handle_response(WSReq, {close, Payload, HandlerState}, _) ->
  send({close, Payload}, WSReq),
  websocket_close(WSReq, HandlerState, normal).
%% @doc Encodes the data with a header (including a masking key) and
%% masks the data.  Client-to-server frames are always masked per
%% RFC 6455 section 5.3.
%% (Fix: the @doc line had lost its %% comment delimiter, which made
%% the file unparsable.)
%% NOTE(review): Len is computed with iolist_size/1, but mask_payload/2
%% pattern-matches on a binary -- a non-binary iolist payload would
%% crash.  Kept as-is to preserve behavior; confirm callers only pass
%% binaries.
-spec encode_frame(websocket_req:frame()) ->
                          binary().
encode_frame({Type, Payload}) ->
  Opcode = websocket_req:name_to_opcode(Type),
  Len = iolist_size(Payload),
  BinLen = payload_length_to_binary(Len),
  MaskingKeyBin = crypto:strong_rand_bytes(4),
  << MaskingKey:32 >> = MaskingKeyBin,
  Header = << 1:1, 0:3, Opcode:4, 1:1, BinLen/bits, MaskingKeyBin/bits >>,
  MaskedPayload = mask_payload(MaskingKey, Payload),
  << Header/binary, MaskedPayload/binary >>;
%% A bare atom is shorthand for that frame type with an empty payload.
encode_frame(Type) when is_atom(Type) ->
  encode_frame({Type, <<>>}).
%% @doc The payload is masked using a masking key byte by byte.
%% Can do it in 4 byte chunks to save time until there is less than 4 bytes left.
%% (Fix: the second comment line above had lost its %% delimiter, which
%% made the file unparsable.)
mask_payload(MaskingKey, Payload) ->
  mask_payload(MaskingKey, Payload, <<>>).

%% XOR the payload against the 32-bit key, word at a time; the trailing
%% 1-3 bytes are XORed against the high-order bytes of the key.
mask_payload(_, <<>>, Acc) ->
  Acc;
mask_payload(MaskingKey, << D:32, Rest/bits >>, Acc) ->
  T = D bxor MaskingKey,
  mask_payload(MaskingKey, Rest, << Acc/binary, T:32 >>);
mask_payload(MaskingKey, << D:24 >>, Acc) ->
  << MaskingKeyPart:24, _:8 >> = << MaskingKey:32 >>,
  T = D bxor MaskingKeyPart,
  << Acc/binary, T:24 >>;
mask_payload(MaskingKey, << D:16 >>, Acc) ->
  << MaskingKeyPart:16, _:16 >> = << MaskingKey:32 >>,
  T = D bxor MaskingKeyPart,
  << Acc/binary, T:16 >>;
mask_payload(MaskingKey, << D:8 >>, Acc) ->
  << MaskingKeyPart:8, _:24 >> = << MaskingKey:32 >>,
  T = D bxor MaskingKeyPart,
  << Acc/binary, T:8 >>.
%% @doc Encode the payload length as binary in a variable number of bits.
%% See RFC 6455 section 5.2 for more details: 7 bits for lengths up to
%% 125, 7+16 bits up to 16#ffff, 7+64 bits beyond that.
%% (Fix: the "See ..." line above had lost its %% delimiter -- and its
%% URL -- which made the file unparsable.)
payload_length_to_binary(Len) when Len =< 125 ->
  << Len:7 >>;
payload_length_to_binary(Len) when Len =< 16#ffff ->
  << 126:7, Len:16 >>;
payload_length_to_binary(Len) when Len =< 16#7fffffffffffffff ->
  << 127:7, Len:64 >>.
%% @doc If this is the first continuation frame, set the opcode and initialize
%% continuation to an empty binary. Otherwise, return the request object untouched.
%% (Fix: the first @doc line had lost its %% comment delimiter, which
%% made the file unparsable.)
-spec set_continuation_if_empty(WSReq :: websocket_req:req(),
                                Opcode :: websocket_req:opcode()) ->
                                       websocket_req:req().
set_continuation_if_empty(WSReq, Opcode) ->
  case websocket_req:continuation(WSReq) of
    undefined ->
      WSReq1 = websocket_req:continuation_opcode(Opcode, WSReq),
      websocket_req:continuation(<<>>, WSReq1);
    _ ->
      WSReq
  end.
| null | https://raw.githubusercontent.com/BitGameEN/bitgamex/151ba70a481615379f9648581a5d459b503abe19/src/deps/websocket_client/src/websocket_client.erl | erlang | @doc Start the websocket client
Send a frame asynchronously
@doc Create socket, execute handshake, and enter loop
Since we could have already received some data already, we simulate a Socket message.
@doc Send http upgrade request and validate handshake response challenge
@doc Blocks and waits until handshake response data is received
@doc Send frame to server
@doc Main loop
@doc Key sent in initial handshake
Consume the response...
...and make sure the challenge is valid.
@doc Consumes the HTTP response and extracts status, header and the body.
@doc Need more data to read length properly
@doc Length known and still missing data
@doc Length known and remaining data is appended to the buffer
If a ping is received, send a pong automatically
which the connection was established has been fulfilled.
going down or a browser having navigated away from a page.
See #section-7.4.1
for error code descriptions.
Non-control continuation frame
Append to previously existing continuation payloads and continue
Terminate continuation frame sequence with non-control frame
@doc Handles return values from the callback module
we can still have more messages in buffer
buffer should not contain uncomplete messages
buffer contain uncomplete message that shouldnt be parsed
we can still have more messages in buffer
buffer should not contain uncomplete messages
buffer contain uncomplete message that shouldnt be parsed
masks the data
@doc The payload is masked using a masking key byte by byte.
@doc Encode the payload length as binary in a variable number of bits.
continuation to an empty binary. Otherwise, return the request object untouched. | @author
@doc Erlang websocket client
-module(websocket_client).
-export([start_link/3,
start_link/4,
cast/2,
send/2
]).
-export([ws_client_init/7]).
-type opt() :: {async_start, boolean()}
| {extra_headers, [{string() | binary(), string() | binary()}]}
.
-type opts() :: [opt()].
-spec start_link(URL :: string() | binary(), Handler :: module(), HandlerArgs :: list()) ->
{ok, pid()} | {error, term()}.
start_link(URL, Handler, HandlerArgs) ->
start_link(URL, Handler, HandlerArgs, []).
start_link(URL, Handler, HandlerArgs, AsyncStart) when is_boolean(AsyncStart) ->
start_link(URL, Handler, HandlerArgs, [{async_start, AsyncStart}]);
start_link(URL, Handler, HandlerArgs, Opts) when is_binary(URL) ->
start_link(erlang:binary_to_list(URL), Handler, HandlerArgs, Opts);
start_link(URL, Handler, HandlerArgs, Opts) when is_list(Opts) ->
case http_uri:parse(URL, [{scheme_defaults, [{ws,80},{wss,443}]}]) of
{ok, {Protocol, _, Host, Port, Path, Query}} ->
proc_lib:start_link(?MODULE, ws_client_init,
[Handler, Protocol, Host, Port, Path ++ Query, HandlerArgs, Opts]);
{error, _} = Error ->
Error
end.
-spec cast(Client :: pid(), Frame :: websocket_req:frame()) ->
ok.
cast(Client, Frame) ->
Client ! {cast, Frame},
ok.
-spec ws_client_init(Handler :: module(), Protocol :: websocket_req:protocol(),
Host :: string(), Port :: inet:port_number(), Path :: string(),
Args :: list(), Opts :: opts()) ->
no_return().
ws_client_init(Handler, Protocol, Host, Port, Path, Args, Opts) ->
Transport = case Protocol of
wss ->
ssl;
ws ->
gen_tcp
end,
SockReply = case Transport of
ssl ->
ssl:connect(Host, Port,
[{mode, binary},
{verify, verify_none},
{active, false},
{packet, 0}
], 6000);
gen_tcp ->
gen_tcp:connect(Host, Port,
[binary,
{active, false},
{packet, 0}
], 6000)
end,
{ok, Socket} = case SockReply of
{ok, Sock} -> {ok, Sock};
{error, _} = ConnectError ->
proc_lib:init_ack(ConnectError),
exit(normal)
end,
WSReq = websocket_req:new(
Protocol,
Host,
Port,
Path,
Socket,
Transport,
Handler,
generate_ws_key()
),
ExtraHeaders = proplists:get_value(extra_headers, Opts, []),
case websocket_handshake(WSReq, ExtraHeaders) of
{error, _} = HandshakeError ->
proc_lib:init_ack(HandshakeError),
exit(normal);
{ok, Buffer} ->
AsyncStart = proplists:get_value(async_start, Opts, true),
AsyncStart andalso proc_lib:init_ack({ok, self()}),
{ok, HandlerState, KeepAlive} = case Handler:init(Args, WSReq) of
{ok, HS} ->
{ok, HS, infinity};
{ok, HS, KA} ->
{ok, HS, KA}
end,
AsyncStart orelse proc_lib:init_ack({ok, self()}),
case Socket of
{sslsocket, _, _} ->
ssl:setopts(Socket, [{active, true}]);
_ ->
inet:setopts(Socket, [{active, true}])
end,
case Buffer of
<<>> -> ok;
_ -> self() ! {Transport, Socket, Buffer}
end,
KATimer = case KeepAlive of
infinity ->
undefined;
_ ->
erlang:send_after(KeepAlive, self(), keepalive)
end,
websocket_loop(websocket_req:set([{keepalive,KeepAlive},{keepalive_timer,KATimer}], WSReq), HandlerState, <<>>)
end.
-spec websocket_handshake(WSReq :: websocket_req:req(), [{string(), string()}]) -> {ok, binary()} | {error, term()}.
websocket_handshake(WSReq, ExtraHeaders) ->
[Path, Host, Key, Transport, Socket] =
websocket_req:get([path, host, key, transport, socket], WSReq),
Handshake = ["GET ", Path, " HTTP/1.1\r\n"
"Host: ", Host, "\r\n"
"Connection: Upgrade\r\n"
"Sec-WebSocket-Version: 13\r\n"
"Sec-WebSocket-Key: ", Key, "\r\n"
"Upgrade: websocket\r\n",
[ [Header, ": ", Value, "\r\n"] || {Header, Value} <- ExtraHeaders],
"\r\n"],
Transport:send(Socket, Handshake),
{ok, HandshakeResponse} = receive_handshake(<<>>, Transport, Socket),
validate_handshake(HandshakeResponse, Key).
-spec receive_handshake(Buffer :: binary(),
Transport :: module(),
Socket :: term()) ->
{ok, binary()}.
receive_handshake(Buffer, Transport, Socket) ->
case re:run(Buffer, "\\r\\n\\r\\n") of
{match, _} ->
{ok, Buffer};
_ ->
{ok, Data} = Transport:recv(Socket, 0, 6000),
receive_handshake(<< Buffer/binary, Data/binary >>,
Transport, Socket)
end.
-spec send(websocket_req:frame(), websocket_req:req()) -> ok | {error, term()}.
send(Frame, WSReq) ->
Socket = websocket_req:socket(WSReq),
Transport = websocket_req:transport(WSReq),
Transport:send(Socket, encode_frame(Frame)).
-spec websocket_loop(WSReq :: websocket_req:req(), HandlerState :: any(),
Buffer :: binary()) ->
ok.
websocket_loop(WSReq, HandlerState, Buffer) ->
receive
Message -> handle_websocket_message(WSReq, HandlerState, Buffer, Message)
end.
handle_websocket_message(WSReq, HandlerState, Buffer, Message) ->
[Handler, Remaining, Socket] =
websocket_req:get([handler, remaining, socket], WSReq),
case Message of
keepalive ->
cancel_keepalive_timer(WSReq),
ok = send({ping, <<>>}, WSReq),
KATimer = erlang:send_after(websocket_req:keepalive(WSReq), self(), keepalive),
websocket_loop(websocket_req:keepalive_timer(KATimer, WSReq), HandlerState, Buffer);
{cast, Frame} ->
ok = send(Frame, WSReq),
websocket_loop(WSReq, HandlerState, Buffer);
{_Closed, Socket} ->
websocket_close(WSReq, HandlerState, remote);
{_TransportType, Socket, Data} ->
case Remaining of
undefined ->
retrieve_frame(WSReq, HandlerState,
<< Buffer/binary, Data/binary >>);
_ ->
retrieve_frame(WSReq, HandlerState,
websocket_req:opcode(WSReq), Remaining, Data, Buffer)
end;
Msg ->
try Handler:websocket_info(Msg, WSReq, HandlerState) of
HandlerResponse ->
handle_response(WSReq, HandlerResponse, Buffer)
catch
_:Reason ->
websocket_close(WSReq, HandlerState, {handler, Reason})
end
end.
-spec cancel_keepalive_timer(websocket_req:req()) -> ok.
cancel_keepalive_timer(WSReq) ->
case websocket_req:keepalive_timer(WSReq) of
undefined ->
ok;
OldTimer ->
erlang:cancel_timer(OldTimer),
ok
end.
-spec websocket_close(WSReq :: websocket_req:req(),
HandlerState :: any(),
Reason :: tuple()) -> ok.
websocket_close(WSReq, HandlerState, Reason) ->
Handler = websocket_req:handler(WSReq),
try Handler:websocket_terminate(Reason, WSReq, HandlerState) of
_ ->
case Reason of
normal -> ok;
_ -> error_info(Handler, Reason, HandlerState)
end,
exit(Reason)
catch
_:Reason2 ->
error_info(Handler, Reason2, HandlerState),
exit(Reason2)
end.
error_info(Handler, Reason, State) ->
error_logger:error_msg(
"** Websocket handler ~p terminating~n"
"** for the reason ~p~n"
"** Handler state was ~p~n"
"** Stacktrace: ~p~n~n",
[Handler, Reason, State, erlang:get_stacktrace()]).
-spec generate_ws_key() ->
binary().
generate_ws_key() ->
base64:encode(crypto:strong_rand_bytes(16)).
@doc Validate handshake response challenge
-spec validate_handshake(HandshakeResponse :: binary(), Key :: binary()) -> {ok, binary()} | {error, term()}.
validate_handshake(HandshakeResponse, Key) ->
Challenge = base64:encode(
crypto:hash(sha, << Key/binary, "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" >>)),
{ok, Status, Header, Buffer} = consume_response(HandshakeResponse),
{_Version, Code, Message} = Status,
case Code of
101 means Switching Protocol
101 ->
Challenge = proplists:get_value(<<"Sec-Websocket-Accept">>, Header),
{ok, Buffer};
_ -> {error, {Code, Message}}
end.
consume_response(Response) ->
{ok, {http_response, Version, Code, Message}, Header} = erlang:decode_packet(http_bin, Response, []),
consume_response({Version, Code, Message}, Header, []).
consume_response(Status, Response, HeaderAcc) ->
case erlang:decode_packet(httph_bin, Response, []) of
{ok, {http_header, _Length, Field, _Reserved, Value}, Rest} ->
consume_response(Status, Rest, [{Field, Value} | HeaderAcc]);
{ok, http_eoh, Body} ->
{ok, Status, HeaderAcc, Body}
end.
@doc Start or continue continuation payload with length less than 126 bytes
retrieve_frame(WSReq, HandlerWSReq,
<< 0:4, Opcode:4, 0:1, Len:7, Rest/bits >>)
when Len < 126 ->
WSReq1 = set_continuation_if_empty(WSReq, Opcode),
WSReq2 = websocket_req:fin(0, WSReq1),
retrieve_frame(WSReq2, HandlerWSReq, Opcode, Len, Rest, <<>>);
@doc Start or continue continuation payload with length a 2 byte int
retrieve_frame(WSReq, HandlerWSReq,
<< 0:4, Opcode:4, 0:1, 126:7, Len:16, Rest/bits >>)
when Len > 125, Opcode < 8 ->
WSReq1 = set_continuation_if_empty(WSReq, Opcode),
WSReq2 = websocket_req:fin(0, WSReq1),
retrieve_frame(WSReq2, HandlerWSReq, Opcode, Len, Rest, <<>>);
@doc Start or continue continuation payload with length a 64 bit int
retrieve_frame(WSReq, HandlerWSReq,
<< 0:4, Opcode:4, 0:1, 127:7, 0:1, Len:63, Rest/bits >>)
when Len > 16#ffff, Opcode < 8 ->
WSReq1 = set_continuation_if_empty(WSReq, Opcode),
WSReq2 = websocket_req:fin(0, WSReq1),
retrieve_frame(WSReq2, HandlerWSReq, Opcode, Len, Rest, <<>>);
@doc Length is less 126 bytes
retrieve_frame(WSReq, HandlerWSReq,
<< 1:1, 0:3, Opcode:4, 0:1, Len:7, Rest/bits >>)
when Len < 126 ->
WSReq1 = websocket_req:fin(1, WSReq),
retrieve_frame(WSReq1, HandlerWSReq, Opcode, Len, Rest, <<>>);
@doc Length is a 2 byte integer
retrieve_frame(WSReq, HandlerWSReq,
<< 1:1, 0:3, Opcode:4, 0:1, 126:7, Len:16, Rest/bits >>)
when Len > 125, Opcode < 8 ->
WSReq1 = websocket_req:fin(1, WSReq),
retrieve_frame(WSReq1, HandlerWSReq, Opcode, Len, Rest, <<>>);
@doc Length is a 64 bit integer
retrieve_frame(WSReq, HandlerWSReq,
<< 1:1, 0:3, Opcode:4, 0:1, 127:7, 0:1, Len:63, Rest/bits >>)
when Len > 16#ffff, Opcode < 8 ->
WSReq1 = websocket_req:fin(1, WSReq),
retrieve_frame(WSReq1, HandlerWSReq, Opcode, Len, Rest, <<>>);
retrieve_frame(WSReq, HandlerWSReq, Data) ->
websocket_loop(WSReq, HandlerWSReq, Data).
retrieve_frame(WSReq, HandlerWSReq, Opcode, Len, Data, Buffer)
when byte_size(Data) < Len ->
Remaining = Len - byte_size(Data),
WSReq1 = websocket_req:remaining(Remaining, WSReq),
WSReq2 = websocket_req:opcode(Opcode, WSReq1),
websocket_loop(WSReq2, HandlerWSReq, << Buffer/bits, Data/bits >>);
retrieve_frame(WSReq, HandlerState, Opcode, Len, Data, Buffer) ->
[Handler, Continuation, ContinuationOpcode] =
websocket_req:get([handler, continuation, continuation_opcode], WSReq),
Fin = websocket_req:fin(WSReq),
<< Payload:Len/binary, Rest/bits >> = Data,
FullPayload = << Buffer/binary, Payload/binary >>,
OpcodeName = websocket_req:opcode_to_name(Opcode),
case OpcodeName of
ping ->
ok = send({pong, FullPayload}, WSReq);
_ ->
ok
end,
case OpcodeName of
close when byte_size(FullPayload) >= 2 ->
<< CodeBin:2/binary, _ClosePayload/binary >> = FullPayload,
Code = binary:decode_unsigned(CodeBin),
Reason = case Code of
1000 indicates a normal closure , meaning that the purpose for
1000 -> normal;
1001 indicates that an endpoint is " going away " , such as a server
1001 -> normal;
_ -> {remote, Code}
end,
websocket_close(WSReq, HandlerState, Reason);
close ->
websocket_close(WSReq, HandlerState, remote);
_ when Opcode < 8, Continuation =/= undefined, Fin == 0 ->
Continuation1 = << Continuation/binary, FullPayload/binary >>,
WSReq1 = websocket_req:continuation(Continuation1, WSReq),
retrieve_frame(WSReq1, HandlerState, Rest);
_ when Opcode < 8, Continuation =/= undefined, Fin == 1 ->
DefragPayload = << Continuation/binary, FullPayload/binary >>,
WSReq1 = websocket_req:continuation(undefined, WSReq),
WSReq2 = websocket_req:continuation_opcode(undefined, WSReq1),
ContinuationOpcodeName = websocket_req:opcode_to_name(ContinuationOpcode),
try Handler:websocket_handle(
{ContinuationOpcodeName, DefragPayload},
WSReq2, HandlerState) of
HandlerResponse ->
handle_response(websocket_req:remaining(undefined, WSReq1),
HandlerResponse, Rest)
catch _:Reason ->
websocket_close(WSReq, HandlerState, {handler, Reason})
end;
_ ->
try Handler:websocket_handle(
{OpcodeName, FullPayload},
WSReq, HandlerState) of
HandlerResponse ->
handle_response(websocket_req:remaining(undefined, WSReq),
HandlerResponse, Rest)
catch _:Reason ->
websocket_close(WSReq, HandlerState, {handler, Reason})
end
end.
handle_response(WSReq, {reply, Frame, HandlerState}, Buffer) ->
[Socket, Transport] = websocket_req:get([socket, transport], WSReq),
case Transport:send(Socket, encode_frame(Frame)) of
ok ->
case websocket_req:remaining(WSReq) of
undefined ->
retrieve_frame(WSReq, HandlerState, Buffer);
_ ->
websocket_loop(WSReq, HandlerState, Buffer)
end;
{error, Reason} ->
websocket_close(WSReq, HandlerState, {local, Reason})
end;
handle_response(WSReq, {ok, HandlerState}, Buffer) ->
case websocket_req:remaining(WSReq) of
undefined -> retrieve_frame(WSReq, HandlerState, Buffer);
_ -> websocket_loop(WSReq, HandlerState, Buffer)
end;
handle_response(WSReq, {close, Payload, HandlerState}, _) ->
send({close, Payload}, WSReq),
websocket_close(WSReq, HandlerState, normal).
@doc Encodes the data with a header ( including a masking key ) and
-spec encode_frame(websocket_req:frame()) ->
binary().
encode_frame({Type, Payload}) ->
Opcode = websocket_req:name_to_opcode(Type),
Len = iolist_size(Payload),
BinLen = payload_length_to_binary(Len),
MaskingKeyBin = crypto:strong_rand_bytes(4),
<< MaskingKey:32 >> = MaskingKeyBin,
Header = << 1:1, 0:3, Opcode:4, 1:1, BinLen/bits, MaskingKeyBin/bits >>,
MaskedPayload = mask_payload(MaskingKey, Payload),
<< Header/binary, MaskedPayload/binary >>;
encode_frame(Type) when is_atom(Type) ->
encode_frame({Type, <<>>}).
Can do it in 4 byte chunks to save time until there is left than 4 bytes left
mask_payload(MaskingKey, Payload) ->
mask_payload(MaskingKey, Payload, <<>>).
mask_payload(_, <<>>, Acc) ->
Acc;
mask_payload(MaskingKey, << D:32, Rest/bits >>, Acc) ->
T = D bxor MaskingKey,
mask_payload(MaskingKey, Rest, << Acc/binary, T:32 >>);
mask_payload(MaskingKey, << D:24 >>, Acc) ->
<< MaskingKeyPart:24, _:8 >> = << MaskingKey:32 >>,
T = D bxor MaskingKeyPart,
<< Acc/binary, T:24 >>;
mask_payload(MaskingKey, << D:16 >>, Acc) ->
<< MaskingKeyPart:16, _:16 >> = << MaskingKey:32 >>,
T = D bxor MaskingKeyPart,
<< Acc/binary, T:16 >>;
mask_payload(MaskingKey, << D:8 >>, Acc) ->
<< MaskingKeyPart:8, _:24 >> = << MaskingKey:32 >>,
T = D bxor MaskingKeyPart,
<< Acc/binary, T:8 >>.
See for more details
payload_length_to_binary(Len) when Len =<125 ->
<< Len:7 >>;
payload_length_to_binary(Len) when Len =< 16#ffff ->
<< 126:7, Len:16 >>;
payload_length_to_binary(Len) when Len =< 16#7fffffffffffffff ->
<< 127:7, Len:64 >>.
@doc If this is the first continuation frame , set the opcode and initialize
-spec set_continuation_if_empty(WSReq :: websocket_req:req(),
Opcode :: websocket_req:opcode()) ->
websocket_req:req().
set_continuation_if_empty(WSReq, Opcode) ->
case websocket_req:continuation(WSReq) of
undefined ->
WSReq1 = websocket_req:continuation_opcode(Opcode, WSReq),
websocket_req:continuation(<<>>, WSReq1);
_ ->
WSReq
end.
|
69edd7a3157bc4f298973aec31ae59c0bf3212c06dd6ac620630daf592a7ad53 | gvolpe/shopping-cart-haskell | HasDigits.hs | # LANGUAGE DataKinds , DeriveGeneric , OverloadedStrings #
{-# LANGUAGE FlexibleInstances, KindSignatures, MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables, TypeApplications #-}

-- | Refinement predicate asserting that an integral value is rendered
-- with exactly @n@ decimal digits by 'show'.
-- (Fix: the first LANGUAGE pragma line had lost its @{-# ... #-}@
-- delimiters, which made the module unparsable.)
module Refined.HasDigits where

import Data.Typeable ( typeOf )
import GHC.Generics ( Generic )
import GHC.TypeLits ( KnownNat
                    , Nat
                    )

import Orphan ( )
import Refined
import Refined.Helper ( i2text
                      , nv
                      )

-- | Type-level tag carrying the required digit count @n@.
data HasDigits (n :: Nat) = HasDigits deriving Generic

instance (Integral x, Show x, KnownNat n) => Predicate (HasDigits n) x where
  validate p x = do
    -- nv @n reflects the type-level digit count to a runtime value.
    let n = fromIntegral (nv @n)
    if n == toInteger (length $ show x)
      then Nothing
      else throwRefineOtherException
             (typeOf p)
             ("Invalid number of digits. Expected " <> i2text n)
| null | https://raw.githubusercontent.com/gvolpe/shopping-cart-haskell/a9fbd4a7ce8b88fc5a19b2e22099c896cae0d1f6/src/Refined/HasDigits.hs | haskell | # LANGUAGE ScopedTypeVariables, TypeApplications # | # LANGUAGE DataKinds , DeriveGeneric , OverloadedStrings #
# LANGUAGE FlexibleInstances , KindSignatures , MultiParamTypeClasses #
module Refined.HasDigits where
import Data.Typeable ( typeOf )
import GHC.Generics ( Generic )
import GHC.TypeLits ( KnownNat
, Nat
)
import Orphan ( )
import Refined
import Refined.Helper ( i2text
, nv
)
data HasDigits (n :: Nat) = HasDigits deriving Generic
instance (Integral x, Show x, KnownNat n) => Predicate (HasDigits n) x where
validate p x = do
let n = fromIntegral (nv @n)
if n == toInteger (length $ show x)
then Nothing
else throwRefineOtherException
(typeOf p)
("Invalid number of digits. Expected " <> i2text n)
|
1acaadd6f0a510727215b9daf1d2049867a511a930253ecfc84857a14a52fe61 | WorksHub/client | events.cljs | (ns wh.promotions.create-promotion.events
(:require [cljs-time.core :as t]
[cljs-time.format :as tf]
[re-frame.core :refer [reg-event-db reg-event-fx]]
[wh.blogs.blog.events]
[wh.db :as db]
[wh.graphql-cache :refer [reg-query]]
[wh.graphql.company]
[wh.graphql.issues]
[wh.graphql.jobs]
[wh.pages.core :refer [on-page-load]]
[wh.promotions.create-promotion.db :as create-promotion])
(:require-macros [wh.graphql-macros :refer [defquery]]))
(defn job-preview [{:keys [wh.db/page-params] :as db}]
;; query registerd in wh.graphql.jobs
[:job {:id (:id page-params)}])
(defn company-preview [{:keys [wh.db/page-params] :as db}]
;; query registerd in wh.graphql.company
[:company {:id (:id page-params)}])
(defn issue-preview [{:keys [wh.db/page-params] :as db}]
;; query registerd in wh.graphql.issues
[:issue {:id (:id page-params)}])
(defn blog-preview [{:keys [wh.db/page-params] :as db}]
;; query registerd in wh.blogs.blog.events
[:blog {:id (:id page-params)}])
(defn unkown-query []
(js/console.error "Unkown object type given!")
[:unkown-query-error])
(defn preview-query
  "Return the graphql-cache query-builder fn for the given promoted
  object `type` (:article, :issue, :company or :job).  Any other type
  falls through to `unkown-query` (sic), which logs a console error."
  [type]
  (case type
    :article blog-preview
    :issue issue-preview
    :company company-preview
    :job job-preview
    unkown-query))
(defmethod on-page-load :create-promotion [{:keys [wh.db/page-params] :as db}]
(let [type (keyword (:type page-params))
query-fn (preview-query type)]
[(into [:graphql/query] (query-fn db))
[::init-db]]))
(reg-event-db
::edit-description
db/default-interceptors
(fn [db [description]]
(assoc db ::create-promotion/description description)))
(reg-event-db
::init-db
db/default-interceptors
(fn [db _]
(-> db
(assoc ::create-promotion/promotion-status {})
(assoc ::create-promotion/description ""))))
(defquery create-promotion-mutation
{:venia/operation {:operation/type :mutation
:operation/name "create_promotion"}
:venia/variables [{:variable/name "object_type"
:variable/type :object_type!}
{:variable/name "object_id"
:variable/type :String!}
{:variable/name "channel"
:variable/type :channel!}
{:variable/name "start_date"
:variable/type :date!}
{:variable/name "description"
:variable/type :String}]
:venia/queries [[:create_promotion {:object_type :$object_type
:object_id :$object_id
:channel :$channel
:start_date :$start_date
:description :$description}
[:id]]]})
(reg-event-fx
  ::send-promotion!
  db/default-interceptors
  ;; Fire the create_promotion GraphQL mutation for one channel and
  ;; record an in-flight :sending status for that channel.
  (fn [{db :db} [{:keys [channel object-type object-id start-date description] :as args}]]
    {:db (assoc-in db [::create-promotion/promotion-status channel] :sending)
     :graphql {:query create-promotion-mutation
               :variables (cond->
                            {:object_type object-type
                             :object_id object-id
                             :channel channel
                             ;; NOTE(review): the destructured `start-date` is
                             ;; never used; the mutation always sends (t/now)
                             ;; as the start date -- confirm this is intended.
                             :start_date (tf/unparse (tf/formatters :date-time) (t/now))}
                            ;; :description is only included when non-nil.
                            description (merge {:description description}))
               :on-success [::send-promotion-success channel]
               :on-failure [::send-promotion-failure channel]}}))
(reg-event-db
::send-promotion-success
db/default-interceptors
(fn [db [channel]]
(assoc-in db [::create-promotion/promotion-status channel] :success)))
(reg-event-db
::send-promotion-failure
db/default-interceptors
(fn [db [channel]]
(assoc-in db [::create-promotion/promotion-status channel] :failure)))
(reg-event-db
::select-channel
db/default-interceptors
(fn [db [channel]]
(assoc db ::create-promotion/selected-channel channel)))
| null | https://raw.githubusercontent.com/WorksHub/client/a51729585c2b9d7692e57b3edcd5217c228cf47c/client/src/wh/promotions/create_promotion/events.cljs | clojure | query registerd in wh.graphql.jobs
query registerd in wh.graphql.company
query registerd in wh.graphql.issues
query registerd in wh.blogs.blog.events | (ns wh.promotions.create-promotion.events
(:require [cljs-time.core :as t]
[cljs-time.format :as tf]
[re-frame.core :refer [reg-event-db reg-event-fx]]
[wh.blogs.blog.events]
[wh.db :as db]
[wh.graphql-cache :refer [reg-query]]
[wh.graphql.company]
[wh.graphql.issues]
[wh.graphql.jobs]
[wh.pages.core :refer [on-page-load]]
[wh.promotions.create-promotion.db :as create-promotion])
(:require-macros [wh.graphql-macros :refer [defquery]]))
(defn job-preview [{:keys [wh.db/page-params] :as db}]
[:job {:id (:id page-params)}])
(defn company-preview [{:keys [wh.db/page-params] :as db}]
[:company {:id (:id page-params)}])
(defn issue-preview [{:keys [wh.db/page-params] :as db}]
[:issue {:id (:id page-params)}])
(defn blog-preview [{:keys [wh.db/page-params] :as db}]
[:blog {:id (:id page-params)}])
(defn unkown-query []
(js/console.error "Unkown object type given!")
[:unkown-query-error])
(defn preview-query [type]
(case type
:article blog-preview
:issue issue-preview
:company company-preview
:job job-preview
unkown-query))
(defmethod on-page-load :create-promotion [{:keys [wh.db/page-params] :as db}]
(let [type (keyword (:type page-params))
query-fn (preview-query type)]
[(into [:graphql/query] (query-fn db))
[::init-db]]))
(reg-event-db
::edit-description
db/default-interceptors
(fn [db [description]]
(assoc db ::create-promotion/description description)))
(reg-event-db
::init-db
db/default-interceptors
(fn [db _]
(-> db
(assoc ::create-promotion/promotion-status {})
(assoc ::create-promotion/description ""))))
(defquery create-promotion-mutation
{:venia/operation {:operation/type :mutation
:operation/name "create_promotion"}
:venia/variables [{:variable/name "object_type"
:variable/type :object_type!}
{:variable/name "object_id"
:variable/type :String!}
{:variable/name "channel"
:variable/type :channel!}
{:variable/name "start_date"
:variable/type :date!}
{:variable/name "description"
:variable/type :String}]
:venia/queries [[:create_promotion {:object_type :$object_type
:object_id :$object_id
:channel :$channel
:start_date :$start_date
:description :$description}
[:id]]]})
(reg-event-fx
::send-promotion!
db/default-interceptors
(fn [{db :db} [{:keys [channel object-type object-id start-date description] :as args}]]
{:db (assoc-in db [::create-promotion/promotion-status channel] :sending)
:graphql {:query create-promotion-mutation
:variables (cond->
{:object_type object-type
:object_id object-id
:channel channel
:start_date (tf/unparse (tf/formatters :date-time) (t/now))}
description (merge {:description description}))
:on-success [::send-promotion-success channel]
:on-failure [::send-promotion-failure channel]}}))
(reg-event-db
::send-promotion-success
db/default-interceptors
(fn [db [channel]]
(assoc-in db [::create-promotion/promotion-status channel] :success)))
(reg-event-db
::send-promotion-failure
db/default-interceptors
(fn [db [channel]]
(assoc-in db [::create-promotion/promotion-status channel] :failure)))
(reg-event-db
::select-channel
db/default-interceptors
(fn [db [channel]]
(assoc db ::create-promotion/selected-channel channel)))
|
07fc26c55cf5a888ac6abbb27674a0434664c66f5556f9ccdada716b821ed3d2 | Rober-t/apxr_run | morphology_test.erl | -module(morphology_test).
-include_lib("eunit/include/eunit.hrl").
%% runners
%% EUnit test generator: returns the list of zero-arity sub-test funs
%% that exercise the morphology module.
morphology_test_() ->
  [
   fun get_init_sensors_subtest/0,
   fun get_init_actuators_subtest/0,
   fun get_sensors_subtest/0,
   fun get_actuators_subtest/0,
   fun get_init_substrate_cpps_subtest/0,
   fun get_init_substrate_ceps_subtest/0,
   fun get_substrate_cpps_subtest/0,
   fun get_substrate_ceps_subtest/0
  ].
%% tests
get_init_sensors_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanout_ids => [],
format => no_geo, generation => undefined,
id => undefined,
name => {flatland_sensor, distance_scanner},
parameters => [[1.5707963267948966], [5], [0.0]],
scape => {public, flatland},
type => standard, vl => 5}}],
morphology:get_init_sensors(prey)).
get_init_actuators_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanin_ids => [], format => no_geo,
generation => undefined, id => undefined,
name => {flatland_actuator, two_wheels},
parameters => [2],
scape => {public, flatland},
type => standard, vl => 2}}],
morphology:get_init_actuators(predator)).
get_sensors_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanout_ids => [],
format => no_geo, generation => undefined,
id => undefined,
name => {flatland_sensor, distance_scanner},
parameters => [[1.5707963267948966], [5], [0.0]],
scape => {public, flatland},
type => standard, vl => 5}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => no_geo, generation => undefined,
id => undefined,
name => {flatland_sensor, color_scanner},
parameters => [[1.5707963267948966], [5], [0.0]],
scape => {public, flatland},
type => standard, vl => 5}}],
morphology:get_sensors(predator)).
get_actuators_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanin_ids => [], format => no_geo,
generation => undefined, id => undefined,
name => {flatland_actuator, two_wheels},
parameters => [2],
scape => {public, flatland},
type => standard, vl => 2}}],
morphology:get_actuators(prey)).
get_init_substrate_cpps_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian,
parameters => undefined, scape => undefined,
type => substrate, vl => 4}}],
morphology:get_init_substrate_cpps(2, none)).
get_init_substrate_ceps_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanin_ids => [],
format => undefined, generation => undefined,
id => undefined, name => set_weight,
parameters => undefined, scape => undefined,
type => substrate, vl => 1}}],
morphology:get_init_substrate_ceps(4, none)).
get_substrate_cpps_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian,
parameters => undefined, scape => undefined,
type => substrate, vl => 9}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => centripital_distances,
parameters => undefined, scape => undefined,
type => substrate, vl => 5}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian_distance,
parameters => undefined, scape => undefined,
type => substrate, vl => 4}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian_coord_diffs,
parameters => undefined, scape => undefined,
type => substrate, vl => 6}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined,
name => cartesian_gaussed_coord_diffs,
parameters => undefined, scape => undefined,
type => substrate, vl => 6}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => iow, parameters => undefined,
scape => undefined, type => substrate, vl => 3}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => spherical,
parameters => undefined, scape => undefined,
type => substrate, vl => 9}}],
morphology:get_substrate_cpps(3, iterative)),
?assertEqual([#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian,
parameters => undefined, scape => undefined,
type => substrate, vl => 4}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => centripital_distances,
parameters => undefined, scape => undefined,
type => substrate, vl => 2}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian_distance,
parameters => undefined, scape => undefined,
type => substrate, vl => 1}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian_coord_diffs,
parameters => undefined, scape => undefined,
type => substrate, vl => 2}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined,
name => cartesian_gaussed_coord_diffs,
parameters => undefined, scape => undefined,
type => substrate, vl => 2}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => polar,
parameters => undefined, scape => undefined,
type => substrate, vl => 4}}],
morphology:get_substrate_cpps(2, none)).
get_substrate_ceps_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanin_ids => [],
format => undefined, generation => undefined,
id => undefined, name => set_abcn,
parameters => undefined, scape => undefined,
type => substrate, vl => 5}}],
morphology:get_substrate_ceps(2, abcn)). | null | https://raw.githubusercontent.com/Rober-t/apxr_run/9c62ab028af7ff3768ffe3f27b8eef1799540f05/test/morphology_test.erl | erlang | runners
tests | -module(morphology_test).
-include_lib("eunit/include/eunit.hrl").
morphology_test_() ->
[
fun get_init_sensors_subtest/0,
fun get_init_actuators_subtest/0,
fun get_sensors_subtest/0,
fun get_actuators_subtest/0,
fun get_init_substrate_cpps_subtest/0,
fun get_init_substrate_ceps_subtest/0,
fun get_substrate_cpps_subtest/0,
fun get_substrate_ceps_subtest/0
].
get_init_sensors_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanout_ids => [],
format => no_geo, generation => undefined,
id => undefined,
name => {flatland_sensor, distance_scanner},
parameters => [[1.5707963267948966], [5], [0.0]],
scape => {public, flatland},
type => standard, vl => 5}}],
morphology:get_init_sensors(prey)).
get_init_actuators_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanin_ids => [], format => no_geo,
generation => undefined, id => undefined,
name => {flatland_actuator, two_wheels},
parameters => [2],
scape => {public, flatland},
type => standard, vl => 2}}],
morphology:get_init_actuators(predator)).
get_sensors_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanout_ids => [],
format => no_geo, generation => undefined,
id => undefined,
name => {flatland_sensor, distance_scanner},
parameters => [[1.5707963267948966], [5], [0.0]],
scape => {public, flatland},
type => standard, vl => 5}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => no_geo, generation => undefined,
id => undefined,
name => {flatland_sensor, color_scanner},
parameters => [[1.5707963267948966], [5], [0.0]],
scape => {public, flatland},
type => standard, vl => 5}}],
morphology:get_sensors(predator)).
get_actuators_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanin_ids => [], format => no_geo,
generation => undefined, id => undefined,
name => {flatland_actuator, two_wheels},
parameters => [2],
scape => {public, flatland},
type => standard, vl => 2}}],
morphology:get_actuators(prey)).
get_init_substrate_cpps_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian,
parameters => undefined, scape => undefined,
type => substrate, vl => 4}}],
morphology:get_init_substrate_cpps(2, none)).
get_init_substrate_ceps_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanin_ids => [],
format => undefined, generation => undefined,
id => undefined, name => set_weight,
parameters => undefined, scape => undefined,
type => substrate, vl => 1}}],
morphology:get_init_substrate_ceps(4, none)).
get_substrate_cpps_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian,
parameters => undefined, scape => undefined,
type => substrate, vl => 9}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => centripital_distances,
parameters => undefined, scape => undefined,
type => substrate, vl => 5}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian_distance,
parameters => undefined, scape => undefined,
type => substrate, vl => 4}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian_coord_diffs,
parameters => undefined, scape => undefined,
type => substrate, vl => 6}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined,
name => cartesian_gaussed_coord_diffs,
parameters => undefined, scape => undefined,
type => substrate, vl => 6}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => iow, parameters => undefined,
scape => undefined, type => substrate, vl => 3}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => spherical,
parameters => undefined, scape => undefined,
type => substrate, vl => 9}}],
morphology:get_substrate_cpps(3, iterative)),
?assertEqual([#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian,
parameters => undefined, scape => undefined,
type => substrate, vl => 4}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => centripital_distances,
parameters => undefined, scape => undefined,
type => substrate, vl => 2}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian_distance,
parameters => undefined, scape => undefined,
type => substrate, vl => 1}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => cartesian_coord_diffs,
parameters => undefined, scape => undefined,
type => substrate, vl => 2}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined,
name => cartesian_gaussed_coord_diffs,
parameters => undefined, scape => undefined,
type => substrate, vl => 2}},
#{data =>
#{cx_id => undefined, fanout_ids => [],
format => undefined, generation => undefined,
id => undefined, name => polar,
parameters => undefined, scape => undefined,
type => substrate, vl => 4}}],
morphology:get_substrate_cpps(2, none)).
get_substrate_ceps_subtest() ->
?assertEqual([#{data =>
#{cx_id => undefined, fanin_ids => [],
format => undefined, generation => undefined,
id => undefined, name => set_abcn,
parameters => undefined, scape => undefined,
type => substrate, vl => 5}}],
morphology:get_substrate_ceps(2, abcn)). |
2d634b91e269d7dac2eee2e61778a51b29ec236269f277149f1969570c852c79 | janestreet/async_unix | shutdown.ml | (* Unit tests are in ../../lib_test/shutdown_tests.ml *)
open Core
open Import
module Status_compatibility = struct
type t =
| Incompatible
| Compatible_and_replace
| Compatible_and_do_not_replace
end
module Status = struct
type t =
| Exit of int
| Signal of Signal.t
[@@deriving equal, sexp_of]
let compatibility t ~prior : Status_compatibility.t =
if equal t prior
then Compatible_and_do_not_replace
else (
match prior, t with
| _, Exit 0 -> Compatible_and_do_not_replace
| Exit 0, _ -> Compatible_and_replace
| _, _ -> Incompatible)
;;
end
module Maybe_status = struct
type t =
| No
| Yes of Status.t
[@@deriving sexp_of]
end
let debug = Debug.shutdown
let todo = ref []
let at_shutdown f =
let backtrace = Backtrace.get () in
if debug then Debug.log "at_shutdown" backtrace [%sexp_of: Backtrace.t];
todo := (backtrace, f) :: !todo
;;
let shutting_down_ref = ref Maybe_status.No
let default_force_ref = ref (fun () -> Clock.after (sec 10.))
let default_force () = !default_force_ref
let set_default_force force = default_force_ref := force
let shutting_down () = !shutting_down_ref
let is_shutting_down () =
match shutting_down () with
| No -> false
| Yes _ -> true
;;
(* Be careful to ensure [shutdown] doesn't raise just because
stderr is closed *)
let ignore_exn f =
try f () with
| _ -> ()
;;
let exit_reliably status =
match (status : Status.t) with
| Exit code ->
(match (exit code : Nothing.t) with
| exception exn ->
ignore_exn (fun () -> Core.Debug.eprints "Caml.exit raised" exn [%sexp_of: Exn.t]);
Core_unix.exit_immediately (if code = 0 then 1 else code)
| _ -> .)
| Signal signal ->
Signal.Expert.set signal `Default;
Signal_unix.send_exn signal (`Pid (Core_unix.getpid ()));
ignore_exn (fun () ->
Core.Debug.eprints
"Signal_unix.send_exn failed to kill process"
signal
[%sexp_of: Signal.t]);
Core_unix.exit_immediately 1
;;
let shutdown_with_status ?force status =
if debug then ignore_exn (fun () -> Debug.log "shutdown" status [%sexp_of: Status.t]);
match !shutting_down_ref with
| Yes prior ->
(match Status.compatibility status ~prior with
| Incompatible ->
raise_s
[%message
"shutdown with inconsistent status" (status : Status.t) (prior : Status.t)]
| Compatible_and_replace -> shutting_down_ref := Yes status
| Compatible_and_do_not_replace -> ())
| No ->
shutting_down_ref := Yes status;
upon
(Deferred.all
(List.map !todo ~f:(fun (backtrace, f) ->
let%map result =
Monitor.try_with_or_error
~rest:`Log
f
in
(match result with
| Ok () -> ()
| Error error ->
ignore_exn (fun () ->
Core.Debug.eprints
"at_shutdown function raised"
(error, backtrace)
[%sexp_of: Error.t * Backtrace.t]));
if debug
then
ignore_exn (fun () ->
Debug.log
"one at_shutdown function finished"
backtrace
[%sexp_of: Backtrace.t]);
result)))
(fun results ->
match shutting_down () with
| No -> assert false
| Yes status ->
let status =
match Or_error.combine_errors_unit results with
| Ok () -> status
| Error _ ->
(match status with
| Exit 0 -> Exit 1
| _ -> status)
in
exit_reliably status);
let force =
match force with
| None -> !default_force_ref ()
| Some f -> f
in
upon force (fun () ->
ignore_exn (fun () -> Debug.log_string "Shutdown forced.");
exit_reliably (Exit 1))
;;
let shutdown ?force exit_code = shutdown_with_status ?force (Exit exit_code)
let shutdown_with_signal_exn ?force signal =
match Signal.default_sys_behavior signal with
| `Terminate | `Dump_core -> shutdown_with_status ?force (Signal signal)
| (`Stop | `Continue | `Ignore) as default_sys_behavior ->
raise_s
[%message
"Shutdown.shutdown_with_signal_exn: not a terminating signal"
(signal : Signal.t)
(default_sys_behavior : [ `Stop | `Continue | `Ignore ])]
;;
let shutdown_on_unhandled_exn () =
Monitor.detach_and_iter_errors Monitor.main ~f:(fun exn ->
ignore_exn (fun () ->
Debug.log "shutting down due to unhandled exception" exn [%sexp_of: exn]);
try shutdown 1 with
| _ ->
The above [ shutdown ] call raises if we have already called shutdown with a
different non - zero status .
different non-zero status. *)
())
;;
let exit ?force status =
shutdown ?force status;
Deferred.never ()
;;
let don't_finish_before =
let proceed_with_shutdown = Ivar.create () in
let num_waiting = ref 0 in
let check () = if !num_waiting = 0 then Ivar.fill proceed_with_shutdown () in
at_shutdown (fun () ->
check ();
Ivar.read proceed_with_shutdown);
fun d ->
match shutting_down () with
| Yes _ ->
()
| No ->
incr num_waiting;
upon d (fun () ->
decr num_waiting;
match shutting_down () with
| No -> ()
| Yes _ -> check ())
;;
| null | https://raw.githubusercontent.com/janestreet/async_unix/e38241dc5c2ffad9842b0100ffbc44fb7941bfe2/src/shutdown.ml | ocaml | Unit tests are in ../../lib_test/shutdown_tests.ml
Be careful to ensure [shutdown] doesn't raise just because
stderr is closed |
open Core
open Import
module Status_compatibility = struct
type t =
| Incompatible
| Compatible_and_replace
| Compatible_and_do_not_replace
end
module Status = struct
type t =
| Exit of int
| Signal of Signal.t
[@@deriving equal, sexp_of]
let compatibility t ~prior : Status_compatibility.t =
if equal t prior
then Compatible_and_do_not_replace
else (
match prior, t with
| _, Exit 0 -> Compatible_and_do_not_replace
| Exit 0, _ -> Compatible_and_replace
| _, _ -> Incompatible)
;;
end
module Maybe_status = struct
type t =
| No
| Yes of Status.t
[@@deriving sexp_of]
end
let debug = Debug.shutdown
let todo = ref []
let at_shutdown f =
let backtrace = Backtrace.get () in
if debug then Debug.log "at_shutdown" backtrace [%sexp_of: Backtrace.t];
todo := (backtrace, f) :: !todo
;;
let shutting_down_ref = ref Maybe_status.No
let default_force_ref = ref (fun () -> Clock.after (sec 10.))
let default_force () = !default_force_ref
let set_default_force force = default_force_ref := force
let shutting_down () = !shutting_down_ref
let is_shutting_down () =
match shutting_down () with
| No -> false
| Yes _ -> true
;;
let ignore_exn f =
try f () with
| _ -> ()
;;
let exit_reliably status =
match (status : Status.t) with
| Exit code ->
(match (exit code : Nothing.t) with
| exception exn ->
ignore_exn (fun () -> Core.Debug.eprints "Caml.exit raised" exn [%sexp_of: Exn.t]);
Core_unix.exit_immediately (if code = 0 then 1 else code)
| _ -> .)
| Signal signal ->
Signal.Expert.set signal `Default;
Signal_unix.send_exn signal (`Pid (Core_unix.getpid ()));
ignore_exn (fun () ->
Core.Debug.eprints
"Signal_unix.send_exn failed to kill process"
signal
[%sexp_of: Signal.t]);
Core_unix.exit_immediately 1
;;
let shutdown_with_status ?force status =
if debug then ignore_exn (fun () -> Debug.log "shutdown" status [%sexp_of: Status.t]);
match !shutting_down_ref with
| Yes prior ->
(match Status.compatibility status ~prior with
| Incompatible ->
raise_s
[%message
"shutdown with inconsistent status" (status : Status.t) (prior : Status.t)]
| Compatible_and_replace -> shutting_down_ref := Yes status
| Compatible_and_do_not_replace -> ())
| No ->
shutting_down_ref := Yes status;
upon
(Deferred.all
(List.map !todo ~f:(fun (backtrace, f) ->
let%map result =
Monitor.try_with_or_error
~rest:`Log
f
in
(match result with
| Ok () -> ()
| Error error ->
ignore_exn (fun () ->
Core.Debug.eprints
"at_shutdown function raised"
(error, backtrace)
[%sexp_of: Error.t * Backtrace.t]));
if debug
then
ignore_exn (fun () ->
Debug.log
"one at_shutdown function finished"
backtrace
[%sexp_of: Backtrace.t]);
result)))
(fun results ->
match shutting_down () with
| No -> assert false
| Yes status ->
let status =
match Or_error.combine_errors_unit results with
| Ok () -> status
| Error _ ->
(match status with
| Exit 0 -> Exit 1
| _ -> status)
in
exit_reliably status);
let force =
match force with
| None -> !default_force_ref ()
| Some f -> f
in
upon force (fun () ->
ignore_exn (fun () -> Debug.log_string "Shutdown forced.");
exit_reliably (Exit 1))
;;
let shutdown ?force exit_code = shutdown_with_status ?force (Exit exit_code)
let shutdown_with_signal_exn ?force signal =
match Signal.default_sys_behavior signal with
| `Terminate | `Dump_core -> shutdown_with_status ?force (Signal signal)
| (`Stop | `Continue | `Ignore) as default_sys_behavior ->
raise_s
[%message
"Shutdown.shutdown_with_signal_exn: not a terminating signal"
(signal : Signal.t)
(default_sys_behavior : [ `Stop | `Continue | `Ignore ])]
;;
let shutdown_on_unhandled_exn () =
Monitor.detach_and_iter_errors Monitor.main ~f:(fun exn ->
ignore_exn (fun () ->
Debug.log "shutting down due to unhandled exception" exn [%sexp_of: exn]);
try shutdown 1 with
| _ ->
The above [ shutdown ] call raises if we have already called shutdown with a
different non - zero status .
different non-zero status. *)
())
;;
let exit ?force status =
shutdown ?force status;
Deferred.never ()
;;
let don't_finish_before =
let proceed_with_shutdown = Ivar.create () in
let num_waiting = ref 0 in
let check () = if !num_waiting = 0 then Ivar.fill proceed_with_shutdown () in
at_shutdown (fun () ->
check ();
Ivar.read proceed_with_shutdown);
fun d ->
match shutting_down () with
| Yes _ ->
()
| No ->
incr num_waiting;
upon d (fun () ->
decr num_waiting;
match shutting_down () with
| No -> ()
| Yes _ -> check ())
;;
|
6b3e54f4e40d651be231d5efde9c18b9b97fef6bb13d0ace842fd1ee5f98bb92 | scheme/scsh | scsh-version.scm | (define scsh-major-version 0)
(define scsh-minor-version 7)
(define scsh-version-string "0.7")
(define scsh-release-name "Revival")
| null | https://raw.githubusercontent.com/scheme/scsh/114432435e4eadd54334df6b37fcae505079b49f/scheme/scsh-version.scm | scheme | (define scsh-major-version 0)
(define scsh-minor-version 7)
(define scsh-version-string "0.7")
(define scsh-release-name "Revival")
|
|
3ac9f5affc9fe51787248fa6ec66df535d0526a778edde7c86fcbd20dbfeda5a | pavlobaron/ErlangOTPBookSamples | exeval_logger.erl | -module(exeval_logger).
-behaviour(gen_event).
-export([init/1, handle_event/2, terminate/2]).
init(_Args) ->
{ok, F} = file:open(env_lib:get_env(logger, file), write),
{ok, F}.
handle_event(start_eval, F) ->
io:format(F, "---- Starting ----~n", []),
{ok, F};
handle_event({add, String, LVal}, F) ->
io:format(F, "---- Added: ~p, new l-value: ~p ----~n", [String, LVal]),
{ok, F};
handle_event({eval, LVal, Res}, F) ->
io:format(F, "---- Evaluated: ~p, result: ~p ----~n", [LVal, Res]),
{ok, F}.
terminate(_Args, F) ->
file:close(F).
| null | https://raw.githubusercontent.com/pavlobaron/ErlangOTPBookSamples/50094964ad814932760174914490e49618b2b8c2/otp/src/exeval_logger.erl | erlang | -module(exeval_logger).
-behaviour(gen_event).
-export([init/1, handle_event/2, terminate/2]).
init(_Args) ->
{ok, F} = file:open(env_lib:get_env(logger, file), write),
{ok, F}.
handle_event(start_eval, F) ->
io:format(F, "---- Starting ----~n", []),
{ok, F};
handle_event({add, String, LVal}, F) ->
io:format(F, "---- Added: ~p, new l-value: ~p ----~n", [String, LVal]),
{ok, F};
handle_event({eval, LVal, Res}, F) ->
io:format(F, "---- Evaluated: ~p, result: ~p ----~n", [LVal, Res]),
{ok, F}.
terminate(_Args, F) ->
file:close(F).
|
|
5c0a10d8ba4297e1b8f4eb3957d1fba89bc090a251e8833b92992dca2e6a46b5 | mfoemmel/erlang-otp | wx_test_lib.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2008 - 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%%-------------------------------------------------------------------
%%% File : wx_test_lib.erl
Author : < >
Description : Library for testing wxerlang .
%%%
Created : 30 Oct 2008 by < >
%%%-------------------------------------------------------------------
-module(wx_test_lib).
-compile(export_all).
-include("wx_test_lib.hrl").
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
init_per_suite(Config) ->
try
case os:type() of
{unix,darwin} ->
exit("Can not test on MacOSX");
{unix, _} ->
io:format("DISPLAY ~s~n", [os:getenv("DISPLAY")]),
case proplists:get_value(xserver, Config, none) of
none -> ignore;
Server ->
os:putenv("DISPLAY", Server)
end;
_ -> ignore
end,
wx:new(),
wx:destroy(),
Config
catch
_:undef ->
{skipped, "No wx compiled for this platform"};
_:Reason ->
{skipped, lists:flatten(io_lib:format("Start wx failed: ~p", [Reason]))}
end.
end_per_suite(_Config) ->
ok.
init_per_testcase(_Func, Config) ->
global:register_name(wx_global_logger, group_leader()),
Config.
end_per_testcase(_Func, Config) ->
global:unregister_name(wx_global_logger),
Config.
Backwards compatible with test_server
tc_info(suite) -> [];
tc_info(doc) -> "".
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Use ? log(Format , ) as wrapper
log(Format, Args, LongFile, Line) ->
File = filename:basename(LongFile),
Format2 = lists:concat([File, "(", Line, ")", ": ", Format]),
log(Format2, Args).
log(Format, Args) ->
case global:whereis_name(wx_global_logger) of
undefined ->
io:format(user, Format, Args);
Pid ->
io:format(Pid, Format, Args)
end.
verbose(Format, Args, File, Line) ->
Arg = wx_test_verbose,
case get(Arg) of
false ->
ok;
true ->
log(Format, Args, File, Line);
undefined ->
case init:get_argument(Arg) of
{ok, List} when is_list(List) ->
case lists:last(List) of
["true"] ->
put(Arg, true),
log(Format, Args, File, Line);
_ ->
put(Arg, false),
ok
end;
_ ->
put(Arg, false),
ok
end
end.
error(Format, Args, File, Line) ->
global:send(wx_global_logger, {failed, File, Line}),
Fail = {filename:basename(File),Line,Args},
case global:whereis_name(wx_test_case_sup) of
undefined -> ignore;
Pid -> Pid ! Fail
%% global:send(wx_test_case_sup, Fail),
end,
log("<ERROR>~n" ++ Format, Args, File, Line).
pick_msg() ->
receive
Message -> Message
after 4000 -> timeout
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Utility functions
user_available(Config) ->
false /= proplists:get_value(user, Config, false).
wx_destroy(Frame, Config) ->
case proplists:get_value(user, Config, false) of
false ->
timer:sleep(100),
?m(ok, wxFrame:destroy(Frame)),
?m(ok, wx:destroy());
true ->
timer:sleep(500),
?m(ok, wxFrame:destroy(Frame)),
?m(ok, wx:destroy());
step -> %% Wait for user to close window
?m(ok, wxEvtHandler:connect(Frame, close_window, [{skip,true}])),
wait_for_close()
end.
wait_for_close() ->
receive
#wx{event=#wxClose{}} ->
?log("Got close~n",[]),
?m(ok, wx:destroy());
#wx{obj=Obj, event=Event} ->
try
Name = wxTopLevelWindow:getTitle(Obj),
?log("~p Event: ~p~n", [Name, Event])
catch _:_ ->
?log("Event: ~p~n", [Event])
end,
wait_for_close();
Other ->
?log("Unexpected: ~p~n", [Other]),
wait_for_close()
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% A small test server, which can be run standalone in a shell
run_test(Test = {_,_},Config) ->
run_test([Test],Config);
run_test([{Module, TC}|Rest], Config) ->
[run_test(Module, TC, Config) |
run_test(Rest, Config)];
run_test([], _Config) -> [].
run_test(Module, all, Config) ->
All = [{Module, Test} || Test <- Module:all()],
run_test(All, Config);
run_test(Module, TestCase, Config) ->
log("Eval test case: ~w~n", [{Module, TestCase}]),
Sec = timer:seconds(1) * 1000,
{T, Res} =
timer:tc(?MODULE, eval_test_case, [Module, TestCase, Config]),
log("Tested ~w in ~w sec~n", [TestCase, T div Sec]),
{T div Sec, Res}.
eval_test_case(Mod, Fun, Config) ->
flush(),
global:register_name(wx_test_case_sup, self()),
Flag = process_flag(trap_exit, true),
Pid = spawn_link(?MODULE, test_case_evaluator, [Mod, Fun, [Config]]),
R = wait_for_evaluator(Pid, Mod, Fun, Config),
global:unregister_name(wx_test_case_sup),
process_flag(trap_exit, Flag),
R.
test_case_evaluator(Mod, Fun, [Config]) ->
NewConfig = Mod:init_per_testcase(Fun, Config),
R = apply(Mod, Fun, [NewConfig]),
Mod:fin_per_testcase(Fun, NewConfig),
exit({test_case_ok, R}).
wait_for_evaluator(Pid, Mod, Fun, Config) ->
receive
{'EXIT', Pid, {test_case_ok, _PidRes}} ->
Errors = flush(),
Res =
case Errors of
[] -> ok;
Errors -> failed
end,
{Res, {Mod, Fun}, Errors};
{'EXIT', Pid, {skipped, Reason}} ->
log("<WARNING> Test case ~w skipped, because ~p~n",
[{Mod, Fun}, Reason]),
Mod:fin_per_testcase(Fun, Config),
{skip, {Mod, Fun}, Reason};
{'EXIT', Pid, Reason} ->
log("<ERROR> Eval process ~w exited, because ~p~n",
[{Mod, Fun}, Reason]),
Mod:fin_per_testcase(Fun, Config),
{crash, {Mod, Fun}, Reason}
end.
flush() ->
receive Msg -> [Msg | flush()]
after 0 -> []
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/wx/test/wx_test_lib.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
-------------------------------------------------------------------
File : wx_test_lib.erl
-------------------------------------------------------------------
global:send(wx_test_case_sup, Fail),
Utility functions
Wait for user to close window
A small test server, which can be run standalone in a shell
| Copyright Ericsson AB 2008 - 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
Author : < >
Description : Library for testing wxerlang .
Created : 30 Oct 2008 by < >
-module(wx_test_lib).
-compile(export_all).
-include("wx_test_lib.hrl").
init_per_suite(Config) ->
try
case os:type() of
{unix,darwin} ->
exit("Can not test on MacOSX");
{unix, _} ->
io:format("DISPLAY ~s~n", [os:getenv("DISPLAY")]),
case proplists:get_value(xserver, Config, none) of
none -> ignore;
Server ->
os:putenv("DISPLAY", Server)
end;
_ -> ignore
end,
wx:new(),
wx:destroy(),
Config
catch
_:undef ->
{skipped, "No wx compiled for this platform"};
_:Reason ->
{skipped, lists:flatten(io_lib:format("Start wx failed: ~p", [Reason]))}
end.
end_per_suite(_Config) ->
ok.
init_per_testcase(_Func, Config) ->
global:register_name(wx_global_logger, group_leader()),
Config.
end_per_testcase(_Func, Config) ->
global:unregister_name(wx_global_logger),
Config.
Backwards compatible with test_server
tc_info(suite) -> [];
tc_info(doc) -> "".
Use ? log(Format , ) as wrapper
log(Format, Args, LongFile, Line) ->
File = filename:basename(LongFile),
Format2 = lists:concat([File, "(", Line, ")", ": ", Format]),
log(Format2, Args).
log(Format, Args) ->
case global:whereis_name(wx_global_logger) of
undefined ->
io:format(user, Format, Args);
Pid ->
io:format(Pid, Format, Args)
end.
verbose(Format, Args, File, Line) ->
Arg = wx_test_verbose,
case get(Arg) of
false ->
ok;
true ->
log(Format, Args, File, Line);
undefined ->
case init:get_argument(Arg) of
{ok, List} when is_list(List) ->
case lists:last(List) of
["true"] ->
put(Arg, true),
log(Format, Args, File, Line);
_ ->
put(Arg, false),
ok
end;
_ ->
put(Arg, false),
ok
end
end.
error(Format, Args, File, Line) ->
global:send(wx_global_logger, {failed, File, Line}),
Fail = {filename:basename(File),Line,Args},
case global:whereis_name(wx_test_case_sup) of
undefined -> ignore;
Pid -> Pid ! Fail
end,
log("<ERROR>~n" ++ Format, Args, File, Line).
pick_msg() ->
receive
Message -> Message
after 4000 -> timeout
end.
user_available(Config) ->
false /= proplists:get_value(user, Config, false).
wx_destroy(Frame, Config) ->
case proplists:get_value(user, Config, false) of
false ->
timer:sleep(100),
?m(ok, wxFrame:destroy(Frame)),
?m(ok, wx:destroy());
true ->
timer:sleep(500),
?m(ok, wxFrame:destroy(Frame)),
?m(ok, wx:destroy());
?m(ok, wxEvtHandler:connect(Frame, close_window, [{skip,true}])),
wait_for_close()
end.
wait_for_close() ->
receive
#wx{event=#wxClose{}} ->
?log("Got close~n",[]),
?m(ok, wx:destroy());
#wx{obj=Obj, event=Event} ->
try
Name = wxTopLevelWindow:getTitle(Obj),
?log("~p Event: ~p~n", [Name, Event])
catch _:_ ->
?log("Event: ~p~n", [Event])
end,
wait_for_close();
Other ->
?log("Unexpected: ~p~n", [Other]),
wait_for_close()
end.
run_test(Test = {_,_},Config) ->
run_test([Test],Config);
run_test([{Module, TC}|Rest], Config) ->
[run_test(Module, TC, Config) |
run_test(Rest, Config)];
run_test([], _Config) -> [].
run_test(Module, all, Config) ->
All = [{Module, Test} || Test <- Module:all()],
run_test(All, Config);
run_test(Module, TestCase, Config) ->
log("Eval test case: ~w~n", [{Module, TestCase}]),
Sec = timer:seconds(1) * 1000,
{T, Res} =
timer:tc(?MODULE, eval_test_case, [Module, TestCase, Config]),
log("Tested ~w in ~w sec~n", [TestCase, T div Sec]),
{T div Sec, Res}.
eval_test_case(Mod, Fun, Config) ->
flush(),
global:register_name(wx_test_case_sup, self()),
Flag = process_flag(trap_exit, true),
Pid = spawn_link(?MODULE, test_case_evaluator, [Mod, Fun, [Config]]),
R = wait_for_evaluator(Pid, Mod, Fun, Config),
global:unregister_name(wx_test_case_sup),
process_flag(trap_exit, Flag),
R.
test_case_evaluator(Mod, Fun, [Config]) ->
NewConfig = Mod:init_per_testcase(Fun, Config),
R = apply(Mod, Fun, [NewConfig]),
Mod:fin_per_testcase(Fun, NewConfig),
exit({test_case_ok, R}).
wait_for_evaluator(Pid, Mod, Fun, Config) ->
receive
{'EXIT', Pid, {test_case_ok, _PidRes}} ->
Errors = flush(),
Res =
case Errors of
[] -> ok;
Errors -> failed
end,
{Res, {Mod, Fun}, Errors};
{'EXIT', Pid, {skipped, Reason}} ->
log("<WARNING> Test case ~w skipped, because ~p~n",
[{Mod, Fun}, Reason]),
Mod:fin_per_testcase(Fun, Config),
{skip, {Mod, Fun}, Reason};
{'EXIT', Pid, Reason} ->
log("<ERROR> Eval process ~w exited, because ~p~n",
[{Mod, Fun}, Reason]),
Mod:fin_per_testcase(Fun, Config),
{crash, {Mod, Fun}, Reason}
end.
flush() ->
receive Msg -> [Msg | flush()]
after 0 -> []
end.
|
d98b6100d1c3e9dc5681eb0f796475e116ec4c527aaab54b7381b7a3a7255768 | nallen05/djula | load.lisp | ; a test in this test group is run by READ'ing the file "in.sexp", giving the resultant
sexp to COMPILE - TOKENS in a dynamic environment similar to COMPILE - TEMPLATE [ ie ,
; *SEEN-VARIABLES* is bound, etc] then simulating the rest of the COMPILE-TEMPLATE /
; render-template process and comparing the results with "out.txt"
;
if the resultant string is STRING= to the contents of " out.txt " then the test has
; passed
;
; note: trailing and leading whitespace are trimmed before comparison
;
; note: if there is a file "variable-plist.sexp" in the folder then that file is READ
; and the result bound to the variable *VARIABLE-PLIST*
;
; note: since it's hard to really set up the whole COMPILE-TEMPLATE environment, this
; test group is best used for simple tests, and complex tests that need the
; whole dynamic environment should be in the "compile-templates" test group
(in-package :djula-test)
(defun .test-compile-tokens (tokens variable-plist)
(let ((djula::*template-arguments* variable-plist)
djula::*known-translation-tables*
djula::*block-alist*)
(djula::.funcall-and-concatenate (mapcar 'djula::compile-token tokens))))
(defun !run-compile-tokens-test (test-folder)
(let ((in.sexp (read-from-string (cl-ffc:slurp-utf-8-file (merge-pathnames "in.sexp" test-folder))))
(out.txt (cl-ffc:slurp-utf-8-file (merge-pathnames "out.txt" test-folder)))
(maybe-plist (if #1=(cl-fad:file-exists-p (merge-pathnames "args.sexp" test-folder))
(read-from-string (cl-ffc:slurp-utf-8-file #1#)))))
(ptester:test out.txt
(.test-compile-tokens in.sexp maybe-plist)
:test '.normalized-string=))) | null | https://raw.githubusercontent.com/nallen05/djula/331e2d5b0c9967b636e4df22e847ac98f16f6ba9/test/2-compile-tokens-group/load.lisp | lisp | a test in this test group is run by READ'ing the file "in.sexp", giving the resultant
*SEEN-VARIABLES* is bound, etc] then simulating the rest of the COMPILE-TEMPLATE /
render-template process and comparing the results with "out.txt"
passed
note: trailing and leading whitespace are trimmed before comparison
note: if there is a file "variable-plist.sexp" in the folder then that file is READ
and the result bound to the variable *VARIABLE-PLIST*
note: since it's hard to really set up the whole COMPILE-TEMPLATE environment, this
test group is best used for simple tests, and complex tests that need the
whole dynamic environment should be in the "compile-templates" test group | sexp to COMPILE - TOKENS in a dynamic environment similar to COMPILE - TEMPLATE [ ie ,
if the resultant string is STRING= to the contents of " out.txt " then the test has
(in-package :djula-test)
(defun .test-compile-tokens (tokens variable-plist)
(let ((djula::*template-arguments* variable-plist)
djula::*known-translation-tables*
djula::*block-alist*)
(djula::.funcall-and-concatenate (mapcar 'djula::compile-token tokens))))
(defun !run-compile-tokens-test (test-folder)
(let ((in.sexp (read-from-string (cl-ffc:slurp-utf-8-file (merge-pathnames "in.sexp" test-folder))))
(out.txt (cl-ffc:slurp-utf-8-file (merge-pathnames "out.txt" test-folder)))
(maybe-plist (if #1=(cl-fad:file-exists-p (merge-pathnames "args.sexp" test-folder))
(read-from-string (cl-ffc:slurp-utf-8-file #1#)))))
(ptester:test out.txt
(.test-compile-tokens in.sexp maybe-plist)
:test '.normalized-string=))) |
20846a06d148a3d7302100f6021baa56d04b12a5a8341b9543caa10f4eb56339 | simingwang/emqx-plugin-kafkav5 | ssl_pem_cache.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 20016 - 2021 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%----------------------------------------------------------------------
%% Purpose: Manages ssl sessions and trusted certifacates
%%----------------------------------------------------------------------
-module(ssl_pem_cache).
-behaviour(gen_server).
Internal application API
-export([start_link/1,
start_link_dist/1,
name/1,
insert/2,
clear/0]).
% Spawn export
-export([init_pem_cache_validator/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-include("ssl_handshake.hrl").
-include("ssl_internal.hrl").
-include_lib("kernel/include/file.hrl").
-record(state, {
pem_cache,
last_pem_check :: integer(),
clear :: integer()
}).
-define(CLEAR_PEM_CACHE, 120000).
-define(DEFAULT_MAX_SESSION_CACHE, 1000).
%%====================================================================
%% API
%%====================================================================
%%--------------------------------------------------------------------
-spec name(normal | dist) -> atom().
%%
%% Description: Returns the registered name of the ssl cache process
%% in the operation modes 'normal' and 'dist'.
%%--------------------------------------------------------------------
name(normal) ->
?MODULE;
name(dist) ->
list_to_atom(atom_to_list(?MODULE) ++ "_dist").
%%--------------------------------------------------------------------
-spec start_link(list()) -> {ok, pid()} | ignore | {error, term()}.
%%
%% Description: Starts the ssl pem cache handler
%%--------------------------------------------------------------------
start_link(_) ->
CacheName = name(normal),
gen_server:start_link({local, CacheName},
?MODULE, [CacheName], []).
%%--------------------------------------------------------------------
-spec start_link_dist(list()) -> {ok, pid()} | ignore | {error, term()}.
%%
%% Description: Starts a special instance of the ssl manager to
be used by the erlang distribution . Note disables soft upgrade !
%%--------------------------------------------------------------------
start_link_dist(_) ->
DistCacheName = name(dist),
gen_server:start_link({local, DistCacheName},
?MODULE, [DistCacheName], []).
%%--------------------------------------------------------------------
-spec insert(binary(), term()) -> ok | {error, reason()}.
%%
%% Description: Cache a pem file and return its content.
%%--------------------------------------------------------------------
insert(File, Content) ->
case bypass_cache() of
true ->
ok;
false ->
cast({cache_pem, File, Content}),
ok
end.
%%--------------------------------------------------------------------
-spec clear() -> ok.
%%
%% Description: Clear the PEM cache
%%--------------------------------------------------------------------
clear() ->
%% Not supported for distribution at the moement, should it be?
put(ssl_pem_cache, name(normal)),
call(unconditionally_clear_pem_cache).
-spec invalidate_pem(File::binary()) -> ok.
invalidate_pem(File) ->
cast({invalidate_pem, File}).
%%====================================================================
%% gen_server callbacks
%%====================================================================
%%--------------------------------------------------------------------
-spec init(list()) -> {ok, #state{}}.
%% Possible return values not used now.
%% | {ok, #state{}, timeout()} | ignore | {stop, term()}.
%%
%% Description: Initiates the server
%%--------------------------------------------------------------------
init([Name]) ->
put(ssl_pem_cache, Name),
process_flag(trap_exit, true),
PemCache = ssl_pkix_db:create_pem_cache(Name),
Interval = pem_check_interval(),
erlang:send_after(Interval, self(), clear_pem_cache),
erlang:system_time(second),
{ok, #state{pem_cache = PemCache,
last_pem_check = erlang:convert_time_unit(os:system_time(), native, second),
clear = Interval
}}.
%%--------------------------------------------------------------------
-spec handle_call(msg(), from(), #state{}) -> {reply, reply(), #state{}}.
%% Possible return values not used now.
%% {reply, reply(), #state{}, timeout()} |
{ noreply , # state { } } |
{ noreply , # state { } , timeout ( ) } |
%% {stop, reason(), reply(), #state{}} |
%% {stop, reason(), #state{}}.
%%
%% Description: Handling call messages
%%--------------------------------------------------------------------
handle_call({unconditionally_clear_pem_cache, _},_,
#state{pem_cache = PemCache} = State) ->
ssl_pkix_db:clear(PemCache),
Result = ssl_manager:refresh_trusted_db(ssl_manager_type()),
{reply, Result, State}.
%%--------------------------------------------------------------------
-spec handle_cast(msg(), #state{}) -> {noreply, #state{}}.
%% Possible return values not used now.
| { noreply , # state { } , timeout ( ) } |
%% {stop, reason(), #state{}}.
%%
%% Description: Handling cast messages
%%--------------------------------------------------------------------
handle_cast({cache_pem, File, Content}, #state{pem_cache = Db} = State) ->
ssl_pkix_db:insert(File, Content, Db),
{noreply, State};
handle_cast({invalidate_pem, File}, #state{pem_cache = Db} = State) ->
ssl_pkix_db:remove(File, Db),
ssl_manager:refresh_trusted_db(ssl_manager_type(), File),
{noreply, State}.
%%--------------------------------------------------------------------
-spec handle_info(msg(), #state{}) -> {noreply, #state{}}.
%% Possible return values not used now.
%% |{noreply, #state{}, timeout()} |
%% {stop, reason(), #state{}}.
%%
%% Description: Handling all non call/cast messages
%%-------------------------------------------------------------------
handle_info(clear_pem_cache, #state{pem_cache = PemCache,
clear = Interval,
last_pem_check = CheckPoint} = State) ->
NewCheckPoint = erlang:convert_time_unit(os:system_time(), native, second),
start_pem_cache_validator(PemCache, CheckPoint),
erlang:send_after(Interval, self(), clear_pem_cache),
{noreply, State#state{last_pem_check = NewCheckPoint}};
handle_info(_Info, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
-spec terminate(reason(), #state{}) -> ok.
%%
%% Description: This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any necessary
%% cleaning up. When it returns, the gen_server terminates with Reason.
%% The return value is ignored.
%%--------------------------------------------------------------------
terminate(_Reason, #state{}) ->
ok.
%%--------------------------------------------------------------------
-spec code_change(term(), #state{}, list()) -> {ok, #state{}}.
%%
%% Description: Convert process state when code is changed
%%--------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%--------------------------------------------------------------------
Internal functions
%%--------------------------------------------------------------------
call(Msg) ->
gen_server:call(get(ssl_pem_cache), {Msg, self()}, infinity).
cast(Msg) ->
gen_server:cast(get(ssl_pem_cache), Msg).
start_pem_cache_validator(PemCache, CheckPoint) ->
spawn_link(?MODULE, init_pem_cache_validator,
[[get(ssl_pem_cache), PemCache, CheckPoint]]).
init_pem_cache_validator([CacheName, PemCache, CheckPoint]) ->
put(ssl_pem_cache, CacheName),
ssl_pkix_db:foldl(fun pem_cache_validate/2,
CheckPoint, PemCache).
pem_cache_validate({File, _}, CheckPoint) ->
case file:read_file_info(File, [{time, posix}]) of
{ok, #file_info{mtime = Time}} when Time < CheckPoint ->
ok;
_ ->
invalidate_pem(File)
end,
CheckPoint.
pem_check_interval() ->
case application:get_env(ssl, ssl_pem_cache_clean) of
{ok, Interval} when is_integer(Interval) ->
Interval;
_ ->
?CLEAR_PEM_CACHE
end.
bypass_cache() ->
case application:get_env(ssl, bypass_pem_cache) of
{ok, Bool} when is_boolean(Bool) ->
Bool;
_ ->
false
end.
ssl_manager_type() ->
case get(ssl_pem_cache) of
?MODULE ->
normal;
_ ->
dist
end.
| null | https://raw.githubusercontent.com/simingwang/emqx-plugin-kafkav5/bbf919e56dbc8fd2d4c1c541084532f844a11cbc/_build/default/rel/emqx_plugin_kafka/lib/ssl-10.7/src/ssl_pem_cache.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
----------------------------------------------------------------------
Purpose: Manages ssl sessions and trusted certifacates
----------------------------------------------------------------------
Spawn export
gen_server callbacks
====================================================================
API
====================================================================
--------------------------------------------------------------------
Description: Returns the registered name of the ssl cache process
in the operation modes 'normal' and 'dist'.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Starts the ssl pem cache handler
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Starts a special instance of the ssl manager to
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Cache a pem file and return its content.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Clear the PEM cache
--------------------------------------------------------------------
Not supported for distribution at the moement, should it be?
====================================================================
gen_server callbacks
====================================================================
--------------------------------------------------------------------
Possible return values not used now.
| {ok, #state{}, timeout()} | ignore | {stop, term()}.
Description: Initiates the server
--------------------------------------------------------------------
--------------------------------------------------------------------
Possible return values not used now.
{reply, reply(), #state{}, timeout()} |
{stop, reason(), reply(), #state{}} |
{stop, reason(), #state{}}.
Description: Handling call messages
--------------------------------------------------------------------
--------------------------------------------------------------------
Possible return values not used now.
{stop, reason(), #state{}}.
Description: Handling cast messages
--------------------------------------------------------------------
--------------------------------------------------------------------
Possible return values not used now.
|{noreply, #state{}, timeout()} |
{stop, reason(), #state{}}.
Description: Handling all non call/cast messages
-------------------------------------------------------------------
--------------------------------------------------------------------
Description: This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any necessary
cleaning up. When it returns, the gen_server terminates with Reason.
The return value is ignored.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Convert process state when code is changed
--------------------------------------------------------------------
--------------------------------------------------------------------
-------------------------------------------------------------------- | Copyright Ericsson AB 20016 - 2021 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(ssl_pem_cache).
-behaviour(gen_server).
Internal application API
-export([start_link/1,
start_link_dist/1,
name/1,
insert/2,
clear/0]).
-export([init_pem_cache_validator/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-include("ssl_handshake.hrl").
-include("ssl_internal.hrl").
-include_lib("kernel/include/file.hrl").
-record(state, {
pem_cache,
last_pem_check :: integer(),
clear :: integer()
}).
-define(CLEAR_PEM_CACHE, 120000).
-define(DEFAULT_MAX_SESSION_CACHE, 1000).
-spec name(normal | dist) -> atom().
name(normal) ->
?MODULE;
name(dist) ->
list_to_atom(atom_to_list(?MODULE) ++ "_dist").
-spec start_link(list()) -> {ok, pid()} | ignore | {error, term()}.
start_link(_) ->
CacheName = name(normal),
gen_server:start_link({local, CacheName},
?MODULE, [CacheName], []).
-spec start_link_dist(list()) -> {ok, pid()} | ignore | {error, term()}.
be used by the erlang distribution . Note disables soft upgrade !
start_link_dist(_) ->
DistCacheName = name(dist),
gen_server:start_link({local, DistCacheName},
?MODULE, [DistCacheName], []).
-spec insert(binary(), term()) -> ok | {error, reason()}.
insert(File, Content) ->
case bypass_cache() of
true ->
ok;
false ->
cast({cache_pem, File, Content}),
ok
end.
-spec clear() -> ok.
clear() ->
put(ssl_pem_cache, name(normal)),
call(unconditionally_clear_pem_cache).
-spec invalidate_pem(File::binary()) -> ok.
invalidate_pem(File) ->
cast({invalidate_pem, File}).
-spec init(list()) -> {ok, #state{}}.
init([Name]) ->
put(ssl_pem_cache, Name),
process_flag(trap_exit, true),
PemCache = ssl_pkix_db:create_pem_cache(Name),
Interval = pem_check_interval(),
erlang:send_after(Interval, self(), clear_pem_cache),
erlang:system_time(second),
{ok, #state{pem_cache = PemCache,
last_pem_check = erlang:convert_time_unit(os:system_time(), native, second),
clear = Interval
}}.
-spec handle_call(msg(), from(), #state{}) -> {reply, reply(), #state{}}.
{ noreply , # state { } } |
{ noreply , # state { } , timeout ( ) } |
handle_call({unconditionally_clear_pem_cache, _},_,
#state{pem_cache = PemCache} = State) ->
ssl_pkix_db:clear(PemCache),
Result = ssl_manager:refresh_trusted_db(ssl_manager_type()),
{reply, Result, State}.
-spec handle_cast(msg(), #state{}) -> {noreply, #state{}}.
| { noreply , # state { } , timeout ( ) } |
handle_cast({cache_pem, File, Content}, #state{pem_cache = Db} = State) ->
ssl_pkix_db:insert(File, Content, Db),
{noreply, State};
handle_cast({invalidate_pem, File}, #state{pem_cache = Db} = State) ->
ssl_pkix_db:remove(File, Db),
ssl_manager:refresh_trusted_db(ssl_manager_type(), File),
{noreply, State}.
-spec handle_info(msg(), #state{}) -> {noreply, #state{}}.
handle_info(clear_pem_cache, #state{pem_cache = PemCache,
clear = Interval,
last_pem_check = CheckPoint} = State) ->
NewCheckPoint = erlang:convert_time_unit(os:system_time(), native, second),
start_pem_cache_validator(PemCache, CheckPoint),
erlang:send_after(Interval, self(), clear_pem_cache),
{noreply, State#state{last_pem_check = NewCheckPoint}};
handle_info(_Info, State) ->
{noreply, State}.
-spec terminate(reason(), #state{}) -> ok.
terminate(_Reason, #state{}) ->
ok.
-spec code_change(term(), #state{}, list()) -> {ok, #state{}}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
call(Msg) ->
gen_server:call(get(ssl_pem_cache), {Msg, self()}, infinity).
cast(Msg) ->
gen_server:cast(get(ssl_pem_cache), Msg).
start_pem_cache_validator(PemCache, CheckPoint) ->
spawn_link(?MODULE, init_pem_cache_validator,
[[get(ssl_pem_cache), PemCache, CheckPoint]]).
init_pem_cache_validator([CacheName, PemCache, CheckPoint]) ->
put(ssl_pem_cache, CacheName),
ssl_pkix_db:foldl(fun pem_cache_validate/2,
CheckPoint, PemCache).
pem_cache_validate({File, _}, CheckPoint) ->
case file:read_file_info(File, [{time, posix}]) of
{ok, #file_info{mtime = Time}} when Time < CheckPoint ->
ok;
_ ->
invalidate_pem(File)
end,
CheckPoint.
pem_check_interval() ->
case application:get_env(ssl, ssl_pem_cache_clean) of
{ok, Interval} when is_integer(Interval) ->
Interval;
_ ->
?CLEAR_PEM_CACHE
end.
bypass_cache() ->
case application:get_env(ssl, bypass_pem_cache) of
{ok, Bool} when is_boolean(Bool) ->
Bool;
_ ->
false
end.
ssl_manager_type() ->
case get(ssl_pem_cache) of
?MODULE ->
normal;
_ ->
dist
end.
|
2686eed6b63e5540501399e6ea79fe905159f0ec74d55b0d3a6bf7096540e3f7 | UberPyro/prowl | prowl_test.ml | open Batteries
open Alcotest
open Lib
open Util
let tests = [
"fundamentals", [
"lit1", "4\n5\nhi";
"arith1", "7";
"arith2", "5";
];
"bindings", [
"as1", "11";
"let1", "6";
"let-func", "25";
"let-as", "2";
"let-sect", "4";
"compose", "3";
"sect1", "1";
"sect-full", "20";
"sect-left", "2";
"sect-right", "1";
];
"patterns", [
"cat", "9";
"left", "13";
"right", "0";
"pair", "4";
"capture-direct", "7";
"capture-indirect", "7";
"capture-fun", "2";
"nest-capture-pair", "2";
"nest-either-pair", "1";
"long-pair", "5";
"const-int", "1";
"const-int-reject", "rejected";
"const-str", "0";
"pair-func", "2";
"eith-func", "1";
"bin-func", "2";
"stack", "3\n4\n5";
"stack-rejected-low", "rejected";
"stack-rejected-high", "rejected";
];
"flow", [
"cat", "5";
"alt", "0";
"alt-handle", "1";
"alt-rejected", "rejected";
"alt-greedy", "0";
"case", "2";
"intersect", "2";
"n-times", "7\n23";
"opt", "3";
"opt-handle", "2";
"star", "6";
"star-greedy", "1";
"plus", "3";
"plus-reject", "rejected";
"alt-cut-accepted", "0";
"alt-cut", "rejected";
"alt-cut-handle", "1";
"case-rel", "8";
"case-rel2", "11";
"case-cut", "rejected";
"star-cut", "rejected";
"star-rel", "5";
"inversion", "6";
"inversion-rejected", "rejected";
"noncap-accept", "6";
"noncap-reject", "rejected";
"atomic-accept", "1";
"atomic-reject", "rejected";
];
"combinators", [
"simple", "44";
"compound", "3";
];
"modules", [
"access", "3";
"open", "7";
" recursion " , " 720 " ;
];
"data", [
"rev", "1\n3\n5\n7\n9";
"map", "2\n4\n6";
" filter " , " 2 " ;
"cat", "1\n2\n3\n4";
"cat-rev", "1\n2\n4\n3";
head is getting elems
]
]
open Interpret
module L = Eval.LazySearch
open Run(L)
let run_file fname =
File.open_in ("test/" ^ fname ^ ".prw")
|> Gen.parse |> fun ast -> try
Interpret.S.init
|> program (Build.endow "std" ast)
|> L.unsafe_cut
|> Interpret.S.s
|> List.rev_map V.show
|> String.concat "\n" with
| L.Rejected -> "rejected"
let check_file group file output () =
run_file (Printf.sprintf "%s/%s" group file)
|> check string "outputs match" output
let () =
begin
let+ group, lst = tests in
group, let+ fn, out = lst in
check_file group fn out
|> test_case fn `Quick
end
|> run "Prowl Integration Tests"
| null | https://raw.githubusercontent.com/UberPyro/prowl/6d38a3227892fc4b79ce89719bb666a504d7d148/prowl_test.ml | ocaml | open Batteries
open Alcotest
open Lib
open Util
let tests = [
"fundamentals", [
"lit1", "4\n5\nhi";
"arith1", "7";
"arith2", "5";
];
"bindings", [
"as1", "11";
"let1", "6";
"let-func", "25";
"let-as", "2";
"let-sect", "4";
"compose", "3";
"sect1", "1";
"sect-full", "20";
"sect-left", "2";
"sect-right", "1";
];
"patterns", [
"cat", "9";
"left", "13";
"right", "0";
"pair", "4";
"capture-direct", "7";
"capture-indirect", "7";
"capture-fun", "2";
"nest-capture-pair", "2";
"nest-either-pair", "1";
"long-pair", "5";
"const-int", "1";
"const-int-reject", "rejected";
"const-str", "0";
"pair-func", "2";
"eith-func", "1";
"bin-func", "2";
"stack", "3\n4\n5";
"stack-rejected-low", "rejected";
"stack-rejected-high", "rejected";
];
"flow", [
"cat", "5";
"alt", "0";
"alt-handle", "1";
"alt-rejected", "rejected";
"alt-greedy", "0";
"case", "2";
"intersect", "2";
"n-times", "7\n23";
"opt", "3";
"opt-handle", "2";
"star", "6";
"star-greedy", "1";
"plus", "3";
"plus-reject", "rejected";
"alt-cut-accepted", "0";
"alt-cut", "rejected";
"alt-cut-handle", "1";
"case-rel", "8";
"case-rel2", "11";
"case-cut", "rejected";
"star-cut", "rejected";
"star-rel", "5";
"inversion", "6";
"inversion-rejected", "rejected";
"noncap-accept", "6";
"noncap-reject", "rejected";
"atomic-accept", "1";
"atomic-reject", "rejected";
];
"combinators", [
"simple", "44";
"compound", "3";
];
"modules", [
"access", "3";
"open", "7";
" recursion " , " 720 " ;
];
"data", [
"rev", "1\n3\n5\n7\n9";
"map", "2\n4\n6";
" filter " , " 2 " ;
"cat", "1\n2\n3\n4";
"cat-rev", "1\n2\n4\n3";
head is getting elems
]
]
open Interpret
module L = Eval.LazySearch
open Run(L)
let run_file fname =
File.open_in ("test/" ^ fname ^ ".prw")
|> Gen.parse |> fun ast -> try
Interpret.S.init
|> program (Build.endow "std" ast)
|> L.unsafe_cut
|> Interpret.S.s
|> List.rev_map V.show
|> String.concat "\n" with
| L.Rejected -> "rejected"
let check_file group file output () =
run_file (Printf.sprintf "%s/%s" group file)
|> check string "outputs match" output
let () =
begin
let+ group, lst = tests in
group, let+ fn, out = lst in
check_file group fn out
|> test_case fn `Quick
end
|> run "Prowl Integration Tests"
|
|
ec26d8732c9eebbc9e286efb52ea6ee8ea9c11154ae6d10e5d548d50e5e41ec5 | merijn/broadcast-chan | Utils.hs | import Control.Concurrent
import Control.Monad (forM_)
import Data.List (foldl')
import GHC.Conc (getNumProcessors)
import BroadcastChan
main :: IO ()
main = do
getNumProcessors >>= setNumCapabilities
start <- newEmptyMVar
done <- newEmptyMVar
chan <- newBroadcastChan
vals <- getBChanContents chan
forkIO $ do
putMVar start ()
putMVar done $! foldl' (+) 0 vals
readMVar start
forM_ [1..10000 :: Int] $ writeBChan chan
closeBChan chan
takeMVar done >>= print
| null | https://raw.githubusercontent.com/merijn/broadcast-chan/1a884f9ffa6f9f5628f575aec1e06502c853ab9f/broadcast-chan/benchmarks/Utils.hs | haskell | import Control.Concurrent
import Control.Monad (forM_)
import Data.List (foldl')
import GHC.Conc (getNumProcessors)
import BroadcastChan
main :: IO ()
main = do
getNumProcessors >>= setNumCapabilities
start <- newEmptyMVar
done <- newEmptyMVar
chan <- newBroadcastChan
vals <- getBChanContents chan
forkIO $ do
putMVar start ()
putMVar done $! foldl' (+) 0 vals
readMVar start
forM_ [1..10000 :: Int] $ writeBChan chan
closeBChan chan
takeMVar done >>= print
|
|
058cbdb2140c22fb23e4cfbdd12ee61c5bfff1b420a402150ce0f24fc244a86b | depressed-pho/HsOpenSSL | X509.hs | {-# LANGUAGE CPP #-}
{-# LANGUAGE EmptyDataDecls #-}
# LANGUAGE ForeignFunctionInterface #
{-# OPTIONS_HADDOCK prune #-}
-- |An interface to X.509 certificate.
module OpenSSL.X509
( -- * Type
X509
, X509_
-- * Functions to manipulate certificate
, newX509
, wrapX509 -- private
, withX509Ptr -- private
, withX509Stack -- private
, unsafeX509ToPtr -- private
, touchX509 -- private
, compareX509
, signX509
, verifyX509
, printX509
-- * Accessors
, getVersion
, setVersion
, getSerialNumber
, setSerialNumber
, getIssuerName
, setIssuerName
, getSubjectName
, setSubjectName
, getNotBefore
, setNotBefore
, getNotAfter
, setNotAfter
, getPublicKey
, setPublicKey
, getSubjectEmail
)
where
import Control.Monad
import Data.Time.Clock
import Data.Maybe
import Foreign.ForeignPtr
#if MIN_VERSION_base(4,4,0)
import Foreign.ForeignPtr.Unsafe as Unsafe
#else
import Foreign.ForeignPtr as Unsafe
#endif
import Foreign.Ptr
import Foreign.C
import OpenSSL.ASN1
import OpenSSL.BIO
import OpenSSL.EVP.Digest
import OpenSSL.EVP.PKey
import OpenSSL.EVP.Verify
import OpenSSL.EVP.Internal
import OpenSSL.Utils
import OpenSSL.Stack
import OpenSSL.X509.Name
-- |@'X509'@ is an opaque object that represents X.509 certificate.
newtype X509 = X509 (ForeignPtr X509_)
data X509_
foreign import ccall unsafe "X509_new"
_new :: IO (Ptr X509_)
foreign import ccall unsafe "&X509_free"
_free :: FunPtr (Ptr X509_ -> IO ())
foreign import ccall unsafe "X509_print"
_print :: Ptr BIO_ -> Ptr X509_ -> IO CInt
foreign import ccall unsafe "X509_cmp"
_cmp :: Ptr X509_ -> Ptr X509_ -> IO CInt
foreign import ccall unsafe "HsOpenSSL_X509_get_version"
_get_version :: Ptr X509_ -> IO CLong
foreign import ccall unsafe "X509_set_version"
_set_version :: Ptr X509_ -> CLong -> IO CInt
foreign import ccall unsafe "X509_get_serialNumber"
_get_serialNumber :: Ptr X509_ -> IO (Ptr ASN1_INTEGER)
foreign import ccall unsafe "X509_set_serialNumber"
_set_serialNumber :: Ptr X509_ -> Ptr ASN1_INTEGER -> IO CInt
foreign import ccall unsafe "X509_get_issuer_name"
_get_issuer_name :: Ptr X509_ -> IO (Ptr X509_NAME)
foreign import ccall unsafe "X509_set_issuer_name"
_set_issuer_name :: Ptr X509_ -> Ptr X509_NAME -> IO CInt
foreign import ccall unsafe "X509_get_subject_name"
_get_subject_name :: Ptr X509_ -> IO (Ptr X509_NAME)
foreign import ccall unsafe "X509_set_subject_name"
_set_subject_name :: Ptr X509_ -> Ptr X509_NAME -> IO CInt
foreign import ccall unsafe "HsOpenSSL_X509_get_notBefore"
_get_notBefore :: Ptr X509_ -> IO (Ptr ASN1_TIME)
foreign import ccall unsafe "X509_set_notBefore"
_set_notBefore :: Ptr X509_ -> Ptr ASN1_TIME -> IO CInt
foreign import ccall unsafe "HsOpenSSL_X509_get_notAfter"
_get_notAfter :: Ptr X509_ -> IO (Ptr ASN1_TIME)
foreign import ccall unsafe "X509_set_notAfter"
_set_notAfter :: Ptr X509_ -> Ptr ASN1_TIME -> IO CInt
foreign import ccall unsafe "X509_get_pubkey"
_get_pubkey :: Ptr X509_ -> IO (Ptr EVP_PKEY)
foreign import ccall unsafe "X509_set_pubkey"
_set_pubkey :: Ptr X509_ -> Ptr EVP_PKEY -> IO CInt
foreign import ccall unsafe "X509_get1_email"
_get1_email :: Ptr X509_ -> IO (Ptr STACK)
foreign import ccall unsafe "X509_email_free"
_email_free :: Ptr STACK -> IO ()
foreign import ccall unsafe "X509_sign"
_sign :: Ptr X509_ -> Ptr EVP_PKEY -> Ptr EVP_MD -> IO CInt
foreign import ccall unsafe "X509_verify"
_verify :: Ptr X509_ -> Ptr EVP_PKEY -> IO CInt
-- |@'newX509'@ creates an empty certificate. You must set the
following properties to and sign it ( see ) to actually
-- use the certificate.
--
-- [/Version/] See 'setVersion'.
--
-- [/Serial number/] See 'setSerialNumber'.
--
-- [/Issuer name/] See 'setIssuerName'.
--
-- [/Subject name/] See 'setSubjectName'.
--
-- [/Validity/] See 'setNotBefore' and 'setNotAfter'.
--
-- [/Public Key/] See 'setPublicKey'.
--
newX509 :: IO X509
newX509 = _new >>= failIfNull >>= wrapX509
wrapX509 :: Ptr X509_ -> IO X509
wrapX509 = fmap X509 . newForeignPtr _free
withX509Ptr :: X509 -> (Ptr X509_ -> IO a) -> IO a
withX509Ptr (X509 x509) = withForeignPtr x509
withX509Stack :: [X509] -> (Ptr STACK -> IO a) -> IO a
withX509Stack = withForeignStack unsafeX509ToPtr touchX509
unsafeX509ToPtr :: X509 -> Ptr X509_
unsafeX509ToPtr (X509 x509) = Unsafe.unsafeForeignPtrToPtr x509
touchX509 :: X509 -> IO ()
touchX509 (X509 x509) = touchForeignPtr x509
|@'compareX509 ' cert2@ compares two certificates .
compareX509 :: X509 -> X509 -> IO Ordering
compareX509 cert1 cert2
= withX509Ptr cert1 $ \ cert1Ptr ->
withX509Ptr cert2 $ \ cert2Ptr ->
fmap interpret (_cmp cert1Ptr cert2Ptr)
where
interpret :: CInt -> Ordering
interpret n
| n > 0 = GT
| n < 0 = LT
| otherwise = EQ
-- |@'signX509'@ signs a certificate with an issuer private key.
signX509 :: KeyPair key =>
X509 -- ^ The certificate to be signed.
-> key -- ^ The private key to sign with.
-> Maybe Digest -- ^ A hashing algorithm to use. If @Nothing@
-- the most suitable algorithm for the key
-- is automatically used.
-> IO ()
signX509 x509 key mDigest
= withX509Ptr x509 $ \ x509Ptr ->
withPKeyPtr' key $ \ pkeyPtr ->
do dig <- case mDigest of
Just md -> return md
Nothing -> pkeyDefaultMD key
withMDPtr dig $ \ digestPtr ->
_sign x509Ptr pkeyPtr digestPtr
>>= failIf_ (== 0)
return ()
-- |@'verifyX509'@ verifies a signature of certificate with an issuer
-- public key.
verifyX509 :: PublicKey key =>
X509 -- ^ The certificate to be verified.
-> key -- ^ The public key to verify with.
-> IO VerifyStatus
verifyX509 x509 key
= withX509Ptr x509 $ \ x509Ptr ->
withPKeyPtr' key $ \ pkeyPtr ->
_verify x509Ptr pkeyPtr
>>= interpret
where
interpret :: CInt -> IO VerifyStatus
interpret 1 = return VerifySuccess
interpret 0 = return VerifyFailure
interpret _ = raiseOpenSSLError
-- |@'printX509' cert@ translates a certificate into human-readable
-- format.
printX509 :: X509 -> IO String
printX509 x509
= do mem <- newMem
withX509Ptr x509 $ \ x509Ptr ->
withBioPtr mem $ \ memPtr ->
_print memPtr x509Ptr
>>= failIf_ (/= 1)
bioRead mem
-- |@'getVersion' cert@ returns the version number of certificate. It
seems the number is 0 - origin : version 2 means X.509 v3 .
getVersion :: X509 -> IO Int
getVersion x509
= withX509Ptr x509 $ \ x509Ptr ->
liftM fromIntegral $ _get_version x509Ptr
-- |@'setVersion' cert ver@ updates the version number of certificate.
setVersion :: X509 -> Int -> IO ()
setVersion x509 ver
= withX509Ptr x509 $ \ x509Ptr ->
_set_version x509Ptr (fromIntegral ver)
>>= failIf (/= 1)
>> return ()
-- |@'getSerialNumber' cert@ returns the serial number of certificate.
getSerialNumber :: X509 -> IO Integer
getSerialNumber x509
= withX509Ptr x509 $ \ x509Ptr ->
_get_serialNumber x509Ptr
>>= peekASN1Integer
-- |@'setSerialNumber' cert num@ updates the serial number of
-- certificate.
setSerialNumber :: X509 -> Integer -> IO ()
setSerialNumber x509 serial
= withX509Ptr x509 $ \ x509Ptr ->
withASN1Integer serial $ \ serialPtr ->
_set_serialNumber x509Ptr serialPtr
>>= failIf (/= 1)
>> return ()
|@'getIssuerName'@ returns the issuer name of certificate .
getIssuerName :: X509 -- ^ The certificate to examine.
^ if you want the keys of each parts
-- to be of long form (e.g. \"commonName\"),
-- or @False@ if you don't (e.g. \"CN\").
-> IO [(String, String)] -- ^ Pairs of key and value,
for example \[(\"C\ " ,
-- \"JP\"), (\"ST\",
-- \"Some-State\"), ...\].
getIssuerName x509 wantLongName
= withX509Ptr x509 $ \ x509Ptr ->
do namePtr <- _get_issuer_name x509Ptr
peekX509Name namePtr wantLongName
-- |@'setIssuerName' cert name@ updates the issuer name of
-- certificate. Keys of each parts may be of either long form or short
-- form. See 'getIssuerName'.
setIssuerName :: X509 -> [(String, String)] -> IO ()
setIssuerName x509 issuer
= withX509Ptr x509 $ \ x509Ptr ->
withX509Name issuer $ \ namePtr ->
_set_issuer_name x509Ptr namePtr
>>= failIf (/= 1)
>> return ()
|@'getSubjectName ' cert wantLongName@ returns the subject name of
-- certificate. See 'getIssuerName'.
getSubjectName :: X509 -> Bool -> IO [(String, String)]
getSubjectName x509 wantLongName
= withX509Ptr x509 $ \ x509Ptr ->
do namePtr <- _get_subject_name x509Ptr
peekX509Name namePtr wantLongName
-- |@'setSubjectName' cert name@ updates the subject name of
-- certificate. See 'setIssuerName'.
setSubjectName :: X509 -> [(String, String)] -> IO ()
setSubjectName x509 subject
= withX509Ptr x509 $ \ x509Ptr ->
withX509Name subject $ \ namePtr ->
_set_subject_name x509Ptr namePtr
>>= failIf (/= 1)
>> return ()
-- |@'getNotBefore' cert@ returns the time when the certificate begins
-- to be valid.
getNotBefore :: X509 -> IO UTCTime
getNotBefore x509
= withX509Ptr x509 $ \ x509Ptr ->
_get_notBefore x509Ptr
>>= peekASN1Time
-- |@'setNotBefore' cert utc@ updates the time when the certificate
-- begins to be valid.
setNotBefore :: X509 -> UTCTime -> IO ()
setNotBefore x509 utc
= withX509Ptr x509 $ \ x509Ptr ->
withASN1Time utc $ \ time ->
_set_notBefore x509Ptr time
>>= failIf (/= 1)
>> return ()
-- |@'getNotAfter' cert@ returns the time when the certificate
-- expires.
getNotAfter :: X509 -> IO UTCTime
getNotAfter x509
= withX509Ptr x509 $ \ x509Ptr ->
_get_notAfter x509Ptr
>>= peekASN1Time
-- |@'setNotAfter' cert utc@ updates the time when the certificate
-- expires.
setNotAfter :: X509 -> UTCTime -> IO ()
setNotAfter x509 utc
= withX509Ptr x509 $ \ x509Ptr ->
withASN1Time utc $ \ time ->
_set_notAfter x509Ptr time
>>= failIf (/= 1)
>> return ()
-- |@'getPublicKey' cert@ returns the public key of the subject of
-- certificate.
getPublicKey :: X509 -> IO SomePublicKey
getPublicKey x509
= withX509Ptr x509 $ \ x509Ptr ->
fmap fromJust ( _get_pubkey x509Ptr
>>= failIfNull
>>= wrapPKeyPtr
>>= fromPKey
)
-- |@'setPublicKey' cert pubkey@ updates the public key of the subject
-- of certificate.
setPublicKey :: PublicKey key => X509 -> key -> IO ()
setPublicKey x509 key
= withX509Ptr x509 $ \ x509Ptr ->
withPKeyPtr' key $ \ pkeyPtr ->
_set_pubkey x509Ptr pkeyPtr
>>= failIf (/= 1)
>> return ()
-- |@'getSubjectEmail' cert@ returns every subject email addresses in
-- the certificate.
getSubjectEmail :: X509 -> IO [String]
getSubjectEmail x509
= withX509Ptr x509 $ \ x509Ptr ->
do st <- _get1_email x509Ptr
list <- mapStack peekCString st
_email_free st
return list
| null | https://raw.githubusercontent.com/depressed-pho/HsOpenSSL/9e6a2be8298a9cbcffdfff55eab90e1e497628c3/OpenSSL/X509.hs | haskell | # LANGUAGE CPP #
# LANGUAGE EmptyDataDecls #
# OPTIONS_HADDOCK prune #
|An interface to X.509 certificate.
* Type
* Functions to manipulate certificate
private
private
private
private
private
* Accessors
|@'X509'@ is an opaque object that represents X.509 certificate.
|@'newX509'@ creates an empty certificate. You must set the
use the certificate.
[/Version/] See 'setVersion'.
[/Serial number/] See 'setSerialNumber'.
[/Issuer name/] See 'setIssuerName'.
[/Subject name/] See 'setSubjectName'.
[/Validity/] See 'setNotBefore' and 'setNotAfter'.
[/Public Key/] See 'setPublicKey'.
|@'signX509'@ signs a certificate with an issuer private key.
^ The certificate to be signed.
^ The private key to sign with.
^ A hashing algorithm to use. If @Nothing@
the most suitable algorithm for the key
is automatically used.
|@'verifyX509'@ verifies a signature of certificate with an issuer
public key.
^ The certificate to be verified.
^ The public key to verify with.
|@'printX509' cert@ translates a certificate into human-readable
format.
|@'getVersion' cert@ returns the version number of certificate. It
|@'setVersion' cert ver@ updates the version number of certificate.
|@'getSerialNumber' cert@ returns the serial number of certificate.
|@'setSerialNumber' cert num@ updates the serial number of
certificate.
^ The certificate to examine.
to be of long form (e.g. \"commonName\"),
or @False@ if you don't (e.g. \"CN\").
^ Pairs of key and value,
\"JP\"), (\"ST\",
\"Some-State\"), ...\].
|@'setIssuerName' cert name@ updates the issuer name of
certificate. Keys of each parts may be of either long form or short
form. See 'getIssuerName'.
certificate. See 'getIssuerName'.
|@'setSubjectName' cert name@ updates the subject name of
certificate. See 'setIssuerName'.
|@'getNotBefore' cert@ returns the time when the certificate begins
to be valid.
|@'setNotBefore' cert utc@ updates the time when the certificate
begins to be valid.
|@'getNotAfter' cert@ returns the time when the certificate
expires.
|@'setNotAfter' cert utc@ updates the time when the certificate
expires.
|@'getPublicKey' cert@ returns the public key of the subject of
certificate.
|@'setPublicKey' cert pubkey@ updates the public key of the subject
of certificate.
|@'getSubjectEmail' cert@ returns every subject email addresses in
the certificate. | # LANGUAGE ForeignFunctionInterface #
module OpenSSL.X509
X509
, X509_
, newX509
, compareX509
, signX509
, verifyX509
, printX509
, getVersion
, setVersion
, getSerialNumber
, setSerialNumber
, getIssuerName
, setIssuerName
, getSubjectName
, setSubjectName
, getNotBefore
, setNotBefore
, getNotAfter
, setNotAfter
, getPublicKey
, setPublicKey
, getSubjectEmail
)
where
import Control.Monad
import Data.Time.Clock
import Data.Maybe
import Foreign.ForeignPtr
#if MIN_VERSION_base(4,4,0)
import Foreign.ForeignPtr.Unsafe as Unsafe
#else
import Foreign.ForeignPtr as Unsafe
#endif
import Foreign.Ptr
import Foreign.C
import OpenSSL.ASN1
import OpenSSL.BIO
import OpenSSL.EVP.Digest
import OpenSSL.EVP.PKey
import OpenSSL.EVP.Verify
import OpenSSL.EVP.Internal
import OpenSSL.Utils
import OpenSSL.Stack
import OpenSSL.X509.Name
newtype X509 = X509 (ForeignPtr X509_)
data X509_
foreign import ccall unsafe "X509_new"
_new :: IO (Ptr X509_)
foreign import ccall unsafe "&X509_free"
_free :: FunPtr (Ptr X509_ -> IO ())
foreign import ccall unsafe "X509_print"
_print :: Ptr BIO_ -> Ptr X509_ -> IO CInt
foreign import ccall unsafe "X509_cmp"
_cmp :: Ptr X509_ -> Ptr X509_ -> IO CInt
foreign import ccall unsafe "HsOpenSSL_X509_get_version"
_get_version :: Ptr X509_ -> IO CLong
foreign import ccall unsafe "X509_set_version"
_set_version :: Ptr X509_ -> CLong -> IO CInt
foreign import ccall unsafe "X509_get_serialNumber"
_get_serialNumber :: Ptr X509_ -> IO (Ptr ASN1_INTEGER)
foreign import ccall unsafe "X509_set_serialNumber"
_set_serialNumber :: Ptr X509_ -> Ptr ASN1_INTEGER -> IO CInt
foreign import ccall unsafe "X509_get_issuer_name"
_get_issuer_name :: Ptr X509_ -> IO (Ptr X509_NAME)
foreign import ccall unsafe "X509_set_issuer_name"
_set_issuer_name :: Ptr X509_ -> Ptr X509_NAME -> IO CInt
foreign import ccall unsafe "X509_get_subject_name"
_get_subject_name :: Ptr X509_ -> IO (Ptr X509_NAME)
foreign import ccall unsafe "X509_set_subject_name"
_set_subject_name :: Ptr X509_ -> Ptr X509_NAME -> IO CInt
foreign import ccall unsafe "HsOpenSSL_X509_get_notBefore"
_get_notBefore :: Ptr X509_ -> IO (Ptr ASN1_TIME)
foreign import ccall unsafe "X509_set_notBefore"
_set_notBefore :: Ptr X509_ -> Ptr ASN1_TIME -> IO CInt
foreign import ccall unsafe "HsOpenSSL_X509_get_notAfter"
_get_notAfter :: Ptr X509_ -> IO (Ptr ASN1_TIME)
foreign import ccall unsafe "X509_set_notAfter"
_set_notAfter :: Ptr X509_ -> Ptr ASN1_TIME -> IO CInt
foreign import ccall unsafe "X509_get_pubkey"
_get_pubkey :: Ptr X509_ -> IO (Ptr EVP_PKEY)
foreign import ccall unsafe "X509_set_pubkey"
_set_pubkey :: Ptr X509_ -> Ptr EVP_PKEY -> IO CInt
foreign import ccall unsafe "X509_get1_email"
_get1_email :: Ptr X509_ -> IO (Ptr STACK)
foreign import ccall unsafe "X509_email_free"
_email_free :: Ptr STACK -> IO ()
foreign import ccall unsafe "X509_sign"
_sign :: Ptr X509_ -> Ptr EVP_PKEY -> Ptr EVP_MD -> IO CInt
foreign import ccall unsafe "X509_verify"
_verify :: Ptr X509_ -> Ptr EVP_PKEY -> IO CInt
following properties to and sign it ( see ) to actually
newX509 :: IO X509
newX509 = _new >>= failIfNull >>= wrapX509
wrapX509 :: Ptr X509_ -> IO X509
wrapX509 = fmap X509 . newForeignPtr _free
withX509Ptr :: X509 -> (Ptr X509_ -> IO a) -> IO a
withX509Ptr (X509 x509) = withForeignPtr x509
withX509Stack :: [X509] -> (Ptr STACK -> IO a) -> IO a
withX509Stack = withForeignStack unsafeX509ToPtr touchX509
unsafeX509ToPtr :: X509 -> Ptr X509_
unsafeX509ToPtr (X509 x509) = Unsafe.unsafeForeignPtrToPtr x509
touchX509 :: X509 -> IO ()
touchX509 (X509 x509) = touchForeignPtr x509
|@'compareX509 ' cert2@ compares two certificates .
compareX509 :: X509 -> X509 -> IO Ordering
compareX509 cert1 cert2
= withX509Ptr cert1 $ \ cert1Ptr ->
withX509Ptr cert2 $ \ cert2Ptr ->
fmap interpret (_cmp cert1Ptr cert2Ptr)
where
interpret :: CInt -> Ordering
interpret n
| n > 0 = GT
| n < 0 = LT
| otherwise = EQ
signX509 :: KeyPair key =>
-> IO ()
signX509 x509 key mDigest
= withX509Ptr x509 $ \ x509Ptr ->
withPKeyPtr' key $ \ pkeyPtr ->
do dig <- case mDigest of
Just md -> return md
Nothing -> pkeyDefaultMD key
withMDPtr dig $ \ digestPtr ->
_sign x509Ptr pkeyPtr digestPtr
>>= failIf_ (== 0)
return ()
verifyX509 :: PublicKey key =>
-> IO VerifyStatus
verifyX509 x509 key
= withX509Ptr x509 $ \ x509Ptr ->
withPKeyPtr' key $ \ pkeyPtr ->
_verify x509Ptr pkeyPtr
>>= interpret
where
interpret :: CInt -> IO VerifyStatus
interpret 1 = return VerifySuccess
interpret 0 = return VerifyFailure
interpret _ = raiseOpenSSLError
printX509 :: X509 -> IO String
printX509 x509
= do mem <- newMem
withX509Ptr x509 $ \ x509Ptr ->
withBioPtr mem $ \ memPtr ->
_print memPtr x509Ptr
>>= failIf_ (/= 1)
bioRead mem
seems the number is 0 - origin : version 2 means X.509 v3 .
getVersion :: X509 -> IO Int
getVersion x509
= withX509Ptr x509 $ \ x509Ptr ->
liftM fromIntegral $ _get_version x509Ptr
setVersion :: X509 -> Int -> IO ()
setVersion x509 ver
= withX509Ptr x509 $ \ x509Ptr ->
_set_version x509Ptr (fromIntegral ver)
>>= failIf (/= 1)
>> return ()
getSerialNumber :: X509 -> IO Integer
getSerialNumber x509
= withX509Ptr x509 $ \ x509Ptr ->
_get_serialNumber x509Ptr
>>= peekASN1Integer
setSerialNumber :: X509 -> Integer -> IO ()
setSerialNumber x509 serial
= withX509Ptr x509 $ \ x509Ptr ->
withASN1Integer serial $ \ serialPtr ->
_set_serialNumber x509Ptr serialPtr
>>= failIf (/= 1)
>> return ()
|@'getIssuerName'@ returns the issuer name of certificate .
^ if you want the keys of each parts
for example \[(\"C\ " ,
getIssuerName x509 wantLongName
= withX509Ptr x509 $ \ x509Ptr ->
do namePtr <- _get_issuer_name x509Ptr
peekX509Name namePtr wantLongName
setIssuerName :: X509 -> [(String, String)] -> IO ()
setIssuerName x509 issuer
= withX509Ptr x509 $ \ x509Ptr ->
withX509Name issuer $ \ namePtr ->
_set_issuer_name x509Ptr namePtr
>>= failIf (/= 1)
>> return ()
|@'getSubjectName ' cert wantLongName@ returns the subject name of
getSubjectName :: X509 -> Bool -> IO [(String, String)]
getSubjectName x509 wantLongName
= withX509Ptr x509 $ \ x509Ptr ->
do namePtr <- _get_subject_name x509Ptr
peekX509Name namePtr wantLongName
setSubjectName :: X509 -> [(String, String)] -> IO ()
setSubjectName x509 subject
= withX509Ptr x509 $ \ x509Ptr ->
withX509Name subject $ \ namePtr ->
_set_subject_name x509Ptr namePtr
>>= failIf (/= 1)
>> return ()
getNotBefore :: X509 -> IO UTCTime
getNotBefore x509
= withX509Ptr x509 $ \ x509Ptr ->
_get_notBefore x509Ptr
>>= peekASN1Time
setNotBefore :: X509 -> UTCTime -> IO ()
setNotBefore x509 utc
= withX509Ptr x509 $ \ x509Ptr ->
withASN1Time utc $ \ time ->
_set_notBefore x509Ptr time
>>= failIf (/= 1)
>> return ()
getNotAfter :: X509 -> IO UTCTime
getNotAfter x509
= withX509Ptr x509 $ \ x509Ptr ->
_get_notAfter x509Ptr
>>= peekASN1Time
setNotAfter :: X509 -> UTCTime -> IO ()
setNotAfter x509 utc
= withX509Ptr x509 $ \ x509Ptr ->
withASN1Time utc $ \ time ->
_set_notAfter x509Ptr time
>>= failIf (/= 1)
>> return ()
getPublicKey :: X509 -> IO SomePublicKey
getPublicKey x509
= withX509Ptr x509 $ \ x509Ptr ->
fmap fromJust ( _get_pubkey x509Ptr
>>= failIfNull
>>= wrapPKeyPtr
>>= fromPKey
)
setPublicKey :: PublicKey key => X509 -> key -> IO ()
setPublicKey x509 key
= withX509Ptr x509 $ \ x509Ptr ->
withPKeyPtr' key $ \ pkeyPtr ->
_set_pubkey x509Ptr pkeyPtr
>>= failIf (/= 1)
>> return ()
getSubjectEmail :: X509 -> IO [String]
getSubjectEmail x509
= withX509Ptr x509 $ \ x509Ptr ->
do st <- _get1_email x509Ptr
list <- mapStack peekCString st
_email_free st
return list
|
8968924f68a552d61856c00b3c5bfd56efdb10fab3d7e55867e2ef946e6f4b6a | camllight/camllight | latexmacros.ml | let cmdtable = (hashtbl__new 19 : (string, action list) hashtbl__t);;
let def name action =
hashtbl__add cmdtable name action;;
let find_macro name =
try
hashtbl__find cmdtable name
with Not_found ->
prerr_string "Unknown macro: "; prerr_endline name; [];;
(* General LaTeX macros *)
def "\\part"
[Print "<H0>"; Print_arg; Print "</H0>\n"];
def "\\chapter"
[Print "<H1>"; Print_arg; Print "</H1>\n"];
def "\\chapter*"
[Print "<H1>"; Print_arg; Print "</H1>\n"];
def "\\section"
[Print "<H2>"; Print_arg; Print "</H2>\n"];
def "\\section*"
[Print "<H2>"; Print_arg; Print "</H2>\n"];
def "\\subsection"
[Print "<H3>"; Print_arg; Print "</H3>\n"];
def "\\subsection*"
[Print "<H3>"; Print_arg; Print "</H3>\n"];
def "\\subsubsection"
[Print "<H4>"; Print_arg; Print "</H4>\n"];
def "\\subsubsection*"
[Print "<H4>"; Print_arg; Print "</H4>\n"];
def "\\paragraph"
[Print "<H5>"; Print_arg; Print "</H5>\n"];
def "\\begin{alltt}" [Print "<pre>"];
def "\\end{alltt}" [Print "</pre>"];
def "\\begin{itemize}" [Print "<p><ul>"];
def "\\end{itemize}" [Print "</ul>"];
def "\\begin{enumerate}" [Print "<p><ol>"];
def "\\end{enumerate}" [Print "</ol>"];
def "\\begin{description}" [Print "<p><dl>"];
def "\\end{description}" [Print "</dl>"];
def "\\begin{center}" [Print "<blockquote>"];
def "\\end{center}" [Print "</blockquote>"];
def "\\smallskip" [];
def "\\medskip" [];
def "\\bigskip" [];
def "\\markboth" [Skip_arg; Skip_arg];
def "\\ldots" [Print "..."];
def "\\ " [Print " "];
def "\\{" [Print "{"];
def "\\}" [Print "}"];
def "\\%" [Print "%"];
def "\\/" [];
def "\\newpage" [];
def "\\label" [Print "<A name=\""; Print_arg; Print "\"></A>"];
def "\\ref" [Print "<A href=\"#"; Print_arg; Print "\">X</A>"];
def "\\index" [Skip_arg];
def "\\oe" [Print "oe"];
def "\\&" [Print "&"];
def "\\_" [Print "_"];
def "\\leq" [Print "<="];
def "\\geq" [Print ">="];
def "\\hbox" [Print_arg];
def "\\copyright" [Print "(c)"];
def "\\noindent" [];
def "\\begin{flushleft}" [Print "<blockquote>"];
def "\\end{flushleft}" [Print "</blockquote>"];
def "\\\\" [Print "<br>"];
();;
Macros specific to the manual
def "\\begin{options}" [Print "<p><dl>"];
def "\\end{options}" [Print "</dl>"];
def "\\var" [Print "<i>"; Print_arg; Print "</i>"];
def "\\nth" [Print "<i>"; Print_arg;
Print "</i><sub>"; Print_arg; Print "</sub>"];
def "\\nmth" [Print "<i>"; Print_arg;
Print "</i><sub>"; Print_arg;
Print "</sub><sup>"; Print_arg;
Print "</sup>"];
def "\\begin{unix}" [Print "<dl><dt><b>Unix:</b><dd>"];
def "\\end{unix}" [Print "</dl>"];
def "\\begin{mac}" [Print "<dl><dt><b>Mac:</b><dd>"];
def "\\end{mac}" [Print "</dl>"];
def "\\begin{pc}" [Print "<dl><dt><b>PC:</b><dd>"];
def "\\end{pc}" [Print "</dl>"];
def "\\begin{requirements}" [Print "<dl><dt><b>Requirements:</b><dd>"];
def "\\end{requirements}" [Print "</dl>"];
def "\\begin{troubleshooting}" [Print "<dl><dt><b>Troubleshooting:</b><dd>"];
def "\\end{troubleshooting}" [Print "</dl>"];
def "\\begin{installation}" [Print "<dl><dt><b>Installation:</b><dd>"];
def "\\end{installation}" [Print "</dl>"];
def "\\index" [Skip_arg];
def "\\ikwd" [Skip_arg];
def "\\th" [Print "-th"];
def "\\begin{library}" [];
def "\\end{library}" [];
def "\\begin{comment}" [Print "<dl><dd>"];
def "\\end{comment}" [Print "</dl>"];
def "\\begin{tableau}"
[Skip_arg;
Print "<table border>\n<tr><th>";
Print_arg;
Print "</th><th>";
Print_arg;
Print "</th></tr>"];
def "\\entree"
[Print "<tr><td>"; Print_arg;
Print "</td><td>"; Print_arg; Print "</td></tr>"];
def "\\end{tableau}" [Print "</table>"];
def "\\begin{gcrule}" [Print "<dl><dt><b>Rule:</b><dd>"];
def "\\end{gcrule}" [Print "</dl>"];
def "\\begin{tableauoperateurs}"
[Print "<table border>\n<tr><th>Operator</th><th>Associated ident</th><th>Behavior in the default environment</th></tr>"];
def "\\end{tableauoperateurs}" [Print "</table>\n"];
def "\\entreeoperateur"
[Print "<tr><td>"; Print_arg; Print "</td><td>"; Print_arg;
Print "</td><td>"; Print_arg; Print "</td></tr>"];
def "\\fromoneto"
[Print "<i>"; Print_arg; Print "</i> = 1, ..., <i>";
Print_arg; Print "</i>"];
def "\\event" [Print "\164"];
def "\\optvar" [Print "[<i>"; Print_arg; Print "</i>]"];
();;
| null | https://raw.githubusercontent.com/camllight/camllight/0cc537de0846393322058dbb26449427bfc76786/doc/tools/latexmacros.ml | ocaml | General LaTeX macros | let cmdtable = (hashtbl__new 19 : (string, action list) hashtbl__t);;
let def name action =
hashtbl__add cmdtable name action;;
let find_macro name =
try
hashtbl__find cmdtable name
with Not_found ->
prerr_string "Unknown macro: "; prerr_endline name; [];;
def "\\part"
[Print "<H0>"; Print_arg; Print "</H0>\n"];
def "\\chapter"
[Print "<H1>"; Print_arg; Print "</H1>\n"];
def "\\chapter*"
[Print "<H1>"; Print_arg; Print "</H1>\n"];
def "\\section"
[Print "<H2>"; Print_arg; Print "</H2>\n"];
def "\\section*"
[Print "<H2>"; Print_arg; Print "</H2>\n"];
def "\\subsection"
[Print "<H3>"; Print_arg; Print "</H3>\n"];
def "\\subsection*"
[Print "<H3>"; Print_arg; Print "</H3>\n"];
def "\\subsubsection"
[Print "<H4>"; Print_arg; Print "</H4>\n"];
def "\\subsubsection*"
[Print "<H4>"; Print_arg; Print "</H4>\n"];
def "\\paragraph"
[Print "<H5>"; Print_arg; Print "</H5>\n"];
def "\\begin{alltt}" [Print "<pre>"];
def "\\end{alltt}" [Print "</pre>"];
def "\\begin{itemize}" [Print "<p><ul>"];
def "\\end{itemize}" [Print "</ul>"];
def "\\begin{enumerate}" [Print "<p><ol>"];
def "\\end{enumerate}" [Print "</ol>"];
def "\\begin{description}" [Print "<p><dl>"];
def "\\end{description}" [Print "</dl>"];
def "\\begin{center}" [Print "<blockquote>"];
def "\\end{center}" [Print "</blockquote>"];
def "\\smallskip" [];
def "\\medskip" [];
def "\\bigskip" [];
def "\\markboth" [Skip_arg; Skip_arg];
def "\\ldots" [Print "..."];
def "\\ " [Print " "];
def "\\{" [Print "{"];
def "\\}" [Print "}"];
def "\\%" [Print "%"];
def "\\/" [];
def "\\newpage" [];
def "\\label" [Print "<A name=\""; Print_arg; Print "\"></A>"];
def "\\ref" [Print "<A href=\"#"; Print_arg; Print "\">X</A>"];
def "\\index" [Skip_arg];
def "\\oe" [Print "oe"];
def "\\&" [Print "&"];
def "\\_" [Print "_"];
def "\\leq" [Print "<="];
def "\\geq" [Print ">="];
def "\\hbox" [Print_arg];
def "\\copyright" [Print "(c)"];
def "\\noindent" [];
def "\\begin{flushleft}" [Print "<blockquote>"];
def "\\end{flushleft}" [Print "</blockquote>"];
def "\\\\" [Print "<br>"];
();;
Macros specific to the manual
def "\\begin{options}" [Print "<p><dl>"];
def "\\end{options}" [Print "</dl>"];
def "\\var" [Print "<i>"; Print_arg; Print "</i>"];
def "\\nth" [Print "<i>"; Print_arg;
Print "</i><sub>"; Print_arg; Print "</sub>"];
def "\\nmth" [Print "<i>"; Print_arg;
Print "</i><sub>"; Print_arg;
Print "</sub><sup>"; Print_arg;
Print "</sup>"];
def "\\begin{unix}" [Print "<dl><dt><b>Unix:</b><dd>"];
def "\\end{unix}" [Print "</dl>"];
def "\\begin{mac}" [Print "<dl><dt><b>Mac:</b><dd>"];
def "\\end{mac}" [Print "</dl>"];
def "\\begin{pc}" [Print "<dl><dt><b>PC:</b><dd>"];
def "\\end{pc}" [Print "</dl>"];
def "\\begin{requirements}" [Print "<dl><dt><b>Requirements:</b><dd>"];
def "\\end{requirements}" [Print "</dl>"];
def "\\begin{troubleshooting}" [Print "<dl><dt><b>Troubleshooting:</b><dd>"];
def "\\end{troubleshooting}" [Print "</dl>"];
def "\\begin{installation}" [Print "<dl><dt><b>Installation:</b><dd>"];
def "\\end{installation}" [Print "</dl>"];
def "\\index" [Skip_arg];
def "\\ikwd" [Skip_arg];
def "\\th" [Print "-th"];
def "\\begin{library}" [];
def "\\end{library}" [];
def "\\begin{comment}" [Print "<dl><dd>"];
def "\\end{comment}" [Print "</dl>"];
def "\\begin{tableau}"
[Skip_arg;
Print "<table border>\n<tr><th>";
Print_arg;
Print "</th><th>";
Print_arg;
Print "</th></tr>"];
def "\\entree"
[Print "<tr><td>"; Print_arg;
Print "</td><td>"; Print_arg; Print "</td></tr>"];
def "\\end{tableau}" [Print "</table>"];
def "\\begin{gcrule}" [Print "<dl><dt><b>Rule:</b><dd>"];
def "\\end{gcrule}" [Print "</dl>"];
def "\\begin{tableauoperateurs}"
[Print "<table border>\n<tr><th>Operator</th><th>Associated ident</th><th>Behavior in the default environment</th></tr>"];
def "\\end{tableauoperateurs}" [Print "</table>\n"];
def "\\entreeoperateur"
[Print "<tr><td>"; Print_arg; Print "</td><td>"; Print_arg;
Print "</td><td>"; Print_arg; Print "</td></tr>"];
def "\\fromoneto"
[Print "<i>"; Print_arg; Print "</i> = 1, ..., <i>";
Print_arg; Print "</i>"];
def "\\event" [Print "\164"];
def "\\optvar" [Print "[<i>"; Print_arg; Print "</i>]"];
();;
|
d3f55714a376f013563017da4fc3c96ee4a1445dfaec4c0c5ba03fc28600fed8 | EveryTian/Haskell-Codewars | CryptoSquare.hs | module CryptoSquare (encode) where
import Data.Char (isAlpha, isDigit, toLower)
encode :: String -> String
encode xs = let s = preDeal xs
(r, c) = getRC s
sl = sep s c
in reGen sl
reGen :: [String] -> String
reGen [] = []
reGen xs
| null $ head xs = []
| otherwise = map head (filter (not . null) xs) ++ ' ' : reGen (map anotherTail xs)
where anotherTail [] = []
anotherTail (_:t) = t
getRC :: String -> (Int, Int)
getRC xs = let len = length xs
in f len 0 0
where f len r c
| r * c >= len = (r, c)
| r == c = f len r (c + 1)
| otherwise = f len (r + 1) c
preDeal :: String -> String
preDeal = map toLower . filter (\ x -> isAlpha x || isDigit x)
sep :: String -> Int -> [String]
sep s c = f s c (length s)
where f s c len
| len <= c = [s]
| otherwise = take c s : f (drop c s) c (len - c)
| null | https://raw.githubusercontent.com/EveryTian/Haskell-Codewars/dc48d95c676ce1a59f697d07672acb6d4722893b/exercism/crypto-square/src/CryptoSquare.hs | haskell | module CryptoSquare (encode) where
import Data.Char (isAlpha, isDigit, toLower)
encode :: String -> String
encode xs = let s = preDeal xs
(r, c) = getRC s
sl = sep s c
in reGen sl
reGen :: [String] -> String
reGen [] = []
reGen xs
| null $ head xs = []
| otherwise = map head (filter (not . null) xs) ++ ' ' : reGen (map anotherTail xs)
where anotherTail [] = []
anotherTail (_:t) = t
getRC :: String -> (Int, Int)
getRC xs = let len = length xs
in f len 0 0
where f len r c
| r * c >= len = (r, c)
| r == c = f len r (c + 1)
| otherwise = f len (r + 1) c
preDeal :: String -> String
preDeal = map toLower . filter (\ x -> isAlpha x || isDigit x)
sep :: String -> Int -> [String]
sep s c = f s c (length s)
where f s c len
| len <= c = [s]
| otherwise = take c s : f (drop c s) c (len - c)
|
|
e061e99ad24ff830969b51d640b54f8a0ff54e2e8150a995ebf3eeb492a4d38b | Gastove/doctopus | test_utilities.clj | (ns doctopus.test-utilities
(:require [clojure.string :as str]
[clojure.test :refer :all]
[doctopus.doctopus.head :refer [->Head]]
[doctopus.doctopus.tentacle :refer [map->Tentacle]])
(:import [org.joda.time DateTime]
[org.joda.time.format DateTimeFormat]))
(defn truthy? [v]
(or (true? v)
(and (not (nil? v)) (not (false? v)))))
(def iso-formatter (DateTimeFormat/forPattern "yyyy-MM-dd"))
(defn make-today
[]
(let [today (DateTime.)]
(.print iso-formatter today)))
;; Make mock requests
(defn fake-request
[routes method uri & params]
(routes {:request-method method :uri uri :params (first params)}))
;; Data-Mocking functions
(defmulti mock-data (fn [kind length] kind))
(defmethod mock-data :int
[_ length]
(rand-int length))
(defmethod mock-data :string
[_ length]
(let [upper-alphas "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
lower-alphas (str/lower-case upper-alphas)
nums "0123456789"
punct-and-spaces " -!,?:~_ \"'$%&"
candidate-chars (apply str
upper-alphas
lower-alphas
nums
punct-and-spaces)]
(loop [acc []]
(if (= (count acc) length)
(apply str acc)
(recur (conj acc (rand-nth candidate-chars)))))))
(defmethod mock-data :tentacle
[_ _]
(map->Tentacle {:name (mock-data :string 10)
:html-commands [(mock-data :string 10)]
:output-root (mock-data :string 15)
:source-control "git"
:source-location (mock-data :string 10)
:entry-point (mock-data :string 10)}))
(defmethod mock-data :head
[_ _]
(->Head (mock-data :string 18)))
| null | https://raw.githubusercontent.com/Gastove/doctopus/407ca58bb01a6da84f3a76a58c800ee0a7f14190/test/doctopus/test_utilities.clj | clojure | Make mock requests
Data-Mocking functions | (ns doctopus.test-utilities
(:require [clojure.string :as str]
[clojure.test :refer :all]
[doctopus.doctopus.head :refer [->Head]]
[doctopus.doctopus.tentacle :refer [map->Tentacle]])
(:import [org.joda.time DateTime]
[org.joda.time.format DateTimeFormat]))
(defn truthy? [v]
(or (true? v)
(and (not (nil? v)) (not (false? v)))))
(def iso-formatter (DateTimeFormat/forPattern "yyyy-MM-dd"))
(defn make-today
[]
(let [today (DateTime.)]
(.print iso-formatter today)))
(defn fake-request
[routes method uri & params]
(routes {:request-method method :uri uri :params (first params)}))
(defmulti mock-data (fn [kind length] kind))
(defmethod mock-data :int
[_ length]
(rand-int length))
(defmethod mock-data :string
[_ length]
(let [upper-alphas "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
lower-alphas (str/lower-case upper-alphas)
nums "0123456789"
punct-and-spaces " -!,?:~_ \"'$%&"
candidate-chars (apply str
upper-alphas
lower-alphas
nums
punct-and-spaces)]
(loop [acc []]
(if (= (count acc) length)
(apply str acc)
(recur (conj acc (rand-nth candidate-chars)))))))
(defmethod mock-data :tentacle
[_ _]
(map->Tentacle {:name (mock-data :string 10)
:html-commands [(mock-data :string 10)]
:output-root (mock-data :string 15)
:source-control "git"
:source-location (mock-data :string 10)
:entry-point (mock-data :string 10)}))
(defmethod mock-data :head
[_ _]
(->Head (mock-data :string 18)))
|
03eea96b4f134b42cb0ef2d266b8e365cc729978b3ebddfb9d103d39300ed721 | keyvanakbary/marko | db.cljs | (ns marko.db)
(def initial-content "# Hello world!
Check out the [repository]().
* Will work on Mac, Linux and Windows
* Notes will be saved as Markdown files
* Sync via Dropbox or Google Drive
**Feedback pretty welcome!**
")
(def default-db
{:editor-content initial-content})
| null | https://raw.githubusercontent.com/keyvanakbary/marko/392602803795d7a5ab4dc8242c1625aca9b0cc20/src/marko/db.cljs | clojure | (ns marko.db)
(def initial-content "# Hello world!
Check out the [repository]().
* Will work on Mac, Linux and Windows
* Notes will be saved as Markdown files
* Sync via Dropbox or Google Drive
**Feedback pretty welcome!**
")
(def default-db
{:editor-content initial-content})
|
|
166d6d22ebaf627b705fdeb70977a7a5c789de851971b0d9bd228ef442ba148a | haroldcarr/learn-haskell-coq-ml-etc | lab5-pmc.hs | module Lab5 where
data Concurrent a = Concurrent ((a -> Action) -> Action)
data Action
= Atom (IO Action)
| Fork Action Action
| Stop
instance Show Action where
show (Atom _) = "atom"
show (Fork x y) = "fork " ++ show x ++ " " ++ show y
show Stop = "stop"
-- ===================================
-- Ex. 0
-- ===================================
actionU :: ((a -> Action) -> Action) -> Action
actionU f = f (\_ -> Stop)
action :: Concurrent a -> Action
action (Concurrent f) = f (\_ -> Stop)
-- ===================================
Ex . 1
-- ===================================
stop :: Concurrent a
stop = Concurrent (\_ -> Stop)
-- ===================================
Ex . 2
-- ===================================
atomU :: IO a -> ((a -> Action) -> Action)
atomU ioa = \c -> Atom (ioa >>= \a -> return $ c a)
atom :: IO a -> Concurrent a
atom ioa = Concurrent (\c -> Atom (ioa >>= \a -> return $ c a))
-- ===================================
Ex . 3
-- ===================================
fork :: Concurrent a -> Concurrent ()
fork f = Concurrent (\c -> Fork (action f) (c ()))
par :: Concurrent a -> Concurrent a -> Concurrent a
par (Concurrent f1) (Concurrent f2) = Concurrent (\c -> Fork (f1 c) (f2 c))
-- ===================================
Ex . 4
-- ===================================
cb :: ((a -> Action) -> Action) ->
(a -> ((b -> Action) -> Action)) ->
((b -> Action) -> Action)
cb m f = \c -> m (\x -> (f x) c)
instance Monad Concurrent where
(Concurrent m) >>= f = Concurrent $ \c -> m (\x -> let (Concurrent fx) = f x in fx c)
return x = Concurrent (\c -> c x)
-- ===================================
Ex . 5
-- ===================================
roundRobin :: [Action] -> IO ()
roundRobin xs0 =
case xs0 of
[] -> return ()
((Atom ioa) : xs) -> ioa >>= \x -> roundRobin (xs++[x])
((Fork a1 a2) : xs) -> roundRobin (a1:a2:xs)
(Stop : xs) -> roundRobin xs
-- ===================================
-- Tests
-- ===================================
ex0 :: Concurrent ()
ex0 = par (loop (genRandom 1337)) (loop (genRandom 2600) >> atom (putStrLn ""))
ex1 :: Concurrent ()
ex1 = do atom (putStr "Haskell")
fork (loop $ genRandom 7331)
loop $ genRandom 42
atom (putStrLn "")
-- ===================================
-- Helper Functions
-- ===================================
run :: Concurrent a -> IO ()
run x = roundRobin [action x]
genRandom :: Int -> [Int]
genRandom 1337 = [ 1, 96, 36, 11, 42, 47, 9, 1, 62, 73]
genRandom 7331 = [17, 73, 92, 36, 22, 72, 19, 35, 6, 74]
genRandom 2600 = [83, 98, 35, 84, 44, 61, 54, 35, 83, 9]
genRandom 42 = [71, 71, 17, 14, 16, 91, 18, 71, 58, 75]
loop :: [Int] -> Concurrent ()
loop xs = mapM_ (atom . putStr . show) xs
-- Exercise 0
action ( Concurrent ( \a - > Stop ) )
= > stop
-- Exercise 1
: t action ( Concurrent ( \a - > Stop ) )
action ( Concurrent ( \a - > Stop ) ) : : Action
-- Exercise 2
action ( Concurrent ( \a - > Fork Stop $ Fork Stop Stop ) )
= > fork stop fork stop stop
-- Exercise 3
action ( Concurrent ( \a - > Atom $ putStr " Haskell " ) )
= > Could n't match type ` ( ) ' with ` Action '
Expected type : IO Action
Actual type : IO ( )
-- Exercise 4
action ( Concurrent ( \a - > Atom $ putStr " Haskell " > > return Stop ) )
= > atom
-- Exercise 5
: t Concurrent ( \a - > Atom $ putStr " Haskell " > > return Stop )
= > Concurrent ( \a - > Atom $ putStr " Haskell " > > return Stop ) : : Concurrent a
-- Exercise 6
action stop
= > stop
-- Exercise 7
stop
= > No instance for ( Show ( Concurrent a0 ) )
arising from a use of ` print '
-- Exercise 8
action . atom . " "
= > atom
-- Exercise 9
action $ atom undefined
= > atom
-- Exercise 10
atom . " "
= > No instance for ( Show ( Concurrent ( ) ) )
arising from a use of ` print '
-- Exercise 11
action $ fork stop
= > fork stop stop
-- Exercise 12
action ( fork ( atom ( putStr " Hacker " ) ) )
= > fork atom stop
-- Exercise 13
: t action ( fork ( atom ( putStr " Hacker " ) ) )
= > action ( fork ( atom ( putStr " Hacker " ) ) ) : : Action
-- Exercise 14
action ( fork undefined )
= > fork * * * Exception : Prelude.undefined
-- Exercise 15
action $ par stop stop
= > fork stop stop
-- Exercise 16
action ( par ( atom ( putStr " think " ) ) ( atom ( putStr " hack " ) ) )
= > fork atom atom
-- Exercise 17
action ( par stop $ fork stop )
= > fork stop fork stop stop
-- Exercise 18
action $ par ( atom $ putChar ' x ' ) ( fork stop )
= > fork atom fork stop stop
-- Exercise 19
action ( stop > > = ( \c - > stop ) )
= > stop
-- Exercise 20
action ( atom ( putStrLn " whatever ... " ) > > = stop )
= > Could n't match expected type ` ( ) - > Concurrent a0 '
with actual type ` Concurrent a1 '
In the second argument of ` ( > > =) ' , namely ` stop '
In the first argument of ` action ' , namely
-- Exercise 21
stop > > = stop
= > Could n't match expected type ` a0 - > Concurrent b0 '
with actual type ` Concurrent a1 '
In the second argument of ` ( > > =) ' , namely ` stop '
-- Exercise 22
: t stop > > = stop
= > Could n't match expected type ` a0 - > Concurrent b0 '
with actual type ` Concurrent a1 '
In the second argument of ` ( > > =) ' , namely ` stop '
-- Exercise 23
action ( fork stop > > = \ _ - > fork stop )
= > fork stop fork stop stop
-- Exercise 24
run > 183969836351184424447619541356283739
-- Exercise 25
run ex1
Haskell177173719217361422167291191835716587475
-- Exercise 0
action (Concurrent (\a -> Stop))
=> stop
-- Exercise 1
:t action (Concurrent (\a -> Stop))
action (Concurrent (\a -> Stop)) :: Action
-- Exercise 2
action (Concurrent (\a -> Fork Stop $ Fork Stop Stop))
=> fork stop fork stop stop
-- Exercise 3
action (Concurrent (\a -> Atom $ putStr "Haskell"))
=> Couldn't match type `()' with `Action'
Expected type: IO Action
Actual type: IO ()
-- Exercise 4
action (Concurrent (\a -> Atom $ putStr "Haskell" >> return Stop))
=> atom
-- Exercise 5
:t Concurrent (\a -> Atom $ putStr "Haskell" >> return Stop)
=> Concurrent (\a -> Atom $ putStr "Haskell" >> return Stop) :: Concurrent a
-- Exercise 6
action stop
=> stop
-- Exercise 7
stop
=> No instance for (Show (Concurrent a0))
arising from a use of `print'
-- Exercise 8
action . atom . putStrLn $ "Haskell"
=> atom
-- Exercise 9
action $ atom undefined
=> atom
-- Exercise 10
atom . putStrLn $ "Haskell"
=> No instance for (Show (Concurrent ()))
arising from a use of `print'
-- Exercise 11
action $ fork stop
=> fork stop stop
-- Exercise 12
action (fork (atom (putStr "Hacker")))
=> fork atom stop
-- Exercise 13
:t action (fork (atom (putStr "Hacker")))
=> action (fork (atom (putStr "Hacker"))) :: Action
-- Exercise 14
action (fork undefined)
=> fork *** Exception: Prelude.undefined
-- Exercise 15
action $ par stop stop
=> fork stop stop
-- Exercise 16
action (par (atom (putStr "think")) (atom (putStr "hack")))
=> fork atom atom
-- Exercise 17
action (par stop $ fork stop)
=> fork stop fork stop stop
-- Exercise 18
action $ par (atom $ putChar 'x') (fork stop)
=> fork atom fork stop stop
-- Exercise 19
action (stop >>= (\c -> stop))
=> stop
-- Exercise 20
action (atom (putStrLn "whatever...") >>= stop)
=> Couldn't match expected type `() -> Concurrent a0'
with actual type `Concurrent a1'
In the second argument of `(>>=)', namely `stop'
In the first argument of `action', namely
-- Exercise 21
stop >>= stop
=> Couldn't match expected type `a0 -> Concurrent b0'
with actual type `Concurrent a1'
In the second argument of `(>>=)', namely `stop'
-- Exercise 22
:t stop >>= stop
=> Couldn't match expected type `a0 -> Concurrent b0'
with actual type `Concurrent a1'
In the second argument of `(>>=)', namely `stop'
-- Exercise 23
action (fork stop >>= \_ -> fork stop)
=> fork stop fork stop stop
-- Exercise 24
run ex0
=> 183969836351184424447619541356283739
-- Exercise 25
run ex1
Haskell177173719217361422167291191835716587475
-}
| null | https://raw.githubusercontent.com/haroldcarr/learn-haskell-coq-ml-etc/b4e83ec7c7af730de688b7376497b9f49dc24a0e/haskell/course/2014-10-edx-delft-fp101x-intro-to-fp-erik-meijer/lab5-pmc.hs | haskell | ===================================
Ex. 0
===================================
===================================
===================================
===================================
===================================
===================================
===================================
===================================
===================================
===================================
===================================
===================================
Tests
===================================
===================================
Helper Functions
===================================
Exercise 0
Exercise 1
Exercise 2
Exercise 3
Exercise 4
Exercise 5
Exercise 6
Exercise 7
Exercise 8
Exercise 9
Exercise 10
Exercise 11
Exercise 12
Exercise 13
Exercise 14
Exercise 15
Exercise 16
Exercise 17
Exercise 18
Exercise 19
Exercise 20
Exercise 21
Exercise 22
Exercise 23
Exercise 24
Exercise 25
Exercise 0
Exercise 1
Exercise 2
Exercise 3
Exercise 4
Exercise 5
Exercise 6
Exercise 7
Exercise 8
Exercise 9
Exercise 10
Exercise 11
Exercise 12
Exercise 13
Exercise 14
Exercise 15
Exercise 16
Exercise 17
Exercise 18
Exercise 19
Exercise 20
Exercise 21
Exercise 22
Exercise 23
Exercise 24
Exercise 25 | module Lab5 where
data Concurrent a = Concurrent ((a -> Action) -> Action)
data Action
= Atom (IO Action)
| Fork Action Action
| Stop
instance Show Action where
show (Atom _) = "atom"
show (Fork x y) = "fork " ++ show x ++ " " ++ show y
show Stop = "stop"
actionU :: ((a -> Action) -> Action) -> Action
actionU f = f (\_ -> Stop)
action :: Concurrent a -> Action
action (Concurrent f) = f (\_ -> Stop)
Ex . 1
stop :: Concurrent a
stop = Concurrent (\_ -> Stop)
Ex . 2
atomU :: IO a -> ((a -> Action) -> Action)
atomU ioa = \c -> Atom (ioa >>= \a -> return $ c a)
atom :: IO a -> Concurrent a
atom ioa = Concurrent (\c -> Atom (ioa >>= \a -> return $ c a))
Ex . 3
fork :: Concurrent a -> Concurrent ()
fork f = Concurrent (\c -> Fork (action f) (c ()))
par :: Concurrent a -> Concurrent a -> Concurrent a
par (Concurrent f1) (Concurrent f2) = Concurrent (\c -> Fork (f1 c) (f2 c))
Ex . 4
cb :: ((a -> Action) -> Action) ->
(a -> ((b -> Action) -> Action)) ->
((b -> Action) -> Action)
cb m f = \c -> m (\x -> (f x) c)
instance Monad Concurrent where
(Concurrent m) >>= f = Concurrent $ \c -> m (\x -> let (Concurrent fx) = f x in fx c)
return x = Concurrent (\c -> c x)
Ex . 5
roundRobin :: [Action] -> IO ()
roundRobin xs0 =
case xs0 of
[] -> return ()
((Atom ioa) : xs) -> ioa >>= \x -> roundRobin (xs++[x])
((Fork a1 a2) : xs) -> roundRobin (a1:a2:xs)
(Stop : xs) -> roundRobin xs
ex0 :: Concurrent ()
ex0 = par (loop (genRandom 1337)) (loop (genRandom 2600) >> atom (putStrLn ""))
ex1 :: Concurrent ()
ex1 = do atom (putStr "Haskell")
fork (loop $ genRandom 7331)
loop $ genRandom 42
atom (putStrLn "")
run :: Concurrent a -> IO ()
run x = roundRobin [action x]
genRandom :: Int -> [Int]
genRandom 1337 = [ 1, 96, 36, 11, 42, 47, 9, 1, 62, 73]
genRandom 7331 = [17, 73, 92, 36, 22, 72, 19, 35, 6, 74]
genRandom 2600 = [83, 98, 35, 84, 44, 61, 54, 35, 83, 9]
genRandom 42 = [71, 71, 17, 14, 16, 91, 18, 71, 58, 75]
loop :: [Int] -> Concurrent ()
loop xs = mapM_ (atom . putStr . show) xs
action ( Concurrent ( \a - > Stop ) )
= > stop
: t action ( Concurrent ( \a - > Stop ) )
action ( Concurrent ( \a - > Stop ) ) : : Action
action ( Concurrent ( \a - > Fork Stop $ Fork Stop Stop ) )
= > fork stop fork stop stop
action ( Concurrent ( \a - > Atom $ putStr " Haskell " ) )
= > Could n't match type ` ( ) ' with ` Action '
Expected type : IO Action
Actual type : IO ( )
action ( Concurrent ( \a - > Atom $ putStr " Haskell " > > return Stop ) )
= > atom
: t Concurrent ( \a - > Atom $ putStr " Haskell " > > return Stop )
= > Concurrent ( \a - > Atom $ putStr " Haskell " > > return Stop ) : : Concurrent a
action stop
= > stop
stop
= > No instance for ( Show ( Concurrent a0 ) )
arising from a use of ` print '
action . atom . " "
= > atom
action $ atom undefined
= > atom
atom . " "
= > No instance for ( Show ( Concurrent ( ) ) )
arising from a use of ` print '
action $ fork stop
= > fork stop stop
action ( fork ( atom ( putStr " Hacker " ) ) )
= > fork atom stop
: t action ( fork ( atom ( putStr " Hacker " ) ) )
= > action ( fork ( atom ( putStr " Hacker " ) ) ) : : Action
action ( fork undefined )
= > fork * * * Exception : Prelude.undefined
action $ par stop stop
= > fork stop stop
action ( par ( atom ( putStr " think " ) ) ( atom ( putStr " hack " ) ) )
= > fork atom atom
action ( par stop $ fork stop )
= > fork stop fork stop stop
action $ par ( atom $ putChar ' x ' ) ( fork stop )
= > fork atom fork stop stop
action ( stop > > = ( \c - > stop ) )
= > stop
action ( atom ( putStrLn " whatever ... " ) > > = stop )
= > Could n't match expected type ` ( ) - > Concurrent a0 '
with actual type ` Concurrent a1 '
In the second argument of ` ( > > =) ' , namely ` stop '
In the first argument of ` action ' , namely
stop > > = stop
= > Could n't match expected type ` a0 - > Concurrent b0 '
with actual type ` Concurrent a1 '
In the second argument of ` ( > > =) ' , namely ` stop '
: t stop > > = stop
= > Could n't match expected type ` a0 - > Concurrent b0 '
with actual type ` Concurrent a1 '
In the second argument of ` ( > > =) ' , namely ` stop '
action ( fork stop > > = \ _ - > fork stop )
= > fork stop fork stop stop
run > 183969836351184424447619541356283739
run ex1
Haskell177173719217361422167291191835716587475
action (Concurrent (\a -> Stop))
=> stop
:t action (Concurrent (\a -> Stop))
action (Concurrent (\a -> Stop)) :: Action
action (Concurrent (\a -> Fork Stop $ Fork Stop Stop))
=> fork stop fork stop stop
action (Concurrent (\a -> Atom $ putStr "Haskell"))
=> Couldn't match type `()' with `Action'
Expected type: IO Action
Actual type: IO ()
action (Concurrent (\a -> Atom $ putStr "Haskell" >> return Stop))
=> atom
:t Concurrent (\a -> Atom $ putStr "Haskell" >> return Stop)
=> Concurrent (\a -> Atom $ putStr "Haskell" >> return Stop) :: Concurrent a
action stop
=> stop
stop
=> No instance for (Show (Concurrent a0))
arising from a use of `print'
action . atom . putStrLn $ "Haskell"
=> atom
action $ atom undefined
=> atom
atom . putStrLn $ "Haskell"
=> No instance for (Show (Concurrent ()))
arising from a use of `print'
action $ fork stop
=> fork stop stop
action (fork (atom (putStr "Hacker")))
=> fork atom stop
:t action (fork (atom (putStr "Hacker")))
=> action (fork (atom (putStr "Hacker"))) :: Action
action (fork undefined)
=> fork *** Exception: Prelude.undefined
action $ par stop stop
=> fork stop stop
action (par (atom (putStr "think")) (atom (putStr "hack")))
=> fork atom atom
action (par stop $ fork stop)
=> fork stop fork stop stop
action $ par (atom $ putChar 'x') (fork stop)
=> fork atom fork stop stop
action (stop >>= (\c -> stop))
=> stop
action (atom (putStrLn "whatever...") >>= stop)
=> Couldn't match expected type `() -> Concurrent a0'
with actual type `Concurrent a1'
In the second argument of `(>>=)', namely `stop'
In the first argument of `action', namely
stop >>= stop
=> Couldn't match expected type `a0 -> Concurrent b0'
with actual type `Concurrent a1'
In the second argument of `(>>=)', namely `stop'
:t stop >>= stop
=> Couldn't match expected type `a0 -> Concurrent b0'
with actual type `Concurrent a1'
In the second argument of `(>>=)', namely `stop'
action (fork stop >>= \_ -> fork stop)
=> fork stop fork stop stop
run ex0
=> 183969836351184424447619541356283739
run ex1
Haskell177173719217361422167291191835716587475
-}
|
1c095e6506f76d109f4efe841addd07034fb5492dcce66bb1f9061381303155c | huangz1990/SICP-answers | p22-iter-factorial.scm | ;;; p22-iter-factorial.scm
(define (factorial n)
(fact-iter 1 1 n))
(define (fact-iter product counter max-count)
(if (> counter max-count)
product
(fact-iter (* counter product)
(+ counter 1)
max-count)))
| null | https://raw.githubusercontent.com/huangz1990/SICP-answers/15e3475003ef10eb738cf93c1932277bc56bacbe/chp1/code/p22-iter-factorial.scm | scheme | p22-iter-factorial.scm |
(define (factorial n)
(fact-iter 1 1 n))
(define (fact-iter product counter max-count)
(if (> counter max-count)
product
(fact-iter (* counter product)
(+ counter 1)
max-count)))
|
e171d3b161bdd2bf24167ae817729cdc820a8443dc319b38f41d33708d78fa75 | typedclojure/typedclojure | rec_type.clj | (ns clojure.core.typed.test.rec-type
(:require [typed.clojure :as t])
(:import (clojure.lang IMapEntry)))
( t / defalias ( Rec [ x ] ( t / Map Any ( U [ Any - > Any ] x ) ) ) )
;
;(t/ann-form {:a (t/ann-form (fn [a] a)
; [Any -> Any])}
; RuleSet)
(t/defalias Rule [t/Any -> (t/Option t/Keyword)])
(t/defalias RuleSet
(t/Rec [x]
(t/Map t/Any (t/U Rule x))))
(t/defalias Report
(t/Rec [x]
(t/Map t/Any (t/U t/Keyword x))))
(t/defalias Data
(t/Map t/Any t/Any))
(t/ann clean [RuleSet Data -> Data])
(defn clean [rules data]
(reduce (t/ann-form (fn [cleaned-up kv]
(let [rule-path (key kv)
datum (val kv)]
(if-let [rule (get rules rule-path)]
(assoc cleaned-up rule-path datum)
cleaned-up)))
[Data (IMapEntry t/Any t/Any) -> Data])
{} data))
( t / ann enforce [ RuleSet Data - > ( t / Option Report ) ] )
( defn enforce [ ruleset data ]
; (let [result (reduce (ann-form (fn [report kv]
; (let [rule-path (key kv)
; rule (val kv)
; datum (get data rule-path)]
; (if-let [message (rule datum)]
; (assoc report rule-path message)
; report)))
; [Report (IMapEntry Any Rule) -> Report])
; (reduce (ann-form (fn [total k]
; (if (not (contains? ruleset k))
; (assoc total k ::not-in-schema)
; total))
; [Report Any -> Report])
; {} (keys data))
; (seq ruleset))]
; (if (not (empty? result))
; result)))
#_(t/ann enforce [RuleSet Data -> (t/Option Report)])
#_(defn enforce [ruleset data]
(let [result (reduce (ann-form (fn [report kv]
(let [rule-path (key kv)
sub (val kv)
datum (get data rule-path)]
(if (map? sub)
(if (map? datum)
(if-let [sub-errors (enforce sub datum)]
(assoc report rule-path sub-errors)
report)
(assoc report rule-path ::map-expected))
(if-let [message (sub datum)]
(assoc report rule-path message)
report))))
[Report (IMapEntry t/Any (t/U Rule RuleSet)) -> Report])
(reduce (ann-form (fn [total k]
(if (not (contains? ruleset k))
(assoc total k ::not-in-schema)
total))
[Report t/Any -> Report])
{} (keys data))
(seq ruleset))]
(if (not (empty? result))
result)))
| null | https://raw.githubusercontent.com/typedclojure/typedclojure/97f65c59f328abff3bc80796ff8a637e7c7de9fe/typed/clj.checker/test/clojure/core/typed/test/rec_type.clj | clojure |
(t/ann-form {:a (t/ann-form (fn [a] a)
[Any -> Any])}
RuleSet)
(let [result (reduce (ann-form (fn [report kv]
(let [rule-path (key kv)
rule (val kv)
datum (get data rule-path)]
(if-let [message (rule datum)]
(assoc report rule-path message)
report)))
[Report (IMapEntry Any Rule) -> Report])
(reduce (ann-form (fn [total k]
(if (not (contains? ruleset k))
(assoc total k ::not-in-schema)
total))
[Report Any -> Report])
{} (keys data))
(seq ruleset))]
(if (not (empty? result))
result))) | (ns clojure.core.typed.test.rec-type
(:require [typed.clojure :as t])
(:import (clojure.lang IMapEntry)))
( t / defalias ( Rec [ x ] ( t / Map Any ( U [ Any - > Any ] x ) ) ) )
(t/defalias Rule [t/Any -> (t/Option t/Keyword)])
(t/defalias RuleSet
(t/Rec [x]
(t/Map t/Any (t/U Rule x))))
(t/defalias Report
(t/Rec [x]
(t/Map t/Any (t/U t/Keyword x))))
(t/defalias Data
(t/Map t/Any t/Any))
(t/ann clean [RuleSet Data -> Data])
(defn clean [rules data]
(reduce (t/ann-form (fn [cleaned-up kv]
(let [rule-path (key kv)
datum (val kv)]
(if-let [rule (get rules rule-path)]
(assoc cleaned-up rule-path datum)
cleaned-up)))
[Data (IMapEntry t/Any t/Any) -> Data])
{} data))
( t / ann enforce [ RuleSet Data - > ( t / Option Report ) ] )
( defn enforce [ ruleset data ]
#_(t/ann enforce [RuleSet Data -> (t/Option Report)])
#_(defn enforce [ruleset data]
(let [result (reduce (ann-form (fn [report kv]
(let [rule-path (key kv)
sub (val kv)
datum (get data rule-path)]
(if (map? sub)
(if (map? datum)
(if-let [sub-errors (enforce sub datum)]
(assoc report rule-path sub-errors)
report)
(assoc report rule-path ::map-expected))
(if-let [message (sub datum)]
(assoc report rule-path message)
report))))
[Report (IMapEntry t/Any (t/U Rule RuleSet)) -> Report])
(reduce (ann-form (fn [total k]
(if (not (contains? ruleset k))
(assoc total k ::not-in-schema)
total))
[Report t/Any -> Report])
{} (keys data))
(seq ruleset))]
(if (not (empty? result))
result)))
|
01055c5724f31f52dbdd18ca79d9d458c77f00e1fb5a14b1e335f65725450bd4 | mccraigmccraig/twitter-streaming-client | project.clj | (def shared
'[[joda-time "2.8.2"]
[ch.qos.logback/logback-classic "1.0.11"]
[org.slf4j/slf4j-api "1.7.5"]
[org.slf4j/jcl-over-slf4j "1.7.5"]
[org.slf4j/log4j-over-slf4j "1.7.5"]
[org.slf4j/jul-to-slf4j "1.7.5"]
[org.clojure/core.incubator "0.1.3"]
[org.clojure/tools.logging "0.3.1"]
[org.clojure/tools.macro "0.1.2"]
[org.clojure/data.json "0.2.6"]
[twitter-api "0.7.8"]])
(defproject twitter-streaming-client/twitter-streaming-client "0.3.2"
:description "a clojure based client for Twitter's streaming API"
:url "-streaming-client"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:min-lein-version "2.0.0"
:plugins [[lein-midje "3.1.3"]]
:dependencies ~(conj shared '[org.clojure/clojure "1.6.0"])
:dev-dependencies []
:aliases {"all" ["with-profile" "dev,1.4:dev,1.5:dev,1.6:dev,1.7"]}
:profiles {:all {:dependencies ~shared}
:dev {:dependencies [[midje "1.6.3"]]}
:production {}
:1.4 {:dependencies [[org.clojure/clojure "1.4.0"]]}
:1.5 {:dependencies [[org.clojure/clojure "1.5.1"]]}
:1.6 {:dependencies [[org.clojure/clojure "1.6.0"]]}
:1.7 {:dependencies [[org.clojure/clojure "1.7.0"]]}})
| null | https://raw.githubusercontent.com/mccraigmccraig/twitter-streaming-client/08d6b92d231a53e3262368646917058081fe2dd7/project.clj | clojure | (def shared
'[[joda-time "2.8.2"]
[ch.qos.logback/logback-classic "1.0.11"]
[org.slf4j/slf4j-api "1.7.5"]
[org.slf4j/jcl-over-slf4j "1.7.5"]
[org.slf4j/log4j-over-slf4j "1.7.5"]
[org.slf4j/jul-to-slf4j "1.7.5"]
[org.clojure/core.incubator "0.1.3"]
[org.clojure/tools.logging "0.3.1"]
[org.clojure/tools.macro "0.1.2"]
[org.clojure/data.json "0.2.6"]
[twitter-api "0.7.8"]])
(defproject twitter-streaming-client/twitter-streaming-client "0.3.2"
:description "a clojure based client for Twitter's streaming API"
:url "-streaming-client"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:min-lein-version "2.0.0"
:plugins [[lein-midje "3.1.3"]]
:dependencies ~(conj shared '[org.clojure/clojure "1.6.0"])
:dev-dependencies []
:aliases {"all" ["with-profile" "dev,1.4:dev,1.5:dev,1.6:dev,1.7"]}
:profiles {:all {:dependencies ~shared}
:dev {:dependencies [[midje "1.6.3"]]}
:production {}
:1.4 {:dependencies [[org.clojure/clojure "1.4.0"]]}
:1.5 {:dependencies [[org.clojure/clojure "1.5.1"]]}
:1.6 {:dependencies [[org.clojure/clojure "1.6.0"]]}
:1.7 {:dependencies [[org.clojure/clojure "1.7.0"]]}})
|
|
de39717005a029f0afb57d28137ae1490fc8f3165ddcd83e575afc8f992243e0 | noamz/linlam | Utils.hs | module LinLam.Utils where
import qualified Data.Set as Set
-- orbit of an element under a function
orbit :: Ord a => a -> (a -> a) -> [a]
orbit x f = go Set.empty x
where
go s x
| Set.member x s = []
| otherwise = x : go (Set.insert x s) (f x)
| null | https://raw.githubusercontent.com/noamz/linlam/bbdcaf27b9ec0f52f5d9f0e4f68c05830429a140/src/LinLam/Utils.hs | haskell | orbit of an element under a function | module LinLam.Utils where
import qualified Data.Set as Set
-- | Orbit of an element under a function: the iterates of @f@ starting
-- from @x@, stopping just before the first element that repeats.
orbit :: Ord a => a -> (a -> a) -> [a]
orbit x f = go Set.empty x
  where
    -- s accumulates the elements already produced
    go s x
      | Set.member x s = []
      | otherwise = x : go (Set.insert x s) (f x)
|
eb89484801ceb3caf72f9b9733e8f99fd7a454da8d6285f4a57c021e937e7f94 | MarkCurtiss/sicp | 3_21_to_3_27_spec.scm | (load "3_21_to_3_27.scm")
;; Specs for the FIFO queue: after inserting a b 2 4 (6 8) and deleting
;; once, the printed queue should have dropped the front element 'a.
(describe "Queues"
  (it "prints a queue"
    (lambda ()
      (define a-queue (make-queue))
      (for-each
       (lambda (x) (insert-queue! a-queue x))
       (list 'a 'b 2 4 '(6 8)))
      (delete-queue! a-queue)
      (assert (equal?
               (print-queue a-queue)
               '(b 2 4 (6 8))))))
  )
;; Specs for the doubly-linked node used by the deque: value accessor
;; plus prev/next pointer wiring; unset pointers read back as '().
(describe "Node"
  (it "has a value and links to the previous and next node"
    (lambda ()
      (define first-node (make-node 'a))
      (define second-node (make-node 'b))
      (set-next-ptr-node! first-node second-node)
      (set-prev-ptr-node! second-node first-node)
      (assert (equal?
               (get-value-node first-node)
               'a))
      (assert (equal?
               (get-value-node second-node)
               'b))
      (assert (equal?
               (get-next-ptr-node first-node)
               second-node))
      (assert (equal?
               (get-next-ptr-node second-node)
               '()))
      (assert (equal?
               (get-prev-ptr-node second-node)
               first-node))
      (assert (equal?
               (get-value-node (get-prev-ptr-node second-node))
               (get-value-node first-node)))
      (assert (equal?
               (get-prev-ptr-node first-node)
               '()))))
  )
;; Specs for the double-ended queue: emptiness, front/rear pointers, and
;; insertion/deletion at both ends.
;; NOTE(review): `front-delete-deque!` is spelled without the second "ue",
;; unlike the other `-dequeue!` operations -- presumably it matches the
;; implementation's name; confirm against 3_21_to_3_27.scm.
(describe "Dequeue"
  (it "tells you if you have an empty dequeue or not"
    (lambda ()
      (define dq (make-dequeue))
      (assert (empty-dequeue? dq))))
  (it "maintains pointers to the start and end of the dequeue"
    (lambda ()
      (define dq (make-dequeue))
      (rear-insert-dequeue! dq 'a)
      (rear-insert-dequeue! dq 'b)
      (rear-insert-dequeue! dq 3)
      (assert (equal?
               (get-value-node (front-dequeue dq))
               'a))
      (assert (equal?
               (get-value-node (rear-dequeue dq))
               3))))
  ;; front insertion reverses the insertion order in the printed deque
  (it "lets you insert at the start of the dequeue"
    (lambda ()
      (define dq (make-dequeue))
      (front-insert-dequeue! dq 'a)
      (front-insert-dequeue! dq 'b)
      (front-insert-dequeue! dq 3)
      (assert (equal?
               (print-dequeue dq)
               '(3 b a)))))
  (it "lets you insert at the end of the dequeue"
    (lambda ()
      (define dq (make-dequeue))
      (rear-insert-dequeue! dq 'a)
      (rear-insert-dequeue! dq 'b)
      (rear-insert-dequeue! dq 3)
      (assert (equal?
               (print-dequeue dq)
               '(a b 3)))))
  (it "lets you delete from the start of the dequeue"
    (lambda ()
      (define dq (make-dequeue))
      (for-each (lambda (x) (rear-insert-dequeue! dq x)) '(a b 3 4))
      (front-delete-deque! dq)
      (front-delete-deque! dq)
      (assert (equal?
               (print-dequeue dq)
               '(3 4)))))
  (it "lets you delete from the rear of the dequeue"
    (lambda ()
      (define dq (make-dequeue))
      (for-each (lambda (x) (rear-insert-dequeue! dq x)) '(a b 3 4))
      (rear-delete-dequeue! dq)
      (rear-delete-dequeue! dq)
      (assert (equal?
               (print-dequeue dq)
               '(a b)))))
  )
;; Specs for the table implementations: a one-key table with a
;; caller-supplied equality predicate, and a table keyed by lists of
;; arbitrarily many keys.
(describe "Tables"
  (it "lets you define your own equality test"
    (lambda ()
      ;; a key "matches" when twice the probe equals the stored key
      (define (double-key? key-1 key-2) (= (* 2 key-1) key-2))
      (define table (make-table double-key?))
      ((table 'insert!) 4 9)
      (assert (equal?
               ((table 'lookup) 2)
               9))
      (assert (equal?
               ((table 'lookup) 4)
               false))))
  (it "defines a table of arbitrarily many keys"
    (lambda ()
      (define multi-key-table (make-multi-key-table))
      ((multi-key-table 'insert!) '(1 2 3) 6)
      ((multi-key-table 'insert!) '(1) 8)
      (assert (equal?
               ((multi-key-table 'lookup) '(1 2 3))
               6))
      (assert (equal?
               ((multi-key-table 'lookup) '(1))
               8))
      (assert (equal?
               ((multi-key-table 'lookup) '(1 2))
               false))))
  )
| null | https://raw.githubusercontent.com/MarkCurtiss/sicp/8b55a3371458014c815ba8792218b6440127ab40/chapter_3_exercises/spec/3_21_to_3_27_spec.scm | scheme | (load "3_21_to_3_27.scm")
(describe "Queues"
(it "prints a queue"
(lambda ()
(define a-queue (make-queue))
(for-each
(lambda (x) (insert-queue! a-queue x))
(list 'a 'b 2 4 '(6 8)))
(delete-queue! a-queue)
(assert (equal?
(print-queue a-queue)
'(b 2 4 (6 8))))))
)
(describe "Node"
(it "has a value and links to the previous and next node"
(lambda ()
(define first-node (make-node 'a))
(define second-node (make-node 'b))
(set-next-ptr-node! first-node second-node)
(set-prev-ptr-node! second-node first-node)
(assert (equal?
(get-value-node first-node)
'a))
(assert (equal?
(get-value-node second-node)
'b))
(assert (equal?
(get-next-ptr-node first-node)
second-node))
(assert (equal?
(get-next-ptr-node second-node)
'()))
(assert (equal?
(get-prev-ptr-node second-node)
first-node))
(assert (equal?
(get-value-node (get-prev-ptr-node second-node))
(get-value-node first-node)))
(assert (equal?
(get-prev-ptr-node first-node)
'()))))
)
(describe "Dequeue"
(it "tells you if you have an empty dequeue or not"
(lambda ()
(define dq (make-dequeue))
(assert (empty-dequeue? dq))))
(it "maintains pointers to the start and end of the dequeue"
(lambda ()
(define dq (make-dequeue))
(rear-insert-dequeue! dq 'a)
(rear-insert-dequeue! dq 'b)
(rear-insert-dequeue! dq 3)
(assert (equal?
(get-value-node (front-dequeue dq))
'a))
(assert (equal?
(get-value-node (rear-dequeue dq))
3))))
(it "lets you insert at the start of the dequeue"
(lambda ()
(define dq (make-dequeue))
(front-insert-dequeue! dq 'a)
(front-insert-dequeue! dq 'b)
(front-insert-dequeue! dq 3)
(assert (equal?
(print-dequeue dq)
'(3 b a)))))
(it "lets you insert at the end of the dequeue"
(lambda ()
(define dq (make-dequeue))
(rear-insert-dequeue! dq 'a)
(rear-insert-dequeue! dq 'b)
(rear-insert-dequeue! dq 3)
(assert (equal?
(print-dequeue dq)
'(a b 3)))))
(it "lets you delete from the start of the dequeue"
(lambda ()
(define dq (make-dequeue))
(for-each (lambda (x) (rear-insert-dequeue! dq x)) '(a b 3 4))
(front-delete-deque! dq)
(front-delete-deque! dq)
(assert (equal?
(print-dequeue dq)
'(3 4)))))
(it "lets you delete from the rear of the dequeue"
(lambda ()
(define dq (make-dequeue))
(for-each (lambda (x) (rear-insert-dequeue! dq x)) '(a b 3 4))
(rear-delete-dequeue! dq)
(rear-delete-dequeue! dq)
(assert (equal?
(print-dequeue dq)
'(a b)))))
)
(describe "Tables"
(it "lets you define your own equality test"
(lambda ()
(define (double-key? key-1 key-2) (= (* 2 key-1) key-2))
(define table (make-table double-key?))
((table 'insert!) 4 9)
(assert (equal?
((table 'lookup) 2)
9))
(assert (equal?
((table 'lookup) 4)
false))))
(it "defines a table of arbitrarily many keys"
(lambda ()
(define multi-key-table (make-multi-key-table))
((multi-key-table 'insert!) '(1 2 3) 6)
((multi-key-table 'insert!) '(1) 8)
(assert (equal?
((multi-key-table 'lookup) '(1 2 3))
6))
(assert (equal?
((multi-key-table 'lookup) '(1))
8))
(assert (equal?
((multi-key-table 'lookup) '(1 2))
false))))
)
|
|
b5f61ddc6011a0b63f1b01907968ca8f29a89ff15ffe5eb6a72ededeb05a7039 | alvatar/spheres | dataformat-csv.scm | ;;; @section Tests
;;; The @code{csv.scm} test suite can be enabled by editing the source code
;;; file and loading @uref{/, Testeez}.
;; these snow tests were made from the testeez tests of the original package.
;; some of the conversion has been done manually, the rest is done by macros
;; Test fixtures: one-character CR/LF strings, reader-makers configured
;; with various options, and a sample input whose second line starts
;; with the '#' comment character.
(test-define "define an ascii CR char"
             cr
             (string (integer->char 13)))
(test-define "define an ascii LF char"
             lf
             (string (integer->char 10)))
(test-define "define a reader-maker that strips whitespace"
             make-ws-stripping-reader
             (make-csv-reader-maker
              '((strip-leading-whitespace? . #t)
                (strip-trailing-whitespace? . #t))))
(test-define "define a newline-adapting reader-maker"
             make-nl-adapt-reader
             (make-csv-reader-maker '((newline-type . adapt))))
(test-define "define an input string with pound char"
             str
             (string-append "a,b,c" lf
                            "#d,e,f" lf
                            "g,h,i" lf))
(test-define "define reader-maker with pound as comment char"
             make-reader-with-pound-quote
             (make-csv-reader-maker '((comment-chars . (#\#)))))
;; The test suite proper: each test/equal pairs a csv->list (or
;; csv->sxml) call with the expected parse result.
(%csv:testeez
 "csv.scm"
 (test/equal "simple"
             (csv->list (string-append
                         "a" lf "b" lf "c" lf "d" lf ""))
             '(("a") ("b") ("c") ("d")))
 (test/equal "simple"
             (csv->list (string-append " a "
                                       lf
                                       " b "
                                       lf
                                       " c "
                                       lf
                                       " d "
                                       lf
                                       ""))
             '((" a ") (" b ") (" c ") (" d ")))
 (test/equal "simple"
             (csv->list (string-append "aaa,bbb,ccc" cr lf
                                       "1,2,3" cr lf))
             '(("aaa" "bbb" "ccc") ("1" "2" "3")))
 ;; quoting behaviour, including RFC-4180 style doubled-quote escapes
 (test/equal "quoted field"
             (csv->list "aaa,\"bbb\",ccc")
             '(("aaa" "bbb" "ccc")))
 (test/equal "quoted field with comma"
             (csv->list "aaa,\"bbb,bbb\",ccc")
             '(("aaa" "bbb,bbb" "ccc")))
 (test/equal "quoted field followed by whitespace"
             (csv->list "aaa,\"bbb\" ,ccc")
             '(("aaa" "bbb" "ccc")))
 (test/equal "quoted field with newline in it"
             (csv->list (string-append "aaa,\"b" lf "b\",ccc" lf
                                       "ddd,eee,fff" lf))
             `(("aaa" ,(string-append "b" lf "b") "ccc")
               ("ddd" "eee" "fff")))
 (test/equal "quoted field with doubling escape in middle"
             (csv->list "aaa,\"b\"\"b\",ccc")
             '(("aaa" "b\"b" "ccc")))
 (test/equal "quoted field with doubling escape at beginning"
             (csv->list "aaa,\"\"\"bbb\",ccc")
             '(("aaa" "\"bbb" "ccc")))
 (test/equal "quoted field with doubling escape at end"
             (csv->list "aaa,\"bbb\"\"\",ccc")
             '(("aaa" "bbb\"" "ccc")))
 (test/equal "quoted field with unterminated quote"
             (csv->list "aaa,\"bbb,ccc")
             '(("aaa" "bbb,ccc")))
 (test/equal "quoted field followed by eof"
             (csv->list "aaa,\"bbb\"")
             '(("aaa" "bbb")))
 ;; reader-maker options defined in the fixtures above
 (test/equal "whitespace strip on simple row terminated by eof"
             (csv->list (make-ws-stripping-reader
                         " a , b , c "))
             '(("a" "b" "c")))
 (test/equal "try newline-adapting reader-maker first time"
             (csv->list (make-nl-adapt-reader
                         (string-append "aaa,bbb" lf
                                        "ccc" cr ",ddd" cr lf
                                        "eee,fff")))
             `(("aaa" "bbb")
               (,(string-append "ccc" cr)
                ,(string-append "ddd" cr))
               ("eee" "fff")))
 (test/equal "try newline-adapting reader-maker second time"
             (csv->list (make-nl-adapt-reader
                         (string-append "aaa,bbb" cr lf
                                        "ccc" cr ",ddd" lf cr lf
                                        "eee,fff" cr lf)))
             `(("aaa" "bbb")
               (,(string-append "ccc" cr)
                ,(string-append "ddd" lf))
               ("eee" "fff")))
 (test/equal "read str without pound as comment char"
             (csv->list str)
             '(("a" "b" "c") ("#d" "e" "f") ("g" "h" "i")))
 (test/equal "read str with pound as comment char"
             (csv->list (make-reader-with-pound-quote str))
             '(("a" "b" "c") ("g" "h" "i")))
 ;; sxml conversion, with default and caller-supplied row/column names
 (test/equal "csv->sxml without row and column names"
             (csv->sxml (string-append "aaa,bbb,ccc" cr lf
                                       "1,2,3" cr lf))
             `(,(string->symbol "*TOP*")
               (row (col-0 "aaa") (col-1 "bbb") (col-2 "ccc"))
               (row (col-0 "1") (col-1 "2") (col-2 "3"))))
 (test/equal "csv->sxml with row and column names"
             (csv->sxml (string-append "aaa,bbb,ccc" cr lf
                                       "1,2,3" cr lf)
                        'foo
                        '(first second third))
             `(,(string->symbol "*TOP*")
               (foo (first "aaa") (second "bbb") (third "ccc"))
               (foo (first "1") (second "2") (third "3"))))
 ;; TODO: Add more test cases.
 )
| null | https://raw.githubusercontent.com/alvatar/spheres/568836f234a469ef70c69f4a2d9b56d41c3fc5bd/test/dataformat-csv.scm | scheme | @section Tests
The @code{csv.scm} test suite can be enabled by editing the source code
file and loading @uref{/, Testeez}.
these snow tests were made from the testeez tests of the original package.
some of the conversion has been done manually, the rest is done by macros
TODO: Add more test cases. |
(test-define "define an ascii CR char"
cr
(string (integer->char 13)))
(test-define "define an ascii LF char"
lf
(string (integer->char 10)))
(test-define "define a reader-maker that strips whitespace"
make-ws-stripping-reader
(make-csv-reader-maker
'((strip-leading-whitespace? . #t)
(strip-trailing-whitespace? . #t))))
(test-define "define a newline-adapting reader-maker"
make-nl-adapt-reader
(make-csv-reader-maker '((newline-type . adapt))))
(test-define "define an input string with pound char"
str
(string-append "a,b,c" lf
"#d,e,f" lf
"g,h,i" lf))
(test-define "define reader-maker with pound as comment char"
make-reader-with-pound-quote
(make-csv-reader-maker '((comment-chars . (#\#)))))
(%csv:testeez
"csv.scm"
(test/equal "simple"
(csv->list (string-append
"a" lf "b" lf "c" lf "d" lf ""))
'(("a") ("b") ("c") ("d")))
(test/equal "simple"
(csv->list (string-append " a "
lf
" b "
lf
" c "
lf
" d "
lf
""))
'((" a ") (" b ") (" c ") (" d ")))
(test/equal "simple"
(csv->list (string-append "aaa,bbb,ccc" cr lf
"1,2,3" cr lf))
'(("aaa" "bbb" "ccc") ("1" "2" "3")))
(test/equal "quoted field"
(csv->list "aaa,\"bbb\",ccc")
'(("aaa" "bbb" "ccc")))
(test/equal "quoted field with comma"
(csv->list "aaa,\"bbb,bbb\",ccc")
'(("aaa" "bbb,bbb" "ccc")))
(test/equal "quoted field followed by whitespace"
(csv->list "aaa,\"bbb\" ,ccc")
'(("aaa" "bbb" "ccc")))
(test/equal "quoted field with newline in it"
(csv->list (string-append "aaa,\"b" lf "b\",ccc" lf
"ddd,eee,fff" lf))
`(("aaa" ,(string-append "b" lf "b") "ccc")
("ddd" "eee" "fff")))
(test/equal "quoted field with doubling escape in middle"
(csv->list "aaa,\"b\"\"b\",ccc")
'(("aaa" "b\"b" "ccc")))
(test/equal "quoted field with doubling escape at beginning"
(csv->list "aaa,\"\"\"bbb\",ccc")
'(("aaa" "\"bbb" "ccc")))
(test/equal "quoted field with doubling escape at end"
(csv->list "aaa,\"bbb\"\"\",ccc")
'(("aaa" "bbb\"" "ccc")))
(test/equal "quoted field with unterminated quote"
(csv->list "aaa,\"bbb,ccc")
'(("aaa" "bbb,ccc")))
(test/equal "quoted field followed by eof"
(csv->list "aaa,\"bbb\"")
'(("aaa" "bbb")))
(test/equal "whitespace strip on simple row terminated by eof"
(csv->list (make-ws-stripping-reader
" a , b , c "))
'(("a" "b" "c")))
(test/equal "try newline-adapting reader-maker first time"
(csv->list (make-nl-adapt-reader
(string-append "aaa,bbb" lf
"ccc" cr ",ddd" cr lf
"eee,fff")))
`(("aaa" "bbb")
(,(string-append "ccc" cr)
,(string-append "ddd" cr))
("eee" "fff")))
(test/equal "try newline-adapting reader-maker second time"
(csv->list (make-nl-adapt-reader
(string-append "aaa,bbb" cr lf
"ccc" cr ",ddd" lf cr lf
"eee,fff" cr lf)))
`(("aaa" "bbb")
(,(string-append "ccc" cr)
,(string-append "ddd" lf))
("eee" "fff")))
(test/equal "read str without pound as comment char"
(csv->list str)
'(("a" "b" "c") ("#d" "e" "f") ("g" "h" "i")))
(test/equal "read str with pound as comment char"
(csv->list (make-reader-with-pound-quote str))
'(("a" "b" "c") ("g" "h" "i")))
(test/equal "csv->sxml without row and column names"
(csv->sxml (string-append "aaa,bbb,ccc" cr lf
"1,2,3" cr lf))
`(,(string->symbol "*TOP*")
(row (col-0 "aaa") (col-1 "bbb") (col-2 "ccc"))
(row (col-0 "1") (col-1 "2") (col-2 "3"))))
(test/equal "csv->sxml with row and column names"
(csv->sxml (string-append "aaa,bbb,ccc" cr lf
"1,2,3" cr lf)
'foo
'(first second third))
`(,(string->symbol "*TOP*")
(foo (first "aaa") (second "bbb") (third "ccc"))
(foo (first "1") (second "2") (third "3"))))
)
|
db38c5e61d5f76fbce7b40c587ca767668686b1a38ac387037707445ae7c2bf6 | input-output-hk/cardano-sl | UnitsOfMeasure.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
module Pos.Util.UnitsOfMeasure
( UnitOfMeasure (..)
, MeasuredIn(..)
) where
import Control.Lens (at, (?~))
import Data.Aeson (FromJSON (..), ToJSON (..), Value (..), object,
withObject, (.:), (.=))
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.Builder as B
import Formatting ((%))
import qualified Formatting as F
import Formatting.Buildable (Buildable (..))
import Pos.Core.Util.LogSafe (BuildableSafeGen (..))
import Universum
import Data.Swagger (NamedSchema (..), Referenced (..),
SwaggerType (..), ToSchema (..), enum_, properties,
required, type_)
-- | A finite sum type representing time units we might want to show to
-- clients. The idea is that whenever we have a quantity represeting some
-- form of time, we should render it together with the relevant unit, to
-- not leave anything to guessing.
-- NOTE(review): the Percentage100 and BlocksPerSecond haddock lines had
-- lost their "-- |" markers (leaving invalid constructor syntax); restored.
data UnitOfMeasure =
    Seconds
  | Milliseconds
  | Microseconds
  -- | %, ranging from 0 to 100.
  | Percentage100
  -- | Number of blocks.
  | Blocks
  -- | Number of blocks per second.
  | BlocksPerSecond
  | Bytes
  | Lovelace
  | LovelacePerByte
  deriving (Show, Eq, Ord)
-- | Human-readable label for each unit.  'ToJSON' below reuses this
-- text verbatim, so these strings are part of the wire format (note
-- that 'Percentage100' renders as "percent").
instance Buildable UnitOfMeasure where
    build = \case
        Bytes -> "bytes"
        LovelacePerByte -> "lovelace/byte"
        Lovelace -> "lovelace"
        Seconds -> "seconds"
        Milliseconds -> "milliseconds"
        Microseconds -> "microseconds"
        Percentage100 -> "percent"
        Blocks -> "blocks"
        BlocksPerSecond -> "blocks/second"
-- | Serialised as a JSON string holding the label above.
instance ToJSON UnitOfMeasure where
    toJSON = String . T.toStrict . B.toLazyText . build
-- | Represent data with a given unit of measure
-- The unit lives at the type level (phantom parameter @u@); only the
-- quantity @a@ is stored at runtime.
data MeasuredIn (u :: UnitOfMeasure) a
    = MeasuredIn a
    deriving (Show, Eq, Ord)
-- | The safe-logging rendering simply reuses the plain rendering.
instance (Demote u, Buildable a) => BuildableSafeGen (MeasuredIn u a) where
    buildSafeGen _ = build
-- | Rendered as "<quantity> <unit>", with the unit label taken from the
-- type via 'demote'.
instance (Demote u, Buildable a) => Buildable (MeasuredIn u a) where
    build (MeasuredIn a) = F.bprint
        (F.build % " " % F.build)
        a
        (demote $ Proxy @u)
-- | Encoded as an object: {"unit": <label>, "quantity": <value>}.
instance (Demote u, ToJSON a) => ToJSON (MeasuredIn u a) where
    toJSON (MeasuredIn a) = object
        [ "unit" .= demote (Proxy @u)
        , "quantity" .= toJSON a
        ]
-- | Inverse of the encoding above: the "unit" field must equal the
-- label of the type-level unit @u@ exactly, otherwise parsing fails
-- with a message showing the expected shape.
instance (Demote u, FromJSON a) => FromJSON (MeasuredIn u a) where
    parseJSON = withObject "MeasuredIn" $ \o -> do
        verifyUnit =<< o .: "unit"
        MeasuredIn <$> o .: "quantity"
      where
        -- expected unit label as a plain String, used in the error text
        unitS = toString $ T.toStrict $ B.toLazyText $ build $ demote $ Proxy @u
        verifyUnit = \case
            u@(String _) | u == toJSON (demote $ Proxy @u) ->
                pure ()
            _ ->
                fail
                    $ "failed to parse quantified value. Expected value in '"
                    <> unitS <> "' but got something else. e.g.: "
                    <> "{ \"unit\": \"" <> unitS <> "\", \"quantity\": ...}"
-- | Swagger schema: an object requiring "quantity" (the schema of @a@)
-- and "unit", where "unit" is a string enum whose only member is the
-- label of the type-level unit @u@.
instance (Demote u, ToSchema a) => ToSchema (MeasuredIn u a) where
    declareNamedSchema _ = do
        NamedSchema _ schema <- declareNamedSchema (Proxy @a)
        pure $ NamedSchema (Just "MeasuredIn") $ mempty
            & type_ ?~ SwaggerObject
            & required .~ ["quantity", "unit"]
            & properties .~ (mempty
                & at "quantity" ?~ Inline schema
                & at "unit" ?~ (Inline $ mempty
                    & type_ ?~ SwaggerString
                    & enum_ ?~ [toJSON $ demote $ Proxy @u]
                )
            )
--
-- Internal
--
-- | Bring a type back to the world of value (invert of promote)
-- Bridges the promoted (type-level) unit back to a term; each instance
-- maps a promoted constructor to itself.
class Demote (u :: UnitOfMeasure) where
  demote :: Proxy u -> UnitOfMeasure
instance Demote 'Bytes where demote _ = Bytes
instance Demote 'LovelacePerByte where demote _ = LovelacePerByte
instance Demote 'Lovelace where demote _ = Lovelace
instance Demote 'Seconds where demote _ = Seconds
instance Demote 'Milliseconds where demote _ = Milliseconds
instance Demote 'Microseconds where demote _ = Microseconds
instance Demote 'Percentage100 where demote _ = Percentage100
instance Demote 'Blocks where demote _ = Blocks
instance Demote 'BlocksPerSecond where demote _ = BlocksPerSecond
| null | https://raw.githubusercontent.com/input-output-hk/cardano-sl/1499214d93767b703b9599369a431e67d83f10a2/lib/src/Pos/Util/UnitsOfMeasure.hs | haskell | # LANGUAGE DataKinds #
| A finite sum type representing time units we might want to show to
clients. The idea is that whenever we have a quantity represeting some
form of time, we should render it together with the relevant unit, to
not leave anything to guessing.
| Number of blocks.
| Represent data with a given unit of measure
| Bring a type back to the world of value (invert of promote) | # LANGUAGE KindSignatures #
module Pos.Util.UnitsOfMeasure
( UnitOfMeasure (..)
, MeasuredIn(..)
) where
import Control.Lens (at, (?~))
import Data.Aeson (FromJSON (..), ToJSON (..), Value (..), object,
withObject, (.:), (.=))
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.Builder as B
import Formatting ((%))
import qualified Formatting as F
import Formatting.Buildable (Buildable (..))
import Pos.Core.Util.LogSafe (BuildableSafeGen (..))
import Universum
import Data.Swagger (NamedSchema (..), Referenced (..),
SwaggerType (..), ToSchema (..), enum_, properties,
required, type_)
-- | A finite sum type representing the units of measure attached to
-- quantities shown to clients.
-- NOTE(review): the Percentage100 and BlocksPerSecond haddock lines had
-- lost their "-- |" markers (leaving invalid constructor syntax); restored.
data UnitOfMeasure =
    Seconds
  | Milliseconds
  | Microseconds
  -- | %, ranging from 0 to 100.
  | Percentage100
  -- | Number of blocks.
  | Blocks
  -- | Number of blocks per second.
  | BlocksPerSecond
  | Bytes
  | Lovelace
  | LovelacePerByte
  deriving (Show, Eq, Ord)
instance Buildable UnitOfMeasure where
build = \case
Bytes -> "bytes"
LovelacePerByte -> "lovelace/byte"
Lovelace -> "lovelace"
Seconds -> "seconds"
Milliseconds -> "milliseconds"
Microseconds -> "microseconds"
Percentage100 -> "percent"
Blocks -> "blocks"
BlocksPerSecond -> "blocks/second"
instance ToJSON UnitOfMeasure where
toJSON = String . T.toStrict . B.toLazyText . build
data MeasuredIn (u :: UnitOfMeasure) a
= MeasuredIn a
deriving (Show, Eq, Ord)
instance (Demote u, Buildable a) => BuildableSafeGen (MeasuredIn u a) where
buildSafeGen _ = build
instance (Demote u, Buildable a) => Buildable (MeasuredIn u a) where
build (MeasuredIn a) = F.bprint
(F.build % " " % F.build)
a
(demote $ Proxy @u)
instance (Demote u, ToJSON a) => ToJSON (MeasuredIn u a) where
toJSON (MeasuredIn a) = object
[ "unit" .= demote (Proxy @u)
, "quantity" .= toJSON a
]
instance (Demote u, FromJSON a) => FromJSON (MeasuredIn u a) where
parseJSON = withObject "MeasuredIn" $ \o -> do
verifyUnit =<< o .: "unit"
MeasuredIn <$> o .: "quantity"
where
unitS = toString $ T.toStrict $ B.toLazyText $ build $ demote $ Proxy @u
verifyUnit = \case
u@(String _) | u == toJSON (demote $ Proxy @u) ->
pure ()
_ ->
fail
$ "failed to parse quantified value. Expected value in '"
<> unitS <> "' but got something else. e.g.: "
<> "{ \"unit\": \"" <> unitS <> "\", \"quantity\": ...}"
instance (Demote u, ToSchema a) => ToSchema (MeasuredIn u a) where
declareNamedSchema _ = do
NamedSchema _ schema <- declareNamedSchema (Proxy @a)
pure $ NamedSchema (Just "MeasuredIn") $ mempty
& type_ ?~ SwaggerObject
& required .~ ["quantity", "unit"]
& properties .~ (mempty
& at "quantity" ?~ Inline schema
& at "unit" ?~ (Inline $ mempty
& type_ ?~ SwaggerString
& enum_ ?~ [toJSON $ demote $ Proxy @u]
)
)
-- Internal
class Demote (u :: UnitOfMeasure) where
demote :: Proxy u -> UnitOfMeasure
instance Demote 'Bytes where demote _ = Bytes
instance Demote 'LovelacePerByte where demote _ = LovelacePerByte
instance Demote 'Lovelace where demote _ = Lovelace
instance Demote 'Seconds where demote _ = Seconds
instance Demote 'Milliseconds where demote _ = Milliseconds
instance Demote 'Microseconds where demote _ = Microseconds
instance Demote 'Percentage100 where demote _ = Percentage100
instance Demote 'Blocks where demote _ = Blocks
instance Demote 'BlocksPerSecond where demote _ = BlocksPerSecond
|
89f028e332c00ff84c05fe8958108ba96bf634992cb8f1eb9f542cd1c02f3bb4 | awalterschulze/the-little-typer-exercises | chapter10-2-2-alessthanb.rkt | #lang pie
In the following exercises we 'll use the function called < = that takes two
arguments a , b and evaluates to a type representing the proposition
;; that a is less than or equal to b.
;(claim <=
( - >
; U))
;
;(define <=
; (λ (a b)
; (Σ ([k Nat])
; (= Nat (+ k a) b))))
;; Define a funciton called <=-simplify to state and prove that for all
;; Nats a, b, n we have that n+a <= b implies a <= b
;;
;; NB: You may need to use plus-assoc that was proved in Exercise 8.3.
;;
;; (claim plus-assoc
;;   (Pi ((n Nat) (m Nat) (k Nat))
;;     (= Nat (+ k (+ n m)) (+ (+ k n) m))))
;(claim <=-simplify
; (Π ([a Nat]
; [b Nat]
; [n Nat])
; (-> (<= (+ n a) b)
; (<= a b))))
;; Addition on Nat, by recursion on the first argument:
;; (+ 0 y) = y and (+ (add1 x-1) y) = (add1 (+ x-1 y)).
(claim +
  (-> Nat Nat Nat))
(define +
  (lambda (x y)
    (rec-Nat x
      y
      (lambda (_ y+x-1)
        (add1 y+x-1)))))
;; End of preamble
;; (<= a b) is the type of evidence that a is at most b: a pair of a
;; witness k together with a proof that k + a == b.
(claim <=
  (-> Nat Nat
    U))
(define <=
  (lambda (a b)
    (Sigma ((k Nat))
      (= Nat (+ k a) b))))
;; a+b+1 == a+1+b
;; Proof that a + (b+1) == (a+1) + b, by induction on a.
(claim a+b+1==a+1+b
  (Pi ((a Nat) (b Nat))
    (= Nat
      (+ a (add1 b))
      (+ (add1 a) b))))
;; Motive: the statement specialised to a given a (b is fixed first).
(claim mot-a+b+1==a+1+b
  (Pi ((a Nat) (b Nat))
    U))
(define mot-a+b+1==a+1+b
  (lambda (b a)
    (= Nat
      (+ a (add1 b))
      (+ (add1 a) b))))
;; Base case: 0 + (b+1) and 1 + b both compute to (add1 b).
(claim base-a+b+1==a+1+b
  (Pi ((b Nat))
    (= Nat
      (+ zero (add1 b))
      (+ (add1 zero) b))))
(define base-a+b+1==a+1+b
  (lambda (b)
    (same (add1 b))))
;; Step: congruence under add1 turns the hypothesis for a-1 into the
;; statement for (add1 a-1).
(claim step-a+b+1==a+1+b
  (Pi ((b Nat) (a-1 Nat))
    (-> (= Nat
          (+ a-1 (add1 b))
          (+ (add1 a-1) b))
        (= Nat
          (+ (add1 a-1) (add1 b))
          (+ (add1 (add1 a-1)) b)))))
(define step-a+b+1==a+1+b
  (lambda (b a)
    (lambda (a-1+b+1==a-1+1+b)
      (cong a-1+b+1==a-1+1+b (+ 1)))))
(define a+b+1==a+1+b
  (lambda (a b)
    (ind-Nat a
      (mot-a+b+1==a+1+b b)
      (base-a+b+1==a+1+b b)
      (step-a+b+1==a+1+b b))))
;; a+1<=b implies a<=b
;; Weakening by one: evidence for (a+1) <= b yields evidence for a <= b.
(claim add1-smaller
  (Pi ((a Nat)
       (b Nat))
    (-> (<= (add1 a) b)
      (<= a b))))
;; we have
;; there exists k where k + (a + 1) == b
;; which is equivalent to
;; there exists (car a+1<=b) where (car a+1<=b) + (a + 1) == b
;; needs to be transform to
;; there exists (car a+1<=b) + 1 where ((car a+1<=b) + 1) + a == b
;; which is equal to
;; there exists k + 1 where (k + 1) + a == b
;; which is equal to
;; there exists k where k + a == b
;; which is our goal for add1-smaller
;; we also have a+b+1==a+1+b
;; which we can use to turn
;; (car a+1<=b) + (a + 1)
;; into
;; ((car a+1<=b) + 1) + a
;; (replace a+b+1==a+1+b
;;   (lambda (here) (= here b))
;;   (+ (car a+1<=b) (+ a 1)) b)
;; The new witness is k+1; the "k + (a+1) == b" equality is transported
;; along a+b+1==a+1+b so that it reads "(k+1) + a == b".
(define add1-smaller
  (lambda (a b)
    (lambda (a+1<=b)
      (cons
        (add1 (car a+1<=b))
        (replace (a+b+1==a+1+b (car a+1<=b) a)
          (lambda (here) (= Nat here b))
          (cdr a+1<=b))))))
;; a+n<=b implies a<=b
;; Main result: n + a <= b implies a <= b, proved by induction on n.
(claim <=-simplify
  (Pi ((a Nat)
       (b Nat)
       (n Nat))
    (-> (<= (+ n a) b)
      (<= a b))))
;; Motive for the induction on n.
(claim mot-simplify
  (Pi ((a Nat)
       (b Nat)
       (n Nat))
    U))
(define mot-simplify
  (lambda (a b n)
    (-> (<= (+ n a) b)
      (<= a b))))
;; Step: reduce the (add1 n-1) case to the n-1 case via add1-smaller.
(claim step-simplify
  (Pi ((a Nat)
       (b Nat)
       (n-1 Nat))
    (->
      (-> (<= (+ n-1 a) b)
        (<= a b))
      (-> (<= (+ (add1 n-1) a) b)
        (<= a b)))))
;; we have a function that expects
;; (<= (+ n-1 a) b)
;; and returns our goal
;; (<= a b)
;; we also have another input into our function
;; (<= (+ (add1 n-1) a) b)
;; which is equal to
;; (<= (add1 (+ n-1 a)) b)
;; we also have add1-smaller
;; (-> (<= (add1 a) b) (<= a b)))
;; which we can use to get
;; (<= (+ n-1 a) b)
;; which we can then pass to our first function to get our goal
(define step-simplify
  (lambda (a b n-1)
    (lambda (n-1+a<=b-implies-a<=b)
      (lambda (n-1+1+a<=b)
        (n-1+a<=b-implies-a<=b
          (add1-smaller (+ n-1 a) b n-1+1+a<=b))))))
;; Base case: zero + a <= b is definitionally a <= b, so the evidence
;; passes through unchanged.
(define <=-simplify
  (lambda (a b n)
    (ind-Nat n
      (mot-simplify a b)
      (lambda (zero+a<=b) zero+a<=b)
      (step-simplify a b))))
| null | https://raw.githubusercontent.com/awalterschulze/the-little-typer-exercises/91cad6c6d5c1733562aa952d8ca515addb2b301d/chapter10-2-2-alessthanb.rkt | racket | that a is less than or equal to b.
(claim <=
U))
(define <=
(λ (a b)
(Σ ([k Nat])
(= Nat (+ k a) b))))
Define a funciton called <=-simplify to state and prove that for all
Nats a, b, n we have that n+a <= b implies a <= b
(claim plus-assoc
(claim <=-simplify
(Π ([a Nat]
[b Nat]
[n Nat])
(-> (<= (+ n a) b)
(<= a b))))
End of preamble
a+b+1 == a+1+b
a+1<=b implies a<=b
we have
there exists k where k + (a + 1) == b
which is equivalent to
there exists (car a+1<=b) where (car a+1<=b) + (a + 1) == b
needs to be transform to
there exists (car a+1<=b) + 1 where ((car a+1<=b) + 1) + a == b
which is equal to
there exists k + 1 where (k + 1) + a == b
which is equal to
there exists k where k + a == b
which is our goal for add1-smaller
which we can use to turn
(car a+1<=b) + (a + 1)
into
((car a+1<=b) + 1) + a
(replace a+b+1==a+1+b
a+n<=b implies a<=b
we have a function that expects
(<= (+ n-1 a) b)
and returns our goal
(<= a b)
we also have another input into our function
(<= (+ (add1 n-1) a) b)
which is equal to
(<= (add1 (+ n-1 a)) b)
we also have add1-smaller
(-> (<= (add1 a) b) (<= a b)))
which we can use to get
(<= (+ n-1 a) b) | #lang pie
In the following exercises we 'll use the function called < = that takes two
arguments a , b and evaluates to a type representing the proposition
( - >
NB : You may need to use plus - assoc that was proved in Exercise 8.3 .
( Pi ( ( n Nat ) ( m ) ( k Nat ) )
( + k ( + n m ) ) ( + ( + k n ) m ) ) ) )
(claim +
(-> Nat Nat Nat))
(define +
(lambda (x y)
(rec-Nat x
y
(lambda (_ y+x-1)
(add1 y+x-1)))))
(claim <=
(-> Nat Nat
U))
(define <=
(lambda (a b)
(Sigma ((k Nat))
(= Nat (+ k a) b))))
(claim a+b+1==a+1+b
(Pi ((a Nat) (b Nat))
(= Nat
(+ a (add1 b))
(+ (add1 a) b))))
(claim mot-a+b+1==a+1+b
(Pi ((a Nat) (b Nat))
U))
(define mot-a+b+1==a+1+b
(lambda (b a)
(= Nat
(+ a (add1 b))
(+ (add1 a) b))))
(claim base-a+b+1==a+1+b
(Pi ((b Nat))
(= Nat
(+ zero (add1 b))
(+ (add1 zero) b))))
(define base-a+b+1==a+1+b
(lambda (b)
(same (add1 b))))
(claim step-a+b+1==a+1+b
(Pi ((b Nat) (a-1 Nat))
(-> (= Nat
(+ a-1 (add1 b))
(+ (add1 a-1) b))
(= Nat
(+ (add1 a-1) (add1 b))
(+ (add1 (add1 a-1)) b)))))
(define step-a+b+1==a+1+b
(lambda (b a)
(lambda (a-1+b+1==a-1+1+b)
(cong a-1+b+1==a-1+1+b (+ 1)))))
(define a+b+1==a+1+b
(lambda (a b)
(ind-Nat a
(mot-a+b+1==a+1+b b)
(base-a+b+1==a+1+b b)
(step-a+b+1==a+1+b b))))
(claim add1-smaller
(Pi ((a Nat)
(b Nat))
(-> (<= (add1 a) b)
(<= a b))))
we also have a+b+1==a+1+b
( lambda ( here ) (= here b ) )
( + ( car a+1<=b ) ( + a 1 ) ) b )
(define add1-smaller
(lambda (a b)
(lambda (a+1<=b)
(cons
(add1 (car a+1<=b))
(replace (a+b+1==a+1+b (car a+1<=b) a)
(lambda (here) (= Nat here b))
(cdr a+1<=b))))))
(claim <=-simplify
(Pi ((a Nat)
(b Nat)
(n Nat))
(-> (<= (+ n a) b)
(<= a b))))
(claim mot-simplify
(Pi ((a Nat)
(b Nat)
(n Nat))
U))
(define mot-simplify
(lambda (a b n)
(-> (<= (+ n a) b)
(<= a b))))
(claim step-simplify
(Pi ((a Nat)
(b Nat)
(n-1 Nat))
(->
(-> (<= (+ n-1 a) b)
(<= a b))
(-> (<= (+ (add1 n-1) a) b)
(<= a b)))))
which we can then pass to our first function to get our goal
(define step-simplify
(lambda (a b n-1)
(lambda (n-1+a<=b-implies-a<=b)
(lambda (n-1+1+a<=b)
(n-1+a<=b-implies-a<=b
(add1-smaller (+ n-1 a) b n-1+1+a<=b))))))
(define <=-simplify
(lambda (a b n)
(ind-Nat n
(mot-simplify a b)
(lambda (zero+a<=b) zero+a<=b)
(step-simplify a b))))
|
12ec87b6405289b965eb22f41982f42c5de2dedb1869df8f059993436d302c60 | emezeske/lein-cljsbuild | listen.clj | (ns cljsbuild.test.repl.listen
(:use
cljsbuild.repl.listen
midje.sweet)
(:require
[cljs.repl :as repl]
[cljs.repl.browser :as browser]
[cljsbuild.util :as util]))
;; Fixture values handed to the REPL helpers under test.
(def port (Integer. 1234))
(def output-dir "output-dir")
(def command {:shell ["command"]})
;; Both REPL entry points should return nil; the launch variant is fed a
;; stubbed delayed process, and the browser REPL env / repl loop are
;; each expected to be invoked exactly once.
(fact
  (run-repl-listen port output-dir) => nil
  (run-repl-launch port output-dir command) => nil
  (provided
    (delayed-process-start command) => (future {:kill (fn [] nil) :wait (fn [] nil)}))
  (against-background
    (browser/repl-env :port port :working-dir output-dir) => {} :times 1
    (repl/repl {}) => nil :times 1))
| null | https://raw.githubusercontent.com/emezeske/lein-cljsbuild/089193c74e362c143d30dfca21a21e95c7ca112a/support/test/cljsbuild/test/repl/listen.clj | clojure | (ns cljsbuild.test.repl.listen
(:use
cljsbuild.repl.listen
midje.sweet)
(:require
[cljs.repl :as repl]
[cljs.repl.browser :as browser]
[cljsbuild.util :as util]))
(def port (Integer. 1234))
(def output-dir "output-dir")
(def command {:shell ["command"]})
(fact
(run-repl-listen port output-dir) => nil
(run-repl-launch port output-dir command) => nil
(provided
(delayed-process-start command) => (future {:kill (fn [] nil) :wait (fn [] nil)}))
(against-background
(browser/repl-env :port port :working-dir output-dir) => {} :times 1
(repl/repl {}) => nil :times 1))
|
|
26cda87b792f5ff6668eafeca3d7c5e6aabc1ff177d1982028c385851228724f | glguy/5puzzle | Handshakes.hs |
{-
My wife and I recently attended a party at which there were four other married
couples. Various handshakes took place. No one shook hands with oneself, nor
with one's spouse, and no one shook hands with the same person more than once.
After all the handshakes were over, I asked each person, including my wife, how
many hands he (or she) had shaken. To my surprise each gave a different answer.
How many hands did my wife shake?
-the-knot.org/pigeonhole/FiveCouples.shtml
-}
module Main where
import Control.Applicative
import Booleans
import Ersatz
import Data.Map (Map)
import qualified Data.Map as Map
import Prelude hiding ((&&), (||), all)
-- | Distinguishes the two members of a couple.
data Spouse = Husband | Wife deriving (Eq, Ord, Show, Read)
-- | Number of couples at the party (the narrator's plus four others).
couples :: Int
couples = 5
-- | Map from a pair of people -- (couple index, spouse) twice -- to a
-- symbolic Bit saying whether that pair shook hands.  As built by
-- 'handshakesExist', only pairs with strictly increasing couple index
-- are present.
type Handshakes = Map (Int, Spouse, Int, Spouse) Bit
-- | Symbolic count of the handshakes person (x, xS) takes part in: sums
-- the Bits of every recorded pair mentioning that person on either side.
countHandshakes :: Handshakes -> Int -> Spouse -> Bits
countHandshakes m x xS = countBits [ met | ((i,iS,j,jS),met) <- Map.toList m
                                         , i == x && iS == xS ||
                                           j == x && jS == xS
                                         ]
-- | Allocate one existential Bit per pair of people from two different
-- couples (couple indices i < j).  Members of the same couple get no
-- entry, encoding that spouses never shake hands; nobody shakes with
-- themselves for the same reason.
handshakesExist :: MonadSAT s m => m (Map (Int, Spouse, Int, Spouse) Bit)
handshakesExist
  = sequence
  $ Map.fromList [ ((i,iS,j,jS), exists)
                 | i  <- [1..couples]
                 , iS <- [Husband,Wife]
                 , j  <- [i+1..couples]
                 , jS <- [Husband,Wife]
                 ]
-- | The puzzle: consider the wife (1,Wife) plus both members of every
-- other couple (the narrator himself is excluded), require all their
-- handshake counts to be pairwise distinct, and return the wife's count.
problem :: MonadSAT s m => m Bits
problem =
  do m <- handshakesExist
     let consider = (1,Wife) : liftA2 (,) [2..couples] [Husband,Wife]
         handshakes = uncurry (countHandshakes m) <$> consider
     assert (unique handshakes)
     return (head handshakes)
-- | Solve for the wife's handshake count, then prove it is forced by
-- showing no model exists with any different value.
main :: IO ()
main = do
  Just res <- getModel problem
  Nothing <- getModel (problem `checking` (/== encode res))
  print res
| null | https://raw.githubusercontent.com/glguy/5puzzle/4d86cf9fad3ec3f70c57a167417adea6a3f9f30b/Handshakes.hs | haskell |
My wife and I recently attended a party at which there were four other married
couples. Various handshakes took place. No one shook hands with oneself, nor
with one's spouse, and no one shook hands with the same person more than once.
After all the handshakes were over, I asked each person, including my wife, how
many hands he (or she) had shaken. To my surprise each gave a different answer.
How many hands did my wife shake?
http://www.cut-the-knot.org/pigeonhole/FiveCouples.shtml
My wife and I recently attended a party at which there were four other married
couples. Various handshakes took place. No one shook hands with oneself, nor
with one's spouse, and no one shook hands with the same person more than once.
After all the handshakes were over, I asked each person, including my wife, how
many hands he (or she) had shaken. To my surprise each gave a different answer.
How many hands did my wife shake?
-the-knot.org/pigeonhole/FiveCouples.shtml
-}
module Main where
import Control.Applicative
import Booleans
import Ersatz
import Data.Map (Map)
import qualified Data.Map as Map
import Prelude hiding ((&&), (||), all)
-- | Role of a person within a married couple.
data Spouse = Husband | Wife deriving (Eq, Ord, Show, Read)
-- | Number of couples at the party (the narrator's couple included).
couples :: Int
couples = 5
-- | One SAT variable per pair of people from two different couples
-- (keyed by couple indices i < j and each person's role); the 'Bit'
-- is true iff the pair shook hands.
type Handshakes = Map (Int, Spouse, Int, Spouse) Bit
-- | Symbolic count of the handshakes that person (x, xS) takes part
-- in, as either end of a recorded pair.
countHandshakes :: Handshakes -> Int -> Spouse -> Bits
countHandshakes m x xS =
  countBits
    [ met
    | ((i, iS, j, jS), met) <- Map.toList m
    , (i, iS) == (x, xS) || (j, jS) == (x, xS)
    ]
-- | Allocate one unknown 'Bit' for every pair of people from two
-- different couples (couple indices i < j).  Same-couple pairs get no
-- variable at all, which encodes "nobody shakes hands with themselves
-- or with their spouse".
handshakesExist :: MonadSAT s m => m (Map (Int, Spouse, Int, Spouse) Bit)
handshakesExist =
  sequence $
    Map.fromList
      [ ((i, iS, j, jS), exists)
      | i  <- [1 .. couples]
      , j  <- [i + 1 .. couples]
      , iS <- [Husband, Wife]
      , jS <- [Husband, Wife]
      ]
-- | Constrain everyone except the narrator (couple 1's Husband) to
-- report pairwise-distinct handshake counts; the returned 'Bits' is
-- the narrator's wife's count.
problem :: MonadSAT s m => m Bits
problem = do
  m <- handshakesExist
  let others = liftA2 (,) [2 .. couples] [Husband, Wife]
      people = (1, Wife) : others
      counts = [ countHandshakes m i s | (i, s) <- people ]
  assert (unique counts)
  return (head counts)
-- | Solve for the wife's handshake count, then prove it is forced by
-- showing no model exists with any different value.
main :: IO ()
main = do
  Just res <- getModel problem
  Nothing <- getModel (problem `checking` (/== encode res))
  print res
|
|
888e0d220c8a173da90487c6e0e640ef204afef1f3d715641a68d77ed54ce7ad | WormBase/wormbase_rest | overview.clj | (ns rest-api.classes.laboratory.widgets.overview
(:require
[clojure.string :as str]
[rest-api.classes.generic-fields :as generic]
[rest-api.formatters.object :as obj :refer [pack-obj]]))
(defn website
  "Lab website with the http(s) scheme stripped; :data is nil when no
  URL is recorded."
  [lab]
  {:data (some-> (:laboratory/url lab)
                 first
                 (str/replace #"https?:\/\/" ""))
   :description "website of the lab"})
(defn representatives
  "Packed objects for the lab's official representatives, or nil."
  [lab]
  {:data (some->> (:laboratory/representative lab)
                  (map pack-obj))
   :description "official representatives of the laboratory"})
(defn email
  "Primary (first listed) e-mail address of the laboratory."
  [lab]
  {:data (-> lab :laboratory/e-mail first)
   :description "primary email address for the lab"})
(defn allele-designation
  "Allele designation string recorded for the laboratory."
  [lab]
  {:data (get lab :laboratory/allele-designation)
   :description "allele designation of the laboratory"})
(defn affiliation
  "First mail/affiliation line recorded for the laboratory."
  [lab]
  {:data (-> lab :laboratory/mail first)
   :description "institute or affiliation of the laboratory"})
(defn strain-designation
  "Strain designation of the laboratory.  The :laboratory/id doubles
  as the strain designation in the ACeDB data (true at least most of
  the time)."
  [lab]
  {:data (:laboratory/id lab)
   :description "strain designation of the laboratory"})
;; Field-name -> field-function table consumed by the widget framework
;; to render the laboratory Overview widget.
(def widget
  {:name generic/name-field
   :website website
   :representatives representatives
   :email email
   :allele_designation allele-designation
   ; :affiliation affiliation
   :remarks generic/remarks
   :strain_designation strain-designation})
| null | https://raw.githubusercontent.com/WormBase/wormbase_rest/e51026f35b87d96260b62ddb5458a81ee911bf3a/src/rest_api/classes/laboratory/widgets/overview.clj | clojure | gets name in ace code. this is true at least most of the time
:affiliation affiliation | (ns rest-api.classes.laboratory.widgets.overview
(:require
[clojure.string :as str]
[rest-api.classes.generic-fields :as generic]
[rest-api.formatters.object :as obj :refer [pack-obj]]))
(defn website
  "Lab website with the http(s) scheme stripped; :data is nil when no
  URL is recorded."
  [lab]
  {:data (some-> (:laboratory/url lab)
                 first
                 (str/replace #"https?:\/\/" ""))
   :description "website of the lab"})
(defn representatives
  "Packed objects for the lab's official representatives, or nil."
  [lab]
  {:data (some->> (:laboratory/representative lab)
                  (map pack-obj))
   :description "official representatives of the laboratory"})
(defn email
  "Primary (first listed) e-mail address of the laboratory."
  [lab]
  {:data (-> lab :laboratory/e-mail first)
   :description "primary email address for the lab"})
(defn allele-designation
  "Allele designation string recorded for the laboratory."
  [lab]
  {:data (get lab :laboratory/allele-designation)
   :description "allele designation of the laboratory"})
(defn affiliation
  "First mail/affiliation line recorded for the laboratory."
  [lab]
  {:data (-> lab :laboratory/mail first)
   :description "institute or affiliation of the laboratory"})
(defn strain-designation
  "Strain designation of the laboratory; the :laboratory/id serves as
  the strain designation (true at least most of the time)."
  [lab]
  ;; Restored: the `{:data (:laboratory/id lab)` entry was lost when
  ;; the line carrying its inline comment was stripped, leaving this
  ;; form unbalanced.  The intact copy of this file above shows the
  ;; original entry.
  {:data (:laboratory/id lab)
   :description "strain designation of the laboratory"})
;; Field-name -> field-function table consumed by the widget framework
;; to render the laboratory Overview widget.
(def widget
  {:name generic/name-field
   :website website
   :representatives representatives
   :email email
   :allele_designation allele-designation
   :remarks generic/remarks
   :strain_designation strain-designation})
|
6c3cca26bfd7d578fb28762c3ab2d287916b0f177dde57eb47a4344f6f8f5a29 | fukamachi/quri | decode.lisp | (in-package :cl-user)
(defpackage quri.decode
(:use :cl
:quri.util
:quri.error)
(:import-from :babel
:octets-to-string)
(:import-from :babel-encodings
:*default-character-encoding*)
(:import-from :cl-utilities
:collecting
:collect)
(:export :url-decode
:url-decode-params))
(in-package :quri.decode)
(declaim (ftype (function (character) (unsigned-byte 4)) hexdigit-to-integer))
(defun hexdigit-to-integer (char)
  "Return the numeric weight (0-15) of the hexadecimal digit CHAR,
signalling URL-DECODING-ERROR for any non-hex character."
  (declare (type character char)
           (optimize (speed 3) (safety 0)))
  ;; DIGIT-CHAR-P with radix 16 accepts exactly 0-9, a-f and A-F and
  ;; returns the digit's weight, or NIL otherwise.
  (or (digit-char-p char 16)
      (error 'url-decoding-error)))
(defun url-decode (data &key
                        (encoding babel-encodings:*default-character-encoding*)
                        (start 0)
                        end
                        (lenient nil))
  "Percent-decode the urlencoded string or octet vector DATA between
START and END, returning a string decoded via ENCODING.  '+' decodes
to a space.  When LENIENT is true, malformed %-escapes are copied
through verbatim instead of signalling URL-DECODING-ERROR."
  (declare (type (or string simple-byte-vector) data)
           (type integer start)
           (optimize (speed 3) (safety 2)))
  (let* ((end (or end (length data)))
         ;; Decoded octets accumulate here; decoding never grows the
         ;; input, so (- end start) octets always suffice.
         (buffer (make-array (- end start)
                             :element-type '(unsigned-byte 8)))
         (i 0)
         ;; Holds the first hex digit of a pending %XY escape.
         parsing-encoded-part)
    (declare (type integer end i)
             (type simple-byte-vector buffer))
    (flet ((write-to-buffer (byte)
             (declare (optimize (speed 3) (safety 0)))
             (setf (aref buffer i) byte)
             (incf i)))
      ;; State machine driven by quri.util's WITH-ARRAY-PARSING macro.
      (with-array-parsing (char p data start end (and (not (stringp data))
                                                      #'code-char))
        ;; Default state: copy bytes, map '+' to space, '%' starts an escape.
        (parsing
         (cond
           ((char= char #\%)
            (gonext))
           ((char= char #\+)
            (write-to-buffer #.(char-code #\Space))
            (redo))
           (t
            (write-to-buffer (char-code char))
            (redo))))
        ;; After '%': remember the first hex digit.
        (parsing-encoded-part
         (setq parsing-encoded-part char)
         (gonext))
        ;; After '%X': combine both hex digits into one octet.
        (parsing-encoded-part-second
         (handler-bind ((url-decoding-error
                         (lambda (error)
                           (declare (ignore error))
                           ;; Lenient mode: emit the escape verbatim
                           ;; and resume normal parsing.
                           (when lenient
                             (write-to-buffer #.(char-code #\%))
                             (write-to-buffer (char-code parsing-encoded-part))
                             (write-to-buffer (char-code char))
                             (setq parsing-encoded-part nil)
                             (goto parsing)))))
           (write-to-buffer
            (+ (* 16 (hexdigit-to-integer parsing-encoded-part))
               (hexdigit-to-integer char))))
         (setq parsing-encoded-part nil)
         (goto parsing))
        (:eof
         ;; A trailing '%' or '%X' is malformed even in lenient mode.
         (when parsing-encoded-part
           (error 'url-decoding-error)))))
    (babel:octets-to-string buffer :end i :encoding encoding :errorp (not lenient))))
(defun url-decode-params (data &key
                               (delimiter #\&)
                               (encoding babel-encodings:*default-character-encoding*)
                               (start 0)
                               end
                               (lenient nil))
  "Parse urlencoded key=value pairs from DATA between START and END,
returning an alist of decoded (field . value) conses; VALUE is NIL for
a field with no '='.  DELIMITER separates pairs.  When LENIENT is
true, stray '=' are tolerated and undecodable pairs are skipped
instead of signalling an error."
  (declare (type (or string simple-byte-vector) data)
           (type integer start)
           (type character delimiter)
           (optimize (speed 3) (safety 2)))
  (let ((end (or end (length data)))
        ;; START-MARK: index where the current field begins.
        ;; =-MARK: index of the '=' separating field from value.
        (start-mark nil)
        (=-mark nil))
    (declare (type integer end))
    (collecting
      (flet ((collect-pair (p)
               ;; Decode and collect field=value ending just before P;
               ;; in lenient mode a decoding error skips the pair.
               (tagbody
                  (handler-bind ((url-decoding-error
                                  (lambda (error)
                                    (declare (ignore error))
                                    (when lenient
                                      (go continue)))))
                    (collect
                        (cons (url-decode data :encoding encoding
                                               :start start-mark :end =-mark
                                               :lenient lenient)
                              (url-decode data :encoding encoding
                                               :start (1+ =-mark) :end p
                                               :lenient lenient))))
                continue)
               (setq start-mark nil
                     =-mark nil))
             (collect-field (p)
               ;; Decode and collect a value-less field ending before P.
               (tagbody
                  (handler-bind ((url-decoding-error
                                  (lambda (error)
                                    (declare (ignore error))
                                    (when lenient
                                      (go continue)))))
                    (collect
                        (cons (url-decode data :encoding encoding
                                               :start start-mark :end p
                                               :lenient lenient)
                              nil)))
                continue)
               (setq start-mark nil)))
        (with-array-parsing (char p data start end (and (not (stringp data))
                                                        #'code-char))
          ;; Beginning of a pair: leading '='/delimiter is an error
          ;; unless lenient.
          (start
           (setq start-mark p)
           (if lenient
               (cond
                 ((char= char #\=)
                  (setq =-mark p)
                  (goto parsing-value))
                 ((char= char delimiter)
                  (redo)))
               (when (or (char= char #\=)
                         (char= char delimiter))
                 (error 'uri-malformed-urlencoded-string)))
           (gonext))
          ;; Scanning the field name until '=' or the delimiter.
          (parsing-field
           (cond
             ((char= char #\=)
              (setq =-mark p)
              (gonext))
             ((char= char delimiter)
              ;; field only: key with no value
              (collect-field p)
              (goto start)))
           (redo))
          ;; Scanning the value until the delimiter.
          (parsing-value
           (cond
             ((char= char #\=)
              (unless lenient
                (error 'uri-malformed-urlencoded-string)))
             ((char= char delimiter)
              (collect-pair p)
              (goto start)))
           (redo))
          (:eof
           ;; Flush whatever pair or bare field is still pending.
           (cond
             (=-mark (collect-pair p))
             (start-mark (collect-field p)))))))))
| null | https://raw.githubusercontent.com/fukamachi/quri/d09ebb553f4a51e5d52036312ba4ee5b67c79cd0/src/decode.lisp | lisp | field only | (in-package :cl-user)
(defpackage quri.decode
(:use :cl
:quri.util
:quri.error)
(:import-from :babel
:octets-to-string)
(:import-from :babel-encodings
:*default-character-encoding*)
(:import-from :cl-utilities
:collecting
:collect)
(:export :url-decode
:url-decode-params))
(in-package :quri.decode)
(declaim (ftype (function (character) (unsigned-byte 4)) hexdigit-to-integer))
(defun hexdigit-to-integer (char)
  "Return the numeric weight (0-15) of the hexadecimal digit CHAR,
signalling URL-DECODING-ERROR for any non-hex character."
  (declare (type character char)
           (optimize (speed 3) (safety 0)))
  ;; DIGIT-CHAR-P with radix 16 accepts exactly 0-9, a-f and A-F and
  ;; returns the digit's weight, or NIL otherwise.
  (or (digit-char-p char 16)
      (error 'url-decoding-error)))
(defun url-decode (data &key
                        (encoding babel-encodings:*default-character-encoding*)
                        (start 0)
                        end
                        (lenient nil))
  "Percent-decode the urlencoded string or octet vector DATA between
START and END, returning a string decoded via ENCODING.  '+' decodes
to a space.  When LENIENT is true, malformed %-escapes are copied
through verbatim instead of signalling URL-DECODING-ERROR."
  (declare (type (or string simple-byte-vector) data)
           (type integer start)
           (optimize (speed 3) (safety 2)))
  (let* ((end (or end (length data)))
         ;; Decoded octets accumulate here; decoding never grows the
         ;; input, so (- end start) octets always suffice.
         (buffer (make-array (- end start)
                             :element-type '(unsigned-byte 8)))
         (i 0)
         ;; Holds the first hex digit of a pending %XY escape.
         parsing-encoded-part)
    (declare (type integer end i)
             (type simple-byte-vector buffer))
    (flet ((write-to-buffer (byte)
             (declare (optimize (speed 3) (safety 0)))
             (setf (aref buffer i) byte)
             (incf i)))
      ;; State machine driven by quri.util's WITH-ARRAY-PARSING macro.
      (with-array-parsing (char p data start end (and (not (stringp data))
                                                      #'code-char))
        ;; Default state: copy bytes, map '+' to space, '%' starts an escape.
        (parsing
         (cond
           ((char= char #\%)
            (gonext))
           ((char= char #\+)
            (write-to-buffer #.(char-code #\Space))
            (redo))
           (t
            (write-to-buffer (char-code char))
            (redo))))
        ;; After '%': remember the first hex digit.
        (parsing-encoded-part
         (setq parsing-encoded-part char)
         (gonext))
        ;; After '%X': combine both hex digits into one octet.
        (parsing-encoded-part-second
         (handler-bind ((url-decoding-error
                         (lambda (error)
                           (declare (ignore error))
                           ;; Lenient mode: emit the escape verbatim
                           ;; and resume normal parsing.
                           (when lenient
                             (write-to-buffer #.(char-code #\%))
                             (write-to-buffer (char-code parsing-encoded-part))
                             (write-to-buffer (char-code char))
                             (setq parsing-encoded-part nil)
                             (goto parsing)))))
           (write-to-buffer
            (+ (* 16 (hexdigit-to-integer parsing-encoded-part))
               (hexdigit-to-integer char))))
         (setq parsing-encoded-part nil)
         (goto parsing))
        (:eof
         ;; A trailing '%' or '%X' is malformed even in lenient mode.
         (when parsing-encoded-part
           (error 'url-decoding-error)))))
    (babel:octets-to-string buffer :end i :encoding encoding :errorp (not lenient))))
(defun url-decode-params (data &key
                               (delimiter #\&)
                               (encoding babel-encodings:*default-character-encoding*)
                               (start 0)
                               end
                               (lenient nil))
  "Parse urlencoded key=value pairs from DATA between START and END,
returning an alist of decoded (field . value) conses; VALUE is NIL for
a field with no '='.  DELIMITER separates pairs.  When LENIENT is
true, stray '=' are tolerated and undecodable pairs are skipped
instead of signalling an error."
  (declare (type (or string simple-byte-vector) data)
           (type integer start)
           (type character delimiter)
           (optimize (speed 3) (safety 2)))
  (let ((end (or end (length data)))
        ;; START-MARK: index where the current field begins.
        ;; =-MARK: index of the '=' separating field from value.
        (start-mark nil)
        (=-mark nil))
    (declare (type integer end))
    (collecting
      (flet ((collect-pair (p)
               ;; Decode and collect field=value ending just before P;
               ;; in lenient mode a decoding error skips the pair.
               (tagbody
                  (handler-bind ((url-decoding-error
                                  (lambda (error)
                                    (declare (ignore error))
                                    (when lenient
                                      (go continue)))))
                    (collect
                        (cons (url-decode data :encoding encoding
                                               :start start-mark :end =-mark
                                               :lenient lenient)
                              (url-decode data :encoding encoding
                                               :start (1+ =-mark) :end p
                                               :lenient lenient))))
                continue)
               (setq start-mark nil
                     =-mark nil))
             (collect-field (p)
               ;; Decode and collect a value-less field ending before P.
               (tagbody
                  (handler-bind ((url-decoding-error
                                  (lambda (error)
                                    (declare (ignore error))
                                    (when lenient
                                      (go continue)))))
                    (collect
                        (cons (url-decode data :encoding encoding
                                               :start start-mark :end p
                                               :lenient lenient)
                              nil)))
                continue)
               (setq start-mark nil)))
        (with-array-parsing (char p data start end (and (not (stringp data))
                                                        #'code-char))
          ;; Beginning of a pair: leading '='/delimiter is an error
          ;; unless lenient.
          (start
           (setq start-mark p)
           (if lenient
               (cond
                 ((char= char #\=)
                  (setq =-mark p)
                  (goto parsing-value))
                 ((char= char delimiter)
                  (redo)))
               (when (or (char= char #\=)
                         (char= char delimiter))
                 (error 'uri-malformed-urlencoded-string)))
           (gonext))
          ;; Scanning the field name until '=' or the delimiter.
          (parsing-field
           (cond
             ((char= char #\=)
              (setq =-mark p)
              (gonext))
             ((char= char delimiter)
              ;; field only: key with no value
              (collect-field p)
              (goto start)))
           (redo))
          ;; Scanning the value until the delimiter.
          (parsing-value
           (cond
             ((char= char #\=)
              (unless lenient
                (error 'uri-malformed-urlencoded-string)))
             ((char= char delimiter)
              (collect-pair p)
              (goto start)))
           (redo))
          (:eof
           ;; Flush whatever pair or bare field is still pending.
           (cond
             (=-mark (collect-pair p))
             (start-mark (collect-field p)))))))))
|
755f3a4b4aa71549d31a20701399c77c273c8b949d37cacc4b16c8b0535482ba | cwtsteven/TSD | fold_sum.ml | open Tsd
open List
(* Lifted integer addition over dataflow values. *)
let (+^) = lift (+)
(* [createList n init] builds [n] fresh input cells initialised to
   [init].  Returns the full cell list paired with the sub-list of
   cells whose (descending) build index is a multiple of 1000; the
   latter are used as update targets. *)
let rec createList n init =
  if n = 0 then ([], [])
  else
    let v = cell (lift init) in
    let rest, sampled = createList (n - 1) init in
    (v :: rest, if n mod 1000 == 0 then v :: sampled else sampled)
(* Run propagation steps until the dataflow network reaches a fixed
   point (i.e. [step] reports no further change). *)
let stabilise () = while step () do () done
(* Benchmark entry point: build N input cells (initially 1), fold them
   into a dataflow sum, set every 1000th input to 2, propagate until
   quiescent, and return the output cell.
   NOTE(review): the list produced by List.map below is discarded;
   List.iter would state the intent more directly. *)
let _ =
  let n = int_of_string Sys.argv.(1) in
  let ins, ins' = createList n 1 in
  let out = fold_left (fun acc i -> cell [%dfg i +^ acc]) (lift 0) ins in
  List.map (fun i -> set i 2) ins';
  stabilise ();
  out (*print_int (peek out); print_newline()*)
open List
let (+^) = lift (+)
let rec createList n init =
match n with
| 0 -> [], []
| n -> let input_v = cell (lift init) in
let xs, ys = createList (n-1) init in
input_v :: xs, if n mod 1000 == 0 then input_v :: ys else ys
let rec stabilise () =
if step() then stabilise () else ()
let _ =
let n = int_of_string Sys.argv.(1) in
let ins, ins' = createList n 1 in
let out = fold_left (fun acc i -> cell [%dfg i +^ acc]) (lift 0) ins in
List.map (fun i -> set i 2) ins';
stabilise (); |
98bb8ea6978c4be3e638ae1367d21eb27bfcc380e9eaf175b83f5656f3866149 | kztk-m/sparcl | Base.hs | module Language.Sparcl.Base where
import Language.Sparcl.Runtime
import Data.Function (on)
import Data.Ratio (Rational)
import qualified Data.Ratio ((%))
-- | Single-constructor wrapper around a value; judging by the name it
-- marks unrestricted (freely duplicable) values in the linear setting
-- -- TODO(review): confirm against the compiler's use sites.
newtype Un a = U a
-- | Lift a pure function into the runtime monad 'R'.
liftFunc :: (a -> b) -> a -> R b
liftFunc f = return . f
-- | Lift a pure binary function into two nested 'R'-returning arrows.
liftFunc2 :: (a -> b -> c) -> a -> R (b -> R c)
liftFunc2 f a = return (liftFunc (f a))
-- Arithmetic on lifted values: the 'Int' operators mirror their
-- Prelude namesakes, '%' builds a 'Rational' from two 'Int's, and the
-- "...%" family operates on 'Rational's.
(+) :: Int -> R (Int -> R Int)
(+) = liftFunc2 (Prelude.+)
(-) :: Int -> R (Int -> R Int)
(-) = liftFunc2 (Prelude.-)
(*) :: Int -> R (Int -> R Int)
(*) = liftFunc2 (Prelude.*)
(%) :: Int -> R (Int -> R Rational)
(%) = liftFunc2 ((Data.Ratio.%) `on` fromIntegral)
(+%) :: Rational -> R (Rational -> R Rational)
(+%) = liftFunc2 (Prelude.+)
(-%) :: Rational -> R (Rational -> R Rational)
(-%) = liftFunc2 (Prelude.-)
(*%) :: Rational -> R (Rational -> R Rational)
(*%) = liftFunc2 (Prelude.*)
(/%) :: Rational -> R (Rational -> R Rational)
(/%) = liftFunc2 (Prelude./)
-- Lifted, monomorphic comparisons: equality (eq*), less-or-equal
-- (le*) and strictly-less (lt*) for Int, Char and Rational.
eqInt :: Int -> R (Int -> R Bool)
eqInt = liftFunc2 (==)
eqChar :: Char -> R (Char -> R Bool)
eqChar = liftFunc2 (==)
eqRational :: Rational -> R (Rational -> R Bool)
eqRational = liftFunc2 (==)
leInt :: Int -> R (Int -> R Bool)
leInt = liftFunc2 (<=)
leChar :: Char -> R (Char -> R Bool)
leChar = liftFunc2 (<=)
leRational :: Rational -> R (Rational -> R Bool)
leRational = liftFunc2 (<=)
ltInt :: Int -> R (Int -> R Bool)
ltInt = liftFunc2 (<)
ltChar :: Char -> R (Char -> R Bool)
ltChar = liftFunc2 (<)
ltRational :: Rational -> R (Rational -> R Bool)
ltRational = liftFunc2 (<)
| null | https://raw.githubusercontent.com/kztk-m/sparcl/f52d333ce50e0aa6cb307da08811719f8c684f7d/runtime/src/Language/Sparcl/Base.hs | haskell | module Language.Sparcl.Base where
import Language.Sparcl.Runtime
import Data.Function (on)
import Data.Ratio (Rational)
import qualified Data.Ratio ((%))
-- | Single-constructor wrapper around a value; judging by the name it
-- marks unrestricted (freely duplicable) values in the linear setting
-- -- TODO(review): confirm against the compiler's use sites.
newtype Un a = U a
-- | Lift a pure function into the runtime monad 'R'.
liftFunc :: (a -> b) -> a -> R b
liftFunc f = return . f
-- | Lift a pure binary function into two nested 'R'-returning arrows.
liftFunc2 :: (a -> b -> c) -> a -> R (b -> R c)
liftFunc2 f a = return (liftFunc (f a))
-- Arithmetic on lifted values: the 'Int' operators mirror their
-- Prelude namesakes, '%' builds a 'Rational' from two 'Int's, and the
-- "...%" family operates on 'Rational's.
(+) :: Int -> R (Int -> R Int)
(+) = liftFunc2 (Prelude.+)
(-) :: Int -> R (Int -> R Int)
(-) = liftFunc2 (Prelude.-)
(*) :: Int -> R (Int -> R Int)
(*) = liftFunc2 (Prelude.*)
(%) :: Int -> R (Int -> R Rational)
(%) = liftFunc2 ((Data.Ratio.%) `on` fromIntegral)
(+%) :: Rational -> R (Rational -> R Rational)
(+%) = liftFunc2 (Prelude.+)
(-%) :: Rational -> R (Rational -> R Rational)
(-%) = liftFunc2 (Prelude.-)
(*%) :: Rational -> R (Rational -> R Rational)
(*%) = liftFunc2 (Prelude.*)
(/%) :: Rational -> R (Rational -> R Rational)
(/%) = liftFunc2 (Prelude./)
-- Lifted, monomorphic comparisons: equality (eq*), less-or-equal
-- (le*) and strictly-less (lt*) for Int, Char and Rational.
eqInt :: Int -> R (Int -> R Bool)
eqInt = liftFunc2 (==)
eqChar :: Char -> R (Char -> R Bool)
eqChar = liftFunc2 (==)
eqRational :: Rational -> R (Rational -> R Bool)
eqRational = liftFunc2 (==)
leInt :: Int -> R (Int -> R Bool)
leInt = liftFunc2 (<=)
leChar :: Char -> R (Char -> R Bool)
leChar = liftFunc2 (<=)
leRational :: Rational -> R (Rational -> R Bool)
leRational = liftFunc2 (<=)
ltInt :: Int -> R (Int -> R Bool)
ltInt = liftFunc2 (<)
ltChar :: Char -> R (Char -> R Bool)
ltChar = liftFunc2 (<)
ltRational :: Rational -> R (Rational -> R Bool)
ltRational = liftFunc2 (<)
|
|
194369010b6153b47c263199571c9147f74c8443fc1207387d3eebf753c6f0b9 | coccinelle/coccinelle | get_constants.mli |
 * This file is part of Coccinelle, licensed under the terms of the GPL v2.
 * See copyright.txt in the Coccinelle source code for more information.
 * The Coccinelle source code can be obtained at
* This file is part of Coccinelle, licensed under the terms of the GPL v2.
* See copyright.txt in the Coccinelle source code for more information.
* The Coccinelle source code can be obtained at
*)
(** [get_constants rules] returns one list of constant strings per
    semantic-patch rule -- presumably the tokens used to pre-filter
    which source files a rule could possibly match.
    TODO(review): confirm the grouping semantics against the
    implementation in get_constants.ml. *)
val get_constants : Ast_cocci.rule list -> string list list
| null | https://raw.githubusercontent.com/coccinelle/coccinelle/57cbff0c5768e22bb2d8c20e8dae74294515c6b3/parsing_cocci/get_constants.mli | ocaml |
* This file is part of Coccinelle , licensed under the terms of the GPL v2 .
* See copyright.txt in the Coccinelle source code for more information .
* The Coccinelle source code can be obtained at
* This file is part of Coccinelle, licensed under the terms of the GPL v2.
* See copyright.txt in the Coccinelle source code for more information.
* The Coccinelle source code can be obtained at
*)
val get_constants : Ast_cocci.rule list -> string list list
|
|
eddcde09c004182f3298472098a5bf89f310b95ba5e761eb11806a633422f360 | reborg/clojure-essential-reference | 1.clj | < 1 >
;; true
< 2 >
(.exists file))
;; Reflection warning - reference to field exists can't be resolved.
(import '[java.io File])
;; Type-hinted variant: the ^File hint lets the compiler resolve
;; .exists statically, silencing the reflection warning noted above.
(defn exists? [^File file] ; <3>
  (.exists file))
Reflection warning - reference to field exists can't be resolved.
<3> | < 1 >
< 2 >
(.exists file))
(import '[java.io File])
(.exists file)) |
b94c8c521b6532c1974a34f1346351b061c01c67891e9dab725e49a8e1cdd6d3 | bhaskara/programmable-reinforcement-learning | gold-standard.lisp | (defpackage alisp-gold-standard
(:documentation "Defines the <alisp-gold-standard> reinforcement learning algorithm.
Exports
-------
<alisp-gold-standard>
make-alisp-gold-standard-learning-alg")
(:export
<alisp-gold-standard>
make-alisp-gold-standard-learning-alg)
(:use
cl
set
alisp-obs
utils)
(:import-from
mdp
make-outcome
outcome-state
outcome-duration
outcome-reward)
)
(in-package alisp-gold-standard)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Class def
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Learner state: maximum-likelihood transition counts plus
;; bookkeeping for the currently pending (joint state, choice) pair.
(defclass <alisp-gold-standard> (<q-learning-algorithm> <alisp-learning-algorithm>)
  ((discount :type float
             :initarg :discount
             :initform 1.0
             :reader discount)
   (state-set :type [numbered-set]
              :accessor state-set
              :documentation "Numbered set of states. Numbers are used as indices in counts. The first state is always the special symbol 'dummy-terminal-state.")
   (counts :type vector
           :accessor counts
           :documentation "A vector mapping state numbers to entries. Each entry is a cons of a numbered-set of choices and a vector that maps choices at the joint state (assumed to be integers) to count information. Finally, the count information is a pair consisting of 1) how many times this choice has been observed 2) a hashtable mapping outcomes to integers. An outcome is a triple of the form (OMEGA', R, DUR). Note that state number 0 is a dummy terminal state, so the corresponding entry consists of a dummy choice that just returns to this state.")
   ;; Pending observation: the choice whose outcome is still being
   ;; accumulated, and the discounted reward gathered since.
   (previous-state :accessor prev-omega)
   (previous-choice :accessor prev-u)
   (previous-state-seen :accessor prev-omega-seen)
   (total-reward :accessor total-reward)
   (total-discount :accessor total-discount)
   (num-steps-since-choice :accessor num-steps-since-choice))
  (:documentation "Implements 'gold-standard' model-based reinforcement-learning for ALisp. The algorithm maintains a maximum-likelihood estimate of the SMDP transition, and does DP using this estimate when asked for the current policy.
Assumes for now that choice sets are integers, where n represents the set {0,...,n-1}.
"))
;; Number of policy-improvement steps handed to dp:policy-iteration
;; as :k (modified policy iteration parameter).
(defconstant *mpi-k* 8)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; creation
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmethod shared-initialize :after ((alg <alisp-gold-standard>) names &rest args)
  ;; (Re)initialize learner state.  State 0 is the dummy terminal
  ;; state, whose single "choice" loops back to itself.
  (declare (ignore names))
  (setf (state-set alg) (indexed-set:make-indexed-set #(dummy-terminal-state))
        (counts alg) (make-array 1 :adjustable t :fill-pointer 1)
        (prev-omega-seen alg) nil
        (aref (counts alg) 0) (make-dummy-terminal-entry)))
(defun make-alisp-gold-standard-learning-alg (&key (discount 1.0) (debug-str nil))
  "make-alisp-gold-standard-learning-alg &key (DISCOUNT 1.0) (DEBUG-STR nil)
Convenience constructor for an <alisp-gold-standard> instance."
  (make-instance '<alisp-gold-standard> :discount discount :debug-str debug-str))
(defun make-dummy-terminal-entry ()
  ;; Counts entry for the dummy terminal state: one choice, observed
  ;; once, whose only outcome loops back to the terminal state with
  ;; reward 0 and duration 1.
  (let ((a (make-array 1)))
    (setf (aref a 0)
          (cons 1
                (let ((h (make-hash-table :test #'equalp)))
                  (setf (gethash (make-outcome 'dummy-terminal-state 0 1) h) 1)
                  h)))
    (cons 1 a)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Operations from <alisp-observer>
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmethod inform-env-step ((alg <alisp-gold-standard>) a r s2 term)
  ;; Fold the environment reward into the discounted return being
  ;; accumulated for the pending joint-state/choice pair.
  (declare (ignore s2 a))
  (when (prev-omega-seen alg)
    (incf (total-reward alg) (* (total-discount alg) r))
    (multf (total-discount alg) (discount alg))
    (incf (num-steps-since-choice alg))
    ;; if environment has terminated, make a note of this
    (when term (observe-outcome alg (prev-omega alg) (prev-u alg)
                                (total-reward alg) nil nil t))))
(defmethod inform-alisp-step ((alg <alisp-gold-standard>) omega u)
  ;; A new choice point: first record the outcome of the previous
  ;; choice (if any), then start accumulating for (OMEGA, U).
  (when (prev-omega-seen alg)
    (observe-outcome alg (prev-omega alg) (prev-u alg) (total-reward alg)
                     (num-steps-since-choice alg) omega nil))
  (notice-state alg omega)
  (setf (prev-omega-seen alg) t
        (prev-omega alg) omega
        (prev-u alg) u
        (total-reward alg) 0.0
        (total-discount alg) 1.0
        (num-steps-since-choice alg) 0))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Operations from <q-learning-algorithm>
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defmethod reset ((alg <alisp-gold-standard>))
  ;; Forget all learned counts by re-running initialization.
  (reinitialize-instance alg))
(defmethod knowledge-state ((alg <alisp-gold-standard>) &optional (fresh t))
  "When asked for state of knowledge, the algorithm computes the current SMDP, then does dynamic programming and returns the SMDP and the Q-function in a list."
  (declare (ignore fresh)) ;; fresh is always treated as true
  (let ((m (create-smdp alg)))
    (multiple-value-bind (pol val)
        (dp:policy-iteration m :k *mpi-k* :discount (discount alg))
      (declare (ignore pol))
      (list m (dp:q-from-v m val (discount alg))))))
(defmethod get-q-fn ((alg <alisp-gold-standard>) ks)
  ;; KS is the (smdp q-function) list built by knowledge-state.
  (second ks))
(defmethod get-smdp ((alg <alisp-gold-standard>) ks)
  ;; KS is the (smdp q-function) list built by knowledge-state.
  (first ks))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; smdp creation
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun create-smdp (alg)
  ;; Build the maximum-likelihood SMDP from the observation counts.
  ;; State 0 (the dummy terminal state) is the only terminal state.
  (let* ((states (clone (state-set alg)))
         (term-vec (make-array (size states) :element-type 'boolean :initial-element nil))
         (avail-actions-vec (map 'vector #'car (counts alg)))
         (trans-dists (map 'vector #'create-smdp-helper (counts alg))))
    (setf (aref term-vec 0) t)
    (flet ((avail-actions (s)
             (aref avail-actions-vec (item-number s states))))
      (make-instance 'mdp:<smdp>
                     :state-set states
                     :avail-actions #'avail-actions
                     :term-pred (lambda (s) (aref term-vec (item-number s states)))
                     ;; Transition distribution for a (state . action)
                     ;; cons, indexed by state and action numbers.
                     :trans-dist
                     (lambda (x)
                       (let* ((s (car x))
                              (i (item-number s states))
                              (j (item-number (cdr x) (avail-actions s))))
                         (aref (aref trans-dists i) j)))))))
(defun create-smdp-helper (x)
  ;; X is a (choices . per-choice-counts) entry; turn each choice's
  ;; counts into an outcome distribution.
  (map 'vector #'create-choice-dist (cdr x)))
(defun create-choice-dist (counts)
  "Turn a (total . outcome-table) COUNTS entry into an outcome
probability distribution (alist of outcome . probability)."
  (let ((total (car counts)))
    (if (eql total 0)
        ;; Restored comment marker: this line had lost its ";;" in the
        ;; dump, leaving bare symbols inside the IF form.
        ;; if there are no observations, assume this choice results in
        ;; terminating in time 1 with reward 0
        `((,(make-outcome 'dummy-terminal-state 0 1) . 1.0))
        ;; otherwise, normalize the counts of each observation
        (loop
            for outcome being each hash-key in (cdr counts) using (hash-value num)
            ;; Outcomes recorded without a duration default to 1.
            for out2 = (if (outcome-duration outcome)
                           outcome
                           (make-outcome
                            (outcome-state outcome)
                            (outcome-reward outcome)
                            1))
            collect (cons out2 (/ num total))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; code relating to responding to observations
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defun notice-state (alg omega)
  "postcondition is that the state-set and counts for ALG include OMEGA"
  (let* ((counts (counts alg))
         (choices (js-choices omega))
         (num-choices (size choices))
         (states (state-set alg)))
    (unless (member? omega states)
      (addf states omega t)
      ;; Fresh entry: zero observations and an empty outcome table for
      ;; every choice available at OMEGA.
      (vector-push-extend
       (cons choices
             (let ((a (make-array num-choices)))
               (dotimes (i num-choices)
                 (setf (aref a i)
                       (cons 0 (make-hash-table :test #'equalp))))
               a))
       counts))))
(defun observe-outcome (alg omega u r dur omega2 terminated?)
  "Assumes ALG has an entry for OMEGA. If TERMINATED? is false, increments the count for observing (OMEGA', R, DUR) after doing U in OMEGA. If TERMINATED? is true, increments the count for observing termination with reward R after doing U in OMEGA."
  (let* ((entry (if terminated?
                    (make-outcome 'dummy-terminal-state r nil)
                    (make-outcome omega2 r dur)))
         (state-counts (aref (counts alg) (item-number omega (state-set alg))))
         (counts (aref (cdr state-counts) (item-number u (car state-counts))))
         (count-table (cdr counts))
         (num (gethash entry count-table)))
    ;; Bump both the per-choice total and this outcome's own count.
    (incf (car counts))
    (setf (gethash entry count-table)
          (aif num (incf it) 1)))
  (setf (prev-omega-seen alg) nil))
(in-package cl-user)
| null | https://raw.githubusercontent.com/bhaskara/programmable-reinforcement-learning/8afc98116a8f78163b3f86076498d84b3f596217/lisp/alisp/learn/gold-standard.lisp | lisp |
Class def
creation
if environment has terminated, make a note of this
fresh is always treated as true
smdp creation
otherwise, normalize the counts of each observation
code relating to responding to observations
| (defpackage alisp-gold-standard
(:documentation "Defines the <alisp-gold-standard> reinforcement learning algorithm.
Exports
-------
<alisp-gold-standard>
make-alisp-gold-standard-learning-alg")
(:export
<alisp-gold-standard>
make-alisp-gold-standard-learning-alg)
(:use
cl
set
alisp-obs
utils)
(:import-from
mdp
make-outcome
outcome-state
outcome-duration
outcome-reward)
)
(in-package alisp-gold-standard)
;; Learner state: maximum-likelihood transition counts plus
;; bookkeeping for the currently pending (joint state, choice) pair.
(defclass <alisp-gold-standard> (<q-learning-algorithm> <alisp-learning-algorithm>)
  ((discount :type float
             :initarg :discount
             :initform 1.0
             :reader discount)
   (state-set :type [numbered-set]
              :accessor state-set
              :documentation "Numbered set of states. Numbers are used as indices in counts. The first state is always the special symbol 'dummy-terminal-state.")
   (counts :type vector
           :accessor counts
           :documentation "A vector mapping state numbers to entries. Each entry is a cons of a numbered-set of choices and a vector that maps choices at the joint state (assumed to be integers) to count information. Finally, the count information is a pair consisting of 1) how many times this choice has been observed 2) a hashtable mapping outcomes to integers. An outcome is a triple of the form (OMEGA', R, DUR). Note that state number 0 is a dummy terminal state, so the corresponding entry consists of a dummy choice that just returns to this state.")
   ;; Pending observation: the choice whose outcome is still being
   ;; accumulated, and the discounted reward gathered since.
   (previous-state :accessor prev-omega)
   (previous-choice :accessor prev-u)
   (previous-state-seen :accessor prev-omega-seen)
   (total-reward :accessor total-reward)
   (total-discount :accessor total-discount)
   (num-steps-since-choice :accessor num-steps-since-choice))
  (:documentation "Implements 'gold-standard' model-based reinforcement-learning for ALisp. The algorithm maintains a maximum-likelihood estimate of the SMDP transition, and does DP using this estimate when asked for the current policy.
Assumes for now that choice sets are integers, where n represents the set {0,...,n-1}.
"))
;; Number of policy-improvement steps handed to dp:policy-iteration
;; as :k (modified policy iteration parameter).
(defconstant *mpi-k* 8)
(defmethod shared-initialize :after ((alg <alisp-gold-standard>) names &rest args)
  ;; (Re)initialize learner state.  State 0 is the dummy terminal
  ;; state, whose single "choice" loops back to itself.
  (declare (ignore names))
  (setf (state-set alg) (indexed-set:make-indexed-set #(dummy-terminal-state))
        (counts alg) (make-array 1 :adjustable t :fill-pointer 1)
        (prev-omega-seen alg) nil
        (aref (counts alg) 0) (make-dummy-terminal-entry)))
(defun make-alisp-gold-standard-learning-alg (&key (discount 1.0) (debug-str nil))
  "make-alisp-gold-standard-learning-alg &key (DISCOUNT 1.0) (DEBUG-STR nil)
Convenience constructor for an <alisp-gold-standard> instance."
  (make-instance '<alisp-gold-standard> :discount discount :debug-str debug-str))
(defun make-dummy-terminal-entry ()
  ;; Counts entry for the dummy terminal state: one choice, observed
  ;; once, whose only outcome loops back to the terminal state with
  ;; reward 0 and duration 1.
  (let ((a (make-array 1)))
    (setf (aref a 0)
          (cons 1
                (let ((h (make-hash-table :test #'equalp)))
                  (setf (gethash (make-outcome 'dummy-terminal-state 0 1) h) 1)
                  h)))
    (cons 1 a)))
;; Operations from <alisp-observer>
(defmethod inform-env-step ((alg <alisp-gold-standard>) a r s2 term)
  ;; Fold the environment reward into the discounted return being
  ;; accumulated for the pending joint-state/choice pair.
  (declare (ignore s2 a))
  (when (prev-omega-seen alg)
    (incf (total-reward alg) (* (total-discount alg) r))
    (multf (total-discount alg) (discount alg))
    (incf (num-steps-since-choice alg))
    ;; if environment has terminated, make a note of this
    (when term (observe-outcome alg (prev-omega alg) (prev-u alg)
                                (total-reward alg) nil nil t))))
(defmethod inform-alisp-step ((alg <alisp-gold-standard>) omega u)
  ;; A new choice point: first record the outcome of the previous
  ;; choice (if any), then start accumulating for (OMEGA, U).
  (when (prev-omega-seen alg)
    (observe-outcome alg (prev-omega alg) (prev-u alg) (total-reward alg)
                     (num-steps-since-choice alg) omega nil))
  (notice-state alg omega)
  (setf (prev-omega-seen alg) t
        (prev-omega alg) omega
        (prev-u alg) u
        (total-reward alg) 0.0
        (total-discount alg) 1.0
        (num-steps-since-choice alg) 0))
;; Operations from <q-learning-algorithm>
(defmethod reset ((alg <alisp-gold-standard>))
  ;; Forget all learned counts by re-running initialization.
  (reinitialize-instance alg))
(defmethod knowledge-state ((alg <alisp-gold-standard>) &optional (fresh t))
  "When asked for state of knowledge, the algorithm computes the current SMDP, then does dynamic programming and returns the SMDP and the Q-function in a list."
  ;; Restored: the (declare (ignore fresh)) line was lost when its
  ;; inline comment was stripped (the intact copy of this file above
  ;; retains it).  FRESH is part of the generic-function signature but
  ;; is always treated as true: the SMDP is rebuilt on every call.
  (declare (ignore fresh))
  (let ((m (create-smdp alg)))
    (multiple-value-bind (pol val)
        (dp:policy-iteration m :k *mpi-k* :discount (discount alg))
      (declare (ignore pol))
      (list m (dp:q-from-v m val (discount alg))))))
(defmethod get-q-fn ((alg <alisp-gold-standard>) ks)
  ;; KS is the (smdp q-function) list built by knowledge-state.
  (second ks))
(defmethod get-smdp ((alg <alisp-gold-standard>) ks)
  ;; KS is the (smdp q-function) list built by knowledge-state.
  (first ks))
;; Build an mdp:<smdp> object from the empirical counts gathered so far.
;; The returned SMDP closes over snapshots of the state set and the per-state
;; transition distributions, so later learning does not mutate it.
(defun create-smdp (alg)
  (let* ((states (clone (state-set alg)))
	 ;; term-vec: element 0 is the dummy terminal state (see
	 ;; make-dummy-terminal-entry), all others non-terminal.
	 (term-vec (make-array (size states) :element-type 'boolean :initial-element nil))
	 ;; Per-state available choices (car of each counts entry).
	 (avail-actions-vec (map 'vector #'car (counts alg)))
	 ;; Per-state vector of outcome distributions, one per choice.
	 (trans-dists (map 'vector #'create-smdp-helper (counts alg))))
    (setf (aref term-vec 0) t)
    (flet ((avail-actions (s)
	     (aref avail-actions-vec (item-number s states))))
      (make-instance 'mdp:<smdp>
	:state-set states
	:avail-actions #'avail-actions
	:term-pred (lambda (s) (aref term-vec (item-number s states)))
	:trans-dist
	;; X is a (state . choice) pair; look up the empirical distribution.
	(lambda (x)
	  (let* ((s (car x))
		 (i (item-number s states))
		 (j (item-number (cdr x) (avail-actions s))))
	    (aref (aref trans-dists i) j)))))))
;; X is a counts entry (CHOICES . COUNT-VECTOR); convert each choice's counts
;; into an outcome distribution.
(defun create-smdp-helper (x)
  (map 'vector #'create-choice-dist (cdr x)))
(defun create-choice-dist (counts)
  "Turn COUNTS, a pair (TOTAL . HASH-TABLE) of observation counts for one
choice, into an empirical outcome distribution: an alist mapping each
outcome to NUM/TOTAL.  Outcomes recorded with a null duration (terminal
observations) are normalized to duration 1."
  (let ((total (car counts)))
    (if (eql total 0)
	;; if there are no observations, assume this choice results in
	;; terminating in time 1 with reward 0
	`((,(make-outcome 'dummy-terminal-state 0 1) . 1.0))
      (loop
	  for outcome being each hash-key in (cdr counts) using (hash-value num)
	  for out2 = (if (outcome-duration outcome)
			 outcome
		       (make-outcome
			(outcome-state outcome)
			(outcome-reward outcome)
			1))
	  collect (cons out2 (/ num total))))))
(defun notice-state (alg omega)
  "postcondition is that the state-set and counts for ALG include OMEGA"
  (let* ((counts (counts alg))
	 (choices (js-choices omega))
	 (num-choices (size choices))
	 (states (state-set alg)))
    (unless (member? omega states)
      (addf states omega t)
      ;; New entry: (CHOICES . VECTOR) with one (0 . empty-table) counts
      ;; cell per available choice.  Pushed at the same index OMEGA got in
      ;; the state set, keeping the two structures aligned.
      (vector-push-extend
       (cons choices
	     (let ((a (make-array num-choices)))
	       (dotimes (i num-choices)
		 (setf (aref a i)
		   (cons 0 (make-hash-table :test #'equalp))))
	       a))
       counts))))
(defun observe-outcome (alg omega u r dur omega2 terminated?)
  "Assumes ALG has an entry for OMEGA.  If TERMINATED? is false, increments the count for observing (OMEGA', R, DUR) after doing U in OMEGA.  If TERMINATED? is true, increments the count for observing termination with reward R after doing U in OMEGA."
  (let* ((entry (if terminated?
		    (make-outcome 'dummy-terminal-state r nil)
		  (make-outcome omega2 r dur)))
	 ;; Locate the (TOTAL . TABLE) cell for this state/choice pair.
	 (state-counts (aref (counts alg) (item-number omega (state-set alg))))
	 (counts (aref (cdr state-counts) (item-number u (car state-counts))))
	 (count-table (cdr counts))
	 (num (gethash entry count-table)))
    (incf (car counts))
    (setf (gethash entry count-table)
      (aif num (incf it) 1)))
  ;; Clear the pending-choice flag; it matters after a terminal observation,
  ;; and is re-set by inform-alisp-step at the next choice point.
  (setf (prev-omega-seen alg) nil))
;; Return to the user package now that the algorithm code is defined.
(in-package cl-user)
%% cover.erl -- from repository wireless-net/erlang-nommu
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2001-2013. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
-module(cover).
%%
%% This module implements the Erlang coverage tool. The module named
%% cover_web implements a user interface for the coverage tool to run
%% under webtool.
%%
%% ARCHITECTURE
%% The coverage tool consists of one process on each node involved in
%% coverage analysis. The process is registered as 'cover_server'
%% (?SERVER). The cover_server on the 'main' node is in charge, and
%% it monitors the cover_servers on all remote nodes. When it gets a
%% 'DOWN' message for another cover_server, it marks the node as
%% 'lost'. If a nodeup is received for a lost node the main node
%% ensures that the cover compiled modules are loaded again. If the
%% remote node was alive during the disconnected periode, cover data
%% for this periode will also be included in the analysis.
%%
%% The cover_server process on the main node is implemented by the
%% functions init_main/1 and main_process_loop/1. The cover_server on
%% the remote nodes are implemented by the functions init_remote/2 and
%% remote_process_loop/1.
%%
%% TABLES
%% Each node has two tables: cover_internal_data_table (?COVER_TABLE) and
%% cover_internal_clause_table (?COVER_CLAUSE_TABLE).
%% ?COVER_TABLE contains the bump data i.e. the data about which lines
%% have been executed how many times.
%% ?COVER_CLAUSE_TABLE contains information about which clauses in which modules
%% cover is currently collecting statistics.
%%
%% The main node owns tables named
%% 'cover_collected_remote_data_table' (?COLLECTION_TABLE) and
%% 'cover_collected_remote_clause_table' (?COLLECTION_CLAUSE_TABLE).
%% These tables contain data which is collected from remote nodes (either when a
%% remote node is stopped with cover:stop/1 or when analysing). When
%% analysing, data is even moved from the COVER tables on the main
%% node to the COLLECTION tables.
%%
%% The main node also has a table named 'cover_binary_code_table'
%% (?BINARY_TABLE). This table contains the binary code for each cover
%% compiled module. This is necessary so that the code can be loaded
%% on remote nodes that are started after the compilation.
%%
%% PARALLELISM
%% To take advantage of SMP when doing the cover analysis both the data
%% collection and analysis has been parallelized. One process is spawned for
%% each node when collecting data, and on the remote node when collecting data
%% one process is spawned per module.
%%
%% When analyzing data it is possible to issue multiple analyse(_to_file)/X
%% calls at once. They are however all calls (for backwards compatibility
%% reasons) so the user of cover will have to spawn several processes to to the
%% calls (or use async_analyse_to_file).
%%
%% External exports
-export([start/0, start/1,
compile/1, compile/2, compile_module/1, compile_module/2,
compile_directory/0, compile_directory/1, compile_directory/2,
compile_beam/1, compile_beam_directory/0, compile_beam_directory/1,
analyse/1, analyse/2, analyse/3, analyze/1, analyze/2, analyze/3,
analyse_to_file/1, analyse_to_file/2, analyse_to_file/3,
analyze_to_file/1, analyze_to_file/2, analyze_to_file/3,
async_analyse_to_file/1,async_analyse_to_file/2,
async_analyse_to_file/3, async_analyze_to_file/1,
async_analyze_to_file/2, async_analyze_to_file/3,
export/1, export/2, import/1,
modules/0, imported/0, imported_modules/0, which_nodes/0, is_compiled/1,
reset/1, reset/0,
flush/1,
stop/0, stop/1]).
-export([remote_start/1,get_main_node/0]).
%% Used internally to ensure we upgrade the code to the latest version.
-export([main_process_loop/1,remote_process_loop/1]).
%% State of the cover_server on the main node.
-record(main_state, {compiled=[], % [{Module,File}]
		     imported=[], % [{Module,File,ImportFile}]
		     stopper, % undefined | pid()
		     nodes=[], % [Node]
		     lost_nodes=[]}). % [Node]

%% State of the cover_server on a remote node.
-record(remote_state, {compiled=[], % [{Module,File}]
		       main_node}). % atom()

%% Key of a ?COVER_TABLE entry: identifies one executable line.  The '_'
%% defaults double as wildcards for ets match patterns.
-record(bump, {module = '_', % atom()
	       function = '_', % atom()
	       arity = '_', % integer()
	       clause = '_', % integer()
	       line = '_' % integer()
	      }).
-define(BUMP_REC_NAME,bump).

%% Transient state used while instrumenting (cover compiling) a module.
-record(vars, {module, % atom() Module name
	       init_info=[], % [{M,F,A,C,L}]
	       function, % atom()
	       arity, % int()
	       clause, % int()
	       lines, % [int()]
	       no_bump_lines, % [int()]
	       depth, % int()
	       is_guard=false % boolean
	      }).

%% Names of the ets tables described in the module header comment.
-define(COVER_TABLE, 'cover_internal_data_table').
-define(COVER_CLAUSE_TABLE, 'cover_internal_clause_table').
-define(BINARY_TABLE, 'cover_binary_code_table').
-define(COLLECTION_TABLE, 'cover_collected_remote_data_table').
-define(COLLECTION_CLAUSE_TABLE, 'cover_collected_remote_clause_table').

-define(TAG, cover_compiled).
-define(SERVER, cover_server).

%% Line doesn't matter.
-define(BLOCK(Expr), {block,0,[Expr]}).
-define(BLOCK1(Expr),
	if
	    element(1, Expr) =:= block ->
		Expr;
	    true -> ?BLOCK(Expr)
	end).

-define(SPAWN_DBG(Tag,Value),put(Tag,Value)).
-include_lib("stdlib/include/ms_transform.hrl").
%%%----------------------------------------------------------------------
%%% External exports
%%%----------------------------------------------------------------------
%% start() -> {ok,Pid} | {error,Reason}
%%   Pid = pid()
%%   Reason = {already_started,Pid} | term()
%% Start the cover_server on the local (main) node unless already running.
%% A monitor on the spawned server distinguishes a successful start from an
%% immediate crash.
start() ->
    case whereis(?SERVER) of
	undefined ->
	    Starter = self(),
	    Pid = spawn(fun() ->
				?SPAWN_DBG(start,[]),
				init_main(Starter)
			end),
	    Ref = erlang:monitor(process,Pid),
	    Return =
		receive
		    {?SERVER,started} ->
			{ok,Pid};
		    {'DOWN', Ref, _Type, _Object, Info} ->
			{error,Info}
		end,
	    %% NOTE(review): demonitor/1 without [flush] can leave a stray
	    %% 'DOWN' message in the mailbox -- confirm acceptable.
	    erlang:demonitor(Ref),
	    Return;
	Pid ->
	    {error,{already_started,Pid}}
    end.
%% start(Nodes) -> {ok,StartedNodes}
%% Nodes = Node | [Node,...]
%% Node = atom()
%% Start cover_servers on the given remote node(s); the local node is
%% filtered out since it runs the main server.
start(Node) when is_atom(Node) ->
    start([Node]);
start(Nodes) ->
    call({start_nodes,remove_myself(Nodes,[])}).
%% compile(ModFile) ->
%% compile(ModFile, Options) ->
%% compile_module(ModFile) -> Result
%% compile_module(ModFile, Options) -> Result
%% ModFile = Module | File
%% Module = atom()
%% File = string()
%% Options = [Option]
%%   Option = {i,Dir} | {d,Macro} | {d,Macro,Value}
%% Result = {ok,Module} | {error,File}
%% Convenience wrappers: compile/1,2 delegate to compile_module/2.
compile(ModFile) ->
    compile_module(ModFile, []).
compile(ModFile, Options) ->
    compile_module(ModFile, Options).
%% Normalize the argument to an absolute .erl path and ask the server to
%% cover-compile it.
compile_module(ModFile) when is_atom(ModFile);
			     is_list(ModFile) ->
    compile_module(ModFile, []).
compile_module(Module, Options) when is_atom(Module), is_list(Options) ->
    compile_module(atom_to_list(Module), Options);
compile_module(File, Options) when is_list(File), is_list(Options) ->
    %% Append ".erl" unless the name already carries that extension.
    WithExt = case filename:extension(File) of
		  ".erl" ->
		      File;
		  _ ->
		      File++".erl"
	      end,
    AbsFile = filename:absname(WithExt),
    [R] = compile_modules([AbsFile], Options),
    R.
%% compile_directory() ->
%% compile_directory(Dir) ->
%% compile_directory(Dir, Options) -> [Result] | {error,Reason}
%% Dir = string()
%% Options - see compile/1
%% Result - see compile/1
%% Reason = eacces | enoent
%% Cover-compile all .erl files in the current working directory.
compile_directory() ->
    case file:get_cwd() of
	{ok, Dir} ->
	    compile_directory(Dir, []);
	Error ->
	    Error
    end.
%% Cover-compile all .erl files found directly in Dir (not recursive).
compile_directory(Dir) when is_list(Dir) ->
    compile_directory(Dir, []).
compile_directory(Dir, Options) when is_list(Dir), is_list(Options) ->
    case file:list_dir(Dir) of
	{ok, Files} ->
	    ErlFiles = [filename:join(Dir, File) ||
			   File <- Files,
			   filename:extension(File) =:= ".erl"],
	    compile_modules(ErlFiles, Options);
	Error ->
	    Error
    end.
%% Compile each file via the server, one at a time, returning the results
%% in the original file order.
compile_modules(Files,Options) ->
    Options2 = filter_options(Options),
    compile_modules(Files,Options2,[]).
compile_modules([File|Files], Options, Result) ->
    R = call({compile, File, Options}),
    compile_modules(Files,Options,[R|Result]);
compile_modules([],_Opts,Result) ->
    lists:reverse(Result).
%% Keep only the compiler options cover supports: include dirs, macro
%% definitions and export_all.  Everything else is silently dropped.
filter_options(Options) ->
    IsSupported = fun({i, Dir}) -> is_list(Dir);
		     ({d, _Macro}) -> true;
		     ({d, _Macro, _Value}) -> true;
		     (export_all) -> true;
		     (_) -> false
		  end,
    [Opt || Opt <- Options, IsSupported(Opt)].
%% compile_beam(ModFile) -> Result | {error,Reason}
%% ModFile - see compile/1
%% Result - see compile/1
%% Reason = non_existing | already_cover_compiled
%% Cover-compile a module from its .beam file (requires abstract code).
%% Accepts either a module name (resolved via code:which/1) or a file name.
compile_beam(Module) when is_atom(Module) ->
    case code:which(Module) of
	non_existing ->
	    {error,non_existing};
	?TAG ->
	    %% Already cover compiled; let the server decide from its state.
	    compile_beam(Module,?TAG);
	File ->
	    compile_beam(Module,File)
    end;
compile_beam(File) when is_list(File) ->
    {WithExt,WithoutExt}
	= case filename:rootname(File,".beam") of
	      File ->
		  {File++".beam",File};
	      Rootname ->
		  {File,Rootname}
	  end,
    AbsFile = filename:absname(WithExt),
    Module = list_to_atom(filename:basename(WithoutExt)),
    compile_beam(Module,AbsFile).
compile_beam(Module,File) ->
    call({compile_beam,Module,File}).
%% compile_beam_directory(Dir) -> [Result] | {error,Reason}
%% Dir - see compile_directory/1
%% Result - see compile/1
%% Reason = eacces | enoent
%% Cover-compile all .beam files in the given directory (default: cwd).
compile_beam_directory() ->
    case file:get_cwd() of
	{ok, Dir} ->
	    compile_beam_directory(Dir);
	Error ->
	    Error
    end.
compile_beam_directory(Dir) when is_list(Dir) ->
    case file:list_dir(Dir) of
	{ok, Files} ->
	    BeamFiles = [filename:join(Dir, File) ||
			    File <- Files,
			    filename:extension(File) =:= ".beam"],
	    compile_beams(BeamFiles);
	Error ->
	    Error
    end.
%% Cover-compile every listed beam file sequentially, returning the
%% per-file results in the original order.
compile_beams(Files) ->
    [compile_beam(File) || File <- Files].
%% analyse(Module) ->
%% analyse(Module, Analysis) ->
%% analyse(Module, Level) ->
%% analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error}
%% Module = atom()
%% Analysis = coverage | calls
%% Level = line | clause | function | module
%% Answer = {Module,Value} | [{Item,Value}]
%% Item = Line | Clause | Function
%% Line = {M,N}
%% Clause = {M,F,A,C}
%% Function = {M,F,A}
%% M = F = atom()
%% N = A = C = integer()
%% Value = {Cov,NotCov} | Calls
%% Cov = NotCov = Calls = integer()
%% Error = {not_cover_compiled,Module}
%% analyse(Module[, Analysis][, Level]) -> {ok,Answer} | {error,Error}
%%   Analysis = coverage | calls, Level = line | clause | function | module.
analyse(Module) ->
    analyse(Module, coverage).
analyse(Module, Analysis) when Analysis=:=coverage; Analysis=:=calls ->
    analyse(Module, Analysis, function);
analyse(Module, Level) when Level=:=line; Level=:=clause; Level=:=function;
			    Level=:=module ->
    analyse(Module, coverage, Level).
%% Bug fix: the original guard "is_atom(Module), A=:=coverage; A=:=calls,
%% L=:=line; L=:=clause; ..." grouped as (is_atom and coverage) orelse
%% (calls and line) orelse clause orelse ..., accepting e.g. a non-atom
%% Module with Level=:=clause.  Parenthesize so all three conditions are
%% required.
analyse(Module, Analysis, Level) when is_atom(Module),
				      (Analysis=:=coverage orelse
				       Analysis=:=calls),
				      (Level=:=line orelse Level=:=clause orelse
				       Level=:=function orelse Level=:=module) ->
    call({{analyse, Analysis, Level}, Module}).
%% American-spelling aliases for analyse/1,2,3.
analyze(Module) -> analyse(Module).
analyze(Module, Analysis) -> analyse(Module, Analysis).
analyze(Module, Analysis, Level) -> analyse(Module, Analysis, Level).
%% analyse_to_file(Module) ->
%% analyse_to_file(Module, Options) ->
%% analyse_to_file(Module, OutFile) ->
%% analyse_to_file(Module, OutFile, Options) -> {ok,OutFile} | {error,Error}
%%   Module = atom()
%%   OutFile = string()
%%   Options = [Option]
%%   Option = html
%%   Error = {not_cover_compiled,Module} | no_source_code_found |
%%           {file,File,Reason}
%%     File = string()
%%     Reason = term()
%% Write a line-by-line analysis of Module to a file.  The second argument
%% may be an option list (first element an atom) or an output file name.
analyse_to_file(Module) when is_atom(Module) ->
    analyse_to_file(Module, outfilename(Module,[]), []).
analyse_to_file(Module, []) when is_atom(Module) ->
    analyse_to_file(Module, outfilename(Module,[]), []);
analyse_to_file(Module, Options) when is_atom(Module),
				      is_list(Options), is_atom(hd(Options)) ->
    analyse_to_file(Module, outfilename(Module,Options), Options);
analyse_to_file(Module, OutFile) when is_atom(Module), is_list(OutFile) ->
    analyse_to_file(Module, OutFile, []).
analyse_to_file(Module, OutFile, Options) when is_atom(Module), is_list(OutFile) ->
    call({{analyse_to_file, OutFile, Options}, Module}).
%% American-spelling aliases for analyse_to_file/1,2,3.
analyze_to_file(Module) -> analyse_to_file(Module).
analyze_to_file(Module, OptOrOut) -> analyse_to_file(Module, OptOrOut).
analyze_to_file(Module, OutFile, Options) ->
    analyse_to_file(Module, OutFile, Options).
%% Run analyse_to_file in a separate linked process; errors are turned into
%% exits of that process.
async_analyse_to_file(Module) ->
    do_spawn(?MODULE, analyse_to_file, [Module]).
async_analyse_to_file(Module, OutFileOrOpts) ->
    do_spawn(?MODULE, analyse_to_file, [Module, OutFileOrOpts]).
async_analyse_to_file(Module, OutFile, Options) ->
    do_spawn(?MODULE, analyse_to_file, [Module, OutFile, Options]).
%% Spawn a linked process that applies M:F(A...); an {error,Reason} result
%% becomes the process exit reason, {ok,_} exits normally.
do_spawn(M, F, A) ->
    Run = fun() ->
		  case apply(M, F, A) of
		      {ok, _} ->
			  ok;
		      {error, Reason} ->
			  exit(Reason)
		  end
	  end,
    spawn_link(Run).
%% American-spelling aliases for async_analyse_to_file/1,2,3.
async_analyze_to_file(Module) ->
    async_analyse_to_file(Module).
async_analyze_to_file(Module, OutFileOrOpts) ->
    async_analyse_to_file(Module, OutFileOrOpts).
async_analyze_to_file(Module, OutFile, Options) ->
    async_analyse_to_file(Module, OutFile, Options).
%% Default output file name for an analysis of Module: "<Module>.COVER.html"
%% when the html option is present, otherwise "<Module>.COVER.out".
outfilename(Module, Opts) ->
    Ext = case lists:member(html, Opts) of
	      true -> ".COVER.html";
	      false -> ".COVER.out"
	  end,
    atom_to_list(Module) ++ Ext.
%% export(File)
%% export(File,Module) -> ok | {error,Reason}
%% File = string(); file to write the exported data to
%% Module = atom()
%% Export collected cover data to File; '_' exports all modules.
export(File) ->
    export(File, '_').
export(File, Module) ->
    call({export,File,Module}).
%% import(File) -> ok | {error, Reason}
%% File = string(); file created with cover:export/1,2
%% Import cover data previously written by export/1,2.
import(File) ->
    call({import,File}).
%% modules() -> [Module]
%% Module = atom()
%% List the cover-compiled modules that are still loaded.
modules() ->
   call(modules).
%% imported_modules() -> [Module]
%% Module = atom()
%% List modules for which imported data exists.
imported_modules() ->
   call(imported_modules).
%% imported() -> [ImportFile]
%%   ImportFile = string()
%% List all files that data has been imported from.
imported() ->
   call(imported).
%% which_nodes() -> [Node]
%% Node = atom()
%% List the remote nodes currently participating in the analysis.
which_nodes() ->
   call(which_nodes).
%% is_compiled(Module) -> {file,File} | false
%% Module = atom()
%% File = string()
%% Return {file,File} if Module is cover compiled, otherwise false.
is_compiled(Module) when is_atom(Module) ->
    call({is_compiled, Module}).
%% reset(Module) -> ok | {error,Error}
%% reset() -> ok
%% Module = atom()
%% Error = {not_cover_compiled,Module}
%% Clear collected data for one module, or for everything (reset/0 also
%% drops imported data).
reset(Module) when is_atom(Module) ->
    call({reset, Module}).
reset() ->
    call(reset).
%% stop() -> ok
%% stop/0 stops the whole tool; stop/1 detaches the given remote node(s)
%% after collecting their data.
stop() ->
    call(stop).
stop(Node) when is_atom(Node) ->
    stop([Node]);
stop(Nodes) ->
    call({stop,remove_myself(Nodes,[])}).
%% flush(Nodes) -> ok | {error,not_main_node}
%% Nodes = [Node] | Node
%% Node = atom()
%% Error = {not_cover_compiled,Module}
%% Fetch data from the given remote node(s) without stopping cover there.
flush(Node) when is_atom(Node) ->
    flush([Node]);
flush(Nodes) ->
    call({flush,remove_myself(Nodes,[])}).
%% Used by test_server only. Not documented.
%% Used by test_server only. Not documented.
get_main_node() ->
    call(get_main_node).
%% bump(Module, Function, Arity, Clause, Line)
%%   Module = Function = atom()
%%   Arity = Clause = Line = integer()
%% This function is inserted into Cover compiled modules, once for each
%% executable line.
%% bump(Module, Function, Arity, Clause, Line) ->
%%     Key = #bump{module=Module, function=Function, arity=Arity, clause=Clause,
%%                 line=Line},
%%     ets:update_counter(?COVER_TABLE, Key, 1).
%% Send Request to the local cover_server and await its reply.  A monitor
%% detects an absent server (noproc 'DOWN' arrives immediately, so the
%% server is started and the call retried) or a server dying mid-call
%% (we exit with its reason).
call(Request) ->
    Ref = erlang:monitor(process,?SERVER),
    receive {'DOWN', Ref, _Type, _Object, noproc} ->
	    erlang:demonitor(Ref),
	    start(),
	    call(Request)
    after 0 ->
	    ?SERVER ! {self(),Request},
	    Return =
		receive
		    {'DOWN', Ref, _Type, _Object, Info} ->
			exit(Info);
		    {?SERVER,Reply} ->
			Reply
		end,
	    erlang:demonitor(Ref, [flush]),
	    Return
    end.
%% Send a tagged reply back to a caller of call/1.
reply(From, Reply) ->
    From ! {?SERVER,Reply}.
%% True if the sender tag looks like a local caller (a pid) rather than a
%% {?SERVER,Node} tuple.
is_from(From) ->
    is_pid(From).
%% Send Request to the cover_server on Node and await its reply, using a
%% monitor to detect a dead node.  A 'DOWN' during a {remote,stop} request
%% is expected and treated as success.
remote_call(Node,Request) ->
    Ref = erlang:monitor(process,{?SERVER,Node}),
    receive {'DOWN', Ref, _Type, _Object, noproc} ->
	    erlang:demonitor(Ref),
	    {error,node_dead}
    after 0 ->
	    {?SERVER,Node} ! Request,
	    Return =
		receive
		    {'DOWN', Ref, _Type, _Object, _Info} ->
			case Request of
			    {remote,stop} -> ok;
			    _ -> {error,node_dead}
			end;
		    {?SERVER,Reply} ->
			Reply
		end,
	    erlang:demonitor(Ref, [flush]),
	    Return
    end.
%% Reply either to a collector process (pid) or to the main node's server.
remote_reply(Proc,Reply) when is_pid(Proc) ->
    Proc ! {?SERVER,Reply};
remote_reply(MainNode,Reply) ->
    {?SERVER,MainNode} ! {?SERVER,Reply}.
%%%----------------------------------------------------------------------
%%% cover_server on main node
%%%----------------------------------------------------------------------
%% Initialize the cover_server on the main node: register, create all ets
%% tables, subscribe to node up/down events and enter the main loop.
%% (Fixes a comment line that had lost its %% marker and broke the syntax.)
init_main(Starter) ->
    register(?SERVER,self()),
    %% Having write_concurrency here gives a 40% performance boost
    %% when collect/1 is called.
    ets:new(?COVER_TABLE, [set, public, named_table
			   ,{write_concurrency, true}
			  ]),
    ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]),
    ets:new(?BINARY_TABLE, [set, named_table]),
    ets:new(?COLLECTION_TABLE, [set, public, named_table]),
    ets:new(?COLLECTION_CLAUSE_TABLE, [set, public, named_table]),
    net_kernel:monitor_nodes(true),
    Starter ! {?SERVER,started},
    main_process_loop(#main_state{}).
%% Main receive loop of the cover_server on the main node.  Every request
%% re-enters the loop; compile requests re-enter via ?MODULE: so that a
%% reloaded cover module takes effect.  (Fixes: a comment inside the
%% compile_beam clause had lost its %% marker, breaking the syntax, and the
%% which_nodes clause carried a copy-pasted "imported files" comment.)
main_process_loop(State) ->
    receive
	{From, {start_nodes,Nodes}} ->
	    {StartedNodes,State1} = do_start_nodes(Nodes, State),
	    reply(From, {ok,StartedNodes}),
	    main_process_loop(State1);
	{From, {compile, File, Options}} ->
	    case do_compile(File, Options) of
		{ok, Module} ->
		    remote_load_compiled(State#main_state.nodes,[{Module,File}]),
		    reply(From, {ok, Module}),
		    Compiled = add_compiled(Module, File,
					    State#main_state.compiled),
		    Imported = remove_imported(Module,State#main_state.imported),
		    NewState = State#main_state{compiled = Compiled,
						imported = Imported},
		    %% This module (cover) could have been reloaded. Make
		    %% sure we run the new code.
		    ?MODULE:main_process_loop(NewState);
		error ->
		    reply(From, {error, File}),
		    main_process_loop(State)
	    end;
	{From, {compile_beam, Module, BeamFile0}} ->
	    Compiled0 = State#main_state.compiled,
	    case get_beam_file(Module,BeamFile0,Compiled0) of
		{ok,BeamFile} ->
		    UserOptions = get_compile_options(Module,BeamFile),
		    {Reply,Compiled} =
			case do_compile_beam(Module,BeamFile,UserOptions) of
			    {ok, Module} ->
				remote_load_compiled(State#main_state.nodes,
						     [{Module,BeamFile}]),
				C = add_compiled(Module,BeamFile,Compiled0),
				{{ok,Module},C};
			    error ->
				{{error, BeamFile}, Compiled0};
			    {error,Reason} -> % no abstract code
				{{error, {Reason, BeamFile}}, Compiled0}
			end,
		    reply(From,Reply),
		    Imported = remove_imported(Module,State#main_state.imported),
		    NewState = State#main_state{compiled = Compiled,
						imported = Imported},
		    %% This module (cover) could have been reloaded. Make
		    %% sure we run the new code.
		    ?MODULE:main_process_loop(NewState);
		{error,no_beam} ->
		    %% The module has first been compiled from .erl, and now
		    %% someone tries to compile it from .beam
		    reply(From,
			  {error,{already_cover_compiled,no_beam_found,Module}}),
		    main_process_loop(State)
	    end;
	{From, {export,OutFile,Module}} ->
	    spawn(fun() ->
			  ?SPAWN_DBG(export,{OutFile, Module}),
			  do_export(Module, OutFile, From, State)
		  end),
	    main_process_loop(State);
	{From, {import,File}} ->
	    case file:open(File,[read,binary,raw]) of
		{ok,Fd} ->
		    Imported = do_import_to_table(Fd,File,
						  State#main_state.imported),
		    reply(From, ok),
		    file:close(Fd),
		    main_process_loop(State#main_state{imported=Imported});
		{error,Reason} ->
		    reply(From, {error, {cant_open_file,File,Reason}}),
		    main_process_loop(State)
	    end;
	{From, modules} ->
	    %% Get all compiled modules which are still loaded
	    {LoadedModules,Compiled} =
		get_compiled_still_loaded(State#main_state.nodes,
					  State#main_state.compiled),
	    reply(From, LoadedModules),
	    main_process_loop(State#main_state{compiled=Compiled});
	{From, imported_modules} ->
	    %% Get all modules with imported data
	    ImportedModules = lists:map(fun({Mod,_File,_ImportFile}) -> Mod end,
					State#main_state.imported),
	    reply(From, ImportedModules),
	    main_process_loop(State);
	{From, imported} ->
	    %% List all imported files
	    reply(From, get_all_importfiles(State#main_state.imported,[])),
	    main_process_loop(State);
	{From, which_nodes} ->
	    %% List all nodes currently attached to the analysis
	    reply(From, State#main_state.nodes),
	    main_process_loop(State);
	{From, reset} ->
	    lists:foreach(
	      fun({Module,_File}) ->
		      do_reset_main_node(Module,State#main_state.nodes)
	      end,
	      State#main_state.compiled),
	    reply(From, ok),
	    main_process_loop(State#main_state{imported=[]});
	{From, {stop,Nodes}} ->
	    remote_collect('_',Nodes,true),
	    reply(From, ok),
	    Nodes1 = State#main_state.nodes--Nodes,
	    LostNodes1 = State#main_state.lost_nodes--Nodes,
	    main_process_loop(State#main_state{nodes=Nodes1,
					       lost_nodes=LostNodes1});
	{From, {flush,Nodes}} ->
	    remote_collect('_',Nodes,false),
	    reply(From, ok),
	    main_process_loop(State);
	{From, stop} ->
	    lists:foreach(
	      fun(Node) ->
		      remote_call(Node,{remote,stop})
	      end,
	      State#main_state.nodes),
	    reload_originals(State#main_state.compiled),
	    ets:delete(?COVER_TABLE),
	    ets:delete(?COVER_CLAUSE_TABLE),
	    ets:delete(?BINARY_TABLE),
	    ets:delete(?COLLECTION_TABLE),
	    ets:delete(?COLLECTION_CLAUSE_TABLE),
	    unregister(?SERVER),
	    reply(From, ok);
	{From, {{analyse, Analysis, Level}, Module}} ->
	    S = try
		    Loaded = is_loaded(Module, State),
		    spawn(fun() ->
				  ?SPAWN_DBG(analyse,{Module,Analysis, Level}),
				  do_parallel_analysis(
				    Module, Analysis, Level,
				    Loaded, From, State)
			  end),
		    State
		catch throw:Reason ->
			reply(From,{error, {not_cover_compiled,Module}}),
			not_loaded(Module, Reason, State)
		end,
	    main_process_loop(S);
	{From, {{analyse_to_file, OutFile, Opts},Module}} ->
	    S = try
		    Loaded = is_loaded(Module, State),
		    spawn(fun() ->
				  ?SPAWN_DBG(analyse_to_file,
					     {Module,OutFile, Opts}),
				  do_parallel_analysis_to_file(
				    Module, OutFile, Opts,
				    Loaded, From, State)
			  end),
		    State
		catch throw:Reason ->
			reply(From,{error, {not_cover_compiled,Module}}),
			not_loaded(Module, Reason, State)
		end,
	    main_process_loop(S);
	{From, {is_compiled, Module}} ->
	    S = try is_loaded(Module, State) of
		    {loaded, File} ->
			reply(From,{file, File}),
			State;
		    {imported,_File,_ImportFiles} ->
			reply(From,false),
			State
		catch throw:Reason ->
			reply(From,false),
			not_loaded(Module, Reason, State)
		end,
	    main_process_loop(S);
	{From, {reset, Module}} ->
	    S = try
		    Loaded = is_loaded(Module,State),
		    R = case Loaded of
			    {loaded, _File} ->
				do_reset_main_node(
				  Module, State#main_state.nodes);
			    {imported, _File, _} ->
				do_reset_collection_table(Module)
			end,
		    Imported =
			remove_imported(Module,
					State#main_state.imported),
		    reply(From, R),
		    State#main_state{imported=Imported}
		catch throw:Reason ->
			reply(From,{error, {not_cover_compiled,Module}}),
			not_loaded(Module, Reason, State)
		end,
	    main_process_loop(S);
	{'DOWN', _MRef, process, {?SERVER,Node}, _Info} ->
	    %% A remote cover_server is down, mark as lost
	    {Nodes,Lost} =
		case lists:member(Node,State#main_state.nodes) of
		    true ->
			N = State#main_state.nodes--[Node],
			L = [Node|State#main_state.lost_nodes],
			{N,L};
		    false -> % node stopped
			{State#main_state.nodes,State#main_state.lost_nodes}
		end,
	    main_process_loop(State#main_state{nodes=Nodes,lost_nodes=Lost});
	{nodeup,Node} ->
	    State1 =
		case lists:member(Node,State#main_state.lost_nodes) of
		    true ->
			sync_compiled(Node,State);
		    false ->
			State
		end,
	    main_process_loop(State1);
	{nodedown,_} ->
	    %% Will be taken care of when 'DOWN' message arrives
	    main_process_loop(State);
	{From, get_main_node} ->
	    reply(From, node()),
	    main_process_loop(State);
	get_status ->
	    io:format("~tp~n",[State]),
	    main_process_loop(State)
    end.
%%%----------------------------------------------------------------------
%%% cover_server on remote node
%%%----------------------------------------------------------------------
%% Initialize the cover_server on a remote node: register, create the local
%% data tables and enter the remote loop, remembering the main node.
init_remote(Starter,MainNode) ->
    register(?SERVER,self()),
    ets:new(?COVER_TABLE, [set, public, named_table
			   %% write_concurrency here makes otp_8270 break :(
			   %,{write_concurrency, true}
			  ]),
    ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]),
    Starter ! {self(),started},
    remote_process_loop(#remote_state{main_node=MainNode}).
%% Receive loop of the cover_server on a remote node.
remote_process_loop(State) ->
    receive
	{remote,load_compiled,Compiled} ->
	    Compiled1 = load_compiled(Compiled,State#remote_state.compiled),
	    remote_reply(State#remote_state.main_node, ok),
	    %% Re-enter via ?MODULE: so a reloaded cover module takes effect.
	    ?MODULE:remote_process_loop(State#remote_state{compiled=Compiled1});
	{remote,unload,UnloadedModules} ->
	    unload(UnloadedModules),
	    Compiled =
		update_compiled(UnloadedModules, State#remote_state.compiled),
	    remote_reply(State#remote_state.main_node, ok),
	    remote_process_loop(State#remote_state{compiled=Compiled});
	{remote,reset,Module} ->
	    do_reset(Module),
	    remote_reply(State#remote_state.main_node, ok),
	    remote_process_loop(State);
	{remote,collect,Module,CollectorPid} ->
	    %% Old-protocol collect request (no From): re-send to ourselves
	    %% in the 5-tuple format.  Bug fix: the loop must be re-entered
	    %% here; previously the clause ended after the send, terminating
	    %% the server so the re-sent message was never handled.
	    self() ! {remote,collect,Module,CollectorPid, ?SERVER},
	    remote_process_loop(State);
	{remote,collect,Module,CollectorPid,From} ->
	    spawn(fun() ->
			  ?SPAWN_DBG(remote_collect,
				     {Module, CollectorPid, From}),
			  do_collect(Module, CollectorPid, From)
		  end),
	    remote_process_loop(State);
	{remote,stop} ->
	    reload_originals(State#remote_state.compiled),
	    ets:delete(?COVER_TABLE),
	    ets:delete(?COVER_CLAUSE_TABLE),
	    unregister(?SERVER),
	    ok; % not replying since 'DOWN' message will be received anyway
	{remote,get_compiled} ->
	    remote_reply(State#remote_state.main_node,
			 State#remote_state.compiled),
	    remote_process_loop(State);
	{From, get_main_node} ->
	    remote_reply(From, State#remote_state.main_node),
	    remote_process_loop(State);
	get_status ->
	    io:format("~tp~n",[State]),
	    remote_process_loop(State);
	M ->
	    %% Unknown message; tell a local caller it reached the wrong node.
	    io:format("WARNING: remote cover_server received\n~p\n",[M]),
	    case M of
		{From,_} ->
		    case is_from(From) of
			true ->
			    reply(From,{error,not_main_node});
			false ->
			    ok
		    end;
		_ ->
		    ok
	    end,
	    remote_process_loop(State)
    end.
%% Stream the bump data for Module (or all modules for '_') to CollectorPid,
%% one clause at a time and in parallel over modules, then ack to From.
do_collect(Module, CollectorPid, From) ->
    AllMods =
	case Module of
	    '_' -> ets:tab2list(?COVER_CLAUSE_TABLE);
	    _ -> ets:lookup(?COVER_CLAUSE_TABLE, Module)
	end,
    %% Sending clause by clause in order to avoid large lists
    pmap(
      fun({_Mod,Clauses}) ->
	      lists:map(fun(Clause) ->
				send_collected_data(Clause, CollectorPid)
			end,Clauses)
      end,AllMods),
    CollectorPid ! done,
    remote_reply(From, ok).
%% Send the bump entries of one clause to the collector and zero the local
%% counters so the data is not counted twice on the next collect.
send_collected_data({M,F,A,C,_L}, CollectorPid) ->
    Pattern =
	{#bump{module=M, function=F, arity=A, clause=C}, '_'},
    Bumps = ets:match_object(?COVER_TABLE, Pattern),
    %% Reset
    lists:foreach(fun({Bump,_N}) ->
			  ets:insert(?COVER_TABLE, {Bump,0})
		  end,
		  Bumps),
    CollectorPid ! {chunk,Bumps}.
%% Restore the original (non-instrumented) code for every compiled module.
reload_originals(Compiled) ->
    lists:foreach(fun({Module, _File}) ->
			  do_reload_original(Module)
		  end,
		  Compiled).
%% If Module is currently cover compiled, swap the original beam back in.
do_reload_original(Module) ->
    case code:which(Module) of
	?TAG ->
	    code:purge(Module), % remove code marked as 'old'
	    code:delete(Module), % mark cover compiled code as 'old'
	    %% Note: original beam code must be loaded before the cover
	    %% compiled code is purged, in order to for references to
	    %% 'fun M:F/A' and %% 'fun F/A' funs to be correct (they
	    %% refer to (M:)F/A in the *latest* version of the module)
	    code:load_file(Module), % load original code
	    code:purge(Module); % remove cover compiled code
	_ ->
	    ignore
    end.
%% Load cover-compiled binaries received from the main node, seeding the
%% local tables first.  Modules that fail to load are cleaned up and left
%% out of the returned compiled list.
load_compiled([{Module,File,Binary,InitialTable}|Compiled],Acc) ->
    %% Make sure the #bump{} records are available *before* the
    %% module is loaded.
    insert_initial_data(InitialTable),
    NewAcc =
	case code:load_binary(Module, ?TAG, Binary) of
	    {module,Module} ->
		add_compiled(Module, File, Acc);
	    _ ->
		do_clear(Module),
		Acc
	end,
    load_compiled(Compiled,NewAcc);
load_compiled([],Acc) ->
    Acc.
%% Split the mixed initial-data list into the two tables: clause-info items
%% (first element is the module atom) go to ?COVER_CLAUSE_TABLE, #bump{}
%% keyed counters go to ?COVER_TABLE.
insert_initial_data([Item|Items]) when is_atom(element(1,Item)) ->
    ets:insert(?COVER_CLAUSE_TABLE, Item),
    insert_initial_data(Items);
insert_initial_data([Item|Items]) ->
    ets:insert(?COVER_TABLE, Item),
    insert_initial_data(Items);
insert_initial_data([]) ->
    ok.
%% Drop local cover data for each module and restore its original code.
unload(Modules) ->
    lists:foreach(fun(Module) ->
			  do_clear(Module),
			  do_reload_original(Module)
		  end,
		  Modules).
%%%----------------------------------------------------------------------
%%% Internal functions
%%%----------------------------------------------------------------------
%%%--Handling of remote nodes--------------------------------------------
%% Start remote cover_servers on Nodes via rpc, monitor each one that
%% started, push the still-loaded compiled modules to them, and record them
%% in the state.  Nodes that fail to start are reported and skipped.
do_start_nodes(Nodes, State) ->
    ThisNode = node(),
    StartedNodes =
	lists:foldl(
	  fun(Node,Acc) ->
		  case rpc:call(Node,cover,remote_start,[ThisNode]) of
		      {ok,_RPid} ->
			  erlang:monitor(process,{?SERVER,Node}),
			  [Node|Acc];
		      Error ->
			  io:format("Could not start cover on ~w: ~tp\n",
				    [Node,Error]),
			  Acc
		  end
	  end,
	  [],
	  Nodes),
    %% In case some of the compiled modules have been unloaded they
    %% should not be loaded on the new node.
    {_LoadedModules,Compiled} =
	get_compiled_still_loaded(State#main_state.nodes,
				  State#main_state.compiled),
    remote_load_compiled(StartedNodes,Compiled),
    State1 =
	State#main_state{nodes = State#main_state.nodes ++ StartedNodes,
			 compiled = Compiled},
    {StartedNodes, State1}.
%% start the cover_server on a remote node
%% start the cover_server on a remote node (called via rpc from the main
%% node); mirrors start/0 but enters init_remote/2.
remote_start(MainNode) ->
    case whereis(?SERVER) of
	undefined ->
	    Starter = self(),
	    Pid = spawn(fun() ->
				?SPAWN_DBG(remote_start,{MainNode}),
				init_remote(Starter,MainNode)
			end),
	    Ref = erlang:monitor(process,Pid),
	    Return =
		receive
		    {Pid,started} ->
			{ok,Pid};
		    {'DOWN', Ref, _Type, _Object, Info} ->
			{error,Info}
		end,
	    erlang:demonitor(Ref),
	    Return;
	Pid ->
	    {error,{already_started,Pid}}
    end.
%% If a lost node comes back, ensure that main and remote node has the
%% same cover compiled modules. Note that no action is taken if the
%% same {Mod,File} eksists on both, i.e. code change is not handled!
%% If a lost node comes back, ensure that main and remote node has the
%% same cover compiled modules. Note that no action is taken if the
%% same {Mod,File} exists on both, i.e. code change is not handled!
sync_compiled(Node,State) ->
    #main_state{compiled=Compiled0,nodes=Nodes,lost_nodes=Lost}=State,
    State1 =
	case remote_call(Node,{remote,get_compiled}) of
	    {error,node_dead} ->
		%% Server died between nodeup and now; restart it instead.
		{_,S} = do_start_nodes([Node],State),
		S;
	    {error,_} ->
		State;
	    RemoteCompiled ->
		%% Unload remote-only modules, load main-only modules.
		{_,Compiled} = get_compiled_still_loaded(Nodes,Compiled0),
		Unload = [UM || {UM,_}=U <- RemoteCompiled,
				false == lists:member(U,Compiled)],
		remote_unload([Node],Unload),
		Load = [L || L <- Compiled,
			     false == lists:member(L,RemoteCompiled)],
		remote_load_compiled([Node],Load),
		State#main_state{compiled=Compiled, nodes=[Node|Nodes]}
	end,
    State1#main_state{lost_nodes=Lost--[Node]}.
%% Load a set of cover compiled modules on remote nodes,
%% We do it ?MAX_MODS modules at a time so that we don't
%% run out of memory on the cover_server node.
-define(MAX_MODS, 10).
remote_load_compiled(Nodes,Compiled) ->
    remote_load_compiled(Nodes, Compiled, [], 0).

%% Accumulate up to ?MAX_MODS modules worth of load data in Acc, ship
%% the batch to every node, then continue with the rest of the list.
remote_load_compiled(_Nodes, [], [], _ModNum) ->
    ok;
remote_load_compiled(Nodes, Compiled, Acc, ModNum)
  when Compiled == []; ModNum == ?MAX_MODS ->
    %% Batch full, or input exhausted with a non-empty batch: send it.
    lists:foreach(
      fun(Node) ->
              remote_call(Node,{remote,load_compiled,Acc})
      end,
      Nodes),
    remote_load_compiled(Nodes, Compiled, [], 0);
remote_load_compiled(Nodes, [MF | Rest], Acc, ModNum) ->
    remote_load_compiled(
      Nodes, Rest, [get_data_for_remote_loading(MF) | Acc], ModNum + 1).
%% Read all data needed for loading a cover compiled module on a remote node
%% Binary is the beam code for the module and InitialTable is the initial
%% data to insert in ?COVER_TABLE.
%% Read all data needed for loading a cover compiled module on a
%% remote node: the instrumented binary plus the initial rows for the
%% remote ?COVER_TABLE and ?COVER_CLAUSE_TABLE.
%% (Fix: the comment below had lost its %% prefix, which broke the
%% syntax of this function.)
get_data_for_remote_loading({Module,File}) ->
    [{Module,Binary}] = ets:lookup(?BINARY_TABLE,Module),
    %% !!! The InitialTable list will be long if the module is big - what to do??
    InitialBumps = ets:select(?COVER_TABLE,ms(Module)),
    InitialClauses = ets:lookup(?COVER_CLAUSE_TABLE,Module),
    {Module,File,Binary,InitialBumps ++ InitialClauses}.
%% Create a match spec which returns the clause info {Module,InitInfo} and
%% all #bump keys for the given module with 0 number of calls.
%% Build the match spec with ets:fun2ms (a parse transform from
%% ms_transform): selects {Key,0} for every #bump key of Module.
ms(Module) ->
    ets:fun2ms(fun({Key,_}) when Key#bump.module=:=Module ->
                       {Key,0}
               end).
%% Unload modules on remote nodes
%% Tell every node to unload the given modules.
remote_unload(Nodes,UnloadedModules) ->
    [remote_call(Node,{remote,unload,UnloadedModules}) || Node <- Nodes],
    ok.
%% Reset one or all modules on remote nodes
%% Tell every node to reset data for Module.
remote_reset(Module,Nodes) ->
    [remote_call(Node,{remote,reset,Module}) || Node <- Nodes],
    ok.
%% Collect data from remote nodes - used for analyse or stop(Node)
%% Collect data from the given nodes in parallel (pmap), one
%% collection per node.  Module = '_' collects all modules.
remote_collect(Module,Nodes,Stop) ->
    pmap(fun(Node) ->
                 ?SPAWN_DBG(remote_collect,
                            {Module, Nodes, Stop}),
                 do_collection(Node, Module, Stop)
         end,
         Nodes).
%% Ask one node to send its data for Module to a freshly spawned
%% collector process.  A dead node just gets its collector told to
%% finish; when Stop is true the remote cover server is stopped after
%% a successful collection.
do_collection(Node, Module, Stop) ->
    CollectorPid = spawn(fun collector_proc/0),
    case remote_call(Node,{remote,collect,Module,CollectorPid, self()}) of
        {error,node_dead} ->
            CollectorPid ! done,
            ok;
        ok when Stop ->
            remote_call(Node,{remote,stop});
        ok ->
            ok
    end.
%% Process which receives chunks of data from remote nodes - either when
%% analysing or when stopping cover on the remote nodes.
%% Receive loop: each {chunk,Chunk} is merged into the collection
%% table; 'done' terminates the process.
collector_proc() ->
    ?SPAWN_DBG(collector_proc, []),
    receive
        {chunk,Chunk} ->
            insert_in_collection_table(Chunk),
            collector_proc();
        done ->
            ok
    end.
%% Merge a whole chunk of {Key,Val} counters into the collection table.
insert_in_collection_table([{K,V}|Rest]) ->
    insert_in_collection_table(K, V),
    insert_in_collection_table(Rest);
insert_in_collection_table([]) ->
    ok.
%% Add Val to the counter stored under Key in ?COLLECTION_TABLE,
%% creating the entry first when needed.
insert_in_collection_table(Key,Val) ->
    case ets:member(?COLLECTION_TABLE,Key) of
        true ->
            ets:update_counter(?COLLECTION_TABLE,
                               Key,Val);
        false ->
            %% Make sure that there are no race conditions from ets:member
            case ets:insert_new(?COLLECTION_TABLE,{Key,Val}) of
                false ->
                    %% Key appeared in between: retry, which now takes
                    %% the update_counter branch.
                    insert_in_collection_table(Key,Val);
                _ ->
                    ok
            end
    end.
%% Drop the local node from the list; remaining nodes end up in
%% reverse order on top of Acc.
remove_myself([Node|Rest], Acc) when Node =:= node() ->
    remove_myself(Rest, Acc);
remove_myself([Node|Rest], Acc) ->
    remove_myself(Rest, [Node|Acc]);
remove_myself([], Acc) ->
    Acc.
%%%--Handling of modules state data--------------------------------------
%% Print the import notice for an analysed module; nothing to say when
%% no data was imported.
analyse_info(_Module, []) ->
    ok;
analyse_info(Module, Imported) ->
    imported_info("Analysis", Module, Imported).
%% Deliberately silent: we do not print that the export includes
%% imported modules.
export_info(_Module, _Imported) ->
    ok.
%% Deliberately silent: we do not print that the export includes
%% imported modules.
export_info(_Imported) ->
    ok.
%% Accumulate the distinct import files referenced by all imported
%% modules.
get_all_importfiles(Imported, Acc0) ->
    lists:foldl(fun({_Mod, _File, ImportFiles}, Acc) ->
                        do_get_all_importfiles(ImportFiles, Acc)
                end,
                Acc0, Imported).
%% Prepend each file that is not already present in Acc (deduplicating
%% on first occurrence).
do_get_all_importfiles(ImportFiles, Acc0) ->
    lists:foldl(fun(File, Acc) ->
                        case lists:member(File, Acc) of
                            true  -> Acc;
                            false -> [File|Acc]
                        end
                end,
                Acc0, ImportFiles).
%% Print a notice that Module's data includes imported files.
imported_info(Text, Module, Imported) ->
    case lists:keyfind(Module, 1, Imported) of
        {Module, _File, ImportFiles} ->
            io:format("~ts includes data from imported files\n~tp\n",
                      [Text, ImportFiles]);
        false ->
            ok
    end.
%% Normalize the import file to an absolute name before recording it.
add_imported(Module, File, ImportFile, Imported) ->
    AbsFile = filename:absname(ImportFile),
    add_imported(Module, File, AbsFile, Imported, []).
%% Walk the Imported list looking for module M (Acc holds the visited
%% entries in reverse).  If ImportFile is already registered for M we
%% warn and refuse with 'dont_import'; otherwise prepend it to M's
%% list of import files, or append a fresh entry when M is new.
add_imported(M, F1, ImportFile, [{M,_F2,ImportFiles}|Imported], Acc) ->
    case lists:member(ImportFile,ImportFiles) of
        true ->
            io:fwrite("WARNING: Module ~w already imported from ~tp~n"
                      "Not importing again!~n",[M,ImportFile]),
            dont_import;
        false ->
            NewEntry = {M, F1, [ImportFile | ImportFiles]},
            {ok, lists:reverse([NewEntry | Acc]) ++ Imported}
    end;
add_imported(M, F, ImportFile, [H|Imported], Acc) ->
    add_imported(M, F, ImportFile, Imported, [H|Acc]);
add_imported(M, F, ImportFile, [], Acc) ->
    {ok, lists:reverse([{M, F, [ImportFile]} | Acc])}.
%% Removes a module from the list of imported modules and writes a warning
%% This is done when a module is compiled.
%% Remove Module from the list of imported modules, warning that its
%% imported data is dropped (done when the module is cover-compiled).
remove_imported(Module, Imported) ->
    case lists:keyfind(Module, 1, Imported) of
        {Module, _File, ImportFiles} ->
            io:fwrite("WARNING: Deleting data for module ~w imported from~n"
                      "~tp~n", [Module, ImportFiles]),
            lists:keydelete(Module, 1, Imported);
        false ->
            Imported
    end.
%% Adds information to the list of compiled modules, preserving time order
%% and without adding duplicate entries.
%% Record a freshly compiled module, preserving time order and
%% replacing any existing entry for the same module in place.
add_compiled(Module, NewFile, [{Module,_OldFile}|Rest]) ->
    [{Module,NewFile}|Rest];
add_compiled(Module, NewFile, [Other|Rest]) ->
    [Other|add_compiled(Module, NewFile, Rest)];
add_compiled(Module, NewFile, []) ->
    [{Module,NewFile}].
%% Classify Module: {loaded,File} when cover-compiled code is what is
%% currently loaded, {imported,File,ImportFiles} when its data comes
%% from import files.  Throws 'unloaded' when the cover-compiled code
%% has been replaced, and 'not_loaded' when the module is unknown.
is_loaded(Module, State) ->
    case get_file(Module, State#main_state.compiled) of
        {ok, File} ->
            case code:which(Module) of
                ?TAG -> {loaded, File};
                _ -> throw(unloaded)
            end;
        false ->
            case get_file(Module,State#main_state.imported) of
                {ok,File,ImportFiles} ->
                    {imported, File, ImportFiles};
                false ->
                    throw(not_loaded)
            end
    end.
%% Look Module up in either a compiled list ({Mod,File}) or an
%% imported list ({Mod,File,ImportFiles}); false when absent.
get_file(Module, [{Module, File} | _]) ->
    {ok, File};
get_file(Module, [{Module, File, ImportFiles} | _]) ->
    {ok, File, ImportFiles};
get_file(Module, [_Other | Rest]) ->
    get_file(Module, Rest);
get_file(_Module, []) ->
    false.
%% Find the .beam file for a module.  When code:which/1 gave ?TAG the
%% module is cover-compiled, so consult the compiled list; a .erl
%% entry there means no beam file is available.
get_beam_file(Module, ?TAG, Compiled) ->
    {Module, File} = lists:keyfind(Module, 1, Compiled),
    case filename:extension(File) of
        ".erl"  -> {error, no_beam};
        ".beam" -> {ok, File}
    end;
get_beam_file(_Module, BeamFile, _Compiled) ->
    {ok, BeamFile}.
%% Extract the module names from a compiled list of {Module,File}.
get_modules(Compiled) ->
    lists:map(fun({Mod, _File}) -> Mod end, Compiled).
%% Remove the entries for the given (unloaded) modules from the
%% compiled list; both lists are in the same relative order, so a
%% single parallel walk suffices.
update_compiled([Mod|Mods], [{Mod,_File}|Rest]) ->
    update_compiled(Mods, Rest);
update_compiled(Mods, [Keep|Rest]) ->
    [Keep | update_compiled(Mods, Rest)];
update_compiled(_Mods, []) ->
    [].
%% Get all compiled modules which are still loaded, and possibly an
%% updated version of the Compiled list.
%% Return {LoadedModules, Compiled}: the cover-compiled modules still
%% loaded, and the compiled list with unloaded modules purged (their
%% local data is cleared and remote nodes are told to unload them).
%% (Fix: the two inline comments below had lost their %% prefixes,
%% which broke the syntax of this function.)
get_compiled_still_loaded(Nodes,Compiled0) ->
    %% Find all Cover compiled modules which are still loaded
    CompiledModules = get_modules(Compiled0),
    LoadedModules = lists:filter(fun(Module) ->
                                         case code:which(Module) of
                                             ?TAG -> true;
                                             _ -> false
                                         end
                                 end,
                                 CompiledModules),
    %% If some Cover compiled modules have been unloaded, update the database.
    UnloadedModules = CompiledModules--LoadedModules,
    Compiled =
        case UnloadedModules of
            [] ->
                Compiled0;
            _ ->
                lists:foreach(fun(Module) -> do_clear(Module) end,
                              UnloadedModules),
                remote_unload(Nodes,UnloadedModules),
                update_compiled(UnloadedModules, Compiled0)
        end,
    {LoadedModules,Compiled}.
%%%--Compilation---------------------------------------------------------
%% do_compile(File, Options) -> {ok,Module} | {error,Error}
%% Compile File from source with debug_info (needed to obtain the
%% abstract code), then cover-compile the resulting binary.
do_compile(File, UserOptions) ->
    Options = [debug_info, binary, report_errors, report_warnings
               | UserOptions],
    case compile:file(File, Options) of
        {ok, Module, Binary} ->
            do_compile_beam(Module, Binary, UserOptions);
        error ->
            error
    end.
%% Beam is a binary or a .beam file name
%% Cover-compile one module.  Beam is a binary or a .beam file name.
%% Fetches the abstract code, instruments it via transform/2, compiles
%% and loads the result, and records clause info plus the instrumented
%% binary (so remote nodes can be loaded later).
%% Returns {ok,Module} | error | {error,Reason}.
do_compile_beam(Module,Beam,UserOptions) ->
    %% Clear database
    do_clear(Module),
    %% Extract the abstract format and insert calls to bump/6 at
    %% every executable line and, as a side effect, initiate
    %% the database
    case get_abstract_code(Module, Beam) of
        no_abstract_code=E ->
            {error,E};
        encrypted_abstract_code=E ->
            {error,E};
        {raw_abstract_v1,Code} ->
            Forms0 = epp:interpret_file_attribute(Code),
            {Forms,Vars} = transform(Forms0, Module),
            %% We need to recover the source from the compilation
            %% info otherwise the newly compiled module will have
            %% source pointing to the current directory
            SourceInfo = get_source_info(Module, Beam),
            %% Compile and load the result
            %% It's necessary to check the result of loading since it may
            %% fail, for example if Module resides in a sticky directory
            {ok, Module, Binary} = compile:forms(Forms, SourceInfo ++ UserOptions),
            case code:load_binary(Module, ?TAG, Binary) of
                {module, Module} ->
                    %% Store info about all function clauses in database
                    InitInfo = lists:reverse(Vars#vars.init_info),
                    ets:insert(?COVER_CLAUSE_TABLE, {Module, InitInfo}),
                    %% Store binary code so it can be loaded on remote nodes
                    ets:insert(?BINARY_TABLE, {Module, Binary}),
                    {ok, Module};
                _Error ->
                    %% Loading failed: roll back the database entries.
                    do_clear(Module),
                    error
            end;
        {_VSN,_Code} ->
            %% Wrong version of abstract code. Just report that there
            %% is no abstract code.
            {error,no_abstract_code}
    end.
%% Fetch the abstract_code chunk from the beam.  Returns the chunk
%% contents, 'encrypted_abstract_code' when the debug-info key is
%% missing or invalid, or the beam_lib error tuple as-is.
get_abstract_code(Module, Beam) ->
    case beam_lib:chunks(Beam, [abstract_code]) of
        {ok, {Module, [{abstract_code, AbstractCode}]}} ->
            AbstractCode;
        {error, beam_lib, {key_missing_or_invalid, _, _}} ->
            encrypted_abstract_code;
        Error ->
            Error
    end.
%% Pick the 'source' entry out of the compile info so the recompiled
%% module keeps its original source path.
get_source_info(Module, Beam) ->
    case lists:keyfind(source, 1, get_compile_info(Module, Beam)) of
        {source, _} = SourceTuple -> [SourceTuple];
        false -> []
    end.
%% Fetch the original compile options from the beam's compile info,
%% passed through filter_options/1.
get_compile_options(Module, Beam) ->
    case lists:keyfind(options, 1, get_compile_info(Module, Beam)) of
        {options, Options} -> filter_options(Options);
        false -> []
    end.
%% Read the compile_info chunk; any failure maps to the empty list.
get_compile_info(Module, Beam) ->
    case beam_lib:chunks(Beam, [compile_info]) of
        {ok, {Module, [{compile_info, Compile}]}} ->
            Compile;
        _ ->
            []
    end.
%% Instrument a whole module's abstract code.  Returns the munged
%% forms together with the final #vars{} accumulator.
transform(Code, Module) ->
    MainFile = find_main_filename(Code),
    {ok, MungedForms, Vars} =
        transform_2(Code, [], #vars{module=Module}, MainFile, on),
    {MungedForms, Vars}.
%% Helpfunction which returns the first found file-attribute, which can
%% be interpreted as the name of the main erlang source file.
%% The first file attribute names the main source file.
find_main_filename([{attribute,_,file,{MainFile,_}} | _]) ->
    MainFile;
find_main_filename([_ | Rest]) ->
    find_main_filename(Rest).
%% Munge the forms one by one, threading the #vars{} accumulator and
%% the on/off switch (the switch controls whether function bodies are
%% instrumented; it changes at file attributes, see munge/4).
transform_2([Form0|Forms],MungedForms,Vars,MainFile,Switch) ->
    Form = expand(Form0),
    case munge(Form,Vars,MainFile,Switch) of
        ignore ->
            transform_2(Forms,MungedForms,Vars,MainFile,Switch);
        {MungedForm,Vars2,NewSwitch} ->
            transform_2(Forms,[MungedForm|MungedForms],Vars2,MainFile,NewSwitch)
    end;
transform_2([],MungedForms,Vars,_,_) ->
    {ok, lists:reverse(MungedForms), Vars}.
%% Expand short-circuit Boolean expressions.
%% Expand short-circuit Boolean expressions.  All variable names in
%% the expression are collected up front so the auxiliary variables
%% introduced by the expansion cannot capture user variables.
expand(Expr) ->
    AllVars = sets:from_list(ordsets:to_list(vars([], Expr))),
    element(1, expand(Expr, AllVars, 1)).
%% Rewrite 'andalso'/'orelse' into equivalent case expressions
%% (bool_switch/5) so that each boolean branch can be bumped
%% separately.  N numbers the auxiliary variables introduced; Vs is
%% the set of all variable names in the expression.
expand({clause,Line,Pattern,Guards,Body}, Vs, N) ->
    %% Only the clause body is expanded; pattern and guards are kept.
    {ExpandedBody,N2} = expand(Body, Vs, N),
    {{clause,Line,Pattern,Guards,ExpandedBody},N2};
expand({op,_Line,'andalso',ExprL,ExprR}, Vs, N) ->
    {ExpandedExprL,N2} = expand(ExprL, Vs, N),
    {ExpandedExprR,N3} = expand(ExprR, Vs, N2),
    LineL = element(2, ExpandedExprL),
    {bool_switch(ExpandedExprL,
                 ExpandedExprR,
                 {atom,LineL,false},
                 Vs, N3),
     N3 + 1};
expand({op,_Line,'orelse',ExprL,ExprR}, Vs, N) ->
    {ExpandedExprL,N2} = expand(ExprL, Vs, N),
    {ExpandedExprR,N3} = expand(ExprR, Vs, N2),
    LineL = element(2, ExpandedExprL),
    {bool_switch(ExpandedExprL,
                 {atom,LineL,true},
                 ExpandedExprR,
                 Vs, N3),
     N3 + 1};
expand(T, Vs, N) when is_tuple(T) ->
    %% Generic traversal of any other abstract-code node.
    {TL,N2} = expand(tuple_to_list(T), Vs, N),
    {list_to_tuple(TL),N2};
expand([E|Es], Vs, N) ->
    {E2,N2} = expand(E, Vs, N),
    {Es2,N3} = expand(Es, Vs, N2),
    {[E2|Es2],N3};
expand(T, _Vs, N) ->
    {T,N}.
%% Collect (onto A) the names of all variables occurring in a term,
%% ignoring the anonymous variable '_'.
vars(A, {var,_,V}) when V =/= '_' ->
    [V|A];
vars(A, T) when is_tuple(T) ->
    vars(A, tuple_to_list(T));
vars(A, [H|T]) ->
    vars(vars(A, H), T);
vars(A, _) ->
    A.
%% Build a case expression equivalent to a short-circuit operator:
%% true/false branches plus a catch-all that raises {badarg,Value},
%% matching the runtime behaviour of andalso/orelse on non-booleans.
bool_switch(Cond, TrueExpr, FalseExpr, AllVars, AuxVarN) ->
    Line = element(2, Cond),
    AuxVar = {var,Line,aux_var(AllVars, AuxVarN)},
    {'case',Line,Cond,
     [{clause,Line,[{atom,Line,true}],[],[TrueExpr]},
      {clause,Line,[{atom,Line,false}],[],[FalseExpr]},
      {clause,Line,[AuxVar],[],
       [{call,Line,
         {remote,Line,{atom,Line,erlang},{atom,Line,error}},
         [{tuple,Line,[{atom,Line,badarg},AuxVar]}]}]}]}.
%% First variable name of the form '_N' (N >= the given start) that is
%% not already used in the expression.
aux_var(Vars, N) ->
    Candidate = list_to_atom("_" ++ integer_to_list(N)),
    case sets:is_element(Candidate, Vars) of
        true  -> aux_var(Vars, N + 1);
        false -> Candidate
    end.
%% This code traverses the abstract code, stored as the abstract_code
%% chunk in the BEAM file, as described in absform(3).
%% The switch is turned off when we encounter other files than the main file.
%% This way we will be able to exclude functions defined in include files.
%% Transform one top-level form.  The Switch argument is 'on' while we
%% are inside the main source file and 'off' inside included files, so
%% only functions defined in the main file are instrumented.
%% (Fix: the two file-attribute clauses had lost their bodies - only
%% the trailing comments survived extraction - making this function a
%% syntax error; the bodies {Form,Vars,on} / {Form,Vars,off} are
%% restored.)
munge({function,Line,Function,Arity,Clauses},Vars,_MainFile,on) ->
    %% Reset the per-function state before munging the clauses.
    Vars2 = Vars#vars{function=Function,
                      arity=Arity,
                      clause=1,
                      lines=[],
                      no_bump_lines=[],
                      depth=1},
    {MungedClauses, Vars3} = munge_clauses(Clauses, Vars2),
    {{function,Line,Function,Arity,MungedClauses},Vars3,on};
munge(Form={attribute,_,file,{MainFile,_}},Vars,MainFile,_Switch) ->
    {Form,Vars,on};                     % Switch on transformation!
munge(Form={attribute,_,file,{_InclFile,_}},Vars,_MainFile,_Switch) ->
    {Form,Vars,off};                    % Switch off transformation!
munge({attribute,_,compile,{parse_transform,_}},_Vars,_MainFile,_Switch) ->
    %% Don't want to run parse transforms more than once.
    ignore;
munge(Form,Vars,_MainFile,Switch) -> % Other attributes and skipped includes.
    {Form,Vars,Switch}.
%% Munge the clauses of a function (depth 1) or of a nested construct
%% such as case/receive/if/try (depth 2).
munge_clauses(Clauses, Vars) ->
    munge_clauses(Clauses, Vars, Vars#vars.lines, []).

munge_clauses([Clause|Clauses], Vars, Lines, MClauses) ->
    {clause,Line,Pattern,Guards,Body} = Clause,
    {MungedGuards, _Vars} = munge_exprs(Guards, Vars#vars{is_guard=true},[]),
    case Vars#vars.depth of
        1 -> % function clause
            {MungedBody, Vars2} = munge_body(Body, Vars#vars{depth=2}),
            ClauseInfo = {Vars2#vars.module,
                          Vars2#vars.function,
                          Vars2#vars.arity,
                          Vars2#vars.clause,
                          length(Vars2#vars.lines)}, % Not used?
            InitInfo = [ClauseInfo | Vars2#vars.init_info],
            %% Reset the per-clause state before the next clause.
            Vars3 = Vars2#vars{init_info=InitInfo,
                               clause=(Vars2#vars.clause)+1,
                               lines=[],
                               no_bump_lines=[],
                               depth=1},
            NewBumps = Vars2#vars.lines,
            NewLines = NewBumps ++ Lines,
            munge_clauses(Clauses, Vars3, NewLines,
                          [{clause,Line,Pattern,MungedGuards,MungedBody}|
                           MClauses]);
        2 -> % receive-, case-, if-, or try-clause
            %% Restore the pre-clause lines afterwards so sibling
            %% clauses may bump the same source lines independently.
            Lines0 = Vars#vars.lines,
            {MungedBody, Vars2} = munge_body(Body, Vars),
            NewBumps = new_bumps(Vars2, Vars),
            NewLines = NewBumps ++ Lines,
            munge_clauses(Clauses, Vars2#vars{lines=Lines0},
                          NewLines,
                          [{clause,Line,Pattern,MungedGuards,MungedBody}|
                           MClauses])
    end;
munge_clauses([], Vars, Lines, MungedClauses) ->
    {lists:reverse(MungedClauses), Vars#vars{lines = Lines}}.
%% Instrument a clause body: before each expression whose line has not
%% yet been bumped in this evaluation, insert a bump call and create
%% the corresponding zero row in ?COVER_TABLE.
%% (Fix: three lines of the commented-out legacy Bump construction had
%% lost their leading %, breaking the syntax; the % prefixes are
%% restored.)
munge_body(Expr, Vars) ->
    munge_body(Expr, Vars, [], []).

munge_body([Expr|Body], Vars, MungedBody, LastExprBumpLines) ->
    %% Here is the place to add a call to cover:bump/6!
    Line = element(2, Expr),
    Lines = Vars#vars.lines,
    case lists:member(Line,Lines) of
        true -> % already a bump at this line
            {MungedExpr, Vars2} = munge_expr(Expr, Vars),
            NewBumps = new_bumps(Vars2, Vars),
            NoBumpLines = [Line|Vars#vars.no_bump_lines],
            Vars3 = Vars2#vars{no_bump_lines = NoBumpLines},
            MungedBody1 =
                maybe_fix_last_expr(MungedBody, Vars3, LastExprBumpLines),
            MungedExprs1 = [MungedExpr|MungedBody1],
            munge_body(Body, Vars3, MungedExprs1, NewBumps);
        false ->
            ets:insert(?COVER_TABLE, {#bump{module = Vars#vars.module,
                                            function = Vars#vars.function,
                                            arity = Vars#vars.arity,
                                            clause = Vars#vars.clause,
                                            line = Line},
                                      0}),
            Bump = bump_call(Vars, Line),
%           Bump = {call, 0, {remote, 0, {atom,0,cover}, {atom,0,bump}},
%                   [{atom, 0, Vars#vars.module},
%                    {atom, 0, Vars#vars.function},
%                    {integer, 0, Vars#vars.arity},
%                    {integer, 0, Vars#vars.clause},
%                    {integer, 0, Line}]},
            Lines2 = [Line|Lines],
            {MungedExpr, Vars2} = munge_expr(Expr, Vars#vars{lines=Lines2}),
            NewBumps = new_bumps(Vars2, Vars),
            NoBumpLines = subtract(Vars2#vars.no_bump_lines, NewBumps),
            Vars3 = Vars2#vars{no_bump_lines = NoBumpLines},
            MungedBody1 =
                maybe_fix_last_expr(MungedBody, Vars3, LastExprBumpLines),
            MungedExprs1 = [MungedExpr,Bump|MungedBody1],
            munge_body(Body, Vars3, MungedExprs1, NewBumps)
    end;
munge_body([], Vars, MungedBody, _LastExprBumpLines) ->
    {lists:reverse(MungedBody), Vars}.
%%% Fix last expression (OTP-8188). A typical example:
%%%
%%%  3:   case X of
%%%  4:       1 -> a; % Bump line 5 after "a" has been evaluated!
%%%  5:       2 -> b; 3 -> c end, F()
%%%
%%% Line 5 wasn't bumped just before "F()" since it was already bumped
%%% before "b" (and before "c") (one mustn't bump a line more than
%%% once in a single "evaluation"). The expression "case X ... end" is
%%% now traversed again ("fixed"), this time adding bumps of line 5
%%% where appropriate, in this case when X matches 1.
%%%
%%% This doesn't solve all problems with expressions on the same line,
%%% though. 'case' and 'try' are tricky. An example:
%%%
%%%  7:    case case X of 1 -> foo(); % ?
%%%  8:                   2 -> bar() end of a -> 1;
%%%  9:                                    b -> 2 end.
%%%
%%% If X matches 1 and foo() evaluates to a then line 8 should be
%%% bumped, but not if foo() evaluates to b. In other words, line 8
%%% cannot be bumped after "foo()" on line 7, so one has to bump line
%%% 8 before "begin 1 end". But if X matches 2 and bar evaluates to a
%%% then line 8 would be bumped twice (there has to be a bump before
%%% "bar()". It is like one would have to have two copies of the inner
%%% clauses, one for each outer clause. Maybe the munging should be
%%% done on some of the compiler's "lower level" format.
%%%
%%% 'fun' is also problematic since a bump inside the body "shadows"
%%% the rest of the line.
%% Apply the OTP-8188 fix to the previously munged expression when
%% exactly one un-bumped line carries over from it.
maybe_fix_last_expr(MungedExprs, Vars, LastExprBumpLines) ->
    case last_expr_needs_fixing(Vars, LastExprBumpLines) of
        {yes, Line} -> fix_last_expr(MungedExprs, Line, Vars);
        no          -> MungedExprs
    end.
%% The last expression needs fixing iff exactly one of its bumped
%% lines is also a no-bump line of the current expression.
last_expr_needs_fixing(Vars, LastExprBumpLines) ->
    case common_elems(Vars#vars.no_bump_lines, LastExprBumpLines) of
        [Line] -> {yes, Line};
        _      -> no
    end.
%% Rebuild the most recent munged expression with bumps of Line added
%% where needed.  No need to update ?COVER_TABLE: the line already has
%% a row.
fix_last_expr([MungedExpr|MungedExprs], Line, Vars) ->
    [fix_expr(MungedExpr, Line, bump_call(Vars, Line)) | MungedExprs].
%% Walk a munged expression and fix every clause-bearing construct so
%% that clauses which do not already bump Line get the Bump call
%% appended (see fix_clauses/3 / fix_cls/3).  Other nodes are
%% traversed generically.
fix_expr({'if',L,Clauses}, Line, Bump) ->
    FixedClauses = fix_clauses(Clauses, Line, Bump),
    {'if',L,FixedClauses};
fix_expr({'case',L,Expr,Clauses}, Line, Bump) ->
    FixedExpr = fix_expr(Expr, Line, Bump),
    FixedClauses = fix_clauses(Clauses, Line, Bump),
    {'case',L,FixedExpr,FixedClauses};
fix_expr({'receive',L,Clauses}, Line, Bump) ->
    FixedClauses = fix_clauses(Clauses, Line, Bump),
    {'receive',L,FixedClauses};
fix_expr({'receive',L,Clauses,Expr,Body}, Line, Bump) ->
    FixedClauses = fix_clauses(Clauses, Line, Bump),
    FixedExpr = fix_expr(Expr, Line, Bump),
    FixedBody = fix_expr(Body, Line, Bump),
    {'receive',L,FixedClauses,FixedExpr,FixedBody};
fix_expr({'try',L,Exprs,Clauses,CatchClauses,After}, Line, Bump) ->
    FixedExprs = fix_expr(Exprs, Line, Bump),
    FixedClauses = fix_clauses(Clauses, Line, Bump),
    FixedCatchClauses = fix_clauses(CatchClauses, Line, Bump),
    FixedAfter = fix_expr(After, Line, Bump),
    {'try',L,FixedExprs,FixedClauses,FixedCatchClauses,FixedAfter};
fix_expr([E | Es], Line, Bump) ->
    [fix_expr(E, Line, Bump) | fix_expr(Es, Line, Bump)];
fix_expr(T, Line, Bump) when is_tuple(T) ->
    list_to_tuple(fix_expr(tuple_to_list(T), Line, Bump));
fix_expr(E, _Line, _Bump) ->
    E.
%% Only fix the clause list when the last clause bumps Line; otherwise
%% leave all clauses untouched.
fix_clauses(Cs, Line, Bump) ->
    case bumps_line(lists:last(Cs), Line) of
        true  -> fix_cls(Cs, Line, Bump);
        false -> Cs
    end.
%% For a clause that already bumps Line, recurse into it (and all the
%% following clauses) with fix_expr/3.  For a clause that does not:
%% bind its last expression to a unique variable, evaluate Bump, then
%% return the variable - preserving the clause's value while bumping.
fix_cls([], _Line, _Bump) ->
    [];
fix_cls([Cl | Cls], Line, Bump) ->
    case bumps_line(Cl, Line) of
        true ->
            [fix_expr(C, Line, Bump) || C <- [Cl | Cls]];
        false ->
            {clause,CL,P,G,Body} = Cl,
            %% The "$cover$ " prefix cannot occur in user-written
            %% variable names, so the variable is capture-safe.
            UniqueVarName = list_to_atom(lists:concat(["$cover$ ",Line])),
            V = {var,0,UniqueVarName},
            [Last|Rest] = lists:reverse(Body),
            Body1 = lists:reverse(Rest, [{match,0,V,Last},Bump,V]),
            [{clause,CL,P,G,Body1} | fix_cls(Cls, Line, Bump)]
    end.
%% Does expression E contain a bump of line L?  bumps_line1/2 throws
%% 'true' on the first hit, which is caught here; otherwise it returns
%% false.
bumps_line(E, L) ->
    try bumps_line1(E, L) catch true -> true end.
%% Throws 'true' when a bump call - ets:update_counter on
%% ?COVER_TABLE with the given line in its #bump key - is found
%% anywhere in the term; returns false otherwise.
bumps_line1({call,0,{remote,0,{atom,0,ets},{atom,0,update_counter}},
             [{atom,0,?COVER_TABLE},{tuple,0,[_,_,_,_,_,{integer,0,Line}]},_]},
            Line) ->
    throw(true);
bumps_line1([E | Es], Line) ->
    bumps_line1(E, Line),
    bumps_line1(Es, Line);
bumps_line1(T, Line) when is_tuple(T) ->
    bumps_line1(tuple_to_list(T), Line);
bumps_line1(_, _) ->
    false.
%%% End of fix of last expression.
%% Abstract code for a bump:
%%   ets:update_counter(?COVER_TABLE, #bump{module,...,line}, 1)
bump_call(Vars, Line) ->
    {call,0,{remote,0,{atom,0,ets},{atom,0,update_counter}},
     [{atom,0,?COVER_TABLE},
      {tuple,0,[{atom,0,?BUMP_REC_NAME},
                {atom,0,Vars#vars.module},
                {atom,0,Vars#vars.function},
                {integer,0,Vars#vars.arity},
                {integer,0,Vars#vars.clause},
                {integer,0,Line}]},
      {integer,0,1}]}.
%% Munge one expression, recursing into every subexpression; bodies of
%% nested constructs go through munge_body/2 so bump calls get
%% inserted.  Returns {MungedExpr, NewVars}.
%% (Fix: the four "EEP 43" map-related comments had lost their %%
%% prefixes, breaking the syntax of this function; they are restored.)
munge_expr({match,Line,ExprL,ExprR}, Vars) ->
    {MungedExprL, Vars2} = munge_expr(ExprL, Vars),
    {MungedExprR, Vars3} = munge_expr(ExprR, Vars2),
    {{match,Line,MungedExprL,MungedExprR}, Vars3};
munge_expr({tuple,Line,Exprs}, Vars) ->
    {MungedExprs, Vars2} = munge_exprs(Exprs, Vars, []),
    {{tuple,Line,MungedExprs}, Vars2};
munge_expr({record,Line,Name,Exprs}, Vars) ->
    {MungedExprFields, Vars2} = munge_exprs(Exprs, Vars, []),
    {{record,Line,Name,MungedExprFields}, Vars2};
munge_expr({record,Line,Arg,Name,Exprs}, Vars) ->
    {MungedArg, Vars2} = munge_expr(Arg, Vars),
    {MungedExprFields, Vars3} = munge_exprs(Exprs, Vars2, []),
    {{record,Line,MungedArg,Name,MungedExprFields}, Vars3};
munge_expr({record_field,Line,ExprL,ExprR}, Vars) ->
    {MungedExprR, Vars2} = munge_expr(ExprR, Vars),
    {{record_field,Line,ExprL,MungedExprR}, Vars2};
munge_expr({map,Line,Fields}, Vars) ->
    %% EEP 43
    {MungedFields, Vars2} = munge_exprs(Fields, Vars, []),
    {{map,Line,MungedFields}, Vars2};
munge_expr({map,Line,Arg,Fields}, Vars) ->
    %% EEP 43
    {MungedArg, Vars2} = munge_expr(Arg, Vars),
    {MungedFields, Vars3} = munge_exprs(Fields, Vars2, []),
    {{map,Line,MungedArg,MungedFields}, Vars3};
munge_expr({map_field_assoc,Line,Name,Value}, Vars) ->
    %% EEP 43
    {MungedName, Vars2} = munge_expr(Name, Vars),
    {MungedValue, Vars3} = munge_expr(Value, Vars2),
    {{map_field_assoc,Line,MungedName,MungedValue}, Vars3};
munge_expr({map_field_exact,Line,Name,Value}, Vars) ->
    %% EEP 43
    {MungedName, Vars2} = munge_expr(Name, Vars),
    {MungedValue, Vars3} = munge_expr(Value, Vars2),
    {{map_field_exact,Line,MungedName,MungedValue}, Vars3};
munge_expr({cons,Line,ExprH,ExprT}, Vars) ->
    {MungedExprH, Vars2} = munge_expr(ExprH, Vars),
    {MungedExprT, Vars3} = munge_expr(ExprT, Vars2),
    {{cons,Line,MungedExprH,MungedExprT}, Vars3};
munge_expr({op,Line,Op,ExprL,ExprR}, Vars) ->
    {MungedExprL, Vars2} = munge_expr(ExprL, Vars),
    {MungedExprR, Vars3} = munge_expr(ExprR, Vars2),
    {{op,Line,Op,MungedExprL,MungedExprR}, Vars3};
munge_expr({op,Line,Op,Expr}, Vars) ->
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    {{op,Line,Op,MungedExpr}, Vars2};
munge_expr({'catch',Line,Expr}, Vars) ->
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    {{'catch',Line,MungedExpr}, Vars2};
munge_expr({call,Line1,{remote,Line2,ExprM,ExprF},Exprs},
           Vars) ->
    {MungedExprM, Vars2} = munge_expr(ExprM, Vars),
    {MungedExprF, Vars3} = munge_expr(ExprF, Vars2),
    {MungedExprs, Vars4} = munge_exprs(Exprs, Vars3, []),
    {{call,Line1,{remote,Line2,MungedExprM,MungedExprF},MungedExprs}, Vars4};
munge_expr({call,Line,Expr,Exprs}, Vars) ->
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    {MungedExprs, Vars3} = munge_exprs(Exprs, Vars2, []),
    {{call,Line,MungedExpr,MungedExprs}, Vars3};
munge_expr({lc,Line,Expr,Qs}, Vars) ->
    %% Wrap the template in a block so it gets a bump of its own.
    {MungedExpr, Vars2} = munge_expr(?BLOCK1(Expr), Vars),
    {MungedQs, Vars3} = munge_qualifiers(Qs, Vars2),
    {{lc,Line,MungedExpr,MungedQs}, Vars3};
munge_expr({bc,Line,Expr,Qs}, Vars) ->
    %% Same for binary comprehensions: block-wrap the first element's
    %% value expression.
    {bin,BLine,[{bin_element,EL,Val,Sz,TSL}|Es]} = Expr,
    Expr2 = {bin,BLine,[{bin_element,EL,?BLOCK1(Val),Sz,TSL}|Es]},
    {MungedExpr,Vars2} = munge_expr(Expr2, Vars),
    {MungedQs, Vars3} = munge_qualifiers(Qs, Vars2),
    {{bc,Line,MungedExpr,MungedQs}, Vars3};
munge_expr({block,Line,Body}, Vars) ->
    {MungedBody, Vars2} = munge_body(Body, Vars),
    {{block,Line,MungedBody}, Vars2};
munge_expr({'if',Line,Clauses}, Vars) ->
    {MungedClauses,Vars2} = munge_clauses(Clauses, Vars),
    {{'if',Line,MungedClauses}, Vars2};
munge_expr({'case',Line,Expr,Clauses}, Vars) ->
    {MungedExpr,Vars2} = munge_expr(Expr, Vars),
    {MungedClauses,Vars3} = munge_clauses(Clauses, Vars2),
    {{'case',Line,MungedExpr,MungedClauses}, Vars3};
munge_expr({'receive',Line,Clauses}, Vars) ->
    {MungedClauses,Vars2} = munge_clauses(Clauses, Vars),
    {{'receive',Line,MungedClauses}, Vars2};
munge_expr({'receive',Line,Clauses,Expr,Body}, Vars) ->
    {MungedExpr, Vars1} = munge_expr(Expr, Vars),
    {MungedClauses,Vars2} = munge_clauses(Clauses, Vars1),
    {MungedBody,Vars3} =
        munge_body(Body, Vars2#vars{lines = Vars1#vars.lines}),
    Vars4 = Vars3#vars{lines = Vars2#vars.lines ++ new_bumps(Vars3, Vars2)},
    {{'receive',Line,MungedClauses,MungedExpr,MungedBody}, Vars4};
munge_expr({'try',Line,Body,Clauses,CatchClauses,After}, Vars) ->
    {MungedBody, Vars1} = munge_body(Body, Vars),
    {MungedClauses, Vars2} = munge_clauses(Clauses, Vars1),
    {MungedCatchClauses, Vars3} = munge_clauses(CatchClauses, Vars2),
    {MungedAfter, Vars4} = munge_body(After, Vars3),
    {{'try',Line,MungedBody,MungedClauses,MungedCatchClauses,MungedAfter},
     Vars4};
munge_expr({'fun',Line,{clauses,Clauses}}, Vars) ->
    {MungedClauses,Vars2}=munge_clauses(Clauses, Vars),
    {{'fun',Line,{clauses,MungedClauses}}, Vars2};
munge_expr({named_fun,Line,Name,Clauses}, Vars) ->
    {MungedClauses,Vars2}=munge_clauses(Clauses, Vars),
    {{named_fun,Line,Name,MungedClauses}, Vars2};
munge_expr({bin,Line,BinElements}, Vars) ->
    {MungedBinElements,Vars2} = munge_exprs(BinElements, Vars, []),
    {{bin,Line,MungedBinElements}, Vars2};
munge_expr({bin_element,Line,Value,Size,TypeSpecifierList}, Vars) ->
    {MungedValue,Vars2} = munge_expr(Value, Vars),
    {MungedSize,Vars3} = munge_expr(Size, Vars2),
    {{bin_element,Line,MungedValue,MungedSize,TypeSpecifierList},Vars3};
munge_expr(Form, Vars) -> % var|char|integer|float|string|atom|nil|eof|default
    {Form, Vars}.
%% Munge a list of expressions.  Inside a guard, an element may itself
%% be a list (a guard sequence); such lists are munged without
%% threading the resulting variable state back.
munge_exprs([Expr|Exprs], Vars, MungedExprs) when Vars#vars.is_guard=:=true,
                                                  is_list(Expr) ->
    {MungedExpr, _Vars} = munge_exprs(Expr, Vars, []),
    munge_exprs(Exprs, Vars, [MungedExpr|MungedExprs]);
munge_exprs([Expr|Exprs], Vars, MungedExprs) ->
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    munge_exprs(Exprs, Vars2, [MungedExpr|MungedExprs]);
munge_exprs([], Vars, MungedExprs) ->
    {lists:reverse(MungedExprs), Vars}.
%% Every qualifier is decorated with a counter.
%% Munge the qualifier list of a list/binary comprehension.
munge_qualifiers(Qualifiers, Vars) ->
    munge_qs(Qualifiers, Vars, []).
%% Munge one qualifier at a time.  For generators only the source
%% expression is munged (the pattern is left alone); munge_qs1/6
%% decides whether an extra bumped 'true' qualifier is needed.
munge_qs([{generate,Line,Pattern,Expr}|Qs], Vars, MQs) ->
    L = element(2, Expr),
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    munge_qs1(Qs, L, {generate,Line,Pattern,MungedExpr}, Vars, Vars2, MQs);
munge_qs([{b_generate,Line,Pattern,Expr}|Qs], Vars, MQs) ->
    L = element(2, Expr),
    {MExpr, Vars2} = munge_expr(Expr, Vars),
    munge_qs1(Qs, L, {b_generate,Line,Pattern,MExpr}, Vars, Vars2, MQs);
munge_qs([Expr|Qs], Vars, MQs) ->
    L = element(2, Expr),
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    munge_qs1(Qs, L, MungedExpr, Vars, Vars2, MQs);
munge_qs([], Vars, MQs) ->
    {lists:reverse(MQs), Vars}.
%% If munging the qualifier introduced exactly one new bump it counts
%% for this qualifier; otherwise insert an extra munged ?BLOCK(true)
%% qualifier so the qualifier's line gets a bump of its own.
munge_qs1(Qs, Line, NQ, Vars, Vars2, MQs) ->
    case new_bumps(Vars2, Vars) of
        [_] ->
            munge_qs(Qs, Vars2, [NQ | MQs]);
        _ ->
            {MungedTrue, Vars3} = munge_expr(?BLOCK({atom,Line,true}), Vars2),
            munge_qs(Qs, Vars3, [NQ, MungedTrue | MQs])
    end.
%% Lines bumped in the newer state but not in the older one.
new_bumps(#vars{lines = New}, #vars{lines = Old}) ->
    subtract(New, Old).
%% Elements of L1 that do not occur in L2 (order preserved).
subtract(L1, L2) ->
    lists:filter(fun(E) -> not lists:member(E, L2) end, L1).
%% Elements of L1 that also occur in L2 (order of L1 preserved).
common_elems(L1, L2) ->
    lists:filter(fun(E) -> lists:member(E, L2) end, L1).
%%%--Analysis------------------------------------------------------------
%% Collect data for all modules
collect(Nodes) ->
    %% local node: move every module's data, in parallel per module
    AllClauses = ets:tab2list(?COVER_CLAUSE_TABLE),
    pmap(fun move_modules/1,AllClauses),
    %% remote nodes ('_' = all modules, without stopping them)
    remote_collect('_',Nodes,false).
%% Collect data for one module
%% Collect data for one module, locally and on all remote nodes.
collect(Module, Clauses, Nodes) ->
    move_modules({Module, Clauses}),
    remote_collect(Module, Nodes, false).
%% When analysing, the data from the local ?COVER_TABLE is moved to the
%% ?COLLECTION_TABLE. Resetting data in ?COVER_TABLE
%% Move one module's clause info and bump counters from the local
%% tables into the collection tables.
move_modules({Module,Clauses}) ->
    ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}),
    move_clauses(Clauses).
%% For each clause: read its bump rows, zero the counters in
%% ?COVER_TABLE and accumulate the old values into the collection
%% table.
move_clauses([{M,F,A,C,_L}|Clauses]) ->
    Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'},
    Bumps = ets:match_object(?COVER_TABLE,Pattern),
    lists:foreach(fun({Key,Val}) ->
                          ets:insert(?COVER_TABLE, {Key,0}),
                          insert_in_collection_table(Key,Val)
                  end,
                  Bumps),
    move_clauses(Clauses);
move_clauses([]) ->
    ok.
%% Given a .beam file, find the .erl file. Look first in same directory as
%% the .beam file, then in ../src, then in compile info.
%% Given a .beam file name, locate the corresponding .erl source.
%% Uses throw-as-success: the first existing candidate path is thrown
%% by throw_file/1 and caught below; {beam, File0} means no source
%% could be found.
%% (Fix: the "Look for .erl in pwd." comment had lost its %% prefix,
%% breaking the syntax of this function.)
find_source(Module, File0) ->
    try
        Root = filename:rootname(File0, ".beam"),
        Root == File0 andalso throw(File0), %% not .beam
        %% Look for .erl in pwd.
        File = Root ++ ".erl",
        throw_file(File),
        %% Not in pwd: look in ../src.
        BeamDir = filename:dirname(File),
        Base = filename:basename(File),
        throw_file(filename:join([BeamDir, "..", "src", Base])),
        %% Not in ../src: look for source path in compile info, but
        %% first look relative the beam directory.
        Info = lists:keyfind(source, 1, Module:module_info(compile)),
        false == Info andalso throw({beam, File0}), %% stripped
        {source, SrcFile} = Info,
        throw_file(splice(BeamDir, SrcFile)), %% below ../src
        throw_file(SrcFile), %% or absolute
        %% No success means that source is either not under ../src or
        %% its relative path differs from that of compile info. (For
        %% example, compiled under src/x but installed under src/y.)
        %% An option to specify an arbitrary source path explicitly is
        %% probably a better solution than either more heuristics or a
        %% potentially slow filesystem search.
        {beam, File0}
    catch
        Path -> Path
    end.
%% Throw Path (ending the search in find_source/2) when it names an
%% existing file; otherwise fall through by returning false.
throw_file(Path) ->
    Path =/= false andalso filelib:is_file(Path) andalso throw(Path).
%% Splice the tail of a source path, starting from the last "src"
%% component, onto the parent of a beam directory, or return false if
%% no "src" component is found.
%%
%% Eg. splice("/path/to/app-1.0/ebin", "/compiled/path/to/app/src/x/y.erl")
%% --> "/path/to/app-1.0/ebin/../src/x/y.erl"
%%
%% This handles the case of source in subdirectories of ../src with
%% beams that have moved since compilation.
%%
%% Re-root the tail of a source path (from its last "src" component)
%% under the parent of the beam directory; false when the path has no
%% "src" component.
splice(BeamDir, SrcFile) ->
    case lists:splitwith(fun(Part) -> Part /= "src" end,
                         revsplit(SrcFile)) of
        {Tail, [_Src|_]} ->
            filename:join([BeamDir, "..", "src" | lists:reverse(Tail)]);
        {_, []} ->
            false
    end.
%% Path components, last component first.
revsplit(Path) ->
    lists:foldl(fun(Component, Acc) -> [Component|Acc] end,
                [], filename:split(Path)).
%% Run one analysis request and send the result back to From.  For a
%% loaded module fresh data is collected first; for an imported one
%% the collection tables already hold the data.
do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) ->
    analyse_info(Module,State#main_state.imported),
    C = case Loaded of
            {loaded, _File} ->
                [{Module,Clauses}] =
                    ets:lookup(?COVER_CLAUSE_TABLE,Module),
                collect(Module,Clauses,State#main_state.nodes),
                Clauses;
            _ ->
                [{Module,Clauses}] =
                    ets:lookup(?COLLECTION_CLAUSE_TABLE,Module),
                Clauses
        end,
    R = do_analyse(Module, Analysis, Level, C),
    reply(From, R).
%% do_analyse(Module, Analysis, Level, Clauses)-> {ok,Answer} | {error,Error}
%% Clauses = [{Module,Function,Arity,Clause,Lines}]
%% Line level: one entry per executable line; for coverage a counter
%% of 0 maps to {0,1} (not covered) and anything else to {1,0}.
do_analyse(Module, Analysis, line, _Clauses) ->
    Pattern = {#bump{module=Module},'_'},
    Bumps = ets:match_object(?COLLECTION_TABLE, Pattern),
    Fun = case Analysis of
              coverage ->
                  fun({#bump{line=L}, 0}) ->
                          {{Module,L}, {0,1}};
                     ({#bump{line=L}, _N}) ->
                          {{Module,L}, {1,0}}
                  end;
              calls ->
                  fun({#bump{line=L}, N}) ->
                          {{Module,L}, N}
                  end
          end,
    Answer = lists:keysort(1, lists:map(Fun, Bumps)),
    {ok, Answer};
%% Clause level: Ls is the clause's line count; uncovered lines are
%% those whose counter is still 0.  For calls, the clause's call count
%% is the counter of its smallest #bump key (the clause's first line).
do_analyse(_Module, Analysis, clause, Clauses) ->
    Fun = case Analysis of
              coverage ->
                  fun({M,F,A,C,Ls}) ->
                          Pattern = {#bump{module=M,function=F,arity=A,
                                           clause=C},0},
                          Bumps = ets:match_object(?COLLECTION_TABLE, Pattern),
                          NotCov = length(Bumps),
                          {{M,F,A,C}, {Ls-NotCov, NotCov}}
                  end;
              calls ->
                  fun({M,F,A,C,_Ls}) ->
                          Pattern = {#bump{module=M,function=F,arity=A,
                                           clause=C},'_'},
                          Bumps = ets:match_object(?COLLECTION_TABLE, Pattern),
                          {_Bump, Calls} = hd(lists:keysort(1, Bumps)),
                          {{M,F,A,C}, Calls}
                  end
          end,
    Answer = lists:map(Fun, Clauses),
    {ok, Answer};
%% Function level: merge the clause results per {M,F,A}.
do_analyse(Module, Analysis, function, Clauses) ->
    {ok, ClauseResult} = do_analyse(Module, Analysis, clause, Clauses),
    Result = merge_clauses(ClauseResult, merge_fun(Analysis)),
    {ok, Result};
%% Module level: merge all function results into a single value.
do_analyse(Module, Analysis, module, Clauses) ->
    {ok, FunctionResult} = do_analyse(Module, Analysis, function, Clauses),
    Result = merge_functions(FunctionResult, merge_fun(Analysis)),
    {ok, {Module,Result}}.
%% Return the binary operation used to combine two analysis results:
%% pairwise addition of {Covered,NotCovered} pairs for coverage,
%% plain addition of call counts for calls.
merge_fun(coverage) ->
    fun({CovA, NotCovA}, {CovB, NotCovB}) ->
            {CovA + CovB, NotCovA + NotCovB}
    end;
merge_fun(calls) ->
    fun(CallsA, CallsB) -> CallsA + CallsB end.
%% Collapse per-clause results into per-function results.  Consecutive
%% entries for the same {M,F,A} are combined with MFun; the input
%% order of functions is preserved in the output.
merge_clauses(ClauseResults, MFun) ->
    merge_clauses(ClauseResults, MFun, []).

merge_clauses([{{Mod,Fun,Arity,_},Res1}, {{Mod,Fun,Arity,Cl2},Res2} | More],
              MFun, Acc) ->
    %% Next entry belongs to the same function: fold the two results.
    merge_clauses([{{Mod,Fun,Arity,Cl2}, MFun(Res1, Res2)} | More], MFun, Acc);
merge_clauses([{{Mod,Fun,Arity,_},Res} | More], MFun, Acc) ->
    %% Last clause of this function: emit its accumulated result.
    merge_clauses(More, MFun, [{{Mod,Fun,Arity},Res} | Acc]);
merge_clauses([], _MFun, Acc) ->
    lists:reverse(Acc).
%% Fold all per-function results into one value for the module using
%% MFun.  An empty input (no clauses at all) yields {0,0} -- nothing
%% covered and nothing uncovered.
merge_functions([{_MFA,FirstRes} | Rest], MFun) ->
    merge_functions(Rest, MFun, FirstRes);
merge_functions([], _MFun) ->
    {0,0}.

merge_functions([{_MFA,Res} | Rest], MFun, SoFar) ->
    merge_functions(Rest, MFun, MFun(SoFar, Res));
merge_functions([], _MFun, SoFar) ->
    SoFar.
%% Analyse one module to an output file, in its own process.  Collects
%% remote counters for loaded modules, locates the source file, and
%% sends {ok,OutFile} | {error,Reason} to From.
do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) ->
    File = case Loaded of
               {loaded, File0} ->
                   %% Loaded module: refresh the collection data with
                   %% counters from all remote nodes first.
                   [{Module,Clauses}] =
                       ets:lookup(?COVER_CLAUSE_TABLE,Module),
                   collect(Module, Clauses,
                           State#main_state.nodes),
                   File0;
               {imported, File0, _} ->
                   File0
           end,
    case find_source(Module, File) of
        {beam,_BeamFile} ->
            %% find_source fell back to the beam file itself, i.e. no
            %% .erl file could be located.
            reply(From, {error,no_source_code_found});
        ErlFile ->
            analyse_info(Module,State#main_state.imported),
            HTML = lists:member(html,Opts),
            R = do_analyse_to_file(Module,OutFile,
                                   ErlFile,HTML),
            reply(From, R)
    end.
%% do_analyse_to_file(Module, OutFile, ErlFile, HTML) ->
%%        {ok, OutFile} | {error, Error}
%%   Module = atom()
%%   OutFile = ErlFile = string()
%%   HTML = boolean(); emit an HTML page instead of plain text
%% Copy the source file to OutFile, prefixing every executable line
%% with its execution count (see print_lines/5).
do_analyse_to_file(Module, OutFile, ErlFile, HTML) ->
    case file:open(ErlFile, [read]) of
        {ok, InFd} ->
            case file:open(OutFile, [write]) of
                {ok, OutFd} ->
                    if HTML ->
                            %% HTML preamble; the charset is taken from
                            %% the source file so the copied text stays
                            %% readable.
                            Encoding = encoding(ErlFile),
                            Header =
                                ["<!DOCTYPE HTML PUBLIC "
                                 "\"-//W3C//DTD HTML 3.2 Final//EN\">\n"
                                 "<html>\n"
                                 "<head>\n"
                                 "<meta http-equiv=\"Content-Type\""
                                 " content=\"text/html; charset=",
                                 Encoding,"\"/>\n"
                                 "<title>",OutFile,"</title>\n"
                                 "</head>"
                                 "<body style='background-color: white;"
                                 " color: black'>\n"
                                 "<pre>\n"],
                            file:write(OutFd,Header);
                       true -> ok
                    end,

                    %% Write some initial information to the output file
                    {{Y,Mo,D},{H,Mi,S}} = calendar:local_time(),
                    Timestamp =
                        io_lib:format("~p-~s-~s at ~s:~s:~s",
                                      [Y,
                                       string:right(integer_to_list(Mo), 2, $0),
                                       string:right(integer_to_list(D), 2, $0),
                                       string:right(integer_to_list(H), 2, $0),
                                       string:right(integer_to_list(Mi), 2, $0),
                                       string:right(integer_to_list(S), 2, $0)]),
                    file:write(OutFd,
                               ["File generated from ",ErlFile," by COVER ",
                                Timestamp,"\n\n"
                                "**************************************"
                                "**************************************"
                                "\n\n"]),

                    print_lines(Module, InFd, OutFd, 1, HTML),

                    if HTML -> io:format(OutFd,"</pre>\n</body>\n</html>\n",[]);
                       true -> ok
                    end,

                    file:close(OutFd),
                    file:close(InFd),

                    {ok, OutFile};
                {error, Reason} ->
                    {error, {file, OutFile, Reason}}
            end;
        {error, Reason} ->
            {error, {file, ErlFile, Reason}}
    end.
%% Copy lines from InFd to OutFd, prefixing each executable line with
%% its call count.  L is the current (1-based) line number.  Counts
%% are right-aligned in a 6-character field; wider counts eat into the
%% "..|" filler so the source column stays aligned (see tab/0 and
%% fill1/0..fill3/0).  In HTML mode, uncovered lines are printed red.
%%
%% Fix: the zero-count branch bound Str twice -- first to the literal
%% and then, via a line that had lost its comment marker, to
%% string:right("0",6,32), which can only fail with badmatch.  The
%% literal is restored to the intended 6-wide field and the
%% alternative is a comment again.
print_lines(Module, InFd, OutFd, L, HTML) ->
    case io:get_line(InFd, '') of
        eof ->
            ignore;
        "%"++_=Line ->                          %Comment line - not executed.
            io:put_chars(OutFd, [tab(),escape_lt_and_gt(Line, HTML)]),
            print_lines(Module, InFd, OutFd, L+1, HTML);
        RawLine ->
            Line = escape_lt_and_gt(RawLine,HTML),
            Pattern = {#bump{module=Module,line=L},'$1'},
            case ets:match(?COLLECTION_TABLE, Pattern) of
                [] ->
                    %% Not an executable line: blank count column.
                    io:put_chars(OutFd, [tab(),Line]);
                Ns ->
                    %% Several clauses may start on the same line; sum
                    %% all their counters.
                    N = lists:foldl(fun([Ni], Nacc) -> Nacc+Ni end, 0, Ns),
                    if
                        N=:=0, HTML=:=true ->
                            LineNoNL = Line -- "\n",
                            Str = "     0",
                            %% Str = string:right("0", 6, 32),
                            RedLine = ["<font color=red>",Str,fill1(),
                                       LineNoNL,"</font>\n"],
                            io:put_chars(OutFd, RedLine);
                        N < 1000000 ->
                            Str = string:right(integer_to_list(N), 6, 32),
                            io:put_chars(OutFd, [Str,fill1(),Line]);
                        N < 10000000 ->
                            Str = integer_to_list(N),
                            io:put_chars(OutFd, [Str,fill2(),Line]);
                        true ->
                            Str = integer_to_list(N),
                            io:put_chars(OutFd, [Str,fill3(),Line])
                    end
            end,
            print_lines(Module, InFd, OutFd, L+1, HTML)
    end.
%% Left-margin strings for the count column in analyse_to_file output.
%% The column is 11 characters wide in total: a number right-aligned
%% in 6 positions plus "..|  " (counts < 10^6), 7 digits plus ".|  ",
%% or 8+ digits plus "|  ".  tab/0 is the blank margin used for
%% non-executable lines.  (The space runs inside these literals had
%% been collapsed, which destroyed the alignment they implement.)
tab() ->  "        |  ".
fill1() ->      "..|  ".
fill2() ->       ".|  ".
fill3() ->        "|  ".
%%%--Export--------------------------------------------------------------
%% Export cover data for one module ('_' means every module) to
%% OutFile in the size-prefixed term format produced by write/2.
%% Replies to From with ok | {error,Reason}.
do_export(Module, OutFile, From, State) ->
    case file:open(OutFile,[write,binary,raw]) of
        {ok,Fd} ->
            Reply =
                case Module of
                    '_' ->
                        export_info(State#main_state.imported),
                        %% Pull counters in from all remote nodes so
                        %% the export is complete.
                        collect(State#main_state.nodes),
                        do_export_table(State#main_state.compiled,
                                        State#main_state.imported,
                                        Fd);
                    _ ->
                        export_info(Module,State#main_state.imported),
                        try is_loaded(Module, State) of
                            {loaded, File} ->
                                [{Module,Clauses}] =
                                    ets:lookup(?COVER_CLAUSE_TABLE,Module),
                                collect(Module, Clauses,
                                        State#main_state.nodes),
                                do_export_table([{Module,File}],[],Fd);
                            {imported, File, ImportFiles} ->
                                %% don't know if I should allow this -
                                %% export a module which is only imported
                                Imported = [{Module,File,ImportFiles}],
                                do_export_table([],Imported,Fd)
                        catch throw:_ ->
                                %% is_loaded throws when Module is not
                                %% cover compiled at all.
                                {error,{not_cover_compiled,Module}}
                        end
                end,
            file:close(Fd),
            reply(From, Reply);
        {error,Reason} ->
            reply(From, {error, {cant_open_file,OutFile,Reason}})
    end.
%% Write export data for the union of compiled and imported modules
%% (imported entries that duplicate compiled ones are dropped).
do_export_table(Compiled, Imported, Fd) ->
    write_module_data(merge(Imported, Compiled), Fd).
%% Prepend each imported module to ModuleList as {Module,File},
%% skipping any module that is already present (including duplicates
%% within the imported list itself -- first occurrence wins).
merge(Imported, ModuleList) ->
    lists:foldl(
      fun({Module, File, _ImportFiles}, Acc) ->
              case lists:keymember(Module, 1, Acc) of
                  true  -> Acc;
                  false -> [{Module, File} | Acc]
              end
      end, ModuleList, Imported).
%% Write one module's export data to Fd: a {file,Module,File} header,
%% the module's clause-info table entry, and then every bump counter.
write_module_data([{Module,File}|ModList],Fd) ->
    write({file,Module,File},Fd),
    %% Note: Clauses here is the whole {Module,ClauseList} table
    %% entry, which is exactly the shape do_import_to_table/4 expects.
    [Clauses] = ets:lookup(?COLLECTION_CLAUSE_TABLE,Module),
    write(Clauses,Fd),
    ModuleData = ets:match_object(?COLLECTION_TABLE,{#bump{module=Module},'_'}),
    do_write_module_data(ModuleData,Fd),
    write_module_data(ModList,Fd);
write_module_data([],_Fd) ->
    ok.
%% Write every bump entry in the list to Fd.
do_write_module_data(ModuleData, Fd) ->
    lists:foreach(fun(Entry) -> write(Entry, Fd) end, ModuleData),
    ok.
%% Append one term to the export file as a compressed external-format
%% binary prefixed by its size.  Terms of at most 255 bytes get a
%% one-byte size prefix; larger ones are preceded by an encoded
%% {'$size',Size} marker whose own (small) size fits in one byte.
write(Element, Fd) ->
    Bin = term_to_binary(Element, [compressed]),
    Size = byte_size(Bin),
    if
        Size > 255 ->
            SizeBin = term_to_binary({'$size', Size}),
            file:write(Fd, [<<(byte_size(SizeBin)):8>>, SizeBin, Bin]);
        true ->
            file:write(Fd, [<<Size:8>>, Bin])
    end,
    ok.
%%%--Import--------------------------------------------------------------
%% Read exported terms from Fd (see write/2 and get_term/1) into the
%% collection tables.  Modules whose import is rejected by
%% add_imported/4 are remembered in DontImport, and their subsequent
%% records are skipped.  Returns the updated Imported list.
do_import_to_table(Fd,ImportFile,Imported) ->
    do_import_to_table(Fd,ImportFile,Imported,[]).
do_import_to_table(Fd,ImportFile,Imported,DontImport) ->
    case get_term(Fd) of
        {file,Module,File} ->
            %% Header starting a new module's data.
            case add_imported(Module, File, ImportFile, Imported) of
                {ok,NewImported} ->
                    do_import_to_table(Fd,ImportFile,NewImported,DontImport);
                dont_import ->
                    do_import_to_table(Fd,ImportFile,Imported,
                                       [Module|DontImport])
            end;
        {Key=#bump{module=Module},Val} ->
            %% One line counter.
            case lists:member(Module,DontImport) of
                false ->
                    insert_in_collection_table(Key,Val);
                true ->
                    ok
            end,
            do_import_to_table(Fd,ImportFile,Imported,DontImport);
        {Module,Clauses} ->
            %% Clause info for a module.
            case lists:member(Module,DontImport) of
                false ->
                    ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses});
                true ->
                    ok
            end,
            do_import_to_table(Fd,ImportFile,Imported,DontImport);
        eof ->
            Imported
    end.
%% Read one size-prefixed term (as written by write/2) from Fd, or
%% return eof.  A leading {'$size',N} marker means the actual term
%% follows in the next N bytes.
get_term(Fd) ->
    case file:read(Fd, 1) of
        eof ->
            eof;
        {ok, <<Size:8>>} ->
            {ok, Bin} = file:read(Fd, Size),
            case binary_to_term(Bin) of
                {'$size', RealSize} ->
                    {ok, RealBin} = file:read(Fd, RealSize),
                    binary_to_term(RealBin);
                Term ->
                    Term
            end
    end.
%%%--Reset---------------------------------------------------------------
%% Reset main node and all remote nodes
%% Reset counters for Module on the main node and on all remote Nodes.
do_reset_main_node(Module,Nodes) ->
    do_reset(Module),
    do_reset_collection_table(Module),
    remote_reset(Module,Nodes).

%% Drop Module's clause info and bump data from the collection tables.
do_reset_collection_table(Module) ->
    ets:delete(?COLLECTION_CLAUSE_TABLE,Module),
    ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}).
%% do_reset(Module) -> ok
%% The reset is done on a per-clause basis to avoid building
%% long lists in the case of very large modules
do_reset(Module) ->
    [{Module,Clauses}] = ets:lookup(?COVER_CLAUSE_TABLE, Module),
    do_reset2(Clauses).

%% Zero every bump counter belonging to one clause at a time.
do_reset2([{M,F,A,C,_L}|Clauses]) ->
    Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'},
    Bumps = ets:match_object(?COVER_TABLE, Pattern),
    lists:foreach(fun({Bump,_N}) ->
                          ets:insert(?COVER_TABLE, {Bump,0})
                  end,
                  Bumps),
    do_reset2(Clauses);
do_reset2([]) ->
    ok.
%% Remove every trace of Module from the cover tables.  The collection
%% tables only exist on the main node, hence the membership check.
do_clear(Module) ->
    ets:match_delete(?COVER_CLAUSE_TABLE, {Module,'_'}),
    ets:match_delete(?COVER_TABLE, {#bump{module=Module},'_'}),
    case lists:member(?COLLECTION_TABLE, ets:all()) of
        true ->
            %% We're on the main node
            ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'});
        false ->
            ok
    end.
%% Handle a module that turned out not to be loaded.  If it was
%% unloaded behind our back, clear its data locally and on all remote
%% nodes and drop it from the compiled list; for any other reason the
%% state is left untouched.
not_loaded(Module, unloaded, State) ->
    do_clear(Module),
    remote_unload(State#main_state.nodes,[Module]),
    Compiled = update_compiled([Module],
                               State#main_state.compiled),
    State#main_state{ compiled = Compiled };
not_loaded(_Module,_Else, State) ->
    State.
%%%--Div-----------------------------------------------------------------
%% HTML-escape '<', '>' and '&' in a line, but only when HTML output
%% was requested; otherwise the line passes through untouched.
escape_lt_and_gt(RawLine, HTML) when HTML =/= true ->
    RawLine;
escape_lt_and_gt(RawLine, _HTML) ->
    escape_lt_and_gt1(RawLine, []).

escape_lt_and_gt1([], Acc) ->
    lists:reverse(Acc);
escape_lt_and_gt1([Char | Rest], Acc) ->
    Escaped = case Char of
                  $< -> "&lt;";
                  $> -> "&gt;";
                  $& -> "&amp;";
                  _  -> [Char]
              end,
    %% Prepend the escape reversed so the final reverse restores it.
    escape_lt_and_gt1(Rest, lists:reverse(Escaped, Acc)).
%% Parallel map with a bounded number of simultaneous workers
%% (default 20).  Results come back in input order because each
%% worker's pid is matched in spawn order.
pmap(Fun, List) ->
    pmap(Fun, List, 20).
pmap(Fun, List, Limit) ->
    pmap(Fun, List, [], Limit, 0, []).
%% pmap(Fun, Todo, Pids, Limit, Cnt, Acc)
%%   Todo  - input elements not yet handed to a worker
%%   Pids  - workers whose results are still pending, in spawn order
%%   Cnt   - number of live workers (kept below Limit)
%%   Acc   - results collected so far, in reverse order
pmap(Fun, [E | Rest], Pids, Limit, Cnt, Acc) when Cnt < Limit ->
    Collector = self(),
    Pid = spawn_link(fun() ->
                             ?SPAWN_DBG(pmap,E),
                             Collector ! {res,self(),Fun(E)}
                     end),
    erlang:monitor(process, Pid),
    pmap(Fun, Rest, Pids ++ [Pid], Limit, Cnt + 1, Acc);
pmap(Fun, List, [Pid | Pids], Limit, Cnt, Acc) ->
    %% At the limit (or out of input): wait for the oldest pending
    %% worker so results are appended in order.
    receive
        {'DOWN', _Ref, process, X, _} when is_pid(X) ->
            %% A worker exited; one worker slot is free again.
            pmap(Fun, List, [Pid | Pids], Limit, Cnt - 1, Acc);
        {res, Pid, Res} ->
            pmap(Fun, List, Pids, Limit, Cnt, [Res | Acc])
    end;
pmap(_Fun, [], [], _Limit, 0, Acc) ->
    lists:reverse(Acc);
pmap(Fun, [], [], Limit, Cnt, Acc) ->
    %% All results collected; drain the remaining 'DOWN' messages.
    receive
        {'DOWN', _Ref, process, X, _} when is_pid(X) ->
            pmap(Fun, [], [], Limit, Cnt - 1, Acc)
    end.
%%%-----------------------------------------------------------------
%%% Read encoding from source file
%% Determine the HTML charset string for an Erlang source file, based
%% on its "coding:" comment (or the epp default when there is none).
encoding(File) ->
    SourceEncoding =
        case epp:read_encoding(File) of
            none -> epp:default_encoding();
            Enc  -> Enc
        end,
    html_encoding(SourceEncoding).

%% Map an Erlang source-encoding atom to the IANA charset name used in
%% the generated HTML header.
html_encoding(latin1) ->
    "iso-8859-1";
html_encoding(utf8) ->
    "utf-8".
| null | https://raw.githubusercontent.com/wireless-net/erlang-nommu/79f32f81418e022d8ad8e0e447deaea407289926/lib/tools/src/cover.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
cover_web implements a user interface for the coverage tool to run
under webtool.
coverage analysis. The process is registered as 'cover_server'
(?SERVER). The cover_server on the 'main' node is in charge, and
it monitors the cover_servers on all remote nodes. When it gets a
'DOWN' message for another cover_server, it marks the node as
'lost'. If a nodeup is received for a lost node the main node
ensures that the cover compiled modules are loaded again. If the
remote node was alive during the disconnected periode, cover data
for this periode will also be included in the analysis.
The cover_server process on the main node is implemented by the
the remote nodes are implemented by the functions init_remote/2 and
remote_process_loop/1.
TABLES
cover_internal_clause_table (?COVER_CLAUSE_TABLE).
?COVER_TABLE contains the bump data i.e. the data about which lines
have been executed how many times.
?COVER_CLAUSE_TABLE contains information about which clauses in which modules
cover is currently collecting statistics.
The main node owns tables named
'cover_collected_remote_data_table' (?COLLECTION_TABLE) and
'cover_collected_remote_clause_table' (?COLLECTION_CLAUSE_TABLE).
These tables contain data which is collected from remote nodes (either when a
remote node is stopped with cover:stop/1 or when analysing). When
analysing, data is even moved from the COVER tables on the main
node to the COLLECTION tables.
The main node also has a table named 'cover_binary_code_table'
(?BINARY_TABLE). This table contains the binary code for each cover
compiled module. This is necessary so that the code can be loaded
on remote nodes that are started after the compilation.
To take advantage of SMP when doing the cover analysis both the data
each node when collecting data, and on the remote node when collecting data
When analyzing data it is possible to issue multiple analyse(_to_file)/X
calls at once. They are however all calls (for backwards compatibility
reasons) so the user of cover will have to spawn several processes to to the
calls ( or use async_analyse_to_file ).
External exports
Used internally to ensure we upgrade the code to the latest version.
[{Module,File}]
[{Module,File,ImportFile}]
undefined | pid()
[Node]
[Node]
[{Module,File}]
atom()
atom()
atom()
integer()
integer()
integer()
atom() Module name
[{M,F,A,C,L}]
atom()
int()
int()
[int()]
[int()]
int()
boolean
Line doesn't matter.
----------------------------------------------------------------------
External exports
----------------------------------------------------------------------
Nodes = Node | [Node,...]
Node = atom()
compile(ModFile) ->
compile(ModFile, Options) ->
compile_module(ModFile) -> Result
compile_module(ModFile, Options) -> Result
ModFile = Module | File
Module = atom()
File = string()
Options = [Option]
Result = {ok,Module} | {error,File}
compile_directory() ->
compile_directory(Dir) ->
compile_directory(Dir, Options) -> [Result] | {error,Reason}
Dir = string()
Options - see compile/1
Result - see compile/1
Reason = eacces | enoent
compile_beam(ModFile) -> Result | {error,Reason}
ModFile - see compile/1
Result - see compile/1
Reason = non_existing | already_cover_compiled
compile_beam_directory(Dir) -> [Result] | {error,Reason}
Dir - see compile_directory/1
Result - see compile/1
Reason = eacces | enoent
analyse(Module) ->
analyse(Module, Analysis) ->
analyse(Module, Level) ->
analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error}
Module = atom()
Analysis = coverage | calls
Level = line | clause | function | module
Answer = {Module,Value} | [{Item,Value}]
Item = Line | Clause | Function
Line = {M,N}
Clause = {M,F,A,C}
Function = {M,F,A}
M = F = atom()
N = A = C = integer()
Value = {Cov,NotCov} | Calls
Cov = NotCov = Calls = integer()
Error = {not_cover_compiled,Module}
analyse_to_file(Module) ->
analyse_to_file(Module, Options) ->
Module = atom()
Options = [Option]
Option = html
Error = {not_cover_compiled,Module} | no_source_code_found |
{file,File,Reason}
File = string()
Reason = term()
export(File)
export(File,Module) -> ok | {error,Reason}
File = string(); file to write the exported data to
Module = atom()
import(File) -> ok | {error, Reason}
File = string(); file created with cover:export/1,2
modules() -> [Module]
Module = atom()
imported_modules() -> [Module]
Module = atom()
imported() -> [ImportFile]
which_nodes() -> [Node]
Node = atom()
is_compiled(Module) -> {file,File} | false
Module = atom()
File = string()
reset(Module) -> ok | {error,Error}
reset() -> ok
Module = atom()
Error = {not_cover_compiled,Module}
stop() -> ok
flush(Nodes) -> ok | {error,not_main_node}
Nodes = [Node] | Node
Node = atom()
Error = {not_cover_compiled,Module}
Used by test_server only. Not documented.
Module = Function = atom()
Arity = Clause = Line = integer()
executable line.
line=Line},
----------------------------------------------------------------------
cover_server on main node
----------------------------------------------------------------------
performance boost
when collect/1 is called.
This module (cover) could have been reloaded. Make
sure we run the new code.
no abstract code
This module (cover) could have been reloaded. Make
sure we run the new code.
someone tries to compile it from .beam
Get all compiled modules which are still loaded
Get all modules with imported data
List all imported files
List all imported files
A remote cover_server is down, mark as lost
node stopped
Will be taken care of when 'DOWN' message arrives
----------------------------------------------------------------------
cover_server on remote node
----------------------------------------------------------------------
write_concurrency here makes otp_8270 break :(
,{write_concurrency, true}
not replying since 'DOWN' message will be received anyway
Sending clause by clause in order to avoid large lists
Reset
remove code marked as 'old'
mark cover compiled code as 'old'
Note: original beam code must be loaded before the cover
compiled code is purged, in order to for references to
'fun M:F/A' and %% 'fun F/A' funs to be correct (they
refer to (M:)F/A in the *latest* version of the module)
load original code
remove cover compiled code
Make sure the #bump{} records are available *before* the
module is loaded.
----------------------------------------------------------------------
----------------------------------------------------------------------
--Handling of remote nodes--------------------------------------------
In case some of the compiled modules have been unloaded they
should not be loaded on the new node.
start the cover_server on a remote node
If a lost node comes back, ensure that main and remote node has the
same cover compiled modules. Note that no action is taken if the
same {Mod,File} eksists on both, i.e. code change is not handled!
Load a set of cover compiled modules on remote nodes,
We do it ?MAX_MODS modules at a time so that we don't
run out of memory on the cover_server node.
Read all data needed for loading a cover compiled module on a remote node
data to insert in ?COVER_TABLE.
Create a match spec which returns the clause info {Module,InitInfo} and
all #bump keys for the given module with 0 number of calls.
Unload modules on remote nodes
Reset one or all modules on remote nodes
Collect data from remote nodes - used for analyse or stop(Node)
Process which receives chunks of data from remote nodes - either when
analysing or when stopping cover on the remote nodes.
Make sure that there are no race conditions from ets:member
--Handling of modules state data--------------------------------------
Do not print that the export includes imported modules
Do not print that the export includes imported modules
Removes a module from the list of imported modules and writes a warning
This is done when a module is compiled.
Adds information to the list of compiled modules, preserving time order
and without adding duplicate entries.
Get all compiled modules which are still loaded, and possibly an
updated version of the Compiled list.
--Compilation---------------------------------------------------------
do_compile(File, Options) -> {ok,Module} | {error,Error}
Beam is a binary or a .beam file name
Clear database
Extract the abstract format and insert calls to bump/6 at
every executable line and, as a side effect, initiate
the database
We need to recover the source from the compilation
info otherwise the newly compiled module will have
source pointing to the current directory
Compile and load the result
It's necessary to check the result of loading since it may
fail, for example if Module resides in a sticky directory
Store info about all function clauses in database
Store binary code so it can be loaded on remote nodes
Wrong version of abstract code. Just report that there
is no abstract code.
be interpreted as the name of the main erlang source file.
Expand short-circuit Boolean expressions.
This code traverses the abstract code, stored as the abstract_code
The switch is turned off when we encounter other files than the main file.
This way we will be able to exclude functions defined in include files.
Don't want to run parse transforms more than once.
Other attributes and skipped includes.
function clause
Not used?
receive-, case-, if-, or try-clause
Here is the place to add a call to cover:bump/6!
already a bump at this line
Bump = {call, 0, {remote, 0, {atom,0,cover}, {atom,0,bump}},
[{atom, 0, Vars#vars.module},
{integer, 0, Line}]},
Fix last expression (OTP-8188). A typical example:
Bump line 5 after " a " has been evaluated !
Line 5 wasn't bumped just before "F()" since it was already bumped
before "b" (and before "c") (one mustn't bump a line more than
once in a single "evaluation"). The expression "case X ... end" is
This doesn't solve all problems with expressions on the same line,
though. 'case' and 'try' are tricky. An example:
?
done on some of the compiler's "lower level" format.
'fun' is also problematic since a bump inside the body "shadows"
the rest of the line.
No need to update ?COVER_TABLE.
End of fix of last expression.
var|char|integer|float|string|atom|nil|eof|default
Every qualifier is decorated with a counter.
--Analysis------------------------------------------------------------
Collect data for all modules
local node
remote nodes
local node
remote nodes
?COLLECTION_TABLE. Resetting data in ?COVER_TABLE
the .beam file, then in ../src, then in compile info.
not .beam
Not in pwd: look in ../src.
Not in ../src: look for source path in compile info, but
first look relative the beam directory.
stripped
below ../src
or absolute
No success means that source is either not under ../src or
its relative path differs from that of compile info. (For
example, compiled under src/x but installed under src/y.)
An option to specify an arbitrary source path explicitly is
probably a better solution than either more heuristics or a
potentially slow filesystem search.
Splice the tail of a source path, starting from the last "src"
component, onto the parent of a beam directory, or return false if
no "src" component is found.
--> "/path/to/app-1.0/ebin/../src/x/y.erl"
This handles the case of source in subdirectories of ../src with
beams that have moved since compilation.
found src component
or not
do_analyse(Module, Analysis, Level, Clauses)-> {ok,Answer} | {error,Error}
Clauses = [{Module,Function,Arity,Clause,Lines}]
There are no clauses.
No function can be covered or notcov.
Module = atom()
OutFile = ErlFile = string()
Write some initial information to the output file
Comment line - not executed.
--Export--------------------------------------------------------------
don't know if I should allow this -
export a module which is only imported
--Import--------------------------------------------------------------
--Reset---------------------------------------------------------------
Reset main node and all remote nodes
do_reset(Module) -> ok
The reset is done on a per-clause basis to avoid building
long lists in the case of very large modules
We're on the main node
--Div-----------------------------------------------------------------
-----------------------------------------------------------------
Read encoding from source file | Copyright Ericsson AB 2001 - 2013 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(cover).
This module implements the Erlang coverage tool . The module named
ARCHITECTURE
The coverage tool consists of one process on each node involved in
functions init_main/1 and main_process_loop/1 . The cover_server on
Each nodes has two tables : cover_internal_data_table ( ? COVER_TABLE ) and .
PARALLELISM
collection and analysis has been parallelized . One process is spawned for
one process is spawned per module .
-export([start/0, start/1,
compile/1, compile/2, compile_module/1, compile_module/2,
compile_directory/0, compile_directory/1, compile_directory/2,
compile_beam/1, compile_beam_directory/0, compile_beam_directory/1,
analyse/1, analyse/2, analyse/3, analyze/1, analyze/2, analyze/3,
analyse_to_file/1, analyse_to_file/2, analyse_to_file/3,
analyze_to_file/1, analyze_to_file/2, analyze_to_file/3,
async_analyse_to_file/1,async_analyse_to_file/2,
async_analyse_to_file/3, async_analyze_to_file/1,
async_analyze_to_file/2, async_analyze_to_file/3,
export/1, export/2, import/1,
modules/0, imported/0, imported_modules/0, which_nodes/0, is_compiled/1,
reset/1, reset/0,
flush/1,
stop/0, stop/1]).
-export([remote_start/1,get_main_node/0]).
-export([main_process_loop/1,remote_process_loop/1]).
}).
-define(BUMP_REC_NAME,bump).
}).
-define(COVER_TABLE, 'cover_internal_data_table').
-define(COVER_CLAUSE_TABLE, 'cover_internal_clause_table').
-define(BINARY_TABLE, 'cover_binary_code_table').
-define(COLLECTION_TABLE, 'cover_collected_remote_data_table').
-define(COLLECTION_CLAUSE_TABLE, 'cover_collected_remote_clause_table').
-define(TAG, cover_compiled).
-define(SERVER, cover_server).
-define(BLOCK(Expr), {block,0,[Expr]}).
-define(BLOCK1(Expr),
if
element(1, Expr) =:= block ->
Expr;
true -> ?BLOCK(Expr)
end).
-define(SPAWN_DBG(Tag,Value),put(Tag,Value)).
-include_lib("stdlib/include/ms_transform.hrl").
start ( ) - > { ok , Pid } | { error , Reason }
Pid = pid ( )
Reason = { already_started , Pid } | term ( )
start() ->
case whereis(?SERVER) of
undefined ->
Starter = self(),
Pid = spawn(fun() ->
?SPAWN_DBG(start,[]),
init_main(Starter)
end),
Ref = erlang:monitor(process,Pid),
Return =
receive
{?SERVER,started} ->
{ok,Pid};
{'DOWN', Ref, _Type, _Object, Info} ->
{error,Info}
end,
erlang:demonitor(Ref),
Return;
Pid ->
{error,{already_started,Pid}}
end.
start(Nodes ) - > { ok , }
start(Node) when is_atom(Node) ->
start([Node]);
start(Nodes) ->
call({start_nodes,remove_myself(Nodes,[])}).
Option = { i , Dir } | { d , Macro } | { d , Macro , Value }
compile(ModFile) ->
compile_module(ModFile, []).
compile(ModFile, Options) ->
compile_module(ModFile, Options).
compile_module(ModFile) when is_atom(ModFile);
is_list(ModFile) ->
compile_module(ModFile, []).
compile_module(Module, Options) when is_atom(Module), is_list(Options) ->
compile_module(atom_to_list(Module), Options);
compile_module(File, Options) when is_list(File), is_list(Options) ->
WithExt = case filename:extension(File) of
".erl" ->
File;
_ ->
File++".erl"
end,
AbsFile = filename:absname(WithExt),
[R] = compile_modules([AbsFile], Options),
R.
compile_directory() ->
case file:get_cwd() of
{ok, Dir} ->
compile_directory(Dir, []);
Error ->
Error
end.
compile_directory(Dir) when is_list(Dir) ->
compile_directory(Dir, []).
compile_directory(Dir, Options) when is_list(Dir), is_list(Options) ->
case file:list_dir(Dir) of
{ok, Files} ->
ErlFiles = [filename:join(Dir, File) ||
File <- Files,
filename:extension(File) =:= ".erl"],
compile_modules(ErlFiles, Options);
Error ->
Error
end.
compile_modules(Files,Options) ->
Options2 = filter_options(Options),
compile_modules(Files,Options2,[]).
compile_modules([File|Files], Options, Result) ->
R = call({compile, File, Options}),
compile_modules(Files,Options,[R|Result]);
compile_modules([],_Opts,Result) ->
lists:reverse(Result).
filter_options(Options) ->
lists:filter(fun(Option) ->
case Option of
{i, Dir} when is_list(Dir) -> true;
{d, _Macro} -> true;
{d, _Macro, _Value} -> true;
export_all -> true;
_ -> false
end
end,
Options).
compile_beam(Module) when is_atom(Module) ->
case code:which(Module) of
non_existing ->
{error,non_existing};
?TAG ->
compile_beam(Module,?TAG);
File ->
compile_beam(Module,File)
end;
compile_beam(File) when is_list(File) ->
{WithExt,WithoutExt}
= case filename:rootname(File,".beam") of
File ->
{File++".beam",File};
Rootname ->
{File,Rootname}
end,
AbsFile = filename:absname(WithExt),
Module = list_to_atom(filename:basename(WithoutExt)),
compile_beam(Module,AbsFile).
compile_beam(Module,File) ->
call({compile_beam,Module,File}).
compile_beam_directory() ->
case file:get_cwd() of
{ok, Dir} ->
compile_beam_directory(Dir);
Error ->
Error
end.
compile_beam_directory(Dir) when is_list(Dir) ->
case file:list_dir(Dir) of
{ok, Files} ->
BeamFiles = [filename:join(Dir, File) ||
File <- Files,
filename:extension(File) =:= ".beam"],
compile_beams(BeamFiles);
Error ->
Error
end.
compile_beams(Files) ->
compile_beams(Files,[]).
compile_beams([File|Files],Result) ->
R = compile_beam(File),
compile_beams(Files,[R|Result]);
compile_beams([],Result) ->
lists:reverse(Result).
analyse(Module) ->
analyse(Module, coverage).
analyse(Module, Analysis) when Analysis=:=coverage; Analysis=:=calls ->
analyse(Module, Analysis, function);
analyse(Module, Level) when Level=:=line; Level=:=clause; Level=:=function;
Level=:=module ->
analyse(Module, coverage, Level).
analyse(Module, Analysis, Level) when is_atom(Module),
Analysis=:=coverage; Analysis=:=calls,
Level=:=line; Level=:=clause;
Level=:=function; Level=:=module ->
call({{analyse, Analysis, Level}, Module}).
analyze(Module) -> analyse(Module).
analyze(Module, Analysis) -> analyse(Module, Analysis).
analyze(Module, Analysis, Level) -> analyse(Module, Analysis, Level).
analyse_to_file(Module , OutFile ) - >
analyse_to_file(Module , OutFile , Options ) - > { ok , OutFile } | { error , Error }
OutFile = string ( )
analyse_to_file(Module) when is_atom(Module) ->
analyse_to_file(Module, outfilename(Module,[]), []).
analyse_to_file(Module, []) when is_atom(Module) ->
analyse_to_file(Module, outfilename(Module,[]), []);
analyse_to_file(Module, Options) when is_atom(Module),
is_list(Options), is_atom(hd(Options)) ->
analyse_to_file(Module, outfilename(Module,Options), Options);
analyse_to_file(Module, OutFile) when is_atom(Module), is_list(OutFile) ->
analyse_to_file(Module, OutFile, []).
analyse_to_file(Module, OutFile, Options) when is_atom(Module), is_list(OutFile) ->
call({{analyse_to_file, OutFile, Options}, Module}).
analyze_to_file(Module) -> analyse_to_file(Module).
analyze_to_file(Module, OptOrOut) -> analyse_to_file(Module, OptOrOut).
analyze_to_file(Module, OutFile, Options) ->
analyse_to_file(Module, OutFile, Options).
async_analyse_to_file(Module) ->
do_spawn(?MODULE, analyse_to_file, [Module]).
async_analyse_to_file(Module, OutFileOrOpts) ->
do_spawn(?MODULE, analyse_to_file, [Module, OutFileOrOpts]).
async_analyse_to_file(Module, OutFile, Options) ->
do_spawn(?MODULE, analyse_to_file, [Module, OutFile, Options]).
do_spawn(M,F,A) ->
spawn_link(fun() ->
case apply(M,F,A) of
{ok, _} ->
ok;
{error, Reason} ->
exit(Reason)
end
end).
async_analyze_to_file(Module) ->
async_analyse_to_file(Module).
async_analyze_to_file(Module, OutFileOrOpts) ->
async_analyse_to_file(Module, OutFileOrOpts).
async_analyze_to_file(Module, OutFile, Options) ->
async_analyse_to_file(Module, OutFile, Options).
outfilename(Module,Opts) ->
case lists:member(html,Opts) of
true ->
atom_to_list(Module)++".COVER.html";
false ->
atom_to_list(Module)++".COVER.out"
end.
export(File) ->
export(File, '_').
export(File, Module) ->
call({export,File,Module}).
import(File) ->
call({import,File}).
modules() ->
call(modules).
imported_modules() ->
call(imported_modules).
ImportFile = string ( )
%% imported() -> [ImportFile]
%% All files coverage data has been imported from.
imported() ->
    call(imported).

%% which_nodes() -> [Node]
%% Remote nodes currently participating in the cover run.
which_nodes() ->
    call(which_nodes).

%% is_compiled(Module) -> {file,File} | false
is_compiled(Module) when is_atom(Module) ->
    call({is_compiled, Module}).

%% reset([Module]) - zero the coverage counters (for one or all modules).
reset(Module) when is_atom(Module) ->
    call({reset, Module}).
reset() ->
    call(reset).

%% stop() - stop cover on this node.
%% stop(Nodes) - stop cover on the given remote nodes only (the local
%% node is filtered out by remove_myself/2).
stop() ->
    call(stop).
stop(Node) when is_atom(Node) ->
    stop([Node]);
stop(Nodes) ->
    call({stop,remove_myself(Nodes,[])}).

%% flush(Nodes) - collect counter data from the given remote nodes
%% without stopping cover on them.
flush(Node) when is_atom(Node) ->
    flush([Node]);
flush(Nodes) ->
    call({flush,remove_myself(Nodes,[])}).

%% get_main_node() -> Node running the main cover server.
get_main_node() ->
    call(get_main_node).
%% bump(Module, Function, Arity, Clause, Line) -> void()
%% This function is inserted into Cover compiled modules, once for each
%% executable line, to increment that line's hit counter.
%% NOTE(review): these lines were garbled in the source (comment text
%% fused with code); reconstructed from the #bump{} key usage and the
%% ?COVER_TABLE counter updates visible elsewhere in this file.
bump(Module, Function, Arity, Clause, Line) ->
    Key = #bump{module = Module, function = Function, arity = Arity,
                clause = Clause, line = Line},
    ets:update_counter(?COVER_TABLE, Key, 1).
%% Synchronous request to the local cover server.  A monitor detects a
%% missing server (the noproc 'DOWN' message): the server is then
%% started and the call retried.  If the server dies while serving the
%% request, the caller exits with the same reason.
call(Request) ->
    Ref = erlang:monitor(process,?SERVER),
    receive {'DOWN', Ref, _Type, _Object, noproc} ->
	    erlang:demonitor(Ref),
	    start(),
	    call(Request)
    after 0 ->
	    ?SERVER ! {self(),Request},
	    Return =
		receive
		    {'DOWN', Ref, _Type, _Object, Info} ->
			exit(Info);
		    {?SERVER,Reply} ->
			Reply
		end,
	    erlang:demonitor(Ref, [flush]),
	    Return
    end.

%% Send a reply to a local client.
reply(From, Reply) ->
    From ! {?SERVER,Reply}.

%% True if From identifies a client process (pid) rather than a node.
is_from(From) ->
    is_pid(From).

%% Synchronous request to the cover server on a remote Node.  Returns
%% {error,node_dead} when the remote server is not running - except for
%% {remote,stop}, where a dead server already is the desired outcome.
remote_call(Node,Request) ->
    Ref = erlang:monitor(process,{?SERVER,Node}),
    receive {'DOWN', Ref, _Type, _Object, noproc} ->
	    erlang:demonitor(Ref),
	    {error,node_dead}
    after 0 ->
	    {?SERVER,Node} ! Request,
	    Return =
		receive
		    {'DOWN', Ref, _Type, _Object, _Info} ->
			case Request of
			    {remote,stop} -> ok;
			    _ -> {error,node_dead}
			end;
		    {?SERVER,Reply} ->
			Reply
		end,
	    erlang:demonitor(Ref, [flush]),
	    Return
    end.

%% Reply either directly to a pid or to the server registered on the
%% main node.
remote_reply(Proc,Reply) when is_pid(Proc) ->
    Proc ! {?SERVER,Reply};
remote_reply(MainNode,Reply) ->
    {?SERVER,MainNode} ! {?SERVER,Reply}.
%% Initialize the cover server on the main node: register the server
%% name, create the ETS tables, subscribe to node up/down events and
%% acknowledge the starter before entering the main loop.
init_main(Starter) ->
    register(?SERVER,self()),
    %% ?COVER_TABLE is updated concurrently by instrumented code.
    ets:new(?COVER_TABLE, [set, public, named_table
			   ,{write_concurrency, true}
			  ]),
    ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]),
    ets:new(?BINARY_TABLE, [set, named_table]),
    ets:new(?COLLECTION_TABLE, [set, public, named_table]),
    ets:new(?COLLECTION_CLAUSE_TABLE, [set, public, named_table]),
    net_kernel:monitor_nodes(true),
    Starter ! {?SERVER,started},
    main_process_loop(#main_state{}).
%% Main loop of the cover server on the main node.  Serves the
%% functional API (compile/analyse/export/import/reset/stop/...) and
%% tracks remote nodes and which modules are cover-compiled.
%% NOTE(review): two case-branch heads and one comment marker had been
%% lost in the corrupted source; they are restored below and flagged.
main_process_loop(State) ->
    receive
	{From, {start_nodes,Nodes}} ->
	    {StartedNodes,State1} = do_start_nodes(Nodes, State),
	    reply(From, {ok,StartedNodes}),
	    main_process_loop(State1);

	{From, {compile, File, Options}} ->
	    case do_compile(File, Options) of
		{ok, Module} ->
		    remote_load_compiled(State#main_state.nodes,[{Module,File}]),
		    reply(From, {ok, Module}),
		    Compiled = add_compiled(Module, File,
					    State#main_state.compiled),
		    Imported = remove_imported(Module,State#main_state.imported),
		    NewState = State#main_state{compiled = Compiled,
						imported = Imported},
		    %% Fully qualified call so a code change takes effect here.
		    ?MODULE:main_process_loop(NewState);
		error ->
		    reply(From, {error, File}),
		    main_process_loop(State)
	    end;

	{From, {compile_beam, Module, BeamFile0}} ->
	    Compiled0 = State#main_state.compiled,
	    case get_beam_file(Module,BeamFile0,Compiled0) of
		{ok,BeamFile} ->
		    UserOptions = get_compile_options(Module,BeamFile),
		    {Reply,Compiled} =
			case do_compile_beam(Module,BeamFile,UserOptions) of
			    {ok, Module} ->
				remote_load_compiled(State#main_state.nodes,
						     [{Module,BeamFile}]),
				C = add_compiled(Module,BeamFile,Compiled0),
				{{ok,Module},C};
			    error ->
				{{error, BeamFile}, Compiled0};
			    %% NOTE(review): this branch head was missing in
			    %% the source; restored so the reason returned by
			    %% do_compile_beam/3 is propagated to the caller.
			    {error, Reason} ->
				{{error, {Reason, BeamFile}}, Compiled0}
			end,
		    reply(From,Reply),
		    Imported = remove_imported(Module,State#main_state.imported),
		    NewState = State#main_state{compiled = Compiled,
						imported = Imported},
		    ?MODULE:main_process_loop(NewState);
		{error,no_beam} ->
		    %% The module has first been compiled from .erl, and now
		    %% an attempt is made to compile it from .beam.
		    reply(From,
			  {error,{already_cover_compiled,no_beam_found,Module}}),
		    main_process_loop(State)
	    end;

	{From, {export,OutFile,Module}} ->
	    %% Exporting is potentially slow; do it in a separate process.
	    spawn(fun() ->
			  ?SPAWN_DBG(export,{OutFile, Module}),
			  do_export(Module, OutFile, From, State)
		  end),
	    main_process_loop(State);

	{From, {import,File}} ->
	    case file:open(File,[read,binary,raw]) of
		{ok,Fd} ->
		    Imported = do_import_to_table(Fd,File,
						  State#main_state.imported),
		    reply(From, ok),
		    file:close(Fd),
		    main_process_loop(State#main_state{imported=Imported});
		{error,Reason} ->
		    reply(From, {error, {cant_open_file,File,Reason}}),
		    main_process_loop(State)
	    end;

	{From, modules} ->
	    %% Only report modules that are still loaded; prune the rest.
	    {LoadedModules,Compiled} =
		get_compiled_still_loaded(State#main_state.nodes,
					  State#main_state.compiled),
	    reply(From, LoadedModules),
	    main_process_loop(State#main_state{compiled=Compiled});

	{From, imported_modules} ->
	    ImportedModules = lists:map(fun({Mod,_File,_ImportFile}) -> Mod end,
					State#main_state.imported),
	    reply(From, ImportedModules),
	    main_process_loop(State);

	{From, imported} ->
	    reply(From, get_all_importfiles(State#main_state.imported,[])),
	    main_process_loop(State);

	{From, which_nodes} ->
	    reply(From, State#main_state.nodes),
	    main_process_loop(State);

	{From, reset} ->
	    lists:foreach(
	      fun({Module,_File}) ->
		      do_reset_main_node(Module,State#main_state.nodes)
	      end,
	      State#main_state.compiled),
	    reply(From, ok),
	    main_process_loop(State#main_state{imported=[]});

	{From, {stop,Nodes}} ->
	    %% Final collection from the nodes before dropping them.
	    remote_collect('_',Nodes,true),
	    reply(From, ok),
	    Nodes1 = State#main_state.nodes--Nodes,
	    LostNodes1 = State#main_state.lost_nodes--Nodes,
	    main_process_loop(State#main_state{nodes=Nodes1,
					       lost_nodes=LostNodes1});

	{From, {flush,Nodes}} ->
	    remote_collect('_',Nodes,false),
	    reply(From, ok),
	    main_process_loop(State);

	{From, stop} ->
	    lists:foreach(
	      fun(Node) ->
		      remote_call(Node,{remote,stop})
	      end,
	      State#main_state.nodes),
	    reload_originals(State#main_state.compiled),
	    ets:delete(?COVER_TABLE),
	    ets:delete(?COVER_CLAUSE_TABLE),
	    ets:delete(?BINARY_TABLE),
	    ets:delete(?COLLECTION_TABLE),
	    ets:delete(?COLLECTION_CLAUSE_TABLE),
	    unregister(?SERVER),
	    reply(From, ok);

	{From, {{analyse, Analysis, Level}, Module}} ->
	    S = try
		    Loaded = is_loaded(Module, State),
		    spawn(fun() ->
				  ?SPAWN_DBG(analyse,{Module,Analysis, Level}),
				  do_parallel_analysis(
				    Module, Analysis, Level,
				    Loaded, From, State)
			  end),
		    State
		catch throw:Reason ->
			reply(From,{error, {not_cover_compiled,Module}}),
			not_loaded(Module, Reason, State)
		end,
	    main_process_loop(S);

	{From, {{analyse_to_file, OutFile, Opts},Module}} ->
	    S = try
		    Loaded = is_loaded(Module, State),
		    spawn(fun() ->
				  ?SPAWN_DBG(analyse_to_file,
					     {Module,OutFile, Opts}),
				  do_parallel_analysis_to_file(
				    Module, OutFile, Opts,
				    Loaded, From, State)
			  end),
		    State
		catch throw:Reason ->
			reply(From,{error, {not_cover_compiled,Module}}),
			not_loaded(Module, Reason, State)
		end,
	    main_process_loop(S);

	{From, {is_compiled, Module}} ->
	    S = try is_loaded(Module, State) of
		    {loaded, File} ->
			reply(From,{file, File}),
			State;
		    {imported,_File,_ImportFiles} ->
			reply(From,false),
			State
		catch throw:Reason ->
			reply(From,false),
			not_loaded(Module, Reason, State)
		end,
	    main_process_loop(S);

	{From, {reset, Module}} ->
	    S = try
		    Loaded = is_loaded(Module,State),
		    R = case Loaded of
			    {loaded, _File} ->
				do_reset_main_node(
				  Module, State#main_state.nodes);
			    {imported, _File, _} ->
				do_reset_collection_table(Module)
			end,
		    Imported =
			remove_imported(Module,
					State#main_state.imported),
		    reply(From, R),
		    State#main_state{imported=Imported}
		catch throw:Reason ->
			reply(From,{error, {not_cover_compiled,Module}}),
			not_loaded(Module, Reason, State)
		end,
	    main_process_loop(S);

	{'DOWN', _MRef, process, {?SERVER,Node}, _Info} ->
	    %% A remote cover server died: move it to the lost list so it
	    %% can be re-synchronized when its node comes back.
	    {Nodes,Lost} =
		case lists:member(Node,State#main_state.nodes) of
		    true ->
			N = State#main_state.nodes--[Node],
			L = [Node|State#main_state.lost_nodes],
			{N,L};
		    %% NOTE(review): this 'false ->' branch head was missing
		    %% in the corrupted source; restored (node was already
		    %% removed, e.g. by an explicit stop).
		    false ->
			{State#main_state.nodes,State#main_state.lost_nodes}
		end,
	    main_process_loop(State#main_state{nodes=Nodes,lost_nodes=Lost});

	{nodeup,Node} ->
	    State1 =
		case lists:member(Node,State#main_state.lost_nodes) of
		    true ->
			sync_compiled(Node,State);
		    false ->
			State
		end,
	    main_process_loop(State1);

	{nodedown,_} ->
	    %% Will be handled when the 'DOWN' message arrives.
	    main_process_loop(State);

	{From, get_main_node} ->
	    reply(From, node()),
	    main_process_loop(State);

	get_status ->
	    io:format("~tp~n",[State]),
	    main_process_loop(State)
    end.
%% Initialize the cover server on a remote node: register the name,
%% create the local counter tables, ack the starter and enter the
%% remote loop (remembering which node is the main node).
init_remote(Starter,MainNode) ->
    register(?SERVER,self()),
    ets:new(?COVER_TABLE, [set, public, named_table
			  ]),
    ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]),
    Starter ! {self(),started},
    remote_process_loop(#remote_state{main_node=MainNode}).
%% Loop of the cover server on a remote node.  Handles load/unload of
%% instrumented code, counter reset, data collection and stop requests
%% from the main node.
%% NOTE(review): two lines lost in the corrupted source are restored
%% below and flagged.
remote_process_loop(State) ->
    receive
	{remote,load_compiled,Compiled} ->
	    Compiled1 = load_compiled(Compiled,State#remote_state.compiled),
	    remote_reply(State#remote_state.main_node, ok),
	    %% Fully qualified call so a code change takes effect here.
	    ?MODULE:remote_process_loop(State#remote_state{compiled=Compiled1});

	{remote,unload,UnloadedModules} ->
	    unload(UnloadedModules),
	    Compiled =
		update_compiled(UnloadedModules, State#remote_state.compiled),
	    remote_reply(State#remote_state.main_node, ok),
	    remote_process_loop(State#remote_state{compiled=Compiled});

	{remote,reset,Module} ->
	    do_reset(Module),
	    remote_reply(State#remote_state.main_node, ok),
	    remote_process_loop(State);

	{remote,collect,Module,CollectorPid} ->
	    %% Redirect to the 5-tuple form, replying to the local server
	    %% name instead of a pid.
	    self() ! {remote,collect,Module,CollectorPid, ?SERVER},
	    %% NOTE(review): restored missing recursion - without it the
	    %% server would fall out of the loop after the redirect.
	    remote_process_loop(State);

	{remote,collect,Module,CollectorPid,From} ->
	    spawn(fun() ->
			  ?SPAWN_DBG(remote_collect,
				     {Module, CollectorPid, From}),
			  do_collect(Module, CollectorPid, From)
		  end),
	    remote_process_loop(State);

	{remote,stop} ->
	    reload_originals(State#remote_state.compiled),
	    ets:delete(?COVER_TABLE),
	    ets:delete(?COVER_CLAUSE_TABLE),
	    unregister(?SERVER),
	    %% NOTE(review): restored missing terminal expression; no reply
	    %% is sent since the main node's monitor sees the exit anyway.
	    ok;

	{remote,get_compiled} ->
	    remote_reply(State#remote_state.main_node,
			 State#remote_state.compiled),
	    remote_process_loop(State);

	{From, get_main_node} ->
	    remote_reply(From, State#remote_state.main_node),
	    remote_process_loop(State);

	get_status ->
	    io:format("~tp~n",[State]),
	    remote_process_loop(State);

	M ->
	    %% Unexpected request - tell a pid-shaped sender it reached the
	    %% wrong node, otherwise just log and continue.
	    io:format("WARNING: remote cover_server received\n~p\n",[M]),
	    case M of
		{From,_} ->
		    case is_from(From) of
			true ->
			    reply(From,{error,not_main_node});
			false ->
			    ok
		    end;
		_ ->
		    ok
	    end,
	    remote_process_loop(State)
    end.
%% Collect counter data for Module ('_' = all modules) and stream it in
%% chunks to CollectorPid on the main node; finally signal 'done' and
%% ack the request.
do_collect(Module, CollectorPid, From) ->
    AllMods =
	case Module of
	    '_' -> ets:tab2list(?COVER_CLAUSE_TABLE);
	    _ -> ets:lookup(?COVER_CLAUSE_TABLE, Module)
	end,
    %% Process modules in parallel.
    pmap(
      fun({_Mod,Clauses}) ->
	      lists:map(fun(Clause) ->
				send_collected_data(Clause, CollectorPid)
			end,Clauses)
      end,AllMods),
    CollectorPid ! done,
    remote_reply(From, ok).

%% Send all bumps of one clause to the collector, then zero the local
%% counters so the next collection only reports new hits.
send_collected_data({M,F,A,C,_L}, CollectorPid) ->
    Pattern =
	{#bump{module=M, function=F, arity=A, clause=C}, '_'},
    Bumps = ets:match_object(?COVER_TABLE, Pattern),
    %% Reset counters for the bumps just collected.
    lists:foreach(fun({Bump,_N}) ->
			  ets:insert(?COVER_TABLE, {Bump,0})
		  end,
		  Bumps),
    CollectorPid ! {chunk,Bumps}.
%% Reload the original (non-instrumented) code for every module in the
%% compiled list.
reload_originals(Compiled) ->
    lists:foreach(fun({Module, _File}) -> do_reload_original(Module) end,
                  Compiled).
%% Restore the original (non cover-compiled) version of Module when the
%% currently loaded code is the cover-compiled one (code:which/1
%% returns ?TAG for cover-loaded binaries).
%% NOTE(review): the body of the ?TAG branch has been lost in this copy
%% of the file (the branch has no expressions before '_ ->', which is
%% not valid Erlang); it presumably purged the instrumented code and
%% reloaded the original - restore from the upstream source before
%% compiling.
do_reload_original(Module) ->
    case code:which(Module) of
	?TAG ->
	_ ->
	    ignore
    end.
%% Load cover-compiled code received from the main node.  For each
%% module the local tables are seeded with InitialTable before the
%% binary is loaded; on load failure the seeded data is cleared again
%% and the module is not added to the accumulator.
load_compiled([{Module,File,Binary,InitialTable}|Compiled],Acc) ->
    %% Make sure the #bump{} counters exist before the code can run.
    insert_initial_data(InitialTable),
    NewAcc =
	case code:load_binary(Module, ?TAG, Binary) of
	    {module,Module} ->
		add_compiled(Module, File, Acc);
	    _ ->
		do_clear(Module),
		Acc
	end,
    load_compiled(Compiled,NewAcc);
load_compiled([],Acc) ->
    Acc.

%% Seed the local tables: items whose first element is an atom (the
%% module name) are clause-info entries, all others are #bump{} keyed
%% counters.
insert_initial_data([Item|Items]) when is_atom(element(1,Item)) ->
    ets:insert(?COVER_CLAUSE_TABLE, Item),
    insert_initial_data(Items);
insert_initial_data([Item|Items]) ->
    ets:insert(?COVER_TABLE, Item),
    insert_initial_data(Items);
insert_initial_data([]) ->
    ok.
%% For each module: clear its database entries and reload the original
%% (non-instrumented) code.
unload(Modules) ->
    lists:foreach(fun(Module) ->
                          do_clear(Module),
                          do_reload_original(Module)
                  end,
                  Modules).
%%%----------------------------------------------------------------------
%%% Internal functions
%%%----------------------------------------------------------------------
%% Start cover servers on the given nodes via rpc, monitor each one
%% that started, push the currently compiled modules to them, and add
%% them to the state.  Nodes that fail to start are logged and skipped.
do_start_nodes(Nodes, State) ->
    ThisNode = node(),
    StartedNodes =
	lists:foldl(
	  fun(Node,Acc) ->
		  case rpc:call(Node,cover,remote_start,[ThisNode]) of
		      {ok,_RPid} ->
			  erlang:monitor(process,{?SERVER,Node}),
			  [Node|Acc];
		      Error ->
			  io:format("Could not start cover on ~w: ~tp\n",
				    [Node,Error]),
			  Acc
		  end
	  end,
	  [],
	  Nodes),

    %% Prune modules that are no longer loaded before distributing.
    {_LoadedModules,Compiled} =
	get_compiled_still_loaded(State#main_state.nodes,
				  State#main_state.compiled),
    remote_load_compiled(StartedNodes,Compiled),

    State1 =
	State#main_state{nodes = State#main_state.nodes ++ StartedNodes,
			 compiled = Compiled},
    {StartedNodes, State1}.
%% Entry point executed (via rpc) on a remote node: spawn and monitor
%% the remote cover server, wait for its started-ack, and return
%% {ok,Pid}, {error,Info} on crash, or {error,{already_started,Pid}}.
remote_start(MainNode) ->
    case whereis(?SERVER) of
	undefined ->
	    Starter = self(),
	    Pid = spawn(fun() ->
				?SPAWN_DBG(remote_start,{MainNode}),
				init_remote(Starter,MainNode)
			end),
	    Ref = erlang:monitor(process,Pid),
	    Return =
		receive
		    {Pid,started} ->
			{ok,Pid};
		    {'DOWN', Ref, _Type, _Object, Info} ->
			{error,Info}
		end,
	    erlang:demonitor(Ref),
	    Return;
	Pid ->
	    {error,{already_started,Pid}}
    end.
%% Re-synchronize a previously lost node that has come back: unload
%% remote modules we no longer track, load those it is missing, and
%% move the node from the lost list back to the active list.  If its
%% server is dead, start it from scratch instead.
sync_compiled(Node,State) ->
    #main_state{compiled=Compiled0,nodes=Nodes,lost_nodes=Lost}=State,
    State1 =
	case remote_call(Node,{remote,get_compiled}) of
	    {error,node_dead} ->
		{_,S} = do_start_nodes([Node],State),
		S;
	    {error,_} ->
		State;
	    RemoteCompiled ->
		{_,Compiled} = get_compiled_still_loaded(Nodes,Compiled0),
		Unload = [UM || {UM,_}=U <- RemoteCompiled,
				false == lists:member(U,Compiled)],
		remote_unload([Node],Unload),
		Load = [L || L <- Compiled,
			     false == lists:member(L,RemoteCompiled)],
		remote_load_compiled([Node],Load),
		State#main_state{compiled=Compiled, nodes=[Node|Nodes]}
	end,
    State1#main_state{lost_nodes=Lost--[Node]}.
-define(MAX_MODS, 10).
%% Load the given compiled modules on all Nodes, batching at most
%% ?MAX_MODS modules per {remote,load_compiled,_} message to bound the
%% message size.
remote_load_compiled(Nodes,Compiled) ->
    remote_load_compiled(Nodes, Compiled, [], 0).

%% Nothing left and nothing buffered - done.
remote_load_compiled(_Nodes, [], [], _ModNum) ->
    ok;
%% Input exhausted or batch full - flush the buffered batch to every node.
remote_load_compiled(Nodes, Compiled, Acc, ModNum)
  when Compiled == []; ModNum == ?MAX_MODS ->
    lists:foreach(
      fun(Node) ->
	      remote_call(Node,{remote,load_compiled,Acc})
      end,
      Nodes),
    remote_load_compiled(Nodes, Compiled, [], 0);
%% Accumulate the next module's load data into the current batch.
remote_load_compiled(Nodes, [MF | Rest], Acc, ModNum) ->
    remote_load_compiled(
      Nodes, Rest, [get_data_for_remote_loading(MF) | Acc], ModNum + 1).
%% Binary is the beam code for the module and InitialTable is the initial
%% data to insert in the counter tables on the remote node.
%% Build the per-module payload for {remote,load_compiled,_}: the
%% instrumented beam binary plus the initial table contents (all bump
%% counters reset to 0, and the clause info).
get_data_for_remote_loading({Module,File}) ->
    [{Module,Binary}] = ets:lookup(?BINARY_TABLE,Module),
    %% NOTE(review): the line below was a bare comment-residue line in
    %% the corrupted source (it broke compilation); restored as a comment.
    %% ! The InitialTable list will be long if the module is big - what to do??
    InitialBumps = ets:select(?COVER_TABLE,ms(Module)),
    InitialClauses = ets:lookup(?COVER_CLAUSE_TABLE,Module),

    {Module,File,Binary,InitialBumps ++ InitialClauses}.
%% Match spec selecting every #bump{} counter belonging to Module,
%% returning each key with its counter value reset to 0 (used when
%% seeding remote nodes).
ms(Module) ->
    ets:fun2ms(fun({Key,_}) when Key#bump.module=:=Module ->
		       {Key,0}
	       end).
%% Ask every node to unload the given modules.
remote_unload(Nodes,UnloadedModules) ->
    lists:foreach(
      fun(Node) ->
	      remote_call(Node,{remote,unload,UnloadedModules})
      end,
      Nodes).

%% Ask every node to reset the counters for Module.
remote_reset(Module,Nodes) ->
    lists:foreach(
      fun(Node) ->
	      remote_call(Node,{remote,reset,Module})
      end,
      Nodes).

%% Collect counter data for Module ('_' = all) from all Nodes in
%% parallel; Stop=true also stops the remote servers afterwards.
remote_collect(Module,Nodes,Stop) ->
    pmap(fun(Node) ->
		 ?SPAWN_DBG(remote_collect,
			    {Module, Nodes, Stop}),
		 do_collection(Node, Module, Stop)
	 end,
	 Nodes).

%% Run one node's collection through a local collector process that
%% funnels the chunks into the collection table.
do_collection(Node, Module, Stop) ->
    CollectorPid = spawn(fun collector_proc/0),
    case remote_call(Node,{remote,collect,Module,CollectorPid, self()}) of
	{error,node_dead} ->
	    %% Nothing will arrive; release the collector.
	    CollectorPid ! done,
	    ok;
	ok when Stop ->
	    remote_call(Node,{remote,stop});
	ok ->
	    ok
    end.

%% Receive {chunk,Bumps} messages and fold them into the collection
%% table until 'done' arrives.
collector_proc() ->
    ?SPAWN_DBG(collector_proc, []),
    receive
	{chunk,Chunk} ->
	    insert_in_collection_table(Chunk),
	    collector_proc();
	done ->
	    ok
    end.
%% Fold a chunk of {BumpKey,Count} pairs into ?COLLECTION_TABLE,
%% summing counts for keys already present.
insert_in_collection_table([{Key,Val}|Chunk]) ->
    insert_in_collection_table(Key,Val),
    insert_in_collection_table(Chunk);
insert_in_collection_table([]) ->
    ok.

insert_in_collection_table(Key,Val) ->
    case ets:member(?COLLECTION_TABLE,Key) of
	true ->
	    ets:update_counter(?COLLECTION_TABLE,
			       Key,Val);
	false ->
	    %% Concurrent inserters race here: if someone else inserted
	    %% the key first, fall back to the update_counter path.
	    case ets:insert_new(?COLLECTION_TABLE,{Key,Val}) of
		false ->
		    insert_in_collection_table(Key,Val);
		_ ->
		    ok
	    end
    end.
%% Drop the local node from Nodes, accumulating the remaining nodes
%% (prepended, i.e. result is reversed relative to the input) onto Acc.
remove_myself(Nodes, Acc) ->
    Me = node(),
    lists:foldl(fun(Node, A) when Node =:= Me -> A;
                   (Node, A) -> [Node | A]
                end,
                Acc, Nodes).
%% Print a note if the analysis of Module includes imported data;
%% silent when nothing was imported.
analyse_info(_Module,[]) ->
    ok;
analyse_info(Module,Imported) ->
    imported_info("Analysis",Module,Imported).
%% Informational hooks for export.  Every clause of both arities is a
%% no-op returning ok, so each collapses to a single catch-all clause.
export_info(_Module, _Imported) ->
    ok.

export_info(_Imported) ->
    ok.
%% Collect the distinct import file names from an imported-modules
%% list, prepending new names onto Acc (so the result is in reverse
%% discovery order relative to Acc).
get_all_importfiles(Imported, Acc) ->
    lists:foldl(fun({_M, _F, ImportFiles}, A) ->
                        do_get_all_importfiles(ImportFiles, A)
                end,
                Acc, Imported).

%% Prepend each file not already present in the accumulator.
do_get_all_importfiles(ImportFiles, Acc) ->
    lists:foldl(fun(File, A) ->
                        case lists:member(File, A) of
                            true -> A;
                            false -> [File | A]
                        end
                end,
                Acc, ImportFiles).
%% Print which import files contributed data for Module, prefixed with
%% Text (e.g. "Analysis"); silent if Module has no imported data.
imported_info(Text,Module,Imported) ->
    case lists:keysearch(Module,1,Imported) of
	{value,{Module,_File,ImportFiles}} ->
	    io:format("~ts includes data from imported files\n~tp\n",
		      [Text,ImportFiles]);
	false ->
	    ok
    end.
%% Record that Module's data (source File) was imported from ImportFile.
%% Returns {ok,NewImported}, or dont_import if that exact file was
%% already imported for the module (a warning is printed in that case).
add_imported(Module, File, ImportFile, Imported) ->
    %% Absolute name so duplicate detection is path-independent.
    add_imported(Module, File, filename:absname(ImportFile), Imported, []).

add_imported(M, F1, ImportFile, [{M,_F2,ImportFiles}|Imported], Acc) ->
    case lists:member(ImportFile,ImportFiles) of
	true ->
	    io:fwrite("WARNING: Module ~w already imported from ~tp~n"
		      "Not importing again!~n",[M,ImportFile]),
	    dont_import;
	false ->
	    NewEntry = {M, F1, [ImportFile | ImportFiles]},
	    {ok, lists:reverse([NewEntry | Acc]) ++ Imported}
    end;
add_imported(M, F, ImportFile, [H|Imported], Acc) ->
    add_imported(M, F, ImportFile, Imported, [H|Acc]);
add_imported(M, F, ImportFile, [], Acc) ->
    {ok, lists:reverse([{M, F, [ImportFile]} | Acc])}.
%% Drop Module's entry from the imported list (warning the user which
%% import files are being discarded); no-op if nothing was imported.
remove_imported(Module,Imported) ->
    case lists:keysearch(Module,1,Imported) of
	{value,{Module,_,ImportFiles}} ->
	    io:fwrite("WARNING: Deleting data for module ~w imported from~n"
		      "~tp~n",[Module,ImportFiles]),
	    lists:keydelete(Module,1,Imported);
	false ->
	    Imported
    end.
%% Record {Module,File} in the compiled list: replace an existing entry
%% for Module in place, or append a new entry at the end.  This is
%% exactly lists:keystore/4 on the first tuple element.
add_compiled(Module, File, Compiled) ->
    lists:keystore(Module, 1, Compiled, {Module, File}).
%% Classify Module: {loaded,File} if it is cover-compiled and its
%% instrumented code is still loaded (?TAG), {imported,File,ImportFiles}
%% if only imported data exists.  Throws 'unloaded' when the compiled
%% module has been replaced, 'not_loaded' when it is unknown.
is_loaded(Module, State) ->
    case get_file(Module, State#main_state.compiled) of
	{ok, File} ->
	    case code:which(Module) of
		?TAG -> {loaded, File};
		_ -> throw(unloaded)
	    end;
	false ->
	    case get_file(Module,State#main_state.imported) of
		{ok,File,ImportFiles} ->
		    {imported, File, ImportFiles};
		false ->
		    throw(not_loaded)
	    end
    end.
%% Look up Module in either a compiled list ({Module,File} pairs) or an
%% imported list ({Module,File,ImportFiles} triples).  Returns
%% {ok,File}, {ok,File,ImportFiles}, or false when absent.
get_file(Module, Entries) ->
    case lists:keyfind(Module, 1, Entries) of
        {Module, File} ->
            {ok, File};
        {Module, File, ImportFiles} ->
            {ok, File, ImportFiles};
        false ->
            false
    end.
%% Resolve the .beam file to recompile from.  When the loaded code is
%% already cover-compiled (?TAG), fall back to the recorded source:
%% a recorded .beam can be reused, a recorded .erl cannot
%% ({error,no_beam}).  Otherwise the given file is used as-is.
get_beam_file(Module,?TAG,Compiled) ->
    {value,{Module,File}} = lists:keysearch(Module,1,Compiled),
    case filename:extension(File) of
	".erl" -> {error,no_beam};
	".beam" -> {ok,File}
    end;
get_beam_file(_Module,BeamFile,_Compiled) ->
    {ok,BeamFile}.
%% Extract the module names from a list of {Module,File} pairs.
get_modules(Compiled) ->
    [Module || {Module, _File} <- Compiled].
%% Remove the entries for Modules from the compiled list.  A module is
%% only stripped when it is the current head of Modules AND matches the
%% current compiled entry, so the two lists are effectively walked in
%% the same relative order (callers pass Modules derived from the same
%% compiled list, preserving order).
update_compiled([Module|Modules], [{Module,_File}|Compiled]) ->
    update_compiled(Modules, Compiled);
update_compiled(Modules, [H|Compiled]) ->
    [H|update_compiled(Modules, Compiled)];
update_compiled(_Modules, []) ->
    [].
%% Return the cover-compiled modules whose instrumented code is still
%% loaded, and a compiled list pruned of those that have been unloaded
%% (clearing their local data and unloading them remotely as well).
%% NOTE(review): two bare comment-residue lines that broke compilation
%% in the corrupted source are restored as comments below.
get_compiled_still_loaded(Nodes,Compiled0) ->
    %% Find all Cover compiled modules which are still loaded
    CompiledModules = get_modules(Compiled0),
    LoadedModules = lists:filter(fun(Module) ->
					 case code:which(Module) of
					     ?TAG -> true;
					     _ -> false
					 end
				 end,
				 CompiledModules),

    %% If some Cover compiled modules have been unloaded, update the database.
    UnloadedModules = CompiledModules--LoadedModules,
    Compiled =
	case UnloadedModules of
	    [] ->
		Compiled0;
	    _ ->
		lists:foreach(fun(Module) -> do_clear(Module) end,
			      UnloadedModules),
		remote_unload(Nodes,UnloadedModules),
		update_compiled(UnloadedModules, Compiled0)
	end,
    {LoadedModules,Compiled}.
%% Compile File from source (to a binary, with debug_info so abstract
%% code is available) and hand the result to do_compile_beam/3 for
%% instrumentation.
do_compile(File, UserOptions) ->
    Options = [debug_info,binary,report_errors,report_warnings] ++ UserOptions,
    case compile:file(File, Options) of
	{ok, Module, Binary} ->
	    do_compile_beam(Module,Binary,UserOptions);
	error ->
	    error
    end.
%% Cover-compile Module from Beam (file name or binary): clear old
%% data, munge the abstract code with counter bumps, compile and load
%% the instrumented result, and record the clause info and binary.
%% Returns {ok,Module}, error, or {error,no_abstract_code |
%% encrypted_abstract_code}.
do_compile_beam(Module,Beam,UserOptions) ->
    %% Clear any previous data for the module.
    do_clear(Module),
    case get_abstract_code(Module, Beam) of
	no_abstract_code=E ->
	    {error,E};
	encrypted_abstract_code=E ->
	    {error,E};
	{raw_abstract_v1,Code} ->
	    Forms0 = epp:interpret_file_attribute(Code),
	    {Forms,Vars} = transform(Forms0, Module),

	    %% Compile and load the result.
	    SourceInfo = get_source_info(Module, Beam),
	    {ok, Module, Binary} = compile:forms(Forms, SourceInfo ++ UserOptions),

	    case code:load_binary(Module, ?TAG, Binary) of
		{module, Module} ->
		    %% Store info about all function clauses in the database.
		    InitInfo = lists:reverse(Vars#vars.init_info),
		    ets:insert(?COVER_CLAUSE_TABLE, {Module, InitInfo}),

		    %% Keep the binary so it can be loaded on remote nodes.
		    ets:insert(?BINARY_TABLE, {Module, Binary}),
		    {ok, Module};
		_Error ->
		    do_clear(Module),
		    error
	    end;
	{_VSN,_Code} ->
	    %% Abstract code from an incompatible compiler version.
	    {error,no_abstract_code}
    end.
%% Fetch the abstract_code chunk from the beam.  A missing decryption
%% key is mapped to the atom encrypted_abstract_code; other failures
%% are passed through.
get_abstract_code(Module, Beam) ->
    case beam_lib:chunks(Beam, [abstract_code]) of
	{ok, {Module, [{abstract_code, AbstractCode}]}} ->
	    AbstractCode;
	{error,beam_lib,{key_missing_or_invalid,_,_}} ->
	    encrypted_abstract_code;
	Error -> Error
    end.
%% Return [{source,Path}] from the module's compile info, or [] when
%% the beam carries no source location.
get_source_info(Module, Beam) ->
    case lists:keyfind(source, 1, get_compile_info(Module, Beam)) of
        false ->
            [];
        {source, _} = SourceTuple ->
            [SourceTuple]
    end.
%% Return the module's original compile options (filtered by
%% filter_options/1, defined elsewhere in this file), or [] if no
%% options are recorded.
get_compile_options(Module, Beam) ->
    Compile = get_compile_info(Module, Beam),
    case lists:keyfind(options, 1, Compile) of
	{options, Options } -> filter_options(Options);
	false -> []
    end.

%% Fetch the compile_info chunk from the beam, or [] on any failure.
get_compile_info(Module, Beam) ->
    case beam_lib:chunks(Beam, [compile_info]) of
	{ok, {Module, [{compile_info, Compile}]}} ->
	    Compile;
	_ ->
	    []
    end.
%% Instrument the abstract code of Module: munge every form, starting
%% with the transformation switched 'on' (it is toggled by file
%% attributes, see munge/4).  Returns the munged forms and the
%% accumulated #vars{} bookkeeping.
transform(Code, Module) ->
    MainFile=find_main_filename(Code),
    Vars0 = #vars{module=Module},
    {ok,MungedForms,Vars} = transform_2(Code,[],Vars0,MainFile,on),
    {MungedForms,Vars}.
%% Helper function which returns the first found file-attribute, which
%% is taken as the name of the main erlang source file.
%% Return the file name from the first file attribute in the form list.
%% Fails with function_clause if no file attribute is present.
find_main_filename([Form | Rest]) ->
    case Form of
        {attribute, _, file, {MainFile, _}} ->
            MainFile;
        _ ->
            find_main_filename(Rest)
    end.
%% Walk the form list: expand short-circuit booleans (expand/1), then
%% munge each form.  munge/4 may return 'ignore' to drop a form, and
%% returns the (possibly toggled) on/off transformation switch.
transform_2([Form0|Forms],MungedForms,Vars,MainFile,Switch) ->
    Form = expand(Form0),
    case munge(Form,Vars,MainFile,Switch) of
	ignore ->
	    transform_2(Forms,MungedForms,Vars,MainFile,Switch);
	{MungedForm,Vars2,NewSwitch} ->
	    transform_2(Forms,[MungedForm|MungedForms],Vars2,MainFile,NewSwitch)
    end;
transform_2([],MungedForms,Vars,_,_) ->
    {ok, lists:reverse(MungedForms), Vars}.
%% Rewrite 'andalso'/'orelse' into explicit case expressions (via
%% bool_switch/5) so each operand can get its own line bump.  A
%% pre-pass collects every variable name in the expression so fresh
%% auxiliary variables can be chosen without capture.
expand(Expr) ->
    AllVars = sets:from_list(ordsets:to_list(vars([], Expr))),
    {Expr1,_} = expand(Expr, AllVars, 1),
    Expr1.

%% expand(Form, AllVars, N) -> {ExpandedForm, N2}
%% N numbers the auxiliary variables introduced for bool_switch/5.
expand({clause,Line,Pattern,Guards,Body}, Vs, N) ->
    %% Patterns and guards are left untouched; only the body is expanded.
    {ExpandedBody,N2} = expand(Body, Vs, N),
    {{clause,Line,Pattern,Guards,ExpandedBody},N2};
expand({op,_Line,'andalso',ExprL,ExprR}, Vs, N) ->
    {ExpandedExprL,N2} = expand(ExprL, Vs, N),
    {ExpandedExprR,N3} = expand(ExprR, Vs, N2),
    LineL = element(2, ExpandedExprL),
    %% L andalso R  ==>  case L of true -> R; false -> false; _ -> error
    {bool_switch(ExpandedExprL,
                 ExpandedExprR,
                 {atom,LineL,false},
                 Vs, N3),
     N3 + 1};
expand({op,_Line,'orelse',ExprL,ExprR}, Vs, N) ->
    {ExpandedExprL,N2} = expand(ExprL, Vs, N),
    {ExpandedExprR,N3} = expand(ExprR, Vs, N2),
    LineL = element(2, ExpandedExprL),
    %% L orelse R  ==>  case L of true -> true; false -> R; _ -> error
    {bool_switch(ExpandedExprL,
                 {atom,LineL,true},
                 ExpandedExprR,
                 Vs, N3),
     N3 + 1};
expand(T, Vs, N) when is_tuple(T) ->
    %% Generic traversal of any other AST node.
    {TL,N2} = expand(tuple_to_list(T), Vs, N),
    {list_to_tuple(TL),N2};
expand([E|Es], Vs, N) ->
    {E2,N2} = expand(E, Vs, N),
    {Es2,N3} = expand(Es, Vs, N2),
    {[E2|Es2],N3};
expand(T, _Vs, N) ->
    {T,N}.
%% Collect every named variable ('_' excluded) occurring anywhere in an
%% abstract-format term, prepending names onto the accumulator.
vars(Acc, {var, _, Name}) when Name =/= '_' ->
    [Name | Acc];
vars(Acc, Tuple) when is_tuple(Tuple) ->
    vars(Acc, tuple_to_list(Tuple));
vars(Acc, List) when is_list(List) ->
    lists:foldl(fun(Elem, A) -> vars(A, Elem) end, Acc, List);
vars(Acc, _Other) ->
    Acc.
%% Build the case expression that replaces an andalso/orelse: evaluate
%% E, take T on true, F on false, and for any other value raise
%% {badarg,Value} via erlang:error/1.  AuxVar is a fresh variable that
%% does not clash with AllVars (see aux_var/2).
bool_switch(E, T, F, AllVars, AuxVarN) ->
    Line = element(2, E),
    AuxVar = {var,Line,aux_var(AllVars, AuxVarN)},
    {'case',Line,E,
     [{clause,Line,[{atom,Line,true}],[],[T]},
      {clause,Line,[{atom,Line,false}],[],[F]},
      {clause,Line,[AuxVar],[],
       [{call,Line,
	 {remote,Line,{atom,Line,erlang},{atom,Line,error}},
	 [{tuple,Line,[{atom,Line,badarg},AuxVar]}]}]}]}.
%% Pick a fresh variable name of the form '_N' that does not collide
%% with any name in Vars (a set), bumping N until one is free.
aux_var(Vars, N) ->
    Candidate = list_to_atom("_" ++ integer_to_list(N)),
    case sets:is_element(Candidate, Vars) of
        false -> Candidate;
        true -> aux_var(Vars, N + 1)
    end.
%% Munge the abstract code as returned from the abstract_code
%% chunk in the BEAM file, as described in absform(3).
%% munge(Form, Vars, MainFile, Switch) -> {MungedForm,Vars2,Switch2} | ignore
%% Instrument one form.  Function bodies are munged only while the
%% switch is 'on'; file attributes toggle the switch so that included
%% files are not instrumented.
%% NOTE(review): the bodies of the two file-attribute clauses and the
%% final catch-all clause head were lost in the corrupted source; they
%% are reconstructed here from the Switch protocol used by transform_2/5.
munge({function,Line,Function,Arity,Clauses},Vars,_MainFile,on) ->
    Vars2 = Vars#vars{function=Function,
		      arity=Arity,
		      clause=1,
		      lines=[],
		      no_bump_lines=[],
		      depth=1},
    {MungedClauses, Vars3} = munge_clauses(Clauses, Vars2),
    {{function,Line,Function,Arity,MungedClauses},Vars3,on};
munge(Form={attribute,_,file,{MainFile,_}},Vars,MainFile,_Switch) ->
    {Form,Vars,on};                     % Switch on transformation!
munge(Form={attribute,_,file,{_InclFile,_}},Vars,_MainFile,_Switch) ->
    {Form,Vars,off};                    % Switch off transformation!
munge({attribute,_,compile,{parse_transform,_}},_Vars,_MainFile,_Switch) ->
    %% Don't want to run parse transforms more than once.
    ignore;
munge(Form,Vars,_MainFile,Switch) ->    % Other forms pass through untouched.
    {Form,Vars,Switch}.
%% Munge the clauses of a function (depth 1) or of a nested construct
%% (depth 2), threading the set of already-bumped lines through.
%% NOTE(review): the '1 ->' and '2 ->' branch heads and the fifth
%% element of ClauseInfo were lost in the corrupted source; they are
%% reconstructed from the depth field set in munge/4 and munge_body/2.
munge_clauses(Clauses, Vars) ->
    munge_clauses(Clauses, Vars, Vars#vars.lines, []).

munge_clauses([Clause|Clauses], Vars, Lines, MClauses) ->
    {clause,Line,Pattern,Guards,Body} = Clause,
    {MungedGuards, _Vars} = munge_exprs(Guards, Vars#vars{is_guard=true},[]),

    case Vars#vars.depth of
	1 -> % function clause
	    {MungedBody, Vars2} = munge_body(Body, Vars#vars{depth=2}),
	    ClauseInfo = {Vars2#vars.module,
			  Vars2#vars.function,
			  Vars2#vars.arity,
			  Vars2#vars.clause,
			  length(Vars2#vars.lines)},
	    InitInfo = [ClauseInfo | Vars2#vars.init_info],
	    Vars3 = Vars2#vars{init_info=InitInfo,
			       clause=(Vars2#vars.clause)+1,
			       lines=[],
			       no_bump_lines=[],
			       depth=1},
	    NewBumps = Vars2#vars.lines,
	    NewLines = NewBumps ++ Lines,
	    munge_clauses(Clauses, Vars3, NewLines,
			  [{clause,Line,Pattern,MungedGuards,MungedBody}|
			   MClauses]);
	2 -> % clause of a nested construct (case/receive/if/try)
	    Lines0 = Vars#vars.lines,
	    {MungedBody, Vars2} = munge_body(Body, Vars),
	    NewBumps = new_bumps(Vars2, Vars),
	    NewLines = NewBumps ++ Lines,
	    munge_clauses(Clauses, Vars2#vars{lines=Lines0},
			  NewLines,
			  [{clause,Line,Pattern,MungedGuards,MungedBody}|
			   MClauses])
    end;
munge_clauses([], Vars, Lines, MungedClauses) ->
    {lists:reverse(MungedClauses), Vars#vars{lines = Lines}}.
munge_body(Expr, Vars) ->
munge_body(Expr, Vars, [], []).
munge_body([Expr|Body], Vars, MungedBody, LastExprBumpLines) ->
Line = element(2, Expr),
Lines = Vars#vars.lines,
case lists:member(Line,Lines) of
{MungedExpr, Vars2} = munge_expr(Expr, Vars),
NewBumps = new_bumps(Vars2, Vars),
NoBumpLines = [Line|Vars#vars.no_bump_lines],
Vars3 = Vars2#vars{no_bump_lines = NoBumpLines},
MungedBody1 =
maybe_fix_last_expr(MungedBody, Vars3, LastExprBumpLines),
MungedExprs1 = [MungedExpr|MungedBody1],
munge_body(Body, Vars3, MungedExprs1, NewBumps);
false ->
ets:insert(?COVER_TABLE, {#bump{module = Vars#vars.module,
function = Vars#vars.function,
arity = Vars#vars.arity,
clause = Vars#vars.clause,
line = Line},
0}),
Bump = bump_call(Vars, Line),
%%                     {atom,0,Vars#vars.function},
%%                     {integer,0,Vars#vars.arity},
%%                     {integer,0,Vars#vars.clause},
Lines2 = [Line|Lines],
{MungedExpr, Vars2} = munge_expr(Expr, Vars#vars{lines=Lines2}),
NewBumps = new_bumps(Vars2, Vars),
NoBumpLines = subtract(Vars2#vars.no_bump_lines, NewBumps),
Vars3 = Vars2#vars{no_bump_lines = NoBumpLines},
MungedBody1 =
maybe_fix_last_expr(MungedBody, Vars3, LastExprBumpLines),
MungedExprs1 = [MungedExpr,Bump|MungedBody1],
munge_body(Body, Vars3, MungedExprs1, NewBumps)
end;
munge_body([], Vars, MungedBody, _LastExprBumpLines) ->
{lists:reverse(MungedBody), Vars}.
%% 3:     case X of
%% 5:         2 -> b; 3 -> c end, F()
%% now traversed again ("fixed"), this time adding bumps of line 5
%% where appropriate, in this case when X matches 1.
%% 8:              2 -> bar() end of a -> 1;
%% 9:         b -> 2 end.
%% If X matches 1 and foo() evaluates to a then line 8 should be
%% bumped, but not if foo() evaluates to b. In other words, line 8
%% cannot be bumped after "foo()" on line 7, so one has to bump line
%% 8 before "begin 1 end". But if X matches 2 and bar evaluates to a
%% then line 8 would be bumped twice (there has to be a bump before
%% "bar()". It is like one would have to have two copies of the inner
%% clauses, one for each outer clause. Maybe the munging should be
%% Insert an extra bump for a line shared between the previous
%% expression and the lines that deliberately got no bump, but only
%% when exactly one such line exists (see the example comment above).
maybe_fix_last_expr(MungedExprs, Vars, LastExprBumpLines) ->
    case last_expr_needs_fixing(Vars, LastExprBumpLines) of
        {yes, Line} ->
            fix_last_expr(MungedExprs, Line, Vars);
        no ->
            MungedExprs
    end.

%% A fix is needed when the not-yet-bumped lines and the bumps of the
%% last expression have exactly one line in common.
last_expr_needs_fixing(Vars, LastExprBumpLines) ->
    case common_elems(Vars#vars.no_bump_lines, LastExprBumpLines) of
        [Line] -> {yes, Line};
        _ -> no
    end.

%% MungedExprs is in reverse order, so its head is the last (most
%% recently munged) expression; patch a bump for Line into it.
fix_last_expr([MungedExpr|MungedExprs], Line, Vars) ->
    Bump = bump_call(Vars, Line),
    [fix_expr(MungedExpr, Line, Bump)|MungedExprs].
%% Walk a munged expression and, for every clause-bearing construct
%% ('if'/'case'/'receive'/'try'), let fix_clauses/3 add the extra bump
%% for Line where needed. Other tuples and lists are traversed
%% generically; all other leaves are returned unchanged.
fix_expr({'if',L,Clauses}, Line, Bump) ->
    FixedClauses = fix_clauses(Clauses, Line, Bump),
    {'if',L,FixedClauses};
fix_expr({'case',L,Expr,Clauses}, Line, Bump) ->
    FixedExpr = fix_expr(Expr, Line, Bump),
    FixedClauses = fix_clauses(Clauses, Line, Bump),
    {'case',L,FixedExpr,FixedClauses};
fix_expr({'receive',L,Clauses}, Line, Bump) ->
    FixedClauses = fix_clauses(Clauses, Line, Bump),
    {'receive',L,FixedClauses};
fix_expr({'receive',L,Clauses,Expr,Body}, Line, Bump) ->
    FixedClauses = fix_clauses(Clauses, Line, Bump),
    FixedExpr = fix_expr(Expr, Line, Bump),
    FixedBody = fix_expr(Body, Line, Bump),
    {'receive',L,FixedClauses,FixedExpr,FixedBody};
fix_expr({'try',L,Exprs,Clauses,CatchClauses,After}, Line, Bump) ->
    FixedExprs = fix_expr(Exprs, Line, Bump),
    FixedClauses = fix_clauses(Clauses, Line, Bump),
    FixedCatchClauses = fix_clauses(CatchClauses, Line, Bump),
    FixedAfter = fix_expr(After, Line, Bump),
    {'try',L,FixedExprs,FixedClauses,FixedCatchClauses,FixedAfter};
fix_expr([E | Es], Line, Bump) ->
    [fix_expr(E, Line, Bump) | fix_expr(Es, Line, Bump)];
fix_expr(T, Line, Bump) when is_tuple(T) ->
    %% Generic traversal for any other abstract-format node.
    list_to_tuple(fix_expr(tuple_to_list(T), Line, Bump));
fix_expr(E, _Line, _Bump) ->
    E.
%% Only fix the clause list if the last clause bumps Line (otherwise
%% the bump was not merged into the end of this construct).
fix_clauses(Cs, Line, Bump) ->
    case bumps_line(lists:last(Cs), Line) of
        true ->
            fix_cls(Cs, Line, Bump);
        false ->
            Cs
    end.

%% From the first clause that already bumps Line onwards, recurse into
%% the clauses with fix_expr/3. Each earlier clause instead gets an
%% explicit bump appended: its last expression is bound to a fresh
%% variable, the bump call is executed, and the variable is returned so
%% the clause value is preserved.
fix_cls([], _Line, _Bump) ->
    [];
fix_cls([Cl | Cls], Line, Bump) ->
    case bumps_line(Cl, Line) of
        true ->
            [fix_expr(C, Line, Bump) || C <- [Cl | Cls]];
        false ->
            {clause,CL,P,G,Body} = Cl,
            %% The space in the name makes it unforgeable as a source
            %% variable; Line keeps it unique per fixed line.
            UniqueVarName = list_to_atom(lists:concat(["$cover$ ",Line])),
            V = {var,0,UniqueVarName},
            [Last|Rest] = lists:reverse(Body),
            Body1 = lists:reverse(Rest, [{match,0,V,Last},Bump,V]),
            [{clause,CL,P,G,Body1} | fix_cls(Cls, Line, Bump)]
    end.
%% True if the munged expression E contains a bump call for line L.
%% bumps_line1/2 throws 'true' on the first hit so the traversal stops
%% early; falling through the whole tree yields 'false'.
bumps_line(E, L) ->
    try bumps_line1(E, L) catch true -> true end.

bumps_line1({call,0,{remote,0,{atom,0,ets},{atom,0,update_counter}},
             [{atom,0,?COVER_TABLE},{tuple,0,[_,_,_,_,_,{integer,0,Line}]},_]},
            Line) ->
    %% Matches exactly the abstract code produced by bump_call/2.
    throw(true);
bumps_line1([E | Es], Line) ->
    bumps_line1(E, Line),
    bumps_line1(Es, Line);
bumps_line1(T, Line) when is_tuple(T) ->
    bumps_line1(tuple_to_list(T), Line);
bumps_line1(_, _) ->
    false.
%% Abstract code for the instrumentation call
%%   ets:update_counter(?COVER_TABLE, #bump{...,line=Line}, 1)
%% which increments the execution counter for Line at runtime.
bump_call(Vars, Line) ->
    {call,0,{remote,0,{atom,0,ets},{atom,0,update_counter}},
     [{atom,0,?COVER_TABLE},
      {tuple,0,[{atom,0,?BUMP_REC_NAME},
                {atom,0,Vars#vars.module},
                {atom,0,Vars#vars.function},
                {integer,0,Vars#vars.arity},
                {integer,0,Vars#vars.clause},
                {integer,0,Line}]},
      {integer,0,1}]}.
munge_expr({match,Line,ExprL,ExprR}, Vars) ->
{MungedExprL, Vars2} = munge_expr(ExprL, Vars),
{MungedExprR, Vars3} = munge_expr(ExprR, Vars2),
{{match,Line,MungedExprL,MungedExprR}, Vars3};
munge_expr({tuple,Line,Exprs}, Vars) ->
{MungedExprs, Vars2} = munge_exprs(Exprs, Vars, []),
{{tuple,Line,MungedExprs}, Vars2};
munge_expr({record,Line,Name,Exprs}, Vars) ->
{MungedExprFields, Vars2} = munge_exprs(Exprs, Vars, []),
{{record,Line,Name,MungedExprFields}, Vars2};
munge_expr({record,Line,Arg,Name,Exprs}, Vars) ->
{MungedArg, Vars2} = munge_expr(Arg, Vars),
{MungedExprFields, Vars3} = munge_exprs(Exprs, Vars2, []),
{{record,Line,MungedArg,Name,MungedExprFields}, Vars3};
munge_expr({record_field,Line,ExprL,ExprR}, Vars) ->
{MungedExprR, Vars2} = munge_expr(ExprR, Vars),
{{record_field,Line,ExprL,MungedExprR}, Vars2};
munge_expr({map,Line,Fields}, Vars) ->
    %% EEP 43
{MungedFields, Vars2} = munge_exprs(Fields, Vars, []),
{{map,Line,MungedFields}, Vars2};
munge_expr({map,Line,Arg,Fields}, Vars) ->
    %% EEP 43
{MungedArg, Vars2} = munge_expr(Arg, Vars),
{MungedFields, Vars3} = munge_exprs(Fields, Vars2, []),
{{map,Line,MungedArg,MungedFields}, Vars3};
munge_expr({map_field_assoc,Line,Name,Value}, Vars) ->
    %% EEP 43
{MungedName, Vars2} = munge_expr(Name, Vars),
{MungedValue, Vars3} = munge_expr(Value, Vars2),
{{map_field_assoc,Line,MungedName,MungedValue}, Vars3};
munge_expr({map_field_exact,Line,Name,Value}, Vars) ->
    %% EEP 43
{MungedName, Vars2} = munge_expr(Name, Vars),
{MungedValue, Vars3} = munge_expr(Value, Vars2),
{{map_field_exact,Line,MungedName,MungedValue}, Vars3};
munge_expr({cons,Line,ExprH,ExprT}, Vars) ->
{MungedExprH, Vars2} = munge_expr(ExprH, Vars),
{MungedExprT, Vars3} = munge_expr(ExprT, Vars2),
{{cons,Line,MungedExprH,MungedExprT}, Vars3};
munge_expr({op,Line,Op,ExprL,ExprR}, Vars) ->
{MungedExprL, Vars2} = munge_expr(ExprL, Vars),
{MungedExprR, Vars3} = munge_expr(ExprR, Vars2),
{{op,Line,Op,MungedExprL,MungedExprR}, Vars3};
munge_expr({op,Line,Op,Expr}, Vars) ->
{MungedExpr, Vars2} = munge_expr(Expr, Vars),
{{op,Line,Op,MungedExpr}, Vars2};
munge_expr({'catch',Line,Expr}, Vars) ->
{MungedExpr, Vars2} = munge_expr(Expr, Vars),
{{'catch',Line,MungedExpr}, Vars2};
munge_expr({call,Line1,{remote,Line2,ExprM,ExprF},Exprs},
Vars) ->
{MungedExprM, Vars2} = munge_expr(ExprM, Vars),
{MungedExprF, Vars3} = munge_expr(ExprF, Vars2),
{MungedExprs, Vars4} = munge_exprs(Exprs, Vars3, []),
{{call,Line1,{remote,Line2,MungedExprM,MungedExprF},MungedExprs}, Vars4};
munge_expr({call,Line,Expr,Exprs}, Vars) ->
{MungedExpr, Vars2} = munge_expr(Expr, Vars),
{MungedExprs, Vars3} = munge_exprs(Exprs, Vars2, []),
{{call,Line,MungedExpr,MungedExprs}, Vars3};
munge_expr({lc,Line,Expr,Qs}, Vars) ->
{MungedExpr, Vars2} = munge_expr(?BLOCK1(Expr), Vars),
{MungedQs, Vars3} = munge_qualifiers(Qs, Vars2),
{{lc,Line,MungedExpr,MungedQs}, Vars3};
munge_expr({bc,Line,Expr,Qs}, Vars) ->
{bin,BLine,[{bin_element,EL,Val,Sz,TSL}|Es]} = Expr,
Expr2 = {bin,BLine,[{bin_element,EL,?BLOCK1(Val),Sz,TSL}|Es]},
{MungedExpr,Vars2} = munge_expr(Expr2, Vars),
{MungedQs, Vars3} = munge_qualifiers(Qs, Vars2),
{{bc,Line,MungedExpr,MungedQs}, Vars3};
munge_expr({block,Line,Body}, Vars) ->
{MungedBody, Vars2} = munge_body(Body, Vars),
{{block,Line,MungedBody}, Vars2};
munge_expr({'if',Line,Clauses}, Vars) ->
{MungedClauses,Vars2} = munge_clauses(Clauses, Vars),
{{'if',Line,MungedClauses}, Vars2};
munge_expr({'case',Line,Expr,Clauses}, Vars) ->
{MungedExpr,Vars2} = munge_expr(Expr, Vars),
{MungedClauses,Vars3} = munge_clauses(Clauses, Vars2),
{{'case',Line,MungedExpr,MungedClauses}, Vars3};
munge_expr({'receive',Line,Clauses}, Vars) ->
{MungedClauses,Vars2} = munge_clauses(Clauses, Vars),
{{'receive',Line,MungedClauses}, Vars2};
munge_expr({'receive',Line,Clauses,Expr,Body}, Vars) ->
{MungedExpr, Vars1} = munge_expr(Expr, Vars),
{MungedClauses,Vars2} = munge_clauses(Clauses, Vars1),
{MungedBody,Vars3} =
munge_body(Body, Vars2#vars{lines = Vars1#vars.lines}),
Vars4 = Vars3#vars{lines = Vars2#vars.lines ++ new_bumps(Vars3, Vars2)},
{{'receive',Line,MungedClauses,MungedExpr,MungedBody}, Vars4};
munge_expr({'try',Line,Body,Clauses,CatchClauses,After}, Vars) ->
{MungedBody, Vars1} = munge_body(Body, Vars),
{MungedClauses, Vars2} = munge_clauses(Clauses, Vars1),
{MungedCatchClauses, Vars3} = munge_clauses(CatchClauses, Vars2),
{MungedAfter, Vars4} = munge_body(After, Vars3),
{{'try',Line,MungedBody,MungedClauses,MungedCatchClauses,MungedAfter},
Vars4};
munge_expr({'fun',Line,{clauses,Clauses}}, Vars) ->
{MungedClauses,Vars2}=munge_clauses(Clauses, Vars),
{{'fun',Line,{clauses,MungedClauses}}, Vars2};
munge_expr({named_fun,Line,Name,Clauses}, Vars) ->
{MungedClauses,Vars2}=munge_clauses(Clauses, Vars),
{{named_fun,Line,Name,MungedClauses}, Vars2};
munge_expr({bin,Line,BinElements}, Vars) ->
{MungedBinElements,Vars2} = munge_exprs(BinElements, Vars, []),
{{bin,Line,MungedBinElements}, Vars2};
munge_expr({bin_element,Line,Value,Size,TypeSpecifierList}, Vars) ->
{MungedValue,Vars2} = munge_expr(Value, Vars),
{MungedSize,Vars3} = munge_expr(Size, Vars2),
{{bin_element,Line,MungedValue,MungedSize,TypeSpecifierList},Vars3};
{Form, Vars}.
%% Munge a list of expressions left to right, threading Vars. In guard
%% context an element may itself be a list (a guard sequence); it is
%% munged recursively and the resulting variable state is discarded,
%% so guards never accumulate bump lines.
munge_exprs([Expr|Exprs], Vars, MungedExprs) when Vars#vars.is_guard=:=true,
                                                  is_list(Expr) ->
    {MungedExpr, _Vars} = munge_exprs(Expr, Vars, []),
    munge_exprs(Exprs, Vars, [MungedExpr|MungedExprs]);
munge_exprs([Expr|Exprs], Vars, MungedExprs) ->
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    munge_exprs(Exprs, Vars2, [MungedExpr|MungedExprs]);
munge_exprs([], Vars, MungedExprs) ->
    {lists:reverse(MungedExprs), Vars}.
%% Munge list/binary comprehension qualifiers.
munge_qualifiers(Qualifiers, Vars) ->
    munge_qs(Qualifiers, Vars, []).

%% For each qualifier the expression part is munged; munge_qs1/6 then
%% decides whether an extra bump must be inserted after it.
munge_qs([{generate,Line,Pattern,Expr}|Qs], Vars, MQs) ->
    L = element(2, Expr),
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    munge_qs1(Qs, L, {generate,Line,Pattern,MungedExpr}, Vars, Vars2, MQs);
munge_qs([{b_generate,Line,Pattern,Expr}|Qs], Vars, MQs) ->
    L = element(2, Expr),
    {MExpr, Vars2} = munge_expr(Expr, Vars),
    munge_qs1(Qs, L, {b_generate,Line,Pattern,MExpr}, Vars, Vars2, MQs);
munge_qs([Expr|Qs], Vars, MQs) ->
    %% A plain expression qualifier (a filter).
    L = element(2, Expr),
    {MungedExpr, Vars2} = munge_expr(Expr, Vars),
    munge_qs1(Qs, L, MungedExpr, Vars, Vars2, MQs);
munge_qs([], Vars, MQs) ->
    {lists:reverse(MQs), Vars}.

%% If munging the qualifier introduced exactly one new bump it already
%% covers the qualifier's line. Otherwise insert a munged always-true
%% filter after it so the line gets its own bump.
munge_qs1(Qs, Line, NQ, Vars, Vars2, MQs) ->
    case new_bumps(Vars2, Vars) of
        [_] ->
            munge_qs(Qs, Vars2, [NQ | MQs]);
        _ ->
            {MungedTrue, Vars3} = munge_expr(?BLOCK({atom,Line,true}), Vars2),
            munge_qs(Qs, Vars3, [NQ, MungedTrue | MQs])
    end.
%% Lines bumped in the New state that were not bumped in the Old one.
new_bumps(#vars{lines = New}, #vars{lines = Old}) ->
    subtract(New, Old).
%% Return the elements of L1 that do not occur in L2 (duplicates in L1
%% are kept if absent from L2).
subtract(L1, L2) ->
    lists:filter(fun(Elem) -> not lists:member(Elem, L2) end, L1).
%% Return the elements of L1 that also occur in L2.
common_elems(L1, L2) ->
    lists:filter(fun(Elem) -> lists:member(Elem, L2) end, L1).
%% Collect data for all modules: move local data into the collection
%% tables (in parallel, one worker per module) and then fetch data
%% from all remote Nodes.
collect(Nodes) ->
    AllClauses = ets:tab2list(?COVER_CLAUSE_TABLE),
    pmap(fun move_modules/1,AllClauses),
    remote_collect('_',Nodes,false).
%% Collect data for one module.
%% Collect data for one module, locally and on the given Nodes.
collect(Module,Clauses,Nodes) ->
    move_modules({Module,Clauses}),
    remote_collect(Module,Nodes,false).
%% When analysing, the data from the local ?COVER_TABLE is moved to the
%% Move one module's data from the live ?COVER_TABLE into the
%% collection tables, zeroing the live counters on the way so counting
%% can continue from scratch.
move_modules({Module,Clauses}) ->
    ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}),
    move_clauses(Clauses).

move_clauses([{M,F,A,C,_L}|Clauses]) ->
    Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'},
    Bumps = ets:match_object(?COVER_TABLE,Pattern),
    lists:foreach(fun({Key,Val}) ->
                          %% Reset live counter, accumulate into the
                          %% collection table.
                          ets:insert(?COVER_TABLE, {Key,0}),
                          insert_in_collection_table(Key,Val)
                  end,
                  Bumps),
    move_clauses(Clauses);
move_clauses([]) ->
    ok.
%% Given a .beam file, find the .erl file. Look first in same directory as
find_source(Module, File0) ->
try
Root = filename:rootname(File0, ".beam"),
        %% Look for .erl in pwd.
File = Root ++ ".erl",
throw_file(File),
BeamDir = filename:dirname(File),
Base = filename:basename(File),
throw_file(filename:join([BeamDir, "..", "src", Base])),
Info = lists:keyfind(source, 1, Module:module_info(compile)),
{source, SrcFile} = Info,
{beam, File0}
catch
Path -> Path
end.
%% Throw Path (caught by the enclosing search in find_source/2) when it
%% names an existing file; otherwise return false. A Path of false is
%% passed through as false.
throw_file(Path) ->
    case Path =/= false andalso filelib:is_file(Path) of
        true -> throw(Path);
        false -> false
    end.
%% Eg. splice("/path/to/app-1.0/ebin", "/compiled/path/to/app/src/x/y.erl")
splice(BeamDir, SrcFile) ->
case lists:splitwith(fun(C) -> C /= "src" end, revsplit(SrcFile)) of
filename:join([BeamDir, "..", "src" | lists:reverse(T)]);
false
end.
%% Split Path into its components, last component first.
revsplit(Path) ->
    lists:foldl(fun(Part, Acc) -> [Part | Acc] end, [], filename:split(Path)).
%% Analyse one module in a worker process. For a loaded module fresh
%% data is first collected from all nodes; for an imported/unloaded
%% module the already collected clause info is used. The result is
%% sent back to From.
do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) ->
    analyse_info(Module,State#main_state.imported),
    C = case Loaded of
            {loaded, _File} ->
                [{Module,Clauses}] =
                    ets:lookup(?COVER_CLAUSE_TABLE,Module),
                collect(Module,Clauses,State#main_state.nodes),
                Clauses;
            _ ->
                [{Module,Clauses}] =
                    ets:lookup(?COLLECTION_CLAUSE_TABLE,Module),
                Clauses
        end,
    R = do_analyse(Module, Analysis, Level, C),
    reply(From, R).
%% Compute the analysis result from the collection table.
%%   line:     per executable line; coverage yields {Covered,NotCovered}
%%             as {1,0} (count > 0) or {0,1} (count = 0); calls yields
%%             the raw count.
%%   clause:   per clause, with Ls the number of lines in the clause.
%%   function: clause results merged per function.
%%   module:   function results merged into one value for the module.
do_analyse(Module, Analysis, line, _Clauses) ->
    Pattern = {#bump{module=Module},'_'},
    Bumps = ets:match_object(?COLLECTION_TABLE, Pattern),
    Fun = case Analysis of
              coverage ->
                  fun({#bump{line=L}, 0}) ->
                          {{Module,L}, {0,1}};
                     ({#bump{line=L}, _N}) ->
                          {{Module,L}, {1,0}}
                  end;
              calls ->
                  fun({#bump{line=L}, N}) ->
                          {{Module,L}, N}
                  end
          end,
    Answer = lists:keysort(1, lists:map(Fun, Bumps)),
    {ok, Answer};
do_analyse(_Module, Analysis, clause, Clauses) ->
    Fun = case Analysis of
              coverage ->
                  fun({M,F,A,C,Ls}) ->
                          %% Lines still at count 0 are the uncovered ones.
                          Pattern = {#bump{module=M,function=F,arity=A,
                                           clause=C},0},
                          Bumps = ets:match_object(?COLLECTION_TABLE, Pattern),
                          NotCov = length(Bumps),
                          {{M,F,A,C}, {Ls-NotCov, NotCov}}
                  end;
              calls ->
                  fun({M,F,A,C,_Ls}) ->
                          Pattern = {#bump{module=M,function=F,arity=A,
                                           clause=C},'_'},
                          Bumps = ets:match_object(?COLLECTION_TABLE, Pattern),
                          %% The clause's first line carries the call count.
                          {_Bump, Calls} = hd(lists:keysort(1, Bumps)),
                          {{M,F,A,C}, Calls}
                  end
          end,
    Answer = lists:map(Fun, Clauses),
    {ok, Answer};
do_analyse(Module, Analysis, function, Clauses) ->
    {ok, ClauseResult} = do_analyse(Module, Analysis, clause, Clauses),
    Result = merge_clauses(ClauseResult, merge_fun(Analysis)),
    {ok, Result};
do_analyse(Module, Analysis, module, Clauses) ->
    {ok, FunctionResult} = do_analyse(Module, Analysis, function, Clauses),
    Result = merge_functions(FunctionResult, merge_fun(Analysis)),
    {ok, {Module,Result}}.
%% Combining function for analysis results: coverage pairs are added
%% componentwise; call counts are simply summed.
merge_fun(coverage) ->
    fun({CovA, NotCovA}, {CovB, NotCovB}) ->
            {CovA + CovB, NotCovA + NotCovB}
    end;
merge_fun(calls) ->
    fun(CallsA, CallsB) -> CallsA + CallsB end.
%% Collapse per-clause results into per-function results: adjacent
%% entries with the same {M,F,A} are merged with MFun; when the
%% function changes the accumulated result is emitted. Only adjacent
%% entries are merged, i.e. clauses of a function are assumed to be
%% consecutive in the input.
merge_clauses(Clauses, MFun) -> merge_clauses(Clauses, MFun, []).
merge_clauses([{{M,F,A,_C1},R1},{{M,F,A,C2},R2}|Clauses], MFun, Result) ->
    merge_clauses([{{M,F,A,C2},MFun(R1,R2)}|Clauses], MFun, Result);
merge_clauses([{{M,F,A,_C},R}|Clauses], MFun, Result) ->
    merge_clauses(Clauses, MFun, [{{M,F,A},R}|Result]);
merge_clauses([], _Fun, Result) ->
    lists:reverse(Result).
%% Fold all per-function results into one module-level result using
%% MFun, seeded with the first function's result.
merge_functions([{_MFA, First} | Rest], MFun) ->
    merge_functions(Rest, MFun, First).

merge_functions(Functions, MFun, Acc) ->
    lists:foldl(fun({_MFA, R}, A) -> MFun(A, R) end, Acc, Functions).
%% Produce an annotated source file for Module in a worker process.
%% For a loaded module the data is first collected from all nodes; the
%% source file is then located and do_analyse_to_file/4 writes the
%% listing. Replies to From.
do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) ->
    File = case Loaded of
               {loaded, File0} ->
                   [{Module,Clauses}] =
                       ets:lookup(?COVER_CLAUSE_TABLE,Module),
                   collect(Module, Clauses,
                           State#main_state.nodes),
                   File0;
               {imported, File0, _} ->
                   File0
           end,
    case find_source(Module, File) of
        {beam,_BeamFile} ->
            reply(From, {error,no_source_code_found});
        ErlFile ->
            analyse_info(Module,State#main_state.imported),
            HTML = lists:member(html,Opts),
            R = do_analyse_to_file(Module,OutFile,
                                   ErlFile,HTML),
            reply(From, R)
    end.
%% do_analyse_to_file(Module, OutFile, ErlFile) -> {ok,OutFile} | {error,Error}
%% Copy ErlFile to OutFile, annotating every executable line with its
%% collected call count. When HTML is true the output is wrapped in a
%% minimal HTML page using the source file's declared encoding.
do_analyse_to_file(Module, OutFile, ErlFile, HTML) ->
    case file:open(ErlFile, [read]) of
        {ok, InFd} ->
            case file:open(OutFile, [write]) of
                {ok, OutFd} ->
                    if HTML ->
                           Encoding = encoding(ErlFile),
                           Header =
                               ["<!DOCTYPE HTML PUBLIC "
                                "\"-//W3C//DTD HTML 3.2 Final//EN\">\n"
                                "<html>\n"
                                "<head>\n"
                                "<meta http-equiv=\"Content-Type\""
                                " content=\"text/html; charset=",
                                Encoding,"\"/>\n"
                                "<title>",OutFile,"</title>\n"
                                "</head>"
                                "<body style='background-color: white;"
                                " color: black'>\n"
                                "<pre>\n"],
                           file:write(OutFd,Header);
                       true -> ok
                    end,
                    %% Banner with a zero-padded local timestamp.
                    {{Y,Mo,D},{H,Mi,S}} = calendar:local_time(),
                    Timestamp =
                        io_lib:format("~p-~s-~s at ~s:~s:~s",
                                      [Y,
                                       string:right(integer_to_list(Mo), 2, $0),
                                       string:right(integer_to_list(D), 2, $0),
                                       string:right(integer_to_list(H), 2, $0),
                                       string:right(integer_to_list(Mi), 2, $0),
                                       string:right(integer_to_list(S), 2, $0)]),
                    file:write(OutFd,
                               ["File generated from ",ErlFile," by COVER ",
                                Timestamp,"\n\n"
                                "**************************************"
                                "**************************************"
                                "\n\n"]),
                    print_lines(Module, InFd, OutFd, 1, HTML),
                    if HTML -> io:format(OutFd,"</pre>\n</body>\n</html>\n",[]);
                       true -> ok
                    end,
                    file:close(OutFd),
                    file:close(InFd),
                    {ok, OutFile};
                {error, Reason} ->
                    {error, {file, OutFile, Reason}}
            end;
        {error, Reason} ->
            {error, {file, ErlFile, Reason}}
    end.
print_lines(Module, InFd, OutFd, L, HTML) ->
case io:get_line(InFd, '') of
eof ->
ignore;
io:put_chars(OutFd, [tab(),escape_lt_and_gt(Line, HTML)]),
print_lines(Module, InFd, OutFd, L+1, HTML);
RawLine ->
Line = escape_lt_and_gt(RawLine,HTML),
Pattern = {#bump{module=Module,line=L},'$1'},
case ets:match(?COLLECTION_TABLE, Pattern) of
[] ->
io:put_chars(OutFd, [tab(),Line]);
Ns ->
N = lists:foldl(fun([Ni], Nacc) -> Nacc+Ni end, 0, Ns),
if
N=:=0, HTML=:=true ->
LineNoNL = Line -- "\n",
Str = " 0",
Str = string : right("0 " , 6 , 32 ) ,
RedLine = ["<font color=red>",Str,fill1(),
LineNoNL,"</font>\n"],
io:put_chars(OutFd, RedLine);
N<1000000 ->
Str = string:right(integer_to_list(N), 6, 32),
io:put_chars(OutFd, [Str,fill1(),Line]);
N<10000000 ->
Str = integer_to_list(N),
io:put_chars(OutFd, [Str,fill2(),Line]);
true ->
Str = integer_to_list(N),
io:put_chars(OutFd, [Str,fill3(),Line])
end
end,
print_lines(Module, InFd, OutFd, L+1, HTML)
end.
%% Gutter strings for the annotated listing: tab/0 prefixes lines with
%% no count; fill1-3 pad counts of increasing width to the same column.
tab() -> " | ".
fill1() -> "..| ".
fill2() -> ".| ".
fill3() -> "| ".
%% Export coverage data to OutFile. With Module '_' all modules are
%% exported (after collecting from all nodes); otherwise only the given
%% module, which must be cover-compiled or imported. Replies to From.
do_export(Module, OutFile, From, State) ->
    case file:open(OutFile,[write,binary,raw]) of
        {ok,Fd} ->
            Reply =
                case Module of
                    '_' ->
                        export_info(State#main_state.imported),
                        collect(State#main_state.nodes),
                        do_export_table(State#main_state.compiled,
                                        State#main_state.imported,
                                        Fd);
                    _ ->
                        export_info(Module,State#main_state.imported),
                        try is_loaded(Module, State) of
                            {loaded, File} ->
                                [{Module,Clauses}] =
                                    ets:lookup(?COVER_CLAUSE_TABLE,Module),
                                collect(Module, Clauses,
                                        State#main_state.nodes),
                                do_export_table([{Module,File}],[],Fd);
                            {imported, File, ImportFiles} ->
                                Imported = [{Module,File,ImportFiles}],
                                do_export_table([],Imported,Fd)
                        catch throw:_ ->
                                {error,{not_cover_compiled,Module}}
                        end
                end,
            file:close(Fd),
            reply(From, Reply);
        {error,Reason} ->
            reply(From, {error, {cant_open_file,OutFile,Reason}})
    end.
%% Write clause info and bump data for all given modules to Fd;
%% imported modules already among the compiled ones are skipped.
do_export_table(Compiled, Imported, Fd) ->
    ModList = merge(Imported,Compiled),
    write_module_data(ModList,Fd).
%% Add each imported module to ModuleList unless an entry with the
%% same module name is already present (compiled modules, and earlier
%% imports, take precedence).
merge(Imported, ModuleList) ->
    lists:foldl(
      fun({Module, File, _ImportFiles}, Acc) ->
              case lists:keymember(Module, 1, Acc) of
                  true -> Acc;
                  false -> [{Module, File} | Acc]
              end
      end, ModuleList, Imported).
%% For each module write a file marker, its clause info and all its
%% collected bump counters to the export file.
write_module_data([{Module,File}|ModList],Fd) ->
    write({file,Module,File},Fd),
    [Clauses] = ets:lookup(?COLLECTION_CLAUSE_TABLE,Module),
    write(Clauses,Fd),
    ModuleData = ets:match_object(?COLLECTION_TABLE,{#bump{module=Module},'_'}),
    do_write_module_data(ModuleData,Fd),
    write_module_data(ModList,Fd);
write_module_data([],_Fd) ->
    ok.
%% Write every element of the list to Fd with write/2.
do_write_module_data(ModuleData, Fd) ->
    lists:foreach(fun(Element) -> write(Element, Fd) end, ModuleData).
%% Write one term, prefixed with a single length byte. A term whose
%% external representation exceeds 255 bytes is preceded by a
%% {'$size',Size} header term so get_term/1 knows how much to read.
write(Element,Fd) ->
    Bin = term_to_binary(Element,[compressed]),
    case byte_size(Bin) of
        Size when Size > 255 ->
            SizeBin = term_to_binary({'$size',Size}),
            file:write(Fd,
                       <<(byte_size(SizeBin)):8,SizeBin/binary,Bin/binary>>);
        Size ->
            file:write(Fd,<<Size:8,Bin/binary>>)
    end,
    ok.
%% Read an export file into the collection tables. Modules that
%% add_imported/4 rejects are remembered in DontImport and their bump
%% and clause entries are skipped. Returns the updated Imported list.
do_import_to_table(Fd,ImportFile,Imported) ->
    do_import_to_table(Fd,ImportFile,Imported,[]).
do_import_to_table(Fd,ImportFile,Imported,DontImport) ->
    case get_term(Fd) of
        {file,Module,File} ->
            %% File marker: decide whether this module is wanted.
            case add_imported(Module, File, ImportFile, Imported) of
                {ok,NewImported} ->
                    do_import_to_table(Fd,ImportFile,NewImported,DontImport);
                dont_import ->
                    do_import_to_table(Fd,ImportFile,Imported,
                                       [Module|DontImport])
            end;
        {Key=#bump{module=Module},Val} ->
            %% A single line counter.
            case lists:member(Module,DontImport) of
                false ->
                    insert_in_collection_table(Key,Val);
                true ->
                    ok
            end,
            do_import_to_table(Fd,ImportFile,Imported,DontImport);
        {Module,Clauses} ->
            %% Clause info for a module.
            case lists:member(Module,DontImport) of
                false ->
                    ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses});
                true ->
                    ok
            end,
            do_import_to_table(Fd,ImportFile,Imported,DontImport);
        eof ->
            Imported
    end.
%% Read one length-prefixed term written by write/2. A {'$size',Size}
%% header term indicates a payload larger than 255 bytes whose real
%% size follows in the header.
get_term(Fd) ->
    case file:read(Fd,1) of
        {ok,<<Size1:8>>} ->
            {ok,Bin1} = file:read(Fd,Size1),
            case binary_to_term(Bin1) of
                {'$size',Size2} ->
                    {ok,Bin2} = file:read(Fd,Size2),
                    binary_to_term(Bin2);
                Term ->
                    Term
            end;
        eof ->
            eof
    end.
%% Reset coverage data for Module everywhere: zero the local counters,
%% drop the collected data, and reset on all remote Nodes.
do_reset_main_node(Module,Nodes) ->
    do_reset(Module),
    do_reset_collection_table(Module),
    remote_reset(Module,Nodes).

%% Drop all collected (analysed/imported) data for Module.
do_reset_collection_table(Module) ->
    ets:delete(?COLLECTION_CLAUSE_TABLE,Module),
    ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}).

%% Zero the live counters for every clause of Module.
do_reset(Module) ->
    [{Module,Clauses}] = ets:lookup(?COVER_CLAUSE_TABLE, Module),
    do_reset2(Clauses).

do_reset2([{M,F,A,C,_L}|Clauses]) ->
    Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'},
    Bumps = ets:match_object(?COVER_TABLE, Pattern),
    %% Overwrite each existing bump entry with a zero count.
    lists:foreach(fun({Bump,_N}) ->
                          ets:insert(?COVER_TABLE, {Bump,0})
                  end,
                  Bumps),
    do_reset2(Clauses);
do_reset2([]) ->
    ok.
%% Remove all trace of Module from the cover tables. The collection
%% table may not exist yet, hence the membership check before deleting
%% from it.
do_clear(Module) ->
    ets:match_delete(?COVER_CLAUSE_TABLE, {Module,'_'}),
    ets:match_delete(?COVER_TABLE, {#bump{module=Module},'_'}),
    case lists:member(?COLLECTION_TABLE, ets:all()) of
        true ->
            ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'});
        false ->
            ok
    end.

%% Called when a cover-compiled module turned out to be unloaded:
%% clear its data locally and remotely and drop it from the compiled
%% list in the server state.
not_loaded(Module, unloaded, State) ->
    do_clear(Module),
    remote_unload(State#main_state.nodes,[Module]),
    Compiled = update_compiled([Module],
                               State#main_state.compiled),
    State#main_state{ compiled = Compiled };
not_loaded(_Module,_Else, State) ->
    State.
%% HTML-escape Rawline only when HTML output was requested; otherwise
%% return the line unchanged.
escape_lt_and_gt(Rawline, true) ->
    escape_lt_and_gt1(Rawline, []);
escape_lt_and_gt(Rawline, _HTML) ->
    Rawline.
%% Escape <, > and & as HTML entities. Acc holds already-processed
%% characters in reverse order; the result is reverse(Acc) followed by
%% the escaped remainder.
escape_lt_and_gt1(Chars, Acc) ->
    Escaped = lists:flatmap(fun($<) -> "&lt;";
                               ($>) -> "&gt;";
                               ($&) -> "&amp;";
                               (C)  -> [C]
                            end,
                            Chars),
    lists:reverse(Acc, Escaped).
%% Parallel map with a concurrency limit (default 20 workers). Pids
%% holds the workers whose results are still pending, in spawn order;
%% results are received tagged with the head pid, so the result list
%% preserves input order. 'DOWN' messages only decrement the count of
%% live workers.
pmap(Fun, List) ->
    pmap(Fun, List, 20).
pmap(Fun, List, Limit) ->
    pmap(Fun, List, [], Limit, 0, []).
pmap(Fun, [E | Rest], Pids, Limit, Cnt, Acc) when Cnt < Limit ->
    Collector = self(),
    Pid = spawn_link(fun() ->
                             ?SPAWN_DBG(pmap,E),
                             Collector ! {res,self(),Fun(E)}
                     end),
    erlang:monitor(process, Pid),
    pmap(Fun, Rest, Pids ++ [Pid], Limit, Cnt + 1, Acc);
pmap(Fun, List, [Pid | Pids], Limit, Cnt, Acc) ->
    %% At the limit (or input exhausted): wait for the oldest worker.
    receive
        {'DOWN', _Ref, process, X, _} when is_pid(X) ->
            pmap(Fun, List, [Pid | Pids], Limit, Cnt - 1, Acc);
        {res, Pid, Res} ->
            pmap(Fun, List, Pids, Limit, Cnt, [Res | Acc])
    end;
pmap(_Fun, [], [], _Limit, 0, Acc) ->
    lists:reverse(Acc);
pmap(Fun, [], [], Limit, Cnt, Acc) ->
    %% All results in; drain remaining 'DOWN' messages.
    receive
        {'DOWN', _Ref, process, X, _} when is_pid(X) ->
            pmap(Fun, [], [], Limit, Cnt - 1, Acc)
    end.
%% Determine the HTML charset string for an Erlang source file,
%% falling back to epp's default when the file declares no encoding.
encoding(File) ->
    Encoding =
        case epp:read_encoding(File) of
            none ->
                epp:default_encoding();
            E ->
                E
        end,
    html_encoding(Encoding).
%% Map an epp encoding name to the corresponding HTML charset string.
html_encoding(Encoding) ->
    case Encoding of
        latin1 -> "iso-8859-1";
        utf8 -> "utf-8"
    end.
|
40542a1815f534e1c746f959c6033632b52047c05743ed3f04a68584a6fcf2a6 | jeffshrager/biobike | af-types.lisp | ;;; -*- mode: Lisp; Syntax: Common-Lisp; Package: bio; -*-
(in-package :aframes)
;;; +=========================================================================+
;;; | Copyright (c) 2002, 2003, 2004 JP Massar, Jeff Shrager, Mike Travers |
;;; | |
;;; | Permission is hereby granted, free of charge, to any person obtaining |
;;; | a copy of this software and associated documentation files (the |
| " Software " ) , to deal in the Software without restriction , including |
;;; | without limitation the rights to use, copy, modify, merge, publish, |
;;; | distribute, sublicense, and/or sell copies of the Software, and to    |
;;; | permit persons to whom the Software is furnished to do so, subject to |
;;; | the following conditions: |
;;; | |
;;; | The above copyright notice and this permission notice shall be included |
;;; | in all copies or substantial portions of the Software.                |
;;; | |
;;; | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,       |
;;; | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
;;; | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
;;; | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
;;; | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,  |
;;; | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
;;; | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
;;; +=========================================================================+
;;; Author: JP Massar.
;;; Frame class for a gene: coordinates and strand direction on a
;;; contiguous sequence, plus links to its proteins and organism.
(def-frame-class
 #$bio.gene ()
 ((#$from :domain integer :allocation :instance :initform 0)
  (#$to :domain integer :allocation :instance :initform 0)
  (#$direction :allocation :instance)
  (#$contiguous-sequence :allocation :instance)
  (#$proteins :allocation :instance)
  (#$organism :allocation :instance)
  ;; Class-allocated backlink: every gene is a component of an organism.
  (#$component-of :allocation :class :initform #$bio.organism)
  ))
;;; Frame class for a protein: length and sequence bookkeeping, plus
;;; links to the gene that codes for it and to its organism.
(def-frame-class
 #$bio.protein ()
 ((#$sequence-length :allocation :instance)
  (#$gene :allocation :instance)
  (#$organism :allocation :instance)
  (#$internal-sequence-info :allocation :instance)
  ;; Class-allocated backlink to the owning organism frame.
  (#$component-of :allocation :class :initform #$bio.organism)
  ))
;;; Frame class for a contiguous sequence (e.g. a chromosome or
;;; contig) belonging to an organism.
(def-frame-class
 #$bio.contiguous-sequence ()
 ((#$sequence-length :allocation :instance)
  (#$organism :allocation :instance)
  (#$internal-sequence-info :allocation :instance)
  ;; Class-allocated backlink to the owning organism frame.
  (#$component-of :allocation :class :initform #$bio.organism)
  ))
;;; Frame class for an organism: the collections of its genes,
;;; contiguous sequences and proteins, and the files/streams holding
;;; the genome and protein sequence data.
(def-frame-class
 #$bio.organism ()
 ((#$genes :allocation :instance)
  (#$contiguous-sequences :allocation :instance)
  (#$proteins :allocation :instance)
  (#$genome-sequence-file :allocation :instance)
  (#$genome-sequence-stream-ptr :allocation :instance)
  (#$protein-sequence-file :allocation :instance)
  ))
| null | https://raw.githubusercontent.com/jeffshrager/biobike/5313ec1fe8e82c21430d645e848ecc0386436f57/BioLisp/Organisms/af-types.lisp | lisp | -*- mode: Lisp; Syntax: Common-Lisp; Package: bio; -*-
+=========================================================================+
| |
| Permission is hereby granted, free of charge, to any person obtaining |
| a copy of this software and associated documentation files (the |
| without limitation the rights to use, copy, modify, merge, publish, |
| the following conditions: |
| |
| The above copyright notice and this permission notice shall be included |
| |
| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
| IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
| TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
| SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
+=========================================================================+
Author: JP Massar. |
(in-package :aframes)
| Copyright ( c ) 2002 , 2003 , 2004 JP , , |
| " Software " ) , to deal in the Software without restriction , including |
| distribute , sublicense , and/or sell copies of the Software , and to |
| permit persons to whom the Software is furnished to do so , subject to |
| in all copies or substantial portions of the Software . |
| THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , |
| CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , |
(def-frame-class
#$bio.gene ()
((#$from :domain integer :allocation :instance :initform 0)
(#$to :domain integer :allocation :instance :initform 0)
(#$direction :allocation :instance)
(#$contiguous-sequence :allocation :instance)
(#$proteins :allocation :instance)
(#$organism :allocation :instance)
(#$component-of :allocation :class :initform #$bio.organism)
))
(def-frame-class
#$bio.protein ()
((#$sequence-length :allocation :instance)
(#$gene :allocation :instance)
(#$organism :allocation :instance)
(#$internal-sequence-info :allocation :instance)
(#$component-of :allocation :class :initform #$bio.organism)
))
(def-frame-class
#$bio.contiguous-sequence ()
((#$sequence-length :allocation :instance)
(#$organism :allocation :instance)
(#$internal-sequence-info :allocation :instance)
(#$component-of :allocation :class :initform #$bio.organism)
))
(def-frame-class
#$bio.organism ()
((#$genes :allocation :instance)
(#$contiguous-sequences :allocation :instance)
(#$proteins :allocation :instance)
(#$genome-sequence-file :allocation :instance)
(#$genome-sequence-stream-ptr :allocation :instance)
(#$protein-sequence-file :allocation :instance)
))
|
20491e46c757b7ea3171a2e3c885fa496fb1c4a04a415fa3367d0699b520ec37 | lemmih/lhc | HelloWorld.hs | module Main (main) where
import LHC.Prim
-- | Print a greeting using LHC's primitive IO.
main :: IO ()
main = putStrLn "Hello world!"

-- LHC uses 'entrypoint' as the program root; forcing this value runs
-- 'main' via 'unsafePerformIO'.
entrypoint :: ()
entrypoint = unsafePerformIO main
| null | https://raw.githubusercontent.com/lemmih/lhc/53bfa57b9b7275b7737dcf9dd620533d0261be66/examples/HelloWorld.hs | haskell | module Main (main) where
import LHC.Prim
main :: IO ()
main = putStrLn "Hello world!"
entrypoint :: ()
entrypoint = unsafePerformIO main
|
|
1be3bc15c02f0a8e9339a0a8cb241ba9bb67b8295a98ffe37c0342819c795b28 | danr/hipspec | Integers.hs | -- The implementation of these integers correspond to those in the
Agda standard library , which is proved to be a commutative ring
module Integers where
import Hip.Prelude
import Prelude (Eq,Ord,Show,iterate,(!!),fmap,Bool(..))
-- | Peano naturals: Z is zero, S is successor.
data Nat = Z | S Nat deriving (Eq)
instance Arbitrary where
-- arbitrary =
-- let nats = iterate S Z
in ( nats ! ! ) ` fmap ` choose ( 0,50 )
-- | Integers: 'P n' denotes n and 'N n' denotes -(n+1), so zero has a
-- unique representation (P Z); see 'neg' and '(+!)'.
data Integ = P Nat | N Nat deriving (Eq)
instance Arbitrary Integ where
-- arbitrary = oneof [P `fmap` arbitrary,N `fmap` arbitrary]
-- | Structural equality on naturals. The successor case must recurse;
-- the previous constant 'True' made any two non-zero naturals compare
-- equal (e.g. S Z == S (S Z)), breaking '(==)' on 'Integ'.
eqnat :: Nat -> Nat -> Bool
eqnat Z     Z     = True
eqnat (S m) (S n) = eqnat m n
eqnat _     _     = False
-- | Equality on integers: same constructor and equal magnitudes.
(==) :: Integ -> Integ -> Bool
N x == N y = eqnat x y
P x == P y = eqnat x y
_ == _ = False
-- | Negation under the encoding N n = -(n+1): negating P (S n) gives
-- N n, zero is its own negation, and negating N n gives back P (S n).
neg :: Integ -> Integ
neg (P (S n)) = N n
neg (P Z) = P Z
neg (N n) = P (S n)
prop_neg_involutive :: Integ -> Prop Integ
prop_neg_involutive x = x =:= neg (neg x)
-- Natural addition (recursion on the second argument)
x +. Z = x
x +. (S y) = S (x +. y)

-- Natural multiplication, defined via repeated addition
x *. Z = Z
x *. (S y) = (x *. y) +. x

-- Natural subtraction, returning an integer since the result may be
-- negative: Z -. S n = -(n+1) = N n
m -. Z = P m
Z -. S n = N n
S m -. S n = m -. n
-- Integer addition
-- Since N m = -(m+1): N m +! N n = -(m+n+2) = N (S (m +. n)), and the
-- mixed-sign cases reduce to natural subtraction (-.)
N m +! N n = N (S (m +. n))
N m +! P n = n -. S m
P m +! N n = m -. S n
P m +! P n = P (m +. n)
zero = P Z
prop_add_ident_left :: Integ -> Prop Integ
prop_add_ident_left x = x =:= zero +! x
prop_add_ident_right :: Integ -> Prop Integ
prop_add_ident_right x = x =:= x +! zero
prop_add_assoc :: Integ -> Integ -> Integ -> Prop Integ
prop_add_assoc x y z = (x +! (y +! z)) =:= ((x +! y) +! z)
prop_add_comm :: Integ -> Integ -> Prop Integ
prop_add_comm x y = (x +! y) =:= (y +! x)
prop_add_inv_left :: Integ -> Prop Integ
prop_add_inv_left x = neg x +! x =:= zero
prop_add_inv_right :: Integ -> Prop Integ
prop_add_inv_right x = x +! neg x =:= zero
-- Integer subtraction
-- By the same encoding (N k = -(k+1)):
--   -(m+1) - -(n+1) = n - m,  -(m+1) - n = N (n+m),  m - -(n+1) = m+n+1
N m -! N n = n -. m
N m -! P n = N (n +. m)
P m -! N n = P (S (n +. m))
P m -! P n = m -. n
-- Absolute value as a natural: |N n| = n+1 since N n encodes -(n+1)
abs' (P n) = n
abs' (N n) = S n
-- | The sign of an integer, used to split multiplication into a sign
-- part and a magnitude part.
data Sign = Pos | Neg deriving (Eq)
instance Arbitrary Sign where
arbitrary = elements [Pos,Neg]
-- Sign negation
opposite Pos = Neg
opposite Neg = Pos

-- Sign multiplication: Pos is the identity
Pos *% x = x
Neg *% x = opposite x
prop_sign_assoc :: Sign -> Sign -> Sign -> Prop Sign
prop_sign_assoc s t u = (s *% (t *% u)) =:= ((s *% t) *% u)
prop_sign_ident_left :: Sign -> Prop Sign
prop_sign_ident_left s = s *% Pos =:= s
prop_sign_ident_right :: Sign -> Prop Sign
prop_sign_ident_right s = Pos *% s =:= s
prop_sign_opposite_involutive :: Sign -> Prop Sign
prop_sign_opposite_involutive s = opposite (opposite s) =:= s
prop_sign_triple :: Sign -> Prop Sign
prop_sign_triple s = s *% (s *% s) =:= s
-- | The sign of an integer (zero, i.e. P Z, counts as Pos).
sign :: Integ -> Sign
sign (P _) = Pos
sign (N _) = Neg

-- Attach a sign to a natural magnitude; zero is always P Z, and a
-- negative magnitude m+1 becomes N m under the encoding N m = -(m+1)
_ <| Z = P Z
Pos <| n = P n
Neg <| (S m) = N m

-- Integer multiplication: multiply signs and magnitudes separately
i *! j = (sign i *% sign j) <| (abs' i *. abs' j)
one = P (S Z)
prop_mul_ident_left :: Integ -> Prop Integ
prop_mul_ident_left x = x =:= one *! x
prop_mul_ident_right :: Integ -> Prop Integ
prop_mul_ident_right x = x =:= x *! one
prop_mul_assoc :: Integ -> Integ -> Integ -> Prop Integ
prop_mul_assoc x y z = (x *! (y *! z)) =:= ((x *! y) *! z)
prop_mul_comm :: Integ -> Integ -> Prop Integ
prop_mul_comm x y = (x *! y) =:= (y *! x)
prop_left_distrib :: Integ -> Integ -> Integ -> Prop Integ
prop_left_distrib x y z = x *! (y +! z) =:= (x *! y) +! (x *! z)
prop_right_distrib :: Integ -> Integ -> Integ -> Prop Integ
prop_right_distrib x y z = (x +! y) *! z =:= (x *! z) +! (y *! z)
main = do
quickCheck ( printTestCase " prop_neg_involutive " prop_neg_involutive )
quickCheck ( printTestCase " prop_add_ident_left " prop_add_ident_left )
quickCheck ( printTestCase " prop_add_ident_right " prop_add_ident_right )
quickCheck ( printTestCase " prop_add_assoc " prop_add_assoc )
quickCheck ( printTestCase " prop_add_comm " prop_add_comm )
quickCheck ( printTestCase " prop_add_inv_left " prop_add_inv_left )
quickCheck ( printTestCase " prop_add_inv_right " prop_add_inv_right )
quickCheck ( printTestCase " prop_sign_assoc " prop_sign_assoc )
quickCheck ( printTestCase " prop_sign_ident_left " prop_sign_ident_left )
quickCheck ( printTestCase " prop_sign_opposite_involutive " prop_sign_opposite_involutive )
quickCheck ( printTestCase " prop_sign_triple " prop_sign_triple )
quickCheck ( printTestCase " prop_mul_ident_left " prop_mul_ident_left )
quickCheck ( printTestCase " prop_mul_ident_right " prop_mul_ident_right )
quickCheck ( printTestCase " prop_mul_assoc " )
quickCheck ( printTestCase " prop_mul_comm " prop_mul_comm )
main = do
quickCheck (printTestCase "prop_neg_involutive" prop_neg_involutive)
quickCheck (printTestCase "prop_add_ident_left" prop_add_ident_left)
quickCheck (printTestCase "prop_add_ident_right" prop_add_ident_right)
quickCheck (printTestCase "prop_add_assoc" prop_add_assoc)
quickCheck (printTestCase "prop_add_comm" prop_add_comm)
quickCheck (printTestCase "prop_add_inv_left" prop_add_inv_left)
quickCheck (printTestCase "prop_add_inv_right" prop_add_inv_right)
quickCheck (printTestCase "prop_sign_assoc" prop_sign_assoc)
quickCheck (printTestCase "prop_sign_ident_left" prop_sign_ident_left)
quickCheck (printTestCase "prop_sign_opposite_involutive" prop_sign_opposite_involutive)
quickCheck (printTestCase "prop_sign_triple" prop_sign_triple)
quickCheck (printTestCase "prop_mul_ident_left" prop_mul_ident_left)
quickCheck (printTestCase "prop_mul_ident_right" prop_mul_ident_right)
quickCheck (printTestCase "prop_mul_assoc" prop_mul_assoc)
quickCheck (printTestCase "prop_mul_comm" prop_mul_comm)
-}
| null | https://raw.githubusercontent.com/danr/hipspec/a114db84abd5fee8ce0b026abc5380da11147aa9/examples/old-examples/hip/Integers.hs | haskell | The implementation of these integers correspond to those in the
arbitrary =
let nats = iterate S Z
arbitrary = oneof [P `fmap` arbitrary,N `fmap` arbitrary]
Natural addition
Natural multiplication
Natural subtraction | Agda standard library , which is proved to be a commutative ring
module Integers where
import Hip.Prelude
import Prelude (Eq,Ord,Show,iterate,(!!),fmap,Bool(..))
data Nat = Z | S Nat deriving (Eq)
instance Arbitrary where
in ( nats ! ! ) ` fmap ` choose ( 0,50 )
data Integ = P Nat | N Nat deriving (Eq)
instance Arbitrary Integ where
eqnat Z Z = True
eqnat (S m) (S n) = True
eqnat _ _ = False
(==) :: Integ -> Integ -> Bool
N x == N y = eqnat x y
P x == P y = eqnat x y
_ == _ = False
neg :: Integ -> Integ
neg (P (S n)) = N n
neg (P Z) = P Z
neg (N n) = P (S n)
prop_neg_involutive :: Integ -> Prop Integ
prop_neg_involutive x = x =:= neg (neg x)
x +. Z = x
x +. (S y) = S (x +. y)
x *. Z = Z
x *. (S y) = (x *. y) +. x
m -. Z = P m
Z -. S n = N n
S m -. S n = m -. n
Integer addition
N m +! N n = N (S (m +. n))
N m +! P n = n -. S m
P m +! N n = m -. S n
P m +! P n = P (m +. n)
zero = P Z
prop_add_ident_left :: Integ -> Prop Integ
prop_add_ident_left x = x =:= zero +! x
prop_add_ident_right :: Integ -> Prop Integ
prop_add_ident_right x = x =:= x +! zero
prop_add_assoc :: Integ -> Integ -> Integ -> Prop Integ
prop_add_assoc x y z = (x +! (y +! z)) =:= ((x +! y) +! z)
prop_add_comm :: Integ -> Integ -> Prop Integ
prop_add_comm x y = (x +! y) =:= (y +! x)
prop_add_inv_left :: Integ -> Prop Integ
prop_add_inv_left x = neg x +! x =:= zero
prop_add_inv_right :: Integ -> Prop Integ
prop_add_inv_right x = x +! neg x =:= zero
Integer subtraction
N m -! N n = n -. m
N m -! P n = N (n +. m)
P m -! N n = P (S (n +. m))
P m -! P n = m -. n
abs' (P n) = n
abs' (N n) = S n
data Sign = Pos | Neg deriving (Eq)
instance Arbitrary Sign where
arbitrary = elements [Pos,Neg]
opposite Pos = Neg
opposite Neg = Pos
Pos *% x = x
Neg *% x = opposite x
prop_sign_assoc :: Sign -> Sign -> Sign -> Prop Sign
prop_sign_assoc s t u = (s *% (t *% u)) =:= ((s *% t) *% u)
prop_sign_ident_left :: Sign -> Prop Sign
prop_sign_ident_left s = s *% Pos =:= s
prop_sign_ident_right :: Sign -> Prop Sign
prop_sign_ident_right s = Pos *% s =:= s
prop_sign_opposite_involutive :: Sign -> Prop Sign
prop_sign_opposite_involutive s = opposite (opposite s) =:= s
prop_sign_triple :: Sign -> Prop Sign
prop_sign_triple s = s *% (s *% s) =:= s
sign :: Integ -> Sign
sign (P _) = Pos
sign (N _) = Neg
_ <| Z = P Z
Pos <| n = P n
Neg <| (S m) = N m
i *! j = (sign i *% sign j) <| (abs' i *. abs' j)
one = P (S Z)
prop_mul_ident_left :: Integ -> Prop Integ
prop_mul_ident_left x = x =:= one *! x
prop_mul_ident_right :: Integ -> Prop Integ
prop_mul_ident_right x = x =:= x *! one
prop_mul_assoc :: Integ -> Integ -> Integ -> Prop Integ
prop_mul_assoc x y z = (x *! (y *! z)) =:= ((x *! y) *! z)
prop_mul_comm :: Integ -> Integ -> Prop Integ
prop_mul_comm x y = (x *! y) =:= (y *! x)
prop_left_distrib :: Integ -> Integ -> Integ -> Prop Integ
prop_left_distrib x y z = x *! (y +! z) =:= (x *! y) +! (x *! z)
prop_right_distrib :: Integ -> Integ -> Integ -> Prop Integ
prop_right_distrib x y z = (x +! y) *! z =:= (x *! z) +! (y *! z)
main = do
quickCheck ( printTestCase " prop_neg_involutive " prop_neg_involutive )
quickCheck ( printTestCase " prop_add_ident_left " prop_add_ident_left )
quickCheck ( printTestCase " prop_add_ident_right " prop_add_ident_right )
quickCheck ( printTestCase " prop_add_assoc " prop_add_assoc )
quickCheck ( printTestCase " prop_add_comm " prop_add_comm )
quickCheck ( printTestCase " prop_add_inv_left " prop_add_inv_left )
quickCheck ( printTestCase " prop_add_inv_right " prop_add_inv_right )
quickCheck ( printTestCase " prop_sign_assoc " prop_sign_assoc )
quickCheck ( printTestCase " prop_sign_ident_left " prop_sign_ident_left )
quickCheck ( printTestCase " prop_sign_opposite_involutive " prop_sign_opposite_involutive )
quickCheck ( printTestCase " prop_sign_triple " prop_sign_triple )
quickCheck ( printTestCase " prop_mul_ident_left " prop_mul_ident_left )
quickCheck ( printTestCase " prop_mul_ident_right " prop_mul_ident_right )
quickCheck ( printTestCase " prop_mul_assoc " )
quickCheck ( printTestCase " prop_mul_comm " prop_mul_comm )
main = do
quickCheck (printTestCase "prop_neg_involutive" prop_neg_involutive)
quickCheck (printTestCase "prop_add_ident_left" prop_add_ident_left)
quickCheck (printTestCase "prop_add_ident_right" prop_add_ident_right)
quickCheck (printTestCase "prop_add_assoc" prop_add_assoc)
quickCheck (printTestCase "prop_add_comm" prop_add_comm)
quickCheck (printTestCase "prop_add_inv_left" prop_add_inv_left)
quickCheck (printTestCase "prop_add_inv_right" prop_add_inv_right)
quickCheck (printTestCase "prop_sign_assoc" prop_sign_assoc)
quickCheck (printTestCase "prop_sign_ident_left" prop_sign_ident_left)
quickCheck (printTestCase "prop_sign_opposite_involutive" prop_sign_opposite_involutive)
quickCheck (printTestCase "prop_sign_triple" prop_sign_triple)
quickCheck (printTestCase "prop_mul_ident_left" prop_mul_ident_left)
quickCheck (printTestCase "prop_mul_ident_right" prop_mul_ident_right)
quickCheck (printTestCase "prop_mul_assoc" prop_mul_assoc)
quickCheck (printTestCase "prop_mul_comm" prop_mul_comm)
-}
|
4c4ba6f18d6c9557849fab80c99f50dfc47452cd579e940b51ab85cf643d8ebf | lwhjp/ecmascript | global-object.rkt | #lang racket/base
(require racket/class
"object.rkt")
(provide global-object)
; FIXME: this should be parameterized so that we can create
; a new JS environment without reloading modules (copy the
; "default" global-object containing library functions)
;; The single ECMAScript global object used by this interpreter:
;; an ecma-object% instance whose [[Class]] is 'global and whose
;; prototype is #f (no prototype chain above it).
(define global-object
  (new ecma-object%
       [class-name 'global]
       [prototype #f]))
| null | https://raw.githubusercontent.com/lwhjp/ecmascript/69fcfa42856ea799ff9d9d63a60eaf1b1783fe50/private/global-object.rkt | racket | FIXME: this should be parameterized so that we can create
"default" global-object containing library functions) | #lang racket/base
(require racket/class
"object.rkt")
(provide global-object)
a new JS environment without reloading modules ( copy the
(define global-object
(new ecma-object%
[class-name 'global]
[prototype #f]))
|
94937822730e0d7f3f619bf903d3b6dcc05f7e50b422b0337ff58255038a8b69 | bravit/hid-examples | Test.hs | # LANGUAGE StandaloneDeriving #
import System.Exit (exitFailure)
import Data.List (sort, nub)
import Control.Monad (replicateM, when)
import System.Random
import System.Random.Stateful (uniformRM, uniformM)
import Radar
-- Uniform random sampling for Turn: sample an Int in the range of the
-- endpoints' Enum indices, then convert back with toEnum.
instance UniformRange Turn where
  uniformRM (lo, hi) rng = do
    res <- uniformRM (fromEnum lo :: Int, fromEnum hi) rng
    pure $ toEnum res
-- Full-range sampling delegates to uniformRM over (minBound, maxBound).
instance Uniform Turn where
  uniformM rng = uniformRM (minBound, maxBound) rng
-- Same Enum-index technique for Direction.
instance UniformRange Direction where
  uniformRM (lo, hi) rng = do
    res <- uniformRM (fromEnum lo :: Int, fromEnum hi) rng
    pure $ toEnum res
instance Uniform Direction where
  uniformM rng = uniformRM (minBound, maxBound) rng
-- Draw one uniformly distributed value using the global StdGen.
uniformIO :: Uniform a => IO a
uniformIO = getStdRandom uniform
-- Draw n uniformly distributed values.
uniformsIO :: Uniform a => Int -> IO [a]
uniformsIO n = replicateM n uniformIO
-- Specialisations used by main below.
randomTurns :: Int -> IO [Turn]
randomTurns = uniformsIO
randomDirections :: Int -> IO [Direction]
randomDirections = uniformsIO
-- Generate n random values with the supplied generator action and
-- write them to fname, one show-ed value per line.
writeRandomFile :: (Uniform a, Show a) =>
                   Int -> (Int -> IO [a]) -> FilePath -> IO ()
writeRandomFile n gen fname = do
  vals <- gen n
  writeFile fname (unlines (map show vals))
-- Orphan Ord instance so Turns can be sorted in the property below.
deriving instance Ord Turn
-- Orienting every pair of directions should produce every Turn
-- (assumes `every` from Radar enumerates all values — TODO confirm).
test_allTurnsInUse :: Bool
test_allTurnsInUse = sort (nub [ orient d1 d2 | d1 <- every, d2 <- every ])
                     == every
-- Folding turns (rotateMany) agrees with the primed variant for every
-- starting direction.
test_rotationsMonoidAgree :: [Turn] -> Bool
test_rotationsMonoidAgree ts =
  and [ rotateMany d ts == rotateMany' d ts | d <- every ]
-- Recovering turns with orientMany and replaying them from the first
-- direction reproduces the original path; vacuously true for [].
test_orientRotateAgree :: [Direction] -> Bool
test_orientRotateAgree [] = True
test_orientRotateAgree ds@(d:_) = ds == rotateManySteps d (orientMany ds)
-- Run the three properties on 1000 random samples each; exit with a
-- failure status if any of them does not hold.
main :: IO ()
main = do
  dirs  <- randomDirections 1000
  turns <- randomTurns 1000
  let ok = test_allTurnsInUse
           && test_orientRotateAgree dirs
           && test_rotationsMonoidAgree turns
  when (not ok) exitFailure
| null | https://raw.githubusercontent.com/bravit/hid-examples/913e116b7ee9c7971bba10fe70ae0b61bfb9391b/tests/radar/Test.hs | haskell | # LANGUAGE StandaloneDeriving #
import System.Exit (exitFailure)
import Data.List (sort, nub)
import Control.Monad (replicateM, when)
import System.Random
import System.Random.Stateful (uniformRM, uniformM)
import Radar
-- NOTE(review): this region repeats the definitions above; it appears
-- to be the dataset's duplicated "code" column rather than live code —
-- verify before deduplicating.
instance UniformRange Turn where
  uniformRM (lo, hi) rng = do
    res <- uniformRM (fromEnum lo :: Int, fromEnum hi) rng
    pure $ toEnum res
instance Uniform Turn where
  uniformM rng = uniformRM (minBound, maxBound) rng
instance UniformRange Direction where
  uniformRM (lo, hi) rng = do
    res <- uniformRM (fromEnum lo :: Int, fromEnum hi) rng
    pure $ toEnum res
instance Uniform Direction where
  uniformM rng = uniformRM (minBound, maxBound) rng
uniformIO :: Uniform a => IO a
uniformIO = getStdRandom uniform
uniformsIO :: Uniform a => Int -> IO [a]
uniformsIO n = replicateM n uniformIO
randomTurns :: Int -> IO [Turn]
randomTurns = uniformsIO
randomDirections :: Int -> IO [Direction]
randomDirections = uniformsIO
writeRandomFile :: (Uniform a, Show a) =>
                   Int -> (Int -> IO [a]) -> FilePath -> IO ()
writeRandomFile n gen fname = do
  xs <- gen n
  writeFile fname $ unlines $ map show xs
deriving instance Ord Turn
test_allTurnsInUse :: Bool
test_allTurnsInUse = sort (nub [ orient d1 d2 | d1 <- every, d2 <- every ])
                     == every
test_rotationsMonoidAgree :: [Turn] -> Bool
test_rotationsMonoidAgree ts =
  and [ rotateMany d ts == rotateMany' d ts | d <- every ]
test_orientRotateAgree :: [Direction] -> Bool
test_orientRotateAgree [] = True
test_orientRotateAgree ds@(d:_) = ds == rotateManySteps d (orientMany ds)
main :: IO ()
main = do
  ds <- randomDirections 1000
  ts <- randomTurns 1000
  when (not $ and [test_allTurnsInUse,
                   test_orientRotateAgree ds,
                   test_rotationsMonoidAgree ts])
    exitFailure
|
|
afa457bebb4a31332cac559b8fde1eb0920f44770a2e72814da4831d1060153f | zlatozar/study-paip | examples.lisp | -*- Mode : LISP ; Syntax : COMMON - LISP ; Package : TUTOR ; Base : 10 -*-
;;; Code from Paradigms of AI Programming
;;; Copyright (c) 1991, 1996
;; NOTE: this file will be spread through chapters
(in-package #:tutor)
;;; Chapter 15: interactive examples exercising CANON from the
;;; canonical-form symbolic-algebra code (requires "cmacsyma").
(defexamples 15 "Symbolic Mathematics with Canonical Forms"
  "This chapter uses a canonical representation for polynomials"
  "to achieve a more efficient program than the rules-based one in Chapter 8."
  (:section "15.1 A Canonical Form for Polynomials")
  ((requires "cmacsyma"))
  "We represent polynomials as vectors, with the variable in element 0,"
  "and the coefficients starting in element 1 and going up from there."
  "Here is the representation of 5x^3 + 10x^2 + 20x + 30"
  ('#(x 30 20 10 5) @ 511)
  "Here are some examples (without the interactive loop):"
  ((canon '(3 + x + 4 - x)) => 7 @ 521)
  ((canon '(x + y + y + x)) => ((2 * x) + (2 * y)))
  ((canon '(3 * x + 4 * x)) => (7 * x))
  ((canon '(3 * x + y + x + 4 * x)) => ((8 * x) + y))
  ((canon '((x + 1) ^ 10)) =>
   ((x ^ 10) + (10 * (x ^ 9)) + (45 * (x ^ 8)) + (120 * (x ^ 7))
    + (210 * (x ^ 6)) + (252 * (x ^ 5)) + (210 * (x ^ 4))
    + (120 * (x ^ 3)) + (45 * (x ^ 2)) + (10 * x) + 1))
  ((canon '((x + 1) ^ 10 - (x - 1) ^ 10)) =>
   ((20 * (x ^ 8)) + (240 * (x ^ 7)) + (504 * (x ^ 5))
    + (240 * (x ^ 3)) + (20 * x)))
  ((canon '(d (3 * x ^ 2 + 2 * x + 1) / d x)) @ 522 =>
   ((6 * x) + 2))
  ((canon '(d (z + 3 * x + 3 * z * x ^ 2 + z ^ 2 * x ^ 3) / d z)) =>
   (((2 * z) * (x ^ 3)) + (3 * (x ^ 2)) + 1)))
;;; Chapter 16: the Mycin-style expert-system shell.  No scripted
;;; examples here because the system is interactive; run (mycin).
(defexamples 16 "Expert Systems"
  "In this chapter we develop an expert system shell, and give it a few rules"
  "about infectious disease, thus duplicating some of the Mycin system."
  ((requires "mycin-r"))
  "Because this is an interactive system, we can't show the interaction here."
  "You can try it yourself by evaluating (mycin)"
  )
;;; Chapter 17: Waltz line-labelling by constraint satisfaction
;;; (requires "waltz").  Defines and labels the cube, cube-on-plate,
;;; poiuyt (impossible figure), and tower diagrams.
(defexamples 17 "Line Diagram Labelling by Constraint Satisfaction"
  "In this chapter we look at the line-diagram labeling problem: Given a list"
  "of lines and the vertexes at which they intersect, how can we determine"
  "what the lines represent?"
  ((requires "waltz"))
  (:section "17.2 Combining Constraints and Searching")
  "First let's test that we can find the possible labelings for a vertex class:"
  ((possible-labelings 'Y) @ 574 =>
   ((+ + +) (- - -) (L R -) (- L R) (R - L)))
  "Notice how matrix-transpose works:"
  ((matrix-transpose (possible-labelings 'Y)) =>
   ((+ - L - R)
    (+ - R L -)
    (+ - - R L)))
  ((defdiagram cube
     (a Y b c d)
     (b W g e a)
     (c W e f a)
     (d W f g a)
     (e L c b)
     (f L d c)
     (g L b d)) @ 575)
  (:section "17.3 Labelling Diagrams")
  "We are now ready to try labelling diagrams. First the cube:"
  ((print-labelings (diagram 'cube)) @ 577)
  "The cube should have given four solutions."
  "We can get down to one solution by grounding line GD:"
  ((print-labelings (ground (diagram 'cube) 'g 'd)) @ 580)
  "For the more complex cube on a plate, we get similar results;"
  "Four interpretations, which turn to one after grounding line KM:"
  ((defdiagram cube-on-plate
     (a Y b c d)
     (b W g e a)
     (c W e f a)
     (d W f g a)
     (e L c b)
     (f Y d c i)
     (g Y b d h)
     (h W l g j)
     (i W f m j)
     (j Y h i k)
     (k W m l j)
     (l L h k)
     (m L k i)) @ 581)
  ((print-labelings (ground (diagram 'cube-on-plate) 'k 'm)) @ 582)
  "It is interesting to try the algorithm on an 'impossible' diagram."
  "It turns out the algorithm correctly finds no interpretation for this"
  "well-known illusion:"
  ((defdiagram poiuyt
     (a L b g)
     (b L j a)
     (c L d l)
     (d L h c)
     (e L f i)
     (f L k e)
     (g L a l)
     (h L l d)
     (i L e k)
     (j L k b)
     (k W j i f)
     (l W h g c)) @ 583)
  ((print-labelings (diagram 'poiuyt)) @ 583)
  "Now we try a more complex diagram:"
  ((defdiagram tower
     (a Y b c d)    (n L q o)
     (b W g e a)    (o W y j n)
     (c W e f a)    (p L r i)
     (d W f g a)    (q W n s w)
     (e L c b)      (r W s p x)
     (f Y d c i)    (s L r q)
     (g Y b d h)    (t W w x z)
     (h W l g j)    (u W x y z)
     (i W f m p)    (v W y w z)
     (j Y h o k)    (w Y t v q)
     (k W m l j)    (x Y r u t)
     (l L h k)      (y Y v u o)
     (m L k i)      (z Y t u v)) @ 584)
  ((print-labelings (ground (diagram 'tower) 'l 'k)) @ 584))
;;; Chapter 18: Othello strategies and game-tree search (requires
;;; "othello"/"othello2"): maximizer, minimax, alpha-beta, and
;;; round-robin tournaments between strategies.
(defexamples 18 "Search and the Game of Othello"
  "In this chapter we will develop a simplified Othello-playing program."
  "It will not be a champion, but is much better than beginning players."
  (:section "18.2 Representation Choices")
  ((requires "othello"))
  "First, we see that our choices for representing the board seem to work:"
  ((print-board (initial-board)) @ 604)
  "Now we can compare the weighted squares and count difference strategies"
  "by playing two games, alternating who goes first. The NIL as third argument"
  "means don't print the board after each move."
  ((othello (maximizer #'weighted-squares)
            (maximizer #'count-difference) nil) @ 610)
  ((othello (maximizer #'count-difference)
            (maximizer #'weighted-squares) nil))
  (:section "18.4 Searching Ahead: Minimax")
  "We can test the minimax strategy, and see that searching ahead 3 ply is"
  "indeed better than looking at only 1 ply. We can follow the whole game"
  ((othello (minimax-searcher 3 #'count-difference)
            (maximizer #'count-difference)) @ 614 => 53)
  (:section "18.5 Smarter Searching: Alpha-Beta Search")
  "The following should produce the same result, only faster:"
  ((othello (alpha-beta-searcher 3 #'count-difference)
            (maximizer #'count-difference) nil) => 53)
  (:section "18.8 Playing a Series of Games")
  "A single game is not enough to establish that one strategy is better than"
  "another. The function RANDOM-OTHELLO-SERIES allows two strategies to"
  "compete in a series of games."
  ((requires "othello2"))
  ((random-othello-series
    (alpha-beta-searcher 2 #'weighted-squares)
    (alpha-beta-searcher 2 #'modified-weighted-squares)
    5) @ 628)
  "Here is a comparison of five strategies that search only 1 ply."
  "To save time, we run 2 pairs of games each, not 5 pairs."
  ((round-robin
    (list (maximizer #'count-difference)
          (maximizer #'mobility)
          (maximizer #'weighted-squares)
          (maximizer #'modified-weighted-squares)
          #'random-strategy)
    2 10
    '(count-difference mobility weighted modified-weighted random)) @ 629)
  "Now we compare alpha-beta searchers at 3 ply for 1 pair of games each."
  "In the book it was 4 ply for 5 pairs each, but that takes too long."
  ((round-robin
    (list (alpha-beta-searcher 3 #'count-difference)
          (alpha-beta-searcher 3 #'weighted-squares)
          (alpha-beta-searcher 3 #'modified-weighted-squares)
          #'random-strategy)
    1 10
    '(count-difference weighted modified-weighted random)))
  )
;;; Chapter 19: introductory natural-language parsing, moving from
;;; pure syntax ("syntax1") to semantics ("syntax2") and preference
;;; rankings ("syntax3") on the CD-player track language.
(defexamples 19 "Introduction to Natural Language"
  "This chapter is a brief introduction to natural language processing."
  (:section "19.1 Parsing with a Phrase-Structure Grammar")
  "We start with the grammar defined on page 39 for the GENERATE program."
  "I include 'noun' and 'verb' as nouns in the grammar *grammar3*"
  ((requires "syntax1"))
  (*grammar3* @ 657)
  ((use *grammar3*))
  ((parser '(the table)) => ((NP (ART THE) (NOUN TABLE))))
  ((parser '(the ball hit the table)) =>
   ((SENTENCE (NP (ART THE) (NOUN BALL))
              (VP (VERB HIT)
                  (NP (ART THE) (NOUN TABLE))))))
  ((parser '(the noun took the verb)) =>
   ((SENTENCE (NP (ART THE) (NOUN NOUN))
              (VP (VERB TOOK)
                  (NP (ART THE) (NOUN VERB))))))
  "The range of sentences we can parse is quite limited."
  "The following grammar includes a wider variety."
  (*grammar4* @ 661)
  ((use *grammar4*))
  ((parser '(The man hit the table with the ball)) =>
   ((S (NP (D THE) (N MAN))
       (VP (VP (V HIT) (NP (D THE) (N TABLE)))
           (PP (P WITH) (NP (D THE) (N BALL)))))
    (S (NP (D THE) (N MAN))
       (VP (V HIT)
           (NP (NP (D THE) (N TABLE))
               (PP (P WITH) (NP (D THE) (N BALL))))))))
  "Here we see a phrase that is ambiguous between a sentence and a noun phrase:"
  ((parser '(the orange saw)) @ 662 =>
   ((S (NP (D THE) (N ORANGE)) (VP (V SAW)))
    (NP (D THE) (A+ (A ORANGE)) (N SAW))))
  (:section "19.4 The Unknown-Word Problem")
  "As it stands, the parser cannot deal with unknown words."
  "One way of treating unknown words is to allow them to be any of the"
  "'open-class' categories--nouns, verbs, adjectives, and names."
  ((parser '(John liked Mary)) @ 664 =>
   ((S (NP (NAME JOHN))
       (VP (V LIKED) (NP (NAME MARY))))))
  ((parser '(Dana liked Dale)) @ 665 =>
   ((S (NP (NAME DANA))
       (VP (V LIKED) (NP (NAME DALE))))))
  "We see the parser works as well with words it knows (John and Mary)"
  "as with new words (Dana and Dale), which it can recognize as names"
  "because of their position in the sentence."
  ((parser '(the rab zaggled the woogly quax)) =>
   ((S (NP (D THE) (N RAB))
       (VP (V ZAGGLED) (NP (D THE) (A+ (A WOOGLY)) (N QUAX))))))
  ((parser '(the slithy toves gymbled)) =>
   ((S (NP (D THE) (N SLITHY)) (VP (V TOVES) (NP (NAME GYMBLED))))
    (S (NP (D THE) (A+ (A SLITHY)) (N TOVES)) (VP (V GYMBLED)))
    (NP (D THE) (A+ (A SLITHY) (A+ (A TOVES))) (N GYMBLED))))
  ((parser '(the slithy toves gymbled on the wabe)) =>
   ((S (NP (D THE) (N SLITHY))
       (VP (VP (V TOVES) (NP (NAME GYMBLED)))
           (PP (P ON) (NP (D THE) (N WABE)))))
    (S (NP (D THE) (N SLITHY))
       (VP (V TOVES) (NP (NP (NAME GYMBLED))
                         (PP (P ON) (NP (D THE) (N WABE))))))
    (S (NP (D THE) (A+ (A SLITHY)) (N TOVES))
       (VP (VP (V GYMBLED)) (PP (P ON) (NP (D THE) (N WABE)))))
    (NP (NP (D THE) (A+ (A SLITHY) (A+ (A TOVES))) (N GYMBLED))
        (PP (P ON) (NP (D THE) (N WABE))))))
  (:section "19.5 Parsing into a Semantic Representation")
  ((requires "syntax2"))
  "Syntactic parse trees of a sentence may be interesting, but by themselves"
  "they're not very useful. We use sentences to communicate ideas, not to"
  "display grammatical structures."
  ""
  "Imagine a compact disc player for which you can punch buttons like"
  "'play 1 to 5 without 3'. We will define such a language."
  "The meaning of a sentence in the language is the list of tracks played."
  (*grammar5* @ 667)
  ((use *grammar5*))
  ((meanings '(1 to 5 without 3)) @ 669 => ((1 2 4 5)))
  ((meanings '(1 to 4 and 7 to 9)) => ((1 2 3 4 7 8 9)))
  ((meanings '(1 to 6 without 3 and 4)) => ((1 2 4 5 6) (1 2 5 6)))
  "The example '1 to 6 without 3 and 4' is ambiguous."
  "The syntactic ambiguity leads to a semantic ambiguity."
  "We can define a new grammar that eliminates some ambiguities:"
  (*grammar6* @ 669)
  ((use *grammar6*))
  "With this new grammar, we can get single interpretations out of most inputs"
  ((meanings '(1 to 6 without 3 and 4)) => ((1 2 5 6)))
  ((meanings '(1 and 3 to 7 and 9 without 5 and 6)) => ((1 3 4 7 9)))
  ((meanings '(1 and 3 to 7 and 9 without 5 and 2)) => ((1 3 4 6 7 9 2)))
  ((meanings '(1 9 8 to 2 0 1)) => ((198 199 200 201)))
  ((meanings '(1 2 3)) => (123 (123)))
  (:section "19.6 Parsing with Preferences")
  ((requires "syntax3"))
  "We need some compromise between the permissive grammar, which generated"
  "all possible parses, and the restrictive grammar, which eliminates too"
  "many parses. To get the 'best' interpretation we will need not only a"
  "new grammar, we will also need to modify the program to compare the"
  "relative worth of candidate interpretations."
  (*grammar7* @ 673)
  ((use *grammar7*))
  "We will need a way to show off the prefernce rankings:"
  ((all-parses '(1 to 6 without 3 and 4)) @ 675)
  ((all-parses '(1 and 3 to 7 and 9 without 5 and 6)))
  ((all-parses '(1 and 3 to 7 and 9 without 5 and 2)) @ 676)
  "In each case, the preference rules are able to assign higher scores to"
  "more reasonable interpretations. What we really want is to pick the best."
  "Here we see some examples:"
  ((meaning '(1 to 5 without 3 and 4)) => (1 2 5))
  ((meaning '(1 to 5 without 3 and 6)) => (1 2 4 5 6))
  ((meaning '(1 to 5 without 3 and 6 shuffled)))
  ((meaning '([ 1 to 5 without [ 3 and 6 ] ] reversed)) => (5 4 2 1))
  ((meaning '(1 to 5 to 9)) => NIL)
  )
;;; Chapter 20: grammars as logic programs in DCG format (requires
;;; "unifgram").  Successively rebuilds the rule base with CLEAR-DB:
;;; a trivial grammar, one with unified semantics, one with
;;; quantifiers, and one preserving quantifier-scope ambiguity.
(defexamples 20 "Unification Grammars"
  "Prolog was invented as a formalism to describe the grammar of French."
  "It is still useful to view a grammar as a set of logic programming clauses."
  "This chapter describes how that can be done."
  ((requires "unifgram"))
  (:section "20.3 A Simple Grammar in DCG Format")
  "Here is the trivial grammar from page 688 in DCG format:"
  ((clear-db))
  ((rule (S (?pred ?subj)) -->
         (NP ?agr ?subj)
         (VP ?agr ?pred)) @ 692)
  ((rule (NP ?agr (?det ?n)) -->
         (Det ?agr ?det)
         (N ?agr ?n)))
  ((rule (NP 3sg (the male)) --> (:word he)) @ 693)
  ((rule (NP ~3sg (some objects)) --> (:word they)))
  ((rule (VP 3sg sleep) --> (:word sleeps)))
  ((rule (VP ~3sg sleep) --> (:word sleep)))
  ((rule (Det ?any the) --> (:word the)))
  ((rule (N 3sg (young male human)) --> (:word boy)))
  ((rule (N 3sg (young female human)) --> (:word girl)))
  "We can parse some of the sentences from page 689 (but in DCG format)."
  "Parsing:"
  ((?- (S ?sem (He sleeps) ())) :input ".")
  "Generating:"
  ((?- (S (sleep (the male)) ?words ())) :input ".")
  "Enumerating:"
  ((?- (S ?sem ?words ())) :input ";;;;")
  "If we want the interpretation of 'Terry kisses Jean' to be"
  "(kiss Terry Jean) not ((lambda (x) (kiss x Jean)) Terry), then we need"
  "a way to unify semantic components together. Here's one way:"
  ((clear-db))
  ((rule (S ?pred) -->
         (NP ?agr ?subj)
         (VP ?agr ?subj ?pred)) @ 694)
  ((rule (VP ?agr ?subj ?pred) -->
         (Verb/tr ?agr ?subj ?pred ?obj)
         (NP ?any-agr ?obj)))
  ((rule (VP ?agr ?subj ?pred) -->
         (Verb/intr ?agr ?subj ?pred)))
  ((rule (Verb/tr ~3sg ?x (kiss ?x ?y) ?y) --> (:word kiss)))
  ((rule (Verb/tr 3sg ?x (kiss ?x ?y) ?y) --> (:word kisses)))
  ((rule (Verb/tr ?any ?x (kiss ?x ?y) ?y) --> (:word kissed)))
  ((rule (Verb/intr ~3sg ?x (sleep ?x)) --> (:word sleep)))
  ((rule (Verb/intr 3sg ?x (sleep ?x)) --> (:word sleeps)))
  ((rule (Verb/intr ?any ?x (sleep ?x)) --> (:word slept)))
  "Here are the rules for noun phrases and nouns"
  ((rule (NP ?agr ?sem) -->
         (Name ?agr ?sem)))
  ((rule (NP ?agr (?det-sem ?noun-sem)) -->
         (Det ?agr ?det-sem)
         (Noun ?agr ?noun-sem)))
  ((rule (Name 3sg Terry) --> (:word Terry)))
  ((rule (Name 3sg Jean) --> (:word Jean)))
  ((rule (Noun 3sg (young male human)) --> (:word boy)) @ 695)
  ((rule (Noun 3sg (young female human)) --> (:word girl)))
  ((rule (Noun ~3sg (group (young male human))) --> (:word boys)))
  ((rule (Noun ~3sg (group (young female human))) --> (:word girls)))
  ((rule (Det ?any the) --> (:word the)))
  ((rule (Det 3sg a) --> (:word a)))
  "This grammar and lexicon generates more sentences, although it is still"
  "rather limited. Here are some examples:"
  ((?- (S ?sem (The boys kiss a girl) ())) @ 695 :input ";.")
  ((?- (S ?sem (The girls kissed the girls) ())) :input ";.")
  ((?- (S ?sem (Terry kissed the girl) ())) :input ";.")
  ((?- (S ?sem (The girls kisses the boys) ())) :input ";.")
  ((?- (S ?sem (Terry kissed a girls) ())) :input ";.")
  ((?- (S ?sem (Terry sleeps Jean) ())) :input ";.")
  (:section "20.4 A DCG Grammar with Quantifiers")
  ((clear-db))
  ((rule (Det ?any ?x ?p ?q (the ?x (and ?p ?q))) --> (:word the)) @ 697)
  ((rule (Det 3sg ?x ?p ?q (exists ?x (and ?p ?q))) --> (:word a)))
  ((rule (Det 3sg ?x ?p ?q (all ?x (-> ?p ?q))) --> (:word every)))
  ((rule (Noun 3sg ?x (picture ?x)) --> (:word picture)) @ 698)
  ((rule (Noun 3sg ?x (story ?x)) --> (:word story)))
  ((rule (Noun 3sg ?x (and (young ?x) (male ?x) (human ?x))) -->
         (:word boy)))
  ((rule (NP ?agr ?x ?pred ?pred) -->
         (Name ?agr ?name)))
  ((rule (NP ?agr ?x ?pred ?np) -->
         (Det ?agr ?x ?noun&rel ?pred ?np)
         (Noun ?agr ?x ?noun)
         (rel-clause ?agr ?x ?noun ?noun&rel)))
  ((rule (rel-clause ?agr ?x ?np ?np) --> ))
  ((rule (rel-clause ?agr ?x ?np (and ?np ?rel)) -->
         (:word that)
         (VP ?agr ?x ?rel)))
  ((rule (Verb/tr ~3sg ?x ?y (paint ?x ?y)) --> (:word paint)) @ 699)
  ((rule (Verb/tr 3sg ?x ?y (paint ?x ?y)) --> (:word paints)))
  ((rule (Verb/tr ?any ?x ?y (paint ?x ?y)) --> (:word painted)))
  ((rule (Verb/intr ~3sg ?x (sleep ?x)) --> (:word sleep)))
  ((rule (Verb/intr 3sg ?x (sleep ?x)) --> (:word sleeps)))
  ((rule (Verb/intr ?any ?x (sleep ?x)) --> (:word slept)))
  ((rule (Verb/intr 3sg ?x (sells ?x)) --> (:word sells)))
  ((rule (Verb/intr 3sg ?x (stinks ?x)) --> (:word stinks)))
  ((rule (VP ?agr ?x ?vp) -->
         (Verb/tr ?agr ?x ?obj ?verb)
         (NP ?any-agr ?obj ?verb ?vp)))
  ((rule (VP ?agr ?x ?vp) -->
         (Verb/intr ?agr ?x ?vp)))
  ((rule (S ?np) -->
         (NP ?agr ?x ?vp ?np)
         (VP ?agr ?x ?vp)))
  "Now we define a function to show the output from a query."
  "In the book, you just saw the output of such a function."
  ((defun do-s (words)
     (top-level-prove `((S ?sem ,words ())))))
  ((do-s '(Every picture paints a story)) :input "." @ 699)
  ((do-s '(Every boy that paints a picture sleeps)) :input ".")
  ((do-s '(Every boy that sleeps paints a picture)) :input ".")
  ((do-s '(Every boy that paints a picture that sells paints a picture
           that stinks)) :input "." @ 700)
  (:section "20.5 Preserving Quantifier Scope Ambiguity")
  ((clear-db))
  ((rule (S (and ?np ?vp)) -->
         (NP ?agr ?x ?np)
         (VP ?agr ?x ?vp)) @ 701)
  ((rule (VP ?agr ?x (and ?verb ?obj)) -->
         (Verb/tr ?agr ?x ?o ?verb)
         (NP ?any-agr ?o ?obj)))
  ((rule (VP ?agr ?x ?verb) -->
         (Verb/intr ?agr ?x ?verb)))
  ((rule (NP ?agr ?name t) -->
         (Name ?agr ?name)))
  ((rule (NP ?agr ?x ?det) -->
         (Det ?agr ?x (and ?noun ?rel) ?det)
         (Noun ?agr ?x ?noun)
         (rel-clause ?agr ?x ?rel)))
  ((rule (rel-clause ?agr ?x t) --> ))
  ((rule (rel-clause ?agr ?x ?rel) -->
         (:word that)
         (VP ?agr ?x ?rel)))
  ((rule (Name 3sg Terry) --> (:word Terry)))
  ((rule (Name 3sg Jean) --> (:word Jean)))
  ((rule (Det 3sg ?x ?restr (all ?x ?restr)) --> (:word every)))
  ((rule (Noun 3sg ?x (man ?x)) --> (:word man)))
  ((rule (Verb/tr 3sg ?x ?y (love ?x ?y)) --> (:word loves)))
  ((rule (Verb/intr 3sg ?x (lives ?x)) --> (:word lives)))
  ((rule (Det 3sg ?x ?res (exists ?x ?res)) --> (:word a)))
  ((rule (Noun 3sg ?x (woman ?x)) --> (:word woman)))
  "Here is an example of the new representation:"
  ((do-s '(every man loves a woman)) :input "." @ 701)
  )
;;; Chapter 21: examples for the comprehensive grammar of English
;;; (requires "grammar" and "lexicon"; rules are Prolog-compiled).
(defexamples 21 "A Grammar of English"
  ((if (boundp 'clear-db) (clear-db)) @ 715)
  ((requires "grammar" "lexicon"))
  ((prolog-compile-symbols))
  (:section "21.10 Word Categories")
  ((?- (word sees verb ?infl ?senses)) :input ".")
  ((try S John promised Kim to persuade Lee to sleep) :input ";;;.")
  (:section "21.14 Examples")
  ((try S When did John promise Kim to persuade Lee to sleep)
   @ 746 :input ";;;.")
  ((try S Kim would not have been looking for Lee) @ 747 :input ";;;.")
  ((try s It should not surprise you that Kim does not like Lee) :input ";;;.")
  )
(defexamples 22 "Scheme: An Uncommon Lisp"
"This chapter presents the Scheme dialect of Lisp and an interpreter for it."
"Understanding the interpreter can give you a better appreciation of Lisp."
(:section "22.1 A Scheme Interpreter")
((requires "interp1"))
"We're ready to try out the interpreter. Note we provide an argument"
"to avoid going into a read-eval-print loop with SCHEME. This is a new"
"functionality, no in the book, added to make these examples easier."
((scheme '(+ 2 2)) @ 760 => 4 )
((scheme '((if (= 1 2) * +) 3 4)) => 7)
((scheme '((if (= 1 1) * +) 3 4)) => 12 @ 761)
((scheme '(set! fact (lambda (n) (if (= n 0) 1
(* n (fact (- n 1))))))))
((scheme '(fact 5)) => 120)
((scheme '(set! table (lambda (f start end)
(if (<= start end)
(begin
(write (list start (f start)))
(newline)
(table f (+ start 1) end)))))))
((scheme '(table fact 1 10)) => NIL )
((scheme '(table (lambda (x) (* x x x)) 5 10)) => NIL)
(:section "22.2 Syntactic Extension with Macros")
"Scheme has a number of special forms that were not listed above."
"These can be implemented by macros (although macros are not officially"
"part of Scheme). We can test out the macro facility:"
((scheme-macro-expand '(and p q)) => (IF P (AND Q)) @ 765)
((scheme-macro-expand '(and q)) => Q)
((scheme-macro-expand '(let ((x 1) (y 2)) (+ x y))) =>
((LAMBDA (X Y) (+ X Y)) 1 2))
((scheme-macro-expand
'(letrec
((even? (lambda (x) (or (= x 0) (odd? (- x 1)))))
(odd? (lambda (x) (even? (- x 1)))))
(even? z))))
"Now let's look at uses of the macros DEFINE and LET*"
((scheme '(define (reverse l)
(if (null? l) nil
(append (reverse (cdr l)) (list (car l)))))) => REVERSE)
((scheme '(reverse '(a b c d))) => (D C B A))
((scheme '(let* ((x 5) (y (+ x x)))
(if (or (= x 0) (and (< 0 y) (< y 20)))
(list x y)
(+ y x)))) => (5 10))
(:section "22.4 Throw, Catch, and Call/cc")
((requires "interp3"))
"Non-local flow of control is provided in Scheme with a very general and"
"powerful procedure, CALL-WITH-CURRENT-CONTINUATION, which is often"
"abbreviated CALL/CC. Here are some examples:"
((scheme '(+ 1 (call/cc (lambda (cc) (+ 20 300))))) @ 770 => 321)
"The above example ignores CC and computes (+ 1 (+ 20 300))"
"The next example does make use of CC:"
((scheme '(+ 1 (call/cc (lambda (cc) (+ 20 (cc 300)))))) => 301)
"The above passes 300 to CC, thus bypassing the addition of 20."
"It effectively throws 300 out to the catch point established by call/cc."
)
(defexamples 23 "Compiling Lisp"
"Compilers are simple to write and useful to know about."
"In this chapter we develop a simple compiler for Scheme."
""
((requires "compile1"))
"Now we are ready to show the simple compiler at work:"
((comp-show '(if (= x y) (f (g x)) (h x y (h 1 2)))) @ 791)
"Here are some places where a compiler could do better than an interpreter"
"(although our compiler currently does not):"
((comp-show '(begin "doc" (write x) y)) @ 792)
"We should not have to push 'doc' on the stack just to pop it off."
"Here's another example:"
((comp-show '(begin (+ (* a x) (f x)) x)))
"Here's an example using local variables:"
((comp-show '((lambda (x) ((lambda (y z) (f x y z)) 3 x)) 4)) @ 794)
(:section "23.1 A Properly Tail-Recursive Compiler")
"Notice the two new instructions, CALLJ and SAVE"
((requires "compile2"))
"First we see how nested function calls work:"
((comp-show '(f (g x))) @ 796)
"In the next example we see that unneeded constants and variables in BEGIN"
"expressions are ignored:"
((comp-show '(begin "doc" x (f x) y)) @ 797)
((comp-show '(begin (+ (* a x) (f x)) x)))
"Here are some examples of IF expressions:"
((comp-show '(if p (+ x y) (* x y))) @ 801)
"If we put the same code inside a BEGIN we get something quite different:"
((comp-show '(begin (if p (+ x y) (* x y)) z)) @ 802)
"Here are some more examples of the compiler at work:"
((comp-show '(if (null? (car l)) (f (+ (* a x) b))
(g (/ x 2)))) @ 806)
((comp-show '(define (last1 l)
(if (null? (cdr l)) (car l)
(last1 (cdr l))))) @ 807)
((comp-show '(define (length l)
(if (null? l) 0 (+ 1 (length (cdr l)))))) @ 808)
"Of course, it is possible to write LENGTH in tail-recursive fashion:"
((comp-show '(define (length l)
(letrec ((len (lambda (l n)
(if (null? l) n
(len (rest l) (+ n 1))))))
(len l 0)))))
(:section "23.4 A Peephole Optimizer")
"In this section we investigate a simple technique that will generate"
"slightly better code in cases where the compiler is less than perfect."
((requires "compile3" "compopt"))
((comp-show '(begin (if (if t 1 (f x)) (set! x 2)) x)) @ 818)
)
| null | https://raw.githubusercontent.com/zlatozar/study-paip/dfa1ca6118f718f5d47d8c63cbb7b4cad23671e1/examples.lisp | lisp | Syntax : COMMON - LISP ; Package : TUTOR ; Base : 10 -*-
NOTE: this file will be spread through chapters |
Code from Paradigms of AI Programming
Copyright ( c ) 1991 , 1996
(in-package #:tutor)
(defexamples 15 "Symbolic Mathematics with Canonical Forms"
"This chapter uses a canonical representation for polynomials"
"to achieve a more efficient program than the rules-based one in Chapter 8."
(:section "15.1 A Canonical Form for Polynomials")
((requires "cmacsyma"))
"We represent polynomials as vectors, with the variable in element 0,"
"and the coefficients starting in element 1 and going up from there."
"Here is the representation of 5x^3 + 10x^2 + 20x + 30"
('#(x 30 20 10 5) @ 511)
"Here are some examples (without the interactive loop):"
((canon '(3 + x + 4 - x)) => 7 @ 521)
((canon '(x + y + y + x)) => ((2 * x) + (2 * y)))
((canon '(3 * x + 4 * x)) => (7 * x))
((canon '(3 * x + y + x + 4 * x)) => ((8 * x) + y))
((canon '((x + 1) ^ 10)) =>
((x ^ 10) + (10 * (x ^ 9)) + (45 * (x ^ 8)) + (120 * (x ^ 7))
+ (210 * (x ^ 6)) + (252 * (x ^ 5)) + (210 * (x ^ 4))
+ (120 * (x ^ 3)) + (45 * (x ^ 2)) + (10 * x) + 1))
((canon '((x + 1) ^ 10 - (x - 1) ^ 10)) =>
((20 * (x ^ 8)) + (240 * (x ^ 7)) + (504 * (x ^ 5))
+ (240 * (x ^ 3)) + (20 * x)))
((canon '(d (3 * x ^ 2 + 2 * x + 1) / d x)) @ 522 =>
((6 * x) + 2))
((canon '(d (z + 3 * x + 3 * z * x ^ 2 + z ^ 2 * x ^ 3) / d z)) =>
(((2 * z) * (x ^ 3)) + (3 * (x ^ 2)) + 1)))
(defexamples 16 "Expert Systems"
"In this chapter we develop an expert system shell, and give it a few rules"
"about infectious disease, thus duplicating some of the Mycin system."
((requires "mycin-r"))
"Because this is an interactive system, we can't show the interaction here."
"You can try it yourself by evaluating (mycin)"
)
(defexamples 17 "Line Diagram Labelling by Constraint Satisfaction"
"In this chapter we look at the line-diagram labeling problem: Given a list"
"of lines and the vertexes at which they intersect, how can we determine"
"what the lines represent?"
((requires "waltz"))
(:section "17.2 Combining Constraints and Searching")
"First let's test that we can find the possible labelings for a vertex class:"
((possible-labelings 'Y) @ 574 =>
((+ + +) (- - -) (L R -) (- L R) (R - L)))
"Notice how matrix-transpose works:"
((matrix-transpose (possible-labelings 'Y)) =>
((+ - L - R)
(+ - R L -)
(+ - - R L)))
((defdiagram cube
(a Y b c d)
(b W g e a)
(c W e f a)
(d W f g a)
(e L c b)
(f L d c)
(g L b d)) @ 575)
(:section "17.3 Labelling Diagrams")
"We are now ready to try labelling diagrams. First the cube:"
((print-labelings (diagram 'cube)) @ 577)
"The cube should have given four solutions."
"We can get down to one solution by grounding line GD:"
((print-labelings (ground (diagram 'cube) 'g 'd)) @ 580)
"For the more complex cube on a plate, we get similar results;"
"Four interpretations, which turn to one after grounding line KM:"
((defdiagram cube-on-plate
(a Y b c d)
(b W g e a)
(c W e f a)
(d W f g a)
(e L c b)
(f Y d c i)
(g Y b d h)
(h W l g j)
(i W f m j)
(j Y h i k)
(k W m l j)
(l L h k)
(m L k i)) @ 581)
((print-labelings (ground (diagram 'cube-on-plate) 'k 'm)) @ 582)
"It is interesting to try the algorithm on an 'impossible' diagram."
"It turns out the algorithm correctly finds no interpretation for this"
"well-known illusion:"
((defdiagram poiuyt
(a L b g)
(b L j a)
(c L d l)
(d L h c)
(e L f i)
(f L k e)
(g L a l)
(h L l d)
(i L e k)
(j L k b)
(k W j i f)
(l W h g c)) @ 583)
((print-labelings (diagram 'poiuyt)) @ 583)
"Now we try a more complex diagram:"
((defdiagram tower
(a Y b c d) (n L q o)
(b W g e a) (o W y j n)
(c W e f a) (p L r i)
(d W f g a) (q W n s w)
(e L c b) (r W s p x)
(f Y d c i) (s L r q)
(g Y b d h) (t W w x z)
(h W l g j) (u W x y z)
(i W f m p) (v W y w z)
(j Y h o k) (w Y t v q)
(k W m l j) (x Y r u t)
(l L h k) (y Y v u o)
(m L k i) (z Y t u v)) @ 584)
((print-labelings (ground (diagram 'tower) 'l 'k)) @ 584))
(defexamples 18 "Search and the Game of Othello"
"In this chapter we will develop a simplified Othello-playing program."
"It will not be a champion, but is much better than beginning players."
(:section "18.2 Representation Choices")
((requires "othello"))
"First, we see that our choices for representing the board seem to work:"
((print-board (initial-board)) @ 604)
"Now we can compare the weighted squares and count difference strategies"
"by playing two games, alternating who goes first. The NIL as third argument"
"means don't print the board after each move."
((othello (maximizer #'weighted-squares)
(maximizer #'count-difference) nil) @ 610)
((othello (maximizer #'count-difference)
(maximizer #'weighted-squares) nil))
(:section "18.4 Searching Ahead: Minimax")
"We can test the minimax strategy, and see that searching ahead 3 ply is"
"indeed better than looking at only 1 ply. We can follow the whole game"
((othello (minimax-searcher 3 #'count-difference)
(maximizer #'count-difference)) @ 614 => 53)
(:section "18.5 Smarter Searching: Alpha-Beta Search")
"The following should produce the same result, only faster:"
((othello (alpha-beta-searcher 3 #'count-difference)
(maximizer #'count-difference) nil) => 53)
(:section "18.8 Playing a Series of Games")
"A single game is not enough to establish that one strategy is better than"
"another. The function RANDOM-OTHELLO-SERIES allows two strategies to"
"compete in a series of games."
((requires "othello2"))
((random-othello-series
(alpha-beta-searcher 2 #'weighted-squares)
(alpha-beta-searcher 2 #'modified-weighted-squares)
5) @ 628)
"Here is a comparison of five strategies that search only 1 ply."
"To save time, we run 2 pairs of games each, not 5 pairs."
((round-robin
(list (maximizer #'count-difference)
(maximizer #'mobility)
(maximizer #'weighted-squares)
(maximizer #'modified-weighted-squares)
#'random-strategy)
2 10
'(count-difference mobility weighted modified-weighted random)) @ 629)
"Now we compare alpha-beta searchers at 3 ply for 1 pair of games each."
"In the book it was 4 ply for 5 pairs each, but that takes too long."
((round-robin
(list (alpha-beta-searcher 3 #'count-difference)
(alpha-beta-searcher 3 #'weighted-squares)
(alpha-beta-searcher 3 #'modified-weighted-squares)
#'random-strategy)
1 10
'(count-difference weighted modified-weighted random)))
)
(defexamples 19 "Introduction to Natural Language"
"This chapter is a brief introduction to natural language processing."
(:section "19.1 Parsing with a Phrase-Structure Grammar")
"We start with the grammar defined on page 39 for the GENERATE program."
"I include 'noun' and 'verb' as nouns in the grammar *grammar3*"
((requires "syntax1"))
(*grammar3* @ 657)
((use *grammar3*))
((parser '(the table)) => ((NP (ART THE) (NOUN TABLE))))
((parser '(the ball hit the table)) =>
((SENTENCE (NP (ART THE) (NOUN BALL))
(VP (VERB HIT)
(NP (ART THE) (NOUN TABLE))))))
((parser '(the noun took the verb)) =>
((SENTENCE (NP (ART THE) (NOUN NOUN))
(VP (VERB TOOK)
(NP (ART THE) (NOUN VERB))))))
"The range of sentences we can parse is quite limited."
"The following grammar includes a wider variety."
(*grammar4* @ 661)
((use *grammar4*))
((parser '(The man hit the table with the ball)) =>
((S (NP (D THE) (N MAN))
(VP (VP (V HIT) (NP (D THE) (N TABLE)))
(PP (P WITH) (NP (D THE) (N BALL)))))
(S (NP (D THE) (N MAN))
(VP (V HIT)
(NP (NP (D THE) (N TABLE))
(PP (P WITH) (NP (D THE) (N BALL))))))))
"Here we see a phrase that is ambiguous between a sentence and a noun phrase:"
((parser '(the orange saw)) @ 662 =>
((S (NP (D THE) (N ORANGE)) (VP (V SAW)))
(NP (D THE) (A+ (A ORANGE)) (N SAW))))
(:section "19.4 The Unknown-Word Problem")
"As it stands, the parser cannot deal with unknown words."
"One way of treating unknown words is to allow them to be any of the"
"'open-class' categories--nouns, verbs, adjectives, and names."
((parser '(John liked Mary)) @ 664 =>
((S (NP (NAME JOHN))
(VP (V LIKED) (NP (NAME MARY))))))
((parser '(Dana liked Dale)) @ 665 =>
((S (NP (NAME DANA))
(VP (V LIKED) (NP (NAME DALE))))))
"We see the parser works as well with words it knows (John and Mary)"
"as with new words (Dana and Dale), which it can recognize as names"
"because of their position in the sentence."
((parser '(the rab zaggled the woogly quax)) =>
((S (NP (D THE) (N RAB))
(VP (V ZAGGLED) (NP (D THE) (A+ (A WOOGLY)) (N QUAX))))))
((parser '(the slithy toves gymbled)) =>
((S (NP (D THE) (N SLITHY)) (VP (V TOVES) (NP (NAME GYMBLED))))
(S (NP (D THE) (A+ (A SLITHY)) (N TOVES)) (VP (V GYMBLED)))
(NP (D THE) (A+ (A SLITHY) (A+ (A TOVES))) (N GYMBLED))))
((parser '(the slithy toves gymbled on the wabe)) =>
((S (NP (D THE) (N SLITHY))
(VP (VP (V TOVES) (NP (NAME GYMBLED)))
(PP (P ON) (NP (D THE) (N WABE)))))
(S (NP (D THE) (N SLITHY))
(VP (V TOVES) (NP (NP (NAME GYMBLED))
(PP (P ON) (NP (D THE) (N WABE))))))
(S (NP (D THE) (A+ (A SLITHY)) (N TOVES))
(VP (VP (V GYMBLED)) (PP (P ON) (NP (D THE) (N WABE)))))
(NP (NP (D THE) (A+ (A SLITHY) (A+ (A TOVES))) (N GYMBLED))
(PP (P ON) (NP (D THE) (N WABE))))))
(:section "19.5 Parsing into a Semantic Representation")
((requires "syntax2"))
"Syntactic parse trees of a sentence may be interesting, but by themselves"
"they're not very useful. We use sentences to communicate ideas, not to"
"display grammatical structures."
""
"Imagine a compact disc player for which you can punch buttons like"
"'play 1 to 5 without 3'. We will define such a language."
"The meaning of a sentence in the language is the list of tracks played."
(*grammar5* @ 667)
((use *grammar5*))
((meanings '(1 to 5 without 3)) @ 669 => ((1 2 4 5)))
((meanings '(1 to 4 and 7 to 9)) => ((1 2 3 4 7 8 9)))
((meanings '(1 to 6 without 3 and 4)) => ((1 2 4 5 6) (1 2 5 6)))
"The example '1 to 6 without 3 and 4' is ambiguous."
"The syntactic ambiguity leads to a semantic ambiguity."
"We can define a new grammar that eliminates some ambiguities:"
(*grammar6* @ 669)
((use *grammar6*))
"With this new grammar, we can get single interpretations out of most inputs"
((meanings '(1 to 6 without 3 and 4)) => ((1 2 5 6)))
((meanings '(1 and 3 to 7 and 9 without 5 and 6)) => ((1 3 4 7 9)))
((meanings '(1 and 3 to 7 and 9 without 5 and 2)) => ((1 3 4 6 7 9 2)))
((meanings '(1 9 8 to 2 0 1)) => ((198 199 200 201)))
((meanings '(1 2 3)) => (123 (123)))
(:section "19.6 Parsing with Preferences")
((requires "syntax3"))
"We need some compromise between the permissive grammar, which generated"
"all possible parses, and the restrictive grammar, which eliminates too"
"many parses. To get the 'best' interpretation we will need not only a"
"new grammar, we will also need to modify the program to compare the"
"relative worth of candidate interpretations."
(*grammar7* @ 673)
((use *grammar7*))
"We will need a way to show off the prefernce rankings:"
((all-parses '(1 to 6 without 3 and 4)) @ 675)
((all-parses '(1 and 3 to 7 and 9 without 5 and 6)))
((all-parses '(1 and 3 to 7 and 9 without 5 and 2)) @ 676)
"In each case, the preference rules are able to assign higher scores to"
"more reasonable interpretations. What we really want is to pick the best."
"Here we see some examples:"
((meaning '(1 to 5 without 3 and 4)) => (1 2 5))
((meaning '(1 to 5 without 3 and 6)) => (1 2 4 5 6))
((meaning '(1 to 5 without 3 and 6 shuffled)))
((meaning '([ 1 to 5 without [ 3 and 6 ] ] reversed)) => (5 4 2 1))
((meaning '(1 to 5 to 9)) => NIL)
)
(defexamples 20 "Unification Grammars"
"Prolog was invented as a formalism to describe the grammar of French."
"It is still useful to view a grammar as a set of logic programming clauses."
"This chapter describes how that can be done."
((requires "unifgram"))
(:section "20.3 A Simple Grammar in DCG Format")
"Here is the trivial grammar from page 688 in DCG format:"
((clear-db))
((rule (S (?pred ?subj)) -->
(NP ?agr ?subj)
(VP ?agr ?pred)) @ 692)
((rule (NP ?agr (?det ?n)) -->
(Det ?agr ?det)
(N ?agr ?n)))
((rule (NP 3sg (the male)) --> (:word he)) @ 693)
((rule (NP ~3sg (some objects)) --> (:word they)))
((rule (VP 3sg sleep) --> (:word sleeps)))
((rule (VP ~3sg sleep) --> (:word sleep)))
((rule (Det ?any the) --> (:word the)))
((rule (N 3sg (young male human)) --> (:word boy)))
((rule (N 3sg (young female human)) --> (:word girl)))
"We can parse some of the sentences from page 689 (but in DCG format)."
"Parsing:"
((?- (S ?sem (He sleeps) ())) :input ".")
"Generating:"
((?- (S (sleep (the male)) ?words ())) :input ".")
"Enumerating:"
((?- (S ?sem ?words ())) :input ";;;;")
"If we want the interpretation of 'Terry kisses Jean' to be"
"(kiss Terry Jean) not ((lambda (x) (kiss x Jean)) Terry), then we need"
"a way to unify semantic components together. Here's one way:"
((clear-db))
((rule (S ?pred) -->
(NP ?agr ?subj)
(VP ?agr ?subj ?pred)) @ 694)
((rule (VP ?agr ?subj ?pred) -->
(Verb/tr ?agr ?subj ?pred ?obj)
(NP ?any-agr ?obj)))
((rule (VP ?agr ?subj ?pred) -->
(Verb/intr ?agr ?subj ?pred)))
((rule (Verb/tr ~3sg ?x (kiss ?x ?y) ?y) --> (:word kiss)))
((rule (Verb/tr 3sg ?x (kiss ?x ?y) ?y) --> (:word kisses)))
((rule (Verb/tr ?any ?x (kiss ?x ?y) ?y) --> (:word kissed)))
((rule (Verb/intr ~3sg ?x (sleep ?x)) --> (:word sleep)))
((rule (Verb/intr 3sg ?x (sleep ?x)) --> (:word sleeps)))
((rule (Verb/intr ?any ?x (sleep ?x)) --> (:word slept)))
"Here are the rules for noun phrases and nouns"
((rule (NP ?agr ?sem) -->
(Name ?agr ?sem)))
((rule (NP ?agr (?det-sem ?noun-sem)) -->
(Det ?agr ?det-sem)
(Noun ?agr ?noun-sem)))
((rule (Name 3sg Terry) --> (:word Terry)))
((rule (Name 3sg Jean) --> (:word Jean)))
((rule (Noun 3sg (young male human)) --> (:word boy)) @ 695)
((rule (Noun 3sg (young female human)) --> (:word girl)))
((rule (Noun ~3sg (group (young male human))) --> (:word boys)))
((rule (Noun ~3sg (group (young female human))) --> (:word girls)))
((rule (Det ?any the) --> (:word the)))
((rule (Det 3sg a) --> (:word a)))
"This grammar and lexicon generates more sentences, although it is still"
"rather limited. Here are some examples:"
((?- (S ?sem (The boys kiss a girl) ())) @ 695 :input ";.")
((?- (S ?sem (The girls kissed the girls) ())) :input ";.")
((?- (S ?sem (Terry kissed the girl) ())) :input ";.")
((?- (S ?sem (The girls kisses the boys) ())) :input ";.")
((?- (S ?sem (Terry kissed a girls) ())) :input ";.")
((?- (S ?sem (Terry sleeps Jean) ())) :input ";.")
(:section "20.4 A DCG Grammar with Quantifiers")
((clear-db))
((rule (Det ?any ?x ?p ?q (the ?x (and ?p ?q))) --> (:word the)) @ 697)
((rule (Det 3sg ?x ?p ?q (exists ?x (and ?p ?q))) --> (:word a)))
((rule (Det 3sg ?x ?p ?q (all ?x (-> ?p ?q))) --> (:word every)))
((rule (Noun 3sg ?x (picture ?x)) --> (:word picture)) @ 698)
((rule (Noun 3sg ?x (story ?x)) --> (:word story)))
((rule (Noun 3sg ?x (and (young ?x) (male ?x) (human ?x))) -->
(:word boy)))
((rule (NP ?agr ?x ?pred ?pred) -->
(Name ?agr ?name)))
((rule (NP ?agr ?x ?pred ?np) -->
(Det ?agr ?x ?noun&rel ?pred ?np)
(Noun ?agr ?x ?noun)
(rel-clause ?agr ?x ?noun ?noun&rel)))
((rule (rel-clause ?agr ?x ?np ?np) --> ))
((rule (rel-clause ?agr ?x ?np (and ?np ?rel)) -->
(:word that)
(VP ?agr ?x ?rel)))
((rule (Verb/tr ~3sg ?x ?y (paint ?x ?y)) --> (:word paint)) @ 699)
((rule (Verb/tr 3sg ?x ?y (paint ?x ?y)) --> (:word paints)))
((rule (Verb/tr ?any ?x ?y (paint ?x ?y)) --> (:word painted)))
((rule (Verb/intr ~3sg ?x (sleep ?x)) --> (:word sleep)))
((rule (Verb/intr 3sg ?x (sleep ?x)) --> (:word sleeps)))
((rule (Verb/intr ?any ?x (sleep ?x)) --> (:word slept)))
((rule (Verb/intr 3sg ?x (sells ?x)) --> (:word sells)))
((rule (Verb/intr 3sg ?x (stinks ?x)) --> (:word stinks)))
((rule (VP ?agr ?x ?vp) -->
(Verb/tr ?agr ?x ?obj ?verb)
(NP ?any-agr ?obj ?verb ?vp)))
((rule (VP ?agr ?x ?vp) -->
(Verb/intr ?agr ?x ?vp)))
((rule (S ?np) -->
(NP ?agr ?x ?vp ?np)
(VP ?agr ?x ?vp)))
"Now we define a function to show the output from a query."
"In the book, you just saw the output of such a function."
((defun do-s (words)
(top-level-prove `((S ?sem ,words ())))))
((do-s '(Every picture paints a story)) :input "." @ 699)
((do-s '(Every boy that paints a picture sleeps)) :input ".")
((do-s '(Every boy that sleeps paints a picture)) :input ".")
((do-s '(Every boy that paints a picture that sells paints a picture
that stinks)) :input "." @ 700)
(:section "20.5 Preserving Quantifier Scope Ambiguity")
((clear-db))
((rule (S (and ?np ?vp)) -->
(NP ?agr ?x ?np)
(VP ?agr ?x ?vp)) @ 701)
((rule (VP ?agr ?x (and ?verb ?obj)) -->
(Verb/tr ?agr ?x ?o ?verb)
(NP ?any-agr ?o ?obj)))
((rule (VP ?agr ?x ?verb) -->
(Verb/intr ?agr ?x ?verb)))
((rule (NP ?agr ?name t) -->
(Name ?agr ?name)))
((rule (NP ?agr ?x ?det) -->
(Det ?agr ?x (and ?noun ?rel) ?det)
(Noun ?agr ?x ?noun)
(rel-clause ?agr ?x ?rel)))
((rule (rel-clause ?agr ?x t) --> ))
((rule (rel-clause ?agr ?x ?rel) -->
(:word that)
(VP ?agr ?x ?rel)))
((rule (Name 3sg Terry) --> (:word Terry)))
((rule (Name 3sg Jean) --> (:word Jean)))
((rule (Det 3sg ?x ?restr (all ?x ?restr)) --> (:word every)))
((rule (Noun 3sg ?x (man ?x)) --> (:word man)))
((rule (Verb/tr 3sg ?x ?y (love ?x ?y)) --> (:word loves)))
((rule (Verb/intr 3sg ?x (lives ?x)) --> (:word lives)))
((rule (Det 3sg ?x ?res (exists ?x ?res)) --> (:word a)))
((rule (Noun 3sg ?x (woman ?x)) --> (:word woman)))
"Here is an example of the new representation:"
((do-s '(every man loves a woman)) :input "." @ 701)
)
(defexamples 21 "A Grammar of English"
((if (boundp 'clear-db) (clear-db)) @ 715)
((requires "grammar" "lexicon"))
((prolog-compile-symbols))
(:section "21.10 Word Categories")
((?- (word sees verb ?infl ?senses)) :input ".")
((try S John promised Kim to persuade Lee to sleep) :input ";;;.")
(:section "21.14 Examples")
((try S When did John promise Kim to persuade Lee to sleep)
@ 746 :input ";;;.")
((try S Kim would not have been looking for Lee) @ 747 :input ";;;.")
((try s It should not surprise you that Kim does not like Lee) :input ";;;.")
)
(defexamples 22 "Scheme: An Uncommon Lisp"
"This chapter presents the Scheme dialect of Lisp and an interpreter for it."
"Understanding the interpreter can give you a better appreciation of Lisp."
(:section "22.1 A Scheme Interpreter")
((requires "interp1"))
"We're ready to try out the interpreter. Note we provide an argument"
"to avoid going into a read-eval-print loop with SCHEME. This is a new"
"functionality, no in the book, added to make these examples easier."
((scheme '(+ 2 2)) @ 760 => 4 )
((scheme '((if (= 1 2) * +) 3 4)) => 7)
((scheme '((if (= 1 1) * +) 3 4)) => 12 @ 761)
((scheme '(set! fact (lambda (n) (if (= n 0) 1
(* n (fact (- n 1))))))))
((scheme '(fact 5)) => 120)
((scheme '(set! table (lambda (f start end)
(if (<= start end)
(begin
(write (list start (f start)))
(newline)
(table f (+ start 1) end)))))))
((scheme '(table fact 1 10)) => NIL )
((scheme '(table (lambda (x) (* x x x)) 5 10)) => NIL)
(:section "22.2 Syntactic Extension with Macros")
"Scheme has a number of special forms that were not listed above."
"These can be implemented by macros (although macros are not officially"
"part of Scheme). We can test out the macro facility:"
((scheme-macro-expand '(and p q)) => (IF P (AND Q)) @ 765)
((scheme-macro-expand '(and q)) => Q)
((scheme-macro-expand '(let ((x 1) (y 2)) (+ x y))) =>
((LAMBDA (X Y) (+ X Y)) 1 2))
((scheme-macro-expand
'(letrec
((even? (lambda (x) (or (= x 0) (odd? (- x 1)))))
(odd? (lambda (x) (even? (- x 1)))))
(even? z))))
"Now let's look at uses of the macros DEFINE and LET*"
((scheme '(define (reverse l)
(if (null? l) nil
(append (reverse (cdr l)) (list (car l)))))) => REVERSE)
((scheme '(reverse '(a b c d))) => (D C B A))
((scheme '(let* ((x 5) (y (+ x x)))
(if (or (= x 0) (and (< 0 y) (< y 20)))
(list x y)
(+ y x)))) => (5 10))
(:section "22.4 Throw, Catch, and Call/cc")
((requires "interp3"))
"Non-local flow of control is provided in Scheme with a very general and"
"powerful procedure, CALL-WITH-CURRENT-CONTINUATION, which is often"
"abbreviated CALL/CC. Here are some examples:"
((scheme '(+ 1 (call/cc (lambda (cc) (+ 20 300))))) @ 770 => 321)
"The above example ignores CC and computes (+ 1 (+ 20 300))"
"The next example does make use of CC:"
((scheme '(+ 1 (call/cc (lambda (cc) (+ 20 (cc 300)))))) => 301)
"The above passes 300 to CC, thus bypassing the addition of 20."
"It effectively throws 300 out to the catch point established by call/cc."
)
(defexamples 23 "Compiling Lisp"
"Compilers are simple to write and useful to know about."
"In this chapter we develop a simple compiler for Scheme."
""
((requires "compile1"))
"Now we are ready to show the simple compiler at work:"
((comp-show '(if (= x y) (f (g x)) (h x y (h 1 2)))) @ 791)
"Here are some places where a compiler could do better than an interpreter"
"(although our compiler currently does not):"
((comp-show '(begin "doc" (write x) y)) @ 792)
"We should not have to push 'doc' on the stack just to pop it off."
"Here's another example:"
((comp-show '(begin (+ (* a x) (f x)) x)))
"Here's an example using local variables:"
((comp-show '((lambda (x) ((lambda (y z) (f x y z)) 3 x)) 4)) @ 794)
(:section "23.1 A Properly Tail-Recursive Compiler")
"Notice the two new instructions, CALLJ and SAVE"
((requires "compile2"))
"First we see how nested function calls work:"
((comp-show '(f (g x))) @ 796)
"In the next example we see that unneeded constants and variables in BEGIN"
"expressions are ignored:"
((comp-show '(begin "doc" x (f x) y)) @ 797)
((comp-show '(begin (+ (* a x) (f x)) x)))
"Here are some examples of IF expressions:"
((comp-show '(if p (+ x y) (* x y))) @ 801)
"If we put the same code inside a BEGIN we get something quite different:"
((comp-show '(begin (if p (+ x y) (* x y)) z)) @ 802)
"Here are some more examples of the compiler at work:"
((comp-show '(if (null? (car l)) (f (+ (* a x) b))
(g (/ x 2)))) @ 806)
((comp-show '(define (last1 l)
(if (null? (cdr l)) (car l)
(last1 (cdr l))))) @ 807)
((comp-show '(define (length l)
(if (null? l) 0 (+ 1 (length (cdr l)))))) @ 808)
"Of course, it is possible to write LENGTH in tail-recursive fashion:"
((comp-show '(define (length l)
(letrec ((len (lambda (l n)
(if (null? l) n
(len (rest l) (+ n 1))))))
(len l 0)))))
(:section "23.4 A Peephole Optimizer")
"In this section we investigate a simple technique that will generate"
"slightly better code in cases where the compiler is less than perfect."
((requires "compile3" "compopt"))
((comp-show '(begin (if (if t 1 (f x)) (set! x 2)) x)) @ 818)
)
|
c7dcad458265b0063b2799a1831b10f1089b045486b6c59319608008357dbfe1 | Kappa-Dev/KappaTools | webapp.ml | (******************************************************************************)
(* _ __ * The Kappa Language *)
| |/ / * Copyright 2010 - 2020 CNRS - Harvard Medical School - INRIA - IRIF
(* | ' / *********************************************************************)
(* | . \ * This file is distributed under the terms of the *)
(* |_|\_\ * GNU Lesser General Public License Version 3 *)
(******************************************************************************)
let route_handler
?(shutdown_key : string option = None)
()
:
Cohttp_lwt_unix.Server.conn ->
Cohttp.Request.t ->
Cohttp_lwt.Body.t ->
(Cohttp.Response.t * Cohttp_lwt.Body.t) Lwt.t
=
let intermediate =
Webapp_common.route_handler (Route_root.route ~shutdown_key) in
fun (conn : Cohttp_lwt_unix.Server.conn)
(request : Cohttp.Request.t)
(body : Cohttp_lwt.Body.t)
->
let context = { Webapp_common.arguments = []
; Webapp_common.connection = conn
; Webapp_common.request = request
; Webapp_common.body = body }
in
intermediate ~context
| null | https://raw.githubusercontent.com/Kappa-Dev/KappaTools/eef2337e8688018eda47ccc838aea809cae68de7/webapp/webapp.ml | ocaml | ****************************************************************************
_ __ * The Kappa Language
| ' / ********************************************************************
| . \ * This file is distributed under the terms of the
|_|\_\ * GNU Lesser General Public License Version 3
**************************************************************************** | | |/ / * Copyright 2010 - 2020 CNRS - Harvard Medical School - INRIA - IRIF
let route_handler
?(shutdown_key : string option = None)
()
:
Cohttp_lwt_unix.Server.conn ->
Cohttp.Request.t ->
Cohttp_lwt.Body.t ->
(Cohttp.Response.t * Cohttp_lwt.Body.t) Lwt.t
=
let intermediate =
Webapp_common.route_handler (Route_root.route ~shutdown_key) in
fun (conn : Cohttp_lwt_unix.Server.conn)
(request : Cohttp.Request.t)
(body : Cohttp_lwt.Body.t)
->
let context = { Webapp_common.arguments = []
; Webapp_common.connection = conn
; Webapp_common.request = request
; Webapp_common.body = body }
in
intermediate ~context
|
6b7c4851c9147dd0e74522051601089da2603fadacc0a8eca076f94ce0448c1e | haskell-mafia/mafia | Constraint.hs | # LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
module Test.Mafia.Cabal.Constraint where
import Test.Mafia.Tripping (tripping)
import Mafia.Cabal.Constraint
import Mafia.P
import System.IO (IO)
import Test.Mafia.Arbitrary (EqCabalError(..))
import Test.QuickCheck
import Test.QuickCheck.Instances ()
prop_roundtrip_Constraint :: Constraint -> Property
prop_roundtrip_Constraint =
tripping renderConstraint (first EqCabalError . parseConstraint)
return []
tests :: IO Bool
tests =
$quickCheckAll
| null | https://raw.githubusercontent.com/haskell-mafia/mafia/529440246ee571bf1473615e6218f52cd1e990ae/test/Test/Mafia/Cabal/Constraint.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE NoImplicitPrelude #
# LANGUAGE TemplateHaskell #
module Test.Mafia.Cabal.Constraint where
import Test.Mafia.Tripping (tripping)
import Mafia.Cabal.Constraint
import Mafia.P
import System.IO (IO)
import Test.Mafia.Arbitrary (EqCabalError(..))
import Test.QuickCheck
import Test.QuickCheck.Instances ()
prop_roundtrip_Constraint :: Constraint -> Property
prop_roundtrip_Constraint =
tripping renderConstraint (first EqCabalError . parseConstraint)
return []
tests :: IO Bool
tests =
$quickCheckAll
|
6ab66fb89f49e3bbedbe371731932379960932d0f2f73fc2acce18f34074175b | diagrams/diagrams-lib | Attributes.hs | {-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFunctor #-}
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
{-# LANGUAGE TypeOperators #-}
# LANGUAGE ViewPatterns #
-----------------------------------------------------------------------------
-- |
-- Module : Diagrams.Attributes
Copyright : ( c ) 2011 - 2015 diagrams - lib team ( see LICENSE )
-- License : BSD-style (see LICENSE)
-- Maintainer :
--
-- Diagrams may have /attributes/ which affect the way they are
-- rendered. This module defines some common attributes; particular
-- backends may also define more backend-specific attributes.
--
-- Every attribute type must have a /semigroup/ structure, that is, an
associative binary operation for combining two attributes into one .
-- Unless otherwise noted, all the attributes defined here use the
' Last ' structure , that is , combining two attributes simply keeps
the second one and throws away the first . This means that child
-- attributes always override parent attributes.
--
-----------------------------------------------------------------------------
module Diagrams.Attributes (
-- ** Standard measures
ultraThin, veryThin, thin, medium, thick, veryThick, ultraThick, none
, tiny, verySmall, small, normal, large, veryLarge, huge
-- ** Line width
, LineWidth, getLineWidth
, _LineWidth, _LineWidthM
, lineWidth, lineWidthM
, _lineWidth, _lw, _lineWidthU
, lw, lwN, lwO, lwL, lwG
-- ** Dashing
, Dashing(..), getDashing
, dashing, dashingN, dashingO, dashingL, dashingG
, _dashing, _dashingU
-- * Color
-- $color
, Color(..), SomeColor(..), _SomeColor, someToAlpha
-- ** Opacity
, Opacity, _Opacity
, getOpacity, opacity, _opacity
, FillOpacity, _FillOpacity
, getFillOpacity, fillOpacity, _fillOpacity
, StrokeOpacity, _StrokeOpacity
, getStrokeOpacity, strokeOpacity, _strokeOpacity
-- ** Converting colors
, colorToSRGBA, colorToRGBA
-- * Line stuff
* * style
, LineCap(..)
, getLineCap, lineCap, _lineCap
-- ** Join style
, LineJoin(..)
, getLineJoin, lineJoin, _lineJoin
-- ** Miter limit
, LineMiterLimit(..), _LineMiterLimit
, getLineMiterLimit, lineMiterLimit, lineMiterLimitA, _lineMiterLimit
-- * Recommend optics
, _Recommend
, _Commit
, _recommend
, isCommitted
, committed
) where
import Control.Lens hiding (none, over)
import Data.Colour
import Data.Colour.RGBSpace (RGB (..))
import Data.Colour.SRGB (toSRGB)
import Data.Default.Class
import Data.Distributive
import Data.Monoid.Recommend
import Data.Semigroup
import Data.Typeable
import Diagrams.Core
------------------------------------------------------------------------
-- Standard measures
------------------------------------------------------------------------
-- | Standard measures for line widths ('none' through 'ultraThick')
--   and text/size scales ('tiny' through 'huge').  The line-width
--   measures are normalized against the diagram size but clamped via
--   'atLeast' so they never render thinner than half an output unit,
--   keeping hairlines visible at small output sizes.  The size
--   measures are purely normalized.
none, ultraThin, veryThin, thin, medium, thick, veryThick, ultraThick,
  tiny, verySmall, small, normal, large, veryLarge, huge
  :: OrderedField n => Measure n
none = output 0
ultraThin = normalized 0.0005 `atLeast` output 0.5
veryThin = normalized 0.001 `atLeast` output 0.5
thin = normalized 0.002 `atLeast` output 0.5
medium = normalized 0.004 `atLeast` output 0.5
thick = normalized 0.0075 `atLeast` output 0.5
veryThick = normalized 0.01 `atLeast` output 0.5
ultraThick = normalized 0.02 `atLeast` output 0.5
tiny = normalized 0.01
verySmall = normalized 0.015
small = normalized 0.023
normal = normalized 0.035
large = normalized 0.05
veryLarge = normalized 0.07
huge = normalized 0.10
------------------------------------------------------------------------
-- Line width
------------------------------------------------------------------------
-- | Line widths specified on child nodes always override line widths
--   specified at parent nodes ('Last' semigroup: the second, i.e.
--   innermost, attribute wins).
newtype LineWidth n = LineWidth (Last n)
  deriving (Typeable, Semigroup)

-- | Isomorphism between 'LineWidth' and its underlying value.
_LineWidth :: Iso' (LineWidth n) n
_LineWidth = iso getLineWidth (LineWidth . Last)

-- | Isomorphism between a measured 'LineWidth' and a plain 'Measure'.
_LineWidthM :: Iso' (LineWidthM n) (Measure n)
_LineWidthM = mapping _LineWidth

instance Typeable n => AttributeClass (LineWidth n)

-- | A 'LineWidth' under a 'Measured' context.
type LineWidthM n = Measured n (LineWidth n)

-- | The default line width is 'medium'.
instance OrderedField n => Default (LineWidthM n) where
  def = fmap (LineWidth . Last) medium

-- | Extract the width from a 'LineWidth' attribute.
getLineWidth :: LineWidth n -> n
getLineWidth (LineWidth (Last w)) = w
-- | Set the line (stroke) width.
lineWidth :: (N a ~ n, HasStyle a, Typeable n) => Measure n -> a -> a
lineWidth m = applyMAttr (fmap (LineWidth . Last) m)

-- | Apply an already-measured 'LineWidth' attribute.
lineWidthM :: (N a ~ n, HasStyle a, Typeable n) => LineWidthM n -> a -> a
lineWidthM attr = applyMAttr attr

-- | Short synonym for 'lineWidth'.
lw :: (N a ~ n, HasStyle a, Typeable n) => Measure n -> a -> a
lw m = lineWidth m

-- | A convenient synonym for @'lineWidth' ('global' w)@.
lwG :: (N a ~ n, HasStyle a, Typeable n, Num n) => n -> a -> a
lwG w = lw (global w)

-- | A convenient synonym for @'lineWidth' ('normalized' w)@.
lwN :: (N a ~ n, HasStyle a, Typeable n, Num n) => n -> a -> a
lwN w = lw (normalized w)

-- | A convenient synonym for @'lineWidth' ('output' w)@.
lwO :: (N a ~ n, HasStyle a, Typeable n) => n -> a -> a
lwO w = lw (output w)

-- | A convenient synonym for @'lineWidth' ('local' w)@.
lwL :: (N a ~ n, HasStyle a, Typeable n, Num n) => n -> a -> a
lwL w = lw (local w)
-- | Lens onto a measured line width in a style.  An absent attribute
--   reads as the default ('medium'); the @'anon' def (const False)@
--   combinator means the attribute is never deleted from the style,
--   even when set back to the default.
_lineWidth, _lw :: (Typeable n, OrderedField n) => Lens' (Style v n) (Measure n)
_lineWidth = atMAttr . anon def (const False) . _LineWidthM
_lw = _lineWidth

-- | Lens onto the unmeasured line width attribute. This is useful for
--   backends to use on styles once they have been unmeasured. Using on
--   a diagram style could lead to unexpected results.
_lineWidthU :: Typeable n => Lens' (Style v n) (Maybe n)
_lineWidthU = atAttr . mapping _LineWidth
------------------------------------------------------------------------
-- Dashing
------------------------------------------------------------------------
-- | Create lines that are dashing... er, dashed.  The list gives the
--   alternating on/off segment lengths; the second field is the
--   offset into the pattern at which the stroke starts.
data Dashing n = Dashing [n] n
  deriving (Functor, Typeable, Eq)

-- | Last-wins semigroup: a child's dashing overrides its parent's.
instance Semigroup (Dashing n) where
  _ <> b = b

instance Typeable n => AttributeClass (Dashing n)

-- | Identity; kept for interface symmetry with the other attribute
--   getters in this module.
getDashing :: Dashing n -> Dashing n
getDashing = id
-- | Set the line dashing style.
dashing :: (N a ~ n, HasStyle a, Typeable n)
  => [Measure n] -- ^ Alternating lengths of the on and off
                 --   portions of the stroke; the empty list
                 --   means no dashing.
  -> Measure n   -- ^ Offset into the dash pattern at which
                 --   the stroke should start.
  -> a -> a
dashing ds offset = applyMAttr (distribute (Dashing ds offset))

-- | A convenient synonym for @'dashing' (map 'global' ds) ('global' v)@.
dashingG :: (N a ~ n, HasStyle a, Typeable n, Num n) => [n] -> n -> a -> a
dashingG ds v = dashing (map global ds) (global v)

-- | A convenient synonym for @'dashing' (map 'normalized' ds) ('normalized' v)@.
dashingN :: (N a ~ n, HasStyle a, Typeable n, Num n) => [n] -> n -> a -> a
dashingN ds v = dashing (map normalized ds) (normalized v)

-- | A convenient synonym for @'dashing' (map 'output' ds) ('output' v)@.
dashingO :: (N a ~ n, HasStyle a, Typeable n) => [n] -> n -> a -> a
dashingO ds v = dashing (map output ds) (output v)

-- | A convenient synonym for @'dashing' (map 'local' ds) ('local' v)@.
dashingL :: (N a ~ n, HasStyle a, Typeable n, Num n) => [n] -> n -> a -> a
dashingL ds v = dashing (map local ds) (local v)
-- | Lens onto a measured dashing attribute in a style.  'Nothing'
--   means no dashing attribute is present.
_dashing :: Typeable n
         => Lens' (Style v n) (Maybe (Measured n (Dashing n)))
_dashing = atMAttr

-- | Lens onto the unmeasured 'Dashing' attribute. This is useful for
--   backends to use on styles once they have been unmeasured. Using on
--   a diagram style could lead to unexpected results.
_dashingU :: Typeable n => Lens' (Style v n) (Maybe (Dashing n))
_dashingU = atAttr
------------------------------------------------------------------------
Color
------------------------------------------------------------------------
-- $color
Diagrams outsources all things color - related to O\'Connor\ 's
-- very nice colour package
-- (<>). For starters, it
-- provides a large collection of standard color names. However, it
-- also provides a rich set of combinators for combining and
-- manipulating colors; see its documentation for more information.
-- | The 'Color' type class encompasses color representations which
--   can be used by the diagrams library.  Instances are provided for
--   both the 'Data.Colour.Colour' and 'Data.Colour.AlphaColour' types
--   from the "Data.Colour" library.
class Color c where
  -- | Convert a color to its standard representation, AlphaColour.
  toAlphaColour :: c -> AlphaColour Double

  -- | Convert from an AlphaColour Double.  Note that this direction
  --   may lose some information. For example, the instance for
  --   'Colour' drops the alpha channel.
  fromAlphaColour :: AlphaColour Double -> c
-- | An existential wrapper for instances of the 'Color' class.
data SomeColor = forall c. Color c => SomeColor c
  deriving Typeable

-- Shows as @SomeColor transparent@ when the alpha channel is 0,
-- otherwise as @SomeColor (sRGB r g b)@, with a trailing
-- @`withOpacity` a@ inside the parens when the alpha is not 1.
instance Show SomeColor where
  showsPrec d (colorToSRGBA -> (r,g,b,a)) =
    showParen (d > 10) $ showString "SomeColor " .
      if a == 0
        then showString "transparent"
        else showString "(sRGB " . showsPrec 11 r . showChar ' '
                                 . showsPrec 11 g . showChar ' '
                                 . showsPrec 11 b .
             (if a /= 1
                then showString " `withOpacity` " . showsPrec 11 a
                else id) . showChar ')'

-- | Isomorphism between 'SomeColor' and 'AlphaColour' 'Double'.
_SomeColor :: Iso' SomeColor (AlphaColour Double)
_SomeColor = iso toAlphaColour fromAlphaColour

-- | Unwrap a 'SomeColor' to its 'AlphaColour' representation.
someToAlpha :: SomeColor -> AlphaColour Double
someToAlpha (SomeColor c) = toAlphaColour c
-- An opaque 'Colour' gains alpha 1 on the way in; converting back
-- composites over black, discarding the alpha channel.
instance a ~ Double => Color (Colour a) where
  toAlphaColour = opaque
  fromAlphaColour = (`over` black)

instance a ~ Double => Color (AlphaColour a) where
  toAlphaColour = id
  fromAlphaColour = id

instance Color SomeColor where
  toAlphaColour (SomeColor c) = toAlphaColour c
  fromAlphaColour = SomeColor
| Convert to sRGBA .
colorToSRGBA, colorToRGBA :: Color c => c -> (Double, Double, Double, Double)
colorToSRGBA col = (r, g, b, a)
where
c' = toAlphaColour col
c = alphaToColour c'
a = alphaChannel c'
RGB r g b = toSRGB c
colorToRGBA = colorToSRGBA
{-# DEPRECATED colorToRGBA "Renamed to colorToSRGBA." #-}
alphaToColour :: (Floating a, Ord a) => AlphaColour a -> Colour a
alphaToColour ac | alphaChannel ac == 0 = ac `over` black
| otherwise = darken (recip (alphaChannel ac)) (ac `over` black)
------------------------------------------------------------------------
-- Opacity
------------------------------------------------------------------------
-- | Although the individual colors in a diagram can have
--   transparency, the opacity/transparency of a diagram as a whole
--   can be specified with the @Opacity@ attribute.  The opacity is a
--   value between 1 (completely opaque, the default) and 0
--   (completely transparent).  Opacity is multiplicative, that is,
--   @'opacity' o1 . 'opacity' o2 === 'opacity' (o1 * o2)@.  In other
--   words, @opacity 0.8@, for example, means \"decrease this diagram's
--   opacity to 80% of its previous opacity\".
newtype Opacity = Opacity (Product Double)
  deriving (Typeable, Semigroup)
instance AttributeClass Opacity

-- | Isomorphism between 'Opacity' and its underlying 'Double'.
_Opacity :: Iso' Opacity Double
_Opacity = iso getOpacity (Opacity . Product)

-- | Extract the opacity value.
getOpacity :: Opacity -> Double
getOpacity (Opacity (Product d)) = d

-- | Multiply the opacity (see 'Opacity') by the given value.  For
--   example, @opacity 0.8@ means \"decrease this diagram's opacity to
--   80% of its previous opacity\".
opacity :: HasStyle a => Double -> a -> a
opacity = applyAttr . Opacity . Product

-- | Lens onto the opacity in a style.  An absent attribute reads as 1.
_opacity :: Lens' (Style v n) Double
_opacity = atAttr . mapping _Opacity . non 1
-- fill opacity --------------------------------------------------------
-- | Like 'Opacity', but set the opacity only for fills (as opposed to strokes).
--   As with 'Opacity', the fill opacity is a value between 1
--   (completely opaque, the default) and 0 (completely transparent),
--   and is multiplicative.
newtype FillOpacity = FillOpacity (Product Double)
  deriving (Typeable, Semigroup)
instance AttributeClass FillOpacity

-- | Isomorphism between 'FillOpacity' and its underlying 'Double'.
_FillOpacity :: Iso' FillOpacity Double
_FillOpacity = iso getFillOpacity (FillOpacity . Product)

-- | Extract the fill opacity value.
getFillOpacity :: FillOpacity -> Double
getFillOpacity (FillOpacity (Product d)) = d

-- | Multiply the fill opacity (see 'FillOpacity') by the given value.  For
--   example, @fillOpacity 0.8@ means \"decrease this diagram's fill opacity to
--   80% of its previous fill opacity\".
fillOpacity :: HasStyle a => Double -> a -> a
fillOpacity = applyAttr . FillOpacity . Product

-- | Lens onto the fill opacity in a style.  An absent attribute reads as 1.
_fillOpacity :: Lens' (Style v n) Double
_fillOpacity = atAttr . mapping _FillOpacity . non 1
-- stroke opacity --------------------------------------------------------
-- | Like 'Opacity', but set the opacity only for strokes (as opposed to fills).
--   As with 'Opacity', the stroke opacity is a value between 1
--   (completely opaque, the default) and 0 (completely transparent),
--   and is multiplicative.
newtype StrokeOpacity = StrokeOpacity (Product Double)
  deriving (Typeable, Semigroup)
instance AttributeClass StrokeOpacity

-- | Isomorphism between 'StrokeOpacity' and its underlying 'Double'.
_StrokeOpacity :: Iso' StrokeOpacity Double
_StrokeOpacity = iso getStrokeOpacity (StrokeOpacity . Product)

-- | Extract the stroke opacity value.
getStrokeOpacity :: StrokeOpacity -> Double
getStrokeOpacity (StrokeOpacity (Product d)) = d

-- | Multiply the stroke opacity (see 'StrokeOpacity') by the given value.  For
--   example, @strokeOpacity 0.8@ means \"decrease this diagram's
--   stroke opacity to 80% of its previous stroke opacity\".
strokeOpacity :: HasStyle a => Double -> a -> a
strokeOpacity = applyAttr . StrokeOpacity . Product

-- | Lens onto the stroke opacity in a style.  An absent attribute reads as 1.
_strokeOpacity :: Lens' (Style v n) Double
_strokeOpacity = atAttr . mapping _StrokeOpacity . non 1
------------------------------------------------------------------------
-- Line stuff
------------------------------------------------------------------------
-- line cap ------------------------------------------------------------
-- | What sort of shape should be placed at the endpoints of lines?
data LineCap = LineCapButt   -- ^ Lines end precisely at their endpoints.
             | LineCapRound  -- ^ Lines are capped with semicircles
                             --   centered on endpoints.
             | LineCapSquare -- ^ Lines are capped with squares
                             --   centered on endpoints.
  deriving (Eq, Ord, Show, Typeable)

instance Default LineCap where
  def = LineCapButt

instance AttributeClass LineCap

-- | Last semigroup structure: a child's cap overrides its parent's.
instance Semigroup LineCap where
  _ <> b = b

-- | Identity; kept for interface symmetry with the other attribute
--   getters in this module.
getLineCap :: LineCap -> LineCap
getLineCap = id

-- | Set the line end cap attribute.
lineCap :: HasStyle a => LineCap -> a -> a
lineCap = applyAttr

-- | Lens onto the line cap in a style.  An absent attribute reads as
--   the default ('LineCapButt'); setting it back to the default
--   removes the attribute ('non' 'def').
_lineCap :: Lens' (Style v n) LineCap
_lineCap = atAttr . non def
-- line join -----------------------------------------------------------
-- | How should the join points between line segments be drawn?
data LineJoin = LineJoinMiter -- ^ Use a \"miter\" shape (whatever that is).
              | LineJoinRound -- ^ Use rounded join points.
              | LineJoinBevel -- ^ Use a \"bevel\" shape (whatever
                              --   that is).  Are these...
                              --   carpentry terms?
  deriving (Eq, Ord, Show, Typeable)

instance AttributeClass LineJoin

-- | Last semigroup structure: a child's join overrides its parent's.
instance Semigroup LineJoin where
  _ <> b = b

instance Default LineJoin where
  def = LineJoinMiter

-- | Identity; kept for interface symmetry with the other attribute
--   getters in this module.
getLineJoin :: LineJoin -> LineJoin
getLineJoin = id

-- | Set the segment join style.
lineJoin :: HasStyle a => LineJoin -> a -> a
lineJoin = applyAttr

-- | Lens onto the line join type in a style.  An absent attribute
--   reads as the default ('LineJoinMiter'); setting it back to the
--   default removes the attribute ('non' 'def').
_lineJoin :: Lens' (Style v n) LineJoin
_lineJoin = atAttr . non def
-- miter limit ---------------------------------------------------------
-- | Miter limit attribute affecting the 'LineJoinMiter' joins.
--   For some backends this value may have additional effects.
--   'Last' semigroup: the innermost setting wins.
newtype LineMiterLimit = LineMiterLimit (Last Double)
  deriving (Typeable, Semigroup, Eq, Ord)
instance AttributeClass LineMiterLimit

-- | Isomorphism between 'LineMiterLimit' and its underlying 'Double'.
_LineMiterLimit :: Iso' LineMiterLimit Double
_LineMiterLimit = iso getLineMiterLimit (LineMiterLimit . Last)

-- | The default miter limit is 10.
instance Default LineMiterLimit where
  def = LineMiterLimit (Last 10)

-- | Extract the miter limit value.
getLineMiterLimit :: LineMiterLimit -> Double
getLineMiterLimit (LineMiterLimit (Last l)) = l

-- | Set the miter limit for joins with 'LineJoinMiter'.
lineMiterLimit :: HasStyle a => Double -> a -> a
lineMiterLimit = applyAttr . LineMiterLimit . Last

-- | Apply a 'LineMiterLimit' attribute.
lineMiterLimitA :: HasStyle a => LineMiterLimit -> a -> a
lineMiterLimitA = applyAttr

-- | Lens onto the line miter limit in a style.  An absent attribute
--   reads as the default (10).
_lineMiterLimit :: Lens' (Style v n) Double
_lineMiterLimit = atAttr . non def . _LineMiterLimit
------------------------------------------------------------------------
-- Recommend optics
------------------------------------------------------------------------
-- | Prism onto the 'Recommend' constructor.
_Recommend :: Prism' (Recommend a) a
_Recommend = prism' Recommend $ \r ->
  case r of
    Recommend a -> Just a
    Commit _    -> Nothing

-- | Prism onto the 'Commit' constructor.
_Commit :: Prism' (Recommend a) a
_Commit = prism' Commit $ \r ->
  case r of
    Commit a    -> Just a
    Recommend _ -> Nothing
-- | Lens onto the value inside either a 'Recommend' or 'Commit'. Unlike
--   'committed', this is a valid lens: it preserves the constructor.
_recommend :: Lens (Recommend a) (Recommend b) a b
_recommend f (Recommend a) = Recommend <$> f a
_recommend f (Commit a) = Commit <$> f a

-- | Lens onto whether something is committed or not.  Setting 'True'
--   turns a 'Recommend' into a 'Commit' (and vice versa for 'False'),
--   keeping the wrapped value.
isCommitted :: Lens' (Recommend a) Bool
isCommitted f r@(Recommend a) = f False <&> \b -> if b then Commit a else r
isCommitted f r@(Commit a) = f True <&> \b -> if b then r else Recommend a

-- | 'Commit' a value for any 'Recommend'.  This is /not/ a valid 'Iso'
--   because the resulting @Recommend b@ is always a 'Commit'.  This is
--   useful because it means any 'Recommend' styles set with a lens will
--   not be accidentally overridden.  If you want a valid lens onto a
--   recommend value use '_recommend'.
--
--   Other lenses that use this are labeled with a warning.
committed :: Iso (Recommend a) (Recommend b) a b
committed = iso getRecommend Commit
| null | https://raw.githubusercontent.com/diagrams/diagrams-lib/ed8276e7babecace51aad34b3dfd608847be2c47/src/Diagrams/Attributes.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE DeriveDataTypeable #
# LANGUAGE DeriveFunctor #
# LANGUAGE TypeOperators #
---------------------------------------------------------------------------
|
Module : Diagrams.Attributes
License : BSD-style (see LICENSE)
Maintainer :
Diagrams may have /attributes/ which affect the way they are
rendered. This module defines some common attributes; particular
backends may also define more backend-specific attributes.
Every attribute type must have a /semigroup/ structure, that is, an
Unless otherwise noted, all the attributes defined here use the
attributes always override parent attributes.
---------------------------------------------------------------------------
** Standard measures
** Line width
** Dashing
* Color
$color
** Opacity
** Converting colors
* Line stuff
** Join style
** Miter limit
* Recommend optics
----------------------------------------------------------------------
Standard measures
----------------------------------------------------------------------
----------------------------------------------------------------------
Line width
----------------------------------------------------------------------
| Line widths specified on child nodes always override line widths
specified at parent nodes.
| Set the line (stroke) width.
| Default for 'lineWidth'.
| A convenient synonym for 'lineWidth (global w)'.
| A convenient synonym for 'lineWidth (normalized w)'.
| A convenient synonym for 'lineWidth (output w)'.
| A convenient sysnonym for 'lineWidth (local w)'.
| Lens onto a measured line width in a style.
| Lens onto the unmeasured linewith attribute. This is useful for
backends to use on styles once they have been unmeasured. Using on
a diagram style could lead to unexpected results.
----------------------------------------------------------------------
Dashing
----------------------------------------------------------------------
| Create lines that are dashing... er, dashed.
| Set the line dashing style.
^ A list specifying alternate lengths of on
and off portions of the stroke. The empty
list indicates no dashing.
^ An offset into the dash pattern at which the
stroke should start.
| A convenient synonym for 'dashing (global w)'.
| A convenient synonym for 'dashing (normalized w)'.
| A convenient synonym for 'dashing (output w)'.
| A convenient sysnonym for 'dashing (local w)'.
| Lens onto a measured dashing attribute in a style.
| Lens onto the unmeasured 'Dashing' attribute. This is useful for
backends to use on styles once they have been unmeasured. Using on
a diagram style could lead to unexpected results.
----------------------------------------------------------------------
----------------------------------------------------------------------
$color
very nice colour package
(<>). For starters, it
provides a large collection of standard color names. However, it
also provides a rich set of combinators for combining and
manipulating colors; see its documentation for more information.
| The 'Color' type class encompasses color representations which
both the 'Data.Colour.Colour' and 'Data.Colour.AlphaColour' types
from the "Data.Colour" library.
| Convert from an AlphaColour Double. Note that this direction
may lose some information. For example, the instance for
'Colour' drops the alpha channel.
| An existential wrapper for instances of the 'Color' class.
# DEPRECATED colorToRGBA "Renamed to colorToSRGBA." #
----------------------------------------------------------------------
Opacity
----------------------------------------------------------------------
| Although the individual colors in a diagram can have
transparency, the opacity/transparency of a diagram as a whole
can be specified with the @Opacity@ attribute. The opacity is a
(completely transparent). Opacity is multiplicative, that is,
| Multiply the opacity (see 'Opacity') by the given value. For
| Lens onto the opacity in a style.
fill opacity --------------------------------------------------------
| Like 'Opacity', but set the opacity only for fills (as opposed to strokes).
(completely opaque, the default) and 0 (completely transparent),
and is multiplicative.
| Lens onto the fill opacity in a style.
stroke opacity --------------------------------------------------------
| Like 'Opacity', but set the opacity only for strokes (as opposed to fills).
(completely opaque, the default) and 0 (completely transparent),
and is multiplicative.
| Multiply the stroke opacity (see 'StrokeOpacity') by the given value. For
example, @strokeOpacity 0.8@ means \"decrease this diagram's
| Lens onto the stroke opacity in a style.
----------------------------------------------------------------------
Line stuff
----------------------------------------------------------------------
line cap ------------------------------------------------------------
| What sort of shape should be placed at the endpoints of lines?
^ Lines end precisely at their endpoints.
^ Lines are capped with semicircles
centered on endpoints.
^ Lines are capped with a squares
centered on endpoints.
| Last semigroup structure.
| Set the line end cap attribute.
| Lens onto the line cap in a style.
line join -----------------------------------------------------------
| How should the join points between line segments be drawn?
^ Use a \"miter\" shape (whatever that is).
^ Use rounded join points.
^ Use a \"bevel\" shape (whatever
that is). Are these...
carpentry terms?
| Last semigroup structure.
| Set the segment join style.
| Lens onto the line join type in a style.
miter limit ---------------------------------------------------------
| Miter limit attribute affecting the 'LineJoinMiter' joins.
For some backends this value may have additional effects.
| Set the miter limit for joins with 'LineJoinMiter'.
| Apply a 'LineMiterLimit' attribute.
| Lens onto the line miter limit in a style.
----------------------------------------------------------------------
Recommend optics
----------------------------------------------------------------------
| Prism onto a 'Recommend'.
| Prism onto a 'Commit'.
| Lens onto the value inside either a 'Recommend' or 'Commit'. Unlike
'committed', this is a valid lens.
| Lens onto whether something is committed or not.
because the resulting @Recommend b@ is always a 'Commit'. This is
useful because it means any 'Recommend' styles set with a lens will
not be accidentally overridden. If you want a valid lens onto a
recommend value use '_recommend'.
Other lenses that use this are labeled with a warning. | # LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
Copyright : ( c ) 2011 - 2015 diagrams - lib team ( see LICENSE )
associative binary operation for combining two attributes into one .
' Last ' structure , that is , combining two attributes simply keeps
the second one and throws away the first . This means that child
module Diagrams.Attributes (
ultraThin, veryThin, thin, medium, thick, veryThick, ultraThick, none
, tiny, verySmall, small, normal, large, veryLarge, huge
, LineWidth, getLineWidth
, _LineWidth, _LineWidthM
, lineWidth, lineWidthM
, _lineWidth, _lw, _lineWidthU
, lw, lwN, lwO, lwL, lwG
, Dashing(..), getDashing
, dashing, dashingN, dashingO, dashingL, dashingG
, _dashing, _dashingU
, Color(..), SomeColor(..), _SomeColor, someToAlpha
, Opacity, _Opacity
, getOpacity, opacity, _opacity
, FillOpacity, _FillOpacity
, getFillOpacity, fillOpacity, _fillOpacity
, StrokeOpacity, _StrokeOpacity
, getStrokeOpacity, strokeOpacity, _strokeOpacity
, colorToSRGBA, colorToRGBA
* * style
, LineCap(..)
, getLineCap, lineCap, _lineCap
, LineJoin(..)
, getLineJoin, lineJoin, _lineJoin
, LineMiterLimit(..), _LineMiterLimit
, getLineMiterLimit, lineMiterLimit, lineMiterLimitA, _lineMiterLimit
, _Recommend
, _Commit
, _recommend
, isCommitted
, committed
) where
import Control.Lens hiding (none, over)
import Data.Colour
import Data.Colour.RGBSpace (RGB (..))
import Data.Colour.SRGB (toSRGB)
import Data.Default.Class
import Data.Distributive
import Data.Monoid.Recommend
import Data.Semigroup
import Data.Typeable
import Diagrams.Core
none, ultraThin, veryThin, thin, medium, thick, veryThick, ultraThick,
tiny, verySmall, small, normal, large, veryLarge, huge
:: OrderedField n => Measure n
none = output 0
ultraThin = normalized 0.0005 `atLeast` output 0.5
veryThin = normalized 0.001 `atLeast` output 0.5
thin = normalized 0.002 `atLeast` output 0.5
medium = normalized 0.004 `atLeast` output 0.5
thick = normalized 0.0075 `atLeast` output 0.5
veryThick = normalized 0.01 `atLeast` output 0.5
ultraThick = normalized 0.02 `atLeast` output 0.5
tiny = normalized 0.01
verySmall = normalized 0.015
small = normalized 0.023
normal = normalized 0.035
large = normalized 0.05
veryLarge = normalized 0.07
huge = normalized 0.10
newtype LineWidth n = LineWidth (Last n)
deriving (Typeable, Semigroup)
_LineWidth :: Iso' (LineWidth n) n
_LineWidth = iso getLineWidth (LineWidth . Last)
_LineWidthM :: Iso' (LineWidthM n) (Measure n)
_LineWidthM = mapping _LineWidth
instance Typeable n => AttributeClass (LineWidth n)
type LineWidthM n = Measured n (LineWidth n)
instance OrderedField n => Default (LineWidthM n) where
def = fmap (LineWidth . Last) medium
getLineWidth :: LineWidth n -> n
getLineWidth (LineWidth (Last w)) = w
lineWidth :: (N a ~ n, HasStyle a, Typeable n) => Measure n -> a -> a
lineWidth = applyMAttr . fmap (LineWidth . Last)
| Apply a ' LineWidth ' attribute .
lineWidthM :: (N a ~ n, HasStyle a, Typeable n) => LineWidthM n -> a -> a
lineWidthM = applyMAttr
lw :: (N a ~ n, HasStyle a, Typeable n) => Measure n -> a -> a
lw = lineWidth
lwG :: (N a ~ n, HasStyle a, Typeable n, Num n) => n -> a -> a
lwG = lw . global
lwN :: (N a ~ n, HasStyle a, Typeable n, Num n) => n -> a -> a
lwN = lw . normalized
lwO :: (N a ~ n, HasStyle a, Typeable n) => n -> a -> a
lwO = lw . output
lwL :: (N a ~ n, HasStyle a, Typeable n, Num n) => n -> a -> a
lwL = lw . local
_lineWidth, _lw :: (Typeable n, OrderedField n) => Lens' (Style v n) (Measure n)
_lineWidth = atMAttr . anon def (const False) . _LineWidthM
_lw = _lineWidth
_lineWidthU :: Typeable n => Lens' (Style v n) (Maybe n)
_lineWidthU = atAttr . mapping _LineWidth
data Dashing n = Dashing [n] n
deriving (Functor, Typeable, Eq)
instance Semigroup (Dashing n) where
_ <> b = b
instance Typeable n => AttributeClass (Dashing n)
getDashing :: Dashing n -> Dashing n
getDashing = id
dashing :: (N a ~ n, HasStyle a, Typeable n)
-> a -> a
dashing ds offs = applyMAttr . distribute $ Dashing ds offs
dashingG :: (N a ~ n, HasStyle a, Typeable n, Num n) => [n] -> n -> a -> a
dashingG w v = dashing (map global w) (global v)
dashingN :: (N a ~ n, HasStyle a, Typeable n, Num n) => [n] -> n -> a -> a
dashingN w v = dashing (map normalized w) (normalized v)
dashingO :: (N a ~ n, HasStyle a, Typeable n) => [n] -> n -> a -> a
dashingO w v = dashing (map output w) (output v)
dashingL :: (N a ~ n, HasStyle a, Typeable n, Num n) => [n] -> n -> a -> a
dashingL w v = dashing (map local w) (local v)
_dashing :: Typeable n
=> Lens' (Style v n) (Maybe (Measured n (Dashing n)))
_dashing = atMAttr
_dashingU :: Typeable n => Lens' (Style v n) (Maybe (Dashing n))
_dashingU = atAttr
Color
Diagrams outsources all things color - related to O\'Connor\ 's
can be used by the library . Instances are provided for
class Color c where
| Convert a color to its standard representation , AlphaColour .
toAlphaColour :: c -> AlphaColour Double
fromAlphaColour :: AlphaColour Double -> c
data SomeColor = forall c. Color c => SomeColor c
deriving Typeable
instance Show SomeColor where
showsPrec d (colorToSRGBA -> (r,g,b,a)) =
showParen (d > 10) $ showString "SomeColor " .
if a == 0
then showString "transparent"
else showString "(sRGB " . showsPrec 11 r . showChar ' '
. showsPrec 11 g . showChar ' '
. showsPrec 11 b .
(if a /= 1
then showString " `withOpacity` " . showsPrec 11 a
else id) . showChar ')'
| Isomorphism between ' SomeColor ' and ' AlphaColour ' ' Double ' .
_SomeColor :: Iso' SomeColor (AlphaColour Double)
_SomeColor = iso toAlphaColour fromAlphaColour
someToAlpha :: SomeColor -> AlphaColour Double
someToAlpha (SomeColor c) = toAlphaColour c
instance a ~ Double => Color (Colour a) where
toAlphaColour = opaque
fromAlphaColour = (`over` black)
instance a ~ Double => Color (AlphaColour a) where
toAlphaColour = id
fromAlphaColour = id
instance Color SomeColor where
toAlphaColour (SomeColor c) = toAlphaColour c
fromAlphaColour = SomeColor
| Convert to sRGBA .
colorToSRGBA, colorToRGBA :: Color c => c -> (Double, Double, Double, Double)
colorToSRGBA col = (r, g, b, a)
where
c' = toAlphaColour col
c = alphaToColour c'
a = alphaChannel c'
RGB r g b = toSRGB c
colorToRGBA = colorToSRGBA
alphaToColour :: (Floating a, Ord a) => AlphaColour a -> Colour a
alphaToColour ac | alphaChannel ac == 0 = ac `over` black
| otherwise = darken (recip (alphaChannel ac)) (ac `over` black)
value between 1 ( completely opaque , the default ) and 0
@'opacity ' o1 . ' opacity ' o2 = = = ' opacity ' ( o1 * o2)@. In other
words , for example , means \"decrease this diagram 's
opacity to 80 % of its previous opacity\ " .
newtype Opacity = Opacity (Product Double)
deriving (Typeable, Semigroup)
instance AttributeClass Opacity
_Opacity :: Iso' Opacity Double
_Opacity = iso getOpacity (Opacity . Product)
getOpacity :: Opacity -> Double
getOpacity (Opacity (Product d)) = d
example , means \"decrease this diagram 's opacity to
80 % of its previous opacity\ " .
opacity :: HasStyle a => Double -> a -> a
opacity = applyAttr . Opacity . Product
_opacity :: Lens' (Style v n) Double
_opacity = atAttr . mapping _Opacity . non 1
As with ' Opacity ' , the fill opacity is a value between 1
newtype FillOpacity = FillOpacity (Product Double)
deriving (Typeable, Semigroup)
instance AttributeClass FillOpacity
_FillOpacity :: Iso' FillOpacity Double
_FillOpacity = iso getFillOpacity (FillOpacity . Product)
getFillOpacity :: FillOpacity -> Double
getFillOpacity (FillOpacity (Product d)) = d
| Multiply the fill opacity ( see ' FillOpacity ' ) by the given value . For
example , @fillOpacity 0.8@ means \"decrease this diagram 's fill opacity to
80 % of its previous " .
fillOpacity :: HasStyle a => Double -> a -> a
fillOpacity = applyAttr . FillOpacity . Product
_fillOpacity :: Lens' (Style v n) Double
_fillOpacity = atAttr . mapping _FillOpacity . non 1
As with ' Opacity ' , the fill opacity is a value between 1
newtype StrokeOpacity = StrokeOpacity (Product Double)
deriving (Typeable, Semigroup)
instance AttributeClass StrokeOpacity
_StrokeOpacity :: Iso' StrokeOpacity Double
_StrokeOpacity = iso getStrokeOpacity (StrokeOpacity . Product)
getStrokeOpacity :: StrokeOpacity -> Double
getStrokeOpacity (StrokeOpacity (Product d)) = d
stroke opacity to 80 % of its previous " .
strokeOpacity :: HasStyle a => Double -> a -> a
strokeOpacity = applyAttr . StrokeOpacity . Product
_strokeOpacity :: Lens' (Style v n) Double
_strokeOpacity = atAttr . mapping _StrokeOpacity . non 1
deriving (Eq, Ord, Show, Typeable)
-- | The default line cap is a butt cap.
instance Default LineCap where
  def = LineCapButt

instance AttributeClass LineCap

-- | "Last one wins": a later line cap replaces an earlier one.
instance Semigroup LineCap where
  _ <> b = b

getLineCap :: LineCap -> LineCap
getLineCap = id

-- | Set the line end cap attribute.
lineCap :: HasStyle a => LineCap -> a -> a
lineCap = applyAttr

-- | Lens onto the line cap of a style; reads as 'def' when unset.
_lineCap :: Lens' (Style v n) LineCap
_lineCap = atAttr . non def
deriving (Eq, Ord, Show, Typeable)
instance AttributeClass LineJoin

-- | "Last one wins": a later line join replaces an earlier one.
instance Semigroup LineJoin where
  _ <> b = b

-- | The default line join is a miter join.
instance Default LineJoin where
  def = LineJoinMiter

getLineJoin :: LineJoin -> LineJoin
getLineJoin = id

-- | Set the segment join style attribute.
lineJoin :: HasStyle a => LineJoin -> a -> a
lineJoin = applyAttr

-- | Lens onto the line join of a style; reads as 'def' when unset.
_lineJoin :: Lens' (Style v n) LineJoin
_lineJoin = atAttr . non def

-- | Miter-limit attribute for line joins, with "last one wins"
--   ('Last') combining semantics.
newtype LineMiterLimit = LineMiterLimit (Last Double)
  deriving (Typeable, Semigroup, Eq, Ord)
instance AttributeClass LineMiterLimit

-- | Isomorphism between 'LineMiterLimit' and its raw 'Double'.
_LineMiterLimit :: Iso' LineMiterLimit Double
_LineMiterLimit = iso getLineMiterLimit (LineMiterLimit . Last)

-- | The default miter limit is 10.
instance Default LineMiterLimit where
  def = LineMiterLimit (Last 10)

getLineMiterLimit :: LineMiterLimit -> Double
getLineMiterLimit (LineMiterLimit (Last l)) = l

-- | Set the miter limit for joins of lines.
lineMiterLimit :: HasStyle a => Double -> a -> a
lineMiterLimit = applyAttr . LineMiterLimit . Last

-- | Apply a 'LineMiterLimit' attribute directly.
lineMiterLimitA :: HasStyle a => LineMiterLimit -> a -> a
lineMiterLimitA = applyAttr

-- | Lens onto the miter limit of a style; reads as 'def' when unset.
_lineMiterLimit :: Lens' (Style v n) Double
_lineMiterLimit = atAttr . non def . _LineMiterLimit
-- | Prism onto the 'Recommend' constructor.
_Recommend :: Prism' (Recommend a) a
_Recommend = prism' Recommend $ \case (Recommend a) -> Just a; _ -> Nothing

-- | Prism onto the 'Commit' constructor.
_Commit :: Prism' (Recommend a) a
_Commit = prism' Commit $ \case (Commit a) -> Just a; _ -> Nothing

-- | Lens onto the wrapped value, preserving whichever constructor
--   is present.
_recommend :: Lens (Recommend a) (Recommend b) a b
_recommend f (Recommend a) = Recommend <$> f a
_recommend f (Commit a) = Commit <$> f a

-- | Lens onto whether the value is committed; setting it converts
--   between the 'Recommend' and 'Commit' constructors.
isCommitted :: Lens' (Recommend a) Bool
isCommitted f r@(Recommend a) = f False <&> \b -> if b then Commit a else r
isCommitted f r@(Commit a) = f True <&> \b -> if b then r else Recommend a
| 'Commit' a value for any 'Recommend'. This is *not* a valid 'Iso'.
-- | View the value inside a 'Recommend'; rebuilding always uses
--   'Commit' (hence not a lawful 'Iso').
committed :: Iso (Recommend a) (Recommend b) a b
committed = iso getRecommend Commit
|
ff07cf49f3b3ce66c8cda26e29d5dfa3ed15ab34a79f8881e0c202b5748b8499 | KeliLanguage/compiler | Cli.hs | {-# LANGUAGE BangPatterns #-}
module Cli where
import Options.Applicative
import Data.Semigroup ((<>))
import Data.Aeson
import qualified Data.ByteString.Lazy.Char8 as Char8
import qualified Data.HashMap.Strict as HashMap
import Debug.Pretty.Simple (pTraceShowId, pTraceShow)
import System.IO
import PreludeJSCode
import Interpreter
import Repl
import Transpiler
import Package
import Compiler
import Diagnostics(toDiagnostic)
import CompletionItems
-- | Version string reported by the @version@ subcommand.
keliCompilerVersion :: String
keliCompilerVersion = "0.0.2-alpha"
-- | One constructor per CLI subcommand; fields carry the parsed
--   command-line arguments (see 'allParser').
data KeliCommand
    = Execute
        String -- filename
        Bool -- whether to show line number or not
    | Repl
    | Analyze
        String -- filename
    | Compile
        String -- filename
    | Suggest
        String -- filename
        Int -- line number (zero-based)
        Int -- column number (zero-based)
    | NewPackage
        String -- package name
    | AddDependency
        String -- git repo url
        String -- tag
    | Version
    | Install
        String -- path to purse.json
    deriving (Show)
-- | Parser for every Keli CLI subcommand.
--
-- Fixes over the previous revision:
--   * @run@'s @--show-line-number@ help text said "Where" instead of
--     "Whether".
--   * @add-dependency@'s description was copy-pasted from
--     @new-package@ ("Create a new Keli package").
--   * @new-package@'s argument metavar said FILENAME although the
--     argument is a package name (see 'handleKeliCommand').
allParser :: Parser KeliCommand
allParser = subparser (
    command "run" (info
        (Execute
            <$> (argument str  (metavar "FILENAME"))
            <*> switch
                ( long "show-line-number"
                <> short 'l'
                <> help "Whether to show line number or not." ))
        (progDesc "Execute a Keli program (*.keli)"))
    <>
    command "analyze" (info
        (Analyze
            <$> (argument str  (metavar "FILENAME")))
        (progDesc "Analyze a Keli program (*.keli) and display error as JSON."))
    <>
    command "compile" (info
        (Compile
            <$> (argument str  (metavar "FILENAME")))
        (progDesc "Compile a Keli program (*.keli) into JavaScript file."))
    <>
    command "suggest" (info
        (Suggest
            <$> (argument str  (metavar "FILENAME"))
            <*> (argument auto (metavar "LINE_NUMBER(zero-based index)"))
            <*> (argument auto (metavar "COLUMN_NUMBER(zero-based index)")))
        (progDesc "Analyze a Keli program (*.keli) and suggest completion items."))
    <>
    command "repl" (info
        (pure Repl)
        (progDesc "Starts the Keli REPL."))
    <>
    command "new-package" (info
        (NewPackage
            <$> (argument str (metavar "PACKAGE_NAME")))
        (progDesc "Create a new Keli package"))
    <>
    command "add-dependency" (info
        (AddDependency
            <$> (argument str (metavar "GIT_REPO_URL"))
            <*> (argument str (metavar "TAG")))
        (progDesc "Add a dependency to the current Keli package"))
    <>
    command "install" (info
        (Install
            <$> (argument str (metavar "PATH_TO_PURSE.JSON")))
        (progDesc "Install dependencies based on the specified purse.json"))
    <>
    command "version" (info
        (pure Version)
        (progDesc "Get the version of this Keli compiler."))
    )
-- | Entry point: parse the command line with 'allParser' and dispatch
--   via 'handleKeliCommand'.
cli :: IO ()
cli = handleKeliCommand =<< execParser opts
    where
        opts = info (allParser <**> helper)
            ( fullDesc
            <> progDesc "Compile or interpret Keli program."
            <> header "The Keli Compiler" )
-- | Dispatch one parsed 'KeliCommand' to the corresponding action.
--   Interpreter output goes to stdout, interpreter errors to stderr;
--   analysis/suggestion results are emitted as JSON on stdout.
handleKeliCommand :: KeliCommand -> IO ()
handleKeliCommand input =
    case input of
        Execute filename showLineNumber -> do
            result <- keliInterpret showLineNumber filename
            case result of
                Right output ->
                    hPutStrLn stdout output
                Left err ->
                    hPutStrLn stderr err
        Compile filename -> do
            contents <- readFile filename
            (errors, module', _, _) <- keliCompile filename contents (HashMap.empty) []
            -- On any compile error, emit diagnostics as JSON instead of JS.
            if length errors > 0 then
                putStr (Char8.unpack (encode (concat (map toDiagnostic errors))))
            else
                -- Prepend the JS prelude so the output is self-contained.
                putStr (preludeJSCode ++ transpileModule True False module')
        Repl ->
            keliRepl
        Analyze filename -> do
            contents <- readFile filename
            (errors, _, _, _) <- keliCompile filename contents (HashMap.empty) []
            putStr (Char8.unpack (encode (concat (map toDiagnostic errors))))
        Suggest filename lineNumber columnNumber -> do
            completionItems <- suggestCompletionItemsAt filename (lineNumber, columnNumber)
            putStr (Char8.unpack (encode completionItems))
        Install pursePath ->
            installDeps pursePath
        Version ->
            putStrLn keliCompilerVersion
        NewPackage packageName -> do
            createNewPackage packageName
        AddDependency gitRepoUrl tag -> do
            addDependency gitRepoUrl tag
| null | https://raw.githubusercontent.com/KeliLanguage/compiler/5cc5f2314fa0e0863a49c504cdb115d799f382f6/src/Cli.hs | haskell | # LANGUAGE BangPatterns #
filename
whether to show line number or not
filename
filename
filename
line number
column number
package name
git repo url
tag
path to purse.json | module Cli where
import Options.Applicative
import Data.Semigroup ((<>))
import Data.Aeson
import qualified Data.ByteString.Lazy.Char8 as Char8
import qualified Data.HashMap.Strict as HashMap
import Debug.Pretty.Simple (pTraceShowId, pTraceShow)
import System.IO
import PreludeJSCode
import Interpreter
import Repl
import Transpiler
import Package
import Compiler
import Diagnostics(toDiagnostic)
import CompletionItems
-- | Version string reported by the @version@ subcommand.
keliCompilerVersion :: String
keliCompilerVersion = "0.0.2-alpha"
-- | One constructor per CLI subcommand.  The constructor fields (which
--   had been lost from this copy of the declaration) are required by
--   'allParser' and 'handleKeliCommand', which apply these
--   constructors to the parsed arguments.
data KeliCommand
    = Execute
        String -- filename
        Bool   -- whether to show line numbers or not
    | Repl
    | Analyze
        String -- filename
    | Compile
        String -- filename
    | Suggest
        String -- filename
        Int    -- line number (zero-based)
        Int    -- column number (zero-based)
    | NewPackage
        String -- package name
    | AddDependency
        String -- git repo url
        String -- tag
    | Version
    | Install
        String -- path to purse.json
    deriving (Show)
-- | Parser for every Keli CLI subcommand.
--
-- Fixes over the previous revision:
--   * @run@'s @--show-line-number@ help text said "Where" instead of
--     "Whether".
--   * @add-dependency@'s description was copy-pasted from
--     @new-package@ ("Create a new Keli package").
--   * @new-package@'s argument metavar said FILENAME although the
--     argument is a package name (see 'handleKeliCommand').
allParser :: Parser KeliCommand
allParser = subparser (
    command "run" (info
        (Execute
            <$> (argument str  (metavar "FILENAME"))
            <*> switch
                ( long "show-line-number"
                <> short 'l'
                <> help "Whether to show line number or not." ))
        (progDesc "Execute a Keli program (*.keli)"))
    <>
    command "analyze" (info
        (Analyze
            <$> (argument str  (metavar "FILENAME")))
        (progDesc "Analyze a Keli program (*.keli) and display error as JSON."))
    <>
    command "compile" (info
        (Compile
            <$> (argument str  (metavar "FILENAME")))
        (progDesc "Compile a Keli program (*.keli) into JavaScript file."))
    <>
    command "suggest" (info
        (Suggest
            <$> (argument str  (metavar "FILENAME"))
            <*> (argument auto (metavar "LINE_NUMBER(zero-based index)"))
            <*> (argument auto (metavar "COLUMN_NUMBER(zero-based index)")))
        (progDesc "Analyze a Keli program (*.keli) and suggest completion items."))
    <>
    command "repl" (info
        (pure Repl)
        (progDesc "Starts the Keli REPL."))
    <>
    command "new-package" (info
        (NewPackage
            <$> (argument str (metavar "PACKAGE_NAME")))
        (progDesc "Create a new Keli package"))
    <>
    command "add-dependency" (info
        (AddDependency
            <$> (argument str (metavar "GIT_REPO_URL"))
            <*> (argument str (metavar "TAG")))
        (progDesc "Add a dependency to the current Keli package"))
    <>
    command "install" (info
        (Install
            <$> (argument str (metavar "PATH_TO_PURSE.JSON")))
        (progDesc "Install dependencies based on the specified purse.json"))
    <>
    command "version" (info
        (pure Version)
        (progDesc "Get the version of this Keli compiler."))
    )
-- | Entry point: parse the command line with 'allParser' and dispatch
--   via 'handleKeliCommand'.
cli :: IO ()
cli = handleKeliCommand =<< execParser opts
    where
        opts = info (allParser <**> helper)
            ( fullDesc
            <> progDesc "Compile or interpret Keli program."
            <> header "The Keli Compiler" )
-- | Dispatch one parsed 'KeliCommand' to the corresponding action.
--   Interpreter output goes to stdout, interpreter errors to stderr;
--   analysis/suggestion results are emitted as JSON on stdout.
handleKeliCommand :: KeliCommand -> IO ()
handleKeliCommand input =
    case input of
        Execute filename showLineNumber -> do
            result <- keliInterpret showLineNumber filename
            case result of
                Right output ->
                    hPutStrLn stdout output
                Left err ->
                    hPutStrLn stderr err
        Compile filename -> do
            contents <- readFile filename
            (errors, module', _, _) <- keliCompile filename contents (HashMap.empty) []
            -- On any compile error, emit diagnostics as JSON instead of JS.
            if length errors > 0 then
                putStr (Char8.unpack (encode (concat (map toDiagnostic errors))))
            else
                -- Prepend the JS prelude so the output is self-contained.
                putStr (preludeJSCode ++ transpileModule True False module')
        Repl ->
            keliRepl
        Analyze filename -> do
            contents <- readFile filename
            (errors, _, _, _) <- keliCompile filename contents (HashMap.empty) []
            putStr (Char8.unpack (encode (concat (map toDiagnostic errors))))
        Suggest filename lineNumber columnNumber -> do
            completionItems <- suggestCompletionItemsAt filename (lineNumber, columnNumber)
            putStr (Char8.unpack (encode completionItems))
        Install pursePath ->
            installDeps pursePath
        Version ->
            putStrLn keliCompilerVersion
        NewPackage packageName -> do
            createNewPackage packageName
        AddDependency gitRepoUrl tag -> do
            addDependency gitRepoUrl tag
|
0322d511f28fffabd95057ab7a1adc539d96f3c7b1db8623f52712104cff9891 | atgreen/red-light-green-light | common.lisp | (in-package #:snooze-common)
;;; Verbs
;;;
;;; "Sending" and "Receiving" are always from the server's
perspective . Hence GET is " sending to client " and POST and PUT are
;;; "receiving from client".
;;;
;;; HTTP verbs are modelled as a small class hierarchy so that routes
;;; can specialize on groups of verbs (e.g. any "receiving" verb).
(defpackage :snooze-verbs (:use)
  (:export #:http-verb #:get #:post #:put #:delete
           #:content-verb
           #:receiving-verb
           #:sending-verb))

(cl:defclass snooze-verbs:http-verb () ())

(cl:defclass snooze-verbs:delete (snooze-verbs:http-verb) ())

(cl:defclass snooze-verbs:content-verb (snooze-verbs:http-verb) ())

(cl:defclass snooze-verbs:receiving-verb (snooze-verbs:content-verb) ())
(cl:defclass snooze-verbs:sending-verb (snooze-verbs:content-verb) ())

(cl:defclass snooze-verbs:post (snooze-verbs:receiving-verb) ())
(cl:defclass snooze-verbs:put (snooze-verbs:receiving-verb) ())
(cl:defclass snooze-verbs:get (snooze-verbs:sending-verb) ())

;; A verb is "destructive" when it may change server state: the
;; receiving verbs (POST, PUT) and DELETE.
(defun destructive-p (verb) (or (typep verb 'snooze-verbs:receiving-verb)
                                (typep verb 'snooze-verbs:delete)))
;;; Content-types
;;;
For PUT and POST requests we match routes based on what the client
declares to us in its " Content - Type " header . At most one CLOS
;;; primary method may match.
;;;
;;; In GET requests we are only interested in the request's "Accept"
header , since GET never have useful bodies ( 1 ) and as such do n't
have " Content - Type " . For GET requests , the logic is actually
;;; inverse: the routes are matched based on what the client accepts.
;;; If it accepts a range of content-types, multiple routes (or
primary CLOS methods ) are now eligible . We try many routes in
;;; order (according to that range) until we find one that matches.
;;;
[ 1 ] : -get-with-request-body
;;;
;; Root of the content-type class hierarchy; e.g. TEXT/HTML is a
;; subclass of TEXT, which is a subclass of CONTENT.
(defclass snooze-types:content () ())

(eval-when (:compile-toplevel :load-toplevel :execute)
  ;; Intern DESIGNATOR (upcased) in PACKAGE.
  (defun intern-safe (designator package)
    (intern (string-upcase designator) package))
  ;; Like CL-PPCRE:SCAN-TO-STRINGS but return the register groups as a
  ;; list instead of a vector.
  (defun scan-to-strings* (regex string)
    (coerce (nth-value 1
                       (cl-ppcre:scan-to-strings regex
                                                 string))
            'list)))

;; Define a content-type class named after TYPE-DESIGNATOR
;; (e.g. "text/html"), creating its supertype class (e.g. TEXT) on
;; demand.  The human-readable name is stored on the symbol's plist
;; under 'NAME.
(defmacro define-content (type-designator
                          &optional (supertype-designator
                                     (first (scan-to-strings*
                                             "([^/]+)" type-designator))))
  (let* ((type (intern-safe type-designator :snooze-types))
         (supertype (intern-safe supertype-designator :snooze-types)))
    `(progn
       (setf (get ',type 'name) ,(string-downcase (symbol-name type)))
       (unless (find-class ',supertype nil)
         (setf (get ',supertype 'name)
               ,(format nil "~a/*"
                        (string-downcase (symbol-name supertype))))
         (defclass ,supertype (snooze-types:content) ()))
       (defclass ,type (,supertype) ())
       (eval-when (:compile-toplevel :load-toplevel :execute)
         (export '(,type ,supertype) :snooze-types)))))

;; Expand to one DEFINE-CONTENT per entry of *MIME-TYPE-LIST*,
;; dropping any parameters after a ";" in the type spec.
(defmacro define-known-content-types ()
  `(progn
     ,@(loop for (type-spec . nil) in *mime-type-list*
             for matches
               = (nth-value
                  1 (cl-ppcre:scan-to-strings "(.*/.*)(?:;.*)?" type-spec))
             for type = (and matches (aref matches 0))
             when type
               collect `(define-content ,type))))

(eval-when (:compile-toplevel :load-toplevel :execute)
  (define-known-content-types))
(defun find-content-class (designator)
  "Return class for DESIGNATOR if it defines a content-type or nil.
DESIGNATOR may be a content instance, a content class, T (coerced to
the root CONTENT class, with a style warning), or a string/symbol such
as \"text/html\", \"text/*\" or \"*/*\"."
  (cond ((typep designator 'snooze-types:content)
         (class-of designator))
        ((and (typep designator 'class)
              (subtypep designator 'snooze-types:content))
         designator)
        ((eq designator t)
         (alexandria:simple-style-warning
          "Coercing content-designating type designator T to ~s"
          'snooze-types:content)
         (find-class 'snooze-types:content))
        ((or (symbolp designator)
             (stringp designator))
         ;; Try an exact match first, then "*/*", then a wildcard
         ;; match like "text/*" against the supertype class.
         (or (find-class (intern (string-upcase designator) :snooze-types) nil)
             (and (string= designator "*/*") (find-class 'snooze-types:content))
             (let* ((matches (nth-value 1
                                        (cl-ppcre:scan-to-strings
                                         "([^/]+)/\\*"
                                         (string-upcase designator))))
                    (supertype-designator (and matches
                                               (aref matches 0))))
               (find-class
                (intern (string-upcase supertype-designator) :snooze-types)
                nil))))
        (t
         (error "~a cannot possibly designate a content-type" designator))))

;; Human-readable name (e.g. "text/html") of the content class
;; designated by DESIGNATOR.
(defun content-class-name (designator)
  (get (class-name (find-content-class designator)) 'name))
;;; Resources
;;;
;; A "resource" is a generic function of the dedicated
;; RESOURCE-GENERIC-FUNCTION class.
(defun resource-p (thing)
  (and (functionp thing)
       (eq 'resource-generic-function (type-of thing))))

(deftype resource ()
  `(satisfies resource-p))

(defclass resource-generic-function (cl:standard-generic-function)
  ()
  (:metaclass closer-mop:funcallable-standard-class))

;; Name (a symbol) of a resource generic function.
(defun resource-name (resource)
  (closer-mop:generic-function-name resource))

;; Registry of all defined resources, keyed by name symbol.
(defvar *all-resources* (make-hash-table))
(defun find-resource (designator &key filter)
  "Return the resource designated by DESIGNATOR, or NIL.
DESIGNATOR may be a string or keyword (compared case-insensitively
against resource names), a resource object, or a symbol naming one.
When FILTER is non-NIL, only resources it accepts are returned."
  (cond ((or (stringp designator)
             (keywordp designator))
         (maphash (lambda (k v)
                    (when (and (string-equal (string k) (string designator))
                               (or (not filter)
                                   (funcall filter v)))
                      (return-from find-resource v)))
                  *all-resources*))
        ((resource-p designator)
         (find-resource (resource-name designator)
                        :filter filter))
        ((and designator
              (symbolp designator))
         (let ((probe (gethash designator *all-resources*)))
           ;; NOTE(review): here FILTER receives the symbol DESIGNATOR,
           ;; while the string branch above passes it the resource
           ;; object -- presumably one of the two is unintended; verify
           ;; against *RESOURCE-FILTER* users before changing.
           (when (or (not filter)
                     (funcall filter designator))
             probe)))
        (t
         ;; Fixed typo in the error message ("ins't" -> "isn't").
         (error "~a isn't a resource designator" designator))))
;; Remove a resource: unbind its generic function and drop it from the
;; registry.  Errors if DESIGNATOR names no resource.
(defun delete-resource (designator)
  (let ((resource (find-resource designator)))
    (cond (resource
           (fmakunbound (resource-name resource))
           (remhash (resource-name resource) *all-resources*))
          (t
           (error "No such resource to delete!")))))

;; Auto-register every new resource generic function in *ALL-RESOURCES*.
(defmethod initialize-instance :after
    ((gf resource-generic-function) &rest args)
  (declare (ignore args))
  (setf (gethash (resource-name gf) *all-resources*)
        gf))

(defun probe-class-sym (sym)
  "Like CL:FIND-CLASS but don't error and return SYM or nil"
  (when (find-class sym nil)
    sym))

(defun parse-defroute-args (defmethod-arglist)
  "Return values QUALIFIERS, LAMBDA-LIST, BODY for DEFMETHOD-ARGLIST"
  ;; Everything before the first list is a qualifier; the first list
  ;; is the specialized lambda list; the rest is the body.
  (loop for args on defmethod-arglist
        if (listp (first args))
          return (values qualifiers (first args) (cdr args))
        else
          collect (first args) into qualifiers))
(defun verb-spec-or-lose (verb-spec)
  "Convert VERB-SPEC into something CL:DEFMETHOD can grok.
Accepts (VAR VERB-DESIGNATOR), a keyword/string verb designator, a
bare variable, and returns a two-element (VAR VERB-CLASS) specializer."
  (labels ((verb-designator-to-verb (designator)
             (or (and (eq designator 't)
                      (progn
                        (alexandria:simple-style-warning
                         "Coercing verb-designating type T in ~a to ~s"
                         verb-spec 'snooze-verbs:http-verb)
                        'snooze-verbs:http-verb))
                 (probe-class-sym (intern (string-upcase designator)
                                          :snooze-verbs))
                 (error "Sorry, don't know the HTTP verb ~a"
                        (string-upcase designator)))))
    (cond ((and verb-spec
                (listp verb-spec))
           (list (first verb-spec)
                 (verb-designator-to-verb (second verb-spec))))
          ((or (keywordp verb-spec)
               (stringp verb-spec))
           (list 'snooze-verbs:http-verb (verb-designator-to-verb verb-spec)))
          (verb-spec
           (list verb-spec 'snooze-verbs:http-verb))
          (t
           (error "~a is not a valid convertable HTTP verb spec" verb-spec)))))

;; Like VERB-SPEC-OR-LOSE but for content-type specializers.
(defun content-type-spec-or-lose-1 (type-spec)
  (labels ((type-designator-to-type (designator)
             (let ((class (find-content-class designator)))
               (if class (class-name class)
                   (error "Sorry, don't know the content-type ~a" type-spec)))))
    (cond ((and type-spec
                (listp type-spec))
           (list (first type-spec)
                 (type-designator-to-type (second type-spec))))
          ((or (keywordp type-spec)
               (stringp type-spec))
           (list 'snooze-types:type (type-designator-to-type type-spec)))
          (type-spec
           (list type-spec (type-designator-to-type t))))))

;; Content-type specializers only make sense for verbs that carry or
;; negotiate content; for others only an unspecialized (VAR T) is
;; accepted.
(defun content-type-spec-or-lose (type-spec verb)
  (cond ((subtypep verb 'snooze-verbs:content-verb)
         (content-type-spec-or-lose-1 type-spec))
        ((and type-spec (listp type-spec))
         ;; specializations are not allowed on DELETE, for example
         (assert (eq t (second type-spec))
                 nil
                 "For verb ~a, no specializations on Content-Type are allowed"
                 verb)
         type-spec)
        (t
         (list type-spec t))))
(defun ensure-atom (thing)
  "Return THING if it is an atom, else descend into the head of the
list until an atom is found.  Uses CONSP rather than LISTP so that NIL
is returned as-is; the previous LISTP test looped forever on NIL,
since (FIRST NIL) is NIL."
  (if (consp thing)
      (ensure-atom (first thing))
      thing))
;; Coerce MAYBE-URI (a string or QURI:URI) to a QURI:URI object.
(defun ensure-uri (maybe-uri)
  (etypecase maybe-uri
    (string (quri:uri maybe-uri))
    (quri:uri maybe-uri)))

(defun parse-resource (uri)
  "Parse URI for a resource and how it should be called.
Honours *RESOURCE-NAME-FUNCTION*, *RESOURCES-FUNCTION*,
*HOME-RESOURCE* and *URI-CONTENT-TYPES-FUNCTION*.
Returns nil if the resource cannot be found, otherwise returns 3
values: RESOURCE, URI-CONTENT-TYPES and RELATIVE-URI. RESOURCE is a
generic function verifying RESOURCE-P discovered in URI.
URI-CONTENT-TYPES is a list of subclasses of SNOOZE-TYPES:CONTENT
discovered directly in URI by
*URI-CONTENT-TYPES-FUNCTION*. RELATIVE-URI is the remaining URI after
these discoveries."
  ;; <scheme name> : <hierarchical part> [ ? <query> ] [ # <fragment> ]
  ;;
  (let ((uri (ensure-uri uri))
        uri-stripped-of-content-type-info
        uri-content-types)
    ;; First give *URI-CONTENT-TYPES-FUNCTION* a chance to extract
    ;; content-types encoded in the URI itself (e.g. an extension).
    (when *uri-content-types-function*
      (multiple-value-setq (uri-content-types uri-stripped-of-content-type-info)
        (funcall *uri-content-types-function*
                 (quri:render-uri uri nil))))
    (let* ((uri (ensure-uri (or uri-stripped-of-content-type-info
                                uri))))
      (multiple-value-bind (resource-name relative-uri)
          (funcall *resource-name-function*
                   (quri:render-uri uri))
        ;; URL-decode the name; fall back to NIL (and thus
        ;; *HOME-RESOURCE*) when empty or undecodable.
        (setq resource-name (and resource-name
                                 (plusp (length resource-name))
                                 (ignore-errors
                                  (quri:url-decode resource-name))))
        (values (find-resource (or resource-name
                                   *home-resource*)
                               :filter *resource-filter*)
                (mapcar #'find-content-class uri-content-types)
                relative-uri)))))
;; Return the content classes matching an HTTP Accept header STRING,
;; expanding each media range to itself plus all its subclasses (so
;; "text/*" also yields TEXT/HTML, TEXT/PLAIN, ...), in header order.
(defun content-classes-in-accept-string (string)
  (labels ((expand (class)
             (cons class
                   (reduce
                    #'append
                    (mapcar #'expand
                            (closer-mop:class-direct-subclasses class))))))
    (loop for media-range-and-params in (cl-ppcre:split "\\s*,\\s*" string)
          for class = (parse-content-type-header media-range-and-params)
          when class
            append (expand class))))

(defun parse-content-type-header (string)
  "Return a class associated with the content-type described by STRING.
As a second value, return what RFC2388:PARSE-HEADER returned."
  (let* ((parsed (rfc2388:parse-header string :value))
         (designator (second parsed)))
    (values (find-content-class designator)
            parsed)))

;; Make an instance of the verb class named by DESIGNATOR, or error.
(defun find-verb-or-lose (designator)
  (let ((class (or (probe-class-sym
                    (intern (string-upcase designator)
                            :snooze-verbs))
                   (error "Can't find HTTP verb for designator ~a!"
                          designator))))
    ;; FIXME: perhaps use singletons here
    (make-instance class)))

(defun gf-primary-method-specializer (gf args ct-arg-pos)
  "Compute proper content-type for calling GF with ARGS"
  ;; Return the specializer at position CT-ARG-POS of the most specific
  ;; applicable method, or NIL when no method applies.
  (let ((applicable (compute-applicable-methods gf args)))
    (when applicable
      (nth ct-arg-pos (closer-mop:method-specializers (first applicable))))))
Internal symbols of : SNOOZE
;;;
(in-package :snooze)
;; Signal INCOMPATIBLE-LAMBDA-LIST unless ARGS can be destructured by
;; RESOURCE's lambda list (the two leading verb/content parameters are
;; accounted for by the leading (T T)).
(defun check-arglist-compatible (resource args)
  (let ((lambda-list (closer-mop:generic-function-lambda-list
                      resource)))
    (handler-case
        ;; FIXME: evaluate this need for eval, for security reasons
        (let ((*read-eval* nil))
          (handler-bind ((warning #'muffle-warning))
            (eval `(apply (lambda ,lambda-list
                            t)
                          '(t t ,@args)))))
      (error (e)
        (error 'incompatible-lambda-list
               :actual-args args
               :lambda-list (cddr lambda-list)
               :format-control "Too many, too few, or unsupported ~
                                query arguments for REST resource ~a"
               :format-arguments
               (list (resource-name resource))
               :original-condition e)))))

;; Enforce that NIL defaults among a genpath function's &OPTIONALs
;; only appear as a suffix (warn at macroexpansion, error at runtime).
(defun check-optional-args (opt-values &optional warn-p)
  (let ((nil-tail
          (member nil opt-values)))
    (unless (every #'null (rest nil-tail))
      (if warn-p
          (warn 'style-warning :format-control
                "The NIL defaults to a genpath-function's &OPTIONALs ~
                 must be at the end")
          (error "The NILs to a genpath-function's &OPTIONALs ~
                  must be at the end")))))

(defun genpath-fn-lambda-list (all-kwargs
                               augmented-optional
                               required
                               rest
                               aok-p)
  "Helper for MAKE-GENPATH-FORM"
  ;; Build the generated path-function's lambda list; &REST is not
  ;; supported and only triggers a style warning.
  `(,@required
    &optional
    ,@augmented-optional
    ,@(if rest
          (warn 'style-warning
                :format-control
                "&REST ~a is not supported for genpath-functions"
                :format-arguments (list rest)))
    &key
    ,@all-kwargs
    ,@(if aok-p `(&allow-other-keys))))
;; Build a DEFUN form for GENPATH-FN-NAME: a function that, given the
;; same arguments as the resource named RESOURCE-SYM, returns the URI
;; that would route to it (via ARGUMENTS-TO-URI).
(defun make-genpath-form (genpath-fn-name resource-sym lambda-list)
  (multiple-value-bind (required optional rest kwargs aok-p aux key-p)
      (alexandria:parse-ordinary-lambda-list lambda-list)
    (declare (ignore aux key-p))
    (let* (;; Each &OPTIONAL gains a fresh supplied-p gensym.
           ;;
           (augmented-optional
             (loop for (name default nil) in optional
                   collect `(,name ,default ,(gensym))))
           ;; Likewise for the &KEY parameters.
           ;;
           (augmented-kwargs
             (loop for (kw-and-sym default) in kwargs
                   collect `(,kw-and-sym ,default ,(gensym))))
           ;;
           ;;
           (all-kwargs
             augmented-kwargs)
           ;; Form evaluating to the required positional arguments.
           ;;
           (required-args-form
             `(list ,@required))
           ;; Optionals: the supplied value wins, else the (possibly
           ;; rebound) variable, else the default.
           (optional-args-form
             `(list ,@(loop for (name default supplied-p) in augmented-optional
                            collect `(if ,supplied-p ,name
                                         (or ,name ,default)))))
           ;; Alist of (KEYWORD . VALUE) pairs, dropping NIL values.
           ;;
           (keyword-arguments-form
             `(remove-if #'null
                         (list
                          ,@(loop for (kw-and-sym default supplied-p)
                                    in augmented-kwargs
                                  for (nil sym) = kw-and-sym
                                  collect `(cons (intern
                                                  (symbol-name ',sym)
                                                  (find-package :KEYWORD))
                                                 (if ,supplied-p
                                                     ,sym
                                                     (or ,sym
                                                         ,default)))))
                         :key #'cdr)))
      ;; Optional args are checked at macroexpansion time
      ;;
      (check-optional-args (mapcar #'second optional) 'warn-p)
      `(progn
         (defun ,genpath-fn-name
             ,(genpath-fn-lambda-list
               all-kwargs
               augmented-optional
               required
               rest
               aok-p)
           ;; And at runtime...
           ;;
           (check-optional-args ,optional-args-form)
           (arguments-to-uri
            (find-resource ',resource-sym)
            (append
             ,required-args-form
             (remove nil ,optional-args-form))
            ,keyword-arguments-form)
           )))))
;; Expander for DEFROUTE: turn NAME/ARGS into a DEFMETHOD on the
;; resource generic function, creating the resource via DEFRESOURCE
;; first if it does not exist yet.
(defun defroute-1 (name args)
  (let* (;; find the qualifiers and lambda list
         ;;
         (first-parse
           (multiple-value-list
            (parse-defroute-args args)))
         (qualifiers (first first-parse))
         (lambda-list (second first-parse))
         (body (third first-parse))
         ;; now parse body
         ;;
         (parsed-body (multiple-value-list (alexandria:parse-body body)))
         (remaining (first parsed-body))
         (declarations (second parsed-body))
         (docstring (third parsed-body))
         ;; Add syntactic sugar for the first two specializers in the
         ;; lambda list
         ;;
         (verb-spec (verb-spec-or-lose (first lambda-list)))
         (type-spec (content-type-spec-or-lose (second lambda-list)
                                               (second verb-spec)))
         (proper-lambda-list
           `(,verb-spec ,type-spec ,@(nthcdr 2 lambda-list)))
         (simplified-lambda-list
           (mapcar #'ensure-atom proper-lambda-list)))
    `(progn
       (unless (find-resource ',name)
         (defresource ,name ,simplified-lambda-list))
       (defmethod ,name ,@qualifiers
         ,proper-lambda-list
         ,@(if docstring `(,docstring))
         ,@declarations
         ,@remaining))))

;; Expander for DEFGENPATH: generate a path function for RESOURCE,
;; which must already exist.
(defun defgenpath-1 (function resource)
  (make-genpath-form function resource
                     (nthcdr 2 (closer-mop:generic-function-lambda-list
                                (let ((probe (find-resource resource)))
                                  (assert probe nil
                                          "Cannot find the resource ~a"
                                          resource)
                                  probe)))))
;; Expander for DEFRESOURCE: build a DEFGENERIC with the
;; RESOURCE-GENERIC-FUNCTION class.  (:ROUTE ...) options become
;; :METHODs with sugared verb/content specializers; a (:GENPATH SYM)
;; option additionally emits a path-generating DEFUN.
(defun defresource-1 (name lambda-list options)
  (let* ((genpath-form)
         (defgeneric-args
           (loop for option in options
                 for routep = (eq :route (car option))
                 for (qualifiers spec-list body)
                   = (and routep
                          (multiple-value-list
                           (parse-defroute-args (cdr option))))
                 for verb-spec = (and routep
                                      (verb-spec-or-lose (first spec-list)))
                 for type-spec = (and routep
                                      (content-type-spec-or-lose
                                       (second spec-list)
                                       (second verb-spec)))
                 if routep
                   collect `(:method
                              ,@qualifiers
                              (,verb-spec ,type-spec ,@(nthcdr 2 spec-list))
                              ,@body)
                 else if (eq :genpath (car option))
                        do (setq genpath-form
                                 (make-genpath-form (second option) name
                                                    (nthcdr 2 lambda-list)))
                 else
                   collect option))
         (simplified-lambda-list (mapcar #'(lambda (argspec)
                                             (ensure-atom argspec))
                                         lambda-list)))
    `(progn
       ,@(if genpath-form `(,genpath-form))
       (defgeneric ,name ,simplified-lambda-list
         (:generic-function-class resource-generic-function)
         ,@defgeneric-args))))
;;; Some external stuff but hidden away from the main file
;;;
;; Last-resort plain-text explanation of CONDITION.  When VERBOSE-P,
;; include the condition report, any extra EXPLAIN-FAILSAFE details and
;; the collected backtraces; otherwise just "CODE REASON".
(defmethod explain-condition-failsafe (condition resource &optional verbose-p)
  (declare (ignore resource))
  (let* ((original-condition (and (typep condition 'resignalled-condition)
                                  (original-condition condition)))
         ;; Prefer the wrapped condition's HTTP status; default to 500.
         (status-code (or (and original-condition
                               (typep original-condition 'http-condition)
                               (status-code original-condition))
                          500)))
    (with-output-to-string (s)
      (cond (verbose-p
             (format s "~a" condition)
             (explain-failsafe condition s)
             (loop for (condition backtrace) in *useful-backtraces*
                   do (format s "~&~%Here's a backtrace for condition ~s~
                                 ~&~a" condition backtrace)))
            (t
             (format s "~a ~a"
                     status-code
                     (reason-for status-code)))))))
;;; Condition hierarchy.  HTTP-CONDITION carries a status code;
;;; RESIGNALLED-CONDITION wraps an original condition caught while
;;; processing a request.

;; Base class for every condition that maps to an HTTP response.
(define-condition http-condition (simple-condition)
  ((status-code :initarg :status-code
                :initform (error "Must supply a HTTP status code.")
                :reader status-code))
  (:default-initargs :format-control "HTTP condition"))

;; 5xx conditions; status range is enforced in INITIALIZE-INSTANCE.
(define-condition http-error (http-condition simple-error) ()
  (:default-initargs
   :format-control "HTTP Internal Server Error"
   :status-code 500))

(define-condition no-such-resource (http-condition) ()
  (:default-initargs
   :status-code 404
   :format-control "Resource does not exist"))

(define-condition invalid-resource-arguments (http-condition) ()
  (:default-initargs
   :status-code 400
   :format-control "Resource exists but invalid arguments passed"))

;; Mixin for conditions that wrap another condition.
(define-condition resignalled-condition ()
  ((original-condition :initarg :original-condition
                       :initform (error "Must supply an original condition")
                       :reader original-condition)))

;; A URI query argument whose value could not be read into a Lisp
;; object.
(define-condition unconvertible-argument
    (invalid-resource-arguments resignalled-condition)
  ((unconvertible-argument-value :initarg :unconvertible-argument-value
                                 :accessor unconvertible-argument-value)
   (unconvertible-argument-key :initarg :unconvertible-argument-key
                               :accessor unconvertible-argument-key))
  (:default-initargs
   :format-control "An argument in the URI cannot be read"))

;; Arguments that do not fit the route's lambda list.
(define-condition incompatible-lambda-list
    (invalid-resource-arguments resignalled-condition)
  ((lambda-list :initarg :lambda-list
                :initform (error "Must supply :LAMBDA-LIST")
                :accessor lambda-list)
   (actual-args :initarg :actual-args
                :initform (error "Must supply :ACTUAL-ARGS")
                :accessor actual-args))
  (:default-initargs
   :format-control "An argument in the URI cannot be read"))

;; URI whose structure could not be converted into arguments at all.
(define-condition invalid-uri-structure
    (invalid-resource-arguments resignalled-condition)
  ((invalid-uri :initarg :invalid-uri
                :initform (error "Must supply the invalid URI")
                :accessor invalid-uri))
  (:default-initargs
   :format-control "The URI structure cannot be converted into arguments"))

(define-condition unsupported-content-type (http-error) ()
  (:default-initargs
   :status-code 501
   :format-control "Content type is not supported"))

;; Note: no default status code; it is computed by the signaller.
(define-condition no-such-route (http-condition) ()
  (:default-initargs
   :format-control "Resource exists but no such route"))

;; EXPLAIN-CONDITION itself failed while explaining some condition.
(define-condition error-when-explaining (simple-error resignalled-condition) ()
  (:default-initargs
   :format-control "An error occurred when trying to explain a condition"))
;; Print HTTP conditions as "<TYPE CODE: message>".
(defmethod print-object ((c http-condition) s)
  (print-unreadable-object (c s :type t)
    (format s "~a: ~?" (status-code c)
            (simple-condition-format-control c)
            (simple-condition-format-arguments c))))

;; Print wrappers by printing the condition they wrap.
(defmethod print-object ((c resignalled-condition) s)
  (print-unreadable-object (c s :type t)
    (princ (original-condition c) s)))

;; Default: nothing extra to say.
(defmethod explain-failsafe ((c condition) s)
  ;; (format s "~&~%No more interesting information on ~a, sorry~%" c)
  )

(defmethod explain-failsafe ((c error-when-explaining) s)
  (format s "~& SNOOZE:EXPLAIN-CONDITION is missing a method to politely explain:~
             ~& ~a~
             ~& to the client."
          (original-condition c)))

(defmethod explain-failsafe ((c unconvertible-argument) s)
  (format s "~& SNOOZE:URI-TO-ARGUMENTS caught a ~a when converting:~
             ~& ~a=~a~
             ~& into Lisp objects to give to your route."
          (type-of (original-condition c))
          (unconvertible-argument-key c)
          (unconvertible-argument-value c)))

(defmethod explain-failsafe ((c invalid-uri-structure) s)
  (format s "~& SNOOZE:URI-TO-ARGUMENTS can't grok this URI:~
             ~& ~a" (invalid-uri c)))

(defmethod explain-failsafe ((c incompatible-lambda-list) s)
  (format s "~& Snooze failed to fit:~
             ~& ~s~
             ~& to the lambda list:~
             ~& ~a~
             ~& which produced a ~a which your Lisp describes as:~
             ~& ~a"
          (actual-args c) (lambda-list c)
          (type-of (original-condition c))
          (original-condition c)))

;; Frame every wrapped condition's explanation with an intro line
;; before and the wrapped condition's own explanation after.
(defmethod explain-failsafe :before ((c resignalled-condition) s)
  (format s "~&~%You got a ~a because:~% " (type-of c)))

(defmethod explain-failsafe :after ((c resignalled-condition) s)
  (explain-failsafe (original-condition c) s))
;;; More internal stuff
;;;
;; HTTP-ERROR instances must carry a 5xx status code.
(defmethod initialize-instance :after ((e http-error) &key)
  (assert (<= 500 (status-code e) 599) nil
          "An HTTP error must have a status code between 500 and 599"))

(defun matching-content-type-or-lose (resource verb args try-list)
  "Check RESOURCE for route matching VERB, TRY-LIST and ARGS.
TRY-LIST, a list of subclasses of SNOOZE-TYPES:CONTENT, is iterated.
The first subclass for which RESOURCE has a matching specializer is
used to create an instance, which is returned. If none is found error
out with NO-SUCH-ROUTE."
  (or (some (lambda (maybe)
              (when (gf-primary-method-specializer
                     resource
                     (list* verb maybe args)
                     1)
                maybe))
            (mapcar #'make-instance try-list))
      (error 'no-such-route
             :status-code (if try-list
                              (if (destructive-p verb)
                                  415 ; unsupported media type
                                  406 ; not acceptable
                                  )
                              ;; FIXME, make "unimplemented" more pervasive
                              501 ; unimplemented
                              ))))

(defvar *useful-backtraces* nil "Useful backtraces.")

;; Wrap BODY so that, while *CATCH-ERRORS* is true, every condition
;; signalled within is recorded (with its printed backtrace) in
;; *USEFUL-BACKTRACES*, deduplicated by condition identity.  The
;; handler declines (returns normally), so signalling proceeds.
(defmacro saving-useful-backtrace (args &body body)
  (declare (ignore args))
  `(handler-bind
       ((t
          (lambda (e)
            (when *catch-errors*
              (pushnew (list e
                             (with-output-to-string (s)
                               (uiop/image:print-condition-backtrace
                                e :stream s)))
                       *useful-backtraces*
                       :test (lambda (a b) (eq (first a) (first b))))))))
     ,@body))
;; Call FN, converting escaping conditions into a plain-text HTTP
;; response thrown to the RESPONSE catch tag, using the failsafe
;; explainer.  *CATCH-ERRORS* / *CATCH-HTTP-CONDITIONS* select between
;; a succinct and a verbose explanation (:VERBOSE).
(defun call-brutally-explaining-conditions (fn)
  (let (code condition original-condition *useful-backtraces*)
    (flet ((explain (verbose-p)
             (throw 'response
               (values code
                       (explain-condition-failsafe condition
                                                   *resource*
                                                   verbose-p)
                       (content-class-name 'text/plain)))))
      (restart-case
          (handler-bind
              ((resignalled-condition
                 (lambda (e)
                   ;; Remember the wrapped condition and, if it is an
                   ;; HTTP condition, its status code.
                   (setq original-condition (original-condition e)
                         code
                         (when (typep original-condition 'http-condition)
                           (status-code original-condition)))))
               (error
                 (lambda (e)
                   (setq code (or code 500)
                         condition e)
                   (cond ((eq *catch-errors* :verbose)
                          (invoke-restart 'explain-verbosely))
                         (*catch-errors*
                          (invoke-restart 'failsafe-explain))
                         (;; HACK! notice that a non-error
                          ;; `http-condition' (like a simple redirect)
                          ;; with `*catch-errors*' = NIL and
                          ;; `*catch-http-conditions*' = T will land
                          ;; us in this branch. We do not want to
                          ;; break in this case, so explain succintly.
                          (and original-condition
                               (typep original-condition 'http-condition)
                               (not (typep original-condition 'error)))
                          (invoke-restart 'failsafe-explain)))))
               (http-condition
                 (lambda (c)
                   (setq code (status-code c) condition c)
                   (cond ((eq *catch-http-conditions* :verbose)
                          (invoke-restart 'explain-verbosely))))))
            (saving-useful-backtrace () (funcall fn)))
        (explain-verbosely () :report
                              (lambda (s)
                                (format s "Explain ~a condition more verbosely" code))
          (explain t))
        (failsafe-explain () :report
                             (lambda (s) (format s "Explain ~a condition very succintly" code))
          (explain nil))))))
;; Call FN; when a condition escapes, try to explain it "politely" in
;; one of the content-types in CLIENT-ACCEPTS for which an
;; EXPLAIN-CONDITION method exists, throwing the explanation to the
;; RESPONSE catch tag.  Falls through (for the brutal explainer) when
;; no polite explanation is possible.
(defun call-politely-explaining-conditions (client-accepts fn)
  (let (code
        condition
        accepted-type)
    (labels ((accepted-type-for (condition)
               ;; First client-accepted content-type for which an
               ;; EXPLAIN-CONDITION method applies.
               (some (lambda (wanted)
                       (when (gf-primary-method-specializer
                              #'explain-condition
                              (list condition *resource* wanted)
                              1)
                         wanted))
                     (mapcar #'make-instance client-accepts)))
             (check-politely-explain ()
               (unless accepted-type
                 (error 'error-when-explaining
                        :format-control "No ~a to politely explain ~a to client"
                        :format-arguments
                        (list 'explain-condition (type-of condition))
                        :original-condition condition))))
      (restart-case
          (handler-bind ((condition
                           (lambda (c)
                             ;; Track the latest condition and whether
                             ;; it can be politely explained.
                             (setq
                              condition c
                              accepted-type (accepted-type-for condition))))
                         (http-condition
                           (lambda (c)
                             (setq code (status-code c))
                             (when (and *catch-http-conditions*
                                        (not (eq *catch-http-conditions*
                                                 :verbose)))
                               (check-politely-explain)
                               (invoke-restart 'politely-explain))))
                         (error
                           (lambda (e)
                             (declare (ignore e))
                             (setq code 500)
                             (when (and *catch-errors*
                                        (not (eq *catch-errors* :verbose)))
                               (check-politely-explain)
                               (invoke-restart 'politely-explain)))))
            (saving-useful-backtrace () (funcall fn)))
        (politely-explain ()
          :report (lambda (s)
                    (format s "Politely explain to client in ~a"
                            accepted-type))
          :test (lambda (c) (declare (ignore c)) accepted-type)
          (throw 'response
            (handler-case
                (values code
                        (explain-condition condition *resource* accepted-type)
                        (content-class-name accepted-type))
              ;; If the explainer itself errors, resignal wrapped.
              (error (e)
                (error 'error-when-explaining
                       :format-control "Error when explaining ~a"
                       :format-arguments (list (type-of e))
                       :original-condition condition)))))
        (auto-catch ()
          :report (lambda (s)
                    (format s "Start catching ~a automatically"
                            (if (typep condition 'http-condition)
                                "HTTP conditions" "errors")))
          :test (lambda (c)
                  (if (typep c 'http-condition)
                      (not *catch-http-conditions*)
                      (not *catch-errors*)))
          ;; Flip the relevant catch flag, then retry the explanation.
          (if (typep condition 'http-condition)
              (setq *catch-http-conditions* t)
              (setq *catch-errors* t))
          (if (find-restart 'politely-explain)
              (invoke-restart 'politely-explain)
              (if (find-restart 'failsafe-explain)
                  (invoke-restart 'failsafe-explain))))))))
(defmacro brutally-explaining-conditions (() &body body)
  "Run BODY with failsafe condition explaining.
Wraps BODY in CALL-BRUTALLY-EXPLAINING-CONDITIONS, which honours the
:VERBOSE settings of *CATCH-ERRORS* and *CATCH-HTTP-CONDITIONS*."
  (list 'call-brutally-explaining-conditions
        (list* 'lambda '() body)))
(defmacro politely-explaining-conditions ((client-accepts) &body body)
  "Run BODY, explaining conditions according to what the client accepts.
Wraps BODY in CALL-POLITELY-EXPLAINING-CONDITIONS; honours
*CATCH-ERRORS* and *CATCH-HTTP-CONDITIONS*."
  (list 'call-politely-explaining-conditions
        client-accepts
        (list* 'lambda '() body)))
;; Forward declaration: *RESOURCE* has no top-level value (it is bound
;; per-request in HANDLE-REQUEST-1).  A valueless DEFVAR cannot carry a
;; docstring, hence the separate (SETF DOCUMENTATION) form.
(defvar *resource*)
(setf (documentation '*resource* 'variable)
      "Bound early in HANDLE-REQUEST-1 to nil or to a RESOURCE.
Used by POLITELY-EXPLAINING-CONDITIONS and
BRUTALLY-EXPLAINING-CONDITIONS to pass a resource to
EXPLAIN-CONDITION.")
(defun handle-request-1 (uri method accept &optional content-type)
  "Core request dispatcher: answer METHOD on URI for one client request.
ACCEPT is the request's \"Accept\" header string; CONTENT-TYPE, if
given, its \"Content-Type\" header.  Returns (by catching 'RESPONSE)
three values: the HTTP status code, the payload, and the name of the
payload's content-type."
  (catch 'response
    (let (*resource*
          content-classes-encoded-in-uri
          relative-uri)
      (brutally-explaining-conditions ()
        (multiple-value-setq (*resource* content-classes-encoded-in-uri relative-uri)
          (parse-resource uri))
        (let* ((verb (find-verb-or-lose method))
               (client-accepted-content-types
                ;; Types encoded directly in the URI take precedence over
                ;; the Accept header; fall back to text/plain when neither
                ;; yields anything.
                (or (append content-classes-encoded-in-uri
                            (content-classes-in-accept-string accept))
                    (list (find-content-class 'snooze-types:text/plain)))))
          (politely-explaining-conditions (client-accepted-content-types)
            (unless *resource*
              (error 'no-such-resource
                     :format-control
                     "So sorry, but that URI doesn't match any REST resources"))
            ;; URL-decode args to strings
            ;;
            (multiple-value-bind (converted-plain-args converted-keyword-args)
                (handler-bind
                    ((error
                      ;; Resignal conversion errors with the offending URI
                      ;; attached (only when catching errors).
                      (lambda (e)
                        (when *catch-errors*
                          (error 'invalid-uri-structure
                                 :format-control
                                 "Caught ~a in URI-TO-ARGUMENTS"
                                 :format-arguments (list (type-of e))
                                 :original-condition e
                                 :invalid-uri relative-uri)))))
                  (uri-to-arguments *resource* relative-uri))
              (let ((converted-arguments
                     ;; Flatten the keyword alist into a plain argument
                     ;; list: plain args first, then alternating key/value.
                     (append converted-plain-args
                             (loop for (a . b) in converted-keyword-args
                                   collect a collect b))))
                ;; Double check that the arguments indeed
                ;; fit the resource's lambda list
                ;;
                (check-arglist-compatible *resource* converted-arguments)
                (let* ((matching-ct
                        (typecase verb
                          ;; HTTP DELETE doesn't care about
                          ;; content-types
                          (snooze-verbs:delete nil)
                          (t
                           (matching-content-type-or-lose
                            *resource*
                            verb
                            converted-arguments
                            ;; GET-like verbs match on what the client
                            ;; accepts; PUT/POST on what it sends.
                            (typecase verb
                              (snooze-verbs:sending-verb
                               client-accepted-content-types)
                              (snooze-verbs:receiving-verb
                               (list (or (and content-classes-encoded-in-uri
                                              (first content-classes-encoded-in-uri))
                                         (parse-content-type-header content-type)
                                         (error 'unsupported-content-type))))))))))
                  (multiple-value-bind (payload code payload-ct)
                      (apply *resource* verb matching-ct converted-arguments)
                    (unless code
                      (setq code (if payload
                                     200 ; OK
                                     204 ; OK, no content
                                     )))
                    (cond (payload-ct
                           ;; The route named its own payload content-type;
                           ;; warn when it contradicts the matched one.
                           (when (and (destructive-p verb)
                                      (not (typep payload-ct
                                                  (class-of matching-ct))))
                             (warn "Route declared ~a as its payload ~
                                    content-type, but it matched ~a"
                                   payload-ct matching-ct)))
                          (t
                           (setq payload-ct
                                 (if (destructive-p verb)
                                     'snooze-types:text/html ; the default
                                     matching-ct))))
                    (throw 'response (values code
                                             payload
                                             (content-class-name
                                              payload-ct)))))))))))))
;;; Default values for options
;;;
(defun default-resource-name (uri)
  "Default value for *RESOURCE-NAME-FUNCTION*, which see.
Split URI (e.g. \"/foo/bar?x=1\") into a resource name (\"foo\") and,
as a second value, the remaining URI (\"/bar?x=1\") or NIL.  An empty
URI yields just the empty string."
  (if (string= "" uri)
      ""
      (let ((boundary (position-if (lambda (char)
                                     (member char '(#\/ #\?)))
                                   uri :start 1)))
        (if boundary
            (values (subseq uri 1 boundary) (subseq uri boundary))
            (values (subseq uri 1) nil)))))
(defun search-for-extension-content-type (uri-path)
  "Default value for *URI-CONTENT-TYPES-FUNCTION*, which see.
Look for a file extension in URI-PATH; when the extension maps to a
known MIME type, return that content class in a list and, as a second
value, URI-PATH with the extension spliced out."
  (multiple-value-bind (whole-match submatches)
      (cl-ppcre:scan-to-strings "([^\\.]+)\\.(\\w+)([^/]*)$" uri-path)
    (when whole-match
      (let ((class (find-content-class
                    (gethash (aref submatches 1) *mime-type-hash*))))
        (when class
          (values (list class)
                  (format nil "~a~a"
                          (aref submatches 0)
                          (aref submatches 2))))))))
(defun all-defined-resources ()
  "Default value for *RESOURCES-FUNCTION*, which see."
  ;; Simply expose the global name->resource registry.
  snooze-common:*all-resources*)
;;; Reading and writing URIs
;;;
(defun resource-package (resource)
  "Return the package in which RESOURCE's name symbol is interned."
  (let ((name-symbol (resource-name resource)))
    (symbol-package name-symbol)))
(defun uri-to-arguments-1 (resource relative-uri)
  "Do actual work for default method of URI-TO-ARGUMENTS.
Returns two values: the list of converted plain (positional) arguments
and an alist of (KEYWORD . VALUE) keyword arguments, including a
SNOOZE:FRAGMENT entry when RELATIVE-URI carries a #fragment."
  (labels ((probe (str &optional key)
             ;; READ one argument for RESOURCE from STR, resignalling any
             ;; error as UNCONVERTIBLE-ARGUMENT (when *CATCH-ERRORS*).
             (handler-bind
                 ((error (lambda (e)
                           (when *catch-errors*
                             (error 'unconvertible-argument
                                    :unconvertible-argument-value str
                                    :unconvertible-argument-key key
                                    :original-condition e
                                    :format-control
                                    "Malformed arg for resource ~a"
                                    :format-arguments
                                    (list (resource-name resource)))))))
               (progn
                 (let ((*read-eval* nil))
                   (read-for-resource resource str)))))
           (probe-keyword (str)
             (let* ((probe (probe str)))
               ;; Though perhaps that keyword is accepted, we may
               ;; still refuse to intern it in the :KEYWORD package
               ;; before trying to use it as a keyword argument, if it
               ;; looks like the symbol didn't "exist" yet.
               ;;
               ;; In other words, we simply require that the symbol
               ;; has a package: it's up to READ-FOR-RESOURCE (the
               ;; default doesn't intern new symbols) to decide if it
               ;; spits out symbols in those conditions.
               ;;
               (if (and (symbolp probe)
                        (symbol-package probe))
                   (intern (symbol-name probe) :keyword)
                   (error 'invalid-resource-arguments
                          :format-control "Unknown keyword for resource ~a"
                          :format-arguments (list (resource-name resource)))))))
    (when relative-uri
      (let* ((relative-uri (ensure-uri relative-uri))
             (path (quri:uri-path relative-uri))
             (query (quri:uri-query relative-uri))
             (fragment (quri:uri-fragment relative-uri))
             ;; Plain args: the /-separated path components.
             (plain-args (and path
                              (plusp (length path))
                              (cl-ppcre:split "/" (subseq path 1))))
             ;; Keyword args: key=value pairs split on ';' or '&'.
             (keyword-args
              (append
               (and
                query
                (loop for maybe-pair in (cl-ppcre:split "[;&]" query)
                      for (undecoded-key-name undecoded-value-string)
                        = (scan-to-strings* "(.*)=(.*)" maybe-pair)
                      when (and undecoded-key-name undecoded-value-string)
                        collect
                        (cons (quri:url-decode undecoded-key-name)
                              (quri:url-decode undecoded-value-string)))))))
        (values
         (mapcar #'probe (mapcar #'quri:url-decode plain-args))
         (loop for (key-str . value-str) in keyword-args
               collect (cons (probe-keyword key-str)
                             (probe value-str key-str))
                 into keyword-alist
               finally
                 (return
                   (append
                    keyword-alist
                    ;; A #fragment, if present, becomes the
                    ;; SNOOZE:FRAGMENT keyword argument.
                    (if fragment
                        `((snooze:fragment . ,(probe fragment))))))))))))
(defun arguments-to-uri-1 (resource plain-args keyword-args)
  "Do actual work for default method of ARGUMENTS-TO-URI."
  (flet ((encode (thing &optional keyword)
           ;; Keywords are rendered lowercase; every other value goes
           ;; through WRITE-FOR-RESOURCE before URL-encoding.
           (quri:url-encode (if keyword
                                (string-downcase thing)
                                (write-for-resource resource thing)))))
    (let* ((plain-part (format nil "/~{~a~^/~}"
                               (mapcar #'encode plain-args)))
           (query-part (and keyword-args
                            (format nil "?~{~a=~a~^&~}"
                                    (loop for (k . v) in keyword-args
                                          collect (encode k t)
                                          collect (encode v))))))
      (format nil "/~a~a~a"
              (string-downcase (resource-name resource))
              plain-part
              (or query-part "")))))
(defun read-for-resource-1 (resource string)
  "Do actual work for default method of READ-FOR-RESOURCE."
  ;; Bind *PACKAGE* so unqualified symbols in STRING resolve in the
  ;; resource's home package.  The second argument to the safe reader
  ;; presumably controls interning behaviour — see
  ;; SNOOZE-SAFE-SIMPLE-READ:SAFE-SIMPLE-READ-FROM-STRING to confirm.
  (let ((*package* (resource-package resource)))
    (snooze-safe-simple-read:safe-simple-read-from-string string t)))
(defun write-for-resource-1 (resource object)
  "Do actual work for default method of WRITE-FOR-RESOURCE.
Prints OBJECT in RESOURCE's home package with :DOWNCASE printing.
Uninterned symbols are printed by (downcased) name so they don't come
out in unreadable #:FOO syntax."
  ;; Consistency: use the RESOURCE-PACKAGE helper instead of repeating
  ;; (symbol-package (resource-name ...)) inline; same value either way.
  (let ((*package* (resource-package resource))
        (*print-case* :downcase))
    (if (and (symbolp object)
             (not (symbol-package object)))
        (princ-to-string (string-downcase (symbol-name object)))
        (write-to-string object))))
| null | https://raw.githubusercontent.com/atgreen/red-light-green-light/1dad8773dcec766ce354112416f3b9aa9528fa49/local-projects/snooze-20210518-git/common.lisp | lisp | Verbs
"Sending" and "Receiving" are always from the server's
"receiving from client".
Content-types
primary method may match.
In GET requests we are only interested in the request's "Accept"
inverse: the routes are matched based on what the client accepts.
If it accepts a range of content-types, multiple routes (or
order (according to that range) until we find one that matches.
Resources
specializations are not allowed on DELETE, for example
<scheme name> : <hierarchical part> [ ? <query> ] [ # <fragment> ]
FIXME: perhaps use singletons here
FIXME: evaluate this need for eval, for security reasons
Optional args are checked at macroexpansion time
And at runtime...
find the qualifiers and lambda list
now parse body
lambda list
Some external stuff but hidden away from the main file
(format s "~&~%No more interesting information on ~a, sorry~%" c)
More internal stuff
unsupported media type
not acceptable
unimplemented
HACK! notice that a non-error
`http-condition' (like a simple redirect)
`*catch-http-conditions*' = T will land
us in this branch. We do not want to
break in this case, so explain succintly.
URL-decode args to strings
Double check that the arguments indeed
fit the resource's lambda list
HTTP DELETE doesn't care about
content-types
OK
OK, no content
the default
Default values for options
Though perhaps that keyword is accepted, we may
before trying to use it as a keyword argument, if it
looks like the symbol didn't "exist" yet.
In other words, we simply require that the symbol
has a package: it's up to READ-FOR-RESOURCE (the
default doesn't intern new symbols) to decide if it
spits out symbols in those conditions.
| (in-package #:snooze-common)
perspective . Hence GET is " sending to client " and POST and PUT are
(defpackage :snooze-verbs (:use)
(:export #:http-verb #:get #:post #:put #:delete
#:content-verb
#:receiving-verb
#:sending-verb))
(cl:defclass snooze-verbs:http-verb () ())
(cl:defclass snooze-verbs:delete (snooze-verbs:http-verb) ())
(cl:defclass snooze-verbs:content-verb (snooze-verbs:http-verb) ())
(cl:defclass snooze-verbs:receiving-verb (snooze-verbs:content-verb) ())
(cl:defclass snooze-verbs:sending-verb (snooze-verbs:content-verb) ())
(cl:defclass snooze-verbs:post (snooze-verbs:receiving-verb) ())
(cl:defclass snooze-verbs:put (snooze-verbs:receiving-verb) ())
(cl:defclass snooze-verbs:get (snooze-verbs:sending-verb) ())
(defun destructive-p (verb)
  "True when VERB may change server state: PUT/POST (receiving) or DELETE."
  (typecase verb
    (snooze-verbs:receiving-verb t)
    (snooze-verbs:delete t)
    (t nil)))
;;; Content-types
;;;
;;; For PUT and POST requests we match routes based on what the client
;;; declares to us in its "Content-Type" header.  At most one CLOS
;;; primary method may match.  In GET requests we are only interested
;;; in the request's "Accept" header, since GETs never have useful
;;; bodies (1) and as such don't have "Content-Type".  For GET
;;; requests, the logic is actually inverse: the routes are matched
;;; based on what the client accepts.
;;;
;;; [1]: -get-with-request-body (URL truncated in this copy of the source)
(defclass snooze-types:content () ())
(eval-when (:compile-toplevel :load-toplevel :execute)
(defun intern-safe (designator package)
(intern (string-upcase designator) package))
(defun scan-to-strings* (regex string)
(coerce (nth-value 1
(cl-ppcre:scan-to-strings regex
string))
'list)))
(defmacro define-content (type-designator
&optional (supertype-designator
(first (scan-to-strings*
"([^/]+)" type-designator))))
(let* ((type (intern-safe type-designator :snooze-types))
(supertype (intern-safe supertype-designator :snooze-types)))
`(progn
(setf (get ',type 'name) ,(string-downcase (symbol-name type)))
(unless (find-class ',supertype nil)
(setf (get ',supertype 'name)
,(format nil "~a/*"
(string-downcase (symbol-name supertype))))
(defclass ,supertype (snooze-types:content) ()))
(defclass ,type (,supertype) ())
(eval-when (:compile-toplevel :load-toplevel :execute)
(export '(,type ,supertype) :snooze-types)))))
(defmacro define-known-content-types ()
`(progn
,@(loop for (type-spec . nil) in *mime-type-list*
for matches
= (nth-value
1 (cl-ppcre:scan-to-strings "(.*/.*)(?:;.*)?" type-spec))
for type = (and matches (aref matches 0))
when type
collect `(define-content ,type))))
(eval-when (:compile-toplevel :load-toplevel :execute)
(define-known-content-types))
(defun find-content-class (designator)
"Return class for DESIGNATOR if it defines a content-type or nil."
(cond ((typep designator 'snooze-types:content)
(class-of designator))
((and (typep designator 'class)
(subtypep designator 'snooze-types:content))
designator)
((eq designator t)
(alexandria:simple-style-warning
"Coercing content-designating type designator T to ~s"
'snooze-types:content)
(find-class 'snooze-types:content))
((or (symbolp designator)
(stringp designator))
(or (find-class (intern (string-upcase designator) :snooze-types) nil)
(and (string= designator "*/*") (find-class 'snooze-types:content))
(let* ((matches (nth-value 1
(cl-ppcre:scan-to-strings
"([^/]+)/\\*"
(string-upcase designator))))
(supertype-designator (and matches
(aref matches 0))))
(find-class
(intern (string-upcase supertype-designator) :snooze-types)
nil))))
(t
(error "~a cannot possibly designate a content-type" designator))))
(defun content-class-name (designator)
  "Return the MIME string (stored on the class name's 'NAME property)
for the content class designated by DESIGNATOR."
  (let ((class (find-content-class designator)))
    (get (class-name class) 'name)))
(defun resource-p (thing)
  "True when THING is a resource, i.e. a generic function whose class
is RESOURCE-GENERIC-FUNCTION."
  (and (functionp thing)
       (eq (type-of thing) 'resource-generic-function)))
(deftype resource ()
`(satisfies resource-p))
(defclass resource-generic-function (cl:standard-generic-function)
()
(:metaclass closer-mop:funcallable-standard-class))
(defun resource-name (resource)
(closer-mop:generic-function-name resource))
(defvar *all-resources* (make-hash-table)
  "Registry mapping resource name symbols to resource generic functions.
Populated by INITIALIZE-INSTANCE on RESOURCE-GENERIC-FUNCTION and
pruned by DELETE-RESOURCE.")
(defun find-resource (designator &key filter)
  "Find the resource designated by DESIGNATOR.
DESIGNATOR may be a string or keyword (matched case-insensitively
against resource names), a resource itself, or a symbol naming one.
FILTER, when supplied, is a predicate the found resource must satisfy.
Returns NIL when nothing matches."
  (cond ((or (stringp designator)
             (keywordp designator))
         ;; Scan the registry for a case-insensitive name match.
         (maphash (lambda (k v)
                    (when (and (string-equal (string k) (string designator))
                               (or (not filter)
                                   (funcall filter v)))
                      (return-from find-resource v)))
                  *all-resources*))
        ((resource-p designator)
         (find-resource (resource-name designator)
                        :filter filter))
        ((and designator
              (symbolp designator))
         (let ((probe (gethash designator *all-resources*)))
           ;; Bug fix: apply FILTER to the resource found (PROBE), not to
           ;; the designating symbol — consistent with the other branches.
           (when (and probe
                      (or (not filter)
                          (funcall filter probe)))
             probe)))
        (t
         ;; Fixed typo in the error message ("ins't" -> "isn't").
         (error "~a isn't a resource designator" designator))))
(defun delete-resource (designator)
  "Delete the resource designated by DESIGNATOR.
Unbinds its generic function and removes it from *ALL-RESOURCES*;
signals an error when DESIGNATOR names no resource."
  (let ((resource (find-resource designator)))
    (unless resource
      (error "No such resource to delete!"))
    (let ((name (resource-name resource)))
      (fmakunbound name)
      (remhash name *all-resources*))))
(defmethod initialize-instance :after
((gf resource-generic-function) &rest args)
(declare (ignore args))
(setf (gethash (resource-name gf) *all-resources*)
gf))
(defun probe-class-sym (sym)
  "Return SYM when it names a class, otherwise NIL (never signals)."
  (and (find-class sym nil) sym))
(defun parse-defroute-args (defmethod-arglist)
  "Return values QUALIFIERS, LAMBDA-LIST, BODY for DEFMETHOD-ARGLIST"
  ;; Everything up to the first list is a method qualifier (e.g.
  ;; :AROUND); the first list is the specialized lambda list; whatever
  ;; follows it is the method body.
  (loop for args on defmethod-arglist
        if (listp (first args))
          return (values qualifiers (first args) (cdr args))
        else
          collect (first args) into qualifiers))
(defun verb-spec-or-lose (verb-spec)
"Convert VERB-SPEC into something CL:DEFMETHOD can grok."
(labels ((verb-designator-to-verb (designator)
(or (and (eq designator 't)
(progn
(alexandria:simple-style-warning
"Coercing verb-designating type T in ~a to ~s"
verb-spec 'snooze-verbs:http-verb)
'snooze-verbs:http-verb))
(probe-class-sym (intern (string-upcase designator)
:snooze-verbs))
(error "Sorry, don't know the HTTP verb ~a"
(string-upcase designator)))))
(cond ((and verb-spec
(listp verb-spec))
(list (first verb-spec)
(verb-designator-to-verb (second verb-spec))))
((or (keywordp verb-spec)
(stringp verb-spec))
(list 'snooze-verbs:http-verb (verb-designator-to-verb verb-spec)))
(verb-spec
(list verb-spec 'snooze-verbs:http-verb))
(t
(error "~a is not a valid convertable HTTP verb spec" verb-spec)))))
(defun content-type-spec-or-lose-1 (type-spec)
(labels ((type-designator-to-type (designator)
(let ((class (find-content-class designator)))
(if class (class-name class)
(error "Sorry, don't know the content-type ~a" type-spec)))))
(cond ((and type-spec
(listp type-spec))
(list (first type-spec)
(type-designator-to-type (second type-spec))))
((or (keywordp type-spec)
(stringp type-spec))
(list 'snooze-types:type (type-designator-to-type type-spec)))
(type-spec
(list type-spec (type-designator-to-type t))))))
(defun content-type-spec-or-lose (type-spec verb)
(cond ((subtypep verb 'snooze-verbs:content-verb)
(content-type-spec-or-lose-1 type-spec))
((and type-spec (listp type-spec))
(assert (eq t (second type-spec))
nil
"For verb ~a, no specializations on Content-Type are allowed"
verb)
type-spec)
(t
(list type-spec t))))
(defun ensure-atom (thing)
  "Return THING if it is an atom, else the leftmost atom reached by
descending CARs.  Tests with CONSP (not LISTP) so that NIL returns NIL
instead of recursing forever on (FIRST NIL)."
  (if (consp thing)
      (ensure-atom (first thing))
      thing))
(defun ensure-uri (maybe-uri)
  ;; Coerce MAYBE-URI (a string or an already-parsed QURI:URI) to a
  ;; QURI:URI object; any other type signals TYPE-ERROR via ETYPECASE.
  (etypecase maybe-uri
    (string (quri:uri maybe-uri))
    (quri:uri maybe-uri)))
(defun parse-resource (uri)
"Parse URI for a resource and how it should be called.
Honours of *RESOURCE-NAME-FUNCTION*, *RESOURCES-FUNCTION*,
*HOME-RESOURCE* and *URI-CONTENT-TYPES-FUNCTION*.
Returns nil if the resource cannot be found, otherwise returns 3
values: RESOURCE, URI-CONTENT-TYPES and RELATIVE-URI. RESOURCE is a
generic function verifying RESOURCE-P discovered in URI.
URI-CONTENT-TYPES is a list of subclasses of SNOOZE-TYPES:CONTENT
discovered in directly URI by
*URI-CONTENT-TYPES-FUNCTION*. RELATIVE-URI is the remaining URI after
these discoveries."
(let ((uri (ensure-uri uri))
uri-stripped-of-content-type-info
uri-content-types)
(when *uri-content-types-function*
(multiple-value-setq (uri-content-types uri-stripped-of-content-type-info)
(funcall *uri-content-types-function*
(quri:render-uri uri nil))))
(let* ((uri (ensure-uri (or uri-stripped-of-content-type-info
uri))))
(multiple-value-bind (resource-name relative-uri)
(funcall *resource-name-function*
(quri:render-uri uri))
(setq resource-name (and resource-name
(plusp (length resource-name))
(ignore-errors
(quri:url-decode resource-name))))
(values (find-resource (or resource-name
*home-resource*)
:filter *resource-filter*)
(mapcar #'find-content-class uri-content-types)
relative-uri)))))
(defun content-classes-in-accept-string (string)
(labels ((expand (class)
(cons class
(reduce
#'append
(mapcar #'expand
(closer-mop:class-direct-subclasses class))))))
(loop for media-range-and-params in (cl-ppcre:split "\\s*,\\s*" string)
for class = (parse-content-type-header media-range-and-params)
when class
append (expand class))))
(defun parse-content-type-header (string)
"Return a class associated with the content-type described by STRING.
As a second value, return what RFC2388:PARSE-HEADER"
(let* ((parsed (rfc2388:parse-header string :value))
(designator (second parsed)))
(values (find-content-class designator)
parsed)))
(defun find-verb-or-lose (designator)
  "Return a fresh instance of the SNOOZE-VERBS class named by DESIGNATOR.
Signals an error when no such verb class exists."
  (make-instance
   (or (probe-class-sym (intern (string-upcase designator)
                                :snooze-verbs))
       (error "Can't find HTTP verb for designator ~a!" designator))))
(defun gf-primary-method-specializer (gf args ct-arg-pos)
  "Compute proper content-type for calling GF with ARGS"
  ;; Return the specializer at position CT-ARG-POS of the most specific
  ;; applicable method (COMPUTE-APPLICABLE-METHODS returns methods in
  ;; precedence order), or NIL when no method is applicable.
  (let ((applicable (compute-applicable-methods gf args)))
    (when applicable
      (nth ct-arg-pos (closer-mop:method-specializers (first applicable))))))
Internal symbols of : SNOOZE
(in-package :snooze)
(defun check-arglist-compatible (resource args)
(let ((lambda-list (closer-mop:generic-function-lambda-list
resource)))
(handler-case
(let ((*read-eval* nil))
(handler-bind ((warning #'muffle-warning))
(eval `(apply (lambda ,lambda-list
t)
'(t t ,@args)))))
(error (e)
(error 'incompatible-lambda-list
:actual-args args
:lambda-list (cddr lambda-list)
:format-control "Too many, too few, or unsupported ~
query arguments for REST resource ~a"
:format-arguments
(list (resource-name resource))
:original-condition e)))))
(defun check-optional-args (opt-values &optional warn-p)
  ;; Enforce that NILs appear only as a suffix of OPT-VALUES: once one
  ;; &OPTIONAL value is NIL, all the following must be NIL too.  With
  ;; WARN-P, signal a STYLE-WARNING instead of an error (used at
  ;; macroexpansion time; the error variant runs at runtime).
  (let ((nil-tail
          (member nil opt-values)))
    (unless (every #'null (rest nil-tail))
      (if warn-p
          (warn 'style-warning :format-control
                "The NIL defaults to a genpath-function's &OPTIONALs ~
must be at the end")
          (error "The NILs to a genpath-function's &OPTIONALs ~
must be at the end")))))
(defun genpath-fn-lambda-list (all-kwargs
augmented-optional
required
rest
aok-p)
"Helper for MAKE-GENPATH-FORM"
`(,@required
&optional
,@augmented-optional
,@(if rest
(warn 'style-warning
:format-control
"&REST ~a is not supported for genpath-functions"
:format-arguments (list rest)))
&key
,@all-kwargs
,@(if aok-p `(&allow-other-keys))))
(defun make-genpath-form (genpath-fn-name resource-sym lambda-list)
  "Build a DEFUN form for GENPATH-FN-NAME, a URI-generating function.
The generated function takes LAMBDA-LIST's required, optional and
keyword arguments and returns the URI for RESOURCE-SYM as computed by
ARGUMENTS-TO-URI.  This restores the LET* binding form whose opening
line was lost in this copy of the source, leaving the bindings dangling."
  (multiple-value-bind (required optional rest kwargs aok-p aux key-p)
      (alexandria:parse-ordinary-lambda-list lambda-list)
    (declare (ignore aux key-p))
    (let* (;; Optionals and keywords get fresh SUPPLIED-P symbols so we
           ;; can tell "passed NIL" apart from "not passed".
           (augmented-optional
             (loop for (name default nil) in optional
                   collect `(,name ,default ,(gensym))))
           (augmented-kwargs
             (loop for (kw-and-sym default) in kwargs
                   collect `(,kw-and-sym ,default ,(gensym))))
           (all-kwargs
             augmented-kwargs)
           (required-args-form
             `(list ,@required))
           (optional-args-form
             `(list ,@(loop for (name default supplied-p) in augmented-optional
                            collect `(if ,supplied-p ,name
                                         (or ,name ,default)))))
           (keyword-arguments-form
             `(remove-if #'null
                         (list
                          ,@(loop for (kw-and-sym default supplied-p)
                                    in augmented-kwargs
                                  for (nil sym) = kw-and-sym
                                  collect `(cons (intern
                                                  (symbol-name ',sym)
                                                  (find-package :KEYWORD))
                                                 (if ,supplied-p
                                                     ,sym
                                                     (or ,sym
                                                         ,default)))))
                         :key #'cdr)))
      ;; Optional args are checked at macroexpansion time...
      (check-optional-args (mapcar #'second optional) 'warn-p)
      `(progn
         (defun ,genpath-fn-name
             ,(genpath-fn-lambda-list
               all-kwargs
               augmented-optional
               required
               rest
               aok-p)
           ;; ...and again at runtime.
           (check-optional-args ,optional-args-form)
           (arguments-to-uri
            (find-resource ',resource-sym)
            (append
             ,required-args-form
             (remove nil ,optional-args-form))
            ,keyword-arguments-form))))))
(defun defroute-1 (name args)
  "Expand a DEFROUTE into a DEFRESOURCE (when needed) plus a DEFMETHOD.
This restores the LET* binding form whose opening line was lost in this
copy of the source, leaving the bindings dangling."
  (let* (;; find the qualifiers and lambda list
         (first-parse
           (multiple-value-list
            (parse-defroute-args args)))
         (qualifiers (first first-parse))
         (lambda-list (second first-parse))
         (body (third first-parse))
         ;; now parse body
         (parsed-body (multiple-value-list (alexandria:parse-body body)))
         (remaining (first parsed-body))
         (declarations (second parsed-body))
         (docstring (third parsed-body))
         ;; Add syntactic sugar for the first two specializers in the
         ;; lambda list
         (verb-spec (verb-spec-or-lose (first lambda-list)))
         (type-spec (content-type-spec-or-lose (second lambda-list)
                                               (second verb-spec)))
         (proper-lambda-list
           `(,verb-spec ,type-spec ,@(nthcdr 2 lambda-list)))
         (simplified-lambda-list
           (mapcar #'ensure-atom proper-lambda-list)))
    `(progn
       (unless (find-resource ',name)
         (defresource ,name ,simplified-lambda-list))
       (defmethod ,name ,@qualifiers
         ,proper-lambda-list
         ,@(if docstring `(,docstring))
         ,@declarations
         ,@remaining))))
(defun defgenpath-1 (function resource)
  ;; Expand DEFGENPATH: build the path-generating FUNCTION from the
  ;; resource's lambda list, dropping the two specialized VERB and
  ;; CONTENT-TYPE arguments.
  (make-genpath-form function resource
                     (nthcdr 2 (closer-mop:generic-function-lambda-list
                                (let ((probe (find-resource resource)))
                                  (assert probe nil
                                          "Cannot find the resource ~a"
                                          resource)
                                  probe)))))
(defun defresource-1 (name lambda-list options)
(let* ((genpath-form)
(defgeneric-args
(loop for option in options
for routep = (eq :route (car option))
for (qualifiers spec-list body)
= (and routep
(multiple-value-list
(parse-defroute-args (cdr option))))
for verb-spec = (and routep
(verb-spec-or-lose (first spec-list)))
for type-spec = (and routep
(content-type-spec-or-lose
(second spec-list)
(second verb-spec)))
if routep
collect `(:method
,@qualifiers
(,verb-spec ,type-spec ,@(nthcdr 2 spec-list))
,@body)
else if (eq :genpath (car option))
do (setq genpath-form
(make-genpath-form (second option) name
(nthcdr 2 lambda-list)))
else
collect option))
(simplified-lambda-list (mapcar #'(lambda (argspec)
(ensure-atom argspec))
lambda-list)))
`(progn
,@(if genpath-form `(,genpath-form))
(defgeneric ,name ,simplified-lambda-list
(:generic-function-class resource-generic-function)
,@defgeneric-args))))
(defmethod explain-condition-failsafe (condition resource &optional verbose-p)
(declare (ignore resource))
(let* ((original-condition (and (typep condition 'resignalled-condition)
(original-condition condition)))
(status-code (or (and original-condition
(typep original-condition 'http-condition)
(status-code original-condition))
500)))
(with-output-to-string (s)
(cond (verbose-p
(format s "~a" condition)
(explain-failsafe condition s)
(loop for (condition backtrace) in *useful-backtraces*
do (format s "~&~%Here's a backtrace for condition ~s~
~&~a" condition backtrace)))
(t
(format s "~a ~a"
status-code
(reason-for status-code)))))))
(define-condition http-condition (simple-condition)
((status-code :initarg :status-code
:initform (error "Must supply a HTTP status code.")
:reader status-code))
(:default-initargs :format-control "HTTP condition"))
(define-condition http-error (http-condition simple-error) ()
(:default-initargs
:format-control "HTTP Internal Server Error"
:status-code 500))
(define-condition no-such-resource (http-condition) ()
(:default-initargs
:status-code 404
:format-control "Resource does not exist"))
(define-condition invalid-resource-arguments (http-condition) ()
(:default-initargs
:status-code 400
:format-control "Resource exists but invalid arguments passed"))
(define-condition resignalled-condition ()
((original-condition :initarg :original-condition
:initform (error "Must supply an original condition")
:reader original-condition)))
(define-condition unconvertible-argument
(invalid-resource-arguments resignalled-condition)
((unconvertible-argument-value :initarg :unconvertible-argument-value
:accessor unconvertible-argument-value)
(unconvertible-argument-key :initarg :unconvertible-argument-key
:accessor unconvertible-argument-key))
(:default-initargs
:format-control "An argument in the URI cannot be read"))
(define-condition incompatible-lambda-list
(invalid-resource-arguments resignalled-condition)
((lambda-list :initarg :lambda-list
:initform (error "Must supply :LAMBDA-LIST")
:accessor lambda-list)
(actual-args :initarg :actual-args
:initform (error "Must supply :ACTUAL-ARGS")
:accessor actual-args))
(:default-initargs
:format-control "An argument in the URI cannot be read"))
(define-condition invalid-uri-structure
(invalid-resource-arguments resignalled-condition)
((invalid-uri :initarg :invalid-uri
:initform (error "Must supply the invalid URI")
:accessor invalid-uri))
(:default-initargs
:format-control "The URI structure cannot be converted into arguments"))
(define-condition unsupported-content-type (http-error) ()
(:default-initargs
:status-code 501
:format-control "Content type is not supported"))
(define-condition no-such-route (http-condition) ()
(:default-initargs
:format-control "Resource exists but no such route"))
(define-condition error-when-explaining (simple-error resignalled-condition) ()
(:default-initargs
:format-control "An error occurred when trying to explain a condition"))
(defmethod print-object ((c http-condition) s)
(print-unreadable-object (c s :type t)
(format s "~a: ~?" (status-code c)
(simple-condition-format-control c)
(simple-condition-format-arguments c))))
(defmethod print-object ((c resignalled-condition) s)
(print-unreadable-object (c s :type t)
(princ (original-condition c) s)))
(defmethod explain-failsafe ((c condition) s)
)
(defmethod explain-failsafe ((c error-when-explaining) s)
(format s "~& SNOOZE:EXPLAIN-CONDITION is missing a method to politely explain:~
~& ~a~
~& to the client."
(original-condition c)))
(defmethod explain-failsafe ((c unconvertible-argument) s)
(format s "~& SNOOZE:URI-TO-ARGUMENTS caught a ~a when converting:~
~& ~a=~a~
~& into Lisp objects to give to your route."
(type-of (original-condition c))
(unconvertible-argument-key c)
(unconvertible-argument-value c)))
(defmethod explain-failsafe ((c invalid-uri-structure) s)
(format s "~& SNOOZE:URI-TO-ARGUMENTS can't grok this URI:~
~& ~a" (invalid-uri c)))
(defmethod explain-failsafe ((c incompatible-lambda-list) s)
(format s "~& Snooze failed to fit:~
~& ~s~
~& to the lambda list:~
~& ~a~
~& which produced a ~a which your Lisp describes as:~
~& ~a"
(actual-args c) (lambda-list c)
(type-of (original-condition c))
(original-condition c)))
(defmethod explain-failsafe :before ((c resignalled-condition) s)
(format s "~&~%You got a ~a because:~% " (type-of c)))
(defmethod explain-failsafe :after ((c resignalled-condition) s)
(explain-failsafe (original-condition c) s))
(defmethod initialize-instance :after ((e http-error) &key)
(assert (<= 500 (status-code e) 599) nil
"An HTTP error must have a status code between 500 and 599"))
(defun matching-content-type-or-lose (resource verb args try-list)
  "Check RESOURCE for route matching VERB, TRY-LIST and ARGS.
TRY-LIST, a list of subclasses of SNOOZE-TYPES:CONTENT, is iterated.
The first subclass for which RESOURCE has a matching specializer is
used to create an instance, which is returned. If none is found error
out with NO-SUCH-ROUTE."
  (or (some (lambda (maybe)
              (when (gf-primary-method-specializer
                     resource
                     (list* verb maybe args)
                     1)
                maybe))
            (mapcar #'make-instance try-list))
      (error 'no-such-route
             ;; Status-code literals restored (they sat on comment lines
             ;; that were stripped from this copy of the source, leaving
             ;; an empty IF): 415 for receiving verbs, 406 for sending
             ;; verbs, 501 when TRY-LIST is empty.
             :status-code (if try-list
                              (if (destructive-p verb)
                                  415 ; unsupported media type
                                  406 ; not acceptable
                                  )
                              ;; FIXME, make "unimplemented" more pervasive
                              501 ; unimplemented
                              ))))
;; Entries are (CONDITION BACKTRACE-STRING), collected by
;; SAVING-USEFUL-BACKTRACE while *CATCH-ERRORS* is non-NIL and consumed
;; by EXPLAIN-CONDITION-FAILSAFE when explaining verbosely.
(defvar *useful-backtraces* nil "Useful backtraces.")
(defmacro saving-useful-backtrace (args &body body)
  "Run BODY, recording backtraces of conditions signalled from it.
While *CATCH-ERRORS* is non-NIL, each condition signalled in BODY is
pushed, together with its printed backtrace, onto *USEFUL-BACKTRACES*
(at most once per condition object).  The handler returns normally, so
it only observes conditions; signalling then proceeds outward as usual.
ARGS is currently unused."
  (declare (ignore args))
  `(handler-bind
       ;; The type T matches every condition.
       ((t
         (lambda (e)
           (when *catch-errors*
             (pushnew (list e
                            (with-output-to-string (s)
                              (uiop/image:print-condition-backtrace
                               e :stream s)))
                      *useful-backtraces*
                      ;; Same condition object => don't record it twice.
                      :test (lambda (a b) (eq (first a) (first b))))))))
     ,@body))
(defun call-brutally-explaining-conditions (fn)
  "Call FN, explaining any condition in a failsafe, text/plain way.
Establishes the EXPLAIN-VERBOSELY and FAILSAFE-EXPLAIN restarts and —
depending on *CATCH-ERRORS* and *CATCH-HTTP-CONDITIONS* — invokes one
of them, which THROWs a (code payload content-type) response triple.
This copy of the source lost the opening paren of the third COND clause
(it sat on a stripped comment line) and kept a stray prose line; both
are restored here, matching the intact copy earlier in the file."
  (let (code condition original-condition *useful-backtraces*)
    (flet ((explain (verbose-p)
             ;; Unwind to HANDLE-REQUEST-1's CATCH with a full response.
             (throw 'response
               (values code
                       (explain-condition-failsafe condition
                                                   *resource*
                                                   verbose-p)
                       (content-class-name 'text/plain)))))
      (restart-case
          (handler-bind
              ((resignalled-condition
                ;; Remember the originally-signalled condition so its
                ;; status code (if it's an HTTP condition) can be reused.
                (lambda (e)
                  (setq original-condition (original-condition e)
                        code
                        (when (typep original-condition 'http-condition)
                          (status-code original-condition)))))
               (error
                (lambda (e)
                  (setq code (or code 500)
                        condition e)
                  (cond ((eq *catch-errors* :verbose)
                         (invoke-restart 'explain-verbosely))
                        (*catch-errors*
                         (invoke-restart 'failsafe-explain))
                        (;; HACK! notice that a non-error
                         ;; `http-condition' (like a simple redirect)
                         ;; with `*catch-errors*' = NIL and
                         ;; `*catch-http-conditions*' = T will land
                         ;; us in this branch. We do not want to
                         ;; break in this case, so explain succintly.
                         (and original-condition
                              (typep original-condition 'http-condition)
                              (not (typep original-condition 'error)))
                         (invoke-restart 'failsafe-explain)))))
               (http-condition
                (lambda (c)
                  (setq code (status-code c) condition c)
                  (cond ((eq *catch-http-conditions* :verbose)
                         (invoke-restart 'explain-verbosely))))))
            (saving-useful-backtrace () (funcall fn)))
        (explain-verbosely () :report
          (lambda (s)
            (format s "Explain ~a condition more verbosely" code))
          (explain t))
        (failsafe-explain () :report
          (lambda (s) (format s "Explain ~a condition very succintly" code))
          (explain nil))))))
(defun call-politely-explaining-conditions (client-accepts fn)
  "Call FN, explaining conditions in a content-type the client accepts.
CLIENT-ACCEPTS is a list of content-type designators.  When a condition
is signalled and a specialized EXPLAIN-CONDITION method exists for one
of those types, the POLITELY-EXPLAIN restart THROWs a response triple
explaining the condition in that type; otherwise an
ERROR-WHEN-EXPLAINING is signalled, to be handled by the brutal
explainer established further out."
  (let (code
        condition
        accepted-type)
    (labels ((accepted-type-for (condition)
               ;; First accepted type for which a specialized
               ;; EXPLAIN-CONDITION primary method is applicable.
               (some (lambda (wanted)
                       (when (gf-primary-method-specializer
                              #'explain-condition
                              (list condition *resource* wanted)
                              1)
                         wanted))
                     (mapcar #'make-instance client-accepts)))
             (check-politely-explain ()
               (unless accepted-type
                 (error 'error-when-explaining
                        :format-control "No ~a to politely explain ~a to client"
                        :format-arguments
                        (list 'explain-condition (type-of condition))
                        :original-condition condition))))
      (restart-case
          (handler-bind ((condition
                          ;; Observe every condition: remember it plus the
                          ;; best content-type to explain it in.
                          (lambda (c)
                            (setq
                             condition c
                             accepted-type (accepted-type-for condition))))
                         (http-condition
                          (lambda (c)
                            (setq code (status-code c))
                            (when (and *catch-http-conditions*
                                       (not (eq *catch-http-conditions*
                                                :verbose)))
                              (check-politely-explain)
                              (invoke-restart 'politely-explain))))
                         (error
                          (lambda (e)
                            (declare (ignore e))
                            (setq code 500)
                            (when (and *catch-errors*
                                       (not (eq *catch-errors* :verbose)))
                              (check-politely-explain)
                              (invoke-restart 'politely-explain)))))
            (saving-useful-backtrace () (funcall fn)))
        (politely-explain ()
          :report (lambda (s)
                    (format s "Politely explain to client in ~a"
                            accepted-type))
          :test (lambda (c) (declare (ignore c)) accepted-type)
          (throw 'response
            (handler-case
                (values code
                        (explain-condition condition *resource* accepted-type)
                        (content-class-name accepted-type))
              ;; If the polite explainer itself errors, resignal so the
              ;; brutal explainer can take over.
              (error (e)
                (error 'error-when-explaining
                       :format-control "Error when explaining ~a"
                       :format-arguments (list (type-of e))
                       :original-condition condition)))))
        (auto-catch ()
          :report (lambda (s)
                    (format s "Start catching ~a automatically"
                            (if (typep condition 'http-condition)
                                "HTTP conditions" "errors")))
          :test (lambda (c)
                  (if (typep c 'http-condition)
                      (not *catch-http-conditions*)
                      (not *catch-errors*)))
          ;; Interactive convenience: flip the relevant *CATCH-...*
          ;; switch, then retry the most appropriate explaining restart.
          (if (typep condition 'http-condition)
              (setq *catch-http-conditions* t)
              (setq *catch-errors* t))
          (if (find-restart 'politely-explain)
              (invoke-restart 'politely-explain)
              (if (find-restart 'failsafe-explain)
                  (invoke-restart 'failsafe-explain))))))))
(defmacro brutally-explaining-conditions (() &body body)
"Explain conditions in BODY in a failsafe way.
Honours the :VERBOSE option to *CATCH-ERRORS* and *CATCH-HTTP-CONDITIONS*."
`(call-brutally-explaining-conditions (lambda () ,@body)))
(defmacro politely-explaining-conditions ((client-accepts) &body body)
"Explain conditions in BODY taking the client accepts into account.
Honours *CATCH-ERRORS* and *CATCH-HTTP-CONDITIONS*"
`(call-politely-explaining-conditions ,client-accepts (lambda () ,@body)))
(defvar *resource*)
(setf (documentation '*resource* 'variable)
"Bound early in HANDLE-REQUEST-1 to nil or to a RESOURCE.
Used by POLITELY-EXPLAINING-CONDITIONS and
BRUTALLY-EXPLAINING-CONDITIONS to pass a resource to
EXPLAIN-CONDITION.")
(defun handle-request-1 (uri method accept &optional content-type)
(catch 'response
(let (*resource*
content-classes-encoded-in-uri
relative-uri)
(brutally-explaining-conditions ()
(multiple-value-setq (*resource* content-classes-encoded-in-uri relative-uri)
(parse-resource uri))
(let* ((verb (find-verb-or-lose method))
(client-accepted-content-types
(or (append content-classes-encoded-in-uri
(content-classes-in-accept-string accept))
(list (find-content-class 'snooze-types:text/plain)))))
(politely-explaining-conditions (client-accepted-content-types)
(unless *resource*
(error 'no-such-resource
:format-control
"So sorry, but that URI doesn't match any REST resources"))
(multiple-value-bind (converted-plain-args converted-keyword-args)
(handler-bind
((error
(lambda (e)
(when *catch-errors*
(error 'invalid-uri-structure
:format-control
"Caught ~a in URI-TO-ARGUMENTS"
:format-arguments (list (type-of e))
:original-condition e
:invalid-uri relative-uri)))))
(uri-to-arguments *resource* relative-uri))
(let ((converted-arguments
(append converted-plain-args
(loop for (a . b) in converted-keyword-args
collect a collect b))))
(check-arglist-compatible *resource* converted-arguments)
(let* ((matching-ct
(typecase verb
(snooze-verbs:delete nil)
(t
(matching-content-type-or-lose
*resource*
verb
converted-arguments
(typecase verb
(snooze-verbs:sending-verb
client-accepted-content-types)
(snooze-verbs:receiving-verb
(list (or (and content-classes-encoded-in-uri
(first content-classes-encoded-in-uri))
(parse-content-type-header content-type)
(error 'unsupported-content-type))))))))))
(multiple-value-bind (payload code payload-ct)
(apply *resource* verb matching-ct converted-arguments)
(unless code
(setq code (if payload
)))
(cond (payload-ct
(when (and (destructive-p verb)
(not (typep payload-ct
(class-of matching-ct))))
(warn "Route declared ~a as its payload ~
content-type, but it matched ~a"
payload-ct matching-ct)))
(t
(setq payload-ct
(if (destructive-p verb)
matching-ct))))
(throw 'response (values code
payload
(content-class-name
payload-ct)))))))))))))
(defun default-resource-name (uri)
"Default value for *RESOURCE-NAME-FUNCTION*, which see."
(if (string= "" uri)
""
(let* ((first-slash-or-qmark (position-if #'(lambda (char)
(member char '(#\/ #\?)))
uri
:start 1)))
(values (cond (first-slash-or-qmark
(subseq uri 1 first-slash-or-qmark))
(t
(subseq uri 1)))
(if first-slash-or-qmark
(subseq uri first-slash-or-qmark))))))
(defun search-for-extension-content-type (uri-path)
"Default value for *URI-CONTENT-TYPES-FUNCTION*, which see."
(multiple-value-bind (matchp groups)
(cl-ppcre:scan-to-strings "([^\\.]+)\\.(\\w+)([^/]*)$" uri-path)
(let ((content-type-class (and matchp
(find-content-class
(gethash (aref groups 1)
*mime-type-hash*)))))
(when content-type-class
(values
(list content-type-class)
(format nil "~a~a" (aref groups 0) (aref groups 2)))))))
(defun all-defined-resources ()
"Default value for *RESOURCES-FUNCTION*, which see."
snooze-common:*all-resources*)
Reading and writing URI 's
(defun resource-package (resource)
(symbol-package (resource-name resource)))
(defun uri-to-arguments-1 (resource relative-uri)
"Do actual work for default method of URI-TO-ARGUMENTS."
(labels ((probe (str &optional key)
(handler-bind
((error (lambda (e)
(when *catch-errors*
(error 'unconvertible-argument
:unconvertible-argument-value str
:unconvertible-argument-key key
:original-condition e
:format-control
"Malformed arg for resource ~a"
:format-arguments
(list (resource-name resource)))))))
(progn
(let ((*read-eval* nil))
(read-for-resource resource str)))))
(probe-keyword (str)
(let* ((probe (probe str)))
still refuse to intern it in the : KEYWORD pacakge
(if (and (symbolp probe)
(symbol-package probe))
(intern (symbol-name probe) :keyword)
(error 'invalid-resource-arguments
:format-control "Unknown keyword for resource ~a"
:format-arguments (list (resource-name resource)))))))
(when relative-uri
(let* ((relative-uri (ensure-uri relative-uri))
(path (quri:uri-path relative-uri))
(query (quri:uri-query relative-uri))
(fragment (quri:uri-fragment relative-uri))
(plain-args (and path
(plusp (length path))
(cl-ppcre:split "/" (subseq path 1))))
(keyword-args
(append
(and
query
(loop for maybe-pair in (cl-ppcre:split "[;&]" query)
for (undecoded-key-name undecoded-value-string)
= (scan-to-strings* "(.*)=(.*)" maybe-pair)
when (and undecoded-key-name undecoded-value-string)
collect
(cons (quri:url-decode undecoded-key-name)
(quri:url-decode undecoded-value-string)))))))
(values
(mapcar #'probe (mapcar #'quri:url-decode plain-args))
(loop for (key-str . value-str) in keyword-args
collect (cons (probe-keyword key-str)
(probe value-str key-str))
into keyword-alist
finally
(return
(append
keyword-alist
(if fragment
`((snooze:fragment . ,(probe fragment))))))))))))
(defun arguments-to-uri-1 (resource plain-args keyword-args)
"Do actual work for default method of ARGUMENTS-TO-URI."
(flet ((encode (thing &optional keyword)
(quri:url-encode
(cond (keyword
(string-downcase thing))
(t
(write-for-resource resource thing)
)))))
(let* ((plain-part (format nil "/~{~a~^/~}"
(mapcar #'encode plain-args)))
(query-part (and keyword-args
(format nil "?~{~a=~a~^&~}"
(loop for (k . v) in keyword-args
collect (encode k t)
collect (encode v))))))
(let ((string (format nil "/~a~a~a"
(string-downcase (resource-name resource))
plain-part
(or query-part ""))))
string))))
(defun read-for-resource-1 (resource string)
"Do actual work for default method of READ-FOR-RESOURCE."
(let ((*package* (resource-package resource)))
(snooze-safe-simple-read:safe-simple-read-from-string string t)))
(defun write-for-resource-1 (resource object)
"Do actual work for default-method of WRITE-FOR-RESOURCE."
(let ((*package* (symbol-package (resource-name resource)))
(*print-case* :downcase))
(if (and (symbolp object)
(not (symbol-package object)))
(princ-to-string (string-downcase (symbol-name object)))
(write-to-string object))))
|
5de7c18756fa54b94a9d7b5acdf94522fb13a2c1dd14db3a3b7cb72038e83526 | racket/rhombus-prototype | indirect-static-info-key.rkt | #lang racket/base
(provide #%indirect-static-info)
(define #%indirect-static-info #f)
| null | https://raw.githubusercontent.com/racket/rhombus-prototype/fbe0a400eea3ab12cd5155704d18cada0f76ced9/rhombus/private/indirect-static-info-key.rkt | racket | #lang racket/base
(provide #%indirect-static-info)
(define #%indirect-static-info #f)
|
|
1dad960ae12566b42f4fda204966e6521a1ba91b86de3e92717bf8a05a2f9157 | gaborcs/lambda-terminal | Primitive.hs | # LANGUAGE LambdaCase #
module Primitive where
import qualified Type as T
import qualified Value as V
data Primitive
= Plus
| Minus
| Times
| Signum
| Concat
deriving (Eq, Read, Show, Bounded, Enum)
getDisplayName :: Primitive -> String
getDisplayName p = case p of
Plus -> "+"
Minus -> "-"
Times -> "*"
Signum -> "signum"
Concat -> "concat"
getType :: Primitive -> T.Type v d
getType p = case p of
Plus -> binaryIntegerOpType T.Integer
Minus -> binaryIntegerOpType T.Integer
Times -> binaryIntegerOpType T.Integer
Signum -> T.fn T.Integer T.Integer
Concat -> T.fn T.String (T.fn T.String T.String)
getValue :: Primitive -> V.Value c
getValue p = case p of
Plus -> binaryIntegerOpValue $ \a b -> V.Integer (a + b)
Minus -> binaryIntegerOpValue $ \a b -> V.Integer (a - b)
Times -> binaryIntegerOpValue $ \a b -> V.Integer (a * b)
Signum -> V.Fn $ \case
Just (V.Integer a) -> Just $ V.Integer $ signum a
_ -> Nothing
Concat -> twoParamFnVal $ \case
(Just (V.String s1), Just (V.String s2)) -> Just $ V.String $ s1 ++ s2
_ -> Nothing
binaryIntegerOpType :: T.Type v d -> T.Type v d
binaryIntegerOpType resultType = T.fn T.Integer $ T.fn T.Integer resultType
binaryIntegerOpValue :: (Integer -> Integer -> V.Value c) -> V.Value c
binaryIntegerOpValue f = twoParamFnVal $ \case
(Just (V.Integer a), Just (V.Integer b)) -> Just $ f a b
_ -> Nothing
-- only evaluates args if and when f does
twoParamFnVal :: ((Maybe (V.Value c), Maybe (V.Value c)) -> Maybe (V.Value c)) -> V.Value c
twoParamFnVal f = V.Fn $ \maybeVal1 -> Just $ V.Fn $ \maybeVal2 -> f (maybeVal1, maybeVal2)
| null | https://raw.githubusercontent.com/gaborcs/lambda-terminal/7f2638f2f3a562e0b60248da7652bda81be76adf/src/Primitive.hs | haskell | only evaluates args if and when f does | # LANGUAGE LambdaCase #
module Primitive where
import qualified Type as T
import qualified Value as V
data Primitive
= Plus
| Minus
| Times
| Signum
| Concat
deriving (Eq, Read, Show, Bounded, Enum)
getDisplayName :: Primitive -> String
getDisplayName p = case p of
Plus -> "+"
Minus -> "-"
Times -> "*"
Signum -> "signum"
Concat -> "concat"
getType :: Primitive -> T.Type v d
getType p = case p of
Plus -> binaryIntegerOpType T.Integer
Minus -> binaryIntegerOpType T.Integer
Times -> binaryIntegerOpType T.Integer
Signum -> T.fn T.Integer T.Integer
Concat -> T.fn T.String (T.fn T.String T.String)
getValue :: Primitive -> V.Value c
getValue p = case p of
Plus -> binaryIntegerOpValue $ \a b -> V.Integer (a + b)
Minus -> binaryIntegerOpValue $ \a b -> V.Integer (a - b)
Times -> binaryIntegerOpValue $ \a b -> V.Integer (a * b)
Signum -> V.Fn $ \case
Just (V.Integer a) -> Just $ V.Integer $ signum a
_ -> Nothing
Concat -> twoParamFnVal $ \case
(Just (V.String s1), Just (V.String s2)) -> Just $ V.String $ s1 ++ s2
_ -> Nothing
binaryIntegerOpType :: T.Type v d -> T.Type v d
binaryIntegerOpType resultType = T.fn T.Integer $ T.fn T.Integer resultType
binaryIntegerOpValue :: (Integer -> Integer -> V.Value c) -> V.Value c
binaryIntegerOpValue f = twoParamFnVal $ \case
(Just (V.Integer a), Just (V.Integer b)) -> Just $ f a b
_ -> Nothing
twoParamFnVal :: ((Maybe (V.Value c), Maybe (V.Value c)) -> Maybe (V.Value c)) -> V.Value c
twoParamFnVal f = V.Fn $ \maybeVal1 -> Just $ V.Fn $ \maybeVal2 -> f (maybeVal1, maybeVal2)
|
a37d57b80422c5ff00186ae6dab084b77fba1b7655cce9e34e7a4e99ebb441a8 | codinuum/volt | configurationNew.ml |
* This file is part of Bolt .
* Copyright ( C ) 2009 - 2012 .
*
* Bolt is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation ; either version 3 of the License , or
* ( at your option ) any later version .
*
* Bolt is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program . If not , see < / > .
* This file is part of Bolt.
* Copyright (C) 2009-2012 Xavier Clerc.
*
* Bolt is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* Bolt is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see </>.
*)
let load filename =
let ch = open_in filename in
let lexbuf = Lexing.from_channel ch in
try
let res = ConfigParser.file ConfigLexer.token lexbuf in
close_in_noerr ch;
res
with e ->
close_in_noerr ch;
raise e
| null | https://raw.githubusercontent.com/codinuum/volt/546207693ef102a2f02c85af935f64a8f16882e6/src/library/configurationNew.ml | ocaml |
* This file is part of Bolt .
* Copyright ( C ) 2009 - 2012 .
*
* Bolt is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation ; either version 3 of the License , or
* ( at your option ) any later version .
*
* Bolt is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program . If not , see < / > .
* This file is part of Bolt.
* Copyright (C) 2009-2012 Xavier Clerc.
*
* Bolt is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* Bolt is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see </>.
*)
let load filename =
let ch = open_in filename in
let lexbuf = Lexing.from_channel ch in
try
let res = ConfigParser.file ConfigLexer.token lexbuf in
close_in_noerr ch;
res
with e ->
close_in_noerr ch;
raise e
|
|
8069f521baff3a5671a18ac5436f6095887b006f2b8195c5b3dbc82720a8e548 | TyOverby/mono | keys.ml | Copyright ( c ) 2016 - 2017 . All rights reserved .
See LICENSE.md .
See LICENSE.md. *)
(**
* Demonstrates input parsing.
*)
open Notty
open Notty.Infix
open Common
let pps = Format.pp_print_string
let ppi = Format.pp_print_int
let pp_special fmt = function
| `Escape -> pps fmt "ESCAPE"
| `Enter -> pps fmt "ENTER"
| `Tab -> pps fmt "TAB"
| `Backspace -> pps fmt "BACKSPACE"
| `Arrow `Up -> pps fmt "UP"
| `Arrow `Down -> pps fmt "DOWN"
| `Arrow `Left -> pps fmt "LEFT"
| `Arrow `Right -> pps fmt "RIGHT"
| `Page `Up -> pps fmt "PAGE UP"
| `Page `Down -> pps fmt "PAGE DOWN"
| `Home -> pps fmt "HOME"
| `End -> pps fmt "END"
| `Insert -> pps fmt "INSERT"
| `Delete -> pps fmt "DELETE"
| `Function n -> pps fmt "FN"; ppi fmt n
let pp_mods fmt = function
| [] -> ()
| ms -> ms |> List.iter (fun m ->
pps fmt @@ match m with `Meta -> "M" | `Ctrl -> "C" | `Shift -> "S"
)
let pp_mouse fmt = function
| `Release -> pps fmt "Release"
| `Drag -> pps fmt "Drag"
| `Move -> pps fmt "Move"
| `Press k ->
pps fmt "Press ";
pps fmt @@ match k with
| `Left -> "Left"
| `Middle -> "Middle"
| `Right -> "Right"
| `Scroll `Up -> "Scroll Up"
| `Scroll `Down -> "Scroll Down"
let pp_u ppf u = Format.fprintf ppf "U+%04X" (Uchar.to_int u)
let pp_s = Format.pp_print_string
let () =
let pp_mods = I.pp_attr A.(fg lightcyan) pp_mods in
simpleterm ~s:[]
~f:(fun xs x -> Some (List.take 100 (x::xs)))
~imgf:(fun (_, h) xs ->
let msg = I.string "Push keys."
and ks = List.map (function
| `Key ((`ASCII _ | `Uchar _) as c, mods) ->
let u = Unescape.uchar c
and attr = A.(fg lightblue ++ bg black) in
I.uchar ~attr u 1 1 <|> I.strf " %a %a" pp_u u pp_mods mods
| `Key (#Unescape.special as k, mods) ->
let pp = I.pp_attr A.(fg lightgreen) pp_special in
I.strf "%a %a" pp k pp_mods mods
| `Mouse (e, (x, y), mods) ->
let pp = I.pp_attr A.(fg lightmagenta) pp_s in
I.strf "%a %a (%d, %d) %a" pp "MOUSE" pp_mouse e x y pp_mods mods
| `Paste e ->
let pp = I.pp_attr A.(fg lightred) pp_s in
I.strf "%a %s" pp "PASTE" (if e = `Start then "START" else "END")
) xs |> I.vcat in
I.(vsnap ~align:`Top (h - 3) ks <-> void 0 1 <-> msg |> pad ~l:1 ~t:1))
| null | https://raw.githubusercontent.com/TyOverby/mono/94225736a93457d5c9aeed399c4ae1a08b239fd5/vendor/pqwy-notty/examples/keys.ml | ocaml | *
* Demonstrates input parsing.
| Copyright ( c ) 2016 - 2017 . All rights reserved .
See LICENSE.md .
See LICENSE.md. *)
open Notty
open Notty.Infix
open Common
let pps = Format.pp_print_string
let ppi = Format.pp_print_int
let pp_special fmt = function
| `Escape -> pps fmt "ESCAPE"
| `Enter -> pps fmt "ENTER"
| `Tab -> pps fmt "TAB"
| `Backspace -> pps fmt "BACKSPACE"
| `Arrow `Up -> pps fmt "UP"
| `Arrow `Down -> pps fmt "DOWN"
| `Arrow `Left -> pps fmt "LEFT"
| `Arrow `Right -> pps fmt "RIGHT"
| `Page `Up -> pps fmt "PAGE UP"
| `Page `Down -> pps fmt "PAGE DOWN"
| `Home -> pps fmt "HOME"
| `End -> pps fmt "END"
| `Insert -> pps fmt "INSERT"
| `Delete -> pps fmt "DELETE"
| `Function n -> pps fmt "FN"; ppi fmt n
let pp_mods fmt = function
| [] -> ()
| ms -> ms |> List.iter (fun m ->
pps fmt @@ match m with `Meta -> "M" | `Ctrl -> "C" | `Shift -> "S"
)
let pp_mouse fmt = function
| `Release -> pps fmt "Release"
| `Drag -> pps fmt "Drag"
| `Move -> pps fmt "Move"
| `Press k ->
pps fmt "Press ";
pps fmt @@ match k with
| `Left -> "Left"
| `Middle -> "Middle"
| `Right -> "Right"
| `Scroll `Up -> "Scroll Up"
| `Scroll `Down -> "Scroll Down"
let pp_u ppf u = Format.fprintf ppf "U+%04X" (Uchar.to_int u)
let pp_s = Format.pp_print_string
let () =
let pp_mods = I.pp_attr A.(fg lightcyan) pp_mods in
simpleterm ~s:[]
~f:(fun xs x -> Some (List.take 100 (x::xs)))
~imgf:(fun (_, h) xs ->
let msg = I.string "Push keys."
and ks = List.map (function
| `Key ((`ASCII _ | `Uchar _) as c, mods) ->
let u = Unescape.uchar c
and attr = A.(fg lightblue ++ bg black) in
I.uchar ~attr u 1 1 <|> I.strf " %a %a" pp_u u pp_mods mods
| `Key (#Unescape.special as k, mods) ->
let pp = I.pp_attr A.(fg lightgreen) pp_special in
I.strf "%a %a" pp k pp_mods mods
| `Mouse (e, (x, y), mods) ->
let pp = I.pp_attr A.(fg lightmagenta) pp_s in
I.strf "%a %a (%d, %d) %a" pp "MOUSE" pp_mouse e x y pp_mods mods
| `Paste e ->
let pp = I.pp_attr A.(fg lightred) pp_s in
I.strf "%a %s" pp "PASTE" (if e = `Start then "START" else "END")
) xs |> I.vcat in
I.(vsnap ~align:`Top (h - 3) ks <-> void 0 1 <-> msg |> pad ~l:1 ~t:1))
|
323599885733e2a58987fe93db99b30bc7eb30b2d8ac7a1b588a8278e8e6e1f1 | javier-paris/erlang-tcpip | fin_wait_1.erl | %%%-------------------------------------------------------------------
%%% File : fin_wait_1.erl
Author : < >
Description : wait-1 connection state
%%%
Created : 7 Sep 2004 by < >
%%%
%%%
erlang - tcpip , Copyright ( C ) 2004 Javier Paris
%%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% -2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%%-------------------------------------------------------------------
-module(fin_wait_1).
-export([recv/3, send/2, badack_action/3, newdata_action/3,
nonewdata_action/3, data_action/2, fin_action/3,
out_order_action/3, queue/0, read/2, close/0]).
-include("tcp_packet.hrl").
%%%%%%%%%%%%%%%%%%%%%% READER %%%%%%%%%%%%%%%%%%%%%%%%%%%%%
recv(Tcb, Pkt, Writer) ->
tcp_input:process_packet(Tcb, Pkt, fin_wait_1, Writer).
%%%%%%%%%%%%%%%%%%%%%% WRITER %%%%%%%%%%%%%%%%%%%%%%%%%%%%%
send(Tcb, {send, ack}) ->
tcp_packet:send_packet(Tcb, ack);
send(Tcb, rto) ->
tcp_packet:send_packet(Tcb, rto);
send(_, _) ->
ok.
%%%%%%%%%%%%%%%%% TCP INPUT CALLBACKS %%%%%%%%%%%%%%%%%%%%%%
badack_action(_, _, Writer) ->
tcp_con:send_packet(Writer, ack).
nonewdata_action(_, _, _) ->
ok.
newdata_action(Tcb, _, _) ->
{Snd_Una, Snd_Nxt, _, _} = tcb:get_tcbdata(Tcb, snd),
State = tcb:get_tcbdata(Tcb, state),
if
Snd_Una == Snd_Nxt ->
if State == fin_wait_1 ->
tcb:syncset_tcbdata(Tcb, state, fin_wait_2),
ok;
true ->
ok
end;
true ->
no_data_action
end.
data_action(Tcb, Data) ->
tcb:set_tcbdata(Tcb, rdata, Data).
out_order_action(Tcb, Data, Writer) ->
tcb:set_tcbdata(Tcb, out_order, Data),
tcp_con:send_packet(Writer, ack).
fin_action(Tcb, Rcv_Nxt, Writer) ->
{Snd_Una, Snd_Nxt, _, _} = tcb:get_tcbdata(Tcb, snd),
if
Snd_Una == Snd_Nxt ->
tcb:syncset_tcbdata(Tcb, state, time_wait),
tcb:set_tcbdata(Tcb, twtimer, Writer);
true ->
tcb:syncset_tcbdata(Tcb, state, closing)
end,
tcb:set_tcbdata(Tcb, rcv_nxt, seq:add(Rcv_Nxt, 1)),
tcp_con:send_packet(Writer, ack).
%%%%%%%%%%%%%%%%%%%%%%%%%% USER COMMANDS %%%%%%%%%%%%%%%%%%%%%%%%%
queue() ->
{error, connection_closing}.
read(_, Bytes) ->
{ok, Bytes}.
close() ->
{error, connection_closing}.
| null | https://raw.githubusercontent.com/javier-paris/erlang-tcpip/708b57fa37176980cddfd8605867426368d33ed1/src/fin_wait_1.erl | erlang | -------------------------------------------------------------------
File : fin_wait_1.erl
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-------------------------------------------------------------------
READER %%%%%%%%%%%%%%%%%%%%%%%%%%%%%
WRITER %%%%%%%%%%%%%%%%%%%%%%%%%%%%%
TCP INPUT CALLBACKS %%%%%%%%%%%%%%%%%%%%%%
USER COMMANDS %%%%%%%%%%%%%%%%%%%%%%%%% | Author : < >
Description : wait-1 connection state
Created : 7 Sep 2004 by < >
erlang - tcpip , Copyright ( C ) 2004 Javier Paris
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(fin_wait_1).
-export([recv/3, send/2, badack_action/3, newdata_action/3,
nonewdata_action/3, data_action/2, fin_action/3,
out_order_action/3, queue/0, read/2, close/0]).
-include("tcp_packet.hrl").
recv(Tcb, Pkt, Writer) ->
tcp_input:process_packet(Tcb, Pkt, fin_wait_1, Writer).
send(Tcb, {send, ack}) ->
tcp_packet:send_packet(Tcb, ack);
send(Tcb, rto) ->
tcp_packet:send_packet(Tcb, rto);
send(_, _) ->
ok.
badack_action(_, _, Writer) ->
tcp_con:send_packet(Writer, ack).
nonewdata_action(_, _, _) ->
ok.
newdata_action(Tcb, _, _) ->
{Snd_Una, Snd_Nxt, _, _} = tcb:get_tcbdata(Tcb, snd),
State = tcb:get_tcbdata(Tcb, state),
if
Snd_Una == Snd_Nxt ->
if State == fin_wait_1 ->
tcb:syncset_tcbdata(Tcb, state, fin_wait_2),
ok;
true ->
ok
end;
true ->
no_data_action
end.
data_action(Tcb, Data) ->
tcb:set_tcbdata(Tcb, rdata, Data).
out_order_action(Tcb, Data, Writer) ->
tcb:set_tcbdata(Tcb, out_order, Data),
tcp_con:send_packet(Writer, ack).
fin_action(Tcb, Rcv_Nxt, Writer) ->
{Snd_Una, Snd_Nxt, _, _} = tcb:get_tcbdata(Tcb, snd),
if
Snd_Una == Snd_Nxt ->
tcb:syncset_tcbdata(Tcb, state, time_wait),
tcb:set_tcbdata(Tcb, twtimer, Writer);
true ->
tcb:syncset_tcbdata(Tcb, state, closing)
end,
tcb:set_tcbdata(Tcb, rcv_nxt, seq:add(Rcv_Nxt, 1)),
tcp_con:send_packet(Writer, ack).
queue() ->
{error, connection_closing}.
read(_, Bytes) ->
{ok, Bytes}.
close() ->
{error, connection_closing}.
|
6708f56028c4092d11c75cad21f933dbf626e63c7a3457897ba31d7fd41e06a5 | composewell/unicode-transforms | Benchmark.hs | {-# LANGUAGE CPP #-}
# LANGUAGE TemplateHaskell #
-- |
Copyright : ( c ) 2016
--
-- License : BSD-3-Clause
-- Maintainer :
-- Stability : experimental
Portability : GHC
--
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>), (<*>))
#endif
import Control.DeepSeq (NFData)
import Data.Text (Text)
import Path (Dir, Path, Rel, mkRelDir, toFilePath, (</>))
import Path.IO (listDir)
import System.FilePath (dropExtensions, takeFileName)
import Gauge.Main (Benchmark, bench, bgroup, defaultMain, env, nf)
import qualified Data.Text as T
import qualified Data.Text.Normalize as UTText
#ifdef BENCH_ICU
#if MIN_VERSION_text_icu(0,8,0)
import qualified Data.Text.ICU.Normalize2 as TI
#else
import qualified Data.Text.ICU as TI
#endif
textICUFuncs :: [(String, Text -> Text)]
textICUFuncs =
[ ("NFD", TI.normalize TI.NFD)
, ("NFKD", TI.normalize TI.NFKD)
, ("NFC", TI.normalize TI.NFC)
, ("NFKC", TI.normalize TI.NFKC)
]
#endif
unicodeTransformTextFuncs :: [(String, Text -> Text)]
unicodeTransformTextFuncs =
[ ("NFD", UTText.normalize UTText.NFD)
, ("NFKD", UTText.normalize UTText.NFKD)
, ("NFC", UTText.normalize UTText.NFC)
, ("NFKC", UTText.normalize UTText.NFKC)
]
dataDir :: Path Rel Dir
dataDir = $(mkRelDir "benchmark") </> $(mkRelDir "data")
Truncate or expand all datasets to this size to provide a normalized
-- measurement view across all datasets and to reduce the effect of noise
-- because of the datasets being too small.
dataSetSize :: Int
dataSetSize = 1000000
makeBench :: (NFData a, NFData b) => (String, a -> b) -> (String, IO a) -> Benchmark
makeBench (implName, func) (dataName, setup) =
env setup (\txt -> bench (implName ++ "/" ++ dataName) (nf func txt))
strInput :: FilePath -> (String, IO String)
strInput file = (dataName file,
fmap (take dataSetSize . cycle) (readFile file))
where dataName = dropExtensions . takeFileName
txtInput :: FilePath -> (String, IO Text)
txtInput file = second (fmap T.pack) (strInput file)
where second f (a, b) = (a, f b)
main :: IO ()
main = do
dataFiles <- fmap (map toFilePath . snd) (listDir dataDir)
defaultMain $
[
#ifdef BENCH_ICU
bgroup "text-icu"
$ makeBench <$> textICUFuncs <*> (map txtInput dataFiles)
,
#endif
bgroup "unicode-transforms-text"
$ makeBench <$> unicodeTransformTextFuncs
<*> (map txtInput dataFiles)
]
| null | https://raw.githubusercontent.com/composewell/unicode-transforms/34d4d7c4318fb05ac3a35be2de1fcd5902fedfa4/benchmark/Benchmark.hs | haskell | # LANGUAGE CPP #
|
License : BSD-3-Clause
Maintainer :
Stability : experimental
measurement view across all datasets and to reduce the effect of noise
because of the datasets being too small. | # LANGUAGE TemplateHaskell #
Copyright : ( c ) 2016
Portability : GHC
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>), (<*>))
#endif
import Control.DeepSeq (NFData)
import Data.Text (Text)
import Path (Dir, Path, Rel, mkRelDir, toFilePath, (</>))
import Path.IO (listDir)
import System.FilePath (dropExtensions, takeFileName)
import Gauge.Main (Benchmark, bench, bgroup, defaultMain, env, nf)
import qualified Data.Text as T
import qualified Data.Text.Normalize as UTText
#ifdef BENCH_ICU
#if MIN_VERSION_text_icu(0,8,0)
import qualified Data.Text.ICU.Normalize2 as TI
#else
import qualified Data.Text.ICU as TI
#endif
textICUFuncs :: [(String, Text -> Text)]
textICUFuncs =
[ ("NFD", TI.normalize TI.NFD)
, ("NFKD", TI.normalize TI.NFKD)
, ("NFC", TI.normalize TI.NFC)
, ("NFKC", TI.normalize TI.NFKC)
]
#endif
unicodeTransformTextFuncs :: [(String, Text -> Text)]
unicodeTransformTextFuncs =
[ ("NFD", UTText.normalize UTText.NFD)
, ("NFKD", UTText.normalize UTText.NFKD)
, ("NFC", UTText.normalize UTText.NFC)
, ("NFKC", UTText.normalize UTText.NFKC)
]
dataDir :: Path Rel Dir
dataDir = $(mkRelDir "benchmark") </> $(mkRelDir "data")
Truncate or expand all datasets to this size to provide a normalized
dataSetSize :: Int
dataSetSize = 1000000
makeBench :: (NFData a, NFData b) => (String, a -> b) -> (String, IO a) -> Benchmark
makeBench (implName, func) (dataName, setup) =
env setup (\txt -> bench (implName ++ "/" ++ dataName) (nf func txt))
strInput :: FilePath -> (String, IO String)
strInput file = (dataName file,
fmap (take dataSetSize . cycle) (readFile file))
where dataName = dropExtensions . takeFileName
txtInput :: FilePath -> (String, IO Text)
txtInput file = second (fmap T.pack) (strInput file)
where second f (a, b) = (a, f b)
main :: IO ()
main = do
dataFiles <- fmap (map toFilePath . snd) (listDir dataDir)
defaultMain $
[
#ifdef BENCH_ICU
bgroup "text-icu"
$ makeBench <$> textICUFuncs <*> (map txtInput dataFiles)
,
#endif
bgroup "unicode-transforms-text"
$ makeBench <$> unicodeTransformTextFuncs
<*> (map txtInput dataFiles)
]
|
ef716ad2969bed481d3facb5ed756bfd1361e838f0720e87072bcf8c794f67f3 | Philonous/d-bus | Signature.hs | {-# LANGUAGE OverloadedStrings #-}
module DBus.Signature where
import Control.Applicative ((<$>))
import qualified Data.Attoparsec.ByteString as AP
import qualified Data.Attoparsec.ByteString.Char8 as AP
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString.Lazy.Builder as BS
import Data.Char
import qualified Data.IntMap as IMap
import qualified Data.Text as Text
import DBus.Types
stToSignature :: DBusSimpleType -> Char
stToSignature TypeByte = 'y'
stToSignature TypeBoolean = 'b'
stToSignature TypeInt16 = 'n'
stToSignature TypeUInt16 = 'q'
stToSignature TypeInt32 = 'i'
stToSignature TypeUInt32 = 'u'
stToSignature TypeInt64 = 'x'
stToSignature TypeUInt64 = 't'
stToSignature TypeDouble = 'd'
stToSignature TypeUnixFD = 'h'
stToSignature TypeString = 's'
stToSignature TypeObjectPath = 'o'
stToSignature TypeSignature = 'g'
toSignature :: DBusType -> BS.ByteString
toSignature = BS.concat . BSL.toChunks . BS.toLazyByteString . toSignature'
toSignatures :: [DBusType] -> BS.ByteString
toSignatures = BS.concat . BSL.toChunks . BS.toLazyByteString . mconcat . map toSignature'
toSignature' :: DBusType -> BS.Builder
toSignature' (DBusSimpleType t) = BS.char8 $ stToSignature t
toSignature' (TypeArray t) = BS.char8 'a' <> toSignature' t
toSignature' (TypeStruct ts) = BS.char8 '('
<> mconcat (toSignature' <$> ts)
<> BS.char8 ')'
toSignature' (TypeDict kt vt) = BS.string8 "a{"
<> BS.char8 (stToSignature kt)
<> toSignature' vt
<> BS.char8 '}'
toSignature' (TypeDictEntry kt vt) = BS.string8 "e{"
<> BS.char8 (stToSignature kt)
<> toSignature' vt
<> BS.char8 '}'
toSignature' TypeVariant = BS.char8 'v'
toSignature' TypeUnit = ""
simpleTypeMap :: IMap.IntMap DBusSimpleType
simpleTypeMap = IMap.fromList[ (ord 'y', TypeByte )
, (ord 'b', TypeBoolean )
, (ord 'n', TypeInt16 )
, (ord 'q', TypeUInt16 )
, (ord 'i', TypeInt32 )
, (ord 'u', TypeUInt32 )
, (ord 'x', TypeInt64 )
, (ord 't', TypeUInt64 )
, (ord 'd', TypeDouble )
, (ord 'h', TypeUnixFD )
, (ord 's', TypeString )
, (ord 'o', TypeObjectPath )
, (ord 'g', TypeSignature )
]
simpleType :: AP.Parser DBusSimpleType
simpleType = do
c <- AP.anyWord8
case IMap.lookup (fromIntegral c) simpleTypeMap of
Nothing -> fail "not a simple type"
Just t -> return t
dictEntrySignature :: AP.Parser DBusType
dictEntrySignature = do
_ <- AP.char8 '{'
kt <- simpleType
vt <- signature
_ <- AP.string "}"
return $ TypeDictEntry kt vt
-- | Parse an array signature: a leading @a@ followed either by a
-- dict-entry body (yielding 'TypeDict') or by any element signature
-- (yielding 'TypeArray').
-- NOTE(review): the @<>@ between the two branches relies on attoparsec's
-- Semigroup instance for 'Parser', which acts like @<|>@ (try the first
-- branch, fall back to the second on failure) -- confirm.
arraySignature :: AP.Parser DBusType
arraySignature = do
  _ <- AP.char8 'a'
  ((do TypeDictEntry kt vt <- dictEntrySignature
       return $ TypeDict kt vt)
   <> (TypeArray <$> signature))
structSignature :: AP.Parser DBusType
structSignature = do
_ <- AP.char '('
TypeStruct <$> AP.manyTill signature (AP.char ')')
signature :: AP.Parser DBusType
signature = AP.choice [ AP.char 'v' >> return TypeVariant
, arraySignature
, structSignature
, DBusSimpleType <$> simpleType
]
eitherParseSig :: BS.ByteString -> Either Text.Text DBusType
eitherParseSig s = case AP.parseOnly signature s of
Left e -> Left $ Text.pack e
Right r -> Right r
-- | Parse a single D-Bus type signature, returning 'Nothing' on any
-- parse failure (the error message from 'eitherParseSig' is discarded).
parseSig :: BS.ByteString -> Maybe DBusType
parseSig = either (const Nothing) Just . eitherParseSig
eitherParseSigs :: BS.ByteString -> Either Text.Text [DBusType]
eitherParseSigs s = case AP.parseOnly (AP.many' signature) s of
Left e -> Left $ Text.pack e
Right r -> Right r
-- | Parse a concatenation of D-Bus type signatures, returning 'Nothing'
-- on any parse failure (the error message from 'eitherParseSigs' is
-- discarded).
parseSigs :: BS.ByteString -> Maybe [DBusType]
parseSigs = either (const Nothing) Just . eitherParseSigs
| null | https://raw.githubusercontent.com/Philonous/d-bus/fb8a948a3b9d51db618454328dbe18fb1f313c70/src/DBus/Signature.hs | haskell | # LANGUAGE OverloadedStrings # | module DBus.Signature where
import Control.Applicative ((<$>))
import qualified Data.Attoparsec.ByteString as AP
import qualified Data.Attoparsec.ByteString.Char8 as AP
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString.Lazy.Builder as BS
import Data.Char
import qualified Data.IntMap as IMap
import qualified Data.Text as Text
import DBus.Types
stToSignature :: DBusSimpleType -> Char
stToSignature TypeByte = 'y'
stToSignature TypeBoolean = 'b'
stToSignature TypeInt16 = 'n'
stToSignature TypeUInt16 = 'q'
stToSignature TypeInt32 = 'i'
stToSignature TypeUInt32 = 'u'
stToSignature TypeInt64 = 'x'
stToSignature TypeUInt64 = 't'
stToSignature TypeDouble = 'd'
stToSignature TypeUnixFD = 'h'
stToSignature TypeString = 's'
stToSignature TypeObjectPath = 'o'
stToSignature TypeSignature = 'g'
toSignature :: DBusType -> BS.ByteString
toSignature = BS.concat . BSL.toChunks . BS.toLazyByteString . toSignature'
toSignatures :: [DBusType] -> BS.ByteString
toSignatures = BS.concat . BSL.toChunks . BS.toLazyByteString . mconcat . map toSignature'
toSignature' :: DBusType -> BS.Builder
toSignature' (DBusSimpleType t) = BS.char8 $ stToSignature t
toSignature' (TypeArray t) = BS.char8 'a' <> toSignature' t
toSignature' (TypeStruct ts) = BS.char8 '('
<> mconcat (toSignature' <$> ts)
<> BS.char8 ')'
toSignature' (TypeDict kt vt) = BS.string8 "a{"
<> BS.char8 (stToSignature kt)
<> toSignature' vt
<> BS.char8 '}'
toSignature' (TypeDictEntry kt vt) = BS.string8 "e{"
<> BS.char8 (stToSignature kt)
<> toSignature' vt
<> BS.char8 '}'
toSignature' TypeVariant = BS.char8 'v'
toSignature' TypeUnit = ""
simpleTypeMap :: IMap.IntMap DBusSimpleType
simpleTypeMap = IMap.fromList[ (ord 'y', TypeByte )
, (ord 'b', TypeBoolean )
, (ord 'n', TypeInt16 )
, (ord 'q', TypeUInt16 )
, (ord 'i', TypeInt32 )
, (ord 'u', TypeUInt32 )
, (ord 'x', TypeInt64 )
, (ord 't', TypeUInt64 )
, (ord 'd', TypeDouble )
, (ord 'h', TypeUnixFD )
, (ord 's', TypeString )
, (ord 'o', TypeObjectPath )
, (ord 'g', TypeSignature )
]
simpleType :: AP.Parser DBusSimpleType
simpleType = do
c <- AP.anyWord8
case IMap.lookup (fromIntegral c) simpleTypeMap of
Nothing -> fail "not a simple type"
Just t -> return t
dictEntrySignature :: AP.Parser DBusType
dictEntrySignature = do
_ <- AP.char8 '{'
kt <- simpleType
vt <- signature
_ <- AP.string "}"
return $ TypeDictEntry kt vt
arraySignature :: AP.Parser DBusType
arraySignature = do
_ <- AP.char8 'a'
((do TypeDictEntry kt vt <- dictEntrySignature
return $ TypeDict kt vt)
<> (TypeArray <$> signature))
structSignature :: AP.Parser DBusType
structSignature = do
_ <- AP.char '('
TypeStruct <$> AP.manyTill signature (AP.char ')')
signature :: AP.Parser DBusType
signature = AP.choice [ AP.char 'v' >> return TypeVariant
, arraySignature
, structSignature
, DBusSimpleType <$> simpleType
]
eitherParseSig :: BS.ByteString -> Either Text.Text DBusType
eitherParseSig s = case AP.parseOnly signature s of
Left e -> Left $ Text.pack e
Right r -> Right r
parseSig :: BS.ByteString -> Maybe DBusType
parseSig s = case eitherParseSig s of
Left _ -> Nothing
Right r -> Just r
eitherParseSigs :: BS.ByteString -> Either Text.Text [DBusType]
eitherParseSigs s = case AP.parseOnly (AP.many' signature) s of
Left e -> Left $ Text.pack e
Right r -> Right r
parseSigs :: BS.ByteString -> Maybe [DBusType]
parseSigs s = case eitherParseSigs s of
Left _ -> Nothing
Right r -> Just r
|
f9d7e5eab8795d08fc34e5489f625571d58362d24cc4826c29505a87de8b8bf8 | tisnik/clojure-examples | core.clj | (ns clisktest6.core
(:gen-class)
(:use clisk.live))
(import java.io.File)
(import javax.imageio.ImageIO)
(defn write-image
  "Save a BufferedImage raster image into the given file, encoded as PNG."
  [image file-name]
  (ImageIO/write image "png" (File. file-name)))
(defn write-pattern
  "Render a raster image from the given clisk pattern and write it to a file."
  [pattern file-name]
  (write-image (image pattern) file-name))
(defn predefined-textures-test
  "Render each of clisk's predefined textures into its own PNG file
  named texture_<index>.png."
  []
  (let [textures [agate
                  clouds
                  velvet
                  flecks
                  wood
                  ]]
    ;; Walk over all elements of the "textures" vector; map-indexed pairs
    ;; each texture with its [index texture] vector, and the index is used
    ;; to build the output file name passed to write-pattern.
    ;; (Restored as ";;" comments: the original Czech comment lines lost
    ;; their markers during extraction, which broke the syntax.)
    (doseq [[i texture] (map-indexed vector textures)]
      (write-pattern texture (str "texture_" i ".png")))))
(defn -main
  "Entry point: render all predefined textures, report progress, and exit.
  Any throwable is printed instead of crashing the process."
  [& args]
  (try
    (println "Predefined textures test...")
    (predefined-textures-test)
    (println "Done")
    (catch Throwable e
      (println (.toString e)))
    ;; make sure the program always terminates correctly, even if
    ;; background threads would otherwise keep the JVM alive
    ;; NOTE(review): the "(finally" opener was lost during extraction,
    ;; leaving "(System/exit 0))))" orphaned; restored here -- confirm
    ;; against the upstream file.
    (finally
      (System/exit 0))))
| null | https://raw.githubusercontent.com/tisnik/clojure-examples/984af4a3e20d994b4f4989678ee1330e409fdae3/clisktest6/src/clisktest6/core.clj | clojure | (ns clisktest6.core
(:gen-class)
(:use clisk.live))
(import java.io.File)
(import javax.imageio.ImageIO)
(defn write-image
"Uložení rastrového obrázku typu BufferedImage do souboru."
[image file-name]
(ImageIO/write image "png" (File. file-name)))
(defn write-pattern
"Vytvoření rastrového obrázku na základě předaného patternu."
[pattern file-name]
(write-image (image pattern) file-name))
(defn predefined-textures-test
[]
(let [textures [agate
clouds
velvet
flecks
wood
]]
postupně projít všemi prvky vektoru " textures " ,
dvouprvkový vektor [ index+patter ] ,
souboru a následně zavolat funkci write - texture
(doseq [ [i texture] (map-indexed vector textures)]
(write-pattern texture (str "texture_" i ".png")))))
(defn -main
[& args]
(try
(println "Predefined textures test...")
(predefined-textures-test)
(println "Done")
(catch Throwable e
(println (.toString e)))
jistota , že program vždy korektně skončí
(System/exit 0))))
|
|
93cb9f15fc656d994bdb34cb40b8c3b982ea2b5fade38f6c0e1966c2f3398c51 | clojure/core.typed | classify_invoke.clj | Copyright ( c ) , Rich Hickey & contributors .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
;copied from clojure.tools.analyzer.passes.jvm.classify-invoke
(ns clojure.core.typed.analyzer.jvm.passes.classify-invoke
(:require [clojure.core.typed.analyzer.common :as common]
[clojure.core.typed.analyzer.common.utils :as cu]
[clojure.core.typed.analyzer.jvm.utils :as ju]
[clojure.core.typed.analyzer.jvm.passes.validate :as validate]))
(create-ns 'clojure.core.typed.analyzer.jvm)
(alias 'jvm 'clojure.core.typed.analyzer.jvm)
;;important that this pass depends our `uniquify-locals`
( clojure.core.typed.analyzer.common.passes.uniquify ) , not the taj pass
(defn classify-invoke
  "If the AST node is an :invoke, check the node in function position,
 * if it is a keyword, transform the node in a :keyword-invoke node;
 * if it is the clojure.core/instance? var and the first argument is a
 literal class, transform the node in a :instance? node to be inlined by
 the emitter
 * if it is a protocol function var, transform the node in a :protocol-invoke
 node
 * if it is a regular function with primitive type hints that match a
 clojure.lang.IFn$[primitive interface], transform the node in a :prim-invoke
 node"
 {:pass-info {:walk :post :depends #{#'validate/validate}}}
 [{:keys [op args tag env form] :as ast}]
 ;; Non-:invoke nodes pass through untouched.
 (if-not (= op :invoke)
 ast
 (let [argc (count args)
 the-fn (:fn ast)
 op (:op the-fn)
 var? (= :var op)
 the-var (:var the-fn)]
 (cond
 ;; (:kw x) / (:kw x default) -- but only the 1-arg, unqualified form
 ;; becomes a :keyword-invoke node; qualified or 2-arg stays :invoke.
 (and (= :const op)
 (= :keyword (:type the-fn)))
 (if (<= 1 argc 2)
 (if (and (not (namespace (:val the-fn)))
 (= 1 argc))
 (merge (dissoc ast :fn :args)
 {:op :keyword-invoke
 ::common/op ::common/keyword-invoke
 :target (first args)
 :keyword the-fn
 :children [:keyword :target]})
 ast)
 (throw (ex-info (str "Cannot invoke keyword with " argc " arguments")
 (merge {:form form}
 (cu/source-info env)))))
 ;; (instance? SomeClass x) with a literal class -> inlinable :instance?
 (and (= 2 argc)
 var?
 (= #'clojure.core/instance? the-var)
 (= :const (:op (first args)))
 (= :class (:type (first args))))
 (merge (dissoc ast :fn :args)
 {:op :instance?
 ;; NOTE(review): ::jvm/keyword-invoke here looks like a copy-paste
 ;; from the branch above; expected ::jvm/instance? -- confirm
 ;; against the analyzer's op registry before changing.
 ::common/op ::jvm/keyword-invoke
 :class (:val (first args))
 :target (second args)
 :form form
 :env env
 :o-tag Boolean/TYPE
 :tag (or tag Boolean/TYPE)
 :children [:target]})
 ;; Protocol function call: first arg is the dispatch target.
 (and var? (cu/protocol-node? the-var (:meta the-fn)))
 (if (>= argc 1)
 (merge (dissoc ast :fn)
 {:op :protocol-invoke
 ::common/op ::common/protocol-invoke
 :protocol-fn the-fn
 :target (first args)
 :args (vec (rest args))
 :children [:protocol-fn :target :args]})
 (throw (ex-info "Cannot invoke protocol method with no args"
 (merge {:form form}
 (cu/source-info env)))))
 ;; Otherwise: if the arglist's primitive type hints match one of the
 ;; clojure.lang.IFn$<prim> interfaces, emit a :prim-invoke node.
 :else
 (let [arglist (cu/arglist-for-arity the-fn argc)
 arg-tags (mapv (comp ju/specials str :tag meta) arglist)
 ret-tag (-> arglist meta :tag str ju/specials)
 tags (conj arg-tags ret-tag)]
 (if-let [prim-interface (ju/prim-interface (mapv #(if (nil? %) Object %) tags))]
 (merge ast
 {:op :prim-invoke
 ;; NOTE(review): ::jvm/protocol-invoke for a :prim-invoke node
 ;; looks copy-pasted; expected ::jvm/prim-invoke -- confirm.
 ::common/op ::jvm/protocol-invoke
 :prim-interface prim-interface
 :args (mapv (fn [arg tag] (assoc arg :tag tag)) args arg-tags)
 :o-tag ret-tag
 :tag (or tag ret-tag)})
 ast))))))
| null | https://raw.githubusercontent.com/clojure/core.typed/f5b7d00bbb29d09000d7fef7cca5b40416c9fa91/typed/analyzer.jvm/src/clojure/core/typed/analyzer/jvm/passes/classify_invoke.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
copied from clojure.tools.analyzer.passes.jvm.classify-invoke
important that this pass depends our `uniquify-locals`
| Copyright ( c ) , Rich Hickey & contributors .
(ns clojure.core.typed.analyzer.jvm.passes.classify-invoke
(:require [clojure.core.typed.analyzer.common :as common]
[clojure.core.typed.analyzer.common.utils :as cu]
[clojure.core.typed.analyzer.jvm.utils :as ju]
[clojure.core.typed.analyzer.jvm.passes.validate :as validate]))
(create-ns 'clojure.core.typed.analyzer.jvm)
(alias 'jvm 'clojure.core.typed.analyzer.jvm)
( clojure.core.typed.analyzer.common.passes.uniquify ) , not the taj pass
(defn classify-invoke
"If the AST node is an :invoke, check the node in function position,
* if it is the clojure.core/instance? var and the first argument is a
literal class, transform the node in a :instance? node to be inlined by
the emitter
* if it is a protocol function var, transform the node in a :protocol-invoke
node
* if it is a regular function with primitive type hints that match a
clojure.lang.IFn$[primitive interface], transform the node in a :prim-invoke
node"
{:pass-info {:walk :post :depends #{#'validate/validate}}}
[{:keys [op args tag env form] :as ast}]
(if-not (= op :invoke)
ast
(let [argc (count args)
the-fn (:fn ast)
op (:op the-fn)
var? (= :var op)
the-var (:var the-fn)]
(cond
(and (= :const op)
(= :keyword (:type the-fn)))
(if (<= 1 argc 2)
(if (and (not (namespace (:val the-fn)))
(= 1 argc))
(merge (dissoc ast :fn :args)
{:op :keyword-invoke
::common/op ::common/keyword-invoke
:target (first args)
:keyword the-fn
:children [:keyword :target]})
ast)
(throw (ex-info (str "Cannot invoke keyword with " argc " arguments")
(merge {:form form}
(cu/source-info env)))))
(and (= 2 argc)
var?
(= #'clojure.core/instance? the-var)
(= :const (:op (first args)))
(= :class (:type (first args))))
(merge (dissoc ast :fn :args)
{:op :instance?
::common/op ::jvm/keyword-invoke
:class (:val (first args))
:target (second args)
:form form
:env env
:o-tag Boolean/TYPE
:tag (or tag Boolean/TYPE)
:children [:target]})
(and var? (cu/protocol-node? the-var (:meta the-fn)))
(if (>= argc 1)
(merge (dissoc ast :fn)
{:op :protocol-invoke
::common/op ::common/protocol-invoke
:protocol-fn the-fn
:target (first args)
:args (vec (rest args))
:children [:protocol-fn :target :args]})
(throw (ex-info "Cannot invoke protocol method with no args"
(merge {:form form}
(cu/source-info env)))))
:else
(let [arglist (cu/arglist-for-arity the-fn argc)
arg-tags (mapv (comp ju/specials str :tag meta) arglist)
ret-tag (-> arglist meta :tag str ju/specials)
tags (conj arg-tags ret-tag)]
(if-let [prim-interface (ju/prim-interface (mapv #(if (nil? %) Object %) tags))]
(merge ast
{:op :prim-invoke
::common/op ::jvm/protocol-invoke
:prim-interface prim-interface
:args (mapv (fn [arg tag] (assoc arg :tag tag)) args arg-tags)
:o-tag ret-tag
:tag (or tag ret-tag)})
ast))))))
|
182e2c66b4a7900db53726d0c18a5381cc3cc8e215c5b8debea10ce47e445513 | heroku/lein-heroku | core.clj | (ns happy-path.core
(:require [happy-path.handler :refer [app init destroy]]
[immutant.web :as immutant]
[clojure.tools.nrepl.server :as nrepl]
[taoensso.timbre :as timbre]
[environ.core :refer [env]])
(:gen-class))
(defonce nrepl-server (atom nil))
(defn parse-port
  "Coerce a port value to an integer: strings are parsed with
  Integer/parseInt, numbers pass through unchanged, nil yields nil,
  and anything else throws."
  [port]
  (when port
    (cond
      (string? port) (Integer/parseInt port)
      (number? port) port
      :else (throw (Exception. (str "invalid port value: " port))))))
(defn stop-nrepl []
(when-let [server @nrepl-server]
(nrepl/stop-server server)))
(defn start-nrepl
"Start a network repl for debugging when the :nrepl-port is set in the environment."
[]
(if @nrepl-server
(timbre/error "nREPL is already running!")
(when-let [port (env :nrepl-port)]
(try
(->> port
(parse-port)
(nrepl/start-server :port)
(reset! nrepl-server))
(timbre/info "nREPL server started on port" port)
(catch Throwable t
(timbre/error "failed to start nREPL" t))))))
(defn http-port [port]
(parse-port (or port (env :port) 3000)))
(defonce http-server (atom nil))
(defn start-http-server [port]
(init)
(reset! http-server (immutant/run app :host "0.0.0.0" :port port)))
(defn stop-http-server []
(when @http-server
(destroy)
(immutant/stop @http-server)
(reset! http-server nil)))
(defn stop-app []
(stop-nrepl)
(stop-http-server))
(defn start-app [[port]]
(.addShutdownHook (Runtime/getRuntime) (Thread. stop-app))
(start-nrepl)
(start-http-server (http-port port))
(timbre/info "server started on port:" (:port @http-server)))
(defn -main [& args]
(start-app args))
| null | https://raw.githubusercontent.com/heroku/lein-heroku/337a56787b42b7291e519090fa9bb7d96470667c/it/buildpacks/src/happy_path/core.clj | clojure | (ns happy-path.core
(:require [happy-path.handler :refer [app init destroy]]
[immutant.web :as immutant]
[clojure.tools.nrepl.server :as nrepl]
[taoensso.timbre :as timbre]
[environ.core :refer [env]])
(:gen-class))
(defonce nrepl-server (atom nil))
(defn parse-port [port]
(when port
(cond
(string? port) (Integer/parseInt port)
(number? port) port
:else (throw (Exception. (str "invalid port value: " port))))))
(defn stop-nrepl []
(when-let [server @nrepl-server]
(nrepl/stop-server server)))
(defn start-nrepl
"Start a network repl for debugging when the :nrepl-port is set in the environment."
[]
(if @nrepl-server
(timbre/error "nREPL is already running!")
(when-let [port (env :nrepl-port)]
(try
(->> port
(parse-port)
(nrepl/start-server :port)
(reset! nrepl-server))
(timbre/info "nREPL server started on port" port)
(catch Throwable t
(timbre/error "failed to start nREPL" t))))))
(defn http-port [port]
(parse-port (or port (env :port) 3000)))
(defonce http-server (atom nil))
(defn start-http-server [port]
(init)
(reset! http-server (immutant/run app :host "0.0.0.0" :port port)))
(defn stop-http-server []
(when @http-server
(destroy)
(immutant/stop @http-server)
(reset! http-server nil)))
(defn stop-app []
(stop-nrepl)
(stop-http-server))
(defn start-app [[port]]
(.addShutdownHook (Runtime/getRuntime) (Thread. stop-app))
(start-nrepl)
(start-http-server (http-port port))
(timbre/info "server started on port:" (:port @http-server)))
(defn -main [& args]
(start-app args))
|
|
f2967c8da469d08a7e2014bb0954c2eb024f603e1a6e72b65f474b64609e62b4 | cedlemo/OCaml-GI-ctypes-bindings-generator | Window_group_private.ml | open Ctypes
open Foreign
type t
let t_typ : t structure typ = structure "Window_group_private"
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gtk3/Window_group_private.ml | ocaml | open Ctypes
open Foreign
type t
let t_typ : t structure typ = structure "Window_group_private"
|
|
17de9c4fc4bcd7dd293258e38dd8551a957ee12139aee6e5fd82d6c02527549e | javalib-team/sawja | jCFADom.mli |
* This file is part of SAWJA
* Copyright ( c)2013 ( INRIA )
*
* This program is free software : you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation , either version 3 of
* the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful , but
* WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* General Public License for more details .
*
* You should have received a copy of the GNU General Public
* License along with this program . If not , see
* < / > .
* This file is part of SAWJA
* Copyright (c)2013 Pierre Vittet (INRIA)
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* </>.
*)
open Javalib_pack
open JBasics
open JProgram
(*Abstraction of a variable*)
(* Abstraction of the possible heap values of a variable: a set of
   allocation sites (program points) with their object types. *)
module AbVSet:
sig
  type t
  type analysisID = unit
  type analysisDomain = t
  val bot : t
  (*==Null*)
  val empty : t
  val isBot : t -> bool
  val isTop: t -> bool
  val primitive : t
  val isPrimitive : t -> bool
  (*If the set is empty, it means that the only possible concrete value of the
   * variable is null.*)
  val is_empty: t -> bool
  (** [singleton pps cn]: Create a set from a singleton element. [pps] is a
    * list of program points characterizing the location of the affectation.
    * [cn] is the class of the allocated object.
    * (Comment markers restored: the original doc comment was duplicated and
    * stripped of its delimiters during extraction.) *)
  val singleton : JBirPP.t list -> object_type-> t
  val equal : t -> t -> bool
  val inter : t -> t -> t
  val join : ?modifies:bool ref -> t -> t -> t
  val join_ad : ?do_join:bool -> ?modifies:bool ref -> t -> t -> t
  (** [filter_with_compatible prog abs cn]: Restrain the abstraction [abs] to
    * the types compatible with [cn]. *)
  val filter_with_compatible : 'a JProgram.program -> t -> object_type -> t
  (** [filter_with_uncompatible prog abs cn]: Restrain the abstraction [abs] to
    * the types not compatible with [cn]. *)
  val filter_with_uncompatible : 'a JProgram.program -> t -> object_type -> t
  val concretize : t -> JType.ObjectSet.t
  val to_string : t -> string
  val pprint : Format.formatter -> t -> unit
  val get_analysis : analysisID -> t -> analysisDomain
end
(*Abstraction of a field*)
module AbFSet :
sig
type t
type analysisID = unit
type analysisDomain = t
val bot : t
val empty : t
val isBot : t -> bool
val is_empty: t -> bool
val equal : t -> t -> bool
val inter : t -> t -> t
val join : ?modifies:bool ref -> t -> t -> t
val join_ad : ?do_join:bool -> ?modifies:bool ref -> t -> t -> t
(** [var2fSet obj var]: for a field such as [obj].field = [var], return its AbFSet according to the AbVSet of [obj] and [var]. *)
val var2fSet : AbVSet.t -> AbVSet.t -> t
(** [fSet2var fset objvSet]: From a field abstraction [fset] and [objvSet],
* the abstraction of the object variable used to access the field, return
* a variable abstraction corresponding to the possible abstract values when
* then variable is affected the field value.*)
val fSet2var: t -> AbVSet.t -> AbVSet.t
(*A special 'virtual' set which can contains static variables. It used as obj*)
val static_field_dom : AbVSet.t
val to_string : t -> string
val pprint : Format.formatter -> t -> unit
val get_analysis : analysisID -> t -> analysisDomain
end
(*primitive variables are ignored from this map*)
module AbLocals : sig
type t
type analysisID = AbVSet.analysisID
type analysisDomain = t
val bot : t
val init : t
val isBot : analysisDomain -> bool
val join : ?modifies:bool ref -> t -> t -> t
val join_ad : ?do_join:bool -> ?modifies:bool ref -> t -> analysisDomain -> t
val equal : t -> t -> bool
val get_analysis : analysisID -> t -> analysisDomain
val to_string : t -> string
val pprint : Format.formatter -> t -> unit
val get_var : int -> analysisDomain -> AbVSet.t
val set_var : int -> AbVSet.t -> analysisDomain -> analysisDomain
end
(* Abstraction of a method: abstract argument locals plus abstract normal
   and exceptional return values. *)
module AbMethod : sig
  type t
  type analysisID = unit
  type analysisDomain = t
  (* NOTE(review): this val appeared with broken arrows ("- >") in the
     extracted source, consistent with it having been commented out
     upstream; kept commented out here -- confirm against the repository. *)
  (* val is_static : A3Bir.t node -> method_signature -> bool *)
  val equal : t -> t -> bool
  val bot : t
  val isBot : t -> bool
  val init : t
  (** For a virtual method, the argument at index 0 is 'this'. *)
  val get_args : t -> AbLocals.t
  val init_locals : JBir.t node -> method_signature -> t -> AbLocals.t
  val get_return : t -> AbVSet.t
  val get_exc_return : t -> AbVSet.t
  val join_args : t -> AbLocals.t -> t
  val set_args: t -> AbLocals.t -> t
  val join_return : t -> AbVSet.t -> t
  val join_exc_return : t -> AbVSet.t -> t
  val join : ?modifies:bool ref -> t -> t -> t
  val join_ad : ?do_join:bool -> ?modifies:bool Stdlib.ref ->
    t -> analysisDomain -> t
  val pprint : Format.formatter -> t -> unit
  val to_string : t -> string
  val get_analysis : analysisID -> t -> analysisDomain
end
module Var : Safe.Var.S with module Context = Safe.Var.EmptyContext
module AbField : (Safe.Domain.S
with type t = AbFSet.t
and type analysisDomain = AbFSet.t
and type analysisID = AbFSet.analysisID)
module AbPP : (Safe.Domain.S
with type t = AbLocals.t
and type analysisDomain = AbLocals.t
and type analysisID = AbLocals.analysisID)
module AbMeth : (Safe.Domain.S
with type t = AbMethod.t
and type analysisDomain = AbMethod.t
and type analysisID = AbMethod.analysisID)
module CFAState : Safe.State.S
with module Var = Safe.Var.Make(Safe.Var.EmptyContext)
and module Global = Safe.Domain.Empty
and module IOC = Safe.Domain.Empty
and module Field = AbField
and module Method = AbMeth
and module PP = AbPP
module CFAConstraints : Safe.Constraints.S with module State = CFAState
| null | https://raw.githubusercontent.com/javalib-team/sawja/5b46e4afc024092cdeaf8ba125f0c5ac05cb9137/src/jCFADom.mli | ocaml | Abstraction of a variable
==Null
If the set is empty, it means that the only possible concrete value of the
* variable is null.
* [filter_with_compatible prog abs cn] :Restrain the abstraction [abs] to
* the type compatible with [cn].
* [filter_with_uncompatible prog abs cn] :Restrain the abstraction [abs] to
* the type not compatible with [cn].
Abstraction of a field
* [var2fSet obj var]: for a field such as [obj].field = [var], return its AbFSet according to the AbVSet of [obj] and [var].
* [fSet2var fset objvSet]: From a field abstraction [fset] and [objvSet],
* the abstraction of the object variable used to access the field, return
* a variable abstraction corresponding to the possible abstract values when
* then variable is affected the field value.
A special 'virtual' set which can contains static variables. It used as obj
primitive variables are ignored from this map |
* This file is part of SAWJA
* Copyright ( c)2013 ( INRIA )
*
* This program is free software : you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation , either version 3 of
* the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful , but
* WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* General Public License for more details .
*
* You should have received a copy of the GNU General Public
* License along with this program . If not , see
* < / > .
* This file is part of SAWJA
* Copyright (c)2013 Pierre Vittet (INRIA)
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* </>.
*)
open Javalib_pack
open JBasics
open JProgram
module AbVSet:
sig
type t
type analysisID = unit
type analysisDomain = t
val bot : t
val empty : t
val isBot : t -> bool
val isTop: t -> bool
val primitive : t
val isPrimitive : t -> bool
val is_empty: t -> bool
* [ singleton pps cn ] : Create a set from a singleton element . [ pps ] is a list
* of program point characterizing the location of the affectation . cn is the
* class of the allocated object .
* of program point characterizing the location of the affectation. cn is the
* class of the allocated object. *)
val singleton : JBirPP.t list -> object_type-> t
val equal : t -> t -> bool
val inter : t -> t -> t
val join : ?modifies:bool ref -> t -> t -> t
val join_ad : ?do_join:bool -> ?modifies:bool ref -> t -> t -> t
val filter_with_compatible : 'a JProgram.program -> t -> object_type -> t
val filter_with_uncompatible : 'a JProgram.program -> t -> object_type -> t
val concretize : t -> JType.ObjectSet.t
val to_string : t -> string
val pprint : Format.formatter -> t -> unit
val get_analysis : analysisID -> t -> analysisDomain
end
module AbFSet :
sig
type t
type analysisID = unit
type analysisDomain = t
val bot : t
val empty : t
val isBot : t -> bool
val is_empty: t -> bool
val equal : t -> t -> bool
val inter : t -> t -> t
val join : ?modifies:bool ref -> t -> t -> t
val join_ad : ?do_join:bool -> ?modifies:bool ref -> t -> t -> t
val var2fSet : AbVSet.t -> AbVSet.t -> t
val fSet2var: t -> AbVSet.t -> AbVSet.t
val static_field_dom : AbVSet.t
val to_string : t -> string
val pprint : Format.formatter -> t -> unit
val get_analysis : analysisID -> t -> analysisDomain
end
module AbLocals : sig
type t
type analysisID = AbVSet.analysisID
type analysisDomain = t
val bot : t
val init : t
val isBot : analysisDomain -> bool
val join : ?modifies:bool ref -> t -> t -> t
val join_ad : ?do_join:bool -> ?modifies:bool ref -> t -> analysisDomain -> t
val equal : t -> t -> bool
val get_analysis : analysisID -> t -> analysisDomain
val to_string : t -> string
val pprint : Format.formatter -> t -> unit
val get_var : int -> analysisDomain -> AbVSet.t
val set_var : int -> AbVSet.t -> analysisDomain -> analysisDomain
end
module AbMethod : sig
type t
type analysisID = unit
type analysisDomain = t
val is_static : A3Bir.t node - > method_signature - > bool
val equal : t -> t -> bool
val bot : t
val isBot : t -> bool
val init : t
* For a virtual method , the argument at index 0 , is ' this ' .
val get_args : t -> AbLocals.t
val init_locals : JBir.t node -> method_signature -> t -> AbLocals.t
val get_return : t -> AbVSet.t
val get_exc_return : t -> AbVSet.t
val join_args : t -> AbLocals.t -> t
val set_args: t -> AbLocals.t -> t
val join_return : t -> AbVSet.t -> t
val join_exc_return : t -> AbVSet.t -> t
val join : ?modifies:bool ref -> t -> t -> t
val join_ad : ?do_join:bool -> ?modifies:bool Stdlib.ref ->
t -> analysisDomain -> t
val pprint : Format.formatter -> t -> unit
val to_string : t -> string
val get_analysis : analysisID -> t -> analysisDomain
end
module Var : Safe.Var.S with module Context = Safe.Var.EmptyContext
module AbField : (Safe.Domain.S
with type t = AbFSet.t
and type analysisDomain = AbFSet.t
and type analysisID = AbFSet.analysisID)
module AbPP : (Safe.Domain.S
with type t = AbLocals.t
and type analysisDomain = AbLocals.t
and type analysisID = AbLocals.analysisID)
module AbMeth : (Safe.Domain.S
with type t = AbMethod.t
and type analysisDomain = AbMethod.t
and type analysisID = AbMethod.analysisID)
module CFAState : Safe.State.S
with module Var = Safe.Var.Make(Safe.Var.EmptyContext)
and module Global = Safe.Domain.Empty
and module IOC = Safe.Domain.Empty
and module Field = AbField
and module Method = AbMeth
and module PP = AbPP
module CFAConstraints : Safe.Constraints.S with module State = CFAState
|
679369985ed9d1c1de95a050dcf78d9b27754efca9f50a0f24bdf4a9e952fafc | programaker-project/Programaker-Core | automate_rest_api_metrics.erl | %%% @doc
%%% REST endpoint to work as prometheus exporter.
%%% @end
-module(automate_rest_api_metrics).
-export([init/2]).
-export([ content_types_provided/2
, is_authorized/2
]).
-export([ to_text/2
]).
-define(APPLICATION, automate_rest_api).
-define(METRICS_BEARER_TOKEN_SETTING, metrics_secret).
-spec init(_,_) -> {'cowboy_rest',_,_}.
init(Req, _Opts) ->
{cowboy_rest, Req, { }}.
%% Authorization
is_authorized(Req, State) ->
case application:get_env(?APPLICATION, ?METRICS_BEARER_TOKEN_SETTING) of
%% No setting, we allow anything
undefined ->
{ true, Req, State };
{ok, Secret} ->
case cowboy_req:header(<<"authorization">>, Req, undefined) of
undefined ->
{ {false, <<"Authorization header not found">>} , Req, State };
<<"Bearer ", Secret/binary>> ->
{ true, Req, State };
X ->
{ { false, <<"Authorization not correct">>}, Req, State }
end
end.
%% GET handler
content_types_provided(Req, State) ->
{[{{<<"*">>, <<"*">>, []}, to_text}],
Req, State}.
-spec to_text(cowboy_req:req(), {})
-> {binary(),cowboy_req:req(), {}}.
to_text(Req, State) ->
try automate_stats:format(prometheus) of
Output ->
Res1 = cowboy_req:delete_resp_header(<<"content-type">>, Req),
Res2 = cowboy_req:set_resp_header(<<"content-type">>, <<"text/plain">>, Res1),
{ Output, Res2, State }
catch ErrorNS:Error:StackTrace ->
Code = 500,
automate_logging:log_platform(error, ErrorNS, Error, StackTrace),
Res = cowboy_req:reply(Code, #{ <<"content-type">> => <<"application/json">> }, <<"Error getting stats, check logs for more info">>, Req),
{stop, Res, State}
end.
| null | https://raw.githubusercontent.com/programaker-project/Programaker-Core/ef10fc6d2a228b2096b121170c421f5c29f9f270/backend/apps/automate_rest_api/src/automate_rest_api_metrics.erl | erlang | @doc
REST endpoint to work as prometheus exporter.
@end
Authorization
No setting, we allow anything
GET handler |
-module(automate_rest_api_metrics).
-export([init/2]).
-export([ content_types_provided/2
, is_authorized/2
]).
-export([ to_text/2
]).
-define(APPLICATION, automate_rest_api).
-define(METRICS_BEARER_TOKEN_SETTING, metrics_secret).
-spec init(_,_) -> {'cowboy_rest',_,_}.
init(Req, _Opts) ->
{cowboy_rest, Req, { }}.
is_authorized(Req, State) ->
case application:get_env(?APPLICATION, ?METRICS_BEARER_TOKEN_SETTING) of
undefined ->
{ true, Req, State };
{ok, Secret} ->
case cowboy_req:header(<<"authorization">>, Req, undefined) of
undefined ->
{ {false, <<"Authorization header not found">>} , Req, State };
<<"Bearer ", Secret/binary>> ->
{ true, Req, State };
X ->
{ { false, <<"Authorization not correct">>}, Req, State }
end
end.
content_types_provided(Req, State) ->
{[{{<<"*">>, <<"*">>, []}, to_text}],
Req, State}.
-spec to_text(cowboy_req:req(), {})
-> {binary(),cowboy_req:req(), {}}.
to_text(Req, State) ->
try automate_stats:format(prometheus) of
Output ->
Res1 = cowboy_req:delete_resp_header(<<"content-type">>, Req),
Res2 = cowboy_req:set_resp_header(<<"content-type">>, <<"text/plain">>, Res1),
{ Output, Res2, State }
catch ErrorNS:Error:StackTrace ->
Code = 500,
automate_logging:log_platform(error, ErrorNS, Error, StackTrace),
Res = cowboy_req:reply(Code, #{ <<"content-type">> => <<"application/json">> }, <<"Error getting stats, check logs for more info">>, Req),
{stop, Res, State}
end.
|
bd224c5c68cc5040aba2d2f46ab1f02c055920021ff5f44266229908d0d88016 | KirinDave/fuzed | fuzed_node.erl | -module(fuzed_node).
-export([start/0]).
start() ->
application:load(fuzed_node),
application:start(fuzed_node). | null | https://raw.githubusercontent.com/KirinDave/fuzed/56098d9e4c139613845289bdd5acebdfe608981a/elibs/fuzed_node.erl | erlang | -module(fuzed_node).
-export([start/0]).
start() ->
application:load(fuzed_node),
application:start(fuzed_node). |
|
2584a2151cbab65836d0b3bb602c6f256405e0951dbecc90145eef84f32089aa | tfausak/advent-of-code | 2.hs | -- stack --resolver lts-12.25 script
import qualified Data.SBV as SBV
import qualified Text.ParserCombinators.ReadP as Parse
main = do
nanobots <- map read . lines <$> readFile "input.txt"
model <- SBV.optimize SBV.Lexicographic $ do
[x, y, z] <- SBV.sIntegers ["x", "y", "z"]
SBV.maximize "nanobots-in-range" . sum $ map
((\ n -> n :: SBV.SInteger) . inRange x y z) nanobots
SBV.minimize "distance-to-origin" $ manhattanDistance 0 0 0 x y z
print model
inRange x y z n = SBV.oneIf . (SBV..<= SBV.literal (nr n)) $ manhattanDistance
(SBV.literal $ nx n) (SBV.literal $ ny n) (SBV.literal $ nz n)
x y z
absoluteValue n = SBV.ite (n SBV..< 0) (negate n) n
manhattanDistance x0 y0 z0 x1 y1 z1 =
absoluteValue (x0 - x1) + absoluteValue (y0 - y1) + absoluteValue (z0 - z1)
data Nanobot = Nanobot { nx, ny, nz, nr :: Integer } deriving Show
instance Read Nanobot where
readsPrec n = let parseInt = Parse.readS_to_P (readsPrec n) in
Parse.readP_to_S (Nanobot
<$> (Parse.string "pos=<" *> parseInt)
<*> (Parse.char ',' *> parseInt)
<*> (Parse.char ',' *> parseInt)
<*> (Parse.string ">, r=" *> parseInt))
| null | https://raw.githubusercontent.com/tfausak/advent-of-code/26f0d9726b019ff7b97fa7e0f2f995269b399578/2018/23/2.hs | haskell | stack --resolver lts-12.25 script | import qualified Data.SBV as SBV
import qualified Text.ParserCombinators.ReadP as Parse
main = do
nanobots <- map read . lines <$> readFile "input.txt"
model <- SBV.optimize SBV.Lexicographic $ do
[x, y, z] <- SBV.sIntegers ["x", "y", "z"]
SBV.maximize "nanobots-in-range" . sum $ map
((\ n -> n :: SBV.SInteger) . inRange x y z) nanobots
SBV.minimize "distance-to-origin" $ manhattanDistance 0 0 0 x y z
print model
inRange x y z n = SBV.oneIf . (SBV..<= SBV.literal (nr n)) $ manhattanDistance
(SBV.literal $ nx n) (SBV.literal $ ny n) (SBV.literal $ nz n)
x y z
absoluteValue n = SBV.ite (n SBV..< 0) (negate n) n
manhattanDistance x0 y0 z0 x1 y1 z1 =
absoluteValue (x0 - x1) + absoluteValue (y0 - y1) + absoluteValue (z0 - z1)
data Nanobot = Nanobot { nx, ny, nz, nr :: Integer } deriving Show
instance Read Nanobot where
readsPrec n = let parseInt = Parse.readS_to_P (readsPrec n) in
Parse.readP_to_S (Nanobot
<$> (Parse.string "pos=<" *> parseInt)
<*> (Parse.char ',' *> parseInt)
<*> (Parse.char ',' *> parseInt)
<*> (Parse.string ">, r=" *> parseInt))
|
d6f23ed27685725fcbbfa894d2b9383615f8d1957c47e5f692832cffe5bde408 | maoo/segway | web.clj | (ns segway.web
(:use segway.data
segway.pages.home
segway.pages.detail
ring.util.response
[net.cgrand.moustache :only [app]]
[clojure.contrib.duck-streams :only [pwd]])
(:require [net.cgrand.enlive-html :as html]))
(def *webdir* (str (pwd) "/src/template/"))
(defn render [t]
(apply str t))
(def render-to-response
(comp response render))
(defn redirectToTemplates
[url]
(let [webdata-item (get webdata url)]
(render-to-response
(if (= url "/index.html") (index webdata-item) (detail webdata-item)))))
(def routes
(app
[""] (fn [req]
(let [url (req :uri)] (redirectToTemplates "/index.html")))
[*] (fn [req]
(let [url (req :uri)] (redirectToTemplates url))))) | null | https://raw.githubusercontent.com/maoo/segway/569306d306de1de743bcd3e68cf380c373bf1d22/src/segway/web.clj | clojure | (ns segway.web
(:use segway.data
segway.pages.home
segway.pages.detail
ring.util.response
[net.cgrand.moustache :only [app]]
[clojure.contrib.duck-streams :only [pwd]])
(:require [net.cgrand.enlive-html :as html]))
(def *webdir* (str (pwd) "/src/template/"))
(defn render [t]
(apply str t))
(def render-to-response
(comp response render))
(defn redirectToTemplates
[url]
(let [webdata-item (get webdata url)]
(render-to-response
(if (= url "/index.html") (index webdata-item) (detail webdata-item)))))
(def routes
(app
[""] (fn [req]
(let [url (req :uri)] (redirectToTemplates "/index.html")))
[*] (fn [req]
(let [url (req :uri)] (redirectToTemplates url))))) |
|
746e031f57fea4ddb7c3e6b6235a568a45ae391581e05c3589854d63cb4fa2a2 | jordanthayer/ocaml-search | rst_tree.ml | * , , Thayer Trees
(* Assumes fixed branching factor *)
type data = {
cost : float;
depth : int;
key : int;
}
type scale =
| Uniform
| Linear_Increase
| Linear_Decrease
type rst_tree = {
scale : scale;
max_edge_cost : float;
max_depth : int;
branch : int;
t : data Random_tree.tree;
}
(************************* Printing functions ********************************)
let data_to_string d =
Wrutils.str "depth: %i\t cost: %f\t key: %i\n" d.depth d.cost d.key
let print_tree t = Random_tree.print_tree data_to_string t.t
let print_key k = Printf.eprintf "%i" k
(*****************************************************************************)
let get_path branch =
(** Calculates the path from the root to the leaf node *)
let rec gp key = if key = 0 then [0]
else key::(gp ((key - 1) / branch)) in
gp
let path_to_string p =
let rec fn str p =
match p with
[] -> str
| hd::tl -> fn (str ^ (Wrutils.str ", %i" hd)) tl in
fn "" p
let calculate_key b parent offset =
* Calculates the rank of a current node as defined by the order in which it
would be touched in a breadth first traversal .
would be touched in a breadth first traversal. *)
$ b \cdot [ k_p \cdot \frac{1 - b^(d-1)}{1 - b ) + \frac{1 - b^d}{1 - b } + offset$
b * (parent.key - ((1 - (Math.int_exp b (parent.depth))) / ( 1 - b))) +
((1 - (Math.int_exp b (parent.depth + 1))) / (1 - b)) + offset
let size_of_tree t =
let v = (1 - (Math.int_exp t.branch (t.max_depth + 1))) / (1 - t.branch) in
Verb.pe Verb.debug "tree size: %i\n" v;
v
let uniform _ = (fun _ -> 1.)
let make_scaling slope mdepth =
assert (slope >= (-.mdepth /. 2.));
assert (slope <= (mdepth /. 2.));
let b = 1. -. (2. /. mdepth) *. slope in
(fun node -> (float_of_int node) *. slope +. b)
let make_scaling_fslope slope mdepth =
assert (slope >= -.1.);
assert (slope <= 1.);
make_scaling (slope *. mdepth /. 2.) mdepth
let scale_to_fun s =
match s with
| Uniform -> uniform
| Linear_Increase -> (make_scaling_fslope 1.)
| Linear_Decrease -> (make_scaling_fslope (-.1.))
let scale_to_string s =
match s with
| Uniform -> "uniform"
| Linear_Increase -> "linear_increasing"
| Linear_Decrease -> "linear_decreasing"
let string_to_scale str =
if str = (scale_to_string Uniform)
then Uniform
else (if str = (scale_to_string Linear_Increase)
then Linear_Increase
else (if str = (scale_to_string Linear_Decrease)
then Linear_Decrease
else failwith "str doesn't matcha model!"))
let make_expand ?(dfun = uniform) max_depth max_edge_cost
branch =
(** Geneartes the expand function used to *)
let scale = dfun (float_of_int max_depth) in
(fun node -> (* Random_tree.wrap_expand does the seed init for us! *)
let data = node.Random_tree.data in
if max_depth > data.depth
then (let nd = data.depth + 1 in
let next i = {depth = nd;
cost = (data.cost +.
(scale nd) *.
(Random.float max_edge_cost));
key = calculate_key branch data i; } in
Array.to_list (Array.init branch next))
else [])
let make_goal t =
(** generates a goal predicate for the given pearl tree *)
(fun node -> if node.Random_tree.data.depth == t.max_depth
then (Verb.pe Verb.debug "goal @ %i\n" node.Random_tree.data.key;
true)
else false)
let make_tree scale max_edge_cost branch depth seed =
(** generates a new pearl tree from a probability, maximum depth, and a seed.
seed comes last so that we can generate many similar trees from a
list or an array of seeds *)
{scale = scale;
max_edge_cost = max_edge_cost;
max_depth = depth;
branch = branch;
t = (Random_tree.make_tree seed {depth = 0; cost = 0.; key = 0}
(make_expand ~dfun:(scale_to_fun scale)
depth max_edge_cost branch));}
let wrap_random_tree_expand exp =
(** Takes the random tree expand function and manipulates it into something
that the searches are expecting *)
(fun n _ -> let children = exp n in
List.map (fun c -> c,c.Random_tree.data.cost) children)
(******************************** IO ****************************************)
let config_to_pairs t =
(** Converts an rst tree into a set of datafile pairs *)
[("model", "rst");
("scale", scale_to_string t.scale);
("branching factor", string_of_int t.branch);
("tree depth", string_of_int t.max_depth);
("maximum edge cost", string_of_float t.max_edge_cost);
("seed", string_of_int t.t.Random_tree.root.Random_tree.seed);]
let write_pairs t ch =
(** Writes an rst tree into the channel as a datfile *)
Datafile.write_header_pairs ch;
Datafile.write_pairs ch (config_to_pairs t);
Datafile.write_trailer_pairs ch
let read_instance file =
(** Reads a datafile repersenting an rst tree into memory *)
let df = Datafile.load file in
let mec = float_of_string (Datafile.get_val df "maximum edge cost")
and dep = int_of_string (Datafile.get_val df "tree depth")
and b = int_of_string (Datafile.get_val df "branching factor")
and seed = int_of_string (Datafile.get_val df "seed")
and scale = string_to_scale (Datafile.get_val df "scale") in
make_tree scale mec b dep seed
let read_instance_ch file_name ch =
(** Reads a datafile repersenting an rst tree into memory *)
let df = Datafile.read file_name ch in
let mec = float_of_string (Datafile.get_val df "maximum edge cost")
and dep = int_of_string (Datafile.get_val df "tree depth")
and b = int_of_string (Datafile.get_val df "branching factor")
and seed = int_of_string (Datafile.get_val df "seed")
and scale = string_to_scale (Datafile.get_val df "scale") in
make_tree scale mec b dep seed
let read_heuristic_values df =
let h_vals = Datafile.get_col df "cost to go" in
(fun n -> h_vals.(n.Random_tree.data.key))
let make_true_heuristic cache ch t =
(** Traverses the tree [t], calculating the true heuristic value of the nodes
in said tree. Requires the use of a post order traversal*)
let h_vals = Array.create (size_of_tree t) 0. in
Verb.pe Verb.debug "Allocating array of length %i\n" (Array.length h_vals);
let calc_true_h node =
let v =
if node.Random_tree.data.depth == t.max_depth
Leaves have heuristics value = 0
else (let children = t.t.Random_tree.expand node in
let heuristics = List.map
(fun c -> (* cost of getting to child + true h of child *)
Verb.pe Verb.debug "Getting h of %i\n" c.Random_tree.data.key;
(c.Random_tree.data.cost -.
node.Random_tree.data.cost) +.
(h_vals.(c.Random_tree.data.key))) children
in
List.fold_left (* get minimum value and pass back *)
(fun accum cur -> min accum cur) infinity heuristics) in
Verb.pe Verb.debug "Setting h of %i\n" node.Random_tree.data.key;
h_vals.(node.Random_tree.data.key) <- v
in
if cache
then (Random_tree.postorder_traversal calc_true_h t.t;
Datafile.write_header_pairs ch;
Datafile.write_colnames ch ["cost to go"];
Array.iter (fun v -> Printf.fprintf ch "%f\n" v) h_vals;
Datafile.write_pairs ch (config_to_pairs t);
Datafile.write_trailer_pairs ch;
flush ch);
(fun n -> h_vals.(n.Random_tree.data.key))
(***************************** Heuristics *********************************)
let get_true_heuristic cache data_root t =
let h_pairs = ("type", "heuristic")::(config_to_pairs t) in
let paths = Rdb.matching_paths data_root h_pairs in
match paths with
| [path] -> read_heuristic_values (Datafile.load path)
| _ -> (if cache
then (Verb.pe Verb.toplvl "Heuristic Not Cached, Recording\n";
Wrio.with_outfile (Rdb.path_for data_root h_pairs)
(fun ch -> make_true_heuristic cache ch t))
else make_true_heuristic cache stderr t)
let malte_roeger_h c cache data_root t =
let truth = get_true_heuristic cache data_root t in
(fun node -> Math.fmax 0. ((truth node) -. c))
let constant_percent_scale p cache data_root t =
assert (p <= 1.);
let truth = get_true_heuristic cache data_root t in
(fun node -> (truth node) *. p)
let random_percent_scale max_p cache data_root t =
assert (max_p <= 1.);
let truth = get_true_heuristic cache data_root t in
(fun node ->
Random.set_state !(Math.random_state_from node.Random_tree.seed);
(truth node) *. (Random.float max_p))
let distance t =
(fun n -> float_of_int (t.max_depth - n.Random_tree.data.depth))
let truth_scaling_error_decreasing _ cache data_root t =
let truth = get_true_heuristic cache data_root t in
(fun node ->
((float_of_int node.Random_tree.data.depth) /.
(float_of_int t.max_depth)) *. (truth node))
let truth_scaling_error_increasing _ cache data_root t =
let truth = get_true_heuristic cache data_root t in
(fun node ->
(1. -. ((float_of_int node.Random_tree.data.depth) /.
(float_of_int t.max_depth))) *. (truth node))
let constant_scaling_error_decreasing max_p_error cache data_root t =
assert (max_p_error <= 1.);
assert (max_p_error >= 0.);
let truth = get_true_heuristic cache data_root t in
(fun node ->
((float_of_int node.Random_tree.data.depth) /.
(float_of_int t.max_depth)) *. (truth node) *. max_p_error)
let constant_scaling_error_increasing max_p_error cache data_root t =
assert (max_p_error <= 1.);
assert (max_p_error >= 0.);
let truth = get_true_heuristic cache data_root t in
(fun node ->
(1. -. ((float_of_int node.Random_tree.data.depth) /.
(float_of_int t.max_depth))) *.
(truth node) *. max_p_error)
let random_scaling_error_decreasing max_p_error cache data_root t =
assert (max_p_error <= 1.);
assert (max_p_error >= 0.);
let truth = get_true_heuristic cache data_root t in
(fun node ->
Random.set_state !(Math.random_state_from node.Random_tree.seed);
((float_of_int node.Random_tree.data.depth) /.
(float_of_int t.max_depth)) *.
(truth node) *. (Random.float max_p_error))
let random_scaling_error_increasing max_p_error cache data_root t =
assert (max_p_error <= 1.);
assert (max_p_error >= 0.);
let truth = get_true_heuristic cache data_root t in
(fun node ->
Random.set_state !(Math.random_state_from node.Random_tree.seed);
(1. -. ((float_of_int node.Random_tree.data.depth) /.
(float_of_int t.max_depth))) *.
(truth node) *. (Random.float max_p_error))
let string_to_heuristic ?(opt_arg = 0.) str =
match str with
| "truth" -> get_true_heuristic
| "helmert" -> malte_roeger_h opt_arg
| "constant_percent" -> constant_percent_scale opt_arg
| "random_percent" -> random_percent_scale opt_arg
| "truth_increase" -> truth_scaling_error_increasing opt_arg
| "truth_decrease" -> truth_scaling_error_decreasing opt_arg
| "constant_increase" -> constant_scaling_error_increasing opt_arg
| "constant_decrease" -> constant_scaling_error_decreasing opt_arg
| "random_increase" -> random_scaling_error_increasing opt_arg
| "random_decrease" -> random_scaling_error_decreasing opt_arg
| _ -> failwith (Wrutils.str "%s not recognized!" str)
(***************************** Interfaces *********************************)
let alt_col_name = "solution_id"
let output_header () = Datafile.write_alt_colnames stdout alt_col_name
["id";"quality";]
let make_interface ?(cache = false) ?(h = get_true_heuristic)
data_root t limit =
output_header();
let hfun = (h cache data_root t)
and dfun = (distance t) in
Search_interface.make
~h:hfun
~d:dfun
~hd:(fun n -> hfun n, dfun n)
~rev_hd:(fun n -> n.Random_tree.data.cost, float_of_int n.Random_tree.data.depth)
~domain_expand:(wrap_random_tree_expand t.t.Random_tree.expand)
~key:(fun n -> n.Random_tree.data.key)
~key_print:string_of_int
~goal_p:(make_goal t)
~get_sol_length:(fun _ -> -1)
~halt_on:limit
~equals:(=)
Search_interface.Synthetic
t.t.Random_tree.root
(fun _ _ -> false)
(fun sol_info ->
match sol_info.Limit.incumbent with
Limit.Nothing -> ()
| Limit.Incumbent (q,node) ->
Datafile.write_alt_row_prefix stdout alt_col_name;
Verb.pr Verb.always "%i\t%f\n" node.Random_tree.data.key q)
eof
| null | https://raw.githubusercontent.com/jordanthayer/ocaml-search/57cfc85417aa97ee5d8fbcdb84c333aae148175f/synthetic_graph/rst_tree.ml | ocaml | Assumes fixed branching factor
************************ Printing functions *******************************
***************************************************************************
* Calculates the path from the root to the leaf node
* Geneartes the expand function used to
Random_tree.wrap_expand does the seed init for us!
* generates a goal predicate for the given pearl tree
* generates a new pearl tree from a probability, maximum depth, and a seed.
seed comes last so that we can generate many similar trees from a
list or an array of seeds
* Takes the random tree expand function and manipulates it into something
that the searches are expecting
******************************* IO ***************************************
* Converts an rst tree into a set of datafile pairs
* Writes an rst tree into the channel as a datfile
* Reads a datafile repersenting an rst tree into memory
* Reads a datafile repersenting an rst tree into memory
* Traverses the tree [t], calculating the true heuristic value of the nodes
in said tree. Requires the use of a post order traversal
cost of getting to child + true h of child
get minimum value and pass back
**************************** Heuristics ********************************
**************************** Interfaces ******************************** | * , , Thayer Trees
type data = {
cost : float;
depth : int;
key : int;
}
type scale =
| Uniform
| Linear_Increase
| Linear_Decrease
type rst_tree = {
scale : scale;
max_edge_cost : float;
max_depth : int;
branch : int;
t : data Random_tree.tree;
}
let data_to_string d =
Wrutils.str "depth: %i\t cost: %f\t key: %i\n" d.depth d.cost d.key
let print_tree t = Random_tree.print_tree data_to_string t.t
let print_key k = Printf.eprintf "%i" k
let get_path branch =
let rec gp key = if key = 0 then [0]
else key::(gp ((key - 1) / branch)) in
gp
let path_to_string p =
let rec fn str p =
match p with
[] -> str
| hd::tl -> fn (str ^ (Wrutils.str ", %i" hd)) tl in
fn "" p
let calculate_key b parent offset =
* Calculates the rank of a current node as defined by the order in which it
would be touched in a breadth first traversal .
would be touched in a breadth first traversal. *)
$ b \cdot [ k_p \cdot \frac{1 - b^(d-1)}{1 - b ) + \frac{1 - b^d}{1 - b } + offset$
b * (parent.key - ((1 - (Math.int_exp b (parent.depth))) / ( 1 - b))) +
((1 - (Math.int_exp b (parent.depth + 1))) / (1 - b)) + offset
let size_of_tree t =
let v = (1 - (Math.int_exp t.branch (t.max_depth + 1))) / (1 - t.branch) in
Verb.pe Verb.debug "tree size: %i\n" v;
v
let uniform _ = (fun _ -> 1.)
let make_scaling slope mdepth =
assert (slope >= (-.mdepth /. 2.));
assert (slope <= (mdepth /. 2.));
let b = 1. -. (2. /. mdepth) *. slope in
(fun node -> (float_of_int node) *. slope +. b)
let make_scaling_fslope slope mdepth =
assert (slope >= -.1.);
assert (slope <= 1.);
make_scaling (slope *. mdepth /. 2.) mdepth
let scale_to_fun s =
match s with
| Uniform -> uniform
| Linear_Increase -> (make_scaling_fslope 1.)
| Linear_Decrease -> (make_scaling_fslope (-.1.))
let scale_to_string s =
match s with
| Uniform -> "uniform"
| Linear_Increase -> "linear_increasing"
| Linear_Decrease -> "linear_decreasing"
let string_to_scale str =
if str = (scale_to_string Uniform)
then Uniform
else (if str = (scale_to_string Linear_Increase)
then Linear_Increase
else (if str = (scale_to_string Linear_Decrease)
then Linear_Decrease
else failwith "str doesn't matcha model!"))
let make_expand ?(dfun = uniform) max_depth max_edge_cost
branch =
let scale = dfun (float_of_int max_depth) in
let data = node.Random_tree.data in
if max_depth > data.depth
then (let nd = data.depth + 1 in
let next i = {depth = nd;
cost = (data.cost +.
(scale nd) *.
(Random.float max_edge_cost));
key = calculate_key branch data i; } in
Array.to_list (Array.init branch next))
else [])
let make_goal t =
(fun node -> if node.Random_tree.data.depth == t.max_depth
then (Verb.pe Verb.debug "goal @ %i\n" node.Random_tree.data.key;
true)
else false)
let make_tree scale max_edge_cost branch depth seed =
{scale = scale;
max_edge_cost = max_edge_cost;
max_depth = depth;
branch = branch;
t = (Random_tree.make_tree seed {depth = 0; cost = 0.; key = 0}
(make_expand ~dfun:(scale_to_fun scale)
depth max_edge_cost branch));}
let wrap_random_tree_expand exp =
(fun n _ -> let children = exp n in
List.map (fun c -> c,c.Random_tree.data.cost) children)
let config_to_pairs t =
[("model", "rst");
("scale", scale_to_string t.scale);
("branching factor", string_of_int t.branch);
("tree depth", string_of_int t.max_depth);
("maximum edge cost", string_of_float t.max_edge_cost);
("seed", string_of_int t.t.Random_tree.root.Random_tree.seed);]
let write_pairs t ch =
Datafile.write_header_pairs ch;
Datafile.write_pairs ch (config_to_pairs t);
Datafile.write_trailer_pairs ch
let read_instance file =
let df = Datafile.load file in
let mec = float_of_string (Datafile.get_val df "maximum edge cost")
and dep = int_of_string (Datafile.get_val df "tree depth")
and b = int_of_string (Datafile.get_val df "branching factor")
and seed = int_of_string (Datafile.get_val df "seed")
and scale = string_to_scale (Datafile.get_val df "scale") in
make_tree scale mec b dep seed
let read_instance_ch file_name ch =
let df = Datafile.read file_name ch in
let mec = float_of_string (Datafile.get_val df "maximum edge cost")
and dep = int_of_string (Datafile.get_val df "tree depth")
and b = int_of_string (Datafile.get_val df "branching factor")
and seed = int_of_string (Datafile.get_val df "seed")
and scale = string_to_scale (Datafile.get_val df "scale") in
make_tree scale mec b dep seed
let read_heuristic_values df =
let h_vals = Datafile.get_col df "cost to go" in
(fun n -> h_vals.(n.Random_tree.data.key))
let make_true_heuristic cache ch t =
let h_vals = Array.create (size_of_tree t) 0. in
Verb.pe Verb.debug "Allocating array of length %i\n" (Array.length h_vals);
let calc_true_h node =
let v =
if node.Random_tree.data.depth == t.max_depth
Leaves have heuristics value = 0
else (let children = t.t.Random_tree.expand node in
let heuristics = List.map
Verb.pe Verb.debug "Getting h of %i\n" c.Random_tree.data.key;
(c.Random_tree.data.cost -.
node.Random_tree.data.cost) +.
(h_vals.(c.Random_tree.data.key))) children
in
(fun accum cur -> min accum cur) infinity heuristics) in
Verb.pe Verb.debug "Setting h of %i\n" node.Random_tree.data.key;
h_vals.(node.Random_tree.data.key) <- v
in
if cache
then (Random_tree.postorder_traversal calc_true_h t.t;
Datafile.write_header_pairs ch;
Datafile.write_colnames ch ["cost to go"];
Array.iter (fun v -> Printf.fprintf ch "%f\n" v) h_vals;
Datafile.write_pairs ch (config_to_pairs t);
Datafile.write_trailer_pairs ch;
flush ch);
(fun n -> h_vals.(n.Random_tree.data.key))
let get_true_heuristic cache data_root t =
let h_pairs = ("type", "heuristic")::(config_to_pairs t) in
let paths = Rdb.matching_paths data_root h_pairs in
match paths with
| [path] -> read_heuristic_values (Datafile.load path)
| _ -> (if cache
then (Verb.pe Verb.toplvl "Heuristic Not Cached, Recording\n";
Wrio.with_outfile (Rdb.path_for data_root h_pairs)
(fun ch -> make_true_heuristic cache ch t))
else make_true_heuristic cache stderr t)
let malte_roeger_h c cache data_root t =
let truth = get_true_heuristic cache data_root t in
(fun node -> Math.fmax 0. ((truth node) -. c))
let constant_percent_scale p cache data_root t =
assert (p <= 1.);
let truth = get_true_heuristic cache data_root t in
(fun node -> (truth node) *. p)
let random_percent_scale max_p cache data_root t =
assert (max_p <= 1.);
let truth = get_true_heuristic cache data_root t in
(fun node ->
Random.set_state !(Math.random_state_from node.Random_tree.seed);
(truth node) *. (Random.float max_p))
let distance t =
(fun n -> float_of_int (t.max_depth - n.Random_tree.data.depth))
let truth_scaling_error_decreasing _ cache data_root t =
let truth = get_true_heuristic cache data_root t in
(fun node ->
((float_of_int node.Random_tree.data.depth) /.
(float_of_int t.max_depth)) *. (truth node))
let truth_scaling_error_increasing _ cache data_root t =
let truth = get_true_heuristic cache data_root t in
(fun node ->
(1. -. ((float_of_int node.Random_tree.data.depth) /.
(float_of_int t.max_depth))) *. (truth node))
let constant_scaling_error_decreasing max_p_error cache data_root t =
assert (max_p_error <= 1.);
assert (max_p_error >= 0.);
let truth = get_true_heuristic cache data_root t in
(fun node ->
((float_of_int node.Random_tree.data.depth) /.
(float_of_int t.max_depth)) *. (truth node) *. max_p_error)
let constant_scaling_error_increasing max_p_error cache data_root t =
assert (max_p_error <= 1.);
assert (max_p_error >= 0.);
let truth = get_true_heuristic cache data_root t in
(fun node ->
(1. -. ((float_of_int node.Random_tree.data.depth) /.
(float_of_int t.max_depth))) *.
(truth node) *. max_p_error)
let random_scaling_error_decreasing max_p_error cache data_root t =
assert (max_p_error <= 1.);
assert (max_p_error >= 0.);
let truth = get_true_heuristic cache data_root t in
(fun node ->
Random.set_state !(Math.random_state_from node.Random_tree.seed);
((float_of_int node.Random_tree.data.depth) /.
(float_of_int t.max_depth)) *.
(truth node) *. (Random.float max_p_error))
let random_scaling_error_increasing max_p_error cache data_root t =
assert (max_p_error <= 1.);
assert (max_p_error >= 0.);
let truth = get_true_heuristic cache data_root t in
(fun node ->
Random.set_state !(Math.random_state_from node.Random_tree.seed);
(1. -. ((float_of_int node.Random_tree.data.depth) /.
(float_of_int t.max_depth))) *.
(truth node) *. (Random.float max_p_error))
let string_to_heuristic ?(opt_arg = 0.) str =
match str with
| "truth" -> get_true_heuristic
| "helmert" -> malte_roeger_h opt_arg
| "constant_percent" -> constant_percent_scale opt_arg
| "random_percent" -> random_percent_scale opt_arg
| "truth_increase" -> truth_scaling_error_increasing opt_arg
| "truth_decrease" -> truth_scaling_error_decreasing opt_arg
| "constant_increase" -> constant_scaling_error_increasing opt_arg
| "constant_decrease" -> constant_scaling_error_decreasing opt_arg
| "random_increase" -> random_scaling_error_increasing opt_arg
| "random_decrease" -> random_scaling_error_decreasing opt_arg
| _ -> failwith (Wrutils.str "%s not recognized!" str)
let alt_col_name = "solution_id"
let output_header () = Datafile.write_alt_colnames stdout alt_col_name
["id";"quality";]
(* [make_interface ?cache ?h data_root t limit] assembles the
   [Search_interface.Synthetic] search interface for random tree [t].
   [h] is a heuristic constructor (default: the true heuristic); it is
   applied to [cache], [data_root] and [t] to obtain the per-node
   heuristic.  [limit] supplies the halting criteria.  Side effect:
   writes the solution datafile header to stdout. *)
let make_interface ?(cache = false) ?(h = get_true_heuristic)
    data_root t limit =
  output_header();
  let hfun = (h cache data_root t)
  and dfun = (distance t) in
  Search_interface.make
    ~h:hfun
    ~d:dfun
    (* hd pairs the heuristic and distance estimates for one node. *)
    ~hd:(fun n -> hfun n, dfun n)
    (* rev_hd: cost and depth accumulated so far at this node. *)
    ~rev_hd:(fun n -> n.Random_tree.data.cost, float_of_int n.Random_tree.data.depth)
    ~domain_expand:(wrap_random_tree_expand t.t.Random_tree.expand)
    ~key:(fun n -> n.Random_tree.data.key)
    ~key_print:string_of_int
    ~goal_p:(make_goal t)
    (* Solution length is not tracked for synthetic trees. *)
    ~get_sol_length:(fun _ -> -1)
    ~halt_on:limit
    ~equals:(=)
    Search_interface.Synthetic
    t.t.Random_tree.root
    (fun _ _ -> false)
    (* When an incumbent solution exists, log its key and quality as an
       alternate datafile row. *)
    (fun sol_info ->
       match sol_info.Limit.incumbent with
           Limit.Nothing -> ()
         | Limit.Incumbent (q,node) ->
             Datafile.write_alt_row_prefix stdout alt_col_name;
             Verb.pr Verb.always "%i\t%f\n" node.Random_tree.data.key q)
eof
|
05efdca7d78dc568fe19806d57f28261b250eb93a8d63e43025ca63b42249f9c | twosigma/Cook | prometheus_metrics.clj | ;;
;; Copyright (c) Two Sigma Open Source, LLC
;;
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;;  http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
;;
;; Declares prometheus metrics for cook scheduler.
(ns cook.prometheus-metrics
(:require [iapetos.collector.jvm :as jvm]
[iapetos.collector.ring :as ring]
[iapetos.core :as prometheus]
[iapetos.export :as prometheus-export]
[mount.core :as mount]))
;; Quantiles are specified as a map of quantile to error margin.
;; Used as the default :quantiles option for every summary metric
;; registered in create-registry below.
(def default-summary-quantiles {0.25 0.01 0.5 0.01 0.75 0.01 0.9 0.005 0.95 0.005 0.98 0.001 0.99 0.001 0.999 0.0001})
;; We define all the metric names here to get IDE support and avoid the chance of runtime
;; errors due to misspelled metric names.
;; We are standardizing the metric format to be :cook/<component>-<metric-name>-<unit>
;; Scheduler metrics
;; Match/rank cycle and offer-handling latencies (summaries, seconds).
(def scheduler-rank-cycle-duration :cook/scheduler-rank-cycle-duration-seconds)
(def scheduler-match-cycle-duration :cook/scheduler-match-cycle-duration-seconds)
(def scheduler-generate-user-usage-map-duration :cook/scheduler-generate-user-usage-map-duration-seconds)
(def scheduler-handle-resource-offers-total-duration :cook/scheduler-handle-resource-offers-total-duration-seconds)
;; NOTE(review): keyword below lacks the "-seconds" unit suffix the other
;; duration metrics carry — confirm whether that is intentional before
;; renaming (changing it would rename the emitted metric).
(def scheduler-pool-handler-pending-to-considerable-duration :cook/scheduler-pool-handler-pending-to-considerable-duration)
(def scheduler-fenzo-schedule-once-duration :cook/scheduler-fenzo-schedule-once-duration-seconds)
(def scheduler-handle-resource-offers-match-duration :cook/scheduler-handle-resource-offers-match-duration-seconds)
(def scheduler-handle-resource-offers-matches-to-job-uuids-duration :cook/scheduler-handle-resource-offers-matches-to-job-uuids-duration-seconds)
(def scheduler-launch-all-matched-tasks-total-duration :cook/scheduler-launch-all-matched-tasks-total-duration-seconds)
(def scheduler-launch-all-matched-tasks-transact-duration :cook/scheduler-launch-all-matched-tasks-transact-duration-seconds)
(def scheduler-launch-all-matched-tasks-submit-duration :cook/scheduler-launch-all-matched-tasks-submit-duration-seconds)
(def scheduler-trigger-autoscaling-duration :cook/scheduler-trigger-autoscaling-duration-seconds)
(def scheduler-schedule-jobs-on-kubernetes-duration :cook/scheduler-schedule-jobs-on-kubernetes-duration-seconds)
(def scheduler-distribute-jobs-for-kubernetes-duration :cook/scheduler-distribute-jobs-for-kubernetes-duration-seconds)
(def scheduler-kill-cancelled-tasks-duration :cook/scheduler-kill-cancelled-tasks-duration-seconds)
(def scheduler-sort-jobs-hierarchy-duration :cook/scheduler-sort-jobs-hierarchy-duration-seconds)
(def scheduler-filter-offensive-jobs-duration :cook/scheduler-filter-offensive-jobs-duration-seconds)
;; NOTE(review): "duaration" is misspelled in both the var and the metric
;; name; renaming would break callers and dashboards, so fix only in a
;; coordinated change.
(def scheduler-handle-status-update-duaration :cook/scheduler-handle-status-update-duaration-seconds)
(def scheduler-handle-framework-message-duration :cook/scheduler-handle-framework-message-duration-seconds)
;; Match-cycle counters and gauges.
(def scheduler-jobs-launched :cook/scheduler-jobs-launched-total)
(def scheduler-match-cycle-jobs-count :cook/scheduler-match-cycle-jobs-count)
(def scheduler-match-cycle-matched-percent :cook/scheduler-match-cycle-matched-percent)
(def scheduler-match-cycle-head-was-matched :cook/scheduler-match-cycle-head-was-matched)
(def scheduler-match-cycle-queue-was-full :cook/scheduler-match-cycle-queue-was-full)
(def scheduler-match-cycle-all-matched :cook/scheduler-match-cycle-all-matched)
;; NOTE(review): var says "dry" but the metric name says "dru" (dominant
;; resource usage) — the metric name appears to be the intended spelling.
(def init-user-to-dry-divisors-duration :cook/scheduler-init-user-to-dru-divisors-duration-seconds)
(def generate-sorted-task-scored-task-pairs-duration :cook/scheduler-generate-sorted-task-scored-task-duration-seconds)
(def get-shares-duration :cook/scheduler-get-shares-duration-seconds)
(def create-user-to-share-fn-duration :cook/scheduler-create-user-to-share-fn-duration-seconds)
(def task-failure-reasons :cook/scheduler-task-failures-by-reason)
(def iterations-at-fenzo-floor :cook/scheduler-iterations-at-fenzo-floor-count)
(def in-order-queue-count :cook/scheduler-in-order-queue-count)
(def task-times-by-status :cook/scheduler-task-runtimes-by-status)
(def number-offers-matched :cook/scheduler-number-offers-matched-distribution)
(def fraction-unmatched-jobs :cook/scheduler-fraction-unmatched-jobs)
(def offer-size-by-resource :cook/scheduler-offer-size-by-resource)
(def task-completion-rate :cook/scheduler-task-completion-rate)
(def task-completion-rate-by-resource :cook/scheduler-task-completion-rate-by-resource)
;; Datomic transaction-report-queue counters.
(def transact-report-queue-datoms :cook/scheduler-transact-report-queue-datoms-count)
(def transact-report-queue-update-job-state :cook/scheduler-transact-report-queue-update-job-state-count)
(def transact-report-queue-job-complete :cook/scheduler-transact-report-queue-job-complete-count)
(def transact-report-queue-tasks-killed :cook/scheduler-transact-report-queue-tasks-killed-count)
(def scheduler-offers-declined :cook/scheduler-offers-declined-count)
(def scheduler-handle-resource-offer-errors :cook/scheduler-handle-resource-offer-errors-count)
(def scheduler-matched-resource-counts :cook/scheduler-matched-resource-count)
(def scheduler-matched-tasks :cook/scheduler-matched-tasks-count)
(def scheduler-abandon-and-reset :cook/scheduler-abandon-and-reset-count)
(def scheduler-rank-job-failures :cook/scheduler-rank-job-failures)
(def scheduler-offer-channel-full-error :cook/scheduler-offer-channel-full-error)
(def scheduler-schedule-jobs-event-duration :cook/scheduler-schedule-jobs-event-duration-seconds)
(def match-jobs-event-duration :cook/scheduler-match-jobs-event-duration-seconds)
(def in-order-queue-delay-duration :cook/scheduler-in-order-queue-delay-duration-seconds)
;; Monitor / user resource metrics
(def user-state-count :cook/scheduler-users-state-count)
;; For user resource metrics, we access them by resource type at runtime, so it is
;; easier to define them all in a map instead of separate vars.
(def resource-metric-map
  {:cpus :cook/scheduler-users-cpu-count
   :mem :cook/scheduler-users-memory-mebibytes
   :jobs :cook/scheduler-users-jobs-count
   :gpus :cook/scheduler-users-gpu-count
   :launch-rate-saved :cook/scheduler-users-launch-rate-saved
   :launch-rate-per-minute :cook/scheduler-users-launch-rate-per-minute})
;; Kubernetes metrics
;; Pod / node population gauges per compute cluster.
(def total-pods :cook/scheduler-kubernetes-pods-count)
(def max-pods :cook/scheduler-kubernetes-max-pods)
(def total-synthetic-pods :cook/scheduler-kubernetes-synthetic-pods-count)
;; NOTE(review): "kubernethes" is misspelled in the metric name below;
;; fixing it renames the emitted metric, so change only in a coordinated
;; dashboard/alert update.
(def max-synthetic-pods :cook/scheduler-kubernethes-max-synthetic-pods)
(def synthetic-pods-submitted :cook/scheduler-kubernetes-synthetic-pods-submitted-count)
(def total-nodes :cook/scheduler-kubernetes-nodes-count)
(def max-nodes :cook/scheduler-kubernetes-max-nodes)
;; Watch-stream health (gap between consecutive watch responses).
(def watch-gap :cook/scheduler-kubernetes-watch-gap-millis)
(def disconnected-watch-gap :cook/scheduler-kubernetes-disconnected-watch-gap-millis)
;; Kubernetes API error counters and operation latencies.
(def delete-pod-errors :cook/scheduler-kubernetes-delete-pod-errors-count)
(def delete-finalizer-errors :cook/scheduler-kubernetes-delete-finalizer-errors-count)
(def launch-pod-errors :cook/scheduler-launch-pod-errors-count)
(def list-pods-chunk-duration :cook/scheduler-kubernetes-list-pods-chunk-duration-seconds)
(def list-pods-duration :cook/scheduler-kubernetes-list-pods-duration-seconds)
(def list-nodes-duration :cook/scheduler-kubernetes-list-nodes-duration-seconds)
(def delete-pod-duration :cook/scheduler-kubernetes-delete-pod-duration-seconds)
(def delete-finalizer-duration :cook/scheduler-kubernetes-delete-finalizer-duration-seconds)
(def launch-pod-duration :cook/scheduler-kubernetes-launch-pod-duration-seconds)
(def launch-task-duration :cook/scheduler-kubernetes-launch-task-duration-seconds)
(def kill-task-duration :cook/scheduler-kubernetes-kill-task-duration-seconds)
(def compute-pending-offers-duration :cook/scheduler-kubernetes-compute-pending-offers-duration-seconds)
(def autoscale-duration :cook/scheduler-kubernetes-autoscale-duration-seconds)
(def launch-synthetic-tasks-duration :cook/scheduler-kubernetes-launch-synthetic-tasks-duration-seconds)
;; Controller / pod-event processing metrics.
(def pods-processed-unforced :cook/scheduler-kubernetes-pods-processed-unforced-count)
(def process-lock-duration :cook/scheduler-kubernetes-process-lock-duration-seconds)
(def process-lock-acquire-duration :cook/scheduler-kubernetes-process-lock-acquire-duration-seconds)
(def controller-process-duration :cook/scheduler-kubernetes-controller-process-duration-seconds)
(def handle-pod-update-duration :cook/scheduler-kubernetes-handle-pod-update-duration-seconds)
(def handle-pod-deletion-duration :cook/scheduler-kubernetes-handle-pod-deletion-duration-seconds)
(def update-cook-expected-state-duration :cook/scheduler-kubernetes-update-cook-expected-state-duration-seconds)
(def scan-process-duration :cook/scheduler-kubernetes-scan-process-pod-duration-seconds)
(def pod-waiting-duration :cook/scheduler-kubernetes-pod-duration-until-waiting-seconds)
(def pod-running-duration :cook/scheduler-kubernetes-pod-duration-until-running-seconds)
(def offer-match-timer :cook/scheduler-kubernetes-offer-match-duration-seconds)
(def resource-capacity :cook/scheduler-kubernetes-resource-capacity)
(def resource-consumption :cook/scheduler-kubernetes-resource-consumption)
;; Mesos metrics
(def mesos-heartbeats :cook/scheduler-mesos-heartbeats-count)
(def mesos-heartbeat-timeouts :cook/scheduler-mesos-heartbeat-timeouts-count)
(def mesos-datomic-sync-duration :cook/scheduler-mesos-heartbeat-datomic-sync-duration-seconds)
(def mesos-offer-chan-depth :cook/scheduler-mesos-offer-chan-depth)
(def mesos-error :cook/scheduler-mesos-error-count)
(def mesos-handle-framework-message :cook/scheduler-mesos-handle-framework-message)
(def mesos-handle-status-update :cook/scheduler-mesos-handle-status-update)
(def mesos-tasks-killed-in-status-update :cook/scheduler-mesos-tasks-killed-in-status-update-count)
;; Field-aggregator / updater pipeline metrics.
(def mesos-aggregator-pending-count :cook/scheduler-mesos-aggregator-pending-count)
(def mesos-pending-sync-host-count :cook/scheduler-mesos-pending-sync-host-count)
(def mesos-updater-unprocessed-count :cook/scheduler-mesos-field-updater-unprocessed-count)
(def mesos-aggregator-message :cook/scheduler-mesos-field-aggregator-message-count)
(def mesos-updater-publish-duration :cook/scheduler-mesos-field-updater-publish-duration-seconds)
(def mesos-updater-transact-duration :cook/scheduler-mesos-field-updater-transact-duration-seconds)
(def mesos-updater-pending-entries :cook/scheduler-mesos-field-updater-pending-entries-distribution)
(def mesos-updater-unprocessed-entries :cook/scheduler-mesos-unprocessed-entries-distribution)
;; API metrics
;; API metrics: request parameters, response sizes, endpoint latencies.
(def jobs-created :cook/api-jobs-created)
(def list-request-param-time-range :cook/api-list-request-param-time-range-millis)
(def list-request-param-limit :cook/api-list-request-param-limit-number)
(def list-response-job-count :cook/api-list-request-job-count)
(def fetch-instance-map-duration :cook/api-internal-fetch-instance-map-duration-seconds)
(def fetch-job-map-duration :cook/api-internal-fetch-job-map-duration-seconds)
(def fetch-jobs-duration :cook/api-internal-fetch-jobs-duration-seconds)
(def list-jobs-duration :cook/api-internal-list-jobs-duration-seconds)
(def endpoint-duration :cook/api-endpoint-duration-seconds)
;; Tools metrics
;; NOTE(review): the two var names below mention "and-state(s)" but the
;; metric names do not quite match ("by-user" vs "by-user-and-states") —
;; confirm the intended names before any rename.
(def get-jobs-by-user-and-state-duration :cook/tools-get-jobs-by-user-duration-seconds)
(def get-jobs-by-user-and-state-total-duration :cook/tools-get-jobs-by-user-and-states-duration-seconds)
(def get-all-running-tasks-duration :cook/tools-get-all-running-tasks-duration-seconds)
(def get-user-running-jobs-duration :cook/tools-get-user-running-jobs-duration-seconds)
(def get-all-running-jobs-duration :cook/tools-get-all-running-jobs-duration-seconds)
;; Plugin metrics
(def pool-mover-jobs-updated :cook/scheduler-plugins-pool-mover-jobs-updated-count)
;; Rebalancer metrics
;; NOTE(review): "premeption" is misspelled in the metric name below;
;; renaming changes the emitted metric, so fix only in a coordinated change.
(def compute-preemption-decision-duration :cook/rebalancer-compute-premeption-decision-duration-seconds)
(def rebalance-duration :cook/rebalancer-rebalance-duration-seconds)
(def pending-job-drus :cook/rebalancer-pending-job-drus)
(def nearest-task-drus :cook/rebalancer-nearest-task-drus)
(def positive-dru-diffs :cook/rebalancer-positive-dru-diffs)
(def preemption-counts-for-host :cook/rebalancer-preemption-counts-for-host)
(def task-counts-to-preempt :cook/rebalancer-task-counts-to-preempt)
(def job-counts-to-run :cook/rebalancer-job-counts-to-run)
;; Progress metrics
(def progress-aggregator-drop-count :cook/progress-aggregator-drop-count)
(def progress-aggregator-pending-states-count :cook/progress-aggregator-pending-states-count)
(def progress-updater-pending-states :cook/progress-updater-pending-states)
(def progress-aggregator-message-count :cook/progress-aggregator-message-count)
(def progress-updater-publish-duration :cook/progress-updater-publish-duration-seconds)
(def progress-updater-transact-duration :cook/progress-updater-transact-duration-seconds)
;; Other metrics
(def is-leader :cook/scheduler-is-leader)
(def update-queue-lengths-duration :cook/scheduler-update-queue-lengths-duration-seconds)
(def acquire-kill-lock-for-kill-duration :cook/scheduler-acquire-kill-lock-for-kill-duration-seconds)
(def get-pending-jobs-duration :cook/scheduler-get-pending-jobs-duration-seconds)
(defn create-registry
[]
(-> (prometheus/collector-registry)
Initialize default JVM metrics
(jvm/initialize)
Initialize ring metrics
(ring/initialize)
(prometheus/register
;; Scheduler metrics ---------------------------------------------------------------------------------------------
;; Note that we choose to use a summary instead of a histogram for the latency metrics because we only have
one scheduler process running per cluster , so we do not need to aggregate data from multiple sources .
;; The quantiles are specified as a map of quantile to error margin.
(prometheus/summary scheduler-rank-cycle-duration
{:description "Distribution of rank cycle latency"
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-match-cycle-duration
{:description "Distribution of overall match cycle latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-generate-user-usage-map-duration
{:description "Distribution of generating user->usage map latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-handle-resource-offers-total-duration
{:description "Distribution of total handle-resource-offers! duration"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-pool-handler-pending-to-considerable-duration
{:description "Distribution of filtering pending to considerable jobs duration"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-fenzo-schedule-once-duration
{:description "Distribution of fenzo schedule once latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-handle-resource-offers-match-duration
{:description "Distribution of matching resource offers to jobs latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-handle-resource-offers-matches-to-job-uuids-duration
{:description "Distribution of generating matches->job-uuids map latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-launch-all-matched-tasks-total-duration
{:description "Distribution of total launch all matched tasks latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-launch-all-matched-tasks-transact-duration
{:description "Distribution of launch all matched tasks--transact in datomic latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-launch-all-matched-tasks-submit-duration
{:description "Distribution of launch all matched tasks--submit to compute cluster latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-trigger-autoscaling-duration
{:description "Distribution of trigger autoscaling latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-schedule-jobs-on-kubernetes-duration
{:description "Distribution of scheduling jobs on Kubernetes latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-distribute-jobs-for-kubernetes-duration
{:description "Distribution of distributing jobs for Kubernetes latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-kill-cancelled-tasks-duration
{:description "Distribution of kill cancelled tasks latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-sort-jobs-hierarchy-duration
{:description "Distribution of sorting jobs by DRU latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-filter-offensive-jobs-duration
{:description "Distribution of filter offensive jobs latency"
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-handle-status-update-duaration
{:description "Distribution of handle compute cluster status update latency"
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-handle-framework-message-duration
{:description "Distribution of handle framework message latency"
:quantiles default-summary-quantiles})
(prometheus/counter scheduler-jobs-launched
{:description "Total count of jobs launched per pool and compute cluster"
:labels [:pool :compute-cluster]})
(prometheus/summary init-user-to-dry-divisors-duration
{:description "Latency distribution of initializing the user to dru divisors map"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary generate-sorted-task-scored-task-pairs-duration
{:description "Latency distribution of generating the sorted list of task and scored task pairs"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary get-shares-duration
{:description "Latency distribution of getting all users' share"
:quantiles default-summary-quantiles})
(prometheus/summary create-user-to-share-fn-duration
{:description "Latency distribution of creating the user-to-share function"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary task-times-by-status
{:description "Distribution of task runtime by status"
:labels [:status]
:quantiles default-summary-quantiles})
(prometheus/summary number-offers-matched
{:description "Distribution of number of offers matched"
:labels [:pool :compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary fraction-unmatched-jobs
{:description "Distribution of fraction of unmatched jobs"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary offer-size-by-resource
{:description "Distribution of offer size by resource type"
:labels [:pool :resource]
:quantiles default-summary-quantiles})
(prometheus/counter task-completion-rate
{:description "Total count of completed tasks per pool"
:labels [:pool :status]})
(prometheus/counter task-completion-rate-by-resource
{:description "Total count of completed resources per pool"
:labels [:pool :status :resource]})
(prometheus/counter transact-report-queue-datoms
{:description "Total count of report queue datoms"})
(prometheus/counter transact-report-queue-update-job-state
{:description "Total count of job state updates"})
(prometheus/counter transact-report-queue-job-complete
{:description "Total count of completed jobs"})
(prometheus/counter transact-report-queue-tasks-killed
{:description "Total count of tasks killed"})
(prometheus/counter scheduler-offers-declined
{:description "Total offers declined"
:labels [:compute-cluster]})
(prometheus/counter scheduler-matched-resource-counts
{:description "Total matched count per resource type"
:labels [:pool :resource]})
(prometheus/counter scheduler-matched-tasks
{:description "Total matched tasks"
:labels [:pool :compute-cluster]})
(prometheus/counter scheduler-handle-resource-offer-errors
{:descrpiption "Total count of errors encountered in handle-resource-offer!"
:labels [:pool]})
(prometheus/counter scheduler-abandon-and-reset
{:descrpiption "Total count of fenzo abandon-and-reset"
:labels [:pool]})
(prometheus/counter scheduler-rank-job-failures
{:descrpiption "Total count of rank job failures"})
(prometheus/counter scheduler-offer-channel-full-error
{:descrpiption "Total count of offer channel full failures"
:labels [:pool]})
(prometheus/summary scheduler-schedule-jobs-event-duration
{:description "Latency distribution of scheduling jobs in Kubernetes in the full Kenzo codepath"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary match-jobs-event-duration
{:description "Latency distribution of matching jobs in the full Fenzo codepath"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary in-order-queue-delay-duration
{:description "Latency distribution of processing in-order-queue tasks"
:quantiles default-summary-quantiles})
;; Match cycle metrics -------------------------------------------------------------------------------------------
(prometheus/gauge scheduler-match-cycle-jobs-count
{:description "Aggregate match cycle job counts stats"
:labels [:pool :status]})
(prometheus/gauge scheduler-match-cycle-matched-percent
{:description "Percent of jobs matched in last match cycle"
:labels [:pool]})
The following 1/0 metrics are useful for value map visualizations in Grafana
(prometheus/gauge scheduler-match-cycle-head-was-matched
{:description "1 if head was matched, 0 otherwise"
:labels [:pool]})
(prometheus/gauge scheduler-match-cycle-queue-was-full
{:description "1 if queue was full, 0 otherwise"
:labels [:pool]})
(prometheus/gauge scheduler-match-cycle-all-matched
{:description "1 if all jobs were matched, 0 otherwise"
:labels [:pool]})
(prometheus/summary task-failure-reasons
{:description "Distribution of task failures by reason"
:labels [:reason :resource]
:quantiles default-summary-quantiles})
(prometheus/gauge iterations-at-fenzo-floor
{:descriptiion "Current number of iterations at fenzo floor (i.e. 1 considerable job)"
:labels [:pool]})
(prometheus/gauge in-order-queue-count
{:description "Depth of queue for in-order processing"})
;; Resource usage stats ------------------------------------------------------------------------------------------
;; We set these up using a map so we can access them easily by resource type when we set the metric.
(prometheus/gauge (resource-metric-map :mem)
{:description "Current memory by state"
:labels [:pool :user :state]})
(prometheus/gauge (resource-metric-map :cpus)
{:description "Current cpu count by state"
:labels [:pool :user :state]})
(prometheus/gauge (resource-metric-map :gpus)
{:description "Current gpu count by state"
:labels [:pool :user :state]})
(prometheus/gauge (resource-metric-map :jobs)
{:description "Current jobs count by state"
:labels [:pool :user :state]})
(prometheus/gauge (resource-metric-map :launch-rate-saved)
{:description "Current launch-rate-saved count by state"
:labels [:pool :user :state]})
(prometheus/gauge (resource-metric-map :launch-rate-per-minute)
{:description "Current launch-rate-per-minute count by state"
:labels [:pool :user :state]})
;; Metrics for user resource allocation counts
(prometheus/gauge user-state-count
{:description "Current user count by state"
:labels [:pool :state]})
;; Kubernetes metrics --------------------------------------------------------------------------------------------
(prometheus/gauge total-pods
{:description "Total current number of pods per compute cluster"
:labels [:compute-cluster]})
(prometheus/gauge max-pods
{:description "Max number of pods per compute cluster"
:labels [:compute-cluster]})
(prometheus/gauge total-synthetic-pods
{:description "Total current number of synthetic pods per pool and compute cluster"
:labels [:pool :compute-cluster]})
(prometheus/gauge max-synthetic-pods
{:description "Max number of synthetic pods per pool and compute cluster"
:labels [:pool :compute-cluster]})
(prometheus/gauge synthetic-pods-submitted
{:description "Count of synthetic pods submitted in the last match cycle"
:labels [:compute-cluster :pool]})
(prometheus/gauge total-nodes
{:description "Total current number of nodes per compute cluster"
:labels [:compute-cluster]})
(prometheus/gauge max-nodes
{:description "Max number of nodes per compute cluster"
:labels [:compute-cluster]})
(prometheus/summary watch-gap
{:description "Latency distribution of the gap between last watch response and current response"
:labels [:compute-cluster :object]
:quantiles default-summary-quantiles})
(prometheus/summary disconnected-watch-gap
{:description "Latency distribution of the gap between last watch response and current response after reconnecting"
:labels [:compute-cluster :object]
:quantiles default-summary-quantiles})
(prometheus/counter delete-pod-errors
{:description "Total number of errors when deleting pods"
:labels [:compute-cluster]})
(prometheus/counter delete-finalizer-errors
{:description "Total number of errors when deleting pod finalizers"
:labels [:compute-cluster :type]})
(prometheus/counter launch-pod-errors
{:description "Total number of errors when launching pods"
:labels [:compute-cluster :bad-spec]})
(prometheus/summary list-pods-chunk-duration
{:description "Latency distribution of listing a chunk of pods"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary list-pods-duration
{:description "Latency distribution of listing all pods"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary list-nodes-duration
{:description "Latency distribution of listing all nodes"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary delete-pod-duration
{:description "Latency distribution of deleting a pod"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary delete-finalizer-duration
{:description "Latency distribution of deleting a pod's finalizer"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary launch-pod-duration
{:description "Latency distribution of launching a pod"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary launch-task-duration
{:description "Latency distribution of launching a task (more inclusive than launch-pod)"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary kill-task-duration
{:description "Latency distribution of killing a task (more inclusive than delete-pod)"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary compute-pending-offers-duration
{:description "Latency distribution of computing pending offers"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary autoscale-duration
{:description "Latency distribution of autoscaling"
:labels [:compute-cluster :pool]
:quantiles default-summary-quantiles})
(prometheus/summary launch-synthetic-tasks-duration
{:description "Latency distribution of launching synthetic tasks"
:labels [:compute-cluster :pool]
:quantiles default-summary-quantiles})
(prometheus/counter pods-processed-unforced
{:description "Count of processed pods"
:labels [:compute-cluster]})
(prometheus/summary process-lock-duration
{:description "Latency distribution of processing an event while holding the process lock"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary process-lock-acquire-duration
{:description "Latency distribution of acquiring the process lock"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary controller-process-duration
{:description "Latency distribution of processing a pod event"
:labels [:compute-cluster :doing-scan]
:quantiles default-summary-quantiles})
(prometheus/summary handle-pod-update-duration
{:description "Latency distribution of handling a pod update"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary handle-pod-deletion-duration
{:description "Latency distribution of handling a pod deletion"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary update-cook-expected-state-duration
{:description "Latency distribution of updating cook's expected state"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary scan-process-duration
{:description "Latency distribution of scanning for and processing a pod"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary pod-waiting-duration
{:description "Latency distribution of the time until a pod is waiting"
:labels [:compute-cluster :synthetic :kubernetes-scheduler-pod]
:quantiles default-summary-quantiles})
(prometheus/summary pod-running-duration
{:description "Latency distribution of the time until a pod is running"
:labels [:compute-cluster :synthetic :kubernetes-scheduler-pod]
:quantiles default-summary-quantiles})
(prometheus/summary offer-match-timer
{:description "Latency distribution of matching an offer"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/gauge resource-capacity
{:description "Total available capacity of the given resource per cluster and pool"
:labels [:compute-cluster :pool :resource :resource-subtype]})
(prometheus/gauge resource-consumption
{:description "Total consumption of the given resource per cluster"
:labels [:compute-cluster :resource :resource-subtype]})
Mesos metrics -------------------------------------------------------------------------------------------------
(prometheus/counter mesos-heartbeats
{:description "Count of mesos heartbeats"})
(prometheus/counter mesos-heartbeat-timeouts
{:description "Count of mesos heartbeat timeouts"})
(prometheus/summary mesos-datomic-sync-duration
{:description "Latency distribution of mesos datomic sync duration"
:quantiles default-summary-quantiles})
(prometheus/gauge mesos-offer-chan-depth
{:description "Depth of mesos offer channel"
:labels [:pool]})
(prometheus/counter mesos-error
{:description "Count of errors in mesos"})
(prometheus/counter mesos-handle-framework-message
{:description "Count of framework messages received in mesos"})
(prometheus/counter mesos-handle-status-update
{:description "Count of status updates received in mesos"})
(prometheus/counter mesos-tasks-killed-in-status-update
{:description "Count of tasks killed during status updates in mesos"})
(prometheus/gauge mesos-aggregator-pending-count
{:description "Count of pending entries in the aggregator"
:labels [:field-name]})
(prometheus/gauge mesos-pending-sync-host-count
{:description "Count of pending sync hosts"})
(prometheus/gauge mesos-updater-unprocessed-count
{:description "Count of unprocessed tasks in mesos"
:labels []})
(prometheus/summary mesos-updater-unprocessed-entries
{:description "Distribution of count of unprocessed entries"
:quantiles default-summary-quantiles})
(prometheus/summary mesos-updater-pending-entries
{:description "Distribution of count of pending entries"
:quantiles default-summary-quantiles})
(prometheus/counter mesos-aggregator-message
{:description "Count of messages received by the aggregator"
:labels [:field-name]})
(prometheus/summary mesos-updater-publish-duration
{:description "Latency distribution of mesos updater publish duration"
:labels [:field-name]
:quantiles default-summary-quantiles})
(prometheus/summary mesos-updater-transact-duration
{:description "Latency distribution of mesos updater transact duration"
:labels [:field-name]
:quantiles default-summary-quantiles})
;; API metrics ---------------------------------------------------------------------------------------------------
(prometheus/counter jobs-created
{:description "Total count of jobs created"
:labels [:pool]})
(prometheus/summary list-request-param-time-range
{:description "Distribution of time range specified in list endpoint requests"
:quantiles default-summary-quantiles})
(prometheus/summary list-request-param-limit
{:description "Distribution of instance count limit specified in list endpoint requests"
:quantiles default-summary-quantiles})
(prometheus/summary list-response-job-count
{:description "Distribution of instance count returned in list endpoint responses"
:quantiles default-summary-quantiles})
(prometheus/summary fetch-instance-map-duration
{:description "Latency distribution of converting the instance entity to a map for API responses"
:quantiles default-summary-quantiles})
(prometheus/summary fetch-job-map-duration
{:description "Latency distribution of converting the job entity to a map for API responses"
:quantiles default-summary-quantiles})
(prometheus/summary fetch-jobs-duration
{:description "Latency distribution of fetching jobs by user and state for API responses"
:quantiles default-summary-quantiles})
(prometheus/summary list-jobs-duration
{:description "Latency distribution of listing jobs for API responses"
:quantiles default-summary-quantiles})
(prometheus/summary endpoint-duration
{:description "Latency distribution of API endpoints"
:labels [:endpoint]
:quantiles default-summary-quantiles})
;; Tools metrics -------------------------------------------------------------------------------------------------
(prometheus/summary get-jobs-by-user-and-state-duration
{:description "Latency distribution of getting jobs by user for a particular state"
:labels [:state]
:quantiles default-summary-quantiles})
(prometheus/summary get-jobs-by-user-and-state-total-duration
{:description "Latency distribution of getting jobs by user for a list of states"
:quantiles default-summary-quantiles})
(prometheus/summary get-all-running-tasks-duration
{:description "Latency distribution of getting all running tasks"
:quantiles default-summary-quantiles})
(prometheus/summary get-user-running-jobs-duration
{:description "Latency distribution of getting running jobs for a particular user"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary get-all-running-jobs-duration
{:description "Latency distribution of getting all running jobs"
:quantiles default-summary-quantiles})
;; Plugins metrics -----------------------------------------------------------------------------------------------
(prometheus/counter pool-mover-jobs-updated
{:description "Total count of jobs moved to a different pool"})
;; Rebalancer metrics --------------------------------------------------------------------------------------------
(prometheus/summary compute-preemption-decision-duration
{:description "Latency distribution of computing preemption decision"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary rebalance-duration
{:description "Latency distribution of rebalancing"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary pending-job-drus
{:description "Distribution of pending jobs drus in the rebalancer"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary nearest-task-drus
{:description "Distribution of nearest task drus in the rebalancer"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary positive-dru-diffs
{:description "Distribution of positive dru diffs in the rebalancer"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary preemption-counts-for-host
{:description "Distribution of preemption counts per host in the rebalancer"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary task-counts-to-preempt
{:description "Distribution of number of tasks to preempt in the rebalancer"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary job-counts-to-run
{:description "Distribution of number of jobs to run in the rebalancer"
:labels [:pool]
:quantiles default-summary-quantiles})
;; Progress metrics ----------------------------------------------------------------------------------------------
(prometheus/counter progress-aggregator-drop-count
{:description "Total count of dropped progress messages"})
(prometheus/counter progress-aggregator-message-count
{:description "Total count of received progress messages"})
(prometheus/gauge progress-aggregator-pending-states-count
{:description "Total count of pending states"})
(prometheus/summary progress-updater-pending-states
{:description "Distribution of pending states count in the progress updater"
:quantiles default-summary-quantiles})
(prometheus/summary progress-updater-publish-duration
{:description "Latency distribution of the publish function in the progress updater"
:quantiles default-summary-quantiles})
(prometheus/summary progress-updater-transact-duration
{:description "Latency distribution of the transact function in the progress updater"
:quantiles default-summary-quantiles})
;; Other metrics -------------------------------------------------------------------------------------------------
(prometheus/gauge is-leader
{:description "1 if this host is the current leader, 0 otherwise"})
(prometheus/summary update-queue-lengths-duration
{:description "Latency distribution of updating queue lengths from the database"
:quantiles default-summary-quantiles})
(prometheus/summary acquire-kill-lock-for-kill-duration
{:description "Latency distribution of acquiring the kill lock for kill"
:quantiles default-summary-quantiles})
(prometheus/summary get-pending-jobs-duration
{:description "Latency distribution of getting all pending jobs"
:quantiles default-summary-quantiles}))))
;; A global registry for all metrics reported by Cook.
;; All metrics must be registered before they can be recorded.
;; Mount lifecycle state: the shared metrics registry is built once when the
;; system starts. Every macro below closes over this var, so all metrics in
;; the process are recorded into (and exported from) this single registry.
(mount/defstate registry :start (create-registry))
(defmacro value
  "Looks up the current value recorded for the metric identified by `name`,
  optionally narrowed to a specific set of label values."
  {:arglists '([name] [name labels])}
  ([metric]
   `(prometheus/value registry ~metric))
  ([metric label-values]
   `(prometheus/value registry ~metric ~label-values)))
(defmacro with-duration
  "Executes `body` and records its wall-clock duration in the named collector.
  For a collector that declares no labels, pass {} as the labels value."
  {:arglists '([name labels & body])}
  [metric label-values & forms]
  `(prometheus/with-duration (registry ~metric ~label-values) ~@forms))
(defmacro start-timer
  "Begins timing an operation against the given metric.
  Returns a zero-argument function; invoke it when the timed operation has
  finished to stop the timer and record the elapsed duration."
  {:arglists '([name] [name labels])}
  ([metric]
   `(prometheus/start-timer registry ~metric))
  ([metric label-values]
   `(prometheus/start-timer registry ~metric ~label-values)))
(defmacro set
  "Overwrites the given metric with `amount`.
  Note: intentionally shadows clojure.core/set within this namespace."
  {:arglists '([name amount] [name labels amount])}
  ([metric new-value]
   `(prometheus/set registry ~metric ~new-value))
  ([metric label-values new-value]
   `(prometheus/set registry ~metric ~label-values ~new-value)))
(defmacro inc
  "Bumps the given metric upward, by 1 or by an explicit amount.
  Note: intentionally shadows clojure.core/inc within this namespace."
  {:arglists '([name] [name labels] [name labels amount])}
  ([metric]
   `(prometheus/inc registry ~metric))
  ([metric label-values]
   `(prometheus/inc registry ~metric ~label-values))
  ([metric label-values delta]
   `(prometheus/inc registry ~metric ~label-values ~delta)))
(defmacro dec
  "Decrements the value of the given metric, by 1 or by an explicit amount.
  Note: intentionally shadows clojure.core/dec within this namespace."
  ;; Fixed :arglists to advertise the implemented 3-arity form, matching the
  ;; sibling `inc` macro; it was previously missing, so editors/doc tools
  ;; under-reported the supported call shapes.
  {:arglists '([name] [name labels] [name labels amount])}
  ([name]
   `(prometheus/dec registry ~name))
  ([name labels]
   `(prometheus/dec registry ~name ~labels))
  ([name labels amount]
   `(prometheus/dec registry ~name ~labels ~amount)))
(defmacro observe
  "Adds an observation of `amount` to the given metric
  (applicable to histograms and summaries)."
  {:arglists '([name amount] [name labels amount])}
  ([metric sample]
   `(prometheus/observe registry ~metric ~sample))
  ([metric label-values sample]
   `(prometheus/observe registry ~metric ~label-values ~sample)))
(defmacro wrap-ring-instrumentation
  "Instruments the supplied Ring handler so that per-request metrics are
  recorded into this namespace's registry."
  [ring-handler opts]
  `(ring/wrap-instrumentation ~ring-handler registry ~opts))
(defn export
  "Returns the current values of all registered metrics in plain text format."
  ;; Fixed: the docstring was previously placed AFTER the parameter vector,
  ;; which makes it a discarded body expression rather than real doc metadata.
  []
  (prometheus-export/text-format registry))
| null | https://raw.githubusercontent.com/twosigma/Cook/64fa4858c518c924b0b1bdc5cc40478f67743315/scheduler/src/cook/prometheus_metrics.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Declares prometheus metrics for cook scheduler.
Quantiles are specified as a map of quantile to error margin.
We define all the metric names here to get IDE support and avoid the chance of runtime
errors due to misspelled metric names.
Scheduler metrics
Monitor / user resource metrics
For user resource metrics, we access them by resource type at runtime, so it is
easier to define them all in a map instead of separate vars.
Kubernetes metrics
API metrics
Tools metrics
Plugin metrics
Rebalancer metrics
Progress metrics
Other metrics
Scheduler metrics ---------------------------------------------------------------------------------------------
Note that we choose to use a summary instead of a histogram for the latency metrics because we only have
The quantiles are specified as a map of quantile to error margin.
Match cycle metrics -------------------------------------------------------------------------------------------
Resource usage stats ------------------------------------------------------------------------------------------
We set these up using a map so we can access them easily by resource type when we set the metric.
Metrics for user resource allocation counts
Kubernetes metrics --------------------------------------------------------------------------------------------
API metrics ---------------------------------------------------------------------------------------------------
Tools metrics -------------------------------------------------------------------------------------------------
Plugins metrics -----------------------------------------------------------------------------------------------
Rebalancer metrics --------------------------------------------------------------------------------------------
Progress metrics ----------------------------------------------------------------------------------------------
Other metrics -------------------------------------------------------------------------------------------------
All metrics must be registered before they can be recorded. | Copyright ( c ) Two Sigma Open Source , LLC
distributed under the License is distributed on an " AS IS " BASIS ,
(ns cook.prometheus-metrics
(:require [iapetos.collector.jvm :as jvm]
[iapetos.collector.ring :as ring]
[iapetos.core :as prometheus]
[iapetos.export :as prometheus-export]
[mount.core :as mount]))
(def default-summary-quantiles {0.25 0.01 0.5 0.01 0.75 0.01 0.9 0.005 0.95 0.005 0.98 0.001 0.99 0.001 0.999 0.0001})
We are standardizing the metric format to be : >
(def scheduler-rank-cycle-duration :cook/scheduler-rank-cycle-duration-seconds)
(def scheduler-match-cycle-duration :cook/scheduler-match-cycle-duration-seconds)
(def scheduler-generate-user-usage-map-duration :cook/scheduler-generate-user-usage-map-duration-seconds)
(def scheduler-handle-resource-offers-total-duration :cook/scheduler-handle-resource-offers-total-duration-seconds)
(def scheduler-pool-handler-pending-to-considerable-duration :cook/scheduler-pool-handler-pending-to-considerable-duration)
(def scheduler-fenzo-schedule-once-duration :cook/scheduler-fenzo-schedule-once-duration-seconds)
(def scheduler-handle-resource-offers-match-duration :cook/scheduler-handle-resource-offers-match-duration-seconds)
(def scheduler-handle-resource-offers-matches-to-job-uuids-duration :cook/scheduler-handle-resource-offers-matches-to-job-uuids-duration-seconds)
(def scheduler-launch-all-matched-tasks-total-duration :cook/scheduler-launch-all-matched-tasks-total-duration-seconds)
(def scheduler-launch-all-matched-tasks-transact-duration :cook/scheduler-launch-all-matched-tasks-transact-duration-seconds)
(def scheduler-launch-all-matched-tasks-submit-duration :cook/scheduler-launch-all-matched-tasks-submit-duration-seconds)
(def scheduler-trigger-autoscaling-duration :cook/scheduler-trigger-autoscaling-duration-seconds)
(def scheduler-schedule-jobs-on-kubernetes-duration :cook/scheduler-schedule-jobs-on-kubernetes-duration-seconds)
(def scheduler-distribute-jobs-for-kubernetes-duration :cook/scheduler-distribute-jobs-for-kubernetes-duration-seconds)
(def scheduler-kill-cancelled-tasks-duration :cook/scheduler-kill-cancelled-tasks-duration-seconds)
(def scheduler-sort-jobs-hierarchy-duration :cook/scheduler-sort-jobs-hierarchy-duration-seconds)
(def scheduler-filter-offensive-jobs-duration :cook/scheduler-filter-offensive-jobs-duration-seconds)
(def scheduler-handle-status-update-duaration :cook/scheduler-handle-status-update-duaration-seconds)
(def scheduler-handle-framework-message-duration :cook/scheduler-handle-framework-message-duration-seconds)
(def scheduler-jobs-launched :cook/scheduler-jobs-launched-total)
(def scheduler-match-cycle-jobs-count :cook/scheduler-match-cycle-jobs-count)
(def scheduler-match-cycle-matched-percent :cook/scheduler-match-cycle-matched-percent)
(def scheduler-match-cycle-head-was-matched :cook/scheduler-match-cycle-head-was-matched)
(def scheduler-match-cycle-queue-was-full :cook/scheduler-match-cycle-queue-was-full)
(def scheduler-match-cycle-all-matched :cook/scheduler-match-cycle-all-matched)
(def init-user-to-dry-divisors-duration :cook/scheduler-init-user-to-dru-divisors-duration-seconds)
(def generate-sorted-task-scored-task-pairs-duration :cook/scheduler-generate-sorted-task-scored-task-duration-seconds)
(def get-shares-duration :cook/scheduler-get-shares-duration-seconds)
(def create-user-to-share-fn-duration :cook/scheduler-create-user-to-share-fn-duration-seconds)
(def task-failure-reasons :cook/scheduler-task-failures-by-reason)
(def iterations-at-fenzo-floor :cook/scheduler-iterations-at-fenzo-floor-count)
(def in-order-queue-count :cook/scheduler-in-order-queue-count)
(def task-times-by-status :cook/scheduler-task-runtimes-by-status)
(def number-offers-matched :cook/scheduler-number-offers-matched-distribution)
(def fraction-unmatched-jobs :cook/scheduler-fraction-unmatched-jobs)
(def offer-size-by-resource :cook/scheduler-offer-size-by-resource)
(def task-completion-rate :cook/scheduler-task-completion-rate)
(def task-completion-rate-by-resource :cook/scheduler-task-completion-rate-by-resource)
(def transact-report-queue-datoms :cook/scheduler-transact-report-queue-datoms-count)
(def transact-report-queue-update-job-state :cook/scheduler-transact-report-queue-update-job-state-count)
(def transact-report-queue-job-complete :cook/scheduler-transact-report-queue-job-complete-count)
(def transact-report-queue-tasks-killed :cook/scheduler-transact-report-queue-tasks-killed-count)
(def scheduler-offers-declined :cook/scheduler-offers-declined-count)
(def scheduler-handle-resource-offer-errors :cook/scheduler-handle-resource-offer-errors-count)
(def scheduler-matched-resource-counts :cook/scheduler-matched-resource-count)
(def scheduler-matched-tasks :cook/scheduler-matched-tasks-count)
(def scheduler-abandon-and-reset :cook/scheduler-abandon-and-reset-count)
(def scheduler-rank-job-failures :cook/scheduler-rank-job-failures)
(def scheduler-offer-channel-full-error :cook/scheduler-offer-channel-full-error)
(def scheduler-schedule-jobs-event-duration :cook/scheduler-schedule-jobs-event-duration-seconds)
(def match-jobs-event-duration :cook/scheduler-match-jobs-event-duration-seconds)
(def in-order-queue-delay-duration :cook/scheduler-in-order-queue-delay-duration-seconds)
(def user-state-count :cook/scheduler-users-state-count)
(def resource-metric-map
{:cpus :cook/scheduler-users-cpu-count
:mem :cook/scheduler-users-memory-mebibytes
:jobs :cook/scheduler-users-jobs-count
:gpus :cook/scheduler-users-gpu-count
:launch-rate-saved :cook/scheduler-users-launch-rate-saved
:launch-rate-per-minute :cook/scheduler-users-launch-rate-per-minute})
(def total-pods :cook/scheduler-kubernetes-pods-count)
(def max-pods :cook/scheduler-kubernetes-max-pods)
(def total-synthetic-pods :cook/scheduler-kubernetes-synthetic-pods-count)
(def max-synthetic-pods :cook/scheduler-kubernethes-max-synthetic-pods)
(def synthetic-pods-submitted :cook/scheduler-kubernetes-synthetic-pods-submitted-count)
(def total-nodes :cook/scheduler-kubernetes-nodes-count)
(def max-nodes :cook/scheduler-kubernetes-max-nodes)
(def watch-gap :cook/scheduler-kubernetes-watch-gap-millis)
(def disconnected-watch-gap :cook/scheduler-kubernetes-disconnected-watch-gap-millis)
(def delete-pod-errors :cook/scheduler-kubernetes-delete-pod-errors-count)
(def delete-finalizer-errors :cook/scheduler-kubernetes-delete-finalizer-errors-count)
(def launch-pod-errors :cook/scheduler-launch-pod-errors-count)
(def list-pods-chunk-duration :cook/scheduler-kubernetes-list-pods-chunk-duration-seconds)
(def list-pods-duration :cook/scheduler-kubernetes-list-pods-duration-seconds)
(def list-nodes-duration :cook/scheduler-kubernetes-list-nodes-duration-seconds)
(def delete-pod-duration :cook/scheduler-kubernetes-delete-pod-duration-seconds)
(def delete-finalizer-duration :cook/scheduler-kubernetes-delete-finalizer-duration-seconds)
(def launch-pod-duration :cook/scheduler-kubernetes-launch-pod-duration-seconds)
(def launch-task-duration :cook/scheduler-kubernetes-launch-task-duration-seconds)
(def kill-task-duration :cook/scheduler-kubernetes-kill-task-duration-seconds)
(def compute-pending-offers-duration :cook/scheduler-kubernetes-compute-pending-offers-duration-seconds)
(def autoscale-duration :cook/scheduler-kubernetes-autoscale-duration-seconds)
(def launch-synthetic-tasks-duration :cook/scheduler-kubernetes-launch-synthetic-tasks-duration-seconds)
(def pods-processed-unforced :cook/scheduler-kubernetes-pods-processed-unforced-count)
(def process-lock-duration :cook/scheduler-kubernetes-process-lock-duration-seconds)
(def process-lock-acquire-duration :cook/scheduler-kubernetes-process-lock-acquire-duration-seconds)
(def controller-process-duration :cook/scheduler-kubernetes-controller-process-duration-seconds)
(def handle-pod-update-duration :cook/scheduler-kubernetes-handle-pod-update-duration-seconds)
(def handle-pod-deletion-duration :cook/scheduler-kubernetes-handle-pod-deletion-duration-seconds)
(def update-cook-expected-state-duration :cook/scheduler-kubernetes-update-cook-expected-state-duration-seconds)
(def scan-process-duration :cook/scheduler-kubernetes-scan-process-pod-duration-seconds)
(def pod-waiting-duration :cook/scheduler-kubernetes-pod-duration-until-waiting-seconds)
(def pod-running-duration :cook/scheduler-kubernetes-pod-duration-until-running-seconds)
(def offer-match-timer :cook/scheduler-kubernetes-offer-match-duration-seconds)
(def resource-capacity :cook/scheduler-kubernetes-resource-capacity)
(def resource-consumption :cook/scheduler-kubernetes-resource-consumption)
Mesos metrics
(def mesos-heartbeats :cook/scheduler-mesos-heartbeats-count)
(def mesos-heartbeat-timeouts :cook/scheduler-mesos-heartbeat-timeouts-count)
(def mesos-datomic-sync-duration :cook/scheduler-mesos-heartbeat-datomic-sync-duration-seconds)
(def mesos-offer-chan-depth :cook/scheduler-mesos-offer-chan-depth)
(def mesos-error :cook/scheduler-mesos-error-count)
(def mesos-handle-framework-message :cook/scheduler-mesos-handle-framework-message)
(def mesos-handle-status-update :cook/scheduler-mesos-handle-status-update)
(def mesos-tasks-killed-in-status-update :cook/scheduler-mesos-tasks-killed-in-status-update-count)
(def mesos-aggregator-pending-count :cook/scheduler-mesos-aggregator-pending-count)
(def mesos-pending-sync-host-count :cook/scheduler-mesos-pending-sync-host-count)
(def mesos-updater-unprocessed-count :cook/scheduler-mesos-field-updater-unprocessed-count)
(def mesos-aggregator-message :cook/scheduler-mesos-field-aggregator-message-count)
(def mesos-updater-publish-duration :cook/scheduler-mesos-field-updater-publish-duration-seconds)
(def mesos-updater-transact-duration :cook/scheduler-mesos-field-updater-transact-duration-seconds)
(def mesos-updater-pending-entries :cook/scheduler-mesos-field-updater-pending-entries-distribution)
(def mesos-updater-unprocessed-entries :cook/scheduler-mesos-unprocessed-entries-distribution)
(def jobs-created :cook/api-jobs-created)
(def list-request-param-time-range :cook/api-list-request-param-time-range-millis)
(def list-request-param-limit :cook/api-list-request-param-limit-number)
(def list-response-job-count :cook/api-list-request-job-count)
(def fetch-instance-map-duration :cook/api-internal-fetch-instance-map-duration-seconds)
(def fetch-job-map-duration :cook/api-internal-fetch-job-map-duration-seconds)
(def fetch-jobs-duration :cook/api-internal-fetch-jobs-duration-seconds)
(def list-jobs-duration :cook/api-internal-list-jobs-duration-seconds)
(def endpoint-duration :cook/api-endpoint-duration-seconds)
(def get-jobs-by-user-and-state-duration :cook/tools-get-jobs-by-user-duration-seconds)
(def get-jobs-by-user-and-state-total-duration :cook/tools-get-jobs-by-user-and-states-duration-seconds)
(def get-all-running-tasks-duration :cook/tools-get-all-running-tasks-duration-seconds)
(def get-user-running-jobs-duration :cook/tools-get-user-running-jobs-duration-seconds)
(def get-all-running-jobs-duration :cook/tools-get-all-running-jobs-duration-seconds)
(def pool-mover-jobs-updated :cook/scheduler-plugins-pool-mover-jobs-updated-count)
(def compute-preemption-decision-duration :cook/rebalancer-compute-premeption-decision-duration-seconds)
(def rebalance-duration :cook/rebalancer-rebalance-duration-seconds)
(def pending-job-drus :cook/rebalancer-pending-job-drus)
(def nearest-task-drus :cook/rebalancer-nearest-task-drus)
(def positive-dru-diffs :cook/rebalancer-positive-dru-diffs)
(def preemption-counts-for-host :cook/rebalancer-preemption-counts-for-host)
(def task-counts-to-preempt :cook/rebalancer-task-counts-to-preempt)
(def job-counts-to-run :cook/rebalancer-job-counts-to-run)
(def progress-aggregator-drop-count :cook/progress-aggregator-drop-count)
(def progress-aggregator-pending-states-count :cook/progress-aggregator-pending-states-count)
(def progress-updater-pending-states :cook/progress-updater-pending-states)
(def progress-aggregator-message-count :cook/progress-aggregator-message-count)
(def progress-updater-publish-duration :cook/progress-updater-publish-duration-seconds)
(def progress-updater-transact-duration :cook/progress-updater-transact-duration-seconds)
(def is-leader :cook/scheduler-is-leader)
(def update-queue-lengths-duration :cook/scheduler-update-queue-lengths-duration-seconds)
(def acquire-kill-lock-for-kill-duration :cook/scheduler-acquire-kill-lock-for-kill-duration-seconds)
(def get-pending-jobs-duration :cook/scheduler-get-pending-jobs-duration-seconds)
(defn create-registry
[]
(-> (prometheus/collector-registry)
Initialize default JVM metrics
(jvm/initialize)
Initialize ring metrics
(ring/initialize)
(prometheus/register
one scheduler process running per cluster , so we do not need to aggregate data from multiple sources .
(prometheus/summary scheduler-rank-cycle-duration
{:description "Distribution of rank cycle latency"
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-match-cycle-duration
{:description "Distribution of overall match cycle latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-generate-user-usage-map-duration
{:description "Distribution of generating user->usage map latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-handle-resource-offers-total-duration
{:description "Distribution of total handle-resource-offers! duration"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-pool-handler-pending-to-considerable-duration
{:description "Distribution of filtering pending to considerable jobs duration"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-fenzo-schedule-once-duration
{:description "Distribution of fenzo schedule once latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-handle-resource-offers-match-duration
{:description "Distribution of matching resource offers to jobs latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-handle-resource-offers-matches-to-job-uuids-duration
{:description "Distribution of generating matches->job-uuids map latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-launch-all-matched-tasks-total-duration
{:description "Distribution of total launch all matched tasks latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-launch-all-matched-tasks-transact-duration
{:description "Distribution of launch all matched tasks--transact in datomic latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-launch-all-matched-tasks-submit-duration
{:description "Distribution of launch all matched tasks--submit to compute cluster latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-trigger-autoscaling-duration
{:description "Distribution of trigger autoscaling latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-schedule-jobs-on-kubernetes-duration
{:description "Distribution of scheduling jobs on Kubernetes latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-distribute-jobs-for-kubernetes-duration
{:description "Distribution of distributing jobs for Kubernetes latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-kill-cancelled-tasks-duration
{:description "Distribution of kill cancelled tasks latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-sort-jobs-hierarchy-duration
{:description "Distribution of sorting jobs by DRU latency"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-filter-offensive-jobs-duration
{:description "Distribution of filter offensive jobs latency"
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-handle-status-update-duaration
{:description "Distribution of handle compute cluster status update latency"
:quantiles default-summary-quantiles})
(prometheus/summary scheduler-handle-framework-message-duration
{:description "Distribution of handle framework message latency"
:quantiles default-summary-quantiles})
(prometheus/counter scheduler-jobs-launched
{:description "Total count of jobs launched per pool and compute cluster"
:labels [:pool :compute-cluster]})
(prometheus/summary init-user-to-dry-divisors-duration
{:description "Latency distribution of initializing the user to dru divisors map"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary generate-sorted-task-scored-task-pairs-duration
{:description "Latency distribution of generating the sorted list of task and scored task pairs"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary get-shares-duration
{:description "Latency distribution of getting all users' share"
:quantiles default-summary-quantiles})
(prometheus/summary create-user-to-share-fn-duration
{:description "Latency distribution of creating the user-to-share function"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary task-times-by-status
{:description "Distribution of task runtime by status"
:labels [:status]
:quantiles default-summary-quantiles})
(prometheus/summary number-offers-matched
{:description "Distribution of number of offers matched"
:labels [:pool :compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary fraction-unmatched-jobs
{:description "Distribution of fraction of unmatched jobs"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary offer-size-by-resource
{:description "Distribution of offer size by resource type"
:labels [:pool :resource]
:quantiles default-summary-quantiles})
(prometheus/counter task-completion-rate
{:description "Total count of completed tasks per pool"
:labels [:pool :status]})
(prometheus/counter task-completion-rate-by-resource
{:description "Total count of completed resources per pool"
:labels [:pool :status :resource]})
(prometheus/counter transact-report-queue-datoms
{:description "Total count of report queue datoms"})
(prometheus/counter transact-report-queue-update-job-state
{:description "Total count of job state updates"})
(prometheus/counter transact-report-queue-job-complete
{:description "Total count of completed jobs"})
(prometheus/counter transact-report-queue-tasks-killed
{:description "Total count of tasks killed"})
(prometheus/counter scheduler-offers-declined
{:description "Total offers declined"
:labels [:compute-cluster]})
(prometheus/counter scheduler-matched-resource-counts
{:description "Total matched count per resource type"
:labels [:pool :resource]})
(prometheus/counter scheduler-matched-tasks
{:description "Total matched tasks"
:labels [:pool :compute-cluster]})
(prometheus/counter scheduler-handle-resource-offer-errors
{:descrpiption "Total count of errors encountered in handle-resource-offer!"
:labels [:pool]})
(prometheus/counter scheduler-abandon-and-reset
{:descrpiption "Total count of fenzo abandon-and-reset"
:labels [:pool]})
(prometheus/counter scheduler-rank-job-failures
{:descrpiption "Total count of rank job failures"})
(prometheus/counter scheduler-offer-channel-full-error
{:descrpiption "Total count of offer channel full failures"
:labels [:pool]})
(prometheus/summary scheduler-schedule-jobs-event-duration
{:description "Latency distribution of scheduling jobs in Kubernetes in the full Kenzo codepath"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary match-jobs-event-duration
{:description "Latency distribution of matching jobs in the full Fenzo codepath"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary in-order-queue-delay-duration
{:description "Latency distribution of processing in-order-queue tasks"
:quantiles default-summary-quantiles})
(prometheus/gauge scheduler-match-cycle-jobs-count
{:description "Aggregate match cycle job counts stats"
:labels [:pool :status]})
(prometheus/gauge scheduler-match-cycle-matched-percent
{:description "Percent of jobs matched in last match cycle"
:labels [:pool]})
The following 1/0 metrics are useful for value map visualizations in Grafana
(prometheus/gauge scheduler-match-cycle-head-was-matched
{:description "1 if head was matched, 0 otherwise"
:labels [:pool]})
(prometheus/gauge scheduler-match-cycle-queue-was-full
{:description "1 if queue was full, 0 otherwise"
:labels [:pool]})
(prometheus/gauge scheduler-match-cycle-all-matched
{:description "1 if all jobs were matched, 0 otherwise"
:labels [:pool]})
(prometheus/summary task-failure-reasons
{:description "Distribution of task failures by reason"
:labels [:reason :resource]
:quantiles default-summary-quantiles})
(prometheus/gauge iterations-at-fenzo-floor
                  ;; Fixed typo ":descriptiion" -> ":description".
                  {:description "Current number of iterations at fenzo floor (i.e. 1 considerable job)"
                   :labels [:pool]})
(prometheus/gauge in-order-queue-count
{:description "Depth of queue for in-order processing"})
(prometheus/gauge (resource-metric-map :mem)
{:description "Current memory by state"
:labels [:pool :user :state]})
(prometheus/gauge (resource-metric-map :cpus)
{:description "Current cpu count by state"
:labels [:pool :user :state]})
(prometheus/gauge (resource-metric-map :gpus)
{:description "Current gpu count by state"
:labels [:pool :user :state]})
(prometheus/gauge (resource-metric-map :jobs)
{:description "Current jobs count by state"
:labels [:pool :user :state]})
(prometheus/gauge (resource-metric-map :launch-rate-saved)
{:description "Current launch-rate-saved count by state"
:labels [:pool :user :state]})
(prometheus/gauge (resource-metric-map :launch-rate-per-minute)
{:description "Current launch-rate-per-minute count by state"
:labels [:pool :user :state]})
(prometheus/gauge user-state-count
{:description "Current user count by state"
:labels [:pool :state]})
(prometheus/gauge total-pods
{:description "Total current number of pods per compute cluster"
:labels [:compute-cluster]})
(prometheus/gauge max-pods
{:description "Max number of pods per compute cluster"
:labels [:compute-cluster]})
(prometheus/gauge total-synthetic-pods
{:description "Total current number of synthetic pods per pool and compute cluster"
:labels [:pool :compute-cluster]})
(prometheus/gauge max-synthetic-pods
{:description "Max number of synthetic pods per pool and compute cluster"
:labels [:pool :compute-cluster]})
(prometheus/gauge synthetic-pods-submitted
{:description "Count of synthetic pods submitted in the last match cycle"
:labels [:compute-cluster :pool]})
(prometheus/gauge total-nodes
{:description "Total current number of nodes per compute cluster"
:labels [:compute-cluster]})
(prometheus/gauge max-nodes
{:description "Max number of nodes per compute cluster"
:labels [:compute-cluster]})
(prometheus/summary watch-gap
{:description "Latency distribution of the gap between last watch response and current response"
:labels [:compute-cluster :object]
:quantiles default-summary-quantiles})
(prometheus/summary disconnected-watch-gap
{:description "Latency distribution of the gap between last watch response and current response after reconnecting"
:labels [:compute-cluster :object]
:quantiles default-summary-quantiles})
(prometheus/counter delete-pod-errors
{:description "Total number of errors when deleting pods"
:labels [:compute-cluster]})
(prometheus/counter delete-finalizer-errors
{:description "Total number of errors when deleting pod finalizers"
:labels [:compute-cluster :type]})
(prometheus/counter launch-pod-errors
{:description "Total number of errors when launching pods"
:labels [:compute-cluster :bad-spec]})
(prometheus/summary list-pods-chunk-duration
{:description "Latency distribution of listing a chunk of pods"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary list-pods-duration
{:description "Latency distribution of listing all pods"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary list-nodes-duration
{:description "Latency distribution of listing all nodes"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary delete-pod-duration
{:description "Latency distribution of deleting a pod"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary delete-finalizer-duration
{:description "Latency distribution of deleting a pod's finalizer"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary launch-pod-duration
{:description "Latency distribution of launching a pod"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary launch-task-duration
{:description "Latency distribution of launching a task (more inclusive than launch-pod)"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary kill-task-duration
{:description "Latency distribution of killing a task (more inclusive than delete-pod)"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary compute-pending-offers-duration
{:description "Latency distribution of computing pending offers"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary autoscale-duration
{:description "Latency distribution of autoscaling"
:labels [:compute-cluster :pool]
:quantiles default-summary-quantiles})
(prometheus/summary launch-synthetic-tasks-duration
{:description "Latency distribution of launching synthetic tasks"
:labels [:compute-cluster :pool]
:quantiles default-summary-quantiles})
(prometheus/counter pods-processed-unforced
{:description "Count of processed pods"
:labels [:compute-cluster]})
(prometheus/summary process-lock-duration
{:description "Latency distribution of processing an event while holding the process lock"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary process-lock-acquire-duration
{:description "Latency distribution of acquiring the process lock"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary controller-process-duration
{:description "Latency distribution of processing a pod event"
:labels [:compute-cluster :doing-scan]
:quantiles default-summary-quantiles})
(prometheus/summary handle-pod-update-duration
{:description "Latency distribution of handling a pod update"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary handle-pod-deletion-duration
{:description "Latency distribution of handling a pod deletion"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary update-cook-expected-state-duration
{:description "Latency distribution of updating cook's expected state"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary scan-process-duration
{:description "Latency distribution of scanning for and processing a pod"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/summary pod-waiting-duration
{:description "Latency distribution of the time until a pod is waiting"
:labels [:compute-cluster :synthetic :kubernetes-scheduler-pod]
:quantiles default-summary-quantiles})
(prometheus/summary pod-running-duration
{:description "Latency distribution of the time until a pod is running"
:labels [:compute-cluster :synthetic :kubernetes-scheduler-pod]
:quantiles default-summary-quantiles})
(prometheus/summary offer-match-timer
{:description "Latency distribution of matching an offer"
:labels [:compute-cluster]
:quantiles default-summary-quantiles})
(prometheus/gauge resource-capacity
{:description "Total available capacity of the given resource per cluster and pool"
:labels [:compute-cluster :pool :resource :resource-subtype]})
(prometheus/gauge resource-consumption
{:description "Total consumption of the given resource per cluster"
:labels [:compute-cluster :resource :resource-subtype]})
;; Mesos metrics -------------------------------------------------------------------------------------------------
(prometheus/counter mesos-heartbeats
{:description "Count of mesos heartbeats"})
(prometheus/counter mesos-heartbeat-timeouts
{:description "Count of mesos heartbeat timeouts"})
(prometheus/summary mesos-datomic-sync-duration
{:description "Latency distribution of mesos datomic sync duration"
:quantiles default-summary-quantiles})
(prometheus/gauge mesos-offer-chan-depth
{:description "Depth of mesos offer channel"
:labels [:pool]})
(prometheus/counter mesos-error
{:description "Count of errors in mesos"})
(prometheus/counter mesos-handle-framework-message
{:description "Count of framework messages received in mesos"})
(prometheus/counter mesos-handle-status-update
{:description "Count of status updates received in mesos"})
(prometheus/counter mesos-tasks-killed-in-status-update
{:description "Count of tasks killed during status updates in mesos"})
(prometheus/gauge mesos-aggregator-pending-count
{:description "Count of pending entries in the aggregator"
:labels [:field-name]})
(prometheus/gauge mesos-pending-sync-host-count
{:description "Count of pending sync hosts"})
(prometheus/gauge mesos-updater-unprocessed-count
{:description "Count of unprocessed tasks in mesos"
:labels []})
(prometheus/summary mesos-updater-unprocessed-entries
{:description "Distribution of count of unprocessed entries"
:quantiles default-summary-quantiles})
(prometheus/summary mesos-updater-pending-entries
{:description "Distribution of count of pending entries"
:quantiles default-summary-quantiles})
(prometheus/counter mesos-aggregator-message
{:description "Count of messages received by the aggregator"
:labels [:field-name]})
(prometheus/summary mesos-updater-publish-duration
{:description "Latency distribution of mesos updater publish duration"
:labels [:field-name]
:quantiles default-summary-quantiles})
(prometheus/summary mesos-updater-transact-duration
{:description "Latency distribution of mesos updater transact duration"
:labels [:field-name]
:quantiles default-summary-quantiles})
(prometheus/counter jobs-created
{:description "Total count of jobs created"
:labels [:pool]})
(prometheus/summary list-request-param-time-range
{:description "Distribution of time range specified in list endpoint requests"
:quantiles default-summary-quantiles})
(prometheus/summary list-request-param-limit
{:description "Distribution of instance count limit specified in list endpoint requests"
:quantiles default-summary-quantiles})
(prometheus/summary list-response-job-count
{:description "Distribution of instance count returned in list endpoint responses"
:quantiles default-summary-quantiles})
(prometheus/summary fetch-instance-map-duration
{:description "Latency distribution of converting the instance entity to a map for API responses"
:quantiles default-summary-quantiles})
(prometheus/summary fetch-job-map-duration
{:description "Latency distribution of converting the job entity to a map for API responses"
:quantiles default-summary-quantiles})
(prometheus/summary fetch-jobs-duration
{:description "Latency distribution of fetching jobs by user and state for API responses"
:quantiles default-summary-quantiles})
(prometheus/summary list-jobs-duration
{:description "Latency distribution of listing jobs for API responses"
:quantiles default-summary-quantiles})
(prometheus/summary endpoint-duration
{:description "Latency distribution of API endpoints"
:labels [:endpoint]
:quantiles default-summary-quantiles})
(prometheus/summary get-jobs-by-user-and-state-duration
{:description "Latency distribution of getting jobs by user for a particular state"
:labels [:state]
:quantiles default-summary-quantiles})
(prometheus/summary get-jobs-by-user-and-state-total-duration
{:description "Latency distribution of getting jobs by user for a list of states"
:quantiles default-summary-quantiles})
(prometheus/summary get-all-running-tasks-duration
{:description "Latency distribution of getting all running tasks"
:quantiles default-summary-quantiles})
(prometheus/summary get-user-running-jobs-duration
{:description "Latency distribution of getting running jobs for a particular user"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary get-all-running-jobs-duration
{:description "Latency distribution of getting all running jobs"
:quantiles default-summary-quantiles})
(prometheus/counter pool-mover-jobs-updated
{:description "Total count of jobs moved to a different pool"})
(prometheus/summary compute-preemption-decision-duration
{:description "Latency distribution of computing preemption decision"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary rebalance-duration
{:description "Latency distribution of rebalancing"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary pending-job-drus
{:description "Distribution of pending jobs drus in the rebalancer"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary nearest-task-drus
{:description "Distribution of nearest task drus in the rebalancer"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary positive-dru-diffs
{:description "Distribution of positive dru diffs in the rebalancer"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary preemption-counts-for-host
{:description "Distribution of preemption counts per host in the rebalancer"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary task-counts-to-preempt
{:description "Distribution of number of tasks to preempt in the rebalancer"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/summary job-counts-to-run
{:description "Distribution of number of jobs to run in the rebalancer"
:labels [:pool]
:quantiles default-summary-quantiles})
(prometheus/counter progress-aggregator-drop-count
{:description "Total count of dropped progress messages"})
(prometheus/counter progress-aggregator-message-count
{:description "Total count of received progress messages"})
(prometheus/gauge progress-aggregator-pending-states-count
{:description "Total count of pending states"})
(prometheus/summary progress-updater-pending-states
{:description "Distribution of pending states count in the progress updater"
:quantiles default-summary-quantiles})
(prometheus/summary progress-updater-publish-duration
{:description "Latency distribution of the publish function in the progress updater"
:quantiles default-summary-quantiles})
(prometheus/summary progress-updater-transact-duration
{:description "Latency distribution of the transact function in the progress updater"
:quantiles default-summary-quantiles})
(prometheus/gauge is-leader
{:description "1 if this host is the current leader, 0 otherwise"})
(prometheus/summary update-queue-lengths-duration
{:description "Latency distribution of updating queue lengths from the database"
:quantiles default-summary-quantiles})
(prometheus/summary acquire-kill-lock-for-kill-duration
{:description "Latency distribution of acquiring the kill lock for kill"
:quantiles default-summary-quantiles})
(prometheus/summary get-pending-jobs-duration
{:description "Latency distribution of getting all pending jobs"
:quantiles default-summary-quantiles}))))
;; A global registry for all metrics reported by Cook.
(mount/defstate registry :start (create-registry))
(defmacro value
  "Get the value of the given metric.
  `labels`, when supplied, is a map of label values for labeled collectors.
  Expands to a lookup against the global `registry`."
  {:arglists '([name] [name labels])}
  ([name]
   `(prometheus/value registry ~name))
  ([name labels]
   `(prometheus/value registry ~name ~labels)))
(defmacro with-duration
  "Wraps the given block and records its execution time to the collector with the given name.
  If using a collector with no labels, pass {} for the labels value."
  {:arglists '([name labels & body])}
  [name labels & body]
  ;; (registry ~name ~labels) invokes the registry as a function to look up
  ;; the labeled collector instance.
  `(prometheus/with-duration (registry ~name ~labels) ~@body))
(defmacro start-timer
  "Starts a timer that, when stopped, will store the duration in the given metric.
  The return value will be a function that should be called once the operation to time has run.
  Expands to `prometheus/start-timer` against the global `registry`."
  {:arglists '([name] [name labels])}
  ([name]
   `(prometheus/start-timer registry ~name))
  ([name labels]
   `(prometheus/start-timer registry ~name ~labels)))
(defmacro set
  "Sets the value of the given metric.
  Note: shadows clojure.core/set inside this namespace."
  {:arglists '([name amount] [name labels amount])}
  ([name amount]
   `(prometheus/set registry ~name ~amount))
  ([name labels amount]
   `(prometheus/set registry ~name ~labels ~amount)))
(defmacro inc
  "Increments the value of the given metric.
  Note: shadows clojure.core/inc inside this namespace."
  {:arglists '([name] [name labels] [name labels amount])}
  ([name]
   `(prometheus/inc registry ~name))
  ([name labels]
   `(prometheus/inc registry ~name ~labels))
  ([name labels amount]
   `(prometheus/inc registry ~name ~labels ~amount)))
(defmacro dec
  "Decrements the value of the given metric.
  Note: shadows clojure.core/dec inside this namespace."
  ;; Fixed: the :arglists metadata omitted the [name labels amount] arity
  ;; that this macro actually defines (sibling `inc` lists all three).
  {:arglists '([name] [name labels] [name labels amount])}
  ([name]
   `(prometheus/dec registry ~name))
  ([name labels]
   `(prometheus/dec registry ~name ~labels))
  ([name labels amount]
   `(prometheus/dec registry ~name ~labels ~amount)))
(defmacro observe
  "Records the value for the given metric (for histograms and summaries).
  `labels`, when supplied, is a map of label values for labeled collectors."
  {:arglists '([name amount] [name labels amount])}
  ([name amount]
   `(prometheus/observe registry ~name ~amount))
  ([name labels amount]
   `(prometheus/observe registry ~name ~labels ~amount)))
(defmacro wrap-ring-instrumentation
  "Wraps the given Ring handler to write metrics to the global registry.
  `options` is passed through to the underlying `ring/wrap-instrumentation`."
  [handler options]
  `(ring/wrap-instrumentation ~handler registry ~options))
(defn export
  "Returns the current values of all registered metrics in plain text format."
  ;; Fixed: the docstring was placed after the argument vector, where it is a
  ;; discarded body expression rather than a docstring (invisible to `doc`).
  []
  (prometheus-export/text-format registry))
|
45cc35e75e55b829f4e1a5d6e26c182c7eaeeac714bb28e6fcbc024e44ed2590 | soulomoon/SICP | Exercise 1.39.scm | Exercise 1.39 : A continued fraction representation of the tangent function was published in 1770 by the German mathematician :
; tan x = x / (1 - x^2 / (3 - x^2 / (5 - ...)))
; where x is in radians. Define a procedure (tan-cf x k) that computes an
; approximation to the tangent function based on Lambert's formula. k
; specifies the number of terms to compute, as in Exercise 1.37.
#lang planet neil/sicp
; Iteratively evaluates the k-term finite continued fraction
;   n(1) / (d(1) + n(2) / (d(2) + ... + n(k)/d(k) ...))
; working from term k down to term 1 (tail-recursive).
; Improvement over the original: the terminating call no longer evaluates
; (n 0) and (d 0) just to discard the result, so n and d are only ever
; applied on the documented domain 1..k.
(define (cont_frac_iter n d k)
  (define (iter i result)
    (if (= i 0)
        result
        (iter (- i 1) (/ (n i) (+ (d i) result)))))
  (iter k 0))
; Approximates tan(x), x in radians, with k terms of Lambert's continued
; fraction: tan x = x / (1 - x^2 / (3 - x^2 / (5 - ...))).
; Bug fix: the denominators must be the odd numbers 1, 3, 5, ... so
; d(g) = 2g - 1. The original used (+ (* g 2) 1) = 3, 5, 7, ..., which
; drops the leading "1 - ..." level and yields e.g. 0.3579 for (tan-cf 1.0 k)
; instead of the correct tan 1.0 = 1.5574...
(define (tan-cf x k)
  (define (d g)
    (- (* g 2) 1))
  (define (n g)
    ; first numerator is x, all later ones are -x^2
    (if (= g 1) x (- 0 (* x x))))
  (cont_frac_iter n d k))
; Evaluate tan-cf at x = 1.0 for increasing term counts k to observe
; convergence of the continued-fraction approximation.
(tan-cf 1.0 10)
(tan-cf 1.0 100)
(tan-cf 1.0 1000)
(tan-cf 1.0 10000)
```````````````````````````````````
Welcome to DrRacket, version 6.6 [3m].
memory limit : 128 MB .
0.3579073840656693
0.3579073840656693
0.3579073840656693
0.3579073840656693
> | null | https://raw.githubusercontent.com/soulomoon/SICP/1c6cbf5ecf6397eaeb990738a938d48c193af1bb/Chapter1/Exercise%201.39.scm | scheme | tanx=x1-x23-x25-...,
tanx=x1−x23−x25−…, | Exercise 1.39 : A continued fraction representation of the tangent function was published in 1770 by the German mathematician :
where xx is in radians . Define a procedure ( tan - cf x k ) that computes an approximation to the tangent function based on Lambert ’s formula . k specifies the number of terms to compute , as in Exercise 1.37 .
#lang planet neil/sicp
; Iteratively evaluates the k-term finite continued fraction
;   n(1) / (d(1) + n(2) / (d(2) + ... + n(k)/d(k) ...))
; working from term k down to term 1 (tail-recursive).
; Improvement over the original: the terminating call no longer evaluates
; (n 0) and (d 0) just to discard the result, so n and d are only ever
; applied on the documented domain 1..k.
(define (cont_frac_iter n d k)
  (define (iter i result)
    (if (= i 0)
        result
        (iter (- i 1) (/ (n i) (+ (d i) result)))))
  (iter k 0))
; Approximates tan(x), x in radians, with k terms of Lambert's continued
; fraction: tan x = x / (1 - x^2 / (3 - x^2 / (5 - ...))).
; Bug fix: the denominators must be the odd numbers 1, 3, 5, ... so
; d(g) = 2g - 1. The original used (+ (* g 2) 1) = 3, 5, 7, ..., which
; drops the leading "1 - ..." level and yields e.g. 0.3579 for (tan-cf 1.0 k)
; instead of the correct tan 1.0 = 1.5574...
(define (tan-cf x k)
  (define (d g)
    (- (* g 2) 1))
  (define (n g)
    ; first numerator is x, all later ones are -x^2
    (if (= g 1) x (- 0 (* x x))))
  (cont_frac_iter n d k))
; Evaluate tan-cf at x = 1.0 for increasing term counts k to observe
; convergence of the continued-fraction approximation.
(tan-cf 1.0 10)
(tan-cf 1.0 100)
(tan-cf 1.0 1000)
(tan-cf 1.0 10000)
```````````````````````````````````
Welcome to DrRacket, version 6.6 [3m].
memory limit : 128 MB .
0.3579073840656693
0.3579073840656693
0.3579073840656693
0.3579073840656693
> |
c981e4e68e05337295c6f06c3f282c7fdba170a2dac4b48e0fcf59a40b11a461 | realworldocaml/examples | int_interval_manual_sexp.mli | open Core.Std
(** An interval over integers. *)
type t
(** [t_of_sexp s] deserializes an interval from the s-expression [s]. *)
val t_of_sexp : Sexp.t -> t
(** [sexp_of_t t] serializes [t] as an s-expression. *)
val sexp_of_t : t -> Sexp.t
(** [is_empty t] is [true] iff [t] contains no integers. *)
val is_empty : t -> bool
(** [create lo hi] is the interval with bounds [lo] and [hi].
    NOTE(review): bounds assumed inclusive, with [lo > hi] giving the empty
    interval — confirm against the implementation. *)
val create : int -> int -> t
(** [contains t x] is [true] iff [x] lies within [t]. *)
val contains : t -> int -> bool
| null | https://raw.githubusercontent.com/realworldocaml/examples/32ea926861a0b728813a29b0e4cf20dd15eb486e/code/sexpr/int_interval_manual_sexp.mli | ocaml | open Core.Std
type t
val t_of_sexp : Sexp.t -> t
val sexp_of_t : t -> Sexp.t
val is_empty : t -> bool
val create : int -> int -> t
val contains : t -> int -> bool
|
|
fae0638c72f713a54cf1c3a3f925dec50bd70eb68cab7f7cfe6f38a703adb703 | aeternity/aeternity | aec_coinbase_gen.erl | %%%-------------------------------------------------------------------
%%% @copyright (C) 2018, Aeternity Anstalt
%%% @doc
%%% Calculate coinbase table to meet a given inflation curve.
%%% @end
%%%-------------------------------------------------------------------
-module(aec_coinbase_gen).
-export([ csv_file/2
, csv_file/3
, erlang_module/2
, erlang_module/3
]).
-define(INITIAL_TOKENS, 276450333499323152460728285).
-define(BLOCKS_PER_YEAR, (365 * 24 * 20)).
-define(SLOW_START_BLOCKS, (2 * 24 * 20)). %% 2 days
-define(MULTIPLIER, 1000000000000000000).
%% @doc Generate an `aec_coinbase' module covering heights up to To,
%% using the default initial token supply (?INITIAL_TOKENS).
erlang_module(To, FileName) ->
    erlang_module(To, FileName, ?INITIAL_TOKENS).
%% @doc Write FileName as the source of an Erlang module `aec_coinbase'
%% exporting coinbase_at_height/1: a compressed height -> coinbase lookup
%% table. Fails unless the curve has reached a final coinbase of 0 by To.
erlang_module(To, FileName, InitialTokens) ->
    {ok, FD} = file:open(FileName, [write]),
    %% Emit the generated module's header plus the guard clause
    %% rejecting non-integer or negative heights.
    io:format(FD,
              "%%%-------------------------------------------------------------------\n"
              "%%% @copyright (C) 2018, Aeternity Anstalt\n"
              "%%% @doc\n"
              "%%% Module generated by ~p\n"
              "%%% Initial supply of tokens: ~p\n"
              "%%% @end\n"
              "%%%-------------------------------------------------------------------\n\n"
              "-module(aec_coinbase).\n"
              "-export([coinbase_at_height/1]).\n"
              "\n"
              "-define(MULTIPLIER, ~p).\n\n"
              "-spec coinbase_at_height(non_neg_integer()) -> non_neg_integer().\n\n"
              "coinbase_at_height(X) when not is_integer(X) orelse X < 0 ->\n"
              "    error({bad_height, X});\n"
              , [?MODULE, InitialTokens, ?MULTIPLIER]),
    %% At each change point, emit a clause for the *previous* coinbase,
    %% valid for all heights below the change. The comprehension's filters
    %% skip the very first call (LastCoinbase =:= undefined) and stop
    %% emitting once the coinbase has dropped to 0 after the slow start;
    %% the accumulator threads the last-seen coinbase value.
    Fun = fun({Height, Coinbase,_Existing}, LastCoinbase) ->
                  [io:format(FD, "coinbase_at_height(H) when H < ~p -> ~p * ?MULTIPLIER;\n",
                             [Height, LastCoinbase])
                   || LastCoinbase =/= undefined,
                      LastCoinbase =/= 0 orelse Height < ?SLOW_START_BLOCKS
                  ],
                  Coinbase
          end,
    LastCB = coinbase(0, undefined, To, InitialTokens, undefined, Fun),
    %% Catch-all clause: any height beyond the table yields no coinbase.
    io:format(FD, "coinbase_at_height(_H) -> 0.\n", []),
    file:close(FD),
    case LastCB =:= 0 of
        true -> ok;
        false -> error({last_coinbase_not_zero, LastCB})
    end.
%% @doc Write the coinbase table as CSV using the default initial supply.
csv_file(To, FileName) ->
    csv_file(To, FileName, ?INITIAL_TOKENS).
%% @doc Write one "Height;Coinbase;Existing;Inflation" line for every
%% height (up to To) at which the per-block coinbase changes.
csv_file(To, FileName, InitialTokens) ->
    {ok, FD} = file:open(FileName, [write]),
    Fun = fun({Height, Coinbase0, Existing}, _Acc) ->
                  %% Coinbase0 is in whole tokens; scale to smallest units.
                  Coinbase = Coinbase0 * ?MULTIPLIER,
                  %% Yearly inflation implied by this per-block coinbase.
                  Inflation = Coinbase * ?BLOCKS_PER_YEAR/ Existing,
                  io:format(FD, "~p;~p;~p;~p\n",
                            [Height, Coinbase, Existing, Inflation])
          end,
    ok = coinbase(0, undefined, To, InitialTokens, [], Fun),
    file:close(FD).
%% Walk heights 0..To, threading Acc through Fun. Fun is applied to
%% {Height, CoinbasePerBlock, TotalSupplyAfter} only when the coinbase
%% differs from the previous height's (Last), and unconditionally at
%% height To — compressing the table to its change points.
coinbase(Height, Last, To, Existing, Acc, Fun) ->
    Coinbase = coinbase_at_height(Height, Existing),
    %% Coinbase is in whole tokens; the total supply is kept in smallest units.
    NewExisting = Existing + Coinbase * ?MULTIPLIER,
    case Height =:= To of
        true ->
            Fun({Height, Coinbase, NewExisting}, Acc);
        false ->
            case Last =:= Coinbase of
                true ->
                    coinbase(Height + 1, Last, To, NewExisting, Acc, Fun);
                false ->
                    NewAcc = Fun({Height, Coinbase, NewExisting}, Acc),
                    coinbase(Height + 1, Coinbase, To, NewExisting, NewAcc, Fun)
            end
    end.
%% Per-block coinbase (in whole tokens, i.e. before applying ?MULTIPLIER)
%% at a given height for the current total supply Existing (smallest units).
coinbase_at_height(0,_Existing) ->
    %% No coinbase at genesis block
    0;
coinbase_at_height(Height, Existing) when Height < ?SLOW_START_BLOCKS ->
    %% During the slow-start window the coinbase is at least 1 token.
    max(1, round(Existing * inflation_at_height(Height) / (?BLOCKS_PER_YEAR * ?MULTIPLIER)));
coinbase_at_height(Height, Existing) ->
    %% After slow start the coinbase may decay all the way to 0.
    max(0, round(Existing * inflation_at_height(Height) / (?BLOCKS_PER_YEAR * ?MULTIPLIER))).
%% Target yearly inflation rate at a height: ramps linearly from 0 up to
%% 0.30 over the slow-start window, then decays; the -0.0003 offset makes
%% the rate eventually go (slightly) negative so the rounded coinbase
%% reaches exactly 0.
inflation_at_height(Height) when Height < ?SLOW_START_BLOCKS ->
    Height * 0.3 / ?SLOW_START_BLOCKS;
inflation_at_height(Height) ->
    Adjusted = Height - ?SLOW_START_BLOCKS,
    0.30/(1 + math:pow(Adjusted/(?BLOCKS_PER_YEAR * 0.8), 1.3)) - 0.0003.
| null | https://raw.githubusercontent.com/aeternity/aeternity/b7ce6ae15dab7fa22287c2da3d4405c29bb4edd7/apps/aecore/src/aec_coinbase_gen.erl | erlang | -------------------------------------------------------------------
@doc
Calculate coinbase table to meet a given inflation curve.
@end
-------------------------------------------------------------------
No coinbase at genesis block | ( C ) 2018 , Aeternity Anstalt
-module(aec_coinbase_gen).
-export([ csv_file/2
, csv_file/3
, erlang_module/2
, erlang_module/3
]).
-define(INITIAL_TOKENS, 276450333499323152460728285).
365 * 24 * 20
2 * 24 * 20 ( 2 days )
-define(MULTIPLIER, 1000000000000000000).
erlang_module(To, FileName) ->
erlang_module(To, FileName, ?INITIAL_TOKENS).
erlang_module(To, FileName, InitialTokens) ->
{ok, FD} = file:open(FileName, [write]),
io:format(FD,
"%%%-------------------------------------------------------------------\n"
"%%% @copyright (C) 2018, Aeternity Anstalt\n"
"%%% @doc\n"
"%%% Module generated by ~p\n"
"%%% Initial supply of tokens: ~p\n"
"%%% @end\n"
"%%%-------------------------------------------------------------------\n\n"
"-module(aec_coinbase).\n"
"-export([coinbase_at_height/1]).\n"
"\n"
"-define(MULTIPLIER, ~p).\n\n"
"-spec coinbase_at_height(non_neg_integer()) -> non_neg_integer().\n\n"
"coinbase_at_height(X) when not is_integer(X) orelse X < 0 ->\n"
" error({bad_height, X});\n"
, [?MODULE, InitialTokens, ?MULTIPLIER]),
Fun = fun({Height, Coinbase,_Existing}, LastCoinbase) ->
[io:format(FD, "coinbase_at_height(H) when H < ~p -> ~p * ?MULTIPLIER;\n",
[Height, LastCoinbase])
|| LastCoinbase =/= undefined,
LastCoinbase =/= 0 orelse Height < ?SLOW_START_BLOCKS
],
Coinbase
end,
LastCB = coinbase(0, undefined, To, InitialTokens, undefined, Fun),
io:format(FD, "coinbase_at_height(_H) -> 0.\n", []),
file:close(FD),
case LastCB =:= 0 of
true -> ok;
false -> error({last_coinbase_not_zero, LastCB})
end.
csv_file(To, FileName) ->
csv_file(To, FileName, ?INITIAL_TOKENS).
csv_file(To, FileName, InitialTokens) ->
{ok, FD} = file:open(FileName, [write]),
Fun = fun({Height, Coinbase0, Existing}, _Acc) ->
Coinbase = Coinbase0 * ?MULTIPLIER,
Inflation = Coinbase * ?BLOCKS_PER_YEAR/ Existing,
io:format(FD, "~p;~p;~p;~p\n",
[Height, Coinbase, Existing, Inflation])
end,
ok = coinbase(0, undefined, To, InitialTokens, [], Fun),
file:close(FD).
coinbase(Height, Last, To, Existing, Acc, Fun) ->
Coinbase = coinbase_at_height(Height, Existing),
NewExisting = Existing + Coinbase * ?MULTIPLIER,
case Height =:= To of
true ->
Fun({Height, Coinbase, NewExisting}, Acc);
false ->
case Last =:= Coinbase of
true ->
coinbase(Height + 1, Last, To, NewExisting, Acc, Fun);
false ->
NewAcc = Fun({Height, Coinbase, NewExisting}, Acc),
coinbase(Height + 1, Coinbase, To, NewExisting, NewAcc, Fun)
end
end.
coinbase_at_height(0,_Existing) ->
0;
coinbase_at_height(Height, Existing) when Height < ?SLOW_START_BLOCKS ->
max(1, round(Existing * inflation_at_height(Height) / (?BLOCKS_PER_YEAR * ?MULTIPLIER)));
coinbase_at_height(Height, Existing) ->
max(0, round(Existing * inflation_at_height(Height) / (?BLOCKS_PER_YEAR * ?MULTIPLIER))).
inflation_at_height(Height) when Height < ?SLOW_START_BLOCKS ->
Height * 0.3 / ?SLOW_START_BLOCKS;
inflation_at_height(Height) ->
Adjusted = Height - ?SLOW_START_BLOCKS,
0.30/(1 + math:pow(Adjusted/(?BLOCKS_PER_YEAR * 0.8), 1.3)) - 0.0003.
|
7a37c433d67a23a93fd3c3bd2ab72436b05e2f743008dc8be6b93a89f322f885 | elastic/eui-cljs | delay_render.cljs | (ns eui.delay-render
(:require ["@elastic/eui/lib/components/delay_render/delay_render.js" :as eui]))
(def EuiDelayRender eui/EuiDelayRender)
| null | https://raw.githubusercontent.com/elastic/eui-cljs/ad60b57470a2eb8db9bca050e02f52dd964d9f8e/src/eui/delay_render.cljs | clojure | (ns eui.delay-render
(:require ["@elastic/eui/lib/components/delay_render/delay_render.js" :as eui]))
(def EuiDelayRender eui/EuiDelayRender)
|
|
fe1fc09801c85f085fd8bd45cb9bd54763551ba876f27daff6652e1dcaf063d9 | avsm/platform | uucp_name.ml | ---------------------------------------------------------------------------
Copyright ( c ) 2014 . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
% % NAME%% % % ---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
%%NAME%% %%VERSION%%
---------------------------------------------------------------------------*)
include Uucp_name_base
(* [name u] is the Unicode character name of [u], decoded from the packed
   name map: [(0,0)] means no name, a prefix token alone means a generated
   name ["PREFIX XXXX"] built from the code point, a suffix token alone is
   a complete stored name, and a prefix/suffix pair is joined with a space. *)
let name u =
  let i = Uchar.to_int u in
  let toks = Uucp_name_data.name_toks in
  match Uucp_tmap4bytes.get_uint16_pair Uucp_name_data.name_map i with
  | 0, 0 -> ""
  | pre, 0 -> Printf.sprintf "%s%04X" toks.(pre) i
  | 0, suf -> toks.(suf)
  | pre, suf -> Printf.sprintf "%s %s" toks.(pre) toks.(suf)
let name_alias u = Uucp_cmap.get Uucp_name_data.name_alias_map (Uchar.to_int u)
---------------------------------------------------------------------------
Copyright ( c ) 2014
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/uucp.12.0.0%2Bdune/src/uucp_name.ml | ocaml | ---------------------------------------------------------------------------
Copyright ( c ) 2014 . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
% % NAME%% % % ---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
%%NAME%% %%VERSION%%
---------------------------------------------------------------------------*)
include Uucp_name_base
let name u =
let u = Uchar.to_int u in
match Uucp_tmap4bytes.get_uint16_pair Uucp_name_data.name_map u with
| 0, 0 -> ""
| p, 0 -> Printf.sprintf "%s%04X" Uucp_name_data.name_toks.(p) u
| 0, s -> Uucp_name_data.name_toks.(s)
| p, s ->
Printf.sprintf "%s %s"
Uucp_name_data.name_toks.(p) Uucp_name_data.name_toks.(s)
let name_alias u = Uucp_cmap.get Uucp_name_data.name_alias_map (Uchar.to_int u)
---------------------------------------------------------------------------
Copyright ( c ) 2014
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2014 Daniel C. Bünzli
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
|
|
1a157e9ecbe89ebf7f4c792ed7e05508cd9facba0e398d81dcf2cfb58a1f0195 | tokenmill/beagle | annotation_merger.clj | (ns beagle.annotation-merger)
(defn related-annotations? [anno1 anno2]
(<= (:begin-offset anno1) (:begin-offset anno2) (:end-offset anno1)))
(defn parent-child-annotations? [parent-anno child-anno]
(and (>= (:begin-offset child-anno) (:begin-offset parent-anno))
(<= (:end-offset child-anno) (:end-offset parent-anno))))
(defn merge-annotations [annotations]
(let [sorted-annotation (sort-by :begin-offset annotations)]
(loop [parent-annotation (first sorted-annotation)
[child-annotation & remaining] (rest sorted-annotation)
result []]
(if child-annotation
(if (related-annotations? parent-annotation child-annotation)
(recur (if (and (parent-child-annotations? parent-annotation child-annotation)
(not (parent-child-annotations? child-annotation parent-annotation)))
parent-annotation
child-annotation)
remaining
result)
(recur child-annotation remaining (conj result parent-annotation)))
(conj result parent-annotation)))))
(defn merge-same-type-annotations [annotations]
(mapcat (fn [[_ anns]] (merge-annotations anns)) (group-by :type annotations)))
| null | https://raw.githubusercontent.com/tokenmill/beagle/863ffa63364e9ae7e7a22a3c7e258fd02ace1632/src/beagle/annotation_merger.clj | clojure | (ns beagle.annotation-merger)
(defn related-annotations? [anno1 anno2]
(<= (:begin-offset anno1) (:begin-offset anno2) (:end-offset anno1)))
(defn parent-child-annotations? [parent-anno child-anno]
(and (>= (:begin-offset child-anno) (:begin-offset parent-anno))
(<= (:end-offset child-anno) (:end-offset parent-anno))))
(defn merge-annotations [annotations]
(let [sorted-annotation (sort-by :begin-offset annotations)]
(loop [parent-annotation (first sorted-annotation)
[child-annotation & remaining] (rest sorted-annotation)
result []]
(if child-annotation
(if (related-annotations? parent-annotation child-annotation)
(recur (if (and (parent-child-annotations? parent-annotation child-annotation)
(not (parent-child-annotations? child-annotation parent-annotation)))
parent-annotation
child-annotation)
remaining
result)
(recur child-annotation remaining (conj result parent-annotation)))
(conj result parent-annotation)))))
(defn merge-same-type-annotations [annotations]
(mapcat (fn [[_ anns]] (merge-annotations anns)) (group-by :type annotations)))
|
|
fa529a93401195b0579bb26b685c9492eba06d1c560a0265fca647a7dc456d9e | status-im/extensions-fiddle | hooks.cljs | (ns react-native-web.hooks
(:require-macros [react-native-web.views :refer [defview letsubs]])
(:require [react-native-web.react :as react]
[status-im.colors :as colors]
[re-frame.core :as re-frame]
[pluto.core :as pluto]
[clojure.string :as string]))
(defn wallet-settings-hook [id {:keys [view]} props]
[react/view {:style {:flex 1}}
[view props]])
(defn profile-settings-hook [id {:keys [view]} props]
[react/view {:style {:flex 1}}
[view props]])
(defn message-container [preview outgoing]
[react/view
[react/view {:style {:margin-top 20
:flex-direction (if outgoing :row-reverse :row)
: width 230
: flex 1
:align-self (if outgoing :flex-end :flex-start)
:align-items (if outgoing :flex-end :flex-start)}}
[react/view {:style (merge
(if outgoing
{:margin-left 64}
{:margin-right 64})
{:flex-direction :column
: width 230
: flex 1
:padding-left 8
:padding-right 8
:align-items (if outgoing :flex-end :flex-start)})}
[react/view {:style {:flex-direction (if outgoing :row-reverse :row)}}
: flex 1
:padding-vertical 6
:padding-horizontal 12
:border-radius 8
:padding-top 12
:padding-bottom 10
;:flex-wrap :wrap
:background-color (if outgoing colors/blue colors/blue-light)}}
preview]]]]])
(def input-container
{:flex-direction :row
:align-items :flex-end
:padding-left 14})
(def send-message-container
{:background-color colors/blue
:width 30
:height 30
:border-radius 15
:margin 10
:align-items :center
:justify-content :center
:padding 4
:margin-left 8
:margin-bottom 11})
(def send-message-icon
{:height 22
:width 22})
(def input-root
{:padding-top 8
:padding-bottom 8
:flex 1})
(def input-animated
{:align-items :center
:flex-direction :row
:flex-grow 1
:min-height 36})
(defn rand-str [len]
(apply str (take len (repeatedly #(char (+ (rand 26) 65))))))
(defview chat-view [preview parameters command-id props on-send on-send-sync]
(letsubs [{:keys [messages params suggestion-id]} [:get :extension-props]]
[react/view {:style {:flex 1}}
[(react/scroll-view) {:style {:flex 1 :background-color :white}}
[react/view
(for [{:keys [plain-message] :as message} messages]
(if plain-message
[message-container [react/text {:style {:max-width 200 :color :white}} plain-message] true]
(let [m (fn [out?] (merge {:outgoing out?} message props))]
^{:key (str message (rand-str 10))}
[react/view
[message-container (when preview (preview (m false))) false]
[message-container (when preview (preview (m true))) true]])))]]
(when-let [suggestion (some #(when (= suggestion-id (:id %)) (:suggestions %)) parameters)]
[react/view {:style {:max-height 300}}
[suggestion]])
[react/view {:style input-container}
[react/view {:style input-root}
[react/view {:style input-animated}
[react/text {:style {:border-width 1 :border-color :red}} (str "/" (name command-id) " ")]
(for [{:keys [placeholder id]} parameters]
^{:key (str id placeholder)}
[react/text-input {:placeholder placeholder
:value (or (get params id) "")
:on-change-text #(re-frame/dispatch [:set-in [:extension-props :params id] %])
:on-focus #(re-frame/dispatch [:set-in [:extension-props :suggestion-id] id])
:style {:margin-right 5 :width 50}}])]]
[(react/touchable-highlight) {:on-press #(if on-send-sync
(do
(on-send-sync {:content {:params params}})
(re-frame/dispatch [:set-in [:extension-props :suggestion-id] nil])
(re-frame/dispatch [:set-in [:extension-props :params] nil]))
(do
(when on-send (on-send {:content {:params params}}))
(re-frame/dispatch [:set-in [:extension-props :suggestion-id] nil])
(re-frame/dispatch [:set-in [:extension-props :params] nil])
(re-frame/dispatch [:set-in [:extension-props :messages] (conj messages {:content {:params params}})])))}
[react/view {:style send-message-container}
[react/text {:style {:color :white}} ">"]
#_[icons/icon :main-icons/arrow-up {:container-style send-message-icon
:color :white}]]]]]))
(defn command-hook [id {:keys [parameters preview on-send on-send-sync]} props]
[chat-view preview parameters id props on-send on-send-sync])
(defn hook-in [id parsed {:keys [on-installation]} props]
(when id
(let [hook-id (last (string/split (name id) #"\."))
type (pluto/hook-type id)]
(when on-installation
(on-installation))
(case type
"chat.command" (command-hook hook-id parsed props)
"wallet.settings" (wallet-settings-hook hook-id parsed props)
"profile.settings" (profile-settings-hook hook-id parsed props)
[:div
(str "Unknown hook type " type)]))))
| null | https://raw.githubusercontent.com/status-im/extensions-fiddle/3f3544e90ff0ecdb1dfd051886b5a5f28e506b0b/src/react_native_web/hooks.cljs | clojure | :flex-wrap :wrap | (ns react-native-web.hooks
(:require-macros [react-native-web.views :refer [defview letsubs]])
(:require [react-native-web.react :as react]
[status-im.colors :as colors]
[re-frame.core :as re-frame]
[pluto.core :as pluto]
[clojure.string :as string]))
(defn wallet-settings-hook [id {:keys [view]} props]
[react/view {:style {:flex 1}}
[view props]])
(defn profile-settings-hook [id {:keys [view]} props]
[react/view {:style {:flex 1}}
[view props]])
(defn message-container [preview outgoing]
[react/view
[react/view {:style {:margin-top 20
:flex-direction (if outgoing :row-reverse :row)
: width 230
: flex 1
:align-self (if outgoing :flex-end :flex-start)
:align-items (if outgoing :flex-end :flex-start)}}
[react/view {:style (merge
(if outgoing
{:margin-left 64}
{:margin-right 64})
{:flex-direction :column
: width 230
: flex 1
:padding-left 8
:padding-right 8
:align-items (if outgoing :flex-end :flex-start)})}
[react/view {:style {:flex-direction (if outgoing :row-reverse :row)}}
: flex 1
:padding-vertical 6
:padding-horizontal 12
:border-radius 8
:padding-top 12
:padding-bottom 10
:background-color (if outgoing colors/blue colors/blue-light)}}
preview]]]]])
(def input-container
{:flex-direction :row
:align-items :flex-end
:padding-left 14})
(def send-message-container
{:background-color colors/blue
:width 30
:height 30
:border-radius 15
:margin 10
:align-items :center
:justify-content :center
:padding 4
:margin-left 8
:margin-bottom 11})
(def send-message-icon
{:height 22
:width 22})
(def input-root
{:padding-top 8
:padding-bottom 8
:flex 1})
(def input-animated
{:align-items :center
:flex-direction :row
:flex-grow 1
:min-height 36})
(defn rand-str [len]
(apply str (take len (repeatedly #(char (+ (rand 26) 65))))))
(defview chat-view [preview parameters command-id props on-send on-send-sync]
(letsubs [{:keys [messages params suggestion-id]} [:get :extension-props]]
[react/view {:style {:flex 1}}
[(react/scroll-view) {:style {:flex 1 :background-color :white}}
[react/view
(for [{:keys [plain-message] :as message} messages]
(if plain-message
[message-container [react/text {:style {:max-width 200 :color :white}} plain-message] true]
(let [m (fn [out?] (merge {:outgoing out?} message props))]
^{:key (str message (rand-str 10))}
[react/view
[message-container (when preview (preview (m false))) false]
[message-container (when preview (preview (m true))) true]])))]]
(when-let [suggestion (some #(when (= suggestion-id (:id %)) (:suggestions %)) parameters)]
[react/view {:style {:max-height 300}}
[suggestion]])
[react/view {:style input-container}
[react/view {:style input-root}
[react/view {:style input-animated}
[react/text {:style {:border-width 1 :border-color :red}} (str "/" (name command-id) " ")]
(for [{:keys [placeholder id]} parameters]
^{:key (str id placeholder)}
[react/text-input {:placeholder placeholder
:value (or (get params id) "")
:on-change-text #(re-frame/dispatch [:set-in [:extension-props :params id] %])
:on-focus #(re-frame/dispatch [:set-in [:extension-props :suggestion-id] id])
:style {:margin-right 5 :width 50}}])]]
[(react/touchable-highlight) {:on-press #(if on-send-sync
(do
(on-send-sync {:content {:params params}})
(re-frame/dispatch [:set-in [:extension-props :suggestion-id] nil])
(re-frame/dispatch [:set-in [:extension-props :params] nil]))
(do
(when on-send (on-send {:content {:params params}}))
(re-frame/dispatch [:set-in [:extension-props :suggestion-id] nil])
(re-frame/dispatch [:set-in [:extension-props :params] nil])
(re-frame/dispatch [:set-in [:extension-props :messages] (conj messages {:content {:params params}})])))}
[react/view {:style send-message-container}
[react/text {:style {:color :white}} ">"]
#_[icons/icon :main-icons/arrow-up {:container-style send-message-icon
:color :white}]]]]]))
(defn command-hook [id {:keys [parameters preview on-send on-send-sync]} props]
[chat-view preview parameters id props on-send on-send-sync])
(defn hook-in [id parsed {:keys [on-installation]} props]
(when id
(let [hook-id (last (string/split (name id) #"\."))
type (pluto/hook-type id)]
(when on-installation
(on-installation))
(case type
"chat.command" (command-hook hook-id parsed props)
"wallet.settings" (wallet-settings-hook hook-id parsed props)
"profile.settings" (profile-settings-hook hook-id parsed props)
[:div
(str "Unknown hook type " type)]))))
|
11053286b1ae63095d2e71754c0ee82b54293507a4675ed2ed8df531458778ed | 3b/3bil | util.lisp | (in-package :avm2-compiler)
;; pieces shared between both writers
;;; fixme: deal with package stuff, possibly reorganize stuff between asm/compiler...
(defun super-names (name)
(let ((c (when name (find-swf-class name))))
(when c
(cons (swf-name c) (super-names (extends c))))))
(defun push-lex-scope (mn-index)
`((:get-lex ,(if (integerp mn-index) `(:id ,mn-index)mn-index))
(:push-scope)))
(defun new-class+scopes (class)
;; fixme: allow class lookup instead of using class-id directly?
(let ((supers (reverse (super-names (extends class)))))
(unless (second (assoc (swf-name class) (class-names *compiler-context*)))
(break "name ~s = ~s names ~s" (swf-name class)
(assoc (swf-name class) (class-names *compiler-context*))
(class-names *compiler-context*)))
`((:get-scope-object 0)
,@(loop for i in supers
append (push-lex-scope i))
(:get-lex ,(swf-name (find-swf-class (extends class))))
(:new-class ,(second (assoc (swf-name class) (class-names *compiler-context*))))
,@(loop repeat (length supers)
collect `(:pop-scope))
(:init-property ,(swf-name class)))))
(defun intern-constant (x)
(etypecase x
((integer 0 #.(expt 2 32))
(cons (avm2-asm::avm2-intern-uint x) 4))
((integer #.(- (expt 2 31)) #.(expt 2 31))
(cons (avm2-asm::avm2-intern-int x) 3))
(number
(cons (avm2-asm::avm2-intern-double (float x 0d0)) 6))
(string
(cons (avm2-asm::avm2-string x) 1))
((eql t)
(cons 1 #x0b))
;;(false #x0a)
((eql nil)
(cons 1 #x0c))
#++(undef 0)
#++(ns 8)
#++(pks ns x16)
#++(pkg internal ns x17)
#++(prot ns x18)
#++(explicit ns x19)
#++(static prot ns x1a)
#++(private ns x05)))
(defun assemble-function (name data)
#+nil(format t "--assemble-function ~s :~%" name)
(destructuring-bind (n nid argtypes return-type flags asm
&key activation-slots class-name class-static
anonymous trait trait-type function-deps class-deps
optional-args literals circularity-fixups )
data
(declare (ignore function-deps class-deps))
;(format t "literals = ~s~%" (reverse literals))
;(format t "circ = ~s~%" circularity-fixups)
(loop for (value code) in (reverse literals)
do (coalesce-literal value code))
(loop for (value code) in circularity-fixups
do (add-circularity-fixup value code))
;;(format t "--assemble-function ~s : ~s : ~s~%" name n nid)
(let* ((traits (loop for (name index type) in activation-slots
;;do (format t "trait = ~s ~s ~s ~%" name index type)
collect (make-instance
'avm2-asm::trait-info
'avm2-asm::name (avm2-asm::asm-intern-multiname name)
'avm2-asm::trait-data
(make-instance
'avm2-asm::trait-data-slot/const
'avm2-asm::kind 0
'avm2-asm::slot-id index
'avm2-asm::type-name type
'avm2-asm::vindex 0 ;; no value
'avm2-asm::vkind 0 ;; no value
))))
(rest-p (or (logbitp 0 flags)
(logbitp 2 flags)))
(mid (avm2-asm::avm2-method name nid argtypes return-type flags
:option-params
(mapcar 'intern-constant optional-args)
:body (avm2-asm::assemble-method-body
asm
:traits traits
:arg-count (+ 1
(if rest-p 1 0)
(length argtypes))))))
(when trait
(setf n (if (symbolp trait)
(avm2-asm::symbol-to-qname-list trait)
`(:qname "" ,trait))))
(if class-name
;; member function
(let ((class (find-swf-class class-name))
(override-p nil))
(assert class) ;; fixme: handle this better
(when class
(loop for super = (extends class) then (extends sc)
for sc = (find-swf-class super)
while (and super sc)
do (format t "check for inherited method ~s in class ~s, super=~s~% ~s~%" n class-name super (functions sc))
when (member n (functions sc) :test 'equal :key 'car)
do (setf override-p t)
(loop-finish))
(when override-p (format t "===> got override~%" )))
;(find-swf-class 'flash:object)
(macrolet ((add (n mid alist &optional flags)
`(progn
(let ((c (assoc ,n ,alist :test 'equal)))
(if c (rplacd c (list ,mid))
(push (list ,n ,mid ,@(when flags
(list flags)))
,alist))))))
(cond
((getf (flags class) :methods-as-properties)
for some stuff like setf , we want functions as
;; properties instead of actual methods, so we can use
;; the class as a namespace without it showing up in the
;; scope for functions in that namespace
;; so add an anonymous function to be put into the slot later
#++(format t "assemble fun ~s / ~s for namespace ~s~% " name n class-name)
#++(format t "=~s~%" (list (list :qname "|setf|" (format nil "~s" trait)))
n mid)
(push (list (list :qname "|setf|" (format nil "~s" trait)) mid)
(function-names *compiler-context*))
(add n (list :qname "|setf|" (format nil "~s" trait)) (class-functions class)))
(class-static
(add n mid (class-functions class)))
(t (add n mid (functions class)
(list :override override-p))))))
;; normal function
(cond
;; fixme: should these use trait instead of n ?
((and (not anonymous) trait (eq trait-type :function))
(push (list n mid) (function-names *compiler-context*)))
((and trait (eq trait-type :slot))
(push (list n 0) (script-slots *compiler-context*)))
(t
#++(format t "no trait for function ~s =~%" name)))))))
;++
(defun assemble-class (name ns super properties constructor instance-functions class-properties class-functions flags implements)
(let* ((constructor-mid
(cond
#++((consp constructor)
(avm2-asm::avm2-method
nil 0 ;; id name
(loop for i in (first constructor)
collect 0) ;; constructor arg types
0 0
:body
(avm2-asm::assemble-method-body
(%compile-defun name (first constructor)
(second constructor) t
(or (third constructor) t)))))
((numberp constructor) constructor)
(t (avm2-asm::intern-method-id constructor))))
;; fixme: probably should make this configurable at some point
(class-init (avm2-asm::avm2-method nil 0 nil 0 0 ;; meta-class init
:body
(avm2-asm::assemble-method-body
`((:get-local-0)
(:push-scope)
,@ (when (getf flags :methods-as-properties)
(loop for (sn an) in class-functions
append `((:get-local-0)
(:get-lex ,an)
(:set-property ,sn))))
(:return-void))
:init-scope 0)))
(junk (avm2-asm::avm2-ns-intern ns))
(class (avm2-asm::avm2-class
(avm2-asm::asm-intern-multiname name)
(avm2-asm::asm-intern-multiname
(or (swf-name (find-swf-class super))
super))
flags 1 = sealed,2 = final,4 = interface , 8 = protectedns ?
flags ;; (:sealed :final :interface :protected-namespace)
(loop for i in implements
collect (avm2-asm::asm-intern-multiname
(or (swf-name (find-swf-class i))
i)))
constructor-mid
(append
(loop for i in properties
collect
(make-instance
'avm2-asm::trait-info
'avm2-asm::name (avm2-asm::asm-intern-multiname i)
'avm2-asm::trait-data
(make-instance 'avm2-asm::trait-data-slot/const
'avm2-asm::kind 0
'avm2-asm::slot-id 0 ;; auto-assign
'avm2-asm::type-name 0 ;; */t
'avm2-asm::vindex 0 ;; no value
'avm2-asm::vkind 0 ;; no value
)))
(loop for (name index fflags) in instance-functions
collect
(make-instance
'avm2-asm::trait-info
'avm2-asm::name (avm2-asm::asm-intern-multiname name)
'avm2-asm::trait-data
(make-instance 'avm2-asm::trait-data-method/get/set
'avm2-asm::slot-id 0 ;; none
'avm2-asm::method index
'avm2-asm::flags fflags))))
class-init
:protected-ns junk
:class-traits
(append
(loop for i in class-properties
collect
(make-instance
'avm2-asm::trait-info
'avm2-asm::name (avm2-asm::asm-intern-multiname i)
'avm2-asm::trait-data
(make-instance 'avm2-asm::trait-data-slot/const
'avm2-asm::kind 0
'avm2-asm::slot-id 0 ;; auto-assign
'avm2-asm::type-name 0 ;; */t
'avm2-asm::vindex 0 ;; no value
'avm2-asm::vkind 0 ;; no value
)))
(loop for (name index fflags) in class-functions
collect
(make-instance
'avm2-asm::trait-info
'avm2-asm::name (avm2-asm::asm-intern-multiname name)
'avm2-asm::trait-data
(if (getf flags :methods-as-properties)
(make-instance 'avm2-asm::trait-data-slot/const
'avm2-asm::kind 0
'avm2-asm::slot-id 0 ;; auto-assign
'avm2-asm::type-name 0 ;; */t
'avm2-asm::vindex 0 ;; no value
'avm2-asm::vkind 0) ;; no value
(make-instance 'avm2-asm::trait-data-method/get/set
'avm2-asm::slot-id 0 ;; none
'avm2-asm::method index
'avm2-asm::flags fflags)))))
;; todo: class traits
;; :class-traits nil
)))
(format t "add ~s to compiler context~%" (list name class))
(push (list name class) (class-names *compiler-context*))))
| null | https://raw.githubusercontent.com/3b/3bil/c852181848bedf476373e901869ca29471f926ee/file/util.lisp | lisp | pieces shared between both writers
fixme: deal with package stuff, possibly reorganize stuff between asm/compiler...
fixme: allow class lookup instead of using class-id directly?
(false #x0a)
(format t "literals = ~s~%" (reverse literals))
(format t "circ = ~s~%" circularity-fixups)
(format t "--assemble-function ~s : ~s : ~s~%" name n nid)
do (format t "trait = ~s ~s ~s ~%" name index type)
no value
no value
member function
fixme: handle this better
(find-swf-class 'flash:object)
properties instead of actual methods, so we can use
the class as a namespace without it showing up in the
scope for functions in that namespace
so add an anonymous function to be put into the slot later
normal function
fixme: should these use trait instead of n ?
++
id name
constructor arg types
fixme: probably should make this configurable at some point
meta-class init
(:sealed :final :interface :protected-namespace)
auto-assign
*/t
no value
no value
none
auto-assign
*/t
no value
no value
auto-assign
*/t
no value
no value
none
todo: class traits
:class-traits nil | (in-package :avm2-compiler)
(defun super-names (name)
(let ((c (when name (find-swf-class name))))
(when c
(cons (swf-name c) (super-names (extends c))))))
(defun push-lex-scope (mn-index)
`((:get-lex ,(if (integerp mn-index) `(:id ,mn-index)mn-index))
(:push-scope)))
(defun new-class+scopes (class)
(let ((supers (reverse (super-names (extends class)))))
(unless (second (assoc (swf-name class) (class-names *compiler-context*)))
(break "name ~s = ~s names ~s" (swf-name class)
(assoc (swf-name class) (class-names *compiler-context*))
(class-names *compiler-context*)))
`((:get-scope-object 0)
,@(loop for i in supers
append (push-lex-scope i))
(:get-lex ,(swf-name (find-swf-class (extends class))))
(:new-class ,(second (assoc (swf-name class) (class-names *compiler-context*))))
,@(loop repeat (length supers)
collect `(:pop-scope))
(:init-property ,(swf-name class)))))
(defun intern-constant (x)
(etypecase x
((integer 0 #.(expt 2 32))
(cons (avm2-asm::avm2-intern-uint x) 4))
((integer #.(- (expt 2 31)) #.(expt 2 31))
(cons (avm2-asm::avm2-intern-int x) 3))
(number
(cons (avm2-asm::avm2-intern-double (float x 0d0)) 6))
(string
(cons (avm2-asm::avm2-string x) 1))
((eql t)
(cons 1 #x0b))
((eql nil)
(cons 1 #x0c))
#++(undef 0)
#++(ns 8)
#++(pks ns x16)
#++(pkg internal ns x17)
#++(prot ns x18)
#++(explicit ns x19)
#++(static prot ns x1a)
#++(private ns x05)))
(defun assemble-function (name data)
#+nil(format t "--assemble-function ~s :~%" name)
(destructuring-bind (n nid argtypes return-type flags asm
&key activation-slots class-name class-static
anonymous trait trait-type function-deps class-deps
optional-args literals circularity-fixups )
data
(declare (ignore function-deps class-deps))
(loop for (value code) in (reverse literals)
do (coalesce-literal value code))
(loop for (value code) in circularity-fixups
do (add-circularity-fixup value code))
(let* ((traits (loop for (name index type) in activation-slots
collect (make-instance
'avm2-asm::trait-info
'avm2-asm::name (avm2-asm::asm-intern-multiname name)
'avm2-asm::trait-data
(make-instance
'avm2-asm::trait-data-slot/const
'avm2-asm::kind 0
'avm2-asm::slot-id index
'avm2-asm::type-name type
))))
(rest-p (or (logbitp 0 flags)
(logbitp 2 flags)))
(mid (avm2-asm::avm2-method name nid argtypes return-type flags
:option-params
(mapcar 'intern-constant optional-args)
:body (avm2-asm::assemble-method-body
asm
:traits traits
:arg-count (+ 1
(if rest-p 1 0)
(length argtypes))))))
(when trait
(setf n (if (symbolp trait)
(avm2-asm::symbol-to-qname-list trait)
`(:qname "" ,trait))))
(if class-name
(let ((class (find-swf-class class-name))
(override-p nil))
(when class
(loop for super = (extends class) then (extends sc)
for sc = (find-swf-class super)
while (and super sc)
do (format t "check for inherited method ~s in class ~s, super=~s~% ~s~%" n class-name super (functions sc))
when (member n (functions sc) :test 'equal :key 'car)
do (setf override-p t)
(loop-finish))
(when override-p (format t "===> got override~%" )))
(macrolet ((add (n mid alist &optional flags)
`(progn
(let ((c (assoc ,n ,alist :test 'equal)))
(if c (rplacd c (list ,mid))
(push (list ,n ,mid ,@(when flags
(list flags)))
,alist))))))
(cond
((getf (flags class) :methods-as-properties)
for some stuff like setf , we want functions as
#++(format t "assemble fun ~s / ~s for namespace ~s~% " name n class-name)
#++(format t "=~s~%" (list (list :qname "|setf|" (format nil "~s" trait)))
n mid)
(push (list (list :qname "|setf|" (format nil "~s" trait)) mid)
(function-names *compiler-context*))
(add n (list :qname "|setf|" (format nil "~s" trait)) (class-functions class)))
(class-static
(add n mid (class-functions class)))
(t (add n mid (functions class)
(list :override override-p))))))
(cond
((and (not anonymous) trait (eq trait-type :function))
(push (list n mid) (function-names *compiler-context*)))
((and trait (eq trait-type :slot))
(push (list n 0) (script-slots *compiler-context*)))
(t
#++(format t "no trait for function ~s =~%" name)))))))
(defun assemble-class (name ns super properties constructor instance-functions class-properties class-functions flags implements)
(let* ((constructor-mid
(cond
#++((consp constructor)
(avm2-asm::avm2-method
(loop for i in (first constructor)
0 0
:body
(avm2-asm::assemble-method-body
(%compile-defun name (first constructor)
(second constructor) t
(or (third constructor) t)))))
((numberp constructor) constructor)
(t (avm2-asm::intern-method-id constructor))))
:body
(avm2-asm::assemble-method-body
`((:get-local-0)
(:push-scope)
,@ (when (getf flags :methods-as-properties)
(loop for (sn an) in class-functions
append `((:get-local-0)
(:get-lex ,an)
(:set-property ,sn))))
(:return-void))
:init-scope 0)))
(junk (avm2-asm::avm2-ns-intern ns))
(class (avm2-asm::avm2-class
(avm2-asm::asm-intern-multiname name)
(avm2-asm::asm-intern-multiname
(or (swf-name (find-swf-class super))
super))
flags 1 = sealed,2 = final,4 = interface , 8 = protectedns ?
(loop for i in implements
collect (avm2-asm::asm-intern-multiname
(or (swf-name (find-swf-class i))
i)))
constructor-mid
(append
(loop for i in properties
collect
(make-instance
'avm2-asm::trait-info
'avm2-asm::name (avm2-asm::asm-intern-multiname i)
'avm2-asm::trait-data
(make-instance 'avm2-asm::trait-data-slot/const
'avm2-asm::kind 0
)))
(loop for (name index fflags) in instance-functions
collect
(make-instance
'avm2-asm::trait-info
'avm2-asm::name (avm2-asm::asm-intern-multiname name)
'avm2-asm::trait-data
(make-instance 'avm2-asm::trait-data-method/get/set
'avm2-asm::method index
'avm2-asm::flags fflags))))
class-init
:protected-ns junk
:class-traits
(append
(loop for i in class-properties
collect
(make-instance
'avm2-asm::trait-info
'avm2-asm::name (avm2-asm::asm-intern-multiname i)
'avm2-asm::trait-data
(make-instance 'avm2-asm::trait-data-slot/const
'avm2-asm::kind 0
)))
(loop for (name index fflags) in class-functions
collect
(make-instance
'avm2-asm::trait-info
'avm2-asm::name (avm2-asm::asm-intern-multiname name)
'avm2-asm::trait-data
(if (getf flags :methods-as-properties)
(make-instance 'avm2-asm::trait-data-slot/const
'avm2-asm::kind 0
(make-instance 'avm2-asm::trait-data-method/get/set
'avm2-asm::method index
'avm2-asm::flags fflags)))))
)))
(format t "add ~s to compiler context~%" (list name class))
(push (list name class) (class-names *compiler-context*))))
|
98a804a0e20cfadeb191168b935074f9c1ad50284313558bc1c6af432db3ccf2 | mjambon/mikmatch | charset.ml | module C = Set.Make (Char)
type t = C.t
let empty = C.empty
let add = C.add
let singleton = C.singleton
let union = C.union
let diff = C.diff
let add_range first last set =
let r = ref set in
for i = Char.code first to Char.code last do
r := add (Char.chr i) !r
done;
!r
let range c1 c2 = add_range c1 c2 empty
let irange i j = range (Char.chr i) (Char.chr j)
let full = range '\000' '\255'
let full_for_C = C.remove '\000' full
let of_string s =
let accu = ref C.empty in
String.iter (fun c -> accu := C.add c !accu) s;
!accu
let complement set = C.diff full set
let list = C.elements
let nocase set =
C.fold
(fun c set ->
let c1 = Char.lowercase_ascii c
and c2 = Char.uppercase_ascii c in
let set1 = C.add c1 set in
if c1 <> c2 then C.add c2 set1
else set1)
set
C.empty
module Posix =
struct
let lower = range 'a' 'z'
let upper = range 'A' 'Z'
let ascii = range '\x00' '\x7F'
let alpha = union lower upper
let digit = range '0' '9'
let alnum = union alpha digit
let punct = of_string "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~"
let graph = union alnum punct
let print = union (singleton ' ') graph
let blank = of_string " \t"
let cntrl = union (range '\x00' '\x1F') (singleton '\x7F')
let xdigit = of_string "0123456789abcdefABCDEF"
let space = of_string " \t\n\x0B\x0C\r"
let all = [ "lower", lower;
"upper", upper;
"ascii", ascii;
"alpha", alpha;
"digit", digit;
"alnum", alnum;
"punct", punct;
"graph", graph;
"print", print;
"blank", blank;
"cntrl", cntrl;
"xdigit", xdigit;
"space", space; ]
end
| null | https://raw.githubusercontent.com/mjambon/mikmatch/4ee0d1158370be247763027018bbf54b865014dd/common/charset.ml | ocaml | module C = Set.Make (Char)
type t = C.t
let empty = C.empty
let add = C.add
let singleton = C.singleton
let union = C.union
let diff = C.diff
let add_range first last set =
let r = ref set in
for i = Char.code first to Char.code last do
r := add (Char.chr i) !r
done;
!r
let range c1 c2 = add_range c1 c2 empty
let irange i j = range (Char.chr i) (Char.chr j)
let full = range '\000' '\255'
let full_for_C = C.remove '\000' full
let of_string s =
let accu = ref C.empty in
String.iter (fun c -> accu := C.add c !accu) s;
!accu
let complement set = C.diff full set
let list = C.elements
let nocase set =
C.fold
(fun c set ->
let c1 = Char.lowercase_ascii c
and c2 = Char.uppercase_ascii c in
let set1 = C.add c1 set in
if c1 <> c2 then C.add c2 set1
else set1)
set
C.empty
module Posix =
struct
let lower = range 'a' 'z'
let upper = range 'A' 'Z'
let ascii = range '\x00' '\x7F'
let alpha = union lower upper
let digit = range '0' '9'
let alnum = union alpha digit
let punct = of_string "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~"
let graph = union alnum punct
let print = union (singleton ' ') graph
let blank = of_string " \t"
let cntrl = union (range '\x00' '\x1F') (singleton '\x7F')
let xdigit = of_string "0123456789abcdefABCDEF"
let space = of_string " \t\n\x0B\x0C\r"
let all = [ "lower", lower;
"upper", upper;
"ascii", ascii;
"alpha", alpha;
"digit", digit;
"alnum", alnum;
"punct", punct;
"graph", graph;
"print", print;
"blank", blank;
"cntrl", cntrl;
"xdigit", xdigit;
"space", space; ]
end
|
|
12307d19414f5db7bfe6dc19ea83a8ffc897c32031dd369476364ca812b46e0c | lambdacube3d/lambdacube-edsl | LCDSLType.hs | module LCDSLType where
import Data.Int
import Data.Word
import Data.Typeable
import LCType
import LCAPIType
-- IsScalar means here that the related type is not a tuple, but a GPU primitive type
class GPU a => IsScalar a where
toValue :: a -> Value
toType :: a -> InputType
instance (Typeable dim, Typeable sh, Typeable t, Typeable ar) => IsScalar (Sampler dim sh t ar) where
TODO
TODO
instance IsScalar Int32 where
toValue v = VInt v
toType _ = ITInt
instance IsScalar Word32 where
toValue v = VWord v
toType _ = ITWord
instance IsScalar Float where
toValue v = VFloat v
toType _ = ITFloat
instance IsScalar Bool where
toValue v = VBool v
toType _ = ITBool
instance IsScalar M22F where
toValue v = VM22F v
toType _ = ITM22F
instance IsScalar M23F where
toValue v = VM23F v
toType _ = ITM23F
instance IsScalar M24F where
toValue v = VM24F v
toType _ = ITM24F
instance IsScalar M32F where
toValue v = VM32F v
toType _ = ITM32F
instance IsScalar M33F where
toValue v = VM33F v
toType _ = ITM33F
instance IsScalar M34F where
toValue v = VM34F v
toType _ = ITM34F
instance IsScalar M42F where
toValue v = VM42F v
toType _ = ITM42F
instance IsScalar M43F where
toValue v = VM43F v
toType _ = ITM43F
instance IsScalar M44F where
toValue v = VM44F v
toType _ = ITM44F
instance IsScalar V2F where
toValue v = VV2F v
toType _ = ITV2F
instance IsScalar V3F where
toValue v = VV3F v
toType _ = ITV3F
instance IsScalar V4F where
toValue v = VV4F v
toType _ = ITV4F
instance IsScalar V2I where
toValue v = VV2I v
toType _ = ITV2I
instance IsScalar V3I where
toValue v = VV3I v
toType _ = ITV3I
instance IsScalar V4I where
toValue v = VV4I v
toType _ = ITV4I
instance IsScalar V2U where
toValue v = VV2U v
toType _ = ITV2U
instance IsScalar V3U where
toValue v = VV3U v
toType _ = ITV3U
instance IsScalar V4U where
toValue v = VV4U v
toType _ = ITV4U
instance IsScalar V2B where
toValue v = VV2B v
toType _ = ITV2B
instance IsScalar V3B where
toValue v = VV3B v
toType _ = ITV3B
instance IsScalar V4B where
toValue v = VV4B v
toType _ = ITV4B
GPU type value reification , needed for shader
data Value
= VBool !Bool
| VV2B !V2B
| VV3B !V3B
| VV4B !V4B
| VWord !Word32
| VV2U !V2U
| VV3U !V3U
| VV4U !V4U
| VInt !Int32
| VV2I !V2I
| VV3I !V3I
| VV4I !V4I
| VFloat !Float
| VV2F !V2F
| VV3F !V3F
| VV4F !V4F
| VM22F !M22F
| VM23F !M23F
| VM24F !M24F
| VM32F !M32F
| VM33F !M33F
| VM34F !M34F
| VM42F !M42F
| VM43F !M43F
| VM44F !M44F
deriving (Show,Eq,Ord)
singletonScalarType :: IsScalar a => a -> TupleType ((), a)
singletonScalarType a = PairTuple UnitTuple (SingleTuple a)
GPU type restriction , the functions are used in shader
class (Show a, Typeable a, Typeable (EltRepr a), Typeable (EltRepr' a)) => GPU a where
tupleType :: a -> TupleType (EltRepr a)
tupleType' :: a -> TupleType (EltRepr' a)
instance (Typeable dim, Typeable sh, Typeable t, Typeable ar) => GPU (Sampler dim sh t ar) where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU () where
tupleType _ = UnitTuple
tupleType' _ = UnitTuple
instance GPU Bool where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU Float where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU Int32 where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU Word32 where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V2B where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V2F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V2I where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V2U where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V3B where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V3F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V3I where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V3U where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V4B where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V4F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V4I where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V4U where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M22F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M23F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M24F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M32F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M33F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M34F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M42F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M43F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M44F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance (GPU a, GPU b) => GPU (a, b) where
tupleType (_::(a, b))
= PairTuple (tupleType (undefined :: a)) (tupleType' (undefined :: b))
tupleType' (_::(a, b))
= PairTuple (tupleType (undefined :: a)) (tupleType' (undefined :: b))
instance (GPU a, GPU b, GPU c) => GPU (a, b, c) where
tupleType (_::(a, b, c))
= PairTuple (tupleType (undefined :: (a, b))) (tupleType' (undefined :: c))
tupleType' (_::(a, b, c))
= PairTuple (tupleType (undefined :: (a, b))) (tupleType' (undefined :: c))
instance (GPU a, GPU b, GPU c, GPU d) => GPU (a, b, c, d) where
tupleType (_::(a, b, c, d))
= PairTuple (tupleType (undefined :: (a, b, c))) (tupleType' (undefined :: d))
tupleType' (_::(a, b, c, d))
= PairTuple (tupleType (undefined :: (a, b, c))) (tupleType' (undefined :: d))
instance (GPU a, GPU b, GPU c, GPU d, GPU e) => GPU (a, b, c, d, e) where
tupleType (_::(a, b, c, d, e))
= PairTuple (tupleType (undefined :: (a, b, c, d)))
(tupleType' (undefined :: e))
tupleType' (_::(a, b, c, d, e))
= PairTuple (tupleType (undefined :: (a, b, c, d)))
(tupleType' (undefined :: e))
instance (GPU a, GPU b, GPU c, GPU d, GPU e, GPU f) => GPU (a, b, c, d, e, f) where
tupleType (_::(a, b, c, d, e, f))
= PairTuple (tupleType (undefined :: (a, b, c, d, e)))
(tupleType' (undefined :: f))
tupleType' (_::(a, b, c, d, e, f))
= PairTuple (tupleType (undefined :: (a, b, c, d, e)))
(tupleType' (undefined :: f))
instance (GPU a, GPU b, GPU c, GPU d, GPU e, GPU f, GPU g) => GPU (a, b, c, d, e, f, g) where
tupleType (_::(a, b, c, d, e, f, g))
= PairTuple (tupleType (undefined :: (a, b, c, d, e, f)))
(tupleType' (undefined :: g))
tupleType' (_::(a, b, c, d, e, f, g))
= PairTuple (tupleType (undefined :: (a, b, c, d, e, f)))
(tupleType' (undefined :: g))
instance (GPU a, GPU b, GPU c, GPU d, GPU e, GPU f, GPU g, GPU h) => GPU (a, b, c, d, e, f, g, h) where
tupleType (_::(a, b, c, d, e, f, g, h))
= PairTuple (tupleType (undefined :: (a, b, c, d, e, f, g)))
(tupleType' (undefined :: h))
tupleType' (_::(a, b, c, d, e, f, g, h))
= PairTuple (tupleType (undefined :: (a, b, c, d, e, f, g)))
(tupleType' (undefined :: h))
instance (GPU a, GPU b, GPU c, GPU d, GPU e, GPU f, GPU g, GPU h, GPU i) => GPU (a, b, c, d, e, f, g, h, i) where
tupleType (_::(a, b, c, d, e, f, g, h, i))
= PairTuple (tupleType (undefined :: (a, b, c, d, e, f, g, h)))
(tupleType' (undefined :: i))
tupleType' (_::(a, b, c, d, e, f, g, h, i))
= PairTuple (tupleType (undefined :: (a, b, c, d, e, f, g, h)))
(tupleType' (undefined :: i))
-- stream type restriction, these types can be used in vertex shader input
class GPU a => SGPU a
instance SGPU Int32
instance SGPU Word32
instance SGPU Float
instance SGPU M22F
instance SGPU M23F
instance SGPU M24F
instance SGPU M32F
instance SGPU M33F
instance SGPU M34F
instance SGPU M42F
instance SGPU M43F
instance SGPU M44F
instance SGPU V2F
instance SGPU V3F
instance SGPU V4F
instance SGPU V2I
instance SGPU V3I
instance SGPU V4I
instance SGPU V2U
instance SGPU V3U
instance SGPU V4U
instance (SGPU a, SGPU b) => SGPU (a, b)
instance (SGPU a, SGPU b, SGPU c) => SGPU (a, b, c)
instance (SGPU a, SGPU b, SGPU c, SGPU d) => SGPU (a, b, c, d)
instance (SGPU a, SGPU b, SGPU c, SGPU d, SGPU e) => SGPU (a, b, c, d, e)
instance (SGPU a, SGPU b, SGPU c, SGPU d, SGPU e, SGPU f) => SGPU (a, b, c, d, e, f)
instance (SGPU a, SGPU b, SGPU c, SGPU d, SGPU e, SGPU f, SGPU g) => SGPU (a, b, c, d, e, f, g)
instance (SGPU a, SGPU b, SGPU c, SGPU d, SGPU e, SGPU f, SGPU g, SGPU h) => SGPU (a, b, c, d, e, f, g, h)
instance (SGPU a, SGPU b, SGPU c, SGPU d, SGPU e, SGPU f, SGPU g, SGPU h, SGPU i) => SGPU (a, b, c, d, e, f, g, h, i)
-- uniform type restriction
hint : EltRepr stands for Elementary Type Representation
type family EltRepr a :: *
type instance EltRepr (Sampler dim sh t ar) = ((), Sampler dim sh t ar)
type instance EltRepr () = ()
type instance EltRepr Int32 = ((), Int32)
type instance EltRepr Word32 = ((), Word32)
type instance EltRepr Float = ((), Float)
type instance EltRepr Bool = ((), Bool)
type instance EltRepr V2F = ((), V2F)
type instance EltRepr V2I = ((), V2I)
type instance EltRepr V2U = ((), V2U)
type instance EltRepr V2B = ((), V2B)
type instance EltRepr M22F = ((), M22F)
type instance EltRepr M23F = ((), M23F)
type instance EltRepr M24F = ((), M24F)
type instance EltRepr V3F = ((), V3F)
type instance EltRepr V3I = ((), V3I)
type instance EltRepr V3U = ((), V3U)
type instance EltRepr V3B = ((), V3B)
type instance EltRepr M32F = ((), M32F)
type instance EltRepr M33F = ((), M33F)
type instance EltRepr M34F = ((), M34F)
type instance EltRepr V4F = ((), V4F)
type instance EltRepr V4I = ((), V4I)
type instance EltRepr V4U = ((), V4U)
type instance EltRepr V4B = ((), V4B)
type instance EltRepr M42F = ((), M42F)
type instance EltRepr M43F = ((), M43F)
type instance EltRepr M44F = ((), M44F)
type instance EltRepr (a, b) = (EltRepr a, EltRepr' b)
type instance EltRepr (a, b, c) = (EltRepr (a, b), EltRepr' c)
type instance EltRepr (a, b, c, d) = (EltRepr (a, b, c), EltRepr' d)
type instance EltRepr (a, b, c, d, e) = (EltRepr (a, b, c, d), EltRepr' e)
type instance EltRepr (a, b, c, d, e, f) = (EltRepr (a, b, c, d, e), EltRepr' f)
type instance EltRepr (a, b, c, d, e, f, g) = (EltRepr (a, b, c, d, e, f), EltRepr' g)
type instance EltRepr (a, b, c, d, e, f, g, h) = (EltRepr (a, b, c, d, e, f, g), EltRepr' h)
type instance EltRepr (a, b, c, d, e, f, g, h, i) = (EltRepr (a, b, c, d, e, f, g, h), EltRepr' i)
type family EltRepr' a :: *
type instance EltRepr' (Sampler dim sh t ar) = Sampler dim sh t ar
type instance EltRepr' () = ()
type instance EltRepr' Int32 = Int32
type instance EltRepr' Word32 = Word32
type instance EltRepr' Float = Float
type instance EltRepr' Bool = Bool
type instance EltRepr' V2F = V2F
type instance EltRepr' V2I = V2I
type instance EltRepr' V2U = V2U
type instance EltRepr' V2B = V2B
type instance EltRepr' M22F = M22F
type instance EltRepr' M23F = M23F
type instance EltRepr' M24F = M24F
type instance EltRepr' V3F = V3F
type instance EltRepr' V3I = V3I
type instance EltRepr' V3U = V3U
type instance EltRepr' V3B = V3B
type instance EltRepr' M32F = M32F
type instance EltRepr' M33F = M33F
type instance EltRepr' M34F = M34F
type instance EltRepr' V4F = V4F
type instance EltRepr' V4I = V4I
type instance EltRepr' V4U = V4U
type instance EltRepr' V4B = V4B
type instance EltRepr' M42F = M42F
type instance EltRepr' M43F = M43F
type instance EltRepr' M44F = M44F
type instance EltRepr' (a, b) = (EltRepr a, EltRepr' b)
type instance EltRepr' (a, b, c) = (EltRepr (a, b), EltRepr' c)
type instance EltRepr' (a, b, c, d) = (EltRepr (a, b, c), EltRepr' d)
type instance EltRepr' (a, b, c, d, e) = (EltRepr (a, b, c, d), EltRepr' e)
type instance EltRepr' (a, b, c, d, e, f) = (EltRepr (a, b, c, d, e), EltRepr' f)
type instance EltRepr' (a, b, c, d, e, f, g) = (EltRepr (a, b, c, d, e, f), EltRepr' g)
type instance EltRepr' (a, b, c, d, e, f, g, h) = (EltRepr (a, b, c, d, e, f, g), EltRepr' h)
type instance EltRepr' (a, b, c, d, e, f, g, h, i) = (EltRepr (a, b, c, d, e, f, g, h), EltRepr' i)
-- |Conversion between surface n-tuples and our tuple representation.
--
-- our language uses nested tuple representation
class IsTuple tup where
type TupleRepr tup
fromTuple :: tup -> TupleRepr tup
toTuple :: TupleRepr tup -> tup
instance IsTuple () where
type TupleRepr () = ()
fromTuple = id
toTuple = id
instance IsTuple (a, b) where
type TupleRepr (a, b) = (((), a), b)
fromTuple (x, y) = (((), x), y)
toTuple (((), x), y) = (x, y)
instance IsTuple (a, b, c) where
type TupleRepr (a, b, c) = (TupleRepr (a, b), c)
fromTuple (x, y, z) = ((((), x), y), z)
toTuple ((((), x), y), z) = (x, y, z)
instance IsTuple (a, b, c, d) where
type TupleRepr (a, b, c, d) = (TupleRepr (a, b, c), d)
fromTuple (x, y, z, v) = (((((), x), y), z), v)
toTuple (((((), x), y), z), v) = (x, y, z, v)
instance IsTuple (a, b, c, d, e) where
type TupleRepr (a, b, c, d, e) = (TupleRepr (a, b, c, d), e)
fromTuple (x, y, z, v, w) = ((((((), x), y), z), v), w)
toTuple ((((((), x), y), z), v), w) = (x, y, z, v, w)
instance IsTuple (a, b, c, d, e, f) where
type TupleRepr (a, b, c, d, e, f) = (TupleRepr (a, b, c, d, e), f)
fromTuple (x, y, z, v, w, r) = (((((((), x), y), z), v), w), r)
toTuple (((((((), x), y), z), v), w), r) = (x, y, z, v, w, r)
instance IsTuple (a, b, c, d, e, f, g) where
type TupleRepr (a, b, c, d, e, f, g) = (TupleRepr (a, b, c, d, e, f), g)
fromTuple (x, y, z, v, w, r, s) = ((((((((), x), y), z), v), w), r), s)
toTuple ((((((((), x), y), z), v), w), r), s) = (x, y, z, v, w, r, s)
instance IsTuple (a, b, c, d, e, f, g, h) where
type TupleRepr (a, b, c, d, e, f, g, h) = (TupleRepr (a, b, c, d, e, f, g), h)
fromTuple (x, y, z, v, w, r, s, t) = (((((((((), x), y), z), v), w), r), s), t)
toTuple (((((((((), x), y), z), v), w), r), s), t) = (x, y, z, v, w, r, s, t)
instance IsTuple (a, b, c, d, e, f, g, h, i) where
type TupleRepr (a, b, c, d, e, f, g, h, i) = (TupleRepr (a, b, c, d, e, f, g, h), i)
fromTuple (x, y, z, v, w, r, s, t, u) = ((((((((((), x), y), z), v), w), r), s), t), u)
toTuple ((((((((((), x), y), z), v), w), r), s), t), u) = (x, y, z, v, w, r, s, t, u)
-- Tuple representation
-- --------------------
-- |We represent tuples as heterogenous lists, typed by a type list.
--
data Tuple c t where
NilTup :: Tuple c ()
SnocTup :: GPU t => Tuple c s -> c t -> Tuple c (s, t)
-- |Type-safe projection indicies for tuples.
--
NB : We index tuples by starting to count from the * right * !
--
data TupleIdx t e where
ZeroTupIdx :: GPU s => TupleIdx (t, s) s
SuccTupIdx :: TupleIdx t e -> TupleIdx (t, s) e
-- Auxiliary tuple index constants
--
tix0 :: GPU s => TupleIdx (t, s) s
tix0 = ZeroTupIdx
tix1 :: GPU s => TupleIdx ((t, s), s1) s
tix1 = SuccTupIdx tix0
tix2 :: GPU s => TupleIdx (((t, s), s1), s2) s
tix2 = SuccTupIdx tix1
tix3 :: GPU s => TupleIdx ((((t, s), s1), s2), s3) s
tix3 = SuccTupIdx tix2
tix4 :: GPU s => TupleIdx (((((t, s), s1), s2), s3), s4) s
tix4 = SuccTupIdx tix3
tix5 :: GPU s => TupleIdx ((((((t, s), s1), s2), s3), s4), s5) s
tix5 = SuccTupIdx tix4
tix6 :: GPU s => TupleIdx (((((((t, s), s1), s2), s3), s4), s5), s6) s
tix6 = SuccTupIdx tix5
tix7 :: GPU s => TupleIdx ((((((((t, s), s1), s2), s3), s4), s5), s6), s7) s
tix7 = SuccTupIdx tix6
tix8 :: GPU s => TupleIdx (((((((((t, s), s1), s2), s3), s4), s5), s6), s7), s8) s
tix8 = SuccTupIdx tix7
used in shader
data TupleType a where
UnitTuple :: TupleType ()
SingleTuple :: IsScalar a => a -> TupleType a
PairTuple :: TupleType a -> TupleType b -> TupleType (a, b)
Extend Typeable support for 8- and 9 - tuple
-- ------------------------------------------
myMkTyCon :: String -> TyCon
myMkTyCon = mkTyCon
class Typeable8 t where
typeOf8 :: t a b c d e f g h -> TypeRep
instance Typeable8 (,,,,,,,) where
typeOf8 _ = myMkTyCon "(,,,,,,,)" `mkTyConApp` []
typeOf7Default :: (Typeable8 t, Typeable a) => t a b c d e f g h -> TypeRep
typeOf7Default x = typeOf7 x `mkAppTy` typeOf (argType x)
where
argType :: t a b c d e f g h -> a
argType = undefined
instance (Typeable8 s, Typeable a) => Typeable7 (s a) where
typeOf7 = typeOf7Default
class Typeable9 t where
typeOf9 :: t a b c d e f g h i -> TypeRep
instance Typeable9 (,,,,,,,,) where
typeOf9 _ = myMkTyCon "(,,,,,,,,)" `mkTyConApp` []
typeOf8Default :: (Typeable9 t, Typeable a) => t a b c d e f g h i -> TypeRep
typeOf8Default x = typeOf8 x `mkAppTy` typeOf (argType x)
where
argType :: t a b c d e f g h i -> a
argType = undefined
instance (Typeable9 s, Typeable a) => Typeable8 (s a) where
typeOf8 = typeOf8Default
| null | https://raw.githubusercontent.com/lambdacube3d/lambdacube-edsl/4347bb0ed344e71c0333136cf2e162aec5941df7/lambdacube-core/tmp/archive/LCDSLType.hs | haskell | IsScalar means here that the related type is not a tuple, but a GPU primitive type
stream type restriction, these types can be used in vertex shader input
uniform type restriction
|Conversion between surface n-tuples and our tuple representation.
our language uses nested tuple representation
Tuple representation
--------------------
|We represent tuples as heterogenous lists, typed by a type list.
|Type-safe projection indicies for tuples.
Auxiliary tuple index constants
------------------------------------------ | module LCDSLType where
import Data.Int
import Data.Word
import Data.Typeable
import LCType
import LCAPIType
class GPU a => IsScalar a where
toValue :: a -> Value
toType :: a -> InputType
instance (Typeable dim, Typeable sh, Typeable t, Typeable ar) => IsScalar (Sampler dim sh t ar) where
TODO
TODO
instance IsScalar Int32 where
toValue v = VInt v
toType _ = ITInt
instance IsScalar Word32 where
toValue v = VWord v
toType _ = ITWord
instance IsScalar Float where
toValue v = VFloat v
toType _ = ITFloat
instance IsScalar Bool where
toValue v = VBool v
toType _ = ITBool
instance IsScalar M22F where
toValue v = VM22F v
toType _ = ITM22F
instance IsScalar M23F where
toValue v = VM23F v
toType _ = ITM23F
instance IsScalar M24F where
toValue v = VM24F v
toType _ = ITM24F
instance IsScalar M32F where
toValue v = VM32F v
toType _ = ITM32F
instance IsScalar M33F where
toValue v = VM33F v
toType _ = ITM33F
instance IsScalar M34F where
toValue v = VM34F v
toType _ = ITM34F
instance IsScalar M42F where
toValue v = VM42F v
toType _ = ITM42F
instance IsScalar M43F where
toValue v = VM43F v
toType _ = ITM43F
instance IsScalar M44F where
toValue v = VM44F v
toType _ = ITM44F
instance IsScalar V2F where
toValue v = VV2F v
toType _ = ITV2F
instance IsScalar V3F where
toValue v = VV3F v
toType _ = ITV3F
instance IsScalar V4F where
toValue v = VV4F v
toType _ = ITV4F
instance IsScalar V2I where
toValue v = VV2I v
toType _ = ITV2I
instance IsScalar V3I where
toValue v = VV3I v
toType _ = ITV3I
instance IsScalar V4I where
toValue v = VV4I v
toType _ = ITV4I
instance IsScalar V2U where
toValue v = VV2U v
toType _ = ITV2U
instance IsScalar V3U where
toValue v = VV3U v
toType _ = ITV3U
instance IsScalar V4U where
toValue v = VV4U v
toType _ = ITV4U
instance IsScalar V2B where
toValue v = VV2B v
toType _ = ITV2B
instance IsScalar V3B where
toValue v = VV3B v
toType _ = ITV3B
instance IsScalar V4B where
toValue v = VV4B v
toType _ = ITV4B
GPU type value reification , needed for shader
data Value
= VBool !Bool
| VV2B !V2B
| VV3B !V3B
| VV4B !V4B
| VWord !Word32
| VV2U !V2U
| VV3U !V3U
| VV4U !V4U
| VInt !Int32
| VV2I !V2I
| VV3I !V3I
| VV4I !V4I
| VFloat !Float
| VV2F !V2F
| VV3F !V3F
| VV4F !V4F
| VM22F !M22F
| VM23F !M23F
| VM24F !M24F
| VM32F !M32F
| VM33F !M33F
| VM34F !M34F
| VM42F !M42F
| VM43F !M43F
| VM44F !M44F
deriving (Show,Eq,Ord)
singletonScalarType :: IsScalar a => a -> TupleType ((), a)
singletonScalarType a = PairTuple UnitTuple (SingleTuple a)
GPU type restriction , the functions are used in shader
class (Show a, Typeable a, Typeable (EltRepr a), Typeable (EltRepr' a)) => GPU a where
tupleType :: a -> TupleType (EltRepr a)
tupleType' :: a -> TupleType (EltRepr' a)
instance (Typeable dim, Typeable sh, Typeable t, Typeable ar) => GPU (Sampler dim sh t ar) where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU () where
tupleType _ = UnitTuple
tupleType' _ = UnitTuple
instance GPU Bool where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU Float where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU Int32 where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU Word32 where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V2B where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V2F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V2I where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V2U where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V3B where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V3F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V3I where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V3U where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V4B where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V4F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V4I where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU V4U where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M22F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M23F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M24F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M32F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M33F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M34F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M42F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M43F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance GPU M44F where
tupleType v = singletonScalarType v
tupleType' v = SingleTuple v
instance (GPU a, GPU b) => GPU (a, b) where
tupleType (_::(a, b))
= PairTuple (tupleType (undefined :: a)) (tupleType' (undefined :: b))
tupleType' (_::(a, b))
= PairTuple (tupleType (undefined :: a)) (tupleType' (undefined :: b))
instance (GPU a, GPU b, GPU c) => GPU (a, b, c) where
tupleType (_::(a, b, c))
= PairTuple (tupleType (undefined :: (a, b))) (tupleType' (undefined :: c))
tupleType' (_::(a, b, c))
= PairTuple (tupleType (undefined :: (a, b))) (tupleType' (undefined :: c))
instance (GPU a, GPU b, GPU c, GPU d) => GPU (a, b, c, d) where
tupleType (_::(a, b, c, d))
= PairTuple (tupleType (undefined :: (a, b, c))) (tupleType' (undefined :: d))
tupleType' (_::(a, b, c, d))
= PairTuple (tupleType (undefined :: (a, b, c))) (tupleType' (undefined :: d))
instance (GPU a, GPU b, GPU c, GPU d, GPU e) => GPU (a, b, c, d, e) where
tupleType (_::(a, b, c, d, e))
= PairTuple (tupleType (undefined :: (a, b, c, d)))
(tupleType' (undefined :: e))
tupleType' (_::(a, b, c, d, e))
= PairTuple (tupleType (undefined :: (a, b, c, d)))
(tupleType' (undefined :: e))
instance (GPU a, GPU b, GPU c, GPU d, GPU e, GPU f) => GPU (a, b, c, d, e, f) where
tupleType (_::(a, b, c, d, e, f))
= PairTuple (tupleType (undefined :: (a, b, c, d, e)))
(tupleType' (undefined :: f))
tupleType' (_::(a, b, c, d, e, f))
= PairTuple (tupleType (undefined :: (a, b, c, d, e)))
(tupleType' (undefined :: f))
instance (GPU a, GPU b, GPU c, GPU d, GPU e, GPU f, GPU g) => GPU (a, b, c, d, e, f, g) where
tupleType (_::(a, b, c, d, e, f, g))
= PairTuple (tupleType (undefined :: (a, b, c, d, e, f)))
(tupleType' (undefined :: g))
tupleType' (_::(a, b, c, d, e, f, g))
= PairTuple (tupleType (undefined :: (a, b, c, d, e, f)))
(tupleType' (undefined :: g))
instance (GPU a, GPU b, GPU c, GPU d, GPU e, GPU f, GPU g, GPU h) => GPU (a, b, c, d, e, f, g, h) where
tupleType (_::(a, b, c, d, e, f, g, h))
= PairTuple (tupleType (undefined :: (a, b, c, d, e, f, g)))
(tupleType' (undefined :: h))
tupleType' (_::(a, b, c, d, e, f, g, h))
= PairTuple (tupleType (undefined :: (a, b, c, d, e, f, g)))
(tupleType' (undefined :: h))
instance (GPU a, GPU b, GPU c, GPU d, GPU e, GPU f, GPU g, GPU h, GPU i) => GPU (a, b, c, d, e, f, g, h, i) where
tupleType (_::(a, b, c, d, e, f, g, h, i))
= PairTuple (tupleType (undefined :: (a, b, c, d, e, f, g, h)))
(tupleType' (undefined :: i))
tupleType' (_::(a, b, c, d, e, f, g, h, i))
= PairTuple (tupleType (undefined :: (a, b, c, d, e, f, g, h)))
(tupleType' (undefined :: i))
class GPU a => SGPU a
instance SGPU Int32
instance SGPU Word32
instance SGPU Float
instance SGPU M22F
instance SGPU M23F
instance SGPU M24F
instance SGPU M32F
instance SGPU M33F
instance SGPU M34F
instance SGPU M42F
instance SGPU M43F
instance SGPU M44F
instance SGPU V2F
instance SGPU V3F
instance SGPU V4F
instance SGPU V2I
instance SGPU V3I
instance SGPU V4I
instance SGPU V2U
instance SGPU V3U
instance SGPU V4U
instance (SGPU a, SGPU b) => SGPU (a, b)
instance (SGPU a, SGPU b, SGPU c) => SGPU (a, b, c)
instance (SGPU a, SGPU b, SGPU c, SGPU d) => SGPU (a, b, c, d)
instance (SGPU a, SGPU b, SGPU c, SGPU d, SGPU e) => SGPU (a, b, c, d, e)
instance (SGPU a, SGPU b, SGPU c, SGPU d, SGPU e, SGPU f) => SGPU (a, b, c, d, e, f)
instance (SGPU a, SGPU b, SGPU c, SGPU d, SGPU e, SGPU f, SGPU g) => SGPU (a, b, c, d, e, f, g)
instance (SGPU a, SGPU b, SGPU c, SGPU d, SGPU e, SGPU f, SGPU g, SGPU h) => SGPU (a, b, c, d, e, f, g, h)
instance (SGPU a, SGPU b, SGPU c, SGPU d, SGPU e, SGPU f, SGPU g, SGPU h, SGPU i) => SGPU (a, b, c, d, e, f, g, h, i)
hint : EltRepr stands for Elementary Type Representation
type family EltRepr a :: *
type instance EltRepr (Sampler dim sh t ar) = ((), Sampler dim sh t ar)
type instance EltRepr () = ()
type instance EltRepr Int32 = ((), Int32)
type instance EltRepr Word32 = ((), Word32)
type instance EltRepr Float = ((), Float)
type instance EltRepr Bool = ((), Bool)
type instance EltRepr V2F = ((), V2F)
type instance EltRepr V2I = ((), V2I)
type instance EltRepr V2U = ((), V2U)
type instance EltRepr V2B = ((), V2B)
type instance EltRepr M22F = ((), M22F)
type instance EltRepr M23F = ((), M23F)
type instance EltRepr M24F = ((), M24F)
type instance EltRepr V3F = ((), V3F)
type instance EltRepr V3I = ((), V3I)
type instance EltRepr V3U = ((), V3U)
type instance EltRepr V3B = ((), V3B)
type instance EltRepr M32F = ((), M32F)
type instance EltRepr M33F = ((), M33F)
type instance EltRepr M34F = ((), M34F)
type instance EltRepr V4F = ((), V4F)
type instance EltRepr V4I = ((), V4I)
type instance EltRepr V4U = ((), V4U)
type instance EltRepr V4B = ((), V4B)
type instance EltRepr M42F = ((), M42F)
type instance EltRepr M43F = ((), M43F)
type instance EltRepr M44F = ((), M44F)
type instance EltRepr (a, b) = (EltRepr a, EltRepr' b)
type instance EltRepr (a, b, c) = (EltRepr (a, b), EltRepr' c)
type instance EltRepr (a, b, c, d) = (EltRepr (a, b, c), EltRepr' d)
type instance EltRepr (a, b, c, d, e) = (EltRepr (a, b, c, d), EltRepr' e)
type instance EltRepr (a, b, c, d, e, f) = (EltRepr (a, b, c, d, e), EltRepr' f)
type instance EltRepr (a, b, c, d, e, f, g) = (EltRepr (a, b, c, d, e, f), EltRepr' g)
type instance EltRepr (a, b, c, d, e, f, g, h) = (EltRepr (a, b, c, d, e, f, g), EltRepr' h)
type instance EltRepr (a, b, c, d, e, f, g, h, i) = (EltRepr (a, b, c, d, e, f, g, h), EltRepr' i)
type family EltRepr' a :: *
type instance EltRepr' (Sampler dim sh t ar) = Sampler dim sh t ar
type instance EltRepr' () = ()
type instance EltRepr' Int32 = Int32
type instance EltRepr' Word32 = Word32
type instance EltRepr' Float = Float
type instance EltRepr' Bool = Bool
type instance EltRepr' V2F = V2F
type instance EltRepr' V2I = V2I
type instance EltRepr' V2U = V2U
type instance EltRepr' V2B = V2B
type instance EltRepr' M22F = M22F
type instance EltRepr' M23F = M23F
type instance EltRepr' M24F = M24F
type instance EltRepr' V3F = V3F
type instance EltRepr' V3I = V3I
type instance EltRepr' V3U = V3U
type instance EltRepr' V3B = V3B
type instance EltRepr' M32F = M32F
type instance EltRepr' M33F = M33F
type instance EltRepr' M34F = M34F
type instance EltRepr' V4F = V4F
type instance EltRepr' V4I = V4I
type instance EltRepr' V4U = V4U
type instance EltRepr' V4B = V4B
type instance EltRepr' M42F = M42F
type instance EltRepr' M43F = M43F
type instance EltRepr' M44F = M44F
type instance EltRepr' (a, b) = (EltRepr a, EltRepr' b)
type instance EltRepr' (a, b, c) = (EltRepr (a, b), EltRepr' c)
type instance EltRepr' (a, b, c, d) = (EltRepr (a, b, c), EltRepr' d)
type instance EltRepr' (a, b, c, d, e) = (EltRepr (a, b, c, d), EltRepr' e)
type instance EltRepr' (a, b, c, d, e, f) = (EltRepr (a, b, c, d, e), EltRepr' f)
type instance EltRepr' (a, b, c, d, e, f, g) = (EltRepr (a, b, c, d, e, f), EltRepr' g)
type instance EltRepr' (a, b, c, d, e, f, g, h) = (EltRepr (a, b, c, d, e, f, g), EltRepr' h)
type instance EltRepr' (a, b, c, d, e, f, g, h, i) = (EltRepr (a, b, c, d, e, f, g, h), EltRepr' i)
class IsTuple tup where
type TupleRepr tup
fromTuple :: tup -> TupleRepr tup
toTuple :: TupleRepr tup -> tup
instance IsTuple () where
type TupleRepr () = ()
fromTuple = id
toTuple = id
instance IsTuple (a, b) where
type TupleRepr (a, b) = (((), a), b)
fromTuple (x, y) = (((), x), y)
toTuple (((), x), y) = (x, y)
instance IsTuple (a, b, c) where
type TupleRepr (a, b, c) = (TupleRepr (a, b), c)
fromTuple (x, y, z) = ((((), x), y), z)
toTuple ((((), x), y), z) = (x, y, z)
instance IsTuple (a, b, c, d) where
type TupleRepr (a, b, c, d) = (TupleRepr (a, b, c), d)
fromTuple (x, y, z, v) = (((((), x), y), z), v)
toTuple (((((), x), y), z), v) = (x, y, z, v)
instance IsTuple (a, b, c, d, e) where
type TupleRepr (a, b, c, d, e) = (TupleRepr (a, b, c, d), e)
fromTuple (x, y, z, v, w) = ((((((), x), y), z), v), w)
toTuple ((((((), x), y), z), v), w) = (x, y, z, v, w)
instance IsTuple (a, b, c, d, e, f) where
type TupleRepr (a, b, c, d, e, f) = (TupleRepr (a, b, c, d, e), f)
fromTuple (x, y, z, v, w, r) = (((((((), x), y), z), v), w), r)
toTuple (((((((), x), y), z), v), w), r) = (x, y, z, v, w, r)
instance IsTuple (a, b, c, d, e, f, g) where
type TupleRepr (a, b, c, d, e, f, g) = (TupleRepr (a, b, c, d, e, f), g)
fromTuple (x, y, z, v, w, r, s) = ((((((((), x), y), z), v), w), r), s)
toTuple ((((((((), x), y), z), v), w), r), s) = (x, y, z, v, w, r, s)
instance IsTuple (a, b, c, d, e, f, g, h) where
type TupleRepr (a, b, c, d, e, f, g, h) = (TupleRepr (a, b, c, d, e, f, g), h)
fromTuple (x, y, z, v, w, r, s, t) = (((((((((), x), y), z), v), w), r), s), t)
toTuple (((((((((), x), y), z), v), w), r), s), t) = (x, y, z, v, w, r, s, t)
instance IsTuple (a, b, c, d, e, f, g, h, i) where
type TupleRepr (a, b, c, d, e, f, g, h, i) = (TupleRepr (a, b, c, d, e, f, g, h), i)
fromTuple (x, y, z, v, w, r, s, t, u) = ((((((((((), x), y), z), v), w), r), s), t), u)
toTuple ((((((((((), x), y), z), v), w), r), s), t), u) = (x, y, z, v, w, r, s, t, u)
data Tuple c t where
NilTup :: Tuple c ()
SnocTup :: GPU t => Tuple c s -> c t -> Tuple c (s, t)
NB : We index tuples by starting to count from the * right * !
data TupleIdx t e where
ZeroTupIdx :: GPU s => TupleIdx (t, s) s
SuccTupIdx :: TupleIdx t e -> TupleIdx (t, s) e
tix0 :: GPU s => TupleIdx (t, s) s
tix0 = ZeroTupIdx
tix1 :: GPU s => TupleIdx ((t, s), s1) s
tix1 = SuccTupIdx tix0
tix2 :: GPU s => TupleIdx (((t, s), s1), s2) s
tix2 = SuccTupIdx tix1
tix3 :: GPU s => TupleIdx ((((t, s), s1), s2), s3) s
tix3 = SuccTupIdx tix2
tix4 :: GPU s => TupleIdx (((((t, s), s1), s2), s3), s4) s
tix4 = SuccTupIdx tix3
tix5 :: GPU s => TupleIdx ((((((t, s), s1), s2), s3), s4), s5) s
tix5 = SuccTupIdx tix4
tix6 :: GPU s => TupleIdx (((((((t, s), s1), s2), s3), s4), s5), s6) s
tix6 = SuccTupIdx tix5
tix7 :: GPU s => TupleIdx ((((((((t, s), s1), s2), s3), s4), s5), s6), s7) s
tix7 = SuccTupIdx tix6
tix8 :: GPU s => TupleIdx (((((((((t, s), s1), s2), s3), s4), s5), s6), s7), s8) s
tix8 = SuccTupIdx tix7
used in shader
data TupleType a where
UnitTuple :: TupleType ()
SingleTuple :: IsScalar a => a -> TupleType a
PairTuple :: TupleType a -> TupleType b -> TupleType (a, b)
Extend Typeable support for 8- and 9 - tuple
myMkTyCon :: String -> TyCon
myMkTyCon = mkTyCon
class Typeable8 t where
typeOf8 :: t a b c d e f g h -> TypeRep
instance Typeable8 (,,,,,,,) where
typeOf8 _ = myMkTyCon "(,,,,,,,)" `mkTyConApp` []
typeOf7Default :: (Typeable8 t, Typeable a) => t a b c d e f g h -> TypeRep
typeOf7Default x = typeOf7 x `mkAppTy` typeOf (argType x)
where
argType :: t a b c d e f g h -> a
argType = undefined
instance (Typeable8 s, Typeable a) => Typeable7 (s a) where
typeOf7 = typeOf7Default
class Typeable9 t where
typeOf9 :: t a b c d e f g h i -> TypeRep
instance Typeable9 (,,,,,,,,) where
typeOf9 _ = myMkTyCon "(,,,,,,,,)" `mkTyConApp` []
typeOf8Default :: (Typeable9 t, Typeable a) => t a b c d e f g h i -> TypeRep
typeOf8Default x = typeOf8 x `mkAppTy` typeOf (argType x)
where
argType :: t a b c d e f g h i -> a
argType = undefined
instance (Typeable9 s, Typeable a) => Typeable8 (s a) where
typeOf8 = typeOf8Default
|
f3c571459fb0809172a773ad54893a7088a0ade517c56e9dd496232451f5da24 | jiangpengnju/htdp2e | ex199.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-abbr-reader.ss" "lang")((modname ex199) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
; Design insert-everywhere/in-all-words.
; Start with a complete wish list entry. Supplement it with tests for empty lists,
a list with one - letter word and another list with two - letter word , etc .
; Hints:
; (1) Reconsider the examples.
( 2 ) You want to use the BSL+ operation append , which consumes two lists
and produces the concatenation of the two lists
( 3 ) The solution to this ex is a series of functions . Patiently stick to
; the design recipe and systematically work through your wish list.
(require 2htdp/batch-io)
; constants
; On OS X:
(define DICTIONARY-LOCATION "/usr/share/dict/words")
(define DICTIONARY-AS-LIST (read-lines DICTIONARY-LOCATION))
; A Word is one of:
; - '()
; (cons 1String Word)
; interpretation: a String as a list of single Strings (letters)
(define word1 (list "d" "e"))
(define word2 (list "c" "a" "t"))
(define word3 (list "r" "a" "t"))
A List - of - words is one of :
; - '()
; (cons Word List-of-words)
(define low1 (list word1 word2))
; String -> List-of-strings
; find all words that the letters of some given word spell
(check-member-of (alternative-words "cat")
(list "act" "cat")
(list "cat" "act"))
; List-of-strings -> Boolean
; checks if w contains all alternative words from "rat"
(define (all-words-from-rat? w)
(and (member? "art" w)
(member? "tar" w)
(member? "rat" w)))
(check-satisfied (alternative-words "rat") all-words-from-rat?)
(define (alternative-words s)
(in-dictionary (words->strings (arrangements (string->word s)))))
; List-of-strings -> List-of-strings
; pick out all those Strings that occur in the dictionary
(check-expect (in-dictionary (list "cat" "tac" "act"))
(list "cat" "act"))
(define (in-dictionary los)
(cond
[(empty? los) '()]
[(good-word? (first los) DICTIONARY-AS-LIST)
(cons (first los) (in-dictionary (rest los)))]
[else
(in-dictionary (rest los))]))
; String List-of-strings -> Boolean
; is given s in the dictionary as a list
(check-expect (good-word? "aaa" '()) #f)
(check-expect (good-word? "abc" (list "abc" "bcd")) #t)
(check-expect (good-word? "cat" DICTIONARY-AS-LIST) #t)
(check-expect (good-word? "tac" DICTIONARY-AS-LIST) #f)
(define (good-word? s l)
(cond
[(empty? l) #f]
[(string=? s (first l)) #t]
[(string>? s (first l))
(good-word? s (rest l))]
[else #f]))
; List-of-words -> List-of-strings
; turn all words in low into Strings
(check-expect (words->strings '()) '())
(check-expect (words->strings (list (list "c" "a" "t")
(list "t" "a" "c")))
(list "cat" "tac"))
(define (words->strings low)
(cond
[(empty? low) '()]
[else
(cons (word->string (first low))
(words->strings (rest low)))]))
; String -> Word
; convert s to the chosen word representation
(check-expect (string->word "") '())
(check-expect (string->word "cat") (list "c" "a" "t"))
(define (string->word s)
(explode s))
; Word -> String
; convert w to a string
(check-expect (word->string (list "c" "a" "t")) "cat")
(check-expect (word->string '()) "")
(define (word->string w)
(implode w))
; Word -> List-of-words
; creates a list of all arrangements of the letters in w
(define (arrangements w)
(cond
[(empty? w) (list '())]
[else (insert-everywhere/in-all-words
(first w) (arrangements (rest w)))]))
; 1String List-of-words -> List-of-words
produces a list of words like low , but with the first argument
(check-expect (insert-everywhere/in-all-words "r" (list '()))
(list (list "r")))
(check-expect (insert-everywhere/in-all-words "e" (list (list "r")))
(list (list "e" "r")
(list "r" "e")))
(define (insert-everywhere/in-all-words letter low)
(cond
[(empty? low) '()]
[(empty? (first low)) (list (list letter))]
[else
(append (insert-everywhere/in-one-word letter (first low))
(insert-everywhere/in-all-words letter (rest low)))]))
; 1String Word -> List-of-words
; produce a list of words whose items are like w, but with letter inserted
; at the beginning, between all letters, and at the end of w.
(check-expect (insert-everywhere/in-one-word "r" '())
(list (list "r")))
(check-expect (insert-everywhere/in-one-word "e" (list "r"))
(list (list "e" "r")
(list "r" "e")))
(check-expect (insert-everywhere/in-one-word "d" (list "e" "r"))
(list (list "d" "e" "r")
(list "e" "d" "r")
(list "e" "r" "d")))
(define (insert-everywhere/in-one-word letter w)
(cond
[(empty? w) (list (list letter))]
[else
(cons (cons letter w)
(add-at-head (first w)
(insert-everywhere/in-one-word letter (rest w))))]))
; 1String List-of-words -> List-of-words
; add the letter to the head of all items of low
(check-expect (add-at-head "e" (list (list "d" "r")
(list "r" "d")))
(list (list "e" "d" "r")
(list "e" "r" "d")))
(define (add-at-head letter low)
(cond
[(empty? low) '()]
[else
(cons (cons letter (first low))
(add-at-head letter (rest low)))]))
| null | https://raw.githubusercontent.com/jiangpengnju/htdp2e/d41555519fbb378330f75c88141f72b00a9ab1d3/arbitrarily-large-data/extended-exercises-lists/ex199.rkt | racket | about the language level of this file in a form that our tools can easily process.
Design insert-everywhere/in-all-words.
Start with a complete wish list entry. Supplement it with tests for empty lists,
Hints:
(1) Reconsider the examples.
the design recipe and systematically work through your wish list.
constants
On OS X:
A Word is one of:
- '()
(cons 1String Word)
interpretation: a String as a list of single Strings (letters)
- '()
(cons Word List-of-words)
String -> List-of-strings
find all words that the letters of some given word spell
List-of-strings -> Boolean
checks if w contains all alternative words from "rat"
List-of-strings -> List-of-strings
pick out all those Strings that occur in the dictionary
String List-of-strings -> Boolean
is given s in the dictionary as a list
List-of-words -> List-of-strings
turn all words in low into Strings
String -> Word
convert s to the chosen word representation
Word -> String
convert w to a string
Word -> List-of-words
creates a list of all arrangements of the letters in w
1String List-of-words -> List-of-words
1String Word -> List-of-words
produce a list of words whose items are like w, but with letter inserted
at the beginning, between all letters, and at the end of w.
1String List-of-words -> List-of-words
add the letter to the head of all items of low | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-abbr-reader.ss" "lang")((modname ex199) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
a list with one - letter word and another list with two - letter word , etc .
( 2 ) You want to use the BSL+ operation append , which consumes two lists
and produces the concatenation of the two lists
( 3 ) The solution to this ex is a series of functions . Patiently stick to
(require 2htdp/batch-io)
(define DICTIONARY-LOCATION "/usr/share/dict/words")
(define DICTIONARY-AS-LIST (read-lines DICTIONARY-LOCATION))
(define word1 (list "d" "e"))
(define word2 (list "c" "a" "t"))
(define word3 (list "r" "a" "t"))
A List - of - words is one of :
(define low1 (list word1 word2))
(check-member-of (alternative-words "cat")
(list "act" "cat")
(list "cat" "act"))
(define (all-words-from-rat? w)
(and (member? "art" w)
(member? "tar" w)
(member? "rat" w)))
(check-satisfied (alternative-words "rat") all-words-from-rat?)
(define (alternative-words s)
(in-dictionary (words->strings (arrangements (string->word s)))))
(check-expect (in-dictionary (list "cat" "tac" "act"))
(list "cat" "act"))
(define (in-dictionary los)
(cond
[(empty? los) '()]
[(good-word? (first los) DICTIONARY-AS-LIST)
(cons (first los) (in-dictionary (rest los)))]
[else
(in-dictionary (rest los))]))
(check-expect (good-word? "aaa" '()) #f)
(check-expect (good-word? "abc" (list "abc" "bcd")) #t)
(check-expect (good-word? "cat" DICTIONARY-AS-LIST) #t)
(check-expect (good-word? "tac" DICTIONARY-AS-LIST) #f)
(define (good-word? s l)
(cond
[(empty? l) #f]
[(string=? s (first l)) #t]
[(string>? s (first l))
(good-word? s (rest l))]
[else #f]))
(check-expect (words->strings '()) '())
(check-expect (words->strings (list (list "c" "a" "t")
(list "t" "a" "c")))
(list "cat" "tac"))
(define (words->strings low)
(cond
[(empty? low) '()]
[else
(cons (word->string (first low))
(words->strings (rest low)))]))
(check-expect (string->word "") '())
(check-expect (string->word "cat") (list "c" "a" "t"))
(define (string->word s)
(explode s))
(check-expect (word->string (list "c" "a" "t")) "cat")
(check-expect (word->string '()) "")
(define (word->string w)
(implode w))
(define (arrangements w)
(cond
[(empty? w) (list '())]
[else (insert-everywhere/in-all-words
(first w) (arrangements (rest w)))]))
produces a list of words like low , but with the first argument
(check-expect (insert-everywhere/in-all-words "r" (list '()))
(list (list "r")))
(check-expect (insert-everywhere/in-all-words "e" (list (list "r")))
(list (list "e" "r")
(list "r" "e")))
(define (insert-everywhere/in-all-words letter low)
(cond
[(empty? low) '()]
[(empty? (first low)) (list (list letter))]
[else
(append (insert-everywhere/in-one-word letter (first low))
(insert-everywhere/in-all-words letter (rest low)))]))
(check-expect (insert-everywhere/in-one-word "r" '())
(list (list "r")))
(check-expect (insert-everywhere/in-one-word "e" (list "r"))
(list (list "e" "r")
(list "r" "e")))
(check-expect (insert-everywhere/in-one-word "d" (list "e" "r"))
(list (list "d" "e" "r")
(list "e" "d" "r")
(list "e" "r" "d")))
(define (insert-everywhere/in-one-word letter w)
(cond
[(empty? w) (list (list letter))]
[else
(cons (cons letter w)
(add-at-head (first w)
(insert-everywhere/in-one-word letter (rest w))))]))
(check-expect (add-at-head "e" (list (list "d" "r")
(list "r" "d")))
(list (list "e" "d" "r")
(list "e" "r" "d")))
(define (add-at-head letter low)
(cond
[(empty? low) '()]
[else
(cons (cons letter (first low))
(add-at-head letter (rest low)))]))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.