_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
42b4b15b4ce9195134fc27c32145094a774f21bcf57541e7c484a7b4e3d929eb | uw-unsat/serval | define.rkt | #lang rosette
(require
(for-syntax
(only-in racket/syntax format-id))
"../base.rkt")
(provide define-insn)
; The main macro for defining instructions.
(define-syntax (define-insn stx)
(syntax-case stx ()
[(_ (arg ...) #:encode encode [(field ...) op interp] ...)
#'(begin
(struct op (arg ...)
#:transparent
#:guard (lambda (arg ... name)
(values
; split for type checking
(for/all ([arg arg #:exhaustive])
(guard arg)
arg) ...))
#:methods gen:instruction
[(define (instruction-encode insn)
(define lst
(match-let ([(op arg ...) insn])
((lambda (arg ...) (encode field ...)) arg ...)))
(apply concat (map (lambda (x) (if (box? x) (unbox x) x)) lst)))
(define (instruction-run insn cpu)
(match-let ([(op arg ...) insn])
(interp cpu arg ...)))])
... )]))
; Type checking guards.
(define-syntax (guard stx)
(syntax-case stx ()
[(_ arg)
(with-syntax ([ctor (format-id stx "guard-~a" (syntax-e #'arg))])
#'(ctor arg))]))
(define-syntax (define-guard stx)
(syntax-case stx ()
[(_ name type)
(with-syntax ([ctor (format-id stx "guard-~a" (syntax-e #'name))])
#'(define (ctor v)
(assert (type v) (format "~a: expected type ~a" v type))))]))
(define (gpr? r)
(and (box? r) ((bitvector 4) (unbox r))))
(define-guard P (bitvector 1))
(define-guard Rd gpr?)
(define-guard RdHi gpr?)
(define-guard RdLo gpr?)
(define-guard Rm gpr?)
(define-guard Rn gpr?)
(define-guard Rs gpr?)
(define-guard Rt gpr?)
(define-guard S (bitvector 1))
(define-guard U (bitvector 1))
(define-guard W (bitvector 1))
(define-guard imm4 (bitvector 4))
(define-guard imm4H (bitvector 4))
(define-guard imm4L (bitvector 4))
(define-guard imm5 (bitvector 5))
(define-guard imm12 (bitvector 12))
(define-guard imm24 (bitvector 24))
(define-guard rotate (bitvector 2))
(define-guard stype (bitvector 2))
(define-guard register_list (bitvector 16))
| null | https://raw.githubusercontent.com/uw-unsat/serval/be11ecccf03f81b8bd0557acf8385a6a5d4f51ed/serval/arm32/interp/define.rkt | racket | The main macro for defining instructions.
split for type checking
Type checking guards. | #lang rosette
(require
(for-syntax
(only-in racket/syntax format-id))
"../base.rkt")
(provide define-insn)
(define-syntax (define-insn stx)
(syntax-case stx ()
[(_ (arg ...) #:encode encode [(field ...) op interp] ...)
#'(begin
(struct op (arg ...)
#:transparent
#:guard (lambda (arg ... name)
(values
(for/all ([arg arg #:exhaustive])
(guard arg)
arg) ...))
#:methods gen:instruction
[(define (instruction-encode insn)
(define lst
(match-let ([(op arg ...) insn])
((lambda (arg ...) (encode field ...)) arg ...)))
(apply concat (map (lambda (x) (if (box? x) (unbox x) x)) lst)))
(define (instruction-run insn cpu)
(match-let ([(op arg ...) insn])
(interp cpu arg ...)))])
... )]))
(define-syntax (guard stx)
(syntax-case stx ()
[(_ arg)
(with-syntax ([ctor (format-id stx "guard-~a" (syntax-e #'arg))])
#'(ctor arg))]))
(define-syntax (define-guard stx)
(syntax-case stx ()
[(_ name type)
(with-syntax ([ctor (format-id stx "guard-~a" (syntax-e #'name))])
#'(define (ctor v)
(assert (type v) (format "~a: expected type ~a" v type))))]))
(define (gpr? r)
(and (box? r) ((bitvector 4) (unbox r))))
(define-guard P (bitvector 1))
(define-guard Rd gpr?)
(define-guard RdHi gpr?)
(define-guard RdLo gpr?)
(define-guard Rm gpr?)
(define-guard Rn gpr?)
(define-guard Rs gpr?)
(define-guard Rt gpr?)
(define-guard S (bitvector 1))
(define-guard U (bitvector 1))
(define-guard W (bitvector 1))
(define-guard imm4 (bitvector 4))
(define-guard imm4H (bitvector 4))
(define-guard imm4L (bitvector 4))
(define-guard imm5 (bitvector 5))
(define-guard imm12 (bitvector 12))
(define-guard imm24 (bitvector 24))
(define-guard rotate (bitvector 2))
(define-guard stype (bitvector 2))
(define-guard register_list (bitvector 16))
|
3fb5170212ba075288253e45753f29c86d9b5e8d4c965364a0afdc2399486474 | abhin4v/link | Server.hs | module Link.Server where
import Control.Concurrent
import Control.Exception hiding (handle)
import Control.Monad (forever, join, void)
import Network.Socket ( AddrInfo (..), AddrInfoFlag(..), SocketType(..)
, SocketOption(..), withSocketsDo, accept
, socketToHandle, defaultHints, getAddrInfo, socket
, setSocketOption, bind, listen)
import System.IO (hClose, hSetNewlineMode, hSetBuffering, BufferMode(..),
IOMode(..), universalNewlineMode, hGetLine, Handle, stdout)
import System.Timeout (timeout)
import Text.Printf (printf)
import qualified Data.Map.Strict as Map
import Link.Client
import Link.Protocol
import Link.Types
runServer :: String -> Int -> IO ()
runServer host port = withSocketsDo $ do
hSetBuffering stdout LineBuffering
server <- newServer
sock <- newSocket
printf "Listening on port %d\n" port
forever $ do
(sock', addr) <- accept sock
printf "Accepted connection from %s\n" (show addr)
handle <- socketToHandle sock' ReadWriteMode
void $ forkFinally (connectClient server handle) (\_ -> hClose handle)
where
newSocket = do
let hints = defaultHints { addrFlags = [AI_NUMERICHOST, AI_NUMERICSERV]
, addrSocketType = Stream }
addr:_ <- getAddrInfo (Just hints) (Just host) (Just (show port))
sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)
setSocketOption sock ReuseAddr 1
bind sock (addrAddress addr)
listen sock 10
return sock
connectClient :: Server -> Handle -> IO ()
connectClient server handle = do
hSetNewlineMode handle universalNewlineMode
hSetBuffering handle LineBuffering
readName
where
waitDelay = 60
waitDelayMicros = waitDelay * 1000 * 1000
readName = do
command <- timeout waitDelayMicros . fmap parseCommand $ hGetLine handle
case command of
Nothing -> printf "Client login timed out\n" >> return ()
Just (Just (Login name)) -> do
let user = User name
ok <- checkAddClient server user handle
case ok of
Nothing -> do
printToHandle handle . formatMessage $ NameInUse name
readName
Just client -> do
printToHandle handle . formatMessage $ LoggedIn name
runClient server client `finally` removeClient server user
_ -> readName
checkAddClient :: Server -> User -> Handle -> IO (Maybe Client)
checkAddClient Server {..} user@User {..} handle =
modifyMVar serverUsers $ \clientMap ->
if Map.member user clientMap
then return (clientMap, Nothing)
else do
client <- newClient user handle
printf "New user connected: %s\n" userName
return (Map.insert user client clientMap, Just client)
removeClient :: Server -> User -> IO ()
removeClient Server {..} user =
modifyMVar_ serverUsers $ return . Map.delete user
| null | https://raw.githubusercontent.com/abhin4v/link/130ec707f8470a9cfee96378cee2a518fb1eb1d2/src/Link/Server.hs | haskell | module Link.Server where
import Control.Concurrent
import Control.Exception hiding (handle)
import Control.Monad (forever, join, void)
import Network.Socket ( AddrInfo (..), AddrInfoFlag(..), SocketType(..)
, SocketOption(..), withSocketsDo, accept
, socketToHandle, defaultHints, getAddrInfo, socket
, setSocketOption, bind, listen)
import System.IO (hClose, hSetNewlineMode, hSetBuffering, BufferMode(..),
IOMode(..), universalNewlineMode, hGetLine, Handle, stdout)
import System.Timeout (timeout)
import Text.Printf (printf)
import qualified Data.Map.Strict as Map
import Link.Client
import Link.Protocol
import Link.Types
runServer :: String -> Int -> IO ()
runServer host port = withSocketsDo $ do
hSetBuffering stdout LineBuffering
server <- newServer
sock <- newSocket
printf "Listening on port %d\n" port
forever $ do
(sock', addr) <- accept sock
printf "Accepted connection from %s\n" (show addr)
handle <- socketToHandle sock' ReadWriteMode
void $ forkFinally (connectClient server handle) (\_ -> hClose handle)
where
newSocket = do
let hints = defaultHints { addrFlags = [AI_NUMERICHOST, AI_NUMERICSERV]
, addrSocketType = Stream }
addr:_ <- getAddrInfo (Just hints) (Just host) (Just (show port))
sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)
setSocketOption sock ReuseAddr 1
bind sock (addrAddress addr)
listen sock 10
return sock
connectClient :: Server -> Handle -> IO ()
connectClient server handle = do
hSetNewlineMode handle universalNewlineMode
hSetBuffering handle LineBuffering
readName
where
waitDelay = 60
waitDelayMicros = waitDelay * 1000 * 1000
readName = do
command <- timeout waitDelayMicros . fmap parseCommand $ hGetLine handle
case command of
Nothing -> printf "Client login timed out\n" >> return ()
Just (Just (Login name)) -> do
let user = User name
ok <- checkAddClient server user handle
case ok of
Nothing -> do
printToHandle handle . formatMessage $ NameInUse name
readName
Just client -> do
printToHandle handle . formatMessage $ LoggedIn name
runClient server client `finally` removeClient server user
_ -> readName
checkAddClient :: Server -> User -> Handle -> IO (Maybe Client)
checkAddClient Server {..} user@User {..} handle =
modifyMVar serverUsers $ \clientMap ->
if Map.member user clientMap
then return (clientMap, Nothing)
else do
client <- newClient user handle
printf "New user connected: %s\n" userName
return (Map.insert user client clientMap, Just client)
removeClient :: Server -> User -> IO ()
removeClient Server {..} user =
modifyMVar_ serverUsers $ return . Map.delete user
|
|
abb83b5cc0b6578b483d9c03cdf9e4858c3be7c56d7a2518ac54e862e67f594a | fission-codes/fission | Link.hs | module Fission.Web.Server.Link (mkLink) where
import Servant hiding (route)
import Servant.API.Generic
import qualified Fission.Web.API.Types as Fission
type BasicAPI = ToServantApi Fission.Routes
mkLink :: (IsElem route BasicAPI, HasLink route) => Proxy route -> MkLink route Link
mkLink pxy = safeLink (Proxy @BasicAPI) pxy
| null | https://raw.githubusercontent.com/fission-codes/fission/7e69c0da210a77412c96631f5ff7ef1b38240d37/fission-web-server/library/Fission/Web/Server/Link.hs | haskell | module Fission.Web.Server.Link (mkLink) where
import Servant hiding (route)
import Servant.API.Generic
import qualified Fission.Web.API.Types as Fission
type BasicAPI = ToServantApi Fission.Routes
mkLink :: (IsElem route BasicAPI, HasLink route) => Proxy route -> MkLink route Link
mkLink pxy = safeLink (Proxy @BasicAPI) pxy
|
|
ab760c803ca458f30c28b7c4dfbb288623119b1d7eab256366e376514111e211 | replete-repl/replete-shared | repl.clj | (ns replete.repl)
(defmacro ^:private with-err-str
"Evaluates exprs in a context in which *print-err-fn* is bound to .append
on a fresh StringBuffer. Returns the string created by any nested
printing calls."
[& body]
`(let [sb# (js/goog.string.StringBuffer.)]
(binding [cljs.core/*print-newline* true
cljs.core/*print-err-fn* (fn [x#] (.append sb# x#))]
~@body)
(str sb#))) | null | https://raw.githubusercontent.com/replete-repl/replete-shared/8c289b6c8be4eb7ffabcd92a712155b03d8dc665/src/replete/repl.clj | clojure | (ns replete.repl)
(defmacro ^:private with-err-str
"Evaluates exprs in a context in which *print-err-fn* is bound to .append
on a fresh StringBuffer. Returns the string created by any nested
printing calls."
[& body]
`(let [sb# (js/goog.string.StringBuffer.)]
(binding [cljs.core/*print-newline* true
cljs.core/*print-err-fn* (fn [x#] (.append sb# x#))]
~@body)
(str sb#))) |
|
033bf2fd6215cb1d1614bcd5801462c5788e31e7c2206b0dd82451c86bf8a3af | innoq/statuses | config.clj | {:title "innoQ Status Updates"
:database-path "data/db.json"
:save-interval 2
:http-port 8080
:host "localhost"
:run-mode :prod
; {username} is replaced with the username
:avatar-url "/{username}/avatar/32x32"
;:avatar-url "-user-420.png"
:profile-url-prefix "/"}
| null | https://raw.githubusercontent.com/innoq/statuses/66f3f8c8c88bc6445d63ac29310a93baa2e3cf23/config.clj | clojure | {username} is replaced with the username
:avatar-url "-user-420.png" | {:title "innoQ Status Updates"
:database-path "data/db.json"
:save-interval 2
:http-port 8080
:host "localhost"
:run-mode :prod
:avatar-url "/{username}/avatar/32x32"
:profile-url-prefix "/"}
|
d7609cbd977c6d77c9abeddd531bdda02796972430636298b7f264da0d5ccafc | oakmac/cljs-cheatsheet | util.cljs | (ns cljs-cheatsheet-client.util
(:require
[clojure.walk :refer [keywordize-keys]]
[cognitect.transit :as transit]
[oops.core :refer [ocall oget oset!]]))
;;------------------------------------------------------------------------------
;; Util Functions
;;------------------------------------------------------------------------------
(defn half [n]
(/ n 2))
(defn extract-namespace [full-name]
(let [first-slash-pos (.indexOf full-name "/")]
(subs full-name 0 first-slash-pos)))
(defn extract-symbol [full-name]
(let [first-slash-pos (.indexOf full-name "/")]
(subs full-name (inc first-slash-pos))))
(defn split-full-name [r]
(let [ns1 (extract-namespace r)
symbol-name (extract-symbol r)]
{:full-name r
:namespace ns1
:symbol symbol-name}))
(defn point-inside-box? [point box]
(let [px (:x point)
py (:y point)]
(and (>= px (:x1 box))
(<= px (:x2 box))
(>= py (:y1 box))
(<= py (:y2 box)))))
;;------------------------------------------------------------------------------
;; AJAX
;;------------------------------------------------------------------------------
(def transit-json-rdr (transit/reader :json))
(defn- http-success? [status]
(and (>= status 200)
(< status 400)))
(defn- fetch-clj-success [js-evt success-fn error-fn]
(let [status (oget js-evt "target" "status")]
(if-not (http-success? status)
(error-fn)
(let [response-text (oget js-evt "target" "responseText")]
(if-let [clj-result (try (transit/read transit-json-rdr response-text)
(catch js/Error _error nil))]
(success-fn (keywordize-keys clj-result))
(error-fn))))))
(defn fetch-clj
"Makes an AJAX request to an HTTP GET endpoint expecting JSON.
Parses JSON into CLJ using transit.cljs and keywordizes map keys.
transit.cljs is faster than using js->clj: "
([url success-fn]
(fetch-clj url success-fn (fn [] nil)))
([url success-fn error-fn]
(doto (js/XMLHttpRequest.)
(.addEventListener "load" #(fetch-clj-success % success-fn error-fn))
(.addEventListener "error" error-fn)
(.addEventListener "abort" error-fn)
(.open "get" url)
(.send))))
| null | https://raw.githubusercontent.com/oakmac/cljs-cheatsheet/93e0b6e59e8d0132ddaf51fe5783801b210700f6/cljs-client/cljs_cheatsheet_client/util.cljs | clojure | ------------------------------------------------------------------------------
Util Functions
------------------------------------------------------------------------------
------------------------------------------------------------------------------
AJAX
------------------------------------------------------------------------------ | (ns cljs-cheatsheet-client.util
(:require
[clojure.walk :refer [keywordize-keys]]
[cognitect.transit :as transit]
[oops.core :refer [ocall oget oset!]]))
(defn half [n]
(/ n 2))
(defn extract-namespace [full-name]
(let [first-slash-pos (.indexOf full-name "/")]
(subs full-name 0 first-slash-pos)))
(defn extract-symbol [full-name]
(let [first-slash-pos (.indexOf full-name "/")]
(subs full-name (inc first-slash-pos))))
(defn split-full-name [r]
(let [ns1 (extract-namespace r)
symbol-name (extract-symbol r)]
{:full-name r
:namespace ns1
:symbol symbol-name}))
(defn point-inside-box? [point box]
(let [px (:x point)
py (:y point)]
(and (>= px (:x1 box))
(<= px (:x2 box))
(>= py (:y1 box))
(<= py (:y2 box)))))
(def transit-json-rdr (transit/reader :json))
(defn- http-success? [status]
(and (>= status 200)
(< status 400)))
(defn- fetch-clj-success [js-evt success-fn error-fn]
(let [status (oget js-evt "target" "status")]
(if-not (http-success? status)
(error-fn)
(let [response-text (oget js-evt "target" "responseText")]
(if-let [clj-result (try (transit/read transit-json-rdr response-text)
(catch js/Error _error nil))]
(success-fn (keywordize-keys clj-result))
(error-fn))))))
(defn fetch-clj
"Makes an AJAX request to an HTTP GET endpoint expecting JSON.
Parses JSON into CLJ using transit.cljs and keywordizes map keys.
transit.cljs is faster than using js->clj: "
([url success-fn]
(fetch-clj url success-fn (fn [] nil)))
([url success-fn error-fn]
(doto (js/XMLHttpRequest.)
(.addEventListener "load" #(fetch-clj-success % success-fn error-fn))
(.addEventListener "error" error-fn)
(.addEventListener "abort" error-fn)
(.open "get" url)
(.send))))
|
f749c0286f569cd3ec8171720b362bd6e98b1e822488bacd82cc2efd141c8288 | exercism/common-lisp | lillys-lasagna-test.lisp | ;; Ensures that lillys-lasagna.lisp and the testing library are always loaded
(eval-when (:compile-toplevel :load-toplevel :execute)
(load "lillys-lasagna")
(ql:quickload :fiveam))
Defines the testing package with symbols from - lasagna and FiveAM in scope
;; The `run-tests` function is exported for use by both the user and test-runner
(defpackage :lillys-lasagna-test
(:use :cl :fiveam :lillys-lasagna)
(:export :run-tests))
;; Enter the testing package
(in-package :lillys-lasagna-test)
;; Define and enter a new FiveAM test-suite
(def-suite lillys-lasagna-suite)
(in-suite lillys-lasagna-suite)
(test expected-time "Compute the expected time in the oven"
(is (= 337 (expected-time-in-oven)))
(is (not (null (documentation 'expected-time-in-oven 'function)))))
(test remaining-time "Compute how many minutes left for cooking"
(is (= 237 (remaining-minutes-in-oven 100)))
(is (= 37 (remaining-minutes-in-oven 300)))
(is (not (null (documentation 'remaining-minutes-in-oven 'function)))))
(test preparation-time "Compute preparation time based upon number of layers"
(is (= 57 (preparation-time-in-minutes 3)))
(is (= 76 (preparation-time-in-minutes 4)))
(is (not (null (documentation 'preparation-time-in-minutes 'function)))))
(test elapsed-time "Compute sum of prepration time and time lasagna has already been in the oven."
(is (= 157 (elapsed-time-in-minutes 3 100)))
(is (= 77 (elapsed-time-in-minutes 4 1)))
(is (not (null (documentation 'elapsed-time-in-minutes 'function)))))
;; Test helper function
(defun func-docstring (func)
(substitute #\Space #\NewLine (documentation func 'function)))
(defun run-tests (&optional (test-or-suite 'lillys-lasagna-suite))
"Provides human readable results of test run. Default to entire suite."
(run! test-or-suite))
| null | https://raw.githubusercontent.com/exercism/common-lisp/3f2a0da01cb82ef08ec5ef8c2e8f7cec627155d9/exercises/concept/lillys-lasagna/lillys-lasagna-test.lisp | lisp | Ensures that lillys-lasagna.lisp and the testing library are always loaded
The `run-tests` function is exported for use by both the user and test-runner
Enter the testing package
Define and enter a new FiveAM test-suite
Test helper function | (eval-when (:compile-toplevel :load-toplevel :execute)
(load "lillys-lasagna")
(ql:quickload :fiveam))
Defines the testing package with symbols from - lasagna and FiveAM in scope
(defpackage :lillys-lasagna-test
(:use :cl :fiveam :lillys-lasagna)
(:export :run-tests))
(in-package :lillys-lasagna-test)
(def-suite lillys-lasagna-suite)
(in-suite lillys-lasagna-suite)
(test expected-time "Compute the expected time in the oven"
(is (= 337 (expected-time-in-oven)))
(is (not (null (documentation 'expected-time-in-oven 'function)))))
(test remaining-time "Compute how many minutes left for cooking"
(is (= 237 (remaining-minutes-in-oven 100)))
(is (= 37 (remaining-minutes-in-oven 300)))
(is (not (null (documentation 'remaining-minutes-in-oven 'function)))))
(test preparation-time "Compute preparation time based upon number of layers"
(is (= 57 (preparation-time-in-minutes 3)))
(is (= 76 (preparation-time-in-minutes 4)))
(is (not (null (documentation 'preparation-time-in-minutes 'function)))))
(test elapsed-time "Compute sum of prepration time and time lasagna has already been in the oven."
(is (= 157 (elapsed-time-in-minutes 3 100)))
(is (= 77 (elapsed-time-in-minutes 4 1)))
(is (not (null (documentation 'elapsed-time-in-minutes 'function)))))
(defun func-docstring (func)
(substitute #\Space #\NewLine (documentation func 'function)))
(defun run-tests (&optional (test-or-suite 'lillys-lasagna-suite))
"Provides human readable results of test run. Default to entire suite."
(run! test-or-suite))
|
3cce8e0096a7dcdb0fb38557c9da7bef73bc0f6b9594e238dd747cc8c297ce9f | geocaml/ocaml-topojson | topojson.ml | (* Implentation of TopoJSON Objects *)
module Intf = Topojson_intf
module type S = Topojson_intf.S
module type Json = Topojson_intf.Json
let ( let* ) = Result.bind
let decode_or_err f v =
match f v with Ok x -> x | Error (`Msg m) -> failwith m
module Make (J : Intf.Json) = struct
type json = J.t
let bbox_to_json_or_empty = function
| None -> []
| Some bbox -> [ ("bbox", J.array J.float bbox) ]
let id_or_empty = function None -> [] | Some id -> [ ("id", id) ]
module Geometry = struct
type json = J.t
type properties = [ `None | `Null | `Obj of (string * json) list ]
let properties_or_null = function
| `None -> []
| `Null -> [ ("properties", J.null) ]
| `Obj v -> [ ("properties", J.obj v) ]
let keys_in_use =
[
"type";
"properties";
"coordinates";
"bbox";
"arcs";
"id";
"objects";
"geometries";
]
let keys_in_use_for_point =
[
"type";
"properties";
"coordinates";
"bbox";
"id";
"objects";
"geometries";
]
let foreign_members_of_json json keys_in_use =
match J.to_obj json with
| Ok assoc ->
List.filter (fun (k, _v) -> not (List.mem k keys_in_use)) assoc
| Error _ -> []
let parse_with_coords json p_c typ =
match (J.find json [ "type" ], J.find json [ "coordinates" ]) with
| None, _ ->
Error
(`Msg
("JSON should"
^ "have a key-value for `type' whilst parsing "
^ typ))
| _, None -> Error (`Msg "JSON should have a key-value for `coordinates'")
| Some typ, Some coords -> (
let* typ = J.to_string typ in
match typ with
| t when t = typ -> p_c coords
| t -> Error (`Msg ("Expected type of `" ^ typ ^ "' but got " ^ t)))
let parse_with_arcs json p_a typ =
match (J.find json [ "type" ], J.find json [ "arcs" ]) with
| None, _ ->
Error
(`Msg
("JSON should"
^ "have a key-value for `type' whilst parsing "
^ typ))
| _, None -> Error (`Msg "JSON should have a key-value for `arcs'")
| Some typ, Some arcs -> (
let* typ = J.to_string typ in
match typ with
| t when t = typ -> p_a arcs
| t -> Error (`Msg ("Expected type of `" ^ typ ^ "' but got " ^ t)))
module Position = struct
type t = float array
let lng t = t.(0)
let lat t = t.(1)
let altitude t = try Some t.(2) with _ -> None
let v ?altitude ~lng ~lat () =
match altitude with
| Some f -> [| lng; lat; f |]
| None -> [| lng; lat |]
let equal l1 l2 =
let n1 = Array.length l1 and n2 = Array.length l2 in
if n1 <> n2 then false
else
let rec loop i =
if i = n1 then true
else if Float.equal (Array.unsafe_get l1 i) (Array.unsafe_get l2 i)
then loop (succ i)
else false
in
loop 0
let to_json arr = J.array J.float arr
end
module Point = struct
type t = Position.t
let typ = "Point"
let position = Fun.id
let v position = position
let parse_coords coords = J.to_array (decode_or_err J.to_float) coords
let base_of_json json = parse_with_coords json parse_coords typ
let to_json ?bbox ?id ?(properties = `None) ?(foreign_members = [])
position =
J.obj
([
("type", J.string typ); ("coordinates", Position.to_json position);
]
@ properties_or_null properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
module Arc_index = struct
type t = int array
let v t = Array.of_list t
let to_json arr = J.array J.int arr
end
module MultiPoint = struct
type t = Point.t array
let typ = "MultiPoint"
let coordinates = Fun.id
let v positions = positions
let parse_coords coords =
try J.to_array (decode_or_err Point.parse_coords) coords
with Failure m -> Error (`Msg m)
let base_of_json json = parse_with_coords json parse_coords typ
let to_json ?bbox ?id ?(properties = `None) ?(foreign_members = [])
positions =
J.obj
([
("type", J.string typ);
("coordinates", J.array Position.to_json positions);
]
@ properties_or_null properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
module LineString = struct
type t = Arc_index.t
let typ = "LineString"
let v arc = arc
let parse_arcs arcs = J.to_array (decode_or_err J.to_int) arcs
let base_of_json json = parse_with_arcs json parse_arcs typ
let to_json ?bbox ?id ?(properties = `None) ?(foreign_members = []) arc =
J.obj
([ ("type", J.string typ); ("arcs", Arc_index.to_json arc) ]
@ properties_or_null properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
module MultiLineString = struct
type t = LineString.t array
let typ = "MultiLineString"
let v arcs = arcs
let parse_arcs arcs =
try J.to_array (decode_or_err LineString.parse_arcs) arcs
with Failure m -> Error (`Msg m)
let base_of_json json = parse_with_arcs json parse_arcs typ
let to_json ?bbox ?id ?(properties = `None) ?(foreign_members = []) arcs =
J.obj
([ ("type", J.string typ); ("arcs", J.array Arc_index.to_json arcs) ]
@ properties_or_null properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
module Polygon = struct
type t = LineString.t array
let typ = "Polygon"
let rings = Fun.id
let exterior_ring t = t.(0)
let interior_rings t = Array.sub t 1 (Array.length t - 1)
let v arcs = arcs
let parse_arcs arcs =
try
J.to_array (decode_or_err (J.to_array (decode_or_err J.to_int))) arcs
with Failure m -> Error (`Msg m)
let base_of_json json = parse_with_arcs json parse_arcs typ
let to_json ?bbox ?id ?(properties = `None) ?(foreign_members = []) arcs =
J.obj
([ ("type", J.string typ); ("arcs", J.array (J.array J.int) arcs) ]
@ properties_or_null properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
module MultiPolygon = struct
type t = Polygon.t array
let typ = "MultiPolygon"
let polygons = Fun.id
let v arcs = arcs
let parse_arcs arcs =
try J.to_array (decode_or_err Polygon.parse_arcs) arcs
with Failure m -> Error (`Msg m)
let base_of_json json = parse_with_arcs json parse_arcs typ
let to_json ?bbox ?id ?(properties = `None) ?(foreign_members = []) arcs =
J.obj
([
("type", J.string typ);
("arcs", J.array (J.array (J.array J.int)) arcs);
]
@ properties_or_null properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
type geometry =
| Point of Point.t
| MultiPoint of MultiPoint.t
| LineString of LineString.t
| MultiLineString of MultiLineString.t
| Polygon of Polygon.t
| MultiPolygon of MultiPolygon.t
| Collection of t list
and t = {
geometry : geometry;
bbox : float array option;
properties : properties;
foreign_members : (string * json) list;
id : json option;
}
let v ?id ?(properties = `None) ?(foreign_members = []) ?bbox geo =
{ geometry = geo; properties; bbox; foreign_members; id }
let geometry t = t.geometry
let properties t = t.properties
let point t = Point (Point.v t)
let multipoint t = MultiPoint (MultiPoint.v t)
let linestring t = LineString (LineString.v t)
let multilinestring t = MultiLineString (MultiLineString.v t)
let polygon p = Polygon (Polygon.v p)
let multipolygon mp = MultiPolygon (MultiPolygon.v mp)
let collection cs = Collection cs
let get_point = function
| Point p -> Ok p
| _ -> Error (`Msg "Expected point")
let get_point_exn = function
| Point p -> p
| _ -> invalid_arg "Expected point"
let get_multipoint = function
| MultiPoint p -> Ok p
| _ -> Error (`Msg "Expected multipoint")
let get_multipoint_exn = function
| MultiPoint p -> p
| _ -> invalid_arg "Expected multipoint"
let get_linestring = function
| LineString p -> Ok p
| _ -> Error (`Msg "Expected linestring")
let get_linestring_exn = function
| LineString p -> p
| _ -> invalid_arg "Expected linestring"
let get_multilinestring = function
| MultiLineString p -> Ok p
| _ -> Error (`Msg "Expected multilinestring")
let get_multilinestring_exn = function
| MultiLineString p -> p
| _ -> invalid_arg "Expected multilinestring"
let get_polygon = function
| Polygon p -> Ok p
| _ -> Error (`Msg "Expected polygon")
let get_polygon_exn = function
| Polygon p -> p
| _ -> invalid_arg "Expected polygon"
let get_multipolygon = function
| MultiPolygon p -> Ok p
| _ -> Error (`Msg "Expected multipolygon")
let get_multipolygon_exn = function
| MultiPolygon p -> p
| _ -> invalid_arg "Expected multipolygon"
let geometry_to_json geometry = json
let foreign_members t = t.foreign_members
let id t = t.id
let properties_of_json json =
match J.find json [ "properties" ] with
| Some j -> if J.is_null j then `Null else `Obj (decode_or_err J.to_obj j)
| None -> `None
let id_of_json json = J.find json [ "id" ]
let rec of_json json =
let fm = foreign_members_of_json json in
let properties = properties_of_json json in
let id = id_of_json json in
let bbox =
J.find json [ "bbox" ]
|> Option.map J.(decode_or_err @@ to_array (decode_or_err J.to_float))
in
match J.find json [ "type" ] with
| Some typ -> (
match J.to_string typ with
| Ok "Point" ->
Result.map (fun g ->
{
geometry = Point g;
bbox;
properties;
foreign_members = fm keys_in_use_for_point;
id;
})
@@ Point.base_of_json json
| Ok "MultiPoint" ->
Result.map (fun g ->
{
geometry = MultiPoint g;
bbox;
properties;
foreign_members = fm keys_in_use_for_point;
id;
})
@@ MultiPoint.base_of_json json
| Ok "LineString" ->
Result.map (fun g ->
{
geometry = LineString g;
bbox;
properties;
foreign_members = fm keys_in_use;
id;
})
@@ LineString.base_of_json json
| Ok "MultiLineString" ->
Result.map (fun g ->
{
geometry = MultiLineString g;
bbox;
properties;
foreign_members = fm keys_in_use;
id;
})
@@ MultiLineString.base_of_json json
| Ok "Polygon" ->
Result.map (fun g ->
{
geometry = Polygon g;
bbox;
properties;
foreign_members = fm keys_in_use;
id;
})
@@ Polygon.base_of_json json
| Ok "MultiPolygon" ->
Result.map (fun g ->
{
geometry = MultiPolygon g;
bbox;
properties;
foreign_members = fm keys_in_use;
id;
})
@@ MultiPolygon.base_of_json json
| Ok "GeometryCollection" -> (
match J.find json [ "geometries" ] with
| Some list ->
let geo = J.to_list (decode_or_err of_json) list in
Result.map
(fun g ->
{
geometry = Collection g;
bbox;
properties;
foreign_members = fm keys_in_use;
id;
})
geo
| None ->
Error
(`Msg
"A geometry collection should have a member called \
geometries"))
| Ok typ -> Error (`Msg ("Unknown type of geometry " ^ typ))
| Error _ as e -> e)
| None ->
Error
(`Msg
"A TopoJSON text should contain one object with a member `type`.")
let rec to_json t =
let bbox = t.bbox in
let id = t.id in
match t.geometry with
| Point point ->
Point.to_json ?bbox ?id ~foreign_members:t.foreign_members
~properties:t.properties point
| MultiPoint mp ->
MultiPoint.to_json ?bbox ?id ~foreign_members:t.foreign_members
~properties:t.properties mp
| LineString ls ->
LineString.to_json ?bbox ?id ~foreign_members:t.foreign_members
~properties:t.properties ls
| MultiLineString mls ->
MultiLineString.to_json ?bbox ?id ~foreign_members:t.foreign_members
~properties:t.properties mls
| Polygon p ->
Polygon.to_json ?bbox ?id ~foreign_members:t.foreign_members
~properties:t.properties p
| MultiPolygon mp ->
MultiPolygon.to_json ?bbox ~foreign_members:t.foreign_members
~properties:t.properties mp
| Collection c ->
J.obj
([
("type", J.string "GeometryCollection");
("geometries", J.list to_json c);
]
@ properties_or_null t.properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ t.foreign_members)
end
module Topology = struct
type json = J.t
type t = {
objects : (string * Geometry.t) list;
arcs : Geometry.Position.t array array;
foreign_members : (string * json) list;
transform : transform option;
}
and transform = { scale : float * float; translate : float * float }
let v ?(foreign_members = []) ?transform ~arcs objects =
{ foreign_members; arcs; objects; transform }
let transform t = t.transform
let objects t = t.objects
let arcs t = t.arcs
let foreign_members t = t.foreign_members
let keys_in_use =
[
"type";
"arcs";
"objects";
"transform";
"bbox";
"properties";
"coordinates";
"geometries";
]
let foreign_members_of_json json =
match J.to_obj json with
| Ok assoc ->
List.filter (fun (k, _v) -> not (List.mem k keys_in_use)) assoc
| Error _ -> []
let transform_of_json json =
match J.find json [ "transform" ] with
| None -> None
| Some transform_object -> (
match
( J.find transform_object [ "scale" ],
J.find transform_object [ "translate" ] )
with
| Some scale, Some translate ->
let scale =
decode_or_err (J.to_array (decode_or_err J.to_float)) scale
in
let translate =
decode_or_err (J.to_array (decode_or_err J.to_float)) translate
in
Some
{
scale = (scale.(0), scale.(1));
translate = (translate.(0), translate.(1));
}
| _ -> None)
let of_json json =
match (J.find json [ "objects" ], J.find json [ "arcs" ]) with
| Some objects, Some arcs ->
let* objects = J.to_obj objects in
let geometries =
List.map
(fun (k, v) -> (k, decode_or_err Geometry.of_json v))
objects
in
let* arcs =
J.to_array
(decode_or_err
(J.to_array
(decode_or_err (J.to_array (decode_or_err J.to_float)))))
arcs
in
let transform = transform_of_json json in
let fm = foreign_members_of_json json in
Ok { objects = geometries; arcs; transform; foreign_members = fm }
| _, _ -> Error (`Msg "No objects and/or arcs field in Topology object!")
let transform_to_json transform =
J.obj
[
("scale", J.list J.float [ fst transform.scale; snd transform.scale ]);
( "translate",
J.list J.float [ fst transform.translate; snd transform.translate ]
);
]
let to_json ?bbox { objects; arcs; foreign_members; transform } =
let transform =
Option.map (fun v -> ("transform", transform_to_json v)) transform
|> Option.to_list
in
J.obj
([
("type", J.string "Topology");
( "objects",
J.obj (List.map (fun (k, v) -> (k, Geometry.to_json v)) objects) );
("arcs", J.array (J.array (J.array J.float)) arcs);
]
@ transform
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
type topojson = Topology of Topology.t | Geometry of Geometry.t
type t = { topojson : topojson; bbox : float array option }
let topojson t = t.topojson
let bbox t = t.bbox
let topojson_to_t tjson bbox = { topojson = tjson; bbox }
let json_to_bbox json =
match J.to_array (decode_or_err J.to_float) json with
| Ok v -> Some v
| Error _ -> None
let of_json json =
match (J.find json [ "type" ], J.find json [ "bbox" ]) with
| Some typ, bbx -> (
match J.to_string typ with
| Ok "Topology" -> (
match Topology.of_json json with
| Ok v ->
Ok (topojson_to_t (Topology v) @@ Option.bind bbx json_to_bbox)
| Error e -> Error e)
| Ok s -> Error (`Msg ("Expected `Topology` but got " ^ s))
| Error _ as e -> e)
| None, _ -> Error (`Msg "Could not find Topology type")
let to_json = function
| { topojson = Topology f; bbox } -> Topology.to_json ?bbox f
| { topojson = Geometry g; bbox = _ } ->
The geometry value [ g ] will have the bbox already , so we do n't need to
do anything with it here .
do anything with it here. *)
Geometry.to_json g
let v ?bbox topojson = { bbox; topojson }
end
| null | https://raw.githubusercontent.com/geocaml/ocaml-topojson/0b820bdb867fb2932e7ef2cd69ad2e31423acca2/src/topojson/topojson.ml | ocaml | Implentation of TopoJSON Objects |
module Intf = Topojson_intf
module type S = Topojson_intf.S
module type Json = Topojson_intf.Json
let ( let* ) = Result.bind
let decode_or_err f v =
match f v with Ok x -> x | Error (`Msg m) -> failwith m
module Make (J : Intf.Json) = struct
type json = J.t
let bbox_to_json_or_empty = function
| None -> []
| Some bbox -> [ ("bbox", J.array J.float bbox) ]
let id_or_empty = function None -> [] | Some id -> [ ("id", id) ]
module Geometry = struct
type json = J.t
type properties = [ `None | `Null | `Obj of (string * json) list ]
let properties_or_null = function
| `None -> []
| `Null -> [ ("properties", J.null) ]
| `Obj v -> [ ("properties", J.obj v) ]
let keys_in_use =
[
"type";
"properties";
"coordinates";
"bbox";
"arcs";
"id";
"objects";
"geometries";
]
let keys_in_use_for_point =
[
"type";
"properties";
"coordinates";
"bbox";
"id";
"objects";
"geometries";
]
let foreign_members_of_json json keys_in_use =
match J.to_obj json with
| Ok assoc ->
List.filter (fun (k, _v) -> not (List.mem k keys_in_use)) assoc
| Error _ -> []
let parse_with_coords json p_c typ =
match (J.find json [ "type" ], J.find json [ "coordinates" ]) with
| None, _ ->
Error
(`Msg
("JSON should"
^ "have a key-value for `type' whilst parsing "
^ typ))
| _, None -> Error (`Msg "JSON should have a key-value for `coordinates'")
| Some typ, Some coords -> (
let* typ = J.to_string typ in
match typ with
| t when t = typ -> p_c coords
| t -> Error (`Msg ("Expected type of `" ^ typ ^ "' but got " ^ t)))
let parse_with_arcs json p_a typ =
match (J.find json [ "type" ], J.find json [ "arcs" ]) with
| None, _ ->
Error
(`Msg
("JSON should"
^ "have a key-value for `type' whilst parsing "
^ typ))
| _, None -> Error (`Msg "JSON should have a key-value for `arcs'")
| Some typ, Some arcs -> (
let* typ = J.to_string typ in
match typ with
| t when t = typ -> p_a arcs
| t -> Error (`Msg ("Expected type of `" ^ typ ^ "' but got " ^ t)))
module Position = struct
type t = float array
let lng t = t.(0)
let lat t = t.(1)
let altitude t = try Some t.(2) with _ -> None
let v ?altitude ~lng ~lat () =
match altitude with
| Some f -> [| lng; lat; f |]
| None -> [| lng; lat |]
let equal l1 l2 =
let n1 = Array.length l1 and n2 = Array.length l2 in
if n1 <> n2 then false
else
let rec loop i =
if i = n1 then true
else if Float.equal (Array.unsafe_get l1 i) (Array.unsafe_get l2 i)
then loop (succ i)
else false
in
loop 0
let to_json arr = J.array J.float arr
end
module Point = struct
type t = Position.t
let typ = "Point"
let position = Fun.id
let v position = position
let parse_coords coords = J.to_array (decode_or_err J.to_float) coords
let base_of_json json = parse_with_coords json parse_coords typ
let to_json ?bbox ?id ?(properties = `None) ?(foreign_members = [])
position =
J.obj
([
("type", J.string typ); ("coordinates", Position.to_json position);
]
@ properties_or_null properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
module Arc_index = struct
type t = int array
let v t = Array.of_list t
let to_json arr = J.array J.int arr
end
module MultiPoint = struct
type t = Point.t array
let typ = "MultiPoint"
let coordinates = Fun.id
let v positions = positions
let parse_coords coords =
try J.to_array (decode_or_err Point.parse_coords) coords
with Failure m -> Error (`Msg m)
let base_of_json json = parse_with_coords json parse_coords typ
let to_json ?bbox ?id ?(properties = `None) ?(foreign_members = [])
positions =
J.obj
([
("type", J.string typ);
("coordinates", J.array Position.to_json positions);
]
@ properties_or_null properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
module LineString = struct
type t = Arc_index.t
let typ = "LineString"
let v arc = arc
let parse_arcs arcs = J.to_array (decode_or_err J.to_int) arcs
let base_of_json json = parse_with_arcs json parse_arcs typ
let to_json ?bbox ?id ?(properties = `None) ?(foreign_members = []) arc =
J.obj
([ ("type", J.string typ); ("arcs", Arc_index.to_json arc) ]
@ properties_or_null properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
module MultiLineString = struct
type t = LineString.t array
let typ = "MultiLineString"
let v arcs = arcs
let parse_arcs arcs =
try J.to_array (decode_or_err LineString.parse_arcs) arcs
with Failure m -> Error (`Msg m)
let base_of_json json = parse_with_arcs json parse_arcs typ
let to_json ?bbox ?id ?(properties = `None) ?(foreign_members = []) arcs =
J.obj
([ ("type", J.string typ); ("arcs", J.array Arc_index.to_json arcs) ]
@ properties_or_null properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
module Polygon = struct
type t = LineString.t array
let typ = "Polygon"
let rings = Fun.id
let exterior_ring t = t.(0)
let interior_rings t = Array.sub t 1 (Array.length t - 1)
let v arcs = arcs
let parse_arcs arcs =
try
J.to_array (decode_or_err (J.to_array (decode_or_err J.to_int))) arcs
with Failure m -> Error (`Msg m)
let base_of_json json = parse_with_arcs json parse_arcs typ
let to_json ?bbox ?id ?(properties = `None) ?(foreign_members = []) arcs =
J.obj
([ ("type", J.string typ); ("arcs", J.array (J.array J.int) arcs) ]
@ properties_or_null properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
module MultiPolygon = struct
type t = Polygon.t array
let typ = "MultiPolygon"
let polygons = Fun.id
let v arcs = arcs
let parse_arcs arcs =
try J.to_array (decode_or_err Polygon.parse_arcs) arcs
with Failure m -> Error (`Msg m)
let base_of_json json = parse_with_arcs json parse_arcs typ
let to_json ?bbox ?id ?(properties = `None) ?(foreign_members = []) arcs =
J.obj
([
("type", J.string typ);
("arcs", J.array (J.array (J.array J.int)) arcs);
]
@ properties_or_null properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
type geometry =
| Point of Point.t
| MultiPoint of MultiPoint.t
| LineString of LineString.t
| MultiLineString of MultiLineString.t
| Polygon of Polygon.t
| MultiPolygon of MultiPolygon.t
| Collection of t list
and t = {
geometry : geometry;
bbox : float array option;
properties : properties;
foreign_members : (string * json) list;
id : json option;
}
let v ?id ?(properties = `None) ?(foreign_members = []) ?bbox geo =
{ geometry = geo; properties; bbox; foreign_members; id }
let geometry t = t.geometry
let properties t = t.properties
let point t = Point (Point.v t)
let multipoint t = MultiPoint (MultiPoint.v t)
let linestring t = LineString (LineString.v t)
let multilinestring t = MultiLineString (MultiLineString.v t)
let polygon p = Polygon (Polygon.v p)
let multipolygon mp = MultiPolygon (MultiPolygon.v mp)
let collection cs = Collection cs
let get_point = function
| Point p -> Ok p
| _ -> Error (`Msg "Expected point")
let get_point_exn = function
| Point p -> p
| _ -> invalid_arg "Expected point"
let get_multipoint = function
| MultiPoint p -> Ok p
| _ -> Error (`Msg "Expected multipoint")
let get_multipoint_exn = function
| MultiPoint p -> p
| _ -> invalid_arg "Expected multipoint"
let get_linestring = function
| LineString p -> Ok p
| _ -> Error (`Msg "Expected linestring")
let get_linestring_exn = function
| LineString p -> p
| _ -> invalid_arg "Expected linestring"
let get_multilinestring = function
| MultiLineString p -> Ok p
| _ -> Error (`Msg "Expected multilinestring")
let get_multilinestring_exn = function
| MultiLineString p -> p
| _ -> invalid_arg "Expected multilinestring"
let get_polygon = function
| Polygon p -> Ok p
| _ -> Error (`Msg "Expected polygon")
let get_polygon_exn = function
| Polygon p -> p
| _ -> invalid_arg "Expected polygon"
let get_multipolygon = function
| MultiPolygon p -> Ok p
| _ -> Error (`Msg "Expected multipolygon")
let get_multipolygon_exn = function
| MultiPolygon p -> p
| _ -> invalid_arg "Expected multipolygon"
let geometry_to_json geometry = json
let foreign_members t = t.foreign_members
let id t = t.id
let properties_of_json json =
match J.find json [ "properties" ] with
| Some j -> if J.is_null j then `Null else `Obj (decode_or_err J.to_obj j)
| None -> `None
let id_of_json json = J.find json [ "id" ]
let rec of_json json =
let fm = foreign_members_of_json json in
let properties = properties_of_json json in
let id = id_of_json json in
let bbox =
J.find json [ "bbox" ]
|> Option.map J.(decode_or_err @@ to_array (decode_or_err J.to_float))
in
match J.find json [ "type" ] with
| Some typ -> (
match J.to_string typ with
| Ok "Point" ->
Result.map (fun g ->
{
geometry = Point g;
bbox;
properties;
foreign_members = fm keys_in_use_for_point;
id;
})
@@ Point.base_of_json json
| Ok "MultiPoint" ->
Result.map (fun g ->
{
geometry = MultiPoint g;
bbox;
properties;
foreign_members = fm keys_in_use_for_point;
id;
})
@@ MultiPoint.base_of_json json
| Ok "LineString" ->
Result.map (fun g ->
{
geometry = LineString g;
bbox;
properties;
foreign_members = fm keys_in_use;
id;
})
@@ LineString.base_of_json json
| Ok "MultiLineString" ->
Result.map (fun g ->
{
geometry = MultiLineString g;
bbox;
properties;
foreign_members = fm keys_in_use;
id;
})
@@ MultiLineString.base_of_json json
| Ok "Polygon" ->
Result.map (fun g ->
{
geometry = Polygon g;
bbox;
properties;
foreign_members = fm keys_in_use;
id;
})
@@ Polygon.base_of_json json
| Ok "MultiPolygon" ->
Result.map (fun g ->
{
geometry = MultiPolygon g;
bbox;
properties;
foreign_members = fm keys_in_use;
id;
})
@@ MultiPolygon.base_of_json json
| Ok "GeometryCollection" -> (
match J.find json [ "geometries" ] with
| Some list ->
let geo = J.to_list (decode_or_err of_json) list in
Result.map
(fun g ->
{
geometry = Collection g;
bbox;
properties;
foreign_members = fm keys_in_use;
id;
})
geo
| None ->
Error
(`Msg
"A geometry collection should have a member called \
geometries"))
| Ok typ -> Error (`Msg ("Unknown type of geometry " ^ typ))
| Error _ as e -> e)
| None ->
Error
(`Msg
"A TopoJSON text should contain one object with a member `type`.")
let rec to_json t =
let bbox = t.bbox in
let id = t.id in
match t.geometry with
| Point point ->
Point.to_json ?bbox ?id ~foreign_members:t.foreign_members
~properties:t.properties point
| MultiPoint mp ->
MultiPoint.to_json ?bbox ?id ~foreign_members:t.foreign_members
~properties:t.properties mp
| LineString ls ->
LineString.to_json ?bbox ?id ~foreign_members:t.foreign_members
~properties:t.properties ls
| MultiLineString mls ->
MultiLineString.to_json ?bbox ?id ~foreign_members:t.foreign_members
~properties:t.properties mls
| Polygon p ->
Polygon.to_json ?bbox ?id ~foreign_members:t.foreign_members
~properties:t.properties p
| MultiPolygon mp ->
MultiPolygon.to_json ?bbox ~foreign_members:t.foreign_members
~properties:t.properties mp
| Collection c ->
J.obj
([
("type", J.string "GeometryCollection");
("geometries", J.list to_json c);
]
@ properties_or_null t.properties
@ id_or_empty id
@ bbox_to_json_or_empty bbox
@ t.foreign_members)
end
module Topology = struct
type json = J.t
type t = {
objects : (string * Geometry.t) list;
arcs : Geometry.Position.t array array;
foreign_members : (string * json) list;
transform : transform option;
}
and transform = { scale : float * float; translate : float * float }
let v ?(foreign_members = []) ?transform ~arcs objects =
{ foreign_members; arcs; objects; transform }
let transform t = t.transform
let objects t = t.objects
let arcs t = t.arcs
let foreign_members t = t.foreign_members
let keys_in_use =
[
"type";
"arcs";
"objects";
"transform";
"bbox";
"properties";
"coordinates";
"geometries";
]
let foreign_members_of_json json =
match J.to_obj json with
| Ok assoc ->
List.filter (fun (k, _v) -> not (List.mem k keys_in_use)) assoc
| Error _ -> []
let transform_of_json json =
match J.find json [ "transform" ] with
| None -> None
| Some transform_object -> (
match
( J.find transform_object [ "scale" ],
J.find transform_object [ "translate" ] )
with
| Some scale, Some translate ->
let scale =
decode_or_err (J.to_array (decode_or_err J.to_float)) scale
in
let translate =
decode_or_err (J.to_array (decode_or_err J.to_float)) translate
in
Some
{
scale = (scale.(0), scale.(1));
translate = (translate.(0), translate.(1));
}
| _ -> None)
let of_json json =
match (J.find json [ "objects" ], J.find json [ "arcs" ]) with
| Some objects, Some arcs ->
let* objects = J.to_obj objects in
let geometries =
List.map
(fun (k, v) -> (k, decode_or_err Geometry.of_json v))
objects
in
let* arcs =
J.to_array
(decode_or_err
(J.to_array
(decode_or_err (J.to_array (decode_or_err J.to_float)))))
arcs
in
let transform = transform_of_json json in
let fm = foreign_members_of_json json in
Ok { objects = geometries; arcs; transform; foreign_members = fm }
| _, _ -> Error (`Msg "No objects and/or arcs field in Topology object!")
let transform_to_json transform =
J.obj
[
("scale", J.list J.float [ fst transform.scale; snd transform.scale ]);
( "translate",
J.list J.float [ fst transform.translate; snd transform.translate ]
);
]
let to_json ?bbox { objects; arcs; foreign_members; transform } =
let transform =
Option.map (fun v -> ("transform", transform_to_json v)) transform
|> Option.to_list
in
J.obj
([
("type", J.string "Topology");
( "objects",
J.obj (List.map (fun (k, v) -> (k, Geometry.to_json v)) objects) );
("arcs", J.array (J.array (J.array J.float)) arcs);
]
@ transform
@ bbox_to_json_or_empty bbox
@ foreign_members)
end
type topojson = Topology of Topology.t | Geometry of Geometry.t
type t = { topojson : topojson; bbox : float array option }
let topojson t = t.topojson
let bbox t = t.bbox
let topojson_to_t tjson bbox = { topojson = tjson; bbox }
let json_to_bbox json =
match J.to_array (decode_or_err J.to_float) json with
| Ok v -> Some v
| Error _ -> None
let of_json json =
match (J.find json [ "type" ], J.find json [ "bbox" ]) with
| Some typ, bbx -> (
match J.to_string typ with
| Ok "Topology" -> (
match Topology.of_json json with
| Ok v ->
Ok (topojson_to_t (Topology v) @@ Option.bind bbx json_to_bbox)
| Error e -> Error e)
| Ok s -> Error (`Msg ("Expected `Topology` but got " ^ s))
| Error _ as e -> e)
| None, _ -> Error (`Msg "Could not find Topology type")
let to_json = function
| { topojson = Topology f; bbox } -> Topology.to_json ?bbox f
| { topojson = Geometry g; bbox = _ } ->
The geometry value [ g ] will have the bbox already , so we do n't need to
do anything with it here .
do anything with it here. *)
Geometry.to_json g
let v ?bbox topojson = { bbox; topojson }
end
|
65fa90f1ca30a272a661978e5accbd844e91d3d5ac68ab7cee4b8eb6acd26293 | hidaris/thinking-dumps | 02_racket_basics.rkt | #lang racket
(provide (all-defined-out))
val x = 3
(define y (+ x 2)) ; + is a function, call it here
(define cube1
(lambda (x)
(* x (* x x))))
(define cube2
(lambda (x)
(* x x x)))
(define (cube3 x)
(* x x x))
x to the yth power ( y must be nonegative )
(if (= y 0)
1
(* x (pow1 x (- y 1)))))
(define pow2
(lambda (x)
(lambda (y)
(pow1 x y))))
(define three-to-the (pow2 3)) | null | https://raw.githubusercontent.com/hidaris/thinking-dumps/3fceaf9e6195ab99c8315749814a7377ef8baf86/cse341/racket/02_racket_basics.rkt | racket | + is a function, call it here | #lang racket
(provide (all-defined-out))
val x = 3
(define cube1
(lambda (x)
(* x (* x x))))
(define cube2
(lambda (x)
(* x x x)))
(define (cube3 x)
(* x x x))
x to the yth power ( y must be nonegative )
(if (= y 0)
1
(* x (pow1 x (- y 1)))))
(define pow2
(lambda (x)
(lambda (y)
(pow1 x y))))
(define three-to-the (pow2 3)) |
852aa9f511c9e75cb20e444ffa0796c2b0cc7b5eefa6b06e271a1ff75ae4ce3a | Timothy-G-Griffin/cc_cl_cam_ac_uk | free_vars.ml | open Ast
let rec inlist x = function
| [] -> false
| y :: rest -> if x = y then true else inlist x rest
(* free_vars (bvars, e) returns a
list, with no duplicates, of all free variables
of e that are not in the list bvars.
*)
let free_vars(bvars, exp) =
let rec aux bound free = function
| Var x -> if (inlist x bound) || (inlist x free) then free else x :: free
| UnaryOp(_, e) -> aux bound free e
| Op(e1, _, e2) -> aux bound (aux bound free e1) e2
| If(e1, e2, e3) -> aux bound (aux bound (aux bound free e1) e2) e3
| Pair(e1, e2) -> aux bound (aux bound free e1) e2
| App(e1, e2) -> aux bound (aux bound free e1) e2
| Fst e -> aux bound free e
| Snd e -> aux bound free e
| Inl e -> aux bound free e
| Inr e -> aux bound free e
| Lambda l -> lambda bound free l
| Case(e, l1, l2) -> lambda bound (lambda bound (aux bound free e) l1) l2
| LetFun(f, l, e) -> aux (f :: bound) (lambda bound free l) e
| LetRecFun(f, l, e) -> aux (f :: bound) (lambda (f :: bound) free l) e
| Ref e -> aux bound free e
| Deref e -> aux bound free e
| Assign(e1, e2) -> aux bound (aux bound free e1) e2
| While(e1, e2) -> aux bound (aux bound free e1) e2
| Seq [] -> free
| Seq (e :: rest) -> aux bound (aux bound free e) (Seq rest)
| _ -> free
and lambda bound free (x, e) = aux (x :: bound) free e
in aux bvars [] exp
| null | https://raw.githubusercontent.com/Timothy-G-Griffin/cc_cl_cam_ac_uk/aabaf64c997301ea69060a1b69e915b9d1031573/slang/free_vars.ml | ocaml | free_vars (bvars, e) returns a
list, with no duplicates, of all free variables
of e that are not in the list bvars.
| open Ast
let rec inlist x = function
| [] -> false
| y :: rest -> if x = y then true else inlist x rest
let free_vars(bvars, exp) =
let rec aux bound free = function
| Var x -> if (inlist x bound) || (inlist x free) then free else x :: free
| UnaryOp(_, e) -> aux bound free e
| Op(e1, _, e2) -> aux bound (aux bound free e1) e2
| If(e1, e2, e3) -> aux bound (aux bound (aux bound free e1) e2) e3
| Pair(e1, e2) -> aux bound (aux bound free e1) e2
| App(e1, e2) -> aux bound (aux bound free e1) e2
| Fst e -> aux bound free e
| Snd e -> aux bound free e
| Inl e -> aux bound free e
| Inr e -> aux bound free e
| Lambda l -> lambda bound free l
| Case(e, l1, l2) -> lambda bound (lambda bound (aux bound free e) l1) l2
| LetFun(f, l, e) -> aux (f :: bound) (lambda bound free l) e
| LetRecFun(f, l, e) -> aux (f :: bound) (lambda (f :: bound) free l) e
| Ref e -> aux bound free e
| Deref e -> aux bound free e
| Assign(e1, e2) -> aux bound (aux bound free e1) e2
| While(e1, e2) -> aux bound (aux bound free e1) e2
| Seq [] -> free
| Seq (e :: rest) -> aux bound (aux bound free e) (Seq rest)
| _ -> free
and lambda bound free (x, e) = aux (x :: bound) free e
in aux bvars [] exp
|
10e0f239dc358122bf0c8287131f9c4413c45940ef3754ec6a329f5a8dd288b5 | andrejbauer/plzoo | type_check.ml | (** Type checking. *)
type ctype =
| CFree of vtype (** free type [F s] *)
| CArrow of vtype * ctype (** Function type [s -> t] *)
and vtype =
| VInt (** integer [int] *)
| VBool (** booleans [bool] *)
| VForget of ctype (** thunked type [U t] *)
let type_error ?loc = Zoo.error ~kind:"Typing error" ?loc
let rec print_vtype ?max_level vty ppf =
match vty with
| VInt -> Format.fprintf ppf "int"
| VBool -> Format.fprintf ppf "bool"
| VForget cty ->
Zoo.print_parens ?max_level ~at_level:2 ppf
"U@ %t"
(print_ctype ~max_level:1 cty)
and print_ctype ?max_level cty ppf =
match cty with
| CFree vty ->
Zoo.print_parens ?max_level ~at_level:2 ppf
"F@ %t"
(print_vtype ~max_level:1 vty)
| CArrow (vty, cty) ->
Zoo.print_parens ?max_level ~at_level:1 ppf
"%t@ ->@ %t"
(print_vtype ~max_level:1 vty)
(print_ctype ~max_level:2 cty)
let rec as_ctype {Zoo.data=ty; loc} =
match ty with
| Syntax.VInt | Syntax.VBool | Syntax.VForget _ ->
type_error ~loc "this is not a computation type"
| Syntax.CFree ty -> CFree (as_vtype ty)
| Syntax.CArrow (ty1, ty2) -> CArrow (as_vtype ty1, as_ctype ty2)
and as_vtype {Zoo.data=ty; loc} =
match ty with
| Syntax.VInt -> VInt
| Syntax.VBool -> VBool
| Syntax.VForget ty -> VForget (as_ctype ty)
| Syntax.CFree _ | Syntax.CArrow _ ->
type_error ~loc "this is not a value type"
* [ check e ] checks that expression [ e ] has computation
type [ ty ] in context [ ctx ] . It raises a type error if it does
not .
type [ty] in context [ctx]. It raises a type error if it does
not. *)
let rec check_vtype ctx vty e =
let vty' = vtype_of ctx e in
if vty' <> vty then
type_error ~loc:e.Zoo.loc
"this expression has value type %t but is used as if its type is %t"
(print_vtype vty')
(print_vtype vty)
and check_ctype ctx cty e =
let cty' = ctype_of ctx e in
if cty' <> cty then
type_error ~loc:e.Zoo.loc
"this expression has computation type %t but is used as if its type is %t"
(print_ctype cty')
(print_ctype cty)
(** [vtype_of ctx e] computes the value type of an expression [e] in context [ctx].
It raises type error if [e] does not have a value type. *)
and vtype_of ctx {Zoo.data=e; loc} =
match e with
| Syntax.Var x ->
(try
List.assoc x ctx
with
Not_found -> type_error ~loc "unknown identifier %s" x)
| Syntax.Int _ -> VInt
| Syntax.Bool _ -> VBool
| Syntax.Times (e1, e2) ->
check_vtype ctx VInt e1 ;
check_vtype ctx VInt e2 ;
VInt
| Syntax.Plus (e1, e2) ->
check_vtype ctx VInt e1 ;
check_vtype ctx VInt e2 ;
VInt
| Syntax.Minus (e1, e2) ->
check_vtype ctx VInt e1 ;
check_vtype ctx VInt e2 ;
VInt
| Syntax.Equal (e1, e2) ->
check_vtype ctx VInt e1 ;
check_vtype ctx VInt e2 ;
VBool
| Syntax.Less (e1, e2) ->
check_vtype ctx VInt e1 ;
check_vtype ctx VInt e2 ;
VBool
| Syntax.Thunk e ->
let ty = ctype_of ctx e in
VForget ty
| Syntax.If _
| Syntax.Fun _
| Syntax.Apply _
| Syntax.Do _
| Syntax.Let _
| Syntax.Return _
| Syntax.Force _
| Syntax.Rec _ ->
type_error ~loc "a value was expected but a computation was encountered"
(** [ctype_of ctx e] computes the computation type of a computation [e] in context [ctx].
It raises type error if [e] does not have a computation type. *)
and ctype_of ctx {Zoo.data=e; loc} =
match e with
| Syntax.If (e1, e2, e3) ->
check_vtype ctx VBool e1 ;
let ty = ctype_of ctx e2 in
check_ctype ctx ty e3 ;
ty
| Syntax.Fun (x, ty, e) ->
let ty = as_vtype ty in
let ty2 = ctype_of ((x,ty)::ctx) e in
CArrow (ty, ty2)
| Syntax.Apply (e1, e2) ->
(match ctype_of ctx e1 with
| CArrow (ty1, ty2) ->
check_vtype ctx ty1 e2 ;
ty2
| ty ->
type_error ~loc:(e1.Zoo.loc)
"this expression is used as a function but its type is %t"
(print_ctype ty))
| Syntax.Do (x, e1, e2) ->
(match ctype_of ctx e1 with
| CFree ty1 -> ctype_of ((x,ty1)::ctx) e2
| ty -> type_error ~loc:(e1.Zoo.loc)
"this expression is sequenced but its type is %t"
(print_ctype ty))
| Syntax.Let (x, e1, e2) ->
let ty1 = vtype_of ctx e1 in
ctype_of ((x,ty1)::ctx) e2
| Syntax.Return e ->
let ty = vtype_of ctx e in
CFree ty
| Syntax.Force e ->
(match vtype_of ctx e with
| VForget ty -> ty
| ty -> type_error ~loc:(e.Zoo.loc)
"this expression is forced but its type is %t"
(print_vtype ty))
| Syntax.Rec (x, ty, e) ->
let ty = as_ctype ty in
check_ctype ((x, VForget ty)::ctx) ty e ;
ty
| Syntax.Var _
| Syntax.Int _
| Syntax.Bool _
| Syntax.Plus _
| Syntax.Minus _
| Syntax.Times _
| Syntax.Equal _
| Syntax.Less _
| Syntax.Thunk _ ->
type_error ~loc "a computation was expected but a value was encountered"
| null | https://raw.githubusercontent.com/andrejbauer/plzoo/ae6041c65baf1eebf65a60617819efeb8dcd3420/src/levy/type_check.ml | ocaml | * Type checking.
* free type [F s]
* Function type [s -> t]
* integer [int]
* booleans [bool]
* thunked type [U t]
* [vtype_of ctx e] computes the value type of an expression [e] in context [ctx].
It raises type error if [e] does not have a value type.
* [ctype_of ctx e] computes the computation type of a computation [e] in context [ctx].
It raises type error if [e] does not have a computation type. |
type ctype =
and vtype =
let type_error ?loc = Zoo.error ~kind:"Typing error" ?loc
let rec print_vtype ?max_level vty ppf =
match vty with
| VInt -> Format.fprintf ppf "int"
| VBool -> Format.fprintf ppf "bool"
| VForget cty ->
Zoo.print_parens ?max_level ~at_level:2 ppf
"U@ %t"
(print_ctype ~max_level:1 cty)
and print_ctype ?max_level cty ppf =
match cty with
| CFree vty ->
Zoo.print_parens ?max_level ~at_level:2 ppf
"F@ %t"
(print_vtype ~max_level:1 vty)
| CArrow (vty, cty) ->
Zoo.print_parens ?max_level ~at_level:1 ppf
"%t@ ->@ %t"
(print_vtype ~max_level:1 vty)
(print_ctype ~max_level:2 cty)
let rec as_ctype {Zoo.data=ty; loc} =
match ty with
| Syntax.VInt | Syntax.VBool | Syntax.VForget _ ->
type_error ~loc "this is not a computation type"
| Syntax.CFree ty -> CFree (as_vtype ty)
| Syntax.CArrow (ty1, ty2) -> CArrow (as_vtype ty1, as_ctype ty2)
and as_vtype {Zoo.data=ty; loc} =
match ty with
| Syntax.VInt -> VInt
| Syntax.VBool -> VBool
| Syntax.VForget ty -> VForget (as_ctype ty)
| Syntax.CFree _ | Syntax.CArrow _ ->
type_error ~loc "this is not a value type"
* [ check e ] checks that expression [ e ] has computation
type [ ty ] in context [ ctx ] . It raises a type error if it does
not .
type [ty] in context [ctx]. It raises a type error if it does
not. *)
let rec check_vtype ctx vty e =
let vty' = vtype_of ctx e in
if vty' <> vty then
type_error ~loc:e.Zoo.loc
"this expression has value type %t but is used as if its type is %t"
(print_vtype vty')
(print_vtype vty)
and check_ctype ctx cty e =
let cty' = ctype_of ctx e in
if cty' <> cty then
type_error ~loc:e.Zoo.loc
"this expression has computation type %t but is used as if its type is %t"
(print_ctype cty')
(print_ctype cty)
and vtype_of ctx {Zoo.data=e; loc} =
match e with
| Syntax.Var x ->
(try
List.assoc x ctx
with
Not_found -> type_error ~loc "unknown identifier %s" x)
| Syntax.Int _ -> VInt
| Syntax.Bool _ -> VBool
| Syntax.Times (e1, e2) ->
check_vtype ctx VInt e1 ;
check_vtype ctx VInt e2 ;
VInt
| Syntax.Plus (e1, e2) ->
check_vtype ctx VInt e1 ;
check_vtype ctx VInt e2 ;
VInt
| Syntax.Minus (e1, e2) ->
check_vtype ctx VInt e1 ;
check_vtype ctx VInt e2 ;
VInt
| Syntax.Equal (e1, e2) ->
check_vtype ctx VInt e1 ;
check_vtype ctx VInt e2 ;
VBool
| Syntax.Less (e1, e2) ->
check_vtype ctx VInt e1 ;
check_vtype ctx VInt e2 ;
VBool
| Syntax.Thunk e ->
let ty = ctype_of ctx e in
VForget ty
| Syntax.If _
| Syntax.Fun _
| Syntax.Apply _
| Syntax.Do _
| Syntax.Let _
| Syntax.Return _
| Syntax.Force _
| Syntax.Rec _ ->
type_error ~loc "a value was expected but a computation was encountered"
(* [ctype_of ctx e] infers the *computation* type of expression [e] under
   context [ctx].  Raises a type error if [e] is syntactically a value. *)
and ctype_of ctx {Zoo.data=e; loc} =
  match e with
  (* Both branches of a conditional must have the same computation type. *)
  | Syntax.If (e1, e2, e3) ->
     check_vtype ctx VBool e1 ;
     let ty = ctype_of ctx e2 in
     check_ctype ctx ty e3 ;
     ty
  (* Functions take a value-typed argument and produce a computation. *)
  | Syntax.Fun (x, ty, e) ->
     let ty = as_vtype ty in
     let ty2 = ctype_of ((x,ty)::ctx) e in
     CArrow (ty, ty2)
  | Syntax.Apply (e1, e2) ->
     (match ctype_of ctx e1 with
      | CArrow (ty1, ty2) ->
         check_vtype ctx ty1 e2 ;
         ty2
      | ty ->
         type_error ~loc:(e1.Zoo.loc)
           "this expression is used as a function but its type is %t"
           (print_ctype ty))
  (* Sequencing: [e1] must be a producer [CFree ty1]; bind its value to [x]. *)
  | Syntax.Do (x, e1, e2) ->
     (match ctype_of ctx e1 with
      | CFree ty1 -> ctype_of ((x,ty1)::ctx) e2
      | ty -> type_error ~loc:(e1.Zoo.loc)
                "this expression is sequenced but its type is %t"
                (print_ctype ty))
  | Syntax.Let (x, e1, e2) ->
     let ty1 = vtype_of ctx e1 in
     ctype_of ((x,ty1)::ctx) e2
  (* Returning a value of type [ty] is a producer of type [CFree ty]. *)
  | Syntax.Return e ->
     let ty = vtype_of ctx e in
     CFree ty
  (* Forcing a thunk [VForget ty] runs the underlying computation. *)
  | Syntax.Force e ->
     (match vtype_of ctx e with
      | VForget ty -> ty
      | ty -> type_error ~loc:(e.Zoo.loc)
                "this expression is forced but its type is %t"
                (print_vtype ty))
  (* Recursion: the body may refer to itself through a thunk of its own type. *)
  | Syntax.Rec (x, ty, e) ->
     let ty = as_ctype ty in
     check_ctype ((x, VForget ty)::ctx) ty e ;
     ty
  | Syntax.Var _
  | Syntax.Int _
  | Syntax.Bool _
  | Syntax.Plus _
  | Syntax.Minus _
  | Syntax.Times _
  | Syntax.Equal _
  | Syntax.Less _
  | Syntax.Thunk _ ->
     type_error ~loc "a computation was expected but a value was encountered"
|
5a515ccab26a2cd88771f26ed71f53b2d14632aacc06427fe4e6962c21f726ef | repl-electric/cassiopeia | project.clj | (defproject stars "0.1.0-SNAPSHOT"
:description "Repl Electric: Cassiopeia"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.5.1"]
[overtone "0.9.1"]
[mud "0.1.0-SNAPSHOT"]
[korg-nano-kontrol2 "0.1.0-SNAPSHOT"]
[launchpad "0.1.0-SNAPSHOT"]
[monome "0.1.0-SNAPSHOT"]
;;[shadertone "0.2.4"]
[shadertone "0.3.0-SNAPSHOT"]
[rogerallen/leaplib "0.8.1"]
[rogerallen/leaplib-natives "0.8.1"]
[overtone.synths "0.1.0-SNAPSHOT"]
[overtone.orchestra "0.1.0-SNAPSHOT"]
[org.clojure/math.numeric-tower "0.0.4"]]
:jvm-opts [
" -agentpath:/Applications / YourKit_Java_Profiler_12.0.5.app / bin / mac / libyjpagent.jnilib "
"-Xms512m" "-Xmx1g" ; Minimum and maximum sizes of the
; heap
Use the new parallel GC in
; conjunction with
"-XX:+UseConcMarkSweepGC" ; the concurrent garbage collector
"-XX:+CMSConcurrentMTEnabled" ; Enable multi-threaded concurrent
; gc work (ParNewGC)
Specify a target of 20ms for
; gc pauses
Do many small GC cycles to
; minimize pauses
"-XX:MaxNewSize=257m" ; Specify the max and min size of
; the new
"-XX:NewSize=256m" ; generation to be small
"-XX:+UseTLAB" ; Uses thread-local object
; allocation blocks. This
; improves concurrency by reducing
; contention on
; the shared heap lock.
"-XX:MaxTenuringThreshold=0"
" -XX:+PrintGC " ; Print GC info to stdout
;; "-XX:+PrintGCDetails" ; - with details
;; "-XX:+PrintGCTimeStamps"
Makes the full NewSize available to
every cycle , and reduces
; the
; pause time by not evaluating
; tenured objects. Technically,
; this
; setting promotes all live objects
; to the older generation, rather
; than copying them.
)
| null | https://raw.githubusercontent.com/repl-electric/cassiopeia/a42c01752fc8dd04ea5db95c8037f393c29cdb75/project.clj | clojure | [shadertone "0.2.4"]
Minimum and maximum sizes of the
heap
conjunction with
the concurrent garbage collector
Enable multi-threaded concurrent
gc work (ParNewGC)
gc pauses
minimize pauses
Specify the max and min size of
the new
generation to be small
Uses thread-local object
allocation blocks. This
improves concurrency by reducing
contention on
the shared heap lock.
Print GC info to stdout
"-XX:+PrintGCDetails" ; - with details
"-XX:+PrintGCTimeStamps"
the
pause time by not evaluating
tenured objects. Technically,
this
setting promotes all live objects
to the older generation, rather
than copying them. | (defproject stars "0.1.0-SNAPSHOT"
:description "Repl Electric: Cassiopeia"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.5.1"]
[overtone "0.9.1"]
[mud "0.1.0-SNAPSHOT"]
[korg-nano-kontrol2 "0.1.0-SNAPSHOT"]
[launchpad "0.1.0-SNAPSHOT"]
[monome "0.1.0-SNAPSHOT"]
[shadertone "0.3.0-SNAPSHOT"]
[rogerallen/leaplib "0.8.1"]
[rogerallen/leaplib-natives "0.8.1"]
[overtone.synths "0.1.0-SNAPSHOT"]
[overtone.orchestra "0.1.0-SNAPSHOT"]
[org.clojure/math.numeric-tower "0.0.4"]]
:jvm-opts [
" -agentpath:/Applications / YourKit_Java_Profiler_12.0.5.app / bin / mac / libyjpagent.jnilib "
Use the new parallel GC in
Specify a target of 20ms for
Do many small GC cycles to
"-XX:MaxTenuringThreshold=0"
Makes the full NewSize available to
every cycle , and reduces
)
|
590d4c6b26c4359dc271a262570f78dc33d37f202d47bda2f04a2eb49e1259c2 | jrm-code-project/LISP-Machine | udasm.lisp | -*- Mode : LISP ; Package : LAMBDA ; : CL ; Base:8 -*-
;;;;;;;;;;;;;;;;;;;;;
;;; Does this look ugly? Use a landscape.
;;;;;;;;;;;;;;;;;;;;;
;; Labels listed here are expected to have no predecessors, so they are not
;; reported by FIND-INSTRUCTIONS-WITHOUT-PREDECESSORS.
(defconstant *dont-count-these-instructions-in-those-that-have-no-predecessors*
  '(HIGHEST-KERNAL-UCODE-LOCATION)
  "Anything on this list is not included in the *instructions-without-predecessors*.")
(defparameter *default-scanning-distance* 10. "Declarations can be this far ahead of applicable instruction.")
;; Raw memory images read from the .LMC band (see READIN-FILES).
(defvar *i-mem-array*)
(defvar *a-mem-array*)
(defvar *main-mem-array*)
(defvar *mid-array*)
;; Pathname and version of the microcode band being disassembled.
(defvar *lmc-pathname*)
(defvar *trace-lmc-version-number*)
;; Microcode addresses of the TRAP and ILLOP entry points (see INITIAL-SETUP).
(defvar *trap-upc*)
(defvar *illop-upc*)
(defvar *highest-kernal-ucode-location+1*)
;; Symbolic names for I-memory addresses and A-memory locations.
(defvar *i-mem-symbols-array* (make-sequence 'vector 50000.))
(defvar *a-mem-symbols-array* (make-sequence 'vector 1024.))
;; Register-name bookkeeping; *REGISTER-SET-UNIVERSE* backs the set package
;; used for source/destination/clobbers sets.
(defvar *register-names* (make-sequence 'vector *number-of-registers*))
(defvar *registers* (make-hash-table :size *number-of-registers*))
(defvar *register-set-universe* (make-universe *number-of-registers*))
(defvar *m-mem-registers-set*)
(defvar *constant-registers-set*)
(defvar *global-registers-set*)
(defvar *ucode-declarations*)
;; NOTE(review): the lines below were comments in the original source whose
;; leading semicolons were lost in extraction; they document the DECLARE
;; forms accepted in the lmc-dcl file.
a list ( < ucode - locn > < args to DECLARE at ucode assembly time )
; following forms are meaningful:
; (args <reg1> <reg2> ..) ;this subroutine takes args in these regs.
; (values <reg1> <reg2> ..) ;this subroutine returns values here
; (clobbers <reg1> <reg2> ..);clobbers or may clobber these. the server a documentation
; ; purpose and speeds up the micro-tracer. However, it is
; ; capable of figuring this out for itself.
; (local <reg1> ...) ;holds state in these vars, error if caller depends on them.
; ; a-fake-microsecond-clock, for example.
; (must-avoid <symbol>) ;this subroutine, ones it calls, etc must never get to <symbol>.
; ; intended mainly for (must-avoid INTR).
; (saves (<reg> <place> ..) ;place may be register or NIL if too complicated
; ; error if store it twice in any path and place is NON-NIL.
; (restores (<reg> <place> ..) ;restores from last saved.
; (suspend-flow-tracing) ;dont look past here, leaving environment of interest never to return
;; Results of the analysis passes (see FIND-SUBROUTINE-ENTRIES etc.).
(defvar *subroutine-entries*)
(defvar *instructions-without-predecessors*)
(defvar *basic-block-info*)
;;; This is where everything kicks off. If you do this by hand, be sure to follow
;;; the order of things. Many things are not idempotent and most must be done in the
;;; order shown.
Diagnostic :
(defun i-want-to-lose (&key (how-much? :really-big))
  "Repeatedly run the microcode disassembler until LOGIC-HAS-FALLEN? is true."
  how-much?                             ;accepted for flavor, otherwise unused
  (format t "~%Anything you say...")
  (let ((pass-number 0))
    (do ()
        ((logic-has-fallen?) "Logic has fallen")
      (format t "~%~%Attempting to lose on pass ~d." pass-number)
      (disassemble-microcode)
      (incf pass-number))))
;; Top-level driver.  The steps are order-dependent: files must be read
;; before setup, instructions built before successor/predecessor analysis.
(defun disassemble-microcode (&optional (filename t)) ;will load latest version if t.
  (readin-files filename)
  (initial-setup)
  (compute-instructions)
  (find-subroutine-entries)
  (find-instructions-without-predecessors)
  )
;; Read the microcode band and its symbol/declaration files, filling the
;; *...-ARRAY* globals and recording pathname/version.
(defun readin-files (filename)
  (multiple-value-setq (*i-mem-array* *a-mem-array*
                        *main-mem-array* *mid-array*
                        *lmc-pathname* *trace-lmc-version-number*)
    ;; NOTE(review): the value form of this MULTIPLE-VALUE-SETQ appears to
    ;; have been lost in extraction; the next line looks like a mangled
    ;; comment ("defined in io;disk").  Verify against the repository copy.
defined in : io;disk
  (si:load-i-mem-symbols-if-necessary *trace-lmc-version-number* *lmc-pathname*)
  (readin-declarations *trace-lmc-version-number* *lmc-pathname*))
;; Read the "lmc-dcl" declaration file for microcode version VN.
;; NOTE(review): the tail of this function appears truncated in this copy,
;; and the trailing line below looks like a mangled comment about flushing
;; (SETQ MICRO-DECLARATION-TABLE (quote ...)).  Verify against the
;; repository copy.
(defun readin-declarations (&optional (vn *trace-lmc-version-number*)
                                      (filename *lmc-pathname*))
  (setf filename (funcall filename :new-type-and-version "lmc-dcl" vn))
  (with-open-file (f filename)
    (let* ((ibase 8)
           (base 8)
           (s-exp (read f)))
flush ( SETQ MICRO - DECLARATION - TABLE ( quote .. ) )
;; Build the symbol arrays and register tables, then cache the addresses of
;; the TRAP and ILLOP microcode entry points.
(defun initial-setup ()
  (setup-i-mem-symbols-array)
  (setup-a-mem-symbols-array)
  (setq *illop-upc* (i-mem-symbol->address "ILLOP")
        *trap-upc* (i-mem-symbol->address "TRAP"))
  (setup-register-info)
  )
;; Turn the raw I-memory words into INSTRUCTION structures and link them
;; into a flow graph (successors, then predecessors).
(defun compute-instructions ()
  (digest-i-mem-array)
  (digest-declarations)
  (compute-successors)
  (compute-predecessors)
  )
;; Rebuild *I-MEM-SYMBOLS-ARRAY*: first exact labels, then relative names
;; (label + offset) for unlabelled locations.
(defun setup-i-mem-symbols-array ()
  (fill *i-mem-symbols-array* '())
  (insert-i-mem-labels)
  (insert-relative-i-mem-labels)
  )
;; Copy every label from the version's symbol hash table into the array,
;; ignoring values outside the array bounds.
(defun insert-i-mem-labels ()
  (let ((hash-table (cadr (assoc *trace-lmc-version-number* si:*i-mem-symbol-tables*))))
    (maphash #'(lambda (symbol symbol-value)
                 (when (array-in-bounds? *i-mem-symbols-array* symbol-value)
                   (setf (elt *i-mem-symbols-array* symbol-value) symbol)))
             hash-table)))
;; Give each unlabelled address a name of the form (LABEL OFFSET) relative
;; to the most recent label, or just the raw address if no label is within
;; 100 (octal) locations.
(defun insert-relative-i-mem-labels ()
  (let (last-symbol last-symbol-value)
    (for-elements-in-vector
      *i-mem-symbols-array*
      #'(lambda (address entry)
          (if entry
              (setq last-symbol entry
                    last-symbol-value address)
            (setf (elt *i-mem-symbols-array* address)
                  (if (and last-symbol ;When last symbol is close enough,
                           (< (- address last-symbol-value) 100.)) ;number relative to it.
                      (list last-symbol (- address last-symbol-value))
                    address))))))) ;Otherwise just use the number.
;; Numeric I-mem address -> symbolic name (NIL address maps to NIL).
(defun address->i-mem-symbol (address)
  (when address (elt *i-mem-symbols-array* address)))
;; Symbolic label -> numeric I-mem address for the current version.
(defun i-mem-symbol->address (symbol)
  (si:i-mem-symeval symbol *trace-lmc-version-number*))
;; Fill *A-MEM-SYMBOLS-ARRAY* from the version's (address name) pairs.
(defun setup-a-mem-symbols-array ()
  (dolist (symbol-address-pair (third (assoc *trace-lmc-version-number* si:*i-mem-symbol-tables*)))
    (setf (elt *a-mem-symbols-array* (first symbol-address-pair)) (second symbol-address-pair))))
;; Numeric A-mem address -> symbolic name (NIL when out of bounds).
(defun address->a-mem-symbol (address)
  (when (array-in-bounds? *a-mem-symbols-array* address)
    (elt *a-mem-symbols-array* address)))
;;; Registers
;; Build all register bookkeeping: names, the set-package universe, and the
;; constant/global/M-memory register sets.  Order matters: names must exist
;; before consistency checking and set construction.
(defun setup-register-info ()
  (fill *register-names* '())
  (let* ((symbol-table-list (assoc *trace-lmc-version-number* si:*i-mem-symbol-tables*))
         (register-alist (third symbol-table-list))
         (info-list (fourth symbol-table-list)))
    (setup-register-names register-alist)
    (check-a-mem-consistancy)
    (setup-register-set-universe)
    (setup-constants-set (get-from-alternating-list info-list 'a-constant-list))
    (setup-globals-set)
    (setup-m-mem-set)))
;; Record each (number name) pair into *REGISTER-NAMES*, skipping pairs
;; whose number is missing or out of range.
(defun setup-register-names (register-alist)
  (dolist (name-register-pairs register-alist)
    (let ((register-number (first name-register-pairs))
          (register-name (second name-register-pairs)))
      (when (and (number? register-number)
                 (array-in-bounds? *register-names* register-number))
        (setf (elt *register-names* register-number) register-name)))))
(defun check-a-mem-consistancy ()
  "This just makes a-mem-symbol-array and register-names consistant"
  ;; For each index: if both arrays name it, they must agree (else FERROR);
  ;; if only one names it, copy the name across; if neither does, invent
  ;; REGISTER-<n> and store it in both.
  (for-elements-in-vector
    *a-mem-symbols-array*
    #'(lambda (index a-mem-name)
        (let ((register-name (register-number->symbol index)))
          (if a-mem-name
              (if register-name
                  (unless (eq? a-mem-name register-name)
                    (ferror nil "Name mismatch between ~S in a-mem and ~S register at ~D"
                            a-mem-name register-name index))
                (setf (elt *register-names* index) a-mem-name))
            (if register-name
                (setf (elt *a-mem-symbols-array* index) register-name)
              (let ((name (intern (format nil "REGISTER-~D" index))))
                (setf (elt *register-names* index) name)
                (setf (elt *a-mem-symbols-array* index) name))))))))
(defun setup-register-set-universe ()
  ;; Setup universe for source/clobbers sets.
  ;; We assume that lower number registers are used more often.
  ;; This assumption is implementation dependant, but will only cause speed lossage
  ;; if it is not true.
  (for-elements-in-vector *register-names*
    (lambda (ignore-index name)
      ignore-index
      (element->number-inserting name *register-set-universe*)))
  (setq *default-universe* *register-set-universe*))
;; Build *CONSTANT-REGISTERS-SET* from the static list, the assembly-time
;; A-constant list, and the registers reserved for M constants.
(defun setup-constants-set (assembly-constant-list)
  (setq *constant-registers-set* (list->set *constant-registers*))
  (dolist (constant-register-pair assembly-constant-list)
    (let ((name (register-number->symbol (second constant-register-pair))))
      (unless (null? name)
        (set-push! name *constant-registers-set*))))
  (dolist (register *registers-used-for-M-constants*) ;;See beginning of file.
    (set-push! (register-number->symbol register) *constant-registers-set*)))
(defun setup-globals-set ()
  (setq *global-registers-set* (list->set *global-registers*)))
;; The M-memory registers are the A-memory names between the M-mem bounds.
(defun setup-m-mem-set ()
  (setq *m-mem-registers-set*
        (list->set
          (sequence->list
            (subseq *a-mem-symbols-array* *m-mem-low-bound* *m-mem-high-bound*)))))
;; Name <-> number lookups and simple predicates over the register tables.
(defun symbol->register-number (symbol)
  (si:register-symeval symbol *trace-lmc-version-number*))
(defun register-number->symbol (number)
  (elt *register-names* number))
(defun register-exists? (register-name)
  (not (null? (symbol->register-number register-name))))
(defun constant-register? (register)
  (set-element? register *constant-registers-set*))
;;; Instructions.
;; Mask covering the significant bits of a microinstruction word (octal).
(defconstant *instruction-mask* 77777777777777777777) ;;ldb lossage?
(defconstant-now *dispatch-address-mask* 177777)
;; A slot in *I-MEM-ARRAY* holds either a fixnum instruction or NIL.
(defun valid-instruction? (instruction)
  (not (null? instruction)))
;; An instruction whose significant bits are all zero is a no-op.
(defun no-op? (instruction)
  (zero? (logand instruction *instruction-mask*)))
;;; Slow extractors
;(defun extractor (field)
; #'(lambda (fixinst)
; (ldb field fixinst)))
;(defun extract-flag (field)
; #'(lambda (fixinst)
; (= 1 (funcall (extractor field) fixinst))))
;(defun extract-symbolic (field symbol-map)
; (let ((lookup (appropriate-lookup symbol-map)))
; #'(lambda (fixinst)
; (funcall lookup (funcall (extractor field) fixinst)))))
;(defun appropriate-lookup (alist-or-vector)
; (if (list? alist-or-vector)
 # ' ( lambda ( value ) ( second ( assoc value alist - or - vector ) ) )
; #'(lambda (value) (aref alist-or-vector value))))
;;; Fast extractors
;; These macros expand into macros: DEFEXTRACTOR defines a field reader,
;; DEFFLAG-EXTRACTOR a one-bit boolean reader, and DEFSYMBOLIC-EXTRACTOR a
;; reader that maps the field value through an alist or vector of symbols.
(defmacro defextractor (name field)
  `(DEFMACRO ,name (FIXINST) `(LDB ,,field ,fixinst)))
(defmacro defflag-extractor (name field)
  `(DEFMACRO ,name (FIXINST) `(= 1 (LDB ,,field ,fixinst))))
(defmacro defsymbolic-extractor (name field symbol-table)
  `(DEFMACRO ,name (FIXINST)
     (IF (LIST? ,symbol-table)
         `(SECOND (ASSOC (LDB ,,field ,fixinst) (QUOTE ,,symbol-table) :TEST #'=))
       `(ELT ,,symbol-table (LDB ,,field ,fixinst)))))
;; Opcode field values 0-3 name the four instruction classes.
(defconstant-now opcode-map (vector 'alu-op 'byte-op 'jump-op 'dispatch-op))
(defeq-test alu-op? 'alu-op)
(defeq-test byte-op? 'byte-op)
(defeq-test jump-op? 'jump-op)
(defeq-test dispatch-op? 'dispatch-op)
(defsymbolic-extractor raw-opcode-extractor lam-ir-op opcode-map)
;; All-zero words are reported as NO-OP rather than by their opcode field.
(defun extract-opcode (instruction)
  (if (no-op? instruction)
      'no-op
    (raw-opcode-extractor instruction)))
(defflag-extractor extract-popj-after-next lam-ir-popj-after-next)
;; Map from jump-condition field values to symbolic condition names.
(defconstant-now jump-condition-map
  `((,lam-jump-cond-bit-set BIT-SET)
    (,lam-jump-cond-bit-clear BIT-CLEAR)
    (,lam-jump-cond-m<a M<A)
    (,lam-jump-cond-m<=a M<=A)
    (,lam-jump-cond-m=a M=A)
    (,lam-jump-cond-page-fault PAGE-FAULT)
    (,lam-jump-cond-page-fault-or-interrupt PAGE-FAULT-OR-INTERRUPT)
    (,lam-jump-cond-page-fault-or-interrupt-or-sequence-break
     PAGE-FAULT-OR-INTERRUPT-OR-SEQUENCE-BREAK)
    (,lam-jump-cond-unc UNCONDITIONAL)
    (,lam-jump-cond-m>=a M>=A)
    (,lam-jump-cond-m>a M>A)
    (,lam-jump-cond-m-neq-a M-NEQ-A)
    (,lam-jump-cond-no-page-fault NO-PAGE-FAULT)
    (,lam-jump-cond-data-type-not-equal DATA-TYPE-NOT-EQUAL)
    (,lam-jump-cond-data-type-equal DATA-TYPE-EQUAL)))
(defeq-test unconditional? 'unconditional)
;; R/P/N bit combinations for jump instructions.
(defconstant-now jump-rpn-map
  (vector 'jump-xct-next
          'jump
          'call-xct-next
          'call
          'return-xct-next
          'return
          'illegal-rpn
          'illegal-rpn))
(defsymbolic-extractor extract-jump-condition lam-ir-jump-cond jump-condition-map)
(defsymbolic-extractor extract-jump-rpn-bits lam-ir-rpn jump-rpn-map)
(defextractor extract-jump-address lam-ir-jump-addr)
;; R/P/N bit combinations for dispatch-table entries (note the extra
;; FALL-THROUGH and SKIP possibilities).
(defconstant-now dispatch-rpn-map
  (vector 'jump-xct-next
          'jump
          'call-xct-next
          'call
          'return-xct-next
          'return
          'fall-through
          'skip))
(defflag-extractor extract-dispatch-push-own-address lam-ir-disp-lpc)
(defextractor extract-dispatch-base-address lam-ir-disp-addr)
(defextractor extract-dispatch-bits lam-ir-disp-bytl)
(defsymbolic-extractor extract-dispatch-rpn-bits lam-disp-rpn-bits dispatch-rpn-map)
(defmacro extract-dispatch-address-from-entry (entry)
  `(LOGAND ,entry ,*dispatch-address-mask*))
;; Source and destination address fields.
(defextractor extract-a-source lam-ir-a-src)
(defextractor extract-m-source-address lam-ir-m-src-adr)
(defflag-extractor extract-functional-source-flag lam-ir-func-src-flag)
(defflag-extractor extract-a-memory-destination-flag lam-ir-a-mem-dest-flag)
(defextractor extract-a-memory-destination lam-ir-a-mem-dest)
(defextractor extract-m-memory-destination lam-ir-m-mem-dest)
;; Return the set of register names FIXINST reads.  Dispatch instructions
;; have no A source; the M source counts only when it is not a functional
;; source.
(defun extract-instruction-sources (fixinst)
  (let ((opcode (extract-opcode fixinst))
        (a-source (extract-a-source fixinst))
        (m-source-address (extract-m-source-address fixinst))
        (functional-source-flag (extract-functional-source-flag fixinst))
        (ans (make-empty-set)))
    (unless (dispatch-op? opcode)
      (set-push! (address->a-mem-symbol a-source) ans))
    (when (null? functional-source-flag)
      (set-push! (address->a-mem-symbol m-source-address) ans))
    ans))
;; Return the (empty or singleton) set of register names FIXINST writes.
;; Only ALU and byte instructions have a destination field.
(defun extract-instruction-destination (fixinst)
  (let ((opcode (extract-opcode fixinst)))
    (if (or (alu-op? opcode)
            (byte-op? opcode))
        (let ((address (if (extract-a-memory-destination-flag fixinst)
                           (extract-a-memory-destination fixinst)
                         (extract-m-memory-destination fixinst))))
          (make-set (address->a-mem-symbol address)))
      (make-empty-set))))
;; One disassembled microinstruction plus everything the analyzer learns
;; about it.  NOTE(review): in the extracted copy the OPCODE-TYPE slot line
;; had degenerated into its trailing comment ("alu - op , byte - op , ...");
;; the slot is restored here — its accessor INSTRUCTION-OPCODE-TYPE and the
;; :OPCODE-TYPE initarg are used throughout this file (e.g. in
;; MAKE-INSTRUCTION and COMPUTE-INSTRUCTION-SUCCESSORS).
(defstruct (instruction (:constructor make-instruction-internal)
                        (:callable-constructors nil) ;; This makes the constructor a macro.
                        (:print-function print-instruction)
                        )
  numeric-form                          ;; Fresh from the binary file
  address                               ;; symbolic address from *I-MEM-SYMBOLS-ARRAY*
  (declarations '())                    ;; attached by DIGEST-DECLARATIONS
  opcode-type                           ;; alu-op, byte-op, jump-op, dispatch-op (or no-op)
  popj-after-next
  sources                               ;; set of register names read
  destination                           ;; set (empty or singleton) of register written
  successors
  predecessors
  calling-subroutine
  trace-info
  trace-warnings
  )
(defsynonym instruction? instruction-p)
(defalike #'instruction? #'eq?)
;; Print function for INSTRUCTION structures: shows address, opcode type,
;; jump condition (for jumps), popj-after-next, successor names, and any
;; attached declarations.  Abbreviates when *PRINT-LEVEL* is exceeded.
(defun print-instruction (instruction stream level)
  (if (and *print-level* (> level *print-level*))
      (format stream " # ")
    (format stream "#<Instruction: ~S>"
            `(,(instruction-address instruction) ;punctuation!
              ; ,(instruction-numeric-form instruction)
              ,(instruction-opcode-type instruction)
              ,@(when (jump-op? (instruction-opcode-type instruction))
                  (list (extract-jump-condition (instruction-numeric-form instruction))))
              ,@(when (instruction-popj-after-next instruction) '(POPJ-AFTER-NEXT))
              (SUCCESSORS ,@(map 'list #'names-of-successors (instruction-successors instruction)))
              ,@(instruction-declarations instruction))
            )))
(defun names-of-successors (successor)
  "Return something printable identifying SUCCESSOR.
A bare instruction prints as its address; a tagged successor list such as
\(CALLS ...) prints as its tag consed onto the addresses of the
instructions it mentions (NIL standing in for missing ones)."
  (if (instruction? successor)
      (instruction-address successor)
      (labels ((address-if-instruction (thing)
                 (and (instruction? thing)
                      (instruction-address thing))))
        (cons (first successor)
              (mapcar #'address-if-instruction (rest successor))))))
;; The flow graph proper: one INSTRUCTION (or NIL) per microcode address.
(defvar *instruction-array* (make-sequence 'vector 50000 :initial-element '()))
(defun instruction-at (address)
  (elt *instruction-array* address))
(defsetf instruction-at (address) (new-value)
  `(SETF (ELT *INSTRUCTION-ARRAY* ,address) ,new-value))
;; Call OPERATION with (instruction address) for every non-NIL slot.
(defun for-each-instruction (operation)
  (for-elements-in-vector
    *instruction-array*
    #'(lambda (address possible-instruction)
        (when possible-instruction
          (funcall operation possible-instruction address)))))
;; Build an INSTRUCTION from the raw word, pre-extracting the fields the
;; analyzer consults repeatedly.
(defun make-instruction (fixinst address)
  (make-instruction-internal
    :numeric-form fixinst
    :address address
    :opcode-type (extract-opcode fixinst)
    :popj-after-next (extract-popj-after-next fixinst)
    :sources (extract-instruction-sources fixinst)
    :destination (extract-instruction-destination fixinst)
    ))
(defun flush-field (field-selector)
  "Takes a symbol specifying a field-selector and removes this field from all
instructions. Must be passed a symbol because of setf lossage (see this code)."
  ;; NOTE(review): the next line was a comment whose semicolon was lost in
  ;; extraction ("* Sigh *  You can't pass a selector to setf!").
  * Sigh * You ca n't pass a selector to setf !
  (let ((flusher
          (compile-lambda `(LAMBDA (INSTRUCTION)
                             (WHEN INSTRUCTION
                               (SETF (,field-selector INSTRUCTION) '()))))))
    (map '() flusher *instruction-array*)))
;; Convert *I-MEM-ARRAY* into INSTRUCTION structures; stops at the first
;; NIL slot (end of loaded microcode).
(defun digest-i-mem-array ()
  (for-elements-in-vector
    *i-mem-array*
    #'(lambda (index fixinst)
        (when (null? fixinst) (return-from digest-i-mem-array nil))
        (when (valid-instruction? fixinst)
          (setf (instruction-at index)
                (make-instruction fixinst (address->i-mem-symbol index)))))))
;;; Declarations
;;;
;;; We play a little fast and loose with abstraction here, so watch out!
;; Declarations whose info is a register set rather than arbitrary data.
(defconstant *set-declarations* '(args values clobbers local))
(defconstant *saves-tag* 'saves)
(defconstant *restores-tag* 'restores)
;; A digested declaration is (<type> . <info>).
(defsynonym declaration-type car)
(defsynonym declaration-info cdr)
;; Curried predicate: does a declaration have the given type?
(defun declaration-type? (type)
  #'(lambda (declaration)
      (eq? (declaration-type declaration) type)))
;; Return, as multiple values, the info for each declaration type named in
;; DECLS-RETURN-MAP; set-valued declarations default to the empty set.
(defun spread-declarations (instruction &rest decls-return-map)
  (let ((decls (instruction-declarations instruction)))
    (labels (
             (find-decl (type)
               (or (cdr (assoc type decls))
                   (when (member type *set-declarations*)
                     (make-empty-set))))
             )
      (values-list
        (map 'list #'find-decl decls-return-map)))))
(deff saves-declaration? (declaration-type? *saves-tag*))
(deff restores-declaration? (declaration-type? *restores-tag*))
;; Predicates on an instruction's attached declarations.
(defun suspend-flow-tracing? (instruction)
  (assoc 'suspend-flow-tracing (instruction-declarations instruction)))
(defun instruction-saves? (instruction)
  (assoc *saves-tag* (instruction-declarations instruction)))
(defun instruction-restores? (instruction)
  (assoc *restores-tag* (instruction-declarations instruction)))
;; Attach each entry of *UCODE-DECLARATIONS* — a list of
;; (<address> <declaration> ...) — to the instruction it describes.
;; Out-of-range addresses are reported and skipped.
(defun digest-declarations ()
  (dolist (declarations *ucode-declarations*)
    (let ((address (first declarations)))
      ;; Bug fix: this used (> address (length ...)), which let an address
      ;; EQUAL to the array length through to an out-of-bounds ELT in
      ;; INSTRUCTION-AT (valid indices run 0 .. length-1).  Use >=.
      (if (>= address (length *instruction-array*))
          (format t "~&Declaration ~s ignored: address too high." declarations)
        (dolist (declaration (rest declarations))
          (associate-declaration-with-instruction address declaration))))))
;; Dispatch one declaration to its handler: MUST-AVOID and
;; SUSPEND-FLOW-TRACING attach directly; SAVES/RESTORES go through the
;; forward-scanning matchers; everything else attaches as (type . set).
;; NOTE(review): (instruction-at address) may be NIL for a declaration on a
;; hole in the microcode — that case would error here; confirm the
;; declaration file never does this.
(defun associate-declaration-with-instruction (address declaration)
  (case (declaration-type declaration)
    ((must-avoid suspend-flow-tracing) (push declaration (instruction-declarations (instruction-at address))))
    (saves (associate-saves-with-instruction address (declaration-info declaration)))
    (restores (associate-restores-with-instruction address (declaration-info declaration)))
    (otherwise (push (cons (first declaration) (list->set (rest declaration)))
                     (instruction-declarations (instruction-at address))))))
;; A test triplet bundles a predicate over an instruction with the actions
;; to run when it matches (win) or never matches within range (lose).
(defsubst make-test-triplet (predicate win-action lose-action)
  (list predicate win-action lose-action))
(defsynonym test-triplet-predicate first)
(defsynonym test-triplet-win-action second)
(defsynonym test-triplet-lose-action third)
;; Walk forward up to HOW-FAR-TO-SCAN instructions from START-ADDRESS;
;; fire each triplet's win-action on the first matching instruction, and
;; the lose-actions of any triplets still unmatched at the end.
(defun scan-forward-to-appropriate-instruction
       (start-address test-triplets &optional (how-far-to-scan *default-scanning-distance*))
  (dotimes (scan how-far-to-scan
            (when test-triplets (dolist (test-t test-triplets) (funcall (test-triplet-lose-action test-t)))))
    (let ((instruction (instruction-at (+ start-address scan)))
          (triplets-left '()))
      (dolist (test-t test-triplets)
        (if (funcall (test-triplet-predicate test-t) instruction)
            (funcall (test-triplet-win-action test-t) instruction)
          (push test-t triplets-left)))
      (unless triplets-left (return-from scan-forward-to-appropriate-instruction nil))
      (setq test-triplets triplets-left))))
;; Build a declaration-handler from three generators: TESTER makes the
;; match predicate (or NIL to ignore the entry), WIN-ACTION tags the
;; matched instruction, LOSE-ACTION reports a failed match.
(defun scanned-declaration (tester win-action lose-action)
  #'(lambda (start-address declaration-guts)
      (let (test-triplets)
        (dolist (decl declaration-guts)
          (let ((possible-predicate (apply tester decl)))
            (unless (null? possible-predicate)
              (push (make-test-triplet possible-predicate
                                       (apply win-action decl)
                                       (apply lose-action start-address decl))
                    test-triplets))))
        (scan-forward-to-appropriate-instruction start-address test-triplets))))
;; Predicate generator: matches an instruction that reads SOURCE and
;; writes DESTINATION.  If either register is unknown, barfs and returns
;; NIL.  (Note: the two forms after the consequent rely on the Zetalisp
;; multi-form else of IF.)
(defun tester (source destination barf-string)
  (if (and (register-exists? source)
           (register-exists? destination))
      #'(lambda (instruction)
          (and (set-element? source (instruction-sources instruction))
               (set-element? destination (instruction-destination instruction))))
    (format t barf-string source destination)
    nil))
;; Win-action generator: attach TAG (after ILLEGALITY-TEST) and narrow the
;; instruction's source/destination sets to the declared registers.
(defun tagger (source destination illegality-test tag)
  #'(lambda (instruction)
      (funcall illegality-test instruction)
      (pushnew (list tag) (instruction-declarations instruction))
      (setf (instruction-sources instruction) (make-set source))
      (setf (instruction-destination instruction) (make-set destination))))
;; Lose-action generator: a thunk that reports the unmatched declaration.
(defun loser (barf-string source destination location)
  #'(lambda ()
      (format t barf-string source destination (instruction-address (instruction-at location)))))
(defun might-instruction-save? (from-register into-place)
  (tester from-register into-place "~&Ignoring save ~S into ~S."))
;; Win-action for a SAVES declaration: tag the matched instruction as the
;; save of FROM into INTO (see TAGGER), complaining if it is already
;; tagged as a restore.
(defun tag-as-save-instruction (from into)
  (tagger from into
          #'(lambda (instruction)
              (when (instruction-restores? instruction)
                ;; Bug fix: the ~S directive had no corresponding argument,
                ;; so this warning would error (or print garbage) when it
                ;; fired.  Supply the offending instruction.
                (format t "Invalid declaration: save and restore in ~S"
                        instruction)))
          *saves-tag*))
;; Lose-action for SAVES: report a save declaration we never matched.
(defun losing-save (where from into)
  (loser "~&Cannot match save ~S into ~S at ~S." from into where))
;; Handler invoked by ASSOCIATE-DECLARATION-WITH-INSTRUCTION for SAVES.
(deff associate-saves-with-instruction
     (scanned-declaration #'might-instruction-save?
                          #'tag-as-save-instruction
                          #'losing-save))
;; RESTORES declarations are written (register place), so the from/into
;; order is swapped relative to SAVES.
(defun might-instruction-restore? (register from-register)
  (tester from-register register "~&Ignoring restore from ~S into ~S."))
;; Win-action for a RESTORES declaration: tag the matched instruction as
;; the restore of INTO from FROM, complaining if it is already tagged as
;; a save.
(defun tag-as-restore-instruction (into from)
  (tagger from into
          #'(lambda (instruction)
              (when (instruction-saves? instruction)
                ;; Bug fix: the ~S directive had no corresponding argument,
                ;; so this warning would error (or print garbage) when it
                ;; fired.  Supply the offending instruction.
                (format t "Invalid declaration: save and restore in ~S"
                        instruction)))
          *restores-tag*))
;; Lose-action for RESTORES: report a restore declaration we never matched.
(defun losing-restore (where into from)
  (loser "~&Cannot match restore from ~S into ~S at ~S." from into where))
;; Handler invoked by ASSOCIATE-DECLARATION-WITH-INSTRUCTION for RESTORES.
(deff associate-restores-with-instruction
     (scanned-declaration #'might-instruction-restore?
                          #'tag-as-restore-instruction
                          #'losing-restore))
(defvar *losing-successors* '() "Holds list of instruction addresses.
Instructions at these addresses have successors that do not exist.")
;; Fill every instruction's SUCCESSORS slot from its decoded fields.
(defun compute-successors ()
  (for-each-instruction
    #'(lambda (instruction index)
        (setf (instruction-successors instruction)
              (compute-instruction-successors instruction index)))))
;;; Uncomment these (and comment the macros) in order to check for the
;;; validity of successors.
;(defun verified-instruction (address referenced-by)
; (let ((instruction (instruction-at address))
; (reference-symbolic-address (instruction-address (instruction-at referenced-by))))
; #'(lambda (message)
; (when (null? instruction)
; (pushnew reference-symbolic-address *losing-successors* :test #'equal?)
; (fresh-line)
; (format t message '())
; (format t " at ~S " address)
; (format t " referenced by instruction at ~S " reference-symbolic-address))
; instruction)))
;(defun check-verification (instruction-or-null message)
; (if (null? instruction-or-null)
; `()
; (funcall instruction-or-null message)))
;; Verification disabled: these macros are the no-op versions of the
;; commented-out checking definitions above.
(defmacro verified-instruction (address ignore-referenced-by)
  ignore-referenced-by
  `(INSTRUCTION-AT ,address))
(defmacro check-verification (instruction ignore-message)
  ignore-message
  instruction)
;;; format:
;;; (CALLS <instruction-called> <instruction-xct-along-the-way> <instruction-returned-to>)
( JUMP - XCT - NEXT < instruction - jumped - to > < instruction - xct - along - the - way > )
;;; (RETURNS () <instruction-xct-along-the-way>)
;; Accessors into the tagged successor lists described just above.
;; NOTE(review): the bare (JUMP-XCT-NEXT ...) line above was a ;;; comment
;; whose semicolons were lost in extraction.
(defsynonym instruction-xfered-to second)
(defsynonym instruction-xct-along-the-way third)
(defsynonym instruction-returned-to fourth)
;; Constructors for the various successor shapes.  With verification off,
;; the "normal"/"jump"/"dispatch" ones are identity; the rest build the
;; tagged lists.
(defun make-normal-successor (instruction)
  (check-verification instruction "Missing instruction in stream"))
(defun make-jump-successor (instruction)
  (check-verification instruction "Jump to missing instruction"))
(defun make-dispatch-jump-successor (instruction)
  (check-verification instruction "Dispatch jump to missing instruction"))
(defun make-dispatch-fall-through-successor (instruction)
  (check-verification instruction "Dispatch fall through to missing instruction"))
(defun make-dispatch-skip-successor (instruction)
  (check-verification instruction "Dispatch skip to missing instruction"))
(defun make-call-successor (instruction-called instruction-xct-next instruction-returned-to)
  `(CALLS ,(check-verification instruction-called "Call to missing instruction")
          ,(check-verification instruction-xct-next "Missing instruction xct-next'd during call")
          ,(check-verification instruction-returned-to "Return to missing instruction")))
(defun make-return-successor (instruction-returned-to instruction-xct-next)
  `(RETURNS ,(check-verification instruction-returned-to "Return to missing instruction")
            ,(check-verification instruction-xct-next "Missing instruction xct-next'd during return")))
(defun make-jump-xct-next-successor (instruction-xfered-to instruction-xct-next)
  `(JUMP-XCT-NEXT ,(check-verification instruction-xfered-to "Jump to missing instruction")
                  ,(check-verification instruction-xct-next "Missing instruction xct-next'd during jump")))
;; Curried recognizer for tagged successor lists.
(defun list-successor-type? (type)
  #'(lambda (successor)
      (and (list? successor)
           (eq? (first successor) type))))
(deff call-successor? (list-successor-type? 'calls))
(deff returns-successor? (list-successor-type? 'returns))
(deff jump-xct-next-successor? (list-successor-type? 'jump-xct-next))
;; A plain (untagged) successor is just the next instruction itself.
(defsynonym next-instruction-successor? instruction?)
;; Compute the list of successors for one instruction at IN-ADDRESS.
;; ALU/byte/no-op instructions either fall through or (with
;; popj-after-next) return; jumps and dispatches are handled by their own
;; routines below.
(defun compute-instruction-successors (instruction in-address)
  (let ((numeric-form (instruction-numeric-form instruction))
        (popj-after-next (instruction-popj-after-next instruction))
        (next-instruction (verified-instruction (1+ in-address) in-address))
        (after-next-instruction (verified-instruction (+ 2 in-address) in-address)))
    (case (instruction-opcode-type instruction)
      ((no-op alu-op byte-op) (if popj-after-next
                                  (list (make-return-successor '() next-instruction))
                                (list (make-normal-successor next-instruction))))
      (jump-op (compute-jump-successors popj-after-next next-instruction after-next-instruction
                                        (verified-instruction (extract-jump-address numeric-form) in-address)
                                        (extract-jump-condition numeric-form)
                                        (extract-jump-rpn-bits numeric-form)))
      (dispatch-op (compute-dispatch-successors in-address popj-after-next
                                                instruction next-instruction after-next-instruction
                                                (extract-dispatch-push-own-address numeric-form)
                                                (extract-dispatch-base-address numeric-form)
                                                (extract-dispatch-bits numeric-form)))
      )))
;; Successors of a jump instruction: one successor per place control can
;; go, including the fall-through when the condition is not UNCONDITIONAL.
;; POPJ-AFTER-NEXT is only legal combined with the plain RETURN form.
(defun compute-jump-successors (popj-after-next next-instruction after-next-instruction to-address condition rpn-bits)
  (let ((ans '()))
    (labels (
             (cannot-popj-after-next ()
               (when popj-after-next
                 (ferror nil "Popj-after-next combined with ~S" rpn-bits)))
             (can-fall-through ()
               (unless (unconditional? condition)
                 (can-go-to (make-normal-successor next-instruction))))
             (can-go-to (where)
               (push where ans)))
      (case rpn-bits
        (jump-xct-next
         (cannot-popj-after-next)
         (can-go-to (make-jump-xct-next-successor to-address next-instruction))
         (can-fall-through))
        (jump
         (cannot-popj-after-next)
         (can-go-to (make-jump-successor to-address))
         (can-fall-through))
        (call-xct-next
         (cannot-popj-after-next)
         (can-go-to (make-call-successor to-address next-instruction after-next-instruction))
         (can-fall-through))
        (call
         (cannot-popj-after-next)
         (can-go-to (make-call-successor to-address '() next-instruction))
         (can-fall-through))
        (return-xct-next
         (cannot-popj-after-next)
         (can-go-to (make-return-successor '() next-instruction))
         (can-fall-through))
        (return
         (if popj-after-next
             (can-go-to (make-return-successor '() next-instruction))
           (can-go-to (make-return-successor '() '())))
         (unless (unconditional? condition)
           (can-fall-through)))
        (illegal-rpn
         (ferror nil "Illegal rpn bits in jump"))))
    ans))
;; Successors of a dispatch: enumerate all 2^BITS dispatch-table entries
;; (stored in *A-MEM-ARRAY* at BASE-ADDRESS) and collect the distinct
;; places each entry's RPN bits can send control.  PUSH-OWN-ADDRESS?
;; shifts the return addresses back by one instruction.
(defun compute-dispatch-successors (dispatched-from-location popj-after-next
                                    instruction next-instruction after-next-instruction
                                    push-own-address? base-address bits)
  dispatched-from-location ;;This is not used if verification is off.
  (let ((number-of-dispatch-options (expt 2 bits))
        (return-address (if push-own-address? instruction next-instruction))
        (return-address-if-xct-next (if push-own-address? next-instruction after-next-instruction))
        (ans '()))
    (labels (
             (can-go-to (where)
               (pushnew where ans :test #'equal?)))
      (dotimes (option number-of-dispatch-options)
        (let* ((dispatch-entry (elt *a-mem-array* (+ base-address option)))
               (rpn-bits (extract-dispatch-rpn-bits dispatch-entry))
               (dispatch-address
                 (verified-instruction (extract-dispatch-address-from-entry dispatch-entry)
                                       dispatched-from-location)))
          (can-go-to
            (case rpn-bits
              (jump-xct-next (make-jump-xct-next-successor dispatch-address next-instruction))
              (jump (make-dispatch-jump-successor dispatch-address))
              (call-xct-next (make-call-successor dispatch-address return-address return-address-if-xct-next))
              (call (make-call-successor dispatch-address '() return-address))
              (return-xct-next (make-return-successor '() next-instruction))
              (return (make-return-successor '() '()))
              (fall-through (if popj-after-next
                                (make-return-successor '() next-instruction)
                              (make-dispatch-fall-through-successor next-instruction)))
              (skip (if popj-after-next
                        (make-return-successor '() '()) ;; Is this right?
                      (make-dispatch-skip-successor after-next-instruction)))))
          )))
    ans))
;; Invert the successor graph: for every way control can reach an
;; instruction, push a tagged predecessor — (CALLED ...), (XCT-NEXTED ...),
;; (RETURNED-TO ...), (JUMP-XCT-NEXTED ...) — or the bare predecessor
;; instruction for plain fall-through, onto the target's PREDECESSORS.
(defun compute-predecessors ()
  (for-each-instruction
    #'(lambda (instruction address)
        address ;; is not needed here.
        (dolist (successor (instruction-successors instruction))
          (unless (null? successor)
            (labels (
                     (preceeds (predecessor successor)
                       (unless (null? successor)
                         (push predecessor (instruction-predecessors successor))))
                     )
              (if (instruction? successor)
                  (preceeds instruction successor)
                (case (first successor)
                  (calls (preceeds `(CALLED ,instruction
                                            ,(instruction-xct-along-the-way successor)
                                            ,(instruction-returned-to successor))
                                   (instruction-xfered-to successor))
                         (preceeds `(XCT-NEXTED ,instruction) (instruction-xct-along-the-way successor))
                         (preceeds `(RETURNED-TO ,instruction) (instruction-returned-to successor)))
                  (jump-xct-next (preceeds `(JUMP-XCT-NEXTED ,instruction
                                                             ,(instruction-xct-along-the-way successor))
                                           (instruction-xfered-to successor))
                                 (preceeds `(XCT-NEXTED ,instruction) (instruction-xct-along-the-way successor)))
                  (returns (preceeds `(XCT-NEXTED ,instruction) (instruction-xct-along-the-way successor)))))))))))
(defun for-instruction-successors (predicate operation)
  "Apply OPERATION to every successor, of every instruction, satisfying PREDICATE."
  (for-each-instruction
    #'(lambda (instruction address)
        address ;; ignored; FOR-EACH-INSTRUCTION always supplies it.
        (mapc #'(lambda (successor)
                  (when (funcall predicate successor)
                    (funcall operation successor)))
              (instruction-successors instruction)))))
(defun find-subroutine-entries ()
  "Recompute *SUBROUTINE-ENTRIES*: the set of instructions that are the
target of some CALL-type successor anywhere in the microcode."
  (setq *subroutine-entries* '())
  (for-instruction-successors
    #'call-successor?
    #'(lambda (successor)
        (pushnew (instruction-xfered-to successor) *subroutine-entries*))))
(defun find-instructions-without-predecessors ()
  "Recompute *INSTRUCTIONS-WITHOUT-PREDECESSORS*: instructions that no
other instruction transfers to, excluding the explicitly exempted
addresses in *DONT-COUNT-THESE-INSTRUCTIONS-IN-THOSE-THAT-HAVE-NO-PREDECESSORS*."
  (setq *instructions-without-predecessors* '())
  (for-each-instruction
    #'(lambda (instruction address)
        address ;; ignored
        (when (and (null? (instruction-predecessors instruction))
                   (not (member (instruction-address instruction)
                                *dont-count-these-instructions-in-those-that-have-no-predecessors*)))
          (push instruction *instructions-without-predecessors*)))))
(defun find-basic-blocks ()
  "Recompute *BASIC-BLOCK-INFO*.  The result has the shape
\(<microcode-version> (<list of basic-block start addresses>))."
  (setq *basic-block-info* '())
  ;; CHECK-BASIC-BLOCKNESS already takes (instruction address), exactly
  ;; the arguments FOR-EACH-INSTRUCTION supplies, so pass it directly.
  (for-each-instruction #'check-basic-blockness)
  (setq *basic-block-info* (cons *trace-lmc-version-number* (list *basic-block-info*))))
;;; Doesn't seem to be any reasonable way to do this.
;;; Decide whether the instruction at ADDRESS begins a basic block and, if
;;; so, push ADDRESS onto *BASIC-BLOCK-INFO*.  An instruction starts a
;;; block when it has zero or several predecessors, or when its single
;;; predecessor can itself transfer to more than one place.
(defun check-basic-blockness (instruction address)
  (let ((predecessors (instruction-predecessors instruction)))
    (labels (
             (is-block ()
               (push address *basic-block-info*))
             (isnt-block () (values)))
      (if (or (null? predecessors)        ;; no known way in, or
              (cdr predecessors))         ;; control joins from several places
          (is-block)
          ;; Exactly one predecessor: classify it.
          (let ((p (car predecessors)))
            (if (list? p)
                ;; Tagged record from COMPUTE-PREDECESSORS:
                ;; (CALLED ...), (RETURNED-TO ...), (JUMP-XCT-NEXTED ...)
                ;; or (XCT-NEXTED ...).
                (ecase (car p)
                  (called (is-block))
                  ;; this is wrong, but probably what is wanted.
                  (returned-to (is-block))
                  ((jump-xct-nexted xct-nexted)
                   ;; (cadr p) is the instruction that xct-nexted us; we
                   ;; extend its block only if we are its sole successor.
                   (if (null? (cdr (instruction-successors (cadr p))))
                       (isnt-block)
                       (is-block))))
                ;; Plain instruction predecessor: a new block only if that
                ;; predecessor has more than one successor.
                (if (null? (cdr (instruction-successors p)))
                    (isnt-block)
                    (is-block))))))))
| null | https://raw.githubusercontent.com/jrm-code-project/LISP-Machine/0a448d27f40761fafabe5775ffc550637be537b2/lambda/jrm/udasm.lisp | lisp | Package : LAMBDA ; : CL ; Base:8 -*-
Does this look ugly? Use a landscape.
following forms are meaningful:
(args <reg1> <reg2> ..) ;this subroutine takes args in these regs.
(values <reg1> <reg2> ..) ;this subroutine returns values here
(clobbers <reg1> <reg2> ..);clobbers or may clobber these. these serve a documentation
; purpose and speeds up the micro-tracer. However, it is
; capable of figuring this out for itself.
(local <reg1> ...) ;holds state in these vars, error if caller depends on them.
; a-fake-microsecond-clock, for example.
(must-avoid <symbol>) ;this subroutine, ones it calls, etc must never get to <symbol>.
; intended mainly for (must-avoid INTR).
(saves (<reg> <place> ..) ;place may be register or NIL if too complicated
; error if store it twice in any path and place is NON-NIL.
(restores (<reg> <place> ..) ;restores from last saved.
(suspend-flow-tracing) ;dont look past here, leaving environment of interest never to return
This is where everything kicks off. If you do this by hand, be sure to follow
the order of things. Many things are not idempotent and most must be done in the
order shown.
will load latest version if t.
disk
When last symbol is close enough,
number relative to it.
Otherwise just use the number.
Registers
Setup universe for source/clobbers sets.
We assume that lower number registers are used more often.
This assumption is implementation dependant, but will only cause speed lossage
if it is not true.
See beginning of file.
Instructions.
ldb lossage?
Slow extractors
(defun extractor (field)
#'(lambda (fixinst)
(ldb field fixinst)))
(defun extract-flag (field)
#'(lambda (fixinst)
(= 1 (funcall (extractor field) fixinst))))
(defun extract-symbolic (field symbol-map)
(let ((lookup (appropriate-lookup symbol-map)))
#'(lambda (fixinst)
(funcall lookup (funcall (extractor field) fixinst)))))
(defun appropriate-lookup (alist-or-vector)
(if (list? alist-or-vector)
#'(lambda (value) (aref alist-or-vector value))))
Fast extractors
This makes the constructor a macro.
Fresh from the binary file
punctuation!
,(instruction-numeric-form instruction)
Declarations
We play a little fast and loose with abstraction here, so watch out!
Uncomment these (and comment the macros) in order to check for the
validity of successors.
(defun verified-instruction (address referenced-by)
(let ((instruction (instruction-at address))
(reference-symbolic-address (instruction-address (instruction-at referenced-by))))
#'(lambda (message)
(when (null? instruction)
(pushnew reference-symbolic-address *losing-successors* :test #'equal?)
(fresh-line)
(format t message '())
(format t " at ~S " address)
(format t " referenced by instruction at ~S " reference-symbolic-address))
instruction)))
(defun check-verification (instruction-or-null message)
(if (null? instruction-or-null)
`()
(funcall instruction-or-null message)))
format:
(CALLS <instruction-called> <instruction-xct-along-the-way> <instruction-returned-to>)
(RETURNS () <instruction-xct-along-the-way>)
This is not used if verification is off.
Is this right?
is not needed here.
is ignored
is ignored
Doesn't seem to be any reasonable way to do this.
this is wrong, but probably what is wanted. |
(defconstant *dont-count-these-instructions-in-those-that-have-no-predecessors*
'(HIGHEST-KERNAL-UCODE-LOCATION)
"Anything on this list is not included in the *instructions-without-predecessors*.")
(defparameter *default-scanning-distance* 10. "Declarations can be this far ahead of applicable instruction.")
(defvar *i-mem-array*)
(defvar *a-mem-array*)
(defvar *main-mem-array*)
(defvar *mid-array*)
(defvar *lmc-pathname*)
(defvar *trace-lmc-version-number*)
(defvar *trap-upc*)
(defvar *illop-upc*)
(defvar *highest-kernal-ucode-location+1*)
(defvar *i-mem-symbols-array* (make-sequence 'vector 50000.))
(defvar *a-mem-symbols-array* (make-sequence 'vector 1024.))
(defvar *register-names* (make-sequence 'vector *number-of-registers*))
(defvar *registers* (make-hash-table :size *number-of-registers*))
(defvar *register-set-universe* (make-universe *number-of-registers*))
(defvar *m-mem-registers-set*)
(defvar *constant-registers-set*)
(defvar *global-registers-set*)
(defvar *ucode-declarations*)
a list ( < ucode - locn > < args to DECLARE at ucode assembly time )
(defvar *subroutine-entries*)
(defvar *instructions-without-predecessors*)
(defvar *basic-block-info*)
Diagnostic :
(defun i-want-to-lose (&key (how-much? :really-big))
"Run the microcode disassembler until you lose."
how-much?
(format t "~%Anything you say...")
(do ((count 0 (1+ count)))
((logic-has-fallen?) "Logic has fallen")
(format t "~%~%Attempting to lose on pass ~d." count)
(disassemble-microcode)))
(readin-files filename)
(initial-setup)
(compute-instructions)
(find-subroutine-entries)
(find-instructions-without-predecessors)
)
(defun readin-files (filename)
(multiple-value-setq (*i-mem-array* *a-mem-array*
*main-mem-array* *mid-array*
*lmc-pathname* *trace-lmc-version-number*)
(si:load-i-mem-symbols-if-necessary *trace-lmc-version-number* *lmc-pathname*)
(readin-declarations *trace-lmc-version-number* *lmc-pathname*))
(defun readin-declarations (&optional (vn *trace-lmc-version-number*)
(filename *lmc-pathname*))
(setf filename (funcall filename :new-type-and-version "lmc-dcl" vn))
(with-open-file (f filename)
(let* ((ibase 8)
(base 8)
(s-exp (read f)))
flush ( SETQ MICRO - DECLARATION - TABLE ( quote .. ) )
(defun initial-setup ()
(setup-i-mem-symbols-array)
(setup-a-mem-symbols-array)
(setq *illop-upc* (i-mem-symbol->address "ILLOP")
*trap-upc* (i-mem-symbol->address "TRAP"))
(setup-register-info)
)
(defun compute-instructions ()
(digest-i-mem-array)
(digest-declarations)
(compute-successors)
(compute-predecessors)
)
(defun setup-i-mem-symbols-array ()
(fill *i-mem-symbols-array* '())
(insert-i-mem-labels)
(insert-relative-i-mem-labels)
)
(defun insert-i-mem-labels ()
(let ((hash-table (cadr (assoc *trace-lmc-version-number* si:*i-mem-symbol-tables*))))
(maphash #'(lambda (symbol symbol-value)
(when (array-in-bounds? *i-mem-symbols-array* symbol-value)
(setf (elt *i-mem-symbols-array* symbol-value) symbol)))
hash-table)))
(defun insert-relative-i-mem-labels ()
(let (last-symbol last-symbol-value)
(for-elements-in-vector
*i-mem-symbols-array*
#'(lambda (address entry)
(if entry
(setq last-symbol entry
last-symbol-value address)
(setf (elt *i-mem-symbols-array* address)
(list last-symbol (- address last-symbol-value))
(defun address->i-mem-symbol (address)
(when address (elt *i-mem-symbols-array* address)))
(defun i-mem-symbol->address (symbol)
(si:i-mem-symeval symbol *trace-lmc-version-number*))
(defun setup-a-mem-symbols-array ()
(dolist (symbol-address-pair (third (assoc *trace-lmc-version-number* si:*i-mem-symbol-tables*)))
(setf (elt *a-mem-symbols-array* (first symbol-address-pair)) (second symbol-address-pair))))
(defun address->a-mem-symbol (address)
(when (array-in-bounds? *a-mem-symbols-array* address)
(elt *a-mem-symbols-array* address)))
(defun setup-register-info ()
(fill *register-names* '())
(let* ((symbol-table-list (assoc *trace-lmc-version-number* si:*i-mem-symbol-tables*))
(register-alist (third symbol-table-list))
(info-list (fourth symbol-table-list)))
(setup-register-names register-alist)
(check-a-mem-consistancy)
(setup-register-set-universe)
(setup-constants-set (get-from-alternating-list info-list 'a-constant-list))
(setup-globals-set)
(setup-m-mem-set)))
(defun setup-register-names (register-alist)
(dolist (name-register-pairs register-alist)
(let ((register-number (first name-register-pairs))
(register-name (second name-register-pairs)))
(when (and (number? register-number)
(array-in-bounds? *register-names* register-number))
(setf (elt *register-names* register-number) register-name)))))
(defun check-a-mem-consistancy ()
"This just makes a-mem-symbol-array and register-names consistant"
(for-elements-in-vector
*a-mem-symbols-array*
#'(lambda (index a-mem-name)
(let ((register-name (register-number->symbol index)))
(if a-mem-name
(if register-name
(unless (eq? a-mem-name register-name)
(ferror nil "Name mismatch between ~S in a-mem and ~S register at ~D"
a-mem-name register-name index))
(setf (elt *register-names* index) a-mem-name))
(if register-name
(setf (elt *a-mem-symbols-array* index) register-name)
(let ((name (intern (format nil "REGISTER-~D" index))))
(setf (elt *register-names* index) name)
(setf (elt *a-mem-symbols-array* index) name))))))))
(defun setup-register-set-universe ()
(for-elements-in-vector *register-names*
(lambda (ignore-index name)
ignore-index
(element->number-inserting name *register-set-universe*)))
(setq *default-universe* *register-set-universe*))
(defun setup-constants-set (assembly-constant-list)
(setq *constant-registers-set* (list->set *constant-registers*))
(dolist (constant-register-pair assembly-constant-list)
(let ((name (register-number->symbol (second constant-register-pair))))
(unless (null? name)
(set-push! name *constant-registers-set*))))
(set-push! (register-number->symbol register) *constant-registers-set*)))
(defun setup-globals-set ()
(setq *global-registers-set* (list->set *global-registers*)))
(defun setup-m-mem-set ()
(setq *m-mem-registers-set*
(list->set
(sequence->list
(subseq *a-mem-symbols-array* *m-mem-low-bound* *m-mem-high-bound*)))))
(defun symbol->register-number (symbol)
(si:register-symeval symbol *trace-lmc-version-number*))
(defun register-number->symbol (number)
(elt *register-names* number))
(defun register-exists? (register-name)
(not (null? (symbol->register-number register-name))))
(defun constant-register? (register)
(set-element? register *constant-registers-set*))
(defconstant-now *dispatch-address-mask* 177777)
(defun valid-instruction? (instruction)
(not (null? instruction)))
(defun no-op? (instruction)
(zero? (logand instruction *instruction-mask*)))
# ' ( lambda ( value ) ( second ( assoc value alist - or - vector ) ) )
(defmacro defextractor (name field)
`(DEFMACRO ,name (FIXINST) `(LDB ,,field ,fixinst)))
(defmacro defflag-extractor (name field)
`(DEFMACRO ,name (FIXINST) `(= 1 (LDB ,,field ,fixinst))))
(defmacro defsymbolic-extractor (name field symbol-table)
`(DEFMACRO ,name (FIXINST)
(IF (LIST? ,symbol-table)
`(SECOND (ASSOC (LDB ,,field ,fixinst) (QUOTE ,,symbol-table) :TEST #'=))
`(ELT ,,symbol-table (LDB ,,field ,fixinst)))))
(defconstant-now opcode-map (vector 'alu-op 'byte-op 'jump-op 'dispatch-op))
(defeq-test alu-op? 'alu-op)
(defeq-test byte-op? 'byte-op)
(defeq-test jump-op? 'jump-op)
(defeq-test dispatch-op? 'dispatch-op)
(defsymbolic-extractor raw-opcode-extractor lam-ir-op opcode-map)
(defun extract-opcode (instruction)
(if (no-op? instruction)
'no-op
(raw-opcode-extractor instruction)))
(defflag-extractor extract-popj-after-next lam-ir-popj-after-next)
(defconstant-now jump-condition-map
`((,lam-jump-cond-bit-set BIT-SET)
(,lam-jump-cond-bit-clear BIT-CLEAR)
(,lam-jump-cond-m<a M<A)
(,lam-jump-cond-m<=a M<=A)
(,lam-jump-cond-m=a M=A)
(,lam-jump-cond-page-fault PAGE-FAULT)
(,lam-jump-cond-page-fault-or-interrupt PAGE-FAULT-OR-INTERRUPT)
(,lam-jump-cond-page-fault-or-interrupt-or-sequence-break
PAGE-FAULT-OR-INTERRUPT-OR-SEQUENCE-BREAK)
(,lam-jump-cond-unc UNCONDITIONAL)
(,lam-jump-cond-m>=a M>=A)
(,lam-jump-cond-m>a M>A)
(,lam-jump-cond-m-neq-a M-NEQ-A)
(,lam-jump-cond-no-page-fault NO-PAGE-FAULT)
(,lam-jump-cond-data-type-not-equal DATA-TYPE-NOT-EQUAL)
(,lam-jump-cond-data-type-equal DATA-TYPE-EQUAL)))
(defeq-test unconditional? 'unconditional)
(defconstant-now jump-rpn-map
(vector 'jump-xct-next
'jump
'call-xct-next
'call
'return-xct-next
'return
'illegal-rpn
'illegal-rpn))
(defsymbolic-extractor extract-jump-condition lam-ir-jump-cond jump-condition-map)
(defsymbolic-extractor extract-jump-rpn-bits lam-ir-rpn jump-rpn-map)
(defextractor extract-jump-address lam-ir-jump-addr)
(defconstant-now dispatch-rpn-map
(vector 'jump-xct-next
'jump
'call-xct-next
'call
'return-xct-next
'return
'fall-through
'skip))
(defflag-extractor extract-dispatch-push-own-address lam-ir-disp-lpc)
(defextractor extract-dispatch-base-address lam-ir-disp-addr)
(defextractor extract-dispatch-bits lam-ir-disp-bytl)
(defsymbolic-extractor extract-dispatch-rpn-bits lam-disp-rpn-bits dispatch-rpn-map)
(defmacro extract-dispatch-address-from-entry (entry)
`(LOGAND ,entry ,*dispatch-address-mask*))
(defextractor extract-a-source lam-ir-a-src)
(defextractor extract-m-source-address lam-ir-m-src-adr)
(defflag-extractor extract-functional-source-flag lam-ir-func-src-flag)
(defflag-extractor extract-a-memory-destination-flag lam-ir-a-mem-dest-flag)
(defextractor extract-a-memory-destination lam-ir-a-mem-dest)
(defextractor extract-m-memory-destination lam-ir-m-mem-dest)
(defun extract-instruction-sources (fixinst)
(let ((opcode (extract-opcode fixinst))
(a-source (extract-a-source fixinst))
(m-source-address (extract-m-source-address fixinst))
(functional-source-flag (extract-functional-source-flag fixinst))
(ans (make-empty-set)))
(unless (dispatch-op? opcode)
(set-push! (address->a-mem-symbol a-source) ans))
(when (null? functional-source-flag)
(set-push! (address->a-mem-symbol m-source-address) ans))
ans))
(defun extract-instruction-destination (fixinst)
(let ((opcode (extract-opcode fixinst)))
(if (or (alu-op? opcode)
(byte-op? opcode))
(let ((address (if (extract-a-memory-destination-flag fixinst)
(extract-a-memory-destination fixinst)
(extract-m-memory-destination fixinst))))
(make-set (address->a-mem-symbol address)))
(make-empty-set))))
(defstruct (instruction (:constructor make-instruction-internal)
(:print-function print-instruction)
)
address
(declarations '())
alu - op , byte - op , jump - op , dispatch - op
popj-after-next
sources
destination
successors
predecessors
calling-subroutine
trace-info
trace-warnings
)
(defsynonym instruction? instruction-p)
(defalike #'instruction? #'eq?)
(defun print-instruction (instruction stream level)
(if (and *print-level* (> level *print-level*))
(format stream " # ")
(format stream "#<Instruction: ~S>"
,(instruction-opcode-type instruction)
,@(when (jump-op? (instruction-opcode-type instruction))
(list (extract-jump-condition (instruction-numeric-form instruction))))
,@(when (instruction-popj-after-next instruction) '(POPJ-AFTER-NEXT))
(SUCCESSORS ,@(map 'list #'names-of-successors (instruction-successors instruction)))
,@(instruction-declarations instruction))
)))
(defun names-of-successors (successor)
(labels (
(maybe-successor-name (instruction)
(when (instruction? instruction)
(instruction-address instruction)))
)
(if (instruction? successor)
(instruction-address successor)
(cons (first successor)
(map 'list #'maybe-successor-name (rest successor))))))
(defvar *instruction-array* (make-sequence 'vector 50000 :initial-element '()))
(defun instruction-at (address)
(elt *instruction-array* address))
(defsetf instruction-at (address) (new-value)
`(SETF (ELT *INSTRUCTION-ARRAY* ,address) ,new-value))
(defun for-each-instruction (operation)
(for-elements-in-vector
*instruction-array*
#'(lambda (address possible-instruction)
(when possible-instruction
(funcall operation possible-instruction address)))))
(defun make-instruction (fixinst address)
(make-instruction-internal
:numeric-form fixinst
:address address
:opcode-type (extract-opcode fixinst)
:popj-after-next (extract-popj-after-next fixinst)
:sources (extract-instruction-sources fixinst)
:destination (extract-instruction-destination fixinst)
))
(defun flush-field (field-selector)
"Takes a symbol specifying a field-selector and removes this field from all
instructions. Must be passed a symbol because of setf lossage (see this code)."
* Sigh * You ca n't pass a selector to setf !
(let ((flusher
(compile-lambda `(LAMBDA (INSTRUCTION)
(WHEN INSTRUCTION
(SETF (,field-selector INSTRUCTION) '()))))))
(map '() flusher *instruction-array*)))
(defun digest-i-mem-array ()
(for-elements-in-vector
*i-mem-array*
#'(lambda (index fixinst)
(when (null? fixinst) (return-from digest-i-mem-array nil))
(when (valid-instruction? fixinst)
(setf (instruction-at index)
(make-instruction fixinst (address->i-mem-symbol index)))))))
(defconstant *set-declarations* '(args values clobbers local))
(defconstant *saves-tag* 'saves)
(defconstant *restores-tag* 'restores)
(defsynonym declaration-type car)
(defsynonym declaration-info cdr)
(defun declaration-type? (type)
#'(lambda (declaration)
(eq? (declaration-type declaration) type)))
(defun spread-declarations (instruction &rest decls-return-map)
(let ((decls (instruction-declarations instruction)))
(labels (
(find-decl (type)
(or (cdr (assoc type decls))
(when (member type *set-declarations*)
(make-empty-set))))
)
(values-list
(map 'list #'find-decl decls-return-map)))))
(deff saves-declaration? (declaration-type? *saves-tag*))
(deff restores-declaration? (declaration-type? *restores-tag*))
(defun suspend-flow-tracing? (instruction)
(assoc 'suspend-flow-tracing (instruction-declarations instruction)))
(defun instruction-saves? (instruction)
(assoc *saves-tag* (instruction-declarations instruction)))
(defun instruction-restores? (instruction)
(assoc *restores-tag* (instruction-declarations instruction)))
(defun digest-declarations ()
(dolist (declarations *ucode-declarations*)
(let ((address (first declarations)))
(if (> address (length *instruction-array*))
(format t "~&Declaration ~s ignored: address too high." declarations)
(dolist (declaration (rest declarations))
(associate-declaration-with-instruction address declaration))))))
(defun associate-declaration-with-instruction (address declaration)
(case (declaration-type declaration)
((must-avoid suspend-flow-tracing) (push declaration (instruction-declarations (instruction-at address))))
(saves (associate-saves-with-instruction address (declaration-info declaration)))
(restores (associate-restores-with-instruction address (declaration-info declaration)))
(otherwise (push (cons (first declaration) (list->set (rest declaration)))
(instruction-declarations (instruction-at address))))))
(defsubst make-test-triplet (predicate win-action lose-action)
(list predicate win-action lose-action))
(defsynonym test-triplet-predicate first)
(defsynonym test-triplet-win-action second)
(defsynonym test-triplet-lose-action third)
(defun scan-forward-to-appropriate-instruction
(start-address test-triplets &optional (how-far-to-scan *default-scanning-distance*))
(dotimes (scan how-far-to-scan
(when test-triplets (dolist (test-t test-triplets) (funcall (test-triplet-lose-action test-t)))))
(let ((instruction (instruction-at (+ start-address scan)))
(triplets-left '()))
(dolist (test-t test-triplets)
(if (funcall (test-triplet-predicate test-t) instruction)
(funcall (test-triplet-win-action test-t) instruction)
(push test-t triplets-left)))
(unless triplets-left (return-from scan-forward-to-appropriate-instruction nil))
(setq test-triplets triplets-left))))
(defun scanned-declaration (tester win-action lose-action)
#'(lambda (start-address declaration-guts)
(let (test-triplets)
(dolist (decl declaration-guts)
(let ((possible-predicate (apply tester decl)))
(unless (null? possible-predicate)
(push (make-test-triplet possible-predicate
(apply win-action decl)
(apply lose-action start-address decl))
test-triplets))))
(scan-forward-to-appropriate-instruction start-address test-triplets))))
(defun tester (source destination barf-string)
(if (and (register-exists? source)
(register-exists? destination))
#'(lambda (instruction)
(and (set-element? source (instruction-sources instruction))
(set-element? destination (instruction-destination instruction))))
(format t barf-string source destination)
nil))
(defun tagger (source destination illegality-test tag)
#'(lambda (instruction)
(funcall illegality-test instruction)
(pushnew (list tag) (instruction-declarations instruction))
(setf (instruction-sources instruction) (make-set source))
(setf (instruction-destination instruction) (make-set destination))))
(defun loser (barf-string source destination location)
#'(lambda ()
(format t barf-string source destination (instruction-address (instruction-at location)))))
(defun might-instruction-save? (from-register into-place)
(tester from-register into-place "~&Ignoring save ~S into ~S."))
(defun tag-as-save-instruction (from into)
(tagger from into
#'(lambda (instruction)
(when (instruction-restores? instruction)
(format t "Invalid declaration: save and restore in ~S")))
*saves-tag*))
(defun losing-save (where from into)
(loser "~&Cannot match save ~S into ~S at ~S." from into where))
(deff associate-saves-with-instruction
(scanned-declaration #'might-instruction-save?
#'tag-as-save-instruction
#'losing-save))
(defun might-instruction-restore? (register from-register)
(tester from-register register "~&Ignoring restore from ~S into ~S."))
(defun tag-as-restore-instruction (into from)
(tagger from into
#'(lambda (instruction)
(when (instruction-saves? instruction)
(format t "Invalid declaration: save and restore in ~S")))
*restores-tag*))
(defun losing-restore (where into from)
(loser "~&Cannot match restore from ~S into ~S at ~S." from into where))
(deff associate-restores-with-instruction
(scanned-declaration #'might-instruction-restore?
#'tag-as-restore-instruction
#'losing-restore))
(defvar *losing-successors* '() "Holds list of instruction addresses.
Instructions at these addresses have successors that do not exist.")
(defun compute-successors ()
(for-each-instruction
#'(lambda (instruction index)
(setf (instruction-successors instruction)
(compute-instruction-successors instruction index)))))
(defmacro verified-instruction (address ignore-referenced-by)
ignore-referenced-by
`(INSTRUCTION-AT ,address))
(defmacro check-verification (instruction ignore-message)
ignore-message
instruction)
( JUMP - XCT - NEXT < instruction - jumped - to > < instruction - xct - along - the - way > )
(defsynonym instruction-xfered-to second)
(defsynonym instruction-xct-along-the-way third)
(defsynonym instruction-returned-to fourth)
(defun make-normal-successor (instruction)
(check-verification instruction "Missing instruction in stream"))
(defun make-jump-successor (instruction)
(check-verification instruction "Jump to missing instruction"))
(defun make-dispatch-jump-successor (instruction)
(check-verification instruction "Dispatch jump to missing instruction"))
(defun make-dispatch-fall-through-successor (instruction)
(check-verification instruction "Dispatch fall through to missing instruction"))
(defun make-dispatch-skip-successor (instruction)
(check-verification instruction "Dispatch skip to missing instruction"))
(defun make-call-successor (instruction-called instruction-xct-next instruction-returned-to)
`(CALLS ,(check-verification instruction-called "Call to missing instruction")
,(check-verification instruction-xct-next "Missing instruction xct-next'd during call")
,(check-verification instruction-returned-to "Return to missing instruction")))
(defun make-return-successor (instruction-returned-to instruction-xct-next)
`(RETURNS ,(check-verification instruction-returned-to "Return to missing instruction")
,(check-verification instruction-xct-next "Missing instruction xct-next'd during return")))
(defun make-jump-xct-next-successor (instruction-xfered-to instruction-xct-next)
`(JUMP-XCT-NEXT ,(check-verification instruction-xfered-to "Jump to missing instruction")
,(check-verification instruction-xct-next "Missing instruction xct-next'd during jump")))
(defun list-successor-type? (type)
#'(lambda (successor)
(and (list? successor)
(eq? (first successor) type))))
(deff call-successor? (list-successor-type? 'calls))
(deff returns-successor? (list-successor-type? 'returns))
(deff jump-xct-next-successor? (list-successor-type? 'jump-xct-next))
(defsynonym next-instruction-successor? instruction?)
(defun compute-instruction-successors (instruction in-address)
(let ((numeric-form (instruction-numeric-form instruction))
(popj-after-next (instruction-popj-after-next instruction))
(next-instruction (verified-instruction (1+ in-address) in-address))
(after-next-instruction (verified-instruction (+ 2 in-address) in-address)))
(case (instruction-opcode-type instruction)
((no-op alu-op byte-op) (if popj-after-next
(list (make-return-successor '() next-instruction))
(list (make-normal-successor next-instruction))))
(jump-op (compute-jump-successors popj-after-next next-instruction after-next-instruction
(verified-instruction (extract-jump-address numeric-form) in-address)
(extract-jump-condition numeric-form)
(extract-jump-rpn-bits numeric-form)))
(dispatch-op (compute-dispatch-successors in-address popj-after-next
instruction next-instruction after-next-instruction
(extract-dispatch-push-own-address numeric-form)
(extract-dispatch-base-address numeric-form)
(extract-dispatch-bits numeric-form)))
)))
(defun compute-jump-successors (popj-after-next next-instruction after-next-instruction to-address condition rpn-bits)
(let ((ans '()))
(labels (
(cannot-popj-after-next ()
(when popj-after-next
(ferror nil "Popj-after-next combined with ~S" rpn-bits)))
(can-fall-through ()
(unless (unconditional? condition)
(can-go-to (make-normal-successor next-instruction))))
(can-go-to (where)
(push where ans)))
(case rpn-bits
(jump-xct-next
(cannot-popj-after-next)
(can-go-to (make-jump-xct-next-successor to-address next-instruction))
(can-fall-through))
(jump
(cannot-popj-after-next)
(can-go-to (make-jump-successor to-address))
(can-fall-through))
(call-xct-next
(cannot-popj-after-next)
(can-go-to (make-call-successor to-address next-instruction after-next-instruction))
(can-fall-through))
(call
(cannot-popj-after-next)
(can-go-to (make-call-successor to-address '() next-instruction))
(can-fall-through))
(return-xct-next
(cannot-popj-after-next)
(can-go-to (make-return-successor '() next-instruction))
(can-fall-through))
(return
(if popj-after-next
(can-go-to (make-return-successor '() next-instruction))
(can-go-to (make-return-successor '() '())))
(unless (unconditional? condition)
(can-fall-through)))
(illegal-rpn
(ferror nil "Illegal rpn bits in jump"))))
ans))
(defun compute-dispatch-successors (dispatched-from-location popj-after-next
instruction next-instruction after-next-instruction
push-own-address? base-address bits)
(let ((number-of-dispatch-options (expt 2 bits))
(return-address (if push-own-address? instruction next-instruction))
(return-address-if-xct-next (if push-own-address? next-instruction after-next-instruction))
(ans '()))
(labels (
(can-go-to (where)
(pushnew where ans :test #'equal?)))
(dotimes (option number-of-dispatch-options)
(let* ((dispatch-entry (elt *a-mem-array* (+ base-address option)))
(rpn-bits (extract-dispatch-rpn-bits dispatch-entry))
(dispatch-address
(verified-instruction (extract-dispatch-address-from-entry dispatch-entry)
dispatched-from-location)))
(can-go-to
(case rpn-bits
(jump-xct-next (make-jump-xct-next-successor dispatch-address next-instruction))
(jump (make-dispatch-jump-successor dispatch-address))
(call-xct-next (make-call-successor dispatch-address return-address return-address-if-xct-next))
(call (make-call-successor dispatch-address '() return-address))
(return-xct-next (make-return-successor '() next-instruction))
(return (make-return-successor '() '()))
(fall-through (if popj-after-next
(make-return-successor '() next-instruction)
(make-dispatch-fall-through-successor next-instruction)))
(skip (if popj-after-next
(make-dispatch-skip-successor after-next-instruction)))))
)))
ans))
(defun compute-predecessors ()
(for-each-instruction
#'(lambda (instruction address)
(dolist (successor (instruction-successors instruction))
(unless (null? successor)
(labels (
(preceeds (predecessor successor)
(unless (null? successor)
(push predecessor (instruction-predecessors successor))))
)
(if (instruction? successor)
(preceeds instruction successor)
(case (first successor)
(calls (preceeds `(CALLED ,instruction
,(instruction-xct-along-the-way successor)
,(instruction-returned-to successor))
(instruction-xfered-to successor))
(preceeds `(XCT-NEXTED ,instruction) (instruction-xct-along-the-way successor))
(preceeds `(RETURNED-TO ,instruction) (instruction-returned-to successor)))
(jump-xct-next (preceeds `(JUMP-XCT-NEXTED ,instruction
,(instruction-xct-along-the-way successor))
(instruction-xfered-to successor))
(preceeds `(XCT-NEXTED ,instruction) (instruction-xct-along-the-way successor)))
(returns (preceeds `(XCT-NEXTED ,instruction) (instruction-xct-along-the-way successor)))))))))))
(defun for-instruction-successors (predicate operation)
(for-each-instruction
#'(lambda (instruction address)
(dolist (successor (instruction-successors instruction))
(when (funcall predicate successor)
(funcall operation successor))))))
(defun find-subroutine-entries ()
(setq *subroutine-entries* '())
(for-instruction-successors
#'call-successor?
#'(lambda (call-successor)
(pushnew (instruction-xfered-to call-successor) *subroutine-entries*))))
(defun find-instructions-without-predecessors ()
(setq *instructions-without-predecessors* '())
(for-each-instruction
#'(lambda (instruction address)
(when (null? (instruction-predecessors instruction))
(unless (member (instruction-address instruction)
*dont-count-these-instructions-in-those-that-have-no-predecessors*)
(push instruction *instructions-without-predecessors*))))))
(defun find-basic-blocks ()
(setq *basic-block-info* '())
(for-each-instruction
#'(lambda (instruction address)
(check-basic-blockness instruction address)))
(setq *basic-block-info* (cons *trace-lmc-version-number* (list *basic-block-info*))))
(defun check-basic-blockness (instruction address)
(let ((predecessors (instruction-predecessors instruction)))
(labels (
(is-block ()
(push address *basic-block-info*))
(isnt-block () (values)))
(if (or (null? predecessors)
(cdr predecessors))
(is-block)
(let ((p (car predecessors)))
(if (list? p)
(ecase (car p)
(called (is-block))
(returned-to (is-block))
((jump-xct-nexted xct-nexted)
(if (null? (cdr (instruction-successors (cadr p))))
(isnt-block)
(is-block))))
(if (null? (cdr (instruction-successors p)))
(isnt-block)
(is-block))))))))
|
5d2cf38298830286e77e904d5017e3082d142f42d9f986dfa84d74ed9fec684c | patricoferris/sesame | responsive.ml | module Images = struct
  (* A responsive-size chain: zero or more [MaxWidth (breakpoint_px,
     display_width_px, rest)] clauses terminated by the unconditional
     [Default display_width_px]. *)
  type t = MaxWidth of int * int * t | Default of int
  (* [root] anchors relative input paths; [conf] is the underlying image
     transform configuration (quality, rename, files, dst). *)
  type conf = { root : Fpath.t; conf : Image.Transform.conf }
let rec get_default_size = function
| Default i -> i
| MaxWidth (_, _, xs) -> get_default_size xs
let rec get_sizes = function
| Default i -> [ float_of_int i ]
| MaxWidth (_cond, size, media) -> float_of_int size :: get_sizes media
let rename_by_size i s = Fmt.str "%s-%i" s i
  (* Render the chain as HTML [sizes] attribute entries, e.g.
     "(max-width: 600px) 320px"; the unconditional default ("Npx") last. *)
  let rec get_a_sizes = function
    | Default i -> [ Fmt.str "%ipx" i ]
    | MaxWidth (cond, size, media) ->
        Fmt.str "(max-width: %ipx) %ipx" cond size :: get_a_sizes media
  (* Turn float widths into Tyxml [`Url_width] srcset entries: each width [s]
     maps the base file [f] to its "-<s>" renamed path plus the integer width
     descriptor. *)
  let sizes_to_srcset f sizes =
    List.map
      (fun s ->
        let s = int_of_float s in
        `Url_width
          (Fpath.(Path.(change_filename f (rename_by_size s) |> to_string)), s))
      sizes
  (* Produce a resized copy of every configured file at every width in
     [sizes].  Each source file is made absolute against [conf.root], loaded,
     scaled, and written under [dst] with the configured rename composed with
     the "-<size>" suffix.  Load/save failures are rethrown with the
     offending path in the message. *)
  let resize ~conf sizes =
    let { Image.Transform.quality; rename; files; dst } = conf.conf in
    List.iter
      (fun f ->
        let f = Path.(join_relative ~drop:false conf.root f) in
        let resize size =
          let img =
            try Image.from_file f
            with Images.Wrong_file_type -> failwith (Fpath.to_string f)
          in
          let img = Image.resize size img in
          (* Deliberately shadows the configured [rename], appending the
             size suffix after the user-supplied renaming. *)
          let rename s = rename s |> rename_by_size (int_of_float size) in
          let output =
            Fpath.(dst // Path.change_filename ~keep_path:false f rename)
          in
          try Image.to_file ~quality img output with
          | Failure fail -> failwith (Fpath.to_string output ^ " " ^ fail)
          | f -> raise f
        in
        List.iter resize sizes)
      files
  (* Build one responsive <img> element (paired with its source path) per
     configured file: generate the resized variants on disk via [resize],
     then emit [src] (pointing at the default width's file), [srcset], and
     [sizes] attributes derived from the chain [t]. *)
  let v ~alt ~conf t =
    let open Tyxml.Html in
    let gen_srcset f =
      let sizes = get_sizes t in
      (* Narrow the transform config to this single file. *)
      let conf =
        { conf with conf = { conf.conf with Image.Transform.files = [ f ] } }
      in
      resize ~conf sizes;
      let default = get_default_size t in
      let srcset = sizes_to_srcset f sizes in
      ( f,
        img ~alt
          ~src:
            Fpath.(
              Path.(change_filename f (rename_by_size default)) |> to_string)
          ~a:[ a_srcset srcset; a_img_sizes (get_a_sizes t) ]
          () )
    in
    List.map gen_srcset conf.conf.files
end
| null | https://raw.githubusercontent.com/patricoferris/sesame/8521e2a086b49d0bc20f0fca705f07675c52e1ae/src/sesame/responsive.ml | ocaml | module Images = struct
type t = MaxWidth of int * int * t | Default of int
type conf = { root : Fpath.t; conf : Image.Transform.conf }
let rec get_default_size = function
| Default i -> i
| MaxWidth (_, _, xs) -> get_default_size xs
let rec get_sizes = function
| Default i -> [ float_of_int i ]
| MaxWidth (_cond, size, media) -> float_of_int size :: get_sizes media
let rename_by_size i s = Fmt.str "%s-%i" s i
let rec get_a_sizes = function
| Default i -> [ Fmt.str "%ipx" i ]
| MaxWidth (cond, size, media) ->
Fmt.str "(max-width: %ipx) %ipx" cond size :: get_a_sizes media
let sizes_to_srcset f sizes =
List.map
(fun s ->
let s = int_of_float s in
`Url_width
(Fpath.(Path.(change_filename f (rename_by_size s) |> to_string)), s))
sizes
let resize ~conf sizes =
let { Image.Transform.quality; rename; files; dst } = conf.conf in
List.iter
(fun f ->
let f = Path.(join_relative ~drop:false conf.root f) in
let resize size =
let img =
try Image.from_file f
with Images.Wrong_file_type -> failwith (Fpath.to_string f)
in
let img = Image.resize size img in
let rename s = rename s |> rename_by_size (int_of_float size) in
let output =
Fpath.(dst // Path.change_filename ~keep_path:false f rename)
in
try Image.to_file ~quality img output with
| Failure fail -> failwith (Fpath.to_string output ^ " " ^ fail)
| f -> raise f
in
List.iter resize sizes)
files
let v ~alt ~conf t =
let open Tyxml.Html in
let gen_srcset f =
let sizes = get_sizes t in
let conf =
{ conf with conf = { conf.conf with Image.Transform.files = [ f ] } }
in
resize ~conf sizes;
let default = get_default_size t in
let srcset = sizes_to_srcset f sizes in
( f,
img ~alt
~src:
Fpath.(
Path.(change_filename f (rename_by_size default)) |> to_string)
~a:[ a_srcset srcset; a_img_sizes (get_a_sizes t) ]
() )
in
List.map gen_srcset conf.conf.files
end
|
|
9619201ed08dfcd5cdda35347aa596f3633287a43625df5ec69f9daa1d8852c0 | softwarelanguageslab/maf | R5RS_scp1_multiply-3.scm | ; Changes:
; * removed: 0
; * added: 1
; * swaps: 0
; * negated predicates: 0
; * swapped branches: 0
; * calls to id fun: 0
;; Mutation-testing fixture: rec-multiply multiplies by repeated addition
;; (plain recursion); iter-multiply does the same with an accumulator loop.
;; The <change> form marks the injected mutation (an added display of a
;; lambda).  The whole expression checks that both compute 5 * 2 = 10.
(letrec ((rec-multiply (lambda (a b)
                         (if (zero? b) 0 (+ a (rec-multiply a (- b 1))))))
         (iter-multiply (lambda (a b)
                          (<change>
                             ()
                             (display (lambda (result counter) (if (zero? counter) result (iter (+ result a) (- counter 1))))))
                          (letrec ((iter (lambda (result counter)
                                           (if (zero? counter)
                                              result
                                              (iter (+ result a) (- counter 1))))))
                             (iter 0 b)))))
   (= 10 (rec-multiply 5 2) (iter-multiply 5 2)))
* negated predicates: 0
* swapped branches: 0
* calls to id fun: 0 | * removed : 0
* added : 1
* swaps : 0
(letrec ((rec-multiply (lambda (a b)
(if (zero? b) 0 (+ a (rec-multiply a (- b 1))))))
(iter-multiply (lambda (a b)
(<change>
()
(display (lambda (result counter) (if (zero? counter) result (iter (+ result a) (- counter 1))))))
(letrec ((iter (lambda (result counter)
(if (zero? counter)
result
(iter (+ result a) (- counter 1))))))
(iter 0 b)))))
(= 10 (rec-multiply 5 2) (iter-multiply 5 2))) |
d48d5e6abc963e7375f8a2614bfa314382ac7a6125233b01535db259400d1662 | noinia/hgeometry | RayTracer.hs | module Demo.RayTracer where
-- import Algorithms.Geometry.HiddenSurfaceRemoval (compareDepthOrder, Tri)
import Data.Ord (comparing)
import qualified Data.List as List
import Data.Maybe (mapMaybe)
import Data.Ext
import Control.Lens
import Geometry.Line
import Geometry.Point
import Geometry.Vector
import Geometry.Triangle
import Data.Range
import Data.Util
import Graphics.Camera
--------------------------------------------------------------------------------
type Picture = ()
-- | For every pixel on screen, shoot a ray from the camera through that
-- pixel and colour it after the nearest triangle hit within the view range
-- ('nearDist'..'farDist').  NB: 'toPoint' (pixel -> world point) is still a
-- stub ('undefined'), so this cannot run yet.
render :: (Fractional r, Ord r)
       => Vector 2 Int -- ^ Screen size (i.e. number of pixels)
       -> Camera r -> [Triangle 3 p r :+ f] -> Picture
render (Vector2 w h) c ts = fromPixels
    [ fmap colorOf . firstInRange vr
    $ shootRay (c^.cameraPosition) (toPoint x y) ts
    | x <- [1..w], y <- [1..h]
    ]
  where
    vr = ClosedRange (c^.nearDist) (c^.farDist)
    toPoint x y = undefined
-- | Assemble the final picture from per-pixel colours ('Nothing' meaning no
-- hit / background).  Stub.
fromPixels :: [Maybe Color] -> Picture
fromPixels = undefined
-- | Placeholder colour type and triangle colouring; both are stand-ins until
-- a real shading model exists.
type Color = ()
colorOf :: Triangle 3 p r :+ f -> Color
colorOf _ = undefined
-- | Among (triangle, distance) hits, keep only those whose distance lies in
-- the given view range and return the nearest surviving triangle, if any.
firstInRange :: Ord r => Range r -> [SP (Triangle 3 p r :+ f) r]
             -> Maybe (Triangle 3 p r :+ f)
firstInRange vr = fmap (^._1) . minimumOn (^._2) . filter ((`inRange` vr) . (^._2))
-- | Total variant of @'List.minimumBy' ('comparing' f)@: 'Nothing' on an
-- empty list, otherwise 'Just' the element whose projection under @f@ is
-- smallest (first such element on ties).
minimumOn :: Ord b => (a -> b) -> [a] -> Maybe a
minimumOn f xs
  | null xs   = Nothing
  | otherwise = Just (List.minimumBy (comparing f) xs)
-- | Shoot a ray from p through q. Report the triangles intersected by the ray
-- and their distance from p.
--
--
-- Builds the ray as the line through @p@ and @q@ and keeps only actual hits;
-- the reported distances are squared (see 'intersectT'), which is fine for
-- ordering purposes.
shootRay :: (Fractional r, Ord r) => Point 3 r -> Point 3 r -> [Triangle 3 p r :+ f]
         -> [SP (Triangle 3 p r :+ f) r]
shootRay p q ts = mapMaybe ((lineThrough p q) `intersectT`) ts
-- | Reports the intersected triangle together with the squared distance from
-- the line's origin to the intersection point.  Still a stub ('undefined');
-- the commented sketch below shows the intended implementation.
-- (Fix: the middle line of the sketch had lost its comment marker, leaving
-- bare non-Haskell text at the top level.)
intersectT :: Line 3 r -> Triangle 3 p r :+ f -> Maybe (SP (Triangle 3 p r :+ f) r)
intersectT = undefined
-- intersectT l t = case l `intersects` t of
--   p -> Just (SP t $ quadrance (l^.origin) p)
--   _ -> Nothing
| null | https://raw.githubusercontent.com/noinia/hgeometry/89cd3d3109ec68f877bf8e34dc34b6df337a4ec1/hgeometry-examples/src/Demo/RayTracer.hs | haskell | import Algorithms.Geometry.HiddenSurfaceRemoval (compareDepthOrder, Tri)
------------------------------------------------------------------------------
^ Screen size (i.e. number of pixels)
and their distance from p.
| reports the intersection point with the squared distance to the intersection point
intersectT l t = case l `intersects` t of
_ -> Nothing | module Demo.RayTracer where
import Data.Ord (comparing)
import qualified Data.List as List
import Data.Maybe (mapMaybe)
import Data.Ext
import Control.Lens
import Geometry.Line
import Geometry.Point
import Geometry.Vector
import Geometry.Triangle
import Data.Range
import Data.Util
import Graphics.Camera
type Picture = ()
render :: (Fractional r, Ord r)
-> Camera r -> [Triangle 3 p r :+ f] -> Picture
render (Vector2 w h) c ts = fromPixels
[ fmap colorOf . firstInRange vr
$ shootRay (c^.cameraPosition) (toPoint x y) ts
| x <- [1..w], y <- [1..h]
]
where
vr = ClosedRange (c^.nearDist) (c^.farDist)
toPoint x y = undefined
fromPixels :: [Maybe Color] -> Picture
fromPixels = undefined
type Color = ()
colorOf :: Triangle 3 p r :+ f -> Color
colorOf _ = undefined
firstInRange :: Ord r => Range r -> [SP (Triangle 3 p r :+ f) r]
-> Maybe (Triangle 3 p r :+ f)
firstInRange vr = fmap (^._1) . minimumOn (^._2) . filter ((`inRange` vr) . (^._2))
minimumOn :: Ord b => (a -> b) -> [a] -> Maybe a
minimumOn f = go
where
go [] = Nothing
go xs = Just $ List.minimumBy (comparing f) xs
| Shoot a ray from p through q. Report the triangles intersected by the ray
shootRay :: (Fractional r, Ord r) => Point 3 r -> Point 3 r -> [Triangle 3 p r :+ f]
-> [SP (Triangle 3 p r :+ f) r]
shootRay p q ts = mapMaybe ((lineThrough p q) `intersectT`) ts
intersectT :: Line 3 r -> Triangle 3 p r :+ f -> Maybe (SP (Triangle 3 p r :+ f) r)
intersectT = undefined
p - > Just ( SP t $ quadrance ( l^.origin ) p )
|
9a43d56a2fc17bd709bceedf1ec5823d7bbd7ec33155349ad5df50df2cc38866 | larcenists/larceny | intern.scm | intern.scm - private api functions for Sassy
Copyright ( C ) 2005
; This library is free software; you can redistribute it and/or
; modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
; This library is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
; Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
; License along with this library; if not, write to the Free Software
Foundation , Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
; Contact:
; 4130 43 ST #C2
Sunnyside , NY 11104
;
see file COPYING in the top of Sassy 's distribution directory
; module intern
import api push - stacks srfi-69
; export all
; Looks up symbol-name (a scheme symbol) in the symbol-table of
; sassy-output. If no record exists for that name, it creates a fresh
; one, in the table. Then for each item in the list of field-value
; pairs, it sets the corresponding field of the sassy-symbol to the
; value (or in the case of the 'unres field, adds the value to the
; list stored there). The pairs must be proper lists. The result is
; the sassy-symbol that was modified.
; Anytime a new offset is given via the field-pair '(offset <value>),
; all the back-patchers stored in the unres field of the sassy-symbol are
; applied to the <value>.
;; (See the block comment above for the full contract.)
(define (sassy-symbol-set! sassy-output symbol-name . list-of-field-pairs)
  (let ((exists (sassy-symbol-exists-env? sassy-output symbol-name)))
    ;; First mention of the name: make a fresh 'local record and install it
    ;; in the innermost hashtable frame of the symbol-table list.
    (when (not exists)
      (set! exists (make-sassy-symbol symbol-name 'local #f #f #f '() #f))
      (let iter ((t (sassy-symbol-table sassy-output)))
	(if (hash-table? (car t))
	    (hash-table-set! (car t) symbol-name exists)
	    (iter (cdr t)))))
    ;; Apply each (field value) pair; 'offset additionally fires all queued
    ;; back-patchers, and 'unres pushes rather than replaces.
    (for-each
     (lambda (field-pair)
       (case (car field-pair)
	 ((name) (sassy-symbol-name-set! exists (cadr field-pair)))
	 ((scope) (sassy-symbol-scope-set! exists (cadr field-pair)))
	 ((section) (sassy-symbol-section-set! exists (cadr field-pair)))
	 ((size) (sassy-symbol-size-set! exists (cadr field-pair)))
	 ((offset)
	  (sassy-symbol-offset-set! exists (cadr field-pair))
	  (for-each (lambda (back-patcher)
		      (back-patcher (cadr field-pair)
				    (sassy-symbol-section exists)))
		    (sassy-symbol-unres exists)))
	 ((unres)
	  (sassy-symbol-unres-set!
	   exists (cons (cadr field-pair) (sassy-symbol-unres exists))))
	 ((exp) (sassy-symbol-exp-set! exists (cadr field-pair)))))
     list-of-field-pairs)
    exists))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;
; About a third of the entire compile time is spent in this
; one procedure.
;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;(define (sassy-symbol-ensure sassy-output symbol-name)
; (or (sassy-symbol-exists-env? sassy-output symbol-name)
;      (let ((new (make-sassy-symbol symbol-name 'local #f #f #f '() #f)))
; (let iter ((t (sassy-symbol-table sassy-output)))
;        (vector-set! sassy-symbol-ensure:loops 1
;                     (+ 1 (vector-ref sassy-symbol-ensure:loops 1)))
; (if (hash-table? (car t))
; (begin (hash-table-set! (car t) symbol-name new)
; new)
; (iter (cdr t)))))))
; Let's inline the call to sassy-symbol-exists-env?, simplify
; a bit, and see whether it makes any difference.
;; Look up SYMBOL-NAME in SASSY-OUTPUT's symbol table, creating and
;; installing a fresh 'local record in the innermost hashtable frame when it
;; is absent.  (Fix: several comment lines inside this definition had lost
;; their ";" markers, leaving bare symbols in the code; they are restored as
;; comments below.)
(define (sassy-symbol-ensure sassy-output symbol-name)
  (let ((symtable (sassy-symbol-table sassy-output)))
    ; Hmmm, this doesn't seem to be defined when cross-compiling.
    (define (hash-table-ref/default ht key default)
      (cond (default
             (hash-table-ref ht key (lambda () default)))
            (else
             (hash-table-ref ht key thunk:false))))
    ; symtable may be any of
    ;     a hashtable
    ;     an improper list of hashtables and Sassy symbols
    ;         ending with a hashtable
    ;     a list of hashtables and Sassy symbols
    ;
    ; We search the list as follows:
    ;     if we get to a hashtable, we look up symbol-name
    ;         within the hashtable and either return its
    ;         associated Sassy symbol or create a new one
    ;         and install it
    ;     if we see a Sassy symbol, we compare it and stop
    ;         if they match
    ;
    ; The improper list probably doesn't ever occur,
    ; or if it does then the name is always found within
    ; the hashtable; otherwise we'd be taking the car of
    ; a hashtable and the original Sassy code would blow
    ; up.  If it ever occurs, it will display a message.
    ;
    ; There's no point to searching the list twice, as
    ; the original Sassy code was doing.  Once we've found
    ; a hashtable, the search is over.
    (define (loop1 rst)
      (cond ((not (pair? rst))
             (if (hash-table? rst)
                 (begin
                   ; FIXME
                   ; FIXME
                   (or (hash-table-ref/default rst symbol-name #f)
                       (assertion-violation 'sassy-symbol-ensure
                                            "bug in Clinger's version"
                                            rst)))
                 (assertion-violation 'sassy-symbol-ensure
                                      "bug in Sassy" rst)))
            ((hash-table? (car rst))
             (let ((ht (car rst)))
               (or (hash-table-ref/default ht symbol-name #f)
                   (let ((new (make-sassy-symbol
                               symbol-name 'local #f #f #f '() #f)))
                     (hash-table-set! ht symbol-name new)
                     new))))
            ((eq? symbol-name (sassy-symbol-name (car rst)))
             (car rst))
            (else
             (loop1 (cdr rst)))))
    (loop1 symtable)))
; Defining this here eliminates the need to create a closure
; on most calls to hash-table-ref.
(define (thunk:false) #f)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;
; FIXME: end of changes to sassy-symbol-ensure
;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; fast path cases used internally
; instead blah-foo-set! these are all blah-set-foo!
;; Ensure NAME exists in SASSY-OUTPUT's table and overwrite its scope.
;; Returns the sassy-symbol record.
(define (sassy-symbol-set-scope! sassy-output name scope)
  (let ((sym (sassy-symbol-ensure sassy-output name)))
    (sassy-symbol-scope-set! sym scope)
    sym))
;; Set both section and offset of NAME; the address is now known, so run
;; every queued back-patcher with the new offset and the symbol's section.
;; Returns the record.
(define (sassy-symbol-set-sect-off! so name sect off)
  (let ((sym (sassy-symbol-ensure so name)))
    (sassy-symbol-section-set! sym sect)
    (sassy-symbol-offset-set! sym off)
    (for-each (lambda (back-patcher)
		(back-patcher off (sassy-symbol-section sym)))
	      (sassy-symbol-unres sym))
    sym))
;; Set the offset of NAME (section untouched) and fire the queued
;; back-patchers with it.  Returns the record.
(define (sassy-symbol-set-off! so name off)
  (let ((sym (sassy-symbol-ensure so name)))
    (sassy-symbol-offset-set! sym off)
    (for-each (lambda (back-patcher)
		(back-patcher off (sassy-symbol-section sym)))
	      (sassy-symbol-unres sym))
    sym))
;; Overwrite the recorded size of NAME.  Returns the record.
(define (sassy-symbol-set-size! so name size)
  (let ((sym (sassy-symbol-ensure so name)))
    (sassy-symbol-size-set! sym size)
    sym))
;; Queue one more back-patcher for NAME (run later, when its offset is set).
;; Returns the record.
(define (sassy-symbol-set-unres! so name unres)
  (let ((sym (sassy-symbol-ensure so name)))
    (sassy-symbol-unres-set! sym (cons unres (sassy-symbol-unres sym)))
    sym))
;; Overwrite the recorded section of NAME.  Returns the record.
(define (sassy-symbol-set-sect! so name sect)
  (let ((sym (sassy-symbol-ensure so name)))
    (sassy-symbol-section-set! sym sect)
    sym))
(define sassy-symbol-exists-env? sassy-symbol-exists?)
; (define (sassy-symbol-exists-env? sassy-output name)
; (let iter ((rst (sassy-symbol-table sassy-output)))
; (cond ((hash-table? (car rst))
; (hash-table-ref (car rst) name (lambda () #f)))
; ((eq? name (sassy-symbol-name (car rst))) (car rst))
;           (else (iter (cdr rst))))))
;; A name counts as defined when the table knows it and it is either an
;; import or already carries an offset.
(define (sassy-symbol-defined? sassy-output name)
  (let ((sym (sassy-symbol-exists-env? sassy-output name)))
    (and sym
	 (or (eq? 'import (sassy-symbol-scope sym))
	     (if (sassy-symbol-offset sym) #t #f)))))
;; Guard used before defining NAME: true when NAME is still undefined,
;; otherwise raises a re-definition error.
(define (sassy-symbol-def-error sassy-output name)
  (or (not (sassy-symbol-defined? sassy-output name))
      (error "re-definition of a previously defined/imported symbol" name)))
;; Thunk yielding a fresh, program-unique block tag ("%!%!%!block<N>") from a
;; private counter.  (Fix: the "(lambda () ..." line had been mangled into
;; the bare text "should use native" by comment stripping -- restored from
;; the closing-paren balance and the zero-argument call site in
;; setup-locals.)
(define new-block
  (let ((c 0))
    (lambda ()                          ; should use native gensyms?
      (let ((n (string->symbol (string-append "%!%!%!block"
                                              (number->string c)))))
        (set! c (+ c 1))
        n))))
;; extra-proc is a proc of one argument that does something with each
;; new sassy-symbol record, or #f
;; Enter a fresh local scope: prepend one fresh sassy-symbol (all tagged with
;; a new block id) per name in LOCALS onto OUTP's symbol-table list, calling
;; EXTRA-PROC (when non-#f) on each new record.  Returns a thunk that
;; restores the previous table, for the caller to invoke on scope exit.
(define (setup-locals locals outp extra-proc)
  (let* ((newb (new-block))
	 (old-env (sassy-symbol-table outp))
	 (restore! (lambda ()
		     (sassy-symbol-table-set! outp old-env))))
    (sassy-symbol-table-set!
     outp
     (let iter ((rest locals))
       (if (null? rest)
	   old-env
	   (let ((new-sym (make-sassy-symbol
			   (valid-label (car rest)) newb #f #f #f '() #f)))
	     (if extra-proc
		 (extra-proc new-sym))
	     (cons new-sym (iter (cdr rest)))))))
    restore!))
;; If x is a one-element quotation of a symbol -- i.e. (quote <sym>) --
;; return that symbol; otherwise return #f.
(define (quoted-label x)
  (if (and (pair? x)
	   (eq? 'quote (car x))
	   (pair? (cdr x))
	   (null? (cddr x))
	   (symbol? (cadr x)))
      (cadr x)
      #f))
;; Returns X itself when it can serve as a label -- a symbol that is not one
;; of the reserved control keywords -- or the unwrapped symbol of a
;; (quote <sym>) form; otherwise #f.
(define valid-label0
  (let ((keywords '(seq begin inv if iter while with-win
		    with-lose with-win-lose esc
		    mark leap label)))
    (lambda (x)
      (cond ((and (symbol? x) (not (member x keywords))) x)
	    ((quoted-label x))
	    (else #f)))))
;; Like valid-label0, but raises an error instead of returning #f.
(define (valid-label x)
  (or (valid-label0 x)
      (error "sassy: invalid label" x)))
;; For a symbolic TARGET, answer the relocation target name: the symbol when
;; it is not yet in the table, or when its scope is local/import/export; #f
;; for other scopes.  Non-symbol targets give #f.
(define (get-reloc-target target outp)
  (if (symbol? target)
      (let ((s (sassy-symbol-exists-env? outp target)))
	(if s
	    (case (sassy-symbol-scope s)
	      ((local import export) target)
	      (else #f))
	    target))
      #f))
;; Section a relocation against TARGET belongs to: for symbolic targets, the
;; symbol's recorded section (or #f when unknown); otherwise CURRENT.
(define (get-reloc-target-sect target outp current)
  (if (symbol? target)
      (let ((s (sassy-symbol-exists-env? outp target)))
	(and s (sassy-symbol-section s)))
      current))
;; Raise an error unless SIZE matches the current data width in bytes;
;; KEY and LABEL identify the offending directive in the message.
(define (check-label-size size cur-byte-size key label)
  (if (not (= size cur-byte-size))
      (error
       "sassy: wrong data size for label or custom reloc under "
       `(bits ,(* 8 cur-byte-size)) (list key label))))
| null | https://raw.githubusercontent.com/larcenists/larceny/fef550c7d3923deb7a5a1ccd5a628e54cf231c75/src/Lib/Sassy/intern.scm | scheme | This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
either
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software
Contact:
4130 43 ST #C2
module intern
export all
Looks up symbol-name (a scheme symbol) in the symbol-table of
sassy-output. If no record exists for that name, it creates a fresh
one, in the table. Then for each item in the list of field-value
pairs, it sets the corresponding field of the sassy-symbol to the
value (or in the case of the 'unres field, adds the value to the
list stored there). The pairs must be proper lists. The result is
the sassy-symbol that was modified.
Anytime a new offset is given via the field-pair '(offset <value>),
all the back-patchers stored in the unres field of the sassy-symbol are
applied to the <value>.
(define (sassy-symbol-ensure sassy-output symbol-name)
(or (sassy-symbol-exists-env? sassy-output symbol-name)
(let iter ((t (sassy-symbol-table sassy-output)))
(if (hash-table? (car t))
(begin (hash-table-set! (car t) symbol-name new)
new)
(iter (cdr t)))))))
Let's inline the call to sassy-symbol-exists-env?, simplify
a bit, and see whether it makes any difference.
Hmmm, this doesn't seem to be defined when cross-compiling.
symtable may be any of
a hashtable
ending with a hashtable
We search the list as follows:
if we get to a hashtable, we look up symbol-name
within the hashtable and either return its
and install it
if they match
The improper list probably doesn't ever occur,
or if it does then the name is always found within
the hashtable; otherwise we'd be taking the car of
up. If it ever occurs, it will display a message.
There's no point to searching the list twice, as
a hashtable, the search is over.
Defining this here eliminates the need to create a closure
on most calls to hash-table-ref.
FIXME: end of changes to sassy-symbol-ensure
fast path cases used internally
instead blah-foo-set! these are all blah-set-foo!
(define (sassy-symbol-exists-env? sassy-output name)
(let iter ((rst (sassy-symbol-table sassy-output)))
(cond ((hash-table? (car rst))
(hash-table-ref (car rst) name (lambda () #f)))
((eq? name (sassy-symbol-name (car rst))) (car rst)) | intern.scm - private api functions for Sassy
Copyright ( C ) 2005
version 2.1 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
Sunnyside , NY 11104
see file COPYING in the top of Sassy 's distribution directory
import api push - stacks srfi-69
(define (sassy-symbol-set! sassy-output symbol-name . list-of-field-pairs)
(let ((exists (sassy-symbol-exists-env? sassy-output symbol-name)))
(when (not exists)
(set! exists (make-sassy-symbol symbol-name 'local #f #f #f '() #f))
(let iter ((t (sassy-symbol-table sassy-output)))
(if (hash-table? (car t))
(hash-table-set! (car t) symbol-name exists)
(iter (cdr t)))))
(for-each
(lambda (field-pair)
(case (car field-pair)
((name) (sassy-symbol-name-set! exists (cadr field-pair)))
((scope) (sassy-symbol-scope-set! exists (cadr field-pair)))
((section) (sassy-symbol-section-set! exists (cadr field-pair)))
((size) (sassy-symbol-size-set! exists (cadr field-pair)))
((offset)
(sassy-symbol-offset-set! exists (cadr field-pair))
(for-each (lambda (back-patcher)
(back-patcher (cadr field-pair)
(sassy-symbol-section exists)))
(sassy-symbol-unres exists)))
((unres)
(sassy-symbol-unres-set!
exists (cons (cadr field-pair) (sassy-symbol-unres exists))))
((exp) (sassy-symbol-exp-set! exists (cadr field-pair)))))
list-of-field-pairs)
exists))
About a third of the entire compile time is spent in this
one procedure .
( let ( ( new ( make - sassy - symbol symbol - name ' local # f # f # f ' ( ) # f ) ) )
( vector - set ! sassy - symbol - ensure : loops 1
( + 1 ( vector - ref sassy - symbol - ensure : loops 1 ) ) )
(define (sassy-symbol-ensure sassy-output symbol-name)
(let ((symtable (sassy-symbol-table sassy-output)))
(define (hash-table-ref/default ht key default)
(cond (default
(hash-table-ref ht key (lambda () default)))
(else
(hash-table-ref ht key thunk:false))))
an improper list of hashtables and Sassy symbols
a list of hashtables and Sassy symbols
associated Sassy symbol or create a new one
if we see a Sassy symbol , we compare it and stop
a hashtable and the original Sassy code would blow
the original Sassy code was doing . Once we 've found
(define (loop1 rst)
(cond ((not (pair? rst))
(if (hash-table? rst)
(begin
FIXME
FIXME
(or (hash-table-ref/default rst symbol-name #f)
(assertion-violation 'sassy-symbol-ensure
"bug in Clinger's version"
rst)))
(assertion-violation 'sassy-symbol-ensure
"bug in Sassy" rst)))
((hash-table? (car rst))
(let ((ht (car rst)))
(or (hash-table-ref/default ht symbol-name #f)
(let ((new (make-sassy-symbol
symbol-name 'local #f #f #f '() #f)))
(hash-table-set! ht symbol-name new)
new))))
((eq? symbol-name (sassy-symbol-name (car rst)))
(car rst))
(else
(loop1 (cdr rst)))))
(loop1 symtable)))
(define (thunk:false) #f)
(define (sassy-symbol-set-scope! sassy-output name scope)
(let ((sym (sassy-symbol-ensure sassy-output name)))
(sassy-symbol-scope-set! sym scope)
sym))
(define (sassy-symbol-set-sect-off! so name sect off)
(let ((sym (sassy-symbol-ensure so name)))
(sassy-symbol-section-set! sym sect)
(sassy-symbol-offset-set! sym off)
(for-each (lambda (back-patcher)
(back-patcher off (sassy-symbol-section sym)))
(sassy-symbol-unres sym))
sym))
(define (sassy-symbol-set-off! so name off)
(let ((sym (sassy-symbol-ensure so name)))
(sassy-symbol-offset-set! sym off)
(for-each (lambda (back-patcher)
(back-patcher off (sassy-symbol-section sym)))
(sassy-symbol-unres sym))
sym))
(define (sassy-symbol-set-size! so name size)
(let ((sym (sassy-symbol-ensure so name)))
(sassy-symbol-size-set! sym size)
sym))
(define (sassy-symbol-set-unres! so name unres)
(let ((sym (sassy-symbol-ensure so name)))
(sassy-symbol-unres-set! sym (cons unres (sassy-symbol-unres sym)))
sym))
(define (sassy-symbol-set-sect! so name sect)
(let ((sym (sassy-symbol-ensure so name)))
(sassy-symbol-section-set! sym sect)
sym))
(define sassy-symbol-exists-env? sassy-symbol-exists?)
( else ( iter ) ) ) ) ) )
(define (sassy-symbol-defined? sassy-output name)
(let ((maybe (sassy-symbol-exists-env? sassy-output name)))
(cond ((not maybe) #f)
((eq? 'import (sassy-symbol-scope maybe)) #t)
((sassy-symbol-offset maybe) #t)
(else #f))))
(define (sassy-symbol-def-error sassy-output name)
(or (not (sassy-symbol-defined? sassy-output name))
(error "re-definition of a previously defined/imported symbol" name)))
(define new-block
(let ((c 0))
should use native
(let ((n (string->symbol (string-append "%!%!%!block"
(number->string c)))))
(set! c (+ c 1))
n))))
extra - proc is a proc of one argument that does something with each
new sassy - symbol record , or # f
(define (setup-locals locals outp extra-proc)
(let* ((newb (new-block))
(old-env (sassy-symbol-table outp))
(restore! (lambda ()
(sassy-symbol-table-set! outp old-env))))
(sassy-symbol-table-set!
outp
(let iter ((rest locals))
(if (null? rest)
old-env
(let ((new-sym (make-sassy-symbol
(valid-label (car rest)) newb #f #f #f '() #f)))
(if extra-proc
(extra-proc new-sym))
(cons new-sym (iter (cdr rest)))))))
restore!))
(define (quoted-label x)
(and (pair? x)
(eq? 'quote (car x))
(let ((x (cdr x)))
(and (pair? x)
(null? (cdr x))
(let ((x (car x)))
(and (symbol? x) x))))))
(define valid-label0
(let ((keywords '(seq begin inv if iter while with-win
with-lose with-win-lose esc
mark leap label)))
(lambda (x)
(cond ((and (symbol? x) (not (member x keywords))) x)
((quoted-label x))
(else #f)))))
(define (valid-label x)
(or (valid-label0 x)
(error "sassy: invalid label" x)))
(define (get-reloc-target target outp)
(if (symbol? target)
(let ((s (sassy-symbol-exists-env? outp target)))
(if s
(case (sassy-symbol-scope s)
((local import export) target)
(else #f))
target))
#f))
(define (get-reloc-target-sect target outp current)
(if (symbol? target)
(let ((s (sassy-symbol-exists-env? outp target)))
(and s (sassy-symbol-section s)))
current))
(define (check-label-size size cur-byte-size key label)
(if (not (= size cur-byte-size))
(error
"sassy: wrong data size for label or custom reloc under "
`(bits ,(* 8 cur-byte-size)) (list key label))))
|
95705eddbdaf151d7ec8f12a94c0cba57e40fa9ed2c566ea0723a1a235fc0b2f | Elzair/nazghul | player.scm | ;;----------------------------------------------------------------------------
;; test/player.scm - basic player setup
;;
;; This will create a basic player character and party for testing. Test
;; scripts can customize it by adding party member and equipment afterwards.
;; ----------------------------------------------------------------------------
(kern-mk-char
'ch_wanderer ; tag
"The Wanderer" ; name
sp_human ; species
oc_wanderer ; occupation
s_wanderer ; sprite
faction-player ; starting alignment
starting str / int / dex
pc-hp-off ; base max hit points
pc-hp-gain ; max hit points gained per level
pc-mp-off ; base mana points
pc-mp-gain ; mana points gained per level
max-health ; max hit points (kernel will trim based on level)
hit points ( kernel will set to shortly )
max-health ; max mana points (kernel will trim based on level)
mana points ( kernel will set to shortly )
1 ; character level
#f ; dead?
nil ; conversation proc
nil ; schedule
nil ; special ai
nil ; personal inventory
nil ; readied armaments
)
(bind
(kern-mk-player
'player ; tag
s_wanderer ; sprite
"Walk" ; movement description
sound-walking ; movement sound
1 ; food
0 ; gold
turns to next meal ( 5 hours )
nil ; formation
m_campsite ; campsite map
nil ; campsite formation
nil ; vehicle
;; inventory
(kern-mk-inventory nil)
nil ;; party members (should be nil for initial load file)
)
(tbl-mk))
(kern-party-add-member player ch_wanderer)
| null | https://raw.githubusercontent.com/Elzair/nazghul/8f3a45ed6289cd9f469c4ff618d39366f2fbc1d8/worlds/haxima-1.002/test/player.scm | scheme | ----------------------------------------------------------------------------
test/player.scm - basic player setup
This will create a basic player character and party for testing. Test
scripts can customize it by adding party member and equipment afterwards.
----------------------------------------------------------------------------
tag
name
species
occupation
sprite
starting alignment
base max hit points
max hit points gained per level
base mana points
mana points gained per level
max hit points (kernel will trim based on level)
max mana points (kernel will trim based on level)
character level
dead?
conversation proc
schedule
special ai
personal inventory
readied armaments
tag
sprite
movement description
movement sound
food
gold
formation
campsite map
campsite formation
vehicle
inventory
party members (should be nil for initial load file) |
(kern-mk-char
starting str / int / dex
hit points ( kernel will set to shortly )
mana points ( kernel will set to shortly )
)
(bind
(kern-mk-player
turns to next meal ( 5 hours )
(kern-mk-inventory nil)
)
(tbl-mk))
(kern-party-add-member player ch_wanderer)
|
95d77cb1f49517fe28fc120b7db1d1199ae0509700b78c07199ae1b66299da0a | puppetlabs/pcp-broker | connection_test.clj | (ns puppetlabs.pcp.broker.connection-test
(:require [clojure.test :refer :all]
[puppetlabs.pcp.broker.connection :refer :all]))
;; Unit test: `make-connection` must seed a fresh connection map with the
;; websocket it was given, no endpoint yet, and the expired flag cleared.
;; NOTE(review): the `testing` description reads oddly ("matches represents")
;; -- it is runtime data, so it is left untouched here.
(deftest make-connection-test
  (testing "It returns a map that matches represents a new socket"
    (let [socket (make-connection :dummy-ws {:encode identity :decode identity} "pcp-uri" false)]
      (is (= :dummy-ws (:websocket socket)))
      (is (= nil (:endpoint socket)))
      (is (= false (:expired socket))))))
| null | https://raw.githubusercontent.com/puppetlabs/pcp-broker/7806c6a6045c406c256b2c9d6129382923ba3d03/test/unit/puppetlabs/pcp/broker/connection_test.clj | clojure | (ns puppetlabs.pcp.broker.connection-test
(:require [clojure.test :refer :all]
[puppetlabs.pcp.broker.connection :refer :all]))
(deftest make-connection-test
(testing "It returns a map that matches represents a new socket"
(let [socket (make-connection :dummy-ws {:encode identity :decode identity} "pcp-uri" false)]
(is (= :dummy-ws (:websocket socket)))
(is (= nil (:endpoint socket)))
(is (= false (:expired socket))))))
|
|
245e6533bc229f7cdfcc7785c095762b6f895f4f342a1a74b057fcf5d2a77df6 | senapk/funcional_arcade | brena.hs | import Data.List
import Data.Maybe
-- | Primality by trial division: prime iff at least 2 and no divisor lies
-- strictly between 1 and the number itself.
-- Fix: the previous version counted *all* divisors in [1..x] and compared
-- against 2, which wrongly classified 0, 1 (and negatives) as prime; 'null'
-- also short-circuits on the first divisor found instead of scanning on.
ehPrimo x
  | x < 2     = False
  | otherwise = null [y | y <- [2 .. x - 1], x `mod` y == 0]
primos = [y | y <- [2,3..] , ehPrimo y]
-- | Prefix of the list strictly below the bound @x@.
-- NOTE(review): only meaningful on ascending input (it is applied to the
-- ascending stream 'primos' below) -- confirm before reusing elsewhere.
pegarTantos :: Int -> [Int] -> [Int]
pegarTantos x = takeWhile (< x)
fatoracao x = [y | y <- pegarTantos x primos, x `mod` y == 0]
expoente' x y = [x] ++ (takeWhile (>1) $ tail $ iterate (`div` y) x)
expoente x y = takeWhile (>(-1)) [b | a <- (expoente' x y), let b = if (mod a y /= 0) then -1 else a]
-- | Number of pairs whose first component equals the given key.
frequencia :: Eq a => a -> [(a, b)] -> Int
frequencia chave = length . filter ((== chave) . fst)
-- | One pair (p, v) for every value v in the y-quotient chain of x (for each
-- prime divisor p) that is itself a multiple of p; the number of pairs with
-- first component p equals p's multiplicity in x (e.g. x = 12, p = 2 gives
-- [(2,12),(2,6)]).
factors' :: Int -> [(Int,Int)]
factors' x = [(a,b) | a <- fatoracao x, b <- expoente x a, b `mod` a == 0]
-- | Prime factorisation of x as (prime, multiplicity) pairs, the
-- multiplicity being how often the prime appears in factors' x.
-- NOTE(review): 'nub' is O(n^2) and the n in (m,n) is unused -- harmless at
-- this scale, but worth tidying if inputs grow.
factors :: Int -> [(Int, Int)]
factors x = nub [(m, frequencia m fatores) | (m,n) <- fatores]
    where
        fatores = factors' x
| null | https://raw.githubusercontent.com/senapk/funcional_arcade/7bfbd3d1407c5c8013550691d4fa048d74a53339/base/076/brena.hs | haskell | import Data.List
import Data.Maybe
ehPrimo x
| (length [y | y <- [1..x], x `mod` y == 0]) > 2 = False
| otherwise = True
primos = [y | y <- [2,3..] , ehPrimo y]
pegarTantos :: Int -> [Int] -> [Int]
pegarTantos x = takeWhile (< x)
fatoracao x = [y | y <- pegarTantos x primos, x `mod` y == 0]
expoente' x y = [x] ++ (takeWhile (>1) $ tail $ iterate (`div` y) x)
expoente x y = takeWhile (>(-1)) [b | a <- (expoente' x y), let b = if (mod a y /= 0) then -1 else a]
frequencia :: Eq a => a -> [(a, b)] -> Int
frequencia x xs = length(filter(\(a,b) -> a==x) xs)
factors' :: Int -> [(Int,Int)]
factors' x = [(a,b) | a <- fatoracao x, b <- expoente x a, b `mod` a == 0]
factors :: Int -> [(Int, Int)]
factors x = nub [(m, frequencia m fatores) | (m,n) <- fatores]
where
fatores = factors' x
|
|
818278e4e0c972712e2de76fce8f9b30b682e0a38b184e95026104290c21d164 | CryptoKami/cryptokami-core | NtpCheck.hs | {-# LANGUAGE DataKinds #-}
# LANGUAGE ScopedTypeVariables #
module Pos.NtpCheck
( mkNtpStatusVar
, ntpSettings
, withNtpCheck
, NtpStatus(..)
, NtpCheckMonad
) where
import Universum
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Data.List.NonEmpty as NE
import Data.Time.Units (Microsecond)
import Mockable (Async, Concurrently, CurrentTime, Delay, Mockable, Mockables,
currentTime, withAsync)
import NTP.Client (NtpClientSettings (..), ntpSingleShot, spawnNtpClient)
import Serokell.Util (sec)
import System.Wlog (WithLogger)
import Pos.Core.Slotting (Timestamp (..), diffTimestamp)
import Pos.Infra.Configuration (HasInfraConfiguration, infraConfiguration)
import qualified Pos.Infra.Configuration as Infra
import Pos.Util.Util (median)
type NtpCheckMonad m =
( MonadIO m
, MonadMask m
, MonadBaseControl IO m
, Mockable Async m
, Mockable Concurrently m
, Mockable CurrentTime m
, WithLogger m
, HasInfraConfiguration
)
withNtpCheck :: forall m a. NtpCheckMonad m => NtpClientSettings m -> m a -> m a
withNtpCheck settings action = withAsync (spawnNtpClient settings) (const action)
ntpSettings :: NtpCheckMonad m => (NtpStatus -> m ()) -> NtpClientSettings m
ntpSettings onStatus = NtpClientSettings
{ ntpServers = Infra.ntpServers
, ntpHandler = ntpCheckHandler onStatus
, ntpLogName = "ntp-check"
, ntpResponseTimeout = sec 5
, ntpPollDelay = timeDifferenceWarnInterval
, ntpMeanSelection = median . NE.fromList
}
data NtpStatus = NtpSyncOk | NtpDesync Microsecond
deriving (Eq, Show)
ntpCheckHandler :: NtpCheckMonad m => (NtpStatus -> m a) -> (Microsecond, Microsecond) -> m a
ntpCheckHandler cont (newMargin, transmitTime) = do
let ntpTime = Timestamp $ transmitTime + newMargin
localTime <- Timestamp <$> currentTime
let timeDiff = diffTimestamp ntpTime localTime
let ntpStatus
| timeDiff <= timeDifferenceWarnThreshold = NtpSyncOk
| otherwise = NtpDesync timeDiff
cont ntpStatus
timeDifferenceWarnInterval :: HasInfraConfiguration => Microsecond
timeDifferenceWarnInterval = fromIntegral (Infra.ccTimeDifferenceWarnInterval infraConfiguration)
timeDifferenceWarnThreshold :: HasInfraConfiguration => Microsecond
timeDifferenceWarnThreshold = fromIntegral (Infra.ccTimeDifferenceWarnThreshold infraConfiguration)
type NtpStatusVar = MVar NtpStatus
Helper to get status
mkNtpStatusVar :: ( NtpCheckMonad m , Mockables m [ CurrentTime, Delay] )
=> m NtpStatusVar
mkNtpStatusVar = do
status <- newEmptyMVar
let onStatusHandler = putMVar status
_ <- ntpSingleShot $ ntpSettings onStatusHandler
pure status
| null | https://raw.githubusercontent.com/CryptoKami/cryptokami-core/12ca60a9ad167b6327397b3b2f928c19436ae114/infra/Pos/NtpCheck.hs | haskell | # LANGUAGE DataKinds # | # LANGUAGE ScopedTypeVariables #
module Pos.NtpCheck
( mkNtpStatusVar
, ntpSettings
, withNtpCheck
, NtpStatus(..)
, NtpCheckMonad
) where
import Universum
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Data.List.NonEmpty as NE
import Data.Time.Units (Microsecond)
import Mockable (Async, Concurrently, CurrentTime, Delay, Mockable, Mockables,
currentTime, withAsync)
import NTP.Client (NtpClientSettings (..), ntpSingleShot, spawnNtpClient)
import Serokell.Util (sec)
import System.Wlog (WithLogger)
import Pos.Core.Slotting (Timestamp (..), diffTimestamp)
import Pos.Infra.Configuration (HasInfraConfiguration, infraConfiguration)
import qualified Pos.Infra.Configuration as Infra
import Pos.Util.Util (median)
type NtpCheckMonad m =
( MonadIO m
, MonadMask m
, MonadBaseControl IO m
, Mockable Async m
, Mockable Concurrently m
, Mockable CurrentTime m
, WithLogger m
, HasInfraConfiguration
)
withNtpCheck :: forall m a. NtpCheckMonad m => NtpClientSettings m -> m a -> m a
withNtpCheck settings action = withAsync (spawnNtpClient settings) (const action)
ntpSettings :: NtpCheckMonad m => (NtpStatus -> m ()) -> NtpClientSettings m
ntpSettings onStatus = NtpClientSettings
{ ntpServers = Infra.ntpServers
, ntpHandler = ntpCheckHandler onStatus
, ntpLogName = "ntp-check"
, ntpResponseTimeout = sec 5
, ntpPollDelay = timeDifferenceWarnInterval
, ntpMeanSelection = median . NE.fromList
}
data NtpStatus = NtpSyncOk | NtpDesync Microsecond
deriving (Eq, Show)
ntpCheckHandler :: NtpCheckMonad m => (NtpStatus -> m a) -> (Microsecond, Microsecond) -> m a
ntpCheckHandler cont (newMargin, transmitTime) = do
let ntpTime = Timestamp $ transmitTime + newMargin
localTime <- Timestamp <$> currentTime
let timeDiff = diffTimestamp ntpTime localTime
let ntpStatus
| timeDiff <= timeDifferenceWarnThreshold = NtpSyncOk
| otherwise = NtpDesync timeDiff
cont ntpStatus
timeDifferenceWarnInterval :: HasInfraConfiguration => Microsecond
timeDifferenceWarnInterval = fromIntegral (Infra.ccTimeDifferenceWarnInterval infraConfiguration)
timeDifferenceWarnThreshold :: HasInfraConfiguration => Microsecond
timeDifferenceWarnThreshold = fromIntegral (Infra.ccTimeDifferenceWarnThreshold infraConfiguration)
type NtpStatusVar = MVar NtpStatus
Helper to get status
mkNtpStatusVar :: ( NtpCheckMonad m , Mockables m [ CurrentTime, Delay] )
=> m NtpStatusVar
mkNtpStatusVar = do
status <- newEmptyMVar
let onStatusHandler = putMVar status
_ <- ntpSingleShot $ ntpSettings onStatusHandler
pure status
|
68a0b7894771f1cf2fbc1e366af6e8a708665fa14ca8cf8f4b5fe65f0825a926 | ucsd-progsys/nate | jg_text.mli | (*************************************************************************)
(* *)
(* Objective Caml LablTk library *)
(* *)
, Kyoto University RIMS
(* *)
Copyright 1999 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
(* General Public License, with the special exception on linking *)
(* described in file ../../../LICENSE. *)
(* *)
(*************************************************************************)
$ I d : jg_text.mli , v 1.7 2001/12/07 13:40:00 xleroy Exp $
open Widget
val get_all : text widget -> string
val tag_and_see :
text widget ->
tag:Tk.textTag -> start:Tk.textIndex -> stop:Tk.textIndex -> unit
val output : text widget -> buf:string -> pos:int -> len:int -> unit
val add_scrollbar : text widget -> scrollbar widget
val create_with_scrollbar :
'a widget -> frame widget * text widget * scrollbar widget
val goto_tag : text widget -> tag:string -> unit
val search_string : text widget -> unit
| null | https://raw.githubusercontent.com/ucsd-progsys/nate/8b1267cd8b10283d8bc239d16a28c654a4cb8942/eval/sherrloc/easyocaml%2B%2B/otherlibs/labltk/browser/jg_text.mli | ocaml | ***********************************************************************
Objective Caml LablTk library
General Public License, with the special exception on linking
described in file ../../../LICENSE.
*********************************************************************** | , Kyoto University RIMS
Copyright 1999 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
$ I d : jg_text.mli , v 1.7 2001/12/07 13:40:00 xleroy Exp $
open Widget
val get_all : text widget -> string
val tag_and_see :
text widget ->
tag:Tk.textTag -> start:Tk.textIndex -> stop:Tk.textIndex -> unit
val output : text widget -> buf:string -> pos:int -> len:int -> unit
val add_scrollbar : text widget -> scrollbar widget
val create_with_scrollbar :
'a widget -> frame widget * text widget * scrollbar widget
val goto_tag : text widget -> tag:string -> unit
val search_string : text widget -> unit
|
3dc4325f8893a4b177ba97df324b86a4bdd57aaa482f3cc4dca1bc263c468e21 | ocaml-flambda/ocaml-jst | main.ml | TEST
readonly_files = " foo.ml "
* setup - ocamlc.byte - build - env
* * ocamlc.byte
module = " foo.ml "
* * * ocaml with ocamlcommon
ocaml_script_as_argument = " true "
test_file = " gen_cached_cmi.ml "
arguments = " cached_cmi.ml "
* * * * ocamlc.byte
module = " "
program = " $ { test_build_directory}/main.exe "
libraries + = " ocamlbytecomp ocamltoplevel "
all_modules = " foo.cmo cached_cmi.ml main.ml "
* * * * * run
set OCAMLLIB="${ocamlsrcdir}/stdlib "
arguments = " input.ml "
* * * * * * check - program - output
readonly_files = "foo.ml gen_cached_cmi.ml input.ml"
* setup-ocamlc.byte-build-env
** ocamlc.byte
module = "foo.ml"
*** ocaml with ocamlcommon
ocaml_script_as_argument = "true"
test_file = "gen_cached_cmi.ml"
arguments = "cached_cmi.ml"
**** ocamlc.byte
module = ""
program = "${test_build_directory}/main.exe"
libraries += "ocamlbytecomp ocamltoplevel"
all_modules = "foo.cmo cached_cmi.ml main.ml"
***** run
set OCAMLLIB="${ocamlsrcdir}/stdlib"
arguments = "input.ml"
****** check-program-output
*)
let () =
(* Make sure it's no longer available on disk *)
if Sys.file_exists "foo.cmi" then Sys.remove "foo.cmi";
let module Persistent_signature = Persistent_env.Persistent_signature in
let old_loader = !Persistent_signature.load in
Persistent_signature.load := (fun ~unit_name ->
match unit_name |> Compilation_unit.Name.to_string with
| "Foo" ->
Some { Persistent_signature.
filename = Sys.executable_name
; cmi = Marshal.from_string Cached_cmi.foo 0
}
| _ -> old_loader unit_name);
Toploop.add_hook (function
| Toploop.After_setup ->
Toploop.toplevel_env :=
Env.add_persistent_structure (Ident.create_persistent "Foo")
!Toploop.toplevel_env
| _ -> ());
exit (Topmain.main ())
| null | https://raw.githubusercontent.com/ocaml-flambda/ocaml-jst/1bb6c797df7c63ddae1fc2e6f403a0ee9896cc8e/testsuite/tests/self-contained-toplevel/main.ml | ocaml | Make sure it's no longer available on disk | TEST
readonly_files = " foo.ml "
* setup - ocamlc.byte - build - env
* * ocamlc.byte
module = " foo.ml "
* * * ocaml with ocamlcommon
ocaml_script_as_argument = " true "
test_file = " gen_cached_cmi.ml "
arguments = " cached_cmi.ml "
* * * * ocamlc.byte
module = " "
program = " $ { test_build_directory}/main.exe "
libraries + = " ocamlbytecomp ocamltoplevel "
all_modules = " foo.cmo cached_cmi.ml main.ml "
* * * * * run
set OCAMLLIB="${ocamlsrcdir}/stdlib "
arguments = " input.ml "
* * * * * * check - program - output
readonly_files = "foo.ml gen_cached_cmi.ml input.ml"
* setup-ocamlc.byte-build-env
** ocamlc.byte
module = "foo.ml"
*** ocaml with ocamlcommon
ocaml_script_as_argument = "true"
test_file = "gen_cached_cmi.ml"
arguments = "cached_cmi.ml"
**** ocamlc.byte
module = ""
program = "${test_build_directory}/main.exe"
libraries += "ocamlbytecomp ocamltoplevel"
all_modules = "foo.cmo cached_cmi.ml main.ml"
***** run
set OCAMLLIB="${ocamlsrcdir}/stdlib"
arguments = "input.ml"
****** check-program-output
*)
let () =
if Sys.file_exists "foo.cmi" then Sys.remove "foo.cmi";
let module Persistent_signature = Persistent_env.Persistent_signature in
let old_loader = !Persistent_signature.load in
Persistent_signature.load := (fun ~unit_name ->
match unit_name |> Compilation_unit.Name.to_string with
| "Foo" ->
Some { Persistent_signature.
filename = Sys.executable_name
; cmi = Marshal.from_string Cached_cmi.foo 0
}
| _ -> old_loader unit_name);
Toploop.add_hook (function
| Toploop.After_setup ->
Toploop.toplevel_env :=
Env.add_persistent_structure (Ident.create_persistent "Foo")
!Toploop.toplevel_env
| _ -> ());
exit (Topmain.main ())
|
0035b393294fa9367e46ef65dc59190ef1092a8b7fae4fb0fb7a1b3c8daf5a0a | austinhaas/kanren | tests.cljc | (ns pettomato.kanren.muKanren.tests
(:refer-clojure :exclude [==])
(:require
[clojure.test :refer [is deftest]]
[pettomato.kanren.util.llist :refer [empty-llist llist llist* llist->seq lcons]]
[pettomato.kanren.muKanren.types :refer [lvar]]
[pettomato.kanren.muKanren.core :refer [empty-s ext-s unify walk]]
[pettomato.kanren.muKanren.goals :refer [== succeed fail emptyo conso firsto resto membero appendo anyo alwayso]]
[pettomato.kanren.muKanren.extras :refer [reify-name walk*]]
#?(:clj
[pettomato.kanren.muKanren.extras-macros :refer [fresh conde all run* run]]))
#?(:cljs
(:require-macros
[pettomato.kanren.muKanren.extras-macros :refer [fresh conde all run* run]])))
;; =============================================================================
;; unify
;; -----------------------------------------------------------------------------
;; nil
(deftest unify-nil-object-1
(is (= (unify nil 1 empty-s) false)))
(deftest unify-nil-lvar-1
(let [x (lvar 'x)
os (ext-s x nil empty-s)]
(is (= (unify nil x empty-s) os))))
(deftest unify-nil-lseq-1
(let [x (lvar 'x)]
(is (= (unify nil (lcons 1 x) empty-s) false))))
(deftest unify-nil-map-1
(let [x (lvar 'x)]
(is (= (unify nil {} empty-s) false))))
;; -----------------------------------------------------------------------------
;; object
(deftest unify-object-nil-1
(is (= (unify 1 nil empty-s) false)))
(deftest unify-object-object-1
(is (= (unify 1 1 empty-s) empty-s)))
(deftest unify-object-object-2
(is (= (unify :foo :foo empty-s) empty-s)))
(deftest unify-object-object-3
(is (= (unify 'foo 'foo empty-s) empty-s)))
(deftest unify-object-object-4
(is (= (unify "foo" "foo" empty-s) empty-s)))
(deftest unify-object-object-5
(is (= (unify 1 2 empty-s) false)))
(deftest unify-object-object-6
(is (= (unify 2 1 empty-s) false)))
(deftest unify-object-object-7
(is (= (unify :foo :bar empty-s) false)))
(deftest unify-object-object-8
(is (= (unify 'foo 'bar empty-s) false)))
(deftest unify-object-object-9
(is (= (unify "foo" "bar" empty-s) false)))
(deftest unify-object-lvar-1
(let [x (lvar 'x)
os (ext-s x 1 empty-s)]
(is (= (unify 1 x empty-s) os))))
(deftest unify-object-lcons-1
(let [x (lvar 'x)]
(is (= (unify 1 (lcons 1 'x) empty-s) false))))
(deftest unify-object-seq-1
(is (= (unify 1 '() empty-s) false)))
(deftest unify-object-seq-2
(is (= (unify 1 '[] empty-s) false)))
(deftest unify-object-map-1
(is (= (unify 1 {} empty-s) false)))
;; -----------------------------------------------------------------------------
lvar
(deftest unify-lvar-object-1
(let [x (lvar 'x)
os (ext-s x 1 empty-s)]
(is (= (unify x 1 empty-s) os))))
(deftest unify-lvar-lvar-1
(let [x (lvar 'x)
y (lvar 'y)
os (ext-s x y empty-s)]
(is (= (unify x y empty-s) os))))
(deftest unify-lvar-lcons-1
(let [x (lvar 'x)
y (lvar 'y)
l (lcons 1 y)
os (ext-s x l empty-s)]
(is (= (unify x l empty-s) os))))
(deftest unify-lvar-seq-1
(let [x (lvar 'x)
os (ext-s x [] empty-s)]
(is (= (unify x [] empty-s) os))))
(deftest unify-lvar-seq-2
(let [x (lvar 'x)
os (ext-s x [1 2 3] empty-s)]
(is (= (unify x [1 2 3] empty-s) os))))
(deftest unify-lvar-seq-3
(let [x (lvar 'x)
os (ext-s x '() empty-s)]
(is (= (unify x '() empty-s) os))))
(deftest unify-lvar-seq-4
(let [x (lvar 'x)
os (ext-s x '(1 2 3) empty-s)]
(is (= (unify x '(1 2 3) empty-s) os))))
(deftest unify-lvar-map-1
(let [x (lvar 'x)
os (ext-s x {} empty-s)]
(is (= (unify x {} empty-s) os))))
(deftest unify-lvar-map-2
(let [x (lvar 'x)
os (ext-s x {1 2 3 4} empty-s)]
(is (= (unify x {1 2 3 4} empty-s) os))))
;; -----------------------------------------------------------------------------
;; lcons
(deftest unify-lcons-object-1
(let [x (lvar 'x)]
(is (= (unify (lcons 1 x) 1 empty-s) false))))
(deftest unify-lcons-lvar-1
(let [x (lvar 'x)
y (lvar 'y)
l (lcons 1 y)
os (ext-s x l empty-s)]
(is (= (unify l x empty-s) os))))
(deftest unify-lcons-lcons-1
(let [x (lvar 'x)
y (lvar 'y)
lc1 (lcons 1 x)
lc2 (lcons 1 y)
os (ext-s x y empty-s)]
(is (= (unify lc1 lc2 empty-s) os))))
(deftest unify-lcons-lcons-2
(let [x (lvar 'x)
y (lvar 'y)
z (lvar 'z)
lc1 (lcons 1 (lcons 2 x))
lc2 (lcons 1 (lcons z y))
os (->> empty-s
(ext-s x y)
(ext-s z 2))]
(is (= (unify lc1 lc2 empty-s) os))))
(deftest unify-lcons-lcons-3
(let [x (lvar 'x)
y (lvar 'y)
lc1 (lcons 1 (lcons 2 x))
lc2 (lcons 1 (lcons 2 (lcons 3 y)))
os (ext-s x (lcons 3 y) empty-s)]
(is (= (unify lc1 lc2 empty-s) os))))
(deftest unify-lcons-lcons-4
(let [x (lvar 'x)
y (lvar 'y)
lc1 (lcons 1 (lcons 2 x))
lc2 (lcons 1 (lcons 3 (lcons 4 y)))]
(is (= (unify lc1 lc2 empty-s) false))))
(deftest unify-lcons-lcons-5
(let [x (lvar 'x)
y (lvar 'y)
lc2 (lcons 1 (lcons 2 x))
lc1 (lcons 1 (lcons 3 (lcons 4 y)))]
(is (= (unify lc1 lc2 empty-s) false))))
(deftest unify-lcons-lcons-6
(let [x (lvar 'x)
y (lvar 'y)
lc1 (lcons 1 (lcons 2 x))
lc2 (lcons 1 (lcons 2 y))
os (ext-s x y empty-s)]
(is (= (unify lc1 lc2 empty-s) os))))
(deftest unify-lcons-seq-1
(let [x (lvar 'x)
lc1 (lcons 1 (lcons 2 x))
l1 (llist '(1 2 3 4))
os (ext-s x (llist '(3 4)) empty-s)]
(is (= (unify lc1 l1 empty-s) os))))
(deftest unify-lcons-seq-2
(let [x (lvar 'x)
y (lvar 'y)
lc1 (lcons 1 (lcons y (lcons 3 x)))
l1 (llist '(1 2 3 4))
os (->> empty-s
(ext-s x (llist '(4)))
(ext-s y 2))]
(is (= (unify lc1 l1 empty-s) os))))
(deftest unify-lcons-seq-3
(let [x (lvar 'x)
lc1 (lcons 1 (lcons 2 (lcons 3 x)))
l1 (llist '(1 2 3))
os (ext-s x '() empty-s)]
(is (= (unify lc1 l1 empty-s) os))))
(deftest unify-lcons-seq-4
(let [x (lvar 'x)
lc1 (lcons 1 (lcons 3 x))
l1 '(1 2 3 4)]
(is (= (unify lc1 l1 empty-s) false))))
(deftest unify-lcons-seq-5
(let [x (lvar 'x)
lc1 (lcons 1 (lcons 2 x))
l1 '(1 3 4 5)]
(is (= (unify lc1 l1 empty-s) false))))
(deftest unify-lcons-map-1
(is (= (unify (lcons 1 (lvar 'x)) {} empty-s) false)))
;; -----------------------------------------------------------------------------
;; seq
(deftest unify-seq-object-1
(is (= (unify '() 1 empty-s) false)))
(deftest unify-seq-object-2
(is (= (unify [] 1 empty-s) false)))
(deftest unify-seq-lvar-1
(let [x (lvar 'x)
os (ext-s x [] empty-s)]
(is (= (unify [] x empty-s) os))))
(deftest unify-seq-lcons-1
(let [x (lvar 'x)
lc1 (lcons 1 (lcons 2 x))
l1 (llist '(1 2 3 4))
os (ext-s x (llist '(3 4)) empty-s)]
(is (= (unify l1 lc1 empty-s) os))))
(deftest unify-seq-seq-1
(is (= (unify [1 2 3] [1 2 3] empty-s) empty-s)))
(deftest unify-seq-seq-2
(is (= (unify '(1 2 3) [1 2 3] empty-s) empty-s)))
(deftest unify-seq-seq-3
(is (= (unify '(1 2 3) '(1 2 3) empty-s) empty-s)))
(deftest unify-seq-seq-4
(let [x (lvar 'x)
os (ext-s x 2 empty-s)]
(is (= (unify `(1 ~x 3) `(1 2 3) empty-s) os))))
(deftest unify-seq-seq-5
(is (= (unify [1 2] [1 2 3] empty-s) false)))
(deftest unify-seq-seq-6
(is (= (unify '(1 2) [1 2 3] empty-s) false)))
(deftest unify-seq-seq-7
(is (= (unify [1 2 3] [3 2 1] empty-s) false)))
(deftest unify-seq-seq-8
(is (= (unify '() '() empty-s) empty-s)))
(deftest unify-seq-seq-9
(is (= (unify '() '(1) empty-s) false)))
(deftest unify-seq-seq-10
(is (= (unify '(1) '() empty-s) false)))
(deftest unify-seq-seq-11
(is (= (unify [[1 2]] [[1 2]] empty-s) empty-s)))
(deftest unify-seq-seq-12
(is (= (unify [[1 2]] [[2 1]] empty-s) false)))
(deftest unify-seq-seq-13
(let [x (lvar 'x)
os (ext-s x 1 empty-s)]
(is (= (unify [[x 2]] [[1 2]] empty-s) os))))
(deftest unify-seq-seq-14
(let [x (lvar 'x)
os (ext-s x [1 2] empty-s)]
(is (= (unify [x] [[1 2]] empty-s) os))))
(deftest unify-seq-seq-15
(let [x (lvar 'x) y (lvar 'y)
u (lvar 'u) v (lvar 'v)
os (->> empty-s
(ext-s x 'b)
(ext-s y 'a))]
(is (= (unify ['a x] [y 'b] empty-s) os))))
(deftest unify-seq-map-1
(is (= (unify [] {} empty-s) false)))
(deftest unify-seq-map-2
(is (= (unify '() {} empty-s) false)))
;; -----------------------------------------------------------------------------
;; map
(deftest unify-map-object-1
(is (= (unify {} 1 empty-s) false)))
(deftest unify-map-lvar-1
(let [x (lvar 'x)
os (ext-s x {} empty-s)]
(is (= (unify {} x empty-s) os))))
(deftest unify-map-lcons-1
(let [x (lvar 'x)]
(is (= (unify {} (lcons 1 x) empty-s) false))))
(deftest unify-map-seq-1
(is (= (unify {} '() empty-s) false)))
(deftest unify-map-map-1
(is (= (unify {} {} empty-s) empty-s)))
(deftest unify-map-map-2
(is (= (unify {1 2 3 4} {1 2 3 4} empty-s) empty-s)))
(deftest unify-map-map-3
(is (= (unify {1 2} {1 2 3 4} empty-s) false)))
(deftest unify-map-map-4
(let [x (lvar 'x)
m1 {1 2 3 4}
m2 {1 2 3 x}
os (ext-s x 4 empty-s)]
(is (= (unify m1 m2 empty-s) os))))
(deftest unify-map-map-5
(let [x (lvar 'x)
m1 {1 2 3 4}
m2 {1 4 3 x}]
(is (= (unify m1 m2 empty-s) false))))
;; =============================================================================
;; walk
(defn to-s [v] (reduce (fn [s [k v]] (ext-s k v s)) empty-s v))
(deftest test-basic-walk
(is (= (let [x (lvar 'x)
y (lvar 'y)
ss (to-s [[x 5] [y x]])]
(walk y ss))
5)))
(deftest test-deep-walk
(is (= (let [[x y z c b a :as s] (map lvar '[x y z c b a])
ss (to-s [[x 5] [y x] [z y] [c z] [b c] [a b]])]
(walk a ss))
5)))
;; =============================================================================
;; reify
#_(deftest test-reify-name
(is (= (let [x (lvar 'x)
y (lvar 'y)]
(reify-name (to-s [[x 5] [y x]])))
'_2)))
;; =============================================================================
;; walk*
(deftest test-walk*
(is (= (let [x (lvar 'x)
y (lvar 'y)]
(walk* `(~x ~y) (to-s [[x 5] [y x]])))
'(5 5))))
;; =============================================================================
;; run and unify
(deftest test-basic-unify
(is (= (run* [q]
(== true q))
'(true))))
(deftest test-basic-unify-2
(is (= (run* [q]
(fresh [x y]
(== [x y] [1 5])
(== [x y] q)))
[[1 5]])))
(deftest test-basic-unify-3
(is (= (run* [q]
(fresh [x y]
(== [x y] q)))
'[[_.0 _.1]])))
;; =============================================================================
;; fail
(deftest test-basic-failure
(is (= (run* [q]
fail
(== true q))
[])))
;; =============================================================================
;; Basic
(deftest test-all
(is (= (run* [q]
(all
(== 1 1)
(== q true)))
'(true))))
;; =============================================================================
TRS
(defn pairo [p]
(fresh [a d]
(== (lcons a d) p)))
(defn twino [p]
(fresh [x]
(conso x x p)))
(defn listo [l]
(conde
[(emptyo l) succeed]
[(pairo l)
(fresh [d]
(resto l d)
(listo d))]))
(defn flatteno [s out]
(conde
[(emptyo s) (== '() out)]
[(pairo s)
(fresh [a d res-a res-d]
(conso a d s)
(flatteno a res-a)
(flatteno d res-d)
(appendo res-a res-d out))]
[(conso s '() out)]))
;; =============================================================================
;; conde
(deftest test-basic-conde
(is (= (into #{}
(run* [x]
(conde
[(== x 'olive) succeed]
[succeed succeed]
[(== x 'oil) succeed])))
(into #{}
'[olive _.0 oil]))))
(deftest test-basic-conde-2
(is (= (into #{}
(run* [r]
(fresh [x y]
(conde
[(== 'split x) (== 'pea y)]
[(== 'navy x) (== 'bean y)])
(== (cons x (cons y ())) r))))
(into #{}
'[(split pea) (navy bean)]))))
(defn teacupo [x]
(conde
[(== 'tea x) succeed]
[(== 'cup x) succeed]))
(deftest test-basic-conde-e-3
(is (= (into #{}
(run* [r]
(fresh [x y]
(conde
[(teacupo x) (== true y) succeed]
[(== false x) (== true y)])
(== (cons x (cons y ())) r))))
(into #{} '((false true) (tea true) (cup true))))))
;; =============================================================================
;; conso
(deftest test-conso
(is (= (run* [q]
(fresh [a d]
(conso a d '())))
())))
(deftest test-conso-1
(let [a (lvar 'a)
d (lvar 'd)]
(is (= (run* [q]
(conso a d q))
['[_.0 _.1]]))))
(deftest test-conso-2
(is (= (run* [q]
(== [q] nil))
[])))
(deftest test-conso-3
(is (=
(run* [q]
(conso 'a '() q))
[(llist '(a))])))
(deftest test-conso-4
(is (= (run* [q]
(conso 'a (llist '(d)) q))
[(llist '(a d))])))
(deftest test-conso-empty-list
(is (= (run* [q]
(conso 'a q (llist '(a))))
'[()])))
(deftest test-conso-5
(is (= (run* [q]
(conso q (llist '(b c)) (llist '(a b c))))
'[a])))
;; =============================================================================
firsto
(deftest test-firsto
(is (= (run* [q]
(firsto '(1 2) q))
'(1))))
;; =============================================================================
;; resto
(deftest test-resto
(is (= (run* [q]
(resto q (llist '(1 2))))
[(llist '(_.0 1 2))])))
(deftest test-resto-2
(is (= (run* [q]
(resto q (llist '[1 2])))
[(llist '(_.0 1 2))])))
(deftest test-resto-3
(is (= (run* [q]
(resto (llist [1 2]) q))
[(llist '(2))])))
(deftest test-resto-4
(is (= (run* [q]
(resto (llist [1 2 3 4 5 6 7 8]) q))
[(llist '(2 3 4 5 6 7 8))])))
;; =============================================================================
;; flatteno
(deftest test-flatteno
(is (= (into #{}
(run* [x]
(flatteno '[[a b] c] x)))
(into #{}
'(([[a b] c]) ([a b] (c)) ([a b] c) ([a b] c ())
(a (b) (c)) (a (b) c) (a (b) c ()) (a b (c))
(a b () (c)) (a b c) (a b c ()) (a b () c)
(a b () c ()))))))
;; =============================================================================
;; membero
(deftest membero-1
(is (= (run* [q]
(all
(== q [(lvar 1)])
(membero ['foo (lvar 2)] q)
(membero [(lvar 3) 'bar] q)))
'([[foo bar]]))))
(deftest membero-2
(is (= (into #{}
(run* [q]
(membero q (llist [1 2 3]))))
#{1 2 3})))
(deftest membero-3
(is (= (run* [q]
(membero q (llist [1 1 1 1 1])))
'(1 1 1 1 1))))
;; -----------------------------------------------------------------------------
;; conde clause count
(defn digit-1 [x]
(conde
[(== 0 x)]))
(defn digit-4 [x]
(conde
[(== 0 x)]
[(== 1 x)]
[(== 2 x)]
[(== 3 x)]))
(deftest test-conde-1-clause
(is (= (run* [q]
(fresh [x y]
(digit-1 x)
(digit-1 y)
(== q [x y])))
'([0 0]))))
(deftest test-conde-4-clauses
(is (= (into #{}
(run* [q]
(fresh [x y]
(digit-4 x)
(digit-4 y)
(== q [x y]))))
(into #{}
'([0 0] [0 1] [0 2] [1 0] [0 3] [1 1] [1 2] [2 0]
[1 3] [2 1] [3 0] [2 2] [3 1] [2 3] [3 2] [3 3])))))
;; -----------------------------------------------------------------------------
;; anyo
(deftest test-anyo-1
(is (= (run 1 [q]
(anyo succeed)
(== true q))
(list true))))
(deftest test-anyo-2
(is (= (run 5 [q]
(anyo succeed)
(== true q))
(list true true true true true))))
;; -----------------------------------------------------------------------------
;; divergence
(def f1 (fresh [] f1))
(deftest test-divergence-1
(is (= (run 1 [q]
(conde
[f1]
[(== false false)]))
'(_.0))))
(deftest test-divergence-2
(is (= (run 1 [q]
(conde
[f1 (== false false)]
[(== false false)]))
'(_.0))))
(def f2
(fresh []
(conde
[f2 (conde
[f2]
[(== false false)])]
[(== false false)])))
(deftest test-divergence-3
(is (= (run 5 [q] f2)
'(_.0 _.0 _.0 _.0 _.0))))
;; -----------------------------------------------------------------------------
;; nil in collection
(deftest test-nil-in-coll-1
(is (= (run* [q]
(== q [nil]))
'([nil]))))
(deftest test-nil-in-coll-2
(is (= (run* [q]
(== q [1 nil]))
'([1 nil]))))
(deftest test-nil-in-coll-3
(is (= (run* [q]
(== q [nil 1]))
'([nil 1]))))
(deftest test-nil-in-coll-4
(is (= (run* [q]
(== q '(nil)))
'((nil)))))
(deftest test-nil-in-coll-5
(is (= (run* [q]
(== q {:foo nil}))
'({:foo nil}))))
(deftest test-nil-in-coll-6
(is (= (run* [q]
(== q {nil :foo}))
'({nil :foo}))))
;; -----------------------------------------------------------------------------
;; Occurs Check
#_(deftest test-occurs-check-1
(is (= (run* [q]
(== q [q]))
())))
;; -----------------------------------------------------------------------------
Unifications that should fail
(deftest test-unify-fail-1
(is (= (run* [p] (fresh [a b] (== b ()) (== '(0 1) (lcons a b)) (== p [a b])))
())))
(deftest test-unify-fail-2
(is (= (run* [p] (fresh [a b] (== b '(1)) (== '(0) (lcons a b)) (== p [a b])))
())))
(deftest test-unify-fail-3
(is (= (run* [p] (fresh [a b c d] (== () b) (== '(1) d) (== (lcons a b) (lcons c d)) (== p [a b c d])))
())))
| null | https://raw.githubusercontent.com/austinhaas/kanren/f55b68279ade01dbaae67a074ea3441370a27f9e/test/pettomato/kanren/muKanren/tests.cljc | clojure | =============================================================================
unify
-----------------------------------------------------------------------------
nil
-----------------------------------------------------------------------------
object
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
lcons
-----------------------------------------------------------------------------
seq
-----------------------------------------------------------------------------
map
=============================================================================
walk
=============================================================================
reify
=============================================================================
walk*
=============================================================================
run and unify
=============================================================================
fail
=============================================================================
Basic
=============================================================================
=============================================================================
conde
=============================================================================
conso
=============================================================================
=============================================================================
resto
=============================================================================
flatteno
=============================================================================
membero
-----------------------------------------------------------------------------
conde clause count
-----------------------------------------------------------------------------
anyo
-----------------------------------------------------------------------------
divergence
-----------------------------------------------------------------------------
nil in collection
-----------------------------------------------------------------------------
Occurs Check
----------------------------------------------------------------------------- | (ns pettomato.kanren.muKanren.tests
(:refer-clojure :exclude [==])
(:require
[clojure.test :refer [is deftest]]
[pettomato.kanren.util.llist :refer [empty-llist llist llist* llist->seq lcons]]
[pettomato.kanren.muKanren.types :refer [lvar]]
[pettomato.kanren.muKanren.core :refer [empty-s ext-s unify walk]]
[pettomato.kanren.muKanren.goals :refer [== succeed fail emptyo conso firsto resto membero appendo anyo alwayso]]
[pettomato.kanren.muKanren.extras :refer [reify-name walk*]]
#?(:clj
[pettomato.kanren.muKanren.extras-macros :refer [fresh conde all run* run]]))
#?(:cljs
(:require-macros
[pettomato.kanren.muKanren.extras-macros :refer [fresh conde all run* run]])))
(deftest unify-nil-object-1
(is (= (unify nil 1 empty-s) false)))
(deftest unify-nil-lvar-1
(let [x (lvar 'x)
os (ext-s x nil empty-s)]
(is (= (unify nil x empty-s) os))))
(deftest unify-nil-lseq-1
(let [x (lvar 'x)]
(is (= (unify nil (lcons 1 x) empty-s) false))))
(deftest unify-nil-map-1
(let [x (lvar 'x)]
(is (= (unify nil {} empty-s) false))))
(deftest unify-object-nil-1
(is (= (unify 1 nil empty-s) false)))
(deftest unify-object-object-1
(is (= (unify 1 1 empty-s) empty-s)))
(deftest unify-object-object-2
(is (= (unify :foo :foo empty-s) empty-s)))
(deftest unify-object-object-3
(is (= (unify 'foo 'foo empty-s) empty-s)))
(deftest unify-object-object-4
(is (= (unify "foo" "foo" empty-s) empty-s)))
(deftest unify-object-object-5
(is (= (unify 1 2 empty-s) false)))
(deftest unify-object-object-6
(is (= (unify 2 1 empty-s) false)))
(deftest unify-object-object-7
(is (= (unify :foo :bar empty-s) false)))
(deftest unify-object-object-8
(is (= (unify 'foo 'bar empty-s) false)))
(deftest unify-object-object-9
(is (= (unify "foo" "bar" empty-s) false)))
(deftest unify-object-lvar-1
(let [x (lvar 'x)
os (ext-s x 1 empty-s)]
(is (= (unify 1 x empty-s) os))))
(deftest unify-object-lcons-1
(let [x (lvar 'x)]
(is (= (unify 1 (lcons 1 'x) empty-s) false))))
(deftest unify-object-seq-1
(is (= (unify 1 '() empty-s) false)))
(deftest unify-object-seq-2
(is (= (unify 1 '[] empty-s) false)))
(deftest unify-object-map-1
(is (= (unify 1 {} empty-s) false)))
lvar
(deftest unify-lvar-object-1
(let [x (lvar 'x)
os (ext-s x 1 empty-s)]
(is (= (unify x 1 empty-s) os))))
(deftest unify-lvar-lvar-1
(let [x (lvar 'x)
y (lvar 'y)
os (ext-s x y empty-s)]
(is (= (unify x y empty-s) os))))
(deftest unify-lvar-lcons-1
(let [x (lvar 'x)
y (lvar 'y)
l (lcons 1 y)
os (ext-s x l empty-s)]
(is (= (unify x l empty-s) os))))
(deftest unify-lvar-seq-1
(let [x (lvar 'x)
os (ext-s x [] empty-s)]
(is (= (unify x [] empty-s) os))))
(deftest unify-lvar-seq-2
(let [x (lvar 'x)
os (ext-s x [1 2 3] empty-s)]
(is (= (unify x [1 2 3] empty-s) os))))
(deftest unify-lvar-seq-3
(let [x (lvar 'x)
os (ext-s x '() empty-s)]
(is (= (unify x '() empty-s) os))))
(deftest unify-lvar-seq-4
(let [x (lvar 'x)
os (ext-s x '(1 2 3) empty-s)]
(is (= (unify x '(1 2 3) empty-s) os))))
(deftest unify-lvar-map-1
(let [x (lvar 'x)
os (ext-s x {} empty-s)]
(is (= (unify x {} empty-s) os))))
(deftest unify-lvar-map-2
(let [x (lvar 'x)
os (ext-s x {1 2 3 4} empty-s)]
(is (= (unify x {1 2 3 4} empty-s) os))))
(deftest unify-lcons-object-1
(let [x (lvar 'x)]
(is (= (unify (lcons 1 x) 1 empty-s) false))))
(deftest unify-lcons-lvar-1
(let [x (lvar 'x)
y (lvar 'y)
l (lcons 1 y)
os (ext-s x l empty-s)]
(is (= (unify l x empty-s) os))))
(deftest unify-lcons-lcons-1
(let [x (lvar 'x)
y (lvar 'y)
lc1 (lcons 1 x)
lc2 (lcons 1 y)
os (ext-s x y empty-s)]
(is (= (unify lc1 lc2 empty-s) os))))
(deftest unify-lcons-lcons-2
(let [x (lvar 'x)
y (lvar 'y)
z (lvar 'z)
lc1 (lcons 1 (lcons 2 x))
lc2 (lcons 1 (lcons z y))
os (->> empty-s
(ext-s x y)
(ext-s z 2))]
(is (= (unify lc1 lc2 empty-s) os))))
(deftest unify-lcons-lcons-3
(let [x (lvar 'x)
y (lvar 'y)
lc1 (lcons 1 (lcons 2 x))
lc2 (lcons 1 (lcons 2 (lcons 3 y)))
os (ext-s x (lcons 3 y) empty-s)]
(is (= (unify lc1 lc2 empty-s) os))))
(deftest unify-lcons-lcons-4
(let [x (lvar 'x)
y (lvar 'y)
lc1 (lcons 1 (lcons 2 x))
lc2 (lcons 1 (lcons 3 (lcons 4 y)))]
(is (= (unify lc1 lc2 empty-s) false))))
(deftest unify-lcons-lcons-5
(let [x (lvar 'x)
y (lvar 'y)
lc2 (lcons 1 (lcons 2 x))
lc1 (lcons 1 (lcons 3 (lcons 4 y)))]
(is (= (unify lc1 lc2 empty-s) false))))
(deftest unify-lcons-lcons-6
(let [x (lvar 'x)
y (lvar 'y)
lc1 (lcons 1 (lcons 2 x))
lc2 (lcons 1 (lcons 2 y))
os (ext-s x y empty-s)]
(is (= (unify lc1 lc2 empty-s) os))))
(deftest unify-lcons-seq-1
(let [x (lvar 'x)
lc1 (lcons 1 (lcons 2 x))
l1 (llist '(1 2 3 4))
os (ext-s x (llist '(3 4)) empty-s)]
(is (= (unify lc1 l1 empty-s) os))))
(deftest unify-lcons-seq-2
(let [x (lvar 'x)
y (lvar 'y)
lc1 (lcons 1 (lcons y (lcons 3 x)))
l1 (llist '(1 2 3 4))
os (->> empty-s
(ext-s x (llist '(4)))
(ext-s y 2))]
(is (= (unify lc1 l1 empty-s) os))))
(deftest unify-lcons-seq-3
(let [x (lvar 'x)
lc1 (lcons 1 (lcons 2 (lcons 3 x)))
l1 (llist '(1 2 3))
os (ext-s x '() empty-s)]
(is (= (unify lc1 l1 empty-s) os))))
(deftest unify-lcons-seq-4
(let [x (lvar 'x)
lc1 (lcons 1 (lcons 3 x))
l1 '(1 2 3 4)]
(is (= (unify lc1 l1 empty-s) false))))
(deftest unify-lcons-seq-5
(let [x (lvar 'x)
lc1 (lcons 1 (lcons 2 x))
l1 '(1 3 4 5)]
(is (= (unify lc1 l1 empty-s) false))))
(deftest unify-lcons-map-1
(is (= (unify (lcons 1 (lvar 'x)) {} empty-s) false)))
(deftest unify-seq-object-1
(is (= (unify '() 1 empty-s) false)))
(deftest unify-seq-object-2
(is (= (unify [] 1 empty-s) false)))
(deftest unify-seq-lvar-1
(let [x (lvar 'x)
os (ext-s x [] empty-s)]
(is (= (unify [] x empty-s) os))))
(deftest unify-seq-lcons-1
(let [x (lvar 'x)
lc1 (lcons 1 (lcons 2 x))
l1 (llist '(1 2 3 4))
os (ext-s x (llist '(3 4)) empty-s)]
(is (= (unify l1 lc1 empty-s) os))))
(deftest unify-seq-seq-1
(is (= (unify [1 2 3] [1 2 3] empty-s) empty-s)))
(deftest unify-seq-seq-2
(is (= (unify '(1 2 3) [1 2 3] empty-s) empty-s)))
(deftest unify-seq-seq-3
(is (= (unify '(1 2 3) '(1 2 3) empty-s) empty-s)))
(deftest unify-seq-seq-4
(let [x (lvar 'x)
os (ext-s x 2 empty-s)]
(is (= (unify `(1 ~x 3) `(1 2 3) empty-s) os))))
(deftest unify-seq-seq-5
(is (= (unify [1 2] [1 2 3] empty-s) false)))
(deftest unify-seq-seq-6
(is (= (unify '(1 2) [1 2 3] empty-s) false)))
(deftest unify-seq-seq-7
(is (= (unify [1 2 3] [3 2 1] empty-s) false)))
(deftest unify-seq-seq-8
(is (= (unify '() '() empty-s) empty-s)))
(deftest unify-seq-seq-9
(is (= (unify '() '(1) empty-s) false)))
(deftest unify-seq-seq-10
(is (= (unify '(1) '() empty-s) false)))
(deftest unify-seq-seq-11
(is (= (unify [[1 2]] [[1 2]] empty-s) empty-s)))
(deftest unify-seq-seq-12
(is (= (unify [[1 2]] [[2 1]] empty-s) false)))
(deftest unify-seq-seq-13
(let [x (lvar 'x)
os (ext-s x 1 empty-s)]
(is (= (unify [[x 2]] [[1 2]] empty-s) os))))
(deftest unify-seq-seq-14
(let [x (lvar 'x)
os (ext-s x [1 2] empty-s)]
(is (= (unify [x] [[1 2]] empty-s) os))))
(deftest unify-seq-seq-15
(let [x (lvar 'x) y (lvar 'y)
u (lvar 'u) v (lvar 'v)
os (->> empty-s
(ext-s x 'b)
(ext-s y 'a))]
(is (= (unify ['a x] [y 'b] empty-s) os))))
(deftest unify-seq-map-1
(is (= (unify [] {} empty-s) false)))
(deftest unify-seq-map-2
(is (= (unify '() {} empty-s) false)))
(deftest unify-map-object-1
(is (= (unify {} 1 empty-s) false)))
(deftest unify-map-lvar-1
(let [x (lvar 'x)
os (ext-s x {} empty-s)]
(is (= (unify {} x empty-s) os))))
(deftest unify-map-lcons-1
(let [x (lvar 'x)]
(is (= (unify {} (lcons 1 x) empty-s) false))))
(deftest unify-map-seq-1
(is (= (unify {} '() empty-s) false)))
(deftest unify-map-map-1
(is (= (unify {} {} empty-s) empty-s)))
(deftest unify-map-map-2
(is (= (unify {1 2 3 4} {1 2 3 4} empty-s) empty-s)))
(deftest unify-map-map-3
(is (= (unify {1 2} {1 2 3 4} empty-s) false)))
(deftest unify-map-map-4
(let [x (lvar 'x)
m1 {1 2 3 4}
m2 {1 2 3 x}
os (ext-s x 4 empty-s)]
(is (= (unify m1 m2 empty-s) os))))
(deftest unify-map-map-5
(let [x (lvar 'x)
m1 {1 2 3 4}
m2 {1 4 3 x}]
(is (= (unify m1 m2 empty-s) false))))
(defn to-s [v] (reduce (fn [s [k v]] (ext-s k v s)) empty-s v))
(deftest test-basic-walk
(is (= (let [x (lvar 'x)
y (lvar 'y)
ss (to-s [[x 5] [y x]])]
(walk y ss))
5)))
(deftest test-deep-walk
(is (= (let [[x y z c b a :as s] (map lvar '[x y z c b a])
ss (to-s [[x 5] [y x] [z y] [c z] [b c] [a b]])]
(walk a ss))
5)))
#_(deftest test-reify-name
(is (= (let [x (lvar 'x)
y (lvar 'y)]
(reify-name (to-s [[x 5] [y x]])))
'_2)))
(deftest test-walk*
(is (= (let [x (lvar 'x)
y (lvar 'y)]
(walk* `(~x ~y) (to-s [[x 5] [y x]])))
'(5 5))))
(deftest test-basic-unify
(is (= (run* [q]
(== true q))
'(true))))
(deftest test-basic-unify-2
(is (= (run* [q]
(fresh [x y]
(== [x y] [1 5])
(== [x y] q)))
[[1 5]])))
(deftest test-basic-unify-3
(is (= (run* [q]
(fresh [x y]
(== [x y] q)))
'[[_.0 _.1]])))
(deftest test-basic-failure
(is (= (run* [q]
fail
(== true q))
[])))
(deftest test-all
(is (= (run* [q]
(all
(== 1 1)
(== q true)))
'(true))))
TRS
(defn pairo [p]
(fresh [a d]
(== (lcons a d) p)))
(defn twino [p]
(fresh [x]
(conso x x p)))
(defn listo [l]
(conde
[(emptyo l) succeed]
[(pairo l)
(fresh [d]
(resto l d)
(listo d))]))
(defn flatteno [s out]
(conde
[(emptyo s) (== '() out)]
[(pairo s)
(fresh [a d res-a res-d]
(conso a d s)
(flatteno a res-a)
(flatteno d res-d)
(appendo res-a res-d out))]
[(conso s '() out)]))
(deftest test-basic-conde
(is (= (into #{}
(run* [x]
(conde
[(== x 'olive) succeed]
[succeed succeed]
[(== x 'oil) succeed])))
(into #{}
'[olive _.0 oil]))))
(deftest test-basic-conde-2
(is (= (into #{}
(run* [r]
(fresh [x y]
(conde
[(== 'split x) (== 'pea y)]
[(== 'navy x) (== 'bean y)])
(== (cons x (cons y ())) r))))
(into #{}
'[(split pea) (navy bean)]))))
(defn teacupo [x]
(conde
[(== 'tea x) succeed]
[(== 'cup x) succeed]))
(deftest test-basic-conde-e-3
(is (= (into #{}
(run* [r]
(fresh [x y]
(conde
[(teacupo x) (== true y) succeed]
[(== false x) (== true y)])
(== (cons x (cons y ())) r))))
(into #{} '((false true) (tea true) (cup true))))))
(deftest test-conso
(is (= (run* [q]
(fresh [a d]
(conso a d '())))
())))
(deftest test-conso-1
(let [a (lvar 'a)
d (lvar 'd)]
(is (= (run* [q]
(conso a d q))
['[_.0 _.1]]))))
(deftest test-conso-2
(is (= (run* [q]
(== [q] nil))
[])))
(deftest test-conso-3
(is (=
(run* [q]
(conso 'a '() q))
[(llist '(a))])))
(deftest test-conso-4
(is (= (run* [q]
(conso 'a (llist '(d)) q))
[(llist '(a d))])))
(deftest test-conso-empty-list
(is (= (run* [q]
(conso 'a q (llist '(a))))
'[()])))
(deftest test-conso-5
(is (= (run* [q]
(conso q (llist '(b c)) (llist '(a b c))))
'[a])))
firsto
(deftest test-firsto
(is (= (run* [q]
(firsto '(1 2) q))
'(1))))
(deftest test-resto
(is (= (run* [q]
(resto q (llist '(1 2))))
[(llist '(_.0 1 2))])))
(deftest test-resto-2
(is (= (run* [q]
(resto q (llist '[1 2])))
[(llist '(_.0 1 2))])))
(deftest test-resto-3
(is (= (run* [q]
(resto (llist [1 2]) q))
[(llist '(2))])))
(deftest test-resto-4
(is (= (run* [q]
(resto (llist [1 2 3 4 5 6 7 8]) q))
[(llist '(2 3 4 5 6 7 8))])))
(deftest test-flatteno
(is (= (into #{}
(run* [x]
(flatteno '[[a b] c] x)))
(into #{}
'(([[a b] c]) ([a b] (c)) ([a b] c) ([a b] c ())
(a (b) (c)) (a (b) c) (a (b) c ()) (a b (c))
(a b () (c)) (a b c) (a b c ()) (a b () c)
(a b () c ()))))))
(deftest membero-1
(is (= (run* [q]
(all
(== q [(lvar 1)])
(membero ['foo (lvar 2)] q)
(membero [(lvar 3) 'bar] q)))
'([[foo bar]]))))
(deftest membero-2
(is (= (into #{}
(run* [q]
(membero q (llist [1 2 3]))))
#{1 2 3})))
(deftest membero-3
(is (= (run* [q]
(membero q (llist [1 1 1 1 1])))
'(1 1 1 1 1))))
(defn digit-1 [x]
(conde
[(== 0 x)]))
(defn digit-4 [x]
(conde
[(== 0 x)]
[(== 1 x)]
[(== 2 x)]
[(== 3 x)]))
(deftest test-conde-1-clause
(is (= (run* [q]
(fresh [x y]
(digit-1 x)
(digit-1 y)
(== q [x y])))
'([0 0]))))
(deftest test-conde-4-clauses
(is (= (into #{}
(run* [q]
(fresh [x y]
(digit-4 x)
(digit-4 y)
(== q [x y]))))
(into #{}
'([0 0] [0 1] [0 2] [1 0] [0 3] [1 1] [1 2] [2 0]
[1 3] [2 1] [3 0] [2 2] [3 1] [2 3] [3 2] [3 3])))))
(deftest test-anyo-1
(is (= (run 1 [q]
(anyo succeed)
(== true q))
(list true))))
(deftest test-anyo-2
(is (= (run 5 [q]
(anyo succeed)
(== true q))
(list true true true true true))))
(def f1 (fresh [] f1))
(deftest test-divergence-1
(is (= (run 1 [q]
(conde
[f1]
[(== false false)]))
'(_.0))))
(deftest test-divergence-2
(is (= (run 1 [q]
(conde
[f1 (== false false)]
[(== false false)]))
'(_.0))))
(def f2
(fresh []
(conde
[f2 (conde
[f2]
[(== false false)])]
[(== false false)])))
(deftest test-divergence-3
(is (= (run 5 [q] f2)
'(_.0 _.0 _.0 _.0 _.0))))
(deftest test-nil-in-coll-1
(is (= (run* [q]
(== q [nil]))
'([nil]))))
(deftest test-nil-in-coll-2
(is (= (run* [q]
(== q [1 nil]))
'([1 nil]))))
(deftest test-nil-in-coll-3
(is (= (run* [q]
(== q [nil 1]))
'([nil 1]))))
(deftest test-nil-in-coll-4
(is (= (run* [q]
(== q '(nil)))
'((nil)))))
(deftest test-nil-in-coll-5
(is (= (run* [q]
(== q {:foo nil}))
'({:foo nil}))))
(deftest test-nil-in-coll-6
(is (= (run* [q]
(== q {nil :foo}))
'({nil :foo}))))
#_(deftest test-occurs-check-1
(is (= (run* [q]
(== q [q]))
())))
Unifications that should fail
(deftest test-unify-fail-1
(is (= (run* [p] (fresh [a b] (== b ()) (== '(0 1) (lcons a b)) (== p [a b])))
())))
(deftest test-unify-fail-2
(is (= (run* [p] (fresh [a b] (== b '(1)) (== '(0) (lcons a b)) (== p [a b])))
())))
(deftest test-unify-fail-3
(is (= (run* [p] (fresh [a b c d] (== () b) (== '(1) d) (== (lcons a b) (lcons c d)) (== p [a b c d])))
())))
|
c195284cf99ba4b56bdb24d22dca767ec6b31ece0164497965c53a5b5c143aef | jfmcbrayer/germinal | middleware.lisp | (in-package :germinal)
(defun basic-logging (handler)
"Middleware that logs requests to *standard-output*."
(lambda (request)
(let ((response (funcall handler request)))
(format
*standard-output*
"[~A] ~A \"~A\" ~A ~A ~%"
(local-time:now)
(usocket:host-to-hostname (request-client-address request))
(request-pathinfo request)
(response-status response)
(response-meta response))
response)))
(defun validate-server-name (handler)
"Middleware that ensures the requested host matches *germinal-server-name*."
(lambda (request)
(let* ((url (request-url request))
(host (uri-host url)))
(if (not (scan *germinal-server-name* host))
(make-response 51 "Not Found")
(funcall handler request)))))
(defun gemini-app (request)
(serve-route request))
(defun middleware-chain (middlewares)
(reduce
#'funcall
(remove-if
#'null
middlewares)
:initial-value #'gemini-app
:from-end t))
| null | https://raw.githubusercontent.com/jfmcbrayer/germinal/cfc5d3b6a81cc47fd01d4a67decd07ec81eef048/middleware.lisp | lisp | (in-package :germinal)
(defun basic-logging (handler)
"Middleware that logs requests to *standard-output*."
(lambda (request)
(let ((response (funcall handler request)))
(format
*standard-output*
"[~A] ~A \"~A\" ~A ~A ~%"
(local-time:now)
(usocket:host-to-hostname (request-client-address request))
(request-pathinfo request)
(response-status response)
(response-meta response))
response)))
(defun validate-server-name (handler)
"Middleware that ensures the requested host matches *germinal-server-name*."
(lambda (request)
(let* ((url (request-url request))
(host (uri-host url)))
(if (not (scan *germinal-server-name* host))
(make-response 51 "Not Found")
(funcall handler request)))))
(defun gemini-app (request)
(serve-route request))
(defun middleware-chain (middlewares)
(reduce
#'funcall
(remove-if
#'null
middlewares)
:initial-value #'gemini-app
:from-end t))
|
|
8d0fe058c37d9ebe0345eb7d81cd46159b163a6a2da0cd95b09290e44e0d495e | Andromedans/andromeda | external.mli | (** Lookup an external value, if it exists. *)
val lookup : string -> Runtime.value option
| null | https://raw.githubusercontent.com/Andromedans/andromeda/a5c678450e6c6d4a7cd5eee1196bde558541b994/src/runtime/external.mli | ocaml | * Lookup an external value, if it exists. | val lookup : string -> Runtime.value option
|
2d6d8e6ad013b0adc772ea0f3a5412fba852ee64cde2397b3b4d49494988315c | caisah/sicp-exercises-and-examples | ex_2.83.scm | ;; Suppose you are designing a generic arithmetic system for dealing with the tower of
types shown in . : integer , rational , real , complex . For each type ( except complex ) ,
design a procedure that raises objects of that type one level in the tower . Show how
;; to install a generic raise operation that will work for each type (except complex).
(define (raise x) apply-generic 'raise x)
(put 'raise 'integer
(lambda (n) (make-rational n 1)))
(put 'raise 'rational
(lambda (n) (make-real (/ (numer n) (denom n)))))
(put 'raise 'real
(lambda (n) (make-from-real-imag n 0)))
| null | https://raw.githubusercontent.com/caisah/sicp-exercises-and-examples/605c698d7495aa3474c2b6edcd1312cb16c5b5cb/2.5.2-combining_data_of_different_types/ex_2.83.scm | scheme | Suppose you are designing a generic arithmetic system for dealing with the tower of
to install a generic raise operation that will work for each type (except complex). | types shown in . : integer , rational , real , complex . For each type ( except complex ) ,
design a procedure that raises objects of that type one level in the tower . Show how
(define (raise x) apply-generic 'raise x)
(put 'raise 'integer
(lambda (n) (make-rational n 1)))
(put 'raise 'rational
(lambda (n) (make-real (/ (numer n) (denom n)))))
(put 'raise 'real
(lambda (n) (make-from-real-imag n 0)))
|
f00c5546ee7843ee59fe30a37faba6d6fdac8bcfc2037de0fc01a94a733ed20f | geophf/1HaskellADay | Exercise.hs | # LANGUAGE OverloadedStrings , QuasiQuotes #
module Y2018.M04.D18.Exercise where
Picking up where we left off on the World Policy Journal articles , see , e.g. :
import Y2018.M04.D02.Exercise
-- I realized I never stored the Raw JSON. We need to do that, in case we mess
-- up along the way, we can always go back to the source JSON and see what went
-- wrong.
import Data.Aeson
import Data.Aeson.Encode.Pretty
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
below imports available via 1HaskellADay git repository
import Store.SQL.Connection
import Store.SQL.Util.Indexed
-- you can work either with the json stored locally:
import Y2018.M04.D02.Exercise
-- or you can download some fresh articles from the REST endpoint:
import Y2018.M04.D13.Exercise
insertJSONs :: Connection -> [Value] -> IO [Index]
insertJSONs conn jsons = undefined
jsonStmt :: Query
jsonStmt = [sql|INSERT INTO article_json (json) VALUES (?) returning id|]
no join table here , as it 's a one - to - one relationship , but we do need the
-- id as the json_id for inserting the eventual parsed article.
read in a set of posts ( see D02 ) and store the source / raw article JSON .
-- How many articles did you insert?
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2018/M04/D18/Exercise.hs | haskell | I realized I never stored the Raw JSON. We need to do that, in case we mess
up along the way, we can always go back to the source JSON and see what went
wrong.
you can work either with the json stored locally:
or you can download some fresh articles from the REST endpoint:
id as the json_id for inserting the eventual parsed article.
How many articles did you insert? | # LANGUAGE OverloadedStrings , QuasiQuotes #
module Y2018.M04.D18.Exercise where
Picking up where we left off on the World Policy Journal articles , see , e.g. :
import Y2018.M04.D02.Exercise
import Data.Aeson
import Data.Aeson.Encode.Pretty
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
below imports available via 1HaskellADay git repository
import Store.SQL.Connection
import Store.SQL.Util.Indexed
import Y2018.M04.D02.Exercise
import Y2018.M04.D13.Exercise
insertJSONs :: Connection -> [Value] -> IO [Index]
insertJSONs conn jsons = undefined
jsonStmt :: Query
jsonStmt = [sql|INSERT INTO article_json (json) VALUES (?) returning id|]
no join table here , as it 's a one - to - one relationship , but we do need the
read in a set of posts ( see D02 ) and store the source / raw article JSON .
|
6d3bb37b80353ffa3bf0c1f10f28c4f7a9a5fb45695245acf031018d6890e3de | silkapp/rest | DashedName.hs | # LANGUAGE ScopedTypeVariables #
module Api.Test.DashedName (resource) where
import Control.Monad.Reader
import Control.Monad.Trans.Except
import Rest
import qualified Rest.Resource as R
import Api.Test (WithText)
type SiteId = String
type WithSiteSubscription = ReaderT SiteId WithText
resource :: Resource WithText WithSiteSubscription SiteId Void Void
resource = mkResourceReader
{ R.name = "foo-bar"
, R.schema = noListing $ named [("id", singleRead id)]
, R.remove = Just remove
}
remove :: Handler WithSiteSubscription
remove = mkConstHandler id handler
where
handler :: ExceptT Reason_ WithSiteSubscription ()
handler = return ()
| null | https://raw.githubusercontent.com/silkapp/rest/f0462fc36709407f236f57064d8e37c77bdf8a79/rest-example/example-api/Api/Test/DashedName.hs | haskell | # LANGUAGE ScopedTypeVariables #
module Api.Test.DashedName (resource) where
import Control.Monad.Reader
import Control.Monad.Trans.Except
import Rest
import qualified Rest.Resource as R
import Api.Test (WithText)
type SiteId = String
type WithSiteSubscription = ReaderT SiteId WithText
resource :: Resource WithText WithSiteSubscription SiteId Void Void
resource = mkResourceReader
{ R.name = "foo-bar"
, R.schema = noListing $ named [("id", singleRead id)]
, R.remove = Just remove
}
remove :: Handler WithSiteSubscription
remove = mkConstHandler id handler
where
handler :: ExceptT Reason_ WithSiteSubscription ()
handler = return ()
|
|
bb7f294dc8623a9af62dcf19be51b1e4dcd41bdd1aa8675b6a975fd7ac415fda | project-oak/hafnium-verification | ProcAttributes.ml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(** Attributes of a procedure. *)
open! IStd
module F = Format
(** Type for ObjC accessors *)
type objc_accessor_type = Objc_getter of Struct.field | Objc_setter of Struct.field
[@@deriving compare]
let kind_of_objc_accessor_type accessor =
match accessor with Objc_getter _ -> "getter" | Objc_setter _ -> "setter"
let pp_objc_accessor_type fmt objc_accessor_type =
let fieldname, typ, annots =
match objc_accessor_type with Objc_getter field | Objc_setter field -> field
in
F.fprintf fmt "%s<%a:%a@,[%a]>"
(kind_of_objc_accessor_type objc_accessor_type)
Fieldname.pp fieldname (Typ.pp Pp.text) typ
(Pp.semicolon_seq ~print_env:Pp.text_break (Pp.pair ~fst:Annot.pp ~snd:F.pp_print_bool))
annots
type var_data = {name: Mangled.t; typ: Typ.t; modify_in_block: bool; is_constexpr: bool}
[@@deriving compare]
let pp_var_data fmt {name; typ; modify_in_block} =
F.fprintf fmt "@[<h>{ name=@ %a;@ typ=@ %a;@ modify_in_block=@ %b@ }@]" Mangled.pp name
(Typ.pp_full Pp.text) typ modify_in_block
type t =
{ access: PredSymb.access (** visibility access *)
; captured: (Mangled.t * Typ.t) list (** name and type of variables captured in blocks *)
; exceptions: string list (** exceptions thrown by the procedure *)
; formals: (Mangled.t * Typ.t) list (** name and type of formal parameters *)
; const_formals: int list (** list of indices of formals that are const-qualified *)
; is_abstract: bool (** the procedure is abstract *)
; is_biabduction_model: bool (** the procedure is a model for the biabduction analysis *)
; is_bridge_method: bool (** the procedure is a bridge method *)
; is_defined: bool (** true if the procedure is defined, and not just declared *)
; is_cpp_noexcept_method: bool (** the procedure is an C++ method annotated with "noexcept" *)
* the procedure is a Java synchronized method
; is_no_return: bool (** the procedure is known not to return *)
; is_specialized: bool (** the procedure is a clone specialized for dynamic dispatch handling *)
; is_synthetic_method: bool (** the procedure is a synthetic method *)
; is_variadic: bool (** the procedure is variadic, only supported for Clang procedures *)
; sentinel_attr: (int * int) option (** __attribute__((sentinel(int, int))) *)
; clang_method_kind: ClangMethodKind.t (** the kind of method the procedure is *)
; loc: Location.t (** location of this procedure in the source code *)
; translation_unit: SourceFile.t (** translation unit to which the procedure belongs *)
; mutable locals: var_data list (** name, type and attributes of local variables *)
; method_annotation: Annot.Method.t (** annotations for all methods *)
; objc_accessor: objc_accessor_type option (** type of ObjC accessor, if any *)
; proc_name: Procname.t (** name of the procedure *)
; ret_type: Typ.t (** return type *)
; has_added_return_param: bool (** whether or not a return param was added *) }
let default translation_unit proc_name =
{ access= PredSymb.Default
; captured= []
; exceptions= []
; formals= []
; const_formals= []
; is_abstract= false
; is_biabduction_model= false
; is_bridge_method= false
; is_cpp_noexcept_method= false
; is_defined= false
; is_java_synchronized_method= false
; is_no_return= false
; is_specialized= false
; is_synthetic_method= false
; is_variadic= false
; sentinel_attr= None
; clang_method_kind= ClangMethodKind.C_FUNCTION
; loc= Location.dummy
; translation_unit
; locals= []
; has_added_return_param= false
; method_annotation= Annot.Method.empty
; objc_accessor= None
; proc_name
; ret_type= Typ.mk Typ.Tvoid }
let pp_parameters =
Pp.semicolon_seq ~print_env:Pp.text_break (Pp.pair ~fst:Mangled.pp ~snd:(Typ.pp_full Pp.text))
let pp f
({ access
; captured
; exceptions
; formals
; const_formals
; is_abstract
; is_biabduction_model
; is_bridge_method
; is_defined
; is_cpp_noexcept_method
; is_java_synchronized_method
; is_no_return
; is_specialized
; is_synthetic_method
; is_variadic
; sentinel_attr
; clang_method_kind
; loc
; translation_unit
; locals
; has_added_return_param
; method_annotation
; objc_accessor
; proc_name
; ret_type }[@warning "+9"]) =
let default = default translation_unit proc_name in
let pp_bool_default ~default title b f () =
if not (Bool.equal default b) then F.fprintf f "; %s= %b@," title b
in
F.fprintf f "@[<v>{ proc_name= %a@,; translation_unit= %a@," Procname.pp proc_name SourceFile.pp
translation_unit ;
if not (PredSymb.equal_access default.access access) then
F.fprintf f "; access= %a@," (Pp.of_string ~f:PredSymb.string_of_access) access ;
if not ([%compare.equal: (Mangled.t * Typ.t) list] default.captured captured) then
F.fprintf f "; captured= [@[%a@]]@," pp_parameters captured ;
if not ([%compare.equal: string list] default.exceptions exceptions) then
F.fprintf f "; exceptions= [@[%a@]]@,"
(Pp.semicolon_seq ~print_env:Pp.text_break F.pp_print_string)
exceptions ;
(* always print formals *)
F.fprintf f "; formals= [@[%a@]]@," pp_parameters formals ;
if not ([%compare.equal: int list] default.const_formals const_formals) then
F.fprintf f "; const_formals= [@[%a@]]@,"
(Pp.semicolon_seq ~print_env:Pp.text_break F.pp_print_int)
const_formals ;
pp_bool_default ~default:default.is_abstract "is_abstract" is_abstract f () ;
pp_bool_default ~default:default.is_biabduction_model "is_model" is_biabduction_model f () ;
pp_bool_default ~default:default.is_bridge_method "is_bridge_method" is_bridge_method f () ;
pp_bool_default ~default:default.is_cpp_noexcept_method "is_cpp_noexcept_method"
is_cpp_noexcept_method f () ;
pp_bool_default ~default:default.is_defined "is_defined" is_defined f () ;
pp_bool_default ~default:default.is_java_synchronized_method "is_java_synchronized_method"
is_java_synchronized_method f () ;
pp_bool_default ~default:default.is_no_return "is_no_return" is_no_return f () ;
pp_bool_default ~default:default.is_specialized "is_specialized" is_specialized f () ;
pp_bool_default ~default:default.is_synthetic_method "is_synthetic_method" is_synthetic_method f
() ;
pp_bool_default ~default:default.is_variadic "is_variadic" is_variadic f () ;
if not ([%compare.equal: (int * int) option] default.sentinel_attr sentinel_attr) then
F.fprintf f "; sentinel_attr= %a@,"
(Pp.option (Pp.pair ~fst:F.pp_print_int ~snd:F.pp_print_int))
sentinel_attr ;
if not (ClangMethodKind.equal default.clang_method_kind clang_method_kind) then
F.fprintf f "; clang_method_kind= %a@,"
(Pp.of_string ~f:ClangMethodKind.to_string)
clang_method_kind ;
if not (Location.equal default.loc loc) then F.fprintf f "; loc= %a@," Location.pp_file_pos loc ;
F.fprintf f "; locals= [@[%a@]]@," (Pp.semicolon_seq ~print_env:Pp.text_break pp_var_data) locals ;
pp_bool_default ~default:default.has_added_return_param "has_added_return_param"
has_added_return_param f () ;
if not (Annot.Method.is_empty method_annotation) then
F.fprintf f "; method_annotation= %a@," (Annot.Method.pp "") method_annotation ;
if not ([%compare.equal: objc_accessor_type option] default.objc_accessor objc_accessor) then
F.fprintf f "; objc_accessor= %a@," (Pp.option pp_objc_accessor_type) objc_accessor ;
always print ret type
F.fprintf f "; ret_type= %a @," (Typ.pp_full Pp.text) ret_type ;
F.fprintf f "; proc_id= %a }@]" Procname.pp_unique_id proc_name
module SQLite = SqliteUtils.MarshalledDataNOTForComparison (struct
type nonrec t = t
end)
| null | https://raw.githubusercontent.com/project-oak/hafnium-verification/6071eff162148e4d25a0fedaea003addac242ace/experiments/ownership-inference/infer/infer/src/IR/ProcAttributes.ml | ocaml | * Attributes of a procedure.
* Type for ObjC accessors
* visibility access
* name and type of variables captured in blocks
* exceptions thrown by the procedure
* name and type of formal parameters
* list of indices of formals that are const-qualified
* the procedure is abstract
* the procedure is a model for the biabduction analysis
* the procedure is a bridge method
* true if the procedure is defined, and not just declared
* the procedure is an C++ method annotated with "noexcept"
* the procedure is known not to return
* the procedure is a clone specialized for dynamic dispatch handling
* the procedure is a synthetic method
* the procedure is variadic, only supported for Clang procedures
* __attribute__((sentinel(int, int)))
* the kind of method the procedure is
* location of this procedure in the source code
* translation unit to which the procedure belongs
* name, type and attributes of local variables
* annotations for all methods
* type of ObjC accessor, if any
* name of the procedure
* return type
* whether or not a return param was added
always print formals |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module F = Format
type objc_accessor_type = Objc_getter of Struct.field | Objc_setter of Struct.field
[@@deriving compare]
let kind_of_objc_accessor_type accessor =
match accessor with Objc_getter _ -> "getter" | Objc_setter _ -> "setter"
let pp_objc_accessor_type fmt objc_accessor_type =
let fieldname, typ, annots =
match objc_accessor_type with Objc_getter field | Objc_setter field -> field
in
F.fprintf fmt "%s<%a:%a@,[%a]>"
(kind_of_objc_accessor_type objc_accessor_type)
Fieldname.pp fieldname (Typ.pp Pp.text) typ
(Pp.semicolon_seq ~print_env:Pp.text_break (Pp.pair ~fst:Annot.pp ~snd:F.pp_print_bool))
annots
type var_data = {name: Mangled.t; typ: Typ.t; modify_in_block: bool; is_constexpr: bool}
[@@deriving compare]
let pp_var_data fmt {name; typ; modify_in_block} =
F.fprintf fmt "@[<h>{ name=@ %a;@ typ=@ %a;@ modify_in_block=@ %b@ }@]" Mangled.pp name
(Typ.pp_full Pp.text) typ modify_in_block
type t =
* the procedure is a Java synchronized method
let default translation_unit proc_name =
{ access= PredSymb.Default
; captured= []
; exceptions= []
; formals= []
; const_formals= []
; is_abstract= false
; is_biabduction_model= false
; is_bridge_method= false
; is_cpp_noexcept_method= false
; is_defined= false
; is_java_synchronized_method= false
; is_no_return= false
; is_specialized= false
; is_synthetic_method= false
; is_variadic= false
; sentinel_attr= None
; clang_method_kind= ClangMethodKind.C_FUNCTION
; loc= Location.dummy
; translation_unit
; locals= []
; has_added_return_param= false
; method_annotation= Annot.Method.empty
; objc_accessor= None
; proc_name
; ret_type= Typ.mk Typ.Tvoid }
let pp_parameters =
Pp.semicolon_seq ~print_env:Pp.text_break (Pp.pair ~fst:Mangled.pp ~snd:(Typ.pp_full Pp.text))
let pp f
({ access
; captured
; exceptions
; formals
; const_formals
; is_abstract
; is_biabduction_model
; is_bridge_method
; is_defined
; is_cpp_noexcept_method
; is_java_synchronized_method
; is_no_return
; is_specialized
; is_synthetic_method
; is_variadic
; sentinel_attr
; clang_method_kind
; loc
; translation_unit
; locals
; has_added_return_param
; method_annotation
; objc_accessor
; proc_name
; ret_type }[@warning "+9"]) =
let default = default translation_unit proc_name in
let pp_bool_default ~default title b f () =
if not (Bool.equal default b) then F.fprintf f "; %s= %b@," title b
in
F.fprintf f "@[<v>{ proc_name= %a@,; translation_unit= %a@," Procname.pp proc_name SourceFile.pp
translation_unit ;
if not (PredSymb.equal_access default.access access) then
F.fprintf f "; access= %a@," (Pp.of_string ~f:PredSymb.string_of_access) access ;
if not ([%compare.equal: (Mangled.t * Typ.t) list] default.captured captured) then
F.fprintf f "; captured= [@[%a@]]@," pp_parameters captured ;
if not ([%compare.equal: string list] default.exceptions exceptions) then
F.fprintf f "; exceptions= [@[%a@]]@,"
(Pp.semicolon_seq ~print_env:Pp.text_break F.pp_print_string)
exceptions ;
F.fprintf f "; formals= [@[%a@]]@," pp_parameters formals ;
if not ([%compare.equal: int list] default.const_formals const_formals) then
F.fprintf f "; const_formals= [@[%a@]]@,"
(Pp.semicolon_seq ~print_env:Pp.text_break F.pp_print_int)
const_formals ;
pp_bool_default ~default:default.is_abstract "is_abstract" is_abstract f () ;
pp_bool_default ~default:default.is_biabduction_model "is_model" is_biabduction_model f () ;
pp_bool_default ~default:default.is_bridge_method "is_bridge_method" is_bridge_method f () ;
pp_bool_default ~default:default.is_cpp_noexcept_method "is_cpp_noexcept_method"
is_cpp_noexcept_method f () ;
pp_bool_default ~default:default.is_defined "is_defined" is_defined f () ;
pp_bool_default ~default:default.is_java_synchronized_method "is_java_synchronized_method"
is_java_synchronized_method f () ;
pp_bool_default ~default:default.is_no_return "is_no_return" is_no_return f () ;
pp_bool_default ~default:default.is_specialized "is_specialized" is_specialized f () ;
pp_bool_default ~default:default.is_synthetic_method "is_synthetic_method" is_synthetic_method f
() ;
pp_bool_default ~default:default.is_variadic "is_variadic" is_variadic f () ;
if not ([%compare.equal: (int * int) option] default.sentinel_attr sentinel_attr) then
F.fprintf f "; sentinel_attr= %a@,"
(Pp.option (Pp.pair ~fst:F.pp_print_int ~snd:F.pp_print_int))
sentinel_attr ;
if not (ClangMethodKind.equal default.clang_method_kind clang_method_kind) then
F.fprintf f "; clang_method_kind= %a@,"
(Pp.of_string ~f:ClangMethodKind.to_string)
clang_method_kind ;
if not (Location.equal default.loc loc) then F.fprintf f "; loc= %a@," Location.pp_file_pos loc ;
F.fprintf f "; locals= [@[%a@]]@," (Pp.semicolon_seq ~print_env:Pp.text_break pp_var_data) locals ;
pp_bool_default ~default:default.has_added_return_param "has_added_return_param"
has_added_return_param f () ;
if not (Annot.Method.is_empty method_annotation) then
F.fprintf f "; method_annotation= %a@," (Annot.Method.pp "") method_annotation ;
if not ([%compare.equal: objc_accessor_type option] default.objc_accessor objc_accessor) then
F.fprintf f "; objc_accessor= %a@," (Pp.option pp_objc_accessor_type) objc_accessor ;
always print ret type
F.fprintf f "; ret_type= %a @," (Typ.pp_full Pp.text) ret_type ;
F.fprintf f "; proc_id= %a }@]" Procname.pp_unique_id proc_name
module SQLite = SqliteUtils.MarshalledDataNOTForComparison (struct
type nonrec t = t
end)
|
b41e514e7b00646a38e45507035927e933e1525c692af6363ea41c14bb5c98a6 | timbod7/haskell-chart | ExampleStocks.hs | module ExampleStocks where
import Data.Time.Calendar
import Data.Time.LocalTime
mkDate :: Integer -> LocalTime
mkDate jday =
LocalTime (ModifiedJulianDay jday) midnight
Price data imported from Yahoo : low , open , close , high
pricesAAPL :: [(LocalTime,(Double,Double,Double,Double))]
pricesMSFT :: [(LocalTime,(Double,Double,Double,Double))]
pricesARMH :: [(LocalTime,(Double,Double,Double,Double))]
pricesAAPL =
[ (mkDate 55105,(180.7,185.35,180.86,186.22))
, (mkDate 55104,(182.61,186.13,185.35,186.45))
, (mkDate 55103,(184.31,186.73,185.38,187.4))
, (mkDate 55102,(183.33,183.87,186.15,186.68))
, (mkDate 55099,(181.44,182.01,182.37,185.5))
, (mkDate 55098,(182.77,187.2,183.82,187.7))
, (mkDate 55097,(185.03,185.4,185.5,188.9))
, (mkDate 55096,(182.85,185.19,184.48,185.38))
, (mkDate 55095,(181.62,184.29,184.02,185.16))
, (mkDate 55092,(184.76,185.83,185.02,186.55))
, (mkDate 55091,(181.97,181.98,184.55,186.79))
, (mkDate 55090,(177.88,177.99,181.87,182.75))
, (mkDate 55089,(173.59,174.04,175.16,175.65))
, (mkDate 55088,(170.25,170.83,173.72,173.9))
, (mkDate 55085,(170.87,172.91,172.16,173.18))
, (mkDate 55084,(170.81,172.06,172.56,173.25))
, (mkDate 55083,(169.7,172.78,171.14,174.47))
, (mkDate 55082,(172.0,172.98,172.93,173.14))
, (mkDate 55078,(167.09,167.28,170.31,170.7))
, (mkDate 55077,(165.0,166.44,166.55,167.1))
, (mkDate 55076,(164.11,164.62,165.18,167.61))
, (mkDate 55075,(164.94,167.99,165.3,170.0))
, (mkDate 55074,(166.5,168.16,168.21,168.85))
, (mkDate 55071,(168.53,172.27,170.05,172.49))
, (mkDate 55070,(164.83,168.75,169.45,169.57))
, (mkDate 55069,(166.76,168.92,167.41,169.55))
, (mkDate 55068,(169.13,169.46,169.4,170.94))
, (mkDate 55067,(168.27,170.12,169.06,170.71))
, (mkDate 55064,(166.8,167.65,169.22,169.37))
, (mkDate 55063,(164.61,164.98,166.33,166.72))
, (mkDate 55062,(162.45,162.75,164.6,165.3))
, (mkDate 55061,(161.41,161.63,164.0,164.24))
, (mkDate 55060,(159.42,163.55,159.59,163.59))
, (mkDate 55057,(165.53,167.94,166.78,168.23))
, (mkDate 55056,(166.5,166.65,168.42,168.67))
, (mkDate 55055,(162.46,162.55,165.31,166.71))
, (mkDate 55054,(161.88,163.69,162.83,164.38))
, (mkDate 55053,(163.66,165.66,164.72,166.6))
, (mkDate 55050,(164.8,165.49,165.51,166.6))
, (mkDate 55049,(163.09,165.58,163.91,166.51))
, (mkDate 55048,(164.21,165.75,165.11,167.39))
, (mkDate 55047,(164.21,164.93,165.55,165.57))
, (mkDate 55046,(164.87,165.21,166.43,166.64))
, (mkDate 55043,(162.91,162.99,163.39,165.0))
, (mkDate 55042,(161.5,161.7,162.79,164.72))
, (mkDate 55041,(158.25,158.9,160.03,160.45))
, (mkDate 55040,(157.6,158.88,160.0,160.1))
, (mkDate 55039,(157.26,160.17,160.1,160.88))
, (mkDate 55036,(156.5,156.95,159.99,160.0))
, (mkDate 55035,(155.56,156.63,157.82,158.44))
, (mkDate 55034,(156.11,157.79,156.74,158.73))
, (mkDate 55033,(149.75,153.29,151.51,153.43))
, (mkDate 55032,(150.89,153.27,152.91,155.04))
, (mkDate 55029,(148.63,149.08,151.75,152.02))
, (mkDate 55028,(145.57,145.76,147.52,148.02))
, (mkDate 55027,(144.32,145.04,146.88,147.0))
, (mkDate 55026,(141.16,142.03,142.27,143.18))
, (mkDate 55025,(137.53,139.54,142.34,142.34))
, (mkDate 55022,(136.32,136.34,138.52,138.97))
, (mkDate 55021,(135.93,137.76,136.36,137.99))
, (mkDate 55020,(134.42,135.92,137.22,138.04))
, (mkDate 55019,(135.18,138.48,135.4,139.68))
, (mkDate 55018,(136.25,138.7,138.61,138.99))
, (mkDate 55014,(139.79,141.25,140.02,142.83))
, (mkDate 55013,(142.52,143.5,142.83,144.66))
]
pricesMSFT =
[ (mkDate 55105,(24.8,25.41,24.88,25.47))
, (mkDate 55104,(25.38,25.76,25.72,25.99))
, (mkDate 55103,(25.69,25.91,25.75,25.96))
, (mkDate 55102,(25.6,25.6,25.83,26.16))
, (mkDate 55099,(25.52,25.69,25.55,25.82))
, (mkDate 55098,(25.66,25.92,25.94,26.11))
, (mkDate 55097,(25.64,25.92,25.71,26.25))
, (mkDate 55096,(25.29,25.4,25.77,25.82))
, (mkDate 55095,(25.1,25.11,25.3,25.37))
, (mkDate 55092,(25.1,25.46,25.26,25.48))
, (mkDate 55091,(25.06,25.06,25.3,25.38))
, (mkDate 55090,(24.95,25.25,25.2,25.35))
, (mkDate 55089,(24.86,24.97,25.2,25.27))
, (mkDate 55088,(24.64,24.65,25.0,25.09))
, (mkDate 55085,(24.81,24.93,24.86,25.17))
, (mkDate 55084,(24.65,24.8,25.0,25.05))
, (mkDate 55083,(24.67,24.74,24.78,24.95))
, (mkDate 55082,(24.41,24.62,24.82,24.84))
, (mkDate 55078,(24.08,24.09,24.62,24.8))
, (mkDate 55077,(23.76,23.91,24.11,24.14))
, (mkDate 55076,(23.78,23.82,23.86,24.14))
, (mkDate 55075,(23.9,24.35,24.0,24.74))
, (mkDate 55074,(24.29,24.57,24.65,24.85))
, (mkDate 55071,(24.61,25.07,24.68,25.49))
, (mkDate 55070,(24.3,24.41,24.69,24.78))
, (mkDate 55069,(24.42,24.59,24.55,24.75))
, (mkDate 55068,(24.46,24.6,24.64,24.82))
, (mkDate 55067,(24.28,24.41,24.64,24.73))
, (mkDate 55064,(23.77,23.93,24.41,24.42))
, (mkDate 55063,(23.54,23.6,23.67,23.87))
, (mkDate 55062,(23.25,23.25,23.65,23.72))
, (mkDate 55061,(23.27,23.29,23.58,23.65))
, (mkDate 55060,(23.23,23.32,23.25,23.6))
, (mkDate 55057,(23.51,23.62,23.69,23.8))
, (mkDate 55056,(23.4,23.63,23.62,23.85))
, (mkDate 55055,(23.03,23.13,23.53,23.9))
, (mkDate 55054,(23.05,23.32,23.13,23.4))
, (mkDate 55053,(23.3,23.46,23.42,23.55))
, (mkDate 55050,(23.5,23.75,23.56,23.82))
, (mkDate 55049,(23.27,23.93,23.46,23.98))
, (mkDate 55048,(23.79,23.84,23.81,24.25))
, (mkDate 55047,(23.53,23.68,23.77,23.79))
, (mkDate 55046,(23.5,23.82,23.83,23.86))
, (mkDate 55043,(23.5,23.77,23.52,24.07))
, (mkDate 55042,(23.71,24.2,23.81,24.43))
, (mkDate 55041,(23.34,23.73,23.8,23.91))
, (mkDate 55040,(22.9,22.99,23.47,23.55))
, (mkDate 55039,(22.9,23.44,23.11,23.45))
, (mkDate 55036,(22.81,23.61,23.45,23.89))
, (mkDate 55035,(24.84,24.93,25.56,25.72))
, (mkDate 55034,(24.51,24.7,24.8,24.9))
, (mkDate 55033,(24.37,24.69,24.83,24.83))
, (mkDate 55032,(24.15,24.44,24.53,24.53))
, (mkDate 55029,(24.1,24.4,24.29,24.45))
, (mkDate 55028,(23.86,23.93,24.44,24.44))
, (mkDate 55027,(23.56,23.75,24.12,24.12))
, (mkDate 55026,(22.86,23.2,23.11,23.22))
, (mkDate 55025,(22.14,22.42,23.23,23.29))
, (mkDate 55022,(22.15,22.19,22.39,22.54))
, (mkDate 55021,(22.37,22.65,22.44,22.81))
, (mkDate 55020,(22.0,22.31,22.56,22.69))
, (mkDate 55019,(22.46,23.08,22.53,23.14))
, (mkDate 55018,(22.87,23.21,23.2,23.28))
, (mkDate 55014,(23.21,23.76,23.37,24.04))
, (mkDate 55013,(23.96,24.05,24.04,24.3))
]
pricesARMH =
[ (mkDate 55105,(6.65,6.83,6.65,6.86))
, (mkDate 55104,(6.87,7.0,7.0,7.02))
, (mkDate 55103,(6.88,6.92,6.95,6.97))
, (mkDate 55102,(6.62,6.63,6.81,6.82))
, (mkDate 55099,(6.69,6.88,6.72,6.88))
, (mkDate 55098,(6.55,6.69,6.64,6.88))
, (mkDate 55097,(6.8,6.87,6.8,6.94))
, (mkDate 55096,(6.67,6.68,6.74,6.78))
, (mkDate 55095,(6.62,6.67,6.7,6.77))
, (mkDate 55092,(6.63,6.71,6.7,6.76))
, (mkDate 55091,(6.64,6.7,6.67,6.76))
, (mkDate 55090,(6.76,6.84,6.77,6.85))
, (mkDate 55089,(6.69,6.73,6.84,6.9))
, (mkDate 55088,(6.73,6.74,6.8,6.81))
, (mkDate 55085,(6.84,7.05,6.87,7.07))
, (mkDate 55084,(6.65,6.7,6.94,6.97))
, (mkDate 55083,(6.65,6.71,6.7,6.75))
, (mkDate 55082,(6.56,6.58,6.65,6.68))
, (mkDate 55078,(6.16,6.18,6.39,6.41))
, (mkDate 55077,(6.11,6.19,6.21,6.24))
, (mkDate 55076,(6.03,6.07,6.09,6.14))
, (mkDate 55075,(6.14,6.22,6.24,6.31))
, (mkDate 55074,(6.3,6.45,6.35,6.45))
, (mkDate 55071,(6.4,6.5,6.47,6.56))
, (mkDate 55070,(6.13,6.18,6.35,6.39))
, (mkDate 55069,(6.1,6.12,6.16,6.2))
, (mkDate 55068,(6.14,6.3,6.17,6.3))
, (mkDate 55067,(6.19,6.29,6.21,6.34))
, (mkDate 55064,(6.25,6.32,6.3,6.38))
, (mkDate 55063,(6.18,6.2,6.25,6.27))
, (mkDate 55062,(6.09,6.11,6.19,6.22))
, (mkDate 55061,(6.14,6.14,6.23,6.28))
, (mkDate 55060,(5.91,6.02,5.98,6.04))
, (mkDate 55057,(6.04,6.15,6.2,6.21))
, (mkDate 55056,(6.1,6.18,6.22,6.26))
, (mkDate 55055,(6.07,6.07,6.22,6.3))
, (mkDate 55054,(6.09,6.23,6.14,6.23))
, (mkDate 55053,(6.19,6.39,6.23,6.4))
, (mkDate 55050,(6.25,6.31,6.32,6.41))
, (mkDate 55049,(6.2,6.42,6.24,6.42))
, (mkDate 55048,(6.4,6.55,6.46,6.55))
, (mkDate 55047,(6.5,6.52,6.67,6.7))
, (mkDate 55046,(6.5,6.51,6.58,6.6))
, (mkDate 55043,(6.3,6.34,6.39,6.43))
, (mkDate 55042,(6.42,6.47,6.46,6.64))
, (mkDate 55041,(6.14,6.37,6.22,6.49))
, (mkDate 55040,(6.28,6.32,6.52,6.56))
, (mkDate 55039,(6.41,6.47,6.49,6.63))
, (mkDate 55036,(6.27,6.36,6.44,6.44))
, (mkDate 55035,(6.47,6.48,6.52,6.55))
, (mkDate 55034,(6.38,6.41,6.47,6.51))
, (mkDate 55033,(6.27,6.45,6.41,6.46))
, (mkDate 55032,(6.32,6.44,6.45,6.48))
, (mkDate 55029,(6.23,6.25,6.37,6.45))
, (mkDate 55028,(6.24,6.29,6.35,6.39))
, (mkDate 55027,(6.37,6.53,6.45,6.6))
, (mkDate 55026,(6.12,6.13,6.19,6.23))
, (mkDate 55025,(5.98,6.02,6.12,6.13))
, (mkDate 55022,(5.93,5.96,6.08,6.12))
, (mkDate 55021,(5.74,5.8,5.97,6.0))
, (mkDate 55020,(5.61,5.74,5.69,5.82))
, (mkDate 55019,(5.68,5.82,5.69,5.84))
, (mkDate 55018,(5.77,5.84,5.91,5.93))
, (mkDate 55014,(5.89,6.03,5.94,6.06))
, (mkDate 55013,(5.93,5.98,5.95,6.03))
]
| null | https://raw.githubusercontent.com/timbod7/haskell-chart/692fe212039abac07acdda8ce5956f1a155bbe6c/wiki-examples/ExampleStocks.hs | haskell | module ExampleStocks where
import Data.Time.Calendar
import Data.Time.LocalTime
mkDate :: Integer -> LocalTime
mkDate jday =
LocalTime (ModifiedJulianDay jday) midnight
Price data imported from Yahoo : low , open , close , high
pricesAAPL :: [(LocalTime,(Double,Double,Double,Double))]
pricesMSFT :: [(LocalTime,(Double,Double,Double,Double))]
pricesARMH :: [(LocalTime,(Double,Double,Double,Double))]
pricesAAPL =
[ (mkDate 55105,(180.7,185.35,180.86,186.22))
, (mkDate 55104,(182.61,186.13,185.35,186.45))
, (mkDate 55103,(184.31,186.73,185.38,187.4))
, (mkDate 55102,(183.33,183.87,186.15,186.68))
, (mkDate 55099,(181.44,182.01,182.37,185.5))
, (mkDate 55098,(182.77,187.2,183.82,187.7))
, (mkDate 55097,(185.03,185.4,185.5,188.9))
, (mkDate 55096,(182.85,185.19,184.48,185.38))
, (mkDate 55095,(181.62,184.29,184.02,185.16))
, (mkDate 55092,(184.76,185.83,185.02,186.55))
, (mkDate 55091,(181.97,181.98,184.55,186.79))
, (mkDate 55090,(177.88,177.99,181.87,182.75))
, (mkDate 55089,(173.59,174.04,175.16,175.65))
, (mkDate 55088,(170.25,170.83,173.72,173.9))
, (mkDate 55085,(170.87,172.91,172.16,173.18))
, (mkDate 55084,(170.81,172.06,172.56,173.25))
, (mkDate 55083,(169.7,172.78,171.14,174.47))
, (mkDate 55082,(172.0,172.98,172.93,173.14))
, (mkDate 55078,(167.09,167.28,170.31,170.7))
, (mkDate 55077,(165.0,166.44,166.55,167.1))
, (mkDate 55076,(164.11,164.62,165.18,167.61))
, (mkDate 55075,(164.94,167.99,165.3,170.0))
, (mkDate 55074,(166.5,168.16,168.21,168.85))
, (mkDate 55071,(168.53,172.27,170.05,172.49))
, (mkDate 55070,(164.83,168.75,169.45,169.57))
, (mkDate 55069,(166.76,168.92,167.41,169.55))
, (mkDate 55068,(169.13,169.46,169.4,170.94))
, (mkDate 55067,(168.27,170.12,169.06,170.71))
, (mkDate 55064,(166.8,167.65,169.22,169.37))
, (mkDate 55063,(164.61,164.98,166.33,166.72))
, (mkDate 55062,(162.45,162.75,164.6,165.3))
, (mkDate 55061,(161.41,161.63,164.0,164.24))
, (mkDate 55060,(159.42,163.55,159.59,163.59))
, (mkDate 55057,(165.53,167.94,166.78,168.23))
, (mkDate 55056,(166.5,166.65,168.42,168.67))
, (mkDate 55055,(162.46,162.55,165.31,166.71))
, (mkDate 55054,(161.88,163.69,162.83,164.38))
, (mkDate 55053,(163.66,165.66,164.72,166.6))
, (mkDate 55050,(164.8,165.49,165.51,166.6))
, (mkDate 55049,(163.09,165.58,163.91,166.51))
, (mkDate 55048,(164.21,165.75,165.11,167.39))
, (mkDate 55047,(164.21,164.93,165.55,165.57))
, (mkDate 55046,(164.87,165.21,166.43,166.64))
, (mkDate 55043,(162.91,162.99,163.39,165.0))
, (mkDate 55042,(161.5,161.7,162.79,164.72))
, (mkDate 55041,(158.25,158.9,160.03,160.45))
, (mkDate 55040,(157.6,158.88,160.0,160.1))
, (mkDate 55039,(157.26,160.17,160.1,160.88))
, (mkDate 55036,(156.5,156.95,159.99,160.0))
, (mkDate 55035,(155.56,156.63,157.82,158.44))
, (mkDate 55034,(156.11,157.79,156.74,158.73))
, (mkDate 55033,(149.75,153.29,151.51,153.43))
, (mkDate 55032,(150.89,153.27,152.91,155.04))
, (mkDate 55029,(148.63,149.08,151.75,152.02))
, (mkDate 55028,(145.57,145.76,147.52,148.02))
, (mkDate 55027,(144.32,145.04,146.88,147.0))
, (mkDate 55026,(141.16,142.03,142.27,143.18))
, (mkDate 55025,(137.53,139.54,142.34,142.34))
, (mkDate 55022,(136.32,136.34,138.52,138.97))
, (mkDate 55021,(135.93,137.76,136.36,137.99))
, (mkDate 55020,(134.42,135.92,137.22,138.04))
, (mkDate 55019,(135.18,138.48,135.4,139.68))
, (mkDate 55018,(136.25,138.7,138.61,138.99))
, (mkDate 55014,(139.79,141.25,140.02,142.83))
, (mkDate 55013,(142.52,143.5,142.83,144.66))
]
pricesMSFT =
[ (mkDate 55105,(24.8,25.41,24.88,25.47))
, (mkDate 55104,(25.38,25.76,25.72,25.99))
, (mkDate 55103,(25.69,25.91,25.75,25.96))
, (mkDate 55102,(25.6,25.6,25.83,26.16))
, (mkDate 55099,(25.52,25.69,25.55,25.82))
, (mkDate 55098,(25.66,25.92,25.94,26.11))
, (mkDate 55097,(25.64,25.92,25.71,26.25))
, (mkDate 55096,(25.29,25.4,25.77,25.82))
, (mkDate 55095,(25.1,25.11,25.3,25.37))
, (mkDate 55092,(25.1,25.46,25.26,25.48))
, (mkDate 55091,(25.06,25.06,25.3,25.38))
, (mkDate 55090,(24.95,25.25,25.2,25.35))
, (mkDate 55089,(24.86,24.97,25.2,25.27))
, (mkDate 55088,(24.64,24.65,25.0,25.09))
, (mkDate 55085,(24.81,24.93,24.86,25.17))
, (mkDate 55084,(24.65,24.8,25.0,25.05))
, (mkDate 55083,(24.67,24.74,24.78,24.95))
, (mkDate 55082,(24.41,24.62,24.82,24.84))
, (mkDate 55078,(24.08,24.09,24.62,24.8))
, (mkDate 55077,(23.76,23.91,24.11,24.14))
, (mkDate 55076,(23.78,23.82,23.86,24.14))
, (mkDate 55075,(23.9,24.35,24.0,24.74))
, (mkDate 55074,(24.29,24.57,24.65,24.85))
, (mkDate 55071,(24.61,25.07,24.68,25.49))
, (mkDate 55070,(24.3,24.41,24.69,24.78))
, (mkDate 55069,(24.42,24.59,24.55,24.75))
, (mkDate 55068,(24.46,24.6,24.64,24.82))
, (mkDate 55067,(24.28,24.41,24.64,24.73))
, (mkDate 55064,(23.77,23.93,24.41,24.42))
, (mkDate 55063,(23.54,23.6,23.67,23.87))
, (mkDate 55062,(23.25,23.25,23.65,23.72))
, (mkDate 55061,(23.27,23.29,23.58,23.65))
, (mkDate 55060,(23.23,23.32,23.25,23.6))
, (mkDate 55057,(23.51,23.62,23.69,23.8))
, (mkDate 55056,(23.4,23.63,23.62,23.85))
, (mkDate 55055,(23.03,23.13,23.53,23.9))
, (mkDate 55054,(23.05,23.32,23.13,23.4))
, (mkDate 55053,(23.3,23.46,23.42,23.55))
, (mkDate 55050,(23.5,23.75,23.56,23.82))
, (mkDate 55049,(23.27,23.93,23.46,23.98))
, (mkDate 55048,(23.79,23.84,23.81,24.25))
, (mkDate 55047,(23.53,23.68,23.77,23.79))
, (mkDate 55046,(23.5,23.82,23.83,23.86))
, (mkDate 55043,(23.5,23.77,23.52,24.07))
, (mkDate 55042,(23.71,24.2,23.81,24.43))
, (mkDate 55041,(23.34,23.73,23.8,23.91))
, (mkDate 55040,(22.9,22.99,23.47,23.55))
, (mkDate 55039,(22.9,23.44,23.11,23.45))
, (mkDate 55036,(22.81,23.61,23.45,23.89))
, (mkDate 55035,(24.84,24.93,25.56,25.72))
, (mkDate 55034,(24.51,24.7,24.8,24.9))
, (mkDate 55033,(24.37,24.69,24.83,24.83))
, (mkDate 55032,(24.15,24.44,24.53,24.53))
, (mkDate 55029,(24.1,24.4,24.29,24.45))
, (mkDate 55028,(23.86,23.93,24.44,24.44))
, (mkDate 55027,(23.56,23.75,24.12,24.12))
, (mkDate 55026,(22.86,23.2,23.11,23.22))
, (mkDate 55025,(22.14,22.42,23.23,23.29))
, (mkDate 55022,(22.15,22.19,22.39,22.54))
, (mkDate 55021,(22.37,22.65,22.44,22.81))
, (mkDate 55020,(22.0,22.31,22.56,22.69))
, (mkDate 55019,(22.46,23.08,22.53,23.14))
, (mkDate 55018,(22.87,23.21,23.2,23.28))
, (mkDate 55014,(23.21,23.76,23.37,24.04))
, (mkDate 55013,(23.96,24.05,24.04,24.3))
]
pricesARMH =
[ (mkDate 55105,(6.65,6.83,6.65,6.86))
, (mkDate 55104,(6.87,7.0,7.0,7.02))
, (mkDate 55103,(6.88,6.92,6.95,6.97))
, (mkDate 55102,(6.62,6.63,6.81,6.82))
, (mkDate 55099,(6.69,6.88,6.72,6.88))
, (mkDate 55098,(6.55,6.69,6.64,6.88))
, (mkDate 55097,(6.8,6.87,6.8,6.94))
, (mkDate 55096,(6.67,6.68,6.74,6.78))
, (mkDate 55095,(6.62,6.67,6.7,6.77))
, (mkDate 55092,(6.63,6.71,6.7,6.76))
, (mkDate 55091,(6.64,6.7,6.67,6.76))
, (mkDate 55090,(6.76,6.84,6.77,6.85))
, (mkDate 55089,(6.69,6.73,6.84,6.9))
, (mkDate 55088,(6.73,6.74,6.8,6.81))
, (mkDate 55085,(6.84,7.05,6.87,7.07))
, (mkDate 55084,(6.65,6.7,6.94,6.97))
, (mkDate 55083,(6.65,6.71,6.7,6.75))
, (mkDate 55082,(6.56,6.58,6.65,6.68))
, (mkDate 55078,(6.16,6.18,6.39,6.41))
, (mkDate 55077,(6.11,6.19,6.21,6.24))
, (mkDate 55076,(6.03,6.07,6.09,6.14))
, (mkDate 55075,(6.14,6.22,6.24,6.31))
, (mkDate 55074,(6.3,6.45,6.35,6.45))
, (mkDate 55071,(6.4,6.5,6.47,6.56))
, (mkDate 55070,(6.13,6.18,6.35,6.39))
, (mkDate 55069,(6.1,6.12,6.16,6.2))
, (mkDate 55068,(6.14,6.3,6.17,6.3))
, (mkDate 55067,(6.19,6.29,6.21,6.34))
, (mkDate 55064,(6.25,6.32,6.3,6.38))
, (mkDate 55063,(6.18,6.2,6.25,6.27))
, (mkDate 55062,(6.09,6.11,6.19,6.22))
, (mkDate 55061,(6.14,6.14,6.23,6.28))
, (mkDate 55060,(5.91,6.02,5.98,6.04))
, (mkDate 55057,(6.04,6.15,6.2,6.21))
, (mkDate 55056,(6.1,6.18,6.22,6.26))
, (mkDate 55055,(6.07,6.07,6.22,6.3))
, (mkDate 55054,(6.09,6.23,6.14,6.23))
, (mkDate 55053,(6.19,6.39,6.23,6.4))
, (mkDate 55050,(6.25,6.31,6.32,6.41))
, (mkDate 55049,(6.2,6.42,6.24,6.42))
, (mkDate 55048,(6.4,6.55,6.46,6.55))
, (mkDate 55047,(6.5,6.52,6.67,6.7))
, (mkDate 55046,(6.5,6.51,6.58,6.6))
, (mkDate 55043,(6.3,6.34,6.39,6.43))
, (mkDate 55042,(6.42,6.47,6.46,6.64))
, (mkDate 55041,(6.14,6.37,6.22,6.49))
, (mkDate 55040,(6.28,6.32,6.52,6.56))
, (mkDate 55039,(6.41,6.47,6.49,6.63))
, (mkDate 55036,(6.27,6.36,6.44,6.44))
, (mkDate 55035,(6.47,6.48,6.52,6.55))
, (mkDate 55034,(6.38,6.41,6.47,6.51))
, (mkDate 55033,(6.27,6.45,6.41,6.46))
, (mkDate 55032,(6.32,6.44,6.45,6.48))
, (mkDate 55029,(6.23,6.25,6.37,6.45))
, (mkDate 55028,(6.24,6.29,6.35,6.39))
, (mkDate 55027,(6.37,6.53,6.45,6.6))
, (mkDate 55026,(6.12,6.13,6.19,6.23))
, (mkDate 55025,(5.98,6.02,6.12,6.13))
, (mkDate 55022,(5.93,5.96,6.08,6.12))
, (mkDate 55021,(5.74,5.8,5.97,6.0))
, (mkDate 55020,(5.61,5.74,5.69,5.82))
, (mkDate 55019,(5.68,5.82,5.69,5.84))
, (mkDate 55018,(5.77,5.84,5.91,5.93))
, (mkDate 55014,(5.89,6.03,5.94,6.06))
, (mkDate 55013,(5.93,5.98,5.95,6.03))
]
|
|
d1c70decc0df6647a70477ccd95651b5a36e4dbaaf8958cba043673df9ec1b53 | didierverna/declt | package.lisp | ;;; package.lisp --- Package documentation
Copyright ( C ) 2010 - 2013 , 2015 - 2017 , 2020 - 2022
Author : < >
This file is part of Declt .
;; Permission to use, copy, modify, and distribute this software for any
;; purpose with or without fee is hereby granted, provided that the above
;; copyright notice and this permission notice appear in all copies.
THIS SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
;; WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
;; MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
;; ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
;; OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
;;; Commentary:
;;; Code:
(in-package :net.didierverna.declt)
(in-readtable :net.didierverna.declt)
;; ==========================================================================
;; Documentation Protocols
;; ==========================================================================
(defmethod category-name ((definition package-definition))
"Return \"package\"."
"package")
(defmethod index-command-name ((definition package-definition))
"Return \"packageindex\"."
"packageindex")
(defmethod document ((definition package-definition) context &key)
"Render package DEFINITION's documentation in context."
(anchor-and-index definition)
(render-docstring definition)
(table ()
(when-let (source (source-file definition))
(item ("Source") (reference source context t)))
(when-let* ((nicknames (nicknames definition))
(length (length nicknames)))
(item ((format nil "Nickname~p" length))
(if (eq length 1)
(format t "@t{~(~A~)}" (escape (first nicknames)))
(itemize-list nicknames :format "@t{~(~A~)}" :key #'escape))))
# # # # WARNING : casing policy .
(render-references "Use List"
(sort (use-list definition) #'string-lessp :key #'name)
context
t)
# # # # WARNING : casing policy .
(render-references "Used By List"
(sort (used-by-list definition) #'string-lessp :key #'name)
context
t)
# # # # NOTE : classoids and their slots are documented in a single bloc .
As a consequence , if a classoid belongs to this package , there 's no
;; need to also reference (some of) its slots. On the other hand, we need
;; to reference slots for which the owner is elsewhere (admittedly, and
for the same reason , only one would suffice ) . In the case of generic
;; functions, methods don't need to be referenced at all, because they
;; share the same name.
(flet ((organize-definitions (definitions)
(sort (remove-if
(lambda (definition)
(or (typep definition 'method-definition)
(and (typep definition 'slot-definition)
(eq (home-package definition)
(home-package (owner definition))))))
definitions)
# # # # WARNING : casing policy .
:key #'definition-symbol)))
(render-references "Public Interface"
(organize-definitions (public-definitions definition))
context)
(render-references "Internals"
(organize-definitions (private-definitions definition))
context))))
;; ==========================================================================
Package Nodes
;; ==========================================================================
(defun add-packages-node (parent report context)
"Add REPORT's packages node to PARENT in CONTEXT."
(when-let (definitions
(remove-if-not #'package-definition-p (definitions report)))
(unless (and (every #'foreignp definitions)
(not (foreign-definitions context)))
(let ((packages-node
(add-child parent
(make-node :name "Packages"
:synopsis "The packages documentation"
:before-menu-contents (format nil "~
Packages are listed by definition order.")))))
(dolist (definition definitions)
(let ((contents (render-to-string (document definition context))))
(unless (zerop (length contents))
(add-child packages-node
(make-node :name (long-title definition)
:section-name (format nil "@t{~(~A~)}"
(escape (safe-name definition t)))
:before-menu-contents contents)))))))))
;;; package.lisp ends here
| null | https://raw.githubusercontent.com/didierverna/declt/2da3df1d07ca1e2dfbd0ec07a1d465c0b6eadb84/core/src/doc/package.lisp | lisp | package.lisp --- Package documentation
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
Commentary:
Code:
==========================================================================
Documentation Protocols
==========================================================================
need to also reference (some of) its slots. On the other hand, we need
to reference slots for which the owner is elsewhere (admittedly, and
functions, methods don't need to be referenced at all, because they
share the same name.
==========================================================================
==========================================================================
package.lisp ends here |
Copyright ( C ) 2010 - 2013 , 2015 - 2017 , 2020 - 2022
Author : < >
This file is part of Declt .
THIS SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
(in-package :net.didierverna.declt)
(in-readtable :net.didierverna.declt)
(defmethod category-name ((definition package-definition))
"Return \"package\"."
"package")
(defmethod index-command-name ((definition package-definition))
"Return \"packageindex\"."
"packageindex")
(defmethod document ((definition package-definition) context &key)
"Render package DEFINITION's documentation in context."
(anchor-and-index definition)
(render-docstring definition)
(table ()
(when-let (source (source-file definition))
(item ("Source") (reference source context t)))
(when-let* ((nicknames (nicknames definition))
(length (length nicknames)))
(item ((format nil "Nickname~p" length))
(if (eq length 1)
(format t "@t{~(~A~)}" (escape (first nicknames)))
(itemize-list nicknames :format "@t{~(~A~)}" :key #'escape))))
# # # # WARNING : casing policy .
(render-references "Use List"
(sort (use-list definition) #'string-lessp :key #'name)
context
t)
# # # # WARNING : casing policy .
(render-references "Used By List"
(sort (used-by-list definition) #'string-lessp :key #'name)
context
t)
# # # # NOTE : classoids and their slots are documented in a single bloc .
As a consequence , if a classoid belongs to this package , there 's no
for the same reason , only one would suffice ) . In the case of generic
(flet ((organize-definitions (definitions)
(sort (remove-if
(lambda (definition)
(or (typep definition 'method-definition)
(and (typep definition 'slot-definition)
(eq (home-package definition)
(home-package (owner definition))))))
definitions)
# # # # WARNING : casing policy .
:key #'definition-symbol)))
(render-references "Public Interface"
(organize-definitions (public-definitions definition))
context)
(render-references "Internals"
(organize-definitions (private-definitions definition))
context))))
Package Nodes
(defun add-packages-node (parent report context)
"Add REPORT's packages node to PARENT in CONTEXT."
(when-let (definitions
(remove-if-not #'package-definition-p (definitions report)))
(unless (and (every #'foreignp definitions)
(not (foreign-definitions context)))
(let ((packages-node
(add-child parent
(make-node :name "Packages"
:synopsis "The packages documentation"
:before-menu-contents (format nil "~
Packages are listed by definition order.")))))
(dolist (definition definitions)
(let ((contents (render-to-string (document definition context))))
(unless (zerop (length contents))
(add-child packages-node
(make-node :name (long-title definition)
:section-name (format nil "@t{~(~A~)}"
(escape (safe-name definition t)))
:before-menu-contents contents)))))))))
|
e7f8354a50364e539792dd610a6ca0179edcf8abd5c056f93244ad6772cd418a | lorepub/moot | devel.hs | {-# LANGUAGE PackageImports #-}
import "moot" Application (develMain)
import Prelude (IO)
-- | Development entry point: delegates to 'develMain' from the moot
-- package's @Application@ module (presumably the auto-reloading dev
-- build driver -- confirm against the moot scaffolding).
main :: IO ()
main = develMain
| null | https://raw.githubusercontent.com/lorepub/moot/793c72d046762ec01a250416667e041b35eec7f8/app/devel.hs | haskell | # LANGUAGE PackageImports # | import "moot" Application (develMain)
import Prelude (IO)
main :: IO ()
main = develMain
|
98fb8b4a9b33e102deac96192daa0a91649791a4b914d62f858db32499ef7e65 | input-output-hk/cardano-ledger-byron | Slotting.hs | module Cardano.Chain.Slotting
( module X
)
where
import Cardano.Chain.Slotting.EpochAndSlotCount as X
import Cardano.Chain.Slotting.EpochNumber as X
import Cardano.Chain.Slotting.EpochSlots as X
import Cardano.Chain.Slotting.SlotCount as X
import Cardano.Chain.Slotting.SlotNumber as X
| null | https://raw.githubusercontent.com/input-output-hk/cardano-ledger-byron/d309449e6c303a9f0dcc8dcf172df6f0b3195ed5/cardano-ledger/src/Cardano/Chain/Slotting.hs | haskell | module Cardano.Chain.Slotting
( module X
)
where
import Cardano.Chain.Slotting.EpochAndSlotCount as X
import Cardano.Chain.Slotting.EpochNumber as X
import Cardano.Chain.Slotting.EpochSlots as X
import Cardano.Chain.Slotting.SlotCount as X
import Cardano.Chain.Slotting.SlotNumber as X
|
|
cb34f8586dc8d6672d1cca8110a6f54039f481df89d513b166c68187733137c5 | herd/herdtools7 | runTest.ml | (****************************************************************************)
(* the diy toolsuite *)
(* *)
, University College London , UK .
, INRIA Paris - Rocquencourt , France .
(* *)
Copyright 2023 - present Institut National de Recherche en Informatique et
(* en Automatique and the authors. All rights reserved. *)
(* *)
This software is governed by the CeCILL - B license under French law and
(* abiding by the rules of distribution of free software. You can use, *)
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
(****************************************************************************)
(* Static configuration for running tests: memory-model selection and
   checking options, test-name filtering/renaming hooks, plus the
   sub-configurations required by the parser, the semantics and the
   top-level driver. *)
module type Config = sig
  val model : Model.t option
  val archcheck : bool
  val through : Model.through
  val strictskip : bool
  val cycles : StringSet.t
  val bell_model_info : (string * BellModel.info) option
  val macros : string option
  (* Select which tests run, and under which (renamed) name. *)
  val check_name : string -> bool
  val check_rename : string -> string option
  (* NOTE(review): presumably resolves a library file name to a path on
     disk -- confirm against callers. *)
  val libfind : string -> string
  include GenParser.Config
  include Top_herd.CommonConfig
  include Sem.Config
  val statelessrc11 : bool
  (* Basic machine size policy, used to pick a size for mixed-size
     tests in [Make.run]. *)
  val byte : MachSize.Tag.t
end
(* Type of the per-test runner produced by [Make.run]: given optional
   cache and dirty-bit configurations, the start time, the source file
   name and channel, the current hash environment and the splitter
   output for one test, run the test and return the (possibly updated)
   hash environment. *)
type runfun =
  CacheType.t option ->
  DirtyBit.t option ->
  float (* start time *) ->
  string (* file name *) ->
  in_channel (* source channel *) ->
  TestHash.env ->
  Splitter.result ->
  TestHash.env
(* Tie together a parser [P], an architecture semantics [S] and a memory
   model [M] into a runner for single tests. *)
module Make
  (S:Sem.Semantics)
  (P:sig
    type pseudo
    val parse : in_channel -> Splitter.result -> pseudo MiscParser.t
  end with type pseudo = S.A.pseudo)
  (M:XXXMem.S with module S = S)
  (C:Config) =
  struct
    module T = Test_herd.Make(S.A)

    (* Parse one test from [chan], check its hash against [env], build
       it and run it under the model.  Returns the updated hash
       environment; if [TestHash.Seen] is raised (presumably by
       [check_env] for an already-seen test), the test is skipped and
       [env] is returned unchanged. *)
    let run cache_type dirty start_time filename chan env splitted =
      try
        let parsed = P.parse chan splitted in
        let name = splitted.Splitter.name in
        let hash = MiscParser.get_hash parsed in
        (* Record/verify the test hash; tests without a hash leave the
           environment untouched. *)
        let env = match hash with
        | None -> env
        | Some hash ->
            TestHash.check_env env name.Name.name filename hash in
        let test = T.build name parsed in
        (* Compute basic machine size *)
        let sz =
          if S.A.is_mixed then begin match C.byte with
          | MachSize.Tag.Size sz -> sz
          | MachSize.Tag.Auto ->
              (* Auto mode: derive the base size from the access sizes
                 observed in the test. *)
              let szs = test.Test_herd.access_size in
              match szs with
              | [] -> MachSize.Byte
              | [sz] -> MachSize.pred sz
              | sz::_ -> sz
          end else begin
            (* Cannot that easily check the test not to mix sizes,
               as there are several locations in test that may be of
               different sizes *)
            MachSize.Byte
          end in
        (* And run test *)
        let module T =
          Top_herd.Make
            (struct
              include C
              let byte = sz
              let cache_type = cache_type
              let dirty = dirty
            end)(M) in
        T.run start_time test ;
        env
      with TestHash.Seen -> env
  end
| null | https://raw.githubusercontent.com/herd/herdtools7/574c59e111deda09afbba1f2bdd94353f437faaf/herd/runTest.ml | ocaml | **************************************************************************
the diy toolsuite
en Automatique and the authors. All rights reserved.
abiding by the rules of distribution of free software. You can use,
**************************************************************************
start time
file name
source channel
Compute basic machine size
Cannot that easily check the test not to mix sizes,
as there are several locations in test that may be of
different sizes
And run test | , University College London , UK .
, INRIA Paris - Rocquencourt , France .
Copyright 2023 - present Institut National de Recherche en Informatique et
This software is governed by the CeCILL - B license under French law and
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
module type Config = sig
val model : Model.t option
val archcheck : bool
val through : Model.through
val strictskip : bool
val cycles : StringSet.t
val bell_model_info : (string * BellModel.info) option
val macros : string option
val check_name : string -> bool
val check_rename : string -> string option
val libfind : string -> string
include GenParser.Config
include Top_herd.CommonConfig
include Sem.Config
val statelessrc11 : bool
val byte : MachSize.Tag.t
end
type runfun =
CacheType.t option ->
DirtyBit.t option ->
TestHash.env ->
Splitter.result ->
TestHash.env
module Make
(S:Sem.Semantics)
(P:sig
type pseudo
val parse : in_channel -> Splitter.result -> pseudo MiscParser.t
end with type pseudo = S.A.pseudo)
(M:XXXMem.S with module S = S)
(C:Config) =
struct
module T = Test_herd.Make(S.A)
let run cache_type dirty start_time filename chan env splitted =
try
let parsed = P.parse chan splitted in
let name = splitted.Splitter.name in
let hash = MiscParser.get_hash parsed in
let env = match hash with
| None -> env
| Some hash ->
TestHash.check_env env name.Name.name filename hash in
let test = T.build name parsed in
let sz =
if S.A.is_mixed then begin match C.byte with
| MachSize.Tag.Size sz -> sz
| MachSize.Tag.Auto ->
let szs = test.Test_herd.access_size in
match szs with
| [] -> MachSize.Byte
| [sz] -> MachSize.pred sz
| sz::_ -> sz
end else begin
MachSize.Byte
end in
let module T =
Top_herd.Make
(struct
include C
let byte = sz
let cache_type = cache_type
let dirty = dirty
end)(M) in
T.run start_time test ;
env
with TestHash.Seen -> env
end
|
9b968278a0360d17a35c8c12c97a79170b386542c82c9e479e4afbc790ca4aec | Cumulus/Syndic | syndic_xml.ml | type dtd = string option
module Error = Syndic_error

(* Source positions and tags are those of Xmlm. *)
type pos = Xmlm.pos
type tag = Xmlm.tag

(* An XML tree: a [Node] carries the position at which it was read, its
   tag and its children; [Data] is a run of character data. *)
type t = Node of pos * tag * t list | Data of pos * string
(* Resolve [uri] against the document's xml:base, when one is set;
   without a base the URI is returned untouched. *)
let resolve ~xmlbase uri =
  match xmlbase with
  | Some base -> Uri.resolve "" base uri
  | None -> uri
Specialized version of the Xmlm.make_input one .
(* Build an Xmlm input source reading from channel [fh].
   Specialized version of the Xmlm.make_input one. *)
let input_of_channel fh =
  (* Xmlm.make_input does not raise any exception. *)
  Xmlm.make_input (`Channel fh)
(* Read one whole XML document from [input] and return it as a tree,
   tagging every node with its source position.  Xmlm parse errors are
   re-raised as [Error.Error]. *)
let of_xmlm input =
  let make_element tag children = Node (Xmlm.pos input, tag, children) in
  let make_data s = Data (Xmlm.pos input, s) in
  try Xmlm.input_doc_tree ~el:make_element ~data:make_data input
  with Xmlm.Error (pos, e) -> raise (Error.Error (pos, Xmlm.error_message e))
let get_position = function Node (pos, _, _) -> pos | Data (pos, _) -> pos
(* Emit the tree [t] on [output], one Xmlm signal at a time, re-raising
   Xmlm output errors as [Error.Error].  Note that only the [`Data] and
   [`El_end] outputs are wrapped in [try] here; recursive child calls
   wrap their own outputs. *)
let rec t_to_xmlm t output =
  match t with
  | Data (_pos, d) -> (
    try Xmlm.output output (`Data d) with Xmlm.Error (pos, e) ->
      raise (Error.Error (pos, Xmlm.error_message e)) )
  | Node (_pos, tag, t_sub) -> (
    Xmlm.output output (`El_start tag) ;
    List.iter (fun t -> t_to_xmlm t output) t_sub ;
    try Xmlm.output output `El_end with Xmlm.Error (pos, e) ->
      raise (Error.Error (pos, Xmlm.error_message e)) )
Specialized version of the Xmlm one .
(* Build an Xmlm output destination writing to [dest], with the XML
   declaration enabled.  Specialized version of the Xmlm one. *)
let make_output ?ns_prefix dest =
  (* Xmlm.make_output does not raise any exception. *)
  Xmlm.make_output dest ~decl:true ?ns_prefix
(* Write the [`Dtd] header followed by the whole tree [t] on [output],
   translating Xmlm errors into [Error.Error]. *)
let to_xmlm ?dtd t output =
  ( try Xmlm.output output (`Dtd dtd) with Xmlm.Error (pos, e) ->
      raise (Error.Error (pos, Xmlm.error_message e)) ) ;
  t_to_xmlm t output
(* Render [t] into the given buffer, without an XML declaration. *)
let to_buffer ?ns_prefix t buf =
  let out = Xmlm.make_output ~decl:false (`Buffer buf) ?ns_prefix in
  to_xmlm t out
(* Render [t] to a fresh string (no XML declaration). *)
let to_string ?ns_prefix t =
  let buf = Buffer.create 4096 in
  to_buffer ?ns_prefix t buf;
  Buffer.contents buf
| null | https://raw.githubusercontent.com/Cumulus/Syndic/7a70e3062e42f9ee0ec77f282e1924736b0dff2a/lib/syndic_xml.ml | ocaml | Xmlm.make_input does not raise any exception.
Xmlm.make_output does not raise any exception. | type dtd = string option
module Error = Syndic_error
type pos = Xmlm.pos
type tag = Xmlm.tag
type t = Node of pos * tag * t list | Data of pos * string
let resolve ~xmlbase uri =
match xmlbase with None -> uri | Some b -> Uri.resolve "" b uri
Specialized version of the Xmlm.make_input one .
let input_of_channel fh =
Xmlm.make_input (`Channel fh)
let of_xmlm input =
let el tag datas = Node (Xmlm.pos input, tag, datas) in
let data data = Data (Xmlm.pos input, data) in
try Xmlm.input_doc_tree ~el ~data input with Xmlm.Error (pos, e) ->
raise (Error.Error (pos, Xmlm.error_message e))
let get_position = function Node (pos, _, _) -> pos | Data (pos, _) -> pos
let rec t_to_xmlm t output =
match t with
| Data (_pos, d) -> (
try Xmlm.output output (`Data d) with Xmlm.Error (pos, e) ->
raise (Error.Error (pos, Xmlm.error_message e)) )
| Node (_pos, tag, t_sub) -> (
Xmlm.output output (`El_start tag) ;
List.iter (fun t -> t_to_xmlm t output) t_sub ;
try Xmlm.output output `El_end with Xmlm.Error (pos, e) ->
raise (Error.Error (pos, Xmlm.error_message e)) )
Specialized version of the Xmlm one .
let make_output ?ns_prefix dest =
Xmlm.make_output dest ~decl:true ?ns_prefix
let to_xmlm ?dtd t output =
( try Xmlm.output output (`Dtd dtd) with Xmlm.Error (pos, e) ->
raise (Error.Error (pos, Xmlm.error_message e)) ) ;
t_to_xmlm t output
let to_buffer ?ns_prefix t b =
let output = Xmlm.make_output ~decl:false (`Buffer b) ?ns_prefix in
to_xmlm t output
let to_string ?ns_prefix t =
let b = Buffer.create 4096 in
to_buffer ?ns_prefix t b ; Buffer.contents b
|
2a5a516acdaff26b8370d610a9a167e19102fa06a1dca8432bd6b925d4c586c7 | scrintal/heroicons-reagent | chat_bubble_oval_left.cljs | (ns com.scrintal.heroicons.mini.chat-bubble-oval-left)
(defn render
  "Hiccup for the mini (20x20 viewBox) chat-bubble-oval-left Heroicon:
  a single filled path drawn in currentColor and hidden from assistive
  technology via aria-hidden."
  []
  [:svg {:xmlns ""
         :viewBox "0 0 20 20"
         :fill "currentColor"
         :aria-hidden "true"}
   [:path {:fillRule "evenodd"
           :d "M2 10c0-3.967 3.69-7 8-7 4.31 0 8 3.033 8 7s-3.69 7-8 7a9.165 9.165 0 01-1.504-.123 5.976 5.976 0 01-3.935 1.107.75.75 0 01-.584-1.143 3.478 3.478 0 00.522-1.756C2.979 13.825 2 12.025 2 10z"
           :clipRule "evenodd"}]])
(defn render []
[:svg {:xmlns ""
:viewBox "0 0 20 20"
:fill "currentColor"
:aria-hidden "true"}
[:path {:fillRule "evenodd"
:d "M2 10c0-3.967 3.69-7 8-7 4.31 0 8 3.033 8 7s-3.69 7-8 7a9.165 9.165 0 01-1.504-.123 5.976 5.976 0 01-3.935 1.107.75.75 0 01-.584-1.143 3.478 3.478 0 00.522-1.756C2.979 13.825 2 12.025 2 10z"
:clipRule "evenodd"}]]) |
|
a97440c5b91dd83776ad24c2f9b13740187d62309716a4623ff38f4d2d76a641 | TyOverby/mono | vdom_based_tests.ml | open! Core
open! Bonsai_web
open! Bonsai_web_test
open Bonsai.Let_syntax
open Shared
module Test = struct
  include Shared.Test

  (* Build a PRT test harness around [component]:
     - [visible_range] is the (low, high) row window initially reported
       to the table (applied only when [should_set_bounds]);
     - [map] is the initial row data backing [input_var];
     - [should_print_styles] keeps [style.*] attributes in the printed
       vdom (they are filtered out by default). *)
  let create
    (type a)
    ?(visible_range = 0, 100)
    ?(map = small_map)
    ?(should_print_styles = false)
    ?(should_set_bounds = true)
    component
    : a t
    =
    let min_vis, max_vis = visible_range in
    let input_var = Bonsai.Var.create map in
    let filter_var = Bonsai.Var.create (fun ~key:_ ~data:_ -> true) in
    let { Component.component; get_vdom; get_testing = _; get_inject } =
      component (Bonsai.Var.value input_var) (Bonsai.Var.value filter_var)
    in
    let handle =
      Handle.create
        (module struct
          type t = a

          (* Render the component's vdom to a string; [style.*]
             attributes are only printed when requested. *)
          let view result =
            result
            |> get_vdom
            |> Virtual_dom_test_helpers.Node_helpers.unsafe_convert_exn
            |> Virtual_dom_test_helpers.Node_helpers.to_string_html
                 ~filter_printed_attributes:(function
                   | x when String.is_prefix ~prefix:"style." x -> should_print_styles
                   | _ -> true)
          ;;

          type incoming = Action.t

          let incoming = get_inject
        end)
        component
    in
    let t = { handle; get_vdom; input_var; filter_var } in
    if should_set_bounds then set_bounds t ~low:min_vis ~high:max_vis;
    t
  ;;

  (* Observe the component's vdom and print a line each time it is
     (re)computed; invalidation is unexpected in these tests and
     trips an assertion. *)
  let print_message_on_result_recomputation t =
    let result = Incr.map (Handle.result_incr t.handle) ~f:t.get_vdom in
    Incr.Observer.on_update_exn (Incr.observe result) ~f:(function
      | Initialized _ -> print_endline "Initialized"
      | Changed _ -> print_endline "Changed"
      | Invalidated -> assert false)
  ;;
end
(* Reveal [component]'s computation graph as a sexp and print how many
   [Assoc] and [Assoc_simpl] nodes it contains. *)
let print_assocs component =
  (* Count occurrences of [atom_name] as an atom anywhere in [sexp]. *)
  let rec count atom_name sexp =
    match sexp with
    | Sexp.Atom a -> if String.equal a atom_name then 1 else 0
    | List subs -> List.sum (module Int) subs ~f:(count atom_name)
  in
  let structure =
    Bonsai.Private.Computation.sexp_of_packed
      (Bonsai.Private.reveal_computation component)
  in
  let assoc_count = count "Assoc" structure in
  let assoc_simple_count = count "Assoc_simpl" structure in
  print_s [%message (assoc_count : int) (assoc_simple_count : int)]
(* Instantiate the default component on trivial inputs purely to
   inspect its computation graph shape via [print_assocs]. *)
let%expect_test "simplified_assocs" =
  let { Test.Component.component; _ } =
    Test.Component.default
      ()
      (Bonsai.Value.return Int.Map.empty)
      (Bonsai.Value.return (fun ~key:_ ~data:_ -> true))
  in
  print_assocs component;
  (* There's only one assoc because all the columns are inside of an
     assoc per-row instead of it being the other way around as you
     might have expected. *)
  [%expect {| ((assoc_count 1) (assoc_simple_count 2)) |}]
;;
(* Same graph-shape check as above, but for the dynamic-columns variant,
   which is expected to contain no assoc nodes at all. *)
let%expect_test "simplified_assocs on the dynamic columns" =
  let { Test.Component.component; _ } =
    Test.Component.default'
      ()
      (Bonsai.Value.return Int.Map.empty)
      (Bonsai.Value.return (fun ~key:_ ~data:_ -> true))
  in
  print_assocs component;
  (* No assocs here because it just uses the Incr_map function directly *)
  [%expect {| ((assoc_count 0) (assoc_simple_count 0)) |}]
;;
let%expect_test "column visibility" =
let is_column_b_visible_var = Bonsai.Var.create true in
let is_column_b_visible = Bonsai.Var.value is_column_b_visible_var in
let test =
Test.create ~should_print_styles:true (Test.Component.default ~is_column_b_visible ())
in
Handle.recompute_view_until_stable test.handle;
Handle.store_view test.handle;
Bonsai.Var.set is_column_b_visible_var false;
Handle.recompute_view_until_stable test.handle;
Handle.show_diff test.handle;
[%expect
{|
resize: horizontal;
overflow: hidden;
width: 50px;
}>
<div> a </div>
</td>
<td colspan="1"
freeze_width=((set <fun>)(reset <fun>))
size_tracker=<fun>
style={
text-align: center;
user-select: none;
font-weight: bold;
resize: horizontal;
overflow: hidden;
width: 50px;
+| display: none;
}>
<div onclick style={ white-space: pre; cursor: pointer; }>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun> style={ height: 3px; position: relative; }>
<div @key=0
class="prt-table-row prt-table-row-even"
onclick
style={
top: 0px;
position: absolute;
max-height: 1px;
width: 0.00px;
min-width: 0.00px;
max-width: 0.00px;
}>
<input oninput> </input>
hello
</div>
<div @key=key_0-2
data-row-id="key_0"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
-| display: inline-block;
+| display: none;
contain: strict;
-| width: 0.00px;
-| min-width: 0.00px;
-| max-width: 0.00px;
-| }> 1.000000 </div>
+| }> </div>
</div>
<div @key=100
class="prt-table-row prt-table-row-odd"
onclick
style={
top: 1px;
position: absolute;
max-height: 1px;
width: max-content;
}>
<div @key=key_100-0
data-row-id="key_100"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
width: 0.00px;
min-width: 0.00px;
max-width: 0.00px;
}>
<input oninput> </input>
there
</div>
<div @key=key_100-2
data-row-id="key_100"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
-| display: inline-block;
+| display: none;
contain: strict;
-| width: 0.00px;
-| min-width: 0.00px;
-| max-width: 0.00px;
-| }> 2.000000 </div>
+| }> </div>
</div>
<div @key=200
class="prt-table-row prt-table-row-even"
onclick
style={
top: 2px;
position: absolute;
max-height: 1px;
width: max-content;
}>
<div @key=key_200-0
data-row-id="key_200"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
width: 0.00px;
min-width: 0.00px;
max-width: 0.00px;
}>
<input oninput> </input>
world
</div>
<div @key=key_200-2
data-row-id="key_200"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
-| display: inline-block;
+| display: none;
contain: strict;
-| width: 0.00px;
-| min-width: 0.00px;
-| max-width: 0.00px;
-| }> 2.000000 </div>
+| }> </div>
</div>
</div>
</div> |}]
;;
let%expect_test "stabilization of view range" =
let test =
Test.create (Test.Component.default ()) ~visible_range:(0, 2) ~should_set_bounds:false
in
Handle.recompute_view_until_stable test.handle;
Handle.show test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 0 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
hello
</div>
<div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 1.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
<div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 1 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
there
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 2.000000 </div>
</div>
</div>
</div> |}];
(* Change the visibility to show the rest of the nodes *)
Handle.show_diff test.handle;
[%expect {| |}];
Handle.recompute_view_until_stable test.handle;
Handle.show_diff test.handle;
[%expect {||}];
Test.set_bounds test ~low:0 ~high:100;
Handle.recompute_view_until_stable test.handle;
Handle.show_diff test.handle;
[%expect
{|
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 0 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
hello
</div>
<div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 1.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
<div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 1 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
there
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 2.000000 </div>
</div>
+| <div @key=200 class="prt-table-row prt-table-row-even" onclick>
+| <div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 4 </div>
+| <div @key=key_200-1 data-row-id="key_200" class="prt-table-cell">
+| <input oninput> </input>
+| world
+| </div>
+| <div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 2.000000 </div>
+| </div>
</div>
</div> |}]
;;
let%expect_test "resize-column" =
let test = Test.create ~should_print_styles:true (Test.Component.default ()) in
Handle.recompute_view_until_stable test.handle;
Handle.store_view test.handle;
Test.resize_column test ~idx:0 ~width:10.0;
Handle.recompute_view_until_stable test.handle;
Handle.show_diff test.handle;
[%expect
{|
top: 0px;
position: absolute;
max-height: 1px;
width: max-content;
}>
<div @key=key_0-0
data-row-id="key_0"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
display: inline-block;
contain: strict;
-| width: 0.00px;
+| width: 10.00px;
-| min-width: 0.00px;
+| min-width: 10.00px;
-| max-width: 0.00px;
+| max-width: 10.00px;
}> 0 </div>
<div @key=key_0-1
data-row-id="key_0"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
display: inline-block;
contain: strict;
width: 0.00px;
min-width: 0.00px;
max-width: 0.00px;
}>
top: 1px;
position: absolute;
max-height: 1px;
width: max-content;
}>
<div @key=key_100-0
data-row-id="key_100"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
display: inline-block;
contain: strict;
-| width: 0.00px;
+| width: 10.00px;
-| min-width: 0.00px;
+| min-width: 10.00px;
-| max-width: 0.00px;
+| max-width: 10.00px;
}> 1 </div>
<div @key=key_100-1
data-row-id="key_100"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
display: inline-block;
contain: strict;
width: 0.00px;
min-width: 0.00px;
max-width: 0.00px;
}>
top: 2px;
position: absolute;
max-height: 1px;
width: max-content;
}>
<div @key=key_200-0
data-row-id="key_200"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
display: inline-block;
contain: strict;
-| width: 0.00px;
+| width: 10.00px;
-| min-width: 0.00px;
+| min-width: 10.00px;
-| max-width: 0.00px;
+| max-width: 10.00px;
}> 4 </div>
<div @key=key_200-1
data-row-id="key_200"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
display: inline-block;
contain: strict;
width: 0.00px;
min-width: 0.00px;
max-width: 0.00px;
}> |}]
;;
let%expect_test "big table" =
The PRT always renders [ low-25 , high+25 ] , so 50,50 will render a big chunk
centered at 50
centered at 50 *)
let test =
Test.create ~map:big_map ~visible_range:(50, 50) (Test.Component.default ())
in
Handle.recompute_view_until_stable test.handle;
Handle.show test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 51 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
hi
</div>
<div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 25.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
<div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 52 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
hi
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 26.000000 </div>
</div>
</div>
</div> |}];
(* extending the range upwards should only add to the end *)
Test.set_bounds test ~low:55 ~high:60;
Handle.recompute_view_until_stable test.handle;
Handle.show_diff test.handle;
[%expect
{|
<div onclick>
<span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
-| <div @key=0 class="prt-table-row prt-table-row-even" onclick>
+| <div @key=0 class="prt-table-row prt-table-row-odd" onclick>
-| <div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 51 </div>
+| <div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 56 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
hi
</div>
-| <div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 25.000000 </div>
+| <div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 28.000000 </div>
</div>
-| <div @key=100 class="prt-table-row prt-table-row-odd" onclick>
+| <div @key=100 class="prt-table-row prt-table-row-even" onclick>
-| <div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 52 </div>
+| <div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 57 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
hi
</div>
-| <div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 26.000000 </div>
+| <div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 28.000000 </div>
+| </div>
+| <div @key=200 class="prt-table-row prt-table-row-odd" onclick>
+| <div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 58 </div>
+| <div @key=key_200-1 data-row-id="key_200" class="prt-table-cell">
+| <input oninput> </input>
+| hi
+| </div>
+| <div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 29.000000 </div>
+| </div>
+| <div @key=300 class="prt-table-row prt-table-row-even" onclick>
+| <div @key=key_300-0 data-row-id="key_300" class="prt-table-cell"> 59 </div>
+| <div @key=key_300-1 data-row-id="key_300" class="prt-table-cell">
+| <input oninput> </input>
+| hi
+| </div>
+| <div @key=key_300-2 data-row-id="key_300" class="prt-table-cell"> 29.000000 </div>
+| </div>
+| <div @key=400 class="prt-table-row prt-table-row-odd" onclick>
+| <div @key=key_400-0 data-row-id="key_400" class="prt-table-cell"> 60 </div>
+| <div @key=key_400-1 data-row-id="key_400" class="prt-table-cell">
+| <input oninput> </input>
+| hi
+| </div>
+| <div @key=key_400-2 data-row-id="key_400" class="prt-table-cell"> 30.000000 </div>
+| </div>
+| <div @key=500 class="prt-table-row prt-table-row-even" onclick>
+| <div @key=key_500-0 data-row-id="key_500" class="prt-table-cell"> 61 </div>
+| <div @key=key_500-1 data-row-id="key_500" class="prt-table-cell">
+| <input oninput> </input>
+| hi
+| </div>
+| <div @key=key_500-2 data-row-id="key_500" class="prt-table-cell"> 30.000000 </div>
+| </div>
+| <div @key=600 class="prt-table-row prt-table-row-odd" onclick>
+| <div @key=key_600-0 data-row-id="key_600" class="prt-table-cell"> 62 </div>
+| <div @key=key_600-1 data-row-id="key_600" class="prt-table-cell">
+| <input oninput> </input>
+| hi
+| </div>
+| <div @key=key_600-2 data-row-id="key_600" class="prt-table-cell"> 31.000000 </div>
</div>
</div>
</div> |}]
;;
let%expect_test "typing into a column, leaving that column, and then coming back. " =
let test =
Test.create ~map:big_map ~visible_range:(50, 50) (Test.Component.default ())
in
Handle.recompute_view_until_stable test.handle;
Handle.store_view test.handle;
Handle.input_text
test.handle
~get_vdom:Table.Result.view
~selector:".prt-table-cell:nth-child(2) input"
~text:"hello world";
Handle.recompute_view_until_stable test.handle;
Handle.show_diff test.handle;
[%expect
{|
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 51 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
-| hi
+| hi hello world
</div>
<div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 25.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
<div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 52 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
hi
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 26.000000 </div>
</div>
</div>
</div> |}];
move out of bounds ( really 99 - 25 through 100 )
Test.set_bounds test ~low:99 ~high:99;
Handle.recompute_view_until_stable test.handle;
Handle.store_view test.handle;
(* move back into bounds *)
Test.set_bounds test ~low:50 ~high:50;
Handle.recompute_view_until_stable test.handle;
Handle.show test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 51 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
hi hello world
</div>
<div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 25.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
<div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 52 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
hi
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 26.000000 </div>
</div>
</div>
</div> |}]
;;
let%expect_test "table body is not recomputed more often than necessary" =
  (* The size_tracker and visibility hooks that PRT uses can be called
     by the browser more often than one would expect.  For instance, if
     one places an element over the table, it causes the size_tracker
     hook on every column to fire.  If you have a large table with lots
     of columns and lots of rows, it can be expensive to recompute the
     table body n times, once for each column. *)
  let test = Test.create (Test.Component.default ()) in
  Test.print_message_on_result_recomputation test;
  Test.resize_column test ~idx:0 ~width:1.;
  Test.set_bounds test ~low:0 ~high:300;
  Handle.flush test.handle;
  [%expect {|
    Initialized
    Changed |}];
  (* Sanity check: re-stabilizing after doing no actions does not cause recomputation *)
  Handle.flush test.handle;
  [%expect {| |}];
  (* Re-setting a column to its existing width should not cause a re-fire *)
  Test.resize_column test ~idx:0 ~width:1.;
  Handle.flush test.handle;
  [%expect {| |}];
  (* Re-setting the bounds to the same value should not cause a re-fire *)
  Test.set_bounds test ~low:0 ~high:300;
  Handle.flush test.handle;
  [%expect {| |}]
;;
(* Same property as the previous test, but driving an expert-mode table
   that performs its own collation, so the table result should not
   depend on the bounds reported by the viewport at all. *)
let%expect_test "table body is not recomputed more often than necessary" =
  let test =
    Test.create (fun input _filter_var ->
      let component =
        (* Collate with trivial filter/order and no range restriction. *)
        let%sub collation =
          Table_expert.collate
            ~filter_equal:[%compare.equal: unit]
            ~order_equal:[%compare.equal: unit]
            ~filter_to_predicate:(fun () -> None)
            ~order_to_compare:(fun () -> Unchanged)
            input
            (Value.return
              { Incr_map_collate.Collate.filter = ()
              ; order = ()
              ; key_range = All_rows
              ; rank_range = All_rows
              })
          in
        (* A single "key" column that just prints the row key. *)
        let columns =
          [ Table_expert.Columns.Dynamic_cells.column
              ~label:(Value.return (Vdom.Node.text "key"))
              ~cell:(fun ~key ~data:_ ->
                return
                @@ let%map key = key in
                Vdom.Node.textf "%d" key)
              ()
          ]
          |> Table_expert.Columns.Dynamic_cells.lift
        in
        Table_expert.component
          (module Int)
          ~focus:(By_row { on_change = Value.return (Fn.const Effect.Ignore) })
          ~row_height:(`Px 10)
          ~columns
          collation
      in
      { Test.Component.component
      ; get_vdom = Table_expert.Result.view
      ; get_testing = Table_expert.Result.for_testing
      ; get_inject = Shared.Test.Component.get_inject_expert
      })
  in
  Test.print_message_on_result_recomputation test;
  Test.resize_column test ~idx:0 ~width:1.;
  Test.set_bounds test ~low:0 ~high:300;
  Handle.flush test.handle;
  [%expect {|
    Initialized
    Changed |}];
  (* Sanity check: re-stabilizing after doing no actions does not cause recomputation *)
  Handle.flush test.handle;
  [%expect {| |}];
  (* Changing the bounds should not cause a re-fire because we are doing our own collation
     and don't rely on result.bounds. *)
  Test.set_bounds test ~low:100 ~high:300;
  Handle.flush test.handle;
  [%expect {| |}]
;;
(* Sanity check that these expect tests run outside a real browser: the
   (node-side) [Dom_html.document] value is undefined, so [Js.Optdef.test]
   reports [false].  The [Obj.magic] is only there to coerce the document
   into an [Optdef.t] for the check. *)
let%expect_test "test is browser" =
  let open Js_of_ocaml in
  Dom_html.document |> Obj.magic |> Js_of_ocaml.Js.Optdef.test |> printf "%b";
  [%expect {| false |}]
;;
let%expect_test "sorting" =
let test = Test.create (Test.Component.default ()) in
Handle.recompute_view_until_stable test.handle;
Handle.show test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 0 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
hello
</div>
<div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 1.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
<div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 1 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
there
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 2.000000 </div>
</div>
<div @key=200 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 4 </div>
<div @key=key_200-1 data-row-id="key_200" class="prt-table-cell">
<input oninput> </input>
world
</div>
<div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 2.000000 </div>
</div>
</div>
</div> |}];
(* this one is the key, clicking on it does nothing (it's already sorted by the key) *)
Handle.click_on test.handle ~selector:"td:nth-child(1) > div" ~get_vdom:test.get_vdom;
Handle.show_diff test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
-| <span> ◇ key </span>
+| <span> ⬘ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 0 </div> |}];
(* this one actually does stuff, click on it twice for a reverse sort *)
Handle.click_on test.handle ~selector:"td:nth-child(3) > div" ~get_vdom:test.get_vdom;
Handle.click_on test.handle ~selector:"td:nth-child(3) > div" ~get_vdom:test.get_vdom;
Handle.show_diff test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
-| <span> ⬘ key </span>
+| <span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
-| <span> ◇ b </span>
+| <span> ⬙ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
-| <div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 0 </div>
+| <div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 1 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
-| hello
+| there
</div>
-| <div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 1.000000 </div>
+| <div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 2.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
-| <div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 1 </div>
+| <div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 4 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
-| there
+| world
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 2.000000 </div>
</div>
<div @key=200 class="prt-table-row prt-table-row-even" onclick>
-| <div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 4 </div>
+| <div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 0 </div>
<div @key=key_200-1 data-row-id="key_200" class="prt-table-cell">
<input oninput> </input>
-| world
+| hello
</div>
-| <div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 2.000000 </div>
+| <div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 1.000000 </div>
</div>
</div>
</div> |}];
(* Clicking once more reverts the sort *)
Handle.click_on test.handle ~selector:"td:nth-child(3) > div" ~get_vdom:test.get_vdom;
Handle.show_diff test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
-| <span> ⬙ b </span>
+| <span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
-| <div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 1 </div>
+| <div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 0 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
-| there
+| hello
</div>
-| <div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 2.000000 </div>
+| <div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 1.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
-| <div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 4 </div>
+| <div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 1 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
-| world
+| there
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 2.000000 </div>
</div>
<div @key=200 class="prt-table-row prt-table-row-even" onclick>
-| <div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 0 </div>
+| <div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 4 </div>
<div @key=key_200-1 data-row-id="key_200" class="prt-table-cell">
<input oninput> </input>
-| hello
+| world
</div>
-| <div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 1.000000 </div>
+| <div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 2.000000 </div>
</div>
</div>
</div> |}]
;;
| null | https://raw.githubusercontent.com/TyOverby/mono/7666c0328d194bf9a569fb65babc0486f2aaa40d/vendor/janestreet-bonsai/web_ui/partial_render_table/test/vdom_based_tests.ml | ocaml | No assocs here because it just uses the Incr_map function directly
Change the visibility to show the rest of the nodes
extending the range upwards should only add to the end
move back into bounds
Sanity check: re-stabilizing after doing no actions does not cause recomputation
Re-setting a column to its existing width should not cause a re-fire
Re-setting the bounds to the same value should not cause a re-fire
Sanity check: re-stabilizing after doing no actions does not cause recomputation
Changing the bounds should not cause a re-fire because we are doing our own collation
and don't rely on result.bounds.
this one is the key, clicking on it does nothing (it's already sorted by the key)
this one actually does stuff, click on it twice for a reverse sort
Clicking once more reverts the sort | open! Core
open! Bonsai_web
open! Bonsai_web_test
open Bonsai.Let_syntax
open Shared
module Test = struct
  include Shared.Test

  (* Builds a [Handle]-backed harness around a table component.
     - [visible_range] is the initial viewport (row indices) pushed to the
       table (only if [should_set_bounds]).
     - [map] is the initial row data backing the table.
     - [should_print_styles] controls whether [style.*] vdom attributes are
       kept when printing; they are noisy, so they default to hidden.
     - [component] receives the input map and filter as [Value.t]s and must
       produce a [Component.t] record of accessors. *)
  let create
    (type a)
    ?(visible_range = 0, 100)
    ?(map = small_map)
    ?(should_print_styles = false)
    ?(should_set_bounds = true)
    component
    : a t
    =
    let min_vis, max_vis = visible_range in
    let input_var = Bonsai.Var.create map in
    let filter_var = Bonsai.Var.create (fun ~key:_ ~data:_ -> true) in
    let { Component.component; get_vdom; get_testing = _; get_inject } =
      component (Bonsai.Var.value input_var) (Bonsai.Var.value filter_var)
    in
    let handle =
      Handle.create
        (module struct
          type t = a

          (* Render the component's vdom to a string, optionally filtering
             out style attributes (see [should_print_styles] above). *)
          let view result =
            result
            |> get_vdom
            |> Virtual_dom_test_helpers.Node_helpers.unsafe_convert_exn
            |> Virtual_dom_test_helpers.Node_helpers.to_string_html
                 ~filter_printed_attributes:(function
                   | x when String.is_prefix ~prefix:"style." x -> should_print_styles
                   | _ -> true)
          ;;

          type incoming = Action.t

          let incoming = get_inject
        end)
        component
    in
    let t = { handle; get_vdom; input_var; filter_var } in
    if should_set_bounds then set_bounds t ~low:min_vis ~high:max_vis;
    t
  ;;

  (* Observes the handle's computed vdom and prints a line every time a
     stabilization (re)computes it.  Tests use the absence of output to
     assert that the table body was *not* recomputed. *)
  let print_message_on_result_recomputation t =
    let result = Incr.map (Handle.result_incr t.handle) ~f:t.get_vdom in
    Incr.Observer.on_update_exn (Incr.observe result) ~f:(function
      | Initialized _ -> print_endline "Initialized"
      | Changed _ -> print_endline "Changed"
      | Invalidated -> assert false)
  ;;
end
(* Prints how many [Assoc] and [Assoc_simpl] nodes appear in the sexp of a
   component's internal computation graph.  Used by tests to check that
   Bonsai's assoc-simplification optimization fired. *)
let print_assocs component =
  (* Count how many atoms in [sexp] are exactly equal to [target]. *)
  let rec occurrences target sexp =
    match sexp with
    | Sexp.Atom atom -> if String.equal target atom then 1 else 0
    | List children -> List.sum (module Int) children ~f:(occurrences target)
  in
  let structure =
    Bonsai.Private.Computation.sexp_of_packed
      (Bonsai.Private.reveal_computation component)
  in
  (* These binding names are significant: [%message] prints them. *)
  let assoc_count = occurrences "Assoc" structure in
  let assoc_simple_count = occurrences "Assoc_simpl" structure in
  print_s [%message (assoc_count : int) (assoc_simple_count : int)]
;;
let%expect_test "simplified_assocs" =
let { Test.Component.component; _ } =
Test.Component.default
()
(Bonsai.Value.return Int.Map.empty)
(Bonsai.Value.return (fun ~key:_ ~data:_ -> true))
in
print_assocs component;
  (* there's only one assoc because all the columns are inside of an assoc
     per-row instead of it being the other way around as you might have
     expected. *)
[%expect {| ((assoc_count 1) (assoc_simple_count 2)) |}]
;;
(* The dynamic-columns variant of the table should contain no assocs at
   all — neither full [Assoc] nodes nor simplified ones. *)
let%expect_test "simplified_assocs on the dynamic columns" =
  let { Test.Component.component; _ } =
    Test.Component.default'
      ()
      (Bonsai.Value.return Int.Map.empty)
      (Bonsai.Value.return (fun ~key:_ ~data:_ -> true))
  in
  print_assocs component;
  [%expect {| ((assoc_count 0) (assoc_simple_count 0)) |}]
;;
let%expect_test "column visibility" =
let is_column_b_visible_var = Bonsai.Var.create true in
let is_column_b_visible = Bonsai.Var.value is_column_b_visible_var in
let test =
Test.create ~should_print_styles:true (Test.Component.default ~is_column_b_visible ())
in
Handle.recompute_view_until_stable test.handle;
Handle.store_view test.handle;
Bonsai.Var.set is_column_b_visible_var false;
Handle.recompute_view_until_stable test.handle;
Handle.show_diff test.handle;
[%expect
{|
resize: horizontal;
overflow: hidden;
width: 50px;
}>
<div> a </div>
</td>
<td colspan="1"
freeze_width=((set <fun>)(reset <fun>))
size_tracker=<fun>
style={
text-align: center;
user-select: none;
font-weight: bold;
resize: horizontal;
overflow: hidden;
width: 50px;
+| display: none;
}>
<div onclick style={ white-space: pre; cursor: pointer; }>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun> style={ height: 3px; position: relative; }>
<div @key=0
class="prt-table-row prt-table-row-even"
onclick
style={
top: 0px;
position: absolute;
max-height: 1px;
width: 0.00px;
min-width: 0.00px;
max-width: 0.00px;
}>
<input oninput> </input>
hello
</div>
<div @key=key_0-2
data-row-id="key_0"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
-| display: inline-block;
+| display: none;
contain: strict;
-| width: 0.00px;
-| min-width: 0.00px;
-| max-width: 0.00px;
-| }> 1.000000 </div>
+| }> </div>
</div>
<div @key=100
class="prt-table-row prt-table-row-odd"
onclick
style={
top: 1px;
position: absolute;
max-height: 1px;
width: max-content;
}>
<div @key=key_100-0
data-row-id="key_100"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
width: 0.00px;
min-width: 0.00px;
max-width: 0.00px;
}>
<input oninput> </input>
there
</div>
<div @key=key_100-2
data-row-id="key_100"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
-| display: inline-block;
+| display: none;
contain: strict;
-| width: 0.00px;
-| min-width: 0.00px;
-| max-width: 0.00px;
-| }> 2.000000 </div>
+| }> </div>
</div>
<div @key=200
class="prt-table-row prt-table-row-even"
onclick
style={
top: 2px;
position: absolute;
max-height: 1px;
width: max-content;
}>
<div @key=key_200-0
data-row-id="key_200"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
width: 0.00px;
min-width: 0.00px;
max-width: 0.00px;
}>
<input oninput> </input>
world
</div>
<div @key=key_200-2
data-row-id="key_200"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
-| display: inline-block;
+| display: none;
contain: strict;
-| width: 0.00px;
-| min-width: 0.00px;
-| max-width: 0.00px;
-| }> 2.000000 </div>
+| }> </div>
</div>
</div>
</div> |}]
;;
let%expect_test "stabilization of view range" =
let test =
Test.create (Test.Component.default ()) ~visible_range:(0, 2) ~should_set_bounds:false
in
Handle.recompute_view_until_stable test.handle;
Handle.show test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 0 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
hello
</div>
<div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 1.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
<div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 1 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
there
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 2.000000 </div>
</div>
</div>
</div> |}];
Handle.show_diff test.handle;
[%expect {| |}];
Handle.recompute_view_until_stable test.handle;
Handle.show_diff test.handle;
[%expect {||}];
Test.set_bounds test ~low:0 ~high:100;
Handle.recompute_view_until_stable test.handle;
Handle.show_diff test.handle;
[%expect
{|
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 0 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
hello
</div>
<div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 1.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
<div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 1 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
there
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 2.000000 </div>
</div>
+| <div @key=200 class="prt-table-row prt-table-row-even" onclick>
+| <div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 4 </div>
+| <div @key=key_200-1 data-row-id="key_200" class="prt-table-cell">
+| <input oninput> </input>
+| world
+| </div>
+| <div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 2.000000 </div>
+| </div>
</div>
</div> |}]
;;
let%expect_test "resize-column" =
let test = Test.create ~should_print_styles:true (Test.Component.default ()) in
Handle.recompute_view_until_stable test.handle;
Handle.store_view test.handle;
Test.resize_column test ~idx:0 ~width:10.0;
Handle.recompute_view_until_stable test.handle;
Handle.show_diff test.handle;
[%expect
{|
top: 0px;
position: absolute;
max-height: 1px;
width: max-content;
}>
<div @key=key_0-0
data-row-id="key_0"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
display: inline-block;
contain: strict;
-| width: 0.00px;
+| width: 10.00px;
-| min-width: 0.00px;
+| min-width: 10.00px;
-| max-width: 0.00px;
+| max-width: 10.00px;
}> 0 </div>
<div @key=key_0-1
data-row-id="key_0"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
display: inline-block;
contain: strict;
width: 0.00px;
min-width: 0.00px;
max-width: 0.00px;
}>
top: 1px;
position: absolute;
max-height: 1px;
width: max-content;
}>
<div @key=key_100-0
data-row-id="key_100"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
display: inline-block;
contain: strict;
-| width: 0.00px;
+| width: 10.00px;
-| min-width: 0.00px;
+| min-width: 10.00px;
-| max-width: 0.00px;
+| max-width: 10.00px;
}> 1 </div>
<div @key=key_100-1
data-row-id="key_100"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
display: inline-block;
contain: strict;
width: 0.00px;
min-width: 0.00px;
max-width: 0.00px;
}>
top: 2px;
position: absolute;
max-height: 1px;
width: max-content;
}>
<div @key=key_200-0
data-row-id="key_200"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
display: inline-block;
contain: strict;
-| width: 0.00px;
+| width: 10.00px;
-| min-width: 0.00px;
+| min-width: 10.00px;
-| max-width: 0.00px;
+| max-width: 10.00px;
}> 4 </div>
<div @key=key_200-1
data-row-id="key_200"
class="prt-table-cell"
style={
height: 1px;
min-height: 1px;
max-height: 1px;
box-sizing: border-box;
overflow: hidden;
display: inline-block;
contain: strict;
width: 0.00px;
min-width: 0.00px;
max-width: 0.00px;
}> |}]
;;
let%expect_test "big table" =
  (* The PRT always renders [low-25, high+25], so 50,50 will render a big chunk
     centered at 50 *)
let test =
Test.create ~map:big_map ~visible_range:(50, 50) (Test.Component.default ())
in
Handle.recompute_view_until_stable test.handle;
Handle.show test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 51 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
hi
</div>
<div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 25.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
<div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 52 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
hi
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 26.000000 </div>
</div>
</div>
</div> |}];
Test.set_bounds test ~low:55 ~high:60;
Handle.recompute_view_until_stable test.handle;
Handle.show_diff test.handle;
[%expect
{|
<div onclick>
<span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
-| <div @key=0 class="prt-table-row prt-table-row-even" onclick>
+| <div @key=0 class="prt-table-row prt-table-row-odd" onclick>
-| <div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 51 </div>
+| <div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 56 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
hi
</div>
-| <div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 25.000000 </div>
+| <div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 28.000000 </div>
</div>
-| <div @key=100 class="prt-table-row prt-table-row-odd" onclick>
+| <div @key=100 class="prt-table-row prt-table-row-even" onclick>
-| <div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 52 </div>
+| <div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 57 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
hi
</div>
-| <div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 26.000000 </div>
+| <div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 28.000000 </div>
+| </div>
+| <div @key=200 class="prt-table-row prt-table-row-odd" onclick>
+| <div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 58 </div>
+| <div @key=key_200-1 data-row-id="key_200" class="prt-table-cell">
+| <input oninput> </input>
+| hi
+| </div>
+| <div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 29.000000 </div>
+| </div>
+| <div @key=300 class="prt-table-row prt-table-row-even" onclick>
+| <div @key=key_300-0 data-row-id="key_300" class="prt-table-cell"> 59 </div>
+| <div @key=key_300-1 data-row-id="key_300" class="prt-table-cell">
+| <input oninput> </input>
+| hi
+| </div>
+| <div @key=key_300-2 data-row-id="key_300" class="prt-table-cell"> 29.000000 </div>
+| </div>
+| <div @key=400 class="prt-table-row prt-table-row-odd" onclick>
+| <div @key=key_400-0 data-row-id="key_400" class="prt-table-cell"> 60 </div>
+| <div @key=key_400-1 data-row-id="key_400" class="prt-table-cell">
+| <input oninput> </input>
+| hi
+| </div>
+| <div @key=key_400-2 data-row-id="key_400" class="prt-table-cell"> 30.000000 </div>
+| </div>
+| <div @key=500 class="prt-table-row prt-table-row-even" onclick>
+| <div @key=key_500-0 data-row-id="key_500" class="prt-table-cell"> 61 </div>
+| <div @key=key_500-1 data-row-id="key_500" class="prt-table-cell">
+| <input oninput> </input>
+| hi
+| </div>
+| <div @key=key_500-2 data-row-id="key_500" class="prt-table-cell"> 30.000000 </div>
+| </div>
+| <div @key=600 class="prt-table-row prt-table-row-odd" onclick>
+| <div @key=key_600-0 data-row-id="key_600" class="prt-table-cell"> 62 </div>
+| <div @key=key_600-1 data-row-id="key_600" class="prt-table-cell">
+| <input oninput> </input>
+| hi
+| </div>
+| <div @key=key_600-2 data-row-id="key_600" class="prt-table-cell"> 31.000000 </div>
</div>
</div>
</div> |}]
;;
let%expect_test "typing into a column, leaving that column, and then coming back. " =
let test =
Test.create ~map:big_map ~visible_range:(50, 50) (Test.Component.default ())
in
Handle.recompute_view_until_stable test.handle;
Handle.store_view test.handle;
Handle.input_text
test.handle
~get_vdom:Table.Result.view
~selector:".prt-table-cell:nth-child(2) input"
~text:"hello world";
Handle.recompute_view_until_stable test.handle;
Handle.show_diff test.handle;
[%expect
{|
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 51 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
-| hi
+| hi hello world
</div>
<div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 25.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
<div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 52 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
hi
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 26.000000 </div>
</div>
</div>
</div> |}];
  (* move out of bounds (really 99-25 through 100) *)
Test.set_bounds test ~low:99 ~high:99;
Handle.recompute_view_until_stable test.handle;
Handle.store_view test.handle;
Test.set_bounds test ~low:50 ~high:50;
Handle.recompute_view_until_stable test.handle;
Handle.show test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 51 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
hi hello world
</div>
<div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 25.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
<div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 52 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
hi
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 26.000000 </div>
</div>
</div>
</div> |}]
;;
let%expect_test "table body is not recomputed more often than necessary" =
  (* The size_tracker and visibility hooks that PRT uses can be called by the browser more
     often than one would expect. For instance, if one places an element over the table,
     it causes the size_tracker hook on every column to fire. If you have a large table
     with lots of columns and lots of rows, it can be expensive to recompute the table
     body n times, once for each column. *)
let test = Test.create (Test.Component.default ()) in
Test.print_message_on_result_recomputation test;
Test.resize_column test ~idx:0 ~width:1.;
Test.set_bounds test ~low:0 ~high:300;
Handle.flush test.handle;
[%expect {|
Initialized
Changed |}];
Handle.flush test.handle;
[%expect {| |}];
Test.resize_column test ~idx:0 ~width:1.;
Handle.flush test.handle;
[%expect {| |}];
Test.set_bounds test ~low:0 ~high:300;
Handle.flush test.handle;
[%expect {| |}]
;;
(* Recomputation test for [Table_expert] with a hand-rolled trivial
   collation (no filtering, no reordering), so the component does not
   depend on [result.bounds]. *)
let%expect_test "table body is not recomputed more often than necessary" =
  let test =
    Test.create (fun input _filter_var ->
      let component =
        (* Collate with [unit] filter/order so collation never changes the
           input rows; [All_rows] keeps every row in range. *)
        let%sub collation =
          Table_expert.collate
            ~filter_equal:[%compare.equal: unit]
            ~order_equal:[%compare.equal: unit]
            ~filter_to_predicate:(fun () -> None)
            ~order_to_compare:(fun () -> Unchanged)
            input
            (Value.return
               { Incr_map_collate.Collate.filter = ()
               ; order = ()
               ; key_range = All_rows
               ; rank_range = All_rows
               })
        in
        (* A single column that simply renders each row's key. *)
        let columns =
          [ Table_expert.Columns.Dynamic_cells.column
              ~label:(Value.return (Vdom.Node.text "key"))
              ~cell:(fun ~key ~data:_ ->
                return
                @@ let%map key = key in
                Vdom.Node.textf "%d" key)
              ()
          ]
          |> Table_expert.Columns.Dynamic_cells.lift
        in
        Table_expert.component
          (module Int)
          ~focus:(By_row { on_change = Value.return (Fn.const Effect.Ignore) })
          ~row_height:(`Px 10)
          ~columns
          collation
      in
      { Test.Component.component
      ; get_vdom = Table_expert.Result.view
      ; get_testing = Table_expert.Result.for_testing
      ; get_inject = Shared.Test.Component.get_inject_expert
      })
  in
  Test.print_message_on_result_recomputation test;
  Test.resize_column test ~idx:0 ~width:1.;
  Test.set_bounds test ~low:0 ~high:300;
  Handle.flush test.handle;
  [%expect {|
    Initialized
    Changed |}];
  (* Re-stabilizing after doing no actions should not recompute. *)
  Handle.flush test.handle;
  [%expect {| |}];
  (* Changing the bounds should not re-fire: this test does its own
     collation and does not rely on [result.bounds]. *)
  Test.set_bounds test ~low:100 ~high:300;
  Handle.flush test.handle;
  [%expect {| |}]
;;
(* Sanity check that these expect tests run outside a real browser: the
   (node-side) [Dom_html.document] value is undefined, so [Js.Optdef.test]
   reports [false].  The [Obj.magic] only coerces the document into an
   [Optdef.t] for the check. *)
let%expect_test "test is browser" =
  let open Js_of_ocaml in
  Dom_html.document |> Obj.magic |> Js_of_ocaml.Js.Optdef.test |> printf "%b";
  [%expect {| false |}]
;;
let%expect_test "sorting" =
let test = Test.create (Test.Component.default ()) in
Handle.recompute_view_until_stable test.handle;
Handle.show test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 0 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
hello
</div>
<div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 1.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
<div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 1 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
there
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 2.000000 </div>
</div>
<div @key=200 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 4 </div>
<div @key=key_200-1 data-row-id="key_200" class="prt-table-cell">
<input oninput> </input>
world
</div>
<div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 2.000000 </div>
</div>
</div>
</div> |}];
Handle.click_on test.handle ~selector:"td:nth-child(1) > div" ~get_vdom:test.get_vdom;
Handle.show_diff test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
-| <span> ◇ key </span>
+| <span> ⬘ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
<div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 0 </div> |}];
Handle.click_on test.handle ~selector:"td:nth-child(3) > div" ~get_vdom:test.get_vdom;
Handle.click_on test.handle ~selector:"td:nth-child(3) > div" ~get_vdom:test.get_vdom;
Handle.show_diff test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
-| <span> ⬘ key </span>
+| <span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
-| <span> ◇ b </span>
+| <span> ⬙ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
-| <div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 0 </div>
+| <div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 1 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
-| hello
+| there
</div>
-| <div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 1.000000 </div>
+| <div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 2.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
-| <div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 1 </div>
+| <div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 4 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
-| there
+| world
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 2.000000 </div>
</div>
<div @key=200 class="prt-table-row prt-table-row-even" onclick>
-| <div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 4 </div>
+| <div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 0 </div>
<div @key=key_200-1 data-row-id="key_200" class="prt-table-cell">
<input oninput> </input>
-| world
+| hello
</div>
-| <div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 2.000000 </div>
+| <div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 1.000000 </div>
</div>
</div>
</div> |}];
Handle.click_on test.handle ~selector:"td:nth-child(3) > div" ~get_vdom:test.get_vdom;
Handle.show_diff test.handle;
[%expect
{|
<div class="partial-render-table-bonsai_path_replaced_in_test">
<table class="prt-table-header" size_tracker=<fun>>
<tbody>
<tr>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
<span> ◇ key </span>
</div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div> a </div>
</td>
<td colspan="1" freeze_width=((set <fun>)(reset <fun>)) size_tracker=<fun>>
<div onclick>
-| <span> ⬙ b </span>
+| <span> ◇ b </span>
</div>
</td>
</tr>
</tbody>
</table>
<div bounds-change=<fun>>
<div @key=0 class="prt-table-row prt-table-row-even" onclick>
-| <div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 1 </div>
+| <div @key=key_0-0 data-row-id="key_0" class="prt-table-cell"> 0 </div>
<div @key=key_0-1 data-row-id="key_0" class="prt-table-cell">
<input oninput> </input>
-| there
+| hello
</div>
-| <div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 2.000000 </div>
+| <div @key=key_0-2 data-row-id="key_0" class="prt-table-cell"> 1.000000 </div>
</div>
<div @key=100 class="prt-table-row prt-table-row-odd" onclick>
-| <div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 4 </div>
+| <div @key=key_100-0 data-row-id="key_100" class="prt-table-cell"> 1 </div>
<div @key=key_100-1 data-row-id="key_100" class="prt-table-cell">
<input oninput> </input>
-| world
+| there
</div>
<div @key=key_100-2 data-row-id="key_100" class="prt-table-cell"> 2.000000 </div>
</div>
<div @key=200 class="prt-table-row prt-table-row-even" onclick>
-| <div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 0 </div>
+| <div @key=key_200-0 data-row-id="key_200" class="prt-table-cell"> 4 </div>
<div @key=key_200-1 data-row-id="key_200" class="prt-table-cell">
<input oninput> </input>
-| hello
+| world
</div>
-| <div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 1.000000 </div>
+| <div @key=key_200-2 data-row-id="key_200" class="prt-table-cell"> 2.000000 </div>
</div>
</div>
</div> |}]
;;
|
25ab5075cc0e3d3bc3c141b17288f7bd1f982571959436e0b78a181969cc834d | ocaml-sf/learn-ocaml-corpus | test26.ml | (* The empty random access list. *)
let empty : 'a . 'a seq =
Nil
(* Example random access lists. *)
let test24 : int seq =
Zero (One ((2, 6), Nil))
| null | https://raw.githubusercontent.com/ocaml-sf/learn-ocaml-corpus/7dcf4d72b49863a3e37e41b3c3097aa4c6101a69/exercises/fpottier/random_access_lists/wrong/test26.ml | ocaml | The empty random access list.
Example random access lists. |
let empty : 'a . 'a seq =
Nil
let test24 : int seq =
Zero (One ((2, 6), Nil))
|
27949a98394afaac22c1f74f16ba80f5933567251a8bd209ee7c9dee17b120e3 | atdixon/me.untethr.nostr-desk | avatar.clj | (ns me.untethr.nostr.avatar
(:require [me.untethr.nostr.util :as util]
[me.untethr.nostr.cache :as cache])
(:import (javafx.scene.image Image)))
(def color
(memoize
(fn [^String public-key]
(util/rand-hex-color (.hashCode public-key)))))
(defonce image-cache
(cache/build-loading "initialCapacity=500,maximumSize=1000"
(fn [[picture-url avatar-dim]]
(Image. picture-url ^double avatar-dim ^double avatar-dim true true true))))
| null | https://raw.githubusercontent.com/atdixon/me.untethr.nostr-desk/0ef74ca5e1a80ce71abd84a1569d71e3cba2be9a/src/me/untethr/nostr/avatar.clj | clojure | (ns me.untethr.nostr.avatar
(:require [me.untethr.nostr.util :as util]
[me.untethr.nostr.cache :as cache])
(:import (javafx.scene.image Image)))
(def color
(memoize
(fn [^String public-key]
(util/rand-hex-color (.hashCode public-key)))))
(defonce image-cache
(cache/build-loading "initialCapacity=500,maximumSize=1000"
(fn [[picture-url avatar-dim]]
(Image. picture-url ^double avatar-dim ^double avatar-dim true true true))))
|
|
c2972aaf704409d1983356c34cb782af630d575ae0a19713feb6349e269f3b85 | paurkedal/ocaml-radixmap | bitword.mli | Copyright ( C ) 2017 - -2022 Petter A. Urkedal < >
*
* This library is free software ; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version , with the LGPL-3.0 Linking Exception .
*
* This library is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* and the LGPL-3.0 Linking Exception along with this library . If not , see
* < / > and < > , respectively .
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version, with the LGPL-3.0 Linking Exception.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* and the LGPL-3.0 Linking Exception along with this library. If not, see
* </> and <>, respectively.
*)
* Limited - length bit vectors represented by a unboxed integers .
This module provides bit vectors up to a maximum length , limited by what can
be stored in an OCaml [ int ] . The maximum length is architecture - dependent
but guaranteed to be a multiple of 8 and at least 24 .
This module provides bit vectors up to a maximum length, limited by what can
be stored in an OCaml [int]. The maximum length is architecture-dependent
but guaranteed to be a multiple of 8 and at least 24.
*)
type t = private int
* { 2 Inspection and Slicing }
val max_length : int
val length : t -> int
val bits : t -> int
* { 2 Comparison }
val equal : t -> t -> bool
val compare : t -> t -> int
val common_prefix_length : t -> t -> int
* { 2 Construction }
val is_empty : t -> bool
val is_full : t -> bool
val empty : t
val c0 : t
val c1 : t
val c00 : t
val c01 : t
val c10 : t
val c11 : t
val const : int -> bool -> t
val const_unsafe : int -> bool -> t
val make : int -> int -> t
val make_unsafe : int -> int -> t
val cat_exn : t -> t -> t
val cat_rem : t -> t -> t * t
val random_uniform : int -> t
* { 2 Endian - Dependent Operations }
module type ENDIAN_API = sig
val init : int -> (int -> bool) -> t
val get : t -> int -> bool
(** {2 Parts} *)
val prefix : int -> t -> t
val prefix_unsafe : int -> t -> t
val suffix : int -> t -> t
val suffix_unsafe : int -> t -> t
val cut : int -> t -> t * t
(** {2 Queue-Like Operations}
These functions operate around index 0. That is, for big-endian it
changes the prefix bit and for low-endian the suffix bit. *)
val push_exn : bool -> t -> t
val push_c0_exn : t -> t
val push_c0_unsafe : t -> t
val push_c1_exn : t -> t
val push_c1_unsafe : t -> t
val drop_exn : t -> t
val drop_unsafe : t -> t
val pop_exn : t -> bool * t
val pop_unsafe : t -> bool * t
end
module Be : ENDIAN_API
module Le : ENDIAN_API
* { 2 Parsing and Pretty - Printing }
val pp_set : Format.formatter -> t -> unit
val pp_base2 : Format.formatter -> t -> unit
val to_base2_string : t -> string
val of_base2_string_exn : string -> t
| null | https://raw.githubusercontent.com/paurkedal/ocaml-radixmap/93d0df62ac1da91c42dfdbac579e0e55a2dbdd5e/lib/bitword.mli | ocaml | * {2 Parts}
* {2 Queue-Like Operations}
These functions operate around index 0. That is, for big-endian it
changes the prefix bit and for low-endian the suffix bit. | Copyright ( C ) 2017 - -2022 Petter A. Urkedal < >
*
* This library is free software ; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version , with the LGPL-3.0 Linking Exception .
*
* This library is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* and the LGPL-3.0 Linking Exception along with this library . If not , see
* < / > and < > , respectively .
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version, with the LGPL-3.0 Linking Exception.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* and the LGPL-3.0 Linking Exception along with this library. If not, see
* </> and <>, respectively.
*)
* Limited - length bit vectors represented by a unboxed integers .
This module provides bit vectors up to a maximum length , limited by what can
be stored in an OCaml [ int ] . The maximum length is architecture - dependent
but guaranteed to be a multiple of 8 and at least 24 .
This module provides bit vectors up to a maximum length, limited by what can
be stored in an OCaml [int]. The maximum length is architecture-dependent
but guaranteed to be a multiple of 8 and at least 24.
*)
type t = private int
* { 2 Inspection and Slicing }
val max_length : int
val length : t -> int
val bits : t -> int
* { 2 Comparison }
val equal : t -> t -> bool
val compare : t -> t -> int
val common_prefix_length : t -> t -> int
* { 2 Construction }
val is_empty : t -> bool
val is_full : t -> bool
val empty : t
val c0 : t
val c1 : t
val c00 : t
val c01 : t
val c10 : t
val c11 : t
val const : int -> bool -> t
val const_unsafe : int -> bool -> t
val make : int -> int -> t
val make_unsafe : int -> int -> t
val cat_exn : t -> t -> t
val cat_rem : t -> t -> t * t
val random_uniform : int -> t
* { 2 Endian - Dependent Operations }
module type ENDIAN_API = sig
val init : int -> (int -> bool) -> t
val get : t -> int -> bool
val prefix : int -> t -> t
val prefix_unsafe : int -> t -> t
val suffix : int -> t -> t
val suffix_unsafe : int -> t -> t
val cut : int -> t -> t * t
val push_exn : bool -> t -> t
val push_c0_exn : t -> t
val push_c0_unsafe : t -> t
val push_c1_exn : t -> t
val push_c1_unsafe : t -> t
val drop_exn : t -> t
val drop_unsafe : t -> t
val pop_exn : t -> bool * t
val pop_unsafe : t -> bool * t
end
module Be : ENDIAN_API
module Le : ENDIAN_API
* { 2 Parsing and Pretty - Printing }
val pp_set : Format.formatter -> t -> unit
val pp_base2 : Format.formatter -> t -> unit
val to_base2_string : t -> string
val of_base2_string_exn : string -> t
|
be0c9475fc3f1e46e8b8d16f9fc8996d4c9a7085f62ffd01b81e6af393c95c38 | emina/rosette | imdb250_4.rkt | #lang rosette
(require (only-in racket/runtime-path define-runtime-path))
(require "../dom.rkt")
(require "../websynth.rkt")
(require "../websynthlib.rkt")
(define-runtime-path html (build-path ".." "html/imdb250.html"))
(define dom (read-DOMNode html))
(define-tags (tags dom))
(define max_zpath_depth (depth dom))
; Record 0 fields
(define-symbolic r0f0zpath tag? #:length max_zpath_depth)
(define-symbolic r0fieldmask boolean? #:length max_zpath_depth)
; Record 1 fields
(define-symbolic r1f0zpath tag? #:length max_zpath_depth)
(define-symbolic r1fieldmask boolean? #:length max_zpath_depth)
; Record 2 fields
(define-symbolic r2f0zpath tag? #:length max_zpath_depth)
(define-symbolic r2fieldmask boolean? #:length max_zpath_depth)
; Record 3 fields
(define-symbolic r3f0zpath tag? #:length max_zpath_depth)
(define-symbolic r3fieldmask boolean? #:length max_zpath_depth)
; Cross-record Mask
(define-symbolic recordmask boolean? #:length max_zpath_depth)
(current-bitwidth #f)
(define (demonstration)
; Record 0 zpath asserts
(assert (path? r0f0zpath dom "The Shawshank Redemption"))
; Record 1 zpath asserts
(assert (path? r1f0zpath dom "Fight Club"))
; Record 2 zpath asserts
(assert (path? r2f0zpath dom "The Big Sleep"))
; Record 3 zpath asserts
(assert (path? r3f0zpath dom "In the Mood for Love"))
; Record Mask
(generate-mask r0f0zpath r1f0zpath recordmask max_zpath_depth))
; Solve
(define (scrape)
(define sol (solve (demonstration)))
; Record 0 zpaths
; Record 1 zpaths
; Record 2 zpaths
; Record 3 zpaths
Construct final zpaths
(define r0f0zpath_list (map label (evaluate r0f0zpath sol)))
(define generalized_r0f0zpath_list
(apply-mask r0f0zpath_list (evaluate recordmask sol)))
(define field0_zpath (synthsis_solution->zpath generalized_r0f0zpath_list))
(zip
(DOM-Flatten (DOM-XPath dom field0_zpath))
))
(scrape)
| null | https://raw.githubusercontent.com/emina/rosette/a64e2bccfe5876c5daaf4a17c5a28a49e2fbd501/sdsl/websynth/benchmarks/imdb250_4.rkt | racket | Record 0 fields
Record 1 fields
Record 2 fields
Record 3 fields
Cross-record Mask
Record 0 zpath asserts
Record 1 zpath asserts
Record 2 zpath asserts
Record 3 zpath asserts
Record Mask
Solve
Record 0 zpaths
Record 1 zpaths
Record 2 zpaths
Record 3 zpaths | #lang rosette
(require (only-in racket/runtime-path define-runtime-path))
(require "../dom.rkt")
(require "../websynth.rkt")
(require "../websynthlib.rkt")
(define-runtime-path html (build-path ".." "html/imdb250.html"))
(define dom (read-DOMNode html))
(define-tags (tags dom))
(define max_zpath_depth (depth dom))
(define-symbolic r0f0zpath tag? #:length max_zpath_depth)
(define-symbolic r0fieldmask boolean? #:length max_zpath_depth)
(define-symbolic r1f0zpath tag? #:length max_zpath_depth)
(define-symbolic r1fieldmask boolean? #:length max_zpath_depth)
(define-symbolic r2f0zpath tag? #:length max_zpath_depth)
(define-symbolic r2fieldmask boolean? #:length max_zpath_depth)
(define-symbolic r3f0zpath tag? #:length max_zpath_depth)
(define-symbolic r3fieldmask boolean? #:length max_zpath_depth)
(define-symbolic recordmask boolean? #:length max_zpath_depth)
(current-bitwidth #f)
(define (demonstration)
(assert (path? r0f0zpath dom "The Shawshank Redemption"))
(assert (path? r1f0zpath dom "Fight Club"))
(assert (path? r2f0zpath dom "The Big Sleep"))
(assert (path? r3f0zpath dom "In the Mood for Love"))
(generate-mask r0f0zpath r1f0zpath recordmask max_zpath_depth))
(define (scrape)
(define sol (solve (demonstration)))
Construct final zpaths
(define r0f0zpath_list (map label (evaluate r0f0zpath sol)))
(define generalized_r0f0zpath_list
(apply-mask r0f0zpath_list (evaluate recordmask sol)))
(define field0_zpath (synthsis_solution->zpath generalized_r0f0zpath_list))
(zip
(DOM-Flatten (DOM-XPath dom field0_zpath))
))
(scrape)
|
2de286a28d328e92d198357f29bed8d8570a16657a12762f34b1ac3e7a14eeb2 | fortytools/holumbus | ControllerData.hs | -- ----------------------------------------------------------------------------
|
Module : Holumbus . FileSystem . Controller . ControllerData
Copyright : Copyright ( C ) 2008
License : MIT
Maintainer : ( )
Stability : experimental
Portability : portable
Version : 0.1
Module : Holumbus.FileSystem.Controller.ControllerData
Copyright : Copyright (C) 2008 Stefan Schmidt
License : MIT
Maintainer : Stefan Schmidt ()
Stability : experimental
Portability: portable
Version : 0.1
-}
-- ----------------------------------------------------------------------------
module Holumbus.FileSystem.Controller.ControllerData
(
-- * datatypes
ControllerData
-- * creation and destruction
, newController
)
where
import Prelude hiding (appendFile)
import Control.Concurrent
import Data.Maybe
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Data.Set as Set
import System.Log.Logger
import Holumbus.Common.Debug
import Holumbus.Common.Utils ( handleAll )
import qualified Holumbus.FileSystem.Controller as C
import qualified Holumbus.FileSystem.Node as N
import qualified Holumbus.FileSystem.Node.NodePort as NP
import qualified Holumbus.FileSystem.Messages as M
import qualified Holumbus.FileSystem.Storage as S
import Holumbus.Network.Site
import Holumbus.Network.Communication
import Control.Monad (foldM)
import System.Random
localLogger :: String
localLogger = "Holumbus.FileSystem.Controller"
-- ----------------------------------------------------------------------------
--
-- ----------------------------------------------------------------------------
type FileToNodeMap = Map.Map S.FileId (Set.Set M.NodeId)
data FileControllerData = FileControllerData {
cm_FileToNodeMap :: ! FileToNodeMap
}
type FileController = MVar FileControllerData
data ControllerData = ControllerData {
cd_Server :: Server
, cd_FileController :: FileController
}
-- ----------------------------------------------------------------------------
--
-- ----------------------------------------------------------------------------
newFileController :: IO FileController
newFileController
= do
let fc = FileControllerData Map.empty
newMVar fc
newController :: StreamName -> Maybe PortNumber -> IO ControllerData
newController sn pn
= do
-- initialise the server
c <- newEmptyMVar
server <- newServer sn pn (dispatch c) (Just $ registerNode c) (Just $ unregisterNode c)
-- initialize values
fc <- newFileController
let con = ControllerData server fc
putMVar c con
return con
dispatch
:: MVar ControllerData
-> M.ControllerRequestMessage
-> IO (Maybe M.ControllerResponseMessage)
dispatch c msg
= do
cd <-readMVar c
case msg of
(M.CReqGetFileSites f) ->
do
s <- C.getFileSites f cd
return $ Just $ M.CRspGetFileSites s
(M.CReqContains f) ->
do
b <- C.containsFile f cd
return $ Just $ M.CRspContains b
(M.CReqGetNearestNodePortWithFile f sid) ->
do
p <- C.getNearestNodePortWithFile f sid cd
return $ Just $ M.CRspGetNearestNodePortWithFile p
(M.CReqGetNearestNodePortWithFiles l sid) ->
do
portmap <- C.getNearestNodePortWithFiles l sid cd
return $ Just $ M.CRspGetNearestNodePortWithFiles portmap
(M.CReqGetNearestNodePortForFile f l sid) ->
do
p <- C.getNearestNodePortForFile f l sid cd
return $ Just $ M.CRspGetNearestNodePortForFile p
(M.CReqGetNearestNodePortForFiles l sid) ->
do
p <- C.getNearestNodePortForFiles l sid cd
return $ Just $ M.CRspGetNearestNodePortForFiles p
(M.CReqCreate f n) ->
do
C.createFile f n cd
return $ Just $ M.CRspSuccess
(M.CReqCreateS l) ->
do
C.createFiles l cd
return $ Just $ M.CRspSuccess
(M.CReqAppend f n) ->
do
C.appendFile f n cd
return $ Just $ M.CRspSuccess
(M.CReqDelete f n) ->
do
C.deleteFile f n cd
return $ Just $ M.CRspSuccess
_ -> return Nothing
registerNode :: MVar ControllerData -> IdType -> ClientPort -> IO ()
registerNode c i cp
= do
let np = NP.newNodePort cp
fids <- N.getFileIds np
cd <- readMVar c
modifyMVar (cd_FileController cd) $
\fc ->
do
let fc' = addFilesToController fids i fc
return (fc', ())
unregisterNode :: MVar ControllerData -> IdType -> ClientPort -> IO ()
unregisterNode c i _
= do
debugM localLogger "unregisterNode: start"
cd <- readMVar c
modifyMVar (cd_FileController cd) $
\fc ->
do
let fc' = deleteFilesFromController i fc
return (fc', ())
debugM localLogger "unregisterNode: end"
-- ----------------------------------------------------------------------------
--
-- ----------------------------------------------------------------------------
nodes2sites : : ControllerMaps - > [ M.NodeId ] - > [ SiteId ]
nodes2sites ( ControllerMaps _ nsm _ _ _ ) nids = lookupList nsm nids
sites2ports : : ControllerMaps - > [ SiteId ] - > [ NP.NodePort ]
sites2ports ( ControllerMaps _ _ spm _ _ ) sids = lookupList
-- | gets all nodessites
getNodesList : : ControllerMaps - > [ M.NodeId ]
getNodesList ( ControllerMaps _ nsm _ _ _ ) = Map.keys nsm
-- TODO
getNodesWithSpace : : S.FileId - > Integer - > ControllerMaps - > [ SiteId ]
getNodesWithSpace _ _ cm = getSiteList cm
getOtherFileNodes : : S.FileId - > M.NodeId - > ControllerMaps - > [ M.NodeId ]
getOtherFileNodes f nid ( ControllerMaps fnm _ _ _ _ )
= Set.toList $ Set.delete nid allNids
where
allNids = Map.findWithDefault ( Set.empty ) f fnm
-- | the minimal occurrence of a file on the nodes
copyingLimit : : Int
copyingLimit = 2
-- | Get a list of all fileIds that should be copied .
-- That means that the file has not enough copies
-- TODO we only compare the nodeid ... we should look for sites or hosts
getFilesForCopying : : FileControllerData - > [ S.FileId ]
getFilesForCopying cm = ( fst . unzip ) filteredList
where
fnm = cm_FileToNodeMap cm
filteredList = filter setSelector ( Map.toList fnm )
setSelector ( _ , s ) = Set.size s < copyingLimit
nodes2sites :: ControllerMaps -> [M.NodeId] -> [SiteId]
nodes2sites (ControllerMaps _ nsm _ _ _) nids = lookupList nsm nids
sites2ports :: ControllerMaps -> [SiteId] -> [NP.NodePort]
sites2ports (ControllerMaps _ _ spm _ _) sids = lookupList spm sids
-- | gets all nodessites
getNodesList :: ControllerMaps -> [M.NodeId]
getNodesList (ControllerMaps _ nsm _ _ _) = Map.keys nsm
-- TODO
getNodesWithSpace :: S.FileId -> Integer -> ControllerMaps -> [SiteId]
getNodesWithSpace _ _ cm = getSiteList cm
getOtherFileNodes :: S.FileId -> M.NodeId -> ControllerMaps -> [M.NodeId]
getOtherFileNodes f nid (ControllerMaps fnm _ _ _ _)
= Set.toList $ Set.delete nid allNids
where
allNids = Map.findWithDefault (Set.empty) f fnm
-- | the minimal occurrence of a file on the nodes
copyingLimit :: Int
copyingLimit = 2
-- | Get a list of all fileIds that should be copied.
-- That means that the file has not enough copies
-- TODO we only compare the nodeid... we should look for sites or hosts
getFilesForCopying :: FileControllerData -> [S.FileId]
getFilesForCopying cm = (fst . unzip) filteredList
where
fnm = cm_FileToNodeMap cm
filteredList = filter setSelector (Map.toList fnm)
setSelector (_,s) = Set.size s < copyingLimit
-}
-- | Adds the files of a node to the global directory.
addFilesToController :: [S.FileId] -> M.NodeId -> FileControllerData -> FileControllerData
addFilesToController fids nid cm
= cm { cm_FileToNodeMap = fnm' }
where
fnm = cm_FileToNodeMap cm
fnm' = Map.unionWith combine fnm newMap
newMap = Map.fromList $ zip fids (repeat $ Set.singleton nid)
combine s1 s2 = Set.union s1 s2
-- | Deletes the files of a node from the global directory.
deleteFilesFromController :: M.NodeId -> FileControllerData -> FileControllerData
deleteFilesFromController nid cm
= cm { cm_FileToNodeMap = fnm' }
where
fnm = cm_FileToNodeMap cm
fnm' = Map.fromList filteredList
filteredList = filter (\(_,s) -> s /= Set.empty) list
list = map (\(k,s) -> (k, Set.delete nid s)) (Map.toList fnm)
addFileToController :: S.FileId -> M.NodeId -> FileControllerData -> FileControllerData
addFileToController fid nid cm = addFilesToController [ fid ] nid cm
addFileToController fid nid cm = cm { cm_FileToNodeMap = fnm' }
where
fnm = cm_FileToNodeMap cm
fnm' = Map.insert fid nid' fnm
nid' = Set.singleton nid
deleteFileFromController :: S.FileId -> FileControllerData -> FileControllerData
deleteFileFromController fid cm
= cm { cm_FileToNodeMap = fnm' }
where
fnm = cm_FileToNodeMap cm
fnm' = Map.delete fid fnm
-- | gets the List of all sites the file is located on...
getFileClientInfoList :: S.FileId -> Server -> FileControllerData -> IO [ClientInfo]
getFileClientInfoList f s cm
= do
let fnm = cm_FileToNodeMap cm
let is = Set.toList $ maybe Set.empty id (Map.lookup f fnm)
mbDats <- mapM (\i -> getClientInfo i s) is
return $ catMaybes mbDats
shuffle :: [a] -> IO [a]
shuffle l' = shuffle' l' []
where
shuffle' [] acc = return acc
shuffle' l acc = do
k <- randomRIO (0, length l - 1)
let (lead, x:xs) = splitAt k l
shuffle' (lead ++ xs) (x:acc)
lookupNearestPortWithFile :: S.FileId -> SiteId -> Server -> FileControllerData -> IO (Maybe ClientPort)
lookupNearestPortWithFile f sid s cm
= do
dats <- getFileClientInfoList f s cm
let sids' = map (\ci -> ci_Site ci) dats
sids <- shuffle sids'
let mbns = nearestId sid sids
mbdat = maybe Nothing (\ns -> List.find (\ci -> (ci_Site ci) == ns) dats) mbns
mbnp = maybe Nothing (\ci -> Just $ ci_Port ci) mbdat
return mbnp
-- | gets the List of all sites the files are located on...
trude : : SiteId - > [ S.FileId ] - > Server - > FileControllerData - > IO ( Maybe SiteId )
trude currentSite files s cm
= do
let file2node = cm_FileToNodeMap cm ;
nodeIdsWithFiles = concatMap ( \file - > Set.toList $ maybe Set.empty i d ( Map.lookup file file2node ) ) files
sitesIdsWithFiles < - foldM f ( [ ] , [ ] , [ ] ) nodeIdsWithFiles
nearestId ' sitesIdsWithFiles
where
nearestId ' : : ( [ SiteId],[SiteId],[SiteId ] ) - > IO ( Maybe SiteId )
nearestId ' ( [ ] , [ ] , [ ] ) = return Nothing -- no site i d found
nearestId ' ( [ ] , [ ] , xs ) = do
xs ' < - shuffle xs
return . Just . head $ xs ' -- only others ids , shuffle and return the first
nearestId ' ( [ ] , x : _ , _ ) = return $ Just x -- return the hosts stie
nearestId ' ( x : _ , _ , _ ) = return $ Just x -- return the procs site
f : : ( [ SiteId],[SiteId],[SiteId ] ) - > M.NodeId - > IO ( [ SiteId],[SiteId],[SiteId ] )
f ( procs , hosts , others ) i = do
cli < - getClientInfo i s
case cli of
Nothing - > return ( procs , hosts , others )
( Just clientInfo ) - > insert ( procs , hosts , others ) ( ci_Site clientInfo )
insert : : ( [ SiteId],[SiteId],[SiteId ] ) - > SiteId - > IO ( [ SiteId],[SiteId],[SiteId ] )
insert ( procs , hosts , others ) thisSite = if isSameProcess thisSite currentSite
then return ( thisSite : procs , hosts , others )
else if isSameHost thisSite currentSite
then return ( procs , thisSite : hosts , others )
else return ( procs , hosts , thisSite : others )
trude :: SiteId -> [S.FileId] -> Server -> FileControllerData -> IO (Maybe SiteId)
trude currentSite files s cm
= do
let file2node = cm_FileToNodeMap cm;
nodeIdsWithFiles = concatMap (\file -> Set.toList $ maybe Set.empty id (Map.lookup file file2node)) files
sitesIdsWithFiles <- foldM f ([],[],[]) nodeIdsWithFiles
nearestId' sitesIdsWithFiles
where
nearestId' :: ([SiteId],[SiteId],[SiteId]) -> IO (Maybe SiteId)
nearestId' ([], [], []) = return Nothing -- no site id found
nearestId' ([], [], xs) = do
xs' <- shuffle xs
return . Just . head $ xs' -- only others ids, shuffle and return the first
nearestId' ([], x:_, _) = return $ Just x -- return the hosts stie
nearestId' (x:_, _, _) = return $ Just x -- return the procs site
f :: ([SiteId],[SiteId],[SiteId]) -> M.NodeId -> IO ([SiteId],[SiteId],[SiteId])
f (procs,hosts,others) i = do
cli <- getClientInfo i s
case cli of
Nothing -> return (procs,hosts,others)
(Just clientInfo) -> insert (procs,hosts,others) (ci_Site clientInfo)
insert :: ([SiteId],[SiteId],[SiteId]) -> SiteId -> IO ([SiteId],[SiteId],[SiteId])
insert (procs,hosts,others) thisSite = if isSameProcess thisSite currentSite
then return (thisSite:procs,hosts,others)
else if isSameHost thisSite currentSite
then return (procs,thisSite:hosts,others)
else return (procs,hosts,thisSite:others)
-}
lookupNearestPortWithFiles :: [S.FileId] -> SiteId -> Server -> FileControllerData -> IO M.ClientPortMap
lookupNearestPortWithFiles l sid s cm = do
infoM localLogger $ "Getting nearest ports with: " ++ show l
res <- foldM f [] l
infoM localLogger $ "Clientportmap is: " ++ show res
return res
where
f :: M.ClientPortMap -> S.FileId -> IO M.ClientPortMap
f theMap fid = do
infoM localLogger $ "Getting nearest ports with: " ++ fid
maybeport <- lookupNearestPortWithFile fid sid s cm
debugM localLogger $ "Nearest ports: " ++ show maybeport
case maybeport of
(Just port) -> return (ins port fid theMap)
Nothing -> return theMap
lookupNearestPortWithFileAndSpace : : S.FileId - > Integer - > SiteId - > Server - > FileControllerData - > IO ( Maybe ClientPort )
lookupNearestPortWithFileAndSpace f _ size s cm
-- = lookupNearestPortWithFile f sid s cm
dats < - getFileClientInfoList f s cm
-- TODO add Capacity
let sids = map ( \ci - > ci_Site ci ) $ filter ( ... ) dats
= nearestId
= maybe Nothing ( \ns - > List.find ( \(s , _ , _ ) - > s = = ns ) dats ) mbns
mbnp = maybe Nothing ( \(_,np , _ ) - > Just np ) mbdat
return mbnp
-- TODO add Capacity
let sids = map (\ci -> ci_Site ci) $ filter (...) dats
mbns = nearestId sid sids
mbdat = maybe Nothing (\ns -> List.find (\(s,_,_) -> s == ns) dats) mbns
mbnp = maybe Nothing (\(_,np,_) -> Just np) mbdat
return mbnp
-}
TODO add capacity
lookupNearestPortWithSpace :: Integer -> SiteId -> Server -> FileControllerData -> IO (Maybe ClientPort)
lookupNearestPortWithSpace _size sid s _cm
= do
dats <- getAllClientInfos s
let sids' = map (\ci -> ci_Site ci) dats
sids <- shuffle sids'
let mbns = nearestId sid sids
mbdat = maybe Nothing (\ns -> List.find (\ci -> (ci_Site ci) == ns) dats) mbns
mbnp = maybe Nothing (\ci -> Just $ ci_Port ci) mbdat
return mbnp
lookupPortWithoutFile :: S.FileId -> Server -> FileControllerData -> IO (Maybe ClientPort)
lookupPortWithoutFile f s cm
= do
-- get all sites with the files
fileCis <- getFileClientInfoList f s cm
-- get all sites
allCis <- getAllClientInfos s
let fileNids = map ci_Id fileCis
allNids = map ci_Id allCis
nonNids = Set.toList $ Set.difference (Set.fromList allNids) (Set.fromList fileNids)
if (null nonNids)
then return Nothing
else do
let i = head nonNids
mbCi <- getClientInfo i s
case mbCi of
(Just ci) -> return $ Just $ ci_Port ci
(Nothing) -> return Nothing
lookupNearestPortForFile :: S.FileId -> Integer -> SiteId -> Server -> FileControllerData -> IO (Maybe ClientPort)
lookupNearestPortForFile _ size sid s cm
-- if file exists, get nearest node, else the closest with space
= do
nodeWithFile < - lookupNearestPortWithFileAndSpace f size s cm
nodeWithoutFile <- lookupNearestPortWithSpace size sid s cm
let mbnp = maybe Nothing (\np -> Just np) nodeWithoutFile
debugM localLogger $ " lookupNearestPortForFile : file : " + + show f
debugM localLogger $ " lookupNearestPortForFile : size : " + + show size
debugM localLogger $ " lookupNearestPortForFile : site : " + + show " lookupNearestPortForFile : nodeWithFile : " + + show nodeWithFile
debugM localLogger $ " lookupNearestPortForFile : nodeWithoutFile : " + + show nodeWithoutFile
debugM localLogger $ " lookupNearestPortForFile : result : " + + show mbnp
debugM localLogger $ "lookupNearestPortForFile: size: " ++ show size
debugM localLogger $ "lookupNearestPortForFile: site: " ++ show sid
debugM localLogger $ "lookupNearestPortForFile: nodeWithFile: " ++ show nodeWithFile
debugM localLogger $ "lookupNearestPortForFile: nodeWithoutFile: " ++ show nodeWithoutFile
debugM localLogger $ "lookupNearestPortForFile: result: " ++ show mbnp
-} return mbnp
= do
dats < - getAllClientInfos s : : [ ClientInfo ]
let sids ' = map ( \ci - > ci_Site ci ) dats
sids < - shuffle sids '
let mbns = nearestId
= maybe Nothing ( \ns - > List.find ( \ci - > ( ci_Site ci ) = = ns ) dats ) mbns
mbnp = maybe Nothing ( \ci - > Just $ ci_Port ci ) mbdat
return mbnp
dats <- getAllClientInfos s :: [ClientInfo]
let sids' = map (\ci -> ci_Site ci) dats
sids <- shuffle sids'
let mbns = nearestId sid sids
mbdat = maybe Nothing (\ns -> List.find (\ci -> (ci_Site ci) == ns) dats) mbns
mbnp = maybe Nothing (\ci -> Just $ ci_Port ci) mbdat
return mbnp-}
lookupNearestPortForFiles :: [(S.FileId,Integer)] -> SiteId -> Server -> FileControllerData -> IO M.ClientPortMap
lookupNearestPortForFiles l sid s cm = do
nearestPortWithSpace <- lookupNearestPortWithSpace 0 sid s cm
case nearestPortWithSpace of
Nothing -> return []
(Just p) -> return [(p,map fst l)]
-- infoM localLogger $ "Getting nearest ports for: " ++ show l
-- res <- foldM f [] l
-- infoM localLogger $ "Clientportmap is: " ++ show res
-- return res
-- where
--
f : : M.ClientPortMap - > ( S.FileId , Integer ) - > IO M.ClientPortMap
-- f theMap (fid,len) = do
-- infoM localLogger $ "Getting nearest ports for: " ++ fid
-- debugM localLogger $ "Nearest ports: " ++ show mp
mp of
( Just port ) - > return ( ins port fid theMap )
Nothing - > return theMap
return cpm
ins :: ClientPort -> S.FileId -> M.ClientPortMap -> M.ClientPortMap
ins port fid [] = [(port,[fid])]
ins port fid ((p,fids):[]) = if (p==port)
then [(p,(fid:fids))]
else [(port,[fid]),(p,fids)]
ins port fid ((p,fids):ps) = if (p==port)
then ((p,fid:fids):ps)
else (p,fids):(ins port fid ps)
--
getOtherFilePorts :: S.FileId -> IdType -> Server -> FileControllerData -> IO [ClientInfo]
getOtherFilePorts f nid s cm
= do
let fnm = cm_FileToNodeMap cm
-- get all nodes which hold the file without the given node
let otherids = Set.toList $ Set.delete nid $ maybe Set.empty id (Map.lookup f fnm)
mbDats <- mapM (\i -> getClientInfo i s) otherids
return $ catMaybes mbDats
deleteFileFromNodes :: S.FileId -> [NP.NodePort] -> IO ()
deleteFileFromNodes fid nps = sequence_ $ map deleteFileFromNode nps
where
deleteFileFromNode np
= do
handleAll (\e -> putStrLn $ show e) $
do
-- send a delete-request to the node
-- but don't inform the controller again (False)
N.deleteFile fid False np
return ()
-- ----------------------------------------------------------------------------
--
-- ----------------------------------------------------------------------------
instance C.ControllerClass ControllerData where
closeController cd
= do
debugM localLogger "closing Server"
closeServer (cd_Server cd)
debugM localLogger "server closed"
getFileSites : : S.FileId - > Controller - > IO ( Set . Set SiteId )
getFileSites f cd
= withMVar (cd_FileController cd) $
\fc ->
do
dats <- getFileClientInfoList f (cd_Server cd) fc
let sids = map (\ci -> ci_Site ci) dats
return (Set.fromList sids)
containsFile : : S.FileId - > Controller - > IO Bool
containsFile f cd
= withMVar (cd_FileController cd) $
\fc -> return $ Map.member f (cm_FileToNodeMap fc)
getNearestNodePortWithFile : : S.FileId - > SiteId - > c - > IO ( Maybe M.NodeRequestPort )
getNearestNodePortWithFile f sid cd
= withMVar (cd_FileController cd) $
\fc -> lookupNearestPortWithFile f sid (cd_Server cd) fc
getNearestNodePortWithFiles : : [ S.FileId ] - > SiteId - > c - > IO ( Maybe M.NodeRequestPort )
getNearestNodePortWithFiles l sid cd
= withMVar (cd_FileController cd) $
\fc -> lookupNearestPortWithFiles l sid (cd_Server cd) fc
getNearestNodePortForFile : : S.FileId - > Integer - > SiteId - > c - > IO ( Maybe M.NodeRequestPort )
getNearestNodePortForFile f c sid cd
= withMVar (cd_FileController cd) $
\fc -> lookupNearestPortForFile f c sid (cd_Server cd) fc
getNearestNodePortForFiles : : [ ( S.FileId , Integer ) ] - > SiteId - > c - > IO ( ClientPortMap )
getNearestNodePortForFiles l sid cd
= withMVar (cd_FileController cd) $
\fc -> lookupNearestPortForFiles l sid (cd_Server cd) fc
-- ----------------------------------------------------------------------------
-- used by the nodes
-- ----------------------------------------------------------------------------
createFile : : S.FileId - > M.NodeId - > ControllerData - > IO ControllerData
createFile f nid cd
= modifyMVar (cd_FileController cd) $
\fc ->
do
mbCi <- getClientInfo nid (cd_Server cd)
case mbCi of
(Just _) ->
do
let fc' = addFileToController f nid fc
copy file to one other node
mpCp <- lookupPortWithoutFile f (cd_Server cd) fc
case mpCp of
(Just _) ->
do
return ()
let np = NP.newNodePort cp
N.copyFile f ( ) np
(Nothing) -> return ()
return (fc', ())
(Nothing) -> return (fc,())
createFiles : : [ ( S.FileId , M.NodeId ) ] - > ControllerData - > IO ControllerData
createFiles l cd
= modifyMVar (cd_FileController cd) $
\fc ->
do
fc'' <- foldM f fc l
return (fc'',())
where
f :: FileControllerData -> (S.FileId,M.NodeId) -> IO FileControllerData
f filecontroller (fid,nid) = do
mbCi <- getClientInfo nid (cd_Server cd)
case mbCi of
(Just _) ->
do
let fc' = addFileToController fid nid filecontroller
copy file to one other node
mpCp < - lookupPortWithoutFile f ( cd_Server cd ) fc
-- case mpCp of
-- (Just _) ->
-- do
-- return ()
let np = NP.newNodePort cp
N.copyFile f ( ) np
-- (Nothing) -> return ()
return fc'
(Nothing) -> return filecontroller
appendFile : : S.FileId - > M.NodeId - > ControllerData - > IO ControllerData
appendFile f nid cd
= modifyMVar (cd_FileController cd) $
\fc ->
do
mbCi <- getClientInfo nid (cd_Server cd)
case mbCi of
(Just ci) ->
do
-- renew file entry
let fc' = addFileToController f nid fc
-- get other nodes with this file
cps <- getOtherFilePorts f nid (cd_Server cd) fc
let nps = map (\i -> NP.newNodePort (ci_Port i)) cps
order them to copy the file from the first node
_ <- mapM (N.copyFile f (ci_Port ci)) nps
return (fc', ())
(Nothing) -> return (fc,())
deleteFile : : S.FileId - > M.NodeId - > ControllerData - > IO ControllerData
deleteFile f nid cd
= modifyMVar (cd_FileController cd) $
\fc ->
do
-- inform all other nodes to delete node
cps <- getOtherFilePorts f nid (cd_Server cd) fc
let nps = map (\ci -> NP.newNodePort (ci_Port ci)) cps
deleteFileFromNodes f nps
delete file from Controller
let fc' = deleteFileFromController f fc
return (fc', ())
instance Debug ControllerData where
printDebug cd
= do
putStrLn "Controller-Object (full)"
putStrLn "--------------------------------------------------------"
putStrLn "Server"
printDebug (cd_Server cd)
putStrLn "--------------------------------------------------------"
putStrLn "FileToNodeMap:"
withMVar (cd_FileController cd) $
\fc -> do
putStrLn $ show (cm_FileToNodeMap $ fc)
getDebug cd
= do
let line = "--------------------------------------------------------"
tmp <- getDebug (cd_Server cd)
tmp2 <- withMVar (cd_FileController cd) $
\fc -> do
return $ show (cm_FileToNodeMap $ fc)
return ( "Controller-Object (full)"
++"\n"++ line
++"\n"++ "Server"
++"\n"++ tmp
++"\n"++ line
++"\n"++ "FileToNodeMap:"
++"\n"++tmp2++"\n")
| null | https://raw.githubusercontent.com/fortytools/holumbus/4b2f7b832feab2715a4d48be0b07dca018eaa8e8/storage/source/Holumbus/FileSystem/Controller/ControllerData.hs | haskell | ----------------------------------------------------------------------------
----------------------------------------------------------------------------
* datatypes
* creation and destruction
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
initialise the server
initialize values
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| gets all nodessites
TODO
| the minimal occurrence of a file on the nodes
| Get a list of all fileIds that should be copied .
That means that the file has not enough copies
TODO we only compare the nodeid ... we should look for sites or hosts
| gets all nodessites
TODO
| the minimal occurrence of a file on the nodes
| Get a list of all fileIds that should be copied.
That means that the file has not enough copies
TODO we only compare the nodeid... we should look for sites or hosts
| Adds the files of a node to the global directory.
| Deletes the files of a node from the global directory.
| gets the List of all sites the file is located on...
| gets the List of all sites the files are located on...
no site i d found
only others ids , shuffle and return the first
return the hosts stie
return the procs site
no site id found
only others ids, shuffle and return the first
return the hosts stie
return the procs site
= lookupNearestPortWithFile f sid s cm
TODO add Capacity
TODO add Capacity
get all sites with the files
get all sites
if file exists, get nearest node, else the closest with space
infoM localLogger $ "Getting nearest ports for: " ++ show l
res <- foldM f [] l
infoM localLogger $ "Clientportmap is: " ++ show res
return res
where
f theMap (fid,len) = do
infoM localLogger $ "Getting nearest ports for: " ++ fid
debugM localLogger $ "Nearest ports: " ++ show mp
get all nodes which hold the file without the given node
send a delete-request to the node
but don't inform the controller again (False)
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
used by the nodes
----------------------------------------------------------------------------
case mpCp of
(Just _) ->
do
return ()
(Nothing) -> return ()
renew file entry
get other nodes with this file
inform all other nodes to delete node |
|
Module : Holumbus . FileSystem . Controller . ControllerData
Copyright : Copyright ( C ) 2008
License : MIT
Maintainer : ( )
Stability : experimental
Portability : portable
Version : 0.1
Module : Holumbus.FileSystem.Controller.ControllerData
Copyright : Copyright (C) 2008 Stefan Schmidt
License : MIT
Maintainer : Stefan Schmidt ()
Stability : experimental
Portability: portable
Version : 0.1
-}
module Holumbus.FileSystem.Controller.ControllerData
(
ControllerData
, newController
)
where
import Prelude hiding (appendFile)
import Control.Concurrent
import Data.Maybe
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Data.Set as Set
import System.Log.Logger
import Holumbus.Common.Debug
import Holumbus.Common.Utils ( handleAll )
import qualified Holumbus.FileSystem.Controller as C
import qualified Holumbus.FileSystem.Node as N
import qualified Holumbus.FileSystem.Node.NodePort as NP
import qualified Holumbus.FileSystem.Messages as M
import qualified Holumbus.FileSystem.Storage as S
import Holumbus.Network.Site
import Holumbus.Network.Communication
import Control.Monad (foldM)
import System.Random
localLogger :: String
localLogger = "Holumbus.FileSystem.Controller"
type FileToNodeMap = Map.Map S.FileId (Set.Set M.NodeId)
data FileControllerData = FileControllerData {
cm_FileToNodeMap :: ! FileToNodeMap
}
type FileController = MVar FileControllerData
data ControllerData = ControllerData {
cd_Server :: Server
, cd_FileController :: FileController
}
newFileController :: IO FileController
newFileController
= do
let fc = FileControllerData Map.empty
newMVar fc
newController :: StreamName -> Maybe PortNumber -> IO ControllerData
newController sn pn
= do
c <- newEmptyMVar
server <- newServer sn pn (dispatch c) (Just $ registerNode c) (Just $ unregisterNode c)
fc <- newFileController
let con = ControllerData server fc
putMVar c con
return con
dispatch
:: MVar ControllerData
-> M.ControllerRequestMessage
-> IO (Maybe M.ControllerResponseMessage)
dispatch c msg
= do
cd <-readMVar c
case msg of
(M.CReqGetFileSites f) ->
do
s <- C.getFileSites f cd
return $ Just $ M.CRspGetFileSites s
(M.CReqContains f) ->
do
b <- C.containsFile f cd
return $ Just $ M.CRspContains b
(M.CReqGetNearestNodePortWithFile f sid) ->
do
p <- C.getNearestNodePortWithFile f sid cd
return $ Just $ M.CRspGetNearestNodePortWithFile p
(M.CReqGetNearestNodePortWithFiles l sid) ->
do
portmap <- C.getNearestNodePortWithFiles l sid cd
return $ Just $ M.CRspGetNearestNodePortWithFiles portmap
(M.CReqGetNearestNodePortForFile f l sid) ->
do
p <- C.getNearestNodePortForFile f l sid cd
return $ Just $ M.CRspGetNearestNodePortForFile p
(M.CReqGetNearestNodePortForFiles l sid) ->
do
p <- C.getNearestNodePortForFiles l sid cd
return $ Just $ M.CRspGetNearestNodePortForFiles p
(M.CReqCreate f n) ->
do
C.createFile f n cd
return $ Just $ M.CRspSuccess
(M.CReqCreateS l) ->
do
C.createFiles l cd
return $ Just $ M.CRspSuccess
(M.CReqAppend f n) ->
do
C.appendFile f n cd
return $ Just $ M.CRspSuccess
(M.CReqDelete f n) ->
do
C.deleteFile f n cd
return $ Just $ M.CRspSuccess
_ -> return Nothing
registerNode :: MVar ControllerData -> IdType -> ClientPort -> IO ()
registerNode c i cp
= do
let np = NP.newNodePort cp
fids <- N.getFileIds np
cd <- readMVar c
modifyMVar (cd_FileController cd) $
\fc ->
do
let fc' = addFilesToController fids i fc
return (fc', ())
unregisterNode :: MVar ControllerData -> IdType -> ClientPort -> IO ()
unregisterNode c i _
= do
debugM localLogger "unregisterNode: start"
cd <- readMVar c
modifyMVar (cd_FileController cd) $
\fc ->
do
let fc' = deleteFilesFromController i fc
return (fc', ())
debugM localLogger "unregisterNode: end"
nodes2sites : : ControllerMaps - > [ M.NodeId ] - > [ SiteId ]
nodes2sites ( ControllerMaps _ nsm _ _ _ ) nids = lookupList nsm nids
sites2ports : : ControllerMaps - > [ SiteId ] - > [ NP.NodePort ]
sites2ports ( ControllerMaps _ _ spm _ _ ) sids = lookupList
getNodesList : : ControllerMaps - > [ M.NodeId ]
getNodesList ( ControllerMaps _ nsm _ _ _ ) = Map.keys nsm
getNodesWithSpace : : S.FileId - > Integer - > ControllerMaps - > [ SiteId ]
getNodesWithSpace _ _ cm = getSiteList cm
getOtherFileNodes : : S.FileId - > M.NodeId - > ControllerMaps - > [ M.NodeId ]
getOtherFileNodes f nid ( ControllerMaps fnm _ _ _ _ )
= Set.toList $ Set.delete nid allNids
where
allNids = Map.findWithDefault ( Set.empty ) f fnm
copyingLimit : : Int
copyingLimit = 2
getFilesForCopying : : FileControllerData - > [ S.FileId ]
getFilesForCopying cm = ( fst . unzip ) filteredList
where
fnm = cm_FileToNodeMap cm
filteredList = filter setSelector ( Map.toList fnm )
setSelector ( _ , s ) = Set.size s < copyingLimit
nodes2sites :: ControllerMaps -> [M.NodeId] -> [SiteId]
nodes2sites (ControllerMaps _ nsm _ _ _) nids = lookupList nsm nids
sites2ports :: ControllerMaps -> [SiteId] -> [NP.NodePort]
sites2ports (ControllerMaps _ _ spm _ _) sids = lookupList spm sids
getNodesList :: ControllerMaps -> [M.NodeId]
getNodesList (ControllerMaps _ nsm _ _ _) = Map.keys nsm
getNodesWithSpace :: S.FileId -> Integer -> ControllerMaps -> [SiteId]
getNodesWithSpace _ _ cm = getSiteList cm
getOtherFileNodes :: S.FileId -> M.NodeId -> ControllerMaps -> [M.NodeId]
getOtherFileNodes f nid (ControllerMaps fnm _ _ _ _)
= Set.toList $ Set.delete nid allNids
where
allNids = Map.findWithDefault (Set.empty) f fnm
copyingLimit :: Int
copyingLimit = 2
getFilesForCopying :: FileControllerData -> [S.FileId]
getFilesForCopying cm = (fst . unzip) filteredList
where
fnm = cm_FileToNodeMap cm
filteredList = filter setSelector (Map.toList fnm)
setSelector (_,s) = Set.size s < copyingLimit
-}
addFilesToController :: [S.FileId] -> M.NodeId -> FileControllerData -> FileControllerData
addFilesToController fids nid cm
= cm { cm_FileToNodeMap = fnm' }
where
fnm = cm_FileToNodeMap cm
fnm' = Map.unionWith combine fnm newMap
newMap = Map.fromList $ zip fids (repeat $ Set.singleton nid)
combine s1 s2 = Set.union s1 s2
deleteFilesFromController :: M.NodeId -> FileControllerData -> FileControllerData
deleteFilesFromController nid cm
= cm { cm_FileToNodeMap = fnm' }
where
fnm = cm_FileToNodeMap cm
fnm' = Map.fromList filteredList
filteredList = filter (\(_,s) -> s /= Set.empty) list
list = map (\(k,s) -> (k, Set.delete nid s)) (Map.toList fnm)
addFileToController :: S.FileId -> M.NodeId -> FileControllerData -> FileControllerData
addFileToController fid nid cm = addFilesToController [ fid ] nid cm
addFileToController fid nid cm = cm { cm_FileToNodeMap = fnm' }
where
fnm = cm_FileToNodeMap cm
fnm' = Map.insert fid nid' fnm
nid' = Set.singleton nid
deleteFileFromController :: S.FileId -> FileControllerData -> FileControllerData
deleteFileFromController fid cm
= cm { cm_FileToNodeMap = fnm' }
where
fnm = cm_FileToNodeMap cm
fnm' = Map.delete fid fnm
getFileClientInfoList :: S.FileId -> Server -> FileControllerData -> IO [ClientInfo]
getFileClientInfoList f s cm
= do
let fnm = cm_FileToNodeMap cm
let is = Set.toList $ maybe Set.empty id (Map.lookup f fnm)
mbDats <- mapM (\i -> getClientInfo i s) is
return $ catMaybes mbDats
shuffle :: [a] -> IO [a]
shuffle l' = shuffle' l' []
where
shuffle' [] acc = return acc
shuffle' l acc = do
k <- randomRIO (0, length l - 1)
let (lead, x:xs) = splitAt k l
shuffle' (lead ++ xs) (x:acc)
lookupNearestPortWithFile :: S.FileId -> SiteId -> Server -> FileControllerData -> IO (Maybe ClientPort)
lookupNearestPortWithFile f sid s cm
= do
dats <- getFileClientInfoList f s cm
let sids' = map (\ci -> ci_Site ci) dats
sids <- shuffle sids'
let mbns = nearestId sid sids
mbdat = maybe Nothing (\ns -> List.find (\ci -> (ci_Site ci) == ns) dats) mbns
mbnp = maybe Nothing (\ci -> Just $ ci_Port ci) mbdat
return mbnp
trude : : SiteId - > [ S.FileId ] - > Server - > FileControllerData - > IO ( Maybe SiteId )
trude currentSite files s cm
= do
let file2node = cm_FileToNodeMap cm ;
nodeIdsWithFiles = concatMap ( \file - > Set.toList $ maybe Set.empty i d ( Map.lookup file file2node ) ) files
sitesIdsWithFiles < - foldM f ( [ ] , [ ] , [ ] ) nodeIdsWithFiles
nearestId ' sitesIdsWithFiles
where
nearestId ' : : ( [ SiteId],[SiteId],[SiteId ] ) - > IO ( Maybe SiteId )
nearestId ' ( [ ] , [ ] , xs ) = do
xs ' < - shuffle xs
f : : ( [ SiteId],[SiteId],[SiteId ] ) - > M.NodeId - > IO ( [ SiteId],[SiteId],[SiteId ] )
f ( procs , hosts , others ) i = do
cli < - getClientInfo i s
case cli of
Nothing - > return ( procs , hosts , others )
( Just clientInfo ) - > insert ( procs , hosts , others ) ( ci_Site clientInfo )
insert : : ( [ SiteId],[SiteId],[SiteId ] ) - > SiteId - > IO ( [ SiteId],[SiteId],[SiteId ] )
insert ( procs , hosts , others ) thisSite = if isSameProcess thisSite currentSite
then return ( thisSite : procs , hosts , others )
else if isSameHost thisSite currentSite
then return ( procs , thisSite : hosts , others )
else return ( procs , hosts , thisSite : others )
trude :: SiteId -> [S.FileId] -> Server -> FileControllerData -> IO (Maybe SiteId)
trude currentSite files s cm
= do
let file2node = cm_FileToNodeMap cm;
nodeIdsWithFiles = concatMap (\file -> Set.toList $ maybe Set.empty id (Map.lookup file file2node)) files
sitesIdsWithFiles <- foldM f ([],[],[]) nodeIdsWithFiles
nearestId' sitesIdsWithFiles
where
nearestId' :: ([SiteId],[SiteId],[SiteId]) -> IO (Maybe SiteId)
nearestId' ([], [], xs) = do
xs' <- shuffle xs
f :: ([SiteId],[SiteId],[SiteId]) -> M.NodeId -> IO ([SiteId],[SiteId],[SiteId])
f (procs,hosts,others) i = do
cli <- getClientInfo i s
case cli of
Nothing -> return (procs,hosts,others)
(Just clientInfo) -> insert (procs,hosts,others) (ci_Site clientInfo)
insert :: ([SiteId],[SiteId],[SiteId]) -> SiteId -> IO ([SiteId],[SiteId],[SiteId])
insert (procs,hosts,others) thisSite = if isSameProcess thisSite currentSite
then return (thisSite:procs,hosts,others)
else if isSameHost thisSite currentSite
then return (procs,thisSite:hosts,others)
else return (procs,hosts,thisSite:others)
-}
lookupNearestPortWithFiles :: [S.FileId] -> SiteId -> Server -> FileControllerData -> IO M.ClientPortMap
lookupNearestPortWithFiles l sid s cm = do
infoM localLogger $ "Getting nearest ports with: " ++ show l
res <- foldM f [] l
infoM localLogger $ "Clientportmap is: " ++ show res
return res
where
f :: M.ClientPortMap -> S.FileId -> IO M.ClientPortMap
f theMap fid = do
infoM localLogger $ "Getting nearest ports with: " ++ fid
maybeport <- lookupNearestPortWithFile fid sid s cm
debugM localLogger $ "Nearest ports: " ++ show maybeport
case maybeport of
(Just port) -> return (ins port fid theMap)
Nothing -> return theMap
lookupNearestPortWithFileAndSpace : : S.FileId - > Integer - > SiteId - > Server - > FileControllerData - > IO ( Maybe ClientPort )
lookupNearestPortWithFileAndSpace f _ size s cm
dats < - getFileClientInfoList f s cm
let sids = map ( \ci - > ci_Site ci ) $ filter ( ... ) dats
= nearestId
= maybe Nothing ( \ns - > List.find ( \(s , _ , _ ) - > s = = ns ) dats ) mbns
mbnp = maybe Nothing ( \(_,np , _ ) - > Just np ) mbdat
return mbnp
let sids = map (\ci -> ci_Site ci) $ filter (...) dats
mbns = nearestId sid sids
mbdat = maybe Nothing (\ns -> List.find (\(s,_,_) -> s == ns) dats) mbns
mbnp = maybe Nothing (\(_,np,_) -> Just np) mbdat
return mbnp
-}
TODO add capacity
lookupNearestPortWithSpace :: Integer -> SiteId -> Server -> FileControllerData -> IO (Maybe ClientPort)
lookupNearestPortWithSpace _size sid s _cm
= do
dats <- getAllClientInfos s
let sids' = map (\ci -> ci_Site ci) dats
sids <- shuffle sids'
let mbns = nearestId sid sids
mbdat = maybe Nothing (\ns -> List.find (\ci -> (ci_Site ci) == ns) dats) mbns
mbnp = maybe Nothing (\ci -> Just $ ci_Port ci) mbdat
return mbnp
lookupPortWithoutFile :: S.FileId -> Server -> FileControllerData -> IO (Maybe ClientPort)
lookupPortWithoutFile f s cm
= do
fileCis <- getFileClientInfoList f s cm
allCis <- getAllClientInfos s
let fileNids = map ci_Id fileCis
allNids = map ci_Id allCis
nonNids = Set.toList $ Set.difference (Set.fromList allNids) (Set.fromList fileNids)
if (null nonNids)
then return Nothing
else do
let i = head nonNids
mbCi <- getClientInfo i s
case mbCi of
(Just ci) -> return $ Just $ ci_Port ci
(Nothing) -> return Nothing
lookupNearestPortForFile :: S.FileId -> Integer -> SiteId -> Server -> FileControllerData -> IO (Maybe ClientPort)
lookupNearestPortForFile _ size sid s cm
= do
nodeWithFile < - lookupNearestPortWithFileAndSpace f size s cm
nodeWithoutFile <- lookupNearestPortWithSpace size sid s cm
let mbnp = maybe Nothing (\np -> Just np) nodeWithoutFile
debugM localLogger $ " lookupNearestPortForFile : file : " + + show f
debugM localLogger $ " lookupNearestPortForFile : size : " + + show size
debugM localLogger $ " lookupNearestPortForFile : site : " + + show " lookupNearestPortForFile : nodeWithFile : " + + show nodeWithFile
debugM localLogger $ " lookupNearestPortForFile : nodeWithoutFile : " + + show nodeWithoutFile
debugM localLogger $ " lookupNearestPortForFile : result : " + + show mbnp
debugM localLogger $ "lookupNearestPortForFile: size: " ++ show size
debugM localLogger $ "lookupNearestPortForFile: site: " ++ show sid
debugM localLogger $ "lookupNearestPortForFile: nodeWithFile: " ++ show nodeWithFile
debugM localLogger $ "lookupNearestPortForFile: nodeWithoutFile: " ++ show nodeWithoutFile
debugM localLogger $ "lookupNearestPortForFile: result: " ++ show mbnp
-} return mbnp
= do
dats < - getAllClientInfos s : : [ ClientInfo ]
let sids ' = map ( \ci - > ci_Site ci ) dats
sids < - shuffle sids '
let mbns = nearestId
= maybe Nothing ( \ns - > List.find ( \ci - > ( ci_Site ci ) = = ns ) dats ) mbns
mbnp = maybe Nothing ( \ci - > Just $ ci_Port ci ) mbdat
return mbnp
dats <- getAllClientInfos s :: [ClientInfo]
let sids' = map (\ci -> ci_Site ci) dats
sids <- shuffle sids'
let mbns = nearestId sid sids
mbdat = maybe Nothing (\ns -> List.find (\ci -> (ci_Site ci) == ns) dats) mbns
mbnp = maybe Nothing (\ci -> Just $ ci_Port ci) mbdat
return mbnp-}
lookupNearestPortForFiles :: [(S.FileId,Integer)] -> SiteId -> Server -> FileControllerData -> IO M.ClientPortMap
lookupNearestPortForFiles l sid s cm = do
nearestPortWithSpace <- lookupNearestPortWithSpace 0 sid s cm
case nearestPortWithSpace of
Nothing -> return []
(Just p) -> return [(p,map fst l)]
f : : M.ClientPortMap - > ( S.FileId , Integer ) - > IO M.ClientPortMap
mp of
( Just port ) - > return ( ins port fid theMap )
Nothing - > return theMap
return cpm
ins :: ClientPort -> S.FileId -> M.ClientPortMap -> M.ClientPortMap
ins port fid [] = [(port,[fid])]
ins port fid ((p,fids):[]) = if (p==port)
then [(p,(fid:fids))]
else [(port,[fid]),(p,fids)]
ins port fid ((p,fids):ps) = if (p==port)
then ((p,fid:fids):ps)
else (p,fids):(ins port fid ps)
getOtherFilePorts :: S.FileId -> IdType -> Server -> FileControllerData -> IO [ClientInfo]
getOtherFilePorts f nid s cm
= do
let fnm = cm_FileToNodeMap cm
let otherids = Set.toList $ Set.delete nid $ maybe Set.empty id (Map.lookup f fnm)
mbDats <- mapM (\i -> getClientInfo i s) otherids
return $ catMaybes mbDats
deleteFileFromNodes :: S.FileId -> [NP.NodePort] -> IO ()
deleteFileFromNodes fid nps = sequence_ $ map deleteFileFromNode nps
where
deleteFileFromNode np
= do
handleAll (\e -> putStrLn $ show e) $
do
N.deleteFile fid False np
return ()
instance C.ControllerClass ControllerData where
closeController cd
= do
debugM localLogger "closing Server"
closeServer (cd_Server cd)
debugM localLogger "server closed"
getFileSites : : S.FileId - > Controller - > IO ( Set . Set SiteId )
getFileSites f cd
= withMVar (cd_FileController cd) $
\fc ->
do
dats <- getFileClientInfoList f (cd_Server cd) fc
let sids = map (\ci -> ci_Site ci) dats
return (Set.fromList sids)
containsFile : : S.FileId - > Controller - > IO Bool
containsFile f cd
= withMVar (cd_FileController cd) $
\fc -> return $ Map.member f (cm_FileToNodeMap fc)
getNearestNodePortWithFile : : S.FileId - > SiteId - > c - > IO ( Maybe M.NodeRequestPort )
getNearestNodePortWithFile f sid cd
= withMVar (cd_FileController cd) $
\fc -> lookupNearestPortWithFile f sid (cd_Server cd) fc
getNearestNodePortWithFiles : : [ S.FileId ] - > SiteId - > c - > IO ( Maybe M.NodeRequestPort )
getNearestNodePortWithFiles l sid cd
= withMVar (cd_FileController cd) $
\fc -> lookupNearestPortWithFiles l sid (cd_Server cd) fc
getNearestNodePortForFile : : S.FileId - > Integer - > SiteId - > c - > IO ( Maybe M.NodeRequestPort )
getNearestNodePortForFile f c sid cd
= withMVar (cd_FileController cd) $
\fc -> lookupNearestPortForFile f c sid (cd_Server cd) fc
getNearestNodePortForFiles : : [ ( S.FileId , Integer ) ] - > SiteId - > c - > IO ( ClientPortMap )
getNearestNodePortForFiles l sid cd
= withMVar (cd_FileController cd) $
\fc -> lookupNearestPortForFiles l sid (cd_Server cd) fc
createFile : : S.FileId - > M.NodeId - > ControllerData - > IO ControllerData
createFile f nid cd
= modifyMVar (cd_FileController cd) $
\fc ->
do
mbCi <- getClientInfo nid (cd_Server cd)
case mbCi of
(Just _) ->
do
let fc' = addFileToController f nid fc
copy file to one other node
mpCp <- lookupPortWithoutFile f (cd_Server cd) fc
case mpCp of
(Just _) ->
do
return ()
let np = NP.newNodePort cp
N.copyFile f ( ) np
(Nothing) -> return ()
return (fc', ())
(Nothing) -> return (fc,())
createFiles : : [ ( S.FileId , M.NodeId ) ] - > ControllerData - > IO ControllerData
createFiles l cd
= modifyMVar (cd_FileController cd) $
\fc ->
do
fc'' <- foldM f fc l
return (fc'',())
where
f :: FileControllerData -> (S.FileId,M.NodeId) -> IO FileControllerData
f filecontroller (fid,nid) = do
mbCi <- getClientInfo nid (cd_Server cd)
case mbCi of
(Just _) ->
do
let fc' = addFileToController fid nid filecontroller
copy file to one other node
mpCp < - lookupPortWithoutFile f ( cd_Server cd ) fc
let np = NP.newNodePort cp
N.copyFile f ( ) np
return fc'
(Nothing) -> return filecontroller
appendFile : : S.FileId - > M.NodeId - > ControllerData - > IO ControllerData
appendFile f nid cd
= modifyMVar (cd_FileController cd) $
\fc ->
do
mbCi <- getClientInfo nid (cd_Server cd)
case mbCi of
(Just ci) ->
do
let fc' = addFileToController f nid fc
cps <- getOtherFilePorts f nid (cd_Server cd) fc
let nps = map (\i -> NP.newNodePort (ci_Port i)) cps
order them to copy the file from the first node
_ <- mapM (N.copyFile f (ci_Port ci)) nps
return (fc', ())
(Nothing) -> return (fc,())
deleteFile : : S.FileId - > M.NodeId - > ControllerData - > IO ControllerData
deleteFile f nid cd
= modifyMVar (cd_FileController cd) $
\fc ->
do
cps <- getOtherFilePorts f nid (cd_Server cd) fc
let nps = map (\ci -> NP.newNodePort (ci_Port ci)) cps
deleteFileFromNodes f nps
delete file from Controller
let fc' = deleteFileFromController f fc
return (fc', ())
-- | Human-readable dumps of the controller state, printed and as a
--   string; the file map is read under 'withMVar' for a consistent view.
instance Debug ControllerData where
  printDebug cd = do
      mapM_ putStrLn ["Controller-Object (full)", separator, "Server"]
      printDebug (cd_Server cd)
      mapM_ putStrLn [separator, "FileToNodeMap:"]
      withMVar (cd_FileController cd) $ \fc ->
        putStrLn (show (cm_FileToNodeMap fc))
    where
      separator = "--------------------------------------------------------"
  getDebug cd = do
      serverDump <- getDebug (cd_Server cd)
      mapDump <- withMVar (cd_FileController cd) $ \fc ->
        return (show (cm_FileToNodeMap fc))
      return (concat
        [ "Controller-Object (full)"
        , "\n", separator
        , "\n", "Server"
        , "\n", serverDump
        , "\n", separator
        , "\n", "FileToNodeMap:"
        , "\n", mapDump, "\n"
        ])
    where
      separator = "--------------------------------------------------------"
|
ca35508d2671b5c55b33138941f7cbce2b41c0fd4478ff786b7cec713b7fa0ff | emotiq/emotiq | startup.lisp | There are two entry points for - development and binary ( production ) .
;;
;; At the moment, we favor development activities over binary building. Developers
; should be able to load-and-go. The MAIN entry point is an example of what developers
; might use. MAIN does nothing, but test whether make-key-pair doesn't crash.
;;
;; When building binaries, we use the DELIVER function. This function cannot
; run with multitasking turned on, but it can create a binary which runs
; with multitasking turned on. In code, multitasking is required
; by the Actors system. This means that the Actors code cannot be initialized
; during construction of a binary. This "special case" is handled only in
; the binary construction code. A binary must install and initialize the Actors
; system during startup. The START function is called by a binary, as its
; entry point. During the building of a binary, /etc/deliver/deliver.lisp
; sets a special variable (cl-user::*performing-binary-build*) to any value
; (as long as BOUNDP returns T on this special). The START function
; must set EMOTIQ::*production* to T, which is used in emotiq/src/Crypto/pbc-cffi.lisp
; via the function EMOTIQ:PRODUCTION-P to initialize DLL's at runtime.
;
;; We allow developers to use Lisp LOAD to initialize various parts of the
;; system (including Actors). When building the binary, we need to explicitly
;; initialize Actors.
(in-package "EMOTIQ")
(defun main (&key etc-and-wallets how-started-message?)
  "Main loop for Emotiq daemon"
  ;; after calling this, (RANDOM 100) will return the same sequence of
  ;; pseudo-random numbers on each test run
  (when etc-and-wallets
    (setf (symbol-function 'emotiq/fs:etc/)
          (lambda () (pathname etc-and-wallets))))
  (message-running-state how-started-message?)
  ;; Create a default wallet on disk if one doesn't already exist
  (emotiq/wallet:create-wallet)
  ;; Start the websocket interface for the Electron wallet
  ;; listening <ws:PORT/wallet> .
  (when (string-equal "true"
                      (emotiq/config:setting :websocket-server))
    (websocket/wallet:start-server :port (emotiq/config:setting :websocket-server-port)))
  ;; Start the REST server which provides support for testing the
  ;; WebSocket implementation at <:PORT/client/>
  (when (string-equal "true"
                      (emotiq/config:setting :rest-server))
    (emotiq-rest:start-server :port (emotiq/config:setting :rest-server-port)))
  (emotiq/tracker:start-tracker)
  (emotiq:start-node)
  (cosi-simgen:startup-elections))
;; Entry point for the binary (aka "production") version of the system.
(defun start ()
  "Entry point used by the delivered (binary) build."
  ;; This is for running in the binary command line only. For now, if we're
  ;; starting from the command line, we assume it's for
  ;; production. Later, we'll have other means of setting
  ;; *production*. TEMPORARY! FIX! 4/6/18
  ;; ^^ in this context "production" ONLY means binary build.
  (unintern 'cl-user::*performing-binary-build*) ;; if building binary,
                                                 ;; used by :PRODUCTION-P in Crypto
  (message-running-state "from command line")
  (core-crypto:startup)
  (actors:install-actor-system)
  (main))
(defun argv ()
  ;; Implementation-specific command-line argument list
  ;; (LispWorks / Clozure CL only; NIL elsewhere).
  #+lispworks system:*line-arguments-list*
  #+OPENMCL ccl:*command-line-argument-list*)
(defun message-running-state (&optional how-started-message?)
  "Announce how the node was started, its mode, and its arguments."
  (let ((how (or how-started-message? "interactively"))
        (mode (if (production-p) "production" "development")))
    (format *standard-output* "~%Running ~a in ~a~%with args [~a]~%"
            how mode (argv))))
| null | https://raw.githubusercontent.com/emotiq/emotiq/9af78023f670777895a3dac29a2bbe98e19b6249/src/startup.lisp | lisp |
At the moment, we favor development activities over binary building. Developers
should be able to load-and-go. The MAIN entry point is an example of what developers
might use. MAIN does nothing, but test whether make-key-pair doesn't crash.
When building binaries, we use the DELIVER function. This function cannot
run with multitasking turned on, but it can create a binary which runs
by the Actors system. This means that the Actors code cannot be initialized
during construction of a binary. This "special case" is handled only in
the binary construction code. A binary must install and initialize the Actors
sets a special variable (cl-user::*performing-binary-build*) to any value
must set EMOTIQ::*production* to T, which is used in emotiq/src/Crypto/pbc-cffi.lisp
via the function EMOTIQ:PRODUCTION-P to initialize DLL's at runtime.
We allow developers to use Lisp LOAD to initialize various parts of the
system (including Actors). When building the binary, we need to explicitly
initialize Actors.
Create a default wallet on disk if one doesn't already exist
listening <ws:PORT/wallet> .
Start the REST server which provides support for testing the
WebSocket implementation at <:PORT/client/>
Entry Point for binary (aka "production" version of the system.
This is for running in the binary command line only. For now, if we're
starting from the command line, we assume it's for
production. Later, we'll have other means of setting
^^ in this context "production" ONLY means binary build.
if building binary, | There are two entry points for - development and binary ( production ) .
with multitasking turned on . In code , multitasking is required
system during startup . The START function is called by a binary , as its
entry point . During the building of a binary , / etc / deliver / deliver.lisp
( as long as BOUNDP returns T on this special ) . The START function
(in-package "EMOTIQ")
(defun main (&key etc-and-wallets how-started-message?)
"Main loop for Emotiq daemon"
after calling this , ( RANDOM 100 ) will return the same sequence of pseudo - random numbers on each test run
(when etc-and-wallets
(setf (symbol-function 'emotiq/fs:etc/)
(lambda () (pathname etc-and-wallets))))
(message-running-state how-started-message?)
(emotiq/wallet:create-wallet)
Start the websocket interface for the Electron wallet
(when (string-equal "true"
(emotiq/config:setting :websocket-server))
(websocket/wallet:start-server :port (emotiq/config:setting :websocket-server-port)))
(when (string-equal "true"
(emotiq/config:setting :rest-server))
(emotiq-rest:start-server :port (emotiq/config:setting :rest-server-port)))
(emotiq/tracker:start-tracker)
(emotiq:start-node)
(cosi-simgen:startup-elections))
(defun start ()
* production * . TEMPORARY ! FIX ! 4/6/18
used by : PRODUCTION - P in Crypto
(message-running-state "from command line")
(core-crypto:startup)
(actors:install-actor-system)
(main))
(defun argv ()
#+lispworks system:*line-arguments-list*
#+OPENMCL ccl:*command-line-argument-list*)
(defun message-running-state (&optional how-started-message?)
(format *standard-output* "~%Running ~a in ~a~%with args [~a]~%"
(or how-started-message? "interactively")
(if (production-p) "production" "development")
(argv)))
|
51ef521e0d109be199fe35a0d91ca92a2dcc7a210722e58d30cbfa12ee6f3635 | cirfi/sicp-my-solutions | 1.22.scm | define runtime for GNU
;;; (define (runtime) (tms:clock (times)))
;;; define runtime for Racket
;;; (define (runtime) (current-milliseconds))
;;; timed prime, modified
;; Time the primality test for N, reporting N and the elapsed time
;; when it turns out to be prime.
(define (timed-prime-test n)
  (start-prime-test n (runtime)))
;; Test N for primality; if prime, report it together with the time
;; elapsed since START-TIME, otherwise yield #f.
(define (start-prime-test n start-time)
  (cond ((prime? n) (report-prime n (- (runtime) start-time)))
        (else #f)))
;; Print "<n> *** <elapsed>" followed by a newline.
(define (report-prime n elapsed-time)
  (for-each display (list n " *** " elapsed-time))
  (newline))
;;; prime?
;; Square of a number.
(define (square n) (* n n))
;; Smallest divisor of N that is >= 2 (N itself when N is prime).
(define (smallest-divisor n)
  (find-divisor n 2))
;; Scan upward from candidate D for the first divisor of N; once D^2
;; exceeds N no proper divisor can remain, so N itself is returned.
(define (find-divisor n d)
  (cond ((> (square d) n) n)
        ((divides? d n) d)
        (else (find-divisor n (+ d 1)))))
;; Does A evenly divide B?
(define (divides? a b)
  (zero? (remainder b a)))
;; N (> 1) is prime iff its smallest divisor is N itself.
(define (prime? n)
  (= n (smallest-divisor n)))
;;; search-for-primes
;; Time the primality test for the first three primes >= START,
;; scanning odd candidates only.
(define (search-for-primes start)
  (define (search-iter candidate found)
    (cond ((= found 3))
          ((timed-prime-test candidate)
           (search-iter (+ candidate 2) (+ found 1)))
          (else (search-iter (+ candidate 2) found))))
  (search-iter (if (even? start) (+ start 1) start) 0))
;; Exercise runs; the recorded timings below had lost their comment
;; markers during extraction, which made them invalid top-level forms.
(search-for-primes 1000)
;;; 1009 *** 0.
;;; 1013 *** 0.
;;; 1019 *** 0.
(search-for-primes 10000)
;;; 10007 *** 0.
;;; 10009 *** 0.
;;; 10037 *** 0.
(search-for-primes 100000)
;;; 100003 *** 1.0000000000000675e-2
;;; 100019 *** 0.
;;; 100043 *** 0.
(search-for-primes 1000000)
;;; 1000003 *** 9.999999999999787e-3
;;; 1000033 *** 0.
;;; 1000037 *** 0.
;;; the former cases are too small for modern computers
(search-for-primes 1000000000)
;;; 1000000007 *** .09999999999999999
;;; 1000000009 *** .06
;;; 1000000021 *** .04999999999999999
(search-for-primes 10000000000)
;;; 10000000019 *** .14
;;; 10000000033 *** .13
;;; 10000000061 *** .14
(search-for-primes 100000000000)
;;; 100000000003 *** .5
;;; 100000000019 *** .43999999999999995
;;; 100000000057 *** .3999999999999999
(search-for-primes 1000000000000)
;;; 1000000000039 *** 1.3199999999999998
;;; 1000000000061 *** 1.2999999999999998
;;; 1000000000063 *** 1.3199999999999994
| null | https://raw.githubusercontent.com/cirfi/sicp-my-solutions/4b6cc17391aa2c8c033b42b076a663b23aa022de/ch1/1.22.scm | scheme | (define (runtime) (tms:clock (times)))
define runtime for Racket
(define (runtime) (current-milliseconds))
timed prime, modified
prime?
search-for-primes
1013 *** 0.
10007 *** 0.
the former cases are too small for modern computers
10000000033 *** .13
10000000061 *** .14 | define runtime for GNU
(define (timed-prime-test n)
(start-prime-test n (runtime)))
(define (start-prime-test n start-time)
(if (prime? n)
(report-prime n (- (runtime) start-time))
#f))
(define (report-prime n elapsed-time)
(display n)
(display " *** ")
(display elapsed-time)
(newline))
(define (square x)
(* x x))
(define (smallest-divisor n)
(find-divisor n 2))
(define (find-divisor n test-divisor)
(cond ((> (square test-divisor) n) n)
((divides? test-divisor n) test-divisor)
(else (find-divisor n (+ test-divisor 1)))))
(define (divides? a b)
(= (remainder b a) 0))
(define (prime? n)
(= n (smallest-divisor n)))
(define (search-for-primes start)
(define (search-iter n count)
(cond ((= count 3))
((timed-prime-test n) (search-iter (+ n 2) (+ count 1)))
(else (search-iter (+ n 2) count))))
(cond ((even? start) (search-iter (+ start 1) 0))
(else (search-iter start 0))))
(search-for-primes 1000)
1009 * * * 0 .
1019 * * * 0 .
(search-for-primes 10000)
10009 * * * 0 .
10037 * * * 0 .
(search-for-primes 100000)
100003 * * * 1.0000000000000675e-2
100019 * * * 0 .
100043 * * * 0 .
(search-for-primes 1000000)
1000003 * * * 9.999999999999787e-3
1000033 * * * 0 .
1000037 * * * 0 .
(search-for-primes 1000000000)
1000000007 * * * .09999999999999999
1000000009 * * * .06
1000000021 * * * .04999999999999999
(search-for-primes 10000000000)
10000000019 * * * .14
(search-for-primes 100000000000)
100000000003 * * * .5
100000000019 * * * .43999999999999995
100000000057 * * * .3999999999999999
(search-for-primes 1000000000000)
1000000000039 * * * 1.3199999999999998
1000000000061 * * * 1.2999999999999998
1000000000063 * * * 1.3199999999999994
|
f443c7332a4c2f997d237d1d7932adb02dddba084947636ec038b440bb824cac | Frama-C/Frama-C-snapshot | register.mli | (**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
(* -------------------------------------------------------------------------- *)
(* --- Report Properties Status --- *)
(* -------------------------------------------------------------------------- *)
(** Entry point printing the properties-status report (see the section
    header above) on the standard output. *)
val print : unit -> unit
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/report/register.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
--------------------------------------------------------------------------
--- Report Properties Status ---
--------------------------------------------------------------------------
Local Variables:
compile-command: "make -C ../../.."
End:
| This file is part of Frama - C.
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
val print : unit -> unit
|
8abf601193f68468b0ca3fafe84c337f4da33f431149510dfd4429d760c51d73 | protz/mezzo | Kind.ml | type kind =
| KValue
| KType
| KPerm
| KArrow of kind * kind
(* Wrap [kind] under one arrow per binding, yielding
   k1 -> k2 -> ... -> kind. *)
let karrow bindings kind =
  let rec wrap = function
    | [] ->
        kind
    | (_, k) :: rest ->
        KArrow (k, wrap rest)
  in
  wrap bindings
(* Decompose a kind into its argument kinds (in order) and its final
   result kind. *)
let as_arrow k =
  let rec peel args = function
    | KArrow (dom, cod) ->
        peel (dom :: args) cod
    | result ->
        List.rev args, result
  in
  peel [] k
(* Number of arguments a kind expects, i.e. the length of its arrow
   spine. *)
let arity k =
  let rec go = function
    | KArrow (_, k2) ->
        1 + go k2
    | _ ->
        0
  in
  go k
(* Render a kind as a string.  Kinds are first-order, so no parentheses
   are ever required around arrow arguments. *)
let rec print k =
  match k with
  | KValue ->
      "value"
  | KPerm ->
      "perm"
  | KType ->
      "type"
  | KArrow (dom, cod) ->
      print dom ^ " -> " ^ print cod
(* Pretty-printing document for a kind. *)
let print_kind k =
  PPrint.string (print k)
(* Structural equality suffices for kinds. *)
let equal =
  (=)
| null | https://raw.githubusercontent.com/protz/mezzo/4e9d917558bd96067437116341b7a6ea02ab9c39/parsing/Kind.ml | ocaml | type kind =
| KValue
| KType
| KPerm
| KArrow of kind * kind
let karrow bindings kind =
List.fold_right (fun (_, kind1) kind2 ->
KArrow (kind1, kind2)
) bindings kind
let as_arrow k =
let rec as_arrow accu = function
| KArrow (k1, k2) ->
as_arrow (k1 :: accu) k2
| k ->
List.rev accu, k
in
as_arrow [] k
let arity k =
let rec arity accu = function
| KArrow (_, k2) ->
arity (1 + accu) k2
| _ ->
accu
in
arity 0 k
let rec print =
function
| KValue ->
"value"
| KPerm ->
"perm"
| KType ->
"type"
| KArrow (k1, k2) ->
No parentheses required ; first - order kinds only .
print k1 ^ " -> " ^ print k2
let print_kind k =
PPrint.string (print k)
let equal =
(=)
|
|
9d3fd2763163042eaa88e4334c93bbdbd10d994b61dee673e26ee4acecb0551c | archaelus/erlirc | irc_messages_tests.erl | %%%-------------------------------------------------------------------
Geoff Ca nt
@author nt < >
%% @version {@vsn}, {@date} {@time}
%% @doc
%% @end
%%%-------------------------------------------------------------------
-module(irc_messages_tests).
-include_lib("irc.hrl").
-include_lib("logging.hrl").
-include_lib("eunit/include/eunit.hrl").
-import(irc_messages, [parse_line/1
,to_list/1
,decode_ctcp_delims/1
,encode_ctcp_delims/1
,unix_ts_to_datetime/1
,chantype_to_list/1
,list_to_chantype/1
]).
%% Smoke tests: each feeds one raw IRC / P10 protocol line to
%% parse_line/1 and checks the decoded #irc_cmd{} record.
irc_error_test() ->
    ?assertMatch(#irc_cmd{name=error},
                 parse_line("ERROR :Closing Link: erl.irc by ve.irc.dollyfish.net.nz (No C:line)\r\n")).
%% Server introduction line.
irc_server_test() ->
    ?assertMatch(X when is_record(X, irc_cmd),
                 parse_line("SERVER ve.irc.dollyfish.net.nz 1 1164352162 1171089421 J10 ACAP] +h :ircd on ve\r\n")).
%% P10 net-burst lines: server, nick, service, channels, end-of-burst.
burst_server_test() ->
    ?assertMatch(X when is_record(X, irc_cmd),
                 parse_line("AC S scorch.irc.dollyfish.net.nz 2 0 1164963563 P10 ABAP] +h :DollyNET ircd at irc.dollyfish.net.nz\r\n")).
burst_nick_test() ->
    ?assertMatch(X when is_record(X, irc_cmd),
                 parse_line("AB N Ned 2 1166709690 ~martyn 202-61-3-148.cable5.acsdata.co.nz +oiwg DKPQOU ABABc :Unknown\r\n")).
burst_service_test() ->
    ?assertMatch(X when is_record(X, irc_cmd),
                 parse_line("AB S x2.irc.dollyfish.net.nz 3 0 1164965565 P10 A0]]] +s :X2 Channel Service\r\n")).
burst_chang_test() ->
    ?assertMatch(X when is_record(X, irc_cmd),
                 parse_line("AC B #wow 1167179822 ACAE[\r\n")).
burst_chan_2_test() ->
    ?assertMatch(X when is_record(X, irc_cmd),
                 parse_line("AC B #blah 1164352204 +tn ABAFT,ACAKJ,ABAFQ,ACAJ9,ABAE7,ABAEp,ACAJH,ABAEf,ABABs:o,ABABc,A0AAA\r\n")).
burst_end_test() ->
    ?assertMatch(X when is_record(X, irc_cmd),
                 parse_line("AC EB \r\n")).
burst_nick_2_test() ->
    ?assertMatch(X when is_record(X, irc_cmd),
                 parse_line("AC N shinsterw 1 1167197569 sian leibniz.catalyst.net.nz DKTvAH ACAE[ :shinster\r\n")).
%% CTCP VERSION request embedded in a PRIVMSG.
ctcp_version_test() ->
    ?assertMatch(X when is_record(X, irc_cmd),
                 parse_line(":freenode-connect!freenode@freenode/bot/connect PRIVMSG nemerling :^AVERSION^A")).
%% 353/366: NAMES reply round-trips through parse_line/1 and to_list/1.
namreply_test() ->
    ?assertMatch(":ve.irc.dollyfish.net.nz 353 nembot = #dullbots :nembot @nem\r\n",
                 to_list(parse_line(":ve.irc.dollyfish.net.nz 353 nembot = #dullbots :nembot @nem\r\n"))).
endofnames_test() ->
    ?assertMatch(X when is_record(X, irc_cmd),
                 parse_line(":ve.irc.dollyfish.net.nz 366 nembot #dullbots :End of /NAMES list.\r\n")).
%% 332/333: channel topic and topic metadata.
topic_test() ->
    ?assertMatch(X when is_record(X, irc_cmd),
                 parse_line(":ve.irc.dollyfish.net.nz 332 nermerlin #dullbots :Foo.\r\n")).
topicinfo_test() ->
    %% Checks each decoded argument, including the UNIX-timestamp
    %% conversion of the topic_set_at field.
    X = parse_line(":ve.irc.dollyfish.net.nz 333 nermerlin #dullbots nem 1180326256\r\n"),
    ?assertMatch(#irc_cmd{},X),
    ?assertMatch(topicinfo, X#irc_cmd.name),
    ?assertMatch("#dullbots",
                 proplists:get_value(channel, X#irc_cmd.args)),
    ?assertMatch("nem",
                 proplists:get_value(topic_set_by, X#irc_cmd.args)),
    TS = unix_ts_to_datetime(1180326256),
    ?assertMatch(TS,
                 proplists:get_value(topic_set_at, X#irc_cmd.args)).
to_list_topicinfo_test() ->
    ?assertMatch(X when is_record(X, irc_cmd),
                 parse_line(":ve.irc.dollyfish.net.nz 333 nermerlin #dullbots nem 1180326256\r\n")).
%% JOIN rendering, with and without a source user prefix.
to_list_join_test() ->
    ?assertMatch("JOIN #c1,#c2\r\n",
                 to_list(#irc_cmd{name=join,
                                  args=[{channels,
                                         ["#c1", "#c2"]}]})),
    ?assertMatch(":nem!nem@localhost JOIN #c1,#c2\r\n",
                 to_list(#irc_cmd{name=join,
                                  target=#user{nick="nem",name="nem",host="localhost"},
                                  args=[{channels,
                                         ["#c1", "#c2"]}]})).
%% CTCP 0x01 delimiter handling in both directions.
decode_ctcp_delims_test() ->
    ?assertMatch([{ctcp, "VERSION"}],
                 decode_ctcp_delims([1] ++ "VERSION" ++ [1])),
    ?assertMatch([{non_ctcp, "This is a "}, {ctcp, "VERSION"}, {non_ctcp, "test."}],
                 decode_ctcp_delims("This is a " ++ [1] ++ "VERSION" ++ [1] ++ "test.")).
encode_ctcp_delims_test() ->
    ?assertMatch([1,$V,$E,$R,$S,$I,$O,$N,1],
                 encode_ctcp_delims([{ctcp, "VERSION"}])),
    ?assertMatch("This is a " ++ [1] ++ "VERSION" ++ [1] ++ "test.",
                 encode_ctcp_delims([{non_ctcp, "This is a "}, {ctcp, "VERSION"}, {non_ctcp, "test."}])).
%% NICK change carries the new name.
nick_reply_test() ->
    ?assertMatch(#irc_cmd{name=nick, args=[{name, "nemor"}]},
                 parse_line(":nemerlng! NICK :nemor\r\n")).
%% USER registration: user name and real name are both decoded.
user_test() ->
    Cmd = parse_line("USER nem nem localhost :Geoff Cant\r\n"),
    ?assertMatch(#irc_cmd{name=user}, Cmd),
    ?assertMatch("nem", proplists:get_value(user_name,Cmd#irc_cmd.args)),
    ?assertMatch("Geoff Cant", proplists:get_value(real_name,Cmd#irc_cmd.args)).
user_to_list_test() ->
    ?assertMatch("nem!nem@localhost", to_list(#user{nick="nem",name="nem",host="localhost"})).
%% PING/PONG with one or two server arguments, parsed ...
pingpong_test() ->
    ?assertMatch(#irc_cmd{name=ping,args=[{servers, {"localhost", []}}]},
                 parse_line("PING localhost\r\n")),
    ?assertMatch(#irc_cmd{name=ping,args=[{servers, {"localhost", "foobar"}}]},
                 parse_line("PING localhost foobar\r\n")),
    ?assertMatch(#irc_cmd{name=pong,args=[{servers, {"localhost", []}}]},
                 parse_line("PONG localhost\r\n")),
    ?assertMatch(#irc_cmd{name=pong,args=[{servers, {"localhost", "foobar"}}]},
                 parse_line("PONG localhost foobar\r\n")).
%% ... and re-rendered byte-for-byte.
pingpong_gen_test() ->
    ?assertMatch("PING localhost\r\n",
                 to_list(parse_line("PING localhost\r\n"))),
    ?assertMatch("PING localhost foobar\r\n",
                 to_list(parse_line("PING localhost foobar\r\n"))),
    ?assertMatch("PONG localhost\r\n",
                 to_list(parse_line("PONG localhost\r\n"))),
    ?assertMatch("PONG localhost foobar\r\n",
                 to_list(parse_line("PONG localhost foobar\r\n"))).
%% QUIT with and without a message; a QUIT re-tagged as error renders
%% as an ERROR line.
quit_test() ->
    ?assertMatch(#irc_cmd{name=quit},
                 parse_line("QUIT\r\n")),
    ?assertMatch(#irc_cmd{name=quit, args=[{message, "Foo"}]},
                 parse_line("QUIT :Foo\r\n")),
    ?assertMatch("QUIT\r\n",
                 to_list(parse_line("QUIT\r\n"))),
    ?assertMatch("QUIT :Foo\r\n",
                 to_list(parse_line("QUIT :Foo\r\n"))),
    ?assertMatch("ERROR :Foo\r\n",
                 to_list((parse_line("QUIT :Foo\r\n"))#irc_cmd{name=error})).
%% 422: no-MOTD numeric, with default and explicit message text.
nomotd_to_list_test() ->
    ?assertMatch(":localhost 422 nem :NOMOTD\r\n",
                 to_list(#irc_cmd{source=#irc_server{host="localhost"},
                                  target=#user{nick="nem"},
                                  name=nomotd,
                                  args=[]})),
    ?assertMatch(":localhost 422 nem :No MOTD\r\n",
                 to_list(#irc_cmd{source=#irc_server{host="localhost"},
                                  target=#user{nick="nem"},
                                  name=nomotd,
                                  args=[{message, "No MOTD"}]})).
%% chantype_to_list/1 and list_to_chantype/1 are inverses.
chantypes_test() ->
    ?assert(lists:all(fun (T) ->
                              T =:= list_to_chantype(chantype_to_list(T))
                      end,
                      [secret, public, private])).
%numreply_test() ->
%    ?assertMatch(Num when Num > 0, string:str(to_list(#irc_cmd{name=notregistered}))).
| null | https://raw.githubusercontent.com/archaelus/erlirc/b922b2004f0f9f58a6ccf8fe71313190dee081c6/src/irc_messages_tests.erl | erlang | -------------------------------------------------------------------
@version {@vsn}, {@date} {@time}
@doc
@end
-------------------------------------------------------------------
numreply_test() -> | Geoff Ca nt
@author nt < >
-module(irc_messages_tests).
-include_lib("irc.hrl").
-include_lib("logging.hrl").
-include_lib("eunit/include/eunit.hrl").
-import(irc_messages, [parse_line/1
,to_list/1
,decode_ctcp_delims/1
,encode_ctcp_delims/1
,unix_ts_to_datetime/1
,chantype_to_list/1
,list_to_chantype/1
]).
irc_error_test() ->
?assertMatch(#irc_cmd{name=error},
parse_line("ERROR :Closing Link: erl.irc by ve.irc.dollyfish.net.nz (No C:line)\r\n")).
irc_server_test() ->
?assertMatch(X when is_record(X, irc_cmd),
parse_line("SERVER ve.irc.dollyfish.net.nz 1 1164352162 1171089421 J10 ACAP] +h :ircd on ve\r\n")).
burst_server_test() ->
?assertMatch(X when is_record(X, irc_cmd),
parse_line("AC S scorch.irc.dollyfish.net.nz 2 0 1164963563 P10 ABAP] +h :DollyNET ircd at irc.dollyfish.net.nz\r\n")).
burst_nick_test() ->
?assertMatch(X when is_record(X, irc_cmd),
parse_line("AB N Ned 2 1166709690 ~martyn 202-61-3-148.cable5.acsdata.co.nz +oiwg DKPQOU ABABc :Unknown\r\n")).
burst_service_test() ->
?assertMatch(X when is_record(X, irc_cmd),
parse_line("AB S x2.irc.dollyfish.net.nz 3 0 1164965565 P10 A0]]] +s :X2 Channel Service\r\n")).
burst_chang_test() ->
?assertMatch(X when is_record(X, irc_cmd),
parse_line("AC B #wow 1167179822 ACAE[\r\n")).
burst_chan_2_test() ->
?assertMatch(X when is_record(X, irc_cmd),
parse_line("AC B #blah 1164352204 +tn ABAFT,ACAKJ,ABAFQ,ACAJ9,ABAE7,ABAEp,ACAJH,ABAEf,ABABs:o,ABABc,A0AAA\r\n")).
burst_end_test() ->
?assertMatch(X when is_record(X, irc_cmd),
parse_line("AC EB \r\n")).
burst_nick_2_test() ->
?assertMatch(X when is_record(X, irc_cmd),
parse_line("AC N shinsterw 1 1167197569 sian leibniz.catalyst.net.nz DKTvAH ACAE[ :shinster\r\n")).
ctcp_version_test() ->
?assertMatch(X when is_record(X, irc_cmd),
parse_line(":freenode-connect!freenode@freenode/bot/connect PRIVMSG nemerling :^AVERSION^A")).
namreply_test() ->
?assertMatch(":ve.irc.dollyfish.net.nz 353 nembot = #dullbots :nembot @nem\r\n",
to_list(parse_line(":ve.irc.dollyfish.net.nz 353 nembot = #dullbots :nembot @nem\r\n"))).
endofnames_test() ->
?assertMatch(X when is_record(X, irc_cmd),
parse_line(":ve.irc.dollyfish.net.nz 366 nembot #dullbots :End of /NAMES list.\r\n")).
topic_test() ->
?assertMatch(X when is_record(X, irc_cmd),
parse_line(":ve.irc.dollyfish.net.nz 332 nermerlin #dullbots :Foo.\r\n")).
topicinfo_test() ->
X = parse_line(":ve.irc.dollyfish.net.nz 333 nermerlin #dullbots nem 1180326256\r\n"),
?assertMatch(#irc_cmd{},X),
?assertMatch(topicinfo, X#irc_cmd.name),
?assertMatch("#dullbots",
proplists:get_value(channel, X#irc_cmd.args)),
?assertMatch("nem",
proplists:get_value(topic_set_by, X#irc_cmd.args)),
TS = unix_ts_to_datetime(1180326256),
?assertMatch(TS,
proplists:get_value(topic_set_at, X#irc_cmd.args)).
to_list_topicinfo_test() ->
?assertMatch(X when is_record(X, irc_cmd),
parse_line(":ve.irc.dollyfish.net.nz 333 nermerlin #dullbots nem 1180326256\r\n")).
to_list_join_test() ->
?assertMatch("JOIN #c1,#c2\r\n",
to_list(#irc_cmd{name=join,
args=[{channels,
["#c1", "#c2"]}]})),
?assertMatch(":nem!nem@localhost JOIN #c1,#c2\r\n",
to_list(#irc_cmd{name=join,
target=#user{nick="nem",name="nem",host="localhost"},
args=[{channels,
["#c1", "#c2"]}]})).
decode_ctcp_delims_test() ->
?assertMatch([{ctcp, "VERSION"}],
decode_ctcp_delims([1] ++ "VERSION" ++ [1])),
?assertMatch([{non_ctcp, "This is a "}, {ctcp, "VERSION"}, {non_ctcp, "test."}],
decode_ctcp_delims("This is a " ++ [1] ++ "VERSION" ++ [1] ++ "test.")).
encode_ctcp_delims_test() ->
?assertMatch([1,$V,$E,$R,$S,$I,$O,$N,1],
encode_ctcp_delims([{ctcp, "VERSION"}])),
?assertMatch("This is a " ++ [1] ++ "VERSION" ++ [1] ++ "test.",
encode_ctcp_delims([{non_ctcp, "This is a "}, {ctcp, "VERSION"}, {non_ctcp, "test."}])).
nick_reply_test() ->
?assertMatch(#irc_cmd{name=nick, args=[{name, "nemor"}]},
parse_line(":nemerlng! NICK :nemor\r\n")).
user_test() ->
Cmd = parse_line("USER nem nem localhost :Geoff Cant\r\n"),
?assertMatch(#irc_cmd{name=user}, Cmd),
?assertMatch("nem", proplists:get_value(user_name,Cmd#irc_cmd.args)),
?assertMatch("Geoff Cant", proplists:get_value(real_name,Cmd#irc_cmd.args)).
user_to_list_test() ->
?assertMatch("nem!nem@localhost", to_list(#user{nick="nem",name="nem",host="localhost"})).
pingpong_test() ->
?assertMatch(#irc_cmd{name=ping,args=[{servers, {"localhost", []}}]},
parse_line("PING localhost\r\n")),
?assertMatch(#irc_cmd{name=ping,args=[{servers, {"localhost", "foobar"}}]},
parse_line("PING localhost foobar\r\n")),
?assertMatch(#irc_cmd{name=pong,args=[{servers, {"localhost", []}}]},
parse_line("PONG localhost\r\n")),
?assertMatch(#irc_cmd{name=pong,args=[{servers, {"localhost", "foobar"}}]},
parse_line("PONG localhost foobar\r\n")).
pingpong_gen_test() ->
?assertMatch("PING localhost\r\n",
to_list(parse_line("PING localhost\r\n"))),
?assertMatch("PING localhost foobar\r\n",
to_list(parse_line("PING localhost foobar\r\n"))),
?assertMatch("PONG localhost\r\n",
to_list(parse_line("PONG localhost\r\n"))),
?assertMatch("PONG localhost foobar\r\n",
to_list(parse_line("PONG localhost foobar\r\n"))).
quit_test() ->
?assertMatch(#irc_cmd{name=quit},
parse_line("QUIT\r\n")),
?assertMatch(#irc_cmd{name=quit, args=[{message, "Foo"}]},
parse_line("QUIT :Foo\r\n")),
?assertMatch("QUIT\r\n",
to_list(parse_line("QUIT\r\n"))),
?assertMatch("QUIT :Foo\r\n",
to_list(parse_line("QUIT :Foo\r\n"))),
?assertMatch("ERROR :Foo\r\n",
to_list((parse_line("QUIT :Foo\r\n"))#irc_cmd{name=error})).
nomotd_to_list_test() ->
?assertMatch(":localhost 422 nem :NOMOTD\r\n",
to_list(#irc_cmd{source=#irc_server{host="localhost"},
target=#user{nick="nem"},
name=nomotd,
args=[]})),
?assertMatch(":localhost 422 nem :No MOTD\r\n",
to_list(#irc_cmd{source=#irc_server{host="localhost"},
target=#user{nick="nem"},
name=nomotd,
args=[{message, "No MOTD"}]})).
chantypes_test() ->
?assert(lists:all(fun (T) ->
T =:= list_to_chantype(chantype_to_list(T))
end,
[secret, public, private])).
? assertMatch(Num when > 0 , string : str(to_list(#irc_cmd{name = notregistered , } ) ) ) .
|
a8b4cdac9bdb39661d3d07c3d7d8387cfe11541eff006845eb5ae7485324c0a2 | sras/servant-examples | CustomPostFormat.hs | # LANGUAGE DataKinds #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeOperators #
module CustomPostFormat where
import Servant ( PlainText
, MimeUnrender(..)
, Accept(..)
, Post
, ReqBody
, Proxy(..)
, type (:>) -- Syntax for importing type operator
, type (:<|>)
, (:<|>)(..)
)
import Servant.Server (Handler, Server, Application, serve)
import Network.Wai.Handler.Warp (run)
import Data.Text as T (unpack)
import Data.Text.Lazy.Encoding as TE (decodeUtf8')
import Data.Text.Lazy as TL (toStrict)
-- In this example, we see how we can accept data
-- in any custom format. Earlier example, we were accepting
-- data in JSON format, which Servant has built in support for.
data ANewFormat -- This is our new format that we will be using along with JSON. We won't be needing a constructor since we won't be dealing with values of this type.
-- The sole purpose of this type is to enable the type system to select the proper decoding/encoding and content type generation
-- functions inside proper typeclass instances, which is why we don't need a constructor.
-- In the code below, you can see the type of the two end points.
-- Look at the `ReqBody '[ANewFormat] String` part, in the first one.
-- This is what enables our endpoint to receive a value of type
-- `String` encoded as ANewFormat, in the body of the request.
-- The MimeUnrender instance also defines how some bytestring
-- encoded as ANewFormat can be decoded into a String.
-- | The API: the first endpoint takes its String body encoded as
-- ANewFormat, the second as plain text; both respond with plain text.
type ServantType = "name-in-new-format" :> ReqBody '[ANewFormat] String :> Post '[PlainText] String
              :<|> "name" :> ReqBody '[PlainText] String :> Post '[PlainText] String
instance Accept ANewFormat where
  -- This instance means that servant will use the decoding specific to
  -- ANewFormat as soon as it sees this content type ("text/a-new-format")
  -- in the incoming request.
  -- NOTE(review): the 'contentType' method body was dropped by the
  -- extraction that produced this chunk; it should map ANewFormat to
  -- "text/a-new-format". Restore it from the original source.
-- | This instance implements the decoding of a bytestring that encodes
-- some content in ANewFormat, into a target type (which is String here).
-- NOTE(review): the instance head and the Right branch were lost during
-- extraction; they are reconstructed here from the surrounding comments
-- and the sample curl output below -- confirm against the original.
instance MimeUnrender ANewFormat String where
  mimeUnrender _ bs = case TE.decodeUtf8' bs of
    -- We just prefix the decoded text to differentiate it, showing this
    -- was decoded using the ANewFormat decoding logic.
    Right t -> Right ("Decoded from ANewFormat - " ++ T.unpack (TL.toStrict t))
    Left _ -> Left "Decoding error"
-- | Echo the request body straight back to the caller.
handlerName :: String -> Handler String
handlerName = return
-- | Both routes share one handler; they differ only in how the request
-- body is decoded.
server :: Server ServantType
server = handlerName :<|> handlerName -- We can use same handler for both endpoints, because they only differ in input encoding.
-- | WAI application serving 'ServantType'.
app :: Application
app = serve (Proxy :: Proxy ServantType) server
-- | Serve the API on port 4000.
mainFn :: IO ()
mainFn = run 4000 app
-- Output - See how the output differs when only the content type is changed, triggering different decoding mechanisms.
--
-- $ curl -v -H "Content-Type:text/a-new-format" -d "\"John\"" http://127.0.0.1:4000/name-in-new-format
-- *   Trying 127.0.0.1...
-- * Connected to 127.0.0.1 (127.0.0.1) port 4000 (#0)
-- > POST /name-in-new-format HTTP/1.1
-- > Host: 127.0.0.1:4000
-- > User-Agent: curl/7.47.0
-- > Accept: */*
-- > Content-Type:text/a-new-format
-- > Content-Length: 6
-- >
-- * upload completely sent off: 6 out of 6 bytes
-- < HTTP/1.1 200 OK
-- < Transfer-Encoding: chunked
-- < Date: Sun, 22 Jul 2018 07:37:15 GMT
-- < Server: Warp/3.2.23
-- < Content-Type: text/plain;charset=utf-8
-- <
-- * Connection #0 to host 127.0.0.1 left intact
-- Decoded from ANewFormat - "John"
--
-- $ curl -v -H "Content-Type: text/plain;charset=utf-8" -d "John" http://127.0.0.1:4000/name
-- *   Trying 127.0.0.1...
-- * Connected to 127.0.0.1 (127.0.0.1) port 4000 (#0)
-- > POST /name HTTP/1.1
-- > Host: 127.0.0.1:4000
-- > User-Agent: curl/7.47.0
-- > Accept: */*
-- > Content-Type: text/plain;charset=utf-8
-- > Content-Length: 4
-- >
-- * upload completely sent off: 4 out of 4 bytes
-- < HTTP/1.1 200 OK
-- < Transfer-Encoding: chunked
-- < Date: Sun, 22 Jul 2018 07:40:17 GMT
-- < Server: Warp/3.2.23
-- < Content-Type: text/plain;charset=utf-8
-- <
-- * Connection #0 to host 127.0.0.1 left intact
-- Johns
| null | https://raw.githubusercontent.com/sras/servant-examples/923b54a13e14a4c2a37a3633dc7e2fa8fe49adc6/src/CustomPostFormat.hs | haskell | # LANGUAGE OverloadedStrings #
Syntax for importing type operator
In this example, we see how we can accept data
in any custom format. Earlier example, we were accepting
data in JSON format, which Servant has built in support for.
This is our new format that we will be using along with JSON. We won't be needing a constructor since we won't be dealing with values of this type.
functions inside proper typeclass instances, which is why we don't need a constructor.
This is what enables our endpoint to recieve a value of type
Just output back the input string value
We can use same handler for both endpoints, because they only differ in input encoding.
Output - See how the output differs when only the content type is changes, triggerring different decoding mechanisms.
> User-Agent: curl/7.47.0
> Accept: */*
> Content-Type:text/a-new-format
>
< Transfer-Encoding: chunked
< Server: Warp/3.2.23
<
> POST /name HTTP/1.1
> User-Agent: curl/7.47.0
> Accept: */*
>
< Transfer-Encoding: chunked
< Server: Warp/3.2.23
<
Johns | # LANGUAGE DataKinds #
# LANGUAGE FlexibleInstances #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeOperators #
module CustomPostFormat where
import Servant ( PlainText
, MimeUnrender(..)
, Accept(..)
, Post
, ReqBody
, Proxy(..)
, type (:<|>)
, (:<|>)(..)
)
import Servant.Server (Handler, Server, Application, serve)
import Network.Wai.Handler.Warp (run)
import Data.Text as T (unpack)
import Data.Text.Lazy.Encoding as TE (decodeUtf8')
import Data.Text.Lazy as TL (toStrict)
The sole purpose of this type is to enable the type system to select the proper decoding / encoding and content type generation
In the code below , you can see the type of the two end points .
Look at the ` ReqBody ' [ ANewFormat ] String ` part , in the first one .
` String ` encoded as ANewFormat , in the body of the request .
The MimeUnrender instance also defines how some bytestring
encoded as ANewFormat can be decoded into a String .
type ServantType = "name-in-new-format" :> ReqBody '[ANewFormat] String :> Post '[PlainText] String
:<|> "name" :> ReqBody '[PlainText] String :> Post '[PlainText] String
instance Accept ANewFormat where
This instance means that servant will use the decoding specific to ANewFormat as soon as it sees this content type ( " text / a - new - format " ) in the incoming request .
This instance implements the decoding of a bytestring that encodes some content in ANewFormat , into a target type ( Which is String here )
mimeUnrender _ bs = case TE.decodeUtf8' bs of
We just prefix the decoded text to differentiate it to show this was decoded using ANewFormat decoding logic .
Left _ -> Left "Decoding error"
handlerName :: String -> Handler String
server :: Server ServantType
app :: Application
app = serve (Proxy :: Proxy ServantType) server
mainFn :: IO ()
mainFn = run 4000 app
$ curl -v -H " Content - Type : text / a - new - format " -d " \"John\ " " :4000 / name - in - new - format
* Trying 127.0.0.1 ...
* Connected to 127.0.0.1 ( 127.0.0.1 ) port 4000 ( # 0 )
> POST /name - in - new - format HTTP/1.1
> Host :
> Content - Length : 6
* upload completely sent off : 6 out of 6 bytes
< HTTP/1.1 200 OK
< Date : Sun , 22 Jul 2018 07:37:15 GMT
< Content - Type : text / plain;charset = utf-8
* Connection # 0 to host 127.0.0.1 left intact
Decoded from ANewFormat - " "
$ curl -v -H " Content - Type : text / plain;charset = utf-8 " -d " " :4000 / name
* Trying 127.0.0.1 ...
* Connected to 127.0.0.1 ( 127.0.0.1 ) port 4000 ( # 0 )
> Host :
> Content - Type : text / plain;charset = utf-8
> Content - Length : 4
* upload completely sent off : 4 out of 4 bytes
< HTTP/1.1 200 OK
< Date : Sun , 22 Jul 2018 07:40:17 GMT
< Content - Type : text / plain;charset = utf-8
* Connection # 0 to host 127.0.0.1 left intact
|
4edf2d26ee952f9e97f2391485387608a8aa59af7f19a5c4938a5f95a5880b1f | DougHamil/threeagent | entity.cljs | (ns threeagent.entity
(:require ["three" :as three]))
(defprotocol IEntityType
(create [this context entity-config]
"Returns a new instance of this entity type, based on the provided `entity-config` and `context`.
The returned instance should be a ThreeJS `Object3D` or one of its sub-classes.")
(destroy! [this context ^three/Object3D object entity-config]
"Destroys an existing instance of this entity type."))
(defprotocol IUpdateableEntityType
(update! [this context ^three/Object3D object new-entity-config]
"Updates an existing instance of this entity entity type in-place, based on the provided
`new-entity-config` and `context`."))
| null | https://raw.githubusercontent.com/DougHamil/threeagent/8f8b158bcbbf484b41d6f4a87436ce6f81ca7c04/src/main/threeagent/entity.cljs | clojure | (ns threeagent.entity
(:require ["three" :as three]))
(defprotocol IEntityType
(create [this context entity-config]
"Returns a new instance of this entity type, based on the provided `entity-config` and `context`.
The returned instance should be a ThreeJS `Object3D` or one of its sub-classes.")
(destroy! [this context ^three/Object3D object entity-config]
"Destroys an existing instance of this entity type."))
(defprotocol IUpdateableEntityType
(update! [this context ^three/Object3D object new-entity-config]
"Updates an existing instance of this entity entity type in-place, based on the provided
`new-entity-config` and `context`."))
|
|
d0043603c4e923355ed85ec0c432983f9fc494adfdcd29215b705e94b77f7a6a | Decentralized-Pictures/T4L3NT | test_typechecking.ml | * Testing
-------
Component : Protocol ( type - checking )
Invocation : cd src / proto_011_PtHangz2 / lib_protocol / test
dune exec ./main.exe -- test " ^typechecking$ "
Subject : Type - checking
-------
Component: Protocol (type-checking)
Invocation: cd src/proto_011_PtHangz2/lib_protocol/test
dune exec ./main.exe -- test "^typechecking$"
Subject: Type-checking
*)
open Protocol
open Alpha_context
open Micheline
open Error_monad_operators
let wrap_error_lwt x = x >>= fun x -> Lwt.return @@ Environment.wrap_tzresult x
(* Test for Script_ir_translator.unparse_script on a script declaring views. *)
let test_unparse_view () =
let dummy_contract =
"{parameter unit; storage unit; code { CAR; NIL operation; PAIR }; view \
\"v0\" unit unit { DROP; UNIT }; view \"v1\" nat nat {CAR}}"
in
let contract_expr = Expr.from_string dummy_contract in
let storage_expr = Expr.from_string "Unit" in
let bef = Script.lazy_expr contract_expr |> Data_encoding.force_bytes in
let script =
Script.{code = lazy_expr contract_expr; storage = lazy_expr storage_expr}
in
Context.init 3 >>=? fun (b, _cs) ->
Incremental.begin_construction b >>=? fun v ->
let ctx = Incremental.alpha_ctxt v in
Script_ir_translator.parse_script
ctx
~legacy:true
~allow_forged_in_storage:false
script
>>=?? fun (Ex_script script, ctx) ->
Script_ir_translator.unparse_script ctx Readable script
>>=?? fun (unparse_script, _ctx) ->
let aft = Data_encoding.force_bytes unparse_script.code in
Alcotest.(check bytes) "didn't match" bef aft |> return
let test_context () =
Context.init 3 >>=? fun (b, _cs) ->
Incremental.begin_construction b >>=? fun v ->
return (Incremental.alpha_ctxt v)
let test_context_with_nat_nat_big_map () =
Context.init 3 >>=? fun (b, contracts) ->
let source = WithExceptions.Option.get ~loc:__LOC__ @@ List.hd contracts in
Op.origination (B b) source ~script:Op.dummy_script
>>=? fun (operation, originated) ->
Block.bake ~operation b >>=? fun b ->
Incremental.begin_construction b >>=? fun v ->
let ctxt = Incremental.alpha_ctxt v in
wrap_error_lwt @@ Big_map.fresh ~temporary:false ctxt >>=? fun (ctxt, id) ->
let nat_ty = Script_typed_ir.nat_t ~annot:None in
wrap_error_lwt @@ Lwt.return @@ Script_ir_translator.unparse_ty ctxt nat_ty
>>=? fun (nat_ty_node, ctxt) ->
let nat_ty_expr = Micheline.strip_locations nat_ty_node in
let alloc = Big_map.{key_type = nat_ty_expr; value_type = nat_ty_expr} in
let init = Lazy_storage.Alloc alloc in
let diffs =
[
Lazy_storage.make
Lazy_storage.Kind.Big_map
id
(Update {init; updates = []});
]
in
wrap_error_lwt
@@ Contract.update_script_storage ctxt originated nat_ty_expr (Some diffs)
>>=? fun ctxt -> return (ctxt, id)
let read_file filename =
let ch = open_in filename in
let s = really_input_string ch (in_channel_length ch) in
close_in ch ;
s
(** Check that the custom stack overflow exception is triggered when
it should be. *)
let test_typecheck_stack_overflow () =
test_context () >>=? fun ctxt ->
let storage = "Unit" in
let parameter = "Unit" in
let script = read_file "./contracts/big_interpreter_stack.tz" in
Contract_helpers.run_script ctxt script ~storage ~parameter () >>= function
| Ok _ -> Alcotest.fail "expected an error"
| Error lst
when List.mem
~equal:( = )
(Environment.Ecoproto_error
Script_tc_errors.Typechecking_too_many_recursive_calls)
lst ->
return ()
| Error trace ->
Alcotest.failf "Unexpected error: %a" Error_monad.pp_print_trace trace
(* NOTE: this test fails with an out-of-memory exception. *)
let _test_unparse_stack_overflow () =
test_context () >>=? fun ctxt ->
(* Meme *)
let enorme_et_seq n =
let rec aux n acc = aux (n - 1) @@ Micheline.Seq (0, [acc]) in
aux n (Micheline.Int (0, Z.zero))
in
Script_ir_translator.(unparse_code ctxt Readable (enorme_et_seq 10_001))
>>= function
| Ok _ -> Alcotest.fail "expected an error"
| Error trace ->
let trace_string =
Format.asprintf "%a" Environment.Error_monad.pp_trace trace
in
let expect_id = "michelson_v1.unparsing_stack_overflow" in
let expect_descrfiption =
"Too many recursive calls were needed for unparsing"
in
if
Astring.String.is_infix ~affix:expect_id trace_string
&& Astring.String.is_infix ~affix:expect_descrfiption trace_string
then return_unit
else
Alcotest.failf
"Unexpected error (%s) at %s"
trace_string
__LOC__
return_unit
let location = function
| Prim (loc, _, _, _)
| Int (loc, _)
| String (loc, _)
| Bytes (loc, _)
| Seq (loc, _) ->
loc
let test_parse_ty ctxt node expected =
let legacy = false in
let allow_lazy_storage = true in
let allow_operation = true in
let allow_contract = true in
let allow_ticket = true in
Environment.wrap_tzresult
( Script_ir_translator.parse_ty
ctxt
~legacy
~allow_lazy_storage
~allow_operation
~allow_contract
~allow_ticket
node
>>? fun (Script_ir_translator.Ex_ty actual, ctxt) ->
Script_ir_translator.ty_eq ctxt (location node) actual expected
>|? fun (_, ctxt) -> ctxt )
let test_parse_comb_type () =
let open Script in
let open Script_typed_ir in
let nat_prim = Prim (-1, T_nat, [], []) in
let nat_prim_a = Prim (-1, T_nat, [], ["%a"]) in
let nat_prim_b = Prim (-1, T_nat, [], ["%b"]) in
let nat_prim_c = Prim (-1, T_nat, [], ["%c"]) in
let nat_ty = nat_t ~annot:None in
let pair_prim l = Prim (-1, T_pair, l, []) in
let pair_ty ty1 ty2 =
pair_t (-1) (ty1, None, None) (ty2, None, None) ~annot:None
in
let pair_prim2 a b = pair_prim [a; b] in
let pair_nat_nat_prim = pair_prim2 nat_prim nat_prim in
pair_ty nat_ty nat_ty >>??= fun pair_nat_nat_ty ->
test_context () >>=? fun ctxt ->
pair
test_parse_ty ctxt pair_nat_nat_prim pair_nat_nat_ty >>?= fun ctxt ->
pair ( pair )
pair_ty pair_nat_nat_ty nat_ty >>??= fun pair_pair_nat_nat_nat_ty ->
test_parse_ty
ctxt
(pair_prim2 pair_nat_nat_prim nat_prim)
pair_pair_nat_nat_nat_ty
>>?= fun ctxt ->
pair ( pair )
pair_ty nat_ty pair_nat_nat_ty >>??= fun pair_nat_pair_nat_nat_ty ->
test_parse_ty
ctxt
(pair_prim2 nat_prim pair_nat_nat_prim)
pair_nat_pair_nat_nat_ty
>>?= fun ctxt ->
pair
pair_ty nat_ty pair_nat_nat_ty >>??= fun pair_nat_nat_nat_ty ->
test_parse_ty
ctxt
(pair_prim [nat_prim; nat_prim; nat_prim])
pair_nat_nat_nat_ty
>>?= fun ctxt ->
pair ( nat % a ) nat
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(nat_ty, None, None)
~annot:None
>>??= fun pair_nat_a_nat_ty ->
test_parse_ty ctxt (pair_prim2 nat_prim_a nat_prim) pair_nat_a_nat_ty
>>?= fun ctxt ->
(* pair nat (nat %b) *)
pair_t
(-1)
(nat_ty, None, None)
(nat_ty, Some (Field_annot "b"), None)
~annot:None
>>??= fun pair_nat_nat_b_ty ->
test_parse_ty ctxt (pair_prim2 nat_prim nat_prim_b) pair_nat_nat_b_ty
>>?= fun ctxt ->
pair ( nat % a ) ( nat % b )
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(nat_ty, Some (Field_annot "b"), None)
~annot:None
>>??= fun pair_nat_a_nat_b_ty ->
test_parse_ty ctxt (pair_prim2 nat_prim_a nat_prim_b) pair_nat_a_nat_b_ty
>>?= fun ctxt ->
pair ( nat % a ) ( nat % b ) ( nat % c )
pair_t
(-1)
(nat_ty, Some (Field_annot "b"), None)
(nat_ty, Some (Field_annot "c"), None)
~annot:None
>>??= fun pair_nat_b_nat_c_ty ->
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(pair_nat_b_nat_c_ty, None, None)
~annot:None
>>??= fun pair_nat_a_nat_b_nat_c_ty ->
test_parse_ty
ctxt
(pair_prim [nat_prim_a; nat_prim_b; nat_prim_c])
pair_nat_a_nat_b_nat_c_ty
>>?= fun ctxt ->
pair ( nat % a ) ( pair % b )
pair_t (-1) (nat_ty, None, None) (nat_ty, None, None) ~annot:None
>>??= fun pair_b_nat_nat_ty ->
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(pair_b_nat_nat_ty, Some (Field_annot "b"), None)
~annot:None
>>??= fun pair_nat_a_pair_b_nat_nat_ty ->
test_parse_ty
ctxt
(pair_prim2 nat_prim_a (Prim (-1, T_pair, [nat_prim; nat_prim], ["%b"])))
pair_nat_a_pair_b_nat_nat_ty
>>?= fun _ -> return_unit
let test_unparse_ty loc ctxt expected ty =
Environment.wrap_tzresult
( Script_ir_translator.unparse_ty ctxt ty >>? fun (actual, ctxt) ->
if actual = expected then ok ctxt
else Alcotest.failf "Unexpected error: %s" loc )
let test_unparse_comb_type () =
let open Script in
let open Script_typed_ir in
let nat_prim = Prim (-1, T_nat, [], []) in
let nat_prim_a = Prim (-1, T_nat, [], ["%a"]) in
let nat_prim_b = Prim (-1, T_nat, [], ["%b"]) in
let nat_prim_c = Prim (-1, T_nat, [], ["%c"]) in
let nat_ty = nat_t ~annot:None in
let pair_prim l = Prim (-1, T_pair, l, []) in
let pair_ty ty1 ty2 =
pair_t (-1) (ty1, None, None) (ty2, None, None) ~annot:None
in
let pair_prim2 a b = pair_prim [a; b] in
let pair_nat_nat_prim = pair_prim2 nat_prim nat_prim in
pair_ty nat_ty nat_ty >>??= fun pair_nat_nat_ty ->
test_context () >>=? fun ctxt ->
pair
test_unparse_ty __LOC__ ctxt pair_nat_nat_prim pair_nat_nat_ty
>>?= fun ctxt ->
pair ( pair )
pair_ty pair_nat_nat_ty nat_ty >>??= fun pair_pair_nat_nat_nat_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 pair_nat_nat_prim nat_prim)
pair_pair_nat_nat_nat_ty
>>?= fun ctxt ->
pair
pair_ty nat_ty pair_nat_nat_ty >>??= fun pair_nat_nat_nat_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim [nat_prim; nat_prim; nat_prim])
pair_nat_nat_nat_ty
>>?= fun ctxt ->
pair ( nat % a ) nat
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(nat_ty, None, None)
~annot:None
>>??= fun pair_nat_a_nat_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 nat_prim_a nat_prim)
pair_nat_a_nat_ty
>>?= fun ctxt ->
(* pair nat (nat %b) *)
pair_t
(-1)
(nat_ty, None, None)
(nat_ty, Some (Field_annot "b"), None)
~annot:None
>>??= fun pair_nat_nat_b_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 nat_prim nat_prim_b)
pair_nat_nat_b_ty
>>?= fun ctxt ->
pair ( nat % a ) ( nat % b )
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(nat_ty, Some (Field_annot "b"), None)
~annot:None
>>??= fun pair_nat_a_nat_b_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 nat_prim_a nat_prim_b)
pair_nat_a_nat_b_ty
>>?= fun ctxt ->
pair ( nat % a ) ( nat % b ) ( nat % c )
pair_t
(-1)
(nat_ty, Some (Field_annot "b"), None)
(nat_ty, Some (Field_annot "c"), None)
~annot:None
>>??= fun pair_nat_b_nat_c_ty ->
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(pair_nat_b_nat_c_ty, None, None)
~annot:None
>>??= fun pair_nat_a_nat_b_nat_c_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim [nat_prim_a; nat_prim_b; nat_prim_c])
pair_nat_a_nat_b_nat_c_ty
>>?= fun ctxt ->
pair ( nat % a ) ( pair % b )
pair_t (-1) (nat_ty, None, None) (nat_ty, None, None) ~annot:None
>>??= fun pair_nat_nat_ty ->
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(pair_nat_nat_ty, Some (Field_annot "b"), None)
~annot:None
>>??= fun pair_nat_a_pair_b_nat_nat_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 nat_prim_a (Prim (-1, T_pair, [nat_prim; nat_prim], ["%b"])))
pair_nat_a_pair_b_nat_nat_ty
>>?= fun ctxt ->
pair ( pair @b )
pair_t
(-1)
(nat_ty, None, None)
(pair_nat_nat_ty, None, Some (Var_annot "b"))
~annot:None
>>??= fun pair_nat_pair_b_nat_nat_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 nat_prim (Prim (-1, T_pair, [nat_prim; nat_prim], ["@b"])))
pair_nat_pair_b_nat_nat_ty
>>?= fun ctxt ->
pair ( pair : b )
pair_t
(-1)
(nat_ty, None, None)
(nat_ty, None, None)
~annot:(Some (Type_annot "b"))
>>??= fun pair_b_nat_nat_ty ->
pair_t (-1) (nat_ty, None, None) (pair_b_nat_nat_ty, None, None) ~annot:None
>>??= fun pair_nat_pair_b_nat_nat_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 nat_prim (Prim (-1, T_pair, [nat_prim; nat_prim], [":b"])))
pair_nat_pair_b_nat_nat_ty
>>?= fun _ -> return_unit
let test_unparse_comparable_ty loc ctxt expected ty =
(* unparse_comparable_ty is not exported, the simplest way to call it is to
call parse_ty on a set type *)
let open Script_typed_ir in
Environment.wrap_tzresult
( set_t (-1) ty ~annot:None >>? fun set_ty_ty ->
Script_ir_translator.unparse_ty ctxt set_ty_ty >>? fun (actual, ctxt) ->
if actual = Prim (-1, T_set, [expected], []) then ok ctxt
else Alcotest.failf "Unexpected error: %s" loc )
let test_unparse_comb_comparable_type () =
let open Script in
let open Script_typed_ir in
let nat_prim = Prim (-1, T_nat, [], []) in
let nat_prim_a = Prim (-1, T_nat, [], ["%a"]) in
let nat_prim_b = Prim (-1, T_nat, [], ["%b"]) in
let nat_prim_c = Prim (-1, T_nat, [], ["%c"]) in
let nat_ty = nat_key ~annot:None in
let pair_prim l = Prim (-1, T_pair, l, []) in
let pair_ty ty1 ty2 = pair_key (-1) (ty1, None) (ty2, None) ~annot:None in
let pair_prim2 a b = pair_prim [a; b] in
let pair_nat_nat_prim = pair_prim2 nat_prim nat_prim in
pair_ty nat_ty nat_ty >>??= fun pair_nat_nat_ty ->
test_context () >>=? fun ctxt ->
pair
test_unparse_comparable_ty __LOC__ ctxt pair_nat_nat_prim pair_nat_nat_ty
>>?= fun ctxt ->
pair ( pair )
pair_ty pair_nat_nat_ty nat_ty >>??= fun pair_pair_nat_nat_nat_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim2 pair_nat_nat_prim nat_prim)
pair_pair_nat_nat_nat_ty
>>?= fun ctxt ->
pair
pair_ty nat_ty pair_nat_nat_ty >>??= fun pair_nat_nat_nat_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim [nat_prim; nat_prim; nat_prim])
pair_nat_nat_nat_ty
>>?= fun ctxt ->
pair ( nat % a ) nat
pair_key (-1) (nat_ty, Some (Field_annot "a")) (nat_ty, None) ~annot:None
>>??= fun pair_nat_a_nat_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim2 nat_prim_a nat_prim)
pair_nat_a_nat_ty
>>?= fun ctxt ->
(* pair nat (nat %b) *)
pair_key (-1) (nat_ty, None) (nat_ty, Some (Field_annot "b")) ~annot:None
>>??= fun pair_nat_nat_b_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim2 nat_prim nat_prim_b)
pair_nat_nat_b_ty
>>?= fun ctxt ->
pair ( nat % a ) ( nat % b )
pair_key
(-1)
(nat_ty, Some (Field_annot "a"))
(nat_ty, Some (Field_annot "b"))
~annot:None
>>??= fun pair_nat_a_nat_b_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim2 nat_prim_a nat_prim_b)
pair_nat_a_nat_b_ty
>>?= fun ctxt ->
pair ( nat % a ) ( nat % b ) ( nat % c )
pair_key
(-1)
(nat_ty, Some (Field_annot "b"))
(nat_ty, Some (Field_annot "c"))
~annot:None
>>??= fun pair_nat_b_nat_c_ty ->
pair_key
(-1)
(nat_ty, Some (Field_annot "a"))
(pair_nat_b_nat_c_ty, None)
~annot:None
>>??= fun pair_nat_a_nat_b_nat_c_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim [nat_prim_a; nat_prim_b; nat_prim_c])
pair_nat_a_nat_b_nat_c_ty
>>?= fun ctxt ->
pair ( nat % a ) ( pair % b )
pair_key
(-1)
(nat_ty, Some (Field_annot "a"))
(pair_nat_nat_ty, Some (Field_annot "b"))
~annot:None
>>??= fun pair_nat_a_pair_b_nat_nat_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim2 nat_prim_a (Prim (-1, T_pair, [nat_prim; nat_prim], ["%b"])))
pair_nat_a_pair_b_nat_nat_ty
>>?= fun ctxt ->
pair ( pair : b )
pair_key (-1) (nat_ty, None) (nat_ty, None) ~annot:(Some (Type_annot "b"))
>>??= fun pair_b_nat_nat_ty ->
pair_key (-1) (nat_ty, None) (pair_b_nat_nat_ty, None) ~annot:None
>>??= fun pair_nat_pair_b_nat_nat_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim2 nat_prim (Prim (-1, T_pair, [nat_prim; nat_prim], [":b"])))
pair_nat_pair_b_nat_nat_ty
>>?= fun _ -> return_unit
let test_parse_data ?(equal = Stdlib.( = )) loc ctxt ty node expected =
let legacy = false in
let allow_forged = true in
wrap_error_lwt
( Script_ir_translator.parse_data ctxt ~legacy ~allow_forged ty node
>>=? fun (actual, ctxt) ->
if equal actual expected then return ctxt
else Alcotest.failf "Unexpected error: %s" loc )
let test_parse_data_fails loc ctxt ty node =
let legacy = false in
let allow_forged = false in
wrap_error_lwt
(Script_ir_translator.parse_data ctxt ~legacy ~allow_forged ty node
>>= function
| Ok _ -> Alcotest.failf "Unexpected typechecking success: %s" loc
| Error trace ->
let trace_string =
Format.asprintf "%a" Environment.Error_monad.pp_trace trace
in
let expect_id = "michelson_v1.invalid_constant" in
let expect_descrfiption =
"A data expression was invalid for its expected type."
in
if
Astring.String.is_infix ~affix:expect_id trace_string
&& Astring.String.is_infix ~affix:expect_descrfiption trace_string
then return_unit
else
Alcotest.failf
"Unexpected error (%s) at %s"
trace_string
__LOC__
return_unit)
let test_parse_comb_data () =
let open Script in
let open Script_typed_ir in
let z = Script_int.zero_n in
let z_prim = Micheline.Int (-1, Z.zero) in
let nat_ty = nat_t ~annot:None in
let pair_prim l = Prim (-1, D_Pair, l, []) in
let pair_ty ty1 ty2 =
pair_t (-1) (ty1, None, None) (ty2, None, None) ~annot:None
in
pair_ty nat_ty nat_ty >>??= fun pair_nat_nat_ty ->
let pair_prim2 a b = pair_prim [a; b] in
let pair_z_z_prim = pair_prim2 z_prim z_prim in
list_t (-1) nat_ty ~annot:None >>??= fun list_nat_ty ->
big_map_t (-1) (nat_key ~annot:None) nat_ty ~annot:None
>>??= fun big_map_nat_nat_ty ->
test_context_with_nat_nat_big_map () >>=? fun (ctxt, big_map_id) ->
(* Pair 0 0 *)
test_parse_data __LOC__ ctxt pair_nat_nat_ty pair_z_z_prim (z, z)
>>=? fun ctxt ->
(* {0; 0} *)
test_parse_data
__LOC__
ctxt
pair_nat_nat_ty
(Micheline.Seq (-1, [z_prim; z_prim]))
(z, z)
>>=? fun ctxt ->
Pair ( Pair 0 0 ) 0
pair_ty pair_nat_nat_ty nat_ty >>??= fun pair_pair_nat_nat_nat_ty ->
test_parse_data
__LOC__
ctxt
pair_pair_nat_nat_nat_ty
(pair_prim2 pair_z_z_prim z_prim)
((z, z), z)
>>=? fun ctxt ->
(* Pair 0 (Pair 0 0) *)
pair_ty nat_ty pair_nat_nat_ty >>??= fun pair_nat_pair_nat_nat_ty ->
test_parse_data
__LOC__
ctxt
pair_nat_pair_nat_nat_ty
(pair_prim2 z_prim pair_z_z_prim)
(z, (z, z))
>>=? fun ctxt ->
(* Pair 0 0 0 *)
test_parse_data
__LOC__
ctxt
pair_nat_pair_nat_nat_ty
(pair_prim [z_prim; z_prim; z_prim])
(z, (z, z))
>>=? fun ctxt ->
{ 0 ; 0 ; 0 }
test_parse_data
__LOC__
ctxt
pair_nat_pair_nat_nat_ty
(Micheline.Seq (-1, [z_prim; z_prim; z_prim]))
(z, (z, z))
>>=? fun ctxt ->
Should fail : { 0 } against ( list )
pair_ty nat_ty list_nat_ty >>??= fun pair_nat_list_nat_ty ->
test_parse_data_fails
__LOC__
ctxt
pair_nat_list_nat_ty
(Micheline.Seq (-1, [z_prim]))
>>=? fun () ->
Should fail : { 0 ; 0 ; 0 } against ( list )
test_parse_data_fails
__LOC__
ctxt
pair_nat_list_nat_ty
(Micheline.Seq (-1, [z_prim; z_prim; z_prim]))
>>=? fun () ->
check Pair 0 ( Pair 0 { } ) against ( big_map )
so that the following test fails for the good reason and not because
the big map does n't exist
so that the following test fails for the good reason and not because
the big map doesn't exist
*)
let id_z = Big_map.Id.unparse_to_z big_map_id in
let id_prim = Int (-1, id_z) in
let expected_big_map =
let open Script_typed_ir in
let diff = {map = Big_map_overlay.empty; size = 0} in
let nat_key_ty = nat_key ~annot:None in
{id = Some big_map_id; diff; key_type = nat_key_ty; value_type = nat_ty}
in
let equal (nat1, big_map1) (nat2, big_map2) =
(* Custom equal needed because big maps contain boxed maps containing functional values *)
nat1 = nat2 && big_map1.id = big_map2.id
&& big_map1.key_type = big_map2.key_type
&& big_map1.value_type = big_map2.value_type
&& big_map1.diff.size = big_map2.diff.size
&& Big_map_overlay.bindings big_map1.diff.map
= Big_map_overlay.bindings big_map2.diff.map
in
pair_ty nat_ty big_map_nat_nat_ty >>??= fun pair_nat_big_map_nat_nat_ty ->
test_parse_data
~equal
__LOC__
ctxt
pair_nat_big_map_nat_nat_ty
(pair_prim2 z_prim (pair_prim2 id_prim (Seq (-1, []))))
(Script_int.zero_n, expected_big_map)
>>=? fun ctxt ->
Should fail : Pair 0 0 { } against ( big_map )
test_parse_data_fails
__LOC__
ctxt
pair_nat_big_map_nat_nat_ty
(pair_prim [z_prim; id_prim; Seq (-1, [])])
let test_parse_address () =
let open Script_typed_ir in
test_context_with_nat_nat_big_map () >>=? fun (ctxt, _big_map_id) ->
(* KT1% (empty entrypoint) *)
wrap_error_lwt
(Lwt.return (Contract.of_b58check "KT1FAKEFAKEFAKEFAKEFAKEFAKEFAKGGSE2x"))
>>=? fun kt1fake ->
test_parse_data
__LOC__
ctxt
(address_t ~annot:None)
(String (-1, "KT1FAKEFAKEFAKEFAKEFAKEFAKEFAKGGSE2x%"))
(kt1fake, "default")
>>=? fun ctxt ->
(* tz1% (empty entrypoint) *)
wrap_error_lwt
(Lwt.return (Contract.of_b58check "tz1fakefakefakefakefakefakefakcphLA5"))
>>=? fun tz1fake ->
test_parse_data
__LOC__
ctxt
(address_t ~annot:None)
(String (-1, "tz1fakefakefakefakefakefakefakcphLA5%"))
(tz1fake, "default")
>|=? fun _ctxt -> ()
let test_unparse_data loc ctxt ty x ~expected_readable ~expected_optimized =
wrap_error_lwt
( Script_ir_translator.unparse_data ctxt Script_ir_translator.Readable ty x
>>=? fun (actual_readable, ctxt) ->
(if actual_readable = expected_readable then return ctxt
else Alcotest.failf "Error in readable unparsing: %s" loc)
>>=? fun ctxt ->
Script_ir_translator.unparse_data ctxt Script_ir_translator.Optimized ty x
>>=? fun (actual_optimized, ctxt) ->
if actual_optimized = expected_optimized then return ctxt
else Alcotest.failf "Error in optimized unparsing: %s" loc )
let test_unparse_comb_data () =
let open Script in
let open Script_typed_ir in
let z = Script_int.zero_n in
let z_prim = Micheline.Int (-1, Z.zero) in
let nat_ty = nat_t ~annot:None in
let pair_prim l = Prim (-1, D_Pair, l, []) in
let pair_ty ty1 ty2 =
pair_t (-1) (ty1, None, None) (ty2, None, None) ~annot:None
in
pair_ty nat_ty nat_ty >>??= fun pair_nat_nat_ty ->
let pair_prim2 a b = pair_prim [a; b] in
let pair_z_z_prim = pair_prim2 z_prim z_prim in
test_context () >>=? fun ctxt ->
(* Pair 0 0 *)
test_unparse_data
__LOC__
ctxt
pair_nat_nat_ty
(z, z)
~expected_readable:pair_z_z_prim
~expected_optimized:pair_z_z_prim
>>=? fun ctxt ->
Pair ( Pair 0 0 ) 0
pair_ty pair_nat_nat_ty nat_ty >>??= fun pair_pair_nat_nat_nat_ty ->
test_unparse_data
__LOC__
ctxt
pair_pair_nat_nat_nat_ty
((z, z), z)
~expected_readable:(pair_prim2 pair_z_z_prim z_prim)
~expected_optimized:(pair_prim2 pair_z_z_prim z_prim)
>>=? fun ctxt ->
(* Readable: Pair 0 0 0; Optimized: Pair 0 (Pair 0 0) *)
pair_ty nat_ty pair_nat_nat_ty >>??= fun pair_nat_pair_nat_nat_ty ->
test_unparse_data
__LOC__
ctxt
pair_nat_pair_nat_nat_ty
(z, (z, z))
~expected_readable:(pair_prim [z_prim; z_prim; z_prim])
~expected_optimized:(pair_prim2 z_prim pair_z_z_prim)
>>=? fun ctxt ->
Readable : Pair 0 0 0 0 ; Optimized : { 0 ; 0 ; 0 ; 0 }
pair_ty nat_ty pair_nat_pair_nat_nat_ty
>>??= fun pair_nat_pair_nat_pair_nat_nat_ty ->
test_unparse_data
__LOC__
ctxt
pair_nat_pair_nat_pair_nat_nat_ty
(z, (z, (z, z)))
~expected_readable:(pair_prim [z_prim; z_prim; z_prim; z_prim])
~expected_optimized:(Micheline.Seq (-1, [z_prim; z_prim; z_prim; z_prim]))
>>=? fun _ -> return_unit
(* Generate all the possible syntaxes for pairs *)
let gen_pairs left right =
[Prim (-1, Script.D_Pair, [left; right], []); Seq (-1, [left; right])]
(* Generate all the possible syntaxes for combs *)
let rec gen_combs leaf arity =
assert (arity >= 2) ;
if arity = 2 then gen_pairs leaf leaf
else
gen_combs leaf (arity - 1)
|> List.map (fun smaller ->
(match smaller with
| Prim (loc, Script.D_Pair, vs, []) ->
Prim (loc, Script.D_Pair, leaf :: vs, [])
| Seq (loc, vs) -> Seq (loc, leaf :: vs)
| _ -> assert false)
:: gen_pairs leaf smaller)
|> List.flatten
(* Checks the optimality of the Optimized Micheline representation for combs *)
let test_optimal_comb () =
let open Script_typed_ir in
let leaf_ty = nat_t ~annot:None in
let leaf_mich = Int (-1, Z.zero) in
let leaf_v = Script_int.zero_n in
let size_of_micheline mich =
let canonical = Micheline.strip_locations mich in
( canonical,
Bytes.length
@@ Data_encoding.Binary.to_bytes_exn Script.expr_encoding canonical )
in
let check_optimal_comb loc ctxt ty v arity =
wrap_error_lwt
( Script_ir_translator.unparse_data
ctxt
Script_ir_translator.Optimized
ty
v
>>=? fun (unparsed, ctxt) ->
let (unparsed_canonical, unparsed_size) = size_of_micheline unparsed in
List.iter_es (fun other_repr ->
let (other_repr_canonical, other_repr_size) =
size_of_micheline other_repr
in
if other_repr_size < unparsed_size then
Alcotest.failf
"At %s, for comb of arity %d, representation %a (size %d \
bytes) is shorter than representation %a (size %d bytes) \
returned by unparse_data in Optimized mode"
loc
arity
Michelson_v1_printer.print_expr
other_repr_canonical
other_repr_size
Michelson_v1_printer.print_expr
unparsed_canonical
unparsed_size
else return_unit)
@@ gen_combs leaf_mich arity
>>=? fun () -> return ctxt )
in
let pair_ty ty1 ty2 =
pair_t (-1) (ty1, None, None) (ty2, None, None) ~annot:None
in
test_context () >>=? fun ctxt ->
pair_ty leaf_ty leaf_ty >>??= fun comb2_ty ->
let comb2_v = (leaf_v, leaf_v) in
check_optimal_comb __LOC__ ctxt comb2_ty comb2_v 2 >>=? fun ctxt ->
pair_ty leaf_ty comb2_ty >>??= fun comb3_ty ->
let comb3_v = (leaf_v, comb2_v) in
check_optimal_comb __LOC__ ctxt comb3_ty comb3_v 3 >>=? fun ctxt ->
pair_ty leaf_ty comb3_ty >>??= fun comb4_ty ->
let comb4_v = (leaf_v, comb3_v) in
check_optimal_comb __LOC__ ctxt comb4_ty comb4_v 4 >>=? fun ctxt ->
pair_ty leaf_ty comb4_ty >>??= fun comb5_ty ->
let comb5_v = (leaf_v, comb4_v) in
check_optimal_comb __LOC__ ctxt comb5_ty comb5_v 5 >>=? fun _ctxt ->
return_unit
let tests =
[
Tztest.tztest "test unparse view" `Quick test_unparse_view;
Tztest.tztest
"test typecheck stack overflow error"
`Quick
test_typecheck_stack_overflow;
Tztest.tztest "test comb type parsing" `Quick test_parse_comb_type;
Tztest.tztest "test comb type unparsing" `Quick test_unparse_comb_type;
Tztest.tztest
"test comb comparable type unparsing"
`Quick
test_unparse_comb_comparable_type;
Tztest.tztest "test comb data parsing" `Quick test_parse_comb_data;
Tztest.tztest "test comb data unparsing" `Quick test_unparse_comb_data;
Tztest.tztest "test optimal comb data unparsing" `Quick test_optimal_comb;
Tztest.tztest "test parse address" `Quick test_parse_address;
]
| null | https://raw.githubusercontent.com/Decentralized-Pictures/T4L3NT/6d4d3edb2d73575384282ad5a633518cba3d29e3/src/proto_011_PtHangz2/lib_protocol/test/test_typechecking.ml | ocaml | Test for Script_ir_translator.unparse_script on a script declaring views.
* Check that the custom stack overflow exception is triggered when
it should be.
NOTE: this test fails with an out-of-memory exception.
Meme
pair nat (nat %b)
pair nat (nat %b)
unparse_comparable_ty is not exported, the simplest way to call it is to
call parse_ty on a set type
pair nat (nat %b)
Pair 0 0
{0; 0}
Pair 0 (Pair 0 0)
Pair 0 0 0
Custom equal needed because big maps contain boxed maps containing functional values
KT1% (empty entrypoint)
tz1% (empty entrypoint)
Pair 0 0
Readable: Pair 0 0 0; Optimized: Pair 0 (Pair 0 0)
Generate all the possible syntaxes for pairs
Generate all the possible syntaxes for combs
Checks the optimality of the Optimized Micheline representation for combs | * Testing
-------
Component : Protocol ( type - checking )
Invocation : cd src / proto_011_PtHangz2 / lib_protocol / test
dune exec ./main.exe -- test " ^typechecking$ "
Subject : Type - checking
-------
Component: Protocol (type-checking)
Invocation: cd src/proto_011_PtHangz2/lib_protocol/test
dune exec ./main.exe -- test "^typechecking$"
Subject: Type-checking
*)
open Protocol
open Alpha_context
open Micheline
open Error_monad_operators
let wrap_error_lwt x = x >>= fun x -> Lwt.return @@ Environment.wrap_tzresult x
let test_unparse_view () =
let dummy_contract =
"{parameter unit; storage unit; code { CAR; NIL operation; PAIR }; view \
\"v0\" unit unit { DROP; UNIT }; view \"v1\" nat nat {CAR}}"
in
let contract_expr = Expr.from_string dummy_contract in
let storage_expr = Expr.from_string "Unit" in
let bef = Script.lazy_expr contract_expr |> Data_encoding.force_bytes in
let script =
Script.{code = lazy_expr contract_expr; storage = lazy_expr storage_expr}
in
Context.init 3 >>=? fun (b, _cs) ->
Incremental.begin_construction b >>=? fun v ->
let ctx = Incremental.alpha_ctxt v in
Script_ir_translator.parse_script
ctx
~legacy:true
~allow_forged_in_storage:false
script
>>=?? fun (Ex_script script, ctx) ->
Script_ir_translator.unparse_script ctx Readable script
>>=?? fun (unparse_script, _ctx) ->
let aft = Data_encoding.force_bytes unparse_script.code in
Alcotest.(check bytes) "didn't match" bef aft |> return
let test_context () =
Context.init 3 >>=? fun (b, _cs) ->
Incremental.begin_construction b >>=? fun v ->
return (Incremental.alpha_ctxt v)
let test_context_with_nat_nat_big_map () =
Context.init 3 >>=? fun (b, contracts) ->
let source = WithExceptions.Option.get ~loc:__LOC__ @@ List.hd contracts in
Op.origination (B b) source ~script:Op.dummy_script
>>=? fun (operation, originated) ->
Block.bake ~operation b >>=? fun b ->
Incremental.begin_construction b >>=? fun v ->
let ctxt = Incremental.alpha_ctxt v in
wrap_error_lwt @@ Big_map.fresh ~temporary:false ctxt >>=? fun (ctxt, id) ->
let nat_ty = Script_typed_ir.nat_t ~annot:None in
wrap_error_lwt @@ Lwt.return @@ Script_ir_translator.unparse_ty ctxt nat_ty
>>=? fun (nat_ty_node, ctxt) ->
let nat_ty_expr = Micheline.strip_locations nat_ty_node in
let alloc = Big_map.{key_type = nat_ty_expr; value_type = nat_ty_expr} in
let init = Lazy_storage.Alloc alloc in
let diffs =
[
Lazy_storage.make
Lazy_storage.Kind.Big_map
id
(Update {init; updates = []});
]
in
wrap_error_lwt
@@ Contract.update_script_storage ctxt originated nat_ty_expr (Some diffs)
>>=? fun ctxt -> return (ctxt, id)
let read_file filename =
let ch = open_in filename in
let s = really_input_string ch (in_channel_length ch) in
close_in ch ;
s
let test_typecheck_stack_overflow () =
test_context () >>=? fun ctxt ->
let storage = "Unit" in
let parameter = "Unit" in
let script = read_file "./contracts/big_interpreter_stack.tz" in
Contract_helpers.run_script ctxt script ~storage ~parameter () >>= function
| Ok _ -> Alcotest.fail "expected an error"
| Error lst
when List.mem
~equal:( = )
(Environment.Ecoproto_error
Script_tc_errors.Typechecking_too_many_recursive_calls)
lst ->
return ()
| Error trace ->
Alcotest.failf "Unexpected error: %a" Error_monad.pp_print_trace trace
let _test_unparse_stack_overflow () =
test_context () >>=? fun ctxt ->
let enorme_et_seq n =
let rec aux n acc = aux (n - 1) @@ Micheline.Seq (0, [acc]) in
aux n (Micheline.Int (0, Z.zero))
in
Script_ir_translator.(unparse_code ctxt Readable (enorme_et_seq 10_001))
>>= function
| Ok _ -> Alcotest.fail "expected an error"
| Error trace ->
let trace_string =
Format.asprintf "%a" Environment.Error_monad.pp_trace trace
in
let expect_id = "michelson_v1.unparsing_stack_overflow" in
let expect_descrfiption =
"Too many recursive calls were needed for unparsing"
in
if
Astring.String.is_infix ~affix:expect_id trace_string
&& Astring.String.is_infix ~affix:expect_descrfiption trace_string
then return_unit
else
Alcotest.failf
"Unexpected error (%s) at %s"
trace_string
__LOC__
return_unit
let location = function
| Prim (loc, _, _, _)
| Int (loc, _)
| String (loc, _)
| Bytes (loc, _)
| Seq (loc, _) ->
loc
let test_parse_ty ctxt node expected =
let legacy = false in
let allow_lazy_storage = true in
let allow_operation = true in
let allow_contract = true in
let allow_ticket = true in
Environment.wrap_tzresult
( Script_ir_translator.parse_ty
ctxt
~legacy
~allow_lazy_storage
~allow_operation
~allow_contract
~allow_ticket
node
>>? fun (Script_ir_translator.Ex_ty actual, ctxt) ->
Script_ir_translator.ty_eq ctxt (location node) actual expected
>|? fun (_, ctxt) -> ctxt )
let test_parse_comb_type () =
let open Script in
let open Script_typed_ir in
let nat_prim = Prim (-1, T_nat, [], []) in
let nat_prim_a = Prim (-1, T_nat, [], ["%a"]) in
let nat_prim_b = Prim (-1, T_nat, [], ["%b"]) in
let nat_prim_c = Prim (-1, T_nat, [], ["%c"]) in
let nat_ty = nat_t ~annot:None in
let pair_prim l = Prim (-1, T_pair, l, []) in
let pair_ty ty1 ty2 =
pair_t (-1) (ty1, None, None) (ty2, None, None) ~annot:None
in
let pair_prim2 a b = pair_prim [a; b] in
let pair_nat_nat_prim = pair_prim2 nat_prim nat_prim in
pair_ty nat_ty nat_ty >>??= fun pair_nat_nat_ty ->
test_context () >>=? fun ctxt ->
pair
test_parse_ty ctxt pair_nat_nat_prim pair_nat_nat_ty >>?= fun ctxt ->
pair ( pair )
pair_ty pair_nat_nat_ty nat_ty >>??= fun pair_pair_nat_nat_nat_ty ->
test_parse_ty
ctxt
(pair_prim2 pair_nat_nat_prim nat_prim)
pair_pair_nat_nat_nat_ty
>>?= fun ctxt ->
pair ( pair )
pair_ty nat_ty pair_nat_nat_ty >>??= fun pair_nat_pair_nat_nat_ty ->
test_parse_ty
ctxt
(pair_prim2 nat_prim pair_nat_nat_prim)
pair_nat_pair_nat_nat_ty
>>?= fun ctxt ->
pair
pair_ty nat_ty pair_nat_nat_ty >>??= fun pair_nat_nat_nat_ty ->
test_parse_ty
ctxt
(pair_prim [nat_prim; nat_prim; nat_prim])
pair_nat_nat_nat_ty
>>?= fun ctxt ->
pair ( nat % a ) nat
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(nat_ty, None, None)
~annot:None
>>??= fun pair_nat_a_nat_ty ->
test_parse_ty ctxt (pair_prim2 nat_prim_a nat_prim) pair_nat_a_nat_ty
>>?= fun ctxt ->
pair_t
(-1)
(nat_ty, None, None)
(nat_ty, Some (Field_annot "b"), None)
~annot:None
>>??= fun pair_nat_nat_b_ty ->
test_parse_ty ctxt (pair_prim2 nat_prim nat_prim_b) pair_nat_nat_b_ty
>>?= fun ctxt ->
pair ( nat % a ) ( nat % b )
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(nat_ty, Some (Field_annot "b"), None)
~annot:None
>>??= fun pair_nat_a_nat_b_ty ->
test_parse_ty ctxt (pair_prim2 nat_prim_a nat_prim_b) pair_nat_a_nat_b_ty
>>?= fun ctxt ->
pair ( nat % a ) ( nat % b ) ( nat % c )
pair_t
(-1)
(nat_ty, Some (Field_annot "b"), None)
(nat_ty, Some (Field_annot "c"), None)
~annot:None
>>??= fun pair_nat_b_nat_c_ty ->
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(pair_nat_b_nat_c_ty, None, None)
~annot:None
>>??= fun pair_nat_a_nat_b_nat_c_ty ->
test_parse_ty
ctxt
(pair_prim [nat_prim_a; nat_prim_b; nat_prim_c])
pair_nat_a_nat_b_nat_c_ty
>>?= fun ctxt ->
pair ( nat % a ) ( pair % b )
pair_t (-1) (nat_ty, None, None) (nat_ty, None, None) ~annot:None
>>??= fun pair_b_nat_nat_ty ->
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(pair_b_nat_nat_ty, Some (Field_annot "b"), None)
~annot:None
>>??= fun pair_nat_a_pair_b_nat_nat_ty ->
test_parse_ty
ctxt
(pair_prim2 nat_prim_a (Prim (-1, T_pair, [nat_prim; nat_prim], ["%b"])))
pair_nat_a_pair_b_nat_nat_ty
>>?= fun _ -> return_unit
let test_unparse_ty loc ctxt expected ty =
Environment.wrap_tzresult
( Script_ir_translator.unparse_ty ctxt ty >>? fun (actual, ctxt) ->
if actual = expected then ok ctxt
else Alcotest.failf "Unexpected error: %s" loc )
let test_unparse_comb_type () =
let open Script in
let open Script_typed_ir in
let nat_prim = Prim (-1, T_nat, [], []) in
let nat_prim_a = Prim (-1, T_nat, [], ["%a"]) in
let nat_prim_b = Prim (-1, T_nat, [], ["%b"]) in
let nat_prim_c = Prim (-1, T_nat, [], ["%c"]) in
let nat_ty = nat_t ~annot:None in
let pair_prim l = Prim (-1, T_pair, l, []) in
let pair_ty ty1 ty2 =
pair_t (-1) (ty1, None, None) (ty2, None, None) ~annot:None
in
let pair_prim2 a b = pair_prim [a; b] in
let pair_nat_nat_prim = pair_prim2 nat_prim nat_prim in
pair_ty nat_ty nat_ty >>??= fun pair_nat_nat_ty ->
test_context () >>=? fun ctxt ->
pair
test_unparse_ty __LOC__ ctxt pair_nat_nat_prim pair_nat_nat_ty
>>?= fun ctxt ->
pair ( pair )
pair_ty pair_nat_nat_ty nat_ty >>??= fun pair_pair_nat_nat_nat_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 pair_nat_nat_prim nat_prim)
pair_pair_nat_nat_nat_ty
>>?= fun ctxt ->
pair
pair_ty nat_ty pair_nat_nat_ty >>??= fun pair_nat_nat_nat_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim [nat_prim; nat_prim; nat_prim])
pair_nat_nat_nat_ty
>>?= fun ctxt ->
pair ( nat % a ) nat
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(nat_ty, None, None)
~annot:None
>>??= fun pair_nat_a_nat_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 nat_prim_a nat_prim)
pair_nat_a_nat_ty
>>?= fun ctxt ->
pair_t
(-1)
(nat_ty, None, None)
(nat_ty, Some (Field_annot "b"), None)
~annot:None
>>??= fun pair_nat_nat_b_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 nat_prim nat_prim_b)
pair_nat_nat_b_ty
>>?= fun ctxt ->
pair ( nat % a ) ( nat % b )
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(nat_ty, Some (Field_annot "b"), None)
~annot:None
>>??= fun pair_nat_a_nat_b_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 nat_prim_a nat_prim_b)
pair_nat_a_nat_b_ty
>>?= fun ctxt ->
pair ( nat % a ) ( nat % b ) ( nat % c )
pair_t
(-1)
(nat_ty, Some (Field_annot "b"), None)
(nat_ty, Some (Field_annot "c"), None)
~annot:None
>>??= fun pair_nat_b_nat_c_ty ->
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(pair_nat_b_nat_c_ty, None, None)
~annot:None
>>??= fun pair_nat_a_nat_b_nat_c_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim [nat_prim_a; nat_prim_b; nat_prim_c])
pair_nat_a_nat_b_nat_c_ty
>>?= fun ctxt ->
pair ( nat % a ) ( pair % b )
pair_t (-1) (nat_ty, None, None) (nat_ty, None, None) ~annot:None
>>??= fun pair_nat_nat_ty ->
pair_t
(-1)
(nat_ty, Some (Field_annot "a"), None)
(pair_nat_nat_ty, Some (Field_annot "b"), None)
~annot:None
>>??= fun pair_nat_a_pair_b_nat_nat_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 nat_prim_a (Prim (-1, T_pair, [nat_prim; nat_prim], ["%b"])))
pair_nat_a_pair_b_nat_nat_ty
>>?= fun ctxt ->
pair ( pair @b )
pair_t
(-1)
(nat_ty, None, None)
(pair_nat_nat_ty, None, Some (Var_annot "b"))
~annot:None
>>??= fun pair_nat_pair_b_nat_nat_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 nat_prim (Prim (-1, T_pair, [nat_prim; nat_prim], ["@b"])))
pair_nat_pair_b_nat_nat_ty
>>?= fun ctxt ->
pair ( pair : b )
pair_t
(-1)
(nat_ty, None, None)
(nat_ty, None, None)
~annot:(Some (Type_annot "b"))
>>??= fun pair_b_nat_nat_ty ->
pair_t (-1) (nat_ty, None, None) (pair_b_nat_nat_ty, None, None) ~annot:None
>>??= fun pair_nat_pair_b_nat_nat_ty ->
test_unparse_ty
__LOC__
ctxt
(pair_prim2 nat_prim (Prim (-1, T_pair, [nat_prim; nat_prim], [":b"])))
pair_nat_pair_b_nat_nat_ty
>>?= fun _ -> return_unit
let test_unparse_comparable_ty loc ctxt expected ty =
let open Script_typed_ir in
Environment.wrap_tzresult
( set_t (-1) ty ~annot:None >>? fun set_ty_ty ->
Script_ir_translator.unparse_ty ctxt set_ty_ty >>? fun (actual, ctxt) ->
if actual = Prim (-1, T_set, [expected], []) then ok ctxt
else Alcotest.failf "Unexpected error: %s" loc )
let test_unparse_comb_comparable_type () =
let open Script in
let open Script_typed_ir in
let nat_prim = Prim (-1, T_nat, [], []) in
let nat_prim_a = Prim (-1, T_nat, [], ["%a"]) in
let nat_prim_b = Prim (-1, T_nat, [], ["%b"]) in
let nat_prim_c = Prim (-1, T_nat, [], ["%c"]) in
let nat_ty = nat_key ~annot:None in
let pair_prim l = Prim (-1, T_pair, l, []) in
let pair_ty ty1 ty2 = pair_key (-1) (ty1, None) (ty2, None) ~annot:None in
let pair_prim2 a b = pair_prim [a; b] in
let pair_nat_nat_prim = pair_prim2 nat_prim nat_prim in
pair_ty nat_ty nat_ty >>??= fun pair_nat_nat_ty ->
test_context () >>=? fun ctxt ->
pair
test_unparse_comparable_ty __LOC__ ctxt pair_nat_nat_prim pair_nat_nat_ty
>>?= fun ctxt ->
pair ( pair )
pair_ty pair_nat_nat_ty nat_ty >>??= fun pair_pair_nat_nat_nat_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim2 pair_nat_nat_prim nat_prim)
pair_pair_nat_nat_nat_ty
>>?= fun ctxt ->
pair
pair_ty nat_ty pair_nat_nat_ty >>??= fun pair_nat_nat_nat_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim [nat_prim; nat_prim; nat_prim])
pair_nat_nat_nat_ty
>>?= fun ctxt ->
pair ( nat % a ) nat
pair_key (-1) (nat_ty, Some (Field_annot "a")) (nat_ty, None) ~annot:None
>>??= fun pair_nat_a_nat_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim2 nat_prim_a nat_prim)
pair_nat_a_nat_ty
>>?= fun ctxt ->
pair_key (-1) (nat_ty, None) (nat_ty, Some (Field_annot "b")) ~annot:None
>>??= fun pair_nat_nat_b_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim2 nat_prim nat_prim_b)
pair_nat_nat_b_ty
>>?= fun ctxt ->
pair ( nat % a ) ( nat % b )
pair_key
(-1)
(nat_ty, Some (Field_annot "a"))
(nat_ty, Some (Field_annot "b"))
~annot:None
>>??= fun pair_nat_a_nat_b_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim2 nat_prim_a nat_prim_b)
pair_nat_a_nat_b_ty
>>?= fun ctxt ->
pair ( nat % a ) ( nat % b ) ( nat % c )
pair_key
(-1)
(nat_ty, Some (Field_annot "b"))
(nat_ty, Some (Field_annot "c"))
~annot:None
>>??= fun pair_nat_b_nat_c_ty ->
pair_key
(-1)
(nat_ty, Some (Field_annot "a"))
(pair_nat_b_nat_c_ty, None)
~annot:None
>>??= fun pair_nat_a_nat_b_nat_c_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim [nat_prim_a; nat_prim_b; nat_prim_c])
pair_nat_a_nat_b_nat_c_ty
>>?= fun ctxt ->
pair ( nat % a ) ( pair % b )
pair_key
(-1)
(nat_ty, Some (Field_annot "a"))
(pair_nat_nat_ty, Some (Field_annot "b"))
~annot:None
>>??= fun pair_nat_a_pair_b_nat_nat_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim2 nat_prim_a (Prim (-1, T_pair, [nat_prim; nat_prim], ["%b"])))
pair_nat_a_pair_b_nat_nat_ty
>>?= fun ctxt ->
pair ( pair : b )
pair_key (-1) (nat_ty, None) (nat_ty, None) ~annot:(Some (Type_annot "b"))
>>??= fun pair_b_nat_nat_ty ->
pair_key (-1) (nat_ty, None) (pair_b_nat_nat_ty, None) ~annot:None
>>??= fun pair_nat_pair_b_nat_nat_ty ->
test_unparse_comparable_ty
__LOC__
ctxt
(pair_prim2 nat_prim (Prim (-1, T_pair, [nat_prim; nat_prim], [":b"])))
pair_nat_pair_b_nat_nat_ty
>>?= fun _ -> return_unit
let test_parse_data ?(equal = Stdlib.( = )) loc ctxt ty node expected =
let legacy = false in
let allow_forged = true in
wrap_error_lwt
( Script_ir_translator.parse_data ctxt ~legacy ~allow_forged ty node
>>=? fun (actual, ctxt) ->
if equal actual expected then return ctxt
else Alcotest.failf "Unexpected error: %s" loc )
let test_parse_data_fails loc ctxt ty node =
let legacy = false in
let allow_forged = false in
wrap_error_lwt
(Script_ir_translator.parse_data ctxt ~legacy ~allow_forged ty node
>>= function
| Ok _ -> Alcotest.failf "Unexpected typechecking success: %s" loc
| Error trace ->
let trace_string =
Format.asprintf "%a" Environment.Error_monad.pp_trace trace
in
let expect_id = "michelson_v1.invalid_constant" in
let expect_descrfiption =
"A data expression was invalid for its expected type."
in
if
Astring.String.is_infix ~affix:expect_id trace_string
&& Astring.String.is_infix ~affix:expect_descrfiption trace_string
then return_unit
else
Alcotest.failf
"Unexpected error (%s) at %s"
trace_string
__LOC__
return_unit)
let test_parse_comb_data () =
let open Script in
let open Script_typed_ir in
let z = Script_int.zero_n in
let z_prim = Micheline.Int (-1, Z.zero) in
let nat_ty = nat_t ~annot:None in
let pair_prim l = Prim (-1, D_Pair, l, []) in
let pair_ty ty1 ty2 =
pair_t (-1) (ty1, None, None) (ty2, None, None) ~annot:None
in
pair_ty nat_ty nat_ty >>??= fun pair_nat_nat_ty ->
let pair_prim2 a b = pair_prim [a; b] in
let pair_z_z_prim = pair_prim2 z_prim z_prim in
list_t (-1) nat_ty ~annot:None >>??= fun list_nat_ty ->
big_map_t (-1) (nat_key ~annot:None) nat_ty ~annot:None
>>??= fun big_map_nat_nat_ty ->
test_context_with_nat_nat_big_map () >>=? fun (ctxt, big_map_id) ->
test_parse_data __LOC__ ctxt pair_nat_nat_ty pair_z_z_prim (z, z)
>>=? fun ctxt ->
test_parse_data
__LOC__
ctxt
pair_nat_nat_ty
(Micheline.Seq (-1, [z_prim; z_prim]))
(z, z)
>>=? fun ctxt ->
Pair ( Pair 0 0 ) 0
pair_ty pair_nat_nat_ty nat_ty >>??= fun pair_pair_nat_nat_nat_ty ->
test_parse_data
__LOC__
ctxt
pair_pair_nat_nat_nat_ty
(pair_prim2 pair_z_z_prim z_prim)
((z, z), z)
>>=? fun ctxt ->
pair_ty nat_ty pair_nat_nat_ty >>??= fun pair_nat_pair_nat_nat_ty ->
test_parse_data
__LOC__
ctxt
pair_nat_pair_nat_nat_ty
(pair_prim2 z_prim pair_z_z_prim)
(z, (z, z))
>>=? fun ctxt ->
test_parse_data
__LOC__
ctxt
pair_nat_pair_nat_nat_ty
(pair_prim [z_prim; z_prim; z_prim])
(z, (z, z))
>>=? fun ctxt ->
{ 0 ; 0 ; 0 }
test_parse_data
__LOC__
ctxt
pair_nat_pair_nat_nat_ty
(Micheline.Seq (-1, [z_prim; z_prim; z_prim]))
(z, (z, z))
>>=? fun ctxt ->
Should fail : { 0 } against ( list )
pair_ty nat_ty list_nat_ty >>??= fun pair_nat_list_nat_ty ->
test_parse_data_fails
__LOC__
ctxt
pair_nat_list_nat_ty
(Micheline.Seq (-1, [z_prim]))
>>=? fun () ->
Should fail : { 0 ; 0 ; 0 } against ( list )
test_parse_data_fails
__LOC__
ctxt
pair_nat_list_nat_ty
(Micheline.Seq (-1, [z_prim; z_prim; z_prim]))
>>=? fun () ->
check Pair 0 ( Pair 0 { } ) against ( big_map )
so that the following test fails for the good reason and not because
the big map does n't exist
so that the following test fails for the good reason and not because
the big map doesn't exist
*)
let id_z = Big_map.Id.unparse_to_z big_map_id in
let id_prim = Int (-1, id_z) in
let expected_big_map =
let open Script_typed_ir in
let diff = {map = Big_map_overlay.empty; size = 0} in
let nat_key_ty = nat_key ~annot:None in
{id = Some big_map_id; diff; key_type = nat_key_ty; value_type = nat_ty}
in
let equal (nat1, big_map1) (nat2, big_map2) =
nat1 = nat2 && big_map1.id = big_map2.id
&& big_map1.key_type = big_map2.key_type
&& big_map1.value_type = big_map2.value_type
&& big_map1.diff.size = big_map2.diff.size
&& Big_map_overlay.bindings big_map1.diff.map
= Big_map_overlay.bindings big_map2.diff.map
in
pair_ty nat_ty big_map_nat_nat_ty >>??= fun pair_nat_big_map_nat_nat_ty ->
test_parse_data
~equal
__LOC__
ctxt
pair_nat_big_map_nat_nat_ty
(pair_prim2 z_prim (pair_prim2 id_prim (Seq (-1, []))))
(Script_int.zero_n, expected_big_map)
>>=? fun ctxt ->
Should fail : Pair 0 0 { } against ( big_map )
test_parse_data_fails
__LOC__
ctxt
pair_nat_big_map_nat_nat_ty
(pair_prim [z_prim; id_prim; Seq (-1, [])])
let test_parse_address () =
let open Script_typed_ir in
test_context_with_nat_nat_big_map () >>=? fun (ctxt, _big_map_id) ->
wrap_error_lwt
(Lwt.return (Contract.of_b58check "KT1FAKEFAKEFAKEFAKEFAKEFAKEFAKGGSE2x"))
>>=? fun kt1fake ->
test_parse_data
__LOC__
ctxt
(address_t ~annot:None)
(String (-1, "KT1FAKEFAKEFAKEFAKEFAKEFAKEFAKGGSE2x%"))
(kt1fake, "default")
>>=? fun ctxt ->
wrap_error_lwt
(Lwt.return (Contract.of_b58check "tz1fakefakefakefakefakefakefakcphLA5"))
>>=? fun tz1fake ->
test_parse_data
__LOC__
ctxt
(address_t ~annot:None)
(String (-1, "tz1fakefakefakefakefakefakefakcphLA5%"))
(tz1fake, "default")
>|=? fun _ctxt -> ()
let test_unparse_data loc ctxt ty x ~expected_readable ~expected_optimized =
wrap_error_lwt
( Script_ir_translator.unparse_data ctxt Script_ir_translator.Readable ty x
>>=? fun (actual_readable, ctxt) ->
(if actual_readable = expected_readable then return ctxt
else Alcotest.failf "Error in readable unparsing: %s" loc)
>>=? fun ctxt ->
Script_ir_translator.unparse_data ctxt Script_ir_translator.Optimized ty x
>>=? fun (actual_optimized, ctxt) ->
if actual_optimized = expected_optimized then return ctxt
else Alcotest.failf "Error in optimized unparsing: %s" loc )
let test_unparse_comb_data () =
let open Script in
let open Script_typed_ir in
let z = Script_int.zero_n in
let z_prim = Micheline.Int (-1, Z.zero) in
let nat_ty = nat_t ~annot:None in
let pair_prim l = Prim (-1, D_Pair, l, []) in
let pair_ty ty1 ty2 =
pair_t (-1) (ty1, None, None) (ty2, None, None) ~annot:None
in
pair_ty nat_ty nat_ty >>??= fun pair_nat_nat_ty ->
let pair_prim2 a b = pair_prim [a; b] in
let pair_z_z_prim = pair_prim2 z_prim z_prim in
test_context () >>=? fun ctxt ->
test_unparse_data
__LOC__
ctxt
pair_nat_nat_ty
(z, z)
~expected_readable:pair_z_z_prim
~expected_optimized:pair_z_z_prim
>>=? fun ctxt ->
Pair ( Pair 0 0 ) 0
pair_ty pair_nat_nat_ty nat_ty >>??= fun pair_pair_nat_nat_nat_ty ->
test_unparse_data
__LOC__
ctxt
pair_pair_nat_nat_nat_ty
((z, z), z)
~expected_readable:(pair_prim2 pair_z_z_prim z_prim)
~expected_optimized:(pair_prim2 pair_z_z_prim z_prim)
>>=? fun ctxt ->
pair_ty nat_ty pair_nat_nat_ty >>??= fun pair_nat_pair_nat_nat_ty ->
test_unparse_data
__LOC__
ctxt
pair_nat_pair_nat_nat_ty
(z, (z, z))
~expected_readable:(pair_prim [z_prim; z_prim; z_prim])
~expected_optimized:(pair_prim2 z_prim pair_z_z_prim)
>>=? fun ctxt ->
Readable : Pair 0 0 0 0 ; Optimized : { 0 ; 0 ; 0 ; 0 }
pair_ty nat_ty pair_nat_pair_nat_nat_ty
>>??= fun pair_nat_pair_nat_pair_nat_nat_ty ->
test_unparse_data
__LOC__
ctxt
pair_nat_pair_nat_pair_nat_nat_ty
(z, (z, (z, z)))
~expected_readable:(pair_prim [z_prim; z_prim; z_prim; z_prim])
~expected_optimized:(Micheline.Seq (-1, [z_prim; z_prim; z_prim; z_prim]))
>>=? fun _ -> return_unit
let gen_pairs left right =
[Prim (-1, Script.D_Pair, [left; right], []); Seq (-1, [left; right])]
let rec gen_combs leaf arity =
assert (arity >= 2) ;
if arity = 2 then gen_pairs leaf leaf
else
gen_combs leaf (arity - 1)
|> List.map (fun smaller ->
(match smaller with
| Prim (loc, Script.D_Pair, vs, []) ->
Prim (loc, Script.D_Pair, leaf :: vs, [])
| Seq (loc, vs) -> Seq (loc, leaf :: vs)
| _ -> assert false)
:: gen_pairs leaf smaller)
|> List.flatten
let test_optimal_comb () =
let open Script_typed_ir in
let leaf_ty = nat_t ~annot:None in
let leaf_mich = Int (-1, Z.zero) in
let leaf_v = Script_int.zero_n in
let size_of_micheline mich =
let canonical = Micheline.strip_locations mich in
( canonical,
Bytes.length
@@ Data_encoding.Binary.to_bytes_exn Script.expr_encoding canonical )
in
let check_optimal_comb loc ctxt ty v arity =
wrap_error_lwt
( Script_ir_translator.unparse_data
ctxt
Script_ir_translator.Optimized
ty
v
>>=? fun (unparsed, ctxt) ->
let (unparsed_canonical, unparsed_size) = size_of_micheline unparsed in
List.iter_es (fun other_repr ->
let (other_repr_canonical, other_repr_size) =
size_of_micheline other_repr
in
if other_repr_size < unparsed_size then
Alcotest.failf
"At %s, for comb of arity %d, representation %a (size %d \
bytes) is shorter than representation %a (size %d bytes) \
returned by unparse_data in Optimized mode"
loc
arity
Michelson_v1_printer.print_expr
other_repr_canonical
other_repr_size
Michelson_v1_printer.print_expr
unparsed_canonical
unparsed_size
else return_unit)
@@ gen_combs leaf_mich arity
>>=? fun () -> return ctxt )
in
let pair_ty ty1 ty2 =
pair_t (-1) (ty1, None, None) (ty2, None, None) ~annot:None
in
test_context () >>=? fun ctxt ->
pair_ty leaf_ty leaf_ty >>??= fun comb2_ty ->
let comb2_v = (leaf_v, leaf_v) in
check_optimal_comb __LOC__ ctxt comb2_ty comb2_v 2 >>=? fun ctxt ->
pair_ty leaf_ty comb2_ty >>??= fun comb3_ty ->
let comb3_v = (leaf_v, comb2_v) in
check_optimal_comb __LOC__ ctxt comb3_ty comb3_v 3 >>=? fun ctxt ->
pair_ty leaf_ty comb3_ty >>??= fun comb4_ty ->
let comb4_v = (leaf_v, comb3_v) in
check_optimal_comb __LOC__ ctxt comb4_ty comb4_v 4 >>=? fun ctxt ->
pair_ty leaf_ty comb4_ty >>??= fun comb5_ty ->
let comb5_v = (leaf_v, comb4_v) in
check_optimal_comb __LOC__ ctxt comb5_ty comb5_v 5 >>=? fun _ctxt ->
return_unit
let tests =
[
Tztest.tztest "test unparse view" `Quick test_unparse_view;
Tztest.tztest
"test typecheck stack overflow error"
`Quick
test_typecheck_stack_overflow;
Tztest.tztest "test comb type parsing" `Quick test_parse_comb_type;
Tztest.tztest "test comb type unparsing" `Quick test_unparse_comb_type;
Tztest.tztest
"test comb comparable type unparsing"
`Quick
test_unparse_comb_comparable_type;
Tztest.tztest "test comb data parsing" `Quick test_parse_comb_data;
Tztest.tztest "test comb data unparsing" `Quick test_unparse_comb_data;
Tztest.tztest "test optimal comb data unparsing" `Quick test_optimal_comb;
Tztest.tztest "test parse address" `Quick test_parse_address;
]
|
864d3b017499a757e46293bfee9d740d94e6b3af258e3fc034f0360b597ef406 | prg-titech/baccaml | simple1.ml | ;;
let rec interp bytecode pc a =
jit_dispatch (pc = 0) bytecode a;
(* if pc = 0 then test_trace a bytecode else *)
let opcode = bytecode.(pc) in
if opcode = 0
then (* INCR_A *)
interp bytecode (pc + 1) (a + 1)
else if opcode = 1
then (* DECR_A *)
interp bytecode (pc + 1) (a - 1)
else if opcode = 2
then (
JUMP_IF
let target = bytecode.(pc + 1) in
if a > 0 then interp bytecode target a else interp bytecode (pc + 2) a)
else if opcode = 3
then (
JUMP
let target = bytecode.(pc + 1) in
interp bytecode target a)
else if opcode = 4
then (* RETURN_A *)
a
else -100
in
let input = Array.make 100 0 in
input.(0) <- 1;
input.(1) <- 2;
input.(2) <- 0;
input.(3) <- 4;
print_int (interp input 0 100000000)
| null | https://raw.githubusercontent.com/prg-titech/baccaml/a3b95e996a995b5004ca897a4b6419edfee590aa/test/interp_example/simple1.ml | ocaml | if pc = 0 then test_trace a bytecode else
INCR_A
DECR_A
RETURN_A | ;;
let rec interp bytecode pc a =
jit_dispatch (pc = 0) bytecode a;
let opcode = bytecode.(pc) in
if opcode = 0
interp bytecode (pc + 1) (a + 1)
else if opcode = 1
interp bytecode (pc + 1) (a - 1)
else if opcode = 2
then (
JUMP_IF
let target = bytecode.(pc + 1) in
if a > 0 then interp bytecode target a else interp bytecode (pc + 2) a)
else if opcode = 3
then (
JUMP
let target = bytecode.(pc + 1) in
interp bytecode target a)
else if opcode = 4
a
else -100
in
let input = Array.make 100 0 in
input.(0) <- 1;
input.(1) <- 2;
input.(2) <- 0;
input.(3) <- 4;
print_int (interp input 0 100000000)
|
89532136778ec8968e7917af209614d03b347737eb97956b6a66c30f64563a07 | shayne-fletcher/zen | red_black_tree.ml | module type Ordered_type_sig = sig
type t
val compare : t -> t -> int
end
module type Set_sig = sig
type element
type t
exception Empty_set
val empty : t (*The empty set*)
val add : t -> element -> t (*Add an element*)
val remove : t -> element -> t * element (*Remove an element*)
val fold : t -> 'a -> ('a -> element -> 'a) -> 'a (*Visit elements in order*)
val contains : t -> element -> bool (*Test for membership*)
val size : t -> int (*Cardinality of a set*)
The union of two sets
The intersection of two sets
val min_element : t -> element (*The minimum value of the set*)
val max_element : t -> element (*The maximum value of the set*)
Construct set from unordered list
end
module type SET = sig
(*Input signature of the functor [Make]*)
module type Ordered_type = Ordered_type_sig
(*Output signature of the functor [Make]*)
module type S = Set_sig
(*Functor building an implementation of the set structure given a
totally ordered type*)
module Make : functor (Ord : Ordered_type) -> S with type element = Ord.t
end
module Red_black_tree : SET = struct
(*A red-black tree is a binary search tree with additional
representation invariants:
- No red node has a red parent;
- Every path from the root to an empty node has the same number of
black nodes: the black height of the tree
*)
module type Ordered_type = Ordered_type_sig
module type S = Set_sig
module Make (Ord : Ordered_type)(* : (S with type element = Ord.t)*) = struct
type element = Ord.t
type color = Red | Black
type tree = Empty | Node of node
and node = { value : element; left : tree; right : tree; color : color }
type t = tree
let empty : t = Empty
let rec add (s : t) (x : element) : t =
let make_black (s : t) : t =
match s with
| Empty -> Empty
| Node ({value;left;right;color} as r) ->
Node {r with color = Black } in
let rotate x y z a b c d : t =
Node { value = y;
left = Node {color = Black; value = x; left = a; right = b};
right = Node {color = Black; value = z; left = c; right = d};
color = Red } in
let balance (s : t) : t =
match s with
1
left =
Node {color = Red; value = y;
left = Node {color = Red; value = x;
left = a; right = b};
right = c};
right = d} ->
rotate x y z a b c d
2
left = Node {color=Red; value = x;
left = a; right = Node {color=Red; value = y;
left = b; right = c}};
right=d} ->
rotate x y z a b c d
3
left = a;
right = Node {color=Red; value=z;
left = Node{color=Red; value=y;
left=b; right= c};
right=d}} ->
rotate x y z a b c d
4
left=a;
right=Node{color=Red;value=y;
left=b;
right=Node{color=Red;value=z;
left=c;right=d}}} ->
rotate x y z a b c d
| _ -> s in
let rec walk (s : t) : t =
match s with
| Empty -> Node {color=Red; value=x; left=Empty; right=Empty}
| Node {color; value; left; right} ->
let cmp = compare x value in
if cmp = 0 then s
else if cmp < 0 then
balance (Node {color=color;
value =value;
left = walk left;
right = right})
else
balance (Node {color = color;
value = value;
left = left;
right = walk right})
in make_black (walk s)
let of_list (l : element list) : t =
List.fold_left (fun s e -> add s e) empty l
end
end
(*Test*)
(*
(*Prepare an [Set.Ordered_type] module to pass as argument
to [Set.Make]*)
module Int : Red_black_tree.Ordered_type with type t = int = struct
type t = int
let compare = Pervasives.compare
end
(*Make a set*)
module Int_set = Binary_search_tree.Make (Int)
module Int_set = Red_black_tree.Make (Int)
type 'a set = (module Set.S with type element = 'a)
*)
| null | https://raw.githubusercontent.com/shayne-fletcher/zen/10a1d0b9bf261bb133918dd62fb1593c3d4d21cb/ocaml/red_black_tree/red_black_tree.ml | ocaml | The empty set
Add an element
Remove an element
Visit elements in order
Test for membership
Cardinality of a set
The minimum value of the set
The maximum value of the set
Input signature of the functor [Make]
Output signature of the functor [Make]
Functor building an implementation of the set structure given a
totally ordered type
A red-black tree is a binary search tree with additional
representation invariants:
- No red node has a red parent;
- Every path from the root to an empty node has the same number of
black nodes: the black height of the tree
: (S with type element = Ord.t)
Test
(*Prepare an [Set.Ordered_type] module to pass as argument
to [Set.Make]
Make a set | module type Ordered_type_sig = sig
type t
val compare : t -> t -> int
end
module type Set_sig = sig
type element
type t
exception Empty_set
The union of two sets
The intersection of two sets
Construct set from unordered list
end
module type SET = sig
module type Ordered_type = Ordered_type_sig
module type S = Set_sig
module Make : functor (Ord : Ordered_type) -> S with type element = Ord.t
end
module Red_black_tree : SET = struct
module type Ordered_type = Ordered_type_sig
module type S = Set_sig
type element = Ord.t
type color = Red | Black
type tree = Empty | Node of node
and node = { value : element; left : tree; right : tree; color : color }
type t = tree
let empty : t = Empty
let rec add (s : t) (x : element) : t =
let make_black (s : t) : t =
match s with
| Empty -> Empty
| Node ({value;left;right;color} as r) ->
Node {r with color = Black } in
let rotate x y z a b c d : t =
Node { value = y;
left = Node {color = Black; value = x; left = a; right = b};
right = Node {color = Black; value = z; left = c; right = d};
color = Red } in
let balance (s : t) : t =
match s with
1
left =
Node {color = Red; value = y;
left = Node {color = Red; value = x;
left = a; right = b};
right = c};
right = d} ->
rotate x y z a b c d
2
left = Node {color=Red; value = x;
left = a; right = Node {color=Red; value = y;
left = b; right = c}};
right=d} ->
rotate x y z a b c d
3
left = a;
right = Node {color=Red; value=z;
left = Node{color=Red; value=y;
left=b; right= c};
right=d}} ->
rotate x y z a b c d
4
left=a;
right=Node{color=Red;value=y;
left=b;
right=Node{color=Red;value=z;
left=c;right=d}}} ->
rotate x y z a b c d
| _ -> s in
let rec walk (s : t) : t =
match s with
| Empty -> Node {color=Red; value=x; left=Empty; right=Empty}
| Node {color; value; left; right} ->
let cmp = compare x value in
if cmp = 0 then s
else if cmp < 0 then
balance (Node {color=color;
value =value;
left = walk left;
right = right})
else
balance (Node {color = color;
value = value;
left = left;
right = walk right})
in make_black (walk s)
let of_list (l : element list) : t =
List.fold_left (fun s e -> add s e) empty l
end
end
module Int : Red_black_tree.Ordered_type with type t = int = struct
type t = int
let compare = Pervasives.compare
end
module Int_set = Binary_search_tree.Make (Int)
module Int_set = Red_black_tree.Make (Int)
type 'a set = (module Set.S with type element = 'a)
*)
|
9093d88d9dc82a5db4a5aa6c36990ef1b5158f123ca4085d81f86f5b29583d21 | rjohnsondev/haskellshop | Feedback.hs | {-# LANGUAGE OverloadedStrings, ScopedTypeVariables, NoMonomorphismRestriction, GADTs #-}
module Admin.Feedback where
import Application
import Data.ByteString (ByteString)
import Data.Text
import Snap.Core
import Snap.Snaplet
import Snap.Snaplet.Heist
import Snap.Snaplet.Session
import Text.Printf
import qualified Data.ByteString.Char8 as C8
import qualified Control.Monad.CatchIO as CIO
import qualified Control.Exception as CE
msg :: Text -> Text -> Handler App App ()
msg level m = with sess $ do
setInSession level m
commitSession
msgRedirect :: Text -> ByteString -> Text -> Handler App App ()
msgRedirect level url m = do
msg level m
redirect url
info :: Text -> Handler App App ()
info = msg "info"
infoRedirect :: ByteString -> Text -> Handler App App ()
infoRedirect = msgRedirect "info"
danger :: Text -> Handler App App ()
danger = msg "danger"
dangerRedirect :: ByteString -> Text -> Handler App App ()
dangerRedirect = msgRedirect "danger"
catchErrors :: HasHeist b => Handler b v () -> Handler b v ()
catchErrors action = action `CIO.catch` (\(ex::CE.SomeException) -> do
logError $ C8.pack (printf "Unexpected exception: %s" (show ex))
modifyResponse $ setResponseCode 500
render "err")
| null | https://raw.githubusercontent.com/rjohnsondev/haskellshop/645f9e40b9843fc987d37a0ee58460929ef50aae/src/Admin/Feedback.hs | haskell | # LANGUAGE OverloadedStrings, ScopedTypeVariables, NoMonomorphismRestriction, GADTs # |
module Admin.Feedback where
import Application
import Data.ByteString (ByteString)
import Data.Text
import Snap.Core
import Snap.Snaplet
import Snap.Snaplet.Heist
import Snap.Snaplet.Session
import Text.Printf
import qualified Data.ByteString.Char8 as C8
import qualified Control.Monad.CatchIO as CIO
import qualified Control.Exception as CE
msg :: Text -> Text -> Handler App App ()
msg level m = with sess $ do
setInSession level m
commitSession
msgRedirect :: Text -> ByteString -> Text -> Handler App App ()
msgRedirect level url m = do
msg level m
redirect url
info :: Text -> Handler App App ()
info = msg "info"
infoRedirect :: ByteString -> Text -> Handler App App ()
infoRedirect = msgRedirect "info"
danger :: Text -> Handler App App ()
danger = msg "danger"
dangerRedirect :: ByteString -> Text -> Handler App App ()
dangerRedirect = msgRedirect "danger"
catchErrors :: HasHeist b => Handler b v () -> Handler b v ()
catchErrors action = action `CIO.catch` (\(ex::CE.SomeException) -> do
logError $ C8.pack (printf "Unexpected exception: %s" (show ex))
modifyResponse $ setResponseCode 500
render "err")
|
90161e1649e172aeeb3489c12ca5e7da66b5565bdebe1f2af152d40bccfc2451 | ocaml-multicore/parafuzz | lazy_.ml | (* TEST *)
let rec c = lazy (0 + d) and d = 3;;
let () = Printf.printf "%d\n" (Lazy.force c)
| null | https://raw.githubusercontent.com/ocaml-multicore/parafuzz/6a92906f1ba03287ffcb433063bded831a644fd5/testsuite/tests/letrec-compilation/lazy_.ml | ocaml | TEST |
let rec c = lazy (0 + d) and d = 3;;
let () = Printf.printf "%d\n" (Lazy.force c)
|
165bf30ec91223613096ae4a3d8e852c36144688b86fdd0a89042900c193f814 | racket/plot | lazy-snip-types.rkt | #lang typed/racket/base
(require (only-in typed/mred/mred Snip% Frame% Bitmap%)
plot/utils)
(provide (all-defined-out))
(define-type Plot-Snip% (Class #:implements Snip% #:implements Plot-Metrics<%>))
(define-type Make-2D-Plot-Snip
(-> (Instance Bitmap%)
Plot-Parameters
(-> Boolean Rect Positive-Integer Positive-Integer
(Values (Instance Bitmap%) (U #f (Instance 2D-Plot-Area%))))
Rect
(U #f (Instance 2D-Plot-Area%))
Positive-Integer
Positive-Integer
(Instance Plot-Snip%)))
(define-type Make-3D-Plot-Snip
(-> (Instance Bitmap%)
Plot-Parameters
(-> Boolean Real Real Positive-Integer Positive-Integer
(Values (Instance Bitmap%) (U #f (Instance 3D-Plot-Area%))))
Real
Real
(U #f (Instance 3D-Plot-Area%))
Positive-Integer
Positive-Integer
(Instance Plot-Snip%)))
(define-type Make-Snip-Frame
(-> (-> Positive-Integer Positive-Integer (Instance Snip%))
Positive-Real
Positive-Real
String
(Instance Frame%)))
| null | https://raw.githubusercontent.com/racket/plot/c4126001f2c609e36c3aa12f300e9c673ab1a806/plot-gui-lib/plot/private/gui/lazy-snip-types.rkt | racket | #lang typed/racket/base
(require (only-in typed/mred/mred Snip% Frame% Bitmap%)
plot/utils)
(provide (all-defined-out))
(define-type Plot-Snip% (Class #:implements Snip% #:implements Plot-Metrics<%>))
(define-type Make-2D-Plot-Snip
(-> (Instance Bitmap%)
Plot-Parameters
(-> Boolean Rect Positive-Integer Positive-Integer
(Values (Instance Bitmap%) (U #f (Instance 2D-Plot-Area%))))
Rect
(U #f (Instance 2D-Plot-Area%))
Positive-Integer
Positive-Integer
(Instance Plot-Snip%)))
(define-type Make-3D-Plot-Snip
(-> (Instance Bitmap%)
Plot-Parameters
(-> Boolean Real Real Positive-Integer Positive-Integer
(Values (Instance Bitmap%) (U #f (Instance 3D-Plot-Area%))))
Real
Real
(U #f (Instance 3D-Plot-Area%))
Positive-Integer
Positive-Integer
(Instance Plot-Snip%)))
(define-type Make-Snip-Frame
(-> (-> Positive-Integer Positive-Integer (Instance Snip%))
Positive-Real
Positive-Real
String
(Instance Frame%)))
|
|
990f315d66d11b1ad94bcb41bbeb2ffa6309456776f8b1d9688c8017fc401880 | softwarelanguageslab/maf | R5RS_scp1_count-pairs2-5.scm | ; Changes:
* removed : 0
* added : 2
* swaps : 1
; * negated predicates: 0
* swapped branches : 1
; * calls to id fun: 0
(letrec ((count-pairs (lambda (lst)
(let ((path ()))
(<change>
()
(lambda (current)
(if (null? current)
0
(if (not (pair? current))
0
(if (memq current path)
0
(begin
(display +)
(+ 1 (count (car current)) (count (cdr current)))
(set! path (cons current path))))))))
(letrec ((count (lambda (current)
(if (null? current)
0
(if (not (pair? current))
0
(if (memq current path)
(<change>
0
(begin
(set! path (cons current path))
(+ 1 (count (car current)) (count (cdr current)))))
(<change>
(begin
(set! path (cons current path))
(+ 1 (count (car current)) (count (cdr current))))
0)))))))
(count lst)))))
(ret3 (cons 'a (cons 'b (cons 'c ()))))
(ret4 (let ((last (cons 'c ())))
(cons last (cons 'b last))))
(ret7 (let* ((last (cons 'c ()))
(middle (cons last last)))
(cons middle middle)))
(retno (let* ((last (cons 'c ()))
(lst (cons 'a (cons 'b last))))
(set-cdr! last lst)
lst)))
(= 3 (count-pairs ret3) (count-pairs ret4) (count-pairs ret7) (count-pairs retno))) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_scp1_count-pairs2-5.scm | scheme | Changes:
* negated predicates: 0
* calls to id fun: 0 | * removed : 0
* added : 2
* swaps : 1
* swapped branches : 1
(letrec ((count-pairs (lambda (lst)
(let ((path ()))
(<change>
()
(lambda (current)
(if (null? current)
0
(if (not (pair? current))
0
(if (memq current path)
0
(begin
(display +)
(+ 1 (count (car current)) (count (cdr current)))
(set! path (cons current path))))))))
(letrec ((count (lambda (current)
(if (null? current)
0
(if (not (pair? current))
0
(if (memq current path)
(<change>
0
(begin
(set! path (cons current path))
(+ 1 (count (car current)) (count (cdr current)))))
(<change>
(begin
(set! path (cons current path))
(+ 1 (count (car current)) (count (cdr current))))
0)))))))
(count lst)))))
(ret3 (cons 'a (cons 'b (cons 'c ()))))
(ret4 (let ((last (cons 'c ())))
(cons last (cons 'b last))))
(ret7 (let* ((last (cons 'c ()))
(middle (cons last last)))
(cons middle middle)))
(retno (let* ((last (cons 'c ()))
(lst (cons 'a (cons 'b last))))
(set-cdr! last lst)
lst)))
(= 3 (count-pairs ret3) (count-pairs ret4) (count-pairs ret7) (count-pairs retno))) |
299737aad3a81338e38ef2aa0e7444d9d94445df76d4f0be9beeb7f323a80a9f | raghavkarol/edt | edt_profile_store.erl | %%
%% edt_profile_store is a in memory store for traces
%%
Copyright 2021 .
%%
-module(edt_profile_store).
-export([
crec_id/2,
crec_id/4,
find/1,
find_crecs/1, find_crecs/2,
find_srecs/0,
init_store/0, init_store/1,
srec/8,
srec_id/5,
store/1,
track_call/7,
track_summary/6,
context_id/0
]).
-include_lib("stdlib/include/ms_transform.hrl").
-include("edt_profile.hrl").
%% ---------------------------------------------------------
Internal functions
%% ---------------------------------------------------------
table_name() ->
edt_profile.
context_id() ->
erlang:phash2(make_ref()).
crec_id(Pid, #call{} = S) ->
#call{
module = M,
func = F,
seq_no = SeqNo
} = S,
crec_id(Pid, M, F, SeqNo).
crec_id(Pid, M, F, SeqNo) ->
erlang:phash2({Pid, M, F, SeqNo}).
srec_id(ContextId, Pid, M, F, Arity) ->
erlang:phash2({Pid, ContextId, M, F, Arity}).
crec(ContextId, CallerId, Pid, Call, Result, EndTime, EndReds) ->
Id = crec_id(Pid, Call),
#crec{
id = Id,
context_id = ContextId,
caller_id = CallerId,
pid = Pid,
module = Call#call.module,
func = Call#call.func,
arity = Call#call.arity,
args = Call#call.args,
result = Result,
%TODO Don't use normal as default value
return = normal,
seq_no = Call#call.seq_no,
start_time = Call#call.start_time,
end_time = EndTime,
start_reds = Call#call.start_reds,
end_reds = EndReds
}.
srec(ContextId, Pid, M, F, Arity, Count, Time, Reds) ->
Id = srec_id(ContextId, Pid, M, F, Arity),
#srec{
id = Id,
context_id = ContextId,
module = M,
func = F,
arity = Arity,
pid = Pid,
count = Count,
time = Time,
reds = Reds
}.
%% ---------------------------------------------------------
%% API
%% ---------------------------------------------------------
init_store() ->
ets:new(table_name(), [public, named_table, {keypos, #rec.id}]).
init_store(delete_all) ->
ets:delete_all_objects(table_name()).
store(#crec{id = Id} = Data) ->
Rec = #rec{id = Id, data = Data},
store(Rec);
store(#srec{id = Id} = Data) ->
Rec = #rec{id = Id, data = Data},
store(Rec);
store(#rec{} = Rec) ->
ets:insert(table_name(), Rec).
find(Id) ->
case ets:lookup(table_name(), Id) of
[#rec{data = Data}] ->
Data;
[] ->
{error, not_found}
end.
find_crecs(CallerId) ->
MS = ets:fun2ms(fun(#rec{data = #crec{caller_id = Id}} = A) when
Id == CallerId
->
A
end),
CRecs1 = [CRec || #rec{data = CRec} <- ets:select(edt_profile, MS)],
lists:sort(fun(#crec{seq_no = A}, #crec{seq_no = B}) -> A =< B end, CRecs1).
find_crecs(Module, Func) ->
MatchSpec = ets:fun2ms(
fun(#rec{data = #crec{module = M1, func = F1}} = A) when
M1 == Module, F1 == Func
->
A
end
),
Recs = ets:select(table_name(), MatchSpec),
[CRec || #rec{data = #crec{} = CRec} <- Recs].
find_srecs() ->
MatchSpec = ets:fun2ms(fun(#rec{data = #srec{}} = A) -> A end),
SRecs = [SRec || #rec{data = SRec} <- ets:select(edt_profile, MatchSpec)],
lists:sort(
fun(#srec{context_id = A}, #srec{context_id = B}) ->
A =< B
end,
SRecs
).
track_call(ContextId, CallerId, Pid, Call, Result, EndTime, EndReds) ->
CRec = crec(
ContextId,
CallerId,
Pid,
Call,
Result,
EndTime,
EndReds
),
store(CRec).
track_summary(ContextId, Pid, Call, Arity, EndTime, EndReds) ->
Time = EndTime - Call#call.start_time,
Reds = EndReds - Call#call.start_reds,
Id = srec_id(ContextId, Pid, Call#call.module, Call#call.func, Arity),
SRec1 =
case find(Id) of
#srec{} = SRec ->
SRec#srec{
reds = SRec#srec.reds + Reds,
count = SRec#srec.count + 1,
time = SRec#srec.time + Time
};
{error, not_found} ->
srec(ContextId, Pid, Call#call.module, Call#call.func, Arity, 1, Time, Reds)
end,
store(SRec1).
| null | https://raw.githubusercontent.com/raghavkarol/edt/c9ac86d4161deb7f23ff20b6ae713e77706e52fa/src/edt_profile_store.erl | erlang |
edt_profile_store is a in memory store for traces
---------------------------------------------------------
---------------------------------------------------------
TODO Don't use normal as default value
---------------------------------------------------------
API
--------------------------------------------------------- | Copyright 2021 .
-module(edt_profile_store).
-export([
crec_id/2,
crec_id/4,
find/1,
find_crecs/1, find_crecs/2,
find_srecs/0,
init_store/0, init_store/1,
srec/8,
srec_id/5,
store/1,
track_call/7,
track_summary/6,
context_id/0
]).
-include_lib("stdlib/include/ms_transform.hrl").
-include("edt_profile.hrl").
Internal functions
table_name() ->
edt_profile.
context_id() ->
erlang:phash2(make_ref()).
crec_id(Pid, #call{} = S) ->
#call{
module = M,
func = F,
seq_no = SeqNo
} = S,
crec_id(Pid, M, F, SeqNo).
crec_id(Pid, M, F, SeqNo) ->
erlang:phash2({Pid, M, F, SeqNo}).
srec_id(ContextId, Pid, M, F, Arity) ->
erlang:phash2({Pid, ContextId, M, F, Arity}).
crec(ContextId, CallerId, Pid, Call, Result, EndTime, EndReds) ->
Id = crec_id(Pid, Call),
#crec{
id = Id,
context_id = ContextId,
caller_id = CallerId,
pid = Pid,
module = Call#call.module,
func = Call#call.func,
arity = Call#call.arity,
args = Call#call.args,
result = Result,
return = normal,
seq_no = Call#call.seq_no,
start_time = Call#call.start_time,
end_time = EndTime,
start_reds = Call#call.start_reds,
end_reds = EndReds
}.
srec(ContextId, Pid, M, F, Arity, Count, Time, Reds) ->
Id = srec_id(ContextId, Pid, M, F, Arity),
#srec{
id = Id,
context_id = ContextId,
module = M,
func = F,
arity = Arity,
pid = Pid,
count = Count,
time = Time,
reds = Reds
}.
init_store() ->
ets:new(table_name(), [public, named_table, {keypos, #rec.id}]).
init_store(delete_all) ->
ets:delete_all_objects(table_name()).
store(#crec{id = Id} = Data) ->
Rec = #rec{id = Id, data = Data},
store(Rec);
store(#srec{id = Id} = Data) ->
Rec = #rec{id = Id, data = Data},
store(Rec);
store(#rec{} = Rec) ->
ets:insert(table_name(), Rec).
find(Id) ->
case ets:lookup(table_name(), Id) of
[#rec{data = Data}] ->
Data;
[] ->
{error, not_found}
end.
find_crecs(CallerId) ->
MS = ets:fun2ms(fun(#rec{data = #crec{caller_id = Id}} = A) when
Id == CallerId
->
A
end),
CRecs1 = [CRec || #rec{data = CRec} <- ets:select(edt_profile, MS)],
lists:sort(fun(#crec{seq_no = A}, #crec{seq_no = B}) -> A =< B end, CRecs1).
find_crecs(Module, Func) ->
MatchSpec = ets:fun2ms(
fun(#rec{data = #crec{module = M1, func = F1}} = A) when
M1 == Module, F1 == Func
->
A
end
),
Recs = ets:select(table_name(), MatchSpec),
[CRec || #rec{data = #crec{} = CRec} <- Recs].
find_srecs() ->
MatchSpec = ets:fun2ms(fun(#rec{data = #srec{}} = A) -> A end),
SRecs = [SRec || #rec{data = SRec} <- ets:select(edt_profile, MatchSpec)],
lists:sort(
fun(#srec{context_id = A}, #srec{context_id = B}) ->
A =< B
end,
SRecs
).
track_call(ContextId, CallerId, Pid, Call, Result, EndTime, EndReds) ->
CRec = crec(
ContextId,
CallerId,
Pid,
Call,
Result,
EndTime,
EndReds
),
store(CRec).
track_summary(ContextId, Pid, Call, Arity, EndTime, EndReds) ->
Time = EndTime - Call#call.start_time,
Reds = EndReds - Call#call.start_reds,
Id = srec_id(ContextId, Pid, Call#call.module, Call#call.func, Arity),
SRec1 =
case find(Id) of
#srec{} = SRec ->
SRec#srec{
reds = SRec#srec.reds + Reds,
count = SRec#srec.count + 1,
time = SRec#srec.time + Time
};
{error, not_found} ->
srec(ContextId, Pid, Call#call.module, Call#call.func, Arity, 1, Time, Reds)
end,
store(SRec1).
|
be26a847e9392a377a01bbacb1ee4d6571633f1bae6999e76b400ef926a0fe3b | CarlosMChica/HaskellBook | ChapterExercises.hs | # LANGUAGE FlexibleInstances #
module ChapterExercises where
import GHC.Arr
import FunctorLaws
import Test.QuickCheck
import Test.QuickCheck.Function
type IntToString = Fun Int String
type StringToInt = Fun String Int
1 .
-- data Bool = False | True
No valid Functor instance possible as is not a higher kinded type , it 's kind is * , functor needs * - > *
2 .
data BoolAndSomethingElse a = False' a | True' a deriving (Eq, Show)
instance Functor BoolAndSomethingElse where
fmap f (False' x) = False' $ f x
fmap f (True' x) = True' $ f x
instance (Arbitrary a) => Arbitrary (BoolAndSomethingElse a) where
arbitrary = do
x <- arbitrary
oneof [return $ False' x, return $ True' x]
type BoolAndSomethingElseId = BoolAndSomethingElse String -> Bool
type BoolAndSomethingElseCompose = StringToInt -> IntToString -> BoolAndSomethingElse String -> Bool
3 .
data BoolAndMaybeSomethingElse a = Falsish | Truish a
-- This is equivalent to Maybe and I'm lazy so I'm not writing it again :). It's a perfectly valid functor.
4 .
newtype Mu f = Inf { outF :: f (Mu f) }
Not Possible as has kind ( * - > * ) - > * and Functor requires kind * - > * .
-- (* -> *) -> * This kind means that the type argument f is itself a higher-kinded type. Applying f would leave kind *.
5 .
data D = D (Array Word Word) Int Int
No valid Functor instance possible as is not a higher kinded type , it 's kind is * , functor needs * - > *
Rearrange the arguments to the type constructor of the datatype so the Functor instance works .
1 .
-- Given type
data Sum a b = First a | Second b
-- Fixed type
data Sum b a = First a | Second b
instance Functor (Sum e) where
fmap f (First a) = First (f a)
fmap f (Second b) = Second b
2 .
-- Given type
-- data Company a b c = DeepBlue a c | Something b
-- Fixed type
data Company a c b = DeepBlue a c | Something b
instance Functor (Company e e') where
fmap f (Something b) = Something $ f b
fmap _ (DeepBlue a c) = DeepBlue a c
3 .
-- Given type
data More a b = L a b a | R b a b deriving ( Eq , Show )
-- Fixed type
data More b a = L a b a | R b a b deriving (Eq, Show)
instance Functor (More x) where
fmap f (L a b a') = L (f a) b (f a')
fmap f (R b a b') = R b (f a) b'
--
--
-- Write Functor instances for the following datatypes.
1 .
data Quant a b =
Finance
| Desk a
| Bloor b deriving (Eq, Show)
instance Functor (Quant a) where
fmap _ Finance = Finance
fmap _ (Desk x) = Desk x
fmap f (Bloor x) = Bloor $ f x
instance (Arbitrary a, Arbitrary b) => Arbitrary (Quant a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
elements [Finance, Desk x, Bloor y]
type QuantId = Quant Int String -> Bool
type QuantCompose = IntToString -> StringToInt -> Quant String Int -> Bool
2 .
data K a b = K a deriving (Eq, Show)
instance Functor (K a) where
fmap _ (K x) = K x
instance (Arbitrary a) => Arbitrary (K a b) where
arbitrary = do
x <- arbitrary
return $ K x
type KId = K Int Int -> Bool
type KCompose = IntToString -> StringToInt -> K Int Int -> Bool
3 .
newtype Flip f a b = Flip (f b a) deriving (Eq, Show)
newtype K' a b = K' a deriving (Eq, Show)
instance Functor (Flip K' a) where
fmap f (Flip (K' x)) = Flip $ K' $ f x
instance (Arbitrary b) => Arbitrary (Flip K' a b) where
arbitrary = do
x <- arbitrary
return $ Flip $ K' x
type FlipKId = Flip K' Int Int -> Bool
type FlipKCompose = IntToString -> StringToInt -> Flip K' String Int -> Bool
4 .
data EvilGoateeConst a b = GoatyConst b
instance Functor (EvilGoateeConst a) where
fmap f (GoatyConst x) = GoatyConst $ f x
-- This is clearly a valid Functor implementation so I won't bother writing proof for it.
5 .
data LiftItOut f a = LiftItOut (f a) deriving (Eq, Show)
instance (Functor f) => Functor (LiftItOut f) where
fmap f (LiftItOut x) = LiftItOut $ fmap f x
-- Ideally this instance would work for any type, but I couldn't make it work
-- instance (Arbitrary a) => Arbitrary (LiftItOut f a) where
-- arbitrary = do
-- x <- arbitrary
-- return $ LiftItOut x
instance (Arbitrary a) => Arbitrary (LiftItOut Maybe a) where
arbitrary = do
x <- arbitrary
elements [LiftItOut (Just x), LiftItOut Nothing]
type LiftItOutId = LiftItOut Maybe Int -> Bool
type LiftItOutCompose = IntToString -> StringToInt -> LiftItOut Maybe Int -> Bool
6 .
data Parappa f g a = DaWrappa (f a) (g a) deriving (Eq, Show)
instance (Functor f, Functor g) => Functor (Parappa f g) where
fmap f (DaWrappa x y) = DaWrappa (fmap f x) (fmap f y)
-- Ideally this instance would work for any type, but I couldn't make it work
-- instance (Arbitrary a) => Arbitrary (Parappa f g a) where
-- arbitrary = do
-- x <- arbitrary
-- y <- arbitrary
return y
instance (Arbitrary a) => Arbitrary (Parappa [] [] a) where
arbitrary = do
x <- arbitrary
x' <- arbitrary
return $ DaWrappa [x] [x']
type ParappaId = Parappa [] [] Int -> Bool
type ParappaCompose = IntToString -> StringToInt -> Parappa [] [] Int -> Bool
7 .
data IgnoreOne f g a b = IgnoringSomething (f a) (g b) deriving (Eq, Show)
instance (Functor g) => Functor (IgnoreOne f g a) where
fmap f (IgnoringSomething x y) = IgnoringSomething x (fmap f y)
instance (Arbitrary a, Arbitrary b) => Arbitrary (IgnoreOne [] [] a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
return $ IgnoringSomething [x] [y]
type IgnoreOneId = IgnoreOne [] [] Int String -> Bool
type IgnoreOneCompose = StringToInt -> IntToString -> IgnoreOne [] [] Int String -> Bool
8 .
data Notorious g o a t = Notorious (g o) (g a) (g t) deriving (Eq, Show)
instance (Functor g) => Functor (Notorious g o a) where
fmap f (Notorious x y z) = Notorious x y (fmap f z)
instance (Arbitrary o, Arbitrary a, Arbitrary t) => Arbitrary (Notorious [] o a t) where
arbitrary = do
x <- arbitrary
y <- arbitrary
z <- arbitrary
return $ Notorious [x] [y] [z]
type NotoriousId = Notorious [] Int String [Double] -> Bool
-- Deal with it :) g ----o-------- ---a---- ---t--
type NotoriousCompose = StringToInt -> IntToString -> Notorious [] (Maybe String) [Double] String -> Bool
9 .
data List a = Nil | Cons a (List a) deriving (Eq, Show)
instance Functor List where
fmap _ Nil = Nil
fmap f (Cons x xs) = Cons (f x) (fmap f xs)
instance (Arbitrary a) => Arbitrary (List a) where
arbitrary = do
x <- arbitrary
xs <- arbitrary
frequency [(1, return Nil), (4, return $ Cons x xs)]
type ListId = List Int -> Bool
type ListCompose = StringToInt -> IntToString -> List String -> Bool
10 .
data GoatLord a =
NoGoat
| OneGoat a
| MoreGoats (GoatLord a) (GoatLord a) (GoatLord a) deriving (Eq, Show)
instance Functor GoatLord where
fmap _ NoGoat = NoGoat
fmap f (OneGoat x) = OneGoat $ f x
fmap f (MoreGoats x y z) = MoreGoats (fmap f x) (fmap f y) (fmap f z)
Hangs . Do n't really get why , my understanding is that this case is the same as exercise 9 , although it seems it 's not .
instance ( Arbitrary a ) = > Arbitrary ( GoatLord a ) where
-- arbitrary = do
-- x <- arbitrary
-- f <- arbitrary
-- f' <- arbitrary
-- f'' <- arbitrary
frequency [ ( 1 , return NoGoat ) , ( 2 , return $ OneGoat x ) , ( 2 , return $ MoreGoats f f ' f '' ) ]
instance (Arbitrary a) => Arbitrary (GoatLord a) where
arbitrary = do
w <- arbitrary
x <- arbitrary
y <- arbitrary
z <- arbitrary
frequency [
(1, return NoGoat),
(2, return $ OneGoat w),
(2, return $ MoreGoats (OneGoat x) (OneGoat y) (OneGoat z))]
type GoatLordId = GoatLord Int -> Bool
type GoatLordCompose = StringToInt -> IntToString -> GoatLord String -> Bool
11 .
data TalkToMe a =
Halt
| Print String a
| Read (String -> a)
Ca n't derive Eq & Show instances . Do n't know how to create one either .
deriving ( Eq , Show )
instance Functor TalkToMe where
fmap _ Halt = Halt
fmap f (Print x y) = Print x (f y)
fmap f (Read g) = Read (f . g)
instance (Arbitrary a) => Arbitrary (TalkToMe a) where
arbitrary = do
x <- arbitrary
s <- arbitrary
f <- arbitrary
frequency [
(1, return Halt),
(2, return $ Print s x),
(2, return $ Read f)]
main = do
quickCheck (functorIdentity :: BoolAndSomethingElseId)
quickCheck (functorCompose :: BoolAndSomethingElseCompose)
quickCheck (functorIdentity :: QuantId)
quickCheck (functorCompose :: QuantCompose)
quickCheck (functorIdentity :: KId)
quickCheck (functorCompose :: KCompose)
quickCheck (functorIdentity :: FlipKId)
quickCheck (functorCompose :: FlipKCompose)
quickCheck (functorIdentity :: LiftItOutId)
quickCheck (functorCompose :: LiftItOutCompose)
quickCheck (functorIdentity :: ParappaId)
quickCheck (functorCompose :: ParappaCompose)
quickCheck (functorIdentity :: IgnoreOneId)
quickCheck (functorCompose :: IgnoreOneCompose)
quickCheck (functorIdentity :: NotoriousId)
quickCheck (functorCompose :: NotoriousCompose)
quickCheck (functorIdentity :: ListId)
quickCheck (functorCompose :: ListCompose)
quickCheck (functorIdentity :: GoatLordId)
quickCheck (functorCompose :: GoatLordCompose)
| null | https://raw.githubusercontent.com/CarlosMChica/HaskellBook/86f82cf36cd00003b1a1aebf264e4b5d606ddfad/chapter16/ChapterExercises.hs | haskell | data Bool = False | True
This is equivalent to Maybe and I'm lazy so I'm not writing it again :). It's a perfectly valid functor.
(* -> *) -> * This kind means that the type argument f is itself a higher-kinded type. Applying f would leave kind *.
Given type
Fixed type
Given type
data Company a b c = DeepBlue a c | Something b
Fixed type
Given type
Fixed type
Write Functor instances for the following datatypes.
This is clearly a valid Functor implementation so I won't bother writing proof for it.
Ideally this instance would work for any type, but I couldn't make it work
instance (Arbitrary a) => Arbitrary (LiftItOut f a) where
arbitrary = do
x <- arbitrary
return $ LiftItOut x
Ideally this instance would work for any type, but I couldn't make it work
instance (Arbitrary a) => Arbitrary (Parappa f g a) where
arbitrary = do
x <- arbitrary
y <- arbitrary
Deal with it :) g ----o-------- ---a---- ---t--
arbitrary = do
x <- arbitrary
f <- arbitrary
f' <- arbitrary
f'' <- arbitrary | # LANGUAGE FlexibleInstances #
module ChapterExercises where
import GHC.Arr
import FunctorLaws
import Test.QuickCheck
import Test.QuickCheck.Function
type IntToString = Fun Int String
type StringToInt = Fun String Int
1 .
No valid Functor instance possible as is not a higher kinded type , it 's kind is * , functor needs * - > *
2 .
data BoolAndSomethingElse a = False' a | True' a deriving (Eq, Show)
instance Functor BoolAndSomethingElse where
fmap f (False' x) = False' $ f x
fmap f (True' x) = True' $ f x
instance (Arbitrary a) => Arbitrary (BoolAndSomethingElse a) where
arbitrary = do
x <- arbitrary
oneof [return $ False' x, return $ True' x]
type BoolAndSomethingElseId = BoolAndSomethingElse String -> Bool
type BoolAndSomethingElseCompose = StringToInt -> IntToString -> BoolAndSomethingElse String -> Bool
3 .
data BoolAndMaybeSomethingElse a = Falsish | Truish a
4 .
newtype Mu f = Inf { outF :: f (Mu f) }
Not Possible as has kind ( * - > * ) - > * and Functor requires kind * - > * .
5 .
data D = D (Array Word Word) Int Int
No valid Functor instance possible as is not a higher kinded type , it 's kind is * , functor needs * - > *
Rearrange the arguments to the type constructor of the datatype so the Functor instance works .
1 .
data Sum a b = First a | Second b
data Sum b a = First a | Second b
instance Functor (Sum e) where
fmap f (First a) = First (f a)
fmap f (Second b) = Second b
2 .
data Company a c b = DeepBlue a c | Something b
instance Functor (Company e e') where
fmap f (Something b) = Something $ f b
fmap _ (DeepBlue a c) = DeepBlue a c
3 .
data More a b = L a b a | R b a b deriving ( Eq , Show )
data More b a = L a b a | R b a b deriving (Eq, Show)
instance Functor (More x) where
fmap f (L a b a') = L (f a) b (f a')
fmap f (R b a b') = R b (f a) b'
1 .
data Quant a b =
Finance
| Desk a
| Bloor b deriving (Eq, Show)
instance Functor (Quant a) where
fmap _ Finance = Finance
fmap _ (Desk x) = Desk x
fmap f (Bloor x) = Bloor $ f x
instance (Arbitrary a, Arbitrary b) => Arbitrary (Quant a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
elements [Finance, Desk x, Bloor y]
type QuantId = Quant Int String -> Bool
type QuantCompose = IntToString -> StringToInt -> Quant String Int -> Bool
2 .
data K a b = K a deriving (Eq, Show)
instance Functor (K a) where
fmap _ (K x) = K x
instance (Arbitrary a) => Arbitrary (K a b) where
arbitrary = do
x <- arbitrary
return $ K x
type KId = K Int Int -> Bool
type KCompose = IntToString -> StringToInt -> K Int Int -> Bool
3 .
newtype Flip f a b = Flip (f b a) deriving (Eq, Show)
newtype K' a b = K' a deriving (Eq, Show)
instance Functor (Flip K' a) where
fmap f (Flip (K' x)) = Flip $ K' $ f x
instance (Arbitrary b) => Arbitrary (Flip K' a b) where
arbitrary = do
x <- arbitrary
return $ Flip $ K' x
type FlipKId = Flip K' Int Int -> Bool
type FlipKCompose = IntToString -> StringToInt -> Flip K' String Int -> Bool
4 .
data EvilGoateeConst a b = GoatyConst b
instance Functor (EvilGoateeConst a) where
fmap f (GoatyConst x) = GoatyConst $ f x
5 .
data LiftItOut f a = LiftItOut (f a) deriving (Eq, Show)
instance (Functor f) => Functor (LiftItOut f) where
fmap f (LiftItOut x) = LiftItOut $ fmap f x
instance (Arbitrary a) => Arbitrary (LiftItOut Maybe a) where
arbitrary = do
x <- arbitrary
elements [LiftItOut (Just x), LiftItOut Nothing]
type LiftItOutId = LiftItOut Maybe Int -> Bool
type LiftItOutCompose = IntToString -> StringToInt -> LiftItOut Maybe Int -> Bool
6 .
data Parappa f g a = DaWrappa (f a) (g a) deriving (Eq, Show)
instance (Functor f, Functor g) => Functor (Parappa f g) where
fmap f (DaWrappa x y) = DaWrappa (fmap f x) (fmap f y)
return y
instance (Arbitrary a) => Arbitrary (Parappa [] [] a) where
arbitrary = do
x <- arbitrary
x' <- arbitrary
return $ DaWrappa [x] [x']
type ParappaId = Parappa [] [] Int -> Bool
type ParappaCompose = IntToString -> StringToInt -> Parappa [] [] Int -> Bool
7 .
data IgnoreOne f g a b = IgnoringSomething (f a) (g b) deriving (Eq, Show)
instance (Functor g) => Functor (IgnoreOne f g a) where
fmap f (IgnoringSomething x y) = IgnoringSomething x (fmap f y)
instance (Arbitrary a, Arbitrary b) => Arbitrary (IgnoreOne [] [] a b) where
arbitrary = do
x <- arbitrary
y <- arbitrary
return $ IgnoringSomething [x] [y]
type IgnoreOneId = IgnoreOne [] [] Int String -> Bool
type IgnoreOneCompose = StringToInt -> IntToString -> IgnoreOne [] [] Int String -> Bool
8 .
data Notorious g o a t = Notorious (g o) (g a) (g t) deriving (Eq, Show)
instance (Functor g) => Functor (Notorious g o a) where
fmap f (Notorious x y z) = Notorious x y (fmap f z)
instance (Arbitrary o, Arbitrary a, Arbitrary t) => Arbitrary (Notorious [] o a t) where
arbitrary = do
x <- arbitrary
y <- arbitrary
z <- arbitrary
return $ Notorious [x] [y] [z]
type NotoriousId = Notorious [] Int String [Double] -> Bool
type NotoriousCompose = StringToInt -> IntToString -> Notorious [] (Maybe String) [Double] String -> Bool
9 .
data List a = Nil | Cons a (List a) deriving (Eq, Show)
instance Functor List where
fmap _ Nil = Nil
fmap f (Cons x xs) = Cons (f x) (fmap f xs)
instance (Arbitrary a) => Arbitrary (List a) where
arbitrary = do
x <- arbitrary
xs <- arbitrary
frequency [(1, return Nil), (4, return $ Cons x xs)]
type ListId = List Int -> Bool
type ListCompose = StringToInt -> IntToString -> List String -> Bool
10 .
data GoatLord a =
NoGoat
| OneGoat a
| MoreGoats (GoatLord a) (GoatLord a) (GoatLord a) deriving (Eq, Show)
instance Functor GoatLord where
fmap _ NoGoat = NoGoat
fmap f (OneGoat x) = OneGoat $ f x
fmap f (MoreGoats x y z) = MoreGoats (fmap f x) (fmap f y) (fmap f z)
Hangs . Do n't really get why , my understanding is that this case is the same as exercise 9 , although it seems it 's not .
instance ( Arbitrary a ) = > Arbitrary ( GoatLord a ) where
frequency [ ( 1 , return NoGoat ) , ( 2 , return $ OneGoat x ) , ( 2 , return $ MoreGoats f f ' f '' ) ]
instance (Arbitrary a) => Arbitrary (GoatLord a) where
arbitrary = do
w <- arbitrary
x <- arbitrary
y <- arbitrary
z <- arbitrary
frequency [
(1, return NoGoat),
(2, return $ OneGoat w),
(2, return $ MoreGoats (OneGoat x) (OneGoat y) (OneGoat z))]
type GoatLordId = GoatLord Int -> Bool
type GoatLordCompose = StringToInt -> IntToString -> GoatLord String -> Bool
11 .
data TalkToMe a =
Halt
| Print String a
| Read (String -> a)
Ca n't derive Eq & Show instances . Do n't know how to create one either .
deriving ( Eq , Show )
instance Functor TalkToMe where
fmap _ Halt = Halt
fmap f (Print x y) = Print x (f y)
fmap f (Read g) = Read (f . g)
instance (Arbitrary a) => Arbitrary (TalkToMe a) where
arbitrary = do
x <- arbitrary
s <- arbitrary
f <- arbitrary
frequency [
(1, return Halt),
(2, return $ Print s x),
(2, return $ Read f)]
main = do
quickCheck (functorIdentity :: BoolAndSomethingElseId)
quickCheck (functorCompose :: BoolAndSomethingElseCompose)
quickCheck (functorIdentity :: QuantId)
quickCheck (functorCompose :: QuantCompose)
quickCheck (functorIdentity :: KId)
quickCheck (functorCompose :: KCompose)
quickCheck (functorIdentity :: FlipKId)
quickCheck (functorCompose :: FlipKCompose)
quickCheck (functorIdentity :: LiftItOutId)
quickCheck (functorCompose :: LiftItOutCompose)
quickCheck (functorIdentity :: ParappaId)
quickCheck (functorCompose :: ParappaCompose)
quickCheck (functorIdentity :: IgnoreOneId)
quickCheck (functorCompose :: IgnoreOneCompose)
quickCheck (functorIdentity :: NotoriousId)
quickCheck (functorCompose :: NotoriousCompose)
quickCheck (functorIdentity :: ListId)
quickCheck (functorCompose :: ListCompose)
quickCheck (functorIdentity :: GoatLordId)
quickCheck (functorCompose :: GoatLordCompose)
|
d68a67e211ce8736434ffd8c1f8a4aa9b6c9237f0672cd3a11806bf055642818 | haskell/statistics | FDistribution.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE DeriveDataTypeable , DeriveGeneric #
-- |
-- Module : Statistics.Distribution.FDistribution
Copyright : ( c ) 2011
-- License : BSD3
--
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- Fisher F distribution
module Statistics.Distribution.FDistribution (
FDistribution
-- * Constructors
, fDistribution
, fDistributionE
, fDistributionReal
, fDistributionRealE
-- * Accessors
, fDistributionNDF1
, fDistributionNDF2
) where
import Control.Applicative
import Data.Aeson (FromJSON(..), ToJSON, Value(..), (.:))
import Data.Binary (Binary(..))
import Data.Data (Data, Typeable)
import GHC.Generics (Generic)
import Numeric.SpecFunctions (
logBeta, incompleteBeta, invIncompleteBeta, digamma)
import Numeric.MathFunctions.Constants (m_neg_inf)
import qualified Statistics.Distribution as D
import Statistics.Function (square)
import Statistics.Internal
-- | F distribution
data FDistribution = F { fDistributionNDF1 :: {-# UNPACK #-} !Double
, fDistributionNDF2 :: {-# UNPACK #-} !Double
, _pdfFactor :: {-# UNPACK #-} !Double
}
deriving (Eq, Typeable, Data, Generic)
instance Show FDistribution where
showsPrec i (F n m _) = defaultShow2 "fDistributionReal" n m i
instance Read FDistribution where
readPrec = defaultReadPrecM2 "fDistributionReal" fDistributionRealE
instance ToJSON FDistribution
instance FromJSON FDistribution where
parseJSON (Object v) = do
n <- v .: "fDistributionNDF1"
m <- v .: "fDistributionNDF2"
maybe (fail $ errMsgR n m) return $ fDistributionRealE n m
parseJSON _ = empty
instance Binary FDistribution where
put (F n m _) = put n >> put m
get = do
n <- get
m <- get
maybe (fail $ errMsgR n m) return $ fDistributionRealE n m
fDistribution :: Int -> Int -> FDistribution
fDistribution n m = maybe (error $ errMsg n m) id $ fDistributionE n m
fDistributionReal :: Double -> Double -> FDistribution
fDistributionReal n m = maybe (error $ errMsgR n m) id $ fDistributionRealE n m
fDistributionE :: Int -> Int -> Maybe FDistribution
fDistributionE n m
| n > 0 && m > 0 =
let n' = fromIntegral n
m' = fromIntegral m
f' = 0.5 * (log m' * m' + log n' * n') - logBeta (0.5*n') (0.5*m')
in Just $ F n' m' f'
| otherwise = Nothing
fDistributionRealE :: Double -> Double -> Maybe FDistribution
fDistributionRealE n m
| n > 0 && m > 0 =
let f' = 0.5 * (log m * m + log n * n) - logBeta (0.5*n) (0.5*m)
in Just $ F n m f'
| otherwise = Nothing
errMsg :: Int -> Int -> String
errMsg _ _ = "Statistics.Distribution.FDistribution.fDistribution: non-positive number of degrees of freedom"
errMsgR :: Double -> Double -> String
errMsgR _ _ = "Statistics.Distribution.FDistribution.fDistribution: non-positive number of degrees of freedom"
instance D.Distribution FDistribution where
cumulative = cumulative
complCumulative = complCumulative
instance D.ContDistr FDistribution where
density d x
| x <= 0 = 0
| otherwise = exp $ logDensity d x
logDensity d x
| x <= 0 = m_neg_inf
| otherwise = logDensity d x
quantile = quantile
cumulative :: FDistribution -> Double -> Double
cumulative (F n m _) x
| x <= 0 = 0
-- Only matches +∞
| isInfinite x = 1
-- NOTE: Here we rely on implementation detail of incompleteBeta. It
-- computes using series expansion for sufficiently small x
-- and uses following identity otherwise:
--
-- I(x; a, b) = 1 - I(1-x; b, a)
--
Point is we can compute 1 - x as ) without loss of
-- precision for large x. Sadly this switchover point is
-- implementation detail.
| n >= (n+m)*bx = incompleteBeta (0.5 * n) (0.5 * m) bx
| otherwise = 1 - incompleteBeta (0.5 * m) (0.5 * n) bx1
where
y = n * x
bx = y / (m + y)
bx1 = m / (m + y)
complCumulative :: FDistribution -> Double -> Double
complCumulative (F n m _) x
| x <= 0 = 1
-- Only matches +∞
| isInfinite x = 0
-- See NOTE at cumulative
| m >= (n+m)*bx = incompleteBeta (0.5 * m) (0.5 * n) bx
| otherwise = 1 - incompleteBeta (0.5 * n) (0.5 * m) bx1
where
y = n*x
bx = m / (m + y)
bx1 = y / (m + y)
logDensity :: FDistribution -> Double -> Double
logDensity (F n m fac) x
= fac + log x * (0.5 * n - 1) - log(m + n*x) * 0.5 * (n + m)
quantile :: FDistribution -> Double -> Double
quantile (F n m _) p
| p >= 0 && p <= 1 =
let x = invIncompleteBeta (0.5 * n) (0.5 * m) p
in m * x / (n * (1 - x))
| otherwise =
error $ "Statistics.Distribution.Uniform.quantile: p must be in [0,1] range. Got: "++show p
instance D.MaybeMean FDistribution where
maybeMean (F _ m _) | m > 2 = Just $ m / (m - 2)
| otherwise = Nothing
instance D.MaybeVariance FDistribution where
maybeStdDev (F n m _)
| m > 4 = Just $ 2 * square m * (m + n - 2) / (n * square (m - 2) * (m - 4))
| otherwise = Nothing
instance D.Entropy FDistribution where
entropy (F n m _) =
let nHalf = 0.5 * n
mHalf = 0.5 * m in
log (n/m)
+ logBeta nHalf mHalf
+ (1 - nHalf) * digamma nHalf
- (1 + mHalf) * digamma mHalf
+ (nHalf + mHalf) * digamma (nHalf + mHalf)
instance D.MaybeEntropy FDistribution where
maybeEntropy = Just . D.entropy
instance D.ContGen FDistribution where
genContVar = D.genContinuous
| null | https://raw.githubusercontent.com/haskell/statistics/a2aa25181e50cd63db4a785c20c973a3c4dd5dac/Statistics/Distribution/FDistribution.hs | haskell | # LANGUAGE OverloadedStrings #
|
Module : Statistics.Distribution.FDistribution
License : BSD3
Maintainer :
Stability : experimental
Portability : portable
Fisher F distribution
* Constructors
* Accessors
| F distribution
# UNPACK #
# UNPACK #
# UNPACK #
Only matches +∞
NOTE: Here we rely on implementation detail of incompleteBeta. It
computes using series expansion for sufficiently small x
and uses following identity otherwise:
I(x; a, b) = 1 - I(1-x; b, a)
precision for large x. Sadly this switchover point is
implementation detail.
Only matches +∞
See NOTE at cumulative | # LANGUAGE DeriveDataTypeable , DeriveGeneric #
Copyright : ( c ) 2011
module Statistics.Distribution.FDistribution (
FDistribution
, fDistribution
, fDistributionE
, fDistributionReal
, fDistributionRealE
, fDistributionNDF1
, fDistributionNDF2
) where
import Control.Applicative
import Data.Aeson (FromJSON(..), ToJSON, Value(..), (.:))
import Data.Binary (Binary(..))
import Data.Data (Data, Typeable)
import GHC.Generics (Generic)
import Numeric.SpecFunctions (
logBeta, incompleteBeta, invIncompleteBeta, digamma)
import Numeric.MathFunctions.Constants (m_neg_inf)
import qualified Statistics.Distribution as D
import Statistics.Function (square)
import Statistics.Internal
}
deriving (Eq, Typeable, Data, Generic)
instance Show FDistribution where
showsPrec i (F n m _) = defaultShow2 "fDistributionReal" n m i
instance Read FDistribution where
readPrec = defaultReadPrecM2 "fDistributionReal" fDistributionRealE
instance ToJSON FDistribution
instance FromJSON FDistribution where
parseJSON (Object v) = do
n <- v .: "fDistributionNDF1"
m <- v .: "fDistributionNDF2"
maybe (fail $ errMsgR n m) return $ fDistributionRealE n m
parseJSON _ = empty
instance Binary FDistribution where
put (F n m _) = put n >> put m
get = do
n <- get
m <- get
maybe (fail $ errMsgR n m) return $ fDistributionRealE n m
fDistribution :: Int -> Int -> FDistribution
fDistribution n m = maybe (error $ errMsg n m) id $ fDistributionE n m
fDistributionReal :: Double -> Double -> FDistribution
fDistributionReal n m = maybe (error $ errMsgR n m) id $ fDistributionRealE n m
fDistributionE :: Int -> Int -> Maybe FDistribution
fDistributionE n m
| n > 0 && m > 0 =
let n' = fromIntegral n
m' = fromIntegral m
f' = 0.5 * (log m' * m' + log n' * n') - logBeta (0.5*n') (0.5*m')
in Just $ F n' m' f'
| otherwise = Nothing
fDistributionRealE :: Double -> Double -> Maybe FDistribution
fDistributionRealE n m
| n > 0 && m > 0 =
let f' = 0.5 * (log m * m + log n * n) - logBeta (0.5*n) (0.5*m)
in Just $ F n m f'
| otherwise = Nothing
errMsg :: Int -> Int -> String
errMsg _ _ = "Statistics.Distribution.FDistribution.fDistribution: non-positive number of degrees of freedom"
errMsgR :: Double -> Double -> String
errMsgR _ _ = "Statistics.Distribution.FDistribution.fDistribution: non-positive number of degrees of freedom"
instance D.Distribution FDistribution where
cumulative = cumulative
complCumulative = complCumulative
instance D.ContDistr FDistribution where
density d x
| x <= 0 = 0
| otherwise = exp $ logDensity d x
logDensity d x
| x <= 0 = m_neg_inf
| otherwise = logDensity d x
quantile = quantile
cumulative :: FDistribution -> Double -> Double
cumulative (F n m _) x
| x <= 0 = 0
| isInfinite x = 1
Point is we can compute 1 - x as ) without loss of
| n >= (n+m)*bx = incompleteBeta (0.5 * n) (0.5 * m) bx
| otherwise = 1 - incompleteBeta (0.5 * m) (0.5 * n) bx1
where
y = n * x
bx = y / (m + y)
bx1 = m / (m + y)
complCumulative :: FDistribution -> Double -> Double
complCumulative (F n m _) x
| x <= 0 = 1
| isInfinite x = 0
| m >= (n+m)*bx = incompleteBeta (0.5 * m) (0.5 * n) bx
| otherwise = 1 - incompleteBeta (0.5 * n) (0.5 * m) bx1
where
y = n*x
bx = m / (m + y)
bx1 = y / (m + y)
logDensity :: FDistribution -> Double -> Double
logDensity (F n m fac) x
= fac + log x * (0.5 * n - 1) - log(m + n*x) * 0.5 * (n + m)
quantile :: FDistribution -> Double -> Double
quantile (F n m _) p
| p >= 0 && p <= 1 =
let x = invIncompleteBeta (0.5 * n) (0.5 * m) p
in m * x / (n * (1 - x))
| otherwise =
error $ "Statistics.Distribution.Uniform.quantile: p must be in [0,1] range. Got: "++show p
instance D.MaybeMean FDistribution where
maybeMean (F _ m _) | m > 2 = Just $ m / (m - 2)
| otherwise = Nothing
instance D.MaybeVariance FDistribution where
maybeStdDev (F n m _)
| m > 4 = Just $ 2 * square m * (m + n - 2) / (n * square (m - 2) * (m - 4))
| otherwise = Nothing
instance D.Entropy FDistribution where
entropy (F n m _) =
let nHalf = 0.5 * n
mHalf = 0.5 * m in
log (n/m)
+ logBeta nHalf mHalf
+ (1 - nHalf) * digamma nHalf
- (1 + mHalf) * digamma mHalf
+ (nHalf + mHalf) * digamma (nHalf + mHalf)
instance D.MaybeEntropy FDistribution where
maybeEntropy = Just . D.entropy
instance D.ContGen FDistribution where
genContVar = D.genContinuous
|
1d3b20c5422f942149de4b07b2a2b522b4fed887a660703843e9f353478b3f32 | Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library | InvoiceSettingCustomField.hs | {-# LANGUAGE MultiWayIf #-}
CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
{-# LANGUAGE OverloadedStrings #-}
-- | Contains the types generated from the schema InvoiceSettingCustomField
module StripeAPI.Types.InvoiceSettingCustomField where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
-- | Defines the object schema located at @components.schemas.invoice_setting_custom_field@ in the specification.
data InvoiceSettingCustomField = InvoiceSettingCustomField
{ -- | name: The name of the custom field.
--
-- Constraints:
--
* Maximum length of 5000
invoiceSettingCustomFieldName :: Data.Text.Internal.Text,
-- | value: The value of the custom field.
--
-- Constraints:
--
* Maximum length of 5000
invoiceSettingCustomFieldValue :: Data.Text.Internal.Text
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON InvoiceSettingCustomField where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (["name" Data.Aeson.Types.ToJSON..= invoiceSettingCustomFieldName obj] : ["value" Data.Aeson.Types.ToJSON..= invoiceSettingCustomFieldValue obj] : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (["name" Data.Aeson.Types.ToJSON..= invoiceSettingCustomFieldName obj] : ["value" Data.Aeson.Types.ToJSON..= invoiceSettingCustomFieldValue obj] : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON InvoiceSettingCustomField where
parseJSON = Data.Aeson.Types.FromJSON.withObject "InvoiceSettingCustomField" (\obj -> (GHC.Base.pure InvoiceSettingCustomField GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..: "name")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..: "value"))
-- | Create a new 'InvoiceSettingCustomField' with all required fields.
mkInvoiceSettingCustomField ::
-- | 'invoiceSettingCustomFieldName'
Data.Text.Internal.Text ->
-- | 'invoiceSettingCustomFieldValue'
Data.Text.Internal.Text ->
InvoiceSettingCustomField
mkInvoiceSettingCustomField invoiceSettingCustomFieldName invoiceSettingCustomFieldValue =
InvoiceSettingCustomField
{ invoiceSettingCustomFieldName = invoiceSettingCustomFieldName,
invoiceSettingCustomFieldValue = invoiceSettingCustomFieldValue
}
| null | https://raw.githubusercontent.com/Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library/ba4401f083ff054f8da68c741f762407919de42f/src/StripeAPI/Types/InvoiceSettingCustomField.hs | haskell | # LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings #
| Contains the types generated from the schema InvoiceSettingCustomField
| Defines the object schema located at @components.schemas.invoice_setting_custom_field@ in the specification.
| name: The name of the custom field.
Constraints:
| value: The value of the custom field.
Constraints:
| Create a new 'InvoiceSettingCustomField' with all required fields.
| 'invoiceSettingCustomFieldName'
| 'invoiceSettingCustomFieldValue' | CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
module StripeAPI.Types.InvoiceSettingCustomField where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
data InvoiceSettingCustomField = InvoiceSettingCustomField
* Maximum length of 5000
invoiceSettingCustomFieldName :: Data.Text.Internal.Text,
* Maximum length of 5000
invoiceSettingCustomFieldValue :: Data.Text.Internal.Text
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON InvoiceSettingCustomField where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (["name" Data.Aeson.Types.ToJSON..= invoiceSettingCustomFieldName obj] : ["value" Data.Aeson.Types.ToJSON..= invoiceSettingCustomFieldValue obj] : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (["name" Data.Aeson.Types.ToJSON..= invoiceSettingCustomFieldName obj] : ["value" Data.Aeson.Types.ToJSON..= invoiceSettingCustomFieldValue obj] : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON InvoiceSettingCustomField where
parseJSON = Data.Aeson.Types.FromJSON.withObject "InvoiceSettingCustomField" (\obj -> (GHC.Base.pure InvoiceSettingCustomField GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..: "name")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..: "value"))
mkInvoiceSettingCustomField ::
Data.Text.Internal.Text ->
Data.Text.Internal.Text ->
InvoiceSettingCustomField
mkInvoiceSettingCustomField invoiceSettingCustomFieldName invoiceSettingCustomFieldValue =
InvoiceSettingCustomField
{ invoiceSettingCustomFieldName = invoiceSettingCustomFieldName,
invoiceSettingCustomFieldValue = invoiceSettingCustomFieldValue
}
|
615ba1cb60cf08e8fcd80dd0cd35fc3769e90beda92d3e29f539404e9e3d1bde | ricardobcl/DottedDB | dotted_db_vnode.erl | -module(dotted_db_vnode).
-behaviour(riak_core_vnode).
-include_lib("dotted_db.hrl").
-include_lib("riak_core/include/riak_core_vnode.hrl").
-export([start_vnode/1,
init/1,
terminate/2,
handle_command/3,
handle_info/2,
is_empty/1,
delete/1,
handle_handoff_command/3,
handoff_starting/2,
handoff_cancelled/1,
handoff_finished/2,
handle_handoff_data/2,
encode_handoff_item/2,
handle_coverage/4,
handle_exit/3
]).
-export([
get_vnode_id/1,
broadcast_my_peers_to_my_peers/3,
replace_peer/3,
restart/2,
inform_peers_restart/2,
inform_peers_restart2/2,
recover_keys/2,
read/3,
repair/3,
write/2,
replicate/2,
sync_start/2,
sync_missing/5,
sync_repair/2
]).
-ignore_xref([
start_vnode/1
]).
-type dets() :: reference().
-record(state, {
% node id used for in logical clocks
id :: vnode_id(),
% the atom representing the vnode id
atom_id :: atom(),
% index on the consistent hashing ring
index :: index(),
% my peers ids
peers_ids :: [vnode_id()],
% node logical clock
clock :: bvv(),
key->object store , where the object contains a DCC ( values + logical clock )
storage :: dotted_db_storage:storage(),
% what me and my peers know about me and their peers
watermark :: vv_matrix(),
% map for keys that this node replicates (eventually all keys are safely pruned from this)
dotkeymap :: key_matrix(),
% the left list of pairs of deleted keys not yet stripped, and their causal context (version vector);
the right side is a list of ( vnode , map ) , where the map is between dots and keys not yet completely stripped ( and their VV also )
non_stripped_keys :: {[{key(),vv()}], [{id(), dict:dict()}]},
% interval in which the vnode tries to strip the non-stripped-keys
buffer_strip_interval :: non_neg_integer(),
% temporary list of nodes recovering from failure and a list of keys to send
recover_keys :: [{id(), [bkey()]}],
% number of updates (put or deletes) since saving node state to storage
updates_mem :: integer(),
% DETS table that stores in disk the vnode state
dets :: dets(),
% a flag to collect or not stats
stats :: boolean(),
% syncs stats
syncs :: [{id(), integer(), integer(), os:timestamp(), os:timestamp()}],
% what mode the vnode is on
mode :: normal | recovering,
% interval time between reports on this vnode
report_interval :: non_neg_integer()
}).
-type state() :: #state{}.
-define(MASTER, dotted_db_vnode_master).
save vnode state every 100 updates
-define(VNODE_STATE_FILE, "dotted_db_vnode_state").
-define(VNODE_STATE_KEY, "dotted_db_vnode_state_key").
-define(ETS_DELETE_NO_STRIP, 0).
-define(ETS_DELETE_STRIP, 1).
-define(ETS_WRITE_NO_STRIP, 2).
-define(ETS_WRITE_STRIP, 3).
%%%===================================================================
%%% API
%%%===================================================================
start_vnode(I) ->
riak_core_vnode_master:get_vnode_pid(I, ?MODULE).
get_vnode_id(IndexNodes) ->
riak_core_vnode_master:command(IndexNodes,
get_vnode_id,
{raw, undefined, self()},
?MASTER).
broadcast_my_peers_to_my_peers(IndexNodes, MyId, MyPeersIds) ->
riak_core_vnode_master:command(IndexNodes,
{broadcast_my_peers_to_my_peers, MyId, MyPeersIds},
{raw, undefined, self()},
?MASTER).
replace_peer(IndexNodes, OldPeerId, NewPeerId) ->
riak_core_vnode_master:command(IndexNodes,
{replace_peer, OldPeerId, NewPeerId},
{raw, undefined, self()},
?MASTER).
restart(IndexNodes, ReqID) ->
riak_core_vnode_master:command(IndexNodes,
{restart, ReqID},
{fsm, undefined, self()},
?MASTER).
inform_peers_restart(Peers, Args) ->
riak_core_vnode_master:command(Peers,
{inform_peers_restart, Args},
{fsm, undefined, self()},
?MASTER).
inform_peers_restart2(Peers, Args) ->
riak_core_vnode_master:command(Peers,
{inform_peers_restart2, Args},
{fsm, undefined, self()},
?MASTER).
recover_keys(Peers, Args) ->
riak_core_vnode_master:command(Peers,
{recover_keys, Args},
{fsm, undefined, self()},
?MASTER).
read(ReplicaNodes, ReqID, Key) ->
riak_core_vnode_master:command(ReplicaNodes,
{read, ReqID, Key},
{fsm, undefined, self()},
?MASTER).
repair(OutdatedNodes, BKey, Object) ->
riak_core_vnode_master:command(OutdatedNodes,
{repair, BKey, Object},
{fsm, undefined, self()},
?MASTER).
write(Coordinator, Args) ->
riak_core_vnode_master:command(Coordinator,
{write, Args},
{fsm, undefined, self()},
?MASTER).
replicate(ReplicaNodes, Args) ->
riak_core_vnode_master:command(ReplicaNodes,
{replicate, Args},
{fsm, undefined, self()},
?MASTER).
sync_start(Node, ReqID) ->
riak_core_vnode_master:command(Node,
{sync_start, ReqID},
{fsm, undefined, self()},
?MASTER).
sync_missing(Peer, ReqID, RemoteNodeID, RemoteClock, RemotePeers) ->
riak_core_vnode_master:command(Peer,
{sync_missing, ReqID, RemoteNodeID, RemoteClock, RemotePeers},
{fsm, undefined, self()},
?MASTER).
sync_repair(Node, Args) ->
riak_core_vnode_master:command(Node,
{sync_repair, Args},
{fsm, undefined, self()},
?MASTER).
%%%===================================================================
%%% Callbacks
%%%===================================================================
init([Index]) ->
put(watermark, false),
process_flag(priority, high),
% try to read the vnode state in the DETS file, if it exists
{Dets, NodeId2, NodeClock, DotKeyMap, Watermark, NonStrippedKeys} =
case read_vnode_state(Index) of
{Ref, not_found} -> % there isn't a past vnode state stored
lager:debug("No persisted state for vnode index: ~p.",[Index]),
NodeId = new_vnode_id(Index),
Clock = swc_node:new(),
KLog = swc_dotkeymap:new(),
Repli = swc_watermark:new(),
{Ref, NodeId, Clock, KLog, Repli, {[],[]}};
{Ref, error, Error} -> % some unexpected error
lager:error("Error reading vnode state from storage: ~p", [Error]),
NodeId = new_vnode_id(Index),
Clock = swc_node:new(),
KLog = swc_dotkeymap:new(),
Repli = swc_watermark:new(),
{Ref, NodeId, Clock, KLog, Repli, {[],[]}};
{Ref, {Id, Clock, DKMap, Repli, NSK}} -> % we have vnode state in the storage
lager:info("Recovered state for vnode ID: ~p.",[Id]),
{Ref, Id, Clock, DKMap, Repli, NSK}
end,
% open the storage backend for the key-values of this vnode
{Storage, NodeId3, NodeClock2, DotKeyMap2, Watermark2, NonStrippedKeys2} =
case open_storage(Index) of
{{backend, ets}, S} ->
% if the storage is in memory, start with an "empty" vnode state
NodeId4 = new_vnode_id(Index),
{S, NodeId4, swc_node:new(), swc_dotkeymap:new(), swc_watermark:new(), {[],[]}};
{_, S} ->
{S, NodeId2,NodeClock, DotKeyMap, Watermark, NonStrippedKeys}
end,
PeersIDs = ordsets:del_element(NodeId3, ordsets:from_list(swc_watermark:peers(Watermark2))),
% create an ETS to store keys written and deleted in this node (for stats)
AtomID = create_ets_all_keys(NodeId3),
schedule a periodic reporting message ( wait 2 seconds initially )
schedule_report(2000),
% schedule a periodic strip of local keys
schedule_strip_keys(2000),
{ok, #state{
% for now, lets use the index in the consistent hash as the vnode ID
id = NodeId3,
atom_id = AtomID,
index = Index,
peers_ids = PeersIDs,
clock = NodeClock2,
watermark = Watermark2,
dotkeymap = DotKeyMap2,
non_stripped_keys = NonStrippedKeys2,
buffer_strip_interval = ?BUFFER_STRIP_INTERVAL,
recover_keys = [],
storage = Storage,
dets = Dets,
updates_mem = 0,
stats = application:get_env(dotted_db, do_stats, ?DEFAULT_DO_STATS),
syncs = initialize_syncs(Index),
mode = normal,
report_interval = ?REPORT_TICK_INTERVAL
}
}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% READING
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
handle_command(Cmd={read, _ReqID, _Key}, _Sender, State) ->
handle_read(Cmd, State);
handle_command({repair, BKey, Object}, Sender, State) ->
{noreply, State2} =
handle_command({replicate, {dummy_req_id, BKey, Object, ?DEFAULT_NO_REPLY}}, Sender, State),
{noreply, State2};
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% WRITING
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
handle_command(Cmd={write, _Args}, _Sender, State) ->
handle_write(Cmd, State);
handle_command(Cmd={replicate, _Args}, _Sender, State) ->
handle_replicate(Cmd, State);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% SYNCHRONIZING
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
handle_command(Cmd={sync_start, _ReqID}, _Sender, State) ->
handle_sync_start(Cmd, State);
handle_command(Cmd={sync_missing, _ReqID, _RemoteID, _RemoteClock, _RemotePeers}, Sender, State) ->
handle_sync_missing(Cmd, Sender, State);
handle_command(Cmd={sync_repair, _Args}, _Sender, State) ->
handle_sync_repair(Cmd, State);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Restarting Vnode (and recovery of keys)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% On the restarting node
handle_command(Cmd={restart, _ReqID}, _Sender, State) ->
handle_restart(Cmd, State);
%% On the good nodes
handle_command(Cmd={inform_peers_restart, {_ReqID, _RestartingNodeIndex, _OldVnodeID, _NewVnodeID}}, _Sender, State) ->
handle_inform_peers_restart(Cmd, State);
%% On the restarting node
handle_command(Cmd={recover_keys, {_ReqID, _RemoteVnode, _RemoteVnodeId, _RemoteClock, _Objects, _RemoteWatermark, _LastBatch}}, _Sender, State) ->
handle_recover_keys(Cmd, State);
%% On the good nodes
handle_command(Cmd={inform_peers_restart2, {_ReqID, _NewVnodeID}}, _Sender, State) ->
handle_inform_peers_restart2(Cmd, State);
%% Sample command: respond to a ping
handle_command(ping, _Sender, State) ->
{reply, {pong, State#state.id}, State};
handle_command(get_vnode_state, _Sender, State) ->
{reply, {pong, State}, State};
handle_command({set_strip_interval, NewStripInterval}, _Sender, State) ->
OldStripInterval = State#state.buffer_strip_interval,
lager:info("Strip Interval => from: ~p \t to: ~p",[OldStripInterval,NewStripInterval]),
{noreply, State#state{buffer_strip_interval=NewStripInterval}};
handle_command({set_stats, NewStats}, _Sender, State) ->
OldStats = State#state.stats,
lager:info("Vnode stats => from: ~p \t to: ~p",[OldStats, NewStats]),
{noreply, State#state{stats=NewStats}};
handle_command(get_vnode_id, _Sender, State) ->
{reply, {get_vnode_id, {State#state.index, node()}, State#state.id}, State};
handle_command({broadcast_my_peers_to_my_peers, MyPeer, MyPeerPeers}, _Sender, State) ->
Watermark = swc_watermark:add_peer(State#state.watermark, MyPeer, MyPeerPeers),
case length(swc_watermark:peers(Watermark)) == (?REPLICATION_FACTOR*2)-1 of
true ->
put(watermark, true),
lager:info("Peers 2 peers 4 watermark -> DONE!!!");
false ->
% lager:info("Getting my peer's peers ~p/~p",[orddict:size(Watermark), (?REPLICATION_FACTOR*2)-1]),
ok
end,
{noreply, State#state{watermark=Watermark}};
handle_command({replace_peer, OldPeerId, NewPeerId}, _Sender, State) ->
NewPeersIds =
case ordsets:is_element(OldPeerId, State#state.peers_ids) of
true -> ordsets:add_element(NewPeerId, ordsets:del_element(OldPeerId, State#state.peers_ids));
false -> State#state.peers_ids
end,
NewWatermark = swc_watermark : replace_peer(State#state.watermark , OldPeerId , NewPeerId ) ,
add_removed_vnode_jump_clock(OldPeerId),
NewWatermark = swc_watermark:retire_peer(State#state.watermark, OldPeerId, NewPeerId),
{noreply, State#state{peers_ids=NewPeersIds, watermark=NewWatermark}};
handle_command(Message, _Sender, State) ->
lager:info("Unhandled Command ~p", [Message]),
{noreply, State}.
%%%===================================================================
%%% Coverage
%%%===================================================================
handle_coverage(vnode_state, _KeySpaces, {_, RefId, _}, State) ->
{reply, {RefId, {ok, vs, State}}, State};
handle_coverage(strip_latency, _KeySpaces, {_, RefId, _}, State) ->
Latencies = compute_strip_latency(State#state.atom_id),
{reply, {RefId, {ok, strip_latency, Latencies}}, State};
handle_coverage(replication_latency, _KeySpaces, {_, RefId, _}, State) ->
Latencies = compute_replication_latency(State#state.atom_id),
{reply, {RefId, {ok, replication_latency, Latencies}}, State};
handle_coverage(all_current_dots, _KeySpaces, {_, RefId, _}, State) ->
% Dots = ets_get_all_dots(State#state.atom_id),
Dots = storage_get_all_dots(State#state.storage),
{reply, {RefId, {ok, all_current_dots, Dots}}, State};
handle_coverage(actual_deleted_keys, _KeySpaces, {_, RefId, _}, State) ->
ADelKeys = ets_get_actual_deleted(State#state.atom_id),
{reply, {RefId, {ok, adk, ADelKeys}}, State};
handle_coverage(issued_deleted_keys, _KeySpaces, {_, RefId, _}, State) ->
IDelKeys = ets_get_issued_deleted(State#state.atom_id),
Res = case length(IDelKeys) > 0 of
true ->
Key = hd(IDelKeys),
case dotted_db_storage:get(State#state.storage, Key) of
{error, not_found} ->
there is was deleted locally ( improbable , since there was a 0 in the ETS )
{Key, not_found};
{error, Error} ->
% some unexpected error
lager:error("Error reading a key from storage: ~p", [Error]),
% assume that the key was lost, i.e. it's equal to not_found
{Key, storage_error};
Obj ->
save the new k\v and remove unnecessary causal information
{Key, dotted_db_object:strip(State#state.clock, Obj), Obj}
end;
false ->
{}
end,
ThisVnode = {State#state.index, node()},
{reply, {RefId, {ok, idk, IDelKeys, Res, ThisVnode}}, State};
handle_coverage(written_keys, _KeySpaces, {_, RefId, _}, State) ->
WrtKeys = ets_get_issued_written(State#state.atom_id),
Res = case length(WrtKeys) > 0 of
true ->
Key = hd(WrtKeys),
case dotted_db_storage:get(State#state.storage, Key) of
{error, not_found} ->
there is was deleted locally ( improbable , since there was a 0 in the ETS )
{Key, not_found};
{error, Error} ->
% some unexpected error
lager:error("Error reading a key from storage: ~p", [Error]),
% assume that the key was lost, i.e. it's equal to not_found
{Key, storage_error};
Obj ->
save the new k\v and remove unnecessary causal information
{Key, dotted_db_object:strip(State#state.clock, Obj), Obj}
end;
false ->
{}
end,
ThisVnode = {State#state.index, node()},
{reply, {RefId, {ok, wk, WrtKeys, Res, ThisVnode}}, State};
handle_coverage(final_written_keys, _KeySpaces, {_, RefId, _}, State) ->
WrtKeys = ets_get_final_written(State#state.atom_id),
{reply, {RefId, {ok, fwk, WrtKeys}}, State};
handle_coverage(all_keys, _KeySpaces, {_, RefId, _}, State) ->
IDelKeys = ets_get_issued_deleted(State#state.atom_id),
IWrtKeys = ets_get_issued_written(State#state.atom_id),
FWrtKeys = ets_get_final_written(State#state.atom_id),
{reply, {RefId, {ok, ak, IDelKeys, IWrtKeys, FWrtKeys}}, State};
handle_coverage(Req, _KeySpaces, _Sender, State) ->
% lager:warning("unknown coverage received ~p", [Req]),
lager:info("unknown coverage received ~p", [Req]),
{noreply, State}.
%% @doc Handle out-of-band messages sent to the vnode process.
%% First clause: a peer answered our get_vnode_id request; record its id in the
%% node clock and, once all peer ids are known, build/broadcast the watermark.
handle_info({undefined,{get_vnode_id, IndexNode={Index,_}, PeerId={Index,_}}}, State) ->
    % lager:info("New vnode id for watermark: ~p ", [PeerId]),
    case lists:member(IndexNode, dotted_db_utils:peers(State#state.index)) of
        true ->
            % register the peer with a 0 counter in the node clock
            NodeClock = swc_node:add(State#state.clock, {PeerId, 0}),
            MyPeersIds = ordsets:add_element(PeerId, State#state.peers_ids),
            WM = case ordsets:size(MyPeersIds) == (?REPLICATION_FACTOR-1)*2 of
                true -> % we have all the peers Ids, now broadcast that list to our peers
                    % lager:info("Peers Ids DONE!"),
                    CurrentPeers = dotted_db_utils:peers(State#state.index),
                    broadcast_my_peers_to_my_peers(CurrentPeers, State#state.id, MyPeersIds),
                    swc_watermark:add_peer(State#state.watermark, State#state.id, MyPeersIds);
                false -> % still collecting peer ids; watermark unchanged for now
                    lager:info("Getting Peers Ids ~p/~p",[ordsets:size(MyPeersIds), (?REPLICATION_FACTOR-1)*2]),
                    State#state.watermark
            end,
            {ok, State#state{clock=NodeClock, watermark=WM, peers_ids=MyPeersIds}};
        false ->
            lager:info("WRONG NODE ID! IxNd: ~p ", [IndexNode]),
            {ok, State}
    end;
%% Report Tick
handle_info(report_tick, State=#state{stats=false}) ->
    schedule_report(State#state.report_interval),
    {ok, State};
handle_info(report_tick, State=#state{stats=true}) ->
    {_, NextState} = report(State),
    schedule_report(State#state.report_interval),
    {ok, NextState};
%% Buffer Strip Tick
handle_info(strip_keys, State=#state{mode=recovering}) ->
    % schedule the strip for keys that still have causal context at the moment
    lager:warning("Not stripping keys because we are in recovery mode."),
    schedule_strip_keys(State#state.buffer_strip_interval),
    {ok, State};
handle_info(strip_keys, State=#state{mode=normal, non_stripped_keys=NSKeys}) ->
    NSKeys2 = read_strip_write(NSKeys, State),
    % Take this time to filter timed-out entries in the "currently syncing peers" set
    case get(current_sync) of
        undefined -> ok;
        Set ->
            Now = os:timestamp(),
            put(current_sync, ordsets:filter(
                fun({TS, _Peer}) ->
                    %% To get the diff in milliseconds
                    TimeElapsed = timer:now_diff(Now, TS) / 1000,
                    TimeElapsed < ?DEFAULT_TIMEOUT
                end, Set))
    end,
    % Optionally collect stats
    case State#state.stats of
        true ->
            % NOTE(review): the per-tick NSK stats computation below was
            % disabled (commented out); kept for reference.
            % {D1,W1} = NSKeys,
            % {D2,W2} = NSKeys2,
            % NumNSKeys = lists:sum([dict:size(Dict) || {_, Dict} <- W1]) + length(D1),
            % NumNSKeys2 = lists:sum([dict:size(Dict) || {_, Dict} <- W2]) + length(D2),
            % CCF = NumNSKeys * ?REPLICATION_FACTOR,
            % CCS = NumNSKeys2 * ?REPLICATION_FACTOR, % we don't really know, but assume the worst
            % EntryExampleSize = byte_size(term_to_binary({State#state.id, 123345})),
            % MetaF = EntryExampleSize * ?REPLICATION_FACTOR * NumNSKeys,
            % MetaS = EntryExampleSize * CCS,
            % dotted_db_stats:update_key_meta(State#state.index, NumNSKeys, MetaF, MetaS, CCF, CCS),
            ok;
        false -> ok
    end,
    % schedule the strip for keys that still have causal context at the moment
    schedule_strip_keys(State#state.buffer_strip_interval),
    {ok, State#state{non_stripped_keys=NSKeys2}};
handle_info(Info, State) ->
    lager:info("unhandled_info: ~p",[Info]),
    {ok, State}.
%%%===================================================================
%%%===================================================================
%%% HANDOFF
%%%===================================================================
%% @doc riak_core handoff: fold over all local key/values so riak_core can
%% transfer them to the target vnode.
handle_handoff_command(?FOLD_REQ{foldfun=FoldFun, acc0=Acc0}, _Sender, State) ->
    % we need to wrap the fold function because it expects 3 elements (K, V, Acc),
    % and our storage layer expects 2 elements ({K,V}, Acc).
    WrapperFun = fun({Key,Val}, Acc) -> FoldFun(Key, Val, Acc) end,
    Acc = dotted_db_storage:fold(State#state.storage, WrapperFun, Acc0),
    {reply, Acc, State};
%% Ignore AAE sync requests while handing off
handle_handoff_command(Cmd, _Sender, State) when
        element(1, Cmd) == sync_start orelse
        element(1, Cmd) == sync_missing orelse
        element(1, Cmd) == sync_repair ->
    {drop, State};
%% Apply replications/repairs locally, then forward them to the target vnode
handle_handoff_command(Cmd, Sender, State) when
        element(1, Cmd) == replicate orelse
        element(1, Cmd) == repair ->
    case handle_command(Cmd, Sender, State) of
        {noreply, State2} ->
            {forward, State2};
        {reply, {ok,_}, State2} ->
            {forward, State2}
    end;
%% For coordinating writes, do it locally and forward the replication
handle_handoff_command(Cmd={write, {ReqID, _, Key, _, _, _FSMTime}}, Sender, State) ->
    lager:info("HAND_WRITE: {~p, ~p} // Key: ~p",[State#state.id, node(), Key]),
    % do the local coordinating write
    {reply, {ok, ReqID, NewObject}, State2} = handle_command(Cmd, Sender, State),
    % send the ack to the PUT_FSM
    riak_core_vnode:reply(Sender, {ok, ReqID, NewObject}),
    % create a new request to forward the replication of this new object
    NewCommand = {replicate, {ReqID, Key, NewObject, ?DEFAULT_NO_REPLY}},
    {forward, NewCommand, State2};
%% Handle all other commands locally (only gets?)
handle_handoff_command(Cmd, Sender, State) ->
    lager:info("Handoff command ~p at ~p", [Cmd, State#state.id]),
    handle_command(Cmd, Sender, State).
%% @doc riak_core callback: invoked before handoff to TargetNode begins.
%% Persists the vnode meta-state (clock, dotkeymap, watermark, NSK) into the
%% key-value storage unless the clock is still empty, then allows the handoff.
handoff_starting(TargetNode, State) ->
    lager:info("HAND_START: {~p, ~p} to ~p",[State#state.index, node(), TargetNode]),
    %% save the vnode state, if not empty
    ok = case State#state.clock =:= swc_node:new() of
        true -> ok;
        false ->
            % the state is stored under a reserved key so handle_handoff_data
            % can recognize it on the receiving side
            Key = {?DEFAULT_BUCKET, {?VNODE_STATE_KEY, State#state.index}},
            NodeState = {State#state.clock, State#state.dotkeymap, State#state.watermark, State#state.non_stripped_keys},
            dotted_db_storage:put(State#state.storage, Key, NodeState)
    end,
    {true, State}.
%% @doc riak_core callback: nothing to undo when a handoff is cancelled.
handoff_cancelled(State) ->
    {ok, State}.
%% @doc riak_core callback: no cleanup needed once handoff completes.
handoff_finished(_TargetNode, State) ->
    {ok, State}.
%% @doc riak_core handoff: receive one handoff item. The item is either the
%% saved vnode meta-state (clock/dotkeymap/watermark/NSK) — possibly keyed by a
%% still-encoded binary key — or a regular key/object pair, which is replayed
%% through the local replicate command.
handle_handoff_data(Data, State) ->
    NodeKey = {?DEFAULT_BUCKET, {?VNODE_STATE_KEY, State#state.index}},
    % decode the data received
    NewState =
        case dotted_db_utils:decode_kv(Data) of
            {NodeKey, {NodeClock, DotKeyMap, Watermark, NSK}} ->
                % our own saved vnode state: merge clocks, adopt the rest
                NodeClock2 = swc_node:join(NodeClock, State#state.clock),
                State#state{clock = NodeClock2, dotkeymap = DotKeyMap, watermark = Watermark, non_stripped_keys = NSK};
            {OtherNodeKey, {NodeClock, DotKeyMap, Watermark, NSK}} = Data ->
                % vnode state shaped data under an unexpected key: accept it
                % only if the decoded binary key matches our own state key
                case is_binary(OtherNodeKey) andalso binary_to_term(OtherNodeKey) == NodeKey of
                    true ->
                        NodeClock2 = swc_node:join(NodeClock, State#state.clock),
                        State#state{clock = NodeClock2, dotkeymap = DotKeyMap, watermark = Watermark, non_stripped_keys = NSK};
                    false ->
                        % fix: keep the current state; previously the result of
                        % lager:warning/2 was bound as the new vnode state
                        lager:warning("HANDOFF: strang data read -> ~p!",[Data]),
                        State
                end;
            {Key, Obj} ->
                lager:info("HANDOFF: key -> ~p | node key -> ~p \n obj -> ~p!", [Key, NodeKey, Obj]),
                {noreply, State2} = handle_command({replicate, {dummy_req_id, Key, Obj, ?DEFAULT_NO_REPLY}}, undefined, State),
                State2
        end,
    {reply, ok, NewState}.
%% @doc riak_core callback: serialize one {Key, Value} pair for handoff transfer.
encode_handoff_item(Key, Val) ->
    dotted_db_utils:encode_kv({Key,Val}).
%% @doc riak_core callback: report whether this vnode's storage holds no data.
%% Logs only the non-empty case.
is_empty(State) ->
    case dotted_db_storage:is_empty(State#state.storage) of
        false ->
            lager:info("IS_EMPTY: not empty -> {~p, ~p}",[State#state.index, node()]),
            {false, State};
        true ->
            {true, State}
    end.
%% @doc riak_core callback: drop all data owned by this vnode (e.g. on
%% ownership transfer). Keeps whatever storage handle the drop returned,
%% logs the outcome and clears the key-tracking ETS tables.
delete(State) ->
    {Good, Storage1} =
        case dotted_db_storage:drop(State#state.storage) of
            {ok, Storage} ->
                {true, Storage};
            {error, Reason, Storage} ->
                lager:info("BAD_DROP: {~p, ~p} Reason: ~p",[State#state.index, node(), Reason]),
                {false, Storage}
        end,
    % only log success details when the drop worked and the clock is non-empty
    case State#state.clock =/= [] andalso Good of
        true ->
            lager:info("IxNd:~p // Clock:~p // DKM:~p // Watermark:~p",
                [{State#state.index, node()}, State#state.clock, State#state.dotkeymap, State#state.watermark] ),
            lager:info("GOOD_DROP: {~p, ~p}",[State#state.index, node()]);
        false -> ok
    end,
    true = delete_ets_all_keys(State),
    {ok, State#state{storage=Storage1}}.
%% @doc riak_core callback: a linked/monitored process exited; ignore it.
handle_exit(_Pid, _Reason, State) ->
    {noreply, State}.
%% @doc riak_core callback: persist state and close storage/DETS on shutdown.
terminate(_Reason, State) ->
    lager:debug("HAND_TERM: {~p, ~p}",[State#state.index, node()]),
    close_all(State),
    ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Private
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% READING
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% @doc Serve a read request for Key from local storage. A missing key is
%% answered with an empty object filled with the node clock, so that deletes
%% can "win" over old writes at the coordinator.
handle_read({read, ReqID, Key}, State) ->
    Response =
        case dotted_db_storage:get(State#state.storage, Key) of
            {error, not_found} ->
                % there is no key K in this node
                % create an empty "object" and fill its causality with the node clock
                % this is needed to ensure that deletes "win" over old writes at the coordinator
                {ok, dotted_db_object:fill(Key, State#state.clock, dotted_db_object:new())};
            {error, Error} ->
                % some unexpected error
                lager:error("Error reading a key from storage (command read): ~p", [Error]),
                % return the error
                {error, Error};
            Obj ->
                % get and fill the causal history of the local object
                {ok, dotted_db_object:fill(Key, State#state.clock, Obj)}
        end,
    % Optionally collect stats (currently a no-op in both branches)
    case State#state.stats of
        true -> ok;
        false -> ok
    end,
    IndexNode = {State#state.index, node()},
    {reply, {ok, ReqID, IndexNode, Response}, State}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% WRITING
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% @doc Coordinate a local write or delete: discard values obsoleted by the
%% request's causal context, generate a fresh dot, persist the merged object
%% and register the dot in the dotkeymap.
handle_write({write, {ReqID, Operation, Key, Value, Context, FSMTime}}, State) ->
    Now = undefined,% os:timestamp(),
    % get and fill the causal history of the local key
    DiskObject = guaranteed_get(Key, State),
    % discard obsolete values w.r.t the causal context
    DiscardObject = dotted_db_object:discard_values(Context, DiskObject),
    % generate a new dot for this write/delete and add it to the node clock
    {Dot, NodeClock} = swc_node:event(State#state.clock, State#state.id),
    % test if this is a delete; if not, add dot-value to the object container
    NewObject0 =
        case Operation of
            ?DELETE_OP -> % DELETE
                dotted_db_object:add_value({State#state.id, Dot}, ?DELETE_OP, DiscardObject);
            ?WRITE_OP -> % PUT
                dotted_db_object:add_value({State#state.id, Dot}, Value, DiscardObject)
        end,
    NewObject = dotted_db_object:set_fsm_time(FSMTime, NewObject0),
    % save the new k\v and remove unnecessary causal information
    _ = strip_save_batch([{Key, NewObject}], State#state{clock=NodeClock}, Now),
    % append the key to the tail of the dotkeymap
    DotKeyMap = swc_dotkeymap:add_key(State#state.dotkeymap, State#state.id, Key, Dot),
    % Optionally collect stats (currently a no-op in both branches)
    case State#state.stats of
        true -> ok;
        false -> ok
    end,
    % return the updated node state
    {reply, {ok, ReqID, NewObject}, State#state{clock = NodeClock, dotkeymap = DotKeyMap}}.
%% @doc Apply a replicated object sent by the coordinating vnode: merge it with
%% the local version, persist when it carries new information, and keep track
%% of keys whose causal context could not be fully stripped yet.
handle_replicate({replicate, {ReqID, Key, NewObject, NoReply}}, State) ->
    Now = undefined,% os:timestamp(),
    NodeClock = dotted_db_object:add_to_node_clock(State#state.clock, NewObject),
    % append the key to the dotkeymap
    DotKeyMap = swc_dotkeymap:add_objects(State#state.dotkeymap, [{Key, dotted_db_object:get_container(NewObject)}]),
    % get and fill the causal history of the local key
    DiskObject = guaranteed_get(Key, State),
    % synchronize both objects
    FinalObject = dotted_db_object:sync(NewObject, DiskObject),
    % test if the FinalObject has newer information
    NSK = case dotted_db_object:equal(FinalObject, DiskObject) of
        true ->
            lager:debug("Replicated object is ignored (already seen)"),
            State#state.non_stripped_keys;
        false ->
            % save the new object, while stripping the unnecessary causality
            case strip_save_batch([{Key, FinalObject}], State#state{clock=NodeClock, dotkeymap=DotKeyMap}, Now) of
                [] -> State#state.non_stripped_keys;
                _  -> add_key_to_NSK(Key, NewObject, State#state.non_stripped_keys)
            end
    end,
    % Optionally collect stats (currently a no-op in both branches)
    case State#state.stats of
        true -> ok;
        false -> ok
    end,
    NewState = State#state{clock = NodeClock, dotkeymap = DotKeyMap, non_stripped_keys = NSK},
    % return the updated node state
    case NoReply of
        true  -> {noreply, NewState};
        false -> {reply, {ok, ReqID}, NewState}
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% SYNCHRONIZING
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% @doc Start a sync round: pick a peer we are not currently syncing with
%% (tracked under the process-dictionary key current_sync) and reply with the
%% information the sync FSM needs (our id, clock and watermark peers).
handle_sync_start({sync_start, ReqID}, State=#state{mode=recovering}) ->
    {reply, {cancel, ReqID, recovering}, State};
handle_sync_start({sync_start, ReqID}, State=#state{mode=normal}) ->
    Now = os:timestamp(),
    MyPeersIndexNodes = dotted_db_utils:peers(State#state.index),
    Peer = case get(current_sync) of
        undefined ->
            % no sync in flight yet: take the first peer and start tracking it
            Node = hd(MyPeersIndexNodes),
            put(current_sync, ordsets:add_element({Now,Node}, ordsets:new())),
            Node;
        Set ->
            % NOTE(review): Set holds {Timestamp, Peer} pairs while
            % MyPeersIndexNodes holds bare peers, so this subtract removes
            % nothing as written -- confirm intended shape.
            case ordsets:subtract(MyPeersIndexNodes, Set) of
                [] -> [];
                Nodes ->
                    Node = dotted_db_utils:random_from_list(Nodes),
                    put(current_sync, ordsets:add_element({Now,Node}, Set)),
                    Node
            end
    end,
    case Peer of
        [] ->
            {reply, {cancel, ReqID, already_syncing}, State};
        _ ->
            % get my peers
            PeersIDs = swc_watermark:peers(State#state.watermark),
            % send a sync message to that node
            {reply, {ok, ReqID, State#state.id, Peer, State#state.clock, PeersIDs}, State}
    end.
%% @doc Answer a peer's sync request: compute which dots the asking node is
%% missing, resolve them to keys/objects, and reply asynchronously (from a
%% spawned process) with the stripped objects plus our clock and watermark.
handle_sync_missing({sync_missing, ReqID, _, _, _}, _Sender, State=#state{mode=recovering}) ->
    {reply, {cancel, ReqID, recovering}, State};
handle_sync_missing({sync_missing, ReqID, _RemoteID={RemoteIndex,_}, RemoteClock, RemotePeers}, Sender, State=#state{mode=normal}) ->
    % do the potentially heavy computation off the vnode process
    spawn(fun() ->
        % calculate what dots are present locally that the asking node does not have
        MissingDots = swc_node:missing_dots(State#state.clock, RemoteClock, RemotePeers),
        % get the keys corresponding to the missing dots,
        {MissingKeys0, _DotsNotFound} = swc_dotkeymap:get_keys(State#state.dotkeymap, MissingDots),
        % remove duplicate keys
        MissingKeys = sets:to_list(sets:from_list(MissingKeys0)),
        % filter the keys that the asking node does not replicate
        RelevantMissingKeys = filter_irrelevant_keys(MissingKeys, RemoteIndex),
        % get each key's respective Object and strip any unnecessary causal information to save network
        StrippedObjects = guaranteed_get_strip_list(RelevantMissingKeys, State),
        % DotsNotFound2 = [ {A,B} || {A,B} <- DotsNotFound, B =/= [] andalso A =:= State#state.id],
        % case DotsNotFound2 == [] of
        %     [] -> ok;
        %     _ -> lager:info("\n\n ~p to ~p:\n\tNotFound: ~p \n\tMiKeys: ~p \n\tRelKey: ~p \n\tKDM: ~p \n\tStrip: ~p \n\tBVV: ~p \n",
        %             [State#state.id, RemoteIndex, DotsNotFound2, MissingKeys, RelevantMissingKeys, State#state.dotkeymap, StrippedObjects, State#state.clock])
        % end,
        % Optionally collect stats
        % NOTE(review): `MissingKeys > 0` compares a list with an integer
        % (always true under Erlang term ordering); presumably
        % `length(MissingKeys) > 0` was intended -- confirm before changing.
        case ?STAT_SYNC andalso State#state.stats andalso MissingKeys > 0 andalso length(StrippedObjects) > 0 of
            true ->
                Ratio_Relevant_Keys = round(100*length(RelevantMissingKeys)/max(1,length(MissingKeys))),
                SRR = {histogram, sync_relevant_ratio, Ratio_Relevant_Keys},
                Ctx_Sent_Strip = [dotted_db_object:get_context(Obj) || {_Key, Obj} <- StrippedObjects],
                Sum_Ctx_Sent_Strip = lists:sum([length(VV) || VV <- Ctx_Sent_Strip]),
                Ratio_Sent_Strip = Sum_Ctx_Sent_Strip/max(1,length(StrippedObjects)),
                SSDS = {histogram, sync_sent_dcc_strip, Ratio_Sent_Strip},
                Size_Meta_Sent = byte_size(term_to_binary(Ctx_Sent_Strip)),
                SCS = {histogram, sync_context_size, Size_Meta_Sent},
                SMS = {histogram, sync_metadata_size, byte_size(term_to_binary(RemoteClock))},
                Payload_Sent_Strip = [{Key, dotted_db_object:get_values(Obj)} || {Key, Obj} <- StrippedObjects],
                Size_Payload_Sent = byte_size(term_to_binary(Payload_Sent_Strip)),
                SPS = {histogram, sync_payload_size, Size_Payload_Sent},
                dotted_db_stats:notify2([SRR, SSDS, SCS, SMS, SPS]),
                ok;
            false -> ok
        end,
        % send the final objects and the base (contiguous) dots of the node clock to the asking node
        riak_core_vnode:reply(
            Sender,
            {   ok,
                ReqID,
                State#state.id,
                State#state.clock,
                State#state.watermark,
                swc_watermark:peers(State#state.watermark),
                StrippedObjects
            })
    end),
    {noreply, State}.
%% @doc Apply the objects received at the end of a sync round: merge remote
%% and local versions, persist the real differences, update clock/watermark,
%% garbage collect the dotkeymap and release the peer for future syncs.
handle_sync_repair({sync_repair, {ReqID, _, _, _, _, NoReply}}, State=#state{mode=recovering}) ->
    lager:warning("repairing stuff"),
    case NoReply of
        true  -> {noreply, State};
        false -> {reply, {cancel, ReqID, recovering}, State}
    end;
handle_sync_repair({sync_repair, {ReqID, RemoteNode={RemoteIndex,_}, RemoteClock, RemoteWatermark, MissingObjects, NoReply}},
        State=#state{mode=normal, index=MyIndex, clock=LocalClock, dotkeymap=DotKeyMap, watermark=Watermark1}) ->
    Now = os:timestamp(),
    % add information about the remote clock to our clock, but only for the remote node entry
    LocalClock2 = sync_clocks(LocalClock, RemoteClock, RemoteIndex),
    % get the local objects corresponding to the received objects and fill the causal history for all of them
    FilledObjects =
        [{ Key, dotted_db_object:fill(Key, RemoteClock, Obj), guaranteed_get(Key, State) }
         || {Key,Obj} <- MissingObjects],
    % synchronize / merge the remote and local objects
    SyncedObjects = [{ Key, dotted_db_object:sync(Remote, Local), Local } || {Key, Remote, Local} <- FilledObjects],
    % filter the objects that are not missing after all
    RealMissingObjects = [{ Key, Synced } || {Key, Synced, Local} <- SyncedObjects,
                            (not dotted_db_object:equal_values(Synced,Local)) orelse
                            (dotted_db_object:get_values(Synced)==[] andalso
                             dotted_db_object:get_values(Local)==[])],
    % add each new dot to our node clock
    NodeClock = lists:foldl(fun ({_K,O}, Acc) -> dotted_db_object:add_to_node_clock(Acc, O) end, LocalClock2, RealMissingObjects),
    % add new keys to the Dot-Key Mapping
    DKM = swc_dotkeymap:add_objects(DotKeyMap,
            lists:map(fun ({Key,Obj}) -> {Key, dotted_db_object:get_container(Obj)} end, RealMissingObjects)),
    % save the synced objects and strip their causal history
    NonStrippedObjects = strip_save_batch(RealMissingObjects, State#state{clock=NodeClock}, Now),
    % schedule a later strip attempt for non-stripped synced keys
    NSK = add_keys_to_NSK(NonStrippedObjects, State#state.non_stripped_keys),
    % update my watermark
    Watermark3 = update_watermark_after_sync(Watermark1, RemoteWatermark, MyIndex, RemoteIndex, NodeClock, RemoteClock),
    % Garbage Collect keys from the dotkeymap and delete keys with no causal context
    update_jump_clock(RemoteIndex),
    State2 = gc_dotkeymap(State#state{clock=NodeClock, dotkeymap=DKM, non_stripped_keys=NSK, watermark=Watermark3}),
    % case MissingObjects == [] of
    %     true -> ok;
    %     false ->
    %         lager:info("Repairing SYNC ~p !\n\n",[MissingObjects]),
    %         lager:info("LId: ~p\nLC: ~p\n\n", [State2#state.id, State2#state.clock]),
    %         lager:info("WM: ~p\n\n", [State2#state.watermark]),
    %         lager:info("RI: ~p\nRC: ~p\n\n", [RemoteIndex, RemoteClock]),
    %         lager:info("RW: ~p\n\n", [RemoteWatermark])
    % end,
    % Mark this Peer as available for sync again
    case get(current_sync) of
        undefined -> ok;
        Set -> put(current_sync, ordsets:filter(fun({_TS, RN}) -> RN =/= RemoteNode end, Set))
    end,
    % Optionally collect stats
    case ?STAT_SYNC andalso State2#state.stats of
        true ->
            Repaired = length(RealMissingObjects),
            Sent = length(MissingObjects),
            Hit_Ratio = 100*Repaired/max(1, Sent),
            SL = case Sent =/= 0 of
                true ->
                    [{histogram, sync_hit_ratio, round(Hit_Ratio)},
                     {histogram, sync_sent_missing, Sent},
                     {histogram, sync_sent_truly_missing, Repaired}];
                false ->
                    [{histogram, sync_hit_ratio, 100}]
            end,
            dotted_db_stats:notify2([{histogram, sync_metadata_size, byte_size(term_to_binary(RemoteClock))} | SL]),
            ok;
        false ->
            ok
    end,
    % return the updated node state
    case NoReply of
        true  -> {noreply, State2};
        false -> {reply, {ok, ReqID}, State2}
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Restarting Vnode (and recovery of keys)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% On the restarting node
%% @doc (restarting node) Handle a restart request: retire the old vnode id,
%% generate a fresh one, reset clock/dotkeymap/watermark and storage, and
%% enter recovering mode while peers send our keys back.
handle_restart({restart, ReqID}, State=#state{mode=recovering}) ->
    {reply, {cancel, ReqID, recovering}, State};
handle_restart({restart, ReqID}, State=#state{mode=normal}) ->
    OldVnodeID = State#state.id,
    NewVnodeID = new_vnode_id(State#state.index),
    % keep track from now on on which peers this node did a sync, to be able to
    % jump the old vnode id in the node clock when we synced with every other peer
    add_removed_vnode_jump_clock(OldVnodeID),
    % changes all occurences of the old id for the new id,
    % while at the same time resetting the column and the line of the new id,
    % which means that this node does not know about anything at the moment,
    % nor other vnodes know about this new id updates (since there are none at the moment).
    NewWatermark0 = swc_watermark:retire_peer(State#state.watermark, OldVnodeID, NewVnodeID),
    % reset the entire watermark
    NewWatermark = swc_watermark:reset_counters(NewWatermark0),
    CurrentPeers = dotted_db_utils:peers(State#state.index),
    lager:info("RESTART:\nOLD: ~p\nNEW: ~p\nPEERS: ~p",[OldVnodeID, NewVnodeID, CurrentPeers]),
    % drop all local data and reopen a fresh storage backend
    true = delete_ets_all_keys(State),
    NewAtomID = create_ets_all_keys(NewVnodeID),
    {ok, Storage1} = dotted_db_storage:drop(State#state.storage),
    ok = dotted_db_storage:close(Storage1),
    % open the storage backend for the key-values of this vnode
    {_, NewStorage} = open_storage(State#state.index),
    ok = save_vnode_state(State#state.dets, {NewVnodeID, swc_node:new(), swc_dotkeymap:new(), NewWatermark, []}),
    % store the number of full-syncs
    put(nr_full_syncs, 0),
    {reply, {ok, ReqID, {ReqID, State#state.index, OldVnodeID, NewVnodeID}, CurrentPeers},
        State#state{
            id                = NewVnodeID,
            atom_id           = NewAtomID,
            clock             = swc_node:new(),
            dotkeymap         = swc_dotkeymap:new(),
            watermark         = NewWatermark,
            non_stripped_keys = {[],[]},
            recover_keys      = [],
            storage           = NewStorage,
            syncs             = initialize_syncs(State#state.index),
            updates_mem       = 0,
            mode              = recovering}}.
%% On the good nodes
%% @doc (good node) A peer restarted with a new vnode id: swap the old id for
%% the new one in our clock/watermark/peer set, and start shipping back the
%% keys that the restarting vnode replicates (first batch now, rest later).
handle_inform_peers_restart({inform_peers_restart, {ReqID, RestartingVnodeIndex, OldVnodeID, NewVnodeID}}, State) ->
    % keep track from now on on which peers this node did a sync, to be able to
    % jump the old vnode id in the node clock when we synced with every other peer
    add_removed_vnode_jump_clock(OldVnodeID),
    % replace the old peer entry in the watermarks of this vnode's peers also
    CurrentPeers = dotted_db_utils:peers(State#state.index),
    replace_peer(CurrentPeers, OldVnodeID, NewVnodeID),
    % update the "mypeers" set
    MyPeersIds = ordsets:add_element(NewVnodeID, ordsets:del_element(OldVnodeID, State#state.peers_ids)),
    % add the new node id to the node clock
    NewClock = swc_node:add(State#state.clock, {NewVnodeID, 0}),
    % replace the old entry for the new entry in the watermark
    NewWatermark = swc_watermark:retire_peer(State#state.watermark, OldVnodeID, NewVnodeID),
    % add the new node id to the node clock
    {AllKeys,_} = ets_get_all_keys(State),
    % filter irrelevant keys from the perspective of the restarting vnode
    RelevantKeys = filter_irrelevant_keys(AllKeys, RestartingVnodeIndex),
    {Now, Later} = lists:split(min(?MAX_KEYS_SENT_RECOVERING,length(RelevantKeys)), RelevantKeys),
    lager:info("Restart transfer => Now: ~p Later: ~p",[length(Now), length(Later)]),
    % get each key's respective Object and strip any unnecessary causal information to save network bandwidth
    StrippedObjects = guaranteed_get_strip_list(Now, State#state{clock=NewClock}),
    % save the rest of the keys for later (if there's any)
    {LastBatch, RecoverKeys} = case Later of
        [] -> {true, State#state.recover_keys};
        _  -> {false, [{NewVnodeID, Later} | State#state.recover_keys]}
    end,
    {reply, { ok, stage1, ReqID, {
        ReqID,
        {State#state.index, node()},
        OldVnodeID,
        NewClock,
        StrippedObjects,
        NewWatermark,
        LastBatch % is this the last batch?
    }}, State#state{clock=NewClock, peers_ids=MyPeersIds, watermark=NewWatermark, recover_keys=RecoverKeys}}.
%% On the restarting node
%% @doc (restarting node) Receive a batch of recovered keys from a good peer.
%% Intermediate batches (LastBatch=false) just merge and store the objects;
%% the final batch also merges clocks/watermark and, after enough full syncs,
%% switches the vnode back to normal mode.
handle_recover_keys({recover_keys, {ReqID, RemoteVnode, _OldVnodeID={_,_}, RemoteClock, Objects, _RemoteWatermark, _LastBatch=false}}, State) ->
    % save the objects and return the ones that were not totally filtered
    {NodeClock, DKM, NonStrippedObjects} = fill_strip_save_kvs(Objects, RemoteClock, State#state.clock, State, os:timestamp()),
    % add new keys to the Dot-Key Mapping
    % DKM = swc_dotkeymap:add_objects(State#state.dotkeymap,
    %         lists:map(fun ({Key,Obj}) -> {Key, dotted_db_object:get_container(Obj)} end, Objects)),
    % schedule a later strip attempt for non-stripped synced keys
    NSK = add_keys_to_NSK(NonStrippedObjects, State#state.non_stripped_keys),
    {reply, {ok, stage2, ReqID, RemoteVnode}, State#state{clock=NodeClock, dotkeymap=DKM, non_stripped_keys=NSK}};
%% On the restarting node
handle_recover_keys({recover_keys, {ReqID, RemoteVnode={RemoteIndex,_}, _OldID, RemoteClock, Objects, RemoteWatermark, _LastBatch=true}}, State) ->
    NodeClock0 = sync_clocks(State#state.clock, RemoteClock, RemoteIndex),
    % save the objects and return the ones that were not totally filtered
    {NodeClock, DKM, NonStrippedObjects} = fill_strip_save_kvs(Objects, RemoteClock, State#state.clock, State#state{clock=NodeClock0}, os:timestamp()),
    % schedule a later strip attempt for non-stripped synced keys
    NSK = add_keys_to_NSK(NonStrippedObjects, State#state.non_stripped_keys),
    % update my watermark
    Watermark = update_watermark_after_sync(State#state.watermark, RemoteWatermark, State#state.index, RemoteIndex, NodeClock, RemoteClock),
    {Mode, NodeClock3} = case get(nr_full_syncs) of
        undefined ->
            {normal, NodeClock};
        N when N >= (?REPLICATION_FACTOR-1)*2-1 ->
            % recovered from every peer: leave recovering mode
            erase(nr_full_syncs),
            % jump the base counter of all old ids in the node clock, to make sure we "win"
            % against all keys potentially not stripped yet because of that old id
            NodeClock2 = jump_node_clock_by_index(NodeClock, State#state.id, State#state.index, 20000),
            % NodeClock2 = swc_node:store_entry(OldVnodeID, {Base+10000,0}, NodeClock),
            {normal, NodeClock2};
        N when N < (?REPLICATION_FACTOR-1)*2-1 ->
            put(nr_full_syncs, N+1),
            {recovering, NodeClock}
    end,
    {reply, {ok, stage4, ReqID, RemoteVnode}, State#state{clock=NodeClock3, dotkeymap=DKM, non_stripped_keys=NSK, watermark=Watermark, mode=Mode}}.
%% On the good nodes
%% @doc (good node) Send the next batch of keys previously saved for the
%% restarting vnode (stage 3 of the recovery protocol).
handle_inform_peers_restart2({inform_peers_restart2, {ReqID, NewVnodeID, OldVnodeID}}, State) ->
    {LastBatch1, Objects, RecoverKeys1} =
        case proplists:get_value(NewVnodeID, State#state.recover_keys) of
            undefined ->
                % nothing pending for this vnode id
                {true, [], State#state.recover_keys};
            RelevantKeys ->
                RK = proplists:delete(NewVnodeID, State#state.recover_keys),
                {Now, Later} = lists:split(min(?MAX_KEYS_SENT_RECOVERING,length(RelevantKeys)), RelevantKeys),
                % get each key's respective Object and strip any unnecessary causal information to save network bandwidth
                StrippedObjects = guaranteed_get_strip_list(Now, State),
                % save the rest of the keys for later (if there's any)
                {LastBatch, RecoverKeys} = case Later of
                    [] -> {true, RK};
                    _  -> {false, [{NewVnodeID, Later} | RK]}
                end,
                {LastBatch, StrippedObjects, RecoverKeys}
        end,
    {reply, { ok, stage3, ReqID, {
        ReqID,
        {State#state.index, node()},
        OldVnodeID,
        State#state.clock,
        Objects,
        State#state.watermark,
        LastBatch1 % is this the last batch?
    }}, State#state{recover_keys=RecoverKeys1}}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Aux functions
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% @doc Returns the Object associated with the Key.
% By default, we want to return a filled causality, unless we get a storage error.
% If the key does not exists or for some reason, the storage returns an
% error, return an empty Object (also filled).
%% @doc Fetch Key from storage, always returning a filled object (see the
%% comment block above); storage errors are treated like not_found but logged.
guaranteed_get(Key, State) ->
    case dotted_db_storage:get(State#state.storage, Key) of
        {error, not_found} ->
            % there is no key K in this node
            Obj = dotted_db_object:new(),
            Obj2 = dotted_db_object:set_fsm_time(ets_get_fsm_time(State#state.atom_id, Key), Obj),
            dotted_db_object:fill(Key, State#state.clock, Obj2);
        {error, Error} ->
            % some unexpected error
            lager:error("Error reading a key from storage (guaranteed GET): ~p", [Error]),
            % assume that the key was lost, i.e. it's equal to not_found
            dotted_db_object:new();
        Obj ->
            % get and fill the causal history of the local object
            dotted_db_object:fill(Key, State#state.clock, Obj)
    end.
%% @doc Fetch and strip every key in Keys from local storage via
%% guaranteed_get_strip/2, preserving the input order.
guaranteed_get_strip_list(Keys, State) ->
    [guaranteed_get_strip(Key, State) || Key <- Keys].
%% @doc Fetch Key and strip unnecessary causal information from its object.
%% Missing keys and storage errors both yield a fresh (empty) object carrying
%% the key's last FSM time from the ETS table.
guaranteed_get_strip(Key, State) ->
    case dotted_db_storage:get(State#state.storage, Key) of
        {error, not_found} ->
            % there is no key K in this node
            {Key, dotted_db_object:set_fsm_time(ets_get_fsm_time(State#state.atom_id, Key),
                                                dotted_db_object:new())};
        {error, Error} ->
            % some unexpected error
            lager:error("Error reading a key from storage (guaranteed GET) (2): ~p", [Error]),
            % assume that the key was lost, i.e. it's equal to not_found
            {Key, dotted_db_object:set_fsm_time(ets_get_fsm_time(State#state.atom_id, Key),
                                                dotted_db_object:new())};
        Obj ->
            % strip the causal history of the local object against the node clock
            {Key, dotted_db_object:strip(State#state.clock, Obj)}
    end.
%% @doc Keep only the keys whose replica node indices include Index, i.e. the
%% keys actually replicated by the vnode at that index.
filter_irrelevant_keys(Keys, Index) ->
    [Key || Key <- Keys,
            lists:member(Index, dotted_db_utils:replica_nodes_indices(Key))].
% @doc Saves the relevant vnode state to the storage.
%% @doc Persist the vnode meta-state tuple {Id, Clock, DotKeyMap, Watermark,
%% NSK} to the DETS table, synced to disk before returning.
save_vnode_state(Dets, State={Id={Index,_},_,_,_,_}) ->
    Key = {?VNODE_STATE_KEY, Index},
    ok = dets:insert(Dets, {Key, State}),
    ok = dets:sync(Dets),
    lager:debug("Saved state for vnode ~p.",[Id]),
    ok.
% @doc Reads the relevant vnode state from the storage.
%% @doc Open (or create) the DETS file for this vnode's index and look up a
%% previously saved state. Returns {Dets, State}, {Dets, not_found} or
%% {Dets, error, Reason}.
read_vnode_state(Index) ->
    Folder = "data/vnode_state/",
    ok = filelib:ensure_dir(Folder),
    FileName = filename:join(Folder, integer_to_list(Index)),
    % the table name is derived from the ring index, so it is unique per vnode
    Ref = list_to_atom(integer_to_list(Index)),
    {ok, Dets} = dets:open_file(Ref,[{type, set},
                                     {file, FileName},
                                     {auto_save, infinity},
                                     {min_no_slots, 1}]),
    Key = {?VNODE_STATE_KEY, Index},
    case dets:lookup(Dets, Key) of
        [] -> % there isn't a past vnode state stored
            {Dets, not_found};
        {error, Error} -> % some unexpected error
            {Dets, error, Error};
        [{Key, State={{Index,_},_,_,_,_}}] ->
            {Dets, State}
    end.
% @doc Initializes the "watermark" matrix with 0's for peers of this vnode.
%% @doc Kick off watermark construction by asking every peer vnode for its
%% current vnode id (answers arrive via handle_info get_vnode_id messages).
initialize_watermark(_NodeId={Index,_}) ->
    lager:debug("Initialize watermark @ IndexNode: ~p",[{Index,node()}]),
    % get the IndexNodes of this node's peers, i.e., all nodes that replicate any subset of local keys
    IndexNodes = [ IndexNode || IndexNode <- dotted_db_utils:peers(Index)],
    % for replication factor N = 3, the number of peers should be 4 (2 vnodes before and 2 after)
    (?REPLICATION_FACTOR-1)*2 = length(IndexNodes),
    % ask each vnode for their current vnode ID
    get_vnode_id(IndexNodes),
    ok.
% @doc Initializes the "sync" stats for peers of this vnode.
initialize_syncs(_Index) ->
    % placeholder entry; the real per-peer computation is disabled (see the
    % commented-out block below)
    [{dummy_node_id,0,0,0,0}].
% % get this node's peers, i.e., all nodes that replicates any subset of local keys.
% PeerIDs = [ ID || {ID, _Node} <- dotted_db_utils:peers(Index)],
% for replication factor N = 3 , the numbers of peers should be 4 ( 2 vnodes before and 2 after ) .
% (?REPLICATION_FACTOR-1)*2 = length(PeerIDs),
% Now = os:timestamp(),
% Syncs = lists:foldl(fun (ID, List) -> [{ID,0,0,Now,Now} | List] end , [], PeerIDs),
% (?REPLICATION_FACTOR-1)*2 = length(Syncs),
% Syncs.
% @doc Returns the Storage for this vnode.
%% @doc Open the configured key-value backend (ets, leveldb or bitcask) for
%% this vnode, using the ring index as the database name.
open_storage(Index) ->
    % get the preferred backend in the configuration file, defaulting to ETS if
    % there is no preference.
    {Backend, Options} = case application:get_env(dotted_db, storage_backend, ets) of
        leveldb -> {{backend, leveldb}, []};
        ets     -> {{backend, ets}, []};
        bitcask -> {{backend, bitcask}, [{db_opts,[
                        read_write,
                        {sync_strategy, application:get_env(dotted_db, bitcask_io_sync, none)},
                        {io_mode, application:get_env(dotted_db, bitcask_io_mode, erlang)},
                        {merge_window, application:get_env(dotted_db, bitcask_merge_window, never)}]}]}
    end,
    lager:debug("Using ~p for vnode ~p.",[Backend,Index]),
    % give the name to the backend for this vnode using its position in the ring.
    DBName = filename:join("data/objects/", integer_to_list(Index)),
    {ok, Storage} = dotted_db_storage:open(DBName, Backend, Options),
    {Backend, Storage}.
% @doc Close the key-value backend, save the vnode state and close the DETS file.
close_all(undefined) -> ok;
close_all(State=#state{ id                = Id,
                        storage           = Storage,
                        clock             = NodeClock,
                        watermark         = Watermark,
                        dotkeymap         = DotKeyMap,
                        non_stripped_keys = NSK,
                        dets              = Dets } ) ->
    % a failed storage close is logged but does not abort the state save
    case dotted_db_storage:close(Storage) of
        ok -> ok;
        {error, Reason} ->
            lager:warning("Error on closing storage: ~p",[Reason])
    end,
    ok = save_vnode_state(Dets, {Id, NodeClock, DotKeyMap, Watermark, NSK}),
    true = delete_ets_all_keys(State),
    ok = dets:close(Dets).
%% @doc Prune the dotkeymap of keys already known by every replica (per the
%% watermark) and drop watermark entries of retired peers; if the watermark is
%% not complete yet, just log its progress and (re)initialize it when empty.
gc_dotkeymap(State=#state{dotkeymap = DotKeyMap, watermark = Watermark, non_stripped_keys = NSK}) ->
    case is_watermark_up_to_date(Watermark) of
        true ->
            % remove the keys from the dotkeymap that have a dot (corresponding to their position) smaller than the
            % minimum dot, i.e., this update is known by all nodes that replicate it and therefore can be removed
            % from the dotkeymap;
            {DotKeyMap2, RemovedKeys} = swc_dotkeymap:prune(DotKeyMap, Watermark),
            % remove entries in watermark from retired peers, that aren't needed anymore
            % (i.e. there aren't keys coordinated by those retired nodes in the DotKeyMap)
            OldPeersStillNotSynced = get_old_peers_still_not_synced(),
            Watermark2 = swc_watermark:prune_retired_peers(Watermark, DotKeyMap2, OldPeersStillNotSynced),
            % add the non stripped keys to the node state for later strip attempt
            NSK2 = add_keys_from_dotkeymap_to_NSK(RemovedKeys, NSK),
            State#state{dotkeymap = DotKeyMap2, non_stripped_keys=NSK2, watermark=Watermark2};
        false ->
            {WM,_} = State#state.watermark,
            lager:info("Watermark not up to date: ~p entries, mode: ~p",[orddict:size(WM), State#state.mode]),
            [case orddict:size(V) =:= (?REPLICATION_FACTOR*2)-1 of
                true  -> lager:info("\t ~p for ~p \n", [orddict:size(V), K]);
                false -> lager:info("\t ~p for ~p \n\t\t ~p\n", [orddict:size(V), K, V])
            end || {K,V} <- WM],
            % an empty dotkeymap suggests the watermark was never built; retry
            swc_dotkeymap:empty(DotKeyMap) andalso initialize_watermark(State#state.id),
            State
    end.
-spec schedule_strip_keys(non_neg_integer()) -> ok.
%% @doc Send ourselves a strip_keys message after Interval milliseconds.
schedule_strip_keys(Interval) ->
    erlang:send_after(Interval, self(), strip_keys),
    ok.
-spec schedule_report(non_neg_integer()) -> ok.
%% @doc Send ourselves a report_tick message after Interval milliseconds.
schedule_report(Interval) ->
    % Perform tick every X seconds
    erlang:send_after(Interval, self(), report_tick),
    ok.
-spec report(state()) -> {any(), state()}.
%% @doc Emit stats and occasionally persist the vnode state: the in-memory
%% update counter is incremented each tick and, once it exceeds the limit, the
%% state is saved to DETS and the counter reset.
report(State=#state{ id                = Id,
                     clock             = NodeClock,
                     watermark         = Watermark,
                     dotkeymap         = DotKeyMap,
                     non_stripped_keys = NSK,
                     dets              = Dets,
                     updates_mem       = UpMem } ) ->
    report_stats(State),
    % increment the updates since saving
    UpdatesMemory = case UpMem =< ?UPDATE_LIMITE*50 of
        true -> % it's still early to save to storage
            UpMem + 1;
        false ->
            % it's time to persist vnode state
            save_vnode_state(Dets, {Id, NodeClock, DotKeyMap, Watermark, NSK}),
            % restart the counter
            0
    end,
    {ok, State#state{updates_mem=UpdatesMemory}}.
%% @doc Push the enabled stat groups (state length, state size, deletes) to
%% dotted_db_stats, but only once the vnode has a non-trivial clock, dotkeymap
%% and watermark.
%% NOTE(review): only a stats=true clause exists; callers appear to invoke this
%% only when stats are enabled -- confirm before relying on it elsewhere.
report_stats(State=#state{stats=true}) ->
    case (not swc_dotkeymap:empty(State#state.dotkeymap)) andalso
          State#state.clock =/= swc_node:new() andalso
          State#state.watermark =/= swc_watermark:new() of
        true ->
            SSL = case ?STAT_STATE_LENGTH of
                false -> [];
                true ->
                    KLLEN = {histogram, kl_len, swc_dotkeymap:size(State#state.dotkeymap)},
                    MissingDots = [ miss_dots(Entry) || {_,Entry} <- State#state.clock ],
                    BVVMD = {histogram, bvv_missing_dots, average(MissingDots)},
                    {Del,Wrt} = State#state.non_stripped_keys,
                    NumNSKeys = lists:sum([dict:size(Map) || {_, Map} <- Wrt]) + length(Del),
                    NSKN = {histogram, nsk_number, NumNSKeys},
                    [KLLEN, BVVMD, NSKN]
            end,
            SSS = case ?STAT_STATE_SIZE of
                false -> [];
                true ->
                    KLSIZE = {histogram, kl_size, size(term_to_binary(State#state.dotkeymap))},
                    BVVSIZE = {histogram, bvv_size, size(term_to_binary(State#state.clock))},
                    NSKSIZE = {histogram, nsk_size, size(term_to_binary(State#state.non_stripped_keys))},
                    [KLSIZE, BVVSIZE, NSKSIZE]
            end,
            SD = case ?STAT_DELETES of
                false -> [];
                true ->
                    ADelKeys = length(ets_get_actual_deleted(State#state.atom_id)),
                    IDelKeys = length(ets_get_issued_deleted(State#state.atom_id)),
                    DI = {histogram, deletes_incomplete, IDelKeys},
                    DC = {histogram, deletes_completed, ADelKeys},
                    IWKeys = length(ets_get_issued_written(State#state.atom_id)),
                    FWKeys = length(ets_get_final_written(State#state.atom_id)),
                    WI = {histogram, write_incomplete, IWKeys},
                    WC = {histogram, write_completed, FWKeys},
                    [DI,DC,WI,WC]
            end,
            dotted_db_stats:notify2(SD ++ SSS ++ SSL),
            ok;
        false ->
            ok
    end,
    {ok, State}.
%% @doc Count the missing dots in one node-clock entry {Base, Bitmap}:
%% the gap between the highest disjoint dot and what the base plus the
%% individually-present dots account for. Zero when the bitmap is empty.
miss_dots({Base, Bitmap}) ->
    case values_aux(Base, Bitmap, []) of
        []   -> 0;
        Dots -> lists:max(Dots) - Base - length(Dots)
    end.
%% @doc Decode a dot bitmap into the list of set positions, counted upward
%% from Base+1; positions accumulate in reverse (highest first).
values_aux(_Base, 0, Acc) ->
    Acc;
values_aux(Base, Bitmap, Acc) ->
    Next = Base + 1,
    Rest = Bitmap bsr 1,
    case Bitmap rem 2 of
        1 -> values_aux(Next, Rest, [Next | Acc]);
        0 -> values_aux(Next, Rest, Acc)
    end.
%% @doc Arithmetic mean of a list of numbers; returns 0.0 for the empty list
%% (the divisor is clamped to at least 1 to avoid a badarith).
average(Values) ->
    Count = length(Values),
    lists:sum(Values) / max(1, Count).
%% @doc Strip causality from a batch of objects and persist them; ETS stats
%% bookkeeping is enabled by default.
strip_save_batch(O,S,Now) -> strip_save_batch(O,S,Now,true).
%% Seed the accumulator: {non-stripped keys, storage write batch}.
strip_save_batch(Objects, State, Now, ETS) ->
    strip_save_batch(Objects, State, Now, {[],[]}, ETS).
%% @doc Strip unnecessary causality from each object (w.r.t. the node clock),
%% classify the result, persist the whole batch in one storage write, and
%% return the keys that could not be fully stripped (still carry causal
%% context) for a later retry as Non-Stripped-Keys.
%% (Fix: the numbered classification comments below had lost their `%'
%% markers, which made this clause a syntax error.)
strip_save_batch([], State, _Now, {NSK, StrippedObjects}, _ETS) ->
    ok = dotted_db_storage:write_batch(State#state.storage, StrippedObjects),
    NSK;
strip_save_batch([{Key, Obj} | Objects], S=#state{atom_id=ID}, Now, {NSK, StrippedObjects}, ETS) ->
    % removed unnecessary causality from the Object, based on the current node clock
    StrippedObj = dotted_db_object:strip(S#state.clock, Obj),
    {Values, Context} = dotted_db_object:get_container(StrippedObj),
    Values2 = [{D,V} || {D,V} <- Values, V =/= ?DELETE_OP],
    StrippedObj2 = dotted_db_object:set_container({Values2, Context}, StrippedObj),
    % the resulting object is one of the following options:
    %   0 * it has no value but has causal history -> it's a delete, but still must be persisted
    %   1 * it has no value and no causal history -> can be deleted
    %   2 * has values, with causal context -> it's a normal write and we should persist
    %   3 * has values, but no causal context -> it's the final form for this write
    Acc = case {Values2, Context} of
        {[],[]} ->
            ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 0),
            ETS andalso ets_set_status(ID, Key, ?ETS_DELETE_STRIP),
            ETS andalso ets_set_strip_time(ID, Key, Now),
            ETS andalso notify_strip_delete_latency(Now, Now),
            ETS andalso ets_set_dots(ID, Key, []),
            {NSK, [{delete, Key}|StrippedObjects]};
        {_ ,[]} ->
            ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 1),
            ETS andalso ets_set_status(ID, Key, ?ETS_WRITE_STRIP),
            ETS andalso ets_set_strip_time(ID, Key, Now),
            ETS andalso notify_strip_write_latency(Now, Now),
            ETS andalso ets_set_dots(ID, Key, get_value_dots_for_ets(StrippedObj)),
            {NSK, [{put, Key, StrippedObj2}|StrippedObjects]};
        {[],_CC} ->
            ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, length(Context)),
            ETS andalso ets_set_status(ID, Key, ?ETS_DELETE_NO_STRIP),
            ETS andalso ets_set_dots(ID, Key, get_value_dots_for_ets(StrippedObj)),
            {[{Key, StrippedObj2}|NSK], [{put, Key, StrippedObj2}|StrippedObjects]};
        {_ ,_CC} ->
            ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, length(Context)+1),
            ETS andalso ets_set_status(ID, Key, ?ETS_WRITE_NO_STRIP),
            ETS andalso ets_set_dots(ID, Key, get_value_dots_for_ets(StrippedObj)),
            {[{Key, StrippedObj2}|NSK], [{put, Key, StrippedObj2}|StrippedObjects]}
    end,
    ETS andalso notify_write_latency(dotted_db_object:get_fsm_time(StrippedObj), Now),
    ETS andalso ets_set_write_time(ID, Key, Now),
    ETS andalso ets_set_fsm_time(ID, Key, dotted_db_object:get_fsm_time(StrippedObj)),
    strip_save_batch(Objects, S, Now, Acc, ETS).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%       Try to remove elements from Non-Stripped Keys
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% @doc Used periodically to see which non-stripped keys can be stripped.
-spec read_strip_write({[{key(),vv()}], [{dot(), dict:dict()}]}, state()) -> {[{key(),vv()}], [{dot(), dict:dict()}]}.
read_strip_write({Deletes, Writes}, State) ->
    Now = os:timestamp(),
    % deletes whose remaining context is covered by the node clock become
    % candidates for actual removal; the rest stay pending
    {Stripped, NotStripped} = split_deletes(Deletes, State, {[],[]}),
    Deletes2 = strip_maybe_save_delete_batch(Stripped, State, Now) ++ NotStripped,
    % writes are tracked per origin node; try to strip each dot's entry
    Writes2 = compute_writes_NSK(Writes, State, [], [], Now),
    {Deletes2, Writes2}.
%% Take care of NSK deletes
%% @doc Partition NSK delete entries: keys whose causal context is already
%% covered by the node clock are re-read from storage (candidates for real
%% stripping); the rest keep their remaining version vector for later.
split_deletes([], _State, Acc) -> Acc;
split_deletes([{Key, Ctx} | Deletes], State, {Stripped, NotStripped}) ->
    case strip_context(Ctx,State#state.clock) of
        [] ->
            case read_one_key(Key, State) of
                0 ->
                    % key already absent from storage: mark it fully stripped
                    ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_STRIP),
                    ets_set_strip_time(State#state.atom_id, Key, os:timestamp()),
                    split_deletes(Deletes, State, {Stripped, NotStripped});
                Obj ->
                    split_deletes(Deletes, State, {[{Key, Obj}|Stripped], NotStripped})
            end;
        VV ->
            % still has context the node clock does not cover; keep pending
            split_deletes(Deletes, State, {Stripped, [{Key, VV}|NotStripped]})
    end.
%% @doc Drop from the version-vector context every entry already subsumed by
%% the node clock's contiguous base; whatever survives is causal information
%% the local node cannot prove it has seen yet.
-spec strip_context(vv(), bvv()) -> vv().
strip_context(Context, NodeClock) ->
    KeepEntry =
        fun (PeerId, Counter) ->
                {Base, _Dots} = swc_node:get(PeerId, NodeClock),
                Counter > Base
        end,
    swc_vv:filter(KeepEntry, Context).
%% @doc Strip+persist NSK delete candidates; ETS bookkeeping on by default.
strip_maybe_save_delete_batch(O,S,Now) -> strip_maybe_save_delete_batch(O,S,Now,true).
%% Seed the accumulator: {still non-stripped keys, storage write batch}.
strip_maybe_save_delete_batch(Objects, State, Now, ETS) ->
    strip_maybe_save_delete_batch(Objects, State, Now, {[],[]}, ETS).
%% @doc Like strip_save_batch/5 but for NSK delete candidates: fully stripped
%% results are persisted (as deletes or final writes); anything that still
%% carries causal context is kept in NSK and NOT persisted here.
%% (Fix: the numbered classification comments below had lost their `%'
%% markers, which made this clause a syntax error.)
strip_maybe_save_delete_batch([], State, _Now, {NSK, StrippedObjects}, _ETS) ->
    ok = dotted_db_storage:write_batch(State#state.storage, StrippedObjects),
    NSK;
strip_maybe_save_delete_batch([{Key={_,_}, Obj} | Objects], State, Now, {NSK, StrippedObjects}, ETS) ->
    % removed unnecessary causality from the object, based on the current node clock
    StrippedObj = dotted_db_object:strip(State#state.clock, Obj),
    {Values, Context} = dotted_db_object:get_container(StrippedObj),
    Values2 = [{D,V} || {D,V} <- Values, V =/= ?DELETE_OP],
    StrippedObj2 = dotted_db_object:set_container({Values2, Context}, StrippedObj),
    % the resulting object is one of the following options:
    %   0 * it has no value but has causal history -> it's a delete, but still must be persisted
    %   1 * it has no value and no causal history -> can be deleted
    %   2 * has values, with causal context -> it's a normal write and we should persist
    %   3 * has values, but no causal context -> it's the final form for this write
    Acc = case {Values2, Context} of
        {[],[]} ->
            ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 0),
            ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_STRIP),
            ETS andalso ets_set_strip_time(State#state.atom_id, Key, Now),
            ETS andalso notify_strip_delete_latency(ets_get_write_time(State#state.atom_id, Key), Now),
            ETS andalso ets_set_fsm_time(State#state.atom_id, Key, dotted_db_object:get_fsm_time(StrippedObj)),
            ETS andalso ets_set_dots(State#state.atom_id, Key, []),
            {NSK, [{delete, Key}|StrippedObjects]};
        {_ ,[]} ->
            ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 1),
            ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_WRITE_STRIP),
            ETS andalso ets_set_strip_time(State#state.atom_id, Key, Now),
            ETS andalso notify_strip_write_latency(ets_get_write_time(State#state.atom_id, Key), Now),
            ETS andalso ets_set_fsm_time(State#state.atom_id, Key, dotted_db_object:get_fsm_time(StrippedObj)),
            ETS andalso ets_set_dots(State#state.atom_id, Key, get_value_dots_for_ets(StrippedObj)),
            {NSK, [{put, Key, StrippedObj2}|StrippedObjects]};
        {[],_CC} ->
            {[{Key, Context}|NSK], StrippedObjects};
        {_ ,_CC} ->
            {[{Key, Context}|NSK], StrippedObjects}
    end,
    strip_maybe_save_delete_batch(Objects, State, Now, Acc, ETS).
%% Take care of NSK writes
%% @doc Fold over the per-node NSK write dicts: dictNSK/5 decides which dots
%% can be stripped (collected into a storage batch) and which must remain;
%% emptied dicts are dropped from NSK, the batch is persisted at the end.
compute_writes_NSK([], State, Batch, NSK, _Now) ->
    ok = dotted_db_storage:write_batch(State#state.storage, Batch),
    NSK;
compute_writes_NSK([{NodeID, Dict} |Tail], State, Batch, NSK, Now) ->
    {DelDots, SaveBatch} = dict:fold(fun(Dot, Key, Acc) -> dictNSK(Dot, Key, Acc, State, Now) end, {[],[]}, Dict),
    NewDict = remove_stripped_writes_NSK(DelDots, Dict),
    case dict:size(NewDict) of
        0 -> compute_writes_NSK(Tail, State, SaveBatch++Batch, NSK, Now);
        _ -> compute_writes_NSK(Tail, State, SaveBatch++Batch, [{NodeID, NewDict}| NSK], Now)
    end.
%% @doc Try to strip one NSK write entry. An `undefined' context means the key
%% must be re-read from storage to decide; otherwise the context is stripped
%% against the node clock first. Returns updated {DotsToDrop, StorageBatch}.
%% (Fix: the first line of the commented-out debug block below had lost its
%% `%' marker, which made this clause a syntax error.)
dictNSK(Dot, {Key, undefined}, {Del, Batch}, State, Now) ->
    case read_one_key(Key, State) of
        0 ->
            % key already gone: drop this dot and record the strip
            ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_STRIP),
            ets_set_strip_time(State#state.atom_id, Key, os:timestamp()),
            {[Dot|Del], Batch};
        Obj ->
            dictNSK2(Dot, {Key, Obj}, {Del,Batch}, State, Now, true)
    end;
dictNSK(Dot, {Key, Ctx}, {Del, Batch}, State, Now) ->
    case strip_context(Ctx, State#state.clock) of
        [] ->
            case read_one_key(Key, State) of
                0 ->
                    ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_STRIP),
                    ets_set_strip_time(State#state.atom_id, Key, os:timestamp()),
                    {[Dot|Del], Batch};
                Obj ->
                    dictNSK2(Dot, {Key, Obj}, {Del,Batch}, State, Now, true)
            end;
        _V ->
            % case random:uniform() < 0.05 of
            %     true -> lager:info("STRIPPPPPPPPPPPPP:\nClock:~p\nCtx: ~p\n", [State#state.clock, V]);
            %     false -> ok
            % end,
            {Del, Batch} %% not stripped yet; keep in the dict
    end.
%% @doc Second phase of NSK write stripping: with the object in hand, strip
%% its causality and either delete it, persist its final form, or leave it in
%% the dict when causal context remains.
%% (Fix: the numbered classification comments below had lost their `%'
%% markers, which made this clause a syntax error.)
dictNSK2(Dot, {Key, Obj}, {Del, Batch}, State, Now, ETS) ->
    % removed unnecessary causality from the object, based on the current node clock
    StrippedObj = dotted_db_object:strip(State#state.clock, Obj),
    {Values, Context} = dotted_db_object:get_container(StrippedObj),
    Values2 = [{D,V} || {D,V} <- Values, V =/= ?DELETE_OP],
    StrippedObj2 = dotted_db_object:set_container({Values2, Context}, StrippedObj),
    % the resulting object is one of the following options:
    %   0 * it has no value but has causal history -> it's a delete, but still must be persisted
    %   1 * it has no value and no causal history -> can be deleted
    %   2 * has values, with causal context -> it's a normal write and we should persist
    %   3 * has values, but no causal context -> it's the final form for this write
    case {Values2, Context} of
        {[],[]} -> % do the real delete
            ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 0),
            ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_STRIP),
            ETS andalso ets_set_strip_time(State#state.atom_id, Key, Now),
            ETS andalso notify_strip_delete_latency(ets_get_write_time(State#state.atom_id, Key), Now),
            ETS andalso ets_set_fsm_time(State#state.atom_id, Key, dotted_db_object:get_fsm_time(StrippedObj)),
            ETS andalso ets_set_dots(State#state.atom_id, Key, []),
            {[Dot|Del], [{delete, Key}|Batch]};
        {_ ,[]} -> % write to disk without the version vector context
            ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 1),
            ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_WRITE_STRIP),
            ETS andalso ets_set_strip_time(State#state.atom_id, Key, Now),
            ETS andalso notify_strip_write_latency(ets_get_write_time(State#state.atom_id, Key), Now),
            ETS andalso ets_set_fsm_time(State#state.atom_id, Key, dotted_db_object:get_fsm_time(StrippedObj)),
            ETS andalso ets_set_dots(State#state.atom_id, Key, get_value_dots_for_ets(StrippedObj)),
            {[Dot|Del], [{put, Key, StrippedObj2}|Batch]};
        {_,_} ->
            {Del, Batch} %% not stripped yet; keep in the dict
    end.
%% @doc Erase every given dot key from the NSK write dict.
remove_stripped_writes_NSK(Dots, Dict) ->
    lists:foldl(fun dict:erase/2, Dict, Dots).
%% @doc Read one key from the storage backend. Returns 0 when the key is not
%% found (or the read errored), otherwise the stored object.
read_one_key(Key={_,_}, State) ->
    case dotted_db_storage:get(State#state.storage, Key) of
        {error, not_found} ->
            0;
        {error, Error} ->
            % some unexpected error
            lager:error("Error reading a key from storage: ~p", [Error]),
            % assume that the key was lost, i.e. it's equal to not_found
            0;
        Obj ->
            Obj
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%       Add elements to Non-Stripped Keys
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% @doc Add every {Key, Object} pair to the non-stripped-keys structure.
add_keys_to_NSK(KeyObjs, NSK) ->
    lists:foldl(
        fun ({Key, Object}, Acc) -> add_key_to_NSK(Key, Object, Acc) end,
        NSK,
        KeyObjs).
% @doc Add a replicated key and context to the list of non-stripped-keys,
% dispatching on the object's {values, context} container shape.
add_key_to_NSK(Key, Object, NSK) ->
    add_key_to_NSK2(Key, dotted_db_object:get_container(Object), NSK).
%% Dispatch on the container: fully stripped -> nothing to track; a delete
%% (no values, some context) -> NSK delete list; otherwise one NSK write
%% entry per dot carried by the object.
add_key_to_NSK2(_, {[],[]}, NSK) -> NSK;
add_key_to_NSK2(Key, {[],Ctx}, {Del,Wrt}) ->
    {[{Key, Ctx}|Del], Wrt};
add_key_to_NSK2(Key, {DotValues,Ctx}, NSK) ->
    KeyDots = [{Key, Dot, Ctx} || {Dot,_} <- DotValues],
    add_writes_to_NSK(KeyDots, NSK).
%% @doc Thread each {Key, Dot, Context} triple through the NSK write side,
%% leaving the delete side untouched.
add_writes_to_NSK([], NSK) -> NSK;
add_writes_to_NSK([KeyDotCtx = {_,_,_} | Rest], {Del, Wrt}) ->
    add_writes_to_NSK(Rest, {Del, add_one_write_to_NSK(KeyDotCtx, Wrt)}).
%% @doc Move entries pruned from the dotkeymap into the NSK write side, with
%% an `undefined' context so they are re-read before stripping.
%% (Fix: the leftover line below had lost its `%' marker; as code it rebound
%% Wrt2 and was a syntax error.)
add_keys_from_dotkeymap_to_NSK([], NSK) -> NSK;
add_keys_from_dotkeymap_to_NSK([{NodeId, DotKeys}|Tail], {Del,Wrt}) ->
    Wrt2 = lists:foldl(
             fun({Dot,Key}, Acc) ->
                 add_one_write_to_NSK({Key, {NodeId, Dot}, undefined}, Acc)
             end,
             Wrt,
             DotKeys),
    % Wrt2 = add_one_write_to_NSK({Key, {NodeID, Base+1}, undefined}, Wrt),
    add_keys_from_dotkeymap_to_NSK(Tail, {Del,Wrt2}).
%% @doc Insert one write {Key, Dot={NodeID,Counter}, Context} into the NSK
%% write structure: a list of {NodeID, dict(Counter -> {Key, Context})}. A new
%% {NodeID, dict} pair is appended when the node has no entry yet.
%% NOTE(review): no clause covers NodeID =:= NodeID2 with Counter =:= -1;
%% such a call raises function_clause — confirm -1 counters never reach here.
add_one_write_to_NSK({Key, {NodeID,Counter}, Context}, []) ->
    [{NodeID, dict:store(Counter, {Key, Context}, dict:new())}];
add_one_write_to_NSK({Key, {NodeID, Counter}, Context}, [{NodeID2, Dict}|Tail])
    when NodeID =:= NodeID2 andalso Counter =/= -1 ->
    Dict2 = dict:store(Counter, {Key, Context}, Dict),
    [{NodeID, Dict2} | Tail];
add_one_write_to_NSK(KV={_, {NodeID,_}, _}, [H={NodeID2, _}|Tail])
    when NodeID =/= NodeID2 ->
    [H | add_one_write_to_NSK(KV, Tail)].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Recovering vnode saves multiples objects from peers
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% For recovering keys remotely after a vnode crash/failure (with lost key-values)
%% @doc Used when recovering a vnode: fill each received object with the
%% sender's clock, sync it against the local copy, and when it brings news,
%% absorb its dots into the node clock and dotkeymap, strip it, and persist.
%% Returns {UpdatedClock, UpdatedDotKeyMap, NonStrippedKeys}.
%% (Fix: two comments below had lost their `%' markers, which made the last
%% clause a syntax error.)
fill_strip_save_kvs(Objects, RemoteClock, LocalClock, State, Now) ->
    fill_strip_save_kvs(Objects, RemoteClock, LocalClock, State, Now, {[],[]}, true).
fill_strip_save_kvs([], _, _, State, _Now, {NSK, StrippedObjects}, _ETS) ->
    ok = dotted_db_storage:write_batch(State#state.storage, StrippedObjects),
    {State#state.clock, State#state.dotkeymap, NSK};
fill_strip_save_kvs([{Key={_,_}, Object} | Objects], RemoteClock, LocalClock, State, Now, {NSK, StrippedObjects}, ETS) ->
    % fill the Object with the sending node clock
    FilledObject = dotted_db_object:fill(Key, RemoteClock, Object),
    % get and fill the causal history of the local key
    DiskObject = guaranteed_get(Key, State#state{clock=LocalClock}),
    % synchronize both objects
    FinalObject = dotted_db_object:sync(FilledObject, DiskObject),
    % test if the FinalObject has newer information
    case (not dotted_db_object:equal_values(FinalObject, DiskObject)) orelse
         (dotted_db_object:get_values(FinalObject)==[] andalso dotted_db_object:get_values(DiskObject)==[]) of
        false -> fill_strip_save_kvs(Objects, RemoteClock, LocalClock, State, Now, {NSK, StrippedObjects}, ETS);
        true ->
            % add each new dot to our node clock
            StateNodeClock = dotted_db_object:add_to_node_clock(State#state.clock, FinalObject),
            % add new keys to the Dot-Key Mapping
            DKM = swc_dotkeymap:add_objects(State#state.dotkeymap, [{Key, dotted_db_object:get_container(FinalObject)}]),
            % removed unnecessary causality from the object, based on the current node clock
            StrippedObject = dotted_db_object:strip(State#state.clock, FinalObject),
            {Values, Context} = dotted_db_object:get_container(StrippedObject),
            Values2 = [{D,V} || {D,V} <- Values, V =/= ?DELETE_OP],
            StrippedObject2 = dotted_db_object:set_container({Values2, Context}, StrippedObject),
            % the resulting object is one of the following options:
            %   * it has no value and no causal history -> can be deleted
            %   * it has no value but has causal history -> it's a delete, but still must be persisted
            %   * has values, with causal context -> it's a normal write and we should persist
            %   * has values, but no causal context -> it's the final form for this write
            Acc = case {Values2, Context} of
                {[],[]} ->
                    ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 0),
                    ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_STRIP),
                    ETS andalso ets_set_strip_time(State#state.atom_id, Key, Now),
                    ETS andalso notify_strip_delete_latency(Now, Now),
                    ETS andalso ets_set_dots(State#state.atom_id, Key, []),
                    {NSK, [{delete, Key}|StrippedObjects]};
                {_ ,[]} ->
                    ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 1),
                    ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_WRITE_STRIP),
                    ETS andalso ets_set_strip_time(State#state.atom_id, Key, Now),
                    ETS andalso notify_strip_write_latency(Now, Now),
                    ETS andalso ets_set_dots(State#state.atom_id, Key, get_value_dots_for_ets(StrippedObject2)),
                    {NSK, [{put, Key, StrippedObject2}|StrippedObjects]};
                {[],_CC} ->
                    ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, length(Context)),
                    ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_NO_STRIP),
                    ETS andalso ets_set_dots(State#state.atom_id, Key, get_value_dots_for_ets(StrippedObject2)),
                    {[{Key, StrippedObject2}|NSK], [{put, Key, StrippedObject2}|StrippedObjects]};
                {_ ,_CC} ->
                    ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, length(Context)+1),
                    ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_WRITE_NO_STRIP),
                    ETS andalso ets_set_dots(State#state.atom_id, Key, get_value_dots_for_ets(StrippedObject2)),
                    {[{Key, StrippedObject2}|NSK], [{put, Key, StrippedObject2}|StrippedObjects]}
            end,
            ETS andalso notify_write_latency(dotted_db_object:get_fsm_time(StrippedObject2), Now),
            ETS andalso ets_set_write_time(State#state.atom_id, Key, Now),
            ETS andalso ets_set_fsm_time(State#state.atom_id, Key, dotted_db_object:get_fsm_time(StrippedObject2)),
            fill_strip_save_kvs(Objects, RemoteClock, LocalClock, State#state{dotkeymap=DKM, clock=StateNodeClock}, Now, Acc, ETS)
    end.
%% @doc A watermark is "up to date" when it has one row per peer
%% ((?REPLICATION_FACTOR*2)-1 rows) and every row is equally complete.
is_watermark_up_to_date({WM,_}) ->
    (orddict:size(WM) =:= (?REPLICATION_FACTOR*2)-1) andalso
    is_watermark_up_to_date2(WM).
%% @doc Every watermark row must contain exactly one counter per peer.
is_watermark_up_to_date2(Entries) ->
    Expected = (?REPLICATION_FACTOR*2)-1,
    lists:all(fun ({_Peer, Counters}) -> orddict:size(Counters) =:= Expected end,
              Entries).
%% @doc Build a fresh vnode identity for this ring index: the index paired
%% with a large random integer, so a restarted vnode gets a distinct id.
%% NOTE(review): uses the legacy `random' module (deprecated in modern OTP);
%% dotted_db_utils:maybe_seed/0 presumably seeds it — keep them in sync.
new_vnode_id(Index) ->
    % seed the process RNG if not already seeded
    dotted_db_utils:maybe_seed(),
    % pair the ring index with a random disambiguator
    {Index, random:uniform(999999999999)}.
%% @doc Create (if absent) the named public ETS table used to track per-key
%% stats for this vnode; returns the table's atom id.
create_ets_all_keys(NewVnodeID) ->
    % create the ETS for this vnode
    AtomID = get_ets_id(NewVnodeID),
    % only create it when it does not exist yet (named tables are global)
    _ = ((ets:info(AtomID) =:= undefined) andalso
        ets:new(AtomID, [named_table, public, set, {write_concurrency, false}])),
    AtomID.
%% @doc Drop this vnode's stats ETS table if it exists; always returns true.
delete_ets_all_keys(#state{atom_id=AtomID}) ->
    case ets:info(AtomID) of
        undefined -> ok;
        _Info     -> ets:delete(AtomID)
    end,
    true.
%% @doc Turn any term into an atom usable as a named-ETS-table id, by
%% pretty-printing it (~p) and atomizing the flattened string.
-spec get_ets_id(any()) -> atom().
get_ets_id(Id) ->
    Printed = lists:flatten(io_lib:format("~p", [Id])),
    list_to_atom(Printed).
%% @doc Merge the remote node clock into ours, keeping only the remote
%% entries that belong to the responding vnode's ring index.
sync_clocks(LocalClock, RemoteClock, RemoteIndex) ->
    % replace the current entry in the node clock for the responding clock with
    % the current knowledge it's receiving
    RemoteClock2 = orddict:filter(fun ({Index,_},_) -> Index == RemoteIndex end, RemoteClock),
    swc_node:merge(LocalClock, RemoteClock2).
%% @doc After a sync with the vnode at RemoteIndex, refresh my watermark in
%% three steps: refresh my own row from my clock, refresh the peer's row from
%% its clock, then left-join the peer's watermark for third-party knowledge.
update_watermark_after_sync(MyWatermark, RemoteWatermark, MyIndex, RemoteIndex, MyClock, RemoteClock) ->
    % update my watermark with what I know, based on my node clock
    MyWatermark2 = orddict:fold(
        fun (Vnode={Index,_}, _, Acc) ->
            case Index == MyIndex of
                false -> Acc;
                true -> swc_watermark:update_peer(Acc, Vnode, MyClock)
            end
        end, MyWatermark, MyClock),
    % update my watermark with what my peer knows, based on its node clock
    MyWatermark3 = orddict:fold(
        fun (Vnode={Index,_}, _, Acc) ->
            case Index == RemoteIndex of
                false -> Acc;
                true -> swc_watermark:update_peer(Acc, Vnode, RemoteClock)
            end
        end, MyWatermark2, RemoteClock),
    % update the watermark to reflect what the asking peer has about its peers
    swc_watermark:left_join(MyWatermark3, RemoteWatermark).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% ETS functions that store some stats and benchmark info
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% @doc Returns a pair: first is the list of keys present in storage,
%% the second is the list of keys completely deleted from storage.
%% Fold over the stats ETS: partition keys into {StillPresent, FullyDeleted}
%% by whether their status field is ?ETS_DELETE_STRIP.
ets_get_all_keys(State) ->
    ets:foldl(fun
        ({Key,St,_,_,_,_}, {Others, Deleted}) when St =:= ?ETS_DELETE_STRIP -> {Others, [Key|Deleted]};
        ({Key,St,_,_,_,_}, {Others, Deleted}) when St =/= ?ETS_DELETE_STRIP -> {[Key|Others], Deleted}
    end, {[],[]}, State#state.atom_id).
% ets_get_issued_written(State#state.atom_id) ++
% ets_get_final_written(State#state.atom_id) ++
% ets_get_issued_deleted(State#state.atom_id).
%% Stats-ETS field setters. Each key maps to a 6-tuple
%% {Key, Status, StripTime, WriteTime, FsmTime, Dots}; ensure_tuple/2 creates
%% the slot on demand and the relevant field is then updated in place.
ets_set_status(Id, Key, Status) -> ensure_tuple(Id, Key), ets:update_element(Id, Key, {2, Status}).
% ets_set_strip_time(_, _, undefined) -> true;
ets_set_strip_time(Id, Key, Time) -> ensure_tuple(Id, Key), ets:update_element(Id, Key, {3, Time}).
% ets_set_write_time(_, _, undefined) -> true;
ets_set_write_time(Id, Key, Time) -> ensure_tuple(Id, Key), ets:update_element(Id, Key, {4, Time}).
ets_set_fsm_time(_, _, undefined) -> true;
ets_set_fsm_time(Id, Key, Time) -> ensure_tuple(Id, Key), ets:update_element(Id, Key, {5, Time}).
ets_set_dots(Id, Key, Dots) -> ensure_tuple(Id, Key), ets:update_element(Id, Key, {6, Dots}).
%% @doc Report (WriteTime - FSMTime) in milliseconds as the write latency
%% gauge; skipped (with a warning for a missing FSM time) when either
%% timestamp is undefined, and gated by ?STAT_WRITE_LATENCY.
notify_write_latency(undefined, _WriteTime) ->
    lager:warning("Undefined FSM write time!!!!!!!!"), ok;
notify_write_latency(_FSMTime, undefined) ->
    % lager:warning("Undefined write time!!!!!!!!"),
    ok;
notify_write_latency(FSMTime, WriteTime) ->
    case ?STAT_WRITE_LATENCY of
        false -> ok;
        true ->
            % timer:now_diff/2 is microseconds; convert to ms
            Delta = timer:now_diff(WriteTime, FSMTime)/1000,
            dotted_db_stats:notify({gauge, write_latency}, Delta)
    end.
%% @doc Report (StripTime - WriteTime) in milliseconds as the strip-write
%% latency gauge; no-op when the write time is unknown or stats are off.
notify_strip_write_latency(undefined, _StripTime) -> ok;
notify_strip_write_latency(WriteTime, StripTime) ->
    case ?STAT_STRIP_LATENCY of
        true ->
            DeltaMs = timer:now_diff(StripTime, WriteTime) / 1000,
            dotted_db_stats:notify({gauge, strip_write_latency}, DeltaMs);
        false ->
            ok
    end.
%% @doc Report (StripTime - WriteTime) in milliseconds as the strip-delete
%% latency gauge; no-op when the write time is unknown or stats are off.
notify_strip_delete_latency(undefined, _StripTime) -> ok;
notify_strip_delete_latency(WriteTime, StripTime) ->
    case ?STAT_STRIP_LATENCY of
        true ->
            DeltaMs = timer:now_diff(StripTime, WriteTime) / 1000,
            dotted_db_stats:notify({gauge, strip_delete_latency}, DeltaMs);
        false ->
            ok
    end.
%% @doc Make sure the stats ETS has a 6-tuple slot for Key, all fields
%% initialized to `undefined'. Returns true when a slot was inserted,
%% false when one already existed.
ensure_tuple(Id, Key) ->
    Unset = undefined,
    case ets:member(Id, Key) of
        true  -> false;
        false -> ets:insert(Id, {Key, Unset, Unset, Unset, Unset, Unset})
    end.
%% Stats-ETS field getters (fields 4 and 5 of the per-key 6-tuple).
%% (Fix: the unused getters below were commented out in the original but lost
%% their `%' markers, leaving mangled, syntax-breaking text.)
% ets_get_status(Id, Key)     -> ets:lookup_element(Id, Key, 2).
% ets_get_strip_time(Id, Key) -> ets:lookup_element(Id, Key, 3).
ets_get_write_time(Id, Key) -> ensure_tuple(Id, Key), ets:lookup_element(Id, Key, 4).
ets_get_fsm_time(Id, Key) -> ensure_tuple(Id, Key), ets:lookup_element(Id, Key, 5).
% ets_get_dots(Id, Key)       -> ets:lookup_element(Id, Key, 6).
%% Match-spec queries over the stats ETS: select the keys whose status field
%% ('$2') carries the given ?ETS_* progress marker.
ets_get_issued_deleted(Id) ->
    ets:select(Id, [{{'$1', '$2', '_', '_', '_', '_'}, [{'==', '$2', ?ETS_DELETE_NO_STRIP}], ['$1'] }]).
ets_get_actual_deleted(Id) ->
    ets:select(Id, [{{'$1', '$2', '_', '_', '_', '_'}, [{'==', '$2', ?ETS_DELETE_STRIP}], ['$1'] }]).
ets_get_issued_written(Id) ->
    ets:select(Id, [{{'$1', '$2', '_', '_', '_', '_'}, [{'==', '$2', ?ETS_WRITE_NO_STRIP}], ['$1'] }]).
ets_get_final_written(Id) ->
    ets:select(Id, [{{'$1', '$2', '_', '_', '_', '_'}, [{'==', '$2', ?ETS_WRITE_STRIP}], ['$1'] }]).
%% @doc Collect (StripTime - WriteTime) in milliseconds for every ETS entry
%% that has both timestamps recorded.
compute_strip_latency(Table) ->
    Collect =
        fun ({_Key, _St, StripT, WriteT, _Fsm, _Dots}, Latencies) ->
                case StripT =/= undefined andalso WriteT =/= undefined of
                    true  -> [timer:now_diff(StripT, WriteT) / 1000 | Latencies];
                    false -> Latencies
                end
        end,
    ets:foldl(Collect, [], Table).
%% @doc Collect (WriteTime - FsmTime) in milliseconds for every ETS entry
%% that has both timestamps recorded.
compute_replication_latency(Table) ->
    Collect =
        fun ({_Key, _St, _Strip, WriteT, FsmT, _Dots}, Latencies) ->
                case WriteT =/= undefined andalso FsmT =/= undefined of
                    true  -> [timer:now_diff(WriteT, FsmT) / 1000 | Latencies];
                    false -> Latencies
                end
        end,
    ets:foldl(Collect, [], Table).
% ets_get_all_dots(EtsId) ->
% ets:foldl(fun
% ({Key,?ETS_DELETE_STRIP ,_,_,_,Dots}, {Others, Deleted}) -> {Others, [{Key,lists:sort(Dots)}|Deleted]};
%     ({Key,?ETS_DELETE_NO_STRIP,_,_,_,Dots}, {Others, Deleted}) -> {Others, [{Key,lists:sort(Dots)}|Deleted]};
% ({Key,?ETS_WRITE_STRIP ,_,_,_,Dots}, {Others, Deleted}) -> {[{Key,lists:sort(Dots)}|Others], Deleted};
% ({Key,?ETS_WRITE_NO_STRIP ,_,_,_,Dots}, {Others, Deleted}) -> {[{Key,lists:sort(Dots)}|Others], Deleted};
% ({Key,undefined,_,_,_,undefined}, {Others, Deleted}) -> {Others, [{Key,undefined}|Deleted]}
% end, {[],[]}, EtsId).
%% @doc Fold over the whole storage backend collecting {Key, Container}
%% pairs; the "Deleted" side of the accumulator is never filled here.
storage_get_all_dots(Storage) ->
    Fun = fun({Key, Object}, {Others, Deleted}) ->
        DCC = dotted_db_object:get_container(Object),
        {[{Key,DCC}|Others], Deleted}
    end,
    dotted_db_storage:fold(Storage, Fun, {[],[]}).
%% @doc Extract the dots of an object's non-delete values, for ETS stats.
get_value_dots_for_ets(Object) ->
    {DotValues, _Context} = dotted_db_object:get_container(Object),
    [Dot || {Dot, Value} <- DotValues, Value =/= ?DELETE_OP].
%%% Functions for the small in-memory tracking of which peers this node synces since a some node failure
%% @doc Remember a retired vnode id in the process dictionary, paired with an
%% empty per-peer sync counter; update_jump_clock/1 later increments and
%% prunes these. NOTE(review): relies on the process dictionary, so it is
%% only meaningful inside the vnode process itself.
add_removed_vnode_jump_clock(OldVnodeID) ->
    Dict = case get(jump_clock) of
        undefined -> orddict:new();
        D -> D
    end,
    lager:warning("MEMORY: new retired vnode: ~p\n", [OldVnodeID]),
    put(jump_clock, orddict:store(OldVnodeID, orddict:new(), Dict)).
%% @doc Bump the sync counter for SyncPeerIndex under every tracked retired
%% vnode, then drop retired vnodes that have been synced enough times
%% (counter > 50) with enough distinct peers — they no longer need tracking.
%% (Fix: several lines of the commented-out debug/alternative code below had
%% lost their `%' markers, which made this clause a syntax error.)
update_jump_clock(SyncPeerIndex) ->
    case get(jump_clock) of
        undefined -> ok;
        Dict ->
            % case random:uniform() < 0.01 of
            %     true -> lager:warning("for ~p: ~p\n\n", [SyncPeerIndex,Dict]);
            %     false -> ok
            % end,
            D2 = orddict:map(fun (_,PeersCount) -> orddict:update_counter(SyncPeerIndex, 1, PeersCount) end, Dict),
            D3 = orddict:filter(fun (_,PeersCount) ->
                    PeersCount2 = orddict:filter(fun (_,C) -> C > 50 end, PeersCount),
                    orddict:size(PeersCount2) < (?REPLICATION_FACTOR-1)*2
                end, D2),
            % D4 = orddict:filter(fun (_,PeersCount) ->
            %         PeersCount2 = orddict:filter(fun (_,C) -> C > 50 end, PeersCount),
            %         orddict:size(PeersCount2) >= (?REPLICATION_FACTOR-1)*2
            %     end, D2),
            % case orddict:fetch_keys(D4) of
            %     [] -> ok;
            %     Rem -> lager:warning("MEMORY: from ~p deleted: ~p\n", [x99problems, Rem])
            % end,
            put(jump_clock, D3)
    end.
%% @doc List the retired vnode ids still tracked in the process dictionary
%% (i.e. not yet synced away); empty when tracking never started.
get_old_peers_still_not_synced() ->
    Tracked = get(jump_clock),
    case Tracked =:= undefined of
        true  -> [];
        false -> orddict:fetch_keys(Tracked)
    end.
%% @doc For every old id of this Index (=/= CurrentId) in the clock, jump its
%% base counter forward by Jump (and clear its bitmap) so stale entries from
%% retired vnode identities are dominated.
jump_node_clock_by_index(Clock, CurrentId, Index, Jump) ->
    OldIds = [Id || Id={Idx,_} <- swc_node:ids(Clock) , Idx == Index andalso Id =/= CurrentId],
    lists:foldl(fun (OldId, AccClock) ->
                    {Base,_} = swc_node:get(OldId, AccClock),
                    swc_node:store_entry(OldId, {Base+Jump,0}, AccClock)
                end, Clock, OldIds).
| null | https://raw.githubusercontent.com/ricardobcl/DottedDB/e3b96a9ec77439af90f94a80b875a01732c1425e/src/dotted_db_vnode.erl | erlang | node id used for in logical clocks
the atom representing the vnode id
index on the consistent hashing ring
my peers ids
node logical clock
what me and my peers know about me and their peers
map for keys that this node replicates (eventually all keys are safely pruned from this)
the left list of pairs of deleted keys not yet stripped, and their causal context (version vector);
interval in which the vnode tries to strip the non-stripped-keys
temporary list of nodes recovering from failure and a list of keys to send
number of updates (put or deletes) since saving node state to storage
DETS table that stores in disk the vnode state
a flag to collect or not stats
syncs stats
what mode the vnode is on
interval time between reports on this vnode
===================================================================
API
===================================================================
===================================================================
Callbacks
===================================================================
try to read the vnode state in the DETS file, if it exists
there isn't a past vnode state stored
some unexpected error
we have vnode state in the storage
open the storage backend for the key-values of this vnode
if the storage is in memory, start with an "empty" vnode state
create an ETS to store keys written and deleted in this node (for stats)
schedule a periodic strip of local keys
for now, lets use the index in the consistent hash as the vnode ID
READING
WRITING
SYNCHRONIZING
Restarting Vnode (and recovery of keys)
On the restarting node
On the good nodes
On the restarting node
On the good nodes
Sample command: respond to a ping
lager:info("Getting my peer's peers ~p/~p",[orddict:size(Watermark), (?REPLICATION_FACTOR*2)-1]),
===================================================================
Coverage
===================================================================
Dots = ets_get_all_dots(State#state.atom_id),
some unexpected error
assume that the key was lost, i.e. it's equal to not_found
some unexpected error
assume that the key was lost, i.e. it's equal to not_found
lager:warning("unknown coverage received ~p", [Req]),
lager:info("New vnode id for watermark: ~p ", [PeerId]),
we have all the peers Ids, now broadcast that list to our peers
lager:info("Peers Ids DONE!"),
Report Tick
Buffer Strip Tick
schedule the strip for keys that still have causal context at the moment
Take this time to filter timed-out entries in the "currently syncing peers" set
To get the diff in milliseconds
Optionally collect stats
CCS = NumNSKeys2 * ?REPLICATION_FACTOR, % we don't really know, but assume the worst
MetaF = EntryExampleSize * ?REPLICATION_FACTOR * NumNSKeys,
schedule the strip for keys that still have causal context at the moment
===================================================================
===================================================================
For coordinating writes, do it locally and forward the replication
do the local coordinating write
create a new request to forward the replication of this new object
Handle all other commands locally (only gets?)
save the vnode state, if not empty
decode the data received
Private
READING
there is no key K in this node
create an empty "object" and fill its causality with the node clock
this is needed to ensure that deletes "win" over old writes at the coordinator
some unexpected error
return the error
get and fill the causal history of the local object
Optionally collect stats
WRITING
os:timestamp(),
get and fill the causal history of the local key
discard obsolete values w.r.t the causal context
generate a new dot for this write/delete and add it to the node clock
test if this is a delete; if not, add dot-value to the object container
DELETE
Optionally collect stats
return the updated node state
os:timestamp(),
get and fill the causal history of the local key
synchronize both objects
save the new object, while stripping the unnecessary causality
Optionally collect stats
return the updated node state
SYNCHRONIZING
get my peers
send a sync message to that node
calculate what dots are present locally that the asking node does not have
get the keys corresponding to the missing dots,
remove duplicate keys
filter the keys that the asking node does not replicate
get each key's respective Object and strip any unnecessary causal information to save network
DotsNotFound2 = [ {A,B} || {A,B} <- DotsNotFound, B =/= [] andalso A =:= State#state.id],
[] -> ok;
end,
Optionally collect stats
send the final objects and the base (contiguous) dots of the node clock to the asking node
add information about the remote clock to our clock, but only for the remote node entry
get the local objects corresponding to the received objects and fill the causal history for all of them
synchronize / merge the remote and local objects
filter the objects that are not missing after all
add each new dot to our node clock
save the synced objects and strip their causal history
schedule a later strip attempt for non-stripped synced keys
update my watermark
Garbage Collect keys from the dotkeymap and delete keys with no causal context
case MissingObjects == [] of
true -> ok;
false ->
lager:info("Repairing SYNC ~p !\n\n",[MissingObjects]),
lager:info("LId: ~p\nLC: ~p\n\n", [State2#state.id, State2#state.clock]),
lager:info("WM: ~p\n\n", [State2#state.watermark]),
lager:info("RW: ~p\n\n", [RemoteWatermark])
end,
Optionally collect stats
return the updated node state
Restarting Vnode (and recovery of keys)
On the restarting node
keep track from now on on which peers this node did a sync, to be able to
jump the old vnode id in the node clock when we synced with every other peer
changes all occurences of the old id for the new id,
while at the same time resetting the column and the line of the new id,
which means that this node does not know about anything at the moment,
nor other vnodes know about this new id updates (since there are none at the moment).
reset the entire watermark
open the storage backend for the key-values of this vnode
store the number of full-syncs
On the good nodes
keep track from now on on which peers this node did a sync, to be able to
jump the old vnode id in the node clock when we synced with every other peer
replace the old peer entry in the watermarks of this vnode's peers also
update the "mypeers" set
add the new node id to the node clock
replace the old entry for the new entry in the watermark
add the new node id to the node clock
filter irrelevant keys from the perspective of the restarting vnode
get each key's respective Object and strip any unnecessary causal information to save network bandwidth
save the rest of the keys for later (if there's any)
is this the last batch?
On the restarting node
save the objects and return the ones that were not totally filtered
add new keys to the Dot - Key Mapping
DKM = swc_dotkeymap:add_objects(State#state.dotkeymap,
lists:map(fun ({Key,Obj}) -> {Key, dotted_db_object:get_container(Obj)} end, Objects)),
schedule a later strip attempt for non-stripped synced keys
On the restarting node
save the objects and return the ones that were not totally filtered
schedule a later strip attempt for non-stripped synced keys
update my watermark
jump the base counter of all old ids in the node clock, to make sure we "win"
against all keys potentially not stripped yet because of that old id
On the good nodes
get each key's respective Object and strip any unnecessary causal information to save network bandwidth
save the rest of the keys for later (if there's any)
is this the last batch?
Aux functions
@doc Returns the Object associated with the Key.
By default, we want to return a filled causality, unless we get a storage error.
If the key does not exists or for some reason, the storage returns an
error, return an empty Object (also filled).
there is no key K in this node
some unexpected error
assume that the key was lost, i.e. it's equal to not_found
get and fill the causal history of the local object
there is no key K in this node
some unexpected error
assume that the key was lost, i.e. it's equal to not_found
get and fill the causal history of the local object
@doc Saves the relevant vnode state to the storage.
@doc Reads the relevant vnode state from the storage.
there isn't a past vnode state stored
some unexpected error
@doc Initializes the "watermark" matrix with 0's for peers of this vnode.
ask each vnode for their current vnode ID
@doc Initializes the "sync" stats for peers of this vnode.
% get this node's peers, i.e., all nodes that replicates any subset of local keys.
PeerIDs = [ ID || {ID, _Node} <- dotted_db_utils:peers(Index)],
for replication factor N = 3, the number of peers should be 4 (2 vnodes before and 2 after).
(?REPLICATION_FACTOR-1)*2 = length(PeerIDs),
Now = os:timestamp(),
Syncs = lists:foldl(fun (ID, List) -> [{ID,0,0,Now,Now} | List] end , [], PeerIDs),
(?REPLICATION_FACTOR-1)*2 = length(Syncs),
Syncs.
@doc Returns the Storage for this vnode.
get the preferred backend in the configuration file, defaulting to ETS if
there is no preference.
give the name to the backend for this vnode using its position in the ring.
@doc Close the key-value backend, save the vnode state and close the DETS file.
remove the keys from the dotkeymap that have a dot (corresponding to their position) smaller than the
minimum dot, i.e., this update is known by all nodes that replicate it and therefore can be removed
from the dotkeymap;
remove entries in watermark from retired peers, that aren't needed anymore
add the non stripped keys to the node state for later strip attempt
increment the updates since saving
it's still early to save to storage
it's time to persist vnode state
restart the counter
removed unnecessary causality from the Object, based on the current node clock
the resulting object is one of the following options:
0 * it has no value but has causal history -> it's a delete, but still must be persisted
@doc Used periodically to see which non-stripped keys can be stripped.
removed unnecessary causality from the object, based on the current node clock
the resulting object is one of the following options:
0 * it has no value but has causal history -> it's a delete, but still must be persisted
true -> lager:info("STRIPPPPPPPPPPPPP:\nClock:~p\nCtx: ~p\n", [State#state.clock, V]);
false -> ok
end,
not stripped yet; keep in the dict
removed unnecessary causality from the object, based on the current node clock
the resulting object is one of the following options:
0 * it has no value but has causal history -> it's a delete, but still must be persisted
do the real delete
write to disk without the version vector context
not stripped yet; keep in the dict
some unexpected error
assume that the key was lost, i.e. it's equal to not_found
@doc Add a replicated key and context to the list of non-stripped-keys
Recovering vnode saves multiples objects from peers
For recovering keys remotely after a vnode crash/failure (with lost key-values)
fill the Object with the sending node clock
get and fill the causal history of the local key
synchronize both objects
add each new dot to our node clock
removed unnecessary causality from the object, based on the current node clock
the resulting object is one of the following options:
* it has no value and no causal history -> can be deleted
* it has no value but has causal history -> it's a delete, but still must be persisted
* has values, with causal context -> it's a normal write and we should persist
* has values, but no causal context -> it's the final form for this write
generate a new vnode ID for now
get a random index within the length of the list
create the ETS for this vnode
replace the current entry in the node clock for the responding clock with
the current knowledge it's receiving
update my watermark with what I know, based on my node clock
update my watermark with what my peer knows, based on its node clock
update the watermark to reflect what the asking peer has about its peers
ETS functions that store some stats and benchmark info
ets_get_issued_written(State#state.atom_id) ++
ets_get_final_written(State#state.atom_id) ++
ets_get_issued_deleted(State#state.atom_id).
ets_set_strip_time(_, _, undefined) -> true;
ets_set_write_time(_, _, undefined) -> true;
lager:warning("Undefined write time!!!!!!!!"),
ets_get_all_dots(EtsId) ->
ets:foldl(fun
({Key,?ETS_DELETE_STRIP ,_,_,_,Dots}, {Others, Deleted}) -> {Others, [{Key,lists:sort(Dots)}|Deleted]};
({Key,?ETS_WRITE_STRIP ,_,_,_,Dots}, {Others, Deleted}) -> {[{Key,lists:sort(Dots)}|Others], Deleted};
({Key,?ETS_WRITE_NO_STRIP ,_,_,_,Dots}, {Others, Deleted}) -> {[{Key,lists:sort(Dots)}|Others], Deleted};
({Key,undefined,_,_,_,undefined}, {Others, Deleted}) -> {Others, [{Key,undefined}|Deleted]}
end, {[],[]}, EtsId).
Functions for the small in-memory tracking of which peers this node has synced with since some node failure
true -> lager:warning("for ~p: ~p\n\n", [SyncPeerIndex,Dict]);
false -> ok
end,
orddict:size(PeersCount2) >= (?REPLICATION_FACTOR-1)*2
end, D2),
case orddict:fetch_keys(D4) of
[] -> ok;
end,
-module(dotted_db_vnode).
-behaviour(riak_core_vnode).
-include_lib("dotted_db.hrl").
-include_lib("riak_core/include/riak_core_vnode.hrl").
-export([start_vnode/1,
init/1,
terminate/2,
handle_command/3,
handle_info/2,
is_empty/1,
delete/1,
handle_handoff_command/3,
handoff_starting/2,
handoff_cancelled/1,
handoff_finished/2,
handle_handoff_data/2,
encode_handoff_item/2,
handle_coverage/4,
handle_exit/3
]).
-export([
get_vnode_id/1,
broadcast_my_peers_to_my_peers/3,
replace_peer/3,
restart/2,
inform_peers_restart/2,
inform_peers_restart2/2,
recover_keys/2,
read/3,
repair/3,
write/2,
replicate/2,
sync_start/2,
sync_missing/5,
sync_repair/2
]).
-ignore_xref([
start_vnode/1
]).
-type dets() :: reference().

%% Internal state of a dotted_db vnode.
%% NOTE(review): several comment lines in this record had lost their leading
%% `%` marker (making the module unparsable); they are restored as comments.
-record(state, {
    % the vnode's unique id
    id                      :: vnode_id(),
    % atom derived from the id, used to name this vnode's stats ETS table
    atom_id                 :: atom(),
    % the partition index this vnode is responsible for
    index                   :: index(),
    % ids of this vnode's peers
    peers_ids               :: [vnode_id()],
    % node logical clock (BVV)
    clock                   :: bvv(),
    % key->object store, where the object contains a DCC (values + logical clock)
    storage                 :: dotted_db_storage:storage(),
    watermark               :: vv_matrix(),
    dotkeymap               :: key_matrix(),
    % the right side is a list of (vnode, map), where the map is between dots
    % and keys not yet completely stripped (and their VV also)
    non_stripped_keys       :: {[{key(),vv()}], [{id(), dict:dict()}]},
    % interval between attempts at stripping non-stripped keys
    buffer_strip_interval   :: non_neg_integer(),
    % keys to send to a recovering peer, batched per peer id
    recover_keys            :: [{id(), [bkey()]}],
    % number of updates since the vnode state was last persisted
    updates_mem             :: integer(),
    % DETS table holding the persisted vnode state
    dets                    :: dets(),
    % whether stats collection is enabled
    stats                   :: boolean(),
    % per-peer sync bookkeeping: {id, count, count, last-attempt, last-exchange}
    syncs                   :: [{id(), integer(), integer(), os:timestamp(), os:timestamp()}],
    mode                    :: normal | recovering,
    report_interval         :: non_neg_integer()
}).
-type state() :: #state{}.

-define(MASTER, dotted_db_vnode_master).
% save vnode state every 100 updates
% NOTE(review): the -define this comment refers to (e.g.
% -define(UPDATE_LIMITE, 100)) appears to have been lost here — confirm
% against upstream before relying on it.
-define(VNODE_STATE_FILE, "dotted_db_vnode_state").
-define(VNODE_STATE_KEY, "dotted_db_vnode_state_key").
% per-key status markers used in the stats ETS rows (see ets_get_all_dots/1)
-define(ETS_DELETE_NO_STRIP, 0).
-define(ETS_DELETE_STRIP, 1).
-define(ETS_WRITE_NO_STRIP, 2).
-define(ETS_WRITE_STRIP, 3).
%% @doc Start (or look up) the vnode process for partition I.
start_vnode(I) ->
riak_core_vnode_master:get_vnode_pid(I, ?MODULE).

%% @doc Ask each of the given index/nodes for its current vnode id.
%% Replies arrive as out-of-band messages handled in handle_info/2.
get_vnode_id(IndexNodes) ->
riak_core_vnode_master:command(IndexNodes,
get_vnode_id,
{raw, undefined, self()},
?MASTER).

%% @doc Broadcast this vnode's peer-id set to the given index/nodes.
broadcast_my_peers_to_my_peers(IndexNodes, MyId, MyPeersIds) ->
riak_core_vnode_master:command(IndexNodes,
{broadcast_my_peers_to_my_peers, MyId, MyPeersIds},
{raw, undefined, self()},
?MASTER).

%% @doc Tell the given index/nodes to replace OldPeerId with NewPeerId
%% (used after a peer vnode restarts with a fresh identity).
replace_peer(IndexNodes, OldPeerId, NewPeerId) ->
riak_core_vnode_master:command(IndexNodes,
{replace_peer, OldPeerId, NewPeerId},
{raw, undefined, self()},
?MASTER).

%% @doc Restart the given vnodes with a new identity (simulates data loss).
restart(IndexNodes, ReqID) ->
riak_core_vnode_master:command(IndexNodes,
{restart, ReqID},
{fsm, undefined, self()},
?MASTER).

%% @doc Stage 1 of the restart protocol: inform peers of the id change.
inform_peers_restart(Peers, Args) ->
riak_core_vnode_master:command(Peers,
{inform_peers_restart, Args},
{fsm, undefined, self()},
?MASTER).

%% @doc Stage 2 of the restart protocol.
inform_peers_restart2(Peers, Args) ->
riak_core_vnode_master:command(Peers,
{inform_peers_restart2, Args},
{fsm, undefined, self()},
?MASTER).

%% @doc Ask peers to send batches of keys to a recovering vnode.
recover_keys(Peers, Args) ->
riak_core_vnode_master:command(Peers,
{recover_keys, Args},
{fsm, undefined, self()},
?MASTER).

%% @doc Read Key from the given replica nodes.
read(ReplicaNodes, ReqID, Key) ->
riak_core_vnode_master:command(ReplicaNodes,
{read, ReqID, Key},
{fsm, undefined, self()},
?MASTER).

%% @doc Send a read-repair (fire-and-forget replicate) to outdated nodes.
repair(OutdatedNodes, BKey, Object) ->
riak_core_vnode_master:command(OutdatedNodes,
{repair, BKey, Object},
{fsm, undefined, self()},
?MASTER).

%% @doc Coordinate a write (put/delete) on the coordinator vnode.
write(Coordinator, Args) ->
riak_core_vnode_master:command(Coordinator,
{write, Args},
{fsm, undefined, self()},
?MASTER).

%% @doc Replicate an already-coordinated write to the replica nodes.
replicate(ReplicaNodes, Args) ->
riak_core_vnode_master:command(ReplicaNodes,
{replicate, Args},
{fsm, undefined, self()},
?MASTER).

%% @doc Start an anti-entropy sync round on the given vnode.
sync_start(Node, ReqID) ->
riak_core_vnode_master:command(Node,
{sync_start, ReqID},
{fsm, undefined, self()},
?MASTER).

%% @doc Ask a peer for the objects it has that the caller is missing.
sync_missing(Peer, ReqID, RemoteNodeID, RemoteClock, RemotePeers) ->
riak_core_vnode_master:command(Peer,
{sync_missing, ReqID, RemoteNodeID, RemoteClock, RemotePeers},
{fsm, undefined, self()},
?MASTER).

%% @doc Apply the objects received from a sync round.
sync_repair(Node, Args) ->
riak_core_vnode_master:command(Node,
{sync_repair, Args},
{fsm, undefined, self()},
?MASTER).
%% @doc riak_core_vnode callback: initialise the vnode, recovering any
%% persisted state (DETS) and opening the key-value storage backend.
%% NOTE(review): the three `case read_vnode_state(Index)` clause heads had
%% been lost from this chunk; they are reconstructed from the variables each
%% branch uses ({Ref, not_found} / {Ref, error, Error} / recovered tuple) —
%% confirm against upstream.
init([Index]) ->
    put(watermark, false),
    process_flag(priority, high),
    % try to read a previously persisted vnode state
    {Dets, NodeId2, NodeClock, DotKeyMap, Watermark, NonStrippedKeys} =
        case read_vnode_state(Index) of
            {Ref, not_found} -> % there isn't a past vnode state stored
                lager:debug("No persisted state for vnode index: ~p.",[Index]),
                NodeId = new_vnode_id(Index),
                Clock = swc_node:new(),
                KLog = swc_dotkeymap:new(),
                Repli = swc_watermark:new(),
                {Ref, NodeId, Clock, KLog, Repli, {[],[]}};
            {Ref, error, Error} -> % some unexpected error
                lager:error("Error reading vnode state from storage: ~p", [Error]),
                NodeId = new_vnode_id(Index),
                Clock = swc_node:new(),
                KLog = swc_dotkeymap:new(),
                Repli = swc_watermark:new(),
                {Ref, NodeId, Clock, KLog, Repli, {[],[]}};
            {Ref, {Id, Clock, DKMap, Repli, NSK}} -> % recover the persisted state
                lager:info("Recovered state for vnode ID: ~p.",[Id]),
                {Ref, Id, Clock, DKMap, Repli, NSK}
        end,
    % open the storage backend for the key-values of this vnode;
    % an ETS (in-memory) backend cannot have survived a restart, so start fresh
    {Storage, NodeId3, NodeClock2, DotKeyMap2, Watermark2, NonStrippedKeys2} =
        case open_storage(Index) of
            {{backend, ets}, S} ->
                NodeId4 = new_vnode_id(Index),
                {S, NodeId4, swc_node:new(), swc_dotkeymap:new(), swc_watermark:new(), {[],[]}};
            {_, S} ->
                {S, NodeId2,NodeClock, DotKeyMap, Watermark, NonStrippedKeys}
        end,
    % this vnode's peers are every id in the watermark except its own
    PeersIDs = ordsets:del_element(NodeId3, ordsets:from_list(swc_watermark:peers(Watermark2))),
    % create the per-vnode ETS used for stats/bookkeeping
    AtomID = create_ets_all_keys(NodeId3),
    % schedule a periodic reporting message (wait 2 seconds initially)
    schedule_report(2000),
    % schedule a periodic attempt to strip non-stripped keys
    schedule_strip_keys(2000),
    {ok, #state{
        id                      = NodeId3,
        atom_id                 = AtomID,
        index                   = Index,
        peers_ids               = PeersIDs,
        clock                   = NodeClock2,
        watermark               = Watermark2,
        dotkeymap               = DotKeyMap2,
        non_stripped_keys       = NonStrippedKeys2,
        buffer_strip_interval   = ?BUFFER_STRIP_INTERVAL,
        recover_keys            = [],
        storage                 = Storage,
        dets                    = Dets,
        updates_mem             = 0,
        stats                   = application:get_env(dotted_db, do_stats, ?DEFAULT_DO_STATS),
        syncs                   = initialize_syncs(Index),
        mode                    = normal,
        report_interval         = ?REPORT_TICK_INTERVAL
        }
    }.
%% @doc Main command dispatch for this vnode.
handle_command(Cmd={read, _ReqID, _Key}, _Sender, State) ->
    handle_read(Cmd, State);
handle_command({repair, BKey, Object}, Sender, State) ->
    % a read-repair is a replicate that never replies to the FSM
    {noreply, State2} =
        handle_command({replicate, {dummy_req_id, BKey, Object, ?DEFAULT_NO_REPLY}}, Sender, State),
    {noreply, State2};
handle_command(Cmd={write, _Args}, _Sender, State) ->
    handle_write(Cmd, State);
handle_command(Cmd={replicate, _Args}, _Sender, State) ->
    handle_replicate(Cmd, State);
handle_command(Cmd={sync_start, _ReqID}, _Sender, State) ->
    handle_sync_start(Cmd, State);
handle_command(Cmd={sync_missing, _ReqID, _RemoteID, _RemoteClock, _RemotePeers}, Sender, State) ->
    handle_sync_missing(Cmd, Sender, State);
handle_command(Cmd={sync_repair, _Args}, _Sender, State) ->
    handle_sync_repair(Cmd, State);
%% Restart / recovery protocol
handle_command(Cmd={restart, _ReqID}, _Sender, State) ->
    handle_restart(Cmd, State);
handle_command(Cmd={inform_peers_restart, {_ReqID, _RestartingNodeIndex, _OldVnodeID, _NewVnodeID}}, _Sender, State) ->
    handle_inform_peers_restart(Cmd, State);
handle_command(Cmd={recover_keys, {_ReqID, _RemoteVnode, _RemoteVnodeId, _RemoteClock, _Objects, _RemoteWatermark, _LastBatch}}, _Sender, State) ->
    handle_recover_keys(Cmd, State);
handle_command(Cmd={inform_peers_restart2, {_ReqID, _NewVnodeID}}, _Sender, State) ->
    handle_inform_peers_restart2(Cmd, State);
%% Debug / admin commands
handle_command(ping, _Sender, State) ->
    {reply, {pong, State#state.id}, State};
handle_command(get_vnode_state, _Sender, State) ->
    {reply, {pong, State}, State};
handle_command({set_strip_interval, NewStripInterval}, _Sender, State) ->
    OldStripInterval = State#state.buffer_strip_interval,
    lager:info("Strip Interval => from: ~p \t to: ~p",[OldStripInterval,NewStripInterval]),
    {noreply, State#state{buffer_strip_interval=NewStripInterval}};
handle_command({set_stats, NewStats}, _Sender, State) ->
    OldStats = State#state.stats,
    lager:info("Vnode stats => from: ~p \t to: ~p",[OldStats, NewStats]),
    {noreply, State#state{stats=NewStats}};
handle_command(get_vnode_id, _Sender, State) ->
    {reply, {get_vnode_id, {State#state.index, node()}, State#state.id}, State};
handle_command({broadcast_my_peers_to_my_peers, MyPeer, MyPeerPeers}, _Sender, State) ->
    Watermark = swc_watermark:add_peer(State#state.watermark, MyPeer, MyPeerPeers),
    case length(swc_watermark:peers(Watermark)) == (?REPLICATION_FACTOR*2)-1 of
        true ->
            % the watermark now covers every peer (and their peers)
            put(watermark, true),
            lager:info("Peers 2 peers 4 watermark -> DONE!!!");
        false ->
            ok
    end,
    {noreply, State#state{watermark=Watermark}};
handle_command({replace_peer, OldPeerId, NewPeerId}, _Sender, State) ->
    NewPeersIds =
        case ordsets:is_element(OldPeerId, State#state.peers_ids) of
            true -> ordsets:add_element(NewPeerId, ordsets:del_element(OldPeerId, State#state.peers_ids));
            false -> State#state.peers_ids
        end,
    % NOTE(review): the next line had lost its `%` marker and duplicated the
    % NewWatermark binding below; restored as the commented-out alternative.
    % NewWatermark = swc_watermark:replace_peer(State#state.watermark, OldPeerId, NewPeerId),
    add_removed_vnode_jump_clock(OldPeerId),
    NewWatermark = swc_watermark:retire_peer(State#state.watermark, OldPeerId, NewPeerId),
    {noreply, State#state{peers_ids=NewPeersIds, watermark=NewWatermark}};
handle_command(Message, _Sender, State) ->
    lager:info("Unhandled Command ~p", [Message]),
    {noreply, State}.
%% @doc Coverage queries, used for stats and debugging.
handle_coverage(vnode_state, _KeySpaces, {_, RefId, _}, State) ->
    {reply, {RefId, {ok, vs, State}}, State};
handle_coverage(strip_latency, _KeySpaces, {_, RefId, _}, State) ->
    Latencies = compute_strip_latency(State#state.atom_id),
    {reply, {RefId, {ok, strip_latency, Latencies}}, State};
handle_coverage(replication_latency, _KeySpaces, {_, RefId, _}, State) ->
    Latencies = compute_replication_latency(State#state.atom_id),
    {reply, {RefId, {ok, replication_latency, Latencies}}, State};
handle_coverage(all_current_dots, _KeySpaces, {_, RefId, _}, State) ->
    % dots currently present in the persistent storage backend
    Dots = storage_get_all_dots(State#state.storage),
    {reply, {RefId, {ok, all_current_dots, Dots}}, State};
handle_coverage(actual_deleted_keys, _KeySpaces, {_, RefId, _}, State) ->
    ADelKeys = ets_get_actual_deleted(State#state.atom_id),
    {reply, {RefId, {ok, adk, ADelKeys}}, State};
handle_coverage(issued_deleted_keys, _KeySpaces, {_, RefId, _}, State) ->
    IDelKeys = ets_get_issued_deleted(State#state.atom_id),
    % sample the first issued-delete key to inspect its stored object
    Res = case length(IDelKeys) > 0 of
        true ->
            Key = hd(IDelKeys),
            case dotted_db_storage:get(State#state.storage, Key) of
                {error, not_found} ->
                    % it was deleted locally (improbable, since there was a 0 in the ETS)
                    {Key, not_found};
                {error, Error} ->
                    % some unexpected error
                    lager:error("Error reading a key from storage: ~p", [Error]),
                    {Key, storage_error};
                Obj ->
                    % return the stripped and the raw stored object
                    {Key, dotted_db_object:strip(State#state.clock, Obj), Obj}
            end;
        false ->
            {}
    end,
    ThisVnode = {State#state.index, node()},
    {reply, {RefId, {ok, idk, IDelKeys, Res, ThisVnode}}, State};
handle_coverage(written_keys, _KeySpaces, {_, RefId, _}, State) ->
    WrtKeys = ets_get_issued_written(State#state.atom_id),
    % sample the first issued-write key to inspect its stored object
    Res = case length(WrtKeys) > 0 of
        true ->
            Key = hd(WrtKeys),
            case dotted_db_storage:get(State#state.storage, Key) of
                {error, not_found} ->
                    % it was deleted locally (improbable, since there was a 0 in the ETS)
                    {Key, not_found};
                {error, Error} ->
                    % some unexpected error
                    lager:error("Error reading a key from storage: ~p", [Error]),
                    {Key, storage_error};
                Obj ->
                    % return the stripped and the raw stored object
                    {Key, dotted_db_object:strip(State#state.clock, Obj), Obj}
            end;
        false ->
            {}
    end,
    ThisVnode = {State#state.index, node()},
    {reply, {RefId, {ok, wk, WrtKeys, Res, ThisVnode}}, State};
handle_coverage(final_written_keys, _KeySpaces, {_, RefId, _}, State) ->
    WrtKeys = ets_get_final_written(State#state.atom_id),
    {reply, {RefId, {ok, fwk, WrtKeys}}, State};
handle_coverage(all_keys, _KeySpaces, {_, RefId, _}, State) ->
    IDelKeys = ets_get_issued_deleted(State#state.atom_id),
    IWrtKeys = ets_get_issued_written(State#state.atom_id),
    FWrtKeys = ets_get_final_written(State#state.atom_id),
    {reply, {RefId, {ok, ak, IDelKeys, IWrtKeys, FWrtKeys}}, State};
handle_coverage(Req, _KeySpaces, _Sender, State) ->
    % catch-all: log and ignore unknown coverage requests
    lager:info("unknown coverage received ~p", [Req]),
    {noreply, State}.
%% @doc Out-of-band messages: peer id discovery and periodic ticks.
%% NOTE(review): the `true ->` head of the WM case below had been lost from
%% this chunk and is reconstructed — confirm against upstream.
handle_info({undefined,{get_vnode_id, IndexNode={Index,_}, PeerId={Index,_}}}, State) ->
    % a peer answered get_vnode_id; only record it if it really is a peer
    case lists:member(IndexNode, dotted_db_utils:peers(State#state.index)) of
        true ->
            % add the peer id to the node clock and to our peer set
            NodeClock = swc_node:add(State#state.clock, {PeerId, 0}),
            MyPeersIds = ordsets:add_element(PeerId, State#state.peers_ids),
            WM = case ordsets:size(MyPeersIds) == (?REPLICATION_FACTOR-1)*2 of
                true ->
                    % we now know all our peers: tell them about each other
                    CurrentPeers = dotted_db_utils:peers(State#state.index),
                    broadcast_my_peers_to_my_peers(CurrentPeers, State#state.id, MyPeersIds),
                    swc_watermark:add_peer(State#state.watermark, State#state.id, MyPeersIds);
                false ->
                    % lager:info("Getting Peers Ids ~p/~p",[ordsets:size(MyPeersIds), (?REPLICATION_FACTOR-1)*2]),
                    State#state.watermark
            end,
            {ok, State#state{clock=NodeClock, watermark=WM, peers_ids=MyPeersIds}};
        false ->
            lager:info("WRONG NODE ID! IxNd: ~p ", [IndexNode]),
            {ok, State}
    end;
handle_info(report_tick, State=#state{stats=false}) ->
    schedule_report(State#state.report_interval),
    {ok, State};
handle_info(report_tick, State=#state{stats=true}) ->
    {_, NextState} = report(State),
    schedule_report(State#state.report_interval),
    {ok, NextState};
handle_info(strip_keys, State=#state{mode=recovering}) ->
    lager:warning("Not stripping keys because we are in recovery mode."),
    schedule_strip_keys(State#state.buffer_strip_interval),
    {ok, State};
handle_info(strip_keys, State=#state{mode=normal, non_stripped_keys=NSKeys}) ->
    % try to strip the keys that could not be fully stripped before
    NSKeys2 = read_strip_write(NSKeys, State),
    % expire sync sessions that have been open longer than the timeout
    case get(current_sync) of
        undefined -> ok;
        Set ->
            Now = os:timestamp(),
            put(current_sync, ordsets:filter(
                fun({TS, _Peer}) ->
                    TimeElapsed = timer:now_diff(Now, TS) / 1000,
                    TimeElapsed < ?DEFAULT_TIMEOUT
                end, Set))
    end,
    case State#state.stats of
        true ->
            % NOTE(review): the following stats computation had lost its `%`
            % markers (it references undefined CCS/MetaF); kept commented out.
            % {D1,W1} = NSKeys,
            % {D2,W2} = NSKeys2,
            % NumNSKeys = lists:sum([dict:size(Dict) || {_,Dict} <- W1]) + length(D1),
            % NumNSKeys2 = lists:sum([dict:size(Dict) || {_,Dict} <- W2]) + length(D2),
            % CCF = NumNSKeys * ?REPLICATION_FACTOR,
            % EntryExampleSize = byte_size(term_to_binary({State#state.id, 123345})),
            % MetaS = EntryExampleSize * CCS,
            % dotted_db_stats:update_key_meta(State#state.index, NumNSKeys, MetaF, MetaS, CCF, CCS),
            ok;
        false -> ok
    end,
    schedule_strip_keys(State#state.buffer_strip_interval),
    {ok, State#state{non_stripped_keys=NSKeys2}};
handle_info(Info, State) ->
    lager:info("unhandled_info: ~p",[Info]),
    {ok, State}.
%%%===================================================================
%%% HANDOFF
%%%===================================================================

handle_handoff_command(?FOLD_REQ{foldfun=FoldFun, acc0=Acc0}, _Sender, State) ->
    % we need to wrap the fold function because it expects 3 elements (K, V, Acc),
    % while our storage layer expects 2 elements ({K,V}, Acc)
    WrapperFun = fun({Key,Val}, Acc) -> FoldFun(Key, Val, Acc) end,
    Acc = dotted_db_storage:fold(State#state.storage, WrapperFun, Acc0),
    {reply, Acc, State};
%% Ignore AAE sync requests during handoff
handle_handoff_command(Cmd, _Sender, State) when
        element(1, Cmd) == sync_start orelse
        element(1, Cmd) == sync_missing orelse
        element(1, Cmd) == sync_repair ->
    {drop, State};
handle_handoff_command(Cmd, Sender, State) when
        element(1, Cmd) == replicate orelse
        element(1, Cmd) == repair ->
    % apply the command locally AND forward it to the handoff target
    case handle_command(Cmd, Sender, State) of
        {noreply, State2} ->
            {forward, State2};
        {reply, {ok,_}, State2} ->
            {forward, State2}
    end;
handle_handoff_command(Cmd={write, {ReqID, _, Key, _, _, _FSMTime}}, Sender, State) ->
    lager:info("HAND_WRITE: {~p, ~p} // Key: ~p",[State#state.id, node(), Key]),
    % coordinate the write locally, ack the PUT FSM ourselves, and forward
    % the resulting object to the handoff target as a replicate
    {reply, {ok, ReqID, NewObject}, State2} = handle_command(Cmd, Sender, State),
    % send the ack to the PUT FSM
    riak_core_vnode:reply(Sender, {ok, ReqID, NewObject}),
    NewCommand = {replicate, {ReqID, Key, NewObject, ?DEFAULT_NO_REPLY}},
    {forward, NewCommand, State2};
handle_handoff_command(Cmd, Sender, State) ->
    % any other command is applied locally as usual
    lager:info("Handoff command ~p at ~p", [Cmd, State#state.id]),
    handle_command(Cmd, Sender, State).
%% @doc Called when handoff to TargetNode is about to start: persist the
%% vnode's causal metadata under a well-known key so it is handed off too.
handoff_starting(TargetNode, State) ->
lager:info("HAND_START: {~p, ~p} to ~p",[State#state.index, node(), TargetNode]),
% only persist metadata if this vnode has a non-empty node clock
ok = case State#state.clock =:= swc_node:new() of
true -> ok;
false ->
Key = {?DEFAULT_BUCKET, {?VNODE_STATE_KEY, State#state.index}},
NodeState = {State#state.clock, State#state.dotkeymap, State#state.watermark, State#state.non_stripped_keys},
dotted_db_storage:put(State#state.storage, Key, NodeState)
end,
{true, State}.

%% @doc Handoff was cancelled; keep the state unchanged.
handoff_cancelled(State) ->
{ok, State}.

%% @doc Handoff completed; keep the state unchanged.
handoff_finished(_TargetNode, State) ->
{ok, State}.
%% @doc Apply one handed-off item: either the sender's persisted causal
%% metadata (merged into ours) or a regular key/object (replicated locally).
handle_handoff_data(Data, State) ->
    NodeKey = {?DEFAULT_BUCKET, {?VNODE_STATE_KEY, State#state.index}},
    NewState =
        case dotted_db_utils:decode_kv(Data) of
            {NodeKey, {NodeClock, DotKeyMap, Watermark, NSK}} ->
                % this is the vnode metadata: merge the node clocks
                NodeClock2 = swc_node:join(NodeClock, State#state.clock),
                State#state{clock = NodeClock2, dotkeymap = DotKeyMap, watermark = Watermark, non_stripped_keys = NSK};
            {OtherNodeKey, {NodeClock, DotKeyMap, Watermark, NSK}} = Data ->
                % metadata under a still-encoded key: accept it if it decodes
                % to our node-state key
                case is_binary(OtherNodeKey) andalso binary_to_term(OtherNodeKey) == NodeKey of
                    true ->
                        NodeClock2 = swc_node:join(NodeClock, State#state.clock),
                        State#state{clock = NodeClock2, dotkeymap = DotKeyMap, watermark = Watermark, non_stripped_keys = NSK};
                    false ->
                        % BUG FIX: this branch previously returned the result of
                        % lager:warning/2 as the new vnode state; log and keep State.
                        lager:warning("HANDOFF: strang data read -> ~p!",[Data]),
                        State
                end;
            {Key, Obj} ->
                % a regular key/object: store it via the replicate path
                lager:info("HANDOFF: key -> ~p | node key -> ~p \n obj -> ~p!", [Key, NodeKey, Obj]),
                {noreply, State2} = handle_command({replicate, {dummy_req_id, Key, Obj, ?DEFAULT_NO_REPLY}}, undefined, State),
                State2
        end,
    {reply, ok, NewState}.
%% @doc Serialise a key/value pair for handoff.
encode_handoff_item(Key, Val) ->
dotted_db_utils:encode_kv({Key,Val}).

%% @doc riak_core callback: report whether this vnode's storage is empty
%% (an empty vnode can be deleted without handoff).
is_empty(State) ->
case dotted_db_storage:is_empty(State#state.storage) of
true ->
{true, State};
false ->
lager:info("IS_EMPTY: not empty -> {~p, ~p}",[State#state.index, node()]),
{false, State}
end.

%% @doc riak_core callback: drop this vnode's storage and its stats ETS.
delete(State) ->
{Good, Storage1} =
case dotted_db_storage:drop(State#state.storage) of
{ok, Storage} ->
{true, Storage};
{error, Reason, Storage} ->
lager:info("BAD_DROP: {~p, ~p} Reason: ~p",[State#state.index, node(), Reason]),
{false, Storage}
end,
% log the causal metadata only when the drop succeeded and the clock is non-empty
case State#state.clock =/= [] andalso Good of
true ->
lager:info("IxNd:~p // Clock:~p // DKM:~p // Watermark:~p",
[{State#state.index, node()}, State#state.clock, State#state.dotkeymap, State#state.watermark] ),
lager:info("GOOD_DROP: {~p, ~p}",[State#state.index, node()]);
false -> ok
end,
true = delete_ets_all_keys(State),
{ok, State#state{storage=Storage1}}.

%% @doc A linked process exited; carry on with the same state.
handle_exit(_Pid, _Reason, State) ->
{noreply, State}.

%% @doc riak_core callback: close storage/DETS on vnode shutdown.
terminate(_Reason, State) ->
lager:debug("HAND_TERM: {~p, ~p}",[State#state.index, node()]),
close_all(State),
ok.
%% @doc Serve a read request: look the key up in local storage and reply to
%% the FSM with a causally-filled object, an empty filled object when the key
%% is absent, or the storage error itself.
handle_read({read, ReqID, Key}, State) ->
    Reply =
        case dotted_db_storage:get(State#state.storage, Key) of
            {error, not_found} ->
                % key not present locally: answer with an empty object filled
                % with the local node clock
                {ok, dotted_db_object:fill(Key, State#state.clock, dotted_db_object:new())};
            {error, Reason} ->
                % unexpected storage failure: propagate the error
                lager:error("Error reading a key from storage (command read): ~p", [Reason]),
                {error, Reason};
            Stored ->
                % fill the stored object's causal history before replying
                {ok, dotted_db_object:fill(Key, State#state.clock, Stored)}
        end,
    % stats hook (currently a no-op on both branches); kept as-is
    case State#state.stats of
        true  -> ok;
        false -> ok
    end,
    {reply, {ok, ReqID, {State#state.index, node()}, Reply}, State}.
%% @doc Coordinate a write or delete: discard obsolete values w.r.t. the
%% supplied causal context, tag the new value with a fresh dot, persist it and
%% record the dot in the dotkeymap.
%% NOTE(review): the `case Operation` clause heads and the binding of `Now`
%% (used by strip_save_batch/3 below but unbound in this chunk) were lost;
%% they are reconstructed (?DELETE_OP / ?WRITE_OP, Now = os:timestamp()) —
%% confirm against upstream.
handle_write({write, {ReqID, Operation, Key, Value, Context, FSMTime}}, State) ->
    Now = os:timestamp(),
    % get and fill the causal history of the local object
    DiskObject = guaranteed_get(Key, State),
    % discard values made obsolete by the client's causal context
    DiscardObject = dotted_db_object:discard_values(Context, DiskObject),
    % generate a new dot for this operation and add it to the node clock
    {Dot, NodeClock} = swc_node:event(State#state.clock, State#state.id),
    NewObject0 =
        case Operation of
            ?DELETE_OP -> % DELETE
                dotted_db_object:add_value({State#state.id, Dot}, ?DELETE_OP, DiscardObject);
            ?WRITE_OP -> % PUT
                dotted_db_object:add_value({State#state.id, Dot}, Value, DiscardObject)
        end,
    NewObject = dotted_db_object:set_fsm_time(FSMTime, NewObject0),
    % save the new k\v and remove unnecessary causal information
    _= strip_save_batch([{Key, NewObject}], State#state{clock=NodeClock}, Now),
    % append the key to the tail of the dotkeymap
    DotKeyMap = swc_dotkeymap:add_key(State#state.dotkeymap, State#state.id, Key, Dot),
    % stats hook (currently a no-op on both branches)
    case State#state.stats of
        true -> ok;
        false -> ok
    end,
    {reply, {ok, ReqID, NewObject}, State#state{clock = NodeClock, dotkeymap = DotKeyMap}}.
%% @doc Store an already-coordinated object on this replica: merge it with the
%% local version, persist the merge, and track it if not fully stripped.
%% NOTE(review): `Now` is used by strip_save_batch/3 but was unbound in this
%% chunk; the binding is reconstructed as os:timestamp() — confirm upstream.
handle_replicate({replicate, {ReqID, Key, NewObject, NoReply}}, State) ->
    Now = os:timestamp(),
    NodeClock = dotted_db_object:add_to_node_clock(State#state.clock, NewObject),
    % append the key to the dotkeymap
    DotKeyMap = swc_dotkeymap:add_objects(State#state.dotkeymap, [{Key, dotted_db_object:get_container(NewObject)}]),
    % get and fill the causal history of the local key
    DiskObject = guaranteed_get(Key, State),
    % synchronize both objects
    FinalObject = dotted_db_object:sync(NewObject, DiskObject),
    % test if the FinalObject has newer information
    NSK = case dotted_db_object:equal(FinalObject, DiskObject) of
        true ->
            lager:debug("Replicated object is ignored (already seen)"),
            State#state.non_stripped_keys;
        false ->
            % persist the merged object; keep track of it when not fully stripped
            case strip_save_batch([{Key, FinalObject}], State#state{clock=NodeClock, dotkeymap=DotKeyMap}, Now) of
                [] -> State#state.non_stripped_keys;
                _ -> add_key_to_NSK(Key, NewObject, State#state.non_stripped_keys)
            end
    end,
    % stats hook (currently a no-op on both branches)
    case State#state.stats of
        true -> ok;
        false -> ok
    end,
    NewState = State#state{clock = NodeClock, dotkeymap = DotKeyMap, non_stripped_keys = NSK},
    case NoReply of
        true -> {noreply, NewState};
        false -> {reply, {ok, ReqID}, NewState}
    end.
%% @doc Begin an anti-entropy round: pick a peer not currently being synced
%% (tracked in the process dictionary under `current_sync`) and reply with the
%% local id, clock and known peer ids. Cancels while recovering.
handle_sync_start({sync_start, ReqID}, State=#state{mode=recovering}) ->
{reply, {cancel, ReqID, recovering}, State};
handle_sync_start({sync_start, ReqID}, State=#state{mode=normal}) ->
Now = os:timestamp(),
MyPeersIndexNodes = dotted_db_utils:peers(State#state.index),
Peer = case get(current_sync) of
undefined ->
% no sync in flight: start with the first peer
Node = hd(MyPeersIndexNodes),
put(current_sync, ordsets:add_element({Now,Node}, ordsets:new())),
Node;
Set ->
% NOTE(review): Set holds {Timestamp, Node} pairs while
% MyPeersIndexNodes holds index/node tuples, so this subtract looks
% like it can never remove elements — confirm intended semantics.
case ordsets:subtract(MyPeersIndexNodes, Set) of
[] -> [];
Nodes ->
% pick a random peer not currently being synced
Node = dotted_db_utils:random_from_list(Nodes),
put(current_sync, ordsets:add_element({Now,Node}, Set)),
Node
end
end,
case Peer of
[] ->
{reply, {cancel, ReqID, already_syncing}, State};
_ ->
PeersIDs = swc_watermark:peers(State#state.watermark),
{reply, {ok, ReqID, State#state.id, Peer, State#state.clock, PeersIDs}, State}
end.
%% @doc Answer a peer's sync request: compute which locally-known dots the
%% peer is missing, look up the corresponding keys, and reply (from a spawned
%% process, keeping the vnode responsive) with the stripped objects.
handle_sync_missing({sync_missing, ReqID, _, _, _}, _Sender, State=#state{mode=recovering}) ->
    {reply, {cancel, ReqID, recovering}, State};
handle_sync_missing({sync_missing, ReqID, _RemoteID={RemoteIndex,_}, RemoteClock, RemotePeers}, Sender, State=#state{mode=normal}) ->
    spawn(fun() ->
        % dots we have that the remote node has not seen
        MissingDots = swc_node:missing_dots(State#state.clock, RemoteClock, RemotePeers),
        {MissingKeys0, _DotsNotFound} = swc_dotkeymap:get_keys(State#state.dotkeymap, MissingDots),
        % de-duplicate keys
        MissingKeys = sets:to_list(sets:from_list(MissingKeys0)),
        % filter irrelevant keys from the perspective of the remote vnode
        RelevantMissingKeys = filter_irrelevant_keys(MissingKeys, RemoteIndex),
        % get each key's object, stripped of unnecessary causal information
        % to save network bandwidth
        StrippedObjects = guaranteed_get_strip_list(RelevantMissingKeys, State),
        % debug (was commented out upstream; `%` markers restored):
        % lager:info("\n\n ~p to ~p:\n\tNotFound: ~p \n\tMiKeys: ~p \n\tRelKey: ~p \n\tKDM: ~p \n\tStrip: ~p \n\tBVV: ~p \n",
        %     [State#state.id, RemoteIndex, DotsNotFound2, MissingKeys, RelevantMissingKeys, State#state.dotkeymap, StrippedObjects, State#state.clock]),
        % BUG FIX: `MissingKeys > 0` compared a list with an integer (always
        % true in Erlang term order); compare the length instead.
        case ?STAT_SYNC andalso State#state.stats andalso length(MissingKeys) > 0 andalso length(StrippedObjects) > 0 of
            true ->
                Ratio_Relevant_Keys = round(100*length(RelevantMissingKeys)/max(1,length(MissingKeys))),
                SRR = {histogram, sync_relevant_ratio, Ratio_Relevant_Keys},
                Ctx_Sent_Strip = [dotted_db_object:get_context(Obj) || {_Key, Obj} <- StrippedObjects],
                Sum_Ctx_Sent_Strip = lists:sum([length(VV) || VV <- Ctx_Sent_Strip]),
                Ratio_Sent_Strip = Sum_Ctx_Sent_Strip/max(1,length(StrippedObjects)),
                SSDS = {histogram, sync_sent_dcc_strip, Ratio_Sent_Strip},
                Size_Meta_Sent = byte_size(term_to_binary(Ctx_Sent_Strip)),
                SCS = {histogram, sync_context_size, Size_Meta_Sent},
                SMS = {histogram, sync_metadata_size, byte_size(term_to_binary(RemoteClock))},
                Payload_Sent_Strip = [{Key, dotted_db_object:get_values(Obj)} || {Key, Obj} <- StrippedObjects],
                Size_Payload_Sent = byte_size(term_to_binary(Payload_Sent_Strip)),
                SPS = {histogram, sync_payload_size, Size_Payload_Sent},
                dotted_db_stats:notify2([SRR, SSDS, SCS, SMS, SPS]),
                ok;
            false -> ok
        end,
        % reply straight to the sync FSM with our clock, watermark, peers and objects
        riak_core_vnode:reply(
            Sender,
            {   ok,
                ReqID,
                State#state.id,
                State#state.clock,
                State#state.watermark,
                swc_watermark:peers(State#state.watermark),
                StrippedObjects
            })
    end),
    {noreply, State}.
%% @doc Apply the objects received from a sync round: merge remote and local
%% versions, persist the truly-missing ones, update clock/dotkeymap/watermark,
%% and release the peer for future sync rounds.
handle_sync_repair({sync_repair, {ReqID, _, _, _, _, NoReply}}, State=#state{mode=recovering}) ->
    lager:warning("repairing stuff"),
    case NoReply of
        true -> {noreply, State};
        false -> {reply, {cancel, ReqID, recovering}, State}
    end;
handle_sync_repair({sync_repair, {ReqID, RemoteNode={RemoteIndex,_}, RemoteClock, RemoteWatermark, MissingObjects, NoReply}},
        State=#state{mode=normal, index=MyIndex, clock=LocalClock, dotkeymap=DotKeyMap, watermark=Watermark1}) ->
    Now = os:timestamp(),
    % merge the remote peer's clock entries into the local node clock
    LocalClock2 = sync_clocks(LocalClock, RemoteClock, RemoteIndex),
    % fill the remote objects with the sender's clock and pair each with the
    % local version of the same key
    FilledObjects =
        [{ Key, dotted_db_object:fill(Key, RemoteClock, Obj), guaranteed_get(Key, State) }
            || {Key,Obj} <- MissingObjects],
    % synchronize both objects
    SyncedObjects = [{ Key, dotted_db_object:sync(Remote, Local), Local } || {Key, Remote, Local} <- FilledObjects],
    % keep only the objects that actually bring new information
    RealMissingObjects = [{ Key, Synced } || {Key, Synced, Local} <- SyncedObjects,
        (not dotted_db_object:equal_values(Synced,Local)) orelse
        (dotted_db_object:get_values(Synced)==[] andalso
         dotted_db_object:get_values(Local)==[])],
    % add each new dot to our node clock
    NodeClock = lists:foldl(fun ({_K,O}, Acc) -> dotted_db_object:add_to_node_clock(Acc, O) end, LocalClock2, RealMissingObjects),
    % add new keys to the Dot-Key Mapping
    DKM = swc_dotkeymap:add_objects(DotKeyMap,
        lists:map(fun ({Key,Obj}) -> {Key, dotted_db_object:get_container(Obj)} end, RealMissingObjects)),
    % save the objects; schedule a later strip attempt for the non-stripped ones
    NonStrippedObjects = strip_save_batch(RealMissingObjects, State#state{clock=NodeClock}, Now),
    NSK = add_keys_to_NSK(NonStrippedObjects, State#state.non_stripped_keys),
    % update my watermark
    Watermark3 = update_watermark_after_sync(Watermark1, RemoteWatermark, MyIndex, RemoteIndex, NodeClock, RemoteClock),
    update_jump_clock(RemoteIndex),
    % garbage-collect the dotkeymap with the updated watermark
    State2 = gc_dotkeymap(State#state{clock=NodeClock, dotkeymap=DKM, non_stripped_keys=NSK, watermark=Watermark3}),
    % lager:info("RI: ~p\nRC: ~p\n\n",[RemoteIndex, RemoteClock]),
    % mark this peer as available for sync again
    case get(current_sync) of
        undefined -> ok;
        Set -> put(current_sync, ordsets:filter(fun({_TS, RN}) -> RN =/= RemoteNode end, Set))
    end,
    case ?STAT_SYNC andalso State2#state.stats of
        true ->
            Repaired = length(RealMissingObjects),
            Sent = length(MissingObjects),
            Hit_Ratio = 100*Repaired/max(1, Sent),
            SL = case Sent =/= 0 of
                true ->
                    [{histogram, sync_hit_ratio, round(Hit_Ratio)},
                     {histogram, sync_sent_missing, Sent},
                     {histogram, sync_sent_truly_missing, Repaired}];
                false ->
                    [{histogram, sync_hit_ratio, 100}]
            end,
            dotted_db_stats:notify2([{histogram, sync_metadata_size, byte_size(term_to_binary(RemoteClock))} | SL]),
            ok;
        false ->
            ok
    end,
    case NoReply of
        true -> {noreply, State2};
        false -> {reply, {ok, ReqID}, State2}
    end.
%% @doc Simulate a vnode restart with data loss: retire the old vnode id,
%% generate a fresh one, reset the watermark counters, drop and reopen the
%% storage backend, persist the fresh state, and switch to recovering mode.
%% Replies with the old/new ids and the current peers so the caller can run
%% the inform_peers_restart protocol.
handle_restart({restart, ReqID}, State=#state{mode=recovering}) ->
{reply, {cancel, ReqID, recovering}, State};
handle_restart({restart, ReqID}, State=#state{mode=normal}) ->
OldVnodeID = State#state.id,
% generate a new vnode ID for now
NewVnodeID = new_vnode_id(State#state.index),
% keep track of the old id so its entries can later be jumped in the clock
add_removed_vnode_jump_clock(OldVnodeID),
% replace the old entry for the new entry in the watermark
NewWatermark0 = swc_watermark:retire_peer(State#state.watermark, OldVnodeID, NewVnodeID),
% reset the entire watermark
NewWatermark = swc_watermark:reset_counters(NewWatermark0),
CurrentPeers = dotted_db_utils:peers(State#state.index),
lager:info("RESTART:\nOLD: ~p\nNEW: ~p\nPEERS: ~p",[OldVnodeID, NewVnodeID, CurrentPeers]),
% recreate the stats ETS under the new id
true = delete_ets_all_keys(State),
NewAtomID = create_ets_all_keys(NewVnodeID),
% drop and reopen the storage backend (simulates losing the key-values)
{ok, Storage1} = dotted_db_storage:drop(State#state.storage),
ok = dotted_db_storage:close(Storage1),
{_, NewStorage} = open_storage(State#state.index),
ok = save_vnode_state(State#state.dets, {NewVnodeID, swc_node:new(), swc_dotkeymap:new(), NewWatermark, []}),
% store the number of full-syncs
put(nr_full_syncs, 0),
{reply, {ok, ReqID, {ReqID, State#state.index, OldVnodeID, NewVnodeID}, CurrentPeers},
State#state{
id = NewVnodeID,
atom_id = NewAtomID,
clock = swc_node:new(),
dotkeymap = swc_dotkeymap:new(),
watermark = NewWatermark,
non_stripped_keys = {[],[]},
recover_keys = [],
storage = NewStorage,
syncs = initialize_syncs(State#state.index),
updates_mem = 0,
mode = recovering}}.
%% handle_inform_peers_restart/2: a peer restarted with a new vnode id.
%% Swap the old id for the new one in local bookkeeping, then reply with
%% the first batch of relevant (stripped) keys; the remainder is kept in
%% recover_keys for subsequent batches.
handle_inform_peers_restart({inform_peers_restart, {ReqID, RestartingVnodeIndex, OldVnodeID, NewVnodeID}}, State) ->
    %% keep track of the retired vnode id so its clock entries can be jumped later
    add_removed_vnode_jump_clock(OldVnodeID),
    CurrentPeers = dotted_db_utils:peers(State#state.index),
    replace_peer(CurrentPeers, OldVnodeID, NewVnodeID),
    MyPeersIds = ordsets:add_element(NewVnodeID, ordsets:del_element(OldVnodeID, State#state.peers_ids)),
    NewClock = swc_node:add(State#state.clock, {NewVnodeID, 0}),
    NewWatermark = swc_watermark:retire_peer(State#state.watermark, OldVnodeID, NewVnodeID),
    {AllKeys,_} = ets_get_all_keys(State),
    RelevantKeys = filter_irrelevant_keys(AllKeys, RestartingVnodeIndex),
    %% send at most ?MAX_KEYS_SENT_RECOVERING keys now; the rest go later
    {Now, Later} = lists:split(min(?MAX_KEYS_SENT_RECOVERING,length(RelevantKeys)), RelevantKeys),
    lager:info("Restart transfer => Now: ~p Later: ~p",[length(Now), length(Later)]),
    StrippedObjects = guaranteed_get_strip_list(Now, State#state{clock=NewClock}),
    {LastBatch, RecoverKeys} = case Later of
        [] -> {true, State#state.recover_keys};
        _  -> {false, [{NewVnodeID, Later} | State#state.recover_keys]}
    end,
    %% BUG FIX: the reply tuple ended in a trailing comma (a syntax error)
    %% where the LastBatch flag belongs — handle_recover_keys/2 pattern-matches
    %% a 7-element tuple whose last element is the last-batch boolean, and
    %% LastBatch was otherwise unused here.
    {reply, { ok, stage1, ReqID, {
        ReqID,
        {State#state.index, node()},
        OldVnodeID,
        NewClock,
        StrippedObjects,
        NewWatermark,
        LastBatch
    }}, State#state{clock=NewClock, peers_ids=MyPeersIds, watermark=NewWatermark, recover_keys=RecoverKeys}}.
%% handle_recover_keys/2: receive a batch of keys from a peer while this
%% vnode is recovering. Non-final batches just merge the objects; the final
%% batch also merges clocks/watermarks and may switch back to normal mode
%% once enough full syncs have completed.
handle_recover_keys({recover_keys, {ReqID, RemoteVnode, _OldVnodeID={_,_}, RemoteClock, Objects, _RemoteWatermark, _LastBatch=false}}, State) ->
    {NodeClock, DKM, NonStrippedObjects} = fill_strip_save_kvs(Objects, RemoteClock, State#state.clock, State, os:timestamp()),
    NSK = add_keys_to_NSK(NonStrippedObjects, State#state.non_stripped_keys),
    {reply, {ok, stage2, ReqID, RemoteVnode}, State#state{clock=NodeClock, dotkeymap=DKM, non_stripped_keys=NSK}};
handle_recover_keys({recover_keys, {ReqID, RemoteVnode={RemoteIndex,_}, _OldID, RemoteClock, Objects, RemoteWatermark, _LastBatch=true}}, State) ->
    NodeClock0 = sync_clocks(State#state.clock, RemoteClock, RemoteIndex),
    {NodeClock, DKM, NonStrippedObjects} = fill_strip_save_kvs(Objects, RemoteClock, State#state.clock, State#state{clock=NodeClock0}, os:timestamp()),
    NSK = add_keys_to_NSK(NonStrippedObjects, State#state.non_stripped_keys),
    Watermark = update_watermark_after_sync(State#state.watermark, RemoteWatermark, State#state.index, RemoteIndex, NodeClock, RemoteClock),
    {Mode, NodeClock3} = case get(nr_full_syncs) of
        undefined ->
            {normal, NodeClock};
        N when N >= (?REPLICATION_FACTOR-1)*2-1 ->
            %% enough full syncs: leave recovering mode and jump the clock
            erase(nr_full_syncs),
            NodeClock2 = jump_node_clock_by_index(NodeClock, State#state.id, State#state.index, 20000),
            %% BUG FIX: the next line had lost its comment prefix; it rebinds
            %% NodeClock2 and references undefined variables, so it must have
            %% been commented-out code in the original:
            %% NodeClock2 = swc_node:store_entry(OldVnodeID, {Base+10000,0}, NodeClock),
            {normal, NodeClock2};
        N when N < (?REPLICATION_FACTOR-1)*2-1 ->
            put(nr_full_syncs, N+1),
            {recovering, NodeClock}
    end,
    {reply, {ok, stage4, ReqID, RemoteVnode}, State#state{clock=NodeClock3, dotkeymap=DKM, non_stripped_keys=NSK, watermark=Watermark, mode=Mode}}.
%% handle_inform_peers_restart2/2: send the next batch of keys to a peer
%% that is recovering, taken from the recover_keys list saved earlier.
handle_inform_peers_restart2({inform_peers_restart2, {ReqID, NewVnodeID, OldVnodeID}}, State) ->
    {LastBatch1, Objects, RecoverKeys1} =
        case proplists:get_value(NewVnodeID, State#state.recover_keys) of
            undefined ->
                %% nothing pending for this peer
                {true, [], State#state.recover_keys};
            RelevantKeys ->
                RK = proplists:delete(NewVnodeID, State#state.recover_keys),
                {Now, Later} = lists:split(min(?MAX_KEYS_SENT_RECOVERING,length(RelevantKeys)), RelevantKeys),
                StrippedObjects = guaranteed_get_strip_list(Now, State),
                {LastBatch, RecoverKeys} = case Later of
                    [] -> {true, RK};
                    _  -> {false, [{NewVnodeID, Later} | RK]}
                end,
                {LastBatch, StrippedObjects, RecoverKeys}
        end,
    %% BUG FIX: the reply tuple ended in a trailing comma (syntax error) where
    %% the last-batch flag belongs — handle_recover_keys/2 matches a 7-element
    %% tuple, and LastBatch1 was otherwise unused.
    {reply, { ok, stage3, ReqID, {
        ReqID,
        {State#state.index, node()},
        OldVnodeID,
        State#state.clock,
        Objects,
        State#state.watermark,
        LastBatch1
    }}, State#state{recover_keys=RecoverKeys1}}.
%% guaranteed_get/2: read Key from storage, always returning an object
%% filled with the local node clock. Missing keys yield a new (empty)
%% object; storage errors are logged and also yield a new object.
guaranteed_get(Key, State) ->
case dotted_db_storage:get(State#state.storage, Key) of
{error, not_found} ->
%% key deleted or never written: synthesize an empty object,
%% preserving any FSM time recorded in ETS
Obj = dotted_db_object:new(),
Obj2 = dotted_db_object:set_fsm_time(ets_get_fsm_time(State#state.atom_id, Key), Obj),
dotted_db_object:fill(Key, State#state.clock, Obj2);
{error, Error} ->
lager:error("Error reading a key from storage (guaranteed GET): ~p", [Error]),
dotted_db_object:new();
Obj ->
dotted_db_object:fill(Key, State#state.clock, Obj)
end.
%% Fetch-and-strip every key in Keys; yields [{Key, StrippedObject}].
guaranteed_get_strip_list(Keys, State) ->
    [guaranteed_get_strip(Key, State) || Key <- Keys].
%% guaranteed_get_strip/2: read Key from storage and strip it against the
%% local clock; missing keys and storage errors produce a fresh object
%% that carries only the FSM time recorded in ETS.
guaranteed_get_strip(Key, State) ->
case dotted_db_storage:get(State#state.storage, Key) of
{error, not_found} ->
{Key, dotted_db_object:set_fsm_time(ets_get_fsm_time(State#state.atom_id, Key),
dotted_db_object:new())};
{error, Error} ->
lager:error("Error reading a key from storage (guaranteed GET) (2): ~p", [Error]),
{Key, dotted_db_object:set_fsm_time(ets_get_fsm_time(State#state.atom_id, Key),
dotted_db_object:new())};
Obj ->
{Key, dotted_db_object:strip(State#state.clock, Obj)}
end.
%% Keep only the keys that are replicated by the vnode at Index.
filter_irrelevant_keys(Keys, Index) ->
    [Key || Key <- Keys,
            lists:member(Index, dotted_db_utils:replica_nodes_indices(Key))].
%% save_vnode_state/2: persist the vnode's 5-tuple state
%% {Id, Clock, DotKeyMap, Watermark, NSK} into dets, keyed by the index,
%% and force a sync to disk.
save_vnode_state(Dets, State={Id={Index,_},_,_,_,_}) ->
Key = {?VNODE_STATE_KEY, Index},
ok = dets:insert(Dets, {Key, State}),
ok = dets:sync(Dets),
lager:debug("Saved state for vnode ~p.",[Id]),
ok.
%% read_vnode_state/1: open (creating if needed) the dets file for this
%% vnode index and fetch the saved state, if any. Returns {Dets, State},
%% {Dets, not_found} or {Dets, error, Reason}.
read_vnode_state(Index) ->
    Folder = "data/vnode_state/",
    ok = filelib:ensure_dir(Folder),
    FileName = filename:join(Folder, integer_to_list(Index)),
    Ref = list_to_atom(integer_to_list(Index)),
    {ok, Dets} = dets:open_file(Ref,[{type, set},
                                     {file, FileName},
                                     {auto_save, infinity},
                                     {min_no_slots, 1}]),
    Key = {?VNODE_STATE_KEY, Index},
    %% BUG FIX: the first two case clauses had lost their patterns (the file
    %% was unparsable); reconstructed from dets:lookup/2's documented
    %% return values ([] when absent, {error, Reason} on failure).
    case dets:lookup(Dets, Key) of
        [] -> % there is no saved state
            {Dets, not_found};
        {error, Error} -> % some unexpected error
            {Dets, error, Error};
        [{Key, State={{Index,_},_,_,_,_}}] ->
            {Dets, State}
    end.
%% initialize_watermark/1: ask every peer of this index for its vnode id,
%% asserting that the expected number of peers is present.
initialize_watermark(_NodeId={Index,_}) ->
    lager:debug("Initialize watermark @ IndexNode: ~p",[{Index,node()}]),
    %% BUG FIX: the two explanatory lines below had lost their "%%" prefixes,
    %% making them bare (syntax-breaking) lines; restored as comments.
    %% get the Index and Node of this node's peers, i.e., all nodes that
    %% replicate any subset of local keys.
    IndexNodes = [ IndexNode || IndexNode <- dotted_db_utils:peers(Index)],
    %% for replication factor N = 3, the number of peers should be 4
    %% (2 vnodes before and 2 after).
    (?REPLICATION_FACTOR-1)*2 = length(IndexNodes),
    get_vnode_id(IndexNodes),
    ok.
%% initialize_syncs/1: placeholder sync-statistics entry for a vnode.
initialize_syncs(_Index) ->
[{dummy_node_id,0,0,0,0}].
%% open_storage/1: open the object store for this vnode index, choosing
%% the backend (ets by default, or leveldb/bitcask) from the application
%% environment. Returns {Backend, StorageHandle}.
open_storage(Index) ->
%% pick backend-specific options from app env
{Backend, Options} = case application:get_env(dotted_db, storage_backend, ets) of
leveldb -> {{backend, leveldb}, []};
ets -> {{backend, ets}, []};
bitcask -> {{backend, bitcask}, [{db_opts,[
read_write,
{sync_strategy, application:get_env(dotted_db, bitcask_io_sync, none)},
{io_mode, application:get_env(dotted_db, bitcask_io_mode, erlang)},
{merge_window, application:get_env(dotted_db, bitcask_merge_window, never)}]}]}
end,
lager:debug("Using ~p for vnode ~p.",[Backend,Index]),
DBName = filename:join("data/objects/", integer_to_list(Index)),
{ok, Storage} = dotted_db_storage:open(DBName, Backend, Options),
{Backend, Storage}.
%% close_all/1: orderly shutdown — close storage (logging, not failing, on
%% error), checkpoint the vnode state to dets, drop the ETS table and
%% close dets. A no-op for an undefined state.
close_all(undefined) -> ok;
close_all(State=#state{ id = Id,
storage = Storage,
clock = NodeClock,
watermark = Watermark,
dotkeymap = DotKeyMap,
non_stripped_keys = NSK,
dets = Dets } ) ->
case dotted_db_storage:close(Storage) of
ok -> ok;
{error, Reason} ->
lager:warning("Error on closing storage: ~p",[Reason])
end,
ok = save_vnode_state(Dets, {Id, NodeClock, DotKeyMap, Watermark, NSK}),
true = delete_ets_all_keys(State),
ok = dets:close(Dets).
%% gc_dotkeymap/1: when the watermark is fully populated, prune the
%% dot-key map, retire old peers that no longer own keys, and move the
%% pruned keys into NSK; otherwise log the watermark status and retry
%% initialization if the dot-key map is empty.
gc_dotkeymap(State=#state{dotkeymap = DotKeyMap, watermark = Watermark, non_stripped_keys = NSK}) ->
    case is_watermark_up_to_date(Watermark) of
        true ->
            {DotKeyMap2, RemovedKeys} = swc_dotkeymap:prune(DotKeyMap, Watermark),
            %% BUG FIX: this explanatory line had lost its "%%" prefix:
            %% (i.e. there aren't keys coordinated by those retired nodes in the DotKeyMap)
            OldPeersStillNotSynced = get_old_peers_still_not_synced(),
            Watermark2 = swc_watermark:prune_retired_peers(Watermark, DotKeyMap2, OldPeersStillNotSynced),
            NSK2 = add_keys_from_dotkeymap_to_NSK(RemovedKeys, NSK),
            State#state{dotkeymap = DotKeyMap2, non_stripped_keys=NSK2, watermark=Watermark2};
        false ->
            {WM,_} = State#state.watermark,
            lager:info("Watermark not up to date: ~p entries, mode: ~p",[orddict:size(WM), State#state.mode]),
            [case orddict:size(V) =:= (?REPLICATION_FACTOR*2)-1 of
                true  -> lager:info("\t ~p for ~p \n", [orddict:size(V), K]);
                false -> lager:info("\t ~p for ~p \n\t\t ~p\n", [orddict:size(V), K, V])
            end || {K,V} <- WM],
            swc_dotkeymap:empty(DotKeyMap) andalso initialize_watermark(State#state.id),
            State
    end.
-spec schedule_strip_keys(non_neg_integer()) -> ok.
%% Schedule a strip_keys message to self after Interval milliseconds.
schedule_strip_keys(Interval) ->
erlang:send_after(Interval, self(), strip_keys),
ok.
-spec schedule_report(non_neg_integer()) -> ok.
%% Schedule a report_tick message to self after Interval milliseconds.
schedule_report(Interval) ->
    %% BUG FIX: this line had lost its comment prefix:
    %% Perform tick every X seconds
    erlang:send_after(Interval, self(), report_tick),
    ok.
-spec report(state()) -> {any(), state()}.
%% report/1: periodic tick — emit stats and count in-memory updates,
%% checkpointing the vnode state to dets once the counter exceeds
%% ?UPDATE_LIMITE*50.
report(State=#state{ id = Id,
                     clock = NodeClock,
                     watermark = Watermark,
                     dotkeymap = DotKeyMap,
                     non_stripped_keys = NSK,
                     dets = Dets,
                     updates_mem = UpMem } ) ->
    report_stats(State),
    %% BUG FIX: the "true ->" clause head was missing from this case,
    %% leaving "UpMem + 1;" dangling; reconstructed.
    UpdatesMemory = case UpMem =< ?UPDATE_LIMITE*50 of
        true -> % not yet time to persist; just bump the counter
            UpMem + 1;
        false -> % persist the current state and reset the counter
            save_vnode_state(Dets, {Id, NodeClock, DotKeyMap, Watermark, NSK}),
            0
    end,
    {ok, State#state{updates_mem=UpdatesMemory}}.
%% report_stats/1: push state-size, state-length and delete/write-status
%% histograms to dotted_db_stats, gated by the ?STAT_* compile-time flags.
%% Only emits when the vnode has non-trivial state.
%% NOTE(review): only a stats=true clause is visible here — presumably this
%% is never called with stats=false; confirm at the call sites.
report_stats(State=#state{stats=true}) ->
case (not swc_dotkeymap:empty(State#state.dotkeymap)) andalso
State#state.clock =/= swc_node:new() andalso
State#state.watermark =/= swc_watermark:new() of
true ->
%% state "length" metrics (entry counts)
SSL = case ?STAT_STATE_LENGTH of
false -> [];
true ->
KLLEN = {histogram, kl_len, swc_dotkeymap:size(State#state.dotkeymap)},
MissingDots = [ miss_dots(Entry) || {_,Entry} <- State#state.clock ],
BVVMD = {histogram, bvv_missing_dots, average(MissingDots)},
{Del,Wrt} = State#state.non_stripped_keys,
NumNSKeys = lists:sum([dict:size(Map) || {_, Map} <- Wrt]) + length(Del),
NSKN = {histogram, nsk_number, NumNSKeys},
[KLLEN, BVVMD, NSKN]
end,
%% state size metrics (serialized byte sizes)
SSS = case ?STAT_STATE_SIZE of
false -> [];
true ->
KLSIZE = {histogram, kl_size, size(term_to_binary(State#state.dotkeymap))},
BVVSIZE = {histogram, bvv_size, size(term_to_binary(State#state.clock))},
NSKSIZE = {histogram, nsk_size, size(term_to_binary(State#state.non_stripped_keys))},
[KLSIZE, BVVSIZE, NSKSIZE]
end,
%% delete/write completion metrics from the per-key ETS table
SD = case ?STAT_DELETES of
false -> [];
true ->
ADelKeys = length(ets_get_actual_deleted(State#state.atom_id)),
IDelKeys = length(ets_get_issued_deleted(State#state.atom_id)),
DI = {histogram, deletes_incomplete, IDelKeys},
DC = {histogram, deletes_completed, ADelKeys},
IWKeys = length(ets_get_issued_written(State#state.atom_id)),
FWKeys = length(ets_get_final_written(State#state.atom_id)),
WI = {histogram, write_incomplete, IWKeys},
WC = {histogram, write_completed, FWKeys},
[DI,DC,WI,WC]
end,
dotted_db_stats:notify2(SD ++ SSS ++ SSL),
ok;
false ->
ok
end,
{ok, State}.
%% Count the dots missing from one BVV entry {Base, Bitmap}: the gaps
%% between the base N and the highest set bit of the bitmap.
miss_dots({N,B}) ->
    Present = values_aux(N, B, []),
    if
        Present =:= [] -> 0;
        true -> lists:max(Present) - N - length(Present)
    end.
%% Walk bitmap B least-significant bit first, collecting the positions
%% (counted from N+1 upward) whose bit is set; positions accumulate in
%% reverse order (highest first).
values_aux(_, 0, Acc) -> Acc;
values_aux(N, B, Acc) ->
    Pos = N + 1,
    Rest = B bsr 1,
    case B rem 2 of
        1 -> values_aux(Pos, Rest, [Pos | Acc]);
        0 -> values_aux(Pos, Rest, Acc)
    end.
%% Arithmetic mean of a list as a float; the max(1, _) guard makes the
%% empty list yield 0.0 instead of dividing by zero.
average(Xs) ->
    Total = lists:sum(Xs),
    Total / max(1, length(Xs)).
%% strip_save_batch/3,4: convenience entry points for strip_save_batch/5,
%% starting with an empty accumulator; the ETS-update flag defaults to true.
strip_save_batch(O,S,Now) -> strip_save_batch(O,S,Now,true).
strip_save_batch(Objects, State, Now, ETS) ->
strip_save_batch(Objects, State, Now, {[],[]}, ETS).
%% strip_save_batch/5: strip each object against the local clock, batch the
%% resulting puts/deletes into storage, mirror status into ETS, and return
%% the objects that could not be fully stripped (still carry a context).
strip_save_batch([], State, _Now, {NSK, StrippedObjects}, _ETS) ->
ok = dotted_db_storage:write_batch(State#state.storage, StrippedObjects),
NSK;
strip_save_batch([{Key, Obj} | Objects], S=#state{atom_id=ID}, Now, {NSK, StrippedObjects}, ETS) ->
StrippedObj = dotted_db_object:strip(S#state.clock, Obj),
{Values, Context} = dotted_db_object:get_container(StrippedObj),
Values2 = [{D,V} || {D,V} <- Values, V =/= ?DELETE_OP],
StrippedObj2 = dotted_db_object:set_container({Values2, Context}, StrippedObj),
%% BUG FIX: the three explanatory lines below had lost their "%%" prefixes:
%% 1 * it has no value and no causal history -> can be deleted
%% 2 * has values, with causal context -> it's a normal write and we should persist
%% 3 * has values, but no causal context -> it's the final form for this write
Acc = case {Values2, Context} of
{[],[]} ->
?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 0),
ETS andalso ets_set_status(ID, Key, ?ETS_DELETE_STRIP),
ETS andalso ets_set_strip_time(ID, Key, Now),
ETS andalso notify_strip_delete_latency(Now, Now),
ETS andalso ets_set_dots(ID, Key, []),
{NSK, [{delete, Key}|StrippedObjects]};
{_ ,[]} ->
?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 1),
ETS andalso ets_set_status(ID, Key, ?ETS_WRITE_STRIP),
ETS andalso ets_set_strip_time(ID, Key, Now),
ETS andalso notify_strip_write_latency(Now, Now),
ETS andalso ets_set_dots(ID, Key, get_value_dots_for_ets(StrippedObj)),
{NSK, [{put, Key, StrippedObj2}|StrippedObjects]};
{[],_CC} ->
?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, length(Context)),
ETS andalso ets_set_status(ID, Key, ?ETS_DELETE_NO_STRIP),
ETS andalso ets_set_dots(ID, Key, get_value_dots_for_ets(StrippedObj)),
{[{Key, StrippedObj2}|NSK], [{put, Key, StrippedObj2}|StrippedObjects]};
{_ ,_CC} ->
?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, length(Context)+1),
ETS andalso ets_set_status(ID, Key, ?ETS_WRITE_NO_STRIP),
ETS andalso ets_set_dots(ID, Key, get_value_dots_for_ets(StrippedObj)),
{[{Key, StrippedObj2}|NSK], [{put, Key, StrippedObj2}|StrippedObjects]}
end,
ETS andalso notify_write_latency(dotted_db_object:get_fsm_time(StrippedObj), Now),
ETS andalso ets_set_write_time(ID, Key, Now),
ETS andalso ets_set_fsm_time(ID, Key, dotted_db_object:get_fsm_time(StrippedObj)),
strip_save_batch(Objects, S, Now, Acc, ETS).
%% Try to remove elements from Non-Stripped Keys
-spec read_strip_write({[{key(),vv()}], [{dot(), dict:dict()}]}, state()) -> {[{key(),vv()}], [{dot(), dict:dict()}]}.
%% read_strip_write/2: try to strip every entry in the NSK structure
%% (deletes and writes), persisting whatever became strippable and
%% returning the entries that remain non-stripped.
read_strip_write({Deletes, Writes}, State) ->
Now = os:timestamp(),
{Stripped, NotStripped} = split_deletes(Deletes, State, {[],[]}),
Deletes2 = strip_maybe_save_delete_batch(Stripped, State, Now) ++ NotStripped,
Writes2 = compute_writes_NSK(Writes, State, [], [], Now),
{Deletes2, Writes2}.
%% Take care of NSK deletes
%% split_deletes/3: partition pending NSK deletes into those whose causal
%% context is now covered by the local clock (candidates for stripping,
%% paired with the stored object) and those still waiting on the remaining
%% version vector.
split_deletes([], _State, Acc) -> Acc;
split_deletes([{Key, Ctx} | Deletes], State, {Stripped, NotStripped}) ->
case strip_context(Ctx,State#state.clock) of
[] ->
case read_one_key(Key, State) of
0 ->
%% the key is already gone from storage: mark it fully deleted
ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_STRIP),
ets_set_strip_time(State#state.atom_id, Key, os:timestamp()),
split_deletes(Deletes, State, {Stripped, NotStripped});
Obj ->
split_deletes(Deletes, State, {[{Key, Obj}|Stripped], NotStripped})
end;
VV ->
split_deletes(Deletes, State, {Stripped, [{Key, VV}|NotStripped]})
end.
-spec strip_context(vv(), bvv()) -> vv().
%% Remove from Context every entry whose counter is already covered by the
%% base of the matching node-clock entry; the remainder is the part of the
%% version vector the local clock has not yet seen.
strip_context(Context, NodeClock) ->
    swc_vv:filter(fun (Id, Counter) ->
                          {Base, _Dots} = swc_node:get(Id, NodeClock),
                          Counter > Base
                  end,
                  Context).
%% strip_maybe_save_delete_batch/3,4: entry points for the /5 version with
%% an empty accumulator; the ETS-update flag defaults to true.
strip_maybe_save_delete_batch(O,S,Now) -> strip_maybe_save_delete_batch(O,S,Now,true).
strip_maybe_save_delete_batch(Objects, State, Now, ETS) ->
strip_maybe_save_delete_batch(Objects, State, Now, {[],[]}, ETS).
%% strip_maybe_save_delete_batch/5: strip candidate-delete objects against
%% the local clock; fully-stripped objects are deleted/persisted in a
%% storage batch, while objects that still carry a context go back to NSK.
strip_maybe_save_delete_batch([], State, _Now, {NSK, StrippedObjects}, _ETS) ->
ok = dotted_db_storage:write_batch(State#state.storage, StrippedObjects),
NSK;
strip_maybe_save_delete_batch([{Key={_,_}, Obj} | Objects], State, Now, {NSK, StrippedObjects}, ETS) ->
StrippedObj = dotted_db_object:strip(State#state.clock, Obj),
{Values, Context} = dotted_db_object:get_container(StrippedObj),
Values2 = [{D,V} || {D,V} <- Values, V =/= ?DELETE_OP],
StrippedObj2 = dotted_db_object:set_container({Values2, Context}, StrippedObj),
%% BUG FIX: the three explanatory lines below had lost their "%%" prefixes:
%% 1 * it has no value and no causal history -> can be deleted
%% 2 * has values, with causal context -> it's a normal write and we should persist
%% 3 * has values, but no causal context -> it's the final form for this write
Acc = case {Values2, Context} of
{[],[]} ->
?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 0),
ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_STRIP),
ETS andalso ets_set_strip_time(State#state.atom_id, Key, Now),
ETS andalso notify_strip_delete_latency(ets_get_write_time(State#state.atom_id, Key), Now),
ETS andalso ets_set_fsm_time(State#state.atom_id, Key, dotted_db_object:get_fsm_time(StrippedObj)),
ETS andalso ets_set_dots(State#state.atom_id, Key, []),
{NSK, [{delete, Key}|StrippedObjects]};
{_ ,[]} ->
?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 1),
ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_WRITE_STRIP),
ETS andalso ets_set_strip_time(State#state.atom_id, Key, Now),
ETS andalso notify_strip_write_latency(ets_get_write_time(State#state.atom_id, Key), Now),
ETS andalso ets_set_fsm_time(State#state.atom_id, Key, dotted_db_object:get_fsm_time(StrippedObj)),
ETS andalso ets_set_dots(State#state.atom_id, Key, get_value_dots_for_ets(StrippedObj)),
{NSK, [{put, Key, StrippedObj2}|StrippedObjects]};
{[],_CC} ->
{[{Key, Context}|NSK], StrippedObjects};
{_ ,_CC} ->
{[{Key, Context}|NSK], StrippedObjects}
end,
strip_maybe_save_delete_batch(Objects, State, Now, Acc, ETS).
%% Take care of NSK writes
%% compute_writes_NSK/5: walk the per-node NSK write dictionaries, strip
%% whatever dots are now strippable (collected into a storage batch), and
%% keep the dictionaries that still have pending entries.
compute_writes_NSK([], State, Batch, NSK, _Now) ->
ok = dotted_db_storage:write_batch(State#state.storage, Batch),
NSK;
compute_writes_NSK([{NodeID, Dict} |Tail], State, Batch, NSK, Now) ->
{DelDots, SaveBatch} = dict:fold(fun(Dot, Key, Acc) -> dictNSK(Dot, Key, Acc, State, Now) end, {[],[]}, Dict),
NewDict = remove_stripped_writes_NSK(DelDots, Dict),
case dict:size(NewDict) of
0 -> compute_writes_NSK(Tail, State, SaveBatch++Batch, NSK, Now);
_ -> compute_writes_NSK(Tail, State, SaveBatch++Batch, [{NodeID, NewDict}| NSK], Now)
end.
%% dictNSK/5: decide whether one non-stripped write (a dot in the NSK
%% dictionary) can now be stripped. Returns the updated
%% {DotsToDelete, StorageBatch} accumulator.
dictNSK(Dot, {Key, undefined}, {Del, Batch}, State, Now) ->
    case read_one_key(Key, State) of
        0 ->
            ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_STRIP),
            ets_set_strip_time(State#state.atom_id, Key, os:timestamp()),
            {[Dot|Del], Batch};
        Obj ->
            dictNSK2(Dot, {Key, Obj}, {Del,Batch}, State, Now, true)
    end;
dictNSK(Dot, {Key, Ctx}, {Del, Batch}, State, Now) ->
    case strip_context(Ctx, State#state.clock) of
        [] ->
            case read_one_key(Key, State) of
                0 ->
                    ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_STRIP),
                    ets_set_strip_time(State#state.atom_id, Key, os:timestamp()),
                    {[Dot|Del], Batch};
                Obj ->
                    dictNSK2(Dot, {Key, Obj}, {Del,Batch}, State, Now, true)
            end;
        _V ->
            %% BUG FIX: this clause had lost its body; only a garbled
            %% "case random:uniform() < 0.05 of" line (a stripped-out debug
            %% comment, presumably) remained. The context is not yet covered
            %% by the local clock, so the key stays in NSK untouched.
            {Del, Batch}
    end.
%% dictNSK2/6: the key was read from storage; strip it against the local
%% clock and either delete it, persist its final form, or leave it in NSK.
dictNSK2(Dot, {Key, Obj}, {Del, Batch}, State, Now, ETS) ->
    StrippedObj = dotted_db_object:strip(State#state.clock, Obj),
    {Values, Context} = dotted_db_object:get_container(StrippedObj),
    Values2 = [{D,V} || {D,V} <- Values, V =/= ?DELETE_OP],
    StrippedObj2 = dotted_db_object:set_container({Values2, Context}, StrippedObj),
    %% BUG FIX: the case patterns below were missing (stripped comments took
    %% the clause heads and the final body with them); reconstructed to
    %% mirror strip_save_batch/5 and strip_maybe_save_delete_batch/5:
    %% 1 * no values and no causal history -> can be deleted
    %% 2 * values, no causal context       -> final form; persist it
    %% 3 * still has a causal context      -> cannot strip yet; keep in NSK
    case {Values2, Context} of
        {[],[]} ->
            ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 0),
            ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_STRIP),
            ETS andalso ets_set_strip_time(State#state.atom_id, Key, Now),
            ETS andalso notify_strip_delete_latency(ets_get_write_time(State#state.atom_id, Key), Now),
            ETS andalso ets_set_fsm_time(State#state.atom_id, Key, dotted_db_object:get_fsm_time(StrippedObj)),
            ETS andalso ets_set_dots(State#state.atom_id, Key, []),
            {[Dot|Del], [{delete, Key}|Batch]};
        {_ ,[]} ->
            ?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 1),
            ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_WRITE_STRIP),
            ETS andalso ets_set_strip_time(State#state.atom_id, Key, Now),
            ETS andalso notify_strip_write_latency(ets_get_write_time(State#state.atom_id, Key), Now),
            ETS andalso ets_set_fsm_time(State#state.atom_id, Key, dotted_db_object:get_fsm_time(StrippedObj)),
            ETS andalso ets_set_dots(State#state.atom_id, Key, get_value_dots_for_ets(StrippedObj)),
            {[Dot|Del], [{put, Key, StrippedObj2}|Batch]};
        {_,_} ->
            %% not yet strippable; leave the accumulator unchanged
            {Del, Batch}
    end.
%% Erase every dot in the first argument from the NSK write dictionary.
remove_stripped_writes_NSK(Dots, Dict) ->
    lists:foldl(fun dict:erase/2, Dict, Dots).
%% read_one_key/2: fetch a single key from storage, returning the object,
%% or 0 when the key is absent or storage errored (errors are logged).
read_one_key(Key={_,_}, State) ->
case dotted_db_storage:get(State#state.storage, Key) of
{error, not_found} ->
0;
{error, Error} ->
lager:error("Error reading a key from storage: ~p", [Error]),
0;
Obj ->
Obj
end.
%% Add elements to Non-Stripped Keys
%% Fold every {Key, Object} pair into the non-stripped-keys structure.
add_keys_to_NSK(Pairs, NSK) ->
    lists:foldl(fun ({Key, Object}, Acc) ->
                        add_key_to_NSK(Key, Object, Acc)
                end, NSK, Pairs).
%% add_key_to_NSK/3: dispatch on the object's {values, context} container.
add_key_to_NSK(Key, Object, NSK) ->
add_key_to_NSK2(Key, dotted_db_object:get_container(Object), NSK).
%% add_key_to_NSK2/3: an empty container needs no tracking; a delete
%% (no values) goes to the deletes side with its context; a write adds one
%% {Key, Dot, Ctx} entry per value dot to the writes side.
add_key_to_NSK2(_, {[],[]}, NSK) -> NSK;
add_key_to_NSK2(Key, {[],Ctx}, {Del,Wrt}) ->
{[{Key, Ctx}|Del], Wrt};
add_key_to_NSK2(Key, {DotValues,Ctx}, NSK) ->
KeyDots = [{Key, Dot, Ctx} || {Dot,_} <- DotValues],
add_writes_to_NSK(KeyDots, NSK).
%% Insert each {Key, Dot, Context} write into the writes side of NSK.
add_writes_to_NSK([], NSK) -> NSK;
add_writes_to_NSK(Writes, {Del, Wrt}) ->
    Wrt2 = lists:foldl(fun (Write={_,_,_}, Acc) ->
                               add_one_write_to_NSK(Write, Acc)
                       end, Wrt, Writes),
    {Del, Wrt2}.
%% add_keys_from_dotkeymap_to_NSK/2: move keys pruned out of the dot-key
%% map into the writes side of NSK (with an undefined context).
add_keys_from_dotkeymap_to_NSK([], NSK) -> NSK;
add_keys_from_dotkeymap_to_NSK([{NodeId, DotKeys}|Tail], {Del,Wrt}) ->
    Wrt2 = lists:foldl(
        fun({Dot,Key}, Acc) ->
            add_one_write_to_NSK({Key, {NodeId, Dot}, undefined}, Acc)
        end,
        Wrt,
        DotKeys),
    %% BUG FIX: the next line had lost its "%%" prefix; it illegally rebound
    %% Wrt2 and referenced undefined variables, so it must have been a
    %% commented-out earlier version:
    %% Wrt2 = add_one_write_to_NSK({Key, {NodeID, Base+1}, undefined}, Wrt),
    add_keys_from_dotkeymap_to_NSK(Tail, {Del,Wrt2}).
%% Store one write dot {NodeID, Counter} -> {Key, Context} in the
%% per-node list of write dictionaries, creating the node's entry when it
%% is not present yet.
add_one_write_to_NSK({Key, {NodeID,Counter}, Context}, []) ->
    %% no entry for this node yet: start a fresh dictionary
    [{NodeID, dict:from_list([{Counter, {Key, Context}}])}];
add_one_write_to_NSK({Key, {NodeID, Counter}, Context}, [{NodeID2, Dict}|Tail])
    when NodeID =:= NodeID2 andalso Counter =/= -1 ->
    [{NodeID, dict:store(Counter, {Key, Context}, Dict)} | Tail];
add_one_write_to_NSK(KV={_, {NodeID,_}, _}, [H={NodeID2, _}|Tail])
    when NodeID =/= NodeID2 ->
    %% different node: keep scanning the list
    [H | add_one_write_to_NSK(KV, Tail)].
%% fill_strip_save_kvs/5,7: merge incoming objects (filled with the remote
%% clock) with their local versions, persist whatever changed, and return
%% {NodeClock, DotKeyMap, NonStrippedObjects}.
fill_strip_save_kvs(Objects, RemoteClock, LocalClock, State, Now) ->
fill_strip_save_kvs(Objects, RemoteClock, LocalClock, State, Now, {[],[]}, true).
fill_strip_save_kvs([], _, _, State, _Now, {NSK, StrippedObjects}, _ETS) ->
ok = dotted_db_storage:write_batch(State#state.storage, StrippedObjects),
{State#state.clock, State#state.dotkeymap, NSK};
fill_strip_save_kvs([{Key={_,_}, Object} | Objects], RemoteClock, LocalClock, State, Now, {NSK, StrippedObjects}, ETS) ->
FilledObject = dotted_db_object:fill(Key, RemoteClock, Object),
DiskObject = guaranteed_get(Key, State#state{clock=LocalClock}),
FinalObject = dotted_db_object:sync(FilledObject, DiskObject),
%% BUG FIX: this line had lost its "%%" prefix:
%% test if the FinalObject has newer information
case (not dotted_db_object:equal_values(FinalObject, DiskObject)) orelse
(dotted_db_object:get_values(FinalObject)==[] andalso dotted_db_object:get_values(DiskObject)==[]) of
false -> fill_strip_save_kvs(Objects, RemoteClock, LocalClock, State, Now, {NSK, StrippedObjects}, ETS);
true ->
StateNodeClock = dotted_db_object:add_to_node_clock(State#state.clock, FinalObject),
%% BUG FIX: this line had lost its "%%" prefix:
%% add new keys to the Dot-Key Mapping
DKM = swc_dotkeymap:add_objects(State#state.dotkeymap, [{Key, dotted_db_object:get_container(FinalObject)}]),
StrippedObject = dotted_db_object:strip(State#state.clock, FinalObject),
{Values, Context} = dotted_db_object:get_container(StrippedObject),
Values2 = [{D,V} || {D,V} <- Values, V =/= ?DELETE_OP],
StrippedObject2 = dotted_db_object:set_container({Values2, Context}, StrippedObject),
Acc = case {Values2, Context} of
{[],[]} ->
?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 0),
ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_STRIP),
ETS andalso ets_set_strip_time(State#state.atom_id, Key, Now),
ETS andalso notify_strip_delete_latency(Now, Now),
ETS andalso ets_set_dots(State#state.atom_id, Key, []),
{NSK, [{delete, Key}|StrippedObjects]};
{_ ,[]} ->
?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, 1),
ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_WRITE_STRIP),
ETS andalso ets_set_strip_time(State#state.atom_id, Key, Now),
ETS andalso notify_strip_write_latency(Now, Now),
ETS andalso ets_set_dots(State#state.atom_id, Key, get_value_dots_for_ets(StrippedObject2)),
{NSK, [{put, Key, StrippedObject2}|StrippedObjects]};
{[],_CC} ->
?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, length(Context)),
ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_DELETE_NO_STRIP),
ETS andalso ets_set_dots(State#state.atom_id, Key, get_value_dots_for_ets(StrippedObject2)),
{[{Key, StrippedObject2}|NSK], [{put, Key, StrippedObject2}|StrippedObjects]};
{_ ,_CC} ->
?STAT_ENTRIES andalso dotted_db_stats:notify({histogram, entries_per_clock}, length(Context)+1),
ETS andalso ets_set_status(State#state.atom_id, Key, ?ETS_WRITE_NO_STRIP),
ETS andalso ets_set_dots(State#state.atom_id, Key, get_value_dots_for_ets(StrippedObject2)),
{[{Key, StrippedObject2}|NSK], [{put, Key, StrippedObject2}|StrippedObjects]}
end,
ETS andalso notify_write_latency(dotted_db_object:get_fsm_time(StrippedObject2), Now),
ETS andalso ets_set_write_time(State#state.atom_id, Key, Now),
ETS andalso ets_set_fsm_time(State#state.atom_id, Key, dotted_db_object:get_fsm_time(StrippedObject2)),
fill_strip_save_kvs(Objects, RemoteClock, LocalClock, State#state{dotkeymap=DKM, clock=StateNodeClock}, Now, Acc, ETS)
end.
%% is_watermark_up_to_date/1: true when the watermark has one row per peer
%% and every row is itself complete.
is_watermark_up_to_date({WM,_}) ->
(orddict:size(WM) =:= (?REPLICATION_FACTOR*2)-1) andalso
is_watermark_up_to_date2(WM).
%% Every peer's row in the watermark must carry entries for all
%% (?REPLICATION_FACTOR*2)-1 peers.
is_watermark_up_to_date2(Rows) ->
    lists:all(fun ({_, Row}) ->
                      orddict:size(Row) =:= (?REPLICATION_FACTOR*2)-1
              end, Rows).
%% new_vnode_id/1: mint a fresh vnode identity {Index, RandomInteger}.
%% NOTE(review): the random module is deprecated in modern OTP in favour of
%% rand, but the rest of this file uses random: consistently.
new_vnode_id(Index) ->
dotted_db_utils:maybe_seed(),
{Index, random:uniform(999999999999)}.
%% create_ets_all_keys/1: create (if absent) the named public ETS table
%% that tracks per-key status for this vnode id; returns the table name.
create_ets_all_keys(NewVnodeID) ->
AtomID = get_ets_id(NewVnodeID),
_ = ((ets:info(AtomID) =:= undefined) andalso
ets:new(AtomID, [named_table, public, set, {write_concurrency, false}])),
AtomID.
%% delete_ets_all_keys/1: drop the vnode's ETS table if it exists.
delete_ets_all_keys(#state{atom_id=AtomID}) ->
_ = ((ets:info(AtomID) =/= undefined) andalso ets:delete(AtomID)),
true.
-spec get_ets_id(any()) -> atom().
%% Derive an ETS table-name atom from an arbitrary term by printing it.
get_ets_id(Id) ->
    Printed = io_lib:format("~p", [Id]),
    list_to_atom(lists:flatten(Printed)).
%% sync_clocks/3: merge into the local clock only the remote entries that
%% belong to the remote peer's own index.
sync_clocks(LocalClock, RemoteClock, RemoteIndex) ->
RemoteClock2 = orddict:filter(fun ({Index,_},_) -> Index == RemoteIndex end, RemoteClock),
swc_node:merge(LocalClock, RemoteClock2).
%% update_watermark_after_sync/6: after a sync, refresh our watermark rows
%% for our own vnode ids (from MyClock) and for the remote peer's vnode
%% ids (from RemoteClock), then left-join the remote watermark in.
update_watermark_after_sync(MyWatermark, RemoteWatermark, MyIndex, RemoteIndex, MyClock, RemoteClock) ->
%% update the entries for our own vnode ids
MyWatermark2 = orddict:fold(
fun (Vnode={Index,_}, _, Acc) ->
case Index == MyIndex of
false -> Acc;
true -> swc_watermark:update_peer(Acc, Vnode, MyClock)
end
end, MyWatermark, MyClock),
%% update the entries for the remote peer's vnode ids
MyWatermark3 = orddict:fold(
fun (Vnode={Index,_}, _, Acc) ->
case Index == RemoteIndex of
false -> Acc;
true -> swc_watermark:update_peer(Acc, Vnode, RemoteClock)
end
end, MyWatermark2, RemoteClock),
swc_watermark:left_join(MyWatermark3, RemoteWatermark).
%% @doc Returns a pair: first is the keys present in storage,
%% the second is the keys completely deleted from storage.
%% ets_get_all_keys/1: split all tracked keys into {StillPresent, Deleted}
%% based on the per-key status column.
ets_get_all_keys(State) ->
ets:foldl(fun
({Key,St,_,_,_,_}, {Others, Deleted}) when St =:= ?ETS_DELETE_STRIP -> {Others, [Key|Deleted]};
({Key,St,_,_,_,_}, {Others, Deleted}) when St =/= ?ETS_DELETE_STRIP -> {[Key|Others], Deleted}
end, {[],[]}, State#state.atom_id).
%% Per-key ETS row layout: {Key, Status, StripTime, WriteTime, FsmTime, Dots}.
%% Each setter ensures the row exists, then updates its column.
ets_set_status(Id, Key, Status) -> ensure_tuple(Id, Key), ets:update_element(Id, Key, {2, Status}).
ets_set_strip_time(Id, Key, Time) -> ensure_tuple(Id, Key), ets:update_element(Id, Key, {3, Time}).
ets_set_write_time(Id, Key, Time) -> ensure_tuple(Id, Key), ets:update_element(Id, Key, {4, Time}).
%% an undefined FSM time is ignored rather than stored
ets_set_fsm_time(_, _, undefined) -> true;
ets_set_fsm_time(Id, Key, Time) -> ensure_tuple(Id, Key), ets:update_element(Id, Key, {5, Time}).
ets_set_dots(Id, Key, Dots) -> ensure_tuple(Id, Key), ets:update_element(Id, Key, {6, Dots}).
%% notify_write_latency/2: report (write - fsm) latency in ms; a missing
%% FSM time is logged as a warning, a missing write time is ignored.
notify_write_latency(undefined, _WriteTime) ->
lager:warning("Undefined FSM write time!!!!!!!!"), ok;
notify_write_latency(_FSMTime, undefined) ->
ok;
notify_write_latency(FSMTime, WriteTime) ->
case ?STAT_WRITE_LATENCY of
false -> ok;
true ->
Delta = timer:now_diff(WriteTime, FSMTime)/1000,
dotted_db_stats:notify({gauge, write_latency}, Delta)
end.
%% notify_strip_write_latency/2: report (strip - write) latency in ms for
%% a stripped write; no-op when the write time is unknown.
notify_strip_write_latency(undefined, _StripTime) -> ok;
notify_strip_write_latency(WriteTime, StripTime) ->
case ?STAT_STRIP_LATENCY of
false -> ok;
true ->
Delta = timer:now_diff(StripTime, WriteTime)/1000,
dotted_db_stats:notify({gauge, strip_write_latency}, Delta)
end.
%% notify_strip_delete_latency/2: report (strip - write) latency in ms for
%% a stripped delete; no-op when the write time is unknown.
notify_strip_delete_latency(undefined, _StripTime) -> ok;
notify_strip_delete_latency(WriteTime, StripTime) ->
case ?STAT_STRIP_LATENCY of
false -> ok;
true ->
Delta = timer:now_diff(StripTime, WriteTime)/1000,
dotted_db_stats:notify({gauge, strip_delete_latency}, Delta)
end.
%% Guarantee that Key has a row in the ETS table, inserting an
%% all-undefined 6-tuple when absent. Returns false when the row already
%% existed, true after inserting (same as the original andalso form).
ensure_tuple(Id, Key) ->
    Blank = undefined,
    case ets:member(Id, Key) of
        true  -> false;
        false -> ets:insert(Id, {Key, Blank, Blank, Blank, Blank, Blank})
    end.
%% ets_get_status(Id, Key)     -> ensure_tuple(Id, Key), ets:lookup_element(Id, Key, 2).
%% ets_get_strip_time(Id, Key) -> ensure_tuple(Id, Key), ets:lookup_element(Id, Key, 3).
%% Column getters for the per-key ETS row (see the setters above);
%% they create the row on demand so the lookup cannot fail.
ets_get_write_time(Id, Key) -> ensure_tuple(Id, Key), ets:lookup_element(Id, Key, 4).
ets_get_fsm_time(Id, Key) -> ensure_tuple(Id, Key), ets:lookup_element(Id, Key, 5).
%% ets_get_dots(Id, Key) -> ensure_tuple(Id, Key), ets:lookup_element(Id, Key, 6).
%% Match-spec selectors over the per-key ETS table: return the keys whose
%% status column equals the given ?ETS_* state.
ets_get_issued_deleted(Id) ->
ets:select(Id, [{{'$1', '$2', '_', '_', '_', '_'}, [{'==', '$2', ?ETS_DELETE_NO_STRIP}], ['$1'] }]).
ets_get_actual_deleted(Id) ->
ets:select(Id, [{{'$1', '$2', '_', '_', '_', '_'}, [{'==', '$2', ?ETS_DELETE_STRIP}], ['$1'] }]).
ets_get_issued_written(Id) ->
ets:select(Id, [{{'$1', '$2', '_', '_', '_', '_'}, [{'==', '$2', ?ETS_WRITE_NO_STRIP}], ['$1'] }]).
ets_get_final_written(Id) ->
ets:select(Id, [{{'$1', '$2', '_', '_', '_', '_'}, [{'==', '$2', ?ETS_WRITE_STRIP}], ['$1'] }]).
%% compute_strip_latency/1: (strip - write) latencies in ms for every key
%% that has both timestamps recorded.
compute_strip_latency(Id) ->
ets:foldl(fun
({_,_,undefined,_,_,_}, Acc) -> Acc; ({_,_,_,undefined,_,_}, Acc) -> Acc;
({_,_,Strip,Write,_,_}, Acc) -> [timer:now_diff(Strip, Write)/1000 | Acc]
end, [], Id).
%% compute_replication_latency/1: (write - fsm) latencies in ms for every
%% key that has both timestamps recorded.
compute_replication_latency(Id) ->
ets:foldl(fun
({_,_,_,_,undefined,_}, Acc) -> Acc; ({_,_,_,undefined,_,_}, Acc) -> Acc;
({_,_,_,Write,Fsm,_}, Acc) -> [timer:now_diff(Write, Fsm)/1000 | Acc]
end, [], Id).
%% ({Key,?ETS_DELETE_NO_STRIP,_,_,_,Dots}, {Others, Deleted}) -> {Others, [{Key, lists:sort(Dots)}|Deleted]};
%% storage_get_all_dots/1: fold over storage collecting every key with its
%% dotted causal container; the second element of the pair stays empty.
storage_get_all_dots(Storage) ->
Fun = fun({Key, Object}, {Others, Deleted}) ->
DCC = dotted_db_object:get_container(Object),
{[{Key,DCC}|Others], Deleted}
end,
dotted_db_storage:fold(Storage, Fun, {[],[]}).
%% get_value_dots_for_ets/1: the dots of an object's surviving (non-delete)
%% values, for storage in the ETS dots column.
get_value_dots_for_ets(Object) ->
{ValueDots, _Context} = dotted_db_object:get_container(Object),
ValueDots2 = [{D,V} || {D,V} <- ValueDots, V =/= ?DELETE_OP],
orddict:fetch_keys(ValueDots2).
%% add_removed_vnode_jump_clock/1: start tracking a retired vnode id in the
%% process-dictionary jump_clock map (retired id -> per-peer sync counters).
add_removed_vnode_jump_clock(OldVnodeID) ->
Dict = case get(jump_clock) of
undefined -> orddict:new();
D -> D
end,
lager:warning("MEMORY: new retired vnode: ~p\n", [OldVnodeID]),
put(jump_clock, orddict:store(OldVnodeID, orddict:new(), Dict)).
%% update_jump_clock/1: after syncing with SyncPeerIndex, bump that peer's
%% counter for every retired vnode id still tracked, then drop the retired
%% ids that enough peers (counter > 50 for all but one replica pair) have
%% synced past.
update_jump_clock(SyncPeerIndex) ->
    case get(jump_clock) of
        undefined -> ok;
        Dict ->
            %% BUG FIX: several lines here had lost their "%%" prefixes and
            %% were unparsable (a "case random:uniform() < 0.01 of" debug
            %% guard, a duplicated orddict:filter and a lager:warning);
            %% reconstructed the core bump-then-filter logic and dropped the
            %% garbled remnants as comments:
            %% D4 = orddict:filter(fun (_, PeersCount) -> ... end, ...)
            %% Rem -> lager:warning("MEMORY: from ~p deleted: ~p\n", [x99problems, Rem])
            D2 = orddict:map(fun (_,PeersCount) -> orddict:update_counter(SyncPeerIndex, 1, PeersCount) end, Dict),
            D3 = orddict:filter(fun (_,PeersCount) ->
                                    PeersCount2 = orddict:filter(fun (_,C) -> C > 50 end, PeersCount),
                                    orddict:size(PeersCount2) < (?REPLICATION_FACTOR-1)*2
                                end, D2),
            put(jump_clock, D3)
    end.
%% The retired vnode ids still tracked in the process dictionary;
%% empty list when nothing is being tracked.
get_old_peers_still_not_synced() ->
    Tracked = get(jump_clock),
    if
        Tracked =:= undefined -> [];
        true -> orddict:fetch_keys(Tracked)
    end.
%% jump_node_clock_by_index/4: for every *other* vnode id at this Index
%% (i.e. retired predecessors of CurrentId), advance its clock base by
%% Jump and clear its dot bitmap.
jump_node_clock_by_index(Clock, CurrentId, Index, Jump) ->
OldIds = [Id || Id={Idx,_} <- swc_node:ids(Clock) , Idx == Index andalso Id =/= CurrentId],
lists:foldl(fun (OldId, AccClock) ->
{Base,_} = swc_node:get(OldId, AccClock),
swc_node:store_entry(OldId, {Base+Jump,0}, AccClock)
end, Clock, OldIds).
|
ced00395c65b3a8a4f3bf39b8e734d3be643996e7122b35b21944810cfa00f79 | gwright83/Wheeler | IO.hs | {-# LANGUAGE TypeSynonymInstances #-}
--
IO.hs
--
-- Basic input and output of expressions.
--
, 22 April 2011
--
module Math.Symbolic.Wheeler.IO where
import Control.Monad.Identity
import Text.Parsec
import Text.Parsec.Expr as Ex
import Text.Parsec.Language
import Text.Parsec.String
import qualified Text.Parsec.Token as P
import Math.Symbolic.Wheeler.Canonicalize
import Math . Symbolic . Wheeler . CanonicalizeDebug
import Math.Symbolic.Wheeler.Function
import {-# SOURCE #-} Math.Symbolic.Wheeler.Expr
import Math.Symbolic.Wheeler.Numeric
import Math.Symbolic.Wheeler.Symbol
--
-- Read an expression from a string.
--
-- Parse an expression. Formerly, certain transformations
-- that put subexpressions into canonical form
-- were done on the fly. This is no longer the case. The
-- string is converted to an unsimplified expression, and
-- you must invoke the "canonicalize" function explicitly.
readExpr :: String -> Expr
readExpr = canonicalize . runLex
wheelerDef :: P.LanguageDef st
wheelerDef = P.LanguageDef
{ P.commentStart = "{-"
, P.commentEnd = "-}"
, P.commentLine = "--"
, P.nestedComments = True
, P.identStart = letter <|> char '\\'
, P.identLetter = letter <|> char '\''
, P.opStart = P.opLetter emptyDef
, P.opLetter = oneOf ":!#$%&*+./<=>?@\\^|-~"
, P.reservedOpNames= []
, P.reservedNames = []
, P.caseSensitive = True
}
lexer :: P.TokenParser ()
lexer = P.makeTokenParser
(wheelerDef
{ P.reservedOpNames = ["^", "*", "/", "+", "-", "!", "**", "sqrt"]
})
whiteSpace :: Parser ()
whiteSpace = P.whiteSpace lexer
lexeme :: Parser a -> Parser a
lexeme = P.lexeme lexer
symbol :: String -> Parser String
symbol = P.symbol lexer
integer :: Parser Integer
integer = P.integer lexer
natural :: Parser Integer
natural = P.natural lexer
float :: Parser Double
float = P.float lexer
parens :: Parser a -> Parser a
parens = P.parens lexer
semi :: Parser String
semi = P.semi lexer
identifier :: Parser String
identifier = P.identifier lexer
reserved :: String -> Parser ()
reserved = P.reserved lexer
reservedOp :: String -> Parser ()
reservedOp = P.reservedOp lexer
commaList :: Parser a -> Parser [ a ]
commaList = P.commaSep lexer
expr :: Parser Expr
expr = buildExpressionParser table factor
<?> "expression"
table :: [[ Operator String () Identity Expr ]]
table = [[ inOp "**" (**) AssocRight]
,[ preOp "-" negate, preOp "+" id, preOp "sqrt" sqrt ]
,[ inOp "*" (*) AssocLeft, inOp "/" (/) AssocLeft ]
,[ inOp "+" (+) AssocLeft, inOp "-" (-) AssocLeft ]
]
where
preOp s f = Ex.Prefix (do { reservedOp s; return f } <?> "prefix operator")
inOp s f assoc = Ex.Infix (do { reservedOp s; return f } <?> "infix operator") assoc
factor :: Parser Expr
factor = try application
<|> parens expr
<|> numericConst
<|> do { x <- identifier; return (Symbol (simpleSymbol x)) }
<?> "simple expresion"
application :: Parser Expr
application = do { f <- reservedFunction
; whiteSpace
; arg <- expr
; return (Applic f arg)
}
-- Note that the order is important. Function names that are
-- prefixes of the other function names must be listed later.
reservedFunction :: Parser Function
reservedFunction = do { _ <- try $ string "asinh"; return Asinh }
<|> do { _ <- try $ string "acosh"; return Acosh }
<|> do { _ <- try $ string "atanh"; return Atanh }
<|> do { _ <- try $ string "asin"; return Asin }
<|> do { _ <- try $ string "acos"; return Acos }
<|> do { _ <- try $ string "atan"; return Atan }
<|> do { _ <- try $ string "sinh"; return Sinh }
<|> do { _ <- try $ string "cosh"; return Cosh }
<|> do { _ <- try $ string "tanh"; return Tanh }
<|> do { _ <- try $ string "sin"; return Sin }
<|> do { _ <- try $ string "cos"; return Cos }
<|> do { _ <- try $ string "tan"; return Tan }
<|> do { _ <- try $ string "abs"; return Abs }
<|> do { _ <- try $ string "signum"; return Signum }
<|> do { _ <- try $ string "log"; return Log }
<|> do { _ <- try $ string "exp"; return Exp }
numericConst :: Parser Expr
numericConst = do { x <- integer; return (Const (I x)) }
runLex :: String -> Expr
runLex input = let
result = parse ( do { whiteSpace
; x <- expr
; eof
; return x
}) "" input
in
case result of
Right ex -> ex
Left err -> error (show err)
| null | https://raw.githubusercontent.com/gwright83/Wheeler/cfc8c66f019de92f087cc8157f2a9be22ae26cf7/src/Math/Symbolic/Wheeler/IO.hs | haskell | # LANGUAGE TypeSynonymInstances #
Basic input and output of expressions.
# SOURCE #
Read an expression from a string.
Parse an expression. Formerly, certain transformations
that put subexpressions into canonical form
were done on the fly. This is no longer the case. The
string is converted to an unsimplified expression, and
you must invoke the "canonicalize" function explicitly.
Note that the order is important. Function names that are
prefixes of the other function names must be listed later. | IO.hs
, 22 April 2011
module Math.Symbolic.Wheeler.IO where
import Control.Monad.Identity
import Text.Parsec
import Text.Parsec.Expr as Ex
import Text.Parsec.Language
import Text.Parsec.String
import qualified Text.Parsec.Token as P
import Math.Symbolic.Wheeler.Canonicalize
import Math . Symbolic . Wheeler . CanonicalizeDebug
import Math.Symbolic.Wheeler.Function
import Math.Symbolic.Wheeler.Numeric
import Math.Symbolic.Wheeler.Symbol
readExpr :: String -> Expr
readExpr = canonicalize . runLex
wheelerDef :: P.LanguageDef st
wheelerDef = P.LanguageDef
{ P.commentStart = "{-"
, P.commentEnd = "-}"
, P.commentLine = "--"
, P.nestedComments = True
, P.identStart = letter <|> char '\\'
, P.identLetter = letter <|> char '\''
, P.opStart = P.opLetter emptyDef
, P.opLetter = oneOf ":!#$%&*+./<=>?@\\^|-~"
, P.reservedOpNames= []
, P.reservedNames = []
, P.caseSensitive = True
}
lexer :: P.TokenParser ()
lexer = P.makeTokenParser
(wheelerDef
{ P.reservedOpNames = ["^", "*", "/", "+", "-", "!", "**", "sqrt"]
})
whiteSpace :: Parser ()
whiteSpace = P.whiteSpace lexer
lexeme :: Parser a -> Parser a
lexeme = P.lexeme lexer
symbol :: String -> Parser String
symbol = P.symbol lexer
integer :: Parser Integer
integer = P.integer lexer
natural :: Parser Integer
natural = P.natural lexer
float :: Parser Double
float = P.float lexer
parens :: Parser a -> Parser a
parens = P.parens lexer
semi :: Parser String
semi = P.semi lexer
identifier :: Parser String
identifier = P.identifier lexer
reserved :: String -> Parser ()
reserved = P.reserved lexer
reservedOp :: String -> Parser ()
reservedOp = P.reservedOp lexer
commaList :: Parser a -> Parser [ a ]
commaList = P.commaSep lexer
expr :: Parser Expr
expr = buildExpressionParser table factor
<?> "expression"
table :: [[ Operator String () Identity Expr ]]
table = [[ inOp "**" (**) AssocRight]
,[ preOp "-" negate, preOp "+" id, preOp "sqrt" sqrt ]
,[ inOp "*" (*) AssocLeft, inOp "/" (/) AssocLeft ]
,[ inOp "+" (+) AssocLeft, inOp "-" (-) AssocLeft ]
]
where
preOp s f = Ex.Prefix (do { reservedOp s; return f } <?> "prefix operator")
inOp s f assoc = Ex.Infix (do { reservedOp s; return f } <?> "infix operator") assoc
factor :: Parser Expr
factor = try application
<|> parens expr
<|> numericConst
<|> do { x <- identifier; return (Symbol (simpleSymbol x)) }
<?> "simple expresion"
application :: Parser Expr
application = do { f <- reservedFunction
; whiteSpace
; arg <- expr
; return (Applic f arg)
}
reservedFunction :: Parser Function
reservedFunction = do { _ <- try $ string "asinh"; return Asinh }
<|> do { _ <- try $ string "acosh"; return Acosh }
<|> do { _ <- try $ string "atanh"; return Atanh }
<|> do { _ <- try $ string "asin"; return Asin }
<|> do { _ <- try $ string "acos"; return Acos }
<|> do { _ <- try $ string "atan"; return Atan }
<|> do { _ <- try $ string "sinh"; return Sinh }
<|> do { _ <- try $ string "cosh"; return Cosh }
<|> do { _ <- try $ string "tanh"; return Tanh }
<|> do { _ <- try $ string "sin"; return Sin }
<|> do { _ <- try $ string "cos"; return Cos }
<|> do { _ <- try $ string "tan"; return Tan }
<|> do { _ <- try $ string "abs"; return Abs }
<|> do { _ <- try $ string "signum"; return Signum }
<|> do { _ <- try $ string "log"; return Log }
<|> do { _ <- try $ string "exp"; return Exp }
numericConst :: Parser Expr
numericConst = do { x <- integer; return (Const (I x)) }
runLex :: String -> Expr
runLex input = let
result = parse ( do { whiteSpace
; x <- expr
; eof
; return x
}) "" input
in
case result of
Right ex -> ex
Left err -> error (show err)
|
6f93660aab14d835c3d71e33bdd0043a3c262929824a1844227a93d9f028f52f | potapenko/playphraseme-site | util.clj | (ns playphraseme.common.util
(:require [clojure.string :as string]
[clojure.java.io :as io]
[clojure.walk :as walk]
[playphraseme.common.nlp :as nlp])
(:import [java.net URL URLEncoder]))
(defn- change-keys
[data pred]
(let [transform-map (fn [form]
(if (map? form)
(reduce-kv (fn [acc k v] (assoc acc (pred k) v)) {} form)
form))]
(walk/postwalk transform-map data)))
(defn- remove-dots-from-keys
[data]
(change-keys data #(-> % name (string/replace "." "__") keyword)))
(defn- restore-dots-to-keys
[data]
(change-keys data #(-> % name (string/replace "__" ".") keyword)))
(defn remove-keys
([obj keys]
(walk/postwalk (fn [x]
(if (map? x)
(apply dissoc (concat [x] keys))
x))
obj))
([obj scope keys]
(walk/postwalk (fn [x]
(if (and (map? x) (contains? x scope))
(update x :words remove-keys [:id])
x)) obj)))
(defn update-dafault [m k v]
(if-not (contains? m k)
(assoc m k v)
m))
(defmacro nil-when-throw [& body]
`(try
~@body
(catch Throwable e#
nil)))
(defn resource-file [path]
(-> path
io/resource
io/file))
(defn resource-path [path]
(-> path io/resource .toURI .normalize .getPath))
(defn time-stamp-10-min []
(-> (System/currentTimeMillis)
(float)
(/ 1000.0)
(/ 1000.0)
(/ 10.0)
(int)))
(defn distinct-by
([key-fn coll] (distinct-by key-fn = coll))
([key-fn eq-fn coll]
(loop [[v & t] (->> coll
(map (fn [x]
{:key (key-fn x)
:value x})))
result []]
(if v
(recur
(remove #(-> % :key (eq-fn (:key v))) t)
(conj result (:value v)))
result))))
(defn encode-url [s]
(URLEncoder/encode s))
(defn make-phrase-url [search-text]
(str "/"
(some-> search-text
nlp/remove-punctuation
string/trim string/lower-case (string/replace #" +" "_") encode-url)
"/"))
(defn format-phrase-text [s]
(format "\"%s\"" (string/capitalize s)))
(defmacro catch-and-log-err-and-throw [log-message & body]
`(try
~@body
(catch Throwable e#
(log/error (str "[" ~log-message "]") "- catch error"
(str "\"" (.getMessage e#) "\""))
(throw e#))))
(defmacro catch-and-log-err [log-message & body]
`(try
~@body
(catch Throwable e#
(log/error (str "[" ~log-message "]") "- catch error"
(str "\"" (.getMessage e#) "\"")))))
(defmacro nil-if-throw [& body]
`(try
~@body
(catch Throwable e#
nil)))
(comment
(distinct-by :a [{:a 1} {:a 1} {:a 2}])
)
| null | https://raw.githubusercontent.com/potapenko/playphraseme-site/d50a62a6bc8f463e08365dca96b3a6e5dde4fb12/src/clj/playphraseme/common/util.clj | clojure | (ns playphraseme.common.util
(:require [clojure.string :as string]
[clojure.java.io :as io]
[clojure.walk :as walk]
[playphraseme.common.nlp :as nlp])
(:import [java.net URL URLEncoder]))
(defn- change-keys
[data pred]
(let [transform-map (fn [form]
(if (map? form)
(reduce-kv (fn [acc k v] (assoc acc (pred k) v)) {} form)
form))]
(walk/postwalk transform-map data)))
(defn- remove-dots-from-keys
[data]
(change-keys data #(-> % name (string/replace "." "__") keyword)))
(defn- restore-dots-to-keys
[data]
(change-keys data #(-> % name (string/replace "__" ".") keyword)))
(defn remove-keys
([obj keys]
(walk/postwalk (fn [x]
(if (map? x)
(apply dissoc (concat [x] keys))
x))
obj))
([obj scope keys]
(walk/postwalk (fn [x]
(if (and (map? x) (contains? x scope))
(update x :words remove-keys [:id])
x)) obj)))
(defn update-dafault [m k v]
(if-not (contains? m k)
(assoc m k v)
m))
(defmacro nil-when-throw [& body]
`(try
~@body
(catch Throwable e#
nil)))
(defn resource-file [path]
(-> path
io/resource
io/file))
(defn resource-path [path]
(-> path io/resource .toURI .normalize .getPath))
(defn time-stamp-10-min []
(-> (System/currentTimeMillis)
(float)
(/ 1000.0)
(/ 1000.0)
(/ 10.0)
(int)))
(defn distinct-by
([key-fn coll] (distinct-by key-fn = coll))
([key-fn eq-fn coll]
(loop [[v & t] (->> coll
(map (fn [x]
{:key (key-fn x)
:value x})))
result []]
(if v
(recur
(remove #(-> % :key (eq-fn (:key v))) t)
(conj result (:value v)))
result))))
(defn encode-url [s]
(URLEncoder/encode s))
(defn make-phrase-url [search-text]
(str "/"
(some-> search-text
nlp/remove-punctuation
string/trim string/lower-case (string/replace #" +" "_") encode-url)
"/"))
(defn format-phrase-text [s]
(format "\"%s\"" (string/capitalize s)))
(defmacro catch-and-log-err-and-throw [log-message & body]
`(try
~@body
(catch Throwable e#
(log/error (str "[" ~log-message "]") "- catch error"
(str "\"" (.getMessage e#) "\""))
(throw e#))))
(defmacro catch-and-log-err [log-message & body]
`(try
~@body
(catch Throwable e#
(log/error (str "[" ~log-message "]") "- catch error"
(str "\"" (.getMessage e#) "\"")))))
(defmacro nil-if-throw [& body]
`(try
~@body
(catch Throwable e#
nil)))
(comment
(distinct-by :a [{:a 1} {:a 1} {:a 2}])
)
|
|
05877cbbed43481b143dc3bc0ccad5bd80134b26db498c5912d09f821fa2581b | startalkIM/ejabberd | fast_tls_app.erl | %%%----------------------------------------------------------------------
%%% File : fast_tls_app.erl
Author : < >
Purpose : application
Created : 4 Apr 2013 by < >
%%%
%%%
Copyright ( C ) 2002 - 2016 ProcessOne , SARL . All Rights Reserved .
%%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% -2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%%----------------------------------------------------------------------
-module(fast_tls_app).
-behaviour(application).
%% Application callbacks
-export([start/2, stop/1]).
%%%===================================================================
%%% Application callbacks
%%%===================================================================
%%--------------------------------------------------------------------
@private
%% @doc
%% This function is called whenever an application is started using
application : start/[1,2 ] , and should start the processes of the
%% application. If the application is structured according to the OTP
%% design principles as a supervision tree, this means starting the
%% top supervisor of the tree.
%%
@spec start(StartType , ) - > { ok , Pid } |
{ ok , Pid , State } |
%% {error, Reason}
%% StartType = normal | {takeover, Node} | {failover, Node}
= term ( )
%% @end
%%--------------------------------------------------------------------
start(_StartType, _StartArgs) ->
ok = application:ensure_started(crypto),
ok = p1_sha:load_nif(),
fast_tls_sup:start_link().
%%--------------------------------------------------------------------
@private
%% @doc
%% This function is called whenever an application has stopped. It
%% is intended to be the opposite of Module:start/2 and should do
%% any necessary cleaning up. The return value is ignored.
%%
%% @spec stop(State) -> void()
%% @end
%%--------------------------------------------------------------------
stop(_State) ->
ok.
%%%===================================================================
Internal functions
%%%===================================================================
| null | https://raw.githubusercontent.com/startalkIM/ejabberd/718d86cd2f5681099fad14dab5f2541ddc612c8b/deps/fast_tls/src/fast_tls_app.erl | erlang | ----------------------------------------------------------------------
File : fast_tls_app.erl
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
----------------------------------------------------------------------
Application callbacks
===================================================================
Application callbacks
===================================================================
--------------------------------------------------------------------
@doc
This function is called whenever an application is started using
application. If the application is structured according to the OTP
design principles as a supervision tree, this means starting the
top supervisor of the tree.
{error, Reason}
StartType = normal | {takeover, Node} | {failover, Node}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function is called whenever an application has stopped. It
is intended to be the opposite of Module:start/2 and should do
any necessary cleaning up. The return value is ignored.
@spec stop(State) -> void()
@end
--------------------------------------------------------------------
===================================================================
=================================================================== | Author : < >
Purpose : application
Created : 4 Apr 2013 by < >
Copyright ( C ) 2002 - 2016 ProcessOne , SARL . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(fast_tls_app).
-behaviour(application).
-export([start/2, stop/1]).
@private
application : start/[1,2 ] , and should start the processes of the
@spec start(StartType , ) - > { ok , Pid } |
{ ok , Pid , State } |
= term ( )
start(_StartType, _StartArgs) ->
ok = application:ensure_started(crypto),
ok = p1_sha:load_nif(),
fast_tls_sup:start_link().
@private
stop(_State) ->
ok.
Internal functions
|
57371b72bb517b36d7bfdc6af3c808b88805e4df34608a6737926e0030abe609 | coccinelle/coccinelle | char.mli | external code : char -> int = "%identity"
val chr : int -> char
val escaped : char -> string
val lowercase : char -> char
val uppercase : char -> char
val lowercase_ascii : char -> char
val uppercase_ascii : char -> char
type t = char
val compare : t -> t -> int
val equal : t -> t -> bool
external unsafe_chr : int -> char = "%identity"
| null | https://raw.githubusercontent.com/coccinelle/coccinelle/5448bb2bd03491ffec356bf7bd6ddcdbf4d36bc9/bundles/stdcompat/stdcompat-current/interfaces/4.13/char.mli | ocaml | external code : char -> int = "%identity"
val chr : int -> char
val escaped : char -> string
val lowercase : char -> char
val uppercase : char -> char
val lowercase_ascii : char -> char
val uppercase_ascii : char -> char
type t = char
val compare : t -> t -> int
val equal : t -> t -> bool
external unsafe_chr : int -> char = "%identity"
|
|
69d68f2606b4dc517222ceae304f16394f259ed2478e415cd00f0d6642fac7ed | klarna-incubator/bec | bec_permission_group_t.erl | %%==============================================================================
Type definition for the Permission Group structure
%%==============================================================================
-module(bec_permission_group_t).
%%==============================================================================
%% Exports
%%==============================================================================
-export([ from_config/1
, from_map/1
, to_map/1
]).
-include("bitbucket.hrl").
%%==============================================================================
%% Types
%%==============================================================================
-type permission_type() :: 'REPO_ADMIN' | 'REPO_READ' | 'REPO_WRITE'.
-type permission() :: #{ groupname => bec_group_t:name()
, permission => permission_type()
}.
%%==============================================================================
%% Export Types
%%==============================================================================
-export_type([ permission/0
, permission_type/0
]).
%%==============================================================================
%% API
%%==============================================================================
-spec from_config(map()) -> permission().
from_config(#{ <<"permission">> := Permission
, <<"groupname">> := Groupname
}) ->
#{ permission => binary_to_atom(Permission, utf8)
, groupname => Groupname
}.
-spec from_map(map()) -> permission().
from_map(#{ <<"permission">> := Permission
, <<"group">> := Group
}) ->
#{ permission => binary_to_atom(Permission, utf8)
, groupname => bec_group_t:from_map(Group)
}.
-spec to_map(permission()) -> map().
to_map(#{ groupname := Groupname
, permission := Permission}) ->
#{ <<"group">> => bec_group_t:to_map(Groupname)
, <<"permission">> => Permission
}.
| null | https://raw.githubusercontent.com/klarna-incubator/bec/b090bfbeeff298b4fc40e16a9da217f2ce404844/src/bec_permission_group_t.erl | erlang | ==============================================================================
==============================================================================
==============================================================================
Exports
==============================================================================
==============================================================================
Types
==============================================================================
==============================================================================
Export Types
==============================================================================
==============================================================================
API
============================================================================== | Type definition for the Permission Group structure
-module(bec_permission_group_t).
-export([ from_config/1
, from_map/1
, to_map/1
]).
-include("bitbucket.hrl").
-type permission_type() :: 'REPO_ADMIN' | 'REPO_READ' | 'REPO_WRITE'.
-type permission() :: #{ groupname => bec_group_t:name()
, permission => permission_type()
}.
-export_type([ permission/0
, permission_type/0
]).
-spec from_config(map()) -> permission().
from_config(#{ <<"permission">> := Permission
, <<"groupname">> := Groupname
}) ->
#{ permission => binary_to_atom(Permission, utf8)
, groupname => Groupname
}.
-spec from_map(map()) -> permission().
from_map(#{ <<"permission">> := Permission
, <<"group">> := Group
}) ->
#{ permission => binary_to_atom(Permission, utf8)
, groupname => bec_group_t:from_map(Group)
}.
-spec to_map(permission()) -> map().
to_map(#{ groupname := Groupname
, permission := Permission}) ->
#{ <<"group">> => bec_group_t:to_map(Groupname)
, <<"permission">> => Permission
}.
|
b2ce26af58042a147e7e3748a7ead4829d089b3303383a1f8256c3ef637d35b9 | hasura/pg-client-hs | Interrupt.hs | # LANGUAGE ScopedTypeVariables #
module Control.Concurrent.Interrupt
( interruptOnAsyncException,
)
where
-------------------------------------------------------------------------------
import Control.Concurrent.Async (async, asyncThreadId, wait, waitCatch)
import Control.Exception
( SomeAsyncException,
SomeException,
mask,
throwIO,
throwTo,
try,
)
import Prelude
-------------------------------------------------------------------------------
-- | interruptOnAsyncexception runs the given action in in a separate thread,
-- running the given cancel action before passing on any asynchronous
-- exceptions to that thread. The intent is that
-- 'interruptOnAsyncException (pure ()) == id'
-- in all respects (including exception handling), assuming the wrapped
-- action behaves somewhat reasonably (i.e., doesn't swallow asynchronous
-- exceptions). Particularly, we guarantee that the separate thread terminates
-- before we return. (It's not entirely transparent: for instance, 'myThreadId'
-- returns a different value.)
--
-- The point of this is to allow breaking out of blocking actions if they
-- provide some cancelling escape hatch.
interruptOnAsyncException :: IO () -> IO a -> IO a
interruptOnAsyncException interrupt action = mask $ \restore -> do
x <- async action
-- By using 'try' with 'waitCatch', we can distinguish between asynchronous
-- exceptions received from the outside, and those thrown by the wrapped action.
-- (The latter shouldn't occur, but we also want to avoid throwing an exception
-- back at the thread below.)
res :: Either SomeAsyncException (Either SomeException a) <-
try $ restore (waitCatch x)
case res of
Due to the use of ' waitCatch ' above , the only exceptions that ' tryAsync '
-- might catch are asynchronous exceptions received from the "outside".
-- Thus, the 'Left' case is the only one where the async action has not
-- necessarily terminated.
Left e -> do
-- Cancelling might throw an exception; we save that and re-raise it,
-- but not before doing or job of passing the asynchronous exception on
-- to our child and waiting for it to terminate.
interruptRes :: Either SomeException () <- try interrupt
throwTo (asyncThreadId x) e
waitRes :: Either SomeException a <- try $ wait x
case (interruptRes, waitRes) of
(Left cancelEx, _) -> throwIO cancelEx
-- waitEx could be an exception thrown by the action, or our async
-- exception bubbling back up
(Right _, Left waitEx) -> throwIO waitEx
-- in case the async exits cleanly before receiving the exception, we
-- re-raise it manually so as to not swallow it, since the action
-- /was/ interrupted
(Right _, Right _) -> throwIO e
-- In the non-interrupted case, we "undo" the 'try', collapsing things
-- effectively to 'restore (wait x)'.
Right (Left e) ->
throwIO e
Right (Right r) ->
pure r
| null | https://raw.githubusercontent.com/hasura/pg-client-hs/5793e998c20358eef6ca86b5d480956e08b7e07a/src/Control/Concurrent/Interrupt.hs | haskell | -----------------------------------------------------------------------------
-----------------------------------------------------------------------------
| interruptOnAsyncexception runs the given action in in a separate thread,
running the given cancel action before passing on any asynchronous
exceptions to that thread. The intent is that
'interruptOnAsyncException (pure ()) == id'
in all respects (including exception handling), assuming the wrapped
action behaves somewhat reasonably (i.e., doesn't swallow asynchronous
exceptions). Particularly, we guarantee that the separate thread terminates
before we return. (It's not entirely transparent: for instance, 'myThreadId'
returns a different value.)
The point of this is to allow breaking out of blocking actions if they
provide some cancelling escape hatch.
By using 'try' with 'waitCatch', we can distinguish between asynchronous
exceptions received from the outside, and those thrown by the wrapped action.
(The latter shouldn't occur, but we also want to avoid throwing an exception
back at the thread below.)
might catch are asynchronous exceptions received from the "outside".
Thus, the 'Left' case is the only one where the async action has not
necessarily terminated.
Cancelling might throw an exception; we save that and re-raise it,
but not before doing or job of passing the asynchronous exception on
to our child and waiting for it to terminate.
waitEx could be an exception thrown by the action, or our async
exception bubbling back up
in case the async exits cleanly before receiving the exception, we
re-raise it manually so as to not swallow it, since the action
/was/ interrupted
In the non-interrupted case, we "undo" the 'try', collapsing things
effectively to 'restore (wait x)'. | # LANGUAGE ScopedTypeVariables #
module Control.Concurrent.Interrupt
( interruptOnAsyncException,
)
where
import Control.Concurrent.Async (async, asyncThreadId, wait, waitCatch)
import Control.Exception
( SomeAsyncException,
SomeException,
mask,
throwIO,
throwTo,
try,
)
import Prelude
interruptOnAsyncException :: IO () -> IO a -> IO a
interruptOnAsyncException interrupt action = mask $ \restore -> do
x <- async action
res :: Either SomeAsyncException (Either SomeException a) <-
try $ restore (waitCatch x)
case res of
Due to the use of ' waitCatch ' above , the only exceptions that ' tryAsync '
Left e -> do
interruptRes :: Either SomeException () <- try interrupt
throwTo (asyncThreadId x) e
waitRes :: Either SomeException a <- try $ wait x
case (interruptRes, waitRes) of
(Left cancelEx, _) -> throwIO cancelEx
(Right _, Left waitEx) -> throwIO waitEx
(Right _, Right _) -> throwIO e
Right (Left e) ->
throwIO e
Right (Right r) ->
pure r
|
fb0d39f489811fe7d09695161aa425756950a449dd0056842bb91088588e40d7 | filonenko-mikhail/cl-portaudio | portaudio.lisp | ;;; Generated by Verrazano 0.5
(in-package portaudio)
(define-foreign-library libportaudio
(:darwin "libportaudio.dylib") ;; <--- added
((:and :unix (:not :darwin)) (:or "libportaudio.so.2" "libportaudio.so"))
((:or :win32 :x86-64) (:or "libportaudio" "libportaudio-2"
"portaudio" "portaudio_x64" "portaudio_x86_64"))
((:or :win32 :x86) (:or "libportaudio" "libportaudio-2" "portaudio"
"portaudio_x86"))
(t (:default "libportaudio")))
(use-foreign-library libportaudio)
(defcfun ("Pa_GetErrorText" %get-error-text) :string (error-code :int))
Generated by me , based on " < > " version
(defmacro make-errors (&rest symbols)
`(eval-when (:compile-toplevel :load-toplevel :execute)
(defcenum pa-error ,@symbols)
,@(mapcar #'(lambda (sym)
`(define-condition ,(if (listp sym)
(car sym)
sym) (error)
((value :initform ,(if (listp sym)
(car sym)
sym)))
(:documentation "pa-error condition")
(:report (lambda (condition stream)
(format stream "~A."
(%get-error-text (foreign-enum-value 'pa-error (slot-value condition 'value))))))))
symbols)))
(make-errors
(:no-error 0)
(:not-anticipated -10000)
:unanticipated-host-error
:invalid-channel-count
:invalid-sample-rate
:invalid-device
:invalid-flag
:sample-format-not-supported
:bad-i-o-device-combination
:insufficient-memory
:buffer-too-big
:buffer-too-small
:null-callback
:bad-stream-ptr
:timed-out
:internal-error
:device-unavailable
:incompatible-host-api-specific-stream-info
:stream-is-stopped
:stream-is-not-stopped
:input-overflowed
:output-underflowed
:host-api-not-found
:invalid-host-api
:can-not-read-from-a-callback-stream
:can-not-write-to-a-callback-stream
:can-not-read-from-an-output-only-stream
:can-not-write-to-an-input-only-stream
:incompatible-stream-host-api
:bad-buffer-ptr)
(defun raise-if-error (err)
"Check err and raise condition if it is needed"
(cond
((numberp err)
(if (<= -10000 err -9972)
(error (foreign-enum-keyword 'pa-error err))
err))
(t
(if (not (eql err :no-error))
(error err)))))
(defbitfield (sample-format :unsigned-long)
(:float #x0001) ; cffi::float
: int32 ; cffi::int32
;; :int24 ; not supported
;; :int16 ; cffi::int16 troubles
: int8 ; cffi::int8
: uint8 ; cffi::uint8
;; (:custom-format #x00010000) ; not supported
;; (:non-interleaved #x80000000)
)
(defun cffi-type-to-lisp (cffi-type)
(case cffi-type
(:float 'single-float)
(:int32 '(signed-byte 32))
(:int16 '(signed-byte 16))
(:int8 '(signed-byte 8))
(:uint8 '(unsigned-byte 8))))
;;(defconstant +non-interleaved+ #x80000000)
(defbitfield (stream-flags :unsigned-long)
(:no-flag #x00000000)
(:clip-off #x00000001)
(:dither-off #x00000002)
;; (:never-drop-input #x00000004)
;; (:prime-output-buffers-using-stream-callback #x00000008)
;; (:platform-specific-flags #xFFFF0000)
)
(defconstant +format-is-supported+ 0)
(defconstant +frames-per-buffer-unspecified+ 0)
(defconstant +no-device+ -1)
(defconstant +use-host-api-specific-device-specification -2)
(defctype pa-stream-t :void)
(defctype host-api-index :int)
(defcenum host-api-type-id
(:in-development 0)
(:direct-sound 1)
(:mme 2)
(:asio 3)
(:sound-manager 4)
(:core-audio 5)
(:oss 7)
(:alsa 8)
(:al 9)
(:be-os 10)
(:wdmks 11)
(:jack 12)
(:wasapi 13)
(:audio-science-hpi 14))
;; Lisp-side wrapper around a foreign PaStream pointer, plus the stream
;; parameters that read-stream/write-stream and the channel helpers need.
(defclass pa-stream ()
((handle :initform (null-pointer)
:accessor pa-stream-handle
:initarg :handle :documentation "Foreign pointer to pa-stream")
(input-sample-format :initarg :input-sample-format :reader pa-stream-input-sample-format :documentation "Format of input samples")
(input-channels :initarg :input-channels :reader pa-stream-input-channels :documentation "Number of input channels")
(output-sample-format :initarg :output-sample-format :reader pa-stream-output-sample-format :documentation "Format of output samples")
(output-channels :initarg :output-channels :reader pa-stream-output-channels :documentation "Number of output channels")
(frames-per-buffer :initarg :frames-per-buffer :reader pa-stream-frames-per-buffer :documentation "Frames per buffer"))
(:documentation "A single PaStream can provide multiple channels of real-time streaming audio input and output to a client application. A stream provides access to audio hardware represented by one or more devices. Depending on the underlying Host API, it may be possible to open multiple streams using the same device, however this behavior is implementation defined. Portable applications should assume that a device may be simultaneously used by at most one stream."))
;; Public API of the pa-stream wrapper.  The defgenerics attach user-facing
;; documentation to the reader functions the defclass already created.
(export 'pa-stream)
(export 'pa-stream-input-sample-format)
(export 'pa-stream-input-channels)
(export 'pa-stream-output-sample-format)
(export 'pa-stream-output-channels)
(export 'pa-stream-frames-per-buffer)
(defgeneric pa-stream-input-sample-format (instance)
(:documentation "value of sample-format for input channel"))
(defgeneric pa-stream-input-channels (instance)
(:documentation "Number of input channels"))
(defgeneric pa-stream-output-sample-format (instance)
(:documentation "value of sample-format for output channel"))
(defgeneric pa-stream-output-channels (instance)
(:documentation "Number of output channels"))
(defgeneric pa-stream-frames-per-buffer (instance)
(:documentation "Frames per buffer for current stream"))
;; Foreign type used in defcfun signatures: a pa-stream object is
;; converted to its raw foreign handle when passed down to C.
(defctype p-pa-stream :pointer "Pointer to pa-stream")
(define-foreign-type p-pa-stream ()
()
(:actual-type :pointer)
(:simple-parser p-pa-stream))
;; Outgoing conversion: unwrap the foreign handle from the Lisp object.
(defmethod translate-to-foreign (value (type p-pa-stream))
(pa-stream-handle value))
;; PaDeviceIndex: a non-negative device index, or +no-device+.
(defctype device-index :int)
;; PaTime: time in seconds, represented as a C double.
(defctype pa-time :double)
;; Mirror of the C PaHostErrorInfo struct.
(defcstruct host-error-info
(host-api-type host-api-type-id)
(error-code :long)
(error-text :string))
;; Lisp-side copy of a foreign host-error-info struct; slots are filled
;; from a foreign pointer by the initialize-instance :after method.
(defclass host-error-info nil
((host-api-type :reader host-error-info-host-api-type :documentation "the host API which returned the error code")
(error-code :reader host-error-info-error-code :documentation "the error code returned")
(error-text :reader host-error-info-error-text :documentation "a textual description of the error if available, otherwise a zero-length string"))
(:documentation "Structure used to return information about a host error condition."))
(defmethod initialize-instance :after
    ((inst host-error-info) &key pointer)
  ;; Copy every field of the foreign host-error-info struct at POINTER
  ;; into the identically named slot of the new instance.
  (dolist (slot '(host-api-type error-code error-text))
    (setf (slot-value inst slot)
          (foreign-slot-value pointer 'host-error-info slot))))
;; Public API for host-error-info; defgenerics only carry documentation
;; for the readers declared in the defclass.
(export 'host-error-info)
(export 'host-error-info-host-api-type)
(export 'host-error-info-error-code)
(export 'host-error-info-error-text)
(defgeneric host-error-info-host-api-type (instance)(:documentation "the host API which returned the error code"))
(defgeneric host-error-info-error-code (instance)(:documentation "the error code returned"))
(defgeneric host-error-info-error-text (instance)(:documentation "a textual description of the error if available, otherwise a zero-length string"))
;; Foreign return type for Pa_GetLastHostErrorInfo: a NULL pointer becomes
;; NIL, anything else is wrapped in a host-error-info instance.
(defctype p-host-error-info :pointer "Pointer to p-host-error-info")
(define-foreign-type p-host-error-info ()
()
(:actual-type :pointer)
(:simple-parser p-host-error-info))
(defmethod translate-from-foreign (value (type p-host-error-info))
(unless (null-pointer-p value)
(make-instance 'host-error-info :pointer value)))
;; Foreign entry point: Pa_GetLastHostErrorInfo.
(defcfun ("Pa_GetLastHostErrorInfo" %get-last-host-error-info) p-host-error-info)
(defun get-last-host-error-info ()
"Return information about the last host error encountered. The error information returned by @fun{get-last-host-error-info} will never be modified asynchronously by errors occurring in other PortAudio owned threads.
This function is provided as a last resort, primarily to enhance debugging by providing clients with access to all available error information.
@begin{return}
A structure containing information about the host error. The values in this structure will only be valid if a PortAudio function has previously raised the unanticipated-host-error error code.
@end{return}
"
(%get-last-host-error-info))
;; Foreign entry point: Pa_GetVersion.
(defcfun ("Pa_GetVersion" %get-version) :int)
(defun get-version ()
"Retrieve the release number of the currently running PortAudio build, eg 1900."
(%get-version))
(export 'get-version)
;; Foreign entry point: Pa_GetVersionText.
(defcfun ("Pa_GetVersionText" %get-version-text) :string)
(defun get-version-text ()
"Retrieve a textual description of the current PortAudio build,
eg \"PortAudio V19-devel 13 October 2002\"."
(%get-version-text))
(export 'get-version-text)
;; Foreign entry point: Pa_GetDeviceCount.  Negative results are PaError
;; codes, which raise-if-error converts into conditions.
(defcfun ("Pa_GetDeviceCount" %get-device-count) device-index)
(defun get-device-count ()
"Retrieve the number of available devices.The number of available devices may be zero.
@begin{return}
A non-negative value indicating the number of available devices or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}"
(raise-if-error (%get-device-count)))
(export 'get-device-count)
;; Foreign entry point: Pa_GetDefaultOutputDevice.
(defcfun ("Pa_GetDefaultOutputDevice" %get-default-output-device) device-index)
(defun get-default-output-device ()
"Retrieve the index of the default output device. The result can be used in the outputDevice parameter to @fun{open-stream}.
@b{Note}
On the PC, the user can specify a default device by setting an environment variable. For example, to use device #1.
@pre{set PA_RECOMMENDED_OUTPUT_DEVICE=1}
The user should first determine the available device ids by using @code{(@fun{print-devices})}.
@begin{return}
The default output device index for the default host API, or raise no-device if no default output device is available or an error was encountered.
@end{return}
"
(raise-if-error (%get-default-output-device)))
(export 'get-default-output-device)
;; Mirror of the C PaDeviceInfo struct.
(defcstruct device-info
(struct-version :int)
(name :string)
(host-api host-api-index)
(max-input-channels :int)
(max-output-channels :int)
(default-low-input-latency pa-time)
(default-low-output-latency pa-time)
(default-high-input-latency pa-time)
(default-high-output-latency pa-time)
(default-sample-rate :double))
;; Lisp-side copy of a foreign device-info struct.
;; NOTE(review): removed a stray "p" token that followed the
;; default-high-input-latency slot; it would otherwise have been read as
;; an extra, meaningless slot named P.
(defclass device-info nil
  ((struct-version :reader device-info-struct-version
                   :documentation "Structure version.")
   (name :reader device-info-name :documentation "Device name.")
   (host-api :reader device-info-host-api :documentation "note this is a host API index, not a type id.")
   (max-input-channels :reader device-info-max-input-channels :documentation "")
   (max-output-channels :reader device-info-max-output-channels)
   (default-low-input-latency :reader
    device-info-default-low-input-latency
    :documentation "Default latency values for interactive performance.")
   (default-low-output-latency :reader
    device-info-default-low-output-latency
    :documentation "")
   (default-high-input-latency :reader device-info-default-high-input-latency
    :documentation "Default latency values for robust non-interactive applications (eg. playing sound files).")
   (default-high-output-latency :reader
    device-info-default-high-output-latency
    :documentation "")
   (default-sample-rate :reader device-info-default-sample-rate :documentation "Sample rate"))
  (:documentation "A structure providing information and capabilities of PortAudio devices. Devices may support input, output or both input and output."))
(defmethod initialize-instance :after
    ((inst device-info) &key pointer)
  ;; Mirror every field of the foreign device-info struct at POINTER
  ;; into the slot of the same name.
  (dolist (slot '(struct-version name host-api max-input-channels
                  max-output-channels default-low-input-latency
                  default-low-output-latency default-high-input-latency
                  default-high-output-latency default-sample-rate))
    (setf (slot-value inst slot)
          (foreign-slot-value pointer 'device-info slot))))
;; Public API for device-info; defgenerics only document the readers
;; declared in the defclass.
(export 'device-info)
(export 'device-info-name)
(export 'device-info-host-api)
(export 'device-info-max-input-channels)
(export 'device-info-max-output-channels)
(export 'device-info-default-low-input-latency)
(export 'device-info-default-high-input-latency)
(export 'device-info-default-low-output-latency)
(export 'device-info-default-high-output-latency)
(export 'device-info-default-sample-rate)
(defgeneric device-info-name (instance) (:documentation "device name"))
(defgeneric device-info-host-api (instance) (:documentation "note this is a host API index, not a type id."))
(defgeneric device-info-max-input-channels (instance) (:documentation "maximum number of input channels"))
(defgeneric device-info-max-output-channels (instance) (:documentation "maximum number of output channels"))
(defgeneric device-info-default-low-input-latency (instance) (:documentation "Default latency values for interactive performance."))
(defgeneric device-info-default-high-input-latency (instance) (:documentation "Default latency values for robust non-interactive applications (eg. playing sound files)."))
(defgeneric device-info-default-low-output-latency (instance) (:documentation ""))
(defgeneric device-info-default-high-output-latency (instance) (:documentation ""))
(defgeneric device-info-default-sample-rate (instance) (:documentation "Sample rate"))
;; Foreign return type for Pa_GetDeviceInfo.
(defctype p-device-info :pointer "Pointer to device-info")
(define-foreign-type p-device-info ()
()
(:actual-type :pointer)
(:simple-parser p-device-info))
;; Define a method that converts C to Lisp.
(defmethod translate-from-foreign (value (type p-device-info))
  ;; A NULL pointer maps to NIL; any other pointer is wrapped in a
  ;; device-info object.
  (if (null-pointer-p value)
      nil
      (make-instance 'device-info :pointer value)))
;; Foreign entry point: Pa_GetDeviceInfo.
(defcfun ("Pa_GetDeviceInfo" %get-device-info) p-device-info
(device device-index))
(defun get-device-info (device)
"Retrieve @class{device-info} structure containing information about the specified device.
@begin{return}
A object of @class{device-info}. If the device parameter is out of range the function returns NIL.
@end{return}
@arg[device]{A valid device index in the range 0 to @code{(- (@fun{get-device-count}) 1)}}
"
(%get-device-info device))
(export 'get-device-info)
;; Mirror of the C PaStreamParameters struct.
(defcstruct stream-parameters
(device device-index)
(channel-count :int)
(sample-format sample-format)
(suggested-latency pa-time)
(host-api-specific-stream-info (:pointer :void)))
;; Lisp-side counterpart of the stream-parameters struct.  The
;; host-api-specific-stream-info field is intentionally not modelled;
;; translate-to-foreign always passes NULL for it.
(defclass stream-parameters ()
((device :accessor stream-parameters-device
:documentation "A valid device index in the range 0 to (- get-device-count 1) specifying the device to be used. This field must not be set to paNoDevice.")
(channel-count :accessor stream-parameters-channel-count
:documentation "The number of channels of sound to be delivered to the stream callback.")
(sample-format :accessor stream-parameters-sample-format
:documentation "The sample format of the buffer provided to read-stream or write-stream.")
(suggested-latency :accessor stream-parameters-suggested-latency
:documentation "The desired latency in seconds. Where practical, implementations should configure their latency based on these parameters, otherwise they may choose the closest viable latency instead. Unless the suggested latency is greater than the absolute upper limit for the device implementations should round the suggestedLatency up to the next practical value - ie to provide an equal or higher latency than suggestedLatency wherever possible."))
(:documentation "Parameters for one direction (input or output) of a stream."))
;; Public API for stream-parameters; defgenerics only document the
;; accessors declared in the defclass.
(export 'stream-parameters)
(export 'stream-parameters-device)
(export 'stream-parameters-channel-count)
(export 'stream-parameters-sample-format)
(export 'stream-parameters-suggested-latency)
(defgeneric stream-parameters-device (instance) (:documentation "A valid device index in the range 0 to @code{(- (@fun{get-device-count}) 1)} specifying the device to be used. This field must not be set to paNoDevice."))
(defgeneric stream-parameters-channel-count (instance) (:documentation "The number of channels of sound to be delivered to the stream callback."))
(defgeneric stream-parameters-sample-format (instance) (:documentation "The sample format of the buffer provided to read-stream or write-stream."))
(defgeneric stream-parameters-suggested-latency (instance) (:documentation "The desired latency in seconds. Where practical, implementations should configure their latency based on these parameters, otherwise they may choose the closest viable latency instead. Unless the suggested latency is greater than the absolute upper limit for the device implementations should round the suggestedLatency up to the next practical value - ie to provide an equal or higher latency than suggestedLatency wherever possible."))
(defun make-stream-parameters ()
  "Create and return a fresh STREAM-PARAMETERS instance; its slots are
meant to be filled via the stream-parameters-* accessors."
  (make-instance (find-class 'stream-parameters)))
(export 'make-stream-parameters)
;; Foreign type used for PaStreamParameters* arguments; translation
;; methods below convert between NIL/NULL and Lisp stream-parameters.
(defctype p-stream-parameters :pointer "Pointer to stream parameters")
(define-foreign-type p-stream-parameters ()
()
(:actual-type :pointer)
(:simple-parser p-stream-parameters))
(defmethod translate-from-foreign (value (type p-stream-parameters))
  ;; NULL maps to NIL; otherwise copy each struct field into a fresh
  ;; stream-parameters object.  The sample-format bitmask is converted
  ;; back to a keyword (first symbol of the decoded bitfield).
  (unless (null-pointer-p value)
    (let ((result (make-instance 'stream-parameters)))
      (setf (slot-value result 'device)
            (foreign-slot-value value 'stream-parameters 'device))
      (setf (slot-value result 'channel-count)
            (foreign-slot-value value 'stream-parameters 'channel-count))
      (setf (slot-value result 'sample-format)
            (first (foreign-bitfield-symbols
                    'sample-format
                    (foreign-slot-value value 'stream-parameters 'sample-format))))
      (setf (slot-value result 'suggested-latency)
            (foreign-slot-value value 'stream-parameters 'suggested-latency))
      result)))
;; Allocate and fill a foreign stream-parameters struct from the Lisp
;; object; NIL translates to a NULL pointer ("no direction").  The
;; allocation is released by free-translated-object after the call.
(defmethod translate-to-foreign (value (type p-stream-parameters))
(if value
(let ((parameters (foreign-alloc 'stream-parameters)))
(setf (foreign-slot-value parameters 'stream-parameters 'device) (slot-value value 'device)
(foreign-slot-value parameters 'stream-parameters 'channel-count) (slot-value value 'channel-count)
(foreign-slot-value parameters 'stream-parameters 'sample-format) (foreign-bitfield-value
'sample-format
(list (slot-value value 'sample-format)))
(foreign-slot-value parameters 'stream-parameters 'suggested-latency) (slot-value value 'suggested-latency)
;; host-api-specific-stream-info is not modelled on the Lisp side;
;; always pass NULL.
(foreign-slot-value parameters 'stream-parameters 'host-api-specific-stream-info) (null-pointer))
parameters)
(null-pointer)))
(defmethod free-translated-object (value (type p-stream-parameters) param)
  ;; PARAM is unused; only free buffers translate-to-foreign actually
  ;; allocated (i.e. non-NULL pointers).
  (declare (ignore param))
  (when (not (null-pointer-p value))
    (foreign-free value)))
;; Foreign entry point: Pa_IsFormatSupported.
;; NOTE(review): unlike the sibling wrappers, is-format-supported is not
;; exported here -- confirm whether that is intentional.
(defcfun ("Pa_IsFormatSupported" %is-format-supported) pa-error
(input-parameters p-stream-parameters)
(output-parameters p-stream-parameters)
(sample-rate :double))
(defun is-format-supported (input-parameters output-parameters sample-rate)
"Determine whether it would be possible to open a stream with the specified parameters.
@arg[input-parameters]{A structure that describes the input parameters used to open a stream. The suggested-latency slot is ignored. See @class{stream-parameters} for a description of these parameters. input-parameters must be NIL for output-only streams.}
@arg[output-parameters]{A structure that describes the output parameters used to open a stream. The suggested-latency field is ignored. See @class{stream-parameters} for a description of these parameters. output-parameters must be NIL for input-only streams.}
@arg[sample-rate]{The required sampleRate. For full-duplex streams it is the sample rate for both input and output.}
@begin{return}
Returns 0 if the format is supported, and raises an error indicating why the format is not supported otherwise. The constant @variable{+format-is-supported+} is provided to compare with the return value for success.
@end{return}
"
(raise-if-error (%is-format-supported input-parameters output-parameters sample-rate)))
;; Foreign entry point: Pa_HostApiTypeIdToHostApiIndex.
(defcfun ("Pa_HostApiTypeIdToHostApiIndex" %host-api-type-id-to-host-api-index) host-api-index
(type host-api-type-id))
(defun host-api-type-id-to-host-api-index (type)
"Convert a static host API unique identifier, into a runtime host API index.
@arg[type]{A unique host API identifier belonging to the PaHostApiTypeId enumeration.}
@begin{return}
A valid host-api-index ranging from 0 to @code{(- (@fun{get-host-api-count}) 1)} or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
The host-api-not-found error indicates that the host API specified by the type parameter is not available.
"
(raise-if-error (%host-api-type-id-to-host-api-index type)))
(export 'host-api-type-id-to-host-api-index)
;; Foreign entry point: Pa_GetDefaultInputDevice.
(defcfun ("Pa_GetDefaultInputDevice" %get-default-input-device) device-index)
(defun get-default-input-device ()
"Retrieve the index of the default input device. The result can be used in the inputDevice parameter to @fun{open-stream}.
@begin{return}
The default input device index for the default host API, or raise no-device if no default input device is available or an error was encountered.
@end{return}"
(raise-if-error (%get-default-input-device)))
(export 'get-default-input-device)
;; Mirror of the C PaHostApiInfo struct.
(defcstruct host-api-info
(struct-version :int)
(type host-api-type-id)
(name :string)
(device-count :int)
(default-input-device device-index)
(default-output-device device-index))
;; Lisp-side copy of a foreign host-api-info struct; slots are filled
;; from a foreign pointer by the initialize-instance :after method.
(defclass host-api-info nil
((struct-version :reader host-api-info-struct-version :documentation "Struct version.")
(type :reader host-api-info-type :documentation "The well known unique identifier of this host API.")
(name :reader host-api-info-name :documentation "A textual description of the host API for display on user interfaces.")
(device-count :reader host-api-info-device-count :documentation "The number of devices belonging to this host API. This field may be used in conjunction with host-api-device-index-to-device-index to enumerate all devices for this host API." )
(default-input-device :reader host-api-info-default-input-device
:documentation "The default input device for this host API. The value will be a device index ranging from 0 to (- (get-device-count) 1), or no-device if no default input device is available.")
(default-output-device :reader
host-api-info-default-output-device
:documentation "The default output device for this host API. The value will be a device index ranging from 0 to (- (get-device-count) 1), or paNoDevice if no default output device is available."))
(:documentation "A structure containing information about a particular host API."))
(defmethod initialize-instance :after
    ((inst host-api-info) &key pointer)
  ;; Copy each field of the foreign host-api-info struct at POINTER into
  ;; the identically named slot.
  (dolist (slot '(struct-version type name device-count
                  default-input-device default-output-device))
    (setf (slot-value inst slot)
          (foreign-slot-value pointer 'host-api-info slot))))
;; Public API for host-api-info; defgenerics only document the readers
;; declared in the defclass.
(export 'host-api-info)
(export 'host-api-info-type)
(export 'host-api-info-name)
(export 'host-api-info-device-count)
(export 'host-api-info-default-input-device)
(export 'host-api-info-default-output-device)
(defgeneric host-api-info-type (instance) (:documentation "The well known unique identifier of this host API."))
(defgeneric host-api-info-name (instance) (:documentation "A textual description of the host API for display on user interfaces."))
(defgeneric host-api-info-device-count (instance) (:documentation "The number of devices belonging to this host API. This field may be used in conjunction with host-api-device-index-to-device-index to enumerate all devices for this host API."))
(defgeneric host-api-info-default-input-device (instance) (:documentation "The default input device for this host API. The value will be a device index ranging from 0 to (- (get-device-count) 1), or no-device if no default input device is available."))
(defgeneric host-api-info-default-output-device (instance) (:documentation "The default output device for this host API. The value will be a device index ranging from 0 to (- (get-device-count) 1), or paNoDevice if no default output device is available."))
;; Foreign return type for Pa_GetHostApiInfo: NULL becomes NIL,
;; otherwise the pointer is wrapped in a host-api-info instance.
(defctype p-host-api-info :pointer)
(define-foreign-type p-host-api-info ()
()
(:actual-type :pointer)
(:simple-parser p-host-api-info))
(defmethod translate-from-foreign (value (type p-host-api-info))
(unless (null-pointer-p value)
(make-instance 'host-api-info :pointer value)))
;; Foreign entry point: Pa_GetHostApiInfo.
(defcfun ("Pa_GetHostApiInfo" %get-host-api-info) p-host-api-info
(host-api host-api-index))
(defun get-host-api-info (host-api)
"Retrieve a pointer to a structure containing information about a specific host Api.
@begin[host-api]{arg}
A valid host API index ranging from 0 to @code{(- (@fun{get-host-api-count}) 1)}
@end{arg}
@begin{return}
An object of @class{host-api-info} describing a specific host API. If the hostApi parameter is out of range or an error is encountered, the function returns NIL.
@end{return}
"
(%get-host-api-info host-api))
(export 'get-host-api-info)
;; Foreign entry point: Pa_Initialize.
(defcfun ("Pa_Initialize" %initialize) pa-error)
(defun initialize ()
"Library initialization function - call this before using PortAudio. This function initializes internal data structures and prepares underlying host APIs for use. With the exception of @fun{get-version}, @fun{get-version-text}, and @fun{get-error-text}, this function MUST be called before using any other PortAudio API functions.
If initialize is called multiple times, each successful call must be matched with a corresponding call to @fun{terminate}. Pairs of calls to initialize/@fun{terminate} may overlap, and are not required to be fully nested.
Note that if initialize raises an error, @fun{terminate} should NOT be called.
@result{NIL if successful, otherwise raises an error indicating the cause of failure.}
"
(raise-if-error (%initialize)))
(export 'initialize)
;; Foreign entry point: Pa_Terminate.
(defcfun ("Pa_Terminate" %terminate) pa-error)
(defun terminate ()
"
Library termination function - call this when finished using PortAudio. This function deallocates all resources allocated by PortAudio since it was initialized by a call to @fun{initialize}. In cases where @fun{initialize} has been called multiple times, each call must be matched with a corresponding call to terminate. The final matching call to terminate will automatically close any PortAudio streams that are still open.
terminate MUST be called before exiting a program which uses PortAudio. Failure to do so may result in serious resource leaks, such as audio devices not being available until the next reboot.
@begin{return}
NIL if successful, otherwise raises an error indicating the cause of failure.
@end{return}
"
(raise-if-error (%terminate)))
(export 'terminate)
;; Foreign entry point: Pa_GetDefaultHostApi.
(defcfun ("Pa_GetDefaultHostApi" %get-default-host-api) host-api-index)
(defun get-default-host-api ()
"Retrieve the index of the default host API. The default host API will be the lowest common denominator host API on
the current platform and is unlikely to provide the best performance.
@begin{return}
A non-negative value ranging from 0 to @code{(- (@fun{get-host-api-count}) 1)} indicating the default host API index or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
"
(raise-if-error (%get-default-host-api)))
(export 'get-default-host-api)
;; Foreign entry point: Pa_OpenDefaultStream.  The first argument is an
;; out-parameter: Pa writes the new PaStream* into it.
(defcfun ("Pa_OpenDefaultStream" %open-default-stream) pa-error
(pa-stream :pointer)
(num-input-channels :int)
(num-output-channels :int)
(sample-format :unsigned-long)
(sample-rate :double)
(frames-per-buffer :unsigned-long)
(stream-callback :pointer)
(user-data (:pointer :void)))
(defun open-default-stream (num-input num-output sample-format sample-rate frames-per-buffer)
  "A simplified version of @fun{open-stream} that opens the default input and/or output devices.
@arg[num-input]{The number of channels of sound that will be returned by @fun{read-stream}. It can range from 1 to the value of max-input-channels in the @class{device-info} class for the default input device. If 0 the stream is opened as an output-only stream.}
@arg[num-output]{The number of channels of sound to be passed to @fun{write-stream}. It can range from 1 to the value of max-output-channels in the @class{device-info} class for the default output device. If 0 the stream is opened as an input-only stream.}
@arg[sample-format]{The sample format of both the input and output buffers passed to and from @fun{read-stream} and @fun{write-stream}. sample-format may be any of the formats described by the sample-format enumeration.}
@arg[sample-rate]{Same as @fun{open-stream} parameter of the same name.}
@arg[frames-per-buffer]{Same as @fun{open-stream} parameter of the same name.}
@result{As for @fun{open-stream}}"
  (with-foreign-object (handle :pointer)
    ;; Pa writes the new PaStream pointer into HANDLE; NULL callback and
    ;; user-data select the blocking read/write API.
    (raise-if-error
     (%open-default-stream handle num-input num-output
                           (foreign-bitfield-value 'sample-format (list sample-format))
                           sample-rate frames-per-buffer
                           (null-pointer) (null-pointer)))
    ;; Channel counts of 0 are stored as NIL so the wrapper records a
    ;; missing direction.
    (make-instance 'pa-stream
                   :handle (mem-ref handle :pointer)
                   :input-sample-format sample-format
                   :input-channels (unless (zerop num-input) num-input)
                   :output-sample-format sample-format
                   :output-channels (unless (zerop num-output) num-output)
                   :frames-per-buffer frames-per-buffer)))
(export 'open-default-stream)
;; (defctype stream-flags :unsigned-long)
;; Foreign entry point: Pa_OpenStream.  The first argument is an
;; out-parameter receiving the new PaStream*.
(defcfun ("Pa_OpenStream" %open-stream) pa-error (pa-stream :pointer)
(input-parameters p-stream-parameters)
(output-parameters p-stream-parameters)
(sample-rate :double)
(frames-per-buffer :unsigned-long)
(stream-flags stream-flags)
(stream-callback :pointer)
(user-data (:pointer :void)))
(defun open-stream (input-parameters output-parameters sample-rate frames-per-buffer stream-flags)
"
Opens a stream for either input, output or both.
@arg[input-parameters]{A structure that describes the input parameters used by the opened stream. See @class{stream-parameters} for a description of these parameters. input-parameters must be NIL for output-only streams.}
@arg[output-parameters]{A structure that describes the output parameters used by the opened stream. See @class{stream-parameters} for a description of these parameters. output-parameters must be NIL for input-only streams.}
@arg[sample-rate]{The desired sample-rate. For full-duplex streams it is the sample rate for both input and output}
@arg[frames-per-buffer]{Preferred block granularity for a blocking read/write stream.}
@arg[stream-flags]{List of flags which modify the behavior of the streaming process. Some flags may only be relevant to certain buffer formats.}
@begin{return}
Upon success open-stream returns object of @class{pa-stream} class. The stream is inactive (stopped). If a call to open-stream fails, an error code is raised and the value of stream is NIL.
@end{return}
"
;; NULL callback and user-data select the blocking read/write API.
(with-foreign-object (handle :pointer)
(raise-if-error (%open-stream handle input-parameters output-parameters sample-rate frames-per-buffer
stream-flags
(null-pointer) (null-pointer)))
;; Record per-direction format/channel info (NIL for a missing direction)
;; so read-stream/write-stream can size their buffers.
(make-instance 'pa-stream :handle (mem-ref handle :pointer)
:input-sample-format
(when input-parameters (stream-parameters-sample-format input-parameters))
:input-channels
(when input-parameters (stream-parameters-channel-count input-parameters))
:output-sample-format
(when output-parameters (stream-parameters-sample-format output-parameters))
:output-channels
(when output-parameters (stream-parameters-channel-count output-parameters))
:frames-per-buffer frames-per-buffer)))
(export 'open-stream)
;; Foreign entry point: Pa_CloseStream.
(defcfun ("Pa_CloseStream" %close-stream) pa-error (pa-stream p-pa-stream))
(defun close-stream (pa-stream)
"Closes an audio stream. If the audio stream is active it discards any pending buffers as if @fun{abort-stream} had been called."
;; The handle is nulled out even when Pa_CloseStream signals, so the
;; wrapper can never be reused against a dangling pointer.
(unwind-protect
(raise-if-error (%close-stream pa-stream))
(setf (pa-stream-handle pa-stream) (null-pointer))))
(export 'close-stream)
;; Foreign entry point: Pa_IsStreamStopped.
(defcfun ("Pa_IsStreamStopped" %is-stream-stopped) pa-error (pa-stream p-pa-stream))
(defun is-stream-stopped (pa-stream)
"
Determine whether the stream is stopped. A stream is considered to be stopped prior to a successful call to @fun{start-stream} and after a successful call to @fun{stop-stream} or @fun{abort-stream}.
@begin{return}
Returns one (1) when the stream is stopped, zero (0) when the stream is running or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
"
(raise-if-error (%is-stream-stopped pa-stream)))
(export 'is-stream-stopped)
;; Foreign entry point: Pa_GetStreamTime.  Returns a PaTime double
;; directly; unlike most wrappers this result is not a PaError code.
(defcfun ("Pa_GetStreamTime" %get-stream-time) pa-time (pa-stream p-pa-stream))
(defun get-stream-time (pa-stream)
"Returns valid time values for the entire life of the stream, from when the stream is opened until it is closed. Starting and stopping the stream does not affect the passage of time returned by get-stream-time.
This time may be used for synchronizing other events to the audio stream, for example synchronizing audio to MIDI.
@result{The stream's current time in seconds, or 0 if an error occurred.}"
(%get-stream-time pa-stream))
(export 'get-stream-time)
;; Foreign entry point: Pa_GetHostApiCount.
(defcfun ("Pa_GetHostApiCount" %get-host-api-count) host-api-index)
(defun get-host-api-count ()
"Retrieve the number of available host APIs. Even if a host API is available it may have no devices available.
@begin{return}
A non-negative value indicating the number of available host APIs or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}"
(raise-if-error (%get-host-api-count)))
(export 'get-host-api-count)
;; Foreign entry point: Pa_IsStreamActive.
(defcfun ("Pa_IsStreamActive" %is-stream-active) :int (pa-stream p-pa-stream))
(defun is-stream-active (pa-stream)
"
Determine whether the stream is active. A stream is active after a successful call to @fun{start-stream}, until it becomes inactive either as a result of a call to @fun{stop-stream} or @fun{abort-stream}. In the latter case, the stream is considered inactive after the last buffer has finished playing.
@begin{return}
Returns one (1) when the stream is active (ie playing or recording audio), zero (0) when not playing or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
"
(raise-if-error (%is-stream-active pa-stream)))
(export 'is-stream-active)
;; Foreign entry point: Pa_GetStreamWriteAvailable.
(defcfun ("Pa_GetStreamWriteAvailable" %get-stream-write-available) :long
(pa-stream p-pa-stream))
(defun get-stream-write-available (pa-stream)
"Retrieve the number of frames that can be written to the stream without waiting.
@begin{return}
A non-negative value representing the maximum number of frames that can be written to the stream without blocking or busy waiting or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
"
(raise-if-error (%get-stream-write-available pa-stream)))
(export 'get-stream-write-available)
;; Foreign entry point: Pa_GetStreamReadAvailable.
(defcfun ("Pa_GetStreamReadAvailable" %get-stream-read-available) :long
(pa-stream p-pa-stream))
(defun get-stream-read-available (pa-stream)
"Retrieve the number of frames that can be read from the stream without waiting.
@begin{return}
Returns a non-negative value representing the maximum number of frames that can be read from the stream without blocking or busy waiting or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
"
(raise-if-error (%get-stream-read-available pa-stream)))
(export 'get-stream-read-available)
;; Foreign entry point: Pa_ReadStream (blocking read into BUFFER).
(defcfun ("Pa_ReadStream" %read-stream) pa-error
  (pa-stream p-pa-stream)
  (buffer (:pointer :void))
  (frames :unsigned-long))
(defun read-stream (pa-stream)
  "Read samples from an input stream. The function doesn't return until the entire buffer has been filled - this may involve waiting for the operating system to supply the data. Size of returned array equal to @code{(* frames-per-buffer channel-count)}.
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
@begin{return}
On success array of data will be returned; NIL is returned when PA-STREAM has no input direction. Note that the Pa_ReadStream result code (e.g. input overflow) is currently ignored.
@end{return}
"
  (let ((sample-format (pa-stream-input-sample-format pa-stream))
        (frames (pa-stream-frames-per-buffer pa-stream))
        (channel-count (pa-stream-input-channels pa-stream)))
    ;; BUGFIX: the guard must run BEFORE the array is allocated.
    ;; Previously (* channel-count frames) was evaluated first, so an
    ;; output-only stream (channel-count = NIL) signalled a type error
    ;; before the WHEN was ever reached; now it returns NIL as intended.
    (when (and sample-format channel-count)
      (let ((array (make-array (* channel-count frames)
                               :element-type (cffi-type-to-lisp sample-format))))
        (with-pointer-to-array (array pointer sample-format (* channel-count frames) :copy-out)
          (%read-stream pa-stream pointer frames))
        array))))
(export 'read-stream)
(defun separate-array-to-channels (pa-stream array)
  "De-interleave the flat sample ARRAY into a per-channel 2D array.
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
@arg[array]{Flat interleaved array, as received from @fun{read-stream}.}
@begin{return}
A (channel-count x frames-per-buffer) array whose element type matches the
stream's input sample format.
@end{return}
"
  (let* ((fmt (pa-stream-input-sample-format pa-stream))
         (n-frames (pa-stream-frames-per-buffer pa-stream))
         (n-channels (pa-stream-input-channels pa-stream))
         (result (make-array (list n-channels n-frames)
                             :element-type (cffi-type-to-lisp fmt))))
    ;; Interleaved layout: sample for channel C of frame F lives at
    ;; index (+ (* F n-channels) C) in the flat array.
    (loop :for frame :below n-frames
          :do (loop :for chan :below n-channels
                    :do (setf (aref result chan frame)
                              (aref array (+ (* frame n-channels) chan)))))
    result))
(export 'separate-array-to-channels)
(defun merge-channels-into-array (pa-stream channels)
  "Merge subarrays of (channelcount)-dimensional array to flat array.
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
@arg[channels]{Two-dimensional (channel-count x frames) array of samples, one row per channel.}
@begin{return}
Vector of data, that can be used with @fun{write-stream}.
@end{return}
"
  ;; NOTE(review): although the result is documented as input for
  ;; WRITE-STREAM, the *input* accessors (sample format / channel count)
  ;; of the stream are consulted here -- TODO confirm whether the output
  ;; accessors were intended.
  (let* ((sample-format (pa-stream-input-sample-format pa-stream))
         (frames (pa-stream-frames-per-buffer pa-stream))
         (channel-count (pa-stream-input-channels pa-stream))
         (merged (make-array (* frames channel-count) :element-type (cffi-type-to-lisp sample-format))))
    ;; Inverse of SEPARATE-ARRAY-TO-CHANNELS: channel C of frame F goes
    ;; to flat index (+ (* F channel-count) C).
    (dotimes (frame frames merged)
      (dotimes (channel channel-count merged)
        (setf (aref merged (+ (* frame channel-count) channel)) (aref channels channel frame))))))
(export 'merge-channels-into-array)
(defun read-stream-into-array (pa-stream array)
  "Read samples from an input stream. The function doesn't return until the entire buffer has been filled - this may involve waiting for the operating system to supply the data.
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
@arg[array]{Simple array with has element-type equal to sample-format from @fun{open-stream}. Size of array equal to @code{(* frames-per-buffer channel-count)}.}
@begin{return}
On success NIL will be returned, or :input-overflowed if input data was discarded by PortAudio after the previous call and before this call.
@end{return}"
  ;; Like READ-STREAM, but fills a caller-supplied array instead of
  ;; allocating a fresh one on every call.
  (let* ((sample-format (pa-stream-input-sample-format pa-stream))
         (frames (pa-stream-frames-per-buffer pa-stream))
         (channel-count (pa-stream-input-channels pa-stream)))
    (when (and sample-format
               channel-count)
      ;; :copy-out writes the foreign buffer contents back into ARRAY.
      ;; NOTE(review): the raw PA-ERROR keyword from %READ-STREAM flows
      ;; out as the return value (not passed through RAISE-IF-ERROR), so
      ;; success presumably yields :NO-ERROR rather than NIL -- TODO
      ;; confirm against the docstring.
      (with-pointer-to-array (array pointer sample-format (* channel-count frames) :copy-out)
        (%read-stream pa-stream pointer frames)))))
(export 'read-stream-into-array)
;; Raw FFI binding for Pa_WriteStream: blocking write of FRAMES frames of
;; interleaved output samples from BUFFER.
(defcfun ("Pa_WriteStream" %write-stream) pa-error (pa-stream p-pa-stream)
  (buffer :pointer)
  (frames :unsigned-long))
(defun write-stream (pa-stream buffer)
  "Write samples to an output stream. This function doesn't return until the entire buffer has been consumed - this may involve waiting for the operating system to consume the data. Size of buffer should be equal to @code{(* frames-per-buffer channel-count)}.
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
@arg[buffer]{A array of sample frames. The buffer contains samples in the format specified by the @code{(stream-parameters-sample-format output-parameters)} field used to open the stream, and the number of channels specified by @code{(stream-parameters-num-channels output-parameters)}.}
@begin{return}
On success NIL will be returned, or :output-underflowed if additional output data was inserted after the previous call and before this call.
@end{return}
"
  ;; Output direction, so the *output* accessors are consulted here.
  (let* ((sample-format (pa-stream-output-sample-format pa-stream))
         (frames (pa-stream-frames-per-buffer pa-stream))
         (channel-count (pa-stream-output-channels pa-stream)))
    (when (and sample-format
               channel-count)
      ;; :copy-in copies BUFFER into the foreign memory before the body runs.
      (with-pointer-to-array (buffer pointer sample-format (* channel-count frames) :copy-in)
        (%write-stream pa-stream pointer frames)))))
(export 'write-stream)
;; Raw FFI binding for Pa_StartStream.
(defcfun ("Pa_StartStream" %start-stream) pa-error (pa-stream p-pa-stream))
(defun start-stream (pa-stream)
  "Commences audio processing.
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
Raises an error on failure; RAISE-IF-ERROR returns NIL for :NO-ERROR."
  (raise-if-error (%start-stream pa-stream)))
(export 'start-stream)
;; Raw FFI binding for Pa_AbortStream.
(defcfun ("Pa_AbortStream" %abort-stream) pa-error (pa-stream p-pa-stream))
(defun abort-stream (pa-stream)
  "Terminates audio processing immediately without waiting for pending buffers to complete.
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
Raises an error on failure; RAISE-IF-ERROR returns NIL for :NO-ERROR."
  (raise-if-error (%abort-stream pa-stream)))
(export 'abort-stream)
;; Raw FFI binding for Pa_StopStream.
(defcfun ("Pa_StopStream" %stop-stream) pa-error (pa-stream p-pa-stream))
(defun stop-stream (pa-stream)
  "Terminates audio processing. It waits until all pending audio buffers have been played before it returns.
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
Raises an error on failure; RAISE-IF-ERROR returns NIL for :NO-ERROR."
  (raise-if-error (%stop-stream pa-stream)))
;; Consistency fix: START-STREAM and ABORT-STREAM are exported, but
;; STOP-STREAM was not, even though the WITH-*-AUDIO-STREAM macros expand
;; into STOP-STREAM calls inside user packages.
(export 'stop-stream)
;; Foreign layout of the C PaStreamInfo struct.  A CLOS mirror class with
;; the same name is defined below; pointers returned by Pa_GetStreamInfo
;; are translated into instances of that class.
(defcstruct stream-info
  (struct-version :int)
  (input-latency pa-time)     ; seconds
  (output-latency pa-time)    ; seconds
  (sample-rate :double))      ; Hertz
;; CLOS mirror of the foreign STREAM-INFO struct; populated from a foreign
;; pointer in the INITIALIZE-INSTANCE :AFTER method below.
(defclass stream-info nil
  ((struct-version :reader stream-info-struct-version :documentation "Struct version")
   (input-latency :reader stream-info-input-latency :documentation "The input latency of the stream in seconds. This value provides the most accurate estimate of input latency available to the implementation. It may differ significantly from the suggestedLatency value passed to open-stream. The value of this field will be zero (0.) for output-only streams.")
   (output-latency :reader stream-info-output-latency :documentation "The output latency of the stream in seconds. This value provides the most accurate estimate of output latency available to the implementation. It may differ significantly from the suggestedLatency value passed to open-stream. The value of this field will be zero (0.) for input-only streams.")
   (sample-rate :reader stream-info-sample-rate :documentation "The sample rate of the stream in Hertz (samples per second). In cases where the hardware sample rate is inaccurate and PortAudio is aware of it, the value of this field may be different from the sample-rate parameter passed to open-stream. If information about the actual hardware sample rate is not available, this field will have the same value as the sample-rate parameter passed to open-stream."))
  (:documentation "A structure containing unchanging information about an open stream."))
;; Copy each field of the foreign PaStreamInfo struct (POINTER) into the
;; freshly made CLOS instance, so the instance remains valid after the
;; foreign memory is no longer accessible.
(defmethod initialize-instance :after
    ((inst stream-info) &key pointer)
  (with-foreign-slots ((struct-version input-latency output-latency
                        sample-rate)
                       pointer stream-info)
    (setf (slot-value inst 'struct-version) struct-version)
    (setf (slot-value inst 'input-latency) input-latency)
    (setf (slot-value inst 'output-latency) output-latency)
    (setf (slot-value inst 'sample-rate) sample-rate)))
;; Public readers for STREAM-INFO.  STREAM-INFO-STRUCT-VERSION is
;; deliberately left unexported (internal versioning detail).
(export 'stream-info)
(export 'stream-info-input-latency)
(export 'stream-info-output-latency)
(export 'stream-info-sample-rate)
;; Explicit DEFGENERICs exist only to attach user documentation to the
;; reader generic functions created by the DEFCLASS above.
(defgeneric stream-info-input-latency (instance) (:documentation "The input latency of the stream in seconds. This value provides the most accurate estimate of input latency available to the implementation. It may differ significantly from the suggestedLatency value passed to open-stream. The value of this field will be zero (0.) for output-only streams."))
(defgeneric stream-info-output-latency (instance) (:documentation "The output latency of the stream in seconds. This value provides the most accurate estimate of output latency available to the implementation. It may differ significantly from the suggestedLatency value passed to open-stream. The value of this field will be zero (0.) for input-only streams."))
(defgeneric stream-info-sample-rate (instance) (:documentation "The sample rate of the stream in Hertz (samples per second). In cases where the hardware sample rate is inaccurate and PortAudio is aware of it, the value of this field may be different from the sample-rate parameter passed to open-stream. If information about the actual hardware sample rate is not available, this field will have the same value as the sample-rate parameter passed to open-stream."))
;; Foreign type that automatically wraps a PaStreamInfo* return value in a
;; STREAM-INFO CLOS instance (NULL pointers become NIL).  Follows the same
;; defctype + define-foreign-type pattern used for P-HOST-ERROR-INFO above.
(defctype p-stream-info :pointer "Pointer to stream-info")
(define-foreign-type p-stream-info ()
  ()
  (:actual-type :pointer)
  (:simple-parser p-stream-info))
;; Translate C -> Lisp: a NULL pointer maps to NIL, anything else to a
;; populated STREAM-INFO instance.
(defmethod translate-from-foreign (value (type p-stream-info))
  (unless (null-pointer-p value)
    (make-instance 'stream-info :pointer value)))
;; Raw FFI binding for Pa_GetStreamInfo; the P-STREAM-INFO return type
;; performs the pointer-to-instance translation.
(defcfun ("Pa_GetStreamInfo" %get-stream-info) p-stream-info (pa-stream p-pa-stream))
(defun get-stream-info (pa-stream)
  "Retrieve a object of class @class{stream-info} containing information about the specified stream.
@begin{return}
A object of @class{stream-info} structure. If the stream parameter invalid, or an error is encountered, the function returns NIL.
@end{return}
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
"
  (%get-stream-info pa-stream))
(export 'get-stream-info)
;; Raw FFI binding for Pa_GetSampleSize.
(defcfun ("Pa_GetSampleSize" %get-sample-size) :int (format sample-format))
(defun get-sample-size (format)
  "Retrieve the size of a given sample format in bytes.
@arg[format]{A single SAMPLE-FORMAT keyword, e.g. :FLOAT.}
@result{The size in bytes of a single sample in the specified format, or paSampleFormatNotSupported if the format is not supported.}"
  ;; FORMAT is a single keyword; wrap it in a list so the bitfield value
  ;; can be computed, and translate any error code into a condition.
  (raise-if-error (%get-sample-size (foreign-bitfield-value 'sample-format (list format)))))
(export 'get-sample-size)
;; Raw FFI binding for Pa_HostApiDeviceIndexToDeviceIndex.
(defcfun ("Pa_HostApiDeviceIndexToDeviceIndex" %host-api-device-index-to-device-index) device-index
  (host-api host-api-index)
  (host-api-device-index :int))
(defun host-api-device-index-to-device-index (host-api host-api-device-index)
  "Convert a host-API-specific device index to standard PortAudio device index. This function may be used in conjunction with the deviceCount field of PaHostApiInfo to enumerate all devices for the specified host API.
@arg[host-api]{A valid host API index ranging from 0 to @code{(- (@fun{get-host-api-count}) 1)}}
@arg[host-api-device-index]{A valid per-host device index in the range 0 to @code{(- (@fun{host-api-info-device-count} (@fun{get-host-api-info} host-api)) 1)}}
@begin{return}
A non-negative index ranging from 0 to @code{(- (@fun{get-device-count}) 1)} or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
A invalid-host-api error indicates that the host API index specified by the hostApi parameter is out of range.
A invalid-device error indicates that the host-api-device-index parameter is out of range.
"
  (raise-if-error (%host-api-device-index-to-device-index host-api host-api-device-index)))
(export 'host-api-device-index-to-device-index)
;; Raw FFI binding for Pa_Sleep.
(defcfun ("Pa_Sleep" %pa-sleep) :void (msec :long))
(defun pa-sleep (msec)
  "
Put the caller to sleep for at least 'msec' milliseconds. This function is provided only as a convenience for authors of portable code (such as the tests and examples in the PortAudio distribution.)
The function may sleep longer than requested so don't rely on this for accurate musical timing.
@arg[msec]{Minimum number of milliseconds to sleep.}
"
  (%pa-sleep msec))
(export 'pa-sleep)
(defmacro with-audio (&body body)
  "Evaluate BODY between PortAudio INITIALIZE and TERMINATE calls.
TERMINATE is guaranteed to run via UNWIND-PROTECT, even if BODY exits
non-locally (error, RETURN-FROM, THROW)."
  `(progn
     (initialize)
     (unwind-protect
          (progn ,@body)
       (terminate))))
(export 'with-audio)
;; Generated by me, based on "<>" version
;; Convenience wrapper: open the default stream, start it, run BODY, and
;; always stop and close the stream on the way out.
(defmacro with-default-audio-stream ((var num-input num-output &key (sample-format :float) (sample-rate 44100d0) (frames-per-buffer 1024)) &body body)
  "Execute body with opened and started stream VAR and shut down
the stream after it is done. It is required use these macro in with-audio or initialize/terminate environment.
@arg[var]{Symbol that is bound to the open stream inside BODY.}
@arg[num-input]{Number of input channels.}
@arg[num-output]{Number of output channels.}"
  `(let ((,var nil))
     (unwind-protect
          (progn
            (setf ,var (open-default-stream ,num-input ,num-output ,sample-format ,sample-rate ,frames-per-buffer))
            (start-stream ,var)
            ,@body)
       ;; ,var is non-NIL iff OPEN-DEFAULT-STREAM succeeded, so the
       ;; cleanup never touches a stream that was never opened.
       ;; NOTE(review): if START-STREAM itself signals, STOP-STREAM is
       ;; still called on the never-started stream -- TODO confirm that
       ;; PortAudio tolerates this.
       (progn
         (when ,var
           (stop-stream ,var)
           (close-stream ,var))))))
(export 'with-default-audio-stream)
;; Convenience wrapper: open a stream with explicit parameters, start it,
;; run BODY, and always stop and close the stream on the way out.
(defmacro with-audio-stream ((var input-parameters output-parameters &key (sample-rate 44100d0) (frames-per-buffer 1024) (stream-flags '(:no-flag))) &body body)
  "Execute body with opened and started stream VAR and shut down
the stream after it is done. It is required use these macro in with-audio or initialize/terminate environment.
STREAM-FLAGS is taken literally (unevaluated), e.g. (:clip-off :dither-off)."
  ;; Bug fix: the default used to be (LIST :NO-FLAG).  Because the
  ;; expansion quotes the flags form (',stream-flags), that default
  ;; expanded to the literal list (LIST :NO-FLAG) -- containing the bogus
  ;; symbol LIST as a flag -- instead of (:NO-FLAG).  A quoted literal
  ;; default keeps the established quote-the-flags convention while
  ;; making the default expansion a valid STREAM-FLAGS keyword list.
  `(let ((,var nil))
     (unwind-protect
          (progn
            (setf ,var (open-stream ,input-parameters ,output-parameters ,sample-rate ,frames-per-buffer ',stream-flags))
            (start-stream ,var)
            ,@body)
       ;; ,var is non-NIL iff OPEN-STREAM succeeded.
       (progn
         (when ,var
           (stop-stream ,var)
           (close-stream ,var))))))
(export 'with-audio-stream)
;; Print every standard sample rate supported for the given parameter
;; combination (either parameter may be NIL for half-duplex checks).
(defun print-supported-standard-sample-rates (input-params output-params)
  (mapcar
   (lambda (rate)
     ;; IS-FORMAT-SUPPORTED goes through RAISE-IF-ERROR, which returns
     ;; NIL for :NO-ERROR -- so UNLESS fires (and the rate is printed)
     ;; exactly when the format IS supported.  IGNORE-ERRORS swallows
     ;; the condition raised for unsupported combinations.
     (ignore-errors
       (unless (is-format-supported input-params output-params rate)
         (format t "~8,2F, " rate))))
   '(8000d0 9600d0 11025d0 12000d0 16000d0 22050d0 24000d0 32000d0
     44100d0 48000d0 88200d0 96000d0 192000d0))
  (format t "~%"))
(defun print-devices ()
  "List available sound devices, including device information.
Runs inside its own WITH-AUDIO scope, so no prior INITIALIZE is needed."
  (with-audio
    (format t "PortAudio version number = ~D~%PortAudio version text = ~A~%"
            (get-version) (get-version-text))
    (let ((num-devices (get-device-count))
          (input-parameters (make-stream-parameters))
          (output-parameters (make-stream-parameters)))
      (format t "Number of devices = ~D~%" num-devices)
      (dotimes (i num-devices)
        (let ((device-info (get-device-info i))
              (default-displayed nil))
          (format t "---------------------- device ~D~%" i)
          ;; Mark the device if it is the global default input, or the
          ;; default input of its own host API.
          (if (= i (get-default-input-device))
              (progn
                (format t "[ Default Input")
                (setf default-displayed t))
              (when (= i (host-api-info-default-input-device (get-host-api-info (device-info-host-api device-info))))
                ;; NOTE(review): this prints the HOST-API-INFO object
                ;; itself, not its name -- TODO confirm intended output.
                (format t "[ Default ~A Input" (get-host-api-info (device-info-host-api device-info)))
                (setf default-displayed t)))
          ;; Same marking for the output direction; the ~:[[~;,~] control
          ;; prints "[" if nothing was shown yet, "," otherwise.
          (if (= i (get-default-output-device))
              (progn
                (format t "~:[[~;,~]" default-displayed)
                (format t " Default Output")
                (setf default-displayed t))
              (when (= i (host-api-info-default-output-device (get-host-api-info (device-info-host-api device-info))))
                (format t "~:[[~;,~]" default-displayed)
                ;; NOTE(review): prints an extra "[" and the HOST-API-INFO
                ;; object rather than its name -- TODO confirm.
                (format t "[ Default ~A Output" (get-host-api-info (device-info-host-api device-info)))
                (setf default-displayed t)))
          (when default-displayed
            (format t " ]~%"))
          (format t "Name = ~A~%" (device-info-name device-info))
          (format t "Host API = ~A~%" (host-api-info-name (get-host-api-info (device-info-host-api device-info))))
          (format t "Max inputs = ~D" (device-info-max-input-channels device-info))
          (format t ", Max outputs = ~D~%" (device-info-max-output-channels device-info))
          (format t "Default low input latency = ~8,4F~%" (device-info-default-low-input-latency device-info))
          (format t "Default low output latency = ~8,4F~%" (device-info-default-low-output-latency device-info))
          (format t "Default high input latency = ~8,4F~%" (device-info-default-high-input-latency device-info))
          (format t "Default high output latency = ~8,4F~%" (device-info-default-high-output-latency device-info))
          (format t "Default sample rate = ~8,4F~%" (device-info-default-sample-rate device-info))
          ;; Fill both parameter structs for this device so supported
          ;; sample rates can be probed below.
          (setf
           (stream-parameters-device input-parameters) i
           (stream-parameters-channel-count input-parameters) (device-info-max-input-channels device-info)
           (stream-parameters-sample-format input-parameters) :float
           (stream-parameters-suggested-latency input-parameters) 0d0
           (stream-parameters-device output-parameters) i
           (stream-parameters-channel-count output-parameters) (device-info-max-output-channels device-info)
           (stream-parameters-sample-format output-parameters) :float
           (stream-parameters-suggested-latency output-parameters) 0d0)
          ;; Probe half-duplex input, half-duplex output, and full-duplex
          ;; rates, skipping directions the device does not support.
          (when (< 0 (stream-parameters-channel-count input-parameters))
            (format t "Supported standard sample rates~% for half-duplex float 32 bit ~D channel input = ~%"
                    (stream-parameters-channel-count input-parameters))
            (print-supported-standard-sample-rates input-parameters nil))
          (when (< 0 (stream-parameters-channel-count output-parameters))
            (format t "Supported standard sample rates~% for half-duplex float 32 bit ~D channel output = ~%"
                    (stream-parameters-channel-count output-parameters))
            (print-supported-standard-sample-rates nil output-parameters))
          (when (and (< 0 (stream-parameters-channel-count input-parameters)) (< 0 (stream-parameters-channel-count output-parameters)))
            (format t "Supported standard sample rates~% for full-duplex float 32 bit ~D channel input, ~D channel output = ~%"
                    (stream-parameters-channel-count input-parameters)
                    (stream-parameters-channel-count output-parameters))
            (print-supported-standard-sample-rates input-parameters output-parameters))))
      (format t "--------------------~%"))))
(export 'print-devices)
| null | https://raw.githubusercontent.com/filonenko-mikhail/cl-portaudio/c50cd061c25216a736f684e45101f5c0188a384f/src/portaudio.lisp | lisp | Generated by Verrazano 0.5
<--- added
cffi::float
cffi::int32
:int24 ; not supported
:int16 ; cffi::int16 troubles
cffi::int8
cffi::uint8
(:custom-format #x00010000) ; not supported
(:non-interleaved #x80000000)
(defconstant +non-interleaved+ #x80000000)
(:never-drop-input #x00000004)
(:prime-output-buffers-using-stream-callback #x00000008)
(:platform-specific-flags #xFFFF0000) | (in-package portaudio)
(define-foreign-library libportaudio
((:and :unix (:not :darwin)) (:or "libportaudio.so.2" "libportaudio.so"))
((:or :win32 :x86-64) (:or "libportaudio" "libportaudio-2"
"portaudio" "portaudio_x64" "portaudio_x86_64"))
((:or :win32 :x86) (:or "libportaudio" "libportaudio-2" "portaudio"
"portaudio_x86"))
(t (:default "libportaudio")))
(use-foreign-library libportaudio)
(defcfun ("Pa_GetErrorText" %get-error-text) :string (error-code :int))
Generated by me , based on " < > " version
(defmacro make-errors (&rest symbols)
`(eval-when (:compile-toplevel :load-toplevel :execute)
(defcenum pa-error ,@symbols)
,@(mapcar #'(lambda (sym)
`(define-condition ,(if (listp sym)
(car sym)
sym) (error)
((value :initform ,(if (listp sym)
(car sym)
sym)))
(:documentation "pa-error condition")
(:report (lambda (condition stream)
(format stream "~A."
(%get-error-text (foreign-enum-value 'pa-error (slot-value condition 'value))))))))
symbols)))
(make-errors
(:no-error 0)
(:not-anticipated -10000)
:unanticipated-host-error
:invalid-channel-count
:invalid-sample-rate
:invalid-device
:invalid-flag
:sample-format-not-supported
:bad-i-o-device-combination
:insufficient-memory
:buffer-too-big
:buffer-too-small
:null-callback
:bad-stream-ptr
:timed-out
:internal-error
:device-unavailable
:incompatible-host-api-specific-stream-info
:stream-is-stopped
:stream-is-not-stopped
:input-overflowed
:output-underflowed
:host-api-not-found
:invalid-host-api
:can-not-read-from-a-callback-stream
:can-not-write-to-a-callback-stream
:can-not-read-from-an-output-only-stream
:can-not-write-to-an-input-only-stream
:incompatible-stream-host-api
:bad-buffer-ptr)
(defun raise-if-error (err)
"Check err and raise condition if it is needed"
(cond
((numberp err)
(if (<= -10000 err -9972)
(error (foreign-enum-keyword 'pa-error err))
err))
(t
(if (not (eql err :no-error))
(error err)))))
(defbitfield (sample-format :unsigned-long)
)
(defun cffi-type-to-lisp (cffi-type)
(case cffi-type
(:float 'single-float)
(:int32 '(signed-byte 32))
(:int16 '(signed-byte 16))
(:int8 '(signed-byte 8))
(:uint8 '(unsigned-byte 8))))
(defbitfield (stream-flags :unsigned-long)
(:no-flag #x00000000)
(:clip-off #x00000001)
(:dither-off #x00000002)
)
(defconstant +format-is-supported+ 0)
(defconstant +frames-per-buffer-unspecified+ 0)
(defconstant +no-device+ -1)
(defconstant +use-host-api-specific-device-specification -2)
(defctype pa-stream-t :void)
(defctype host-api-index :int)
(defcenum host-api-type-id
(:in-development 0)
(:direct-sound 1)
(:mme 2)
(:asio 3)
(:sound-manager 4)
(:core-audio 5)
(:oss 7)
(:alsa 8)
(:al 9)
(:be-os 10)
(:wdmks 11)
(:jack 12)
(:wasapi 13)
(:audio-science-hpi 14))
(defclass pa-stream ()
((handle :initform (null-pointer)
:accessor pa-stream-handle
:initarg :handle :documentation "Foreign pointer to pa-stream")
(input-sample-format :initarg :input-sample-format :reader pa-stream-input-sample-format :documentation "Format of input samples")
(input-channels :initarg :input-channels :reader pa-stream-input-channels :documentation "Number of input channels")
(output-sample-format :initarg :output-sample-format :reader pa-stream-output-sample-format :documentation "Format of output samples")
(output-channels :initarg :output-channels :reader pa-stream-output-channels :documentation "Number of output channels")
(frames-per-buffer :initarg :frames-per-buffer :reader pa-stream-frames-per-buffer :documentation "Frames per buffer"))
(:documentation "A single PaStream can provide multiple channels of real-time streaming audio input and output to a client application. A stream provides access to audio hardware represented by one or more devices. Depending on the underlying Host API, it may be possible to open multiple streams using the same device, however this behavior is implementation defined. Portable applications should assume that a device may be simultaneously used by at most one stream."))
(export 'pa-stream)
(export 'pa-stream-input-sample-format)
(export 'pa-stream-input-channels)
(export 'pa-stream-output-sample-format)
(export 'pa-stream-output-channels)
(export 'pa-stream-frames-per-buffer)
(defgeneric pa-stream-input-sample-format (instance)
(:documentation "value of sample-format for input channel"))
(defgeneric pa-stream-input-channels (instance)
(:documentation "Number of input channels"))
(defgeneric pa-stream-output-sample-format (instance)
(:documentation "value of sample-format for output channel"))
(defgeneric pa-stream-output-channels (instance)
(:documentation "Number of output channels"))
(defgeneric pa-stream-frames-per-buffer (instance)
(:documentation "Frames per buffer for current stream"))
(defctype p-pa-stream :pointer "Pointer to pa-stream")
(define-foreign-type p-pa-stream ()
()
(:actual-type :pointer)
(:simple-parser p-pa-stream))
(defmethod translate-to-foreign (value (type p-pa-stream))
(pa-stream-handle value))
(defctype device-index :int)
(defctype pa-time :double)
(defcstruct host-error-info
(host-api-type host-api-type-id)
(error-code :long)
(error-text :string))
(defclass host-error-info nil
((host-api-type :reader host-error-info-host-api-type :documentation "the host API which returned the error code")
(error-code :reader host-error-info-error-code :documentation "the error code returned")
(error-text :reader host-error-info-error-text :documentation "a textual description of the error if available, otherwise a zero-length string"))
(:documentation "Structure used to return information about a host error condition."))
(defmethod initialize-instance :after
((inst host-error-info) &key pointer)
(with-foreign-slots ((host-api-type error-code error-text) pointer
host-error-info)
(setf (slot-value inst 'host-api-type) host-api-type)
(setf (slot-value inst 'error-code) error-code)
(setf (slot-value inst 'error-text) error-text)))
(export 'host-error-info)
(export 'host-error-info-host-api-type)
(export 'host-error-info-error-code)
(export 'host-error-info-error-text)
(defgeneric host-error-info-host-api-type (instance)(:documentation "the host API which returned the error code"))
(defgeneric host-error-info-error-code (instance)(:documentation "the error code returned"))
(defgeneric host-error-info-error-text (instance)(:documentation "a textual description of the error if available, otherwise a zero-length string"))
(defctype p-host-error-info :pointer "Pointer to p-host-error-info")
(define-foreign-type p-host-error-info ()
()
(:actual-type :pointer)
(:simple-parser p-host-error-info))
(defmethod translate-from-foreign (value (type p-host-error-info))
(unless (null-pointer-p value)
(make-instance 'host-error-info :pointer value)))
(defcfun ("Pa_GetLastHostErrorInfo" %get-last-host-error-info) p-host-error-info)
(defun get-last-host-error-info ()
"Return information about the last host error encountered. The error information returned by @fun{get-last-host-error-info} will never be modified asynchronously by errors occurring in other PortAudio owned threads.
This function is provided as a last resort, primarily to enhance debugging by providing clients with access to all available error information.
@begin{return}
A structure constraining information about the host error. The values in this structure will only be valid if a PortAudio function has previously raise the unanticipated-host-error error code.
@end{return}
"
(%get-last-host-error-info))
(defcfun ("Pa_GetVersion" %get-version) :int)
(defun get-version ()
"Retrieve the release number of the currently running PortAudio build, eg 1900."
(%get-version))
(export 'get-version)
(defcfun ("Pa_GetVersionText" %get-version-text) :string)
(defun get-version-text ()
"Retrieve a textual description of the current PortAudio build,
eg \"PortAudio V19-devel 13 October 2002\"."
(%get-version-text))
(export 'get-version-text)
(defcfun ("Pa_GetDeviceCount" %get-device-count) device-index)
(defun get-device-count ()
"Retrieve the number of available devices.The number of available devices may be zero.
@begin{return}
A non-negative value indicating the number of available devices or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}"
(raise-if-error (%get-device-count)))
(export 'get-device-count)
(defcfun ("Pa_GetDefaultOutputDevice" %get-default-output-device) device-index)
(defun get-default-output-device ()
"Retrieve the index of the default output device. The result can be used in the outputDevice parameter to @fun{open-stream}.
@b{Note}
On the PC, the user can specify a default device by setting an environment variable. For example, to use device #1.
@pre{set PA_RECOMMENDED_OUTPUT_DEVICE=1}
The user should first determine the available device ids by using @code{(@fun{print-devices})}.
@begin{return}
The default output device index for the default host API, or raise no-device if no default output device is available or an error was encountered.
@end{return}
"
(raise-if-error (%get-default-output-device)))
(export 'get-default-output-device)
(defcstruct device-info
(struct-version :int)
(name :string)
(host-api host-api-index)
(max-input-channels :int)
(max-output-channels :int)
(default-low-input-latency pa-time)
(default-low-output-latency pa-time)
(default-high-input-latency pa-time)
(default-high-output-latency pa-time)
(default-sample-rate :double))
(defclass device-info nil
((struct-version :reader device-info-struct-version
:documentation "Structure version.")
(name :reader device-info-name :documentation "Device name.")
(host-api :reader device-info-host-api :documentation "note this is a host API index, not a type id.")
(max-input-channels :reader device-info-max-input-channels :documentation "")
(max-output-channels :reader device-info-max-output-channels)
(default-low-input-latency :reader
device-info-default-low-input-latency
:documentation "Default latency values for interactive performance.")
(default-low-output-latency :reader
device-info-default-low-output-latency
:documentation "")
(default-high-input-latency :reader device-info-default-high-input-latency
:documentation "Default latency values for robust non-interactive applications (eg. playing sound files).")p
(default-high-output-latency :reader
device-info-default-high-output-latency
:documentation "")
(default-sample-rate :reader device-info-default-sample-rate :documentation "Sample rate"))
(:documentation "A structure providing information and capabilities of PortAudio devices. Devices may support input, output or both input and output."))
(defmethod initialize-instance :after
((inst device-info) &key pointer)
(with-foreign-slots ((struct-version name host-api max-input-channels
max-output-channels default-low-input-latency
default-low-output-latency default-high-input-latency
default-high-output-latency default-sample-rate)
pointer device-info)
(setf (slot-value inst 'struct-version) struct-version)
(setf (slot-value inst 'name) name)
(setf (slot-value inst 'host-api) host-api)
(setf (slot-value inst 'max-input-channels) max-input-channels)
(setf (slot-value inst 'max-output-channels) max-output-channels)
(setf (slot-value inst 'default-low-input-latency)
default-low-input-latency)
(setf (slot-value inst 'default-low-output-latency)
default-low-output-latency)
(setf (slot-value inst 'default-high-input-latency)
default-high-input-latency)
(setf (slot-value inst 'default-high-output-latency)
default-high-output-latency)
(setf (slot-value inst 'default-sample-rate) default-sample-rate)))
(export 'device-info)
(export 'device-info-name)
(export 'device-info-host-api)
(export 'device-info-max-input-channels)
(export 'device-info-max-output-channels)
(export 'device-info-default-low-input-latency)
(export 'device-info-default-high-input-latency)
(export 'device-info-default-low-output-latency)
(export 'device-info-default-high-output-latency)
(export 'device-info-default-sample-rate)
(defgeneric device-info-name (instance) (:documentation "device name"))
(defgeneric device-info-host-api (instance) (:documentation "note this is a host API index, not a type id."))
(defgeneric device-info-max-input-channels (instance) (:documentation "maximum number of input channels"))
(defgeneric device-info-max-output-channels (instance) (:documentation "maximum number of output channels"))
(defgeneric device-info-default-low-input-latency (instance) (:documentation "Default latency values for interactive performance."))
(defgeneric device-info-default-high-input-latency (instance) (:documentation "Default latency values for robust non-interactive applications (eg. playing sound files)."))
(defgeneric device-info-default-low-output-latency (instance) (:documentation ""))
(defgeneric device-info-default-high-output-latency (instance) (:documentation ""))
(defgeneric device-info-default-sample-rate (instance) (:documentation "Sample rate"))
(defctype p-device-info :pointer "Pointer to device-info")
(define-foreign-type p-device-info ()
()
(:actual-type :pointer)
(:simple-parser p-device-info))
Define a method that converts C to Lisp .
;; Translate a PaDeviceInfo* returned from C into a DEVICE-INFO instance;
;; a NULL pointer (device index out of range) becomes NIL.
(defmethod translate-from-foreign (value (type p-device-info))
(unless (null-pointer-p value)
(make-instance 'device-info :pointer value)))
;; Raw binding: Pa_GetDeviceInfo(PaDeviceIndex) -> const PaDeviceInfo*.
(defcfun ("Pa_GetDeviceInfo" %get-device-info) p-device-info
(device device-index))
(defun get-device-info (device)
"Retrieve @class{device-info} structure containing information about the specified device.
@begin{return}
A object of @class{device-info}. If the device parameter is out of range the function returns NIL.
@end{return}
@arg[device]{A valid device index in the range 0 to @code{(- (@fun{get-device-count}) 1)}}
"
(%get-device-info device))
(export 'get-device-info)
;; C-side layout of PaStreamParameters.
(defcstruct stream-parameters
(device device-index)
(channel-count :int)
(sample-format sample-format)
(suggested-latency pa-time)
(host-api-specific-stream-info (:pointer :void)))
;; Lisp-side mirror of the struct above.  It deliberately omits the
;; host-api-specific-stream-info field (the to-foreign translator always
;; writes a null pointer there).  NOTE(review): the class shares its name
;; with the DEFCSTRUCT -- confirm this dual naming is accepted by the CFFI
;; version in use.
(defclass stream-parameters ()
((device :accessor stream-parameters-device
:documentation "A valid device index in the range 0 to (- get-device-count 1) specifying the device to be used. This field must not be set to paNoDevice.")
(channel-count :accessor stream-parameters-channel-count
:documentation "The number of channels of sound to be delivered to the stream callback.")
(sample-format :accessor stream-parameters-sample-format
:documentation "The sample format of the buffer provided to read-stream or write-stream.")
(suggested-latency :accessor stream-parameters-suggested-latency
:documentation "The desired latency in seconds. Where practical, implementations should configure their latency based on these parameters, otherwise they may choose the closest viable latency instead. Unless the suggested latency is greater than the absolute upper limit for the device implementations should round the suggestedLatency up to the next practical value - ie to provide an equal or higher latency than suggestedLatency wherever possible."))
(:documentation "Parameters for one direction (input or output) of a stream."))
(export 'stream-parameters)
(export 'stream-parameters-device)
(export 'stream-parameters-channel-count)
(export 'stream-parameters-sample-format)
(export 'stream-parameters-suggested-latency)
(defgeneric stream-parameters-device (instance) (:documentation "A valid device index in the range 0 to @code{(- (@fun{get-device-count}) 1)} specifying the device to be used. This field must not be set to paNoDevice."))
(defgeneric stream-parameters-channel-count (instance) (:documentation "The number of channels of sound to be delivered to the stream callback."))
(defgeneric stream-parameters-sample-format (instance) (:documentation "The sample format of the buffer provided to read-stream or write-stream."))
(defgeneric stream-parameters-suggested-latency (instance) (:documentation "The desired latency in seconds. Where practical, implementations should configure their latency based on these parameters, otherwise they may choose the closest viable latency instead. Unless the suggested latency is greater than the absolute upper limit for the device implementations should round the suggestedLatency up to the next practical value - ie to provide an equal or higher latency than suggestedLatency wherever possible."))
;; Public constructor -- all slots start unbound; callers fill them through
;; the setf-able accessors before passing the object to open-stream.
(defun make-stream-parameters ()
"Make stream-parameters object"
(make-instance 'stream-parameters))
(export 'make-stream-parameters)
;; Translated pointer type: PaStreamParameters* <-> STREAM-PARAMETERS object.
(defctype p-stream-parameters :pointer "Pointer to stream parameters")
(define-foreign-type p-stream-parameters ()
()
(:actual-type :pointer)
(:simple-parser p-stream-parameters))
;; C -> Lisp: copy the four public fields out of the foreign struct.
(defmethod translate-from-foreign (value (type p-stream-parameters))
(unless (null-pointer-p value)
(let ((stream-parameters (make-instance 'stream-parameters)))
(setf (slot-value stream-parameters 'device) (foreign-slot-value value 'stream-parameters 'device)
(slot-value stream-parameters 'channel-count) (foreign-slot-value value 'stream-parameters 'channel-count)
;; The C field is a bitfield; CAR assumes exactly one sample-format
;; flag is set -- TODO confirm PortAudio never combines flags here.
(slot-value stream-parameters 'sample-format) (car
(foreign-bitfield-symbols
'sample-format
(foreign-slot-value value 'stream-parameters 'sample-format)))
(slot-value stream-parameters 'suggested-latency) (foreign-slot-value value 'stream-parameters 'suggested-latency))
stream-parameters)))
;; Lisp -> C: allocate a foreign PaStreamParameters and fill it from the
;; object's slots; NIL (used for "no direction") becomes a null pointer.
;; The allocation is released by FREE-TRANSLATED-OBJECT below after the
;; foreign call returns.
(defmethod translate-to-foreign (value (type p-stream-parameters))
(if value
(let ((parameters (foreign-alloc 'stream-parameters)))
(setf (foreign-slot-value parameters 'stream-parameters 'device) (slot-value value 'device)
(foreign-slot-value parameters 'stream-parameters 'channel-count) (slot-value value 'channel-count)
;; Keyword -> C bitfield value.
(foreign-slot-value parameters 'stream-parameters 'sample-format) (foreign-bitfield-value
'sample-format
(list (slot-value value 'sample-format)))
(foreign-slot-value parameters 'stream-parameters 'suggested-latency) (slot-value value 'suggested-latency)
;; Host-API-specific extensions are not supported by this binding.
(foreign-slot-value parameters 'stream-parameters 'host-api-specific-stream-info) (null-pointer))
parameters)
(null-pointer)))
;; Free the struct allocated by TRANSLATE-TO-FOREIGN (no-op for the
;; null-pointer case).
(defmethod free-translated-object (value (type p-stream-parameters) param)
(declare (ignore param))
(unless (null-pointer-p value)
(foreign-free value)))
;; Raw binding: Pa_IsFormatSupported(in, out, sampleRate) -> PaError.
(defcfun ("Pa_IsFormatSupported" %is-format-supported) pa-error
(input-parameters p-stream-parameters)
(output-parameters p-stream-parameters)
(sample-rate :double))
;; NOTE(review): unlike the other public wrappers in this file there is no
;; (export 'is-format-supported) -- confirm whether the symbol is exported
;; elsewhere (e.g. in the package definition) or the omission is a bug.
(defun is-format-supported (input-parameters output-parameters sample-rate)
"Determine whether it would be possible to open a stream with the specified parameters.
@arg[input-parameters]{A structure that describes the input parameters used to open a stream. The suggested-latency slot is ignored. See @class{stream-parameters} for a description of these parameters. input-parameters must be NIL for output-only streams.}
@arg[output-parameters]{A structure that describes the output parameters used to open a stream. The suggested-latency field is ignored. See @class{stream-parameters} for a description of these parameters. output-parameters must be NIL for input-only streams.}
@arg[sample-rate]{The required sampleRate. For full-duplex streams it is the sample rate for both input and output.}
@begin{return}
Returns 0 if the format is supported, and raises an error indicating why the format is not supported otherwise. The constant @variable{+format-is-supported+} is provided to compare with the return value for success.
@end{return}
"
(raise-if-error (%is-format-supported input-parameters output-parameters sample-rate)))
;; Raw binding: Pa_HostApiTypeIdToHostApiIndex(PaHostApiTypeId) -> index.
(defcfun ("Pa_HostApiTypeIdToHostApiIndex" %host-api-type-id-to-host-api-index) host-api-index
(type host-api-type-id))
(defun host-api-type-id-to-host-api-index (type)
"Convert a static host API unique identifier, into a runtime host API index.
@arg[type]{A unique host API identifier belonging to the PaHostApiTypeId enumeration.}
@begin{return}
A valid host-api-index ranging from 0 to @code{(- (@fun{get-host-api-count}) 1)} or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
The host-api-not-found error indicates that the host API specified by the type parameter is not available.
"
(raise-if-error (%host-api-type-id-to-host-api-index type)))
(export 'host-api-type-id-to-host-api-index)
;; Raw binding: Pa_GetDefaultInputDevice() -> PaDeviceIndex.
(defcfun ("Pa_GetDefaultInputDevice" %get-default-input-device) device-index)
(defun get-default-input-device ()
"Retrieve the index of the default input device. The result can be used in the inputDevice parameter to @fun{open-stream}.
@begin{return}
The default input device index for the default host API, or raise no-device if no default input device is available or an error was encountered.
@end{return}"
(raise-if-error (%get-default-input-device)))
(export 'get-default-input-device)
;; C-side layout of PaHostApiInfo.
(defcstruct host-api-info
(struct-version :int)
(type host-api-type-id)
(name :string)
(device-count :int)
(default-input-device device-index)
(default-output-device device-index))
;; Lisp-side mirror, populated from a foreign pointer by the
;; INITIALIZE-INSTANCE :after method below.
(defclass host-api-info nil
((struct-version :reader host-api-info-struct-version :documentation "Struct version.")
(type :reader host-api-info-type :documentation "The well known unique identifier of this host API.")
(name :reader host-api-info-name :documentation "A textual description of the host API for display on user interfaces.")
(device-count :reader host-api-info-device-count :documentation "The number of devices belonging to this host API. This field may be used in conjunction with host-api-device-index-to-device-index to enumerate all devices for this host API." )
(default-input-device :reader host-api-info-default-input-device
:documentation "The default input device for this host API. The value will be a device index ranging from 0 to (- (get-device-count) 1), or no-device if no default input device is available.")
(default-output-device :reader
host-api-info-default-output-device
:documentation "The default output device for this host API. The value will be a device index ranging from 0 to (- (get-device-count) 1), or paNoDevice if no default output device is available."))
(:documentation "A structure containing information about a particular host API."))
;; Populate a HOST-API-INFO instance from the foreign PaHostApiInfo struct
;; at POINTER, copying every field in one SETF form.
(defmethod initialize-instance :after ((instance host-api-info) &key pointer)
  (with-foreign-slots ((struct-version type name device-count
                        default-input-device default-output-device)
                       pointer host-api-info)
    (setf (slot-value instance 'struct-version) struct-version
          (slot-value instance 'type) type
          (slot-value instance 'name) name
          (slot-value instance 'device-count) device-count
          (slot-value instance 'default-input-device) default-input-device
          (slot-value instance 'default-output-device) default-output-device)))
;;; Exports and documented generic functions for the HOST-API-INFO readers.
(export 'host-api-info)
(export 'host-api-info-type)
(export 'host-api-info-name)
(export 'host-api-info-device-count)
(export 'host-api-info-default-input-device)
(export 'host-api-info-default-output-device)
(defgeneric host-api-info-type (instance) (:documentation "The well known unique identifier of this host API."))
(defgeneric host-api-info-name (instance) (:documentation "A textual description of the host API for display on user interfaces."))
(defgeneric host-api-info-device-count (instance) (:documentation "The number of devices belonging to this host API. This field may be used in conjunction with host-api-device-index-to-device-index to enumerate all devices for this host API."))
(defgeneric host-api-info-default-input-device (instance) (:documentation "The default input device for this host API. The value will be a device index ranging from 0 to (- (get-device-count) 1), or no-device if no default input device is available."))
(defgeneric host-api-info-default-output-device (instance) (:documentation "The default output device for this host API. The value will be a device index ranging from 0 to (- (get-device-count) 1), or paNoDevice if no default output device is available."))
;; Foreign pointer type for PaHostApiInfo*; a NULL result becomes NIL,
;; otherwise a populated HOST-API-INFO instance.
(defctype p-host-api-info :pointer)
(define-foreign-type p-host-api-info ()
()
(:actual-type :pointer)
(:simple-parser p-host-api-info))
(defmethod translate-from-foreign (value (type p-host-api-info))
(unless (null-pointer-p value)
(make-instance 'host-api-info :pointer value)))
;; Raw binding: Pa_GetHostApiInfo(PaHostApiIndex) -> const PaHostApiInfo*.
(defcfun ("Pa_GetHostApiInfo" %get-host-api-info) p-host-api-info
(host-api host-api-index))
(defun get-host-api-info (host-api)
"Retrieve a pointer to a structure containing information about a specific host Api.
@begin[host-api]{arg}
A valid host API index ranging from 0 to @code{(- (@fun{get-host-api-count}) 1)}
@end{arg}
@begin{return}
An object of @class{host-api-info} describing a specific host API. If the hostApi parameter is out of range or an error is encountered, the function returns NIL.
@end{return}
"
(%get-host-api-info host-api))
(export 'get-host-api-info)
;; Raw binding: Pa_Initialize() -> PaError.
(defcfun ("Pa_Initialize" %initialize) pa-error)
(defun initialize ()
"Library initialization function - call this before using PortAudio. This function initializes internal data structures and prepares underlying host APIs for use. With the exception of @fun{get-version}, @fun{get-version-text}, and @fun{get-error-text}, this function MUST be called before using any other PortAudio API functions.
If initialize is called multiple times, each successful call must be matched with a corresponding call to @fun{terminate}. Pairs of calls to initialize/@fun{terminate} may overlap, and are not required to be fully nested.
Note that if initialize raises an error, @fun{terminate} should NOT be called.
@result{NIL if successful, otherwise raises an error indicating the cause of failure.}
"
(raise-if-error (%initialize)))
(export 'initialize)
;; Raw binding: Pa_Terminate() -> PaError.
(defcfun ("Pa_Terminate" %terminate) pa-error)
(defun terminate ()
"
Library termination function - call this when finished using PortAudio. This function deallocates all resources allocated by PortAudio since it was initialized by a call to @fun{initialize}. In cases where @fun{initialize} has been called multiple times, each call must be matched with a corresponding call to terminate. The final matching call to terminate will automatically close any PortAudio streams that are still open.
terminate MUST be called before exiting a program which uses PortAudio. Failure to do so may result in serious resource leaks, such as audio devices not being available until the next reboot.
@begin{return}
NIL if successful, otherwise raises an error indicating the cause of failure.
@end{return}
"
(raise-if-error (%terminate)))
(export 'terminate)
;; Raw binding: Pa_GetDefaultHostApi() -> PaHostApiIndex.
(defcfun ("Pa_GetDefaultHostApi" %get-default-host-api) host-api-index)
(defun get-default-host-api ()
"Retrieve the index of the default host API. The default host API will be the lowest common denominator host API on
the current platform and is unlikely to provide the best performance.
@begin{return}
A non-negative value ranging from 0 to @code{(- (@fun{get-host-api-count}) 1)} indicating the default host API index or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
"
(raise-if-error (%get-default-host-api)))
(export 'get-default-host-api)
;; Raw binding: Pa_OpenDefaultStream.  The stream handle is returned through
;; the first (pointer-to-pointer) argument.
(defcfun ("Pa_OpenDefaultStream" %open-default-stream) pa-error
(pa-stream :pointer)
(num-input-channels :int)
(num-output-channels :int)
(sample-format :unsigned-long)
(sample-rate :double)
(frames-per-buffer :unsigned-long)
(stream-callback :pointer)
(user-data (:pointer :void)))
(defun open-default-stream (num-input num-output sample-format sample-rate frames-per-buffer)
"
A simplified version of @fun{open-stream} that opens the default input and/or output devices.
@arg[num-input]{The number of channels of sound that will be returned by @fun{read-stream}. It can range from 1 to the value of max-input-channels in the @class{device-info} class for the default input device. If 0 the stream is opened as an output-only stream.}
@arg[num-output]{The number of channels of sound to be passed to @fun{write-stream}. It can range from 1 to the value of max-output-channels in the @class{device-info} class for the default output device. If 0 the stream is opened as an input-only stream.}
@arg[sample-format]{The sample format of both the input and output buffers passed to and from @fun{read-stream} and @fun{write-stream}. sample-format may be any of the formats described by the sample-format enumeration.}
@arg[sample-rate]{Same as @fun{open-stream} parameter of the same name.}
@arg[frames-per-buffer]{Same as @fun{open-stream} parameter of the same name.}
@result{As for @fun{open-stream}}
"
(with-foreign-object (handle :pointer)
;; No callback / user data: the stream is used in blocking read/write mode.
(raise-if-error (%open-default-stream handle num-input num-output
(foreign-bitfield-value 'sample-format (list sample-format))
sample-rate frames-per-buffer
(null-pointer) (null-pointer)))
;; NOTE(review): :input-sample-format / :output-sample-format are recorded
;; even for the direction with 0 channels (unlike OPEN-STREAM, which leaves
;; them NIL) -- READ-STREAM/WRITE-STREAM guard on the channel count too, so
;; this appears harmless; confirm.
(make-instance 'pa-stream :handle (mem-ref handle :pointer)
:input-sample-format sample-format
:input-channels (if (= num-input 0) nil num-input)
:output-sample-format sample-format
:output-channels (if (= num-output 0) nil num-output)
:frames-per-buffer frames-per-buffer)))
(export 'open-default-stream)
;; (defctype stream-flags :unsigned-long)
;; Raw binding: Pa_OpenStream.  The stream handle is returned through the
;; first (pointer-to-pointer) argument; input/output parameter structs are
;; marshalled by the p-stream-parameters translators above.
(defcfun ("Pa_OpenStream" %open-stream) pa-error (pa-stream :pointer)
(input-parameters p-stream-parameters)
(output-parameters p-stream-parameters)
(sample-rate :double)
(frames-per-buffer :unsigned-long)
(stream-flags stream-flags)
(stream-callback :pointer)
(user-data (:pointer :void)))
(defun open-stream (input-parameters output-parameters sample-rate frames-per-buffer stream-flags)
"
Opens a stream for either input, output or both.
@arg[input-parameters]{A structure that describes the input parameters used by the opened stream. See @class{stream-parameters} for a description of these parameters. input-parameters must be NIL for output-only streams.}
@arg[output-parameters]{A structure that describes the output parameters used by the opened stream. See @class{stream-parameters} for a description of these parameters. output-parameters must be NIL for input-only streams.}
@arg[sample-rate]{The desired sample-rate. For full-duplex streams it is the sample rate for both input and output}
@arg[frames-per-buffer]{Preferred block granularity for a blocking read/write stream.}
@arg[stream-flags]{List of flags which modify the behavior of the streaming process. Some flags may only be relevant to certain buffer formats.}
@begin{return}
Upon success open-stream returns object of @class{pa-stream} class. The stream is inactive (stopped). If a call to open-stream fails, an error code is raised and the value of stream is NIL.
@end{return}
"
(with-foreign-object (handle :pointer)
;; No callback / user data: blocking read/write mode only.
(raise-if-error (%open-stream handle input-parameters output-parameters sample-rate frames-per-buffer
stream-flags
(null-pointer) (null-pointer)))
;; Per-direction format/channel info is cached on the wrapper object so
;; READ-STREAM/WRITE-STREAM can size their buffers; NIL marks an unused
;; direction.
(make-instance 'pa-stream :handle (mem-ref handle :pointer)
:input-sample-format
(when input-parameters (stream-parameters-sample-format input-parameters))
:input-channels
(when input-parameters (stream-parameters-channel-count input-parameters))
:output-sample-format
(when output-parameters (stream-parameters-sample-format output-parameters))
:output-channels
(when output-parameters (stream-parameters-channel-count output-parameters))
:frames-per-buffer frames-per-buffer)))
(export 'open-stream)
(defcfun ("Pa_CloseStream" %close-stream) pa-error (pa-stream p-pa-stream))
(defun close-stream (pa-stream)
"Closes an audio stream. If the audio stream is active it discards any pending buffers as if @fun{abort-stream} had been called."
;; The wrapper's handle is nulled even when %close-stream signals, so a
;; failed close can never leave a dangling foreign pointer behind.
(unwind-protect
(raise-if-error (%close-stream pa-stream))
(setf (pa-stream-handle pa-stream) (null-pointer))))
(export 'close-stream)
(defcfun ("Pa_IsStreamStopped" %is-stream-stopped) pa-error (pa-stream p-pa-stream))
(defun is-stream-stopped (pa-stream)
"
Determine whether the stream is stopped. A stream is considered to be stopped prior to a successful call to @fun{start-stream} and after a successful call to @fun{stop-stream} or @fun{abort-stream}.
@begin{return}
Returns one (1) when the stream is stopped, zero (0) when the stream is running or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
"
(raise-if-error (%is-stream-stopped pa-stream)))
(export 'is-stream-stopped)
(defcfun ("Pa_GetStreamTime" %get-stream-time) pa-time (pa-stream p-pa-stream))
(defun get-stream-time (pa-stream)
"Returns valid time values for the entire life of the stream, from when the stream is opened until it is closed. Starting and stopping the stream does not affect the passage of time returned by get-stream-time.
This time may be used for synchronizing other events to the audio stream, for example synchronizing audio to MIDI.
@result{The stream's current time in seconds, or 0 if an error occurred.}"
;; Intentionally not wrapped in raise-if-error: Pa_GetStreamTime reports
;; failure by returning 0, not via a PaError code.
(%get-stream-time pa-stream))
(export 'get-stream-time)
(defcfun ("Pa_GetHostApiCount" %get-host-api-count) host-api-index)
(defun get-host-api-count ()
"Retrieve the number of available host APIs. Even if a host API is available it may have no devices available.
@begin{return}
A non-negative value indicating the number of available host APIs or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}"
(raise-if-error (%get-host-api-count)))
(export 'get-host-api-count)
(defcfun ("Pa_IsStreamActive" %is-stream-active) :int (pa-stream p-pa-stream))
(defun is-stream-active (pa-stream)
"
Determine whether the stream is active. A stream is active after a successful call to @fun{start-stream}, until it becomes inactive either as a result of a call to @fun{stop-stream} or @fun{abort-stream}. In the latter case, the stream is considered inactive after the last buffer has finished playing.
@begin{return}
Returns one (1) when the stream is active (ie playing or recording audio), zero (0) when not playing or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
"
(raise-if-error (%is-stream-active pa-stream)))
(export 'is-stream-active)
(defcfun ("Pa_GetStreamWriteAvailable" %get-stream-write-available) :long
(pa-stream p-pa-stream))
(defun get-stream-write-available (pa-stream)
"Retrieve the number of frames that can be written to the stream without waiting.
@begin{return}
A non-negative value representing the maximum number of frames that can be written to the stream without blocking or busy waiting or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
"
(raise-if-error (%get-stream-write-available pa-stream)))
(export 'get-stream-write-available)
(defcfun ("Pa_GetStreamReadAvailable" %get-stream-read-available) :long
(pa-stream p-pa-stream))
(defun get-stream-read-available (pa-stream)
"Retrieve the number of frames that can be read from the stream without waiting.
@begin{return}
Returns a non-negative value representing the maximum number of frames that can be read from the stream without blocking or busy waiting or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
"
(raise-if-error (%get-stream-read-available pa-stream)))
(export 'get-stream-read-available)
;; Raw binding: Pa_ReadStream(stream, buffer, frames) -> PaError.  Blocks
;; until FRAMES frames have been copied into BUFFER.
(defcfun ("Pa_ReadStream" %read-stream) pa-error (pa-stream p-pa-stream)
(buffer (:pointer :void))
(frames :unsigned-long))
(defun read-stream (pa-stream)
  "
Read samples from an input stream. The function doesn't return until the entire buffer has been filled - this may involve waiting for the operating system to supply the data. Size of returned array equal to @code{(* frames-per-buffer channel-count)}.
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
@begin{return}
On success array of data will be returned, or NIL if the stream has no input side.
@end{return}
"
  (let ((sample-format (pa-stream-input-sample-format pa-stream))
        (frames (pa-stream-frames-per-buffer pa-stream))
        (channel-count (pa-stream-input-channels pa-stream)))
    ;; Allocate the result only after confirming the stream actually has an
    ;; input side.  The original bound ARRAY in the LET* before this guard,
    ;; so an output-only stream (NIL channel count) signaled a type error in
    ;; MAKE-ARRAY instead of returning NIL as the guard intends.
    (when (and sample-format channel-count)
      (let ((array (make-array (* channel-count frames)
                               :element-type (cffi-type-to-lisp sample-format))))
        ;; :copy-out copies the foreign buffer back into ARRAY after the body.
        ;; NOTE(review): the %read-stream status (e.g. input overflow) is
        ;; discarded here, as in the original -- confirm callers don't need it.
        (with-pointer-to-array (array pointer sample-format (* channel-count frames) :copy-out)
          (%read-stream pa-stream pointer frames))
        array))))
(export 'read-stream)
(defun separate-array-to-channels (pa-stream array)
  "Separate flat array
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
@arg[array]{Flat array, that is received from @fun{read-stream}.}
@begin{return}
(channelcount)-dimensional array of single-floats
@end{return}
"
  ;; De-interleave ARRAY (frame-major, channels interleaved within a frame)
  ;; into a (channel-count x frames) array using the stream's input layout.
  (let* ((sample-format (pa-stream-input-sample-format pa-stream))
         (frames (pa-stream-frames-per-buffer pa-stream))
         (channel-count (pa-stream-input-channels pa-stream))
         (separated (make-array (list channel-count frames)
                                :element-type (cffi-type-to-lisp sample-format))))
    (loop :for frame :below frames
          :do (loop :for channel :below channel-count
                    :do (setf (aref separated channel frame)
                              (aref array (+ (* frame channel-count) channel)))))
    separated))
(export 'separate-array-to-channels)
(defun merge-channels-into-array (pa-stream channels)
  "Merge subarrays of (channelcount)-dimensional array to flat array.
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
@arg[channels]{Vector of vectors of floats, that contains data for all sound channels.}
@begin{return}
Vector of data, that can be used with @fun{write-stream}.
@end{return}
"
  ;; The merged buffer is destined for WRITE-STREAM (see docstring), so it
  ;; must be sized and typed from the stream's OUTPUT side.  The original
  ;; read the *input* sample format and channel count, which broke (or
  ;; silently mis-sized the buffer) for output-only and asymmetric
  ;; full-duplex streams.
  (let* ((sample-format (pa-stream-output-sample-format pa-stream))
         (frames (pa-stream-frames-per-buffer pa-stream))
         (channel-count (pa-stream-output-channels pa-stream))
         (merged (make-array (* frames channel-count)
                             :element-type (cffi-type-to-lisp sample-format))))
    ;; Interleave: frame-major order, one sample per channel within a frame.
    (dotimes (frame frames merged)
      (dotimes (channel channel-count)
        (setf (aref merged (+ (* frame channel-count) channel))
              (aref channels channel frame))))))
(export 'merge-channels-into-array)
(defun read-stream-into-array (pa-stream array)
"Read samples from an input stream. The function doesn't return until the entire buffer has been filled - this may involve waiting for the operating system to supply the data.
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
@arg[array]{Simple array with has element-type equal to sample-format from @fun{open-stream}. Size of array equal to @code{(* frames-per-buffer channel-count)}.}
@begin{return}
On success NIL will be returned, or :input-overflowed if input data was discarded by PortAudio after the previous call and before this call.
@end{return}"
;; In-place variant of READ-STREAM: the caller supplies the buffer, and
;; :copy-out writes the captured samples back into it.  Returns NIL (from
;; WHEN) if the stream has no input side.
(let* ((sample-format (pa-stream-input-sample-format pa-stream))
(frames (pa-stream-frames-per-buffer pa-stream))
(channel-count (pa-stream-input-channels pa-stream)))
(when (and sample-format
channel-count)
(with-pointer-to-array (array pointer sample-format (* channel-count frames) :copy-out)
(%read-stream pa-stream pointer frames)))))
(export 'read-stream-into-array)
;; Raw binding: Pa_WriteStream(stream, buffer, frames) -> PaError.  Blocks
;; until FRAMES frames have been consumed from BUFFER.
(defcfun ("Pa_WriteStream" %write-stream) pa-error (pa-stream p-pa-stream)
(buffer :pointer)
(frames :unsigned-long))
(defun write-stream (pa-stream buffer)
"Write samples to an output stream. This function doesn't return until the entire buffer has been consumed - this may involve waiting for the operating system to consume the data. Size of buffer should be equal to @code{(* frames-per-buffer channel-count)}.
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
@arg[buffer]{A array of sample frames. The buffer contains samples in the format specified by the @code{(stream-parameters-sample-format output-parameters)} field used to open the stream, and the number of channels specified by @code{(stream-parameters-num-channels output-parameters)}.}
@begin{return}
On success NIL will be returned, or :output-underflowed if additional output data was inserted after the previous call and before this call.
@end{return}
"
;; Mirror of READ-STREAM-INTO-ARRAY for output: :copy-in copies BUFFER into
;; the foreign memory before the blocking write.  Returns NIL (from WHEN)
;; if the stream has no output side.
(let* ((sample-format (pa-stream-output-sample-format pa-stream))
(frames (pa-stream-frames-per-buffer pa-stream))
(channel-count (pa-stream-output-channels pa-stream)))
(when (and sample-format
channel-count)
(with-pointer-to-array (buffer pointer sample-format (* channel-count frames) :copy-in)
(%write-stream pa-stream pointer frames)))))
(export 'write-stream)
(defcfun ("Pa_StartStream" %start-stream) pa-error (pa-stream p-pa-stream))
(defun start-stream (pa-stream)
"Commences audio processing."
(raise-if-error (%start-stream pa-stream)))
(export 'start-stream)
(defcfun ("Pa_AbortStream" %abort-stream) pa-error (pa-stream p-pa-stream))
(defun abort-stream (pa-stream)
"Terminates audio processing immediately without waiting for pending buffers to complete."
(raise-if-error (%abort-stream pa-stream)))
(export 'abort-stream)
(defcfun ("Pa_StopStream" %stop-stream) pa-error (pa-stream p-pa-stream))
(defun stop-stream (pa-stream)
"Terminates audio processing. It waits until all pending audio buffers have been played before it returns."
(raise-if-error (%stop-stream pa-stream)))
;; Every other public wrapper in this file pairs its DEFUN with an EXPORT;
;; stop-stream was the lone omission, leaving the symbol internal and
;; unusable from client packages (the WITH-DEFAULT-AUDIO-STREAM macro only
;; worked because it expands in this package).
(export 'stop-stream)
;; C-side layout of PaStreamInfo.
(defcstruct stream-info
(struct-version :int)
(input-latency pa-time)
(output-latency pa-time)
(sample-rate :double))
;; Lisp-side mirror, populated from a foreign pointer by the
;; INITIALIZE-INSTANCE :after method below.
(defclass stream-info nil
((struct-version :reader stream-info-struct-version :documentation "Struct version")
(input-latency :reader stream-info-input-latency :documentation "The input latency of the stream in seconds. This value provides the most accurate estimate of input latency available to the implementation. It may differ significantly from the suggestedLatency value passed to open-stream. The value of this field will be zero (0.) for output-only streams.")
(output-latency :reader stream-info-output-latency :documentation "The output latency of the stream in seconds. This value provides the most accurate estimate of output latency available to the implementation. It may differ significantly from the suggestedLatency value passed to open-stream. The value of this field will be zero (0.) for input-only streams.")
(sample-rate :reader stream-info-sample-rate :documentation "The sample rate of the stream in Hertz (samples per second). In cases where the hardware sample rate is inaccurate and PortAudio is aware of it, the value of this field may be different from the sample-rate parameter passed to open-stream. If information about the actual hardware sample rate is not available, this field will have the same value as the sample-rate parameter passed to open-stream."))
(:documentation "A structure containing unchanging information about an open stream."))
;; Populate a STREAM-INFO instance from the foreign PaStreamInfo struct at
;; POINTER, copying every field in one SETF form.
(defmethod initialize-instance :after ((instance stream-info) &key pointer)
  (with-foreign-slots ((struct-version input-latency output-latency
                        sample-rate)
                       pointer stream-info)
    (setf (slot-value instance 'struct-version) struct-version
          (slot-value instance 'input-latency) input-latency
          (slot-value instance 'output-latency) output-latency
          (slot-value instance 'sample-rate) sample-rate)))
;;; Exports and documented generic functions for the STREAM-INFO readers.
(export 'stream-info)
(export 'stream-info-input-latency)
(export 'stream-info-output-latency)
(export 'stream-info-sample-rate)
(defgeneric stream-info-input-latency (instance) (:documentation "The input latency of the stream in seconds. This value provides the most accurate estimate of input latency available to the implementation. It may differ significantly from the suggestedLatency value passed to open-stream. The value of this field will be zero (0.) for output-only streams."))
(defgeneric stream-info-output-latency (instance) (:documentation "The output latency of the stream in seconds. This value provides the most accurate estimate of output latency available to the implementation. It may differ significantly from the suggestedLatency value passed to open-stream. The value of this field will be zero (0.) for input-only streams."))
(defgeneric stream-info-sample-rate (instance) (:documentation "The sample rate of the stream in Hertz (samples per second). In cases where the hardware sample rate is inaccurate and PortAudio is aware of it, the value of this field may be different from the sample-rate parameter passed to open-stream. If information about the actual hardware sample rate is not available, this field will have the same value as the sample-rate parameter passed to open-stream."))
;; Foreign pointer type for PaStreamInfo*; NULL becomes NIL, otherwise a
;; populated STREAM-INFO instance.
(defctype p-stream-info :pointer "Pointer to stream-info")
(define-foreign-type p-stream-info ()
()
(:actual-type :pointer)
(:simple-parser p-stream-info))
(defmethod translate-from-foreign (value (type p-stream-info))
(unless (null-pointer-p value)
(make-instance 'stream-info :pointer value)))
(defcfun ("Pa_GetStreamInfo" %get-stream-info) p-stream-info (pa-stream p-pa-stream))
(defun get-stream-info (pa-stream)
"Retrieve a object of class @class{stream-info} containing information about the specified stream.
@begin{return}
A object of @class{stream-info} structure. If the stream parameter invalid, or an error is encountered, the function returns NIL.
@end{return}
@arg[pa-stream]{A object of stream previously created with @fun{open-stream}.}
"
(%get-stream-info pa-stream))
(export 'get-stream-info)
(defcfun ("Pa_GetSampleSize" %get-sample-size) :int (format sample-format))
(defun get-sample-size (format)
"Retrieve the size of a given sample format in bytes.
@result{The size in bytes of a single sample in the specified format, or paSampleFormatNotSupported if the format is not supported.}"
;; FORMAT arrives as a keyword; convert it to the C bitfield value first.
(raise-if-error (%get-sample-size (foreign-bitfield-value 'sample-format (list format)))))
(export 'get-sample-size)
(defcfun ("Pa_HostApiDeviceIndexToDeviceIndex" %host-api-device-index-to-device-index) device-index
(host-api host-api-index)
(host-api-device-index :int))
(defun host-api-device-index-to-device-index (host-api host-api-device-index)
"Convert a host-API-specific device index to standard PortAudio device index. This function may be used in conjunction with the deviceCount field of PaHostApiInfo to enumerate all devices for the specified host API.
@arg[host-api]{A valid host API index ranging from 0 to @code{(- (@fun{get-host-api-count}) 1)}}
@arg[host-api-device-index]{A valid per-host device index in the range 0 to @code{(- (@fun{host-api-info-device-count} (@fun{get-host-api-info} host-api)) 1)}}
@begin{return}
A non-negative index ranging from 0 to @code{(- (@fun{get-device-count}) 1)} or, raises an error if PortAudio is not initialized or an error is encountered.
@end{return}
A invalid-host-api error indicates that the host API index specified by the hostApi parameter is out of range.
A invalid-device error indicates that the host-api-device-index parameter is out of range.
"
(raise-if-error (%host-api-device-index-to-device-index host-api host-api-device-index)))
(export 'host-api-device-index-to-device-index)
(defcfun ("Pa_Sleep" %pa-sleep) :void (msec :long))
(defun pa-sleep (msec)
"
Put the caller to sleep for at least 'msec' milliseconds. This function is provided only as a convenience for authors of portable code (such as the tests and examples in the PortAudio distribution.)
The function may sleep longer than requested so don't rely on this for accurate musical timing.
"
(%pa-sleep msec))
(export 'pa-sleep)
(defmacro with-audio (&body body)
"Execute body in PortAudio initialize/terminate environment."
`(progn
(initialize)
(unwind-protect
(progn
,@body)
(progn
(terminate)))))
(export 'with-audio)
Generated by me , based on " < > " version
(defmacro with-default-audio-stream ((var num-input num-output &key (sample-format :float) (sample-rate 44100d0) (frames-per-buffer 1024)) &body body)
"Execute body with opened and started stream VAR and shut down
the stream after it is done. It is required use these macro in with-audio or initialize/terminate environment."
`(let ((,var nil))
(unwind-protect
(progn
(setf ,var (open-default-stream ,num-input ,num-output ,sample-format ,sample-rate ,frames-per-buffer))
(start-stream ,var)
,@body)
(progn
(when ,var
(stop-stream ,var)
(close-stream ,var))))))
(export 'with-default-audio-stream)
(defmacro with-audio-stream ((var input-parameters output-parameters &key (sample-rate 44100d0) (frames-per-buffer 1024) (stream-flags (list :no-flag))) &body body)
"Execute body with opened and started stream VAR and shut down
the stream after it is done. It is required use these macro in with-audio or initialize/terminate environment."
`(let ((,var nil))
(unwind-protect
(progn
(setf ,var (open-stream ,input-parameters ,output-parameters ,sample-rate ,frames-per-buffer ',stream-flags))
(start-stream ,var)
,@body)
(progn
(when ,var
(stop-stream ,var)
(close-stream ,var))))))
(export 'with-audio-stream)
(defun print-supported-standard-sample-rates (input-params output-params)
(mapcar
(lambda (rate)
(ignore-errors
(unless (is-format-supported input-params output-params rate)
(format t "~8,2F, " rate))))
'(8000d0 9600d0 11025d0 12000d0 16000d0 22050d0 24000d0 32000d0
44100d0 48000d0 88200d0 96000d0 192000d0))
(format t "~%"))
(defun print-devices ()
"List available sound devices, including device information."
(with-audio
(format t "PortAudio version number = ~D~%PortAudio version text = ~A~%"
(get-version) (get-version-text))
(let ((num-devices (get-device-count))
(input-parameters (make-stream-parameters))
(output-parameters (make-stream-parameters)))
(format t "Number of devices = ~D~%" num-devices)
(dotimes (i num-devices)
(let ((device-info (get-device-info i))
(default-displayed nil))
(format t "---------------------- device ~D~%" i)
(if (= i (get-default-input-device))
(progn
(format t "[ Default Input")
(setf default-displayed t))
(when (= i (host-api-info-default-input-device (get-host-api-info (device-info-host-api device-info))))
(format t "[ Default ~A Input" (get-host-api-info (device-info-host-api device-info)))
(setf default-displayed t)))
(if (= i (get-default-output-device))
(progn
(format t "~:[[~;,~]" default-displayed)
(format t " Default Output")
(setf default-displayed t))
(when (= i (host-api-info-default-output-device (get-host-api-info (device-info-host-api device-info))))
(format t "~:[[~;,~]" default-displayed)
(format t "[ Default ~A Output" (get-host-api-info (device-info-host-api device-info)))
(setf default-displayed t)))
(when default-displayed
(format t " ]~%"))
(format t "Name = ~A~%" (device-info-name device-info))
(format t "Host API = ~A~%" (host-api-info-name (get-host-api-info (device-info-host-api device-info))))
(format t "Max inputs = ~D" (device-info-max-input-channels device-info))
(format t ", Max outputs = ~D~%" (device-info-max-output-channels device-info))
(format t "Default low input latency = ~8,4F~%" (device-info-default-low-input-latency device-info))
(format t "Default low output latency = ~8,4F~%" (device-info-default-low-output-latency device-info))
(format t "Default high input latency = ~8,4F~%" (device-info-default-high-input-latency device-info))
(format t "Default high output latency = ~8,4F~%" (device-info-default-high-output-latency device-info))
(format t "Default sample rate = ~8,4F~%" (device-info-default-sample-rate device-info))
(setf
(stream-parameters-device input-parameters) i
(stream-parameters-channel-count input-parameters) (device-info-max-input-channels device-info)
(stream-parameters-sample-format input-parameters) :float
(stream-parameters-suggested-latency input-parameters) 0d0
(stream-parameters-device output-parameters) i
(stream-parameters-channel-count output-parameters) (device-info-max-output-channels device-info)
(stream-parameters-sample-format output-parameters) :float
(stream-parameters-suggested-latency output-parameters) 0d0)
(when (< 0 (stream-parameters-channel-count input-parameters))
(format t "Supported standard sample rates~% for half-duplex float 32 bit ~D channel input = ~%"
(stream-parameters-channel-count input-parameters))
(print-supported-standard-sample-rates input-parameters nil))
(when (< 0 (stream-parameters-channel-count output-parameters))
(format t "Supported standard sample rates~% for half-duplex float 32 bit ~D channel output = ~%"
(stream-parameters-channel-count output-parameters))
(print-supported-standard-sample-rates nil output-parameters))
(when (and (< 0 (stream-parameters-channel-count input-parameters)) (< 0 (stream-parameters-channel-count output-parameters)))
(format t "Supported standard sample rates~% for full-duplex float 32 bit ~D channel input, ~D channel output = ~%"
(stream-parameters-channel-count input-parameters)
(stream-parameters-channel-count output-parameters))
(print-supported-standard-sample-rates input-parameters output-parameters))))
(format t "--------------------~%"))))
(export 'print-devices)
|
f273ab9bf767bd7dc461be5e57c2bfedb8ccc52990991edac57d78a267f1bcf6 | andrewmcloud/consimilo | random_seed_test.clj | (ns consimilo.random-seed-test
(:refer-clojure :exclude [rand rand-bigint])
(:require [clojure.test :refer :all]
[consimilo.random-seed :refer :all])
(:import (clojure.lang BigInt)))
(defn- get-seeded-random
"Generates a seeded random number for testing"
[seed max-range]
(set-random-seed! seed)
(rand-bigint max-range))
(defn- get-seeded-random-vec
"Generates a seeded random vector for testing"
[seed n max-range]
(set-random-seed! seed)
(rand-vec n max-range))
(deftest rand-bigint-test
(testing "ensure seeded rand-bigint returns the same value after seeding"
(is (= (get-seeded-random 3 1024)
(get-seeded-random 3 1024))))
(testing "testing rand-bigint returns type bigint"
(is (= true (instance? BigInt (rand-bigint 5))))))
(deftest rand-vec-test
(testing "seeded rand-vec returns the same random collection each time"
(is (= (doall (get-seeded-random-vec 1 10 4096))
(doall (get-seeded-random-vec 1 10 4096)))))
(testing "rand-vec returns a collection of type bigint"
(is (= true (instance? BigInt (first (rand-vec 4 1024)))))
(is (= true (instance? BigInt (last (rand-vec 4 1024))))))) | null | https://raw.githubusercontent.com/andrewmcloud/consimilo/db96c1695248c3486e1d23de5589b39f0e0bd49f/test/consimilo/random_seed_test.clj | clojure | (ns consimilo.random-seed-test
(:refer-clojure :exclude [rand rand-bigint])
(:require [clojure.test :refer :all]
[consimilo.random-seed :refer :all])
(:import (clojure.lang BigInt)))
(defn- get-seeded-random
"Generates a seeded random number for testing"
[seed max-range]
(set-random-seed! seed)
(rand-bigint max-range))
(defn- get-seeded-random-vec
"Generates a seeded random vector for testing"
[seed n max-range]
(set-random-seed! seed)
(rand-vec n max-range))
(deftest rand-bigint-test
(testing "ensure seeded rand-bigint returns the same value after seeding"
(is (= (get-seeded-random 3 1024)
(get-seeded-random 3 1024))))
(testing "testing rand-bigint returns type bigint"
(is (= true (instance? BigInt (rand-bigint 5))))))
(deftest rand-vec-test
(testing "seeded rand-vec returns the same random collection each time"
(is (= (doall (get-seeded-random-vec 1 10 4096))
(doall (get-seeded-random-vec 1 10 4096)))))
(testing "rand-vec returns a collection of type bigint"
(is (= true (instance? BigInt (first (rand-vec 4 1024)))))
(is (= true (instance? BigInt (last (rand-vec 4 1024))))))) |
|
0b9541e9c6ba7b6f21ac789e2a0418ad6818924e05bb47c78b72378a92ae51a7 | tisnik/clojure-examples | project.clj | ;
( C ) Copyright 2018 , 2020
;
; All rights reserved. This program and the accompanying materials
; are made available under the terms of the Eclipse Public License v1.0
; which accompanies this distribution, and is available at
-v10.html
;
; Contributors:
;
(defproject spec-demo4 "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]]
:plugins [[lein-codox "0.10.7"]
[test2junit "1.1.0"]
[ lein - test - out " 0.3.1 " ]
[lein-cloverage "1.0.7-SNAPSHOT"]
[lein-kibit "0.1.8"]
[lein-clean-m2 "0.1.2"]
[lein-project-edn "0.3.0"]
[lein-marginalia "0.9.1"]]
:main ^:skip-aot spec-demo4.core
:target-path "target/%s"
:profiles {:uberjar {:aot :all}})
| null | https://raw.githubusercontent.com/tisnik/clojure-examples/1350d206ec6702248a560a6c9569d2128de2da60/spec-demo4/project.clj | clojure |
All rights reserved. This program and the accompanying materials
are made available under the terms of the Eclipse Public License v1.0
which accompanies this distribution, and is available at
Contributors:
| ( C ) Copyright 2018 , 2020
-v10.html
(defproject spec-demo4 "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]]
:plugins [[lein-codox "0.10.7"]
[test2junit "1.1.0"]
[ lein - test - out " 0.3.1 " ]
[lein-cloverage "1.0.7-SNAPSHOT"]
[lein-kibit "0.1.8"]
[lein-clean-m2 "0.1.2"]
[lein-project-edn "0.3.0"]
[lein-marginalia "0.9.1"]]
:main ^:skip-aot spec-demo4.core
:target-path "target/%s"
:profiles {:uberjar {:aot :all}})
|
0e6c374592a13a63a445839d3d6067c67f8252ada1cfcd63c02e29ac99f33fd4 | shayne-fletcher/zen | curves.mli | (** Curves *)
include Curves_sig.S
| null | https://raw.githubusercontent.com/shayne-fletcher/zen/10a1d0b9bf261bb133918dd62fb1593c3d4d21cb/ocaml/curve/curves.mli | ocaml | * Curves | include Curves_sig.S
|
f635453f8a661c622db98b2b9fffa8fe50bb563676397608960ef003e54a6f02 | nyu-acsys/drift | mc91_95.ml |
Res : OCaml - > 91
imprecision : Oct : 91 < = v < = 101
Liquid Haskell : false by property { -@ m : : x : Int - > { b : Int | ( b = = 91 ) } @- }
Res: OCaml -> 91
imprecision: Oct: 91 <= v <= 101
Liquid Haskell: false by property {-@ m :: x:Int -> {b:Int | (b == 91)} @-}
*)
let rec m x =
if x > 100
then x - 10
else m (m (x + 11))
let main (n:int(*-:{v:Int | true}*)) =
if n <= 95
then assert (m n = 91)
else () | null | https://raw.githubusercontent.com/nyu-acsys/drift/51a3160d74b761626180da4f7dd0bb950cfe40c0/tests/benchmarks/r_type/first/mc91_95.ml | ocaml | -:{v:Int | true} |
Res : OCaml - > 91
imprecision : Oct : 91 < = v < = 101
Liquid Haskell : false by property { -@ m : : x : Int - > { b : Int | ( b = = 91 ) } @- }
Res: OCaml -> 91
imprecision: Oct: 91 <= v <= 101
Liquid Haskell: false by property {-@ m :: x:Int -> {b:Int | (b == 91)} @-}
*)
let rec m x =
if x > 100
then x - 10
else m (m (x + 11))
if n <= 95
then assert (m n = 91)
else () |
1579aab359f317b3b615d473f56c3205894bcb897e5c391b37c8c551f9eea455 | mirage/mirage-nat | util.ml | (* Some convenience functions to hide how ugly some packet operations are.
This module, and others like it, might inspire some API changes in the next
major release of mirage-tcpip. *)
let get_dst (`IPv4 (packet, _) : Nat_packet.t) = packet.Ipv4_packet.dst
let try_decompose cache ~now f packet =
let cache', r = Nat_packet.of_ipv4_packet !cache ~now:(now ()) packet in
cache := cache';
match r with
| Error e ->
Logs.err (fun m -> m "of_ipv4_packet error %a" Nat_packet.pp_error e);
Lwt.return_unit
| Ok Some packet -> f packet
| Ok None -> Lwt.return_unit
| null | https://raw.githubusercontent.com/mirage/mirage-nat/1bed6c49b7554123a99e5e56d1f0967707424bb2/example/util.ml | ocaml | Some convenience functions to hide how ugly some packet operations are.
This module, and others like it, might inspire some API changes in the next
major release of mirage-tcpip. |
let get_dst (`IPv4 (packet, _) : Nat_packet.t) = packet.Ipv4_packet.dst
let try_decompose cache ~now f packet =
let cache', r = Nat_packet.of_ipv4_packet !cache ~now:(now ()) packet in
cache := cache';
match r with
| Error e ->
Logs.err (fun m -> m "of_ipv4_packet error %a" Nat_packet.pp_error e);
Lwt.return_unit
| Ok Some packet -> f packet
| Ok None -> Lwt.return_unit
|
a91a3c990a897f3c06382e98b6c6f9d4ee6ca70d43ab23e8412784767e3149dd | khotyn/4clojure-answer | 18-sequences-filter.clj | '(6 7)
| null | https://raw.githubusercontent.com/khotyn/4clojure-answer/3de82d732faedceafac4f1585a72d0712fe5d3c6/18-sequences-filter.clj | clojure | '(6 7)
|
|
1c23f5cce936ab30c3437a9c3125e1e3c64e314cd4064900d1e3a1135a579b82 | fargonauts/copycat-lisp | run.lisp | ;---------------------------------------------
; RUN: This file contains functions for running the program.
;---------------------------------------------
(in-package 'user)
(defun run-ccat ()
; Runs the main loop of the program: choose a codelet, run it. Every
% time - step - time - steps , update everything in the program ( all the
; values on the workspace, all the activations in the slipnet, etc.).
; This is the main loop of the program.
(loop until *quit-program* do
If the program has run for % time - step - length% steps , then
; update everything.
(if* (= (mod *codelet-count* %time-step-length%) 0)
then (update-everything))
; If there are no codelets left in the coderack, then
; clamp the initially clamped nodes and post the
initial set of codelets .
(if* (send *coderack* :empty?)
then (loop for node in *initially-clamped-slipnodes* do
(send node :set-clamp t))
(post-initial-codelets))
; Step the program.
(step-ccat)
(if* %verbose% then (break))
; If the rule has been translated, then build the answer.
(if* *translated-rule*
then (answer-builder)
(if* *found-answer*
then (update-everything)
(if* %verbose%
then (format t "My answer is ~a.~&"
(send *answer-string* :pstring)))
; Collect some statistics.
(loop for group in (send *workspace* :group-list)
when (= (send group :length) 1) do
(incf *single-letter-group-at-end-count*))
(if* (= (send plato-length :activation) 100)
then (setq *length-relevant-at-end* t))
(setq *quit-program* t)))))
;---------------------------------------------
(defun update-everything (&aux new-structure-list unclamp-probability)
; Updates all the values in the program (workspace values, slipnet
; activations, etc.).
(setq *updating-everything* t)
; Update values for structures and objects.
(send-method-to-list (send *workspace* :structure-list)
:update-strength-values)
(send-method-to-list (send *workspace* :object-list) :update-object-values)
(send *initial-string* :update-relative-importances)
(send *target-string* :update-relative-importances)
(send *initial-string* :update-intra-string-unhappiness)
(send *target-string* :update-intra-string-unhappiness)
; If %initial-slipnode-clamp-time% cycles have gone by, then unclamp
; the initially-clamped slipnodes.
(if* (= *codelet-count*
(* %initial-slipnode-clamp-time% %time-step-length%))
then (loop for node in *initially-clamped-slipnodes*
do (send node :set-clamp nil)))
; If the program is dealing with a snag, then see if any new structures
; have been made. If so, see if snag condition should be ended.
(if* (and *snag-object* *snag-condition*)
then (setq new-structure-list
(loop for structure in (send *workspace* :structure-list)
when (and (not (typep structure 'bond))
(not (send *workspace*
:structure-in-snag-structure-list?
structure)))
collect structure))
(setq unclamp-probability
(if* (null new-structure-list)
then 0
else (/ (list-max (send-method-to-list new-structure-list
:total-strength))
100)))
(if* (eq (flip-coin unclamp-probability) 'heads)
then (setq *snag-condition* nil)
(setq *clamp-temperature* nil)
(loop for d in (send *snag-object* :descriptions) do
(send (send d :descriptor) :set-clamp nil))
(send *snag-object* :set-clamp-salience? nil)))
(if* (> *codelet-count* 0)
then (update-temperature)
(get-bottom-up-codelets)
(get-top-down-codelets)
(update-slipnet))
(if* *codelets-to-post*
then (send *coderack* :post-codelet-list *codelets-to-post*))
(setq *codelets-to-post* nil)
(if* (> *codelet-count* 0)
then (if* %description-graphics% then (display-descriptions))
(if* %temperature-graphics% then (update-temperature-display))
(if* %coderack-graphics% then (update-coderack-display))
(if* %minimal-coderack-graphics%
then (update-minimal-coderack-display))
(if* %slipnet-graphics% then (update-slipnet-display))
; Update concept-mapping and length displays.
(if* %workspace-graphics%
then (loop for c in (send *workspace* :correspondence-list) do
(send c :erase-concept-mappings)
(send c :draw-concept-mappings))
(loop for group
in (send *workspace* :group-list) do
(if* (send (send group :graphics-obj) :graphics-length)
then (send group :erase-length)
(send group :draw-length)))
(if* (= (mod *codelet-count* 100) 0) then (redraw-graphics))))
(setq *updating-everything* nil))
;---------------------------------------------
(defun step-ccat (&aux codelet)
Runs one step of the program : chooses and runs a codelet from the
; coderack.
(setq codelet (send *coderack* :choose))
(if* %slightly-verbose% then (send codelet :print))
(send codelet :run)
(setq *codelet-count* (1+ *codelet-count*))
(if* %verbose% then (format t "~%"))
(if* *break-on-each-step*
then (if* %minimal-coderack-graphics%
then (update-minimal-coderack-display))
(break)))
;---------------------------------------------
(defun deal-with-snag ()
; If there is a snag in building the answer, then delete all
; proposed structures, empty the coderack, raise and clamp the
; temperature, and activate and clamp the activation of all the descriptions
; of the object causing the snag.
(incf *snag-count*)
(setq *last-snag-time* *codelet-count*)
; Save the current set of structures.
(setq *snag-structure-list* (send *workspace* :structure-list))
Erase proposed structures . ( Their builder codelets will
; disappear when the coderack is initialized.)
(if* %workspace-graphics%
then (loop for b in (send *workspace* :proposed-bond-list)
do (send (send b :string)
:delete-proposed-bond b)
(if* (not (send (send b :string)
:bond-present? b))
then (send b :erase-spline)))
(loop for g in (send *workspace* :proposed-group-list)
do (send (send g :string)
:delete-proposed-group g)
(if* (not (send (send g :string)
:group-present? g))
then (send g :erase-rectangle)))
(loop for c in (send *workspace*
:proposed-correspondence-list)
do (send *workspace*
:delete-proposed-correspondence c)
(if* (not (send *workspace*
:correspondence-present? c))
then (send c :erase-line))))
(send *coderack* :empty)
(if* %coderack-graphics% then (update-coderack-display))
(if* (and %workspace-graphics% *translated-rule*)
then (send *translated-rule* :erase %translated-rule-mode%))
(setq *translated-rule* nil)
(setq *answer-string* nil)
(setq *snag-condition* t)
(setq *temperature* 100)
(setq *clamp-temperature* t)
(loop for d in (send *snag-object* :descriptions) do
(send (send d :descriptor) :set-clamp t))
(send *snag-object* :set-clamp-salience? t)
(send *coderack* :empty)
(post-initial-codelets)
(update-everything))
;---------------------------------------------
(defun unanswer ()
; Deletes the answer so that the program can continue running.
(setq *translated-rule* nil
*answer-string* nil
*found-answer* nil
*quit-program* nil)
(display-ccat)
(run-ccat))
;---------------------------------------------
| null | https://raw.githubusercontent.com/fargonauts/copycat-lisp/a7733ca91a1169d0b5c7af8a46c4804e764d3175/run.lisp | lisp | ---------------------------------------------
RUN: This file contains functions for running the program.
---------------------------------------------
Runs the main loop of the program: choose a codelet, run it. Every
values on the workspace, all the activations in the slipnet, etc.).
This is the main loop of the program.
update everything.
If there are no codelets left in the coderack, then
clamp the initially clamped nodes and post the
Step the program.
If the rule has been translated, then build the answer.
Collect some statistics.
---------------------------------------------
Updates all the values in the program (workspace values, slipnet
activations, etc.).
Update values for structures and objects.
If %initial-slipnode-clamp-time% cycles have gone by, then unclamp
the initially-clamped slipnodes.
If the program is dealing with a snag, then see if any new structures
have been made. If so, see if snag condition should be ended.
Update concept-mapping and length displays.
---------------------------------------------
coderack.
---------------------------------------------
If there is a snag in building the answer, then delete all
proposed structures, empty the coderack, raise and clamp the
temperature, and activate and clamp the activation of all the descriptions
of the object causing the snag.
Save the current set of structures.
disappear when the coderack is initialized.)
---------------------------------------------
Deletes the answer so that the program can continue running.
--------------------------------------------- |
(in-package 'user)
(defun run-ccat ()
% time - step - time - steps , update everything in the program ( all the
(loop until *quit-program* do
If the program has run for % time - step - length% steps , then
(if* (= (mod *codelet-count* %time-step-length%) 0)
then (update-everything))
initial set of codelets .
(if* (send *coderack* :empty?)
then (loop for node in *initially-clamped-slipnodes* do
(send node :set-clamp t))
(post-initial-codelets))
(step-ccat)
(if* %verbose% then (break))
(if* *translated-rule*
then (answer-builder)
(if* *found-answer*
then (update-everything)
(if* %verbose%
then (format t "My answer is ~a.~&"
(send *answer-string* :pstring)))
(loop for group in (send *workspace* :group-list)
when (= (send group :length) 1) do
(incf *single-letter-group-at-end-count*))
(if* (= (send plato-length :activation) 100)
then (setq *length-relevant-at-end* t))
(setq *quit-program* t)))))
(defun update-everything (&aux new-structure-list unclamp-probability)
(setq *updating-everything* t)
(send-method-to-list (send *workspace* :structure-list)
:update-strength-values)
(send-method-to-list (send *workspace* :object-list) :update-object-values)
(send *initial-string* :update-relative-importances)
(send *target-string* :update-relative-importances)
(send *initial-string* :update-intra-string-unhappiness)
(send *target-string* :update-intra-string-unhappiness)
(if* (= *codelet-count*
(* %initial-slipnode-clamp-time% %time-step-length%))
then (loop for node in *initially-clamped-slipnodes*
do (send node :set-clamp nil)))
(if* (and *snag-object* *snag-condition*)
then (setq new-structure-list
(loop for structure in (send *workspace* :structure-list)
when (and (not (typep structure 'bond))
(not (send *workspace*
:structure-in-snag-structure-list?
structure)))
collect structure))
(setq unclamp-probability
(if* (null new-structure-list)
then 0
else (/ (list-max (send-method-to-list new-structure-list
:total-strength))
100)))
(if* (eq (flip-coin unclamp-probability) 'heads)
then (setq *snag-condition* nil)
(setq *clamp-temperature* nil)
(loop for d in (send *snag-object* :descriptions) do
(send (send d :descriptor) :set-clamp nil))
(send *snag-object* :set-clamp-salience? nil)))
(if* (> *codelet-count* 0)
then (update-temperature)
(get-bottom-up-codelets)
(get-top-down-codelets)
(update-slipnet))
(if* *codelets-to-post*
then (send *coderack* :post-codelet-list *codelets-to-post*))
(setq *codelets-to-post* nil)
(if* (> *codelet-count* 0)
then (if* %description-graphics% then (display-descriptions))
(if* %temperature-graphics% then (update-temperature-display))
(if* %coderack-graphics% then (update-coderack-display))
(if* %minimal-coderack-graphics%
then (update-minimal-coderack-display))
(if* %slipnet-graphics% then (update-slipnet-display))
(if* %workspace-graphics%
then (loop for c in (send *workspace* :correspondence-list) do
(send c :erase-concept-mappings)
(send c :draw-concept-mappings))
(loop for group
in (send *workspace* :group-list) do
(if* (send (send group :graphics-obj) :graphics-length)
then (send group :erase-length)
(send group :draw-length)))
(if* (= (mod *codelet-count* 100) 0) then (redraw-graphics))))
(setq *updating-everything* nil))
(defun step-ccat (&aux codelet)
Runs one step of the program : chooses and runs a codelet from the
(setq codelet (send *coderack* :choose))
(if* %slightly-verbose% then (send codelet :print))
(send codelet :run)
(setq *codelet-count* (1+ *codelet-count*))
(if* %verbose% then (format t "~%"))
(if* *break-on-each-step*
then (if* %minimal-coderack-graphics%
then (update-minimal-coderack-display))
(break)))
(defun deal-with-snag ()
(incf *snag-count*)
(setq *last-snag-time* *codelet-count*)
(setq *snag-structure-list* (send *workspace* :structure-list))
Erase proposed structures . ( Their builder codelets will
(if* %workspace-graphics%
then (loop for b in (send *workspace* :proposed-bond-list)
do (send (send b :string)
:delete-proposed-bond b)
(if* (not (send (send b :string)
:bond-present? b))
then (send b :erase-spline)))
(loop for g in (send *workspace* :proposed-group-list)
do (send (send g :string)
:delete-proposed-group g)
(if* (not (send (send g :string)
:group-present? g))
then (send g :erase-rectangle)))
(loop for c in (send *workspace*
:proposed-correspondence-list)
do (send *workspace*
:delete-proposed-correspondence c)
(if* (not (send *workspace*
:correspondence-present? c))
then (send c :erase-line))))
(send *coderack* :empty)
(if* %coderack-graphics% then (update-coderack-display))
(if* (and %workspace-graphics% *translated-rule*)
then (send *translated-rule* :erase %translated-rule-mode%))
(setq *translated-rule* nil)
(setq *answer-string* nil)
(setq *snag-condition* t)
(setq *temperature* 100)
(setq *clamp-temperature* t)
(loop for d in (send *snag-object* :descriptions) do
(send (send d :descriptor) :set-clamp t))
(send *snag-object* :set-clamp-salience? t)
(send *coderack* :empty)
(post-initial-codelets)
(update-everything))
(defun unanswer ()
(setq *translated-rule* nil
*answer-string* nil
*found-answer* nil
*quit-program* nil)
(display-ccat)
(run-ccat))
|
173a54a216c5aebae336d2138c4d3a3078aaac6669eddc406029f958ac0b310f | fulcrologic/fulcro-native-template | util.cljc | (ns app.util
#?(:cljs (:refer-clojure :exclude [uuid]))
(:require
[com.fulcrologic.guardrails.core :refer [>defn =>]]
[clojure.spec.alpha :as s]))
(>defn uuid
"Generate a UUID the same way via clj/cljs. Without args gives random UUID. With args, builds UUID based on input (which
is useful in tests)."
#?(:clj ([] [=> uuid?] (java.util.UUID/randomUUID)))
#?(:clj ([int-or-str]
[(s/or :i int? :s string?) => uuid?]
(if (int? int-or-str)
(java.util.UUID/fromString
(format "ffffffff-ffff-ffff-ffff-%012d" int-or-str))
(java.util.UUID/fromString int-or-str))))
#?(:cljs ([] [=> uuid?] (random-uuid)))
#?(:cljs ([& args]
[(s/* any?) => uuid?]
(cljs.core/uuid (apply str args)))))
| null | https://raw.githubusercontent.com/fulcrologic/fulcro-native-template/2c0a331c722af7df15a7e80c88d224db00f234b3/src/main/app/util.cljc | clojure | (ns app.util
#?(:cljs (:refer-clojure :exclude [uuid]))
(:require
[com.fulcrologic.guardrails.core :refer [>defn =>]]
[clojure.spec.alpha :as s]))
(>defn uuid
"Generate a UUID the same way via clj/cljs. Without args gives random UUID. With args, builds UUID based on input (which
is useful in tests)."
#?(:clj ([] [=> uuid?] (java.util.UUID/randomUUID)))
#?(:clj ([int-or-str]
[(s/or :i int? :s string?) => uuid?]
(if (int? int-or-str)
(java.util.UUID/fromString
(format "ffffffff-ffff-ffff-ffff-%012d" int-or-str))
(java.util.UUID/fromString int-or-str))))
#?(:cljs ([] [=> uuid?] (random-uuid)))
#?(:cljs ([& args]
[(s/* any?) => uuid?]
(cljs.core/uuid (apply str args)))))
|
|
9515c0661d3b523ba4d44ebf7955dc0fbf9999168a50692b1d82cd65363e0262 | rtoy/cmucl | window.lisp | -*- Log : hemlock.log ; Package : Hemlock - Internals -*-
;;;
;;; **********************************************************************
This code was written as part of the CMU Common Lisp project at
Carnegie Mellon University , and has been placed in the public domain .
;;;
(ext:file-comment
"$Header: src/hemlock/window.lisp $")
;;;
;;; **********************************************************************
;;;
;;; This file contains implementation independent code which implements
the Hemlock window primitives and most of the code which defines
;;; other aspects of the interface to redisplay.
;;;
Written by and .
;;;
(in-package "HEMLOCK-INTERNALS")
(export '(current-window window-buffer modeline-field-width
modeline-field-function make-modeline-field update-modeline-fields
update-modeline-field modeline-field-name modeline-field
editor-finish-output *window-list*))
;;;; CURRENT-WINDOW.
(defvar *current-window* nil "The current window object.")
(defvar *window-list* () "A list of all window objects.")
(declaim (inline current-window))
(defun current-window ()
"Return the current window. The current window is specially treated by
redisplay in several ways, the most important of which is that is does
recentering, ensuring that the Buffer-Point of the current window's
Window-Buffer is always displayed. This may be set with Setf."
*current-window*)
(defun %set-current-window (new-window)
(invoke-hook ed::set-window-hook new-window)
(move-mark (window-point *current-window*)
(buffer-point (window-buffer *current-window*)))
(move-mark (buffer-point (window-buffer new-window))
(window-point new-window))
(setq *current-window* new-window))
;;;; Window structure support.
(defun %print-hwindow (obj stream depth)
(declare (ignore depth))
(write-string "#<Hemlock Window \"" stream)
(write-string (buffer-name (window-buffer obj)) stream)
(write-string "\">" stream))
(defun window-buffer (window)
"Return the buffer which is displayed in Window."
(window-%buffer window))
(defun %set-window-buffer (window new-buffer)
  "Make WINDOW display NEW-BUFFER, detaching it from its old buffer and
   resetting the window's marks and display image."
  (unless (bufferp new-buffer) (error "~S is not a buffer." new-buffer))
  (unless (windowp window) (error "~S is not a window." window))
  ;; No work when the window already shows NEW-BUFFER.
  (unless (eq new-buffer (window-buffer window))
    (invoke-hook ed::window-buffer-hook window new-buffer)
    ;;
    ;; Move the window's marks to the new start.
    (let ((buffer (window-buffer window)))
      ;; Detach from the old buffer, saving its display start so it can be
      ;; restored if the buffer is shown again.
      (setf (buffer-windows buffer) (delete window (buffer-windows buffer)))
      (move-mark (buffer-display-start buffer) (window-display-start window))
      (push window (buffer-windows new-buffer))
      (move-mark (window-point window) (buffer-point new-buffer))
      (move-mark (window-display-start window) (buffer-display-start new-buffer))
      (move-mark (window-display-end window) (buffer-display-start new-buffer)))
    ;;
    ;; Delete all the dis-lines, and nil out the line and chars so they get
    ;; gc'ed.
    (let ((first (window-first-line window))
          (last (window-last-line window))
          (free (window-spare-lines window)))
      (unless (eq (cdr first) the-sentinel)
        ;; Rotate the active dis-lines onto the free list in one step.
        (shiftf (cdr last) free (cdr first) the-sentinel))
      (dolist (dl free)
        (setf (dis-line-line dl) nil (dis-line-old-chars dl) nil))
      (setf (window-spare-lines window) free))
    ;;
    ;; Set the last line and first&last changed so we know there's nothing there.
    (setf (window-last-line window) the-sentinel
          (window-first-changed window) the-sentinel
          (window-last-changed window) the-sentinel)
    ;;
    ;; Make sure the window gets updated, and set the buffer.
    (setf (window-tick window) -3)
    (setf (window-%buffer window) new-buffer)))
;;; %INIT-REDISPLAY sets up redisplay's internal data structures.  We create
;;; initial windows, setup some hooks to cause modeline recomputation, and call
;;; any device init necessary. This is called from ED.
;;;
(defun %init-redisplay (display)
  "Initialize redisplay's data structures and the device for DISPLAY.
   Called once from ED."
  (%init-screen-manager display)
  ;; Any change to these buffer properties may alter a modeline field, so
  ;; queue a modeline recomputation for the affected buffer.
  (add-hook ed::buffer-major-mode-hook 'queue-buffer-change)
  (add-hook ed::buffer-minor-mode-hook 'queue-buffer-change)
  (add-hook ed::buffer-name-hook 'queue-buffer-change)
  (add-hook ed::buffer-pathname-hook 'queue-buffer-change)
  (add-hook ed::buffer-modified-hook 'queue-buffer-change)
  ;; Changing a window's buffer invalidates all of its modeline fields.
  (add-hook ed::window-buffer-hook 'queue-window-change)
  (let ((device (device-hunk-device (window-hunk (current-window)))))
    (funcall (device-init device) device))
  (center-window *current-window* (current-point)))
;;;; Modelines-field structure support.
(defun print-modeline-field (obj stream ignore)
  "Print a modeline-field object unreadably, showing its name."
  (declare (ignore ignore))
  (format stream "#<Hemlock Modeline-field ~S>" (modeline-field-%name obj)))
(defun print-modeline-field-info (obj stream ignore)
  "Print a modeline-field-info object unreadably, showing its field's name."
  (declare (ignore ignore))
  (format stream "#<Hemlock Modeline-field-info ~S>"
          (modeline-field-%name (ml-field-info-field obj))))
(defvar *modeline-field-names* (make-hash-table))
(defun make-modeline-field (&key name width function)
  "Returns a modeline-field object."
  (unless (or (null width) (and (integerp width) (plusp width)))
    (error "Width must be nil or a positive integer."))
  ;; Redefinition is continuable: choosing CONTINUE installs the new
  ;; definition over the old one.
  (let ((existing (gethash name *modeline-field-names*)))
    (when existing
      (with-simple-restart (continue
                            "Use the new definition for this modeline field.")
        (error "Modeline field ~S already exists." existing))))
  (setf (gethash name *modeline-field-names*)
        (%make-modeline-field name function width)))
(defun modeline-field (name)
"Returns the modeline-field object named name. If none exists, return nil."
(gethash name *modeline-field-names*))
(declaim (inline modeline-field-name modeline-field-width
modeline-field-function))
(defun modeline-field-name (ml-field)
"Returns the name of a modeline field object."
(modeline-field-%name ml-field))
(defun %set-modeline-field-name (ml-field name)
  "Rename ML-FIELD to NAME in the field name table.  Signals an error when a
   field called NAME already exists."
  (check-type ml-field modeline-field)
  (let ((clash (gethash name *modeline-field-names*)))
    (when clash
      (error "Modeline field ~S already exists." clash)))
  ;; Drop the old name, then record the new one in both places.
  (remhash (modeline-field-%name ml-field) *modeline-field-names*)
  (setf (modeline-field-%name ml-field) name)
  (setf (gethash name *modeline-field-names*) ml-field))
(defun modeline-field-width (ml-field)
"Returns the width of a modeline field."
(modeline-field-%width ml-field))
(declaim (special *buffer-list*))
(defun %set-modeline-field-width (ml-field width)
  "Change ML-FIELD's width to WIDTH (nil or a positive integer), refreshing
   every window whose buffer displays this field.  Returns WIDTH."
  (check-type ml-field modeline-field)
  (unless (or (null width) (and (integerp width) (plusp width)))
    (error "Width must be nil or a positive integer."))
  (unless (eql width (modeline-field-%width ml-field))
    (setf (modeline-field-%width ml-field) width)
    ;; A width change shifts later fields, so recompute whole modelines.
    (dolist (buf *buffer-list*)
      (when (buffer-modeline-field-p buf ml-field)
        (dolist (win (buffer-windows buf))
          (update-modeline-fields buf win)))))
  width)
(defun modeline-field-function (ml-field)
"Returns the function of a modeline field object. It returns a string."
(modeline-field-%function ml-field))
(defun %set-modeline-field-function (ml-field function)
  "Change ML-FIELD's function to FUNCTION (a symbol or function), refreshing
   just this field in every window whose buffer displays it.  Returns FUNCTION."
  (check-type ml-field modeline-field)
  (check-type function (or symbol function))
  (setf (modeline-field-%function ml-field) function)
  ;; Only this one field's text changed, so update it in place.
  (dolist (buf *buffer-list*)
    (when (buffer-modeline-field-p buf ml-field)
      (dolist (win (buffer-windows buf))
        (update-modeline-field buf win ml-field))))
  function)
;;;; Modelines maintenance.
;;; Each window stores a modeline-buffer which is a string hunk-width-limit
;;; long. Whenever a field is updated, we must maintain a maximally long
;;; representation of the modeline in case the window is resized. Updating
;;; then first gets the modeline-buffer setup, and second blasts the necessary
;;; portion into the window's modeline-dis-line, setting the dis-line's changed
;;; flag.
;;;
(defun update-modeline-fields (buffer window)
  "Recompute all the fields of buffer's modeline for window, so the next
   redisplay will reflect changes."
  (let ((ml-buffer (window-modeline-buffer window)))
    (declare (simple-string ml-buffer))
    ;; Windows without a modeline have no modeline buffer; do nothing.
    (when ml-buffer
      (let* ((ml-buffer-len
              ;; Blast each field into ml-buffer in order; each call returns
              ;; the field's end, which becomes the next field's start.
              (do ((finfos (buffer-%modeline-fields buffer) (cdr finfos))
                   (start 0 (blt-modeline-field-buffer
                             ml-buffer (car finfos) buffer window start)))
                  ((null finfos) start)))
             (dis-line (window-modeline-dis-line window))
             ;; Only what fits in the window is actually displayed.
             (len (min (window-width window) ml-buffer-len)))
        (replace (the simple-string (dis-line-chars dis-line)) ml-buffer
                 :end1 len :end2 len)
        (setf (window-modeline-buffer-len window) ml-buffer-len)
        (setf (dis-line-length dis-line) len)
        ;; Mark the dis-line changed so redisplay repaints it.
        (setf (dis-line-flags dis-line) changed-bit)))))
;;; UPDATE-MODELINE-FIELD must replace the entire dis-line-chars with ml-buffer
;;; after blt'ing into buffer. Otherwise it has to do all the work
;;; BLT-MODELINE-FIELD-BUFFER to figure out how to adjust dis-line-chars.  It
;;; isn't worth it. Since things could have shifted around, after calling
;;; BLT-MODELINE-FIELD-BUFFER, we get the last field's end to know how long
;;; the buffer is now.
;;;
(defun update-modeline-field (buffer window field)
  "Recompute the field of the buffer's modeline for window, so the next
   redisplay will reflect the change.  Field is either a modeline-field object
   or the name of one for buffer."
  (let ((finfo (internal-buffer-modeline-field-p buffer field)))
    (unless finfo
      (error "~S is not a modeline-field or the name of one for buffer ~S."
             field buffer))
    (let ((ml-buffer (window-modeline-buffer window))
          (dis-line (window-modeline-dis-line window)))
      (declare (simple-string ml-buffer))
      ;; Re-blast just this field; the trailing T asks it to shift the later
      ;; fields if this field's length changed.
      (blt-modeline-field-buffer ml-buffer finfo buffer window
                                 (ml-field-info-start finfo) t)
      ;; Fields may have shifted, so take the new buffer length from the
      ;; last field's end.
      (let* ((ml-buffer-len (ml-field-info-end
                             (car (last (buffer-%modeline-fields buffer)))))
             (dis-len (min (window-width window) ml-buffer-len)))
        (replace (the simple-string (dis-line-chars dis-line)) ml-buffer
                 :end1 dis-len :end2 dis-len)
        (setf (window-modeline-buffer-len window) ml-buffer-len)
        (setf (dis-line-length dis-line) dis-len)
        (setf (dis-line-flags dis-line) changed-bit)))))
(defvar *truncated-field-char* #\!)
;;; BLT-MODELINE-FIELD-BUFFER takes a Hemlock buffer, Hemlock window, the
;;; window's modeline buffer, a modeline-field-info object, a start in the
;;; modeline buffer, and an optional indicating whether a variable width field
;;; should be handled carefully. When the field is fixed-width, this is
;;; simple. When it is variable, we possibly have to shift all the text in the
;;; buffer right or left before storing the new string, updating all the
;;; finfo's after the one we're updating. It is an error for the
;;; modeline-field-function to return anything but a simple-string with
;;; standard-chars. This returns the end of the field blasted into ml-buffer.
;;;
(defun blt-modeline-field-buffer (ml-buffer finfo buffer window start
                                  &optional fix-other-fields-p)
  "Store FINFO's freshly computed field text into ML-BUFFER at START,
   recording the field's new start and end.  Returns the end position."
  (declare (simple-string ml-buffer))
  (let* ((f (ml-field-info-field finfo))
         (width (modeline-field-width f))
         ;; The field function must return a simple-string of standard-chars.
         (string (funcall (modeline-field-function f) buffer window))
         (str-len (length string)))
    (declare (simple-string string))
    (setf (ml-field-info-start finfo) start)
    (setf (ml-field-info-end finfo)
          (cond
           ((not width)
            ;; Variable-width field: it occupies exactly the string's length.
            (let ((end (min (+ start str-len) hunk-width-limit))
                  (last-end (ml-field-info-end finfo)))
              ;; If the field's length changed, shift the following fields.
              (when (and fix-other-fields-p (/= end last-end))
                (blt-ml-field-buffer-fix ml-buffer finfo buffer window
                                         end last-end))
              (replace ml-buffer string :start1 start :end1 end :end2 str-len)
              end))
           ((= str-len width)
            ;; Fixed width, exact fit.
            (let ((end (min (+ start width) hunk-width-limit)))
              (replace ml-buffer string :start1 start :end1 end :end2 width)
              end))
           ((> str-len width)
            ;; Fixed width, string too long: truncate and flag the final
            ;; column with *truncated-field-char*.
            (let* ((end (min (+ start width) hunk-width-limit))
                   (end-1 (1- end)))
              (replace ml-buffer string :start1 start :end1 end-1 :end2 width)
              (setf (schar ml-buffer end-1) *truncated-field-char*)
              end))
           (t
            ;; Fixed width, string too short: pad the remainder with spaces.
            (let ((buf-replace-end (min (+ start str-len) hunk-width-limit))
                  (buf-field-end (min (+ start width) hunk-width-limit)))
              (replace ml-buffer string
                       :start1 start :end1 buf-replace-end :end2 str-len)
              (fill ml-buffer #\space :start buf-replace-end :end buf-field-end)
              buf-field-end))))))
;;; BLT-ML-FIELD-BUFFER-FIX shifts the contents of ml-buffer in the direction
;;; of last-end to end.  finfo is a modeline-field-info structure in buffer's
;;; list of these. If there are none following finfo, then we simply store the
;;; new end of the buffer. After blt'ing the text around, we have to update
;;; all the finfos' starts and ends making sure nobody gets to stick out over
;;; the ml-buffer's end.
;;;
(defun blt-ml-field-buffer-fix (ml-buffer finfo buffer window end last-end)
  "Shift the modeline text that follows FINFO's field from LAST-END to END,
   adjusting the bounds of every later field."
  (declare (simple-string ml-buffer))
  ;; Locate the fields that come after finfo in the buffer's field list.
  (let ((finfos (do ((f (buffer-%modeline-fields buffer) (cdr f)))
                    ((null f) (error "This field must be here."))
                  (if (eq (car f) finfo)
                      (return (cdr f))))))
    (cond
     ((not finfos)
      ;; finfo is the last field: just record the new buffer length.
      (setf (window-modeline-buffer-len window) (min end hunk-width-limit)))
     (t
      (let ((buffer-len (window-modeline-buffer-len window)))
        ;; Slide the trailing text to its new position.
        (replace ml-buffer ml-buffer
                 :start1 end
                 :end1 (min (+ end (- buffer-len last-end)) hunk-width-limit)
                 :start2 last-end :end2 buffer-len)
        ;; Move every later field's bounds by the same delta, clamping so
        ;; nothing sticks out past the modeline buffer's limit.
        (let ((diff (- end last-end)))
          (macrolet ((frob (f)
                       `(setf ,f (min (+ ,f diff) hunk-width-limit))))
            (dolist (f finfos)
              (frob (ml-field-info-start f))
              (frob (ml-field-info-end f)))
            (frob (window-modeline-buffer-len window)))))))))
;;;; Default modeline and update hooks.
(make-modeline-field :name :hemlock-literal :width 8
:function #'(lambda (buffer window)
"Returns \"Hemlock \"."
(declare (ignore buffer window))
"Hemlock "))
(make-modeline-field
:name :package
:function #'(lambda (buffer window)
"Returns the value of buffer's \"Current Package\" followed
by a colon and two spaces, or a string with one space."
(declare (ignore window))
(if (hemlock-bound-p 'ed::current-package :buffer buffer)
(let ((val (variable-value 'ed::current-package
:buffer buffer)))
(if val
(format nil "~A: " val)
" "))
" ")))
(make-modeline-field
:name :modes
:function #'(lambda (buffer window)
"Returns buffer's modes followed by one space."
(declare (ignore window))
(format nil "~A " (buffer-modes buffer))))
(make-modeline-field
:name :modifiedp
:function #'(lambda (buffer window)
"Returns \"* \" if buffer is modified, or the empty string."
(declare (ignore window))
(let ((modifiedp (buffer-modified buffer)))
(if modifiedp
"* "
""))))
(make-modeline-field
:name :buffer-name
:function #'(lambda (buffer window)
"Returns buffer's name followed by a colon and a space if the
name is not derived from the buffer's pathname, or the empty
string."
(declare (ignore window))
(let ((pn (buffer-pathname buffer))
(name (buffer-name buffer)))
(cond ((not pn)
(format nil "~A: " name))
((string/= (ed::pathname-to-buffer-name pn) name)
(format nil "~A: " name))
(t "")))))
;;; MAXIMUM-MODELINE-PATHNAME-LENGTH-HOOK is called whenever "Maximum Modeline
;;; Pathname Length" is set.
;;;
(defun maximum-modeline-pathname-length-hook (name kind where new-value)
  "Variable hook: queue modeline updates when the maximum pathname length
   changes, for one buffer or for all buffers showing the pathname field."
  (declare (ignore name new-value))
  (case kind
    (:buffer
     ;; Buffer-local change: only that buffer's modelines are affected.
     (hi::queue-buffer-change where))
    (t
     ;; Global (or mode) change: refresh every displayed buffer that
     ;; actually shows the :buffer-pathname field.
     (dolist (buffer *buffer-list*)
       (when (and (buffer-modeline-field-p buffer :buffer-pathname)
                  (buffer-windows buffer))
         (hi::queue-buffer-change buffer))))))
(defun buffer-pathname-ml-field-fun (buffer window)
  "Returns the namestring of buffer's pathname if there is one.  When
   \"Maximum Modeline Pathname Length\" is set, and the namestring is too long,
   return a truncated namestring chopping off leading directory specifications."
  (declare (ignore window))
  (let ((pn (buffer-pathname buffer)))
    (if pn
        (let* ((name (namestring pn))
               (length (length name))
               ;; Prefer a buffer local value over the global one.
               ;; Because variables don't work right, blow off looking for
               ;; a value in the buffer's modes.  In the future this will
               ;; be able to get the "current" value as if buffer were current.
               (max (if (hemlock-bound-p 'ed::maximum-modeline-pathname-length
                                         :buffer buffer)
                        (variable-value 'ed::maximum-modeline-pathname-length
                                        :buffer buffer)
                        (variable-value 'ed::maximum-modeline-pathname-length
                                        :global))))
          (declare (simple-string name))
          (if (or (not max) (<= length max))
              name
              ;; Too long: drop leading directories, budgeting 3 extra
              ;; characters for the "..." prefix, and cut at a slash.
              (let* ((extra-chars (+ (- length max) 3))
                     (slash (or (position #\/ name :start extra-chars)
                                ;; If no slash, then file-namestring is very
                                ;; long, and we should include all of it:
                                (position #\/ name :from-end t
                                          :end extra-chars))))
                (if slash
                    (concatenate 'simple-string "..." (subseq name slash))
                    name))))
        "")))
;; Register the :buffer-pathname field, computed by the function above.
(make-modeline-field
 :name :buffer-pathname
 :function 'buffer-pathname-ml-field-fun)
(defvar *default-modeline-fields*
(list (modeline-field :hemlock-literal)
(modeline-field :package)
(modeline-field :modes)
(modeline-field :modifiedp)
(modeline-field :buffer-name)
(modeline-field :buffer-pathname))
"This is the default value for \"Default Modeline Fields\".")
;;; QUEUE-BUFFER-CHANGE is used for various buffer hooks (e.g., mode changes,
;;; name changes, etc.), so it takes some arguments to ignore. These hooks are
;;; invoked at a bad time to update the actual modeline-field, and user's may
;;; have fields that change as a function of the changes this function handles.
;;; This makes his update easier. It doesn't cost much update the entire line
;;; anyway.
;;;
(defun queue-buffer-change (buffer &optional arg2 arg3)
  "Buffer-hook function: queue a one-shot modeline recomputation for BUFFER.
   The extra arguments various buffer hooks pass are ignored."
  (declare (ignore arg2 arg3))
  (push (list #'update-modelines-for-buffer buffer) *things-to-do-once*))
(defun update-modelines-for-buffer (buffer)
  "Recompute the modeline fields of every window showing BUFFER, except for
   the echo area buffer."
  (unless (eq buffer *echo-area-buffer*)
    (dolist (window (buffer-windows buffer))
      (update-modeline-fields buffer window))))
;;; QUEUE-WINDOW-CHANGE is used for the "Window Buffer Hook". We ignore the
;;; argument since this hook function is invoked before any changes are made,
;;; and the changes must be made before the fields can be set according to the
;;; window's buffer's properties. Therefore, we must queue the change to
;;; happen sometime before redisplay but after the change takes effect.
;;;
(defun queue-window-change (window &optional ignored)
  "Window-buffer-hook function: queue a one-shot modeline recomputation for
   WINDOW, deferred until after the buffer change has taken effect."
  (declare (ignore ignored))
  (push (list #'update-modeline-for-window window) *things-to-do-once*))
(defun update-modeline-for-window (window)
  "Recompute all modeline fields for WINDOW against its current buffer."
  (update-modeline-fields (window-buffer window) window))
;;;; Bitmap setting up new windows and modifying old.
(defvar dummy-line (make-window-dis-line "")
  "Dummy dis-line that we put at the head of window's dis-lines")
;; The dummy sits above every real dis-line, so give it position -1.
(setf (dis-line-position dummy-line) -1)
;;; WINDOW-FOR-HUNK makes a Hemlock window and sets up its dis-lines and marks
;;; to display starting at start.
;;;
(defun window-for-hunk (hunk start modelinep)
  "Make a Hemlock window displaying from mark START within bitmap HUNK,
   setting up its dis-lines and marks.  When MODELINEP is true, the window
   gets a modeline."
  (check-type start mark)
  (setf (bitmap-hunk-changed-handler hunk) #'window-changed)
  (let ((buffer (line-buffer (mark-line start)))
        (first (cons dummy-line the-sentinel))
        (width (bitmap-hunk-char-width hunk))
        (height (bitmap-hunk-char-height hunk)))
    (when (or (< height minimum-window-lines)
              (< width minimum-window-columns))
      (error "Window too small."))
    (unless buffer (error "Window start is not in a buffer."))
    (let ((window
           (internal-make-window
            :hunk hunk
            :display-start (copy-mark start :right-inserting)
            :old-start (copy-mark start :temporary)
            :display-end (copy-mark start :right-inserting)
            :%buffer buffer
            :point (copy-mark (buffer-point buffer))
            :height height
            :width width
            :first-line first
            :last-line the-sentinel
            :first-changed the-sentinel
            :last-changed first
            ;; NOTE(review): a negative tick appears to mark the image as
            ;; never displayed, forcing the first update -- confirm against
            ;; the redisplay code.
            :tick -1)))
      (push window *window-list*)
      (push window (buffer-windows buffer))
      ;;
      ;; Make the dis-lines.  The loop runs from -height to height,
      ;; allocating twice the window height as spares.
      (do ((i (- height) (1+ i))
           (res ()
                (cons (make-window-dis-line (make-string width)) res)))
          ((= i height) (setf (window-spare-lines window) res)))
      ;;
      ;; Make the image up to date.
      (update-window-image window)
      (setf (bitmap-hunk-start hunk) (cdr (window-first-line window)))
      ;;
      ;; If there is a modeline, set it up.
      (when modelinep
        (setup-modeline-image buffer window)
        (setf (bitmap-hunk-modeline-dis-line hunk)
              (window-modeline-dis-line window)))
      window)))
;;; SETUP-MODELINE-IMAGE sets up the modeline-dis-line for window using the
;;; modeline-fields list. This is used by tty redisplay too.
;;;
(defun setup-modeline-image (buffer window)
  "Create WINDOW's modeline buffer and dis-line, then compute all of its
   modeline fields for BUFFER.  Used by both bitmap and tty redisplay."
  ;; The modeline buffer is kept maximally long in case the window resizes.
  (setf (window-modeline-buffer window) (make-string hunk-width-limit))
  (setf (window-modeline-dis-line window)
        (make-window-dis-line (make-string (window-width window))))
  (update-modeline-fields buffer window))
;;; Window-Changed  --  Internal
;;;
;;; The bitmap-hunk changed handler for windows. This is only called if
;;; the hunk is not locked. We invalidate the window image and change its
;;; size, then do a full redisplay.
;;;
;; Fix: the comment line "Force modeline update." had lost its ";;" prefix
;; (extraction damage), leaving bare symbols inside the defun that the Lisp
;; reader cannot accept.  Restored as a comment; no code changes.
(defun window-changed (hunk)
  "Bitmap-hunk changed handler for windows: invalidate the window's image,
   adapt it to the hunk's new character size, and prepare a full redisplay.
   Only called when the hunk is not locked.  Returns HUNK."
  (let ((window (bitmap-hunk-window hunk)))
    ;;
    ;; Nuke all the lines in the window image.
    (unless (eq (cdr (window-first-line window)) the-sentinel)
      (shiftf (cdr (window-last-line window))
              (window-spare-lines window)
              (cdr (window-first-line window))
              the-sentinel))
    (setf (bitmap-hunk-start hunk) (cdr (window-first-line window)))
    ;;
    ;; Add some new spare lines if needed.  If width is greater,
    ;; reallocate the dis-line-chars.
    (let* ((res (window-spare-lines window))
           (new-width (bitmap-hunk-char-width hunk))
           (new-height (bitmap-hunk-char-height hunk))
           (width (length (the simple-string (dis-line-chars (car res))))))
      (declare (list res))
      (when (> new-width width)
        (setq width new-width)
        (dolist (dl res)
          (setf (dis-line-chars dl) (make-string new-width))))
      (setf (window-height window) new-height (window-width window) new-width)
      ;; Keep twice the height in dis-lines, matching window creation.
      (do ((i (- (* new-height 2) (length res)) (1- i)))
          ((minusp i))
        (push (make-window-dis-line (make-string width)) res))
      (setf (window-spare-lines window) res)
      ;;
      ;; Force modeline update.
      (let ((ml-buffer (window-modeline-buffer window)))
        (when ml-buffer
          (let ((dl (window-modeline-dis-line window))
                (chars (make-string new-width))
                (len (min new-width (window-modeline-buffer-len window))))
            (setf (dis-line-old-chars dl) nil)
            (setf (dis-line-chars dl) chars)
            (replace chars ml-buffer :end1 len :end2 len)
            (setf (dis-line-length dl) len)
            (setf (dis-line-flags dl) changed-bit)))))
    ;;
    ;; Prepare for redisplay.
    (setf (window-tick window) (tick))
    (update-window-image window)
    (when (eq window *current-window*) (maybe-recenter-window window))
    hunk))
;;; EDITOR-FINISH-OUTPUT is used to synch output to a window with the rest of the
;;; system.
;;;
(defun editor-finish-output (window)
  "Synch output to WINDOW's device with the rest of the system, when the
   device provides a finish-output operation."
  (let* ((device (device-hunk-device (window-hunk window)))
         (flush (device-finish-output device)))
    (when flush
      (funcall flush device window))))
;;;; Tty setting up new windows and modifying old.
;;; setup-window-image  --  Internal
;;;
;;; Set up the dis-lines and marks for Window to display starting
;;; at Start.  Height and Width are the number of lines and columns in
;;; the window.
;;;
(defun setup-window-image (start window height width)
  "Set up the dis-lines and marks for WINDOW to display starting at mark
   START.  HEIGHT and WIDTH are the window's lines and columns."
  (check-type start mark)
  (let ((buffer (line-buffer (mark-line start)))
        (first (cons dummy-line the-sentinel)))
    (unless buffer (error "Window start is not in a buffer."))
    (setf (window-display-start window) (copy-mark start :right-inserting)
          (window-old-start window) (copy-mark start :temporary)
          (window-display-end window) (copy-mark start :right-inserting)
          (window-%buffer window) buffer
          (window-point window) (copy-mark (buffer-point buffer))
          (window-height window) height
          (window-width window) width
          (window-first-line window) first
          (window-last-line window) the-sentinel
          (window-first-changed window) the-sentinel
          (window-last-changed window) first
          ;; NOTE(review): a negative tick appears to mark the image as out
          ;; of date so the first redisplay rebuilds it -- confirm against
          ;; the redisplay code.
          (window-tick window) -1)
    (push window *window-list*)
    (push window (buffer-windows buffer))
    ;;
    ;; Make the dis-lines.  The loop runs from -height to height,
    ;; allocating twice the window height as spares.
    (do ((i (- height) (1+ i))
         (res ()
              (cons (make-window-dis-line (make-string width)) res)))
        ((= i height) (setf (window-spare-lines window) res)))
    ;;
    ;; Make the image up to date.
    (update-window-image window)))
;;; change-window-image-height  --  Internal
;;;
;;; Milkshake.
;;;
(defun change-window-image-height (window new-height)
  "Resize WINDOW's image to NEW-HEIGHT lines, invalidating the old image."
  ;; Nuke all the lines in the window image.
  (unless (eq (cdr (window-first-line window)) the-sentinel)
    (shiftf (cdr (window-last-line window))
            (window-spare-lines window)
            (cdr (window-first-line window))
            the-sentinel))
  ;; Add some new spare lines if needed.
  (let* ((res (window-spare-lines window))
         ;; All dis-lines share the window's width; measure an existing one.
         (width (length (the simple-string (dis-line-chars (car res))))))
    (declare (list res))
    (setf (window-height window) new-height)
    ;; Maintain twice the height in dis-lines, matching window creation.
    (do ((i (- (* new-height 2) (length res)) (1- i)))
        ((minusp i))
      (push (make-window-dis-line (make-string width)) res))
    (setf (window-spare-lines window) res)))
| null | https://raw.githubusercontent.com/rtoy/cmucl/9b1abca53598f03a5b39ded4185471a5b8777dea/src/hemlock/window.lisp | lisp | Package : Hemlock - Internals -*-
**********************************************************************
**********************************************************************
This file contains implementation independent code which implements
other aspects of the interface to redisplay.
CURRENT-WINDOW.
Window structure support.
Move the window's marks to the new start.
Delete all the dis-lines, and nil out the line and chars so they get
gc'ed.
Set the last line and first&last changed so we know there's nothing there.
Make sure the window gets updated, and set the buffer.
any device init necessary. This is called from ED.
Modelines-field structure support.
Modelines maintenance.
Each window stores a modeline-buffer which is a string hunk-width-limit
long. Whenever a field is updated, we must maintain a maximally long
representation of the modeline in case the window is resized. Updating
portion into the window's modeline-dis-line, setting the dis-line's changed
flag.
UPDATE-MODELINE-FIELD must replace the entire dis-line-chars with ml-buffer
after blt'ing into buffer. Otherwise it has to do all the work
isn't worth it. Since things could have shifted around, after calling
the buffer is now.
window's modeline buffer, a modeline-field-info object, a start in the
modeline buffer, and an optional indicating whether a variable width field
should be handled carefully. When the field is fixed-width, this is
simple. When it is variable, we possibly have to shift all the text in the
buffer right or left before storing the new string, updating all the
finfo's after the one we're updating. It is an error for the
modeline-field-function to return anything but a simple-string with
standard-chars. This returns the end of the field blasted into ml-buffer.
BLT-ML-FIELD-BUFFER-FIX shifts the contents of ml-buffer in the direction
list of these. If there are none following finfo, then we simply store the
new end of the buffer. After blt'ing the text around, we have to update
the ml-buffer's end.
Default modeline and update hooks.
Pathname Length" is set.
Prefer a buffer local value over the global one.
Because variables don't work right, blow off looking for
a value in the buffer's modes. In the future this will
be able to get the "current" value as if buffer were current.
If no slash, then file-namestring is very
long, and we should include all of it:
QUEUE-BUFFER-CHANGE is used for various buffer hooks (e.g., mode changes,
name changes, etc.), so it takes some arguments to ignore. These hooks are
invoked at a bad time to update the actual modeline-field, and user's may
have fields that change as a function of the changes this function handles.
This makes his update easier. It doesn't cost much update the entire line
anyway.
QUEUE-WINDOW-CHANGE is used for the "Window Buffer Hook". We ignore the
argument since this hook function is invoked before any changes are made,
and the changes must be made before the fields can be set according to the
window's buffer's properties. Therefore, we must queue the change to
happen sometime before redisplay but after the change takes effect.
Bitmap setting up new windows and modifying old.
to display starting at start.
Make the dis-lines.
Make the image up to date.
If there is a modeline, set it up.
SETUP-MODELINE-IMAGE sets up the modeline-dis-line for window using the
modeline-fields list. This is used by tty redisplay too.
The bitmap-hunk changed handler for windows. This is only called if
the hunk is not locked. We invalidate the window image and change its
size, then do a full redisplay.
Nuke all the lines in the window image.
Add some new spare lines if needed. If width is greater,
reallocate the dis-line-chars.
Prepare for redisplay.
EDITOR-FINISH-OUTPUT is used to synch output to a window with the rest of the
system.
Tty setting up new windows and modifying old.
Set up the dis-lines and marks for Window to display starting
the window.
Make the dis-lines.
Make the image up to date.
Milkshake.
Nuke all the lines in the window image.
Add some new spare lines if needed. | This code was written as part of the CMU Common Lisp project at
Carnegie Mellon University , and has been placed in the public domain .
(ext:file-comment
"$Header: src/hemlock/window.lisp $")
the Hemlock window primitives and most of the code which defines
Written by and .
(in-package "HEMLOCK-INTERNALS")
(export '(current-window window-buffer modeline-field-width
modeline-field-function make-modeline-field update-modeline-fields
update-modeline-field modeline-field-name modeline-field
editor-finish-output *window-list*))
(defvar *current-window* nil "The current window object.")
(defvar *window-list* () "A list of all window objects.")
(declaim (inline current-window))
(defun current-window ()
"Return the current window. The current window is specially treated by
redisplay in several ways, the most important of which is that is does
recentering, ensuring that the Buffer-Point of the current window's
Window-Buffer is always displayed. This may be set with Setf."
*current-window*)
(defun %set-current-window (new-window)
(invoke-hook ed::set-window-hook new-window)
(move-mark (window-point *current-window*)
(buffer-point (window-buffer *current-window*)))
(move-mark (buffer-point (window-buffer new-window))
(window-point new-window))
(setq *current-window* new-window))
(defun %print-hwindow (obj stream depth)
(declare (ignore depth))
(write-string "#<Hemlock Window \"" stream)
(write-string (buffer-name (window-buffer obj)) stream)
(write-string "\">" stream))
(defun window-buffer (window)
"Return the buffer which is displayed in Window."
(window-%buffer window))
(defun %set-window-buffer (window new-buffer)
(unless (bufferp new-buffer) (error "~S is not a buffer." new-buffer))
(unless (windowp window) (error "~S is not a window." window))
(unless (eq new-buffer (window-buffer window))
(invoke-hook ed::window-buffer-hook window new-buffer)
(let ((buffer (window-buffer window)))
(setf (buffer-windows buffer) (delete window (buffer-windows buffer)))
(move-mark (buffer-display-start buffer) (window-display-start window))
(push window (buffer-windows new-buffer))
(move-mark (window-point window) (buffer-point new-buffer))
(move-mark (window-display-start window) (buffer-display-start new-buffer))
(move-mark (window-display-end window) (buffer-display-start new-buffer)))
(let ((first (window-first-line window))
(last (window-last-line window))
(free (window-spare-lines window)))
(unless (eq (cdr first) the-sentinel)
(shiftf (cdr last) free (cdr first) the-sentinel))
(dolist (dl free)
(setf (dis-line-line dl) nil (dis-line-old-chars dl) nil))
(setf (window-spare-lines window) free))
(setf (window-last-line window) the-sentinel
(window-first-changed window) the-sentinel
(window-last-changed window) the-sentinel)
(setf (window-tick window) -3)
(setf (window-%buffer window) new-buffer)))
% INIT - REDISPLAY sets up redisplay 's internal data structures . We create
initial windows , setup some hooks to cause , and call
(defun %init-redisplay (display)
(%init-screen-manager display)
(add-hook ed::buffer-major-mode-hook 'queue-buffer-change)
(add-hook ed::buffer-minor-mode-hook 'queue-buffer-change)
(add-hook ed::buffer-name-hook 'queue-buffer-change)
(add-hook ed::buffer-pathname-hook 'queue-buffer-change)
(add-hook ed::buffer-modified-hook 'queue-buffer-change)
(add-hook ed::window-buffer-hook 'queue-window-change)
(let ((device (device-hunk-device (window-hunk (current-window)))))
(funcall (device-init device) device))
(center-window *current-window* (current-point)))
(defun print-modeline-field (obj stream ignore)
(declare (ignore ignore))
(write-string "#<Hemlock Modeline-field " stream)
(prin1 (modeline-field-%name obj) stream)
(write-string ">" stream))
(defun print-modeline-field-info (obj stream ignore)
(declare (ignore ignore))
(write-string "#<Hemlock Modeline-field-info " stream)
(prin1 (modeline-field-%name (ml-field-info-field obj)) stream)
(write-string ">" stream))
(defvar *modeline-field-names* (make-hash-table))
(defun make-modeline-field (&key name width function)
"Returns a modeline-field object."
(unless (or (eq width nil) (and (integerp width) (plusp width)))
(error "Width must be nil or a positive integer."))
(when (gethash name *modeline-field-names*)
(with-simple-restart (continue
"Use the new definition for this modeline field.")
(error "Modeline field ~S already exists."
(gethash name *modeline-field-names*))))
(setf (gethash name *modeline-field-names*)
(%make-modeline-field name function width)))
(defun modeline-field (name)
"Returns the modeline-field object named name. If none exists, return nil."
(gethash name *modeline-field-names*))
(declaim (inline modeline-field-name modeline-field-width
modeline-field-function))
(defun modeline-field-name (ml-field)
"Returns the name of a modeline field object."
(modeline-field-%name ml-field))
(defun %set-modeline-field-name (ml-field name)
(check-type ml-field modeline-field)
(when (gethash name *modeline-field-names*)
(error "Modeline field ~S already exists."
(gethash name *modeline-field-names*)))
(remhash (modeline-field-%name ml-field) *modeline-field-names*)
(setf (modeline-field-%name ml-field) name)
(setf (gethash name *modeline-field-names*) ml-field))
(defun modeline-field-width (ml-field)
"Returns the width of a modeline field."
(modeline-field-%width ml-field))
(declaim (special *buffer-list*))
(defun %set-modeline-field-width (ml-field width)
(check-type ml-field modeline-field)
(unless (or (eq width nil) (and (integerp width) (plusp width)))
(error "Width must be nil or a positive integer."))
(unless (eql width (modeline-field-%width ml-field))
(setf (modeline-field-%width ml-field) width)
(dolist (b *buffer-list*)
(when (buffer-modeline-field-p b ml-field)
(dolist (w (buffer-windows b))
(update-modeline-fields b w)))))
width)
(defun modeline-field-function (ml-field)
"Returns the function of a modeline field object. It returns a string."
(modeline-field-%function ml-field))
(defun %set-modeline-field-function (ml-field function)
(check-type ml-field modeline-field)
(check-type function (or symbol function))
(setf (modeline-field-%function ml-field) function)
(dolist (b *buffer-list*)
(when (buffer-modeline-field-p b ml-field)
(dolist (w (buffer-windows b))
(update-modeline-field b w ml-field))))
function)
then first gets the modeline - buffer setup , and second blasts the necessary
(defun update-modeline-fields (buffer window)
"Recompute all the fields of buffer's modeline for window, so the next
redisplay will reflect changes."
(let ((ml-buffer (window-modeline-buffer window)))
(declare (simple-string ml-buffer))
(when ml-buffer
(let* ((ml-buffer-len
(do ((finfos (buffer-%modeline-fields buffer) (cdr finfos))
(start 0 (blt-modeline-field-buffer
ml-buffer (car finfos) buffer window start)))
((null finfos) start)))
(dis-line (window-modeline-dis-line window))
(len (min (window-width window) ml-buffer-len)))
(replace (the simple-string (dis-line-chars dis-line)) ml-buffer
:end1 len :end2 len)
(setf (window-modeline-buffer-len window) ml-buffer-len)
(setf (dis-line-length dis-line) len)
(setf (dis-line-flags dis-line) changed-bit)))))
BLT - MODELINE - FIELD - BUFFER to figure out how to adjust dis - line - chars . It
BLT - MODELINE - FIELD - BUFFER , we get the last field 's end to know how long
(defun update-modeline-field (buffer window field)
"Recompute the field of the buffer's modeline for window, so the next
redisplay will reflect the change. Field is either a modeline-field object
or the name of one for buffer."
(let ((finfo (internal-buffer-modeline-field-p buffer field)))
(unless finfo
(error "~S is not a modeline-field or the name of one for buffer ~S."
field buffer))
(let ((ml-buffer (window-modeline-buffer window))
(dis-line (window-modeline-dis-line window)))
(declare (simple-string ml-buffer))
(blt-modeline-field-buffer ml-buffer finfo buffer window
(ml-field-info-start finfo) t)
(let* ((ml-buffer-len (ml-field-info-end
(car (last (buffer-%modeline-fields buffer)))))
(dis-len (min (window-width window) ml-buffer-len)))
(replace (the simple-string (dis-line-chars dis-line)) ml-buffer
:end1 dis-len :end2 dis-len)
(setf (window-modeline-buffer-len window) ml-buffer-len)
(setf (dis-line-length dis-line) dis-len)
(setf (dis-line-flags dis-line) changed-bit)))))
(defvar *truncated-field-char* #\!)
BLT - MODELINE - FIELD - BUFFER takes a Hemlock buffer , Hemlock window , the
(defun blt-modeline-field-buffer (ml-buffer finfo buffer window start
&optional fix-other-fields-p)
(declare (simple-string ml-buffer))
(let* ((f (ml-field-info-field finfo))
(width (modeline-field-width f))
(string (funcall (modeline-field-function f) buffer window))
(str-len (length string)))
(declare (simple-string string))
(setf (ml-field-info-start finfo) start)
(setf (ml-field-info-end finfo)
(cond
((not width)
(let ((end (min (+ start str-len) hunk-width-limit))
(last-end (ml-field-info-end finfo)))
(when (and fix-other-fields-p (/= end last-end))
(blt-ml-field-buffer-fix ml-buffer finfo buffer window
end last-end))
(replace ml-buffer string :start1 start :end1 end :end2 str-len)
end))
((= str-len width)
(let ((end (min (+ start width) hunk-width-limit)))
(replace ml-buffer string :start1 start :end1 end :end2 width)
end))
((> str-len width)
(let* ((end (min (+ start width) hunk-width-limit))
(end-1 (1- end)))
(replace ml-buffer string :start1 start :end1 end-1 :end2 width)
(setf (schar ml-buffer end-1) *truncated-field-char*)
end))
(t
(let ((buf-replace-end (min (+ start str-len) hunk-width-limit))
(buf-field-end (min (+ start width) hunk-width-limit)))
(replace ml-buffer string
:start1 start :end1 buf-replace-end :end2 str-len)
(fill ml-buffer #\space :start buf-replace-end :end buf-field-end)
buf-field-end))))))
of last - end to end . finfo is a modeline - field - info structure in buffer 's
all the finfos ' starts and ends making sure nobody gets to stick out over
(defun blt-ml-field-buffer-fix (ml-buffer finfo buffer window end last-end)
(declare (simple-string ml-buffer))
(let ((finfos (do ((f (buffer-%modeline-fields buffer) (cdr f)))
((null f) (error "This field must be here."))
(if (eq (car f) finfo)
(return (cdr f))))))
(cond
((not finfos)
(setf (window-modeline-buffer-len window) (min end hunk-width-limit)))
(t
(let ((buffer-len (window-modeline-buffer-len window)))
(replace ml-buffer ml-buffer
:start1 end
:end1 (min (+ end (- buffer-len last-end)) hunk-width-limit)
:start2 last-end :end2 buffer-len)
(let ((diff (- end last-end)))
(macrolet ((frob (f)
`(setf ,f (min (+ ,f diff) hunk-width-limit))))
(dolist (f finfos)
(frob (ml-field-info-start f))
(frob (ml-field-info-end f)))
(frob (window-modeline-buffer-len window)))))))))
(make-modeline-field :name :hemlock-literal :width 8
:function #'(lambda (buffer window)
"Returns \"Hemlock \"."
(declare (ignore buffer window))
"Hemlock "))
(make-modeline-field
:name :package
:function #'(lambda (buffer window)
"Returns the value of buffer's \"Current Package\" followed
by a colon and two spaces, or a string with one space."
(declare (ignore window))
(if (hemlock-bound-p 'ed::current-package :buffer buffer)
(let ((val (variable-value 'ed::current-package
:buffer buffer)))
(if val
(format nil "~A: " val)
" "))
" ")))
(make-modeline-field
:name :modes
:function #'(lambda (buffer window)
"Returns buffer's modes followed by one space."
(declare (ignore window))
(format nil "~A " (buffer-modes buffer))))
(make-modeline-field
:name :modifiedp
:function #'(lambda (buffer window)
"Returns \"* \" if buffer is modified, or the empty string."
(declare (ignore window))
(let ((modifiedp (buffer-modified buffer)))
(if modifiedp
"* "
""))))
(make-modeline-field
:name :buffer-name
:function #'(lambda (buffer window)
"Returns buffer's name followed by a colon and a space if the
name is not derived from the buffer's pathname, or the empty
string."
(declare (ignore window))
(let ((pn (buffer-pathname buffer))
(name (buffer-name buffer)))
(cond ((not pn)
(format nil "~A: " name))
((string/= (ed::pathname-to-buffer-name pn) name)
(format nil "~A: " name))
(t "")))))
MAXIMUM - MODELINE - PATHNAME - LENGTH - HOOK is called whenever "
(defun maximum-modeline-pathname-length-hook (name kind where new-value)
(declare (ignore name new-value))
(if (eq kind :buffer)
(hi::queue-buffer-change where)
(dolist (buffer *buffer-list*)
(when (and (buffer-modeline-field-p buffer :buffer-pathname)
(buffer-windows buffer))
(hi::queue-buffer-change buffer)))))
(defun buffer-pathname-ml-field-fun (buffer window)
"Returns the namestring of buffer's pathname if there is one. When
\"Maximum Modeline Pathname Length\" is set, and the namestring is too long,
return a truncated namestring chopping off leading directory specifications."
(declare (ignore window))
(let ((pn (buffer-pathname buffer)))
(if pn
(let* ((name (namestring pn))
(length (length name))
(max (if (hemlock-bound-p 'ed::maximum-modeline-pathname-length
:buffer buffer)
(variable-value 'ed::maximum-modeline-pathname-length
:buffer buffer)
(variable-value 'ed::maximum-modeline-pathname-length
:global))))
(declare (simple-string name))
(if (or (not max) (<= length max))
name
(let* ((extra-chars (+ (- length max) 3))
(slash (or (position #\/ name :start extra-chars)
(position #\/ name :from-end t
:end extra-chars))))
(if slash
(concatenate 'simple-string "..." (subseq name slash))
name))))
"")))
(make-modeline-field
:name :buffer-pathname
:function 'buffer-pathname-ml-field-fun)
(defvar *default-modeline-fields*
(list (modeline-field :hemlock-literal)
(modeline-field :package)
(modeline-field :modes)
(modeline-field :modifiedp)
(modeline-field :buffer-name)
(modeline-field :buffer-pathname))
"This is the default value for \"Default Modeline Fields\".")
(defun queue-buffer-change (buffer &optional something-else another-else)
(declare (ignore something-else another-else))
(push (list #'update-modelines-for-buffer buffer) *things-to-do-once*))
(defun update-modelines-for-buffer (buffer)
(unless (eq buffer *echo-area-buffer*)
(dolist (w (buffer-windows buffer))
(update-modeline-fields buffer w))))
(defun queue-window-change (window &optional something-else)
(declare (ignore something-else))
(push (list #'update-modeline-for-window window) *things-to-do-once*))
(defun update-modeline-for-window (window)
(update-modeline-fields (window-buffer window) window))
(defvar dummy-line (make-window-dis-line "")
"Dummy dis-line that we put at the head of window's dis-lines")
(setf (dis-line-position dummy-line) -1)
WINDOW - FOR - HUNK makes a Hemlock window and sets up its dis - lines and marks
(defun window-for-hunk (hunk start modelinep)
(check-type start mark)
(setf (bitmap-hunk-changed-handler hunk) #'window-changed)
(let ((buffer (line-buffer (mark-line start)))
(first (cons dummy-line the-sentinel))
(width (bitmap-hunk-char-width hunk))
(height (bitmap-hunk-char-height hunk)))
(when (or (< height minimum-window-lines)
(< width minimum-window-columns))
(error "Window too small."))
(unless buffer (error "Window start is not in a buffer."))
(let ((window
(internal-make-window
:hunk hunk
:display-start (copy-mark start :right-inserting)
:old-start (copy-mark start :temporary)
:display-end (copy-mark start :right-inserting)
:%buffer buffer
:point (copy-mark (buffer-point buffer))
:height height
:width width
:first-line first
:last-line the-sentinel
:first-changed the-sentinel
:last-changed first
:tick -1)))
(push window *window-list*)
(push window (buffer-windows buffer))
(do ((i (- height) (1+ i))
(res ()
(cons (make-window-dis-line (make-string width)) res)))
((= i height) (setf (window-spare-lines window) res)))
(update-window-image window)
(setf (bitmap-hunk-start hunk) (cdr (window-first-line window)))
(when modelinep
(setup-modeline-image buffer window)
(setf (bitmap-hunk-modeline-dis-line hunk)
(window-modeline-dis-line window)))
window)))
(defun setup-modeline-image (buffer window)
(setf (window-modeline-buffer window) (make-string hunk-width-limit))
(setf (window-modeline-dis-line window)
(make-window-dis-line (make-string (window-width window))))
(update-modeline-fields buffer window))
Window - Changed -- Internal
(defun window-changed (hunk)
(let ((window (bitmap-hunk-window hunk)))
(unless (eq (cdr (window-first-line window)) the-sentinel)
(shiftf (cdr (window-last-line window))
(window-spare-lines window)
(cdr (window-first-line window))
the-sentinel))
(setf (bitmap-hunk-start hunk) (cdr (window-first-line window)))
(let* ((res (window-spare-lines window))
(new-width (bitmap-hunk-char-width hunk))
(new-height (bitmap-hunk-char-height hunk))
(width (length (the simple-string (dis-line-chars (car res))))))
(declare (list res))
(when (> new-width width)
(setq width new-width)
(dolist (dl res)
(setf (dis-line-chars dl) (make-string new-width))))
(setf (window-height window) new-height (window-width window) new-width)
(do ((i (- (* new-height 2) (length res)) (1- i)))
((minusp i))
(push (make-window-dis-line (make-string width)) res))
(setf (window-spare-lines window) res)
Force modeline update .
(let ((ml-buffer (window-modeline-buffer window)))
(when ml-buffer
(let ((dl (window-modeline-dis-line window))
(chars (make-string new-width))
(len (min new-width (window-modeline-buffer-len window))))
(setf (dis-line-old-chars dl) nil)
(setf (dis-line-chars dl) chars)
(replace chars ml-buffer :end1 len :end2 len)
(setf (dis-line-length dl) len)
(setf (dis-line-flags dl) changed-bit)))))
(setf (window-tick window) (tick))
(update-window-image window)
(when (eq window *current-window*) (maybe-recenter-window window))
hunk))
(defun editor-finish-output (window)
(let* ((device (device-hunk-device (window-hunk window)))
(finish-output (device-finish-output device)))
(when finish-output
(funcall finish-output device window))))
setup - window - image -- Internal
at Start . Height and are the number of lines and columns in
(defun setup-window-image (start window height width)
(check-type start mark)
(let ((buffer (line-buffer (mark-line start)))
(first (cons dummy-line the-sentinel)))
(unless buffer (error "Window start is not in a buffer."))
(setf (window-display-start window) (copy-mark start :right-inserting)
(window-old-start window) (copy-mark start :temporary)
(window-display-end window) (copy-mark start :right-inserting)
(window-%buffer window) buffer
(window-point window) (copy-mark (buffer-point buffer))
(window-height window) height
(window-width window) width
(window-first-line window) first
(window-last-line window) the-sentinel
(window-first-changed window) the-sentinel
(window-last-changed window) first
(window-tick window) -1)
(push window *window-list*)
(push window (buffer-windows buffer))
(do ((i (- height) (1+ i))
(res ()
(cons (make-window-dis-line (make-string width)) res)))
((= i height) (setf (window-spare-lines window) res)))
(update-window-image window)))
change - window - image - height -- Internal
(defun change-window-image-height (window new-height)
(unless (eq (cdr (window-first-line window)) the-sentinel)
(shiftf (cdr (window-last-line window))
(window-spare-lines window)
(cdr (window-first-line window))
the-sentinel))
(let* ((res (window-spare-lines window))
(width (length (the simple-string (dis-line-chars (car res))))))
(declare (list res))
(setf (window-height window) new-height)
(do ((i (- (* new-height 2) (length res)) (1- i)))
((minusp i))
(push (make-window-dis-line (make-string width)) res))
(setf (window-spare-lines window) res)))
|
f368f65886c5fe64eb826ef5edfc7fef1ba527db9babb9cafb07be4f66d3804e | cognitect-labs/pedestal.kafka | consumer.clj | (ns com.cognitect.kafka.consumer
(:require [clojure.spec :as s]
[clojure.walk :as walk]
[clojure.stacktrace :as stacktrace]
[io.pedestal.log :as log]
[io.pedestal.interceptor.chain :as interceptor.chain]
[com.cognitect.kafka.common :as common]
[com.cognitect.kafka.topic :as topic])
(:import [org.apache.kafka.clients.consumer KafkaConsumer ConsumerInterceptor ConsumerRecord ConsumerRecords MockConsumer OffsetAndMetadata OffsetResetStrategy]
[java.util.concurrent Executors]
[org.apache.kafka.common.serialization ByteArrayDeserializer Deserializer StringDeserializer]
[org.apache.kafka.common.errors WakeupException]))
(s/def ::key.deserializer (common/names-kindof? Deserializer))
(s/def ::value.deserializer (common/names-kindof? Deserializer))
(s/def ::auto.commit.interval.ms ::common/time)
(s/def ::auto.offset.reset string?)
(s/def ::check.crcs common/bool-string?)
(s/def ::enable.auto.commit common/bool-string?)
(s/def ::exclude.internal.topics common/bool-string?)
(s/def ::fetch.max.wait.ms ::common/time)
(s/def ::fetch.min.bytes ::common/size)
(s/def ::group.id string?)
(s/def ::heartbeat.interval.ms ::common/time)
(s/def ::interceptor.classes (common/names-kindof? ConsumerInterceptor))
(s/def ::max.partition.fetch.bytes ::common/size)
(s/def ::max.poll.records ::common/size)
(s/def ::partition.assignment.strategy string?)
(s/def ::session.timeout.ms ::common/time)
(s/def ::configuration (s/keys :req [::common/bootstrap.servers
::key.deserializer
::value.deserializer]
:opt [::auto.commit.interval.ms
::auto.offset.reset
::check.crcs
::enable.auto.commit
::exclude.internal.topics
::fetch.max.wait.ms
::fetch.min.bytes
::group.id
::heartbeat.interval.ms
::interceptor.classes
::max.partition.fetch.bytes
::max.poll.records
::partition.assignment.strategy
::session.timeout.ms
::common/client.id
::common/connections.max.idle.ms
::common/metadata.max.age.ms
::common/receive.buffer.bytes
::common/reconnect.backoff.ms
::common/request.timeout.ms
::common/retry.backoff.ms
::common/security.protocol
::common/send.buffer.bytes
::common/metric.reporters
::common/metrics.num.samples
::common/metrics.sample.window.ms
::common/ssl.key.password
::common/ssl.keystore.location
::common/ssl.keystore.password
::common/ssl.truststore.location
::common/ssl.truststore.password
::common/ssl.enabled.protocols
::common/ssl.keystore.type
::common/ssl.protocol
::common/ssl.provider
::common/ssl.truststore.type
::common/ssl.cipher.suites
::common/ssl.endpoint.identification.algorithm
::common/ssl.keymanager.algorithm
::common/ssl.trustmanager.algorithm
::common/sasl.kerberos.service.name
::common/sasl.mechanism
::common/sasl.kerberos.kinit.cmd
::common/sasl.kerberos.min.time.before.relogin
::common/sasl.kerberos.ticket.renew.jitter
::common/sasl.kerberos.ticker.renew.window.factor]))
(def string-deserializer (.getName StringDeserializer))
(def byte-array-deserializer (.getName ByteArrayDeserializer))
(defn create-consumer
[config]
{:pre [(s/valid? ::configuration config)]}
(KafkaConsumer. (common/config->properties config)))
(defn create-mock
[]
(MockConsumer. OffsetResetStrategy/EARLIEST))
(defn- consumer-record->map
[^ConsumerRecord record]
{:checksum (.checksum record)
:key (.key record)
:offset (.offset record)
:partition (.partition record)
:serialized-key-size (.serializedKeySize record)
:serialized-value-size (.serializedValueSize record)
:timestamp (.timestamp record)
:timestamp-type (.timestampType record)
:topic (.topic record)
:value (.value record)
:consumer-record record})
(defn- dispatch-record
[consumer interceptors ^ConsumerRecord record]
(let [context {:consumer consumer
:message (consumer-record->map record)}]
(log/debug :in :poll-and-dispatch :context context)
(log/counter :io.pedestal/active-kafka-messages 1)
(try
(let [final-context (interceptor.chain/execute context interceptors)]
(log/debug :msg "leaving interceptor chain" :final-context final-context))
(catch Throwable t
(log/meter ::dispatch-error)
(log/error :msg "Dispatch code threw an exception"
:throwable t
:cause-trace (with-out-str (stacktrace/print-cause-trace t))))
(finally
(log/counter :io.pedestal/active-kafka-messages -1)))))
(defn- poll-and-dispatch
[interceptors consumer]
(let [^ConsumerRecords msgs (.poll consumer (long 100))]
(when (< 0 (.count msgs))
(doseq [record (iterator-seq (.iterator msgs))]
(dispatch-record consumer interceptors record)))))
(defn- start-loop
[consumer interceptors topic-names auto-close?]
(let [continue? (atom true)
_ (.subscribe consumer topic-names)
completion (future
(try
(while @continue?
(try
(poll-and-dispatch interceptors consumer)
(catch WakeupException _)))
:ok
(catch Throwable t t)
(finally
(log/info :msg "Exiting receive loop")
(when auto-close?
(.close consumer)))))]
{:kafka-consumer consumer
:continue? continue?
:completion completion}))
(def error-logger
{:name ::error-logger
:error (fn [context exception]
(log/error :msg "Error reached front of chain"
:exception exception
:context context)
context)})
(def default-interceptors
[error-logger])
(defn start-consumer
[consumer auto-close? service-map]
(let [topic-names (map ::topic/name (::topic/topics service-map))
config (::configuration service-map)
interceptors (::interceptors service-map)
interceptors (into default-interceptors interceptors)]
(start-loop consumer interceptors topic-names auto-close?)))
(defn stop-consumer
[loop-state]
(reset! (:continue? loop-state) false)
(.wakeup (:kafka-consumer loop-state))
(deref (:completion loop-state) 100 :timeout))
;; ----------------------------------------
;; Utility functions
(defn commit-sync [{consumer :consumer}]
(when consumer
(log/debug :msg "commit sync")
(.commitSync ^KafkaConsumer consumer)))
(defn commit-message-offset [{consumer :consumer message :message}]
(when (and consumer message)
(let [commit-point (long (inc (.offset ^ConsumerRecord message)))]
(log/debug :msg (str "commit at " commit-point))
(.commit consumer (java.util.Collections/singletonMap
(:partition message)
(OffsetAndMetadata. commit-point))))))
| null | https://raw.githubusercontent.com/cognitect-labs/pedestal.kafka/91e826112b2f2bdc6a366a66b6a3cc07f7fca20b/src/com/cognitect/kafka/consumer.clj | clojure | ----------------------------------------
Utility functions | (ns com.cognitect.kafka.consumer
(:require [clojure.spec :as s]
[clojure.walk :as walk]
[clojure.stacktrace :as stacktrace]
[io.pedestal.log :as log]
[io.pedestal.interceptor.chain :as interceptor.chain]
[com.cognitect.kafka.common :as common]
[com.cognitect.kafka.topic :as topic])
(:import [org.apache.kafka.clients.consumer KafkaConsumer ConsumerInterceptor ConsumerRecord ConsumerRecords MockConsumer OffsetAndMetadata OffsetResetStrategy]
[java.util.concurrent Executors]
[org.apache.kafka.common.serialization ByteArrayDeserializer Deserializer StringDeserializer]
[org.apache.kafka.common.errors WakeupException]))
(s/def ::key.deserializer (common/names-kindof? Deserializer))
(s/def ::value.deserializer (common/names-kindof? Deserializer))
(s/def ::auto.commit.interval.ms ::common/time)
(s/def ::auto.offset.reset string?)
(s/def ::check.crcs common/bool-string?)
(s/def ::enable.auto.commit common/bool-string?)
(s/def ::exclude.internal.topics common/bool-string?)
(s/def ::fetch.max.wait.ms ::common/time)
(s/def ::fetch.min.bytes ::common/size)
(s/def ::group.id string?)
(s/def ::heartbeat.interval.ms ::common/time)
(s/def ::interceptor.classes (common/names-kindof? ConsumerInterceptor))
(s/def ::max.partition.fetch.bytes ::common/size)
(s/def ::max.poll.records ::common/size)
(s/def ::partition.assignment.strategy string?)
(s/def ::session.timeout.ms ::common/time)
(s/def ::configuration (s/keys :req [::common/bootstrap.servers
::key.deserializer
::value.deserializer]
:opt [::auto.commit.interval.ms
::auto.offset.reset
::check.crcs
::enable.auto.commit
::exclude.internal.topics
::fetch.max.wait.ms
::fetch.min.bytes
::group.id
::heartbeat.interval.ms
::interceptor.classes
::max.partition.fetch.bytes
::max.poll.records
::partition.assignment.strategy
::session.timeout.ms
::common/client.id
::common/connections.max.idle.ms
::common/metadata.max.age.ms
::common/receive.buffer.bytes
::common/reconnect.backoff.ms
::common/request.timeout.ms
::common/retry.backoff.ms
::common/security.protocol
::common/send.buffer.bytes
::common/metric.reporters
::common/metrics.num.samples
::common/metrics.sample.window.ms
::common/ssl.key.password
::common/ssl.keystore.location
::common/ssl.keystore.password
::common/ssl.truststore.location
::common/ssl.truststore.password
::common/ssl.enabled.protocols
::common/ssl.keystore.type
::common/ssl.protocol
::common/ssl.provider
::common/ssl.truststore.type
::common/ssl.cipher.suites
::common/ssl.endpoint.identification.algorithm
::common/ssl.keymanager.algorithm
::common/ssl.trustmanager.algorithm
::common/sasl.kerberos.service.name
::common/sasl.mechanism
::common/sasl.kerberos.kinit.cmd
::common/sasl.kerberos.min.time.before.relogin
::common/sasl.kerberos.ticket.renew.jitter
::common/sasl.kerberos.ticker.renew.window.factor]))
(def string-deserializer (.getName StringDeserializer))
(def byte-array-deserializer (.getName ByteArrayDeserializer))
(defn create-consumer
[config]
{:pre [(s/valid? ::configuration config)]}
(KafkaConsumer. (common/config->properties config)))
(defn create-mock
[]
(MockConsumer. OffsetResetStrategy/EARLIEST))
(defn- consumer-record->map
[^ConsumerRecord record]
{:checksum (.checksum record)
:key (.key record)
:offset (.offset record)
:partition (.partition record)
:serialized-key-size (.serializedKeySize record)
:serialized-value-size (.serializedValueSize record)
:timestamp (.timestamp record)
:timestamp-type (.timestampType record)
:topic (.topic record)
:value (.value record)
:consumer-record record})
(defn- dispatch-record
[consumer interceptors ^ConsumerRecord record]
(let [context {:consumer consumer
:message (consumer-record->map record)}]
(log/debug :in :poll-and-dispatch :context context)
(log/counter :io.pedestal/active-kafka-messages 1)
(try
(let [final-context (interceptor.chain/execute context interceptors)]
(log/debug :msg "leaving interceptor chain" :final-context final-context))
(catch Throwable t
(log/meter ::dispatch-error)
(log/error :msg "Dispatch code threw an exception"
:throwable t
:cause-trace (with-out-str (stacktrace/print-cause-trace t))))
(finally
(log/counter :io.pedestal/active-kafka-messages -1)))))
(defn- poll-and-dispatch
[interceptors consumer]
(let [^ConsumerRecords msgs (.poll consumer (long 100))]
(when (< 0 (.count msgs))
(doseq [record (iterator-seq (.iterator msgs))]
(dispatch-record consumer interceptors record)))))
(defn- start-loop
[consumer interceptors topic-names auto-close?]
(let [continue? (atom true)
_ (.subscribe consumer topic-names)
completion (future
(try
(while @continue?
(try
(poll-and-dispatch interceptors consumer)
(catch WakeupException _)))
:ok
(catch Throwable t t)
(finally
(log/info :msg "Exiting receive loop")
(when auto-close?
(.close consumer)))))]
{:kafka-consumer consumer
:continue? continue?
:completion completion}))
(def error-logger
{:name ::error-logger
:error (fn [context exception]
(log/error :msg "Error reached front of chain"
:exception exception
:context context)
context)})
(def default-interceptors
[error-logger])
(defn start-consumer
[consumer auto-close? service-map]
(let [topic-names (map ::topic/name (::topic/topics service-map))
config (::configuration service-map)
interceptors (::interceptors service-map)
interceptors (into default-interceptors interceptors)]
(start-loop consumer interceptors topic-names auto-close?)))
(defn stop-consumer
[loop-state]
(reset! (:continue? loop-state) false)
(.wakeup (:kafka-consumer loop-state))
(deref (:completion loop-state) 100 :timeout))
(defn commit-sync [{consumer :consumer}]
(when consumer
(log/debug :msg "commit sync")
(.commitSync ^KafkaConsumer consumer)))
(defn commit-message-offset [{consumer :consumer message :message}]
(when (and consumer message)
(let [commit-point (long (inc (.offset ^ConsumerRecord message)))]
(log/debug :msg (str "commit at " commit-point))
(.commit consumer (java.util.Collections/singletonMap
(:partition message)
(OffsetAndMetadata. commit-point))))))
|
90c7c151d2237b47a12ef5f9abd795a63ef4022ee3c01a3f2f18e01b710c935b | lisp-mirror/clpm | sync.lisp | ;;;; clpm sync
;;;;
This software is part of CLPM . See README.org for more information . See
;;;; LICENSE for license information.
(uiop:define-package #:clpm-cli/commands/sync
(:use #:cl
#:clpm-cli/common-args
#:clpm-cli/interface-defs)
(:import-from #:adopt)
(:import-from #:clpm))
(in-package #:clpm-cli/commands/sync)
(define-string *help-text*
"Sync all sources.")
(defparameter *sync-ui*
(adopt:make-interface
:name "clpm-sync"
:summary "Common Lisp Project Manager Sync"
:usage "sync [SOURCE-NAME*]"
:help *help-text*
:manual *help-text*
:contents (list *group-common*)))
(define-cli-command (("sync") *sync-ui*) (args options)
(declare (ignore options))
(clpm:sync :sources args)
t)
| null | https://raw.githubusercontent.com/lisp-mirror/clpm/ad9a704fcdd0df5ce30ead106706ab6cc5fb3e5b/cli/commands/sync.lisp | lisp | clpm sync
LICENSE for license information. | This software is part of CLPM . See README.org for more information . See
(uiop:define-package #:clpm-cli/commands/sync
(:use #:cl
#:clpm-cli/common-args
#:clpm-cli/interface-defs)
(:import-from #:adopt)
(:import-from #:clpm))
(in-package #:clpm-cli/commands/sync)
(define-string *help-text*
"Sync all sources.")
(defparameter *sync-ui*
(adopt:make-interface
:name "clpm-sync"
:summary "Common Lisp Project Manager Sync"
:usage "sync [SOURCE-NAME*]"
:help *help-text*
:manual *help-text*
:contents (list *group-common*)))
(define-cli-command (("sync") *sync-ui*) (args options)
(declare (ignore options))
(clpm:sync :sources args)
t)
|
9f3ee39852b34eac8c2f194ed3f6eb6fb1c9c372eb3c93fa1bb0696280285348 | cram2/cram | neem-generator.lisp | (in-package :cslg)
(defun generate-neem (&optional objects-to-fetch-deliever)
(setf cram-tf:*tf-broadcasting-enabled* t)
(roslisp-utilities:startup-ros :name "cram" :anonymous nil)
(let ((objects-str (roslisp:get-param "/neem_generator/objects"))
objects '())
(loop for x in (split-sequence:split-sequence #\Space objects-str)
do (setf objects (append objects (list (values (intern (string-upcase x) "KEYWORD"))))))
(setf ccl::*is-logging-enabled* t)
(setf ccl::*host* "''")
(setf ccl::*cert-path* "'/home/koralewski/Desktop/localhost.pem'")
(setf ccl::*api-key* "'K103jdr40Rp8UX4egmRf42VbdB1b5PW7qYOOVvTDAoiNG6lcQoaDHONf5KaFcefs'")
(ccl::connect-to-cloud-logger)
(let ((experiment-id (format nil "~d" (truncate (* 1000000 (cram-utilities:current-timestamp))))))
(format t "Starting experiment ~a~%" experiment-id)
(unwind-protect
(if objects
(pr2-proj:with-simulated-robot (demo::demo-random nil objects))
(pr2-proj:with-simulated-robot (demo::demo-random)))
(ccl::export-log-to-owl (concatenate 'string experiment-id ".owl"))
(format t "Done with experiment ~a~%" experiment-id)
(ccl::reset-logged-owl)))))
| null | https://raw.githubusercontent.com/cram2/cram/dcb73031ee944d04215bbff9e98b9e8c210ef6c5/cram_learning/cram_sim_log_generator/src/neem-generator.lisp | lisp | (in-package :cslg)
(defun generate-neem (&optional objects-to-fetch-deliever)
(setf cram-tf:*tf-broadcasting-enabled* t)
(roslisp-utilities:startup-ros :name "cram" :anonymous nil)
(let ((objects-str (roslisp:get-param "/neem_generator/objects"))
objects '())
(loop for x in (split-sequence:split-sequence #\Space objects-str)
do (setf objects (append objects (list (values (intern (string-upcase x) "KEYWORD"))))))
(setf ccl::*is-logging-enabled* t)
(setf ccl::*host* "''")
(setf ccl::*cert-path* "'/home/koralewski/Desktop/localhost.pem'")
(setf ccl::*api-key* "'K103jdr40Rp8UX4egmRf42VbdB1b5PW7qYOOVvTDAoiNG6lcQoaDHONf5KaFcefs'")
(ccl::connect-to-cloud-logger)
(let ((experiment-id (format nil "~d" (truncate (* 1000000 (cram-utilities:current-timestamp))))))
(format t "Starting experiment ~a~%" experiment-id)
(unwind-protect
(if objects
(pr2-proj:with-simulated-robot (demo::demo-random nil objects))
(pr2-proj:with-simulated-robot (demo::demo-random)))
(ccl::export-log-to-owl (concatenate 'string experiment-id ".owl"))
(format t "Done with experiment ~a~%" experiment-id)
(ccl::reset-logged-owl)))))
|
|
63d6b26b2c7c1de7ea40da6c9ae795ec8c9e57f3847bc448d46b72449464aa5f | inconvergent/weird | weir-with.lisp |
(in-package #:weird-tests)
(plan 8)
(veq:vprogn
(subtest "test-weir-with "
(let ((wer (init-weir)))
(weir:with (wer %)
(% (2add-vert? (veq:f2 11f0 3f0)))
(list 4.5
(% (2move-vert? 0 (veq:f2 1f0 0f0)))
nil t
(list 5 (% (2add-vert? (veq:f2 12f0 3f0)))
(% (2add-vert? (veq:f2 13f0 3f0))))
(list nil)
(list (list))))
(is (sort (weir:get-vert-inds wer) #'<)
(list 0 1 2 3 5 6 7)))
(let ((wer (init-weir)))
(is (weir:edge-exists wer '(7 2)) nil)
(weir:with (wer %)
(list) 1 nil
(% (2add-vert? (veq:f2 12f0 3f0)))
11
(% (add-edge? 1 2))
(% (add-edge? 2 7)))
(is (veq:lst (weir:2get-vert wer 12)) '(12f0 3f0))
(is (veq:lst (weir:2get-vert wer 11)) '(13f0 6f0))
(is (weir:edge-exists wer '(1 2)) t)
(is (weir:edge-exists wer '(2 7)) t)
(is (weir:edge-exists wer '(7 2)) t))
(let ((wer (weir:make)))
(weir:with (wer %)
(% (2add-vert? (veq:f2 1f0 2f0) ) :res :a?)
(% (2add-vert? (veq:f2 2f0 2f0 )) :res :b?)
(% (add-edge? :a? :b?) :res :e1?)
(% (2append-edge? (first :e1?) (veq:f2 4f0 3f0)) :res :e2?))
; there was a bug vprogn/vdef. it does not handle dotted pairs
; TODO: the dotted pairs bug is fiexed. so maybe rewrite this?
(is (flatten-ht (weir:get-alteration-result-map wer))
' ( (: A . 1 ) (: B . 0 ) (: E1 0 1 ) (: E2 . 0 2 ) )
'(:B? 0 :A? 1 :E1? 0 1 :E2? 0 2)))
(let ((wer (weir:make)))
(veq:f2let ((v (veq:f2 1f0 2f0)))
(weir:with (wer %)
(% (2add-vert? (veq:f2 1f0 2f0)) :res :a?)
(veq:f2let ((ww (veq:f2 v)))
(% (? (x) (list ww :a?)) :res :l?)
(% (? (x) (veq:lst (veq:f2- ww 1f0 2f0))) :res :l2?))
(veq:f2vset (v) (veq:f2 2f0 2f0))))
(is (gethash :l? (weir:get-alteration-result-map wer)) `(1.0f0 2.0f0 0))
(is (gethash :l2? (weir:get-alteration-result-map wer)) '(0f0 0f0))
(weir:with (wer %) (% (2move-vert? 0 (veq:f2 4f0 7f0)) :res :a?))
(is (gethash :a? (weir:get-alteration-result-map wer)) 0))
(let ((wer (init-weir)))
(weir:with (wer %)
(loop for i from 3 below 7
do (weir:with-gs (a? b?)
(% (add-edge? i (+ i 1)) :res a?)
(% (ldel-edge? a?) :res b?)))
(is (weir:get-alteration-result-list wer) nil))
(let ((res (mapcar #'cdr (weir:get-alteration-result-list wer))))
(is res '((6 7) NIL (4 5) (3 4) T NIL T T)))))
(defun make-sfx-weir ()
(let ((wer (weir:make)))
(weir:2add-vert! wer (veq:f2 1f0 1f0))
(weir:2add-vert! wer (veq:f2 2f0 2f0))
(weir:2add-vert! wer (veq:f2 3f0 3f0))
wer))
(defun isub (wer a b)
(veq:f2- (weir:2get-vert wer b) (weir:2get-vert wer a)))
(subtest "test-weir-with-sfx "
these two cases demonstrate the " side - effect " of alterting the
; graph sequentially while relying on the state of the graph
(let ((wer (make-sfx-weir)))
; this exhibits "side-effects"
(weir:2move-vert! wer 0 (isub wer 1 0))
(weir:2move-vert! wer 1 (isub wer 2 0))
(is (weir:2get-all-verts wer)
#(0.0 0.0 -1.0 -1.0 3.0 3.0)
:test #'equalp))
(let ((wer (make-sfx-weir)))
; this exhibits "side-effects"
(weir:2move-vert! wer 1 (isub wer 2 0))
(weir:2move-vert! wer 0 (isub wer 1 0))
(is (weir:2get-all-verts wer)
203
:test #'equalp))
these three cases demonstrate the expected behavoir of an alteration .
; no "side effect" in the sense described above.
(let ((wer (make-sfx-weir)))
(weir:with (wer %)
; alterations avoid side-effects
(% (2move-vert? 1 (isub wer 2 0)))
(% (2move-vert? 0 (isub wer 1 0))))
(is (weir:2get-all-verts wer)
003
:test #'equalp))
(let ((wer (make-sfx-weir)))
(weir:with (wer %)
; alterations avoid side-effects
(% (2move-vert? 0 (isub wer 1 0)))
(% (2move-vert? 1 (isub wer 2 0))))
(is (weir:2get-all-verts wer)
003
:test #'equalp))
(let ((wer (make-sfx-weir)))
(weir:with (wer %)
(veq:f2let ((va (isub wer 2 0))
(vb (isub wer 1 0)))
(% (? (w) (weir:2move-vert! w 1 va)))
(% (? (w) (weir:2move-vert! w 0 vb)))))
(is (weir:2get-all-verts wer)
003
:test #'equalp)))
(subtest "test-weir-add "
(let ((wer (init-weir)))
(weir:with (wer %)
(% (2add-vert? (veq:f2 10f0 3f0))))
(is (veq:lst (weir:2get-vert wer 11)) `( 10f0 3f0))
(is (weir:get-num-verts wer) 12)
(weir:with (wer %)
(% (2add-vert? (veq:f2 80f0 3f0)) :res :a?)
(% (2add-vert? (veq:f2 70f0 3f0)) :res :b?))
(is (flatten-ht (weir:get-alteration-result-map wer))
`(:b? 12 :a? 13 ))
(is (weir:get-num-verts wer) 14)
(weir:with (wer %)
(% (2vadd-edge? (veq:f2 7f0 3f0) (veq:f2 100f0 0.99f0))))
(is (weir:get-edges wer)
'((14 15) (5 6) (3 7) (0 1) (1 3) (1 2)))))
(subtest "test-weir-move "
(let ((wer (init-weir)))
(weir:with (wer %)
(% (2move-vert? 0 (veq:f2 3f0 3f0)) :res :a?)
(% (2move-vert? 1 (veq:f2 1f0 3f0)) :res :b?)
(% (2move-vert? 3 (veq:f2 2f0 3f0) :rel nil) :res :c?)
(% (2move-vert? 2 (veq:f2 3f0 4f0)) :res :d?))
(is (weir:2get-all-verts wer)
#(3.0 5.0 3.0 6.0 6.0 8.0 2.0 3.0 5.0 4.0 0.0 6.0 -1.0 7.0 0.0 8.0 0.0
9.0 10.0 1.0 3.0 1.0)
:test #'equalp)
(is (veq:lst (weir:2get-vert wer 0)) '(3f0 5f0))
(is (veq:lst (weir:2get-vert wer 1)) '(3f0 6f0))
(is (veq:lst (weir:2get-vert wer 3)) '(2f0 3f0))
(is (veq:lst (weir:2get-vert wer 2)) '(6f0 8f0))))
(subtest "test-weir-join "
(let ((wer (init-weir)))
(weir:with (wer %)
(% (add-edge? 3 3))
(% (add-edge? 3 3))
(% (add-edge? 3 6))
(% (add-edge? 7 1)))
(is (weir:get-num-edges wer) 7)
(weir:with (wer %)
(% (add-edge? 3 3) :res :a?)
(% (add-edge? 1 6) :res :b?)
(% (add-edge? 1 100) :res :c?))
(is (flatten-ht (weir:get-alteration-result-map wer))
'(:C? :B? 1 6 :A?))))
(subtest "test-weir-append "
(let ((wer (init-weir)))
(is (weir:get-num-verts wer) 11)
(weir:with (wer %)
(% (2append-edge? 3 (veq:f2 3f0 4f0)) :res :a?)
(% (2append-edge? 3 (veq:f2 8f0 5f0) :rel nil) :res :b?)
(% (2append-edge? 7 (veq:f2 1f0 2f0)) :res :c?))
(is (flatten-ht (weir:get-alteration-result-map wer))
'(:C? 7 11 :B? 3 12 :A? 3 13))
(is (weir:get-num-edges wer) 8)
(is (weir:get-num-verts wer) 14)
(is (weir:2get-all-verts wer)
#(0.0f0 2.0f0 2.0f0 3.0f0 3.0f0 4.0f0 4.0f0 7.0f0 5.0f0 4.0f0 0.0f0
6.0f0 -1.0f0 7.0f0 0.0f0 8.0f0 0.0f0 9.0f0 10.0f0 1.0f0 3.0f0 1.0f0
1.0f0 10.0f0 8.0f0 5.0f0 7.0f0 11.0f0)
:test #'equalp)))
(subtest "test-weir-split "
(let ((wer (init-weir)))
(weir:with (wer %)
(% (2split-edge? 1 2 (veq:f2 30f0 20f0)) :res :a?)
(% (2lsplit-edge? '(1 2) (veq:f2 31f0 23f0)) :res :b?)
(% (2lsplit-edge? '(5 6) (veq:f2 32f0 24f0)) :res :c?))
(is (flatten-ht (weir:get-alteration-result-map wer))
'(:C? 11 :B? 12 :A?))
(is (weir:get-num-edges wer) 7)
(is (weir:get-num-verts wer) 13)
(is (weir:2get-all-verts wer)
#(0.0 2.0 2.0 3.0 3.0 4.0 4.0 7.0 5.0 4.0 0.0 6.0 -1.0 7.0 0.0 8.0 0.0
9.0 10.0 1.0 3.0 1.0 32.0 24.0 31.0 23.0)
:test #'equalp)))
(subtest "test-weir-itrs "
(rnd:set-rnd-state 1)
(let ((wer (init-weir)))
(weir:with (wer %)
(weir:with-rnd-vert (wer v)
(% (2move-vert? v (veq:f2 2f0 2f0)))
(% (2append-edge? v (veq:f2 3f0 2f0)))))
(is (weir:get-num-edges wer) 6)
(is (weir:get-num-verts wer) 12)
(weir:with (wer %)
(weir:itr-verts (wer v)
(% (2move-vert? v (veq:f2 2f0 2f0)))))
(is (sort (weir:itr-verts (wer i :collect t) i) #'<)
'(0 1 2 3 4 5 6 7 8 9 10 11))
(is (weir:itr-verts (wer i) i) nil)
(is (sort (weir:itr-grp-verts (wer i :collect t) i) #'<)
'(0 1 2 3 5 6 7 11))
(is (weir:itr-edges (wer e :collect t) e)
'((5 11) (5 6) (3 7) (0 1) (1 3) (1 2)))
(is
(sort (weir:itr-edges (wer e :collect t)
(weir:2ledge-length wer e)) #'<)
'(1.0 1.4142135 2.236068 3.1622777 4.1231055 4.472136))
(weir:with (wer %)
(weir:with-rnd-edge (wer e)
(% (2lsplit-edge? e (veq:f2 31f0 23f0)))))
(is (weir:get-num-edges wer) 7)
(is (weir:get-num-verts wer) 13))))
(unless (finalize) (error "error in weir-with tests"))
| null | https://raw.githubusercontent.com/inconvergent/weird/106d154ec2cd0e4ec977c3672ba717d6305c1056/test/weir-with.lisp | lisp | there was a bug vprogn/vdef. it does not handle dotted pairs
TODO: the dotted pairs bug is fiexed. so maybe rewrite this?
graph sequentially while relying on the state of the graph
this exhibits "side-effects"
this exhibits "side-effects"
no "side effect" in the sense described above.
alterations avoid side-effects
alterations avoid side-effects |
(in-package #:weird-tests)
(plan 8)
(veq:vprogn
(subtest "test-weir-with "
(let ((wer (init-weir)))
(weir:with (wer %)
(% (2add-vert? (veq:f2 11f0 3f0)))
(list 4.5
(% (2move-vert? 0 (veq:f2 1f0 0f0)))
nil t
(list 5 (% (2add-vert? (veq:f2 12f0 3f0)))
(% (2add-vert? (veq:f2 13f0 3f0))))
(list nil)
(list (list))))
(is (sort (weir:get-vert-inds wer) #'<)
(list 0 1 2 3 5 6 7)))
(let ((wer (init-weir)))
(is (weir:edge-exists wer '(7 2)) nil)
(weir:with (wer %)
(list) 1 nil
(% (2add-vert? (veq:f2 12f0 3f0)))
11
(% (add-edge? 1 2))
(% (add-edge? 2 7)))
(is (veq:lst (weir:2get-vert wer 12)) '(12f0 3f0))
(is (veq:lst (weir:2get-vert wer 11)) '(13f0 6f0))
(is (weir:edge-exists wer '(1 2)) t)
(is (weir:edge-exists wer '(2 7)) t)
(is (weir:edge-exists wer '(7 2)) t))
(let ((wer (weir:make)))
(weir:with (wer %)
(% (2add-vert? (veq:f2 1f0 2f0) ) :res :a?)
(% (2add-vert? (veq:f2 2f0 2f0 )) :res :b?)
(% (add-edge? :a? :b?) :res :e1?)
(% (2append-edge? (first :e1?) (veq:f2 4f0 3f0)) :res :e2?))
(is (flatten-ht (weir:get-alteration-result-map wer))
' ( (: A . 1 ) (: B . 0 ) (: E1 0 1 ) (: E2 . 0 2 ) )
'(:B? 0 :A? 1 :E1? 0 1 :E2? 0 2)))
(let ((wer (weir:make)))
(veq:f2let ((v (veq:f2 1f0 2f0)))
(weir:with (wer %)
(% (2add-vert? (veq:f2 1f0 2f0)) :res :a?)
(veq:f2let ((ww (veq:f2 v)))
(% (? (x) (list ww :a?)) :res :l?)
(% (? (x) (veq:lst (veq:f2- ww 1f0 2f0))) :res :l2?))
(veq:f2vset (v) (veq:f2 2f0 2f0))))
(is (gethash :l? (weir:get-alteration-result-map wer)) `(1.0f0 2.0f0 0))
(is (gethash :l2? (weir:get-alteration-result-map wer)) '(0f0 0f0))
(weir:with (wer %) (% (2move-vert? 0 (veq:f2 4f0 7f0)) :res :a?))
(is (gethash :a? (weir:get-alteration-result-map wer)) 0))
(let ((wer (init-weir)))
(weir:with (wer %)
(loop for i from 3 below 7
do (weir:with-gs (a? b?)
(% (add-edge? i (+ i 1)) :res a?)
(% (ldel-edge? a?) :res b?)))
(is (weir:get-alteration-result-list wer) nil))
(let ((res (mapcar #'cdr (weir:get-alteration-result-list wer))))
(is res '((6 7) NIL (4 5) (3 4) T NIL T T)))))
(defun make-sfx-weir ()
(let ((wer (weir:make)))
(weir:2add-vert! wer (veq:f2 1f0 1f0))
(weir:2add-vert! wer (veq:f2 2f0 2f0))
(weir:2add-vert! wer (veq:f2 3f0 3f0))
wer))
(defun isub (wer a b)
(veq:f2- (weir:2get-vert wer b) (weir:2get-vert wer a)))
(subtest "test-weir-with-sfx "
these two cases demonstrate the " side - effect " of alterting the
(let ((wer (make-sfx-weir)))
(weir:2move-vert! wer 0 (isub wer 1 0))
(weir:2move-vert! wer 1 (isub wer 2 0))
(is (weir:2get-all-verts wer)
#(0.0 0.0 -1.0 -1.0 3.0 3.0)
:test #'equalp))
(let ((wer (make-sfx-weir)))
(weir:2move-vert! wer 1 (isub wer 2 0))
(weir:2move-vert! wer 0 (isub wer 1 0))
(is (weir:2get-all-verts wer)
203
:test #'equalp))
these three cases demonstrate the expected behavoir of an alteration .
(let ((wer (make-sfx-weir)))
(weir:with (wer %)
(% (2move-vert? 1 (isub wer 2 0)))
(% (2move-vert? 0 (isub wer 1 0))))
(is (weir:2get-all-verts wer)
003
:test #'equalp))
(let ((wer (make-sfx-weir)))
(weir:with (wer %)
(% (2move-vert? 0 (isub wer 1 0)))
(% (2move-vert? 1 (isub wer 2 0))))
(is (weir:2get-all-verts wer)
003
:test #'equalp))
(let ((wer (make-sfx-weir)))
(weir:with (wer %)
(veq:f2let ((va (isub wer 2 0))
(vb (isub wer 1 0)))
(% (? (w) (weir:2move-vert! w 1 va)))
(% (? (w) (weir:2move-vert! w 0 vb)))))
(is (weir:2get-all-verts wer)
003
:test #'equalp)))
(subtest "test-weir-add "
(let ((wer (init-weir)))
(weir:with (wer %)
(% (2add-vert? (veq:f2 10f0 3f0))))
(is (veq:lst (weir:2get-vert wer 11)) `( 10f0 3f0))
(is (weir:get-num-verts wer) 12)
(weir:with (wer %)
(% (2add-vert? (veq:f2 80f0 3f0)) :res :a?)
(% (2add-vert? (veq:f2 70f0 3f0)) :res :b?))
(is (flatten-ht (weir:get-alteration-result-map wer))
`(:b? 12 :a? 13 ))
(is (weir:get-num-verts wer) 14)
(weir:with (wer %)
(% (2vadd-edge? (veq:f2 7f0 3f0) (veq:f2 100f0 0.99f0))))
(is (weir:get-edges wer)
'((14 15) (5 6) (3 7) (0 1) (1 3) (1 2)))))
(subtest "test-weir-move "
(let ((wer (init-weir)))
(weir:with (wer %)
(% (2move-vert? 0 (veq:f2 3f0 3f0)) :res :a?)
(% (2move-vert? 1 (veq:f2 1f0 3f0)) :res :b?)
(% (2move-vert? 3 (veq:f2 2f0 3f0) :rel nil) :res :c?)
(% (2move-vert? 2 (veq:f2 3f0 4f0)) :res :d?))
(is (weir:2get-all-verts wer)
#(3.0 5.0 3.0 6.0 6.0 8.0 2.0 3.0 5.0 4.0 0.0 6.0 -1.0 7.0 0.0 8.0 0.0
9.0 10.0 1.0 3.0 1.0)
:test #'equalp)
(is (veq:lst (weir:2get-vert wer 0)) '(3f0 5f0))
(is (veq:lst (weir:2get-vert wer 1)) '(3f0 6f0))
(is (veq:lst (weir:2get-vert wer 3)) '(2f0 3f0))
(is (veq:lst (weir:2get-vert wer 2)) '(6f0 8f0))))
(subtest "test-weir-join "
(let ((wer (init-weir)))
(weir:with (wer %)
(% (add-edge? 3 3))
(% (add-edge? 3 3))
(% (add-edge? 3 6))
(% (add-edge? 7 1)))
(is (weir:get-num-edges wer) 7)
(weir:with (wer %)
(% (add-edge? 3 3) :res :a?)
(% (add-edge? 1 6) :res :b?)
(% (add-edge? 1 100) :res :c?))
(is (flatten-ht (weir:get-alteration-result-map wer))
'(:C? :B? 1 6 :A?))))
(subtest "test-weir-append "
(let ((wer (init-weir)))
(is (weir:get-num-verts wer) 11)
(weir:with (wer %)
(% (2append-edge? 3 (veq:f2 3f0 4f0)) :res :a?)
(% (2append-edge? 3 (veq:f2 8f0 5f0) :rel nil) :res :b?)
(% (2append-edge? 7 (veq:f2 1f0 2f0)) :res :c?))
(is (flatten-ht (weir:get-alteration-result-map wer))
'(:C? 7 11 :B? 3 12 :A? 3 13))
(is (weir:get-num-edges wer) 8)
(is (weir:get-num-verts wer) 14)
(is (weir:2get-all-verts wer)
#(0.0f0 2.0f0 2.0f0 3.0f0 3.0f0 4.0f0 4.0f0 7.0f0 5.0f0 4.0f0 0.0f0
6.0f0 -1.0f0 7.0f0 0.0f0 8.0f0 0.0f0 9.0f0 10.0f0 1.0f0 3.0f0 1.0f0
1.0f0 10.0f0 8.0f0 5.0f0 7.0f0 11.0f0)
:test #'equalp)))
(subtest "test-weir-split "
(let ((wer (init-weir)))
(weir:with (wer %)
(% (2split-edge? 1 2 (veq:f2 30f0 20f0)) :res :a?)
(% (2lsplit-edge? '(1 2) (veq:f2 31f0 23f0)) :res :b?)
(% (2lsplit-edge? '(5 6) (veq:f2 32f0 24f0)) :res :c?))
(is (flatten-ht (weir:get-alteration-result-map wer))
'(:C? 11 :B? 12 :A?))
(is (weir:get-num-edges wer) 7)
(is (weir:get-num-verts wer) 13)
(is (weir:2get-all-verts wer)
#(0.0 2.0 2.0 3.0 3.0 4.0 4.0 7.0 5.0 4.0 0.0 6.0 -1.0 7.0 0.0 8.0 0.0
9.0 10.0 1.0 3.0 1.0 32.0 24.0 31.0 23.0)
:test #'equalp)))
(subtest "test-weir-itrs "
(rnd:set-rnd-state 1)
(let ((wer (init-weir)))
(weir:with (wer %)
(weir:with-rnd-vert (wer v)
(% (2move-vert? v (veq:f2 2f0 2f0)))
(% (2append-edge? v (veq:f2 3f0 2f0)))))
(is (weir:get-num-edges wer) 6)
(is (weir:get-num-verts wer) 12)
(weir:with (wer %)
(weir:itr-verts (wer v)
(% (2move-vert? v (veq:f2 2f0 2f0)))))
(is (sort (weir:itr-verts (wer i :collect t) i) #'<)
'(0 1 2 3 4 5 6 7 8 9 10 11))
(is (weir:itr-verts (wer i) i) nil)
(is (sort (weir:itr-grp-verts (wer i :collect t) i) #'<)
'(0 1 2 3 5 6 7 11))
(is (weir:itr-edges (wer e :collect t) e)
'((5 11) (5 6) (3 7) (0 1) (1 3) (1 2)))
(is
(sort (weir:itr-edges (wer e :collect t)
(weir:2ledge-length wer e)) #'<)
'(1.0 1.4142135 2.236068 3.1622777 4.1231055 4.472136))
(weir:with (wer %)
(weir:with-rnd-edge (wer e)
(% (2lsplit-edge? e (veq:f2 31f0 23f0)))))
(is (weir:get-num-edges wer) 7)
(is (weir:get-num-verts wer) 13))))
(unless (finalize) (error "error in weir-with tests"))
|
ad008ee63950542b0f8afac43676e0394a4ec2290a425beb9f81631f0c986684 | jellelicht/guix | java.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2015 < >
Copyright © 2016 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages java)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix utils)
#:use-module (guix build-system gnu)
#:use-module (gnu packages)
#:use-module (gnu packages attr)
#:use-module (gnu packages autotools)
#:use-module (gnu packages base)
#:use-module (gnu packages bash)
#:use-module (gnu packages cpio)
#:use-module (gnu packages cups)
#:use-module (gnu packages compression)
#:use-module (gnu packages fontutils)
#:use-module (gnu packages gawk)
#:use-module (gnu packages gcc)
#:use-module (gnu packages gl)
#:use-module (gnu packages gnuzilla) ;nss
lcms
#:use-module (gnu packages gnome)
#:use-module (gnu packages gtk)
#:use-module (gnu packages image)
#:use-module (gnu packages linux) ;alsa
#:use-module (gnu packages wget)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages perl)
#:use-module (gnu packages mit-krb5)
#:use-module (gnu packages xml)
#:use-module (gnu packages xorg)
#:use-module (gnu packages zip)
#:use-module (gnu packages texinfo)
#:use-module ((srfi srfi-1) #:select (fold alist-delete)))
(define-public swt
(package
(name "swt")
(version "4.4.2")
(source (origin
(method url-fetch)
(uri (string-append
"-stud.fht-esslingen.de/pub/Mirrors/"
"eclipse/eclipse/downloads/drops4/R-" version
"-201502041700/swt-" version "-gtk-linux-x86.zip"))
(sha256
(base32
"0lzyqr8k2zm5s8fmnrx5kxpslxfs0i73y26fwfms483x45izzwj8"))))
(build-system gnu-build-system)
(arguments
`(#:make-flags '("-f" "make_linux.mak")
#:tests? #f ; no "check" target
#:phases
(alist-replace
'unpack
(lambda _
(and (mkdir "swt")
(zero? (system* "unzip" (assoc-ref %build-inputs "source") "-d" "swt"))
(chdir "swt")
(mkdir "src")
(zero? (system* "unzip" "src.zip" "-d" "src"))
(chdir "src")))
(alist-replace
'build
(lambda* (#:key inputs outputs #:allow-other-keys)
(let ((lib (string-append (assoc-ref outputs "out") "/lib")))
(setenv "JAVA_HOME" (assoc-ref inputs "jdk"))
Build shared libraries . Users of SWT have to set the system
;; property swt.library.path to the "lib" directory of this
;; package output.
(mkdir-p lib)
(setenv "OUTPUT_DIR" lib)
(zero? (system* "bash" "build.sh"))
;; build jar
(mkdir "build")
(for-each (lambda (file)
(format #t "Compiling ~s\n" file)
(system* "javac" "-d" "build" file))
(find-files "." "\\.java"))
(zero? (system* "jar" "cvf" "swt.jar" "-C" "build" "."))))
(alist-cons-after
'install 'install-java-files
(lambda* (#:key outputs #:allow-other-keys)
(let ((java (string-append (assoc-ref outputs "out")
"/share/java")))
(install-file "swt.jar" java)
#t))
(alist-delete 'configure %standard-phases))))))
(inputs
`(("xulrunner" ,icecat)
("gtk" ,gtk+-2)
("libxtst" ,libxtst)
("libxt" ,libxt)
("mesa" ,mesa)
("glu" ,glu)))
(native-inputs
`(("pkg-config" ,pkg-config)
("unzip" ,unzip)
("jdk" ,icedtea "jdk")))
(home-page "/")
(synopsis "Widget toolkit for Java")
(description
"SWT is a widget toolkit for Java designed to provide efficient, portable
access to the user-interface facilities of the operating systems on which it
is implemented.")
SWT code is licensed under EPL1.0
Gnome and Gtk+ bindings contain code licensed under LGPLv2.1
Cairo bindings contain code under MPL1.1
XULRunner 1.9 bindings contain code under MPL2.0
(license (list
license:epl1.0
license:mpl1.1
license:mpl2.0
license:lgpl2.1+))))
(define-public ant
(package
(name "ant")
(version "1.9.6")
(source (origin
(method url-fetch)
(uri (string-append "mirror-ant-"
version "-src.tar.gz"))
(sha256
(base32
"1396wflczyxjxl603dhxjvd559f289lha9y2f04f71c7hapjl3am"))))
(build-system gnu-build-system)
(arguments
`(#:tests? #f ; no "check" target
#:phases
(alist-cons-after
'unpack 'remove-scripts
;; Remove bat / cmd scripts for DOS as well as the antRun and runant
;; wrappers.
(lambda _
(for-each delete-file
(find-files "src/script"
"(.*\\.(bat|cmd)|runant.*|antRun.*)")))
(alist-replace
'build
(lambda _
(setenv "JAVA_HOME" (string-append (assoc-ref %build-inputs "gcj")
"/lib/jvm"))
;; Disable tests to avoid dependency on hamcrest-core, which needs
Ant to build . This is necessary in addition to disabling the
;; "check" phase, because the dependency on "test-jar" would always
;; result in the tests to be run.
(substitute* "build.xml"
(("depends=\"jars,test-jar\"") "depends=\"jars\""))
(zero? (system* "bash" "bootstrap.sh"
(string-append "-Ddist.dir="
(assoc-ref %outputs "out")))))
(alist-delete
'configure
(alist-delete 'install %standard-phases))))))
(native-inputs
`(("gcj" ,gcj)))
(home-page "")
(synopsis "Build tool for Java")
(description
"Ant is a platform-independent build tool for Java. It is similar to
make but is implemented using the Java language, requires the Java platform,
and is best suited to building Java projects. Ant uses XML to describe the
build process and its dependencies, whereas Make uses Makefile format.")
(license license:asl2.0)))
(define-public icedtea-6
(package
(name "icedtea")
(version "1.13.10")
(source (origin
(method url-fetch)
(uri (string-append
"-"
version ".tar.xz"))
(sha256
(base32
"1mq08sfyfjlfw0c1czjs47303zv4h91s1jc0nhdlra4rbbx0g2d0"))
(modules '((guix build utils)))
(snippet
'(substitute* "Makefile.in"
;; link against libgcj to avoid linker error
(("-o native-ecj")
"-lgcj -o native-ecj")
;; do not leak information about the build host
(("DISTRIBUTION_ID=\"\\$\\(DIST_ID\\)\"")
"DISTRIBUTION_ID=\"\\\"guix\\\"\"")))))
(build-system gnu-build-system)
Java Runtime Environment
"jdk" ; Java Development Kit
"doc")) ; all documentation
(arguments
`(;; There are many failing tests and many are known to fail upstream.
;;
;; * Hotspot VM tests:
;; FAILED: compiler/7082949/Test7082949.java
;; FAILED: compiler/7088020/Test7088020.java
;; FAILED: runtime/6929067/Test6929067.sh
;; FAILED: serviceability/sa/jmap-hashcode/Test8028623.java
= > Test results : passed : 161 ; failed : 4
;;
;; * langtools tests:
;; FAILED: com/sun/javadoc/testHtmlDefinitionListTag/TestHtmlDefinitionListTag.java
FAILED : tools / javac/6627362 /
FAILED : tools / javac/7003595 /
;; FAILED: tools/javac/7024568/T7024568.java
;; FAILED: tools/javap/4111861/T4111861.java
;; FAILED: tools/javap/ListTest.java
;; FAILED: tools/javap/OptionTest.java
;; FAILED: tools/javap/T4884240.java
;; FAILED: tools/javap/T4975569.java
;; --> fails because of insignificant whitespace differences
;; in output of javap
FAILED : tools / javap / T6868539.java
= > Test results : passed : 1,445 ; failed : 10
;;
;; * JDK tests:
;; Tests are incomplete because of a segfault after this test:
javax / crypto / spec / RC5ParameterSpec / RC5ParameterSpecEquals.java
;; A bug report has already been filed upstream:
;;
;;
The tests require xvfb - run , a wrapper script around , which
has not been packaged yet . Without it many tests fail , so I
;; made no attempts to make a list of failing JDK tests. At least
222 tests are failing of which at least 132 are tests .
#:tests? #f
;; The DSOs use $ORIGIN to refer to each other, but (guix build
;; gremlin) doesn't support it yet, so skip this phase.
#:validate-runpath? #f
#:modules ((guix build utils)
(guix build gnu-build-system)
(ice-9 popen)
(ice-9 rdelim))
#:configure-flags
(let* ((gcjdir (assoc-ref %build-inputs "gcj"))
(ecj (string-append gcjdir "/share/java/ecj.jar"))
(jdk (string-append gcjdir "/lib/jvm/"))
(gcj (string-append gcjdir "/bin/gcj")))
`("--enable-bootstrap"
"--enable-nss"
"--without-rhino"
"--disable-downloading"
"--disable-tests" ;they are run in the check phase instead
"--with-openjdk-src-dir=./openjdk.src"
,(string-append "--with-javac=" jdk "/bin/javac")
,(string-append "--with-ecj-jar=" ecj)
,(string-append "--with-gcj=" gcj)
,(string-append "--with-jdk-home=" jdk)
,(string-append "--with-java=" jdk "/bin/java")))
#:phases
(alist-replace
'unpack
(lambda* (#:key source inputs #:allow-other-keys)
(and (zero? (system* "tar" "xvf" source))
(begin
(chdir (string-append "icedtea6-" ,version))
(mkdir "openjdk.src")
(with-directory-excursion "openjdk.src"
(copy-file (assoc-ref inputs "openjdk6-src")
"openjdk6-src.tar.xz")
(zero? (system* "tar" "xvf" "openjdk6-src.tar.xz"))))))
(alist-cons-after
'unpack 'patch-patches
(lambda _
;; shebang in patches so that they apply cleanly
(substitute* '("patches/jtreg-jrunscript.patch"
"patches/hotspot/hs23/drop_unlicensed_test.patch")
(("#!/bin/sh") (string-append "#!" (which "sh"))))
;; fix path to alsa header in patch
(substitute* "patches/openjdk/6799141-split_out_versions.patch"
(("ALSA_INCLUDE=/usr/include/alsa/version.h")
(string-append "ALSA_INCLUDE="
(assoc-ref %build-inputs "alsa-lib")
"/include/alsa/version.h"))))
(alist-cons-after
'unpack 'patch-paths
(lambda _
;; buildtree.make generates shell scripts, so we need to replace
;; the generated shebang
(substitute* '("openjdk.src/hotspot/make/linux/makefiles/buildtree.make")
(("/bin/sh") (which "bash")))
(let ((corebin (string-append
(assoc-ref %build-inputs "coreutils") "/bin/"))
(binbin (string-append
(assoc-ref %build-inputs "binutils") "/bin/"))
(grepbin (string-append
(assoc-ref %build-inputs "grep") "/bin/")))
(substitute* '("openjdk.src/jdk/make/common/shared/Defs-linux.gmk"
"openjdk.src/corba/make/common/shared/Defs-linux.gmk")
(("UNIXCOMMAND_PATH = /bin/")
(string-append "UNIXCOMMAND_PATH = " corebin))
(("USRBIN_PATH = /usr/bin/")
(string-append "USRBIN_PATH = " corebin))
(("DEVTOOLS_PATH *= */usr/bin/")
(string-append "DEVTOOLS_PATH = " corebin))
(("COMPILER_PATH *= */usr/bin/")
(string-append "COMPILER_PATH = "
(assoc-ref %build-inputs "gcc") "/bin/"))
(("DEF_OBJCOPY *=.*objcopy")
(string-append "DEF_OBJCOPY = " (which "objcopy"))))
;; fix hard-coded utility paths
(substitute* '("openjdk.src/jdk/make/common/shared/Defs-utils.gmk"
"openjdk.src/corba/make/common/shared/Defs-utils.gmk")
(("ECHO *=.*echo")
(string-append "ECHO = " (which "echo")))
(("^GREP *=.*grep")
(string-append "GREP = " (which "grep")))
(("EGREP *=.*egrep")
(string-append "EGREP = " (which "egrep")))
(("CPIO *=.*cpio")
(string-append "CPIO = " (which "cpio")))
(("READELF *=.*readelf")
(string-append "READELF = " (which "readelf")))
(("^ *AR *=.*ar")
(string-append "AR = " (which "ar")))
(("^ *TAR *=.*tar")
(string-append "TAR = " (which "tar")))
(("AS *=.*as")
(string-append "AS = " (which "as")))
(("LD *=.*ld")
(string-append "LD = " (which "ld")))
(("STRIP *=.*strip")
(string-append "STRIP = " (which "strip")))
(("NM *=.*nm")
(string-append "NM = " (which "nm")))
(("^SH *=.*sh")
(string-append "SH = " (which "bash")))
(("^FIND *=.*find")
(string-append "FIND = " (which "find")))
(("LDD *=.*ldd")
(string-append "LDD = " (which "ldd")))
(("NAWK *=.*(n|g)awk")
(string-append "NAWK = " (which "gawk")))
(("XARGS *=.*xargs")
(string-append "XARGS = " (which "xargs")))
(("UNZIP *=.*unzip")
(string-append "UNZIP = " (which "unzip")))
(("ZIPEXE *=.*zip")
(string-append "ZIPEXE = " (which "zip")))
(("SED *=.*sed")
(string-append "SED = " (which "sed"))))
;; Some of these timestamps cause problems as they are more than
10 years ago , failing the build process .
(substitute*
"openjdk.src/jdk/src/share/classes/java/util/CurrencyData.properties"
(("AZ=AZM;2005-12-31-20-00-00;AZN") "AZ=AZN")
(("MZ=MZM;2006-06-30-22-00-00;MZN") "MZ=MZN")
(("RO=ROL;2005-06-30-21-00-00;RON") "RO=RON")
(("TR=TRL;2004-12-31-22-00-00;TRY") "TR=TRY"))))
(alist-cons-before
'configure 'set-additional-paths
(lambda* (#:key inputs #:allow-other-keys)
(let* ((gcjdir (assoc-ref %build-inputs "gcj"))
(gcjlib (string-append gcjdir "/lib"))
;; Get target-specific include directory so that
;; libgcj-config.h is found when compiling hotspot.
(gcjinclude (let* ((port (open-input-pipe "gcj -print-file-name=include"))
(str (read-line port)))
(close-pipe port)
str)))
(setenv "CPATH"
(string-append gcjinclude ":"
(assoc-ref %build-inputs "libxrender")
"/include/X11/extensions" ":"
(assoc-ref %build-inputs "libxtst")
"/include/X11/extensions" ":"
(assoc-ref %build-inputs "libxinerama")
"/include/X11/extensions" ":"
(or (getenv "CPATH") "")))
(setenv "ALT_CUPS_HEADERS_PATH"
(string-append (assoc-ref %build-inputs "cups")
"/include"))
(setenv "ALT_FREETYPE_HEADERS_PATH"
(string-append (assoc-ref %build-inputs "freetype")
"/include"))
(setenv "ALT_FREETYPE_LIB_PATH"
(string-append (assoc-ref %build-inputs "freetype")
"/lib"))))
(alist-cons-before
'check 'fix-test-framework
(lambda _
;; Fix PATH in test environment
(substitute* "src/jtreg/com/sun/javatest/regtest/Main.java"
(("PATH=/bin:/usr/bin")
(string-append "PATH=" (getenv "PATH"))))
(substitute* "src/jtreg/com/sun/javatest/util/SysEnv.java"
(("/usr/bin/env") (which "env")))
#t)
(alist-cons-before
'check 'fix-hotspot-tests
(lambda _
(with-directory-excursion "openjdk.src/hotspot/test/"
(substitute* "jprt.config"
(("PATH=\"\\$\\{path4sdk\\}\"")
(string-append "PATH=" (getenv "PATH")))
(("make=/usr/bin/make")
(string-append "make=" (which "make"))))
(substitute* '("runtime/6626217/Test6626217.sh"
"runtime/7110720/Test7110720.sh")
(("/bin/rm") (which "rm"))
(("/bin/cp") (which "cp"))
(("/bin/mv") (which "mv"))))
#t)
(alist-cons-before
'check 'fix-jdk-tests
(lambda _
(with-directory-excursion "openjdk.src/jdk/test/"
(substitute* "com/sun/jdi/JdbReadTwiceTest.sh"
(("/bin/pwd") (which "pwd")))
(substitute* "com/sun/jdi/ShellScaffold.sh"
(("/bin/kill") (which "kill")))
(substitute* "start-Xvfb.sh"
;;(("/usr/bin/X11/Xvfb") (which "Xvfb"))
(("/usr/bin/nohup") (which "nohup")))
(substitute* "javax/security/auth/Subject/doAs/Test.sh"
(("/bin/rm") (which "rm")))
(substitute* "tools/launcher/MultipleJRE.sh"
(("echo \"#!/bin/sh\"")
(string-append "echo \"#!" (which "rm") "\""))
(("/usr/bin/zip") (which "zip")))
(substitute* "com/sun/jdi/OnThrowTest.java"
(("#!/bin/sh") (string-append "#!" (which "sh"))))
(substitute* "java/lang/management/OperatingSystemMXBean/GetSystemLoadAverage.java"
(("/usr/bin/uptime") (which "uptime")))
(substitute* "java/lang/ProcessBuilder/Basic.java"
(("/usr/bin/env") (which "env"))
(("/bin/false") (which "false"))
(("/bin/true") (which "true"))
(("/bin/cp") (which "cp"))
(("/bin/sh") (which "sh")))
(substitute* "java/lang/ProcessBuilder/FeelingLucky.java"
(("/bin/sh") (which "sh")))
(substitute* "java/lang/ProcessBuilder/Zombies.java"
(("/usr/bin/perl") (which "perl"))
(("/bin/ps") (which "ps"))
(("/bin/true") (which "true")))
(substitute* "java/lang/Runtime/exec/ConcurrentRead.java"
(("/usr/bin/tee") (which "tee")))
(substitute* "java/lang/Runtime/exec/ExecWithDir.java"
(("/bin/true") (which "true")))
(substitute* "java/lang/Runtime/exec/ExecWithInput.java"
(("/bin/cat") (which "cat")))
(substitute* "java/lang/Runtime/exec/ExitValue.java"
(("/bin/sh") (which "sh"))
(("/bin/true") (which "true"))
(("/bin/kill") (which "kill")))
(substitute* "java/lang/Runtime/exec/LotsOfDestroys.java"
(("/usr/bin/echo") (which "echo")))
(substitute* "java/lang/Runtime/exec/LotsOfOutput.java"
(("/usr/bin/cat") (which "cat")))
(substitute* "java/lang/Runtime/exec/SleepyCat.java"
(("/bin/cat") (which "cat"))
(("/bin/sleep") (which "sleep"))
(("/bin/sh") (which "sh")))
(substitute* "java/lang/Runtime/exec/StreamsSurviveDestroy.java"
(("/bin/cat") (which "cat")))
(substitute* "java/rmi/activation/CommandEnvironment/SetChildEnv.java"
(("/bin/chmod") (which "chmod")))
(substitute* "java/util/zip/ZipFile/Assortment.java"
(("/bin/sh") (which "sh"))))
#t)
(alist-replace
'check
(lambda _
The " make check- * " targets always return zero , so we need to
;; check for errors in the associated log files to determine
;; whether any tests have failed.
(use-modules (ice-9 rdelim))
(let* ((error-pattern (make-regexp "^(Error|FAILED):.*"))
(checker (lambda (port)
(let loop ()
(let ((line (read-line port)))
(cond
((eof-object? line) #t)
((regexp-exec error-pattern line) #f)
(else (loop)))))))
(run-test (lambda (test)
(system* "make" test)
(call-with-input-file
(string-append "test/" test ".log")
checker))))
(or #t ; skip tests
(and (run-test "check-hotspot")
(run-test "check-langtools")
(run-test "check-jdk")))))
(alist-replace
'install
(lambda* (#:key outputs #:allow-other-keys)
(let ((doc (string-append (assoc-ref outputs "doc")
"/share/doc/icedtea"))
(jre (assoc-ref outputs "out"))
(jdk (assoc-ref outputs "jdk")))
(copy-recursively "openjdk.build/docs" doc)
(copy-recursively "openjdk.build/j2re-image" jre)
(copy-recursively "openjdk.build/j2sdk-image" jdk)))
%standard-phases)))))))))))
(native-inputs
`(("ant" ,ant)
("alsa-lib" ,alsa-lib)
("attr" ,attr)
("autoconf" ,autoconf)
("automake" ,automake)
("coreutils" ,coreutils)
("diffutils" ,diffutils) ;for tests
("gawk" ,gawk)
("grep" ,grep)
("libtool" ,libtool)
("pkg-config" ,pkg-config)
("cups" ,cups)
("wget" ,wget)
("which" ,which)
("cpio" ,cpio)
("zip" ,zip)
("unzip" ,unzip)
("fastjar" ,fastjar)
for xsltproc
("mit-krb5" ,mit-krb5)
("nss" ,nss)
("libx11" ,libx11)
("libxt" ,libxt)
("libxtst" ,libxtst)
("libxi" ,libxi)
("libxinerama" ,libxinerama)
("libxrender" ,libxrender)
("libjpeg" ,libjpeg)
("libpng" ,libpng)
("giflib" ,giflib)
("perl" ,perl)
("procps" ,procps) ;for "free", even though I'm not sure we should use it
("openjdk6-src"
,(origin
(method url-fetch)
(uri "-6-src-b38-20_jan_2016.tar.gz")
(sha256
(base32
"1fapj9w4ahzf5nwvdgi1dsxjyh9dqbcvf9638r60h1by13wjqk5p"))))
("lcms" ,lcms)
("zlib" ,zlib)
("gtk" ,gtk+-2)
("fontconfig" ,fontconfig)
("freetype" ,freetype)
("gcj" ,gcj)))
(home-page "")
(synopsis "Java development kit")
(description
"The OpenJDK built with the IcedTea build harness.")
IcedTea is released under the + Classpath exception , which is the
same license as both GNU Classpath and OpenJDK .
(license license:gpl2+)))
;; IcedTea 7: OpenJDK 7 built with the IcedTea harness.  It inherits from
;; the icedtea-6 package, replacing the source tarball, adjusting the build
;; arguments, and swapping in the OpenJDK 7 component tarballs ("drops").
(define-public icedtea-7
  (let* ((version "2.6.4")
         ;; Return an <origin> for one of the OpenJDK component tarballs
         ;; ("drops") belonging to this IcedTea release.
         (drop (lambda (name hash)
                 (origin
                   (method url-fetch)
                   (uri (string-append
                         "/"
                         "/icedtea7/" version "/" name ".tar.bz2"))
                   (sha256 (base32 hash))))))
    (package (inherit icedtea-6)
      (version version)
      (source (origin
                (method url-fetch)
                (uri (string-append
                      "-"
                      version ".tar.xz"))
                (sha256
                 (base32
                  "0r31h8nlsrbfdkgbjbb7phwgcwglc9siznzrr40lqnm9xrgkc2nj"))
                (modules '((guix build utils)))
                (snippet
                 '(substitute* "Makefile.in"
                    ;; link against libgcj to avoid linker error
                    (("-o native-ecj")
                     "-lgcj -o native-ecj")
                    ;; do not leak information about the build host
                    (("DISTRIBUTION_ID=\"\\$\\(DIST_ID\\)\"")
                     "DISTRIBUTION_ID=\"\\\"guix\\\"\"")))))
      (arguments
       `(;; There are many test failures.  Some are known to
         ;; fail upstream, others relate to not having an X
         ;; server running at test time, yet others are a
         ;; complete mystery to me.
         ;; hotspot:   passed: 241; failed: 45; error: 2
         ;; langtools: passed: 1,934; failed: 26
         ;; jdk:       unknown
         #:tests? #f
         ;; Apparently, the C locale is needed for some of the tests.
         #:locale "C"
         ,@(substitute-keyword-arguments (package-arguments icedtea-6)
             ((#:modules modules)
              `((ice-9 match)
                (srfi srfi-26)
                ,@modules))
             ((#:configure-flags flags)
              ;; TODO: package pcsc and sctp, and add to inputs
              `(append '("--disable-system-pcsc"
                         "--disable-system-sctp")
                       ,flags))
             ((#:phases phases)
              `(modify-phases ,phases
                 (replace 'unpack
                   (lambda* (#:key source inputs #:allow-other-keys)
                     (let ((target (string-append "icedtea-" ,version))
                           ;; Unpack the named input tarball into DIR; DIR
                           ;; defaults to the input name with its trailing
                           ;; "-drop" (5 characters) removed.
                           (unpack (lambda* (name #:optional dir)
                                     (let ((dir (or dir
                                                    (string-drop-right name 5))))
                                       (mkdir dir)
                                       (zero? (system* "tar" "xvf"
                                                       (assoc-ref inputs name)
                                                       "-C" dir
                                                       "--strip-components=1"))))))
                       (mkdir target)
                       (and
                        (zero? (system* "tar" "xvf" source
                                        "-C" target "--strip-components=1"))
                        (chdir target)
                        (unpack "openjdk-src" "openjdk.src")
                        ;; Unpack every "*-drop" input inside the OpenJDK
                        ;; source tree.
                        (with-directory-excursion "openjdk.src"
                          (for-each unpack
                                    (filter (cut string-suffix? "-drop" <>)
                                            (map (match-lambda
                                                   ((name . _) name))
                                                 inputs))))
                        #t))))
                 (replace 'set-additional-paths
                   (lambda* (#:key inputs #:allow-other-keys)
                     (let (;; Get target-specific include directory so that
                           ;; libgcj-config.h is found when compiling hotspot.
                           (gcjinclude (let* ((port (open-input-pipe "gcj -print-file-name=include"))
                                              (str (read-line port)))
                                         (close-pipe port)
                                         str)))
                       (substitute* "openjdk.src/jdk/make/common/shared/Sanity.gmk"
                         (("ALSA_INCLUDE=/usr/include/alsa/version.h")
                          (string-append "ALSA_INCLUDE="
                                         (assoc-ref inputs "alsa-lib")
                                         "/include/alsa/version.h")))
                       (setenv "CC" "gcc")
                       (setenv "CPATH"
                               (string-append gcjinclude ":"
                                              (assoc-ref inputs "libxrender")
                                              "/include/X11/extensions" ":"
                                              (assoc-ref inputs "libxtst")
                                              "/include/X11/extensions" ":"
                                              (assoc-ref inputs "libxinerama")
                                              "/include/X11/extensions" ":"
                                              (or (getenv "CPATH") "")))
                       (setenv "ALT_OBJCOPY" (which "objcopy"))
                       (setenv "ALT_CUPS_HEADERS_PATH"
                               (string-append (assoc-ref inputs "cups")
                                              "/include"))
                       (setenv "ALT_FREETYPE_HEADERS_PATH"
                               (string-append (assoc-ref inputs "freetype")
                                              "/include"))
                       (setenv "ALT_FREETYPE_LIB_PATH"
                               (string-append (assoc-ref inputs "freetype")
                                              "/lib")))))
                 ;; Point the X11 extension include path at our inputs
                 ;; instead of /usr/include.
                 (add-after 'unpack 'fix-x11-extension-include-path
                   (lambda* (#:key inputs #:allow-other-keys)
                     (substitute* "openjdk.src/jdk/make/sun/awt/mawt.gmk"
                       (((string-append "\\$\\(firstword \\$\\(wildcard "
                                        "\\$\\(OPENWIN_HOME\\)"
                                        "/include/X11/extensions\\).*$"))
                        (string-append (assoc-ref inputs "libxrender")
                                       "/include/X11/extensions"
                                       " -I" (assoc-ref inputs "libxtst")
                                       "/include/X11/extensions"
                                       " -I" (assoc-ref inputs "libxinerama")
                                       "/include/X11/extensions"))
                       (("\\$\\(wildcard /usr/include/X11/extensions\\)\\)") ""))
                     #t))
                 (replace 'fix-test-framework
                   (lambda _
                     ;; Fix PATH in test environment
                     (substitute* "test/jtreg/com/sun/javatest/regtest/Main.java"
                       (("PATH=/bin:/usr/bin")
                        (string-append "PATH=" (getenv "PATH"))))
                     (substitute* "test/jtreg/com/sun/javatest/util/SysEnv.java"
                       (("/usr/bin/env") (which "env")))
                     (substitute* "openjdk.src/hotspot/test/test_env.sh"
                       (("/bin/rm") (which "rm"))
                       (("/bin/cp") (which "cp"))
                       (("/bin/mv") (which "mv")))
                     #t))
                 ;; Remove the 'patch-patches phase inherited from icedtea-6.
                 (delete 'patch-patches))))))
      (native-inputs
       `(("openjdk-src"
          ,(drop "openjdk"
                 "1qjjf71nq80ac2d08hbaa8589d31vk313z3rkirnwq5df8cyf0mv"))
         ("corba-drop"
          ,(drop "corba"
                 "025warxhjal3nr7w1xyd16k0f32fwkchifpaslzyidsga3hgmfr6"))
         ("jaxp-drop"
          ,(drop "jaxp"
                 "0qiz6swb78w9c0mf88pf0gflgm5rp9k0l6fv6sdl7dki691b0z09"))
         ("jaxws-drop"
          ,(drop "jaxws"
                 "18fz4gl4fdlcmqvh1mlpd9h0gj0qizpfa7njkax97aysmsm08xns"))
         ("jdk-drop"
          ,(drop "jdk"
                 "0qsx5d9pgwlz9vbpapw4jwpajqc6rwk1150cjb33i4n3z709jccx"))
         ("langtools-drop"
          ,(drop "langtools"
                 "1k6plx96smf86z303gb30hncssa8f40qdryzsdv349iwqwacxc7r"))
         ("hotspot-drop"
          ,(drop "hotspot"
                 "0r9ffzyf5vxs8wg732szqcil0ksc8lcxzihdv3viz7d67dy42irp"))
         ;; Everything else from icedtea-6, minus its OpenJDK 6 sources.
         ,@(fold alist-delete (package-native-inputs icedtea-6)
                 '("openjdk6-src")))))))
;; Alias: `icedtea' names the icedtea-7 package defined above.
(define-public icedtea icedtea-7)
| null | https://raw.githubusercontent.com/jellelicht/guix/83cfc9414fca3ab57c949e18c1ceb375a179b59c/gnu/packages/java.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
nss
alsa
no "check" target
property swt.library.path to the "lib" directory of this
package output.
build jar
no "check" target
Remove bat / cmd scripts for DOS as well as the antRun and runant
wrappers.
Disable tests to avoid dependency on hamcrest-core, which needs
"check" phase, because the dependency on "test-jar" would always
result in the tests to be run.
link against libgcj to avoid linker error
do not leak information about the build host
Java Development Kit
all documentation
There are many failing tests and many are known to fail upstream.
* Hotspot VM tests:
FAILED: compiler/7082949/Test7082949.java
FAILED: compiler/7088020/Test7088020.java
FAILED: runtime/6929067/Test6929067.sh
FAILED: serviceability/sa/jmap-hashcode/Test8028623.java
failed : 4
* langtools tests:
FAILED: com/sun/javadoc/testHtmlDefinitionListTag/TestHtmlDefinitionListTag.java
FAILED: tools/javac/7024568/T7024568.java
FAILED: tools/javap/4111861/T4111861.java
FAILED: tools/javap/ListTest.java
FAILED: tools/javap/OptionTest.java
FAILED: tools/javap/T4884240.java
FAILED: tools/javap/T4975569.java
--> fails because of insignificant whitespace differences
in output of javap
failed : 10
* JDK tests:
Tests are incomplete because of a segfault after this test:
A bug report has already been filed upstream:
made no attempts to make a list of failing JDK tests. At least
The DSOs use $ORIGIN to refer to each other, but (guix build
gremlin) doesn't support it yet, so skip this phase.
they are run in the check phase instead
shebang in patches so that they apply cleanly
fix path to alsa header in patch
buildtree.make generates shell scripts, so we need to replace
the generated shebang
fix hard-coded utility paths
Some of these timestamps cause problems as they are more than
Get target-specific include directory so that
libgcj-config.h is found when compiling hotspot.
Fix PATH in test environment
(("/usr/bin/X11/Xvfb") (which "Xvfb"))
check for errors in the associated log files to determine
whether any tests have failed.
skip tests
for tests
for "free", even though I'm not sure we should use it
link against libgcj to avoid linker error
do not leak information about the build host
There are many test failures. Some are known to
fail upstream, others relate to not having an X
server running at test time, yet others are a
complete mystery to me.
failed : 45 ; error : 2
failed : 26
jdk: unknown
Apparently, the C locale is needed for some of the tests.
Get target-specific include directory so that
libgcj-config.h is found when compiling hotspot.
Fix PATH in test environment | Copyright © 2015 < >
Copyright © 2016 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages java)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix utils)
#:use-module (guix build-system gnu)
#:use-module (gnu packages)
#:use-module (gnu packages attr)
#:use-module (gnu packages autotools)
#:use-module (gnu packages base)
#:use-module (gnu packages bash)
#:use-module (gnu packages cpio)
#:use-module (gnu packages cups)
#:use-module (gnu packages compression)
#:use-module (gnu packages fontutils)
#:use-module (gnu packages gawk)
#:use-module (gnu packages gcc)
#:use-module (gnu packages gl)
lcms
#:use-module (gnu packages gnome)
#:use-module (gnu packages gtk)
#:use-module (gnu packages image)
#:use-module (gnu packages wget)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages perl)
#:use-module (gnu packages mit-krb5)
#:use-module (gnu packages xml)
#:use-module (gnu packages xorg)
#:use-module (gnu packages zip)
#:use-module (gnu packages texinfo)
#:use-module ((srfi srfi-1) #:select (fold alist-delete)))
(define-public swt
(package
(name "swt")
(version "4.4.2")
(source (origin
(method url-fetch)
(uri (string-append
"-stud.fht-esslingen.de/pub/Mirrors/"
"eclipse/eclipse/downloads/drops4/R-" version
"-201502041700/swt-" version "-gtk-linux-x86.zip"))
(sha256
(base32
"0lzyqr8k2zm5s8fmnrx5kxpslxfs0i73y26fwfms483x45izzwj8"))))
(build-system gnu-build-system)
(arguments
`(#:make-flags '("-f" "make_linux.mak")
#:phases
(alist-replace
'unpack
(lambda _
(and (mkdir "swt")
(zero? (system* "unzip" (assoc-ref %build-inputs "source") "-d" "swt"))
(chdir "swt")
(mkdir "src")
(zero? (system* "unzip" "src.zip" "-d" "src"))
(chdir "src")))
(alist-replace
'build
(lambda* (#:key inputs outputs #:allow-other-keys)
(let ((lib (string-append (assoc-ref outputs "out") "/lib")))
(setenv "JAVA_HOME" (assoc-ref inputs "jdk"))
Build shared libraries . Users of SWT have to set the system
(mkdir-p lib)
(setenv "OUTPUT_DIR" lib)
(zero? (system* "bash" "build.sh"))
(mkdir "build")
(for-each (lambda (file)
(format #t "Compiling ~s\n" file)
(system* "javac" "-d" "build" file))
(find-files "." "\\.java"))
(zero? (system* "jar" "cvf" "swt.jar" "-C" "build" "."))))
(alist-cons-after
'install 'install-java-files
(lambda* (#:key outputs #:allow-other-keys)
(let ((java (string-append (assoc-ref outputs "out")
"/share/java")))
(install-file "swt.jar" java)
#t))
(alist-delete 'configure %standard-phases))))))
(inputs
`(("xulrunner" ,icecat)
("gtk" ,gtk+-2)
("libxtst" ,libxtst)
("libxt" ,libxt)
("mesa" ,mesa)
("glu" ,glu)))
(native-inputs
`(("pkg-config" ,pkg-config)
("unzip" ,unzip)
("jdk" ,icedtea "jdk")))
(home-page "/")
(synopsis "Widget toolkit for Java")
(description
"SWT is a widget toolkit for Java designed to provide efficient, portable
access to the user-interface facilities of the operating systems on which it
is implemented.")
SWT code is licensed under EPL1.0
Gnome and Gtk+ bindings contain code licensed under LGPLv2.1
Cairo bindings contain code under MPL1.1
XULRunner 1.9 bindings contain code under MPL2.0
(license (list
license:epl1.0
license:mpl1.1
license:mpl2.0
license:lgpl2.1+))))
(define-public ant
(package
(name "ant")
(version "1.9.6")
(source (origin
(method url-fetch)
(uri (string-append "mirror-ant-"
version "-src.tar.gz"))
(sha256
(base32
"1396wflczyxjxl603dhxjvd559f289lha9y2f04f71c7hapjl3am"))))
(build-system gnu-build-system)
(arguments
#:phases
(alist-cons-after
'unpack 'remove-scripts
(lambda _
(for-each delete-file
(find-files "src/script"
"(.*\\.(bat|cmd)|runant.*|antRun.*)")))
(alist-replace
'build
(lambda _
(setenv "JAVA_HOME" (string-append (assoc-ref %build-inputs "gcj")
"/lib/jvm"))
Ant to build . This is necessary in addition to disabling the
(substitute* "build.xml"
(("depends=\"jars,test-jar\"") "depends=\"jars\""))
(zero? (system* "bash" "bootstrap.sh"
(string-append "-Ddist.dir="
(assoc-ref %outputs "out")))))
(alist-delete
'configure
(alist-delete 'install %standard-phases))))))
(native-inputs
`(("gcj" ,gcj)))
(home-page "")
(synopsis "Build tool for Java")
(description
"Ant is a platform-independent build tool for Java. It is similar to
make but is implemented using the Java language, requires the Java platform,
and is best suited to building Java projects. Ant uses XML to describe the
build process and its dependencies, whereas Make uses Makefile format.")
(license license:asl2.0)))
(define-public icedtea-6
(package
(name "icedtea")
(version "1.13.10")
(source (origin
(method url-fetch)
(uri (string-append
"-"
version ".tar.xz"))
(sha256
(base32
"1mq08sfyfjlfw0c1czjs47303zv4h91s1jc0nhdlra4rbbx0g2d0"))
(modules '((guix build utils)))
(snippet
'(substitute* "Makefile.in"
(("-o native-ecj")
"-lgcj -o native-ecj")
(("DISTRIBUTION_ID=\"\\$\\(DIST_ID\\)\"")
"DISTRIBUTION_ID=\"\\\"guix\\\"\"")))))
(build-system gnu-build-system)
Java Runtime Environment
(arguments
FAILED : tools / javac/6627362 /
FAILED : tools / javac/7003595 /
FAILED : tools / javap / T6868539.java
javax / crypto / spec / RC5ParameterSpec / RC5ParameterSpecEquals.java
The tests require xvfb - run , a wrapper script around , which
has not been packaged yet . Without it many tests fail , so I
222 tests are failing of which at least 132 are tests .
#:tests? #f
#:validate-runpath? #f
#:modules ((guix build utils)
(guix build gnu-build-system)
(ice-9 popen)
(ice-9 rdelim))
#:configure-flags
(let* ((gcjdir (assoc-ref %build-inputs "gcj"))
(ecj (string-append gcjdir "/share/java/ecj.jar"))
(jdk (string-append gcjdir "/lib/jvm/"))
(gcj (string-append gcjdir "/bin/gcj")))
`("--enable-bootstrap"
"--enable-nss"
"--without-rhino"
"--disable-downloading"
"--with-openjdk-src-dir=./openjdk.src"
,(string-append "--with-javac=" jdk "/bin/javac")
,(string-append "--with-ecj-jar=" ecj)
,(string-append "--with-gcj=" gcj)
,(string-append "--with-jdk-home=" jdk)
,(string-append "--with-java=" jdk "/bin/java")))
#:phases
(alist-replace
'unpack
(lambda* (#:key source inputs #:allow-other-keys)
(and (zero? (system* "tar" "xvf" source))
(begin
(chdir (string-append "icedtea6-" ,version))
(mkdir "openjdk.src")
(with-directory-excursion "openjdk.src"
(copy-file (assoc-ref inputs "openjdk6-src")
"openjdk6-src.tar.xz")
(zero? (system* "tar" "xvf" "openjdk6-src.tar.xz"))))))
(alist-cons-after
'unpack 'patch-patches
(lambda _
(substitute* '("patches/jtreg-jrunscript.patch"
"patches/hotspot/hs23/drop_unlicensed_test.patch")
(("#!/bin/sh") (string-append "#!" (which "sh"))))
(substitute* "patches/openjdk/6799141-split_out_versions.patch"
(("ALSA_INCLUDE=/usr/include/alsa/version.h")
(string-append "ALSA_INCLUDE="
(assoc-ref %build-inputs "alsa-lib")
"/include/alsa/version.h"))))
(alist-cons-after
'unpack 'patch-paths
(lambda _
(substitute* '("openjdk.src/hotspot/make/linux/makefiles/buildtree.make")
(("/bin/sh") (which "bash")))
(let ((corebin (string-append
(assoc-ref %build-inputs "coreutils") "/bin/"))
(binbin (string-append
(assoc-ref %build-inputs "binutils") "/bin/"))
(grepbin (string-append
(assoc-ref %build-inputs "grep") "/bin/")))
(substitute* '("openjdk.src/jdk/make/common/shared/Defs-linux.gmk"
"openjdk.src/corba/make/common/shared/Defs-linux.gmk")
(("UNIXCOMMAND_PATH = /bin/")
(string-append "UNIXCOMMAND_PATH = " corebin))
(("USRBIN_PATH = /usr/bin/")
(string-append "USRBIN_PATH = " corebin))
(("DEVTOOLS_PATH *= */usr/bin/")
(string-append "DEVTOOLS_PATH = " corebin))
(("COMPILER_PATH *= */usr/bin/")
(string-append "COMPILER_PATH = "
(assoc-ref %build-inputs "gcc") "/bin/"))
(("DEF_OBJCOPY *=.*objcopy")
(string-append "DEF_OBJCOPY = " (which "objcopy"))))
(substitute* '("openjdk.src/jdk/make/common/shared/Defs-utils.gmk"
"openjdk.src/corba/make/common/shared/Defs-utils.gmk")
(("ECHO *=.*echo")
(string-append "ECHO = " (which "echo")))
(("^GREP *=.*grep")
(string-append "GREP = " (which "grep")))
(("EGREP *=.*egrep")
(string-append "EGREP = " (which "egrep")))
(("CPIO *=.*cpio")
(string-append "CPIO = " (which "cpio")))
(("READELF *=.*readelf")
(string-append "READELF = " (which "readelf")))
(("^ *AR *=.*ar")
(string-append "AR = " (which "ar")))
(("^ *TAR *=.*tar")
(string-append "TAR = " (which "tar")))
(("AS *=.*as")
(string-append "AS = " (which "as")))
(("LD *=.*ld")
(string-append "LD = " (which "ld")))
(("STRIP *=.*strip")
(string-append "STRIP = " (which "strip")))
(("NM *=.*nm")
(string-append "NM = " (which "nm")))
(("^SH *=.*sh")
(string-append "SH = " (which "bash")))
(("^FIND *=.*find")
(string-append "FIND = " (which "find")))
(("LDD *=.*ldd")
(string-append "LDD = " (which "ldd")))
(("NAWK *=.*(n|g)awk")
(string-append "NAWK = " (which "gawk")))
(("XARGS *=.*xargs")
(string-append "XARGS = " (which "xargs")))
(("UNZIP *=.*unzip")
(string-append "UNZIP = " (which "unzip")))
(("ZIPEXE *=.*zip")
(string-append "ZIPEXE = " (which "zip")))
(("SED *=.*sed")
(string-append "SED = " (which "sed"))))
10 years ago , failing the build process .
(substitute*
"openjdk.src/jdk/src/share/classes/java/util/CurrencyData.properties"
(("AZ=AZM;2005-12-31-20-00-00;AZN") "AZ=AZN")
(("MZ=MZM;2006-06-30-22-00-00;MZN") "MZ=MZN")
(("RO=ROL;2005-06-30-21-00-00;RON") "RO=RON")
(("TR=TRL;2004-12-31-22-00-00;TRY") "TR=TRY"))))
(alist-cons-before
'configure 'set-additional-paths
(lambda* (#:key inputs #:allow-other-keys)
(let* ((gcjdir (assoc-ref %build-inputs "gcj"))
(gcjlib (string-append gcjdir "/lib"))
(gcjinclude (let* ((port (open-input-pipe "gcj -print-file-name=include"))
(str (read-line port)))
(close-pipe port)
str)))
(setenv "CPATH"
(string-append gcjinclude ":"
(assoc-ref %build-inputs "libxrender")
"/include/X11/extensions" ":"
(assoc-ref %build-inputs "libxtst")
"/include/X11/extensions" ":"
(assoc-ref %build-inputs "libxinerama")
"/include/X11/extensions" ":"
(or (getenv "CPATH") "")))
(setenv "ALT_CUPS_HEADERS_PATH"
(string-append (assoc-ref %build-inputs "cups")
"/include"))
(setenv "ALT_FREETYPE_HEADERS_PATH"
(string-append (assoc-ref %build-inputs "freetype")
"/include"))
(setenv "ALT_FREETYPE_LIB_PATH"
(string-append (assoc-ref %build-inputs "freetype")
"/lib"))))
(alist-cons-before
'check 'fix-test-framework
(lambda _
(substitute* "src/jtreg/com/sun/javatest/regtest/Main.java"
(("PATH=/bin:/usr/bin")
(string-append "PATH=" (getenv "PATH"))))
(substitute* "src/jtreg/com/sun/javatest/util/SysEnv.java"
(("/usr/bin/env") (which "env")))
#t)
(alist-cons-before
'check 'fix-hotspot-tests
(lambda _
(with-directory-excursion "openjdk.src/hotspot/test/"
(substitute* "jprt.config"
(("PATH=\"\\$\\{path4sdk\\}\"")
(string-append "PATH=" (getenv "PATH")))
(("make=/usr/bin/make")
(string-append "make=" (which "make"))))
(substitute* '("runtime/6626217/Test6626217.sh"
"runtime/7110720/Test7110720.sh")
(("/bin/rm") (which "rm"))
(("/bin/cp") (which "cp"))
(("/bin/mv") (which "mv"))))
#t)
(alist-cons-before
'check 'fix-jdk-tests
(lambda _
(with-directory-excursion "openjdk.src/jdk/test/"
(substitute* "com/sun/jdi/JdbReadTwiceTest.sh"
(("/bin/pwd") (which "pwd")))
(substitute* "com/sun/jdi/ShellScaffold.sh"
(("/bin/kill") (which "kill")))
(substitute* "start-Xvfb.sh"
(("/usr/bin/nohup") (which "nohup")))
(substitute* "javax/security/auth/Subject/doAs/Test.sh"
(("/bin/rm") (which "rm")))
(substitute* "tools/launcher/MultipleJRE.sh"
(("echo \"#!/bin/sh\"")
(string-append "echo \"#!" (which "rm") "\""))
(("/usr/bin/zip") (which "zip")))
(substitute* "com/sun/jdi/OnThrowTest.java"
(("#!/bin/sh") (string-append "#!" (which "sh"))))
(substitute* "java/lang/management/OperatingSystemMXBean/GetSystemLoadAverage.java"
(("/usr/bin/uptime") (which "uptime")))
(substitute* "java/lang/ProcessBuilder/Basic.java"
(("/usr/bin/env") (which "env"))
(("/bin/false") (which "false"))
(("/bin/true") (which "true"))
(("/bin/cp") (which "cp"))
(("/bin/sh") (which "sh")))
(substitute* "java/lang/ProcessBuilder/FeelingLucky.java"
(("/bin/sh") (which "sh")))
(substitute* "java/lang/ProcessBuilder/Zombies.java"
(("/usr/bin/perl") (which "perl"))
(("/bin/ps") (which "ps"))
(("/bin/true") (which "true")))
(substitute* "java/lang/Runtime/exec/ConcurrentRead.java"
(("/usr/bin/tee") (which "tee")))
(substitute* "java/lang/Runtime/exec/ExecWithDir.java"
(("/bin/true") (which "true")))
(substitute* "java/lang/Runtime/exec/ExecWithInput.java"
(("/bin/cat") (which "cat")))
(substitute* "java/lang/Runtime/exec/ExitValue.java"
(("/bin/sh") (which "sh"))
(("/bin/true") (which "true"))
(("/bin/kill") (which "kill")))
(substitute* "java/lang/Runtime/exec/LotsOfDestroys.java"
(("/usr/bin/echo") (which "echo")))
(substitute* "java/lang/Runtime/exec/LotsOfOutput.java"
(("/usr/bin/cat") (which "cat")))
(substitute* "java/lang/Runtime/exec/SleepyCat.java"
(("/bin/cat") (which "cat"))
(("/bin/sleep") (which "sleep"))
(("/bin/sh") (which "sh")))
(substitute* "java/lang/Runtime/exec/StreamsSurviveDestroy.java"
(("/bin/cat") (which "cat")))
(substitute* "java/rmi/activation/CommandEnvironment/SetChildEnv.java"
(("/bin/chmod") (which "chmod")))
(substitute* "java/util/zip/ZipFile/Assortment.java"
(("/bin/sh") (which "sh"))))
#t)
(alist-replace
'check
(lambda _
The " make check- * " targets always return zero , so we need to
(use-modules (ice-9 rdelim))
(let* ((error-pattern (make-regexp "^(Error|FAILED):.*"))
(checker (lambda (port)
(let loop ()
(let ((line (read-line port)))
(cond
((eof-object? line) #t)
((regexp-exec error-pattern line) #f)
(else (loop)))))))
(run-test (lambda (test)
(system* "make" test)
(call-with-input-file
(string-append "test/" test ".log")
checker))))
(and (run-test "check-hotspot")
(run-test "check-langtools")
(run-test "check-jdk")))))
(alist-replace
'install
(lambda* (#:key outputs #:allow-other-keys)
(let ((doc (string-append (assoc-ref outputs "doc")
"/share/doc/icedtea"))
(jre (assoc-ref outputs "out"))
(jdk (assoc-ref outputs "jdk")))
(copy-recursively "openjdk.build/docs" doc)
(copy-recursively "openjdk.build/j2re-image" jre)
(copy-recursively "openjdk.build/j2sdk-image" jdk)))
%standard-phases)))))))))))
(native-inputs
`(("ant" ,ant)
("alsa-lib" ,alsa-lib)
("attr" ,attr)
("autoconf" ,autoconf)
("automake" ,automake)
("coreutils" ,coreutils)
("gawk" ,gawk)
("grep" ,grep)
("libtool" ,libtool)
("pkg-config" ,pkg-config)
("cups" ,cups)
("wget" ,wget)
("which" ,which)
("cpio" ,cpio)
("zip" ,zip)
("unzip" ,unzip)
("fastjar" ,fastjar)
for xsltproc
("mit-krb5" ,mit-krb5)
("nss" ,nss)
("libx11" ,libx11)
("libxt" ,libxt)
("libxtst" ,libxtst)
("libxi" ,libxi)
("libxinerama" ,libxinerama)
("libxrender" ,libxrender)
("libjpeg" ,libjpeg)
("libpng" ,libpng)
("giflib" ,giflib)
("perl" ,perl)
("openjdk6-src"
,(origin
(method url-fetch)
(uri "-6-src-b38-20_jan_2016.tar.gz")
(sha256
(base32
"1fapj9w4ahzf5nwvdgi1dsxjyh9dqbcvf9638r60h1by13wjqk5p"))))
("lcms" ,lcms)
("zlib" ,zlib)
("gtk" ,gtk+-2)
("fontconfig" ,fontconfig)
("freetype" ,freetype)
("gcj" ,gcj)))
(home-page "")
(synopsis "Java development kit")
(description
"The OpenJDK built with the IcedTea build harness.")
IcedTea is released under the + Classpath exception , which is the
same license as both GNU Classpath and OpenJDK .
(license license:gpl2+)))
(define-public icedtea-7
(let* ((version "2.6.4")
(drop (lambda (name hash)
(origin
(method url-fetch)
(uri (string-append
"/"
"/icedtea7/" version "/" name ".tar.bz2"))
(sha256 (base32 hash))))))
(package (inherit icedtea-6)
(version version)
(source (origin
(method url-fetch)
(uri (string-append
"-"
version ".tar.xz"))
(sha256
(base32
"0r31h8nlsrbfdkgbjbb7phwgcwglc9siznzrr40lqnm9xrgkc2nj"))
(modules '((guix build utils)))
(snippet
'(substitute* "Makefile.in"
(("-o native-ecj")
"-lgcj -o native-ecj")
(("DISTRIBUTION_ID=\"\\$\\(DIST_ID\\)\"")
"DISTRIBUTION_ID=\"\\\"guix\\\"\"")))))
(arguments
#:tests? #f
#:locale "C"
,@(substitute-keyword-arguments (package-arguments icedtea-6)
((#:modules modules)
`((ice-9 match)
(srfi srfi-26)
,@modules))
((#:configure-flags flags)
TODO : package pcsc and sctp , and add to inputs
`(append '("--disable-system-pcsc"
"--disable-system-sctp")
,flags))
((#:phases phases)
`(modify-phases ,phases
(replace 'unpack
(lambda* (#:key source inputs #:allow-other-keys)
(let ((target (string-append "icedtea-" ,version))
(unpack (lambda* (name #:optional dir)
(let ((dir (or dir
(string-drop-right name 5))))
(mkdir dir)
(zero? (system* "tar" "xvf"
(assoc-ref inputs name)
"-C" dir
"--strip-components=1"))))))
(mkdir target)
(and
(zero? (system* "tar" "xvf" source
"-C" target "--strip-components=1"))
(chdir target)
(unpack "openjdk-src" "openjdk.src")
(with-directory-excursion "openjdk.src"
(for-each unpack
(filter (cut string-suffix? "-drop" <>)
(map (match-lambda
((name . _) name))
inputs))))
#t))))
(replace
'set-additional-paths
(lambda* (#:key inputs #:allow-other-keys)
(gcjinclude (let* ((port (open-input-pipe "gcj -print-file-name=include"))
(str (read-line port)))
(close-pipe port)
str)))
(substitute* "openjdk.src/jdk/make/common/shared/Sanity.gmk"
(("ALSA_INCLUDE=/usr/include/alsa/version.h")
(string-append "ALSA_INCLUDE="
(assoc-ref inputs "alsa-lib")
"/include/alsa/version.h")))
(setenv "CC" "gcc")
(setenv "CPATH"
(string-append gcjinclude ":"
(assoc-ref inputs "libxrender")
"/include/X11/extensions" ":"
(assoc-ref inputs "libxtst")
"/include/X11/extensions" ":"
(assoc-ref inputs "libxinerama")
"/include/X11/extensions" ":"
(or (getenv "CPATH") "")))
(setenv "ALT_OBJCOPY" (which "objcopy"))
(setenv "ALT_CUPS_HEADERS_PATH"
(string-append (assoc-ref inputs "cups")
"/include"))
(setenv "ALT_FREETYPE_HEADERS_PATH"
(string-append (assoc-ref inputs "freetype")
"/include"))
(setenv "ALT_FREETYPE_LIB_PATH"
(string-append (assoc-ref inputs "freetype")
"/lib")))))
(add-after
'unpack 'fix-x11-extension-include-path
(lambda* (#:key inputs #:allow-other-keys)
(substitute* "openjdk.src/jdk/make/sun/awt/mawt.gmk"
(((string-append "\\$\\(firstword \\$\\(wildcard "
"\\$\\(OPENWIN_HOME\\)"
"/include/X11/extensions\\).*$"))
(string-append (assoc-ref inputs "libxrender")
"/include/X11/extensions"
" -I" (assoc-ref inputs "libxtst")
"/include/X11/extensions"
" -I" (assoc-ref inputs "libxinerama")
"/include/X11/extensions"))
(("\\$\\(wildcard /usr/include/X11/extensions\\)\\)") ""))
#t))
(replace
'fix-test-framework
(lambda _
(substitute* "test/jtreg/com/sun/javatest/regtest/Main.java"
(("PATH=/bin:/usr/bin")
(string-append "PATH=" (getenv "PATH"))))
(substitute* "test/jtreg/com/sun/javatest/util/SysEnv.java"
(("/usr/bin/env") (which "env")))
(substitute* "openjdk.src/hotspot/test/test_env.sh"
(("/bin/rm") (which "rm"))
(("/bin/cp") (which "cp"))
(("/bin/mv") (which "mv")))
#t))
(delete 'patch-patches))))))
(native-inputs
`(("openjdk-src"
,(drop "openjdk"
"1qjjf71nq80ac2d08hbaa8589d31vk313z3rkirnwq5df8cyf0mv"))
("corba-drop"
,(drop "corba"
"025warxhjal3nr7w1xyd16k0f32fwkchifpaslzyidsga3hgmfr6"))
("jaxp-drop"
,(drop "jaxp"
"0qiz6swb78w9c0mf88pf0gflgm5rp9k0l6fv6sdl7dki691b0z09"))
("jaxws-drop"
,(drop "jaxws"
"18fz4gl4fdlcmqvh1mlpd9h0gj0qizpfa7njkax97aysmsm08xns"))
("jdk-drop"
,(drop "jdk"
"0qsx5d9pgwlz9vbpapw4jwpajqc6rwk1150cjb33i4n3z709jccx"))
("langtools-drop"
,(drop "langtools"
"1k6plx96smf86z303gb30hncssa8f40qdryzsdv349iwqwacxc7r"))
("hotspot-drop"
,(drop "hotspot"
"0r9ffzyf5vxs8wg732szqcil0ksc8lcxzihdv3viz7d67dy42irp"))
,@(fold alist-delete (package-native-inputs icedtea-6)
'("openjdk6-src")))))))
(define-public icedtea icedtea-7)
|
e7204a5b681973bde319381cd960b005687fd266a730c277d5b4c878f8f9a01f | cabol/oauth2_mnesia_backend | oauth2_mnesia_backend.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2015 , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%%%-------------------------------------------------------------------
@author < >
( C ) 2015 , < > , All Rights Reserved .
@doc backend for kivra oauth2 .
@see < a href=" / kivra / oauth2">OAuth2</a >
%%%-------------------------------------------------------------------
-module(oauth2_mnesia_backend).
-behavior(oauth2_backend).
%% API
-export([start/1,
start/2,
stop/0,
get_user/1,
add_user/2,
delete_user/1,
get_client/1,
add_client/2,
add_client/3,
delete_client/1]).
%% Behavior API
-export([authenticate_user/2,
authenticate_client/2,
get_client_identity/2,
associate_access_code/3,
associate_refresh_token/3,
associate_access_token/3,
resolve_access_code/2,
resolve_refresh_token/2,
resolve_access_token/2,
revoke_access_code/2,
revoke_access_token/2,
revoke_refresh_token/2,
get_redirection_uri/2,
verify_redirection_uri/3,
verify_client_scope/3,
verify_resowner_scope/3,
verify_scope/3]).
%%%===================================================================
Types and Macros
%%%===================================================================
%% Tables Config
-type auth_tab() :: access_token | refresh_token | user | client.
-type copies() :: ram_copies | disc_copies | disc_only_copies.
-type tab_config() :: [{auth_tab(), copies()}].
%% Tables
-define(ACCESS_TOKEN_TABLE, access_token).
-define(REFRESH_TOKEN_TABLE, refresh_token).
-define(USER_TABLE, user).
-define(CLIENT_TABLE, client).
%% Table list
-define(TABLES, [?ACCESS_TOKEN_TABLE,
?REFRESH_TOKEN_TABLE,
?USER_TABLE,
?CLIENT_TABLE]).
%% Timeout for mnesia:wait_for_tables
-define(WAIT_FOR_TABLES, 5000).
%% Access token spec
-record(access_token, {token :: binary(),
context = [] :: proplists:proplist()}).
%% Refresh token spec
-record(refresh_token, {token :: binary(),
context = [] :: proplists:proplist()}).
%% User spec
-record(user, {username :: binary(),
password :: binary()}).
-type user() :: #user{}.
%% Client spec
-record(client, {client_id :: binary(),
client_secret :: binary(),
redirect_uri :: binary()}).
-type client() :: #client{}.
%%%===================================================================
%%% API
%%%===================================================================
-spec start([node()]) -> ok.
start(Nodes) ->
start(undefined, Nodes).
-spec start(tab_config(), [node()]) -> ok.
start(TablesConfig, Nodes) ->
mnesia:stop(),
mnesia:create_schema(Nodes),
mnesia:start(),
dynamic_db_init(TablesConfig, Nodes),
ok.
-spec stop() -> ok.
stop() ->
ok.
-spec get_user(binary()) -> user() | notfound.
get_user(Username) ->
case get(?USER_TABLE, Username) of
{ok, User} -> User;
_ -> notfound
end.
-spec add_user(binary(), binary()) -> ok.
add_user(Username, Password) ->
put(?USER_TABLE, Username, #user{username = Username, password = Password}).
-spec delete_user(binary()) -> ok.
delete_user(Username) ->
delete(?USER_TABLE, Username).
-spec get_client(binary()) -> client() | notfound.
get_client(ClientId) ->
case get(?CLIENT_TABLE, ClientId) of
{ok, Client} -> Client;
_ -> notfound
end.
-spec add_client(binary(), binary(), binary()) -> ok.
add_client(Id, Secret, RedirectUri) ->
put(?CLIENT_TABLE, Id, #client{client_id = Id,
client_secret = Secret,
redirect_uri = RedirectUri}).
-spec add_client(binary(), binary()) -> ok.
add_client(Id, Secret) ->
add_client(Id, Secret, undefined).
-spec delete_client(binary()) -> ok.
delete_client(Id) ->
delete(?CLIENT_TABLE, Id).
%%%===================================================================
%%% OAuth2 backend functions
%%%===================================================================
%% @hidden
authenticate_user({Username, Password}, _) ->
case get(?USER_TABLE, Username) of
{ok, #user{password = Password}} ->
{ok, {<<"user">>, Username}};
{ok, #user{password = _WrongPassword}} ->
{error, badpass};
Error = {error, notfound} ->
Error
end.
%% @hidden
authenticate_client({ClientId, ClientSecret}, _) ->
case get(?CLIENT_TABLE, ClientId) of
{ok, #client{client_secret = ClientSecret}} ->
{ok, {<<"client">>, ClientId}};
{ok, #client{client_secret = _WrongSecret}} ->
{error, badsecret};
_ ->
{error, notfound}
end.
%% @hidden
get_client_identity(ClientId, AppCtx) ->
case get(?CLIENT_TABLE, ClientId) of
{ok, Client} ->
{ok, {AppCtx, Client}};
_ ->
{error, notfound}
end.
%% @hidden
associate_access_code(AccessCode, GrantCtx, AppCtx) ->
associate_access_token(AccessCode, GrantCtx, AppCtx).
%% @hidden
associate_access_token(AccessToken, GrantCtx, AppCtx) ->
AccessTokenRec = #access_token{token = AccessToken, context = GrantCtx},
put(?ACCESS_TOKEN_TABLE, AccessToken, AccessTokenRec),
{ok, AppCtx}.
%% @hidden
associate_refresh_token(RefreshToken, GrantCtx, AppCtx) ->
RefreshTokenRec = #access_token{token = RefreshToken, context = GrantCtx},
put(?REFRESH_TOKEN_TABLE, RefreshToken, RefreshTokenRec),
{ok, AppCtx}.
%% @hidden
resolve_access_code(AccessCode, AppCtx) ->
resolve_access_token(AccessCode, AppCtx).
%% @hidden
resolve_refresh_token(RefreshToken, AppCtx) ->
resolve_access_token(RefreshToken, AppCtx).
%% @hidden
resolve_access_token(AccessToken, AppCtx) ->
%% The case trickery is just here to make sure that
%% we don't propagate errors that cannot be legally
%% returned from this function according to the spec.
case get(?ACCESS_TOKEN_TABLE, AccessToken) of
{ok, #access_token{context = Value}} ->
{ok, {AppCtx, Value}};
Error = {error, notfound} ->
Error
end.
%% @hidden
revoke_access_code(AccessCode, AppCtx) ->
revoke_access_token(AccessCode, AppCtx).
%% @hidden
revoke_access_token(AccessToken, AppCtx) ->
delete(?ACCESS_TOKEN_TABLE, AccessToken),
{ok, AppCtx}.
%% @hidden
revoke_refresh_token(_RefreshToken, AppCtx) ->
{ok, AppCtx}.
%% @hidden
get_redirection_uri(ClientId, AppCtx) ->
case get(?CLIENT_TABLE, ClientId) of
{ok, #client{redirect_uri = RedirectUri}} ->
{ok, {AppCtx, RedirectUri}};
Error = {error, notfound} ->
Error
end.
%% @hidden
verify_redirection_uri(ClientId, ClientUri, AppCtx) ->
case get(?CLIENT_TABLE, ClientId) of
{ok, #client{redirect_uri = RedirUri}} when ClientUri =:= RedirUri ->
{ok, AppCtx};
_Error ->
{error, mismatch}
end.
%% @hidden
verify_client_scope(_Client, Scope, AppCtx) ->
{ok, {AppCtx, Scope}}.
%% @hidden
verify_resowner_scope(_ResOwner, Scope, AppCtx) ->
{ok, {AppCtx, Scope}}.
%% @hidden
verify_scope(Scope, Scope, AppCtx) ->
{ok, {AppCtx, Scope}};
verify_scope(_, _, _) ->
{error, invalid_scope}.
%%%===================================================================
Internal functions
%%%===================================================================
@private
dynamic_db_init(undefined, []) ->
DefaultConfig = lists:zip(
?TABLES, [ram_copies, ram_copies, disc_copies, disc_copies]),
dynamic_db_init(DefaultConfig, []);
dynamic_db_init(TablesConfig, []) ->
create_tables(TablesConfig);
dynamic_db_init(TablesConfig, Nodes) ->
add_extra_nodes(TablesConfig, Nodes).
@private
add_extra_nodes(TablesConfig, [Node | T]) ->
case mnesia:change_config(extra_db_nodes, [Node]) of
{ok, [Node]} ->
%% replaces local schema with remote
mnesia:change_table_copy_type(schema, node(), disc_copies),
%% add table copies
[mnesia:add_table_copy(Tab, node(), Cp) || {Tab, Cp} <- TablesConfig],
%% synchronization
Tables = mnesia:system_info(tables),
mnesia:wait_for_tables(Tables, ?WAIT_FOR_TABLES);
_ ->
add_extra_nodes(TablesConfig, T)
end.
@private
create_tables([]) ->
ok;
create_tables([{?ACCESS_TOKEN_TABLE, Copies} | T]) ->
mnesia:create_table(
?ACCESS_TOKEN_TABLE,
[{Copies, [node()]},
{attributes, record_info(fields, ?ACCESS_TOKEN_TABLE)}]),
create_tables(T);
create_tables([{?REFRESH_TOKEN_TABLE, Copies} | T]) ->
mnesia:create_table(
?REFRESH_TOKEN_TABLE,
[{Copies, [node()]},
{attributes, record_info(fields, ?REFRESH_TOKEN_TABLE)}]),
create_tables(T);
create_tables([{?USER_TABLE, Copies} | T]) ->
mnesia:create_table(
?USER_TABLE,
[{Copies, [node()]},
{attributes, record_info(fields, ?USER_TABLE)}]),
create_tables(T);
create_tables([{?CLIENT_TABLE, Copies} | T]) ->
mnesia:create_table(
?CLIENT_TABLE,
[{Copies, [node()]},
{attributes, record_info(fields, ?CLIENT_TABLE)}]),
create_tables(T).
@private
get(Table, Key) ->
case mnesia:dirty_read(Table, Key) of
[] -> {error, notfound};
[Value] -> {ok, Value}
end.
@private
put(Table, _Key, Value) ->
mnesia:dirty_write(Table, Value).
@private
delete(Table, Key) ->
mnesia:dirty_delete(Table, Key).
| null | https://raw.githubusercontent.com/cabol/oauth2_mnesia_backend/9127536f43a6f076ef5cd5458eb052c685ef0e89/src/oauth2_mnesia_backend.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
API
Behavior API
===================================================================
===================================================================
Tables Config
Tables
Table list
Timeout for mnesia:wait_for_tables
Access token spec
Refresh token spec
User spec
Client spec
===================================================================
API
===================================================================
===================================================================
OAuth2 backend functions
===================================================================
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
The case trickery is just here to make sure that
we don't propagate errors that cannot be legally
returned from this function according to the spec.
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
===================================================================
===================================================================
replaces local schema with remote
add table copies
synchronization | Copyright ( c ) 2015 , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
@author < >
( C ) 2015 , < > , All Rights Reserved .
@doc backend for kivra oauth2 .
@see < a href=" / kivra / oauth2">OAuth2</a >
-module(oauth2_mnesia_backend).
-behavior(oauth2_backend).
-export([start/1,
start/2,
stop/0,
get_user/1,
add_user/2,
delete_user/1,
get_client/1,
add_client/2,
add_client/3,
delete_client/1]).
-export([authenticate_user/2,
authenticate_client/2,
get_client_identity/2,
associate_access_code/3,
associate_refresh_token/3,
associate_access_token/3,
resolve_access_code/2,
resolve_refresh_token/2,
resolve_access_token/2,
revoke_access_code/2,
revoke_access_token/2,
revoke_refresh_token/2,
get_redirection_uri/2,
verify_redirection_uri/3,
verify_client_scope/3,
verify_resowner_scope/3,
verify_scope/3]).
Types and Macros
-type auth_tab() :: access_token | refresh_token | user | client.
-type copies() :: ram_copies | disc_copies | disc_only_copies.
-type tab_config() :: [{auth_tab(), copies()}].
-define(ACCESS_TOKEN_TABLE, access_token).
-define(REFRESH_TOKEN_TABLE, refresh_token).
-define(USER_TABLE, user).
-define(CLIENT_TABLE, client).
-define(TABLES, [?ACCESS_TOKEN_TABLE,
?REFRESH_TOKEN_TABLE,
?USER_TABLE,
?CLIENT_TABLE]).
-define(WAIT_FOR_TABLES, 5000).
-record(access_token, {token :: binary(),
context = [] :: proplists:proplist()}).
-record(refresh_token, {token :: binary(),
context = [] :: proplists:proplist()}).
-record(user, {username :: binary(),
password :: binary()}).
-type user() :: #user{}.
-record(client, {client_id :: binary(),
client_secret :: binary(),
redirect_uri :: binary()}).
-type client() :: #client{}.
-spec start([node()]) -> ok.
start(Nodes) ->
start(undefined, Nodes).
-spec start(tab_config(), [node()]) -> ok.
start(TablesConfig, Nodes) ->
mnesia:stop(),
mnesia:create_schema(Nodes),
mnesia:start(),
dynamic_db_init(TablesConfig, Nodes),
ok.
-spec stop() -> ok.
stop() ->
ok.
-spec get_user(binary()) -> user() | notfound.
get_user(Username) ->
case get(?USER_TABLE, Username) of
{ok, User} -> User;
_ -> notfound
end.
-spec add_user(binary(), binary()) -> ok.
add_user(Username, Password) ->
put(?USER_TABLE, Username, #user{username = Username, password = Password}).
-spec delete_user(binary()) -> ok.
delete_user(Username) ->
delete(?USER_TABLE, Username).
-spec get_client(binary()) -> client() | notfound.
get_client(ClientId) ->
case get(?CLIENT_TABLE, ClientId) of
{ok, Client} -> Client;
_ -> notfound
end.
-spec add_client(binary(), binary(), binary()) -> ok.
add_client(Id, Secret, RedirectUri) ->
put(?CLIENT_TABLE, Id, #client{client_id = Id,
client_secret = Secret,
redirect_uri = RedirectUri}).
-spec add_client(binary(), binary()) -> ok.
add_client(Id, Secret) ->
add_client(Id, Secret, undefined).
-spec delete_client(binary()) -> ok.
delete_client(Id) ->
delete(?CLIENT_TABLE, Id).
authenticate_user({Username, Password}, _) ->
case get(?USER_TABLE, Username) of
{ok, #user{password = Password}} ->
{ok, {<<"user">>, Username}};
{ok, #user{password = _WrongPassword}} ->
{error, badpass};
Error = {error, notfound} ->
Error
end.
authenticate_client({ClientId, ClientSecret}, _) ->
case get(?CLIENT_TABLE, ClientId) of
{ok, #client{client_secret = ClientSecret}} ->
{ok, {<<"client">>, ClientId}};
{ok, #client{client_secret = _WrongSecret}} ->
{error, badsecret};
_ ->
{error, notfound}
end.
get_client_identity(ClientId, AppCtx) ->
case get(?CLIENT_TABLE, ClientId) of
{ok, Client} ->
{ok, {AppCtx, Client}};
_ ->
{error, notfound}
end.
associate_access_code(AccessCode, GrantCtx, AppCtx) ->
associate_access_token(AccessCode, GrantCtx, AppCtx).
associate_access_token(AccessToken, GrantCtx, AppCtx) ->
AccessTokenRec = #access_token{token = AccessToken, context = GrantCtx},
put(?ACCESS_TOKEN_TABLE, AccessToken, AccessTokenRec),
{ok, AppCtx}.
associate_refresh_token(RefreshToken, GrantCtx, AppCtx) ->
RefreshTokenRec = #access_token{token = RefreshToken, context = GrantCtx},
put(?REFRESH_TOKEN_TABLE, RefreshToken, RefreshTokenRec),
{ok, AppCtx}.
resolve_access_code(AccessCode, AppCtx) ->
resolve_access_token(AccessCode, AppCtx).
resolve_refresh_token(RefreshToken, AppCtx) ->
resolve_access_token(RefreshToken, AppCtx).
resolve_access_token(AccessToken, AppCtx) ->
case get(?ACCESS_TOKEN_TABLE, AccessToken) of
{ok, #access_token{context = Value}} ->
{ok, {AppCtx, Value}};
Error = {error, notfound} ->
Error
end.
revoke_access_code(AccessCode, AppCtx) ->
revoke_access_token(AccessCode, AppCtx).
revoke_access_token(AccessToken, AppCtx) ->
delete(?ACCESS_TOKEN_TABLE, AccessToken),
{ok, AppCtx}.
revoke_refresh_token(_RefreshToken, AppCtx) ->
{ok, AppCtx}.
get_redirection_uri(ClientId, AppCtx) ->
case get(?CLIENT_TABLE, ClientId) of
{ok, #client{redirect_uri = RedirectUri}} ->
{ok, {AppCtx, RedirectUri}};
Error = {error, notfound} ->
Error
end.
verify_redirection_uri(ClientId, ClientUri, AppCtx) ->
case get(?CLIENT_TABLE, ClientId) of
{ok, #client{redirect_uri = RedirUri}} when ClientUri =:= RedirUri ->
{ok, AppCtx};
_Error ->
{error, mismatch}
end.
verify_client_scope(_Client, Scope, AppCtx) ->
{ok, {AppCtx, Scope}}.
verify_resowner_scope(_ResOwner, Scope, AppCtx) ->
{ok, {AppCtx, Scope}}.
verify_scope(Scope, Scope, AppCtx) ->
{ok, {AppCtx, Scope}};
verify_scope(_, _, _) ->
{error, invalid_scope}.
Internal functions
@private
dynamic_db_init(undefined, []) ->
DefaultConfig = lists:zip(
?TABLES, [ram_copies, ram_copies, disc_copies, disc_copies]),
dynamic_db_init(DefaultConfig, []);
dynamic_db_init(TablesConfig, []) ->
create_tables(TablesConfig);
dynamic_db_init(TablesConfig, Nodes) ->
add_extra_nodes(TablesConfig, Nodes).
@private
add_extra_nodes(TablesConfig, [Node | T]) ->
case mnesia:change_config(extra_db_nodes, [Node]) of
{ok, [Node]} ->
mnesia:change_table_copy_type(schema, node(), disc_copies),
[mnesia:add_table_copy(Tab, node(), Cp) || {Tab, Cp} <- TablesConfig],
Tables = mnesia:system_info(tables),
mnesia:wait_for_tables(Tables, ?WAIT_FOR_TABLES);
_ ->
add_extra_nodes(TablesConfig, T)
end.
@private
create_tables([]) ->
ok;
create_tables([{?ACCESS_TOKEN_TABLE, Copies} | T]) ->
mnesia:create_table(
?ACCESS_TOKEN_TABLE,
[{Copies, [node()]},
{attributes, record_info(fields, ?ACCESS_TOKEN_TABLE)}]),
create_tables(T);
create_tables([{?REFRESH_TOKEN_TABLE, Copies} | T]) ->
mnesia:create_table(
?REFRESH_TOKEN_TABLE,
[{Copies, [node()]},
{attributes, record_info(fields, ?REFRESH_TOKEN_TABLE)}]),
create_tables(T);
create_tables([{?USER_TABLE, Copies} | T]) ->
mnesia:create_table(
?USER_TABLE,
[{Copies, [node()]},
{attributes, record_info(fields, ?USER_TABLE)}]),
create_tables(T);
create_tables([{?CLIENT_TABLE, Copies} | T]) ->
mnesia:create_table(
?CLIENT_TABLE,
[{Copies, [node()]},
{attributes, record_info(fields, ?CLIENT_TABLE)}]),
create_tables(T).
@private
get(Table, Key) ->
case mnesia:dirty_read(Table, Key) of
[] -> {error, notfound};
[Value] -> {ok, Value}
end.
@private
put(Table, _Key, Value) ->
mnesia:dirty_write(Table, Value).
@private
delete(Table, Key) ->
mnesia:dirty_delete(Table, Key).
|
e9cf2cd0fc5496410c02709b7b91ed1cd8226181834b1c01365d3ccd2c9195f2 | JeffreyBenjaminBrown/hode | RunLeaf.hs | # LANGUAGE ScopedTypeVariables #
# LANGUAGE ViewPatterns #
# LANGUAGE LambdaCase #
module Hode.Qseq.RunLeaf where
import Data.Map (Map)
import qualified Data.Map as M
import Data.Set (Set)
import qualified Data.Set as S
import Hode.Qseq.Types
import Hode.Util.Misc
-- | = Running atomic queries
runVarTest :: Possible e -> sp -> Subst e -> VarTest e sp
-> Either String Bool
runVarTest p sp s t = (varTestFunction t) sp p s
runFind :: forall e sp.
sp -> Subst e -> Find e sp -> Either String (CondElts e)
runFind sp s (Find find deps) = do
(found :: Set e) <- prefixLeft "runFind:" $ find sp s
let used = M.restrictKeys s deps :: Subst e
Right $ M.fromSet (const $ S.singleton used) found
runTestOnElt :: forall e sp.
sp -> Subst e -> Test e sp -> e
-> Either String (Bool, Subst e)
runTestOnElt sp s (Test test deps) e = do
(passes :: Bool) <- prefixLeft "runTestOnElt:" $ test sp s e
let used = M.restrictKeys s deps :: Subst e
Right (passes, used)
runTest :: forall e sp. Ord e
=> sp -> Subst e -> Test e sp -> CondElts e
-> Either String (CondElts e)
runTest sp s0 q ce = prefixLeft "runTest:" $ do
(passed :: Map e (Bool, Subst e)) <-
(<$>) (M.filter fst)
$ ifLefts_map
$ M.mapWithKey (\k _ -> runTestOnElt sp s0 q k) ce
let f :: e -> (Bool, Subst e) -> Set (Subst e)
f k (_,s) = let ss = (M.!) ce k :: Set (Subst e)
in S.map (M.union s) ss
Right $ M.mapWithKey f passed
| null | https://raw.githubusercontent.com/JeffreyBenjaminBrown/hode/79a54a6796fa01570cde6903b398675c42954e62/hode/Hode/Qseq/RunLeaf.hs | haskell | | = Running atomic queries | # LANGUAGE ScopedTypeVariables #
# LANGUAGE ViewPatterns #
# LANGUAGE LambdaCase #
module Hode.Qseq.RunLeaf where
import Data.Map (Map)
import qualified Data.Map as M
import Data.Set (Set)
import qualified Data.Set as S
import Hode.Qseq.Types
import Hode.Util.Misc
runVarTest :: Possible e -> sp -> Subst e -> VarTest e sp
-> Either String Bool
runVarTest p sp s t = (varTestFunction t) sp p s
runFind :: forall e sp.
sp -> Subst e -> Find e sp -> Either String (CondElts e)
runFind sp s (Find find deps) = do
(found :: Set e) <- prefixLeft "runFind:" $ find sp s
let used = M.restrictKeys s deps :: Subst e
Right $ M.fromSet (const $ S.singleton used) found
runTestOnElt :: forall e sp.
sp -> Subst e -> Test e sp -> e
-> Either String (Bool, Subst e)
runTestOnElt sp s (Test test deps) e = do
(passes :: Bool) <- prefixLeft "runTestOnElt:" $ test sp s e
let used = M.restrictKeys s deps :: Subst e
Right (passes, used)
runTest :: forall e sp. Ord e
=> sp -> Subst e -> Test e sp -> CondElts e
-> Either String (CondElts e)
runTest sp s0 q ce = prefixLeft "runTest:" $ do
(passed :: Map e (Bool, Subst e)) <-
(<$>) (M.filter fst)
$ ifLefts_map
$ M.mapWithKey (\k _ -> runTestOnElt sp s0 q k) ce
let f :: e -> (Bool, Subst e) -> Set (Subst e)
f k (_,s) = let ss = (M.!) ce k :: Set (Subst e)
in S.map (M.union s) ss
Right $ M.mapWithKey f passed
|
0627de17f30b88c8bb4a2edb7bc214dfd4cb9193018cd01280aaa3c42de74671 | kmi/irs | load.lisp | Copyright © 2008 The Open University
(in-package #:ocml)
(def-ontology lhdl-tests
"Definitions for the LHDL test suite."
:type :goal
:allowed-editors ("john")
:author "dave"
:files ("test")
:includes (lhdl-goals))
| null | https://raw.githubusercontent.com/kmi/irs/e1b8d696f61c6b6878c0e92d993ed549fee6e7dd/apps/lhdl/ontologies/lhdl-tests/load.lisp | lisp | Copyright © 2008 The Open University
(in-package #:ocml)
(def-ontology lhdl-tests
"Definitions for the LHDL test suite."
:type :goal
:allowed-editors ("john")
:author "dave"
:files ("test")
:includes (lhdl-goals))
|
|
15fdcb02e9e90dbbc72b679fd5e0d75c3f470d9d078b2d39b290ce0c470d5a0e | schemedoc/implementation-metadata | qscheme.scm | (title "QScheme")
(tagline "fast interpreter with GTK bindings")
(homepage-url "-e.html")
(repology "qscheme")
(person "Daniel Crettol")
(features
r5rs)
| null | https://raw.githubusercontent.com/schemedoc/implementation-metadata/6280d9c4c73833dc5bd1c9bef9b45be6ea5beb68/schemes/qscheme.scm | scheme | (title "QScheme")
(tagline "fast interpreter with GTK bindings")
(homepage-url "-e.html")
(repology "qscheme")
(person "Daniel Crettol")
(features
r5rs)
|
|
afefe71059868bd66441a4957d5c86c737148640dc573a20fbd2875d9a5a53a2 | zotonic/zotonic | action_wires_form_reset.erl | @author < >
2010 - 2023
%% @doc Reset all input fields in a form.
Copyright 2010 - 2023
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(action_wires_form_reset).
-include_lib("zotonic_core/include/zotonic.hrl").
-export([render_action/4]).
render_action(_TriggerId, TargetId, Args, Context) ->
CssSelector = z_render:css_selector(proplists:get_value(id, Args, TargetId), Args),
{iolist_to_binary([<<"$(">>, z_render:quote_css_selector(CssSelector), <<").resetForm();">>]), Context}.
| null | https://raw.githubusercontent.com/zotonic/zotonic/f74d8ae093fc7d37601c55981c349d59c23f7d1e/apps/zotonic_mod_wires/src/actions/action_wires_form_reset.erl | erlang | @doc Reset all input fields in a form.
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | @author < >
2010 - 2023
Copyright 2010 - 2023
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(action_wires_form_reset).
-include_lib("zotonic_core/include/zotonic.hrl").
-export([render_action/4]).
render_action(_TriggerId, TargetId, Args, Context) ->
CssSelector = z_render:css_selector(proplists:get_value(id, Args, TargetId), Args),
{iolist_to_binary([<<"$(">>, z_render:quote_css_selector(CssSelector), <<").resetForm();">>]), Context}.
|
376081cd30b66ab88e8574608b56f6011c6eba041d59101d89363db42bf3b55f | logseq/deprecated-github-backend | rss.clj | (ns app.handler.rss
(:require [app.db.page :as page]
[app.db.user :as u]
[hiccup.page :as html]
[clj-time.core :as t]
[clj-time.coerce :as tc]
[clj-time.format :as tf]
[clojure.string :as string]
[app.config :as config]
[app.util :as util]
[clj-rss.core :as rss]))
(defn ->rss
[project pages]
(for [{:keys [title html permalink settings published_at]} pages]
{:title title
:description (format "<![CDATA[ %s ]]>" html)
:link (str config/website-uri "/" project "/" permalink)
:category (string/join ", " (:tags settings))
:pubDate (tc/to-date published_at)}))
(defn rss-page
[project project-id]
(let [pages (page/get-project-pages-all project-id)]
{:status 200
:body (rss/channel-xml
{:title project
:link (str config/website-uri "/" project)
:description (str "Latest posts from " project)}
(->rss project pages))
:headers {"Content-Type" "application/rss+xml; charset=utf-8"}}))
| null | https://raw.githubusercontent.com/logseq/deprecated-github-backend/7aa2f187f0f2d7b1e9c3f6a057bede939b4790b7/src/main/app/handler/rss.clj | clojure | (ns app.handler.rss
(:require [app.db.page :as page]
[app.db.user :as u]
[hiccup.page :as html]
[clj-time.core :as t]
[clj-time.coerce :as tc]
[clj-time.format :as tf]
[clojure.string :as string]
[app.config :as config]
[app.util :as util]
[clj-rss.core :as rss]))
(defn ->rss
[project pages]
(for [{:keys [title html permalink settings published_at]} pages]
{:title title
:description (format "<![CDATA[ %s ]]>" html)
:link (str config/website-uri "/" project "/" permalink)
:category (string/join ", " (:tags settings))
:pubDate (tc/to-date published_at)}))
(defn rss-page
[project project-id]
(let [pages (page/get-project-pages-all project-id)]
{:status 200
:body (rss/channel-xml
{:title project
:link (str config/website-uri "/" project)
:description (str "Latest posts from " project)}
(->rss project pages))
:headers {"Content-Type" "application/rss+xml; charset=utf-8"}}))
|
|
3cb030de21309cd7352660f71e0ed949a8734347dbe2e345815bd67a2cf9c19b | alvatar/spheres | pad.scm | ;;;============================================================================
File : " rfc1423.scm " , Time - stamp : < 2007 - 04 - 05 00:52:53 feeley >
Copyright ( c ) 2006 - 2007 by , All Rights Reserved .
;;;============================================================================
Generalized message padding / unpadding from RFC 1423 ( Privacy
;; Enhancement for Internet Electronic Mail: Part III: Algorithms,
;; Modes, and Identifiers).
(define* (RFC1423-pad u8vect (multiple 8))
(if (or (<= multiple 0) (>= multiple 256))
(error "illegal padding multiple")
(let* ((len (u8vector-length u8vect))
(n (+ multiple (remainder (- len) multiple))))
(u8vector-append u8vect (make-u8vector n n)))))
(define* (RFC1423-unpad u8vect (multiple 8))
(if (or (<= multiple 0) (>= multiple 256))
(error "illegal padding multiple")
(let ((len (u8vector-length u8vect)))
(if (or (< len multiple)
(not (= 0 (modulo len multiple))))
(error "improperly padded u8vector")
(let ((n (u8vector-ref u8vect (- len 1))))
(if (or (= n 0) (> n multiple))
(error "improperly padded u8vector")
(let loop ((i n))
(if (>= i 2)
(if (not (= n (u8vector-ref u8vect (- len i))))
(error "improperly padded u8vector")
(loop (- i 1)))
(subu8vector u8vect 0 (- len n))))))))))
;;;============================================================================
| null | https://raw.githubusercontent.com/alvatar/spheres/568836f234a469ef70c69f4a2d9b56d41c3fc5bd/spheres/crypto/pad.scm | scheme | ============================================================================
============================================================================
Enhancement for Internet Electronic Mail: Part III: Algorithms,
Modes, and Identifiers).
============================================================================ |
File : " rfc1423.scm " , Time - stamp : < 2007 - 04 - 05 00:52:53 feeley >
Copyright ( c ) 2006 - 2007 by , All Rights Reserved .
Generalized message padding / unpadding from RFC 1423 ( Privacy
(define* (RFC1423-pad u8vect (multiple 8))
(if (or (<= multiple 0) (>= multiple 256))
(error "illegal padding multiple")
(let* ((len (u8vector-length u8vect))
(n (+ multiple (remainder (- len) multiple))))
(u8vector-append u8vect (make-u8vector n n)))))
(define* (RFC1423-unpad u8vect (multiple 8))
(if (or (<= multiple 0) (>= multiple 256))
(error "illegal padding multiple")
(let ((len (u8vector-length u8vect)))
(if (or (< len multiple)
(not (= 0 (modulo len multiple))))
(error "improperly padded u8vector")
(let ((n (u8vector-ref u8vect (- len 1))))
(if (or (= n 0) (> n multiple))
(error "improperly padded u8vector")
(let loop ((i n))
(if (>= i 2)
(if (not (= n (u8vector-ref u8vect (- len i))))
(error "improperly padded u8vector")
(loop (- i 1)))
(subu8vector u8vect 0 (- len n))))))))))
|
02263f385ceb3804ec68dd8c504ae9edf88092ec768d7233bf193e323515c7bb | ghc/testsuite | mc25.hs | -- Checks that using the "by" clause in a transform requires a function parameter
# OPTIONS_GHC -XMonadComprehensions -XTransformListComp #
module ShouldFail where
import Data.List(take)
z = [x | x <- [1..10], then group by x using take ]
| null | https://raw.githubusercontent.com/ghc/testsuite/998a816ae89c4fd573f4abd7c6abb346cf7ee9af/tests/typecheck/should_fail/mc25.hs | haskell | Checks that using the "by" clause in a transform requires a function parameter |
# OPTIONS_GHC -XMonadComprehensions -XTransformListComp #
module ShouldFail where
import Data.List(take)
z = [x | x <- [1..10], then group by x using take ]
|
d18168d4da212ab02b3dfbfc5da176c1245133fa43ed10385feef3860b4a19c6 | dwayne/eopl3 | interpreter.test.rkt | #lang racket
(require "./interpreter.rkt")
(require rackunit)
(check-equal?
(run "5")
(num-val 5))
(check-equal?
(run "x")
(num-val 10))
(check-equal?
(run "zero?(i)")
(bool-val #f))
(check-equal?
(run "zero?(-(i, 1))")
(bool-val #t))
(check-equal?
(run "-(55, -(x, 11))")
(num-val 56))
(check-equal?
(run "-(-(x, 3), -(v, i))")
(num-val 3))
(check-equal?
(run
#<<CODE
let x = 33
in let y = 22
in if zero?(-(x, 11)) then -(y, 2) else -(y, 4)
CODE
)
(num-val 18))
(check-equal?
(run "let x = 5 in -(x, 3)")
(num-val 2))
(check-equal?
(run
#<<CODE
let z = 5
in let x = 3
in let y = -(x, 1)
in let x = 4 in -(z, -(x, y))
CODE
)
(num-val 3))
(check-equal?
(run
#<<CODE
let x = 7
in let y = 2
in let y = let x = -(x, 1)
in -(x, y)
in -(-(x, 8), y)
CODE
)
(num-val -5))
(check-equal?
(run
#<<CODE
let x = 30
in let x = -(x,1)
y = -(x,2)
in -(x,y)
CODE
)
(num-val 1))
(check-equal?
(run
#<<CODE
let x = 30
in let* x = -(x,1)
y = -(x,2)
in -(x,y)
CODE
)
(num-val 2))
(check-equal?
(run "minus(-(minus(5), 9))")
(num-val 14))
(check-equal?
(run "add(6, 2)")
(num-val 8))
(check-equal?
(run "mul(6, 2)")
(num-val 12))
(check-equal?
(run "div(6, 2)")
(num-val 3))
(check-equal?
(run "div(6, 4)")
(num-val 1))
(check-exn
#rx"division by 0 is undefined"
(lambda () (run "div(6, 0)")))
(check-equal?
(run "equal?(1, 2)")
(bool-val #f))
(check-equal?
(run "greater?(1, 2)")
(bool-val #f))
(check-equal?
(run "less?(1, 2)")
(bool-val #t))
(check-equal?
(run "emptylist")
(list-val '()))
(check-equal?
(run "cons(5, emptylist)")
(list-val (list (num-val 5))))
(check-equal?
(run "car(cons(x, emptylist))")
(num-val 10))
(check-equal?
(run "cdr(cons(x, emptylist))")
(list-val '()))
(check-equal?
(run "if null?(emptylist) then 0 else 1")
(num-val 0))
(check-equal?
(run
#<<CODE
let x = 4
in cons(x,
cons(cons(-(x, 1),
emptylist),
emptylist))
CODE
)
( 4 ( 3 ) )
(list-val (list (num-val 4)
(list-val (list (num-val 3))))))
(check-exn
#rx"Not a list"
(lambda () (run "car(1)")))
(check-exn
#rx"List is empty"
(lambda () (run "car(emptylist)")))
(check-equal?
(run "list()")
(list-val '()))
(check-equal?
(run "list(1)")
(list-val (list (num-val 1))))
(check-equal?
(run
#<<CODE
let x = 4
in list(x, -(x, 1), -(x, 3))
CODE
)
; (4 3 1)
(list-val (list (num-val 4)
(num-val 3)
(num-val 1))))
(check-exn
#rx"No condition is satisfied"
(lambda () (run "cond end")))
(check-exn
#rx"No condition is satisfied"
(lambda () (run "cond zero?(1) ==> 1 zero?(2) ==> 2 end")))
(check-exn
#rx"Not a boolean"
(lambda () (run "cond 1 ==> 1 end")))
(check-equal?
(run
#<<CODE
cond
equal?(x, i) ==> 1
greater?(x, i) ==> 2
less?(x, i) ==> 3
end
CODE
)
(num-val 2))
(check-equal?
(run
#<<CODE
let u = 7
in unpack x y = cons(u, cons(3, emptylist))
in -(x, y)
CODE
)
(num-val 4))
(check-exn
#rx"The number of variables and values don't match"
(lambda ()
(run
#<<CODE
let u = 7
in unpack x y z = cons(u, cons(3, emptylist))
in -(x, y)
CODE
)))
(check-exn
#rx"Not a list"
(lambda ()
(run
#<<CODE
let u = 7
in unpack x y = i
in -(x, y)
CODE
)))
(check-equal?
(run
#<<CODE
letproc f (x) = -(x, 11)
in (f (f 77))
CODE
)
(num-val 55))
; This is not possible anymore since we can't define unnamed procedures:
;
;(check-equal?
; (run
; #<<CODE
;(proc (f) (f (f 77))
proc ( x ) -(x , 11 ) )
;CODE
; )
; (num-val 55))
(check-equal?
(run
#<<CODE
let x = 200
in letproc f (z) = -(z, x)
in let x = 100
in letproc g (z) = -(z, x)
in -((f 1), (g 1))
CODE
)
(num-val -100))
| null | https://raw.githubusercontent.com/dwayne/eopl3/9d5fdb2a8dafac3bc48852d49cda8b83e7a825cf/solutions/03-ch3/interpreters/racket/PROC-3.19/interpreter.test.rkt | racket | (4 3 1)
This is not possible anymore since we can't define unnamed procedures:
(check-equal?
(run
#<<CODE
(proc (f) (f (f 77))
CODE
)
(num-val 55)) | #lang racket
(require "./interpreter.rkt")
(require rackunit)
(check-equal?
(run "5")
(num-val 5))
(check-equal?
(run "x")
(num-val 10))
(check-equal?
(run "zero?(i)")
(bool-val #f))
(check-equal?
(run "zero?(-(i, 1))")
(bool-val #t))
(check-equal?
(run "-(55, -(x, 11))")
(num-val 56))
(check-equal?
(run "-(-(x, 3), -(v, i))")
(num-val 3))
(check-equal?
(run
#<<CODE
let x = 33
in let y = 22
in if zero?(-(x, 11)) then -(y, 2) else -(y, 4)
CODE
)
(num-val 18))
(check-equal?
(run "let x = 5 in -(x, 3)")
(num-val 2))
(check-equal?
(run
#<<CODE
let z = 5
in let x = 3
in let y = -(x, 1)
in let x = 4 in -(z, -(x, y))
CODE
)
(num-val 3))
(check-equal?
(run
#<<CODE
let x = 7
in let y = 2
in let y = let x = -(x, 1)
in -(x, y)
in -(-(x, 8), y)
CODE
)
(num-val -5))
(check-equal?
(run
#<<CODE
let x = 30
in let x = -(x,1)
y = -(x,2)
in -(x,y)
CODE
)
(num-val 1))
(check-equal?
(run
#<<CODE
let x = 30
in let* x = -(x,1)
y = -(x,2)
in -(x,y)
CODE
)
(num-val 2))
(check-equal?
(run "minus(-(minus(5), 9))")
(num-val 14))
(check-equal?
(run "add(6, 2)")
(num-val 8))
(check-equal?
(run "mul(6, 2)")
(num-val 12))
(check-equal?
(run "div(6, 2)")
(num-val 3))
(check-equal?
(run "div(6, 4)")
(num-val 1))
(check-exn
#rx"division by 0 is undefined"
(lambda () (run "div(6, 0)")))
(check-equal?
(run "equal?(1, 2)")
(bool-val #f))
(check-equal?
(run "greater?(1, 2)")
(bool-val #f))
(check-equal?
(run "less?(1, 2)")
(bool-val #t))
(check-equal?
(run "emptylist")
(list-val '()))
(check-equal?
(run "cons(5, emptylist)")
(list-val (list (num-val 5))))
(check-equal?
(run "car(cons(x, emptylist))")
(num-val 10))
(check-equal?
(run "cdr(cons(x, emptylist))")
(list-val '()))
(check-equal?
(run "if null?(emptylist) then 0 else 1")
(num-val 0))
(check-equal?
(run
#<<CODE
let x = 4
in cons(x,
cons(cons(-(x, 1),
emptylist),
emptylist))
CODE
)
( 4 ( 3 ) )
(list-val (list (num-val 4)
(list-val (list (num-val 3))))))
(check-exn
#rx"Not a list"
(lambda () (run "car(1)")))
(check-exn
#rx"List is empty"
(lambda () (run "car(emptylist)")))
(check-equal?
(run "list()")
(list-val '()))
(check-equal?
(run "list(1)")
(list-val (list (num-val 1))))
(check-equal?
(run
#<<CODE
let x = 4
in list(x, -(x, 1), -(x, 3))
CODE
)
(list-val (list (num-val 4)
(num-val 3)
(num-val 1))))
(check-exn
#rx"No condition is satisfied"
(lambda () (run "cond end")))
(check-exn
#rx"No condition is satisfied"
(lambda () (run "cond zero?(1) ==> 1 zero?(2) ==> 2 end")))
(check-exn
#rx"Not a boolean"
(lambda () (run "cond 1 ==> 1 end")))
(check-equal?
(run
#<<CODE
cond
equal?(x, i) ==> 1
greater?(x, i) ==> 2
less?(x, i) ==> 3
end
CODE
)
(num-val 2))
(check-equal?
(run
#<<CODE
let u = 7
in unpack x y = cons(u, cons(3, emptylist))
in -(x, y)
CODE
)
(num-val 4))
(check-exn
#rx"The number of variables and values don't match"
(lambda ()
(run
#<<CODE
let u = 7
in unpack x y z = cons(u, cons(3, emptylist))
in -(x, y)
CODE
)))
(check-exn
#rx"Not a list"
(lambda ()
(run
#<<CODE
let u = 7
in unpack x y = i
in -(x, y)
CODE
)))
(check-equal?
(run
#<<CODE
letproc f (x) = -(x, 11)
in (f (f 77))
CODE
)
(num-val 55))
proc ( x ) -(x , 11 ) )
(check-equal?
(run
#<<CODE
let x = 200
in letproc f (z) = -(z, x)
in let x = 100
in letproc g (z) = -(z, x)
in -((f 1), (g 1))
CODE
)
(num-val -100))
|
00ee4ccce6ce816fac8eb8a0de3e1aa3a6bbb5a07a46a66a6850db47a4ef04c1 | ndmitchell/uniplate | DataOnly.hs | # LANGUAGE CPP , FlexibleInstances , FlexibleContexts , MultiParamTypeClasses #
# OPTIONS_GHC -Wno - simplifiable - class - constraints #
module Uniplate.DataOnly where
import Data.Generics.Uniplate.DataOnly
import Data.Generics.Uniplate.Data.Instances
#define SKIP_ZIPPER
#include "CommonInc.hs"
| null | https://raw.githubusercontent.com/ndmitchell/uniplate/7d3039606d7a083f6d77f9f960c919668788de91/Uniplate/DataOnly.hs | haskell | # LANGUAGE CPP , FlexibleInstances , FlexibleContexts , MultiParamTypeClasses #
# OPTIONS_GHC -Wno - simplifiable - class - constraints #
module Uniplate.DataOnly where
import Data.Generics.Uniplate.DataOnly
import Data.Generics.Uniplate.Data.Instances
#define SKIP_ZIPPER
#include "CommonInc.hs"
|
|
022d394e981381535b82a01ece4a19da025affd8a5cb6db08e9b4f462ed2b66b | qiao/sicp-solutions | 2.11.scm | (define (make-interval a b) (cons a b))
(define (upper-bound z)
(max (car z)
(cdr z)))
(define (lower-bound z)
(min (car z)
(cdr z)))
(define (sign-pair lo up)
(cond ((and (< lo 0) (< up 0)) -1)
((and (< lo 0) (> up 0)) 0)
(else 1)))
(define (mul-interval x y)
(let ((xl (lower-bound x))
(xu (upper-bound x))
(yl (lower-bound y))
(yu (upper-bound y)))
(let ((xs (sign-pair xl xu))
(ys (sign-pair yl yu)))
(cond ((< xs 0)
(cond ((< ys 0) ; - - - -
(make-interval (* xu yu)
(* xl yl)))
((= ys 0) ; - - - +
(make-interval (* xl yu)
(* xl yl)))
(else ; - - + +
(make-interval (* xl yu)
(* xu yl)))))
((= xs 0)
(cond ((< ys 0) ; - + - -
(make-interval (* xu yl)
(* xl yl)))
((= ys 0) ; - + - +
(make-interval (min (* xl yu)
(* xu yl))
(max (* xl yl)
(* xu yu))))
(else ; - + + +
(make-interval (* xl yu)
(* xu yu)))))
(else
(cond ((< ys 0) ; + + - -
(make-interval (* xu yl)
(* xl yu)))
((= ys 0) ; + + - +
(make-interval (* xu yl)
(* xu yu)))
(else ; + + + +
(make-interval (* xl yl)
(* xu yu)))))))))
| null | https://raw.githubusercontent.com/qiao/sicp-solutions/a2fe069ba6909710a0867bdb705b2e58b2a281af/chapter2/2.11.scm | scheme | - - - -
- - - +
- - + +
- + - -
- + - +
- + + +
+ + - -
+ + - +
+ + + + | (define (make-interval a b) (cons a b))
(define (upper-bound z)
(max (car z)
(cdr z)))
(define (lower-bound z)
(min (car z)
(cdr z)))
(define (sign-pair lo up)
(cond ((and (< lo 0) (< up 0)) -1)
((and (< lo 0) (> up 0)) 0)
(else 1)))
(define (mul-interval x y)
(let ((xl (lower-bound x))
(xu (upper-bound x))
(yl (lower-bound y))
(yu (upper-bound y)))
(let ((xs (sign-pair xl xu))
(ys (sign-pair yl yu)))
(cond ((< xs 0)
(make-interval (* xu yu)
(* xl yl)))
(make-interval (* xl yu)
(* xl yl)))
(make-interval (* xl yu)
(* xu yl)))))
((= xs 0)
(make-interval (* xu yl)
(* xl yl)))
(make-interval (min (* xl yu)
(* xu yl))
(max (* xl yl)
(* xu yu))))
(make-interval (* xl yu)
(* xu yu)))))
(else
(make-interval (* xu yl)
(* xl yu)))
(make-interval (* xu yl)
(* xu yu)))
(make-interval (* xl yl)
(* xu yu)))))))))
|
5aa99fc2724f62ce3bb79bca4bafa92d040edbb592511bdee7d5bdd439a5e114 | music-suite/music-suite | Annotations.hs | # OPTIONS_GHC -fno - warn - incomplete - patterns #
# OPTIONS_GHC -fno - warn - name - shadowing
-fno - warn - unused - imports
-fno - warn - redundant - constraints #
-fno-warn-unused-imports
-fno-warn-redundant-constraints #-}
-------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
-- |
Copyright : ( c ) 2012 - 2014
--
-- License : BSD-style
--
Maintainer :
-- Stability : experimental
Portability : non - portable ( TF , )
--
-- Provides a basic way annotating scores.
module Music.Score.Meta.Annotations
( Annotation,
getAnnotation,
annotate,
annotateSpan,
showAnnotations,
showAnnotations',
withAnnotations,
)
where
import Control.Monad.Plus
import qualified Data.List
import Data.Semigroup
import Data.String
import Data.Typeable
import Music.Score.Meta
import Music.Score.Part
import Music.Score.Text (HasText, text)
import Music.Time
import Music.Time.Reactive
-- |
-- An annotation is a unique textual value attached to parts of a score.
-- They are ignored by default, but can be collected with 'withAnnotations'.
newtype Annotation = Annotation {getAnnotation_ :: [String]}
deriving (Semigroup, Monoid, Typeable)
instance IsString Annotation where fromString = Annotation . return
getAnnotation :: Annotation -> [String]
getAnnotation = Data.List.nub . getAnnotation_
-- | Annotate the whole score.
annotate :: String -> Score a -> Score a
annotate str x = case _era x of
Nothing -> x
Just e -> annotateSpan e str x
-- | Annotate a part of the score.
annotateSpan :: Span -> String -> Score a -> Score a
annotateSpan span str x = addMetaEvent (transform span $ return $ Annotation [str]) x
-- | Show all annotations in the score.
showAnnotations :: (HasParts' a, Ord (GetPart a), HasText a) => Score a -> Score a
showAnnotations = showAnnotations' ":"
-- | Show all annotations in the score using the given prefix.
showAnnotations' :: (HasParts' a, Ord (GetPart a), HasText a) => String -> Score a -> Score a
showAnnotations' prefix = withAnnotations (flip $ \s -> foldr (text . (prefix ++)) s)
-- | Handle the annotations in a score.
withAnnotations :: ([String] -> Score a -> Score a) -> Score a -> Score a
withAnnotations f = withMeta (f . getAnnotation)
| null | https://raw.githubusercontent.com/music-suite/music-suite/7f01fd62334c66418043b7a2d662af127f98685d/src/Music/Score/Meta/Annotations.hs | haskell | -----------------------------------------------------------------------------------
-----------------------------------------------------------------------------------
|
License : BSD-style
Stability : experimental
Provides a basic way annotating scores.
|
An annotation is a unique textual value attached to parts of a score.
They are ignored by default, but can be collected with 'withAnnotations'.
| Annotate the whole score.
| Annotate a part of the score.
| Show all annotations in the score.
| Show all annotations in the score using the given prefix.
| Handle the annotations in a score. | # OPTIONS_GHC -fno - warn - incomplete - patterns #
# OPTIONS_GHC -fno - warn - name - shadowing
-fno - warn - unused - imports
-fno - warn - redundant - constraints #
-fno-warn-unused-imports
-fno-warn-redundant-constraints #-}
Copyright : ( c ) 2012 - 2014
Maintainer :
Portability : non - portable ( TF , )
module Music.Score.Meta.Annotations
( Annotation,
getAnnotation,
annotate,
annotateSpan,
showAnnotations,
showAnnotations',
withAnnotations,
)
where
import Control.Monad.Plus
import qualified Data.List
import Data.Semigroup
import Data.String
import Data.Typeable
import Music.Score.Meta
import Music.Score.Part
import Music.Score.Text (HasText, text)
import Music.Time
import Music.Time.Reactive
newtype Annotation = Annotation {getAnnotation_ :: [String]}
deriving (Semigroup, Monoid, Typeable)
instance IsString Annotation where fromString = Annotation . return
getAnnotation :: Annotation -> [String]
getAnnotation = Data.List.nub . getAnnotation_
annotate :: String -> Score a -> Score a
annotate str x = case _era x of
Nothing -> x
Just e -> annotateSpan e str x
annotateSpan :: Span -> String -> Score a -> Score a
annotateSpan span str x = addMetaEvent (transform span $ return $ Annotation [str]) x
showAnnotations :: (HasParts' a, Ord (GetPart a), HasText a) => Score a -> Score a
showAnnotations = showAnnotations' ":"
showAnnotations' :: (HasParts' a, Ord (GetPart a), HasText a) => String -> Score a -> Score a
showAnnotations' prefix = withAnnotations (flip $ \s -> foldr (text . (prefix ++)) s)
withAnnotations :: ([String] -> Score a -> Score a) -> Score a -> Score a
withAnnotations f = withMeta (f . getAnnotation)
|
efacb8922d75dc8b455d102c0763f3e45a4b7141b14bd6e9e2a2958b6adf2c9e | kaznum/programming_in_ocaml_exercise | intersect.ml | let rec intersect s1 s2 =
match
(s1, s2)
with
([], _) -> []
| (x::xs, ys) when mem x ys -> x::(intersect xs ys)
| (x::xs, ys) -> intersect xs ys;;
intersect [1;3;5;7] [1;5;3;4;2];;
| null | https://raw.githubusercontent.com/kaznum/programming_in_ocaml_exercise/6f6a5d62a7a87a1c93561db88f08ae4e445b7d4e/ex5.3/intersect.ml | ocaml | let rec intersect s1 s2 =
match
(s1, s2)
with
([], _) -> []
| (x::xs, ys) when mem x ys -> x::(intersect xs ys)
| (x::xs, ys) -> intersect xs ys;;
intersect [1;3;5;7] [1;5;3;4;2];;
|
|
f3511128fb9c018055a874f83185b70d48d4797d2d4d35b32191a01326ef8671 | elastic/eui-cljs | icon_currency.cljs | (ns eui.icon-currency
(:require ["@elastic/eui/lib/components/icon/assets/currency.js" :as eui]))
(def currency eui/icon)
| null | https://raw.githubusercontent.com/elastic/eui-cljs/ad60b57470a2eb8db9bca050e02f52dd964d9f8e/src/eui/icon_currency.cljs | clojure | (ns eui.icon-currency
(:require ["@elastic/eui/lib/components/icon/assets/currency.js" :as eui]))
(def currency eui/icon)
|
|
b05ffac44ca28bffda6c68a4a246439fab5acb10e0387d4f19a13243477a3589 | solidsnack/system-uuid | Main.hs | - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
USAGE : hooty ( -1|-4 ) ? ( -n < number to make > ) ?
The ` hooty ` program generates any number of UUIDs ( one by default ) , using
either the version 1 ( time and MAC ) or version 4 ( random ) algorithm ( version
1 is the default ) . On all platforms , ` hooty ` uses the native implementation .
-n , --number < number > Create < number > many UUIDs in one go .
-1 , --sequential Create version 1 ( time and MAC ) UUIDs .
-4 , --random Create version 4 ( random ) UUIDs .
-h , - ? , --help Print this help and exit .
--version Print version and exit .
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
USAGE: hooty (-1|-4)? (-n <number to make>)?
The `hooty` program generates any number of UUIDs (one by default), using
either the version 1 (time and MAC) or version 4 (random) algorithm (version
1 is the default). On all platforms, `hooty` uses the native implementation.
-n, --number <number> Create <number> many UUIDs in one go.
-1, --sequential Create version 1 (time and MAC) UUIDs.
-4, --random Create version 4 (random) UUIDs.
-h, -?, --help Print this help and exit.
--version Print version and exit.
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -}
{-# LANGUAGE TemplateHaskell
, PatternGuards
#-}
import qualified System.UUID.V1 as V1
import qualified System.UUID.V4 as V4
import Options
import Messages
import qualified Macros as Macros
import System.Environment
import System.Exit
import Control.Monad
import Control.Applicative
import Data.Maybe
import Data.Word
import qualified Data.Map as Map
main = do
m <- opts
let
lk = (`Map.lookup` m)
when (isJust $ lk "h") $ do
stdout << usage
exitWith ExitSuccess
when (isJust $ lk "version") $ do
stdout << version
exitWith ExitSuccess
when (all (isJust . lk) ["1","4"]) $ do
bail "Please specify either version 1 or version 4, not both."
let
n :: Word
n = fromMaybe 1 $ maybeRead =<< lk "n"
gen =
if isJust $ lk "4"
then V4.uuid
else V1.uuid
mapM_ (const $ print =<< gen) [1..n]
bail :: String -> IO a
bail s = do
stderr << s
stderr << usage
exitFailure
usage = $(Macros.usage)
version = "hooty-" ++ $(Macros.version)
opts = do
args <- getArgs
case runParser options () "command line arguments" args of
Right list -> return $ foldr ($) Map.empty list
Left e -> bail $ show e
options = do
res <- choice
[ eof >> return []
, many1 options'
]
eof
return res
options' = do
o <- choice opts
opt o
where
opt o@[c]
| c `elem` "h14" = return $ Map.insert o ""
| c == 'n' = choice
[ eof >> fail "Option requires an argument."
, try $ do
s <- initialChar '-'
fail $ "Option requiring argument followed by:\n " ++ s
, fmap (Map.insert o) anyString
]
| otherwise = prb $ "unimplemented option '" ++ o ++ "'"
opt "version" = return $ Map.insert "version" ""
opt o = prb $ "unimplemented option '" ++ o ++ "'"
prb s = fail $ "Please report a bug -- " ++ s ++ "."
opts = map try
[ option "h?" ["help"]
, option "1" ["sequential"]
, option "4" ["random"]
, option "n" ["number"]
, option "" ["version"]
] ++ [ fail "Invalid option." ]
maybeRead s
| [(a, _)] <- reads s = Just a
| otherwise = Nothing
| null | https://raw.githubusercontent.com/solidsnack/system-uuid/572f44a50d4fdd81a103f05df48a7e2cb75a17f3/Main.hs | haskell | number < number > Create < number > many UUIDs in one go .
sequential Create version 1 ( time and MAC ) UUIDs .
random Create version 4 ( random ) UUIDs .
help Print this help and exit .
version Print version and exit .
number <number> Create <number> many UUIDs in one go.
sequential Create version 1 (time and MAC) UUIDs.
random Create version 4 (random) UUIDs.
help Print this help and exit.
version Print version and exit.
# LANGUAGE TemplateHaskell
, PatternGuards
# | - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
USAGE : hooty ( -1|-4 ) ? ( -n < number to make > ) ?
The ` hooty ` program generates any number of UUIDs ( one by default ) , using
either the version 1 ( time and MAC ) or version 4 ( random ) algorithm ( version
1 is the default ) . On all platforms , ` hooty ` uses the native implementation .
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
USAGE: hooty (-1|-4)? (-n <number to make>)?
The `hooty` program generates any number of UUIDs (one by default), using
either the version 1 (time and MAC) or version 4 (random) algorithm (version
1 is the default). On all platforms, `hooty` uses the native implementation.
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -}
import qualified System.UUID.V1 as V1
import qualified System.UUID.V4 as V4
import Options
import Messages
import qualified Macros as Macros
import System.Environment
import System.Exit
import Control.Monad
import Control.Applicative
import Data.Maybe
import Data.Word
import qualified Data.Map as Map
main = do
m <- opts
let
lk = (`Map.lookup` m)
when (isJust $ lk "h") $ do
stdout << usage
exitWith ExitSuccess
when (isJust $ lk "version") $ do
stdout << version
exitWith ExitSuccess
when (all (isJust . lk) ["1","4"]) $ do
bail "Please specify either version 1 or version 4, not both."
let
n :: Word
n = fromMaybe 1 $ maybeRead =<< lk "n"
gen =
if isJust $ lk "4"
then V4.uuid
else V1.uuid
mapM_ (const $ print =<< gen) [1..n]
bail :: String -> IO a
bail s = do
stderr << s
stderr << usage
exitFailure
usage = $(Macros.usage)
version = "hooty-" ++ $(Macros.version)
opts = do
args <- getArgs
case runParser options () "command line arguments" args of
Right list -> return $ foldr ($) Map.empty list
Left e -> bail $ show e
options = do
res <- choice
[ eof >> return []
, many1 options'
]
eof
return res
options' = do
o <- choice opts
opt o
where
opt o@[c]
| c `elem` "h14" = return $ Map.insert o ""
| c == 'n' = choice
[ eof >> fail "Option requires an argument."
, try $ do
s <- initialChar '-'
fail $ "Option requiring argument followed by:\n " ++ s
, fmap (Map.insert o) anyString
]
| otherwise = prb $ "unimplemented option '" ++ o ++ "'"
opt "version" = return $ Map.insert "version" ""
opt o = prb $ "unimplemented option '" ++ o ++ "'"
prb s = fail $ "Please report a bug -- " ++ s ++ "."
opts = map try
[ option "h?" ["help"]
, option "1" ["sequential"]
, option "4" ["random"]
, option "n" ["number"]
, option "" ["version"]
] ++ [ fail "Invalid option." ]
maybeRead s
| [(a, _)] <- reads s = Just a
| otherwise = Nothing
|
fb37021e24fdd86cf55b19712dc3e8f820608673d3303b94bd77eed2e7f1b9c0 | johnlinvc/erruby | erruby_object.erl | -module(erruby_object).
-include("rb.hrl").
-behavior(gen_server).
-export([init/1, terminate/2, code_change/3, handle_call/3, handle_cast/2, handle_info/2]).
%for vm
-export([def_method/4, find_instance_method/2, def_global_const/2, find_global_const/1, def_const/3, find_const/2, init_object_class/0,object_class/0]).
-export([def_singleton_method/4, def_singleton_method/3]).
-export([def_global_var/2, find_global_var/1]).
for other buildtin class
-export([def_method/3, new_object_with_pid_symbol/2, new_object/2]).
-export([def_ivar/3, find_ivar/2]).
-export([init_main_object/0, main_object/0]).
-export([start_link/2, start_link/1]).
-export([get_properties/1, set_properties/2]).
-export([get_class/1]).
init([#{class := Class, properties := Properties}]) ->
DefaultState = default_state(),
StateWithClass = add_class_to_state(DefaultState, Class),
{ok, add_property_to_state(StateWithClass, Properties)};
init([#{class := Class}]) ->
DefaultState = default_state(),
{ok, add_class_to_state(DefaultState, Class)};
init([]) ->
{ok, default_state()}.
add_class_to_state(State, Class) ->
State#{class => Class}.
add_property_to_state(State, Properties) ->
State#{properties => Properties}.
TODO in return defalut object_class if no class is present
default_state() ->
Methods = #{},
IVars = #{},
Consts = #{},
#{self => self(),
methods => Methods,
ivars => IVars,
properties => #{},
consts => Consts}.
%TODO unify these?
start_link(Class) ->
gen_server:start_link(?MODULE, [#{class => Class }], []).
start_link(Class, Properties) ->
gen_server:start_link(?MODULE, [#{class => Class, properties => Properties}], []).
terminate(_Arg, _State) ->
{ok, dead}.
code_change(_OldVsn, State, _Extra) -> {ok, State}.
get_class(Self) ->
gen_server:call(Self, #{type => get_class}).
find_instance_method(Self, Name) ->
SingletonMethod = find_instance_method_in_singleton_class(Self, Name),
case SingletonMethod of
{ok, Method} -> Method;
{not_found, _} ->
find_instance_method_in_class(Self, Name)
end.
find_instance_method_in_singleton_class(Self, Name) ->
SingletonClass = singleton_class(Self),
case SingletonClass of
not_found -> {not_found, Name};
_ ->
Result = gen_server:call(SingletonClass, #{type => find_method, name => Name}),
case Result of
not_found -> {not_found, Name};
_ -> {ok, Result}
end
end.
find_instance_method_in_class(Self, Name) ->
%erruby_debug:debug_tmp("finding instance method ~p in ~p",[ Name, Self]),
Klass = get_class(Self),
Result = gen_server:call(Klass, #{type => find_method, name => Name}),
case Result of
not_found -> {not_found, Name};
_ -> Result
end.
find_method(Self, Name) ->
gen_server:call(Self, #{type => find_method, name => Name}).
self_or_object_class(Self) ->
MainObject = main_object(),
case Self of
MainObject -> object_class();
_ -> Self
end.
singleton_class(Self) ->
Properties = get_properties(Self),
maps:get(singleton_class, Properties, not_found).
get_or_create_singleton_class(Self) ->
SingletonClass = singleton_class(Self),
case SingletonClass of
not_found ->
{ok, NewSingletonClass} = erruby_class:new_named_class("singleton class"),
Properties = get_properties(Self),
NewProperties = Properties#{ singleton_class => NewSingletonClass },
set_properties(Self, NewProperties),
NewSingletonClass;
_ ->
SingletonClass
end.
def_method(Self, Name, Args, Body) ->
Receiver = self_or_object_class(Self),
gen_server:call(Receiver, #{type => def_method, name => Name, args => Args, body => Body}).
def_method(Self,Name,Func) when is_function(Func) ->
Receiver = self_or_object_class(Self),
gen_server:call(Receiver, #{type => def_method, name => Name, func => Func}).
def_singleton_method(Self, Name, Args, Body) ->
Receiver = get_or_create_singleton_class(Self),
gen_server:call(Receiver, #{type => def_method, name => Name, args => Args, body => Body}).
def_singleton_method(Self,Name,Func) when is_function(Func) ->
Receiver = get_or_create_singleton_class(Self),
gen_server:call(Receiver, #{type => def_method, name => Name, func => Func}).
%TODO call def_const instead
def_global_const(Name, Value) ->
gen_server:call(object_class(), #{type => def_const, name => Name, value => Value}).
find_global_const(Name) ->
find_const(object_class(), Name).
TODO define on basic object instead
%TODO ability to use custom getter/setter
def_global_var(Name, Value) ->
Msg = #{type => def_global_var, name => Name, value => Value},
gen_server:call(object_class(), Msg).
find_global_var(Name) ->
gen_server:call(object_class(), #{type => find_global_var, name => Name}).
def_const(Self, Name, Value) ->
Receiver = self_or_object_class(Self),
gen_server:call(Receiver, #{type => def_const, name => Name, value => Value}).
find_const(Self, Name) ->
erruby_debug:debug_2("finding on ~p for const:~p~n",[Self, Name]),
gen_server:call(Self, #{type => find_const, name => Name}).
def_ivar(Self, Name, Value)->
gen_server:call(Self, #{type => def_ivar, name => Name, value => Value}).
find_ivar(Self, Name) ->
erruby_debug:debug_2("finding on ~p for ivar:~p~n",[Self, Name]),
gen_server:call(Self, #{type => find_ivar, name => Name}).
get_properties(Self) ->
gen_server:call(Self, #{type => get_properties}).
set_properties(Self, Properties) ->
gen_server:call(Self, #{type => set_properties, properties => Properties}).
handle_info(Info, State) ->
io:format("Got unkwon info:~n~p~n", [Info]),
{ok, State}.
handle_call(#{ type := def_method , name := Name, body := Body, args := Args}=_Msg, _From, #{methods := Methods} =State) ->
NewMethods = Methods#{ Name => #{ args => Args, body => Body, argc => length(Args) } },
NewState = State#{ methods := NewMethods},
{reply, Name, NewState};
handle_call(#{ type := def_method, name := Name, func := Func}=_Msg, _From, #{methods := Methods} = State) ->
NewMethods = Methods#{ Name => Func },
NewState = State#{ methods := NewMethods},
{reply, Name, NewState};
handle_call(#{ type := find_method, name := Name }, _From, #{methods := Methods} = State) ->
erruby_debug:debug_2("finding method:~p~n in State:~p~n",[Name, State]),
case maps:is_key(Name,Methods) of
true ->
#{Name := Method} = Methods,
{reply, Method, State};
false ->
TODO use error classes
%io:format("Method ~p not found~n",[Name]),
erruby_debug:debug_2("finding in ancestors:~p~n",[ancestors(State)]),
Method = find_method_in_ancestors(ancestors(State), Name),
{reply, Method, State}
end;
handle_call(#{ type := get_properties }, _From, #{properties := Properties}=State) ->
{reply, Properties, State};
handle_call(#{ type := set_properties, properties := Properties }, _From, State) ->
NewState = State#{ properties := Properties},
{reply, NewState, NewState};
handle_call(#{ type := def_ivar, name := Name, value := Value }, _From, #{ivars := IVars}=State) ->
NewIvars = IVars#{Name => Value},
NewState = State#{ivars := NewIvars},
{reply, Name, NewState};
handle_call(#{ type := find_ivar, name := Name }, _From, #{ivars := IVars}=State) ->
erruby_debug:debug_2("finding ivar:~p~nin State:~p~n",[Name, State]),
Value = maps:get(Name, IVars, erruby_nil:nil_instance()),
{reply, Value, State};
handle_call(#{ type := def_const, name := Name, value := Value }, _From, #{consts := Consts}=State) ->
NewConsts = Consts#{Name => Value},
NewState = State#{consts := NewConsts},
{reply, Name, NewState};
handle_call(#{ type := find_const, name := Name }, _From, #{consts := Consts}=State) ->
erruby_debug:debug_2("finding const:~p~nin State:~p~n",[Name, State]),
Value = maps:get(Name, Consts, not_found),
{reply, Value, State};
handle_call(#{ type := def_global_var, name := Name, value := Value}, _From,
#{properties := #{global_var_tbl := GVarTbl} } = State) ->
NewGVarTbl = GVarTbl#{ Name => Value},
#{properties := Properties} = State,
NewProperties = Properties#{ global_var_tbl := NewGVarTbl },
NewState = State#{ properties := NewProperties},
{reply, Name, NewState};
handle_call(#{ type := find_global_var, name := Name}, _From,
#{properties := #{global_var_tbl := GVarTbl} } = State) ->
Value = maps:get(Name, GVarTbl, not_found),
{reply, Value, State};
handle_call(#{ type := get_class}, _From, State) ->
Value = maps:get(class, State, object_class()),
{reply, Value, State};
handle_call(_Req, _From, State) ->
io:format("handle unknow call ~p ~n ~p ~n ~p ~n",[_Req, _From, State]),
NewState = State,
{reply, done, NewState}.
handle_cast(_Req, State) ->
io:format("handle unknown cast ~p ~p ~n",[_Req, State]),
NewState = State,
{reply, done, NewState}.
TODO support va args
method_puts(Env, String) ->
io:format("~s~n", [String]),
erruby_nil:new_nil(Env).
append_rb_extension(FileName) ->
case filename:extension(FileName) of
[] -> string:concat(FileName, ".rb");
_ -> FileName
end.
%TODO extract to Kernal
%TODO raise error if file not found
method_require_relative(Env, FileName) ->
RelativeFileName = relativeFileName(Env, FileName),
RelativeFileNameWithExt = append_rb_extension(RelativeFileName),
LoadedFeatures = find_global_var("$LOADED_FEATURES"),
LoadedFeaturesList = erruby_array:array_to_list(LoadedFeatures),
Contains = lists:member( RelativeFileNameWithExt, LoadedFeaturesList),
case Contains of
true -> erruby_boolean:new_false(Env);
_ ->
load_file(Env, RelativeFileNameWithExt),
erruby_array:push(LoadedFeatures, RelativeFileNameWithExt),
erruby_boolean:new_true(Env)
end.
relativeFileName(Env, FileName) ->
SrcFile = erruby_vm:file_name(Env),
SrcDir = filename:dirname(SrcFile),
filename:join([SrcDir, FileName]).
load_file(Env, RelativeFileNameWithExt) ->
try
erruby:eruby(RelativeFileNameWithExt),
erruby_boolean:new_true(Env)
catch
_:_E ->
erruby_debug:debug_2("cant require_relative file ~p~n", [RelativeFileNameWithExt]),
erruby_boolean:new_false(Env)
end.
%TODO raise error if file not found
@TODO find a better way to get filename
method_load(Env, FileName)->
Pwd = os:getenv("PWD"),
RelativeFileNameWithExt = filename:join([Pwd, FileName]),
load_file(Env, RelativeFileNameWithExt).
method_self(#{self := Self}=Env) ->
erruby_rb:return(Self, Env).
method_inspect(#{self := Self}=Env) ->
S = io_lib:format("#<Object:~p>",[Self]),
erruby_vm:new_string(S,Env).
method_to_s(#{self := Self}=Env) ->
S = io_lib:format("~p",[Self]),
erruby_vm:new_string(S,Env).
%TODO support property?
new_object_with_pid_symbol(Symbol, Class) ->
gen_server:start_link({local, Symbol}, ?MODULE, [#{class => Class}], []).
new_object(Class, Payload) when is_map(Payload) ->
start_link(Class, Payload).
init_object_class() ->
erb:find_or_init_class(erruby_object_class, fun init_object_class_internal/0).
init_object_class_internal() ->
{ok, Pid} = gen_server:start_link({local, erruby_object_class}, ?MODULE, [],[]),
install_object_class_methods(),
'Object' = def_const(Pid, 'Object', Pid),
set_properties(object_class(), #{global_var_tbl => #{}}),
def_global_var("$LOADED_FEATURES", erruby_array:new_array([])),
{ok, Pid}.
init_main_object() ->
erb:find_or_init_class(erruby_main_object, fun init_main_object_internal/0).
init_main_object_internal() ->
new_object_with_pid_symbol(erruby_main_object, object_class()).
object_class() ->
whereis(erruby_object_class).
main_object() ->
whereis(erruby_main_object).
install_object_class_methods() ->
TODO use this after inherent is done
def_method(object_class ( ) , ' = = ' , fun method_eq/2 ) .
def_method(object_class(), 'puts', fun method_puts/2),
def_method(object_class(), 'self', fun method_self/1),
def_method(object_class(), 'inspect', fun method_inspect/1),
def_method(object_class(), 'to_s', fun method_to_s/1),
def_method(object_class(), '==', fun method_eq/2),
def_method(object_class(), 'require_relative', fun method_require_relative/2),
def_method(object_class(), 'load', fun method_load/2),
ok.
method_eq(#{self := Self}=Env, Object) ->
case Object of
Self -> erruby_boolean:new_true(Env);
_ -> erruby_boolean:new_false(Env)
end.
super_class(#{properties := Properties}=_State) ->
maps:get(superclass, Properties, object_class()).
%TODO handle include & extend
ancestors(State) ->
SuperClass = super_class(State),
ObjectClass = object_class(),
case self() of
ObjectClass -> [];
_ -> [SuperClass, ObjectClass]
end.
find_method_in_ancestors([], _Name) ->
not_found;
find_method_in_ancestors(Ancestors, Name) ->
[Klass | Rest] = Ancestors,
Method = find_method(Klass, Name),
case Method of
not_found -> find_method_in_ancestors(Rest, Name);
_ -> Method
end.
| null | https://raw.githubusercontent.com/johnlinvc/erruby/60df66495a01f9dda08bd3f670bfe9dc0661a168/src/erruby_object.erl | erlang | for vm
TODO unify these?
erruby_debug:debug_tmp("finding instance method ~p in ~p",[ Name, Self]),
TODO call def_const instead
TODO ability to use custom getter/setter
io:format("Method ~p not found~n",[Name]),
TODO extract to Kernal
TODO raise error if file not found
TODO raise error if file not found
TODO support property?
TODO handle include & extend | -module(erruby_object).
-include("rb.hrl").
-behavior(gen_server).
-export([init/1, terminate/2, code_change/3, handle_call/3, handle_cast/2, handle_info/2]).
-export([def_method/4, find_instance_method/2, def_global_const/2, find_global_const/1, def_const/3, find_const/2, init_object_class/0,object_class/0]).
-export([def_singleton_method/4, def_singleton_method/3]).
-export([def_global_var/2, find_global_var/1]).
for other buildtin class
-export([def_method/3, new_object_with_pid_symbol/2, new_object/2]).
-export([def_ivar/3, find_ivar/2]).
-export([init_main_object/0, main_object/0]).
-export([start_link/2, start_link/1]).
-export([get_properties/1, set_properties/2]).
-export([get_class/1]).
init([#{class := Class, properties := Properties}]) ->
DefaultState = default_state(),
StateWithClass = add_class_to_state(DefaultState, Class),
{ok, add_property_to_state(StateWithClass, Properties)};
init([#{class := Class}]) ->
DefaultState = default_state(),
{ok, add_class_to_state(DefaultState, Class)};
init([]) ->
{ok, default_state()}.
add_class_to_state(State, Class) ->
State#{class => Class}.
add_property_to_state(State, Properties) ->
State#{properties => Properties}.
TODO in return defalut object_class if no class is present
default_state() ->
Methods = #{},
IVars = #{},
Consts = #{},
#{self => self(),
methods => Methods,
ivars => IVars,
properties => #{},
consts => Consts}.
start_link(Class) ->
gen_server:start_link(?MODULE, [#{class => Class }], []).
start_link(Class, Properties) ->
gen_server:start_link(?MODULE, [#{class => Class, properties => Properties}], []).
terminate(_Arg, _State) ->
{ok, dead}.
code_change(_OldVsn, State, _Extra) -> {ok, State}.
get_class(Self) ->
gen_server:call(Self, #{type => get_class}).
find_instance_method(Self, Name) ->
SingletonMethod = find_instance_method_in_singleton_class(Self, Name),
case SingletonMethod of
{ok, Method} -> Method;
{not_found, _} ->
find_instance_method_in_class(Self, Name)
end.
find_instance_method_in_singleton_class(Self, Name) ->
SingletonClass = singleton_class(Self),
case SingletonClass of
not_found -> {not_found, Name};
_ ->
Result = gen_server:call(SingletonClass, #{type => find_method, name => Name}),
case Result of
not_found -> {not_found, Name};
_ -> {ok, Result}
end
end.
find_instance_method_in_class(Self, Name) ->
Klass = get_class(Self),
Result = gen_server:call(Klass, #{type => find_method, name => Name}),
case Result of
not_found -> {not_found, Name};
_ -> Result
end.
find_method(Self, Name) ->
gen_server:call(Self, #{type => find_method, name => Name}).
self_or_object_class(Self) ->
MainObject = main_object(),
case Self of
MainObject -> object_class();
_ -> Self
end.
singleton_class(Self) ->
Properties = get_properties(Self),
maps:get(singleton_class, Properties, not_found).
get_or_create_singleton_class(Self) ->
SingletonClass = singleton_class(Self),
case SingletonClass of
not_found ->
{ok, NewSingletonClass} = erruby_class:new_named_class("singleton class"),
Properties = get_properties(Self),
NewProperties = Properties#{ singleton_class => NewSingletonClass },
set_properties(Self, NewProperties),
NewSingletonClass;
_ ->
SingletonClass
end.
def_method(Self, Name, Args, Body) ->
Receiver = self_or_object_class(Self),
gen_server:call(Receiver, #{type => def_method, name => Name, args => Args, body => Body}).
def_method(Self,Name,Func) when is_function(Func) ->
Receiver = self_or_object_class(Self),
gen_server:call(Receiver, #{type => def_method, name => Name, func => Func}).
def_singleton_method(Self, Name, Args, Body) ->
Receiver = get_or_create_singleton_class(Self),
gen_server:call(Receiver, #{type => def_method, name => Name, args => Args, body => Body}).
def_singleton_method(Self,Name,Func) when is_function(Func) ->
Receiver = get_or_create_singleton_class(Self),
gen_server:call(Receiver, #{type => def_method, name => Name, func => Func}).
def_global_const(Name, Value) ->
gen_server:call(object_class(), #{type => def_const, name => Name, value => Value}).
find_global_const(Name) ->
find_const(object_class(), Name).
TODO define on basic object instead
def_global_var(Name, Value) ->
Msg = #{type => def_global_var, name => Name, value => Value},
gen_server:call(object_class(), Msg).
find_global_var(Name) ->
gen_server:call(object_class(), #{type => find_global_var, name => Name}).
def_const(Self, Name, Value) ->
Receiver = self_or_object_class(Self),
gen_server:call(Receiver, #{type => def_const, name => Name, value => Value}).
find_const(Self, Name) ->
erruby_debug:debug_2("finding on ~p for const:~p~n",[Self, Name]),
gen_server:call(Self, #{type => find_const, name => Name}).
def_ivar(Self, Name, Value)->
gen_server:call(Self, #{type => def_ivar, name => Name, value => Value}).
find_ivar(Self, Name) ->
erruby_debug:debug_2("finding on ~p for ivar:~p~n",[Self, Name]),
gen_server:call(Self, #{type => find_ivar, name => Name}).
get_properties(Self) ->
gen_server:call(Self, #{type => get_properties}).
set_properties(Self, Properties) ->
gen_server:call(Self, #{type => set_properties, properties => Properties}).
handle_info(Info, State) ->
io:format("Got unkwon info:~n~p~n", [Info]),
{ok, State}.
handle_call(#{ type := def_method , name := Name, body := Body, args := Args}=_Msg, _From, #{methods := Methods} =State) ->
NewMethods = Methods#{ Name => #{ args => Args, body => Body, argc => length(Args) } },
NewState = State#{ methods := NewMethods},
{reply, Name, NewState};
handle_call(#{ type := def_method, name := Name, func := Func}=_Msg, _From, #{methods := Methods} = State) ->
NewMethods = Methods#{ Name => Func },
NewState = State#{ methods := NewMethods},
{reply, Name, NewState};
handle_call(#{ type := find_method, name := Name }, _From, #{methods := Methods} = State) ->
erruby_debug:debug_2("finding method:~p~n in State:~p~n",[Name, State]),
case maps:is_key(Name,Methods) of
true ->
#{Name := Method} = Methods,
{reply, Method, State};
false ->
TODO use error classes
erruby_debug:debug_2("finding in ancestors:~p~n",[ancestors(State)]),
Method = find_method_in_ancestors(ancestors(State), Name),
{reply, Method, State}
end;
handle_call(#{ type := get_properties }, _From, #{properties := Properties}=State) ->
{reply, Properties, State};
handle_call(#{ type := set_properties, properties := Properties }, _From, State) ->
NewState = State#{ properties := Properties},
{reply, NewState, NewState};
handle_call(#{ type := def_ivar, name := Name, value := Value }, _From, #{ivars := IVars}=State) ->
NewIvars = IVars#{Name => Value},
NewState = State#{ivars := NewIvars},
{reply, Name, NewState};
handle_call(#{ type := find_ivar, name := Name }, _From, #{ivars := IVars}=State) ->
erruby_debug:debug_2("finding ivar:~p~nin State:~p~n",[Name, State]),
Value = maps:get(Name, IVars, erruby_nil:nil_instance()),
{reply, Value, State};
handle_call(#{ type := def_const, name := Name, value := Value }, _From, #{consts := Consts}=State) ->
NewConsts = Consts#{Name => Value},
NewState = State#{consts := NewConsts},
{reply, Name, NewState};
handle_call(#{ type := find_const, name := Name }, _From, #{consts := Consts}=State) ->
erruby_debug:debug_2("finding const:~p~nin State:~p~n",[Name, State]),
Value = maps:get(Name, Consts, not_found),
{reply, Value, State};
handle_call(#{ type := def_global_var, name := Name, value := Value}, _From,
#{properties := #{global_var_tbl := GVarTbl} } = State) ->
NewGVarTbl = GVarTbl#{ Name => Value},
#{properties := Properties} = State,
NewProperties = Properties#{ global_var_tbl := NewGVarTbl },
NewState = State#{ properties := NewProperties},
{reply, Name, NewState};
handle_call(#{ type := find_global_var, name := Name}, _From,
#{properties := #{global_var_tbl := GVarTbl} } = State) ->
Value = maps:get(Name, GVarTbl, not_found),
{reply, Value, State};
handle_call(#{ type := get_class}, _From, State) ->
Value = maps:get(class, State, object_class()),
{reply, Value, State};
handle_call(_Req, _From, State) ->
io:format("handle unknow call ~p ~n ~p ~n ~p ~n",[_Req, _From, State]),
NewState = State,
{reply, done, NewState}.
handle_cast(_Req, State) ->
io:format("handle unknown cast ~p ~p ~n",[_Req, State]),
NewState = State,
{reply, done, NewState}.
TODO support va args
method_puts(Env, String) ->
io:format("~s~n", [String]),
erruby_nil:new_nil(Env).
append_rb_extension(FileName) ->
case filename:extension(FileName) of
[] -> string:concat(FileName, ".rb");
_ -> FileName
end.
method_require_relative(Env, FileName) ->
RelativeFileName = relativeFileName(Env, FileName),
RelativeFileNameWithExt = append_rb_extension(RelativeFileName),
LoadedFeatures = find_global_var("$LOADED_FEATURES"),
LoadedFeaturesList = erruby_array:array_to_list(LoadedFeatures),
Contains = lists:member( RelativeFileNameWithExt, LoadedFeaturesList),
case Contains of
true -> erruby_boolean:new_false(Env);
_ ->
load_file(Env, RelativeFileNameWithExt),
erruby_array:push(LoadedFeatures, RelativeFileNameWithExt),
erruby_boolean:new_true(Env)
end.
relativeFileName(Env, FileName) ->
SrcFile = erruby_vm:file_name(Env),
SrcDir = filename:dirname(SrcFile),
filename:join([SrcDir, FileName]).
load_file(Env, RelativeFileNameWithExt) ->
try
erruby:eruby(RelativeFileNameWithExt),
erruby_boolean:new_true(Env)
catch
_:_E ->
erruby_debug:debug_2("cant require_relative file ~p~n", [RelativeFileNameWithExt]),
erruby_boolean:new_false(Env)
end.
@TODO find a better way to get filename
method_load(Env, FileName)->
Pwd = os:getenv("PWD"),
RelativeFileNameWithExt = filename:join([Pwd, FileName]),
load_file(Env, RelativeFileNameWithExt).
method_self(#{self := Self}=Env) ->
erruby_rb:return(Self, Env).
method_inspect(#{self := Self}=Env) ->
S = io_lib:format("#<Object:~p>",[Self]),
erruby_vm:new_string(S,Env).
method_to_s(#{self := Self}=Env) ->
S = io_lib:format("~p",[Self]),
erruby_vm:new_string(S,Env).
new_object_with_pid_symbol(Symbol, Class) ->
gen_server:start_link({local, Symbol}, ?MODULE, [#{class => Class}], []).
new_object(Class, Payload) when is_map(Payload) ->
start_link(Class, Payload).
init_object_class() ->
erb:find_or_init_class(erruby_object_class, fun init_object_class_internal/0).
init_object_class_internal() ->
{ok, Pid} = gen_server:start_link({local, erruby_object_class}, ?MODULE, [],[]),
install_object_class_methods(),
'Object' = def_const(Pid, 'Object', Pid),
set_properties(object_class(), #{global_var_tbl => #{}}),
def_global_var("$LOADED_FEATURES", erruby_array:new_array([])),
{ok, Pid}.
init_main_object() ->
erb:find_or_init_class(erruby_main_object, fun init_main_object_internal/0).
init_main_object_internal() ->
new_object_with_pid_symbol(erruby_main_object, object_class()).
object_class() ->
whereis(erruby_object_class).
main_object() ->
whereis(erruby_main_object).
install_object_class_methods() ->
TODO use this after inherent is done
def_method(object_class ( ) , ' = = ' , fun method_eq/2 ) .
def_method(object_class(), 'puts', fun method_puts/2),
def_method(object_class(), 'self', fun method_self/1),
def_method(object_class(), 'inspect', fun method_inspect/1),
def_method(object_class(), 'to_s', fun method_to_s/1),
def_method(object_class(), '==', fun method_eq/2),
def_method(object_class(), 'require_relative', fun method_require_relative/2),
def_method(object_class(), 'load', fun method_load/2),
ok.
method_eq(#{self := Self}=Env, Object) ->
case Object of
Self -> erruby_boolean:new_true(Env);
_ -> erruby_boolean:new_false(Env)
end.
super_class(#{properties := Properties}=_State) ->
maps:get(superclass, Properties, object_class()).
ancestors(State) ->
SuperClass = super_class(State),
ObjectClass = object_class(),
case self() of
ObjectClass -> [];
_ -> [SuperClass, ObjectClass]
end.
find_method_in_ancestors([], _Name) ->
not_found;
find_method_in_ancestors(Ancestors, Name) ->
[Klass | Rest] = Ancestors,
Method = find_method(Klass, Name),
case Method of
not_found -> find_method_in_ancestors(Rest, Name);
_ -> Method
end.
|
94c9f0fd99c5b33c0597d4c25a048fbfc9c2e1fb9c33892711e9617035aaafdd | tweag/asterius | CmpWord16.hs | {-# LANGUAGE BangPatterns #-}
# LANGUAGE MagicHash #
module Main where
import Data.Word
import Data.List
import GHC.Prim
import GHC.Exts
Having a wrapper gives us two things :
-- * it's easier to test everything (no need for code using raw primops)
* we test the deriving mechanism for Word16 #
data TestWord16 = T16 Word16#
deriving (Eq, Ord)
mkT16 :: Word -> TestWord16
mkT16 (W# a) = T16 (narrowWord16# a)
main :: IO ()
main = do
let input = [ (a, b) | a <- allWord16, b <- allWord16 ]
--
-- (==)
--
let expected = [ a == b | (a, b) <- input ]
actual = [ mkT16 a == mkT16 b | (a, b) <- input ]
checkResults "(==)" input expected actual
--
-- (/=)
--
let expected = [ a /= b | (a, b) <- input ]
actual = [ mkT16 a /= mkT16 b | (a, b) <- input ]
checkResults "(/=)" input expected actual
--
-- (<)
--
let expected = [ a < b | (a, b) <- input ]
actual = [ mkT16 a < mkT16 b | (a, b) <- input ]
checkResults "(<)" input expected actual
--
-- (>)
--
let expected = [ a > b | (a, b) <- input ]
actual = [ mkT16 a > mkT16 b | (a, b) <- input ]
checkResults "(>)" input expected actual
--
-- (<=)
--
let expected = [ a <= b | (a, b) <- input ]
actual = [ mkT16 a <= mkT16 b | (a, b) <- input ]
checkResults "(<=)" input expected actual
--
-- (>=)
--
let expected = [ a >= b | (a, b) <- input ]
actual = [ mkT16 a >= mkT16 b | (a, b) <- input ]
checkResults "(>=)" input expected actual
checkResults
:: (Eq a, Eq b, Show a, Show b) => String -> [a] -> [b] -> [b] -> IO ()
checkResults test inputs expected actual =
case findIndex (\(e, a) -> e /= a) (zip expected actual) of
Nothing -> putStrLn $ "Pass: " ++ test
Just i -> error $
"FAILED: " ++ test ++ " for input: " ++ show (inputs !! i)
++ " expected: " ++ show (expected !! i)
++ " but got: " ++ show (actual !! i)
testing across the entire Word16 range blows the memory ,
-- hence choosing a smaller range
allWord16 :: [Word]
allWord16 = [ 0 .. 100 ]
| null | https://raw.githubusercontent.com/tweag/asterius/e7b823c87499656860f87b9b468eb0567add1de8/asterius/test/ghc-testsuite/primops/CmpWord16.hs | haskell | # LANGUAGE BangPatterns #
* it's easier to test everything (no need for code using raw primops)
(==)
(/=)
(<)
(>)
(<=)
(>=)
hence choosing a smaller range | # LANGUAGE MagicHash #
module Main where
import Data.Word
import Data.List
import GHC.Prim
import GHC.Exts
Having a wrapper gives us two things :
* we test the deriving mechanism for Word16 #
data TestWord16 = T16 Word16#
deriving (Eq, Ord)
mkT16 :: Word -> TestWord16
mkT16 (W# a) = T16 (narrowWord16# a)
main :: IO ()
main = do
let input = [ (a, b) | a <- allWord16, b <- allWord16 ]
let expected = [ a == b | (a, b) <- input ]
actual = [ mkT16 a == mkT16 b | (a, b) <- input ]
checkResults "(==)" input expected actual
let expected = [ a /= b | (a, b) <- input ]
actual = [ mkT16 a /= mkT16 b | (a, b) <- input ]
checkResults "(/=)" input expected actual
let expected = [ a < b | (a, b) <- input ]
actual = [ mkT16 a < mkT16 b | (a, b) <- input ]
checkResults "(<)" input expected actual
let expected = [ a > b | (a, b) <- input ]
actual = [ mkT16 a > mkT16 b | (a, b) <- input ]
checkResults "(>)" input expected actual
let expected = [ a <= b | (a, b) <- input ]
actual = [ mkT16 a <= mkT16 b | (a, b) <- input ]
checkResults "(<=)" input expected actual
let expected = [ a >= b | (a, b) <- input ]
actual = [ mkT16 a >= mkT16 b | (a, b) <- input ]
checkResults "(>=)" input expected actual
checkResults
:: (Eq a, Eq b, Show a, Show b) => String -> [a] -> [b] -> [b] -> IO ()
checkResults test inputs expected actual =
case findIndex (\(e, a) -> e /= a) (zip expected actual) of
Nothing -> putStrLn $ "Pass: " ++ test
Just i -> error $
"FAILED: " ++ test ++ " for input: " ++ show (inputs !! i)
++ " expected: " ++ show (expected !! i)
++ " but got: " ++ show (actual !! i)
testing across the entire Word16 range blows the memory ,
allWord16 :: [Word]
allWord16 = [ 0 .. 100 ]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.