_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
07296f4d16ba940732eec77505b5e5943ae648ea64d21154a663717f1ee85439 | mmcguill/haskell-2048 | MainScotty.hs | {-# LANGUAGE OverloadedStrings #-}
module Main where
import Web.Scotty
import Network.Wai.Middleware.Static
import Network.Wai.Middleware.RequestLogger
import System.Random
import Data.Monoid (mconcat)
import Control.Monad.Trans
import System.Directory
import GameModel
import GameModel as GM
import Logic
import Data.Text.Lazy
import Control.Concurrent
import Paths_Haskell2048
import System.Environment
import Data.Maybe (fromMaybe)
-- | An infinite lazy stream of random Floats drawn from the given generator.
randomFloats :: RandomGen g => g -> [Float]
randomFloats = randoms
-- | Apply one game step in the given direction to the shared state:
-- take the current 'GameState' out of the MVar, step it with a fresh
-- stream of randoms, put the result back, and return it.
move :: RandomGen g => GM.Direction -> MVar GameState -> g -> IO GameState
move dir stateVar gen = do
  current <- liftIO (takeMVar stateVar)
  let next = stepGame dir (randomFloats gen) current
  liftIO (putMVar stateVar next)
  return next
-- | Entry point: serve the 2048 game over HTTP with Scotty.
-- The listen port comes from the PORT environment variable (default 3000).
-- Game state is shared across handlers through an MVar.
main :: IO ()
main = do
  s <- newMVar defaultGame
  x <- lookupEnv "PORT"
  let port = fromMaybe "3000" x
  putStrLn "Starting Haskell2048!!"
  scotty (read port) $ do
    middleware logStdoutDev

    -- Report the current state without modifying it.
    get "/gameState" $ do
      x <- liftIO $ takeMVar s
      liftIO $ putMVar s x
      json (x :: GameState)

    -- Replace the shared state with a freshly started game.
    get "/newGame" $ do
      g <- liftIO newStdGen
      let x = startNewGame $ randomFloats g
      ignored <- liftIO $ takeMVar s
      liftIO $ putMVar s x
      json (x :: GameState)

    -- One endpoint per move direction; each returns the updated state.
    get "/moveLeft" $ do
      g <- liftIO newStdGen
      delta <- liftIO $ move GM.Left s g
      json (delta :: GameState)

    get "/moveRight" $ do
      g <- liftIO newStdGen
      delta <- liftIO $ move GM.Right s g
      json (delta :: GameState)

    get "/moveUp" $ do
      g <- liftIO newStdGen
      delta <- liftIO $ move GM.Up s g
      json (delta :: GameState)

    get "/moveDown" $ do
      g <- liftIO newStdGen
      delta <- liftIO $ move GM.Down s g
      json (delta :: GameState)

    -- Static File Serving
    get "/" $ do
      foo <- liftIO $ getDataFileName "src/static/index.html"
      setHeader "Content-Type" "text/html; charset=utf-8"
      file foo

    get "/favicon.ico" $ do
      foo <- liftIO $ getDataFileName "src/static/favicon.ico"
      setHeader "Content-Type" "image/x-icon"
      file foo

    -- Debug...
    get "/:word" $ do
      beam <- param "word"
      html $ mconcat ["<h1>Last Resort: [", beam , "]</h1>"]

    -- Scrap: unfinished fragments, restored as comments — the extraction
    -- had dropped the comment markers, leaving unparseable bare code.
    --get "/pwd" $ do
    --  g <- liftIO $ System .
    --  html $ pack $ g
let beam = L.pack $ show $ head | null | https://raw.githubusercontent.com/mmcguill/haskell-2048/0a2032f95037bb644888327f28bba86ee2554252/src/MainScotty.hs | haskell | # LANGUAGE OverloadedStrings #
Debug...
Scrap
get "/pwd" $ do |
module Main where
import Web.Scotty
import Network.Wai.Middleware.Static
import Network.Wai.Middleware.RequestLogger
import System.Random
import Data.Monoid (mconcat)
import Control.Monad.Trans
import System.Directory
import GameModel
import GameModel as GM
import Logic
import Data.Text.Lazy
import Control.Concurrent
import Paths_Haskell2048
import System.Environment
import Data.Maybe (fromMaybe)
randomFloats :: RandomGen g => g -> [Float]
randomFloats g = randoms (g) :: [Float]
move :: RandomGen g => GM.Direction -> MVar GameState -> g -> IO GameState
move d s g = do
gameState <- liftIO $ takeMVar s
let delta = stepGame d (randomFloats g) gameState
liftIO $ putMVar s delta
return delta
main :: IO ()
main = do
s <- newMVar defaultGame
x <- lookupEnv "PORT"
let port = fromMaybe "3000" x
putStrLn "Starting Haskell2048!!"
scotty (read port) $ do
middleware logStdoutDev
get "/gameState" $ do
x <- liftIO $ takeMVar s
liftIO $ putMVar s x
json $ (x :: GameState)
get "/newGame" $ do
g <- liftIO newStdGen
let x = startNewGame $ randomFloats g
ignored <- liftIO $ takeMVar s
liftIO $ putMVar s x
json $ (x :: GameState)
get "/moveLeft" $ do
g <- liftIO newStdGen
delta <- liftIO $ move GM.Left s g
json $ (delta :: GameState)
get "/moveRight" $ do
g <- liftIO newStdGen
delta <- liftIO $ move GM.Right s g
json $ (delta :: GameState)
get "/moveUp" $ do
g <- liftIO newStdGen
delta <- liftIO $ move GM.Up s g
json $ (delta :: GameState)
get "/moveDown" $ do
g <- liftIO newStdGen
delta <- liftIO $ move GM.Down s g
json $ (delta :: GameState)
Static File Serving
get "/" $ do
foo <- liftIO $ getDataFileName "src/static/index.html"
setHeader "Content-Type" "text/html; charset=utf-8"
file foo
get "/favicon.ico" $ do
foo <- liftIO $ getDataFileName "src/static/favicon.ico"
setHeader "Content-Type" "image/x-icon"
file foo
get "/:word" $ do
beam <- param "word"
html $ mconcat ["<h1>Last Resort: [", beam , "]</h1>"]
g < - liftIO $ System .
html $ pack $ g
let beam = L.pack $ show $ head |
70c51192b59b2a1cd81792c08d81f1bf57a63cd47f79c1fc365a33e72aa16570 | nikomatsakis/a-mir-formality | cosld-solve.rkt | #lang racket
(require redex/reduction-semantics
(prefix-in logic: "cosld-solve/prove.rkt")
"solution.rkt"
"solution-simplify.rkt"
"grammar.rkt"
)
(provide logic:prove-top-level-goal/cosld
solve-top-level-query-goal
)
(define-judgment-form formality-logic
  ;; Prove the query goal and construct a solution.
  ;;
  ;; Mode: the query's variable ids, the environment, and the goal are
  ;; inputs (I); the Solution is the single output (O).
  #:mode (solve-top-level-query-goal I I I O)
  #:contract (solve-top-level-query-goal VarIds Env Goal Solution)

  ;; Success: the prover yields an output environment, from which a
  ;; solution is extracted for the query variables and then simplified.
  [(logic:prove-top-level-goal/cosld Env Goal Env_out)
   (where/error Solution_0 (extract-solution Env_out VarIds_query))
   (where/error Solution_1 (simplify-solution Solution_0))
   ---------------
   (solve-top-level-query-goal VarIds_query Env Goal Solution_1)
   ]

  ;; Ambiguity: the prover reports `ambiguous`, which is passed through
  ;; unchanged as the judgment's result.
  [(logic:prove-top-level-goal/cosld Env Goal ambiguous)
   ---------------
   (solve-top-level-query-goal VarIds_query Env Goal ambiguous)
   ]
  )
| null | https://raw.githubusercontent.com/nikomatsakis/a-mir-formality/9932f8621db162160215c60c323acad56903a4ca/racket-src/logic/cosld-solve.rkt | racket | Prove the query goal and construct a solution. | #lang racket
(require redex/reduction-semantics
(prefix-in logic: "cosld-solve/prove.rkt")
"solution.rkt"
"solution-simplify.rkt"
"grammar.rkt"
)
(provide logic:prove-top-level-goal/cosld
solve-top-level-query-goal
)
(define-judgment-form formality-logic
#:mode (solve-top-level-query-goal I I I O)
#:contract (solve-top-level-query-goal VarIds Env Goal Solution)
[(logic:prove-top-level-goal/cosld Env Goal Env_out)
(where/error Solution_0 (extract-solution Env_out VarIds_query))
(where/error Solution_1 (simplify-solution Solution_0))
---------------
(solve-top-level-query-goal VarIds_query Env Goal Solution_1)
]
[(logic:prove-top-level-goal/cosld Env Goal ambiguous)
---------------
(solve-top-level-query-goal VarIds_query Env Goal ambiguous)
]
)
|
56ea8527d5aec21ff55a29234edb740f9f25c9c2a027c6983f24a8ca7ce20fdb | Holworth/SICP_Solutions | exercise2-35.rkt | #lang sicp
;; Right fold: combine the elements of a list with a binary operation,
;; starting from a seed value.  e.g. (reduce + 0 '(1 2 3)) => 6
(define (reduce f seed xs)
  (if (null? xs)
      seed
      (f (car xs) (reduce f seed (cdr xs)))))
;; Count the leaves (non-pair atoms) of tree t, expressed as a map over
;; the subtrees followed by a summing reduce (SICP exercise 2.35).
(define (count-leaves t)
  (reduce + 0
          (map (lambda (sub)
                 (if (pair? sub)
                     (count-leaves sub)
                     1))
               t)))
;; Ad-hoc checks; the expected leaf counts are noted on each line.
(count-leaves (list 1 2))                                      ; => 2
(count-leaves (list (list 1 2) 3))                             ; => 3
(count-leaves nil)                                             ; => 0
(count-leaves (list (list 1 2) (list 3 (list 5 4)) (list 7)))  ; => 6
(define (reduce op init items)
(cond ((null? items) init)
(else (op (car items)
(reduce op init (cdr items))))))
(define (count-leaves t)
(reduce (lambda (x y) (+ x y)) 0
(map (lambda (x)
(if (not (pair? x))
1
(count-leaves x))) t)))
(count-leaves (list 1 2))
(count-leaves (list (list 1 2) 3))
(count-leaves nil)
(count-leaves (list (list 1 2) (list 3 (list 5 4)) (list 7))) |
|
27a31668953936010f52c2a2f1b3b8f1bdb7dc5fe3da7ef16100700a10e9f640 | thomasblanc/curry-flavor | bigset.ml | module Int =
struct
type t = int
let compare (a:t) b = compare a b
end
module M = CurrySet.Nest (Int) (CurrySet.Nest (Int) (CurrySet.Make (Int) ) )
open M
(* A nested curried set (see [M] above: two CurrySet.Nest layers over a
   CurrySet.Make Int base) populated by piping successive [add] calls;
   each element is addressed by three curried int arguments. *)
let myset =
  empty
  |> add 0 0 0
  |> add 0 1 1
  |> add 1 0 1
  |> add 1 1 1

(* Print every stored (i, j, k) triple. *)
let () = iter (Printf.printf "%d %d %d") myset
| null | https://raw.githubusercontent.com/thomasblanc/curry-flavor/821c56e5485889570e0835627b43c47f5001cd43/examples/bigset.ml | ocaml | module Int =
struct
type t = int
let compare (a:t) b = compare a b
end
module M = CurrySet.Nest (Int) (CurrySet.Nest (Int) (CurrySet.Make (Int) ) )
open M
let myset =
empty
|> add 0 0 0
|> add 0 1 1
|> add 1 0 1
|> add 1 1 1
let () = iter (Printf.printf "%d %d %d") myset
|
|
f9bd2422561d7028ff2e2813d952ef6e87e99310938e9d2f13655da1572db975 | sixohsix/tak | Selection.hs | module Tak.Editor.Selection where
import Tak.Types
import Tak.GlobalState
import Tak.Buffer
import qualified Tak.Buffer.Line as L
import Tak.Range
import Tak.Editor.Cursor
import Tak.Editor.Undo (pushUndo)
import Data.List (sort)
import qualified Data.Sequence as Seq
import Control.Arrow ( (>>>) )
import Control.Lens
-- | Open a selection anchored at the current insert position.
startSelecting :: SimpleEditor -> SimpleEditor
startSelecting ed = ed { selState = sel' }
  where
    sel' = (selState ed) { openRange = Just (insertPos ed) }
-- | Discard any in-progress selection anchor.
cancelSelecting :: SimpleEditor -> SimpleEditor
cancelSelecting ed = ed { selState = sel' }
  where
    sel' = (selState ed) { openRange = Nothing }
-- | The active selection as a 'Range', if any.
--
-- Returns 'Nothing' when no selection anchor is open, or when the
-- selection is empty (the anchor coincides with the insert position).
--
-- Fix: the original bound @Just rangeStartPos = openRange selSt@ in a
-- @let@ — a partial, irrefutable pattern that was immediately shadowed by
-- the case alternative and therefore dead code; it has been removed.
currentSelection :: SimpleEditor -> Maybe Range
currentSelection st =
  case openRange (selState st) of
    Just anchor
      | anchor /= insertPos st -> Just (makeRange anchor (insertPos st))
    _ -> Nothing
-- | Run @f@ on the current selection's range, or return the editor
-- unchanged when nothing is selected.
applyIfSelection :: (Range -> SimpleEditor -> SimpleEditor) -> SimpleEditor -> SimpleEditor
applyIfSelection f ed =
  case currentSelection ed of
    Nothing -> ed
    Just r  -> f r ed
-- | Delete the selected text (if any), then clear the selection anchor.
deleteSelection :: SimpleEditor -> SimpleEditor
deleteSelection ed = cancelSelecting (applyIfSelection deleteRange ed)
-- | Remove the text covered by @rng@: push an undo snapshot first, delete
-- the range from the buffer, and place the cursor at the range's start
-- (clamped to the buffer via 'posWithinBuffer').
deleteRange :: Range -> SimpleEditor -> SimpleEditor
deleteRange rng ed =
  (pushUndo ed) { buffer    = delSelection oldBuf (asTuple rng)
                , cursorPos = posWithinBuffer oldBuf (startPos rng) }
  where
    oldBuf = buffer ed
-- | If the active editor has a selection, push its text onto the front of
-- the global clipboard list (a stack of line sequences); otherwise the
-- state is returned unchanged.
copySelection :: GlobalState -> GlobalState
copySelection gst =
  let ed = activeEditor gst
  in maybe gst (\r -> set clipboard ( (getSelection (buffer ed) (asTuple r)):(view clipboard gst) ) gst) (currentSelection ed)
-- | Insert the current pasteable line sequence at the insert position of
-- the active editor, pushing an undo snapshot first.  A no-op when there
-- is nothing to paste.
--
-- Cursor placement afterwards: the row advances by (number of pasted
-- lines - 1); for a one-line paste the column advances by the pasted
-- text's length, otherwise it becomes the length of the paste's last line.
pasteAtInsertPos :: GlobalState -> GlobalState
pasteAtInsertPos gst
  | Nothing == (pasteable gst) = gst
  | otherwise =
      let ed = activeEditor gst
          buf = buffer ed
          iPos = insertPos ed
          Pos l r = iPos
          -- Partial match is safe here: the first guard has already
          -- ruled out Nothing.
          Just pasteSeq = pasteable gst
          lPasteSeq = Seq.length pasteSeq
          isOneLinePaste = lPasteSeq == 1
          lastLineLen = L.length $ Seq.index pasteSeq (lPasteSeq - 1)
      in (set editor $ (pushUndo ed) { buffer = insertLineSeqIntoBuffer buf iPos pasteSeq,
                                       cursorPos = Pos (l + (Seq.length pasteSeq) - 1)
                                                       (if isOneLinePaste then (r + lastLineLen) else lastLineLen) }) gst
-- | Debug helper: dump the top clipboard entry to ./clip.tmp and return
-- the state unchanged.
-- NOTE(review): partial — @!! 0@ crashes when the clipboard is empty;
-- confirm callers only invoke this after a copy has occurred.
tmpWriteClipboard gst = do
  writeFile "./clip.tmp" $ lineSeqToStr ((view clipboard gst) !! 0)
  return gst
| null | https://raw.githubusercontent.com/sixohsix/tak/6310d19faa683156933dde38666c11dc087d79ea/src/Tak/Editor/Selection.hs | haskell | module Tak.Editor.Selection where
import Tak.Types
import Tak.GlobalState
import Tak.Buffer
import qualified Tak.Buffer.Line as L
import Tak.Range
import Tak.Editor.Cursor
import Tak.Editor.Undo (pushUndo)
import Data.List (sort)
import qualified Data.Sequence as Seq
import Control.Arrow ( (>>>) )
import Control.Lens
startSelecting :: SimpleEditor -> SimpleEditor
startSelecting st =
st { selState = (selState st) { openRange = Just (insertPos st) } }
cancelSelecting :: SimpleEditor -> SimpleEditor
cancelSelecting st =
st { selState = (selState st) { openRange = Nothing } }
currentSelection :: SimpleEditor -> Maybe Range
currentSelection st =
let selSt = selState st
Just rangeStartPos = openRange selSt
in case openRange selSt of
Just rangeStartPos ->
let rangeStopPos = insertPos st
in if rangeStartPos /= rangeStopPos
then Just $ makeRange rangeStartPos rangeStopPos
else Nothing
Nothing -> Nothing
applyIfSelection :: (Range -> SimpleEditor -> SimpleEditor) -> SimpleEditor -> SimpleEditor
applyIfSelection f ed = maybe ed (\r -> f r ed) (currentSelection ed)
deleteSelection :: SimpleEditor -> SimpleEditor
deleteSelection = cancelSelecting . applyIfSelection deleteRange
deleteRange :: Range -> SimpleEditor -> SimpleEditor
deleteRange r ed =
let buf = buffer ed
in (pushUndo ed) { buffer = delSelection buf (asTuple r),
cursorPos = posWithinBuffer buf (startPos r) }
copySelection :: GlobalState -> GlobalState
copySelection gst =
let ed = activeEditor gst
in maybe gst (\r -> set clipboard ( (getSelection (buffer ed) (asTuple r)):(view clipboard gst) ) gst) (currentSelection ed)
pasteAtInsertPos :: GlobalState -> GlobalState
pasteAtInsertPos gst
| Nothing == (pasteable gst) = gst
| otherwise =
let ed = activeEditor gst
buf = buffer ed
iPos = insertPos ed
Pos l r = iPos
Just pasteSeq = pasteable gst
lPasteSeq = Seq.length pasteSeq
isOneLinePaste = lPasteSeq == 1
lastLineLen = L.length $ Seq.index pasteSeq (lPasteSeq - 1)
in (set editor $ (pushUndo ed) { buffer = insertLineSeqIntoBuffer buf iPos pasteSeq,
cursorPos = Pos (l + (Seq.length pasteSeq) - 1)
(if isOneLinePaste then (r + lastLineLen) else lastLineLen) }) gst
tmpWriteClipboard gst = do
writeFile "./clip.tmp" $ lineSeqToStr ((view clipboard gst) !! 0)
return gst
|
|
30378279910f99fb3728c19956fcc9eb27dc5fe0470f6c665640c00d0d28493d | ocamllabs/ocaml-modular-implicits | typedtreeMap.ml | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
, INRIA Saclay
(* *)
Copyright 2012 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
open Typedtree
(** Hooks supplied to the typed-tree mapper.  For every syntactic category
    there is an [enter_*] hook, applied to a node before its children are
    mapped, and a matching [leave_*] hook, applied to the rebuilt node
    afterwards (see [MakeMap] below for the traversal that calls them).
    An identity implementation of every hook yields a plain deep copy. *)
module type MapArgument = sig
  (* Pre-order hooks: called before the node's children are mapped. *)
  val enter_structure : structure -> structure
  val enter_value_description : value_description -> value_description
  val enter_type_declaration : type_declaration -> type_declaration
  val enter_type_extension : type_extension -> type_extension
  val enter_extension_constructor :
    extension_constructor -> extension_constructor
  val enter_pattern : pattern -> pattern
  val enter_expression : expression -> expression
  val enter_package_type : package_type -> package_type
  val enter_signature : signature -> signature
  val enter_signature_item : signature_item -> signature_item
  val enter_module_type_declaration : module_type_declaration -> module_type_declaration
  val enter_module_type : module_type -> module_type
  val enter_module_expr : module_expr -> module_expr
  val enter_with_constraint : with_constraint -> with_constraint
  val enter_class_expr : class_expr -> class_expr
  val enter_class_signature : class_signature -> class_signature
  val enter_class_declaration : class_declaration -> class_declaration
  val enter_class_description : class_description -> class_description
  val enter_class_type_declaration :
    class_type_declaration -> class_type_declaration
  val enter_class_type : class_type -> class_type
  val enter_class_type_field : class_type_field -> class_type_field
  val enter_core_type : core_type -> core_type
  val enter_class_structure : class_structure -> class_structure
  val enter_class_field : class_field -> class_field
  val enter_structure_item : structure_item -> structure_item

  (* Post-order hooks: called after the node's children have been mapped. *)
  val leave_structure : structure -> structure
  val leave_value_description : value_description -> value_description
  val leave_type_declaration : type_declaration -> type_declaration
  val leave_type_extension : type_extension -> type_extension
  val leave_extension_constructor :
    extension_constructor -> extension_constructor
  val leave_pattern : pattern -> pattern
  val leave_expression : expression -> expression
  val leave_package_type : package_type -> package_type
  val leave_signature : signature -> signature
  val leave_signature_item : signature_item -> signature_item
  val leave_module_type_declaration : module_type_declaration -> module_type_declaration
  val leave_module_type : module_type -> module_type
  val leave_module_expr : module_expr -> module_expr
  val leave_with_constraint : with_constraint -> with_constraint
  val leave_class_expr : class_expr -> class_expr
  val leave_class_signature : class_signature -> class_signature
  val leave_class_declaration : class_declaration -> class_declaration
  val leave_class_description : class_description -> class_description
  val leave_class_type_declaration :
    class_type_declaration -> class_type_declaration
  val leave_class_type : class_type -> class_type
  val leave_class_type_field : class_type_field -> class_type_field
  val leave_core_type : core_type -> core_type
  val leave_class_structure : class_structure -> class_structure
  val leave_class_field : class_field -> class_field
  val leave_structure_item : structure_item -> structure_item
end
module MakeMap(Map : MapArgument) = struct
let may_map f v =
match v with
None -> v
| Some x -> Some (f x)
open Misc
let rec map_structure str =
let str = Map.enter_structure str in
let str_items = List.map map_structure_item str.str_items in
Map.leave_structure { str with str_items = str_items }
and map_binding vb =
{
vb_pat = map_pattern vb.vb_pat;
vb_expr = map_expression vb.vb_expr;
vb_attributes = vb.vb_attributes;
vb_loc = vb.vb_loc;
}
and map_bindings rec_flag list =
List.map map_binding list
and map_case {c_lhs; c_guard; c_rhs} =
{
c_lhs = map_pattern c_lhs;
c_guard = may_map map_expression c_guard;
c_rhs = map_expression c_rhs;
}
and map_cases list =
List.map map_case list
and map_structure_item item =
let item = Map.enter_structure_item item in
let str_desc =
match item.str_desc with
Tstr_eval (exp, attrs) -> Tstr_eval (map_expression exp, attrs)
| Tstr_value (rec_flag, list) ->
Tstr_value (rec_flag, map_bindings rec_flag list)
| Tstr_primitive vd ->
Tstr_primitive (map_value_description vd)
| Tstr_type list ->
Tstr_type (List.map map_type_declaration list)
| Tstr_typext tyext ->
Tstr_typext (map_type_extension tyext)
| Tstr_exception ext ->
Tstr_exception (map_extension_constructor ext)
| Tstr_module x ->
Tstr_module (map_module_binding x)
| Tstr_recmodule list ->
let list = List.map map_module_binding list in
Tstr_recmodule list
| Tstr_modtype mtd ->
Tstr_modtype (map_module_type_declaration mtd)
| Tstr_open od -> Tstr_open od
| Tstr_class list ->
let list =
List.map
(fun (ci, string_list, virtual_flag) ->
map_class_declaration ci, string_list, virtual_flag)
list
in
Tstr_class list
| Tstr_class_type list ->
let list =
List.map
(fun (id, name, ct) ->
id, name, map_class_type_declaration ct)
list
in
Tstr_class_type list
| Tstr_include incl ->
Tstr_include {incl with incl_mod = map_module_expr incl.incl_mod}
| Tstr_attribute x -> Tstr_attribute x
in
Map.leave_structure_item { item with str_desc = str_desc}
and map_module_binding x =
{x with mb_expr = map_module_expr x.mb_expr}
and map_value_description v =
let v = Map.enter_value_description v in
let val_desc = map_core_type v.val_desc in
Map.leave_value_description { v with val_desc = val_desc }
and map_type_declaration decl =
let decl = Map.enter_type_declaration decl in
let typ_params = List.map map_type_parameter decl.typ_params in
let typ_cstrs = List.map (fun (ct1, ct2, loc) ->
(map_core_type ct1,
map_core_type ct2,
loc)
) decl.typ_cstrs in
let typ_kind = match decl.typ_kind with
Ttype_abstract -> Ttype_abstract
| Ttype_variant list ->
let list = List.map map_constructor_declaration list in
Ttype_variant list
| Ttype_record list ->
let list =
List.map
(fun ld ->
{ld with ld_type = map_core_type ld.ld_type}
) list
in
Ttype_record list
| Ttype_open -> Ttype_open
in
let typ_manifest = may_map map_core_type decl.typ_manifest in
Map.leave_type_declaration { decl with typ_params = typ_params;
typ_cstrs = typ_cstrs; typ_kind = typ_kind; typ_manifest = typ_manifest }
and map_type_parameter (ct, v) = (map_core_type ct, v)
and map_constructor_declaration cd =
{cd with cd_args = List.map map_core_type cd.cd_args;
cd_res = may_map map_core_type cd.cd_res
}
and map_type_extension tyext =
let tyext = Map.enter_type_extension tyext in
let tyext_params = List.map map_type_parameter tyext.tyext_params in
let tyext_constructors =
List.map map_extension_constructor tyext.tyext_constructors
in
Map.leave_type_extension { tyext with tyext_params = tyext_params;
tyext_constructors = tyext_constructors }
and map_extension_constructor ext =
let ext = Map.enter_extension_constructor ext in
let ext_kind = match ext.ext_kind with
Text_decl(args, ret) ->
let args = List.map map_core_type args in
let ret = may_map map_core_type ret in
Text_decl(args, ret)
| Text_rebind(p, lid) -> Text_rebind(p, lid)
in
Map.leave_extension_constructor {ext with ext_kind = ext_kind}
and map_pattern pat =
let pat = Map.enter_pattern pat in
let pat_desc =
match pat.pat_desc with
| Tpat_alias (pat1, p, text) ->
let pat1 = map_pattern pat1 in
Tpat_alias (pat1, p, text)
| Tpat_tuple list -> Tpat_tuple (List.map map_pattern list)
| Tpat_construct (lid, cstr_decl, args) ->
Tpat_construct (lid, cstr_decl,
List.map map_pattern args)
| Tpat_variant (label, pato, rowo) ->
let pato = match pato with
None -> pato
| Some pat -> Some (map_pattern pat)
in
Tpat_variant (label, pato, rowo)
| Tpat_record (list, closed) ->
Tpat_record (List.map (fun (lid, lab_desc, pat) ->
(lid, lab_desc, map_pattern pat) ) list, closed)
| Tpat_array list -> Tpat_array (List.map map_pattern list)
| Tpat_or (p1, p2, rowo) ->
Tpat_or (map_pattern p1, map_pattern p2, rowo)
| Tpat_lazy p -> Tpat_lazy (map_pattern p)
| Tpat_constant _
| Tpat_any
| Tpat_var _ -> pat.pat_desc
in
let pat_extra = List.map map_pat_extra pat.pat_extra in
Map.leave_pattern { pat with pat_desc = pat_desc; pat_extra = pat_extra }
and map_pat_extra pat_extra =
match pat_extra with
| Tpat_constraint ct, loc, attrs -> (Tpat_constraint (map_core_type ct), loc, attrs)
| (Tpat_type _ | Tpat_unpack), _, _ -> pat_extra
and map_expression exp =
let exp = Map.enter_expression exp in
let exp_desc =
match exp.exp_desc with
Texp_ident (_, _, _)
| Texp_constant _ -> exp.exp_desc
| Texp_let (rec_flag, list, exp) ->
Texp_let (rec_flag,
map_bindings rec_flag list,
map_expression exp)
| Texp_function (label, cases, partial) ->
Texp_function (label, map_cases cases, partial)
| Texp_apply (exp, list) ->
Texp_apply (map_expression exp,
List.map map_argument list )
| Texp_match (exp, list1, list2, partial) ->
Texp_match (
map_expression exp,
map_cases list1,
map_cases list2,
partial
)
| Texp_try (exp, list) ->
Texp_try (
map_expression exp,
map_cases list
)
| Texp_tuple list ->
Texp_tuple (List.map map_expression list)
| Texp_construct (lid, cstr_desc, args) ->
Texp_construct (lid, cstr_desc,
List.map map_expression args )
| Texp_variant (label, expo) ->
let expo =match expo with
None -> expo
| Some exp -> Some (map_expression exp)
in
Texp_variant (label, expo)
| Texp_record (list, expo) ->
let list =
List.map (fun (lid, lab_desc, exp) ->
(lid, lab_desc, map_expression exp)
) list in
let expo = match expo with
None -> expo
| Some exp -> Some (map_expression exp)
in
Texp_record (list, expo)
| Texp_field (exp, lid, label) ->
Texp_field (map_expression exp, lid, label)
| Texp_setfield (exp1, lid, label, exp2) ->
Texp_setfield (
map_expression exp1,
lid,
label,
map_expression exp2)
| Texp_array list ->
Texp_array (List.map map_expression list)
| Texp_ifthenelse (exp1, exp2, expo) ->
Texp_ifthenelse (
map_expression exp1,
map_expression exp2,
match expo with
None -> expo
| Some exp -> Some (map_expression exp)
)
| Texp_sequence (exp1, exp2) ->
Texp_sequence (
map_expression exp1,
map_expression exp2
)
| Texp_while (exp1, exp2) ->
Texp_while (
map_expression exp1,
map_expression exp2
)
| Texp_for (id, name, exp1, exp2, dir, exp3) ->
Texp_for (
id, name,
map_expression exp1,
map_expression exp2,
dir,
map_expression exp3
)
| Texp_send (exp, meth, expo) ->
Texp_send (map_expression exp, meth, may_map map_expression expo)
| Texp_new (path, lid, cl_decl) -> exp.exp_desc
| Texp_instvar (_, path, _) -> exp.exp_desc
| Texp_setinstvar (path, lid, path2, exp) ->
Texp_setinstvar (path, lid, path2, map_expression exp)
| Texp_override (path, list) ->
Texp_override (
path,
List.map (fun (path, lid, exp) ->
(path, lid, map_expression exp)
) list
)
| Texp_letmodule (mb, exp) ->
Texp_letmodule (map_module_binding mb, map_expression exp)
| Texp_assert exp -> Texp_assert (map_expression exp)
| Texp_lazy exp -> Texp_lazy (map_expression exp)
| Texp_object (cl, string_list) ->
Texp_object (map_class_structure cl, string_list)
| Texp_pack (mexpr) ->
Texp_pack (map_module_expr mexpr)
in
let exp_extra = List.map map_exp_extra exp.exp_extra in
Map.leave_expression {
exp with
exp_desc = exp_desc;
exp_extra = exp_extra; }
and map_argument = function
| {arg_expression = None; _} as arg -> arg
| {arg_flag; arg_expression = Some exp} ->
{arg_flag; arg_expression = Some (map_expression exp)}
and map_exp_extra ((desc, loc, attrs) as exp_extra) =
match desc with
| Texp_constraint ct ->
Texp_constraint (map_core_type ct), loc, attrs
| Texp_coerce (None, ct) ->
Texp_coerce (None, map_core_type ct), loc, attrs
| Texp_coerce (Some ct1, ct2) ->
Texp_coerce (Some (map_core_type ct1),
map_core_type ct2), loc, attrs
| Texp_poly (Some ct) ->
Texp_poly (Some ( map_core_type ct )), loc, attrs
| Texp_newtype _
| Texp_open _
| Texp_poly None -> exp_extra
and map_package_type pack =
let pack = Map.enter_package_type pack in
let pack_fields = List.map (
fun (s, ct) -> (s, map_core_type ct) ) pack.pack_fields in
Map.leave_package_type { pack with pack_fields = pack_fields }
and map_signature sg =
let sg = Map.enter_signature sg in
let sig_items = List.map map_signature_item sg.sig_items in
Map.leave_signature { sg with sig_items = sig_items }
and map_signature_item item =
let item = Map.enter_signature_item item in
let sig_desc =
match item.sig_desc with
Tsig_value vd ->
Tsig_value (map_value_description vd)
| Tsig_type list -> Tsig_type (List.map map_type_declaration list)
| Tsig_typext tyext ->
Tsig_typext (map_type_extension tyext)
| Tsig_exception ext ->
Tsig_exception (map_extension_constructor ext)
| Tsig_module md ->
Tsig_module {md with md_type = map_module_type md.md_type}
| Tsig_recmodule list ->
Tsig_recmodule
(List.map
(fun md -> {md with md_type = map_module_type md.md_type})
list
)
| Tsig_modtype mtd ->
Tsig_modtype (map_module_type_declaration mtd)
| Tsig_open _ -> item.sig_desc
| Tsig_include incl ->
Tsig_include {incl with incl_mod = map_module_type incl.incl_mod}
| Tsig_class list -> Tsig_class (List.map map_class_description list)
| Tsig_class_type list ->
Tsig_class_type (List.map map_class_type_declaration list)
| Tsig_attribute _ as x -> x
in
Map.leave_signature_item { item with sig_desc = sig_desc }
and map_module_type_declaration mtd =
let mtd = Map.enter_module_type_declaration mtd in
let mtd = {mtd with mtd_type = may_map map_module_type mtd.mtd_type} in
Map.leave_module_type_declaration mtd
and map_class_declaration cd =
let cd = Map.enter_class_declaration cd in
let ci_params = List.map map_type_parameter cd.ci_params in
let ci_expr = map_class_expr cd.ci_expr in
Map.leave_class_declaration
{ cd with ci_params = ci_params; ci_expr = ci_expr }
and map_class_description cd =
let cd = Map.enter_class_description cd in
let ci_params = List.map map_type_parameter cd.ci_params in
let ci_expr = map_class_type cd.ci_expr in
Map.leave_class_description
{ cd with ci_params = ci_params; ci_expr = ci_expr}
and map_class_type_declaration cd =
let cd = Map.enter_class_type_declaration cd in
let ci_params = List.map map_type_parameter cd.ci_params in
let ci_expr = map_class_type cd.ci_expr in
Map.leave_class_type_declaration
{ cd with ci_params = ci_params; ci_expr = ci_expr }
and map_module_type mty =
let mty = Map.enter_module_type mty in
let mty_desc =
match mty.mty_desc with
Tmty_ident _ -> mty.mty_desc
| Tmty_alias _ -> mty.mty_desc
| Tmty_signature sg -> Tmty_signature (map_signature sg)
| Tmty_functor (mparam, mtype) ->
Tmty_functor (map_module_parameter mparam, map_module_type mtype)
| Tmty_with (mtype, list) ->
Tmty_with (map_module_type mtype,
List.map (fun (path, lid, withc) ->
(path, lid, map_with_constraint withc)
) list)
| Tmty_typeof mexpr ->
Tmty_typeof (map_module_expr mexpr)
in
Map.leave_module_type { mty with mty_desc = mty_desc}
and map_with_constraint cstr =
let cstr = Map.enter_with_constraint cstr in
let cstr =
match cstr with
Twith_type decl -> Twith_type (map_type_declaration decl)
| Twith_typesubst decl -> Twith_typesubst (map_type_declaration decl)
| Twith_module (path, lid) -> cstr
| Twith_modsubst (path, lid) -> cstr
in
Map.leave_with_constraint cstr
and map_module_expr mexpr =
let mexpr = Map.enter_module_expr mexpr in
let mod_desc =
match mexpr.mod_desc with
Tmod_ident (p, lid) -> mexpr.mod_desc
| Tmod_structure st -> Tmod_structure (map_structure st)
| Tmod_functor (mparam, mexpr) ->
Tmod_functor (map_module_parameter mparam, map_module_expr mexpr)
| Tmod_apply (mexp, marg) ->
Tmod_apply (map_module_expr mexp, map_module_argument marg)
| Tmod_constraint (mexpr, mod_type, Tmodtype_implicit, coercion ) ->
Tmod_constraint (map_module_expr mexpr, mod_type,
Tmodtype_implicit, coercion)
| Tmod_constraint (mexpr, mod_type,
Tmodtype_explicit mtype, coercion) ->
Tmod_constraint (map_module_expr mexpr, mod_type,
Tmodtype_explicit (map_module_type mtype),
coercion)
| Tmod_unpack (exp, mod_type) ->
Tmod_unpack (map_expression exp, mod_type)
in
Map.leave_module_expr { mexpr with mod_desc = mod_desc }
and map_module_parameter mparam =
match mparam with
| Tmpar_generative -> Tmpar_generative
| Tmpar_applicative(id, name, mtype) ->
Tmpar_applicative(id, name, map_module_type mtype)
| Tmpar_implicit(id, name, mtype) ->
Tmpar_implicit(id, name, map_module_type mtype)
and map_module_argument marg =
match marg with
| Tmarg_generative -> Tmarg_generative
| Tmarg_applicative(mexp, coercion) ->
Tmarg_applicative(map_module_expr mexp, coercion)
| Tmarg_implicit(mexp, coercion) ->
Tmarg_implicit(map_module_expr mexp, coercion)
and map_class_expr cexpr =
let cexpr = Map.enter_class_expr cexpr in
let cl_desc =
match cexpr.cl_desc with
| Tcl_constraint (cl, None, string_list1, string_list2, concr ) ->
Tcl_constraint (map_class_expr cl, None, string_list1,
string_list2, concr)
| Tcl_structure clstr -> Tcl_structure (map_class_structure clstr)
| Tcl_fun (label, pat, priv, cl, partial) ->
Tcl_fun (label, map_pattern pat,
List.map (fun (id, name, exp) ->
(id, name, map_expression exp)) priv,
map_class_expr cl, partial)
| Tcl_apply (cl, args) ->
Tcl_apply (map_class_expr cl,
List.map map_argument args)
| Tcl_let (rec_flat, bindings, ivars, cl) ->
Tcl_let (rec_flat, map_bindings rec_flat bindings,
List.map (fun (id, name, exp) ->
(id, name, map_expression exp)) ivars,
map_class_expr cl)
| Tcl_constraint (cl, Some clty, vals, meths, concrs) ->
Tcl_constraint ( map_class_expr cl,
Some (map_class_type clty), vals, meths, concrs)
| Tcl_ident (id, name, tyl) ->
Tcl_ident (id, name, List.map map_core_type tyl)
in
Map.leave_class_expr { cexpr with cl_desc = cl_desc }
and map_class_type ct =
let ct = Map.enter_class_type ct in
let cltyp_desc =
match ct.cltyp_desc with
Tcty_signature csg -> Tcty_signature (map_class_signature csg)
| Tcty_constr (path, lid, list) ->
Tcty_constr (path, lid, List.map map_core_type list)
| Tcty_arrow (label, ct, cl) ->
Tcty_arrow (label, map_core_type ct, map_class_type cl)
in
Map.leave_class_type { ct with cltyp_desc = cltyp_desc }
and map_class_signature cs =
let cs = Map.enter_class_signature cs in
let csig_self = map_core_type cs.csig_self in
let csig_fields = List.map map_class_type_field cs.csig_fields in
Map.leave_class_signature { cs with
csig_self = csig_self; csig_fields = csig_fields }
and map_class_type_field ctf =
let ctf = Map.enter_class_type_field ctf in
let ctf_desc =
match ctf.ctf_desc with
Tctf_inherit ct -> Tctf_inherit (map_class_type ct)
| Tctf_val (s, mut, virt, ct) ->
Tctf_val (s, mut, virt, map_core_type ct)
| Tctf_method (s, priv, virt, ct) ->
Tctf_method (s, priv, virt, map_core_type ct)
| Tctf_constraint (ct1, ct2) ->
Tctf_constraint (map_core_type ct1, map_core_type ct2)
| Tctf_attribute _ as x -> x
in
Map.leave_class_type_field { ctf with ctf_desc = ctf_desc }
and map_core_type ct =
let ct = Map.enter_core_type ct in
let ctyp_desc =
match ct.ctyp_desc with
Ttyp_any
| Ttyp_var _ -> ct.ctyp_desc
| Ttyp_arrow (label, ct1, ct2) ->
Ttyp_arrow (label, map_core_type ct1, map_core_type ct2)
| Ttyp_tuple list -> Ttyp_tuple (List.map map_core_type list)
| Ttyp_constr (path, lid, list) ->
Ttyp_constr (path, lid, List.map map_core_type list)
| Ttyp_object (list, o) ->
Ttyp_object
(List.map (fun (s, a, t) -> (s, a, map_core_type t)) list, o)
| Ttyp_class (path, lid, list) ->
Ttyp_class (path, lid, List.map map_core_type list)
| Ttyp_alias (ct, s) -> Ttyp_alias (map_core_type ct, s)
| Ttyp_variant (list, bool, labels) ->
Ttyp_variant (List.map map_row_field list, bool, labels)
| Ttyp_poly (list, ct) -> Ttyp_poly (list, map_core_type ct)
| Ttyp_package pack -> Ttyp_package (map_package_type pack)
in
Map.leave_core_type { ct with ctyp_desc = ctyp_desc }
and map_class_structure cs =
let cs = Map.enter_class_structure cs in
let cstr_self = map_pattern cs.cstr_self in
let cstr_fields = List.map map_class_field cs.cstr_fields in
Map.leave_class_structure { cs with cstr_self; cstr_fields }
and map_row_field rf =
match rf with
Ttag (label, attrs, bool, list) ->
Ttag (label, attrs, bool, List.map map_core_type list)
| Tinherit ct -> Tinherit (map_core_type ct)
and map_class_field cf =
let cf = Map.enter_class_field cf in
let cf_desc =
match cf.cf_desc with
Tcf_inherit (ovf, cl, super, vals, meths) ->
Tcf_inherit (ovf, map_class_expr cl, super, vals, meths)
| Tcf_constraint (cty, cty') ->
Tcf_constraint (map_core_type cty, map_core_type cty')
| Tcf_val (lab, mut, ident, Tcfk_virtual cty, b) ->
Tcf_val (lab, mut, ident, Tcfk_virtual (map_core_type cty), b)
| Tcf_val (lab, mut, ident, Tcfk_concrete (o, exp), b) ->
Tcf_val (lab, mut, ident, Tcfk_concrete (o, map_expression exp), b)
| Tcf_method (lab, priv, Tcfk_virtual cty) ->
Tcf_method (lab, priv, Tcfk_virtual (map_core_type cty))
| Tcf_method (lab, priv, Tcfk_concrete (o, exp)) ->
Tcf_method (lab, priv, Tcfk_concrete (o, map_expression exp))
| Tcf_initializer exp -> Tcf_initializer (map_expression exp)
| Tcf_attribute _ as x -> x
in
Map.leave_class_field { cf with cf_desc = cf_desc }
end
module DefaultMapArgument = struct
let enter_structure t = t
let enter_value_description t = t
let enter_type_declaration t = t
let enter_type_extension t = t
let enter_extension_constructor t = t
let enter_pattern t = t
let enter_expression t = t
let enter_package_type t = t
let enter_signature t = t
let enter_signature_item t = t
let enter_module_type_declaration t = t
let enter_module_type t = t
let enter_module_expr t = t
let enter_with_constraint t = t
let enter_class_expr t = t
let enter_class_signature t = t
let enter_class_declaration t = t
let enter_class_description t = t
let enter_class_type_declaration t = t
let enter_class_type t = t
let enter_class_type_field t = t
let enter_core_type t = t
let enter_class_structure t = t
let enter_class_field t = t
let enter_structure_item t = t
let leave_structure t = t
let leave_value_description t = t
let leave_type_declaration t = t
let leave_type_extension t = t
let leave_extension_constructor t = t
let leave_pattern t = t
let leave_expression t = t
let leave_package_type t = t
let leave_signature t = t
let leave_signature_item t = t
let leave_module_type_declaration t = t
let leave_module_type t = t
let leave_module_expr t = t
let leave_with_constraint t = t
let leave_class_expr t = t
let leave_class_signature t = t
let leave_class_declaration t = t
let leave_class_description t = t
let leave_class_type_declaration t = t
let leave_class_type t = t
let leave_class_type_field t = t
let leave_core_type t = t
let leave_class_structure t = t
let leave_class_field t = t
let leave_structure_item t = t
end
| null | https://raw.githubusercontent.com/ocamllabs/ocaml-modular-implicits/92e45da5c8a4c2db8b2cd5be28a5bec2ac2181f1/typing/typedtreeMap.ml | ocaml | *********************************************************************
OCaml
********************************************************************* | , INRIA Saclay
Copyright 2012 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
open Typedtree
module type MapArgument = sig
val enter_structure : structure -> structure
val enter_value_description : value_description -> value_description
val enter_type_declaration : type_declaration -> type_declaration
val enter_type_extension : type_extension -> type_extension
val enter_extension_constructor :
extension_constructor -> extension_constructor
val enter_pattern : pattern -> pattern
val enter_expression : expression -> expression
val enter_package_type : package_type -> package_type
val enter_signature : signature -> signature
val enter_signature_item : signature_item -> signature_item
val enter_module_type_declaration : module_type_declaration -> module_type_declaration
val enter_module_type : module_type -> module_type
val enter_module_expr : module_expr -> module_expr
val enter_with_constraint : with_constraint -> with_constraint
val enter_class_expr : class_expr -> class_expr
val enter_class_signature : class_signature -> class_signature
val enter_class_declaration : class_declaration -> class_declaration
val enter_class_description : class_description -> class_description
val enter_class_type_declaration :
class_type_declaration -> class_type_declaration
val enter_class_type : class_type -> class_type
val enter_class_type_field : class_type_field -> class_type_field
val enter_core_type : core_type -> core_type
val enter_class_structure : class_structure -> class_structure
val enter_class_field : class_field -> class_field
val enter_structure_item : structure_item -> structure_item
val leave_structure : structure -> structure
val leave_value_description : value_description -> value_description
val leave_type_declaration : type_declaration -> type_declaration
val leave_type_extension : type_extension -> type_extension
val leave_extension_constructor :
extension_constructor -> extension_constructor
val leave_pattern : pattern -> pattern
val leave_expression : expression -> expression
val leave_package_type : package_type -> package_type
val leave_signature : signature -> signature
val leave_signature_item : signature_item -> signature_item
val leave_module_type_declaration : module_type_declaration -> module_type_declaration
val leave_module_type : module_type -> module_type
val leave_module_expr : module_expr -> module_expr
val leave_with_constraint : with_constraint -> with_constraint
val leave_class_expr : class_expr -> class_expr
val leave_class_signature : class_signature -> class_signature
val leave_class_declaration : class_declaration -> class_declaration
val leave_class_description : class_description -> class_description
val leave_class_type_declaration :
class_type_declaration -> class_type_declaration
val leave_class_type : class_type -> class_type
val leave_class_type_field : class_type_field -> class_type_field
val leave_core_type : core_type -> core_type
val leave_class_structure : class_structure -> class_structure
val leave_class_field : class_field -> class_field
val leave_structure_item : structure_item -> structure_item
end
module MakeMap(Map : MapArgument) = struct
let may_map f v =
match v with
None -> v
| Some x -> Some (f x)
open Misc
let rec map_structure str =
let str = Map.enter_structure str in
let str_items = List.map map_structure_item str.str_items in
Map.leave_structure { str with str_items = str_items }
and map_binding vb =
{
vb_pat = map_pattern vb.vb_pat;
vb_expr = map_expression vb.vb_expr;
vb_attributes = vb.vb_attributes;
vb_loc = vb.vb_loc;
}
and map_bindings rec_flag list =
List.map map_binding list
and map_case {c_lhs; c_guard; c_rhs} =
{
c_lhs = map_pattern c_lhs;
c_guard = may_map map_expression c_guard;
c_rhs = map_expression c_rhs;
}
and map_cases list =
List.map map_case list
and map_structure_item item =
let item = Map.enter_structure_item item in
let str_desc =
match item.str_desc with
Tstr_eval (exp, attrs) -> Tstr_eval (map_expression exp, attrs)
| Tstr_value (rec_flag, list) ->
Tstr_value (rec_flag, map_bindings rec_flag list)
| Tstr_primitive vd ->
Tstr_primitive (map_value_description vd)
| Tstr_type list ->
Tstr_type (List.map map_type_declaration list)
| Tstr_typext tyext ->
Tstr_typext (map_type_extension tyext)
| Tstr_exception ext ->
Tstr_exception (map_extension_constructor ext)
| Tstr_module x ->
Tstr_module (map_module_binding x)
| Tstr_recmodule list ->
let list = List.map map_module_binding list in
Tstr_recmodule list
| Tstr_modtype mtd ->
Tstr_modtype (map_module_type_declaration mtd)
| Tstr_open od -> Tstr_open od
| Tstr_class list ->
let list =
List.map
(fun (ci, string_list, virtual_flag) ->
map_class_declaration ci, string_list, virtual_flag)
list
in
Tstr_class list
| Tstr_class_type list ->
let list =
List.map
(fun (id, name, ct) ->
id, name, map_class_type_declaration ct)
list
in
Tstr_class_type list
| Tstr_include incl ->
Tstr_include {incl with incl_mod = map_module_expr incl.incl_mod}
| Tstr_attribute x -> Tstr_attribute x
in
Map.leave_structure_item { item with str_desc = str_desc}
and map_module_binding x =
{x with mb_expr = map_module_expr x.mb_expr}
and map_value_description v =
let v = Map.enter_value_description v in
let val_desc = map_core_type v.val_desc in
Map.leave_value_description { v with val_desc = val_desc }
and map_type_declaration decl =
let decl = Map.enter_type_declaration decl in
let typ_params = List.map map_type_parameter decl.typ_params in
let typ_cstrs = List.map (fun (ct1, ct2, loc) ->
(map_core_type ct1,
map_core_type ct2,
loc)
) decl.typ_cstrs in
let typ_kind = match decl.typ_kind with
Ttype_abstract -> Ttype_abstract
| Ttype_variant list ->
let list = List.map map_constructor_declaration list in
Ttype_variant list
| Ttype_record list ->
let list =
List.map
(fun ld ->
{ld with ld_type = map_core_type ld.ld_type}
) list
in
Ttype_record list
| Ttype_open -> Ttype_open
in
let typ_manifest = may_map map_core_type decl.typ_manifest in
Map.leave_type_declaration { decl with typ_params = typ_params;
typ_cstrs = typ_cstrs; typ_kind = typ_kind; typ_manifest = typ_manifest }
and map_type_parameter (ct, v) = (map_core_type ct, v)
and map_constructor_declaration cd =
{cd with cd_args = List.map map_core_type cd.cd_args;
cd_res = may_map map_core_type cd.cd_res
}
and map_type_extension tyext =
let tyext = Map.enter_type_extension tyext in
let tyext_params = List.map map_type_parameter tyext.tyext_params in
let tyext_constructors =
List.map map_extension_constructor tyext.tyext_constructors
in
Map.leave_type_extension { tyext with tyext_params = tyext_params;
tyext_constructors = tyext_constructors }
and map_extension_constructor ext =
let ext = Map.enter_extension_constructor ext in
let ext_kind = match ext.ext_kind with
Text_decl(args, ret) ->
let args = List.map map_core_type args in
let ret = may_map map_core_type ret in
Text_decl(args, ret)
| Text_rebind(p, lid) -> Text_rebind(p, lid)
in
Map.leave_extension_constructor {ext with ext_kind = ext_kind}
and map_pattern pat =
let pat = Map.enter_pattern pat in
let pat_desc =
match pat.pat_desc with
| Tpat_alias (pat1, p, text) ->
let pat1 = map_pattern pat1 in
Tpat_alias (pat1, p, text)
| Tpat_tuple list -> Tpat_tuple (List.map map_pattern list)
| Tpat_construct (lid, cstr_decl, args) ->
Tpat_construct (lid, cstr_decl,
List.map map_pattern args)
| Tpat_variant (label, pato, rowo) ->
let pato = match pato with
None -> pato
| Some pat -> Some (map_pattern pat)
in
Tpat_variant (label, pato, rowo)
| Tpat_record (list, closed) ->
Tpat_record (List.map (fun (lid, lab_desc, pat) ->
(lid, lab_desc, map_pattern pat) ) list, closed)
| Tpat_array list -> Tpat_array (List.map map_pattern list)
| Tpat_or (p1, p2, rowo) ->
Tpat_or (map_pattern p1, map_pattern p2, rowo)
| Tpat_lazy p -> Tpat_lazy (map_pattern p)
| Tpat_constant _
| Tpat_any
| Tpat_var _ -> pat.pat_desc
in
let pat_extra = List.map map_pat_extra pat.pat_extra in
Map.leave_pattern { pat with pat_desc = pat_desc; pat_extra = pat_extra }
and map_pat_extra pat_extra =
match pat_extra with
| Tpat_constraint ct, loc, attrs -> (Tpat_constraint (map_core_type ct), loc, attrs)
| (Tpat_type _ | Tpat_unpack), _, _ -> pat_extra
and map_expression exp =
let exp = Map.enter_expression exp in
let exp_desc =
match exp.exp_desc with
Texp_ident (_, _, _)
| Texp_constant _ -> exp.exp_desc
| Texp_let (rec_flag, list, exp) ->
Texp_let (rec_flag,
map_bindings rec_flag list,
map_expression exp)
| Texp_function (label, cases, partial) ->
Texp_function (label, map_cases cases, partial)
| Texp_apply (exp, list) ->
Texp_apply (map_expression exp,
List.map map_argument list )
| Texp_match (exp, list1, list2, partial) ->
Texp_match (
map_expression exp,
map_cases list1,
map_cases list2,
partial
)
| Texp_try (exp, list) ->
Texp_try (
map_expression exp,
map_cases list
)
| Texp_tuple list ->
Texp_tuple (List.map map_expression list)
| Texp_construct (lid, cstr_desc, args) ->
Texp_construct (lid, cstr_desc,
List.map map_expression args )
| Texp_variant (label, expo) ->
let expo =match expo with
None -> expo
| Some exp -> Some (map_expression exp)
in
Texp_variant (label, expo)
| Texp_record (list, expo) ->
let list =
List.map (fun (lid, lab_desc, exp) ->
(lid, lab_desc, map_expression exp)
) list in
let expo = match expo with
None -> expo
| Some exp -> Some (map_expression exp)
in
Texp_record (list, expo)
| Texp_field (exp, lid, label) ->
Texp_field (map_expression exp, lid, label)
| Texp_setfield (exp1, lid, label, exp2) ->
Texp_setfield (
map_expression exp1,
lid,
label,
map_expression exp2)
| Texp_array list ->
Texp_array (List.map map_expression list)
| Texp_ifthenelse (exp1, exp2, expo) ->
Texp_ifthenelse (
map_expression exp1,
map_expression exp2,
match expo with
None -> expo
| Some exp -> Some (map_expression exp)
)
| Texp_sequence (exp1, exp2) ->
Texp_sequence (
map_expression exp1,
map_expression exp2
)
| Texp_while (exp1, exp2) ->
Texp_while (
map_expression exp1,
map_expression exp2
)
| Texp_for (id, name, exp1, exp2, dir, exp3) ->
Texp_for (
id, name,
map_expression exp1,
map_expression exp2,
dir,
map_expression exp3
)
| Texp_send (exp, meth, expo) ->
Texp_send (map_expression exp, meth, may_map map_expression expo)
| Texp_new (path, lid, cl_decl) -> exp.exp_desc
| Texp_instvar (_, path, _) -> exp.exp_desc
| Texp_setinstvar (path, lid, path2, exp) ->
Texp_setinstvar (path, lid, path2, map_expression exp)
| Texp_override (path, list) ->
Texp_override (
path,
List.map (fun (path, lid, exp) ->
(path, lid, map_expression exp)
) list
)
| Texp_letmodule (mb, exp) ->
Texp_letmodule (map_module_binding mb, map_expression exp)
| Texp_assert exp -> Texp_assert (map_expression exp)
| Texp_lazy exp -> Texp_lazy (map_expression exp)
| Texp_object (cl, string_list) ->
Texp_object (map_class_structure cl, string_list)
| Texp_pack (mexpr) ->
Texp_pack (map_module_expr mexpr)
in
let exp_extra = List.map map_exp_extra exp.exp_extra in
Map.leave_expression {
exp with
exp_desc = exp_desc;
exp_extra = exp_extra; }
and map_argument = function
| {arg_expression = None; _} as arg -> arg
| {arg_flag; arg_expression = Some exp} ->
{arg_flag; arg_expression = Some (map_expression exp)}
and map_exp_extra ((desc, loc, attrs) as exp_extra) =
match desc with
| Texp_constraint ct ->
Texp_constraint (map_core_type ct), loc, attrs
| Texp_coerce (None, ct) ->
Texp_coerce (None, map_core_type ct), loc, attrs
| Texp_coerce (Some ct1, ct2) ->
Texp_coerce (Some (map_core_type ct1),
map_core_type ct2), loc, attrs
| Texp_poly (Some ct) ->
Texp_poly (Some ( map_core_type ct )), loc, attrs
| Texp_newtype _
| Texp_open _
| Texp_poly None -> exp_extra
and map_package_type pack =
let pack = Map.enter_package_type pack in
let pack_fields = List.map (
fun (s, ct) -> (s, map_core_type ct) ) pack.pack_fields in
Map.leave_package_type { pack with pack_fields = pack_fields }
and map_signature sg =
let sg = Map.enter_signature sg in
let sig_items = List.map map_signature_item sg.sig_items in
Map.leave_signature { sg with sig_items = sig_items }
and map_signature_item item =
let item = Map.enter_signature_item item in
let sig_desc =
match item.sig_desc with
Tsig_value vd ->
Tsig_value (map_value_description vd)
| Tsig_type list -> Tsig_type (List.map map_type_declaration list)
| Tsig_typext tyext ->
Tsig_typext (map_type_extension tyext)
| Tsig_exception ext ->
Tsig_exception (map_extension_constructor ext)
| Tsig_module md ->
Tsig_module {md with md_type = map_module_type md.md_type}
| Tsig_recmodule list ->
Tsig_recmodule
(List.map
(fun md -> {md with md_type = map_module_type md.md_type})
list
)
| Tsig_modtype mtd ->
Tsig_modtype (map_module_type_declaration mtd)
| Tsig_open _ -> item.sig_desc
| Tsig_include incl ->
Tsig_include {incl with incl_mod = map_module_type incl.incl_mod}
| Tsig_class list -> Tsig_class (List.map map_class_description list)
| Tsig_class_type list ->
Tsig_class_type (List.map map_class_type_declaration list)
| Tsig_attribute _ as x -> x
in
Map.leave_signature_item { item with sig_desc = sig_desc }
and map_module_type_declaration mtd =
let mtd = Map.enter_module_type_declaration mtd in
let mtd = {mtd with mtd_type = may_map map_module_type mtd.mtd_type} in
Map.leave_module_type_declaration mtd
and map_class_declaration cd =
let cd = Map.enter_class_declaration cd in
let ci_params = List.map map_type_parameter cd.ci_params in
let ci_expr = map_class_expr cd.ci_expr in
Map.leave_class_declaration
{ cd with ci_params = ci_params; ci_expr = ci_expr }
and map_class_description cd =
let cd = Map.enter_class_description cd in
let ci_params = List.map map_type_parameter cd.ci_params in
let ci_expr = map_class_type cd.ci_expr in
Map.leave_class_description
{ cd with ci_params = ci_params; ci_expr = ci_expr}
and map_class_type_declaration cd =
let cd = Map.enter_class_type_declaration cd in
let ci_params = List.map map_type_parameter cd.ci_params in
let ci_expr = map_class_type cd.ci_expr in
Map.leave_class_type_declaration
{ cd with ci_params = ci_params; ci_expr = ci_expr }
and map_module_type mty =
let mty = Map.enter_module_type mty in
let mty_desc =
match mty.mty_desc with
Tmty_ident _ -> mty.mty_desc
| Tmty_alias _ -> mty.mty_desc
| Tmty_signature sg -> Tmty_signature (map_signature sg)
| Tmty_functor (mparam, mtype) ->
Tmty_functor (map_module_parameter mparam, map_module_type mtype)
| Tmty_with (mtype, list) ->
Tmty_with (map_module_type mtype,
List.map (fun (path, lid, withc) ->
(path, lid, map_with_constraint withc)
) list)
| Tmty_typeof mexpr ->
Tmty_typeof (map_module_expr mexpr)
in
Map.leave_module_type { mty with mty_desc = mty_desc}
and map_with_constraint cstr =
let cstr = Map.enter_with_constraint cstr in
let cstr =
match cstr with
Twith_type decl -> Twith_type (map_type_declaration decl)
| Twith_typesubst decl -> Twith_typesubst (map_type_declaration decl)
| Twith_module (path, lid) -> cstr
| Twith_modsubst (path, lid) -> cstr
in
Map.leave_with_constraint cstr
and map_module_expr mexpr =
let mexpr = Map.enter_module_expr mexpr in
let mod_desc =
match mexpr.mod_desc with
Tmod_ident (p, lid) -> mexpr.mod_desc
| Tmod_structure st -> Tmod_structure (map_structure st)
| Tmod_functor (mparam, mexpr) ->
Tmod_functor (map_module_parameter mparam, map_module_expr mexpr)
| Tmod_apply (mexp, marg) ->
Tmod_apply (map_module_expr mexp, map_module_argument marg)
| Tmod_constraint (mexpr, mod_type, Tmodtype_implicit, coercion ) ->
Tmod_constraint (map_module_expr mexpr, mod_type,
Tmodtype_implicit, coercion)
| Tmod_constraint (mexpr, mod_type,
Tmodtype_explicit mtype, coercion) ->
Tmod_constraint (map_module_expr mexpr, mod_type,
Tmodtype_explicit (map_module_type mtype),
coercion)
| Tmod_unpack (exp, mod_type) ->
Tmod_unpack (map_expression exp, mod_type)
in
Map.leave_module_expr { mexpr with mod_desc = mod_desc }
and map_module_parameter mparam =
match mparam with
| Tmpar_generative -> Tmpar_generative
| Tmpar_applicative(id, name, mtype) ->
Tmpar_applicative(id, name, map_module_type mtype)
| Tmpar_implicit(id, name, mtype) ->
Tmpar_implicit(id, name, map_module_type mtype)
and map_module_argument marg =
match marg with
| Tmarg_generative -> Tmarg_generative
| Tmarg_applicative(mexp, coercion) ->
Tmarg_applicative(map_module_expr mexp, coercion)
| Tmarg_implicit(mexp, coercion) ->
Tmarg_implicit(map_module_expr mexp, coercion)
and map_class_expr cexpr =
let cexpr = Map.enter_class_expr cexpr in
let cl_desc =
match cexpr.cl_desc with
| Tcl_constraint (cl, None, string_list1, string_list2, concr ) ->
Tcl_constraint (map_class_expr cl, None, string_list1,
string_list2, concr)
| Tcl_structure clstr -> Tcl_structure (map_class_structure clstr)
| Tcl_fun (label, pat, priv, cl, partial) ->
Tcl_fun (label, map_pattern pat,
List.map (fun (id, name, exp) ->
(id, name, map_expression exp)) priv,
map_class_expr cl, partial)
| Tcl_apply (cl, args) ->
Tcl_apply (map_class_expr cl,
List.map map_argument args)
| Tcl_let (rec_flat, bindings, ivars, cl) ->
Tcl_let (rec_flat, map_bindings rec_flat bindings,
List.map (fun (id, name, exp) ->
(id, name, map_expression exp)) ivars,
map_class_expr cl)
| Tcl_constraint (cl, Some clty, vals, meths, concrs) ->
Tcl_constraint ( map_class_expr cl,
Some (map_class_type clty), vals, meths, concrs)
| Tcl_ident (id, name, tyl) ->
Tcl_ident (id, name, List.map map_core_type tyl)
in
Map.leave_class_expr { cexpr with cl_desc = cl_desc }
and map_class_type ct =
let ct = Map.enter_class_type ct in
let cltyp_desc =
match ct.cltyp_desc with
Tcty_signature csg -> Tcty_signature (map_class_signature csg)
| Tcty_constr (path, lid, list) ->
Tcty_constr (path, lid, List.map map_core_type list)
| Tcty_arrow (label, ct, cl) ->
Tcty_arrow (label, map_core_type ct, map_class_type cl)
in
Map.leave_class_type { ct with cltyp_desc = cltyp_desc }
and map_class_signature cs =
let cs = Map.enter_class_signature cs in
let csig_self = map_core_type cs.csig_self in
let csig_fields = List.map map_class_type_field cs.csig_fields in
Map.leave_class_signature { cs with
csig_self = csig_self; csig_fields = csig_fields }
and map_class_type_field ctf =
let ctf = Map.enter_class_type_field ctf in
let ctf_desc =
match ctf.ctf_desc with
Tctf_inherit ct -> Tctf_inherit (map_class_type ct)
| Tctf_val (s, mut, virt, ct) ->
Tctf_val (s, mut, virt, map_core_type ct)
| Tctf_method (s, priv, virt, ct) ->
Tctf_method (s, priv, virt, map_core_type ct)
| Tctf_constraint (ct1, ct2) ->
Tctf_constraint (map_core_type ct1, map_core_type ct2)
| Tctf_attribute _ as x -> x
in
Map.leave_class_type_field { ctf with ctf_desc = ctf_desc }
and map_core_type ct =
let ct = Map.enter_core_type ct in
let ctyp_desc =
match ct.ctyp_desc with
Ttyp_any
| Ttyp_var _ -> ct.ctyp_desc
| Ttyp_arrow (label, ct1, ct2) ->
Ttyp_arrow (label, map_core_type ct1, map_core_type ct2)
| Ttyp_tuple list -> Ttyp_tuple (List.map map_core_type list)
| Ttyp_constr (path, lid, list) ->
Ttyp_constr (path, lid, List.map map_core_type list)
| Ttyp_object (list, o) ->
Ttyp_object
(List.map (fun (s, a, t) -> (s, a, map_core_type t)) list, o)
| Ttyp_class (path, lid, list) ->
Ttyp_class (path, lid, List.map map_core_type list)
| Ttyp_alias (ct, s) -> Ttyp_alias (map_core_type ct, s)
| Ttyp_variant (list, bool, labels) ->
Ttyp_variant (List.map map_row_field list, bool, labels)
| Ttyp_poly (list, ct) -> Ttyp_poly (list, map_core_type ct)
| Ttyp_package pack -> Ttyp_package (map_package_type pack)
in
Map.leave_core_type { ct with ctyp_desc = ctyp_desc }
and map_class_structure cs =
let cs = Map.enter_class_structure cs in
let cstr_self = map_pattern cs.cstr_self in
let cstr_fields = List.map map_class_field cs.cstr_fields in
Map.leave_class_structure { cs with cstr_self; cstr_fields }
and map_row_field rf =
match rf with
Ttag (label, attrs, bool, list) ->
Ttag (label, attrs, bool, List.map map_core_type list)
| Tinherit ct -> Tinherit (map_core_type ct)
and map_class_field cf =
let cf = Map.enter_class_field cf in
let cf_desc =
match cf.cf_desc with
Tcf_inherit (ovf, cl, super, vals, meths) ->
Tcf_inherit (ovf, map_class_expr cl, super, vals, meths)
| Tcf_constraint (cty, cty') ->
Tcf_constraint (map_core_type cty, map_core_type cty')
| Tcf_val (lab, mut, ident, Tcfk_virtual cty, b) ->
Tcf_val (lab, mut, ident, Tcfk_virtual (map_core_type cty), b)
| Tcf_val (lab, mut, ident, Tcfk_concrete (o, exp), b) ->
Tcf_val (lab, mut, ident, Tcfk_concrete (o, map_expression exp), b)
| Tcf_method (lab, priv, Tcfk_virtual cty) ->
Tcf_method (lab, priv, Tcfk_virtual (map_core_type cty))
| Tcf_method (lab, priv, Tcfk_concrete (o, exp)) ->
Tcf_method (lab, priv, Tcfk_concrete (o, map_expression exp))
| Tcf_initializer exp -> Tcf_initializer (map_expression exp)
| Tcf_attribute _ as x -> x
in
Map.leave_class_field { cf with cf_desc = cf_desc }
end
module DefaultMapArgument = struct
let enter_structure t = t
let enter_value_description t = t
let enter_type_declaration t = t
let enter_type_extension t = t
let enter_extension_constructor t = t
let enter_pattern t = t
let enter_expression t = t
let enter_package_type t = t
let enter_signature t = t
let enter_signature_item t = t
let enter_module_type_declaration t = t
let enter_module_type t = t
let enter_module_expr t = t
let enter_with_constraint t = t
let enter_class_expr t = t
let enter_class_signature t = t
let enter_class_declaration t = t
let enter_class_description t = t
let enter_class_type_declaration t = t
let enter_class_type t = t
let enter_class_type_field t = t
let enter_core_type t = t
let enter_class_structure t = t
let enter_class_field t = t
let enter_structure_item t = t
let leave_structure t = t
let leave_value_description t = t
let leave_type_declaration t = t
let leave_type_extension t = t
let leave_extension_constructor t = t
let leave_pattern t = t
let leave_expression t = t
let leave_package_type t = t
let leave_signature t = t
let leave_signature_item t = t
let leave_module_type_declaration t = t
let leave_module_type t = t
let leave_module_expr t = t
let leave_with_constraint t = t
let leave_class_expr t = t
let leave_class_signature t = t
let leave_class_declaration t = t
let leave_class_description t = t
let leave_class_type_declaration t = t
let leave_class_type t = t
let leave_class_type_field t = t
let leave_core_type t = t
let leave_class_structure t = t
let leave_class_field t = t
let leave_structure_item t = t
end
|
e3d49411b17726376a0cfbfc1cbf07b7f2ebc754d5cb47d2f7659ae6b543ccc6 | haskell/cabal | setup-external.test.hs | import Test.Cabal.Prelude
main = setupAndCabalTest $ do
skipUnlessGhcVersion ">= 8.1"
ghc <- isGhcVersion "== 9.0.2 || == 9.2.* || == 9.4.* || == 9.6.*"
expectBrokenIf ghc 7987 $ do
withPackageDb $ do
withDirectory "mylib" $ setup_install_with_docs ["--ipid", "mylib-0.1.0.0"]
withDirectory "mysql" $ setup_install_with_docs ["--ipid", "mysql-0.1.0.0"]
withDirectory "postgresql" $ setup_install_with_docs ["--ipid", "postgresql-0.1.0.0"]
withDirectory "mylib" $
setup_install_with_docs ["--ipid", "mylib-0.1.0.0",
"--instantiate-with", "Database=mysql-0.1.0.0:Database.MySQL"]
withDirectory "mylib" $
setup_install_with_docs ["--ipid", "mylib-0.1.0.0",
"--instantiate-with", "Database=postgresql-0.1.0.0:Database.PostgreSQL"]
withDirectory "src" $ setup_install_with_docs []
withDirectory "exe" $ do
setup_install_with_docs []
runExe' "exe" [] >>= assertOutputContains "minemysql minepostgresql"
| null | https://raw.githubusercontent.com/haskell/cabal/0eb638fb18e4ef215413d1a1c25b5175e54f2158/cabal-testsuite/PackageTests/Backpack/Includes2/setup-external.test.hs | haskell | import Test.Cabal.Prelude
main = setupAndCabalTest $ do
skipUnlessGhcVersion ">= 8.1"
ghc <- isGhcVersion "== 9.0.2 || == 9.2.* || == 9.4.* || == 9.6.*"
expectBrokenIf ghc 7987 $ do
withPackageDb $ do
withDirectory "mylib" $ setup_install_with_docs ["--ipid", "mylib-0.1.0.0"]
withDirectory "mysql" $ setup_install_with_docs ["--ipid", "mysql-0.1.0.0"]
withDirectory "postgresql" $ setup_install_with_docs ["--ipid", "postgresql-0.1.0.0"]
withDirectory "mylib" $
setup_install_with_docs ["--ipid", "mylib-0.1.0.0",
"--instantiate-with", "Database=mysql-0.1.0.0:Database.MySQL"]
withDirectory "mylib" $
setup_install_with_docs ["--ipid", "mylib-0.1.0.0",
"--instantiate-with", "Database=postgresql-0.1.0.0:Database.PostgreSQL"]
withDirectory "src" $ setup_install_with_docs []
withDirectory "exe" $ do
setup_install_with_docs []
runExe' "exe" [] >>= assertOutputContains "minemysql minepostgresql"
|
|
265b69140551c9eb8eefdd1f7ea6bfd4e2a682f17efbb4a2b57d3a54ed30743b | ghollisjr/cl-ana | h5ex-d-hyper.lisp | Copyright by The HDF Group .
;;;; All rights reserved.
;;;;
This file is part of hdf5 - cffi .
The full hdf5 - cffi copyright notice , including terms governing
;;;; use, modification, and redistribution, is contained in the file COPYING,
;;;; which can be found at the root of the source code distribution tree.
;;;; If you do not have access to this file, you may request a copy from
;;;; .
;;; This example shows how to read and write data to a
dataset by hyberslabs . The program first writes integers
;;; in a hyperslab selection to a dataset with dataspace
dimensions of DIM0xDIM1 , then closes the file . Next , it
;;; reopens the file, reads back the data, and outputs it to
;;; the screen. Finally it reads the data again using a
different hyperslab selection , and outputs the result to
;;; the screen.
;;; -by-api/hdf5-examples/1_8/C/H5D/h5ex_d_hyper.c
(in-package :hdf5)
(defparameter *FILE* (namestring (merge-pathnames "h5ex_d_hyper.h5" *load-pathname*)))
(defparameter *DATASET* "DS1")
(defparameter *DIM0* 6)
(defparameter *DIM1* 8)
(defun print-data (data)
(dotimes (i *DIM0*)
(format t " [")
(dotimes (j *DIM1*)
(format t " ~3d" (cffi:mem-aref data :int (h5ex:pos2D *DIM1* i j))))
(format t "]~%")))
(cffi:with-foreign-objects ((start 'hsize-t 2)
(stride 'hsize-t 2)
(count 'hsize-t 2)
(block 'hsize-t 2)
(wdata :int (* *DIM0* *DIM1*))
(rdata :int (* *DIM0* *DIM1*)))
Initialize data to " 1 " , to make it easier to see the selections .
(dotimes (i *DIM0*)
(dotimes (j *DIM1*)
(setf (cffi:mem-aref wdata :int (h5ex:pos2D *DIM1* i j)) 1)))
;; Print the data to the screen.
(format t "Original Data:~%")
(print-data wdata)
(let* ((fapl (h5pcreate +H5P-FILE-ACCESS+))
(file (prog2 (h5pset-fclose-degree fapl :H5F-CLOSE-STRONG)
(h5fcreate *FILE* +H5F-ACC-TRUNC+ +H5P-DEFAULT+ fapl))))
(unwind-protect
(let* ((space (h5ex:create-simple-dataspace `(,*DIM0* ,*DIM1*)))
(dset (h5dcreate2 file *DATASET* +H5T-STD-I32BE+ space
+H5P-DEFAULT+ +H5P-DEFAULT+ +H5P-DEFAULT+)))
Define and select the first part of the hyperslab selection .
(setf (cffi:mem-aref start 'hsize-t 0) 0
(cffi:mem-aref start 'hsize-t 1) 0
(cffi:mem-aref stride 'hsize-t 0) 3
(cffi:mem-aref stride 'hsize-t 1) 3
(cffi:mem-aref count 'hsize-t 0) 2
(cffi:mem-aref count 'hsize-t 1) 3
(cffi:mem-aref block 'hsize-t 0) 2
(cffi:mem-aref block 'hsize-t 1) 2)
(h5sselect-hyperslab space :H5S-SELECT-SET start stride count block)
Define and select the second part of the hyperslab selection ,
which is subtracted from the first selection by the use of
H5S_SELECT_NOTB
(setf (cffi:mem-aref block 'hsize-t 0) 1
(cffi:mem-aref block 'hsize-t 1) 1)
(h5sselect-hyperslab space :H5S-SELECT-NOTB start stride count block)
;; Write the data to the dataset
(h5dwrite dset +H5T-NATIVE-INT+ +H5S-ALL+ space +H5P-DEFAULT+ wdata)
;; Close and release resources.
(h5ex:close-handles (list dset space)))
(h5ex:close-handles (list file fapl))))
Open file and dataset using the default properties .
(let* ((fapl (h5pcreate +H5P-FILE-ACCESS+))
(file (prog2 (h5pset-fclose-degree fapl :H5F-CLOSE-STRONG)
(h5fopen *FILE* +H5F-ACC-RDONLY+ fapl))))
(unwind-protect
(let* ((dset (h5dopen2 file *DATASET* +H5P-DEFAULT+))
(space (h5dget-space dset)))
;; Read the data using the default properties
(h5dread dset +H5T-NATIVE-INT+ +H5S-ALL+ +H5S-ALL+ +H5P-DEFAULT+
rdata)
;; Output the data to the screen.
(format t "~%Data as written to disk by hyberslabs:~%")
(print-data rdata)
Initialize the read array .
(dotimes (i *DIM0*)
(dotimes (j *DIM1*)
(setf (cffi:mem-aref rdata :int (h5ex:pos2D *DIM1* i j)) 0)))
;; Define and select the hyperslab to use for reading.
(setf (cffi:mem-aref start 'hsize-t 0) 0
(cffi:mem-aref start 'hsize-t 1) 1
(cffi:mem-aref stride 'hsize-t 0) 4
(cffi:mem-aref stride 'hsize-t 1) 4
(cffi:mem-aref count 'hsize-t 0) 2
(cffi:mem-aref count 'hsize-t 1) 2
(cffi:mem-aref block 'hsize-t 0) 2
(cffi:mem-aref block 'hsize-t 1) 3)
(h5sselect-hyperslab space :H5S-SELECT-SET start stride count block)
Read the data using the previously defined hyperslab .
(h5dread dset +H5T-NATIVE-INT+ +H5S-ALL+ space +H5P-DEFAULT+ rdata)
;; Output the data to the screen.
(format t "~%Data as read from disk by hyperslab:~%")
(print-data rdata)
(h5ex:close-handles (list space dset)))
(h5ex:close-handles (list file fapl)))))
| null | https://raw.githubusercontent.com/ghollisjr/cl-ana/5cb4c0b0c9c4957452ad2a769d6ff9e8d5df0b10/hdf-cffi/examples/datasets/h5ex-d-hyper.lisp | lisp | All rights reserved.
use, modification, and redistribution, is contained in the file COPYING,
which can be found at the root of the source code distribution tree.
If you do not have access to this file, you may request a copy from
.
This example shows how to read and write data to a
in a hyperslab selection to a dataset with dataspace
reopens the file, reads back the data, and outputs it to
the screen. Finally it reads the data again using a
the screen.
-by-api/hdf5-examples/1_8/C/H5D/h5ex_d_hyper.c
Print the data to the screen.
Write the data to the dataset
Close and release resources.
Read the data using the default properties
Output the data to the screen.
Define and select the hyperslab to use for reading.
Output the data to the screen. | Copyright by The HDF Group .
This file is part of hdf5 - cffi .
The full hdf5 - cffi copyright notice , including terms governing
dataset by hyberslabs . The program first writes integers
dimensions of DIM0xDIM1 , then closes the file . Next , it
different hyperslab selection , and outputs the result to
(in-package :hdf5)
(defparameter *FILE* (namestring (merge-pathnames "h5ex_d_hyper.h5" *load-pathname*)))
(defparameter *DATASET* "DS1")
(defparameter *DIM0* 6)
(defparameter *DIM1* 8)
(defun print-data (data)
(dotimes (i *DIM0*)
(format t " [")
(dotimes (j *DIM1*)
(format t " ~3d" (cffi:mem-aref data :int (h5ex:pos2D *DIM1* i j))))
(format t "]~%")))
(cffi:with-foreign-objects ((start 'hsize-t 2)
(stride 'hsize-t 2)
(count 'hsize-t 2)
(block 'hsize-t 2)
(wdata :int (* *DIM0* *DIM1*))
(rdata :int (* *DIM0* *DIM1*)))
Initialize data to " 1 " , to make it easier to see the selections .
(dotimes (i *DIM0*)
(dotimes (j *DIM1*)
(setf (cffi:mem-aref wdata :int (h5ex:pos2D *DIM1* i j)) 1)))
(format t "Original Data:~%")
(print-data wdata)
(let* ((fapl (h5pcreate +H5P-FILE-ACCESS+))
(file (prog2 (h5pset-fclose-degree fapl :H5F-CLOSE-STRONG)
(h5fcreate *FILE* +H5F-ACC-TRUNC+ +H5P-DEFAULT+ fapl))))
(unwind-protect
(let* ((space (h5ex:create-simple-dataspace `(,*DIM0* ,*DIM1*)))
(dset (h5dcreate2 file *DATASET* +H5T-STD-I32BE+ space
+H5P-DEFAULT+ +H5P-DEFAULT+ +H5P-DEFAULT+)))
Define and select the first part of the hyperslab selection .
(setf (cffi:mem-aref start 'hsize-t 0) 0
(cffi:mem-aref start 'hsize-t 1) 0
(cffi:mem-aref stride 'hsize-t 0) 3
(cffi:mem-aref stride 'hsize-t 1) 3
(cffi:mem-aref count 'hsize-t 0) 2
(cffi:mem-aref count 'hsize-t 1) 3
(cffi:mem-aref block 'hsize-t 0) 2
(cffi:mem-aref block 'hsize-t 1) 2)
(h5sselect-hyperslab space :H5S-SELECT-SET start stride count block)
Define and select the second part of the hyperslab selection ,
which is subtracted from the first selection by the use of
H5S_SELECT_NOTB
(setf (cffi:mem-aref block 'hsize-t 0) 1
(cffi:mem-aref block 'hsize-t 1) 1)
(h5sselect-hyperslab space :H5S-SELECT-NOTB start stride count block)
(h5dwrite dset +H5T-NATIVE-INT+ +H5S-ALL+ space +H5P-DEFAULT+ wdata)
(h5ex:close-handles (list dset space)))
(h5ex:close-handles (list file fapl))))
Open file and dataset using the default properties .
(let* ((fapl (h5pcreate +H5P-FILE-ACCESS+))
(file (prog2 (h5pset-fclose-degree fapl :H5F-CLOSE-STRONG)
(h5fopen *FILE* +H5F-ACC-RDONLY+ fapl))))
(unwind-protect
(let* ((dset (h5dopen2 file *DATASET* +H5P-DEFAULT+))
(space (h5dget-space dset)))
(h5dread dset +H5T-NATIVE-INT+ +H5S-ALL+ +H5S-ALL+ +H5P-DEFAULT+
rdata)
(format t "~%Data as written to disk by hyberslabs:~%")
(print-data rdata)
Initialize the read array .
(dotimes (i *DIM0*)
(dotimes (j *DIM1*)
(setf (cffi:mem-aref rdata :int (h5ex:pos2D *DIM1* i j)) 0)))
(setf (cffi:mem-aref start 'hsize-t 0) 0
(cffi:mem-aref start 'hsize-t 1) 1
(cffi:mem-aref stride 'hsize-t 0) 4
(cffi:mem-aref stride 'hsize-t 1) 4
(cffi:mem-aref count 'hsize-t 0) 2
(cffi:mem-aref count 'hsize-t 1) 2
(cffi:mem-aref block 'hsize-t 0) 2
(cffi:mem-aref block 'hsize-t 1) 3)
(h5sselect-hyperslab space :H5S-SELECT-SET start stride count block)
Read the data using the previously defined hyperslab .
(h5dread dset +H5T-NATIVE-INT+ +H5S-ALL+ space +H5P-DEFAULT+ rdata)
(format t "~%Data as read from disk by hyperslab:~%")
(print-data rdata)
(h5ex:close-handles (list space dset)))
(h5ex:close-handles (list file fapl)))))
|
d2bf42cdbfcab793894514fa6134ecf0edddcf1164275a62de66f1eae2fe7e7c | zeniuseducation/poly-euler | two.clj | (ns alfa.beta.two)
(defn ^longs jumfak
[^long lim]
(let [llim (int (Math/sqrt lim))
faks (int-array (+ lim 1) 1)]
(loop [i (int 2)]
(if (> i llim)
(filterv #(> (aget faks %) %) (range 12 (+ lim 1)))
(do (let [isqr (* i i)]
(do (aset faks isqr (+ (aget faks isqr) i))
(loop [j (int (+ isqr i))]
(when (<= j lim)
(aset faks j (+ (aget faks j) i (quot j i)))
(recur (+ j i))))))
(recur (+ i 1)))))))
(defn ^long sol23
[^long lim]
(let [abuns (jumfak lim)
ctr (count abuns)
rabuns (int-array abuns)
refs (boolean-array (+ lim 1) false)
hlim (quot lim 2)]
(loop [i (int 0)]
(let [iref (aget rabuns i)]
(if (> iref hlim)
(- (quot (* lim (+ lim 1)) 2)
(transduce
(filter #(aget refs %))
+ (range 12 (+ lim 1))))
(do (loop [j (int i)]
(let [jref (aget rabuns j)]
(when (<= (+ iref jref) lim)
(aset refs (+ iref jref) true)
(recur (+ j 1)))))
(recur (+ i 1))))))))
(time (sol23 28123))
(dotimes [i 10]
(time (sol23 28123)))
(time (jumfak 100))
| null | https://raw.githubusercontent.com/zeniuseducation/poly-euler/734fdcf1ddd096a8730600b684bf7398d071d499/Alfa/src/alfa/beta/two.clj | clojure | (ns alfa.beta.two)
(defn ^longs jumfak
[^long lim]
(let [llim (int (Math/sqrt lim))
faks (int-array (+ lim 1) 1)]
(loop [i (int 2)]
(if (> i llim)
(filterv #(> (aget faks %) %) (range 12 (+ lim 1)))
(do (let [isqr (* i i)]
(do (aset faks isqr (+ (aget faks isqr) i))
(loop [j (int (+ isqr i))]
(when (<= j lim)
(aset faks j (+ (aget faks j) i (quot j i)))
(recur (+ j i))))))
(recur (+ i 1)))))))
(defn ^long sol23
[^long lim]
(let [abuns (jumfak lim)
ctr (count abuns)
rabuns (int-array abuns)
refs (boolean-array (+ lim 1) false)
hlim (quot lim 2)]
(loop [i (int 0)]
(let [iref (aget rabuns i)]
(if (> iref hlim)
(- (quot (* lim (+ lim 1)) 2)
(transduce
(filter #(aget refs %))
+ (range 12 (+ lim 1))))
(do (loop [j (int i)]
(let [jref (aget rabuns j)]
(when (<= (+ iref jref) lim)
(aset refs (+ iref jref) true)
(recur (+ j 1)))))
(recur (+ i 1))))))))
(time (sol23 28123))
(dotimes [i 10]
(time (sol23 28123)))
(time (jumfak 100))
|
|
51a4667d05d0307ec2497331a031a333af5524cb03a5701d69cae6a45a59288d | soren-n/bidi-higher-rank-poly | Tests.ml | open Bhrp_shared
open Back
open Front
open Poly
open Expr
open Simple
let parse input return =
return (Parser.input Lexer.token (Lexing.from_string input))
let print layout =
Typeset.compile layout @@ fun doc ->
Typeset.render doc 2 80 @@ fun msg ->
print_endline msg
let (<:) left right =
Check.subtype left right Native.tenv
(fun msg -> print msg; false)
(fun () -> true)
let (==>) expr return =
Check.synth_expr expr Native.tenv
(fun msg -> print msg; assert false)
return
let (<==) expr poly =
Check.check_expr expr poly Native.tenv
(fun msg -> print msg; false)
(fun () -> true)
let purely_universally_quantified poly =
let open Syntax in
let rec _visit_poly poly =
match poly with
| PNothing -> true
| PUnit -> true
| PParam _label -> true
| PVar _exist -> false
| PArrow (dom, codom) ->
_visit_poly dom && _visit_poly codom
| PForall (_label, poly1) ->
_visit_poly poly1
| PMono mono ->
_visit_mono mono
and _visit_mono mono =
match mono with
| MNothing -> true
| MUnit -> true
| MParam _label -> true
| MVar _exist -> false
| MArrow (dom, codom) ->
_visit_mono dom && _visit_mono codom
in
_visit_poly poly
(* Define tests *)
let print_parse_sound =
let ctx = Naming.make_ctx () in
QCheck.Test.make ~count:32
~name:"print_parse_sound"
(arbitrary_typed_stmt ctx)
(fun (stmt, _simple_mono) ->
Print.print_stmt ctx stmt @@ fun stmt_s ->
parse stmt_s @@ fun stmt1 ->
Syntax.stmt_equal stmt stmt1)
let subtype_sound =
QCheck.Test.make ~count:64
~name:"subtype_sound"
arbitrary_simple
(fun simple ->
Mono.simple_2_simple_mono simple @@ fun simple_mono ->
Poly.simple_mono_2_simple_poly simple_mono @@ fun simple_poly_exist ->
Poly.simple_2_simple_poly simple @@ fun simple_poly ->
if not (simple_poly_exist <: simple_poly) then
let ctx = Naming.make_ctx () in
Print.print_poly ctx simple_poly print_endline;
print_endline "-----------------------------------";
Print.print_poly ctx simple_poly_exist print_endline;
print_endline "***********************************";
false
else true)
let synth_expr_sound =
let ctx = Naming.make_ctx () in
QCheck.Test.make ~count:128
~name:"synth_expr_sound"
(arbitrary_typed_expr ctx)
(fun (expr, simple_mono) ->
Poly.simple_mono_2_simple_poly simple_mono @@ fun expr_t ->
if not (expr <== expr_t) then
let ctx = Naming.make_ctx () in
Print.print_expr ctx expr print_endline;
print_endline "-----------------------------------";
Print.print_poly ctx expr_t print_endline;
print_endline "***********************************";
false
else true)
let synth_type_sound_l =
let ctx = Naming.make_ctx () in
QCheck.Test.make ~count:64
~name:"synth_type_sound_l"
(arbitrary_typed_expr ctx)
(fun (expr, simple_mono) ->
Poly.simple_mono_2_simple_poly simple_mono @@ fun left ->
expr ==> fun right ->
left <: right)
let synth_type_sound_r =
let ctx = Naming.make_ctx () in
QCheck.Test.make ~count:64
~name:"synth_type_sound_r"
(arbitrary_typed_expr ctx)
(fun (expr, simple_mono) ->
Poly.simple_mono_2_simple_poly simple_mono @@ fun right ->
expr ==> fun left ->
left <: right)
let check_type_sound =
let ctx = Naming.make_ctx () in
QCheck.Test.make ~count:64
~name:"check_type_sound"
(arbitrary_typed_expr ctx)
(fun (expr, _simple_mono) ->
expr ==> fun expr_t ->
expr <== expr_t)
let generalize_sound =
let ctx = Naming.make_ctx () in
QCheck.Test.make ~count:64
~name:"generalize_sound"
(arbitrary_poly ctx)
(fun poly ->
Check.generalize poly @@ fun poly1 ->
purely_universally_quantified poly1)
(* Run tests *)
let _ =
QCheck_runner.run_tests
[ print_parse_sound
;
; synth_expr_sound
(* ; synth_type_sound_l
; synth_type_sound_r
; check_type_sound
; generalize_sound *)
];
| null | https://raw.githubusercontent.com/soren-n/bidi-higher-rank-poly/73cb66a31d8d432cfbc344f29681b536a983f3d5/back/test/Tests.ml | ocaml | Define tests
Run tests
; synth_type_sound_l
; synth_type_sound_r
; check_type_sound
; generalize_sound | open Bhrp_shared
open Back
open Front
open Poly
open Expr
open Simple
let parse input return =
return (Parser.input Lexer.token (Lexing.from_string input))
let print layout =
Typeset.compile layout @@ fun doc ->
Typeset.render doc 2 80 @@ fun msg ->
print_endline msg
let (<:) left right =
Check.subtype left right Native.tenv
(fun msg -> print msg; false)
(fun () -> true)
let (==>) expr return =
Check.synth_expr expr Native.tenv
(fun msg -> print msg; assert false)
return
let (<==) expr poly =
Check.check_expr expr poly Native.tenv
(fun msg -> print msg; false)
(fun () -> true)
let purely_universally_quantified poly =
let open Syntax in
let rec _visit_poly poly =
match poly with
| PNothing -> true
| PUnit -> true
| PParam _label -> true
| PVar _exist -> false
| PArrow (dom, codom) ->
_visit_poly dom && _visit_poly codom
| PForall (_label, poly1) ->
_visit_poly poly1
| PMono mono ->
_visit_mono mono
and _visit_mono mono =
match mono with
| MNothing -> true
| MUnit -> true
| MParam _label -> true
| MVar _exist -> false
| MArrow (dom, codom) ->
_visit_mono dom && _visit_mono codom
in
_visit_poly poly
let print_parse_sound =
let ctx = Naming.make_ctx () in
QCheck.Test.make ~count:32
~name:"print_parse_sound"
(arbitrary_typed_stmt ctx)
(fun (stmt, _simple_mono) ->
Print.print_stmt ctx stmt @@ fun stmt_s ->
parse stmt_s @@ fun stmt1 ->
Syntax.stmt_equal stmt stmt1)
let subtype_sound =
QCheck.Test.make ~count:64
~name:"subtype_sound"
arbitrary_simple
(fun simple ->
Mono.simple_2_simple_mono simple @@ fun simple_mono ->
Poly.simple_mono_2_simple_poly simple_mono @@ fun simple_poly_exist ->
Poly.simple_2_simple_poly simple @@ fun simple_poly ->
if not (simple_poly_exist <: simple_poly) then
let ctx = Naming.make_ctx () in
Print.print_poly ctx simple_poly print_endline;
print_endline "-----------------------------------";
Print.print_poly ctx simple_poly_exist print_endline;
print_endline "***********************************";
false
else true)
let synth_expr_sound =
let ctx = Naming.make_ctx () in
QCheck.Test.make ~count:128
~name:"synth_expr_sound"
(arbitrary_typed_expr ctx)
(fun (expr, simple_mono) ->
Poly.simple_mono_2_simple_poly simple_mono @@ fun expr_t ->
if not (expr <== expr_t) then
let ctx = Naming.make_ctx () in
Print.print_expr ctx expr print_endline;
print_endline "-----------------------------------";
Print.print_poly ctx expr_t print_endline;
print_endline "***********************************";
false
else true)
let synth_type_sound_l =
let ctx = Naming.make_ctx () in
QCheck.Test.make ~count:64
~name:"synth_type_sound_l"
(arbitrary_typed_expr ctx)
(fun (expr, simple_mono) ->
Poly.simple_mono_2_simple_poly simple_mono @@ fun left ->
expr ==> fun right ->
left <: right)
let synth_type_sound_r =
let ctx = Naming.make_ctx () in
QCheck.Test.make ~count:64
~name:"synth_type_sound_r"
(arbitrary_typed_expr ctx)
(fun (expr, simple_mono) ->
Poly.simple_mono_2_simple_poly simple_mono @@ fun right ->
expr ==> fun left ->
left <: right)
let check_type_sound =
let ctx = Naming.make_ctx () in
QCheck.Test.make ~count:64
~name:"check_type_sound"
(arbitrary_typed_expr ctx)
(fun (expr, _simple_mono) ->
expr ==> fun expr_t ->
expr <== expr_t)
let generalize_sound =
let ctx = Naming.make_ctx () in
QCheck.Test.make ~count:64
~name:"generalize_sound"
(arbitrary_poly ctx)
(fun poly ->
Check.generalize poly @@ fun poly1 ->
purely_universally_quantified poly1)
let _ =
QCheck_runner.run_tests
[ print_parse_sound
;
; synth_expr_sound
];
|
6e881003e1967f1e29725daf60000474aad27fd441c1954469b9221f6b2e5697 | pkhuong/Napa-FFT3 | test-support.lisp | (in-package "NAPA-FFT.TESTS")
(defun impulse (i n)
(let ((vec (make-array n :element-type 'complex-sample
:initial-element (complex 0d0 0d0))))
(setf (aref vec i) (complex 1d0 0d0))
vec))
(defun iota (n)
(let ((count 0))
(map-into (make-array n :element-type 'complex-sample)
(lambda ()
(complex (1- (incf count))
1d0)))))
(defun make-vector (n)
(make-array n :element-type 'complex-sample))
(defun random-vector (n &optional (dst (make-vector n)))
(declare (type complex-sample-array dst))
(unless (= n (length dst))
(setf dst (make-array n :element-type 'complex-sample)))
(map-into dst (lambda ()
(complex (- (random 2d0) 1d0)
(- (random 2d0) 1d0)))))
(macrolet ((define-mfun (name op)
`(defun ,name (x y &optional (dst (make-vector (length x))))
(declare (type complex-sample-array x y dst))
(map-into dst #',op x y))))
(define-mfun m+ +)
(define-mfun m- -)
(define-mfun m* *))
(defvar *default-abs-tol* 1d-6)
(defun m= (x y &optional (tol *default-abs-tol*))
(declare (type complex-sample-array x y)
(type double-float tol))
(let ((worst 0d0))
(declare (type double-float worst))
(dotimes (i (length x))
(let ((x (aref x i))
(y (aref y i)))
(let ((delta (abs (- x y))))
(if (< delta tol)
(setf worst (max worst delta))
(return-from m= (values nil delta i))))))
(values t worst nil)))
(defun slow-bit-reverse (array)
(let ((dst (copy-seq array))
(width (integer-length (1- (length array)))))
(flet ((rev (x)
(bit-reverse-integer x width)))
(dotimes (i (length array) dst)
(setf (aref dst (rev i)) (aref array i))))))
| null | https://raw.githubusercontent.com/pkhuong/Napa-FFT3/f2d9614c7167da327c9ceebefb04ff6eae2d2236/test-support.lisp | lisp | (in-package "NAPA-FFT.TESTS")
(defun impulse (i n)
(let ((vec (make-array n :element-type 'complex-sample
:initial-element (complex 0d0 0d0))))
(setf (aref vec i) (complex 1d0 0d0))
vec))
(defun iota (n)
(let ((count 0))
(map-into (make-array n :element-type 'complex-sample)
(lambda ()
(complex (1- (incf count))
1d0)))))
(defun make-vector (n)
(make-array n :element-type 'complex-sample))
(defun random-vector (n &optional (dst (make-vector n)))
(declare (type complex-sample-array dst))
(unless (= n (length dst))
(setf dst (make-array n :element-type 'complex-sample)))
(map-into dst (lambda ()
(complex (- (random 2d0) 1d0)
(- (random 2d0) 1d0)))))
(macrolet ((define-mfun (name op)
`(defun ,name (x y &optional (dst (make-vector (length x))))
(declare (type complex-sample-array x y dst))
(map-into dst #',op x y))))
(define-mfun m+ +)
(define-mfun m- -)
(define-mfun m* *))
(defvar *default-abs-tol* 1d-6)
(defun m= (x y &optional (tol *default-abs-tol*))
(declare (type complex-sample-array x y)
(type double-float tol))
(let ((worst 0d0))
(declare (type double-float worst))
(dotimes (i (length x))
(let ((x (aref x i))
(y (aref y i)))
(let ((delta (abs (- x y))))
(if (< delta tol)
(setf worst (max worst delta))
(return-from m= (values nil delta i))))))
(values t worst nil)))
(defun slow-bit-reverse (array)
(let ((dst (copy-seq array))
(width (integer-length (1- (length array)))))
(flet ((rev (x)
(bit-reverse-integer x width)))
(dotimes (i (length array) dst)
(setf (aref dst (rev i)) (aref array i))))))
|
|
4aa397df59e4d58e1d5522a77929befa4aec07dcdb896f3d4210e19bf155805b | hyper-systems/rescript-sx | Main.ml | open Bos
module String_set = Set.Make (String)
let rev_lines_of_chan chan =
let rec loop acc chan =
match input_line chan with
| line -> loop (line :: acc) chan
| exception End_of_file ->
close_in chan;
acc
in
loop [] chan
let rescript_toplevel_modules =
lazy
(let mlmap_pat = Fpath.v "$(namespace).mlmap" in
match OS.Path.matches mlmap_pat with
| Ok [ sourcefile ] ->
(* -lang/rescript-compiler/blob/b4d5caea15e9594f95f6f8ac6620417540986c20/jscomp/core/js_implementation.ml#L254 *)
let ext_digest_length = 16 in
let ichan = open_in_bin (Fpath.to_string sourcefile) in
seek_in ichan (ext_digest_length + 1);
let list_of_modules = rev_lines_of_chan ichan in
close_in ichan;
list_of_modules |> String_set.of_list
| Error (`Msg msg) ->
prerr_endline
("sx: error: could not find .mlmap file in ./lib/bs:\n" ^ msg);
exit 1
| Ok [] ->
prerr_endline
"sx: error: No .mlmap file found in ./lib/bs.\n\
Is your project compiled and has namespacing enabled?";
exit 1
| _ ->
prerr_endline
"sx: error: More than one .mlmap file found in ./lib/bs.\n\
Try cleanning and rebuilding your project.";
exit 1)
let ensure_is_lib_bs () =
if not (OS.File.exists (Fpath.v "build.ninja") |> Result.get_ok) then
failwith "sx: Could not find build.ninja file in the current directory"
let ( let* ) = Result.bind
let generate_css output_file =
ensure_is_lib_bs ();
let sx_cache_files_pat =
Fpath.v "./../../node_modules/.cache/sx/$(module_name).mldata"
in
let* paths = OS.Path.matches sx_cache_files_pat in
let css =
List.fold_left
(fun acc cache_file ->
let cache_file_basename =
cache_file |> Fpath.rem_ext ~multi:true |> Fpath.basename
in
if
String_set.exists
(String.equal cache_file_basename)
(Lazy.force rescript_toplevel_modules)
then
let css = Sx.read_module_cache cache_file in
Sx.Css.union acc css
else (
OS.File.delete cache_file |> Result.get_ok;
acc))
Sx.Css.empty paths
in
let chan = open_out_bin (Fpath.to_string output_file) in
Fmt.pf
(Format.formatter_of_out_channel chan)
"@[<v>/* Generated by sx, PLEASE DO NOT EDIT! */@,%s@,@,%a@]@."
Sx.Preflight.css Sx.Css.pp css;
Ok ()
let () =
Stdlib.at_exit (fun () ->
let lib_bs = Sys.getcwd () in
let input_name = !Location.input_name in
Sx.write_module_cache ~lib_bs ~input_name;
if not (Sx.global_css_is_empty ()) then
generate_css (Sx_ppx.output_path ()) |> Result.get_ok);
Ppxlib.Driver.run_as_ppx_rewriter ()
| null | https://raw.githubusercontent.com/hyper-systems/rescript-sx/fcd0e71332d0f8e777d2850eeac3326c1dfdc95a/sx.ppx/Main.ml | ocaml | -lang/rescript-compiler/blob/b4d5caea15e9594f95f6f8ac6620417540986c20/jscomp/core/js_implementation.ml#L254 | open Bos
module String_set = Set.Make (String)
let rev_lines_of_chan chan =
let rec loop acc chan =
match input_line chan with
| line -> loop (line :: acc) chan
| exception End_of_file ->
close_in chan;
acc
in
loop [] chan
let rescript_toplevel_modules =
lazy
(let mlmap_pat = Fpath.v "$(namespace).mlmap" in
match OS.Path.matches mlmap_pat with
| Ok [ sourcefile ] ->
let ext_digest_length = 16 in
let ichan = open_in_bin (Fpath.to_string sourcefile) in
seek_in ichan (ext_digest_length + 1);
let list_of_modules = rev_lines_of_chan ichan in
close_in ichan;
list_of_modules |> String_set.of_list
| Error (`Msg msg) ->
prerr_endline
("sx: error: could not find .mlmap file in ./lib/bs:\n" ^ msg);
exit 1
| Ok [] ->
prerr_endline
"sx: error: No .mlmap file found in ./lib/bs.\n\
Is your project compiled and has namespacing enabled?";
exit 1
| _ ->
prerr_endline
"sx: error: More than one .mlmap file found in ./lib/bs.\n\
Try cleanning and rebuilding your project.";
exit 1)
let ensure_is_lib_bs () =
if not (OS.File.exists (Fpath.v "build.ninja") |> Result.get_ok) then
failwith "sx: Could not find build.ninja file in the current directory"
let ( let* ) = Result.bind
let generate_css output_file =
ensure_is_lib_bs ();
let sx_cache_files_pat =
Fpath.v "./../../node_modules/.cache/sx/$(module_name).mldata"
in
let* paths = OS.Path.matches sx_cache_files_pat in
let css =
List.fold_left
(fun acc cache_file ->
let cache_file_basename =
cache_file |> Fpath.rem_ext ~multi:true |> Fpath.basename
in
if
String_set.exists
(String.equal cache_file_basename)
(Lazy.force rescript_toplevel_modules)
then
let css = Sx.read_module_cache cache_file in
Sx.Css.union acc css
else (
OS.File.delete cache_file |> Result.get_ok;
acc))
Sx.Css.empty paths
in
let chan = open_out_bin (Fpath.to_string output_file) in
Fmt.pf
(Format.formatter_of_out_channel chan)
"@[<v>/* Generated by sx, PLEASE DO NOT EDIT! */@,%s@,@,%a@]@."
Sx.Preflight.css Sx.Css.pp css;
Ok ()
let () =
Stdlib.at_exit (fun () ->
let lib_bs = Sys.getcwd () in
let input_name = !Location.input_name in
Sx.write_module_cache ~lib_bs ~input_name;
if not (Sx.global_css_is_empty ()) then
generate_css (Sx_ppx.output_path ()) |> Result.get_ok);
Ppxlib.Driver.run_as_ppx_rewriter ()
|
6f56cc4cc8a94a787445189a588dbd23162045961f4b6b72afc9ccef5266bc15 | patoline/patoline | Document.ml |
Copyright Florian Hatat , , ,
Pierre - Etienne Meunier , , 2012 .
This file is part of Patoline .
Patoline is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
Patoline is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with Patoline . If not , see < / > .
Copyright Florian Hatat, Tom Hirschowitz, Pierre Hyvernat,
Pierre-Etienne Meunier, Christophe Raffalli, Guillaume Theyssier 2012.
This file is part of Patoline.
Patoline is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Patoline is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Patoline. If not, see </>.
*)
* High - level representation of documents
The module defines the { ! type : tree } type , which describes whole
documents . This tree is typically produced by running the OCaml
executable obtained after parsing a [ .txp ] file , but can be produced by
other mean . It is the main input to Patoline Typography library in
order to produce the final document .
Values of type { ! type : tree } are meant to be transformed by some format 's
output routine .
We also provide a tree zipper interface to ease construction of a
{ ! type : tree } when reading linearly an input file .
The module defines the {!type:tree} type, which describes whole
documents. This tree is typically produced by running the OCaml
executable obtained after parsing a [.txp] file, but can be produced by
anyy other mean. It is the main input to Patoline Typography library in
order to produce the final document.
Values of type {!type:tree} are meant to be transformed by some format's
output routine.
We also provide a tree zipper interface to ease construction of a
{!type:tree} when reading linearly an input file.
*)
open Patoraw
open Unicodelib
open Patutil
open Patfonts
open Extra
open Fonts
open FTypes
open RawContent
open Driver
open Box
* { 2 Font , substitutions , positioning }
type fontAlternative = Regular | Bold | Caps | Demi
let simpleFamilyMember:(unit->font)->(font*(glyph_id list -> glyph_id list)*(glyph_ids list -> glyph_ids list)) Lazy.t =
fun a->Lazy.from_fun (fun ()->(a (),(fun x->x),(fun x->x)))
let make_ligature l gl x=
let rec match_lig l x=match (l,x) with
[],[]->Some []
| _::_,[]->None
| [],_->Some x
| h1::s1, h2::s2 when h1=h2.glyph_index-> match_lig s1 s2
| _::_,_::_->None
in
let rec make_ligature x=match x with
[]->[]
| h::s->(
match match_lig l x with
None->h::make_ligature s
| Some g->gl::make_ligature g
)
in
make_ligature x
Italic is second
type fontFamily =
fontAlternative *
((font*(glyph_id list -> glyph_id list)*(glyph_ids list -> glyph_ids list)) Lazy.t *
(font*(glyph_id list -> glyph_id list)*(glyph_ids list -> glyph_ids list)) Lazy.t)
(* Instantiate the line-breaking optimizer on lines, ordered
   lexicographically on all of their fields. *)
module TS = Break.Make(
  struct
    type t = line

    (* Field-by-field lexicographic comparison.  Note that floats are
       compared with (<) / (>), not [compare], so a NaN height compares
       equal to anything. *)
    let compare a b =
      if a.paragraph < b.paragraph then -1 else
      if a.paragraph > b.paragraph then 1 else
      if a.lineStart < b.lineStart then -1 else
      if a.lineStart > b.lineStart then 1 else
      if a.lineEnd < b.lineEnd then -1 else
      if a.lineEnd > b.lineEnd then 1 else
      if a.hyphenStart < b.hyphenStart then -1 else
      if a.hyphenStart > b.hyphenStart then 1 else
      if a.hyphenEnd < b.hyphenEnd then -1 else
      if a.hyphenEnd > b.hyphenEnd then 1 else
      if a.lastFigure < b.lastFigure then -1 else
      if a.lastFigure > b.lastFigure then 1 else
      if a.isFigure < b.isFigure then -1 else
      if a.isFigure > b.isFigure then 1 else
      if a.height < b.height then -1 else
      if a.height > b.height then 1 else 0

    (* Generic structural hash, consistent with structural equality. *)
    let hash a=Hashtbl.hash a
  end)
(** {2 Mathematical formulae} *)
(* Parameters controlling mathematical typesetting.  The comment opener
   before [priorities] had been lost, leaving the record definition
   syntactically broken; it is restored here. *)
module Mathematical=struct

  type env={
    mathsFont:Fonts.font Lazy.t;
    mathsSize:float;
    mathsSubst:glyph_id list -> glyph_id list;
    numerator_spacing:float;
    denominator_spacing:float;
    sub1:float;
    sub2:float;
    sup1:float;
    sup2:float;
    sup3:float;
    sub_drop:float;
    sup_drop:float;
    default_rule_thickness:float;
    subscript_distance:float;
    superscript_distance:float;
    limit_subscript_distance:float;
    limit_superscript_distance:float;
    invisible_binary_factor:float;
    open_dist:float;
    close_dist:float;
    left_op_dist:float;
    right_op_dist:float;
    sqrt_dist:float;
    kerning:bool;
    (* None means precise, Some x means imprecise, but subdivide the
       Bezier curve until the thickness of the polygon is less than x *)
    priorities:float array;
    priority_unit:float;
    delimiter_up_tolerance:float;
    delimiter_down_tolerance:float;
    op_tolerance:float;
    op_limits_tolerance:float;
    punctuation_factor:float;
    optical_alpha:float;
    optical_beta:float;
    precise_kerning:float;
  }

  and environment=env array (* must be of size 8: one env per style below *)

  and style=
      Display
    | Display'
    | Text
    | Text'
    | Script
    | Script'
    | ScriptScript
    | ScriptScript'

end
(** {2 Environments} *)
(** Environments. These are typically folded on document trees, and
control many different things about the fonts, counters, or
labels. *)
(* Full typesetting state at a point of the document.  The fields
   [fontMonoRatio], [names] and [counters] are referenced throughout this
   file (e.g. in [names], [incr_counter], [envMonoFamily]) but had been
   swallowed together with their doc comments; they are restored here. *)
type environment={
  fontFamily:fontFamily list;
  fontMonoFamily:fontFamily list;
  fontMonoRatio:float; (* size adjustment of the two previous families *)
  fontItalic:bool;
  fontAlternative:fontAlternative;
  fontFeatures:string list;
  fontColor:Color.color;
  font:font;
  mathsEnvironment:Mathematical.environment;
  mathStyle:Mathematical.style;
  size:float;
  lead:float;
  footnote_y:float;
  normalMeasure:float;
  normalLead:float;
  normalLeftMargin:float;
  normalPageFormat:float*float;
  par_indent:box list;
  hyphenate:string->(string*string) array;
  substitutions:glyph_id list -> glyph_id list;
  positioning:glyph_ids list -> glyph_ids list;
  (* User-defined names: counter state at the definition point, label
     type, and the line where the name appears. *)
  names:((int*int list) StrMap.t*string*line) StrMap.t;
  last_changed_counter:string;
  (* Levels of all the counters at this point: level and value stack. *)
  counters:(int*int list) StrMap.t;
  fixable:bool ref;
  new_page:Box.frame_zipper->Box.frame_zipper;
  new_line:environment->line->parameters->
           line->parameters->Box.frame_zipper->float->float;
  user_positions:line MarkerMap.t;
  show_boxes:bool;
  show_frames:bool;
  adjust_optical_alpha:float;
  adjust_optical_beta:float;
  adjust_epsilon:float;
  adjust_min_space:float;
  (* not in the maths environment, because it is outside TextStyle *)
  stdGlue:float*float*float;
}
let env_accessed=ref false
let names env=
env_accessed:=true;
env.names
let user_positions env=
env_accessed:=true;
env.user_positions
let displayname n=
env_accessed:=true;
n.raw_name
(** {2 Document content} *)
(** Main type used to hold document contents. *)
(* The comment opener on [B]'s documentation had been lost, breaking the
   type definition; restored here.  The constructors themselves are
   unchanged. *)
type content =
  | B of (environment -> box list) * box list option ref
  (** List of boxes depending on an environment. The second parameter is a
      cache used when compilation is iterated to resolve names. *)

  | C of (environment -> content list)
  (** A contents list depending on the environment. This may be used to
      typeset the state of a counter for example. *)

  | T of string * (box list IntMap.t option) ref
  (** Simple text. *)

  | Env of (environment -> environment)
  (** Environment modification function. It can be used to register a name
      or modify the state of a counter for instance. *)

  | Scoped of (environment -> environment) * (content list)
  (** A scoped environment transformation applied on a (small) list of
      contents. *)

  | N of tree
  (** A document tree. *)
(** First type of leaves in a document: paragraphs. *)
and paragraph =
{ par_contents : content list
; par_env : environment -> environment
; par_post_env : environment -> environment -> environment
; par_parameters : environment -> Box.box array array -> Box.drawingBox array
-> parameters -> Break.figurePosition IntMap.t
-> line MarkerMap.t -> line -> line -> parameters
; par_badness : environment -> Box.box array array -> Box.drawingBox array
-> Break.figurePosition IntMap.t -> Box.line
-> Box.box array -> int -> Box.parameters -> float
-> Box.line -> Box.box array -> int
-> Box.parameters -> float -> float
; par_completeLine : environment -> Box.box array array
-> Box.drawingBox array
-> Break.figurePosition IntMap.t -> line MarkerMap.t
-> line -> bool -> line list
; par_states : int list
; par_paragraph : int
}
(** Second type of leaves in a document: figures. *)
and figuredef =
{ fig_contents : environment -> Box.drawingBox
; fig_env : environment -> environment
; fig_post_env : environment -> environment -> environment
; fig_parameters : environment -> Box.box array array -> Box.drawingBox array
-> parameters -> Break.figurePosition IntMap.t
-> line MarkerMap.t -> line -> line -> parameters
}
(** Internal node of the document tree (e.g. section, chapter...). *)
and node =
{ name : string
; displayname : content list
; mutable boxified_displayname : raw list
(* Extensible array of childrens : *)
; children : tree IntMap.t
; node_tags : (string * string) list
(* Environment modification function applied when entering the node : *)
; node_env : environment -> environment
(* Environment modification function applied when leaving the node : *)
; node_post_env : environment -> environment -> environment
(* Page states in which the contents is visible. *)
; node_states : int list
; mutable node_paragraph : int }
(** Type of a document tree. *)
and tree =
| Paragraph of paragraph
| FigureDef of figuredef
| Node of node
(** Empty node (with no child tree). *)
let empty : node =
{ name = ""
; node_tags = []
; displayname = []
; boxified_displayname = []
; children = IntMap.empty
; node_env = (fun x->x)
; node_post_env =
(fun x y -> { x with counters = y.counters ; names = names y
; user_positions = user_positions y })
; node_states = []
; node_paragraph = 0 }
(** Build a node with a single child tree. *)
let singleton : tree -> node = fun t ->
  (* The lone child is stored under index 0. *)
  let only_child = IntMap.singleton 0 t in
  { empty with children = only_child }
(** The main datatype is a zipper over a document tree. It consists in a
    couple whose first component is a tree. The second component represents
    the context identifying a position in the tree.

    The tree represented by the zipper [(t, [(p1,t1), ... , (pn,tn)])] is
    built by:
    + appending the tree [t] at position [p1] in [t1],
    + appending the resulting tree at position [p2] in [t2],
    + ...
    + appending the resulting tree at position [pn] in [tn]. *)
module TreeData = struct
type nonrec node = node
type nonrec tree = tree
let tree_of_node node = Node(node)
let node_of_tree = function
| Node(node) -> node
| _ -> invalid_arg "Document.TreeData.node_of_tree"
let get_child node i = IntMap.find i node.children
let set_child node i tree =
{node with children = IntMap.add i tree node.children}
let remove_child node i =
{node with children = IntMap.remove i node.children}
let has_child node i =
IntMap.mem i node.children
let min_index node =
fst (IntMap.min_binding node.children)
let max_index node =
fst (IntMap.max_binding node.children)
end
module DocZipper = Zipper.Make(TreeData)
type tree_zipper = DocZipper.zipper
(** Build a zipper from a tree. The resulting zipper points to the root
of the tree. *)
let zipper_of_tree = DocZipper.zipper_to_tree
(** Build a zipper whose single node is {!val:empty}. *)
let empty_zipper = DocZipper.empty empty
(** Function that takes a tree zipper [(t,cxt)] pointing to some node
[t] and returns a zipper pointing to the father node of [t]. If this
function is called on a zipper that points to the root of the tree, a
new empty node is created to have [t] as its only child. *)
let up = DocZipper.up
[@@ocaml.deprecated "Use DocZipper.up instead"]
(** Function that applies {!val:up} [n] times on a zipper, effectively
    moving the zipper to the n-th ancestor of the currently pointed
    node. *)
let up_n = DocZipper.up_n
(** Move the zipper to the root of the tree *)
let top = DocZipper.top
(** Retrieve the complete tree from a zipper *)
let tree_of_zipper = DocZipper.zipper_to_tree
(** Move the zipper to point to the child of the pointed node with the higher
index. If the pointed tree is not a node the zipper is left unchanged. *)
let lastChild zip =
try DocZipper.down_last zip
with Invalid_argument(_) -> zip
(** Take a zipper [zip] and a tree [c] and adds [c] as the last child of the
pointed node. If the pointed subtree is not a node, a new node is
created to hold [t] and [c]. The returned zipper points to [c]. *)
let rec newChildAfter : tree_zipper -> tree -> tree_zipper =
let next_key t = try fst (IntMap.max_binding t) + 1 with Not_found -> 0 in
fun (t,cxt) c ->
match (t, cxt) with
| (Node x, _ ) -> (c, (next_key x.children,x)::cxt)
| (_ , []) -> (c, [(1, singleton t)])
| _ -> newChildAfter (up (t,cxt)) c
(** Same as {!val:newChildAfter} but adds the tree as the first child. *)
let rec newChildBefore : tree_zipper -> tree -> tree_zipper =
let prev_key t = try fst (IntMap.min_binding t) - 1 with Not_found -> 0 in
fun (t,cxt) c ->
match (t, cxt) with
| (Node x, _) -> (c, (prev_key x.children,x)::cxt)
| (_ , []) -> (c, [(1, singleton t)])
| _ -> newChildBefore (up (t,cxt)) c
(** Take a zipper pointing to a node and move it down its i-th child. If the
zipper does not point to a node, [Invalid_argument] is raised. If the i-th
child does not exists, it is created as a new empty node. *)
let child : tree_zipper -> int -> tree_zipper =
  fun (t, cxt) i ->
    match t with
    | Node n ->
      (* A missing child defaults to a fresh empty node. *)
      let sub = try IntMap.find i n.children with Not_found -> Node empty in
      (sub, (i, n) :: cxt)
    | _ -> raise (Invalid_argument "Typography.child")
(** Take a tree zipper and an path represented as a list of integers and move
the zipper down the path (i.e. calling child on the successive indices. *)
let rec follow : tree_zipper -> int list -> tree_zipper =
  fun zip path ->
    match path with
    | []      -> zip
    | i :: is -> follow (child zip i) is
(** Module type of a document format. *)
module type Format =
sig
val defaultEnv : environment
val postprocess_tree : tree -> tree
val title : (tree * (IntMap.key * tree) list) ref -> ?label:'a
-> ?extra_tags:(string * string) list -> content list -> bool
val parameters : environment -> box array array -> Box.drawingBox array
-> parameters -> Break.figurePosition IntMap.t -> line MarkerMap.t
-> line -> parameters
end
(** Module type to be used as a document wrapper. The document structure is
stored in its zipper form in a reference. Functions are provided below
to edit the document tree. *)
module type DocumentStructure =
sig
val structure : tree_zipper ref
end
(* Tags attached to a tree node; leaves carry no tags. *)
let doc_tags t =
  match t with
  | Node n -> n.node_tags
  | _      -> []
let init_env_hook = ref ([] : (environment -> environment) list)
let add_env_hook f = init_env_hook := f::!init_env_hook
let bB f = B(f,ref None)
let uB f = C(fun _->env_accessed:=true;[bB f])
let tT f = T(f,ref None)
let uT f = C(fun _->env_accessed:=true;[tT f])
(* Concatenate the raw text of every [T] element of a content list,
   separating consecutive pieces with a single space; all other
   constructors are ignored. *)
let string_of_contents l =
  let buf = Buffer.create 1000 in
  let add_text str =
    if Buffer.length buf > 0 then Buffer.add_string buf " ";
    Buffer.add_string buf str
  in
  List.iter (function T (str, _) -> add_text str | _ -> ()) l;
  Buffer.contents buf
let raw : (environment -> RawContent.raw list) -> content = fun f ->
let contents _ =
let dr env =
let raw = f env in
let (x0,y0,x1,y1) = RawContent.bounding_box raw in
let w = x1 -. x0 in
let open Box in
{ drawing_min_width = w
; drawing_nominal_width = w
; drawing_max_width = w
; drawing_width_fixed = true
; drawing_adjust_before = false
; drawing_y0 = y0
; drawing_y1 = y1
; drawing_badness = (fun _ -> 0.0)
; drawing_break_badness = infinity
; drawing_states = []
; drawing_contents = (fun _ -> raw) }
in
[bB (fun env -> [Drawing (dr env)])]
in
C contents
let _names env=
env.names
let _user_positions env=
env.user_positions
(* Increment the counter [name]: bump the head of its value stack.
   An unknown counter is created at [level] with value [0]. *)
let incr_counter ?(level= -1) name env=
  { env with
    last_changed_counter=name;
    counters=
      StrMap.add name (try let a,b=StrMap.find name env.counters in
                           match b with
                             (* bump the innermost position *)
                             h::s -> (a,(h+1)::s)
                           | _->a,[0]
                       with
                         (* unknown counter: create it at [level] *)
                         Not_found -> level, [0]
                      ) env.counters }
(* Drop the innermost level of counter [name].
   Raises Not_found when the counter does not exist. *)
let pop_counter name env=
  let lvl, vals = StrMap.find name env.counters in
  { env with
    last_changed_counter = name;
    counters = StrMap.add name (lvl, List.drop 1 vals) env.counters }
(* Open a new innermost level, starting at 0, for counter [name].
   Raises Not_found when the counter does not exist. *)
let push_counter name env=
  let lvl, vals = StrMap.find name env.counters in
  { env with
    last_changed_counter = name;
    counters = StrMap.add name (lvl, 0 :: vals) env.counters }
(* Tags attached to a node of the document tree.  This was a verbatim
   duplicate of [doc_tags] above; alias it instead. *)
let tags = doc_tags
(** Creates a new page, using 1/6th of the given lengths for margins.

    A page is implemented as two nested frames: the outer frame has the
    actual size of the whole page, while the inner frame size is the
    papersize minus margins.

    This function returns the inner frame. *)
let default_new_page pageFormat zip =
let ((page, _) as zip)=Box.make_page pageFormat (frame_top zip) in
let w = page.frame_x1 -. page.frame_x0
and h = page.frame_y1 -. page.frame_y0 in
let x0=(page.frame_x0+.1.*.w/.6.) in
let y0=(page.frame_y0+.1.*.h/.6.) in
let x1=(page.frame_x1-.1.*.w/.6.) in
let y1=(page.frame_y1-.1.*.h/.6.) in
frame x0 y0 x1 y1 zip
(** Creates a new page without any margin *)
let raw_new_page pageFormat zip =
let (page, _) as zip = Box.make_page pageFormat (frame_top zip) in
frame page.frame_x0 page.frame_y0 page.frame_x1 page.frame_y1 zip
(**/**)
(* Apply a list of environment transformers, in list order. *)
let envApp l env = List.fold_left (|>) env l
let rec map_paragraphs f = function
| Node n -> Node { n with children=IntMap.map (map_paragraphs f) n.children }
| Paragraph p -> Paragraph (f p)
| x -> x
exception Found
(** Finds the last node satisfying a given predicate in a document tree. *)
(* Depth-first search visiting children from the highest index down; the
   first node found this way is recorded through [result] and the [Found]
   exception aborts the traversal.  Raises Not_found when no node
   satisfies [f].  The returned value is the root-to-node index path. *)
let find_last f tr=
  let result=ref None in
  let rec find_last path tr=match tr with
    | _ when f tr->(
      (* Store the path in root-to-node order and stop searching. *)
      result:=Some (List.rev path);
      raise Found
    )
    | Node n->(
      let k1,_=IntMap.max_binding n.children in
      let k0,_=IntMap.min_binding n.children in
      (* Scan children from the last key to the first. *)
      for i=k1 downto k0 do
        try
          find_last (i::path) (IntMap.find i n.children);
        with
          Not_found -> ()
      done;
    )
    | _->raise Not_found
  in
  try
    find_last [] tr;
    raise Not_found
  with
    Found->(
      match !result with
        None->raise Not_found
      | Some a->a
    )
(** Is the node a paragraph ? *)
let is_paragraph = function
  | Paragraph _ -> true
  | _           -> false
(** Is the node an internal node ? *)
let is_node = function
  | Node _ -> true
  | _      -> false
let rec prev f (t,cxt) =
if f t then (t,cxt) else (
match t with
Node nt->
let bin=List.rev (IntMap.bindings nt.children) in
let rec prevs=function
[]->raise Not_found
| (h,ht)::s->
try
prev f (ht, (h,t)::cxt)
with
Not_found->prevs s
in
prevs bin
| _->raise Not_found
)
let go_up str=
(if snd !str=[] then Printf.fprintf stderr "Warning : go_up\n");
str:=(up !str)
let n_go_up n str =
for _ = 1 to n do go_up str done
(** {3 Environment transformations} *)
let change_env t fenv=match t with
(Node n,l)->(Node { n with node_env=fun x->fenv (n.node_env x) }, l)
| (Paragraph n,l)->(Paragraph { n with par_env=fun x->fenv (n.par_env x) }, l)
| (FigureDef f, l)->
FigureDef {f with fig_env=fun x->fenv (f.fig_env x) }, l
exception Not_found_in_family
(* Look up the (regular, italic) pair registered for [alt] in the family
   and force the requested variant.  Any [Not_found] raised while looking
   up or loading is turned into [Not_found_in_family]. *)
let selectFont fam alt it =
  try
    let (regular, italic) = List.assoc alt fam in
    Lazy.force (if it then italic else regular)
  with Not_found -> raise Not_found_in_family
let updateFont env font subst pos=
let feat=Fonts.select_features font env.fontFeatures in
{ env with
font=font;
substitutions=(fun glyphs -> Fonts.apply_features font feat (subst glyphs));
positioning=(fun x->pos (positioning font x)) }
let change_font f env = updateFont env f (fun x->x) (fun x->x)
(* Change the font in a scope; ignores the family.  Beware: applies the
   font directly. *)
let font f t=
[Scoped (change_font f, t)]
(* Add a list of font features; see Fonts.FTypes for what exists. *)
let add_features features env=
  (* The requested features are appended to those already active; both the
     feature list and the substitution pipeline are updated accordingly.
     (The original text had lost the [env.fontFeatures@features] arguments,
     leaving a syntax error; restored here.) *)
  let feat=Fonts.select_features env.font (env.fontFeatures@features) in
  { env with
    fontFeatures=env.fontFeatures@features;
    substitutions=(fun glyphs -> Fonts.apply_features env.font feat
                                   (env.substitutions glyphs));
  }
let envItalic b env =
let font, subst, pos= selectFont env.fontFamily env.fontAlternative b in
let env = { env with fontItalic = b } in
updateFont env font subst pos
let italic t = [ Scoped(envItalic true, t) ]
(* Placeholder module kept for compatibility; its former contents are
   commented out.  The closing [end] had itself been commented out,
   leaving the struct unterminated; restored here. *)
module Italic = struct
  (* let do_begin_Italic () = () *)
  (* let do_end_Italic () = () *)
  (* let defaultEnv = envItalic true defaultEnv *)
end

module Env_Italic = Italic
let notItalic t =
[Scoped (envItalic false, t)]
let toggleItalic t =
[Scoped ((fun env -> envItalic (not env.fontItalic) env), t)]
let envAlternative ?(features:'a option) alt env =
let features = match features with
None -> env.fontFeatures
| Some f -> f
in
let font,subs,pos = selectFont env.fontFamily alt env.fontItalic in
let env = { env with fontAlternative = alt } in
add_features features (updateFont env font subs pos)
let alternative ?(features:'a option) alt t =
[Scoped ((fun env -> envAlternative ?features alt env), t)]
let font_size_ratio font1 font2 =
let x_h f =
let f,_,_ = Lazy.force (fst (List.assoc Regular f)) in
let x=Fonts.loadGlyph f
({empty_glyph with glyph_index=Fonts.glyph_of_char f 'o'}) in
Fonts.glyph_y1 x -. Fonts.glyph_y0 x
in
x_h font1 /. x_h font2
let envFamily fam env =
let font,subs,pos = selectFont fam env.fontAlternative env.fontItalic in
let env = { env with fontFamily = fam; size = font_size_ratio env.fontFamily fam *. env.size } in
updateFont env font subs pos
let family fam t =
[Scoped ((fun env -> envFamily fam env), t)]
let envMonoFamily fam env =
{ env with
fontMonoFamily = fam;
fontMonoRatio=font_size_ratio env.fontFamily fam }
let monoFamily fam t =
[Scoped ((fun env -> envMonoFamily fam env), t)]
let envSize fsize env=
{ env with
size=fsize;
lead=env.lead*.fsize/.env.size }
(* Change the size within a scope *)
let size fsize t=
[Scoped (envSize fsize, t)]
let envScale alpha env =
{ env with size = env.size *. alpha }
(* Change the scale within a scope *)
let scale alpha t=
[Scoped (envScale alpha, t)]
let envScaleLead alpha env=
{ env with
lead=env.lead *. alpha }
let scaleLead alpha t=
[Scoped (envScaleLead alpha, t)]
let envColor color env =
{env with fontColor=color}
let color color t=
[Scoped (envColor color, t)]
let envBold = envAlternative Bold
let bold = alternative Bold
let envSv = envAlternative Caps
let sc = alternative Caps
let verbEnv x =
{ (envFamily x.fontMonoFamily (envScale x.fontMonoRatio x))
with normalMeasure=infinity; par_indent = [] } (* For full paragraph *)
let verb p =
[Scoped ((fun x -> envFamily x.fontMonoFamily
(envScale x.fontMonoRatio x)),
p)] (* for inline text *)
let emph=toggleItalic
let id x=x
(****************************************************************)
(* Tricky part: modify this at your own risk, or learn by modifying it. *)
let parameters env pars figures _ last_figures _ _ line =
let fn i figPos m =
let open Break in
match figPos with
| Placed(l) when layout_page line = layout_page l
&& line.height >= l.height +. figures.(i).drawing_y0
&& line.height <= l.height +. figures.(i).drawing_y1
-> env.normalMeasure -. figures.(i).drawing_nominal_width
| _ -> m
in
let params =
{ measure = IntMap.fold fn last_figures env.normalMeasure
; left_margin = env.normalLeftMargin
; local_optimization = 0
; min_page_before = 0
; min_page_after = 0
; min_height_before = 0.0
; min_height_after = 0.0
; not_last_line = false
; not_first_line = false
; min_lines_before = 1
; min_lines_after = 0
; absolute = false }
in
let fn params b = match b with Parameters(f) -> f params | _ -> params in
fold_left_line pars fn params line
(* Lift a parameters transformer into a content list, as a single
   parameter-carrying box. *)
let set_parameters : (parameters -> parameters) -> content list =
  fun f -> [bB (fun _ -> [Parameters f])]
let vspaceBefore : float -> content list = fun sp ->
let fn p = {p with min_height_before = max p.min_height_before sp} in
set_parameters fn
let vspaceAfter : float -> content list = fun sp ->
let fn p = {p with min_height_after = max p.min_height_after sp} in
set_parameters fn
let pagesBefore : int -> content list = fun nb ->
let fn p = {p with min_page_before = max p.min_page_before nb} in
set_parameters fn
let pagesAfter : int -> content list = fun nb ->
let fn p = {p with min_page_after = max p.min_page_after nb} in
set_parameters fn
let linesBefore : int -> content list = fun nb ->
let fn p = {p with min_lines_before = max p.min_lines_before nb} in
set_parameters fn
let linesAfter : int -> content list = fun nb ->
let fn p = {p with min_lines_after = max p.min_lines_after nb} in
set_parameters fn
let notFirstLine : content list =
set_parameters (fun p -> {p with not_first_line = true})
let notLastLine : content list =
set_parameters (fun p -> {p with not_last_line = true})
let hspace : float -> content list = fun sp ->
[bB (fun env -> let sp = sp *. env.size in [glue sp sp sp])]
let hfill : content list =
[bB (fun env -> let mes = env.normalMeasure in [glue 0.0 (0.5 *. mes) mes])]
let do_center parameters a b c d e f g line =
let param = parameters a b c d e f g line in
let min_w = line.min_width in
let nom_w = line.nom_width in
if param.measure >= nom_w then
let left_margin = param.left_margin +. (param.measure -. nom_w) /. 2.0 in
{param with measure = nom_w; left_margin}
else if param.measure < min_w then
let left_margin = param.left_margin +. (param.measure -. min_w) /. 2.0 in
{param with measure = min_w; left_margin}
else param
let do_ragged_left parameters a b c d e f g line =
let param = parameters a b c d e f g line in
{param with measure = line.nom_width}
let do_ragged_right parameters a b c d e f g line =
let param = parameters a b c d e f g line in
let left_margin = param.left_margin +. param.measure -. line.nom_width in
{param with measure = line.nom_width; left_margin}
(* Badness of typesetting line [node_j] after line [node_i]: vertical and
   horizontal badness plus ad-hoc penalties for underfull pages,
   hyphenation, break points and compression changes. *)
let badness env paragraphs _ _
    node_i line_i max_i params_i comp_i
    node_j line_j max_j params_j comp_j=
  if node_j.paragraph>=Array.length paragraphs then 0. else (
    let v_bad=
      if layout_page node_i=layout_page node_j then (
        Badness.v_badness
          (node_j.height-.node_i.height)
          line_i max_i params_i comp_i
          line_j max_j params_j comp_j
      ) else (
        (* A page break right after a hyphenated line is forbidden. *)
        if node_i.hyphenEnd>=0 then infinity else 0.
      )
    in
    (Badness.h_badness paragraphs params_j.measure node_j comp_j)
    +. v_bad
    (* Page not full enough *)
    +. (if layout_page node_i<>layout_page node_j &&
          node_i.height>=(fst node_i.layout).frame_y0+.env.lead then 10000. else 0.)
    (* Hyphenation penalties *)
    +. (if node_j.hyphenEnd >=0 then
          (if node_j.hyphenStart >=0 then
             1e10
           else
             1e8)
        else
          (if node_j.hyphenStart >=0 then
             1e8
           else
             0.)
       )
    (* Badness of breaking here *)
    +. (if node_j.lineEnd<Array.length paragraphs.(node_j.paragraph)
          && not node_j.isFigure then
          match paragraphs.(node_j.paragraph).(node_j.lineEnd) with
            Glue g->g.drawing_break_badness
          | _->0.
        else 0.0
       )
    (* Difference of compression between two consecutive lines *)
    +. (1000.*.(abs_float (comp_i-.comp_j)))
  )
(** {3 Figures} *)
let figure str parameters ?(name="") drawing=
str:=up (newChildAfter !str (
FigureDef
{ fig_contents=drawing;
fig_env=(fun x->
let l,cou=try StrMap.find "_figure" x.counters with
Not_found -> -1, [] in
let l0,cou0=try StrMap.find "figure" x.counters with
Not_found -> -1, [] in
let counters'=
(StrMap.add "_figure"
(l,match cou with h::s->(h+1)::s | _->[0])
(StrMap.add "figure"
(l0,match cou0 with h::s->(h+1)::s | _->[0]) x.counters)
)
in
{ x with
names=if name="" then names x else (
let w=
try let (_,_,w)=StrMap.find name (names x) in w
with Not_found -> uselessLine
in
StrMap.add name (counters', "_figure", w) (names x)
);
counters=counters';
last_changed_counter="_figure"
});
fig_post_env=(fun x y->{ x with names=names y; counters=y.counters; user_positions=user_positions y });
fig_parameters=parameters }))
let flushFigure name=
[C (fun env->
try
env_accessed:=true;
let (counters,_,_)=StrMap.find name (names env) in
match StrMap.find "_figure" counters with
_,h::_->[bB (fun _->[FlushFigure h])]
| _->[]
with
Not_found ->[]
)]
let beginFigure name=
[C (fun env->
try
env_accessed:=true;
let (counters,_,_)=StrMap.find name (names env) in
match StrMap.find "_figure" counters with
_,h::_->[bB (fun _->[BeginFigure h])]
| _->[]
with
Not_found ->[]
)]
(****************************************************************)
(* Add a new paragraph (with given parameters) below the current node. *)
let newPar str ?(environment=(fun x->x)) ?(badness=badness) ?(states=[]) complete parameters par=
let para =
{ par_contents = par
; par_env = environment
; par_post_env = (fun env1 env2 ->
{ env1 with names = names env2
; counters = env2.counters
; user_positions = user_positions env2 })
; par_parameters = parameters
; par_badness = badness
; par_completeLine = complete
; par_states = states
; par_paragraph = (-1) }
in up (newChildAfter str (Paragraph para))
(** Adds a new node, just below the last one. *)
let newStruct str ?(in_toc=true) ?label ?(numbered=true) ?(extra_tags=[]) displayname =
let name = match label with
None -> string_of_contents displayname
| Some s -> s
in
let displayname=match displayname with
[]->(match label with Some s->[tT s] | None->[])
| _->displayname
in
let para=Node {
empty with
name=name;
displayname =[C (fun _->env_accessed:=true;displayname)];
node_tags= extra_tags @ (if in_toc then ["intoc",""] else []) @ ["structural",""] @ (if numbered then ["numbered",""] else []);
node_env=(
fun env->
{ env with
last_changed_counter="_structure";
counters=StrMap.add "_structure" (
try
let (a,b)=StrMap.find "_structure" env.counters in
a,0::(match b with []->[0] | _->b)
with
Not_found -> (-1,[0;0])
) env.counters }
);
node_post_env=(
fun env env'->
{ env with
names=names env';
user_positions=user_positions env';
counters=StrMap.add "_structure" (
try
let a,b=StrMap.find "_structure" env'.counters in
match b with
_::h::s when numbered ->a,(h+1)::s
| _::h::s ->a,h::s
| _ -> a, [0]
with
Not_found -> -1,[0]
) env'.counters }
);
}
in newChildAfter str para
(** {3 References, labels and links} *)
let pageref x=
[C (fun env->
try
env_accessed:=true;
let (_,_,node)=StrMap.find x (names env) in
[bB (fun _->[Marker (BeginLink (Intern x))]);
tT (string_of_int (1+layout_page node));
bB (fun _->[Marker EndLink])]
with Not_found -> []
)]
(* Normalize a name: leading blanks are dropped and every inner run of
   blanks collapses to a single ASCII space. *)
let make_name name =
  let out = UTF8.Buf.create (String.length name) in
  let rec scan i prev_blank =
    if UTF8.out_of_range name i then UTF8.Buf.contents out
    else if UChar.is_space (UTF8.look name i) then
      if prev_blank then
        (* NOTE(review): advancing one byte here (not UTF8.next) mirrors
           the original; multi-byte space characters are re-examined
           mid-character — confirm whether this is intended. *)
        scan (i + 1) true
      else begin
        UTF8.Buf.add_char out (UChar.of_char ' ');
        scan (UTF8.next name i) true
      end
    else begin
      UTF8.Buf.add_char out (UTF8.look name i);
      scan (UTF8.next name i) false
    end
  in
  scan 0 true
let label ?labelType name=
let name=make_name name in
[Env (fun env->
let w=try let (_,_,w)=StrMap.find name (names env) in w with Not_found -> uselessLine in
let labelType=match labelType with None->env.last_changed_counter | Some t->t in
{ env with names=StrMap.add name (env.counters, labelType, w) (names env) });
bB (fun _ -> [Marker (Label name)])
]
let pass_number = ref (-1)
let lref ?refType name=
let name=make_name name in
[ C (fun env->
try
env_accessed:=true;
let counters,refType_=
if name="_here" then env.counters,env.last_changed_counter else
let a,t,_=StrMap.find name (names env) in a,t
in
let refType=match refType with Some x->x | None->refType_ in
let lvl,num_=StrMap.find refType counters in
let num=if refType="_structure" then List.drop 1 num_ else num_ in
let str_counter=
try
let _,str_counter=StrMap.find "_structure" counters in
str_counter
with
Not_found->[]
in
let sect_num = List.drop (List.length str_counter - max 0 lvl+1) str_counter in
[bB (fun _->[Marker (BeginLink (Intern name))]);
tT (String.concat "." (List.map (fun x->string_of_int (x+1))
(List.rev (num@sect_num))));
bB (fun _->[Marker EndLink])]
with
Not_found ->
let refType=match refType with Some x->x | None-> "Default" in
if !pass_number <> 0 then Printf.eprintf "Unknown label %S of labelType %S (%d)\n%!" name refType !pass_number;
color Color.red [tT "??"]
)]
let generalRef t x = lref ~refType:t x
let sectref x=lref ~refType:"_structure" x
let extLink a b=bB (fun _->[Marker (BeginLink (Extern a))])::b@[bB (fun _->[Marker EndLink])]
let link a b=bB (fun _->[Marker (BeginLink (Intern a))])::b@[bB (fun _->[Marker EndLink])]
let button_name =
let c = ref 0 in
fun () -> let x = !c in c := x+1; "button_" ^ string_of_int x
let button =
fun btype b ->
bB (fun _->[Marker (BeginLink (Button(btype, button_name ())))])::
b @ bB (fun _->[Marker EndLink]) :: []
(** {3 Images} *)
let image ?scale:(scale=0.) ?width:(width=0.) ?height:(height=0.) ?offset:(offset=0.) imageFile _ =
let i=RawContent.image imageFile in
let dr={
drawing_min_width=i.image_width;
drawing_max_width=i.image_width;
drawing_nominal_width=i.image_width;
drawing_width_fixed = true;
drawing_adjust_before = false;
drawing_y0=(-.offset);
drawing_y1=(-.offset) -. i.image_height;
drawing_break_badness=0.;
drawing_states=[];
drawing_badness=(fun _->0.);
drawing_contents=(fun _->[RawContent.translate 0. (-.offset) (Image i)])
}
in
let scale =
if scale >0. then scale
else if width > 0. then width /. i.image_width
else if height > 0. then height /. i.image_height
else 0.
in
if scale>0. then resize_drawing scale dr
else dr
(* Build a drawing box for a video file.  The first frame is extracted
   with ffmpeg (cached as <basename>-1.png) in order to measure the pixel
   size of the video. *)
let video ?scale:(scale=0.) ?width:(width=0.) ?height:(height=0.) ?offset:(offset=0.) imageFile env=
  let tmp=(try Filename.chop_extension imageFile with _->imageFile) in
  (* Re-extract the poster frame if missing or older than the video. *)
  if not (Sys.file_exists (tmp^"-1.png")) ||
       (Unix.stat (tmp^"-1.png")).Unix.st_mtime
       < (Unix.stat imageFile).Unix.st_mtime then (
    (* NOTE(review): [imageFile] is interpolated unquoted into a shell
       command; names containing spaces or shell metacharacters break
       this, and untrusted names would allow command injection. *)
    let _=Sys.command (Printf.sprintf "ffmpeg -i %s -t 1 -r 1 %s-%%d.png" imageFile tmp) in
    ()
  );
  let w,h = ImageLib.size (tmp^"-1.png") in
  (* Displayed size: explicit width wins, then height, then scale, then a
     default that fits the current measure (capped at 1/7th scale). *)
  let fw,fh=
    if width=0. then
      if height=0. then
        if scale=0. then
          if env.normalMeasure<(float_of_int w)/.7. then
            env.normalMeasure, env.normalMeasure*.(float_of_int h)/.(float_of_int w)
          else
            (float_of_int w)/.7.,(float_of_int h)/.7.
        else
          (float_of_int w)*.scale,(float_of_int h)*.scale
      else
        height*.(float_of_int w)/.(float_of_int h), height
    else
      width, width*.(float_of_int h)/.(float_of_int w)
  in
  let i={video_file=imageFile;
         video_width=fw;
         video_height=fh;
         video_pixel_width=w;
         video_pixel_height=h;
         video_x=0.;
         video_y=offset;
         video_order=0
        }
  in
  {
    drawing_min_width=fw;
    drawing_max_width=fw;
    drawing_nominal_width=fw;
    drawing_width_fixed = true;
    drawing_adjust_before = false;
    drawing_y0=offset;
    drawing_y1=fh+.offset;
    drawing_break_badness=0.;
    drawing_states=[];
    drawing_badness=(fun _->0.);
    drawing_contents=(fun _->[RawContent.Video i])
  }
let includeGraphics ?scale:(scale=0.) ?width:(width=0.) ?height:(height=0.) ?offset:(offset=0.) imageFile=
[bB (fun env->[Drawing (image ~scale ~width ~height ~offset imageFile env)])]
(** Wrap [video] as document content; the drawing is built lazily from the
    environment at boxification time. *)
let includeVideo ?(scale=0.) ?(width=0.) ?(height=0.) ?(offset=0.) imageFile =
  let build env = [Drawing (video ~scale ~width ~height ~offset imageFile env)] in
  [bB build]
(** {3 Boxification} *)
(**/**)
(* Standard glue memoized per font size as [(size, glue)]; rebuilt by
   [makeGlue] whenever the environment's size changes. *)
let rStdGlue:(float*box) ref=ref (0.,glue 0. 0. 0.)
(**/**)
(* Shared extensible buffer: (storage array, number of used slots).
   Fix: the original line was mangled to [( [ ||],0 )], which is not a valid
   empty-array literal and does not parse. *)
let ambientBuf = ref ([||], 0)
(** Makes a glue from the unicode character code given in the argument. *)
let makeGlue env x0=
  (* Standard glue for the current size, recomputed only when the size in the
     [rStdGlue] cache is out of date. *)
  let stdGlue=
    if fst !rStdGlue <> env.size then
      begin
        let (mi,no,ma) = env.stdGlue in
        rStdGlue:=(env.size,
                   glue (mi*.env.size) (no*.env.size) (ma*.env.size))
      end;
    snd !rStdGlue
  in
  (* ASCII whitespace gets the standard, breakable glue. *)
  if (x0>=0x0009 && x0<=0x000d) || x0=0x0020 then stdGlue else
    match x0 with
      (* U+00A0 no-break space: re-wrapping the glue as a Drawing makes it
         unbreakable while keeping the same widths. *)
      0x00a0->(match stdGlue with
                 Glue y->(
                   Drawing y
                 )
               | y->y)
    | 0x1680->stdGlue
    | 0x180e->(glue 0. 0. 0.)
    | 0x2000->let w=env.size/.2. in (glue w w w)
    | 0x2001->let w=env.size in (glue w w w)
    | 0x2002->let w=env.size/.2. in (glue (w*.2./.3.) w (w*.3./.2.))
    | 0x2003->let w=env.size in (glue (w*.2./.3.) w (w*.3./.2.))
    | 0x2004->let w=env.size/.3. in (glue (w*.2./.3.) w (w*.3./.2.))
    | 0x2005->let w=env.size/.4. in (glue (w*.2./.3.) w (w*.3./.2.))
    | 0x2006->let w=env.size/.6. in (glue (w*.2./.3.) w (w*.3./.2.))
    (* U+2007 figure space: width of the digit "0" in the current font. *)
    | 0x2007->(
        let w0=
          glyph_of_string env.substitutions env.positioning env.font env.size
            env.fontColor
            "0"
        in
        let w=env.size*.(List.fold_left (fun w1 b->w1+.box_width 0. b) 0. w0) in (glue (w*.2./.3.) w (w*.3./.2.))
      )
    (* U+2008 punctuation space: width of "." in the current font. *)
    | 0x2008->(
        let w0=
          glyph_of_string env.substitutions env.positioning env.font env.size
            env.fontColor
            "."
        in
        let w=env.size*.(List.fold_left (fun w1 b->w1+.box_width 0. b) 0. w0) in (glue (w*.2./.3.) w (w*.3./.2.))
      )
    | 0x2009->let w=env.size/.5. in (glue (w*.2./.3.) w (w*.3./.2.))
    | 0x200a->let w=env.size/.8. in (glue (w*.2./.3.) w (w*.3./.2.))
    (* U+202F narrow no-break space: converted to a Drawing (unbreakable). *)
    | 0x202f->
        let w=env.size/.5. in
        (match glue (w*.2./.3.) w (w*.3./.2.) with
           Glue y->Drawing y
         | y->y)
    | 0x205f->let w=env.size*.4./.18. in (glue (w*.2./.3.) w (w*.3./.2.))
    | 0xfeff->(glue 0. 0. 0.)
    | _->stdGlue
(** Converts a [string] to a list of glyphs, according to the environment. *)
(** Convert [str] to glyph boxes using the environment's font, substitutions
    and positioning. On a missing glyph the string is reported on stderr and
    the empty list is returned. *)
let gl_of_str env str =
  try
    hyphenate env.hyphenate env.substitutions env.positioning env.font
      env.size env.fontColor str
  with Glyph_not_found _ ->
    Printf.eprintf "glyph not found in: %s (%S)\n%!" str str;
    []
(**/**)
(* Push [x] at index [!nbuf] of the extensible array [buf], doubling the
   underlying storage (padding new slots with [Empty]) when full. *)
let append buf nbuf x=
  let cap = Array.length !buf in
  if !nbuf >= cap then begin
    let bigger =
      Array.init (max 1 (2 * cap))
        (fun j -> if j < !nbuf then (!buf).(j) else Empty)
    in
    buf := bigger
  end;
  (!buf).(!nbuf) <- x;
  incr nbuf
(* Append the first [nbuf2] elements of [buf2], in order, to the extensible
   array [buf1]/[nbuf1]. *)
let concat buf1 nbuf1 buf2 nbuf2=
  Array.iter (append buf1 nbuf1) (Array.sub buf2 0 nbuf2)
(* Add [x] to the integer-keyed map [m] under one plus the largest existing
   key (0 on an empty map). *)
let mappend m x=
  let last = try fst (IntMap.max_binding m) with Not_found -> -1 in
  IntMap.add (last + 1) x m
(* NFKC Unicode normalization. The immediate rebinding below deliberately
   disables it (text passes through unchanged); the first binding is kept,
   presumably to preserve the reference to UNF8 — TODO confirm intent. *)
let nfkc = UNF8.nfkc
let nfkc x = x
(**/**)
(** Converts a list of contents into a list of boxes, which is the next Patoline layer. *)
(* [boxify buf nbuf env0 l] appends the boxes produced by the contents [l] to
   the extensible array [buf]/[nbuf] and returns the resulting environment.
   The [env_accessed] flag tracks whether a content thunk read the
   environment: if it did, its result cannot be cached and [env0.fixable] is
   set so another pass may be scheduled. *)
let boxify buf nbuf env0 l=
  let rec boxify keep_cache env = function
    | []->env
    | B (b, cache) :: s ->
        (* Reuse the cached boxes when allowed; otherwise run the thunk and
           cache its result only if it did not read the environment. *)
        let l =
          match !cache with
          | Some l when keep_cache -> l
          | _ ->
              let acc = !env_accessed in
              env_accessed := false;
              let l = b env in
              if keep_cache then
                (if not !env_accessed then cache := Some l
                 else env0.fixable := true);
              env_accessed := acc || !env_accessed; l
        in
        List.iter (append buf nbuf) l;
        boxify keep_cache env s
    | (C b)::s->(
        (* Environment-dependent contents: if the thunk read the environment,
           recurse with caching disabled. *)
        let acc= !env_accessed in
        env_accessed:=false;
        let c = b env in
        let env'=if !env_accessed then (
            env0.fixable:=true;
            boxify false env c
          ) else boxify keep_cache env c
        in
        env_accessed:=acc || !env_accessed;
        boxify keep_cache env' s
      )
    | Env f::s->boxify keep_cache (f env) s
    (* The following (commented) case is a mistake and causes non-transparent behavior, for instance when defining "tT"s in a global variable. *) (*
    | T (t,cache)::T (t',_)::s->
        boxify keep_cache env (T (t^t',match !cache with Some _->cache | _->cache)::s)
    *)
    | T (t,cache) :: s -> (
        match !cache with
        | Some l when keep_cache ->
            IntMap.iter (fun _->List.iter (append buf nbuf)) l;
            boxify keep_cache env s
        | _ ->
            (* Split the text on Unicode spaces; each word becomes a glyph
               run, each space a glue, all collected into an IntMap. *)
            let l = ref IntMap.empty in
            let t = nfkc t in
            let rec cut_str i0 i =
              if i >= String.length t then
                let sub = String.sub t i0 (i-i0) in
                l := mappend !l (gl_of_str env sub)
              else if UChar.is_space (UTF8.look t i) then
                let sub = String.sub t i0 (i-i0) in
                l := mappend !l (gl_of_str env (nfkc sub));
                if i <> i0 || i = 0 then
                  l:=mappend !l [makeGlue env (UChar.code (UTF8.look t i))];
                cut_str (UTF8.next t i) (UTF8.next t i)
              else
                cut_str i0 (UTF8.next t i)
            in cut_str 0 0;
            if keep_cache then cache := Some !l;
            IntMap.iter (fun _->List.iter (append buf nbuf)) !l;
            boxify keep_cache env s)
    | Scoped (fenv, p)::s->
        (* The transformed environment is local to [p]. *)
        let env'=fenv env in
        let _=boxify keep_cache env' p in
        boxify keep_cache env s
    | N _ :: _->
        failwith "boxify: wrong argument (N)";
  in
  boxify true env0 l
(** Typesets boxes on a single line, then converts them to a list of basic
drawing elements: [RawContent.raw]. *)
(** Typesets boxes on a single line, converting them to raw drawing elements.
    [x] is the running abscissa, [dr] the accumulator of produced raw
    contents (in reverse order of production for most cases).
    Fix: two debug statements (a [BeginURILink] trace and a [print_raw] dump)
    had lost their comment markers during extraction and broke compilation;
    they are restored as comments. *)
let draw_boxes env l=
  let rec draw_boxes x y dr l=match l with
      []->dr,x
    | Kerning kbox::s ->(
        let dr',x'=draw_boxes (x+.kbox.kern_x0) (y+.kbox.kern_y0) dr [kbox.kern_contents] in
        draw_boxes (x'+.kbox.advance_width) y dr' s
      )
    | Hyphen h::s->(
        (* Draw the non-hyphenated form of the box. *)
        let dr1,w1=Array.fold_left (fun (dr',x') box->
            draw_boxes x' y dr' [box]
          ) (dr,x) h.hyphen_normal
        in
        draw_boxes w1 y dr1 s
      )
    | GlyphBox a::s->(
        let box=RawContent.Glyph { a with glyph_x=a.glyph_x+.x;glyph_y=a.glyph_y+.y } in
        let w=a.glyph_size*.Fonts.glyphWidth a.glyph/.1000. in
        draw_boxes (x+.w) y (box::dr) s
      )
    | Glue g::s
    | Drawing g ::s->(
        let w=g.drawing_nominal_width in
        let box=(List.map (RawContent.translate (x) (y)) (g.drawing_contents w)) in
        draw_boxes (x+.w) y (box@dr) s
      )
    | Marker (BeginLink l)::s->(
        (* Printf.fprintf stderr "****BeginURILink %S****\n" l; *)
        let k = match l with
            Box.Extern l -> RawContent.Extern l;
          | Box.Intern l ->
              (* Resolve the destination page from the user positions; -1
                 when the label is not (yet) known. *)
              let dest_page=
                try
                  let line=MarkerMap.find (Label l) env.user_positions in
                  layout_page line
                with
                  Not_found->(-1)
              in
              RawContent.Intern(l,dest_page,0.,0.);
          | Box.Button(t,n) -> RawContent.Button(t,n)
        in
        (* The link is opened here with an empty extent; it is completed and
           closed when the matching [EndLink] marker is met. *)
        let link={ link_x0=x;link_y0=y;link_x1=x;link_y1=y;link_kind=k;
                   link_order=0;
                   link_closed=false;
                   link_contents=[] }
        in
        draw_boxes x y (Link link::dr) s
      )
    | Marker EndLink::s->(
        (* Printf.fprintf stderr "****EndLink****\n"; *)
        (* Find the innermost unclosed link in the accumulator, give it the
           contents produced since it was opened, and close it. *)
        let rec link_contents u l =
          match l with
          | [] -> assert false
          | (Link h)::_ when not h.link_closed ->
              let u = List.rev u in
              h.link_contents<-u;
              let (_,y0,_,y1)=bounding_box u in
              h.link_y0<-y0;
              h.link_y1<-y1;
              h.link_closed<-true;
              h.link_x1<-x;
              l
          | h::s->link_contents (h::u) s
        in
        let dr'=link_contents [] dr in
        (* List.iter (print_raw) dr'; *)
        (* Printf.fprintf stderr "***************\n";flush stderr; *)
        draw_boxes x y dr' s
      )
    | b::s->
        (* Any other box contributes only its nominal width. *)
        let _,w,_=box_interval b in
        draw_boxes (x+.w) y dr s
  in
  let dr,_ = draw_boxes 0. 0. [] l in dr
(* Approximate a list of raw contents by the Bezier curves of their outlines:
   glyph outlines are scaled/translated to their position; stroked paths are
   widened by half the line width in the four axis directions; dynamic and
   link contents are recursed into.
   Fix: the catch-all case had lost its body ([bezier_of_boxes s]) when its
   TODO comment lost its markers, leaving the match syntactically broken. *)
let rec bezier_of_boxes=function
    []->[]
  | Glyph g::s->
      let out=Fonts.outlines g.glyph in
      (List.map (fun (x,y)->Array.map (fun xx->g.glyph_x+.xx *. g.glyph_size/.1000.) x,
                            Array.map (fun xx->g.glyph_y+.xx *. g.glyph_size/.1000.) y)
         (List.concat out)) @ (bezier_of_boxes s)
  | Path (param,p)::s->
      let l = List.concat (List.map Array.to_list p) in
      if param.strokingColor <> None then (
        (* Account for the stroke width by shifting the curves by half the
           line width in both directions on each axis. *)
        let lw = param.lineWidth /. 2.0 in
        let l1 = List.map (fun (xa, ya) -> Array.map (fun x -> x +. lw) xa, ya) l in
        let l2 = List.map (fun (xa, ya) -> Array.map (fun x -> x -. lw) xa, ya) l in
        let l3 = List.map (fun (xa, ya) -> xa, Array.map (fun x -> x +. lw) ya) l in
        let l4 = List.map (fun (xa, ya) -> xa, Array.map (fun x -> x -. lw) ya) l in
        l1@l2@l3@l4@(bezier_of_boxes s))
      else
        l@(bezier_of_boxes s)
  | Dynamic(d)::s ->
      (bezier_of_boxes (d.dyn_contents ()))@(bezier_of_boxes s)
  | Link(l)::s ->
      (bezier_of_boxes l.link_contents)@(bezier_of_boxes s)
  | _::s->
      (* TODO: more cases?, Affine and States? *)
      bezier_of_boxes s
(* Optically adjust the widths of adjustable boxes/glues in [buf] (first
   [!nbuf] slots) by measuring, for each pair of consecutive solid boxes, the
   optical distance between the right profile of the left box and the left
   profile of the right box, then bisecting for the spacing that reaches the
   nominal target. Boxes whose [drawing_width_fixed] is set are skipped.
   Fix: two lines had lost their comment markers during extraction and broke
   compilation; they are restored as comments below. *)
let adjust_width env buf nbuf =
  (* FIXME *)
  let alpha = env.adjust_optical_alpha in
  let beta = env.adjust_optical_beta in
  let char_space = env.normalLead *. env.adjust_min_space in
  let epsilon = env.adjust_epsilon in
  (* Measuring directions for the right/left optical profiles. *)
  let dir = (-.cos(alpha), sin(alpha)), (-.cos(alpha), -.sin(alpha)) in
  let dir' = (cos(alpha), -.sin(alpha)), (cos(alpha), sin(alpha)) in
  let profile_left = ref [] in
  let buf = !buf in
  let i0 = ref 0 in
  while !i0 < !nbuf do
    match buf.(!i0) with
    | Glue x ->
        profile_left := Distance.translate_profile !profile_left (-.x.drawing_nominal_width);
        incr i0;
    | Drawing _ | GlyphBox _ | Hyphen _ as x0-> (
        (* [adjust] is the adjustable box (and its index) whose width will
           absorb the computed correction. *)
        let adjust = ref (match x0 with
            Drawing x -> if x.drawing_width_fixed then None else Some(x0,!i0)
          | _ -> None)
        in
        let min = ref 0.0 in
        let nominal = ref 0.0 in
        let max = ref 0.0 in
        let left = draw_boxes env [x0] in
        let bezier_left = bezier_of_boxes left in
        let profile_left' = Distance.bezier_profile dir epsilon bezier_left in
        let (x0_l,_,x1_l,_) = bounding_box_kerning left in
        if !Distance.debug then
          Printf.fprintf stderr "Drawing(1): i0 = %d (%d,%d)\n" !i0 (List.length !profile_left) (List.length profile_left');
        profile_left := Distance.translate_profile (Distance.profile_union dir !profile_left profile_left') (x0_l -. x1_l);
        incr i0;
        (* Scan forward over markers and glues until the next solid box. *)
        try while !i0 < !nbuf do
          match buf.(!i0) with
          | Marker AlignmentMark -> incr i0; raise Exit
          | Marker _ -> incr i0
          | Drawing x as b when x.drawing_nominal_width = 0.0 ->
              if !Distance.debug then Printf.fprintf stderr "0 Drawing(2)\n";
              if !adjust = None && not x.drawing_width_fixed then adjust := Some(b,!i0);
              incr i0
          | Glue x as b ->
              min := !min +. x.drawing_min_width;
              max := !max +. x.drawing_max_width;
              nominal := !nominal +. x.drawing_nominal_width;
              profile_left := Distance.translate_profile !profile_left (-.x.drawing_nominal_width);
              if !adjust = None && not x.drawing_width_fixed then adjust := Some(b,!i0);
              incr i0
          | Drawing _ | GlyphBox _ | Hyphen _ as y0 -> (
              let before =
                match y0 with
                  Drawing y when !adjust = None && y.drawing_adjust_before ->
                    adjust := Some(y0, !i0);
                    true
                | _ -> false
              in
              match !adjust with
              | None -> raise Exit
              | Some (b,i) ->
                  let right = draw_boxes env [y0] in
                  let profile_left = !profile_left in
                  let bezier_right = bezier_of_boxes right in
                  let profile_right = Distance.bezier_profile dir' epsilon bezier_right in
                  if !Distance.debug then
                    Printf.fprintf stderr "Drawing(2): i0 = %d (%d,%d)\n" !i0 (List.length profile_left) (List.length profile_right);
                  if profile_left = [] || profile_right = [] then raise Exit;
                  if !Distance.debug then
                    Printf.fprintf stderr "Drawing(2b): i0 = %d\n" !i0;
                  (* Optical distance between the two profiles when the right
                     one is shifted by [space]. *)
                  let d space =
                    let pr = List.map (fun (x,y) -> (x+.space,y)) profile_right in
                    let r = Distance.distance beta dir profile_left pr in
                    r
                  in
                  let (x0_r,_,x1_r,_) = bounding_box_kerning right in
                  let (x0_r',_,_,_) = bounding_box_full right in
                  let nominal' = !nominal +. char_space in
                  let min' = Pervasives.min (Pervasives.max (x0_r -. x1_r) (x0_l -. x1_l)) (!min -. nominal') in
                  let max' = Pervasives.max (2. *. char_space) (!max -. nominal') in
                  let da = d min' in
                  let db = d max' in
                  let target = nominal' in
                  if !Distance.debug then
                    Printf.fprintf stderr "start Adjust: min = %f => %f, max = %f => %f, target = %f\n" min' da max' db nominal';
                  let epsilon = epsilon /. 16. in
                  (* Bisection for the spacing whose optical distance reaches
                     the target, clamped to [min', max']. *)
                  let r =
                    if da > target then min' else
                    if db < target then max' else (
                      let rec fn sa da sb db =
                        let sc = (sa +. sb) /. 2.0 in
                        let dc = d sc in
                        if abs_float (dc -. target) < epsilon || (sb -. sa) < epsilon then sc
                        else if dc < target then fn sc dc sb db
                        else fn sa da sc dc
                      in
                      fn min' da max' db)
                  in
                  (* NOTE(review): the next line was mangled in the original
                     ("let r = r - . x0_r ' + . x0_r - . + . ' in") — it
                     appears to be a disabled variant of the correction and is
                     kept as a comment; verify against upstream. *)
                  (* let r = r -. x0_r' +. x0_r -. ... in *)
                  if !Distance.debug then Printf.fprintf stderr "end Adjust: r = %f nominal = %f" r !nominal;
                  buf.(i) <-
                    (match b with
                     | Drawing x when before -> Drawing { x with
                         drawing_contents =
                           (fun w -> List.map (RawContent.translate (r +. x0_r' -. x0_r) 0.0) (x.drawing_contents w))
                       }
                     | Drawing x -> Drawing { x with
                         drawing_nominal_width = r +. x.drawing_nominal_width;
                         drawing_min_width = r +. x.drawing_min_width;
                         drawing_max_width = r +. x.drawing_max_width;
                       }
                     | Glue x -> Glue { x with
                         drawing_nominal_width = r +. x.drawing_nominal_width;
                         drawing_min_width = r +. x.drawing_min_width;
                         drawing_max_width = r +. x.drawing_max_width;
                       }
                     | _ -> assert false);
                  raise Exit)
          | _ ->
              incr i0;
              raise Exit
        done with Exit -> ())
    | _ -> incr i0
  done
(** The same as boxify, but discards the final environment. *)
(** Same as [boxify], but the resulting environment is discarded: only the
    produced (width-adjusted) boxes are returned. *)
let boxify_scoped env x=
  let buf = ref [||] and nbuf = ref 0 in
  ignore (boxify buf nbuf env x);
  adjust_width env buf nbuf;
  Array.to_list (Array.sub !buf 0 !nbuf)
(** Composes [boxify] and [draw_boxes] *)
(** Composes [boxify] and [draw_boxes]: boxify the contents [x], adjust the
    widths, and render the boxes as raw drawing elements. *)
let draw env x=
  let buf = ref [||] and nbuf = ref 0 in
  let env_after = boxify buf nbuf env x in
  adjust_width env buf nbuf;
  draw_boxes env_after (Array.to_list (Array.sub !buf 0 !nbuf))
(** Wrap the contents [x] in a drawing that is only visible in the page
    states [st]. *)
let states st x=
  [uB (fun env->
     let contents = draw env x in
     let (_,off,_,_) = bounding_box_kerning contents in
     let boxed =
       drawing ~offset:off
         [States { states_contents=contents;
                   states_states=st;
                   states_order=0 }]
     in
     [Drawing boxed])]
(** Like [states], but with one alternative contents per state list; all the
    alternatives share a single drawing box. *)
let altStates l =
  [uB (fun env->
     let drawn = List.map (fun (st,x) -> (st, draw env x)) l in
     (* FIXME: each state should have its own offset. *)
     let off =
       List.fold_left
         (fun acc (_,d) ->
            let (_,o,_,_) = bounding_box_kerning d in
            min acc o)
         0.0 drawn
     in
     let mk (st,d) =
       States { states_contents=d; states_states=st; states_order=0 }
     in
     [Drawing (drawing ~offset:off (List.map mk drawn))])]
(** "flattens" a document tree to an array of paragraphs, a paragraph
being an array of boxes. *)
(** "Flattens" a document tree into the arrays consumed by the line breaker:
    paragraphs (arrays of boxes), their parameters, completion and badness
    functions, figures, and the corresponding tree paths.
    Fix: the comment about the section-page marker (inside the [Paragraph]
    child case) had lost its markers and was duplicated by the extraction,
    breaking compilation; it is restored as a single proper comment. *)
let flatten ?(initial_path=[]) env0 str=
  let paragraphs=ref [] in
  let trees=ref [] in
  let figures=ref IntMap.empty in
  let figure_trees=ref IntMap.empty in
  let fig_param=ref IntMap.empty in
  let param=ref [] in
  let new_page_list=ref [] in
  let new_line_list=ref [] in
  let compl=ref [] in
  let bads=ref [] in
  let states=ref [] in
  let n=ref 0 in
  let buf=ref [||] in
  let nbuf=ref 0 in
  let frees=ref 0 in
  (* Boxify one paragraph and record all its per-paragraph data. *)
  let add_paragraph env tree path p=
    let cont = bB (fun env->(p.par_env env).par_indent) :: p.par_contents in
    nbuf:= !frees;
    let env=boxify buf nbuf env cont in
    adjust_width env buf nbuf;
    paragraphs:=(Array.sub !buf 0 !nbuf)::(!paragraphs);
    trees:=(tree,path)::(!trees);
    compl:=(p.par_completeLine env)::(!compl);
    param:=(p.par_parameters env)::(!param);
    new_page_list:=(env.new_page)::(!new_page_list);
    new_line_list:=(env.new_line env)::(!new_line_list);
    bads:=(p.par_badness env)::(!bads);
    states:=(p.par_states)::(!states);
    incr n;
    frees:=0;
    env
  in
  let rec flatten flushes env0 path tree=
    match tree with
    | Paragraph p -> (
        let env1 = p.par_env env0 in
        let add_node env cur =
          add_paragraph env tree path
            { p with par_paragraph = List.length !paragraphs;
                     par_contents=List.rev cur }
        in
        (* Resolve C/Scoped/Env contents and split the paragraph at every
           embedded tree node. *)
        let rec collect_nodes env1 l cur =
          match l with
          | []-> (env1, cur)
          | C(f)::s-> collect_nodes env1 (f env1@s) cur
          | Scoped(f,s')::s->
              let env2 = f env1 in
              let (_, res) = collect_nodes env2 s' [] in
              collect_nodes env1 s (Scoped((fun _ -> env2),List.rev res)::cur)
          | Env f::s ->
              let env1 = f env1 in
              collect_nodes env1 s (Env (fun _ -> env1)::cur)
          | N n::s->
              let env1 = add_node env1 cur in
              let env1 = flatten flushes env1 path n in
              collect_nodes env1 s []
          | (T _ | B _ as h)::s-> collect_nodes env1 s (h::cur)
        in
        let (env1, cur) = collect_nodes env1 p.par_contents [] in
        let env1 = add_node env1 cur in
        p.par_post_env env0 env1
      )
    | FigureDef f -> (
        let env1=f.fig_env env0 in
        let n=IntMap.cardinal !figures in
        fig_param:=IntMap.add n (f.fig_parameters env1) !fig_param;
        figures:=IntMap.add n (f.fig_contents env1) !figures;
        figure_trees:=IntMap.add n (tree,path) !figure_trees;
        append buf frees (BeginFigure n);
        f.fig_post_env env0 env1
      )
    | Node s-> (
        let env1 = s.node_env env0 in
        (* Reset the counters that are deeper than the current structure
           level. *)
        let env1=
          let level=
            try
              List.length (snd (StrMap.find "_structure" env1.counters))
            with Not_found->0
          in
          { env1 with counters=StrMap.map (fun (lvl,l)->if lvl>level then lvl,[] else lvl,l)
                        env1.counters }
        in
        s.node_paragraph <- List.length !paragraphs;
        s.boxified_displayname <- draw_boxes env1 (boxify_scoped env1 s.displayname);
        let flushes'=ref [] in
        let flat_children k a (is_first, env1)=match a with
            Paragraph p->(
              let env2=flatten flushes' env1 ((k,tree)::path)
                  (Paragraph { p with par_contents=
                                 (if is_first then (
                                   (* Set up a marker to be able to obtain
                                      the section page. It is added to the
                                      MarkerMap in Break. *)
                                   let name=String.concat "_" ("_"::List.map string_of_int ((List.map fst path)@initial_path)) in
                                   [Env (fun env->
                                        let w=try let (_,_,w)=StrMap.find name (names env) in w with
                                            Not_found -> uselessLine in
                                        { env with names=StrMap.add name (env.counters, "_", w)
                                                     (names env) });
                                    bB (fun _->[Marker (Label name)])
                                   ]
                                 ) else [])@ p.par_contents
                             }
                  ) in
              false, env2
            )
          | FigureDef _ as h->(
              let env2=flatten flushes' env1 ((k,tree)::path) h in
              let num=try
                  match StrMap.find "_figure" env2.counters with
                    _,h::_->h
                  | _->0
                with
                  Not_found ->0
              in
              flushes':=FlushFigure num::(!flushes');
              is_first,env2
            )
          | Node _ as tr->(
              (is_first, flatten flushes' env1 ((k,tree)::path) tr)
            )
        in
        let _,env2=IntMap.fold flat_children s.children (true,env1) in
        (* Flush the figures collected in this node at the end of its last
           paragraph. *)
        paragraphs:=(match !paragraphs with
                       []->[]
                     | h::s->Array.append h (Array.of_list !flushes')::s);
        s.node_post_env env0 env2
      )
  in
  let env1=flatten (ref []) env0 [] str in
  let params=Array.init
      (IntMap.cardinal !figures)
      (fun i->IntMap.find i !fig_param)
  in
  (env1, params,
   Array.of_list (match List.rev !param with []->[parameters env1] | l->l),
   Array.of_list (match List.rev !new_page_list with []->[env1.new_page] | l->l),
   Array.of_list (match List.rev !new_line_list with []->[env1.new_line env1] | l->l),
   Array.of_list (List.rev !compl),
   Array.of_list (List.rev !bads),
   Array.of_list (List.rev !paragraphs),
   Array.of_list (List.rev !trees),
   Array.of_list (List.map snd (IntMap.bindings !figures)),
   Array.of_list (List.map snd (IntMap.bindings !figure_trees)),
   Array.of_list (List.rev !states))
(* Build the driver's table-of-contents structure from the document tree,
   keeping only the child nodes tagged "intoc". [positions] gives, per
   paragraph index, the (page, x, y) position of the node's first
   paragraph. *)
let rec make_struct positions = function
  | Node s ->
      let rec make = function
        | [] -> []
        | (_,Node u)::s when List.mem_assoc "intoc" u.node_tags ->
            (make_struct positions (Node u))::(make s)
        | _ :: s->make s
      in
      let a = Array.of_list (make (IntMap.bindings s.children)) in
      (* Fall back to page 0 / origin when the paragraph index is out of
         range. *)
      let (p,x,y) =
        let lenpos = Array.length positions in
        if s.node_paragraph >= 0 && s.node_paragraph < lenpos then
          positions.(s.node_paragraph)
        else (0,0.,0.)
      in
      { Driver.name = s.name
      ; Driver.metadata = []
      ; Driver.raw_name = s.boxified_displayname
      ; Driver.tags = s.node_tags
      ; Driver.page = p
      ; Driver.struct_x = x
      ; Driver.struct_y = y
      ; Driver.children = a }
  | _ -> Driver.empty_structure
(** Adds a tag to the given structure. *)
(** Adds tags to the given structure: on a [Node] they are prepended to the
    existing tags; any other tree is first wrapped in a fresh node.
    Fix: the original read [node_tags=_tags], an unbound identifier left by
    the extraction; restored as prepending to the existing tags — verify
    against upstream. *)
let tag str tags=
  match str with
      Node n->Node { n with node_tags=tags@n.node_tags }
    | _->Node { empty with node_tags=tags; children=IntMap.singleton 0 str }
(** Label updating after optimization. *)
(** Label updating after optimization: merge the new [user] positions into
    the environment and re-resolve every registered name (figures via [figs],
    labels via the merged positions). Returns the updated environment and
    [true] when a position changed or could not be found, i.e. when another
    typesetting pass is needed.
    Fix: the line binding [needs_reboot] was mangled by the extraction
    (only "( fil user<>fil env.user_positions ) in ;" remained); it is
    restored as a [ref false] initialization — verify against upstream. *)
let update_names env figs user=
  let user=MarkerMap.fold (MarkerMap.add) user env.user_positions in
  (* Whether a new pass is needed (a name moved or vanished). *)
  let needs_reboot=ref false in
  let env'={ env with user_positions=user;
             names=
               StrMap.fold (fun k (a,b,c) m->
                   try
                     let pos=
                       if b="_figure" then
                         (match StrMap.find "_figure" a with
                            _,[]->(Printf.fprintf stderr "figure not found (1):%S\n" k;
                                   raise Not_found)
                          | _,(h::_)->(
                              match IntMap.find h figs with
                                Break.Placed l->l
                              | _->raise Not_found
                            )
                         )
                       else
                         MarkerMap.find (Label k) user
                     in
                     if not (lines_eq pos c) && b<>"_" then (
                       (* Debug traces for moved labels were disabled here. *)
                     );
                     needs_reboot:= !needs_reboot || (not (lines_eq pos c));
                     StrMap.add k (a,b,pos) m
                   with Not_found -> (
                     (* Unknown position: keep the old entry and reboot. *)
                     needs_reboot:=true; m)
                 ) (names env) (names env)
           }
  in
  flush stderr;
  env',!needs_reboot
(** Resets all the counters, preserving their levels. *)
(** Reset every counter to the empty state while preserving its level. *)
let reset_counters env=
  let clear (lvl,_) = (lvl,[]) in
  { env with counters = StrMap.map clear env.counters }
| null | https://raw.githubusercontent.com/patoline/patoline/3dcd41fdff64895d795d4a78baa27d572b161081/typography/Document.ml | ocaml | doit etre de taille 8
* Environments. These are typically folded on document trees, and
control many different things about the fonts, counters, or
labels.
* {2 Document content}
* Main type used to hold document contents.
* A contents list depending on the environment. This may be used to
typeset the state of a counter for example.
* Simple text.
* Environment modification function. It can be used to register a name
or modify the state of a counter for instance.
* A scoped environment transformation applied on a (small) list of
contents.
* A document tree.
* Internal node of the document tree (e.g. section, chapter...).
Extensible array of childrens :
Environment modification function applied when entering the node :
Environment modification function applied when leaving the node :
Page states in which the contents is visible.
* Type of a document tree.
* Empty node (with no child tree).
* Build a node with a single child tree.
* Build a zipper from a tree. The resulting zipper points to the root
of the tree.
* Function that takes a tree zipper [(t,cxt)] pointing to some node
[t] and returns a zipper pointing to the father node of [t]. If this
function is called on a zipper that points to the root of the tree, a
new empty node is created to have [t] as its only child.
* Move the zipper to the root of the tree
* Retrieve the complete tree from a zipper
* Move the zipper to point to the child of the pointed node with the higher
index. If the pointed tree is not a node the zipper is left unchanged.
* Take a zipper [zip] and a tree [c] and adds [c] as the last child of the
pointed node. If the pointed subtree is not a node, a new node is
created to hold [t] and [c]. The returned zipper points to [c].
* Take a zipper pointing to a node and move it down its i-th child. If the
zipper does not point to a node, [Invalid_argument] is raised. If the i-th
child does not exists, it is created as a new empty node.
* Take a tree zipper and an path represented as a list of integers and move
the zipper down the path (i.e. calling child on the successive indices.
* Module type of a document format.
* Module type to be used as a document wrapper. The document structure is
stored in its zipper form in a reference. Functions are provided below
to edit the document tree.
| C f::s->(
fill_buf (f defaultEnv);
fill_buf s
)
* Creates a new page without any margin
*/*
* Finds the last node satisfying a given predicate in a document tree.
* Is the node a paragraph ?
* Is the node an internal node ?
* {3 Environment transformations}
let do_begin_Italic () = ()
let do_end_Italic () = ()
let defaultEnv = envItalic true defaultEnv
end
For full paragraph
for inline text
**************************************************************
Page pas assez remplie
Cesures
Badness de couper ici
Différence de compression entre deux lignes consécutives
**************************************************************
Add a new paragraph (with given parameters) below the current node.
* Adds a new node, just below the last one.
* {3 References, labels and links}
* {3 Images}
*/*
*/*
* Makes a glue from the unicode character code given in the argument.
* Converts a [string] to a list of glyphs, according to the environment.
*/*
*/*
The following (commented) case is a mistake and causes non-transparent behavior, for instance when defining "tT"s in a global variable.
| T (t,cache)::T (t',_)::s->
boxify keep_cache env (T (t^t',match !cache with Some _->cache | _->cache)::s)
* Typesets boxes on a single line, then converts them to a list of basic
drawing elements: [RawContent.raw].
Printf.fprintf stderr "****EndLink****\n";
Printf.fprintf stderr "***************\n";flush stderr;
* The same as boxify, but discards the final environment.
* Composes [boxify] and [draw_boxes]
FIXME : each state should have its own offset !!!
* "flattens" a document tree to an array of paragraphs, a paragraph
being an array of boxes.
* Adds a tag to the given structure.
* Label updating after optimization.
Printf.fprintf stderr "reboot : position of %S (%S) changed\n" k b;
print_line pos;
print_line c;
Printf.fprintf stderr "reboot : position of %S (%S) not found\n" k b;
* Resets all the counters, preserving their levels. |
(*
  Copyright Florian Hatat, Tom Hirschowitz, Pierre Hyvernat,
  Pierre-Etienne Meunier, Christophe Raffalli, Guillaume Theyssier 2012.

  This file is part of Patoline.

  Patoline is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  Patoline is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with Patoline. If not, see <http://www.gnu.org/licenses/>.
*)
(** High-level representation of documents

    The module defines the {!type:tree} type, which describes whole
    documents. This tree is typically produced by running the OCaml
    executable obtained after parsing a [.txp] file, but can be produced by
    any other mean. It is the main input to the Patoline Typography library
    in order to produce the final document.

    Values of type {!type:tree} are meant to be transformed by some format's
    output routine.

    We also provide a tree zipper interface to ease construction of a
    {!type:tree} when reading linearly an input file. *)
open Patoraw
open Unicodelib
open Patutil
open Patfonts
open Extra
open Fonts
open FTypes
open RawContent
open Driver
open Box
(** {2 Font, substitutions, positioning} *)
(* Font alternatives available inside a family. *)
type fontAlternative = Regular | Bold | Caps | Demi

(* Lazily wrap a font loader into a family member with identity glyph
   substitution and positioning. *)
let simpleFamilyMember:(unit->font)->(font*(glyph_id list -> glyph_id list)*(glyph_ids list -> glyph_ids list)) Lazy.t =
  fun a->Lazy.from_fun (fun ()->(a (),(fun x->x),(fun x->x)))
(** [make_ligature l gl x] replaces every occurrence of the glyph-index
    sequence [l] inside the glyph list [x] by the single ligature glyph
    [gl]; glyphs outside an occurrence are kept unchanged. *)
let make_ligature l gl x=
  (* [consume pat gs] returns [Some rest] when [gs] starts with the index
     sequence [pat], [None] otherwise. *)
  let rec consume pat gs =
    match pat, gs with
    | [], [] -> Some []
    | _::_, [] -> None
    | [], _ -> Some gs
    | p::ps, g::gs' when p = g.glyph_index -> consume ps gs'
    | _::_, _::_ -> None
  in
  let rec scan gs =
    match gs with
    | [] -> []
    | g::rest ->
        (match consume l gs with
         | None -> g :: scan rest
         | Some after -> gl :: scan after)
  in
  scan x
(* Italic is second *)
(* One family entry: an alternative (Regular, Bold, ...) together with its
   upright and italic variants, each a lazily-loaded font plus its glyph
   substitution and positioning functions. *)
type fontFamily =
  fontAlternative *
  ((font*(glyph_id list -> glyph_id list)*(glyph_ids list -> glyph_ids list)) Lazy.t *
   (font*(glyph_id list -> glyph_id list)*(glyph_ids list -> glyph_ids list)) Lazy.t)
(* Line breaker instantiated on lines, with a total lexicographic order over
   all the line fields (paragraph, extent, hyphenation, figure, height). *)
module TS = Break.Make(
  struct
    type t = line
    let compare a b =
      if a.paragraph < b.paragraph then -1 else
      if a.paragraph > b.paragraph then 1 else
      if a.lineStart < b.lineStart then -1 else
      if a.lineStart > b.lineStart then 1 else
      if a.lineEnd < b.lineEnd then -1 else
      if a.lineEnd > b.lineEnd then 1 else
      if a.hyphenStart < b.hyphenStart then -1 else
      if a.hyphenStart > b.hyphenStart then 1 else
      if a.hyphenEnd < b.hyphenEnd then -1 else
      if a.hyphenEnd > b.hyphenEnd then 1 else
      if a.lastFigure < b.lastFigure then -1 else
      if a.lastFigure > b.lastFigure then 1 else
      if a.isFigure < b.isFigure then -1 else
      if a.isFigure > b.isFigure then 1 else
      if a.height < b.height then -1 else
      if a.height > b.height then 1 else 0
    let hash a=Hashtbl.hash a
  end)
(** {2 Mathematical formulae} *)
(* Parameters driving the layout of mathematical formulae, and the TeX-like
   display styles.
   Fix: a comment inside the record had lost its markers during extraction
   and broke the type definition; it is restored as a comment below. *)
module Mathematical=struct
  type env={
    mathsFont:Fonts.font Lazy.t;
    mathsSize:float;
    mathsSubst:glyph_id list -> glyph_id list;
    numerator_spacing:float;
    denominator_spacing:float;
    sub1:float;
    sub2:float;
    sup1:float;
    sup2:float;
    sup3:float;
    sub_drop:float;
    sup_drop:float;
    default_rule_thickness:float;
    subscript_distance:float;
    superscript_distance:float;
    limit_subscript_distance:float;
    limit_superscript_distance:float;
    invisible_binary_factor:float;
    open_dist:float;
    close_dist:float;
    left_op_dist:float;
    right_op_dist:float;
    sqrt_dist:float;
    kerning:bool;
    (* None means precise; Some x means imprecise, but subdivide the Bezier
       curve until the thickness of the polygon is less than x.
       NOTE(review): this comment lost its markers in the original and the
       option-typed field it described may have been dropped by the
       extraction — verify against upstream. *)
    priorities:float array;
    priority_unit:float;
    delimiter_up_tolerance:float;
    delimiter_down_tolerance:float;
    op_tolerance:float;
    op_limits_tolerance:float;
    punctuation_factor:float;
    optical_alpha:float;
    optical_beta:float;
    precise_kerning:float;
  }
  and style=
    Display
  | Display'
  | Text
  | Text'
  | Script
  | Script'
  | ScriptScript
  | ScriptScript'
end
(** {2 Environments} *)
(* Typesetting environment: fonts, sizes, page geometry, counters and
   positioning hooks, folded over the document tree.
   Fix: several comments inside the record had lost their markers during
   extraction and broke the type definition; they are restored as comments.
   NOTE(review): the fields those comments documented (e.g. a mono-size
   ratio, the counters and names maps) appear to have been dropped by the
   same extraction — verify against upstream. *)
type environment={
  fontFamily:fontFamily list;
  fontMonoFamily:fontFamily list;
  (* size adjustment of the two previous families *)
  fontItalic:bool;
  fontAlternative:fontAlternative;
  fontFeatures:string list;
  fontColor:Color.color;
  font:font;
  mathsEnvironment:Mathematical.environment;
  mathStyle:Mathematical.style;
  size:float;
  lead:float;
  footnote_y:float;
  normalMeasure:float;
  normalLead:float;
  normalLeftMargin:float;
  normalPageFormat:float*float;
  par_indent:box list;
  hyphenate:string->(string*string) array;
  substitutions:glyph_id list -> glyph_id list;
  positioning:glyph_ids list -> glyph_ids list;
  (* ..., state. *)
  last_changed_counter:string;
  (* Levels of all the counters at this point: type, position. *)
  fixable:bool ref;
  new_page:Box.frame_zipper->Box.frame_zipper;
  new_line:environment->line->parameters->
           line->parameters->Box.frame_zipper->float->float;
  user_positions:line MarkerMap.t;
  show_boxes:bool;
  show_frames:bool;
  adjust_optical_alpha:float;
  adjust_optical_beta:float;
  adjust_epsilon:float;
  adjust_min_space:float;
  (* Not in the maths environment, because it is outside TextStyle. *)
  stdGlue:float*float*float;
}
(* Set to [true] whenever a content thunk reads the environment; [boxify]
   uses it to decide whether a result may be cached. *)
let env_accessed=ref false

(* Accessors that record the environment read in [env_accessed]. *)
let names env=
  env_accessed:=true;
  env.names
let user_positions env=
  env_accessed:=true;
  env.user_positions
let displayname n=
  env_accessed:=true;
  n.raw_name
(** Main type used to hold document contents.
    Fix: the doc comments of [B], [paragraph] and [figuredef] had lost their
    markers during extraction (one was duplicated) and broke the type
    definitions; they are restored as proper comments. *)
type content =
  | B of (environment -> box list) * box list option ref
  (** List of boxes depending on an environment. The second parameter is a
      cache used when compilation is iterated to resolve names. *)
  | C of (environment -> content list)
  | T of string * (box list IntMap.t option) ref
  | Env of (environment -> environment)
  | Scoped of (environment -> environment) * (content list)
  | N of tree

(** First type of leaves in a document: paragraphs. *)
and paragraph =
  { par_contents : content list
  ; par_env : environment -> environment
  ; par_post_env : environment -> environment -> environment
  ; par_parameters : environment -> Box.box array array -> Box.drawingBox array
                     -> parameters -> Break.figurePosition IntMap.t
                     -> line MarkerMap.t -> line -> line -> parameters
  ; par_badness : environment -> Box.box array array -> Box.drawingBox array
                  -> Break.figurePosition IntMap.t -> Box.line
                  -> Box.box array -> int -> Box.parameters -> float
                  -> Box.line -> Box.box array -> int
                  -> Box.parameters -> float -> float
  ; par_completeLine : environment -> Box.box array array
                       -> Box.drawingBox array
                       -> Break.figurePosition IntMap.t -> line MarkerMap.t
                       -> line -> bool -> line list
  ; par_states : int list
  ; par_paragraph : int
  }

(** Second type of leaves in a document: figures. *)
and figuredef =
  { fig_contents : environment -> Box.drawingBox
  ; fig_env : environment -> environment
  ; fig_post_env : environment -> environment -> environment
  ; fig_parameters : environment -> Box.box array array -> Box.drawingBox array
                     -> parameters -> Break.figurePosition IntMap.t
                     -> line MarkerMap.t -> line -> line -> parameters
  }

(** Internal node of the document tree (e.g. section, chapter...). *)
and node =
  { name : string
  ; displayname : content list
  ; mutable boxified_displayname : raw list
  ; children : tree IntMap.t
  ; node_tags : (string * string) list
  ; node_env : environment -> environment
  ; node_post_env : environment -> environment -> environment
  ; node_states : int list
  ; mutable node_paragraph : int }

(** Type of a document tree. *)
and tree =
  | Paragraph of paragraph
  | FigureDef of figuredef
  | Node of node
(* The completely empty document node, used as the default. *)
let empty : node =
{ name = ""
; node_tags = []
; displayname = []
; boxified_displayname = []
; children = IntMap.empty
; node_env = (fun x->x)
(* On leaving the node, keep only counters, names and user positions
   from the inner environment. *)
; node_post_env =
(fun x y -> { x with counters = y.counters ; names = names y
; user_positions = user_positions y })
; node_states = []
; node_paragraph = 0 }
(* A node whose only child (at index 0) is the given tree. *)
let singleton : tree -> node = fun t ->
{ empty with children = IntMap.singleton 0 t }
(** The main datatype is a zipper over a document tree. It consists in a
    couple whose first component is a tree. The second component represents
    the context identifying a position in the tree.

    The tree represented by the zipper [(t, [(p1,t1), ... , (pn,tn)])] is
    built by:
    + appending the tree [t] at position [p1] in [t1],
    + appending the resulting tree at position [p2] in [t2],
    + ...
    + appending the resulting tree at position [pn] in [tn]. *)
(* Instantiation of the generic zipper functor over document trees. *)
module TreeData = struct
type nonrec node = node
type nonrec tree = tree
let tree_of_node node = Node(node)
(* Only [Node] trees carry children; anything else is a usage error. *)
let node_of_tree = function
| Node(node) -> node
| _ -> invalid_arg "Document.TreeData.node_of_tree"
let get_child node i = IntMap.find i node.children
let set_child node i tree =
{node with children = IntMap.add i tree node.children}
let remove_child node i =
{node with children = IntMap.remove i node.children}
let has_child node i =
IntMap.mem i node.children
let min_index node =
fst (IntMap.min_binding node.children)
let max_index node =
fst (IntMap.max_binding node.children)
end
module DocZipper = Zipper.Make(TreeData)
type tree_zipper = DocZipper.zipper
(* NOTE(review): despite its name, this converts a zipper back to a tree. *)
let zipper_of_tree = DocZipper.zipper_to_tree
(** Build a zipper whose single node is {!empty}. *)
let empty_zipper = DocZipper.empty empty
(* Move the zipper to the parent of the pointed node. *)
let up = DocZipper.up
[@@ocaml.deprecated "Use DocZipper.up instead"]
(** Function that applies {!up} n times on a zipper, effectively moving the
    zipper to the n-th ancestor of the currently pointed node. *)
let up_n = DocZipper.up_n
(* Move the zipper all the way up to the root of the tree. *)
let top = DocZipper.top
let tree_of_zipper = DocZipper.zipper_to_tree
(* Move to the last child of the pointed node, or stay put if none exists. *)
let lastChild zip =
try DocZipper.down_last zip
with Invalid_argument(_) -> zip
(* Append [c] as a new last child of the pointed node and move the zipper
   onto [c]; a non-[Node] root is first wrapped in a singleton node. *)
let rec newChildAfter : tree_zipper -> tree -> tree_zipper =
let next_key t = try fst (IntMap.max_binding t) + 1 with Not_found -> 0 in
fun (t,cxt) c ->
match (t, cxt) with
| (Node x, _ ) -> (c, (next_key x.children,x)::cxt)
| (_ , []) -> (c, [(1, singleton t)])
| _ -> newChildAfter (up (t,cxt)) c
(** Same as {!newChildAfter} but adds the tree as the first child. *)
(* Prepend [c] as a new first child of the pointed node and move the zipper
   onto [c]; a non-[Node] root is first wrapped in a singleton node. *)
let rec newChildBefore : tree_zipper -> tree -> tree_zipper =
let prev_key t = try fst (IntMap.min_binding t) - 1 with Not_found -> 0 in
fun (t,cxt) c ->
match (t, cxt) with
| (Node x, _) -> (c, (prev_key x.children,x)::cxt)
| (_ , []) -> (c, [(1, singleton t)])
| _ -> newChildBefore (up (t,cxt)) c
(* Move the zipper to child [i] of the pointed node (an empty node if that
   child does not exist). Fails on non-[Node] trees. *)
let child : tree_zipper -> int -> tree_zipper =
fun (t,cxt) i ->
match t with
| Node n -> let t =
try IntMap.find i n.children with Not_found -> Node empty
in (t, (i,n)::cxt)
| _ -> raise (Invalid_argument "Typography.child")
(* Follow a path of child indices from the current position. *)
let rec follow : tree_zipper -> int list -> tree_zipper =
fun z -> function
| [] -> z
| n :: ns -> follow (child z n) ns
(* Interface that document formats (article, book, ...) must provide. *)
module type Format =
sig
val defaultEnv : environment
val postprocess_tree : tree -> tree
val title : (tree * (IntMap.key * tree) list) ref -> ?label:'a
-> ?extra_tags:(string * string) list -> content list -> bool
val parameters : environment -> box array array -> Box.drawingBox array
-> parameters -> Break.figurePosition IntMap.t -> line MarkerMap.t
-> line -> parameters
end
(* The mutable document structure threaded through compilation. *)
module type DocumentStructure =
sig
val structure : tree_zipper ref
end
(* Tags of a node; any other tree kind has none. *)
let doc_tags n=match n with
Node n->n.node_tags
| _->[]
(* Hooks applied to the initial environment, registered by formats/packages. *)
let init_env_hook = ref ([] : (environment -> environment) list)
let add_env_hook f = init_env_hook := f::!init_env_hook
(* Smart constructors for contents: [bB]/[tT] build cached box/text leaves;
   [uB]/[uT] additionally mark the environment as accessed, disabling the
   cache so the leaf is re-evaluated on every pass. *)
let bB f = B(f,ref None)
let uB f = C(fun _->env_accessed:=true;[bB f])
let tT f = T(f,ref None)
let uT f = C(fun _->env_accessed:=true;[tT f])
(* Concatenate the text of all [T] leaves of [l], separating successive
   words with a single space; every other content kind is ignored. *)
let string_of_contents l =
let buf = Buffer.create 1000 in
let add_word w =
if Buffer.length buf > 0 then Buffer.add_string buf " ";
Buffer.add_string buf w
in
List.iter (function T (str,_) -> add_word str | _ -> ()) l;
Buffer.contents buf
(* Wrap a raw-content producer into a content: the raw contents are measured
   with their bounding box and embedded in a fixed-width drawing box. *)
let raw : (environment -> RawContent.raw list) -> content = fun f ->
let contents _ =
let dr env =
let raw = f env in
let (x0,y0,x1,y1) = RawContent.bounding_box raw in
let w = x1 -. x0 in
let open Box in
{ drawing_min_width = w
; drawing_nominal_width = w
; drawing_max_width = w
; drawing_width_fixed = true
; drawing_adjust_before = false
; drawing_y0 = y0
; drawing_y1 = y1
; drawing_badness = (fun _ -> 0.0)
; drawing_break_badness = infinity
; drawing_states = []
; drawing_contents = (fun _ -> raw) }
in
[bB (fun env -> [Drawing (dr env)])]
in
C contents
(* Plain accessors (underscore-prefixed to avoid clashes). *)
let _names env=
env.names
let _user_positions env=
env.user_positions
(* Increment the innermost value of counter [name], creating the counter at
   [level] with value [0] if absent; records it as last changed. *)
let incr_counter ?(level= -1) name env=
{ env with
last_changed_counter=name;
counters=
StrMap.add name (try let a,b=StrMap.find name env.counters in
match b with
h::s -> (a,(h+1)::s)
| _->a,[0]
with
Not_found -> level, [0]
) env.counters }
(* Drop the innermost value of counter [name].
   Raises [Not_found] if the counter does not exist. *)
let pop_counter name env=
{ env with
last_changed_counter=name;
counters=
StrMap.add name (let a,b=StrMap.find name env.counters in (a, List.drop 1 b)) env.counters }
(* Push a new innermost value [0] onto counter [name].
   Raises [Not_found] if the counter does not exist. *)
let push_counter name env=
{ env with
last_changed_counter=name;
counters=
StrMap.add name (let a,b=StrMap.find name env.counters in (a,0::b)) env.counters }
(* Same behaviour as [doc_tags]. *)
let tags=function
Node n->n.node_tags
| _->[]
(** Creates a new page, using 1/6th of the given lengths for margins.
    A page is implemented as two nested frames: the outer frame has the
    actual size of the whole page, while the inner frame size is the
    papersize minus margins.
    This function returns the inner frame. *)
let default_new_page pageFormat zip =
let ((page, _) as zip)=Box.make_page pageFormat (frame_top zip) in
let w = page.frame_x1 -. page.frame_x0
and h = page.frame_y1 -. page.frame_y0 in
(* Margins are one sixth of each dimension, on every side. *)
let x0=(page.frame_x0+.1.*.w/.6.) in
let y0=(page.frame_y0+.1.*.h/.6.) in
let x1=(page.frame_x1-.1.*.w/.6.) in
let y1=(page.frame_y1-.1.*.h/.6.) in
frame x0 y0 x1 y1 zip
(* Like [default_new_page] but with no margins at all. *)
let raw_new_page pageFormat zip =
let (page, _) as zip = Box.make_page pageFormat (frame_top zip) in
frame page.frame_x0 page.frame_y0 page.frame_x1 page.frame_y1 zip
(* Apply a list of environment transformers, leftmost first. *)
let envApp l env = List.fold_left (|>) env l
(* Apply [f] to every paragraph of a tree; figures and other leaves are
   left untouched. *)
let rec map_paragraphs f t =
match t with
| Paragraph p -> Paragraph (f p)
| Node n -> Node { n with children = IntMap.map (map_paragraphs f) n.children }
| other -> other
(* Internal control-flow exception used by [find_last]. *)
exception Found
(* Path (list of child indices) to the last subtree satisfying [f], searching
   children in decreasing index order. Raises [Not_found] when none match. *)
let find_last f tr=
let result=ref None in
let rec find_last path tr=match tr with
| _ when f tr->(
result:=Some (List.rev path);
raise Found
)
| Node n->(
let k1,_=IntMap.max_binding n.children in
let k0,_=IntMap.min_binding n.children in
for i=k1 downto k0 do
try
find_last (i::path) (IntMap.find i n.children);
with
Not_found -> ()
done;
)
| _->raise Not_found
in
try
find_last [] tr;
(* No descendant raised [Found]: nothing matched. *)
raise Not_found
with
Found->(
match !result with
None->raise Not_found
| Some a->a
)
(* Tree-kind predicates. *)
let is_paragraph = function Paragraph _ -> true | _ -> false
let is_node = function Node _ -> true | _ -> false
(* Zipper on the last (deepest, rightmost-first) subtree satisfying [f];
   children are explored in decreasing index order.
   Raises [Not_found] when nothing matches. *)
let rec prev f (t,cxt) =
if f t then (t,cxt) else (
match t with
Node nt->
let bin=List.rev (IntMap.bindings nt.children) in
let rec prevs=function
[]->raise Not_found
| (h,ht)::s->
try
prev f (ht, (h,t)::cxt)
with
Not_found->prevs s
in
prevs bin
| _->raise Not_found
)
(* Move the zipper reference one level up, warning on stderr at the root. *)
let go_up str=
(if snd !str=[] then Printf.fprintf stderr "Warning : go_up\n");
str:=(up !str)
let n_go_up n str =
for _ = 1 to n do go_up str done
(* Compose [fenv] after the environment transformer of the pointed tree. *)
let change_env t fenv=match t with
(Node n,l)->(Node { n with node_env=fun x->fenv (n.node_env x) }, l)
| (Paragraph n,l)->(Paragraph { n with par_env=fun x->fenv (n.par_env x) }, l)
| (FigureDef f, l)->
FigureDef {f with fig_env=fun x->fenv (f.fig_env x) }, l
exception Not_found_in_family
(* Look up alternative [alt] in family [fam] and force the italic or regular
   variant. Raises [Not_found_in_family] if [alt] is absent. *)
let selectFont fam alt it =
try
let r,i = List.assoc alt fam in
Lazy.force (if it then i else r)
with Not_found -> raise Not_found_in_family
(* Install [font] in the environment, layering the given substitution and
   positioning functions on top of the font's own features. *)
let updateFont env font subst pos=
let feat=Fonts.select_features font env.fontFeatures in
{ env with
font=font;
substitutions=(fun glyphs -> Fonts.apply_features font feat (subst glyphs));
positioning=(fun x->pos (positioning font x)) }
let change_font f env = updateFont env f (fun x->x) (fun x->x)
(* Change the font within a scope; ignores the family — use with care. *)
(* Typeset [t] with font [f]. *)
let font f t=
[Scoped (change_font f, t)]
(* Add a list of font features; see Fonts.FTypes for what exists. *)
(* Append [features] to the environment's active font features and rebuild
   the substitution pipeline accordingly.
   NOTE: the original line was garbled ([fontFeatures=;] and an empty
   argument to [Fonts.select_features]); restored to pass the combined
   feature list, matching [updateFont]'s use of [env.fontFeatures]. *)
let add_features features env=
let feat=Fonts.select_features env.font (features@env.fontFeatures) in
{ env with
fontFeatures=features@env.fontFeatures;
substitutions=(fun glyphs -> Fonts.apply_features env.font feat
(env.substitutions glyphs));
}
(* Switch italics on/off, selecting the matching face of the current family. *)
let envItalic b env =
let font, subst, pos= selectFont env.fontFamily env.fontAlternative b in
let env = { env with fontItalic = b } in
updateFont env font subst pos
let italic t = [ Scoped(envItalic true, t) ]
(* Leftover of a removed [Italic] module (previously also aliased as
   [Env_Italic]); kept here as a comment for history. *)
(* Force upright text within a scope. *)
let notItalic t =
[Scoped (envItalic false, t)]
(* Toggle the italics state of the current environment. *)
let toggleItalic t =
[Scoped ((fun env -> envItalic (not env.fontItalic) env), t)]
(* Switch to font alternative [alt] (Regular, Bold, Caps, ...), optionally
   replacing the current feature list. *)
let envAlternative ?(features:'a option) alt env =
let features = match features with
None -> env.fontFeatures
| Some f -> f
in
let font,subs,pos = selectFont env.fontFamily alt env.fontItalic in
let env = { env with fontAlternative = alt } in
add_features features (updateFont env font subs pos)
let alternative ?(features:'a option) alt t =
[Scoped ((fun env -> envAlternative ?features alt env), t)]
(* Ratio of the heights of the glyph 'o' in the Regular faces of two
   families, used to visually match sizes across family changes. *)
let font_size_ratio font1 font2 =
let x_h f =
let f,_,_ = Lazy.force (fst (List.assoc Regular f)) in
let x=Fonts.loadGlyph f
({empty_glyph with glyph_index=Fonts.glyph_of_char f 'o'}) in
Fonts.glyph_y1 x -. Fonts.glyph_y0 x
in
x_h font1 /. x_h font2
(* Switch family, rescaling the size to keep the apparent (x-height) size. *)
let envFamily fam env =
let font,subs,pos = selectFont fam env.fontAlternative env.fontItalic in
let env = { env with fontFamily = fam; size = font_size_ratio env.fontFamily fam *. env.size } in
updateFont env font subs pos
let family fam t =
[Scoped ((fun env -> envFamily fam env), t)]
(* Set the monospaced family used for verbatim text, remembering its size
   ratio relative to the current family. *)
let envMonoFamily fam env =
{ env with
fontMonoFamily = fam;
fontMonoRatio=font_size_ratio env.fontFamily fam }
let monoFamily fam t =
[Scoped ((fun env -> envMonoFamily fam env), t)]
(* Set the font size, keeping the leading proportional to the old ratio. *)
let envSize fsize env=
{ env with
size=fsize;
lead=env.lead*.fsize/.env.size }
(* Change the size within a scope. *)
let size fsize t=
[Scoped (envSize fsize, t)]
(* Multiply the font size by [alpha]; the leading is left unchanged. *)
let envScale alpha env =
{ env with size = env.size *. alpha }
(* Change the size (by a scale factor) within a scope. *)
let scale alpha t=
[Scoped (envScale alpha, t)]
(* Multiply the leading only. *)
let envScaleLead alpha env=
{ env with
lead=env.lead *. alpha }
let scaleLead alpha t=
[Scoped (envScaleLead alpha, t)]
(* Set the font colour within a scope. *)
let envColor color env =
{env with fontColor=color}
let color color t=
[Scoped (envColor color, t)]
(* Usual shortcuts: bold and small-caps alternatives. *)
let envBold = envAlternative Bold
let bold = alternative Bold
let envSv = envAlternative Caps
let sc = alternative Caps
(* Environment for verbatim (full) paragraphs: switch to the monospaced
   family at the recorded size ratio, with no measure limit or indent.
   NOTE: the tail of this definition was garbled; the [with] fields are
   restored from the upstream Patoline source. *)
let verbEnv x =
{ (envFamily x.fontMonoFamily (envScale x.fontMonoRatio x))
with normalMeasure=infinity; par_indent=[] }
(* Inline verbatim text: monospaced family at the recorded size ratio.
   NOTE: the closing of the [Scoped] constructor was garbled; restored. *)
let verb p =
[Scoped ((fun x -> envFamily x.fontMonoFamily
(envScale x.fontMonoRatio x)),
p)]
(* Emphasis is implemented as an italics toggle. *)
let emph=toggleItalic
let id x=x
(* Complicated part: edit with care (or learn by experimenting). *)
(* Default paragraph parameters. The measure is reduced when a figure placed
   on the same page overlaps the line vertically; any [Parameters] box found
   on the line may then further transform the result. *)
let parameters env pars figures _ last_figures _ _ line =
let fn i figPos m =
let open Break in
match figPos with
| Placed(l) when layout_page line = layout_page l
&& line.height >= l.height +. figures.(i).drawing_y0
&& line.height <= l.height +. figures.(i).drawing_y1
-> env.normalMeasure -. figures.(i).drawing_nominal_width
| _ -> m
in
let params =
{ measure = IntMap.fold fn last_figures env.normalMeasure
; left_margin = env.normalLeftMargin
; local_optimization = 0
; min_page_before = 0
; min_page_after = 0
; min_height_before = 0.0
; min_height_after = 0.0
; not_last_line = false
; not_first_line = false
; min_lines_before = 1
; min_lines_after = 0
; absolute = false }
in
let fn params b = match b with Parameters(f) -> f params | _ -> params in
fold_left_line pars fn params line
(* Content carrying a rewriter of the parameters of the line it lands on. *)
let set_parameters : (parameters -> parameters) -> content list =
fun f -> [bB (fun _ -> [Parameters(f)])]
(* Minimal vertical space before/after the current paragraph. *)
let vspaceBefore : float -> content list = fun sp ->
let fn p = {p with min_height_before = max p.min_height_before sp} in
set_parameters fn
let vspaceAfter : float -> content list = fun sp ->
let fn p = {p with min_height_after = max p.min_height_after sp} in
set_parameters fn
(* Minimal page / line distances around the current paragraph. *)
let pagesBefore : int -> content list = fun nb ->
let fn p = {p with min_page_before = max p.min_page_before nb} in
set_parameters fn
let pagesAfter : int -> content list = fun nb ->
let fn p = {p with min_page_after = max p.min_page_after nb} in
set_parameters fn
let linesBefore : int -> content list = fun nb ->
let fn p = {p with min_lines_before = max p.min_lines_before nb} in
set_parameters fn
let linesAfter : int -> content list = fun nb ->
let fn p = {p with min_lines_after = max p.min_lines_after nb} in
set_parameters fn
let notFirstLine : content list =
set_parameters (fun p -> {p with not_first_line = true})
let notLastLine : content list =
set_parameters (fun p -> {p with not_last_line = true})
(* Fixed horizontal space of [sp] em; [hfill] stretches up to the measure. *)
let hspace : float -> content list = fun sp ->
[bB (fun env -> let sp = sp *. env.size in [glue sp sp sp])]
let hfill : content list =
[bB (fun env -> let mes = env.normalMeasure in [glue 0.0 (0.5 *. mes) mes])]
(* Wrap a parameter function so each line is centred within the measure. *)
let do_center parameters a b c d e f g line =
let param = parameters a b c d e f g line in
let min_w = line.min_width in
let nom_w = line.nom_width in
if param.measure >= nom_w then
let left_margin = param.left_margin +. (param.measure -. nom_w) /. 2.0 in
{param with measure = nom_w; left_margin}
else if param.measure < min_w then
let left_margin = param.left_margin +. (param.measure -. min_w) /. 2.0 in
{param with measure = min_w; left_margin}
else param
(* Ragged-left / ragged-right wrappers. *)
let do_ragged_left parameters a b c d e f g line =
let param = parameters a b c d e f g line in
{param with measure = line.nom_width}
let do_ragged_right parameters a b c d e f g line =
let param = parameters a b c d e f g line in
let left_margin = param.left_margin +. param.measure -. line.nom_width in
{param with measure = line.nom_width; left_margin}
(* Default badness of the transition from line [node_i] to line [node_j]:
   vertical badness when on the same page, plus horizontal badness,
   hyphenation penalties, the break badness of the glue ending [node_j],
   and a penalty proportional to the change in compression. *)
let badness env paragraphs _ _
node_i line_i max_i params_i comp_i
node_j line_j max_j params_j comp_j=
if node_j.paragraph>=Array.length paragraphs then 0. else (
let v_bad=
if layout_page node_i=layout_page node_j then (
Badness.v_badness
(node_j.height-.node_i.height)
line_i max_i params_i comp_i
line_j max_j params_j comp_j
) else (
(* Breaking the page in the middle of a hyphenated word is forbidden. *)
if node_i.hyphenEnd>=0 then infinity else 0.
)
in
(Badness.h_badness paragraphs params_j.measure node_j comp_j)
+. v_bad
+. (if layout_page node_i<>layout_page node_j &&
node_i.height>=(fst node_i.layout).frame_y0+.env.lead then 10000. else 0.)
+. (if node_j.hyphenEnd >=0 then
(if node_j.hyphenStart >=0 then
1e10
else
1e8)
else
(if node_j.hyphenStart >=0 then
1e8
else
0.)
)
+. (if node_j.lineEnd<Array.length paragraphs.(node_j.paragraph)
&& not node_j.isFigure then
match paragraphs.(node_j.paragraph).(node_j.lineEnd) with
Glue g->g.drawing_break_badness
| _->0.
else 0.0
)
+. (1000.*.(abs_float (comp_i-.comp_j)))
)
* { 3 Figures }
(* Append a figure to the document, incrementing both the hidden "_figure"
   counter and the user-visible "figure" counter; when [name] is given it is
   also registered in the label table. *)
let figure str parameters ?(name="") drawing=
str:=up (newChildAfter !str (
FigureDef
{ fig_contents=drawing;
fig_env=(fun x->
let l,cou=try StrMap.find "_figure" x.counters with
Not_found -> -1, [] in
let l0,cou0=try StrMap.find "figure" x.counters with
Not_found -> -1, [] in
let counters'=
(StrMap.add "_figure"
(l,match cou with h::s->(h+1)::s | _->[0])
(StrMap.add "figure"
(l0,match cou0 with h::s->(h+1)::s | _->[0]) x.counters)
)
in
{ x with
names=if name="" then names x else (
let w=
try let (_,_,w)=StrMap.find name (names x) in w
with Not_found -> uselessLine
in
StrMap.add name (counters', "_figure", w) (names x)
);
counters=counters';
last_changed_counter="_figure"
});
fig_post_env=(fun x y->{ x with names=names y; counters=y.counters; user_positions=user_positions y });
fig_parameters=parameters }))
(* Box forcing the figure registered under [name] to be flushed at this
   point of the text (empty when the label is unknown). *)
let flushFigure name=
[C (fun env->
try
env_accessed:=true;
let (counters,_,_)=StrMap.find name (names env) in
match StrMap.find "_figure" counters with
_,h::_->[bB (fun _->[FlushFigure h])]
| _->[]
with
Not_found ->[]
)]
(* Box marking where figure [name] may start floating. *)
let beginFigure name=
[C (fun env->
try
env_accessed:=true;
let (counters,_,_)=StrMap.find name (names env) in
match StrMap.find "_figure" counters with
_,h::_->[bB (fun _->[BeginFigure h])]
| _->[]
with
Not_found ->[]
)]
(* Append a new paragraph with the given line-completion, parameter and
   badness functions, then move the zipper back up. *)
let newPar str ?(environment=(fun x->x)) ?(badness=badness) ?(states=[]) complete parameters par=
let para =
{ par_contents = par
; par_env = environment
; par_post_env = (fun env1 env2 ->
{ env1 with names = names env2
; counters = env2.counters
; user_positions = user_positions env2 })
; par_parameters = parameters
; par_badness = badness
; par_completeLine = complete
; par_states = states
(* The real paragraph index is assigned later, when flattening the tree. *)
; par_paragraph = (-1) }
in up (newChildAfter str (Paragraph para))
(* Open a new structural (section-like) node. [label] overrides the name
   derived from [displayname]; the tags control numbering and the table of
   contents. The "_structure" counter is pushed on entry and popped (and
   incremented when [numbered]) on exit. *)
let newStruct str ?(in_toc=true) ?label ?(numbered=true) ?(extra_tags=[]) displayname =
let name = match label with
None -> string_of_contents displayname
| Some s -> s
in
let displayname=match displayname with
[]->(match label with Some s->[tT s] | None->[])
| _->displayname
in
let para=Node {
empty with
name=name;
displayname =[C (fun _->env_accessed:=true;displayname)];
node_tags= extra_tags @ (if in_toc then ["intoc",""] else []) @ ["structural",""] @ (if numbered then ["numbered",""] else []);
node_env=(
fun env->
{ env with
last_changed_counter="_structure";
counters=StrMap.add "_structure" (
try
let (a,b)=StrMap.find "_structure" env.counters in
a,0::(match b with []->[0] | _->b)
with
Not_found -> (-1,[0;0])
) env.counters }
);
node_post_env=(
fun env env'->
{ env with
names=names env';
user_positions=user_positions env';
counters=StrMap.add "_structure" (
try
let a,b=StrMap.find "_structure" env'.counters in
match b with
_::h::s when numbered ->a,(h+1)::s
| _::h::s ->a,h::s
| _ -> a, [0]
with
Not_found -> -1,[0]
) env'.counters }
);
}
in newChildAfter str para
(* Hyperlinked page number of label [x]; empty when the label is unknown. *)
let pageref x=
[C (fun env->
try
env_accessed:=true;
let (_,_,node)=StrMap.find x (names env) in
[bB (fun _->[Marker (BeginLink (Intern x))]);
tT (string_of_int (1+layout_page node));
bB (fun _->[Marker EndLink])]
with Not_found -> []
)]
(* Normalise a label name: collapse runs of Unicode whitespace into a single
   ASCII space and strip leading whitespace. *)
let make_name name=
let realName=UTF8.Buf.create (String.length name) in
let rec fill i sp=
(* [sp] records whether the previous character was a space. *)
if UTF8.out_of_range name i then
UTF8.Buf.contents realName
else (
if UChar.is_space (UTF8.look name i) then
if sp then fill (i+1) true
else (
UTF8.Buf.add_char realName (UChar.of_char ' ');
fill (UTF8.next name i) true
)
else (
UTF8.Buf.add_char realName (UTF8.look name i);
fill (UTF8.next name i) false
)
)
in
fill 0 true
(* Register a label at the current position, recording the counters and the
   counter type (defaulting to the last changed one); also emits a [Label]
   marker box so the position can be resolved later. *)
let label ?labelType name=
let name=make_name name in
[Env (fun env->
let w=try let (_,_,w)=StrMap.find name (names env) in w with Not_found -> uselessLine in
let labelType=match labelType with None->env.last_changed_counter | Some t->t in
{ env with names=StrMap.add name (env.counters, labelType, w) (names env) });
bB (fun _ -> [Marker (Label name)])
]
(* Current compilation pass, used to silence warnings on the first pass. *)
let pass_number = ref (-1)
(* Hyperlinked reference to label [name]: prints the referenced counter
   (dot-separated, prefixed by the relevant section numbers). Unknown labels
   render as a red "??" and a warning after the first pass. *)
let lref ?refType name=
let name=make_name name in
[ C (fun env->
try
env_accessed:=true;
let counters,refType_=
if name="_here" then env.counters,env.last_changed_counter else
let a,t,_=StrMap.find name (names env) in a,t
in
let refType=match refType with Some x->x | None->refType_ in
let lvl,num_=StrMap.find refType counters in
let num=if refType="_structure" then List.drop 1 num_ else num_ in
let str_counter=
try
let _,str_counter=StrMap.find "_structure" counters in
str_counter
with
Not_found->[]
in
let sect_num = List.drop (List.length str_counter - max 0 lvl+1) str_counter in
[bB (fun _->[Marker (BeginLink (Intern name))]);
tT (String.concat "." (List.map (fun x->string_of_int (x+1))
(List.rev (num@sect_num))));
bB (fun _->[Marker EndLink])]
with
Not_found ->
let refType=match refType with Some x->x | None-> "Default" in
if !pass_number <> 0 then Printf.eprintf "Unknown label %S of labelType %S (%d)\n%!" name refType !pass_number;
color Color.red [tT "??"]
)]
(* Reference helpers for a given counter type / for sections. *)
let generalRef t x = lref ~refType:t x
let sectref x=lref ~refType:"_structure" x
(* Wrap contents in an external / internal hyperlink. *)
let extLink a b=bB (fun _->[Marker (BeginLink (Extern a))])::b@[bB (fun _->[Marker EndLink])]
let link a b=bB (fun _->[Marker (BeginLink (Intern a))])::b@[bB (fun _->[Marker EndLink])]
(* Fresh, unique names for interactive buttons. *)
let button_name =
let c = ref 0 in
fun () -> let x = !c in c := x+1; "button_" ^ string_of_int x
let button =
fun btype b ->
bB (fun _->[Marker (BeginLink (Button(btype, button_name ())))])::
b @ bB (fun _->[Marker EndLink]) :: []
(* Load an image file as a drawing box; at most one of [scale], [width],
   [height] is honoured, in that order of priority. [offset] shifts the
   drawing downwards from the baseline. *)
let image ?scale:(scale=0.) ?width:(width=0.) ?height:(height=0.) ?offset:(offset=0.) imageFile _ =
let i=RawContent.image imageFile in
let dr={
drawing_min_width=i.image_width;
drawing_max_width=i.image_width;
drawing_nominal_width=i.image_width;
drawing_width_fixed = true;
drawing_adjust_before = false;
drawing_y0=(-.offset);
drawing_y1=(-.offset) -. i.image_height;
drawing_break_badness=0.;
drawing_states=[];
drawing_badness=(fun _->0.);
drawing_contents=(fun _->[RawContent.translate 0. (-.offset) (Image i)])
}
in
let scale =
if scale >0. then scale
else if width > 0. then width /. i.image_width
else if height > 0. then height /. i.image_height
else 0.
in
if scale>0. then resize_drawing scale dr
else dr
(* Embed a video as a drawing box. Side effect: extracts the first frame with
   the external `ffmpeg` command (written as <basename>-1.png next to the
   file) in order to measure the video. Dimensions follow [width], then
   [height], then [scale], then a default that fits the measure. *)
let video ?scale:(scale=0.) ?width:(width=0.) ?height:(height=0.) ?offset:(offset=0.) imageFile env=
let tmp=(try Filename.chop_extension imageFile with _->imageFile) in
(* Re-extract the frame when missing or older than the video itself. *)
if not (Sys.file_exists (tmp^"-1.png")) ||
(Unix.stat (tmp^"-1.png")).Unix.st_mtime
< (Unix.stat imageFile).Unix.st_mtime then (
let _=Sys.command (Printf.sprintf "ffmpeg -i %s -t 1 -r 1 %s-%%d.png" imageFile tmp) in
()
);
let w,h = ImageLib.size (tmp^"-1.png") in
let fw,fh=
if width=0. then
if height=0. then
if scale=0. then
if env.normalMeasure<(float_of_int w)/.7. then
env.normalMeasure, env.normalMeasure*.(float_of_int h)/.(float_of_int w)
else
(float_of_int w)/.7.,(float_of_int h)/.7.
else
(float_of_int w)*.scale,(float_of_int h)*.scale
else
height*.(float_of_int w)/.(float_of_int h), height
else
width, width*.(float_of_int h)/.(float_of_int w)
in
let i={video_file=imageFile;
video_width=fw;
video_height=fh;
video_pixel_width=w;
video_pixel_height=h;
video_x=0.;
video_y=offset;
video_order=0
}
in
{
drawing_min_width=fw;
drawing_max_width=fw;
drawing_nominal_width=fw;
drawing_width_fixed = true;
drawing_adjust_before = false;
drawing_y0=offset;
drawing_y1=fh+.offset;
drawing_break_badness=0.;
drawing_states=[];
drawing_badness=(fun _->0.);
drawing_contents=(fun _->[RawContent.Video i])
}
(* Content wrappers around [image] and [video]. *)
let includeGraphics ?scale:(scale=0.) ?width:(width=0.) ?height:(height=0.) ?offset:(offset=0.) imageFile=
[bB (fun env->[Drawing (image ~scale ~width ~height ~offset imageFile env)])]
let includeVideo ?scale:(scale=0.) ?width:(width=0.) ?height:(height=0.) ?offset:(offset=0.) imageFile=
[bB (fun env->[Drawing (video ~scale ~width ~height ~offset imageFile env)])]
* { 3 Boxification }
(* Cache of the standard glue for the current font size (size, glue box).
   NOTE: the empty-array literal on the next line was garbled ([[ ||]]);
   restored to [[||]]. *)
let rStdGlue:(float*box) ref=ref (0.,glue 0. 0. 0.)
(* Scratch buffer reused across boxification passes. *)
let ambientBuf = ref ([||],0)
(* Glue (or fixed space) for the Unicode code point [x0] at the current font
   size. ASCII whitespace gets the cached standard glue; the Unicode space
   code points get their conventional widths; non-breaking spaces (U+00A0,
   U+202F) are returned as unbreakable [Drawing]s instead of [Glue]s. *)
let makeGlue env x0=
let stdGlue=
(* Refresh the cache when the font size changed. *)
if fst !rStdGlue <> env.size then
begin
let (mi,no,ma) = env.stdGlue in
rStdGlue:=(env.size,
glue (mi*.env.size) (no*.env.size) (ma*.env.size))
end;
snd !rStdGlue
in
if (x0>=0x0009 && x0<=0x000d) || x0=0x0020 then stdGlue else
match x0 with
0x00a0->(match stdGlue with
Glue y->(
Drawing y
)
| y->y)
| 0x1680->stdGlue
| 0x180e->(glue 0. 0. 0.)
| 0x2000->let w=env.size/.2. in (glue w w w)
| 0x2001->let w=env.size in (glue w w w)
| 0x2002->let w=env.size/.2. in (glue (w*.2./.3.) w (w*.3./.2.))
| 0x2003->let w=env.size in (glue (w*.2./.3.) w (w*.3./.2.))
| 0x2004->let w=env.size/.3. in (glue (w*.2./.3.) w (w*.3./.2.))
| 0x2005->let w=env.size/.4. in (glue (w*.2./.3.) w (w*.3./.2.))
| 0x2006->let w=env.size/.6. in (glue (w*.2./.3.) w (w*.3./.2.))
| 0x2007->(
(* Figure space: the width of the digit '0' in the current font. *)
let w0=
glyph_of_string env.substitutions env.positioning env.font env.size
env.fontColor
"0"
in
let w=env.size*.(List.fold_left (fun w1 b->w1+.box_width 0. b) 0. w0) in (glue (w*.2./.3.) w (w*.3./.2.))
)
| 0x2008->(
(* Punctuation space: the width of '.' in the current font. *)
let w0=
glyph_of_string env.substitutions env.positioning env.font env.size
env.fontColor
"."
in
let w=env.size*.(List.fold_left (fun w1 b->w1+.box_width 0. b) 0. w0) in (glue (w*.2./.3.) w (w*.3./.2.))
)
| 0x2009->let w=env.size/.5. in (glue (w*.2./.3.) w (w*.3./.2.))
| 0x200a->let w=env.size/.8. in (glue (w*.2./.3.) w (w*.3./.2.))
| 0x202f->
let w=env.size/.5. in
(match glue (w*.2./.3.) w (w*.3./.2.) with
Glue y->Drawing y
| y->y)
| 0x205f->let w=env.size*.4./.18. in (glue (w*.2./.3.) w (w*.3./.2.))
| 0xfeff->(glue 0. 0. 0.)
| _->stdGlue
(* Boxes for string [str] in the current font, with hyphenation; falls back
   to an empty list (with a message on stderr) when a glyph is missing. *)
let gl_of_str env str =
try
let res = hyphenate env.hyphenate env.substitutions env.positioning env.font
env.size env.fontColor str
in
res
with Glyph_not_found _ ->
Printf.eprintf "glyph not found in: %s (%S)\n%!" str str;
[]
(* Store [x] at index [!nbuf] of the growable box buffer [buf], doubling its
   capacity (filling new slots with [Empty]) when full. *)
let append buf nbuf x =
let cap = Array.length !buf in
if !nbuf >= cap then begin
let bigger = Array.make (max 1 (2 * cap)) Empty in
Array.blit !buf 0 bigger 0 !nbuf;
buf := bigger
end;
(!buf).(!nbuf) <- x;
incr nbuf
(* Append the first [nsrc] elements of [src] to the growable buffer. *)
let concat dst ndst src nsrc =
for i = 0 to nsrc - 1 do append dst ndst src.(i) done
(* Add [x] to an int-keyed map under the next free (max+1) key. *)
let mappend m x =
let next = try fst (IntMap.max_binding m) + 1 with Not_found -> 0 in
IntMap.add next x m
let nfkc = UNF8.nfkc
(* NOTE(review): NFKC normalisation is disabled — this binding shadows the
   previous one with the identity. Confirm this is intentional. *)
let nfkc x = x
(** Converts a list of contents into a list of boxes, which is the next
    Patoline layer. *)
let boxify buf nbuf env0 l=
let rec boxify keep_cache env = function
| []->env
| B (b, cache) :: s ->
(* Cached box leaf: reuse the cache unless the environment was read while
   producing it, in which case another pass may be needed. *)
let l =
match !cache with
| Some l when keep_cache -> l
| _ ->
let acc = !env_accessed in
env_accessed := false;
let l = b env in
if keep_cache then
(if not !env_accessed then cache := Some l
else env0.fixable := true);
env_accessed := acc || !env_accessed; l
in
List.iter (append buf nbuf) l;
boxify keep_cache env s
| (C b)::s->(
(* Content function: if it reads the environment, the result of its
   children must not be cached either. *)
let acc= !env_accessed in
env_accessed:=false;
let c = b env in
let env'=if !env_accessed then (
env0.fixable:=true;
boxify false env c
) else boxify keep_cache env c
in
env_accessed:=acc || !env_accessed;
boxify keep_cache env' s
)
| Env f::s->boxify keep_cache (f env) s
| T (t,cache) :: s -> (
match !cache with
| Some l when keep_cache ->
IntMap.iter (fun _->List.iter (append buf nbuf)) l;
boxify keep_cache env s
| _ ->
(* Cut the text on Unicode spaces: each word becomes a glyph run and
   each space a glue, collected in an int-keyed map. *)
let l = ref IntMap.empty in
let t = nfkc t in
let rec cut_str i0 i =
if i >= String.length t then
let sub = String.sub t i0 (i-i0) in
l := mappend !l (gl_of_str env sub)
else if UChar.is_space (UTF8.look t i) then
let sub = String.sub t i0 (i-i0) in
l := mappend !l (gl_of_str env (nfkc sub));
if i <> i0 || i = 0 then
l:=mappend !l [makeGlue env (UChar.code (UTF8.look t i))];
cut_str (UTF8.next t i) (UTF8.next t i)
else
cut_str i0 (UTF8.next t i)
in cut_str 0 0;
if keep_cache then cache := Some !l;
IntMap.iter (fun _->List.iter (append buf nbuf)) !l;
boxify keep_cache env s)
| Scoped (fenv, p)::s->
(* The transformed environment is local to [p]. *)
let env'=fenv env in
let _=boxify keep_cache env' p in
boxify keep_cache env s
| N _ :: _->
failwith "boxify: wrong argument (N)";
in
boxify true env0 l
(* Turn a list of laid-out boxes into raw drawing primitives, tracking the
   current (x, y) pen position; link markers open/close mutable [Link]
   records whose extents are filled in when the link is closed.
   NOTE: two commented-out debug lines had lost their comment markers, and
   two match arms carried stray trailing semicolons; both restored. *)
let draw_boxes env l=
let rec draw_boxes x y dr l=match l with
[]->dr,x
| Kerning kbox::s ->(
let dr',x'=draw_boxes (x+.kbox.kern_x0) (y+.kbox.kern_y0) dr [kbox.kern_contents] in
draw_boxes (x'+.kbox.advance_width) y dr' s
)
| Hyphen h::s->(
let dr1,w1=Array.fold_left (fun (dr',x') box->
draw_boxes x' y dr' [box]
) (dr,x) h.hyphen_normal
in
draw_boxes w1 y dr1 s
)
| GlyphBox a::s->(
let box=RawContent.Glyph { a with glyph_x=a.glyph_x+.x;glyph_y=a.glyph_y+.y } in
let w=a.glyph_size*.Fonts.glyphWidth a.glyph/.1000. in
draw_boxes (x+.w) y (box::dr) s
)
| Glue g::s
| Drawing g ::s->(
let w=g.drawing_nominal_width in
let box=(List.map (RawContent.translate (x) (y)) (g.drawing_contents w)) in
draw_boxes (x+.w) y (box@dr) s
)
| Marker (BeginLink l)::s->(
(* Printf.fprintf stderr "****BeginURILink %S****\n" l; *)
let k = match l with
Box.Extern l -> RawContent.Extern l
| Box.Intern l ->
let dest_page=
try
let line=MarkerMap.find (Label l) env.user_positions in
layout_page line
with
Not_found->(-1)
in
RawContent.Intern(l,dest_page,0.,0.)
| Box.Button(t,n) -> RawContent.Button(t,n)
in
let link={ link_x0=x;link_y0=y;link_x1=x;link_y1=y;link_kind=k;
link_order=0;
link_closed=false;
link_contents=[] }
in
draw_boxes x y (Link link::dr) s
)
| Marker EndLink::s->(
(* Close the most recent still-open link, recording its extent. *)
let rec link_contents u l =
match l with
| [] -> assert false
| (Link h)::_ when not h.link_closed ->
let u = List.rev u in
h.link_contents<-u;
let (_,y0,_,y1)=bounding_box u in
h.link_y0<-y0;
h.link_y1<-y1;
h.link_closed<-true;
h.link_x1<-x;
l
| h::s->link_contents (h::u) s
in
let dr'=link_contents [] dr in
(* List.iter print_raw dr'; *)
draw_boxes x y dr' s
)
| b::s->
(* Any other box only advances the pen by its natural width. *)
let _,w,_=box_interval b in
draw_boxes (x+.w) y dr s
in
let dr,_ = draw_boxes 0. 0. [] l in dr
(* Approximate raw contents by Bezier curves: glyph outlines are scaled and
   translated into place; stroked paths are widened by half the line width
   in the four axis directions; dynamic and link contents are recursed into.
   NOTE: the final catch-all arm had lost its body ([bezier_of_boxes s]);
   restored. *)
let rec bezier_of_boxes=function
[]->[]
| Glyph g::s->
let out=Fonts.outlines g.glyph in
(List.map (fun (x,y)->Array.map (fun xx->g.glyph_x+.xx *. g.glyph_size/.1000.) x,
Array.map (fun xx->g.glyph_y+.xx *. g.glyph_size/.1000.) y)
(List.concat out)) @ (bezier_of_boxes s)
| Path (param,p)::s->
let l = List.concat (List.map Array.to_list p) in
if param.strokingColor <> None then (
let lw = param.lineWidth /. 2.0 in
let l1 = List.map (fun (xa, ya) -> Array.map (fun x -> x +. lw) xa, ya) l in
let l2 = List.map (fun (xa, ya) -> Array.map (fun x -> x -. lw) xa, ya) l in
let l3 = List.map (fun (xa, ya) -> xa, Array.map (fun x -> x +. lw) ya) l in
let l4 = List.map (fun (xa, ya) -> xa, Array.map (fun x -> x -. lw) ya) l in
l1@l2@l3@l4@(bezier_of_boxes s))
else
l@(bezier_of_boxes s)
| Dynamic(d)::s ->
(bezier_of_boxes (d.dyn_contents ()))@(bezier_of_boxes s)
| Link(l)::s ->
(bezier_of_boxes l.link_contents)@(bezier_of_boxes s)
| _::s->
bezier_of_boxes s (* TODO: more cases? Affine and States? *)
let adjust_width env buf nbuf =
(* FIXME *)
let alpha = env.adjust_optical_alpha in
let beta = env.adjust_optical_beta in
let char_space = env.normalLead *. env.adjust_min_space in
let epsilon = env.adjust_epsilon in
let dir = (-.cos(alpha), sin(alpha)), (-.cos(alpha), -.sin(alpha)) in
let dir' = (cos(alpha), -.sin(alpha)), (cos(alpha), sin(alpha)) in
let profile_left = ref [] in
let buf = !buf in
let i0 = ref 0 in
while !i0 < !nbuf do
match buf.(!i0) with
| Glue x ->
profile_left := Distance.translate_profile !profile_left (-.x.drawing_nominal_width);
incr i0;
| Drawing _ | GlyphBox _ | Hyphen _ as x0-> (
let adjust = ref (match x0 with
Drawing x -> if x.drawing_width_fixed then None else Some(x0,!i0)
| _ -> None)
in
let min = ref 0.0 in
let nominal = ref 0.0 in
let max = ref 0.0 in
let left = draw_boxes env [x0] in
let bezier_left = bezier_of_boxes left in
let profile_left' = Distance.bezier_profile dir epsilon bezier_left in
let (x0_l,_,x1_l,_) = bounding_box_kerning left in
if !Distance.debug then
Printf.fprintf stderr "Drawing(1): i0 = %d (%d,%d)\n" !i0 (List.length !profile_left) (List.length profile_left');
profile_left := Distance.translate_profile (Distance.profile_union dir !profile_left profile_left') (x0_l -. x1_l);
incr i0;
try while !i0 < !nbuf do
match buf.(!i0) with
| Marker AlignmentMark -> incr i0; raise Exit
| Marker _ -> incr i0
| Drawing x as b when x.drawing_nominal_width = 0.0 ->
if !Distance.debug then Printf.fprintf stderr "0 Drawing(2)\n";
if !adjust = None && not x.drawing_width_fixed then adjust := Some(b,!i0);
incr i0
| Glue x as b ->
min := !min +. x.drawing_min_width;
max := !max +. x.drawing_max_width;
nominal := !nominal +. x.drawing_nominal_width;
profile_left := Distance.translate_profile !profile_left (-.x.drawing_nominal_width);
if !adjust = None && not x.drawing_width_fixed then adjust := Some(b,!i0);
incr i0
| Drawing _ | GlyphBox _ | Hyphen _ as y0 -> (
let before =
match y0 with
Drawing y when !adjust = None && y.drawing_adjust_before ->
adjust := Some(y0, !i0);
true
| _ -> false
in
match !adjust with
| None -> raise Exit
| Some (b,i) ->
let right = draw_boxes env [y0] in
let profile_left = !profile_left in
let bezier_right = bezier_of_boxes right in
let profile_right = Distance.bezier_profile dir' epsilon bezier_right in
if !Distance.debug then
Printf.fprintf stderr "Drawing(2): i0 = %d (%d,%d)\n" !i0 (List.length profile_left) (List.length profile_right);
if profile_left = [] || profile_right = [] then raise Exit;
if !Distance.debug then
Printf.fprintf stderr "Drawing(2b): i0 = %d\n" !i0;
let d space =
let pr = List.map (fun (x,y) -> (x+.space,y)) profile_right in
let r = Distance.distance beta dir profile_left pr in
r
in
let (x0_r,_,x1_r,_) = bounding_box_kerning right in
let (x0_r',_,_,_) = bounding_box_full right in
let nominal' = !nominal +. char_space in
let min' = Pervasives.min (Pervasives.max (x0_r -. x1_r) (x0_l -. x1_l)) (!min -. nominal') in
let max' = Pervasives.max (2. *. char_space) (!max -. nominal') in
let da = d min' in
let db = d max' in
let target = nominal' in
if !Distance.debug then
Printf.fprintf stderr "start Adjust: min = %f => %f, max = %f => %f, target = %f\n" min' da max' db nominal';
let epsilon = epsilon /. 16. in
let r =
if da > target then min' else
if db < target then max' else (
let rec fn sa da sb db =
let sc = (sa +. sb) /. 2.0 in
let dc = d sc in
if abs_float (dc -. target) < epsilon || (sb -. sa) < epsilon then sc
else if dc < target then fn sc dc sb db
else fn sa da sc dc
in
fn min' da max' db)
in
let r = r - . x0_r ' + . x0_r - . + . ' in
if !Distance.debug then Printf.fprintf stderr "end Adjust: r = %f nominal = %f" r !nominal;
buf.(i) <-
(match b with
| Drawing x when before -> Drawing { x with
drawing_contents =
(fun w -> List.map (RawContent.translate (r +. x0_r' -. x0_r) 0.0) (x.drawing_contents w))
}
| Drawing x -> Drawing { x with
drawing_nominal_width = r +. x.drawing_nominal_width;
drawing_min_width = r +. x.drawing_min_width;
drawing_max_width = r +. x.drawing_max_width;
}
| Glue x -> Glue { x with
drawing_nominal_width = r +. x.drawing_nominal_width;
drawing_min_width = r +. x.drawing_min_width;
drawing_max_width = r +. x.drawing_max_width;
}
| _ -> assert false);
raise Exit)
| _ ->
incr i0;
raise Exit
done with Exit -> ())
| _ -> incr i0
done
(* Boxify the content [x] in environment [env], discard the resulting
   environment, run the optical-width adjustment, and return the produced
   boxes as a list. *)
let boxify_scoped env x =
  let boxes = ref [||] in
  let used = ref 0 in
  ignore (boxify boxes used env x);
  adjust_width env boxes used;
  Array.to_list (Array.sub !boxes 0 !used)
(* Boxify [x], adjust widths, then render the boxes to raw contents using
   the environment produced by boxification. *)
let draw env x =
  let boxes = ref [||] in
  let used = ref 0 in
  let env' = boxify boxes used env x in
  adjust_width env boxes used;
  draw_boxes env' (Array.to_list (Array.sub !boxes 0 !used))
(* Wrap the drawn content [x] in a [States] node restricted to the state
   list [st], offset by its kerning baseline. *)
let states st x =
  [uB (fun env ->
       let contents = draw env x in
       let (_, offset, _, _) = bounding_box_kerning contents in
       let st_box =
         States { states_contents = contents;
                  states_states = st;
                  states_order = 0 }
       in
       [Drawing (drawing ~offset [st_box])])]
(* Draw several state-dependent alternatives [(st, content)] and pack them
   into one drawing, offset by the lowest kerning baseline among them. *)
let altStates l =
  [uB (fun env ->
       let drawn = List.map (fun (st, c) -> (st, draw env c)) l in
       let offset =
         List.fold_left
           (fun acc (_, d) ->
              let (_, o, _, _) = bounding_box_kerning d in
              min acc o)
           0.0 drawn
       in
       let boxes =
         List.map
           (fun (st, d) ->
              States { states_contents = d;
                       states_states = st;
                       states_order = 0 })
           drawn
       in
       [Drawing (drawing ~offset boxes)])]
(* Flatten the document tree [str] into the parallel arrays the line-breaking
   stage consumes: paragraphs (box arrays), their source trees/paths,
   completion/parameter/new-page/new-line/badness/state callbacks, plus the
   figure maps.  Returns the final environment followed by all those arrays.
   [initial_path] prefixes the structural path used when naming section
   markers. *)
let flatten ?(initial_path=[]) env0 str=
let paragraphs=ref [] in
let trees=ref [] in
let figures=ref IntMap.empty in
let figure_trees=ref IntMap.empty in
let fig_param=ref IntMap.empty in
let param=ref [] in
let new_page_list=ref [] in
let new_line_list=ref [] in
let compl=ref [] in
let bads=ref [] in
let states=ref [] in
let n=ref 0 in
let buf=ref [||] in
let nbuf=ref 0 in
let frees=ref 0 in
(* Boxify one paragraph and push its data onto every accumulator above. *)
let add_paragraph env tree path p=
let cont = bB (fun env->(p.par_env env).par_indent) :: p.par_contents in
nbuf:= !frees;
let env=boxify buf nbuf env cont in
adjust_width env buf nbuf;
paragraphs:=(Array.sub !buf 0 !nbuf)::(!paragraphs);
trees:=(tree,path)::(!trees);
compl:=(p.par_completeLine env)::(!compl);
param:=(p.par_parameters env)::(!param);
new_page_list:=(env.new_page)::(!new_page_list);
new_line_list:=(env.new_line env)::(!new_line_list);
bads:=(p.par_badness env)::(!bads);
states:=(p.par_states)::(!states);
incr n;
frees:=0;
env
in
(* Depth-first walk; [flushes] collects figure-flush markers to append to
   the enclosing node's last paragraph. *)
let rec flatten flushes env0 path tree=
match tree with
| Paragraph p -> (
let env1 = p.par_env env0 in
let add_node env cur =
add_paragraph env tree path
{ p with par_paragraph = List.length !paragraphs;
par_contents=List.rev cur }
in
(* Resolve dynamic content (C/Scoped/Env/N) into plain contents,
   splitting the paragraph at each nested tree [N n]. *)
let rec collect_nodes env1 l cur =
match l with
| []-> (env1, cur)
| C(f)::s-> collect_nodes env1 (f env1@s) cur
| Scoped(f,s')::s->
let env2 = f env1 in
let (_, res) = collect_nodes env2 s' [] in
collect_nodes env1 s (Scoped((fun _ -> env2),List.rev res)::cur)
| Env f::s ->
let env1 = f env1 in
collect_nodes env1 s (Env (fun _ -> env1)::cur)
| N n::s->
let env1 = add_node env1 cur in
let env1 = flatten flushes env1 path n in
collect_nodes env1 s []
| (T _ | B _ as h)::s-> collect_nodes env1 s (h::cur)
in
let (env1, cur) = collect_nodes env1 p.par_contents [] in
let env1 = add_node env1 cur in
p.par_post_env env0 env1
)
| FigureDef f -> (
let env1=f.fig_env env0 in
let n=IntMap.cardinal !figures in
fig_param:=IntMap.add n (f.fig_parameters env1) !fig_param;
figures:=IntMap.add n (f.fig_contents env1) !figures;
figure_trees:=IntMap.add n (tree,path) !figure_trees;
append buf frees (BeginFigure n);
f.fig_post_env env0 env1
)
| Node s-> (
let env1 = s.node_env env0 in
(* Reset counters deeper than the current structure level. *)
let env1=
let level=
try
List.length (snd (StrMap.find "_structure" env1.counters))
with Not_found->0
in
{ env1 with counters=StrMap.map (fun (lvl,l)->if lvl>level then lvl,[] else lvl,l)
env1.counters }
in
s.node_paragraph <- List.length !paragraphs;
s.boxified_displayname <- draw_boxes env1 (boxify_scoped env1 s.displayname);
let flushes'=ref [] in
(* Fold over the node's children; [is_first] marks the first paragraph,
   which receives the section label marker. *)
let flat_children k a (is_first, env1)=match a with
Paragraph p->(
let env2=flatten flushes' env1 ((k,tree)::path)
(Paragraph { p with par_contents=
(if is_first then (
(* Set up a marker to be able to obtain section page .
It is added to the MarkerMap in Break .
It is added to the MarkerMap in Break. *)
let name=String.concat "_" ("_"::List.map string_of_int ((List.map fst path)@initial_path)) in
[Env (fun env->
let w=try let (_,_,w)=StrMap.find name (names env) in w with
Not_found -> uselessLine in
{ env with names=StrMap.add name (env.counters, "_", w)
(names env) });
bB (fun _->[Marker (Label name)])
]
) else [])@ p.par_contents
}
) in
false, env2
)
| FigureDef _ as h->(
let env2=flatten flushes' env1 ((k,tree)::path) h in
let num=try
match StrMap.find "_figure" env2.counters with
_,h::_->h
| _->0
with
Not_found ->0
in
flushes':=FlushFigure num::(!flushes');
is_first,env2
)
| Node _ as tr->(
(is_first, flatten flushes' env1 ((k,tree)::path) tr)
)
in
let _,env2=IntMap.fold flat_children s.children (true,env1) in
(* Append accumulated figure flushes to the node's last paragraph. *)
paragraphs:=(match !paragraphs with
[]->[]
| h::s->Array.append h (Array.of_list !flushes')::s);
s.node_post_env env0 env2
)
in
let env1=flatten (ref []) env0 [] str in
let params=Array.init
(IntMap.cardinal !figures)
(fun i->IntMap.find i !fig_param)
in
(* Accumulators were built with cons, hence the List.rev before packing;
   empty parameter/page/line lists fall back to the environment defaults. *)
(env1, params,
Array.of_list (match List.rev !param with []->[parameters env1] | l->l),
Array.of_list (match List.rev !new_page_list with []->[env1.new_page] | l->l),
Array.of_list (match List.rev !new_line_list with []->[env1.new_line env1] | l->l),
Array.of_list (List.rev !compl),
Array.of_list (List.rev !bads),
Array.of_list (List.rev !paragraphs),
Array.of_list (List.rev !trees),
Array.of_list (List.map snd (IntMap.bindings !figures)),
Array.of_list (List.map snd (IntMap.bindings !figure_trees)),
Array.of_list (List.rev !states))
(* Build the driver's table-of-contents structure from the document tree,
   keeping only children tagged "intoc".  [positions] maps a paragraph index
   to its placed (page, x, y); out-of-range indices fall back to (0, 0., 0.). *)
let rec make_struct positions tree =
  match tree with
  | Node s ->
    let rec toc_children = function
      | [] -> []
      | (_, Node child) :: rest when List.mem_assoc "intoc" child.node_tags ->
        make_struct positions (Node child) :: toc_children rest
      | _ :: rest -> toc_children rest
    in
    let children = Array.of_list (toc_children (IntMap.bindings s.children)) in
    let (page, x, y) =
      if s.node_paragraph >= 0 && s.node_paragraph < Array.length positions
      then positions.(s.node_paragraph)
      else (0, 0., 0.)
    in
    { Driver.name = s.name
    ; Driver.metadata = []
    ; Driver.raw_name = s.boxified_displayname
    ; Driver.tags = s.node_tags
    ; Driver.page = page
    ; Driver.struct_x = x
    ; Driver.struct_y = y
    ; Driver.children = children }
  | _ -> Driver.empty_structure
(* Attach [tags] to the root of [str]: set them on an existing root node, or
   wrap a non-node tree in a fresh tagged node.
   Fix: the Node branch referenced [_tags], an unbound identifier — the
   parameter is [tags]. *)
let tag str tags=
  match str with
  | Node n -> Node { n with node_tags = tags }
  | _ -> Node { empty with node_tags = tags; children = IntMap.singleton 0 str }
(* Merge the freshly computed marker positions [user] into [env]'s name
   table, resolving "_figure" names through the placed-figures map [figs].
   Returns [(env', needs_reboot)]: [needs_reboot] is true when any tracked
   position changed or could not be resolved, i.e. another layout pass is
   required. *)
let update_names env figs user=
let user=MarkerMap.fold (MarkerMap.add) user env.user_positions in
(* NOTE(review): the original initialiser was corrupted in this copy of the
   source; upstream compares (a filtered view of) the new and old position
   maps.  A plain structural comparison is the closest reconstruction —
   confirm against upstream. *)
let needs_reboot=ref (user<>env.user_positions) in
let env'={ env with user_positions=user;
names=
StrMap.fold (fun k (a,b,c) m->
try
let pos=
if b="_figure" then
(match StrMap.find "_figure" a with
_,[]->(Printf.fprintf stderr "figure not found (1):%S\n" k;
raise Not_found)
| _,(h::_)->(
match IntMap.find h figs with
Break.Placed l->l
| _->raise Not_found
)
)
else
MarkerMap.find (Label k) user
in
(* Upstream logged a debug message here when the position of a
   non-internal name (b <> "_") changed; that body was lost in this
   copy of the source. *)
needs_reboot:= !needs_reboot || (not (lines_eq pos c));
StrMap.add k (a,b,pos) m
with Not_found ->
(* Unknown name or not-yet-placed figure: force another pass and
   keep the old entry out of the rebuilt map. *)
(needs_reboot:=true; m)
) (names env) (names env)
}
in
flush stderr;
env',!needs_reboot
(* Drop the accumulated values of every counter while keeping each
   counter's level. *)
let reset_counters env =
  let clear (level, _) = (level, []) in
  { env with counters = StrMap.map clear env.counters }
-- | 1a86561ac225fe12882cab3292a261e05f7c36151d4b8a6b240590e5c577f56b | yihming/aihaskell | Main.hs
module Main where
import System.Environment
import FrontEnd as FE
import SemanticsAnalysis as SA
import Plot as PLT
-- | Entry point: read an Interproc source file named by the first
-- command-line argument, write a Graphviz (.dot) rendering of the parse
-- tree, run the abstract-interpretation pass, print the result, and write
-- it to FILE.certified.
--
-- Fix: the original used the partial function 'head' on 'args', which
-- crashes with an unhelpful "Prelude.head: empty list" when no argument is
-- given; pattern-match and report a usage error instead.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [] -> error "usage: aihaskell FILE"
    (filename : _) -> do
      src <- readFile filename
      let t = FE.parseInterproc src
      --putStrLn $ show t
      let outFileDot = filename ++ ".dot"
      PLT.genDotFile outFileDot t
      newT <- SA.aiProcess t
      print newT  -- same as: putStrLn (show newT)
      let outFile = filename ++ ".certified"
      writeFile outFile (show newT)
-- | end of extracted chunk
import System.Environment
import FrontEnd as FE
import SemanticsAnalysis as SA
import Plot as PLT
main :: IO ()
main = do
args <- getArgs
let filename = head args
src <- readFile filename
let t = FE.parseInterproc src
let outFileDot = filename ++ ".dot"
PLT.genDotFile outFileDot t
newT <- SA.aiProcess t
putStrLn $ show newT
let outFile = filename ++ ".certified"
writeFile outFile (show newT)
|
9d3a459fe4dc255522ca55b858bc678b84d8385a7add5d4df120d5c78c16e4fd | zadean/xqerl | xqldb_dml.erl | Copyright ( c ) 2018 - 2020 .
SPDX - FileCopyrightText : 2022
%
SPDX - License - Identifier : Apache-2.0
-module(xqldb_dml).
%% Reading functions set the read locks in the function.
%% Writing functions should only be called from a Pending Update List
%% where the write locks have already been acquired.
-include("xqerl_db.hrl").
%% ====================================================================
%% API functions
%% ====================================================================
-export([analyze/1]).
-export([
commit/1,
select_paths/2,
select_collection/2, select_collection/3,
delete_collection/1, delete_collection/2,
import_from_directory/2,
insert_doc_as_collection/3
]).
Generic
-export([select/2]).
%% XML
% only used in test
-export([delete_doc/1]).
-export([
exists_doc/2,
select_doc/1, select_doc/2
]).
-export([
insert_doc/2,
insert_doc_node/3
]).
%% Text/Binary Resource - Internal/External
-export([
exists_resource/1, exists_resource/2,
select_resource/3
]).
-export([
insert_resource/2,
insert_text_resource/3,
insert_binary_resource/3,
link_resource/3
]).
%% XDM Values
-export([
exists_item/2,
select_item/2
]).
-export([insert_item/2, insert_item/3]).
-define(BIN(Bin), #xqAtomicValue{type = 'xs:base64Binary', value = Bin}).
% used by fn:uri-collection
select_paths(#{trans := Agent}, Uri) ->
DBs = xqldb_db:databases(Uri),
ok = read_lock_all(Agent, DBs),
Fun = fun(DB) ->
DbUri = xqldb_path_table:uri(DB),
[
xqldb_uri:join(DbUri, element(1, Rec))
|| Rec <- xqldb_path_table:all(DB)
]
end,
lists:flatmap(Fun, DBs).
% used by fn:collection
select_collection(#{trans := Agent} = Ctx, Uri) ->
DBs = xqldb_db:databases(Uri),
ok = read_lock_all(Agent, DBs),
Fun = fun(DB) ->
[
normalize_item(Ctx, P, DB)
|| P <- xqldb_path_table:all(DB)
]
end,
lists:flatten(lists:flatmap(Fun, DBs)).
select_collection(#{trans := Agent} = Ctx, Uri, Type) ->
DBs = xqldb_db:databases(Uri),
ok = read_lock_all(Agent, DBs),
Fun = fun(DB) ->
[
normalize_item(Ctx, P, DB)
|| P <- xqldb_path_table:all(DB, Type)
]
end,
lists:flatten(lists:flatmap(Fun, DBs)).
% used in test suite so create lock agent
delete_collection(Uri) ->
Agent = new_agent(),
delete_collection(#{trans => Agent}, Uri),
locks:end_transaction(Agent).
%% TODO remove Agent and put lock in PUL
delete_collection(#{trans := Agent}, Uri) ->
DBs = xqldb_db:databases(Uri),
ok = write_lock_all(Agent, DBs),
_ = [xqldb_path_table:delete_all(DB) || DB <- DBs],
ok.
read_lock_all(Agent, DBs) ->
read - lock all [ DbPid ]
Locks = [
{[DbPid], read}
|| #{db_name := DbPid} <- DBs
],
ok = locks:lock_objects(Agent, Locks),
% now wait for the read locks
await_locks(Agent).
write_lock_all(Agent, DBs) ->
Locks = [
{[DbPid, write], write}
|| #{db_name := DbPid} <- DBs
],
ok = locks:lock_objects(Agent, Locks),
% now wait for the write locks
await_locks(Agent).
read_lock_one(Agent, #{db_name := DbPid}, Name) ->
ok = locks:lock_nowait(Agent, [DbPid, Name], read),
% now wait for the read lock
await_locks(Agent).
xqldb_structure_index : ) , Counts ) ,
analyze(DocUri) when is_binary(DocUri) ->
{DbUri, _Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
{error, not_exists};
true ->
DB = xqldb_db:database(DbUri),
xqldb_structure_index:analyze(DB)
end.
select(#{trans := Agent} = Ctx, DocUri) when is_binary(DocUri) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
{error, not_exists};
true ->
DB = xqldb_db:database(DbUri),
ok = read_lock_one(Agent, DB, Name),
Rec = xqldb_path_table:lookup(DB, Name),
normalize_item(Ctx, Rec, Name, DB)
end.
exists_doc(#{trans := Agent}, DocUri) when is_binary(DocUri) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
false;
true ->
DB = xqldb_db:database(DbUri),
ok = read_lock_one(Agent, DB, Name),
case xqldb_path_table:lookup(DB, Name) of
{xml, _} ->
true;
_ ->
false
end
end.
% called from test suite
select_doc(DocUriL) ->
DocUri = unicode:characters_to_binary(DocUriL),
Agent = new_agent(),
Doc = select_doc(#{trans => Agent}, DocUri),
locks:end_transaction(Agent),
Doc.
select_doc(#{trans := Agent}, DocUri) when is_binary(DocUri) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
{error, not_exists};
true ->
DB = xqldb_db:database(DbUri),
ok = read_lock_one(Agent, DB, Name),
#{db_name := DBId} = DB = xqldb_db:database(DbUri),
case xqldb_path_table:lookup(DB, Name) of
{xml, Stamp} ->
NodeId = {DBId, {Name, Stamp}, []},
xqldb_nodes:get_doc(NodeId);
_ ->
{error, not_exists}
end
end.
% called from test suite
insert_doc(DocUriL, Filename) ->
DocUri = unicode:characters_to_binary(DocUriL),
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
Agent = new_agent(),
locks:lock_nowait(Agent, [DbUri, Name]),
ok = await_locks(Agent),
DB = xqldb_db:database(DbUri),
case xqldb_path_table:lookup(DB, Name) of
[] ->
try
Stamp = erlang:system_time(),
ok = xqldb_sax:parse_file(DB, Filename, Name, Stamp),
xqldb_path_table:insert(DB, {Name, xml, Stamp})
after
locks:end_transaction(Agent)
end;
_ ->
locks:end_transaction(Agent)
end.
% only used in xquts_SUITE
delete_doc(DocUriL) ->
DocUri = unicode:characters_to_binary(DocUriL),
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
Agent = new_agent(),
locks:lock_nowait(Agent, [DbUri, Name]),
ok = await_locks(Agent),
case xqldb_db:exists(DbUri) of
false ->
locks:end_transaction(Agent);
true ->
DB = xqldb_db:database(DbUri),
case xqldb_path_table:lookup(DB, Name) of
{xml, _} ->
commit([{DB, delete, Name}]),
locks:end_transaction(Agent);
_ ->
locks:end_transaction(Agent)
end
end.
% called from xqerl_update, already in transaction
return { DB , insert , InsertRec }
insert_doc_node(Node, DB, Name) ->
Stamp = erlang:system_time(),
ok = xqldb_sax:parse_node(DB, Node, Name, Stamp),
{DB, insert, {Name, xml, Stamp}}.
% used in test suite
exists_resource(DocUri) when is_binary(DocUri) ->
Agent = new_agent(),
Res = exists_resource(#{trans => Agent}, DocUri),
locks:end_transaction(Agent),
Res.
exists_resource(#{trans := Agent}, DocUri) when is_binary(DocUri) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
false;
true ->
DB = xqldb_db:database(DbUri),
ok = read_lock_one(Agent, DB, Name),
case xqldb_path_table:lookup(DB, Name) of
{text, _, _} -> true;
{raw, _, _} -> true;
{link, _, _} -> true;
_ -> false
end
end.
select_resource(#{trans := Agent} = Ctx, DocUri, Type) when is_binary(DocUri) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
% not locking anything
try
get_remote_resource(Ctx, DocUri, Type)
catch
_:_ ->
{error, not_exists}
end;
true ->
DB = xqldb_db:database(DbUri),
ok = read_lock_one(Agent, DB, Name),
case xqldb_path_table:lookup(DB, Name) of
[] ->
try
get_remote_resource(Ctx, DocUri, Type)
catch
_:_ ->
{error, not_exists}
end;
Rec ->
normalize_item(Ctx, Rec, Name, DB)
end
end.
% called from test suite for unparsed-text
insert_resource(DocUri, Bin) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
Agent = new_agent(),
locks:lock_nowait(Agent, [DbUri, Name]),
ok = await_locks(Agent),
DB = xqldb_db:database(DbUri),
case xqldb_path_table:lookup(DB, Name) of
[] ->
try
Stamp = erlang:system_time(),
NewPosSize = xqldb_resource_table:insert(DB, Bin),
xqldb_path_table:insert(DB, {Name, text, NewPosSize, Stamp})
after
locks:end_transaction(Agent)
end;
_ ->
locks:end_transaction(Agent)
end.
% this should be called in a transaction that already has
% write locks on everything.
% Returns {DB, insert, Rec}
insert_text_resource(DB, Name, Bin) ->
Stamp = erlang:system_time(),
NewPosSize = xqldb_resource_table:insert(DB, Bin),
{DB, insert, {Name, text, NewPosSize, Stamp}}.
insert_binary_resource(DB, Name, Bin) ->
Stamp = erlang:system_time(),
NewPosSize = xqldb_resource_table:insert(DB, Bin),
{DB, insert, {Name, raw, NewPosSize, Stamp}}.
% Returns {DB, insert, Rec}
link_resource(DB, Name, Filename) ->
Stamp = erlang:system_time(),
{DB, insert, {Name, link, Filename, Stamp}}.
exists_item(#{trans := Agent}, DocUri) when is_binary(DocUri) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
false;
true ->
DB = xqldb_db:database(DbUri),
ok = read_lock_one(Agent, DB, Name),
case xqldb_path_table:lookup(DB, Name) of
Term when is_tuple(Term) ->
true;
_ ->
false
end
end.
select_item(#{trans := Agent}, DocUri) when is_binary(DocUri) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
{error, not_exists};
true ->
DB = xqldb_db:database(DbUri),
ok = read_lock_one(Agent, DB, Name),
case xqldb_path_table:lookup(DB, Name) of
{item, _, PosSize} ->
Res = xqldb_resource_table:get(DB, PosSize),
binary_to_term(Res, [safe]);
[] ->
{error, not_exists}
end
end.
create_or_open_db(DbUri) ->
case xqldb_db:exists(DbUri) of
false ->
_ = xqldb_db:open(DbUri),
xqldb_db:database(DbUri);
true ->
xqldb_db:database(DbUri)
end.
% called from test suite out of transaction
insert_item(DocUri, Item) when is_binary(DocUri) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
DB = create_or_open_db(DbUri),
Stamp = erlang:system_time(),
Bin = term_to_binary(Item),
PosSize = xqldb_resource_table:insert(DB, Bin),
xqldb_path_table:insert(DB, {Name, item, PosSize, Stamp}).
% Returns {DB, insert, Rec}
insert_item(DB, Name, Item) ->
Stamp = erlang:system_time(),
Bin = term_to_binary(Item),
PosSize = xqldb_resource_table:insert(DB, Bin),
{DB, insert, {Name, item, PosSize, Stamp}}.
insert_doc_as_collection(DocUri, Filename, BasePath) when is_binary(DocUri) ->
ReplyTo = self(),
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
DB = create_or_open_db(DbUri),
InsFun = fun(Events) ->
F = fun() ->
Self = self(),
uuid:new(Self),
UUID = uuid:get_v4_urandom(),
NName = uuid:uuid_to_string(UUID, binary_standard),
Name1 = <<Name/binary, $-, NName/binary>>,
Stamp = erlang:system_time(),
xqldb_sax:parse_list(DB, Events, Name1, Stamp),
_ = xqldb_path_table:insert(DB, {Name1, xml, Stamp}),
ReplyTo ! {Self, done}
end,
erlang:spawn_link(F)
end,
Path = path_to_stack(BasePath),
xqldb_sax:split_parse_file(Filename, InsFun, Path).
import_from_directory(BaseUri, Directory) when is_list(Directory) ->
Dir = filename:absname(Directory),
% all relative filenames
All = filelib:wildcard("**/*.xml", Dir),
Grouped = group(All, dict:new(), Dir, list_to_binary(BaseUri)),
Parent = self(),
Fun = fun(DbUri, Vals, Acc) ->
DB =
case xqldb_db:exists(DbUri) of
true ->
xqldb_db:database(DbUri);
false ->
{ok, _, _} = xqldb_db:open(DbUri),
xqldb_db:database(DbUri)
end,
Fun1 = fun() ->
{Agent, _} = locks:begin_transaction(),
% lock entire database
{ok, _} = locks:lock(Agent, [DbUri]),
[
try
Stamp = erlang:system_time(),
xqldb_sax:parse_file(DB, FN, Name, Stamp),
_ = xqldb_path_table:insert(DB, {Name, xml, Stamp}),
ok
catch
_:_ -> ok
end
|| {FN, Name} <- Vals
],
locks:end_transaction(Agent),
Parent ! {done, self(), DbUri}
end,
Child = erlang:spawn(Fun1),
[{Child, DbUri} | Acc]
end,
Uris = dict:fold(Fun, [], Grouped),
collect_uris(Uris).
%% ====================================================================
Internal functions
%% ====================================================================
collect_uris([]) ->
ok;
collect_uris([{Pid, Uri} | T]) ->
receive
{done, Pid, Uri} ->
collect_uris(T)
after 600000 ->
erlang:exit(Pid, timeout),
collect_uris(T)
end.
path_to_stack(Path) when is_binary(Path) ->
path_to_stack(binary_to_list(Path));
path_to_stack(Path) when is_list(Path) ->
Split = filename:split(Path),
compile_stack(Split, []).
compile_stack(["/" | Ts], Acc) ->
compile_stack(Ts, [document | Acc]);
compile_stack([QName | Ts], Acc) ->
case string:split(QName, ":") of
[Prefix, Name] ->
compile_stack(Ts, [{element, Prefix, Name} | Acc]);
[Name] ->
compile_stack(Ts, [{element, [], Name} | Acc])
end;
compile_stack([], Acc) ->
Acc.
group([], Dict, _, _) ->
Dict;
group([F | Fs], Dict, FileDir, BaseUri0) ->
BaseUri =
case binary:last(BaseUri0) of
$/ ->
BaseUri0;
_ ->
<<BaseUri0/binary, $/>>
end,
FileName = filename:join([FileDir, F]),
DocUri = xqldb_lib:join_uris(BaseUri, unicode:characters_to_binary(F)),
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
Value = {FileName, Name},
group(Fs, dict:append(DbUri, Value, Dict), FileDir, BaseUri).
get_remote_resource(#{tab := Tab}, Uri, Type) ->
Key = {remote, Type, Uri},
case ets:lookup(Tab, Key) of
[Obj] ->
Obj;
[] ->
Res = xqerl_lib:get_remote_resource(Uri, Type),
ets:insert(Tab, {Key, Res}),
Res
end.
new_agent() ->
{ok, Agent} = locks_agent:start([
{abort_on_deadlock, true},
{link, true}
]),
Agent.
%% Takes a list of {DB, insert | delete, Rec} where Rec is what is to inserted to the path table or
% the name of the Rec by delete.
-spec commit([{map(), insert | delete, tuple() | binary()}]) -> ok.
commit(Transaction) ->
_ = [
case InsDel of
insert ->
xqldb_path_table:delete(DB, element(1, Rec)),
xqldb_path_table:insert(DB, Rec);
delete when Rec =:= all ->
xqldb_path_table:delete_all(DB);
delete ->
% Rec is the Name
xqldb_path_table:delete(DB, Rec)
end
|| {DB, InsDel, Rec} <- Transaction
],
ok.
await_locks(Agent) ->
case locks_agent:transaction_status(Agent) of
no_locks ->
ok;
_ ->
{have_all_locks, _} = locks:await_all_locks(Agent),
ok
end.
normalize_item(_, {xml, Sp}, N, #{db_name := DbPid}) ->
NodeId = {DbPid, {N, Sp}, []},
xqldb_nodes:get_doc(NodeId);
normalize_item(Ctx, {A, B, C}, N, DB) ->
normalize_item(Ctx, {N, A, B, C}, DB).
given path table record and DB , return the item
normalize_item(_, {N, xml, Sp}, #{db_name := DbPid}) ->
NodeId = {DbPid, {N, Sp}, []},
xqldb_nodes:get_doc(NodeId);
normalize_item(#{tab := Tab}, {_, link, _, Filename}, _) ->
Key = {remote, link, Filename},
case ets:lookup(Tab, Key) of
[Obj] ->
?BIN(Obj);
[] ->
{ok, Bin} = file:read_file(Filename),
ets:insert(Tab, {Key, Bin}),
?BIN(Bin)
end;
normalize_item(_, {_, text, _, {Pos, Len}}, DB) ->
xqldb_resource_table:get(DB, {Pos, Len});
normalize_item(_, {_, raw, _, {Pos, Len}}, DB) ->
Bin = xqldb_resource_table:get(DB, {Pos, Len}),
?BIN(Bin);
normalize_item(_, {_, item, _, {Pos, Len}}, DB) ->
Res = xqldb_resource_table:get(DB, {Pos, Len}),
binary_to_term(Res, [safe]).
| null | https://raw.githubusercontent.com/zadean/xqerl/06c651ec832d0ac2b77bef92c1b4ab14d8da8883/src/xqldb_dml.erl | erlang |
Reading functions set the read locks in the function.
Writing functions should only be called from a Pending Update List
where the write locks have already been acquired.
====================================================================
API functions
====================================================================
XML
only used in test
Text/Binary Resource - Internal/External
XDM Values
used by fn:uri-collection
used by fn:collection
used in test suite so create lock agent
TODO remove Agent and put lock in PUL
now wait for the read locks
now wait for the write locks
now wait for the read lock
called from test suite
called from test suite
only used in xquts_SUITE
called from xqerl_update, already in transaction
used in test suite
not locking anything
called from test suite for unparsed-text
this should be called in a transaction that already has
write locks on everything.
Returns {DB, insert, Rec}
Returns {DB, insert, Rec}
called from test suite out of transaction
Returns {DB, insert, Rec}
all relative filenames
lock entire database
====================================================================
====================================================================
Takes a list of {DB, insert | delete, Rec} where Rec is what is to inserted to the path table or
the name of the Rec by delete.
Rec is the Name | Copyright ( c ) 2018 - 2020 .
SPDX - FileCopyrightText : 2022
SPDX - License - Identifier : Apache-2.0
-module(xqldb_dml).
-include("xqerl_db.hrl").
-export([analyze/1]).
-export([
commit/1,
select_paths/2,
select_collection/2, select_collection/3,
delete_collection/1, delete_collection/2,
import_from_directory/2,
insert_doc_as_collection/3
]).
Generic
-export([select/2]).
-export([delete_doc/1]).
-export([
exists_doc/2,
select_doc/1, select_doc/2
]).
-export([
insert_doc/2,
insert_doc_node/3
]).
-export([
exists_resource/1, exists_resource/2,
select_resource/3
]).
-export([
insert_resource/2,
insert_text_resource/3,
insert_binary_resource/3,
link_resource/3
]).
-export([
exists_item/2,
select_item/2
]).
-export([insert_item/2, insert_item/3]).
-define(BIN(Bin), #xqAtomicValue{type = 'xs:base64Binary', value = Bin}).
select_paths(#{trans := Agent}, Uri) ->
DBs = xqldb_db:databases(Uri),
ok = read_lock_all(Agent, DBs),
Fun = fun(DB) ->
DbUri = xqldb_path_table:uri(DB),
[
xqldb_uri:join(DbUri, element(1, Rec))
|| Rec <- xqldb_path_table:all(DB)
]
end,
lists:flatmap(Fun, DBs).
select_collection(#{trans := Agent} = Ctx, Uri) ->
DBs = xqldb_db:databases(Uri),
ok = read_lock_all(Agent, DBs),
Fun = fun(DB) ->
[
normalize_item(Ctx, P, DB)
|| P <- xqldb_path_table:all(DB)
]
end,
lists:flatten(lists:flatmap(Fun, DBs)).
select_collection(#{trans := Agent} = Ctx, Uri, Type) ->
DBs = xqldb_db:databases(Uri),
ok = read_lock_all(Agent, DBs),
Fun = fun(DB) ->
[
normalize_item(Ctx, P, DB)
|| P <- xqldb_path_table:all(DB, Type)
]
end,
lists:flatten(lists:flatmap(Fun, DBs)).
delete_collection(Uri) ->
Agent = new_agent(),
delete_collection(#{trans => Agent}, Uri),
locks:end_transaction(Agent).
delete_collection(#{trans := Agent}, Uri) ->
DBs = xqldb_db:databases(Uri),
ok = write_lock_all(Agent, DBs),
_ = [xqldb_path_table:delete_all(DB) || DB <- DBs],
ok.
read_lock_all(Agent, DBs) ->
read - lock all [ DbPid ]
Locks = [
{[DbPid], read}
|| #{db_name := DbPid} <- DBs
],
ok = locks:lock_objects(Agent, Locks),
await_locks(Agent).
write_lock_all(Agent, DBs) ->
Locks = [
{[DbPid, write], write}
|| #{db_name := DbPid} <- DBs
],
ok = locks:lock_objects(Agent, Locks),
await_locks(Agent).
read_lock_one(Agent, #{db_name := DbPid}, Name) ->
ok = locks:lock_nowait(Agent, [DbPid, Name], read),
await_locks(Agent).
xqldb_structure_index : ) , Counts ) ,
analyze(DocUri) when is_binary(DocUri) ->
{DbUri, _Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
{error, not_exists};
true ->
DB = xqldb_db:database(DbUri),
xqldb_structure_index:analyze(DB)
end.
select(#{trans := Agent} = Ctx, DocUri) when is_binary(DocUri) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
{error, not_exists};
true ->
DB = xqldb_db:database(DbUri),
ok = read_lock_one(Agent, DB, Name),
Rec = xqldb_path_table:lookup(DB, Name),
normalize_item(Ctx, Rec, Name, DB)
end.
exists_doc(#{trans := Agent}, DocUri) when is_binary(DocUri) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
false;
true ->
DB = xqldb_db:database(DbUri),
ok = read_lock_one(Agent, DB, Name),
case xqldb_path_table:lookup(DB, Name) of
{xml, _} ->
true;
_ ->
false
end
end.
select_doc(DocUriL) ->
DocUri = unicode:characters_to_binary(DocUriL),
Agent = new_agent(),
Doc = select_doc(#{trans => Agent}, DocUri),
locks:end_transaction(Agent),
Doc.
select_doc(#{trans := Agent}, DocUri) when is_binary(DocUri) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
{error, not_exists};
true ->
DB = xqldb_db:database(DbUri),
ok = read_lock_one(Agent, DB, Name),
#{db_name := DBId} = DB = xqldb_db:database(DbUri),
case xqldb_path_table:lookup(DB, Name) of
{xml, Stamp} ->
NodeId = {DBId, {Name, Stamp}, []},
xqldb_nodes:get_doc(NodeId);
_ ->
{error, not_exists}
end
end.
insert_doc(DocUriL, Filename) ->
DocUri = unicode:characters_to_binary(DocUriL),
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
Agent = new_agent(),
locks:lock_nowait(Agent, [DbUri, Name]),
ok = await_locks(Agent),
DB = xqldb_db:database(DbUri),
case xqldb_path_table:lookup(DB, Name) of
[] ->
try
Stamp = erlang:system_time(),
ok = xqldb_sax:parse_file(DB, Filename, Name, Stamp),
xqldb_path_table:insert(DB, {Name, xml, Stamp})
after
locks:end_transaction(Agent)
end;
_ ->
locks:end_transaction(Agent)
end.
delete_doc(DocUriL) ->
DocUri = unicode:characters_to_binary(DocUriL),
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
Agent = new_agent(),
locks:lock_nowait(Agent, [DbUri, Name]),
ok = await_locks(Agent),
case xqldb_db:exists(DbUri) of
false ->
locks:end_transaction(Agent);
true ->
DB = xqldb_db:database(DbUri),
case xqldb_path_table:lookup(DB, Name) of
{xml, _} ->
commit([{DB, delete, Name}]),
locks:end_transaction(Agent);
_ ->
locks:end_transaction(Agent)
end
end.
return { DB , insert , InsertRec }
insert_doc_node(Node, DB, Name) ->
Stamp = erlang:system_time(),
ok = xqldb_sax:parse_node(DB, Node, Name, Stamp),
{DB, insert, {Name, xml, Stamp}}.
exists_resource(DocUri) when is_binary(DocUri) ->
Agent = new_agent(),
Res = exists_resource(#{trans => Agent}, DocUri),
locks:end_transaction(Agent),
Res.
exists_resource(#{trans := Agent}, DocUri) when is_binary(DocUri) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
false;
true ->
DB = xqldb_db:database(DbUri),
ok = read_lock_one(Agent, DB, Name),
case xqldb_path_table:lookup(DB, Name) of
{text, _, _} -> true;
{raw, _, _} -> true;
{link, _, _} -> true;
_ -> false
end
end.
select_resource(#{trans := Agent} = Ctx, DocUri, Type) when is_binary(DocUri) ->
{DbUri, Name} = xqldb_uri:split_uri(DocUri),
case xqldb_db:exists(DbUri) of
false ->
try
get_remote_resource(Ctx, DocUri, Type)
catch
_:_ ->
{error, not_exists}
end;
true ->
DB = xqldb_db:database(DbUri),
ok = read_lock_one(Agent, DB, Name),
case xqldb_path_table:lookup(DB, Name) of
[] ->
try
get_remote_resource(Ctx, DocUri, Type)
catch
_:_ ->
{error, not_exists}
end;
Rec ->
normalize_item(Ctx, Rec, Name, DB)
end
end.
%% @doc Insert Bin as a `text` resource at DocUri unless an entry with
%% that name already exists (existing entries are never overwritten).
%% Locks DbUri and Name for the duration; try/after guarantees the
%% transaction ends even if the insert crashes.
%% NOTE(review): unlike delete_doc/1 this does not check
%% xqldb_db:exists/1 before calling xqldb_db:database/1 -- presumably
%% callers guarantee the database is open; confirm.
insert_resource(DocUri, Bin) ->
    {DbUri, Name} = xqldb_uri:split_uri(DocUri),
    Agent = new_agent(),
    locks:lock_nowait(Agent, [DbUri, Name]),
    ok = await_locks(Agent),
    DB = xqldb_db:database(DbUri),
    case xqldb_path_table:lookup(DB, Name) of
        [] ->
            try
                Stamp = erlang:system_time(),
                NewPosSize = xqldb_resource_table:insert(DB, Bin),
                xqldb_path_table:insert(DB, {Name, text, NewPosSize, Stamp})
            after
                locks:end_transaction(Agent)
            end;
        _ ->
            locks:end_transaction(Agent)
    end.
%% Store Bin in the resource table and return the {DB, insert, Record}
%% tuple for commit/1 (a `text` path-table record).
insert_text_resource(DB, Name, Bin) ->
    Timestamp = erlang:system_time(),
    PosSize = xqldb_resource_table:insert(DB, Bin),
    {DB, insert, {Name, text, PosSize, Timestamp}}.
%% Same as insert_text_resource/3 but records the entry as `raw` binary.
insert_binary_resource(DB, Name, Bin) ->
    Timestamp = erlang:system_time(),
    PosSize = xqldb_resource_table:insert(DB, Bin),
    {DB, insert, {Name, raw, PosSize, Timestamp}}.
%% Record a `link` entry pointing at an external Filename; nothing is
%% copied into the resource table.
link_resource(DB, Name, Filename) ->
    Timestamp = erlang:system_time(),
    {DB, insert, {Name, link, Filename, Timestamp}}.
%% True when the path table holds any record (a tuple) under Name in the
%% document's database; false for an unknown name or database.
exists_item(#{trans := Agent}, DocUri) when is_binary(DocUri) ->
    {DbUri, Name} = xqldb_uri:split_uri(DocUri),
    case xqldb_db:exists(DbUri) of
        false ->
            false;
        true ->
            DB = xqldb_db:database(DbUri),
            ok = read_lock_one(Agent, DB, Name),
            %% any tuple record counts as existing; [] (miss) does not
            is_tuple(xqldb_path_table:lookup(DB, Name))
    end.
%% @doc Load a stored Erlang term (`item` entry) for DocUri from the
%% resource table and decode it with binary_to_term/2.
%% NOTE(review): only {item, _, PosSize} and [] are matched; any other
%% record shape raises case_clause here -- presumably callers only use
%% this for item entries; confirm.
select_item(#{trans := Agent}, DocUri) when is_binary(DocUri) ->
    {DbUri, Name} = xqldb_uri:split_uri(DocUri),
    case xqldb_db:exists(DbUri) of
        false ->
            {error, not_exists};
        true ->
            DB = xqldb_db:database(DbUri),
            ok = read_lock_one(Agent, DB, Name),
            case xqldb_path_table:lookup(DB, Name) of
                {item, _, PosSize} ->
                    Res = xqldb_resource_table:get(DB, PosSize),
                    binary_to_term(Res, [safe]);
                [] ->
                    {error, not_exists}
            end
    end.
%% Return the database handle for DbUri, opening (creating) it first
%% when it does not exist yet.
create_or_open_db(DbUri) ->
    case xqldb_db:exists(DbUri) of
        true -> ok;
        false -> _ = xqldb_db:open(DbUri)
    end,
    xqldb_db:database(DbUri).
%% @doc Serialize Item with term_to_binary/1 and store it immediately as
%% an `item` entry at DocUri, opening the database if needed. Unlike
%% insert_item/3 this writes the path-table record directly instead of
%% returning a tuple for commit/1.
insert_item(DocUri, Item) when is_binary(DocUri) ->
    {DbUri, Name} = xqldb_uri:split_uri(DocUri),
    DB = create_or_open_db(DbUri),
    Stamp = erlang:system_time(),
    Bin = term_to_binary(Item),
    PosSize = xqldb_resource_table:insert(DB, Bin),
    xqldb_path_table:insert(DB, {Name, item, PosSize, Stamp}).
%% Serialize Item into the resource table and return the {DB, insert,
%% Record} tuple for commit/1 (an `item` path-table record).
insert_item(DB, Name, Item) ->
    Timestamp = erlang:system_time(),
    Encoded = term_to_binary(Item),
    PosSize = xqldb_resource_table:insert(DB, Encoded),
    {DB, insert, {Name, item, PosSize, Timestamp}}.
%% @doc Split the XML file at Filename on the element path BasePath and
%% store each fragment as its own document. Each fragment gets a name of
%% the form <Name>-<uuid-v4> and is parsed in a linked child process;
%% children message ReplyTo when done.
%% NOTE(review): children reply {Self, done}, which is not the
%% {done, Pid, Uri} shape collect_uris/1 consumes -- presumably a
%% different caller collects these replies; confirm.
insert_doc_as_collection(DocUri, Filename, BasePath) when is_binary(DocUri) ->
    ReplyTo = self(),
    {DbUri, Name} = xqldb_uri:split_uri(DocUri),
    DB = create_or_open_db(DbUri),
    InsFun = fun(Events) ->
        F = fun() ->
            Self = self(),
            uuid:new(Self),
            UUID = uuid:get_v4_urandom(),
            NName = uuid:uuid_to_string(UUID, binary_standard),
            %% fragment name: "<collection name>-<uuid>"
            Name1 = <<Name/binary, $-, NName/binary>>,
            Stamp = erlang:system_time(),
            xqldb_sax:parse_list(DB, Events, Name1, Stamp),
            _ = xqldb_path_table:insert(DB, {Name1, xml, Stamp}),
            ReplyTo ! {Self, done}
        end,
        erlang:spawn_link(F)
    end,
    Path = path_to_stack(BasePath),
    xqldb_sax:split_parse_file(Filename, InsFun, Path).
%% @doc Bulk-import every *.xml file under Directory, rooted at BaseUri.
%% Files are grouped per target database URI; one worker process is
%% spawned per database, each importing its files under a single lock
%% transaction. Per-file failures are swallowed so one bad document does
%% not abort the whole import. Blocks until all workers report back (or
%% time out) via collect_uris/1.
import_from_directory(BaseUri, Directory) when is_list(Directory) ->
    Dir = filename:absname(Directory),
    All = filelib:wildcard("**/*.xml", Dir),
    Grouped = group(All, dict:new(), Dir, list_to_binary(BaseUri)),
    Parent = self(),
    Fun = fun(DbUri, Vals, Acc) ->
        DB =
            case xqldb_db:exists(DbUri) of
                true ->
                    xqldb_db:database(DbUri);
                false ->
                    {ok, _, _} = xqldb_db:open(DbUri),
                    xqldb_db:database(DbUri)
            end,
        Fun1 = fun() ->
            {Agent, _} = locks:begin_transaction(),
            {ok, _} = locks:lock(Agent, [DbUri]),
            [
                try
                    Stamp = erlang:system_time(),
                    xqldb_sax:parse_file(DB, FN, Name, Stamp),
                    _ = xqldb_path_table:insert(DB, {Name, xml, Stamp}),
                    ok
                catch
                    %% best-effort: skip files that fail to parse/insert
                    _:_ -> ok
                end
             || {FN, Name} <- Vals
            ],
            locks:end_transaction(Agent),
            Parent ! {done, self(), DbUri}
        end,
        Child = erlang:spawn(Fun1),
        [{Child, DbUri} | Acc]
    end,
    Uris = dict:fold(Fun, [], Grouped),
    collect_uris(Uris).
%% Internal functions
%% Wait for each spawned importer to report {done, Pid, Uri}; a worker
%% that stays silent for 10 minutes (600000 ms) is killed and skipped.
collect_uris([]) ->
    ok;
collect_uris([{Pid, Uri} | T]) ->
    receive
        {done, Pid, Uri} ->
            collect_uris(T)
    after 600000 ->
        erlang:exit(Pid, timeout),
        collect_uris(T)
    end.
%% Normalize a path (binary or charlist) into the reversed element stack
%% produced by compile_stack/2.
path_to_stack(Path) when is_binary(Path) ->
    path_to_stack(binary_to_list(Path));
path_to_stack(Path) when is_list(Path) ->
    compile_stack(filename:split(Path), []).
%% Turn path components into a reversed stack of matcher terms:
%% "/" becomes `document`, "pfx:name" an {element, Prefix, Name}, and a
%% bare name an {element, [], Name}. Accumulator prepend reverses order.
compile_stack(["/" | Ts], Acc) ->
    compile_stack(Ts, [document | Acc]);
compile_stack([QName | Ts], Acc) ->
    case string:split(QName, ":") of
        [Prefix, Name] ->
            compile_stack(Ts, [{element, Prefix, Name} | Acc]);
        [Name] ->
            compile_stack(Ts, [{element, [], Name} | Acc])
    end;
compile_stack([], Acc) ->
    Acc.
%% Group relative file names by the database URI they map to, producing
%% dict entries DbUri => [{AbsoluteFileName, DocName}]. BaseUri is
%% normalized to end in "/" (re-checked each iteration; after the first
%% pass the $/ branch always hits, so this is cheap).
group([], Dict, _, _) ->
    Dict;
group([F | Fs], Dict, FileDir, BaseUri0) ->
    BaseUri =
        case binary:last(BaseUri0) of
            $/ ->
                BaseUri0;
            _ ->
                <<BaseUri0/binary, $/>>
        end,
    FileName = filename:join([FileDir, F]),
    DocUri = xqldb_lib:join_uris(BaseUri, unicode:characters_to_binary(F)),
    {DbUri, Name} = xqldb_uri:split_uri(DocUri),
    Value = {FileName, Name},
    group(Fs, dict:append(DbUri, Value, Dict), FileDir, BaseUri).
%% Fetch a remote resource, memoizing the result in the context's ETS
%% table under {remote, Type, Uri} so repeated lookups hit the cache.
get_remote_resource(#{tab := Tab}, Uri, Type) ->
    Key = {remote, Type, Uri},
    case ets:lookup(Tab, Key) of
        [Obj] ->
            %% cache hit: note this returns the whole {Key, Res} object,
            %% while a miss returns only Res -- shapes differ; confirm
            %% callers handle both (NOTE(review)).
            Obj;
        [] ->
            Res = xqerl_lib:get_remote_resource(Uri, Type),
            ets:insert(Tab, {Key, Res}),
            Res
    end.
%% Start a fresh locks agent, linked to the caller and configured to
%% abort on deadlock.
new_agent() ->
    Opts = [{abort_on_deadlock, true}, {link, true}],
    {ok, Agent} = locks_agent:start(Opts),
    Agent.
-spec commit([{map(), insert | delete, tuple() | binary()}]) -> ok.
%% @doc Apply a batch of path-table operations. `insert` replaces any
%% existing record under the same key (delete-then-insert); `delete`
%% with Rec =:= all wipes the whole table, otherwise removes one entry.
commit(Transaction) ->
    _ = [
        case InsDel of
            insert ->
                %% drop a stale record with the same key before inserting
                xqldb_path_table:delete(DB, element(1, Rec)),
                xqldb_path_table:insert(DB, Rec);
            delete when Rec =:= all ->
                xqldb_path_table:delete_all(DB);
            delete ->
                xqldb_path_table:delete(DB, Rec)
        end
     || {DB, InsDel, Rec} <- Transaction
    ],
    ok.
%% Block until the agent holds every lock it has requested; a no-lock
%% transaction returns immediately.
await_locks(Agent) ->
    case locks_agent:transaction_status(Agent) of
        no_locks ->
            ok;
        _ ->
            {have_all_locks, _} = locks:await_all_locks(Agent),
            ok
    end.
%% Normalize a record looked up separately from its name: an {xml, Sp}
%% record becomes a document node; any other 3-tuple record is re-packed
%% with the name and delegated to normalize_item/3.
normalize_item(_, {xml, Sp}, N, #{db_name := DbPid}) ->
    NodeId = {DbPid, {N, Sp}, []},
    xqldb_nodes:get_doc(NodeId);
normalize_item(Ctx, {A, B, C}, N, DB) ->
    normalize_item(Ctx, {N, A, B, C}, DB).
%% given path table record and DB, return the item
%% Turn a named path-table record into the item it denotes:
%% xml  -> document node; link -> file contents (cached in the context's
%% ETS table); text -> stored string; raw -> stored binary (wrapped in
%% the ?BIN macro); item -> binary_to_term-decoded Erlang term.
normalize_item(_, {N, xml, Sp}, #{db_name := DbPid}) ->
    NodeId = {DbPid, {N, Sp}, []},
    xqldb_nodes:get_doc(NodeId);
normalize_item(#{tab := Tab}, {_, link, _, Filename}, _) ->
    Key = {remote, link, Filename},
    case ets:lookup(Tab, Key) of
        [Obj] ->
            %% NOTE(review): cache hit wraps the {Key, Bin} object, the
            %% miss path wraps just Bin -- presumably ?BIN extracts what
            %% it needs; confirm.
            ?BIN(Obj);
        [] ->
            {ok, Bin} = file:read_file(Filename),
            ets:insert(Tab, {Key, Bin}),
            ?BIN(Bin)
    end;
normalize_item(_, {_, text, _, {Pos, Len}}, DB) ->
    xqldb_resource_table:get(DB, {Pos, Len});
normalize_item(_, {_, raw, _, {Pos, Len}}, DB) ->
    Bin = xqldb_resource_table:get(DB, {Pos, Len}),
    ?BIN(Bin);
normalize_item(_, {_, item, _, {Pos, Len}}, DB) ->
    Res = xqldb_resource_table:get(DB, {Pos, Len}),
    binary_to_term(Res, [safe]).
|
2364928a82aa58096b0925b5c5a6b72e060f5c0b68569b923263e0928929c7e2 | oshyshko/adventofcode | D13.hs | module Y15.D13 where
import qualified Data.HashMap.Strict as M
import Imports
import Parser
-- | A guest's name as it appears in the puzzle input.
type Guest = String
type Attr = ((Guest, Guest), Int) -- (from, to), attractiveness)
-- would lose 75 happiness units by sitting next to .
-- would gain 71 happiness units by sitting next to .
-- | Parser for the puzzle input: one attractiveness fact per line, e.g.
-- "A would gain 71 happiness units by sitting next to B."
attrs :: Parser [Attr]
attrs =
    attr `endBy` eol
  where
    attr :: Parser Attr
    attr =
        (\from sign n to -> ((from, to), sign * n))
        <$> many letter <* string " would "
        <*> (s2sign <$> many letter) <* string " " -- gain / lose
        <*> natural <* string " happiness units by sitting next to "
        <*> many letter <* string "."
    -- map the verb to the sign of the happiness delta
    s2sign = \case
        "gain" -> 1
        "lose" -> -1
        x -> error $ "Unknown sign: " ++ x
-- | All distinct guest names mentioned in the facts, sorted.
attrs2guests :: [Attr] -> [Guest]
attrs2guests ms = sort (nub [from | ((from, _), _) <- ms])
-- | Highest total happiness over all circular seating arrangements.
-- Tries every permutation of the guest list (O(n!)), which is fine for
-- the puzzle's input size.
maxHappiness :: [Attr] -> Int
maxHappiness ms =
    maximum $ table2happiness <$> permutations (attrs2guests ms)
  where
    fta :: HashMap (Guest, Guest) Int -- (from, to) -> attractiveness
    fta = M.fromList ms
    -- Happiness of one seating order around a round table: both members
    -- of every adjacent pair contribute, and the list is closed into a
    -- ring by repeating the first guest at the end.
    table2happiness :: [Guest] -> Int
    table2happiness guests =
        guests ++ take 1 guests -- wrap around one guest
        & divvy 2 1
        & map (\[a,b] -> fta ! (a,b) + fta ! (b,a))
        & sum
-- like Data.HashMap.Strict.(!), but prints missing key in case of error
-- | Lookup that reports the missing key in its error message.
(!) :: (Hashable k, Show k) => HashMap k v -> k -> v
(!) m k =
    case M.lookup k m of
        Just v  -> v
        Nothing -> error ("Couldn't find key: " ++ show k)
-- | Part 1: best seating for exactly the guests in the input.
solve1 :: String -> Int
solve1 = maxHappiness . parseOrDie attrs
-- | Part 2: same, but with an apathetic "Me" (0 happiness in both
-- directions) added against every guest before solving.
solve2 :: String -> Int
solve2 =
    maxHappiness . addSelf . parseOrDie attrs
  where
    addSelf ms =
        ms ++ map (\g -> (("Me", g), 0)) (attrs2guests ms)
           ++ map (\g -> ((g, "Me"), 0)) (attrs2guests ms)
| null | https://raw.githubusercontent.com/oshyshko/adventofcode/fc0ce87c1dfffc30647763fa5b84ff9fcf58b8b3/src/Y15/D13.hs | haskell | (from, to), attractiveness)
gain / lose
(from, to) -> attractiveness | module Y15.D13 where
import qualified Data.HashMap.Strict as M
import Imports
import Parser
type Guest = String
would lose 75 happiness units by sitting next to .
would gain 71 happiness units by sitting next to .
attrs :: Parser [Attr]
attrs =
attr `endBy` eol
where
attr :: Parser Attr
attr =
(\from sign n to -> ((from, to), sign * n))
<$> many letter <* string " would "
<*> natural <* string " happiness units by sitting next to "
<*> many letter <* string "."
s2sign = \case
"gain" -> 1
"lose" -> -1
x -> error $ "Unknown sign: " ++ x
attrs2guests :: [Attr] -> [Guest]
attrs2guests = sort . nub . map (fst . fst)
maxHappiness :: [Attr] -> Int
maxHappiness ms =
maximum $ table2happiness <$> permutations (attrs2guests ms)
where
fta = M.fromList ms
table2happiness :: [Guest] -> Int
table2happiness guests =
wrap around one guest
& divvy 2 1
& map (\[a,b] -> fta ! (a,b) + fta ! (b,a))
& sum
like Data . . Strict . ! , but prints missing key in case of error
(!) :: (Hashable k, Show k) => HashMap k v -> k -> v
(!) m k = fromMaybe
(error $ "Couldn't find key: " ++ show k)
(M.lookup k m)
solve1 :: String -> Int
solve1 = maxHappiness . parseOrDie attrs
solve2 :: String -> Int
solve2 =
maxHappiness . addSelf . parseOrDie attrs
where
addSelf ms =
ms ++ map (\g -> (("Me", g), 0)) (attrs2guests ms)
++ map (\g -> ((g, "Me"), 0)) (attrs2guests ms)
|
b55e710d3009593a80e42f4a9c246f2f0b7255509b42fd71403e739d98bcc6ac | iokasimov/pandora | Contravariant.hs | module Pandora.Pattern.Functor.Contravariant where
import Pandora.Pattern.Category (Category)
import Pandora.Pattern.Betwixt (Betwixt)
infixl 1 >-|------
infixl 2 >-|-----
infixl 3 >-|----
infixl 4 >-|---, >-|-|-
infixl 5 >-|--
infixl 6 >-|-
{- |
> When providing a new instance, you should ensure it satisfies:
> * Exactly morphism: (identity >-|-) ≡ identity
> * Interpreted of morphisms: (f >-|-) . (g >-|-) ≡ (g . f >-|-)
-}
class (Category source, Category target) => Contravariant source target t where
(>-|-) :: source a b -> target (t b) (t a)
(>-|--), (>-|---), (>-|----), (>-|-----), (>-|------), (>-|-------), (>-|--------) :: source a b -> target (t b) (t a)
(>-|--) = (>-|-)
(>-|---) = (>-|-)
(>-|----) = (>-|-)
(>-|-----) = (>-|-)
(>-|------) = (>-|-)
(>-|-------) = (>-|-)
(>-|--------) = (>-|-)
(>-|-|-) :: (Contravariant source (Betwixt source target) u, Contravariant (Betwixt source target) target t)
=> source a b -> target (t (u a)) (t (u b))
(>-|-|-) s = ((>-|-) ((>-|-) @source @(Betwixt source target) @_ s))
(>$<) :: Contravariant source target t => source a b -> target (t b) (t a)
(>$<) = (>-|-)
(>$$<) :: (Contravariant source target t, Contravariant source (Betwixt source target) u, Contravariant (Betwixt source target) target t) => source a b -> target (t (u a)) (t (u b))
(>$$<) = (>-|-|-)
| null | https://raw.githubusercontent.com/iokasimov/pandora/578ec3b831aaad201e1d32eed72f4ca7229b4bea/Pandora/Pattern/Functor/Contravariant.hs | haskell | ----
---
--
-, >-|-|-
|
> When providing a new instance, you should ensure it satisfies:
> * Exactly morphism: (identity >-|-) ≡ identity
> * Interpreted of morphisms: (f >-|-) . (g >-|-) ≡ (g . f >-|-)
), (>-|---), (>-|----), (>-|-----), (>-|------), (>-|-------), (>-|--------) :: source a b -> target (t b) (t a)
) = (>-|-)
-) = (>-|-)
--) = (>-|-)
---) = (>-|-)
----) = (>-|-)
-----) = (>-|-)
------) = (>-|-) | module Pandora.Pattern.Functor.Contravariant where
import Pandora.Pattern.Category (Category)
import Pandora.Pattern.Betwixt (Betwixt)
infixl 6 >-|-
class (Category source, Category target) => Contravariant source target t where
(>-|-) :: source a b -> target (t b) (t a)
(>-|-|-) :: (Contravariant source (Betwixt source target) u, Contravariant (Betwixt source target) target t)
=> source a b -> target (t (u a)) (t (u b))
(>-|-|-) s = ((>-|-) ((>-|-) @source @(Betwixt source target) @_ s))
(>$<) :: Contravariant source target t => source a b -> target (t b) (t a)
(>$<) = (>-|-)
(>$$<) :: (Contravariant source target t, Contravariant source (Betwixt source target) u, Contravariant (Betwixt source target) target t) => source a b -> target (t (u a)) (t (u b))
(>$$<) = (>-|-|-)
|
0d568a29bfaa270aeff92ebea441c5efa8447ea5031484b3497edf25d9fdaad8 | mfikes/chivorcam | core.cljc | (ns chivorcam.core
(:refer-clojure :exclude [defmacro])
(:require
[cljs.env :as env]
[cljs.analyzer :as ana :refer [*cljs-ns*]]))
(defn- eval-form
[form ns]
(when-not (find-ns ns)
#?(:clj (create-ns ns)
:cljs (eval `(~'ns ~ns))))
(binding #?(:clj [*ns* (the-ns ns)]
:cljs [*ns* (find-ns ns)])
(#?(:clj do :cljs try)
(eval `(do
(clojure.core/refer-clojure)
~form))
#?(:cljs (catch :default e (throw (ex-cause e)))))))
(defn- fake-var [ns sym]
(symbol (str "#'" ns) (str sym)))
(defn- macros-ns [sym]
#?(:clj sym
:cljs (symbol (str sym "$macros"))))
(clojure.core/defmacro defmacfn
[name & args]
(let [form `(clojure.core/defn ~name ~@args)]
(if &env
(do
(eval-form form (macros-ns *cljs-ns*))
`'~(fake-var *cljs-ns* name))
form)))
(clojure.core/defmacro defmacro
[name & args]
(let [form `(clojure.core/defmacro ~name ~@args)]
(if &env
(do
(eval-form form (macros-ns *cljs-ns*))
(swap! env/*compiler* update-in [::ana/namespaces *cljs-ns* :require-macros] assoc *cljs-ns* *cljs-ns*)
(swap! env/*compiler* update-in [::ana/namespaces *cljs-ns* :use-macros] assoc name *cljs-ns*)
`'~(fake-var *cljs-ns* name))
form)))
| null | https://raw.githubusercontent.com/mfikes/chivorcam/3e833bc090eb12613a4ed3be0d95ff12ef789788/src/chivorcam/core.cljc | clojure | (ns chivorcam.core
(:refer-clojure :exclude [defmacro])
(:require
[cljs.env :as env]
[cljs.analyzer :as ana :refer [*cljs-ns*]]))
(defn- eval-form
[form ns]
(when-not (find-ns ns)
#?(:clj (create-ns ns)
:cljs (eval `(~'ns ~ns))))
(binding #?(:clj [*ns* (the-ns ns)]
:cljs [*ns* (find-ns ns)])
(#?(:clj do :cljs try)
(eval `(do
(clojure.core/refer-clojure)
~form))
#?(:cljs (catch :default e (throw (ex-cause e)))))))
(defn- fake-var [ns sym]
(symbol (str "#'" ns) (str sym)))
(defn- macros-ns [sym]
#?(:clj sym
:cljs (symbol (str sym "$macros"))))
(clojure.core/defmacro defmacfn
[name & args]
(let [form `(clojure.core/defn ~name ~@args)]
(if &env
(do
(eval-form form (macros-ns *cljs-ns*))
`'~(fake-var *cljs-ns* name))
form)))
(clojure.core/defmacro defmacro
[name & args]
(let [form `(clojure.core/defmacro ~name ~@args)]
(if &env
(do
(eval-form form (macros-ns *cljs-ns*))
(swap! env/*compiler* update-in [::ana/namespaces *cljs-ns* :require-macros] assoc *cljs-ns* *cljs-ns*)
(swap! env/*compiler* update-in [::ana/namespaces *cljs-ns* :use-macros] assoc name *cljs-ns*)
`'~(fake-var *cljs-ns* name))
form)))
|
|
3a3824fda74e00a285ec732925fa9fc3833d0e8f12582473ae3b5ae9e2b142f1 | metabase/metabase | search.clj | (ns metabase.api.search
(:require
[compojure.core :refer [GET]]
[flatland.ordered.map :as ordered-map]
[honey.sql.helpers :as sql.helpers]
[medley.core :as m]
[metabase.api.common :as api]
[metabase.db :as mdb]
[metabase.db.query :as mdb.query]
[metabase.models.collection :as collection]
[metabase.models.interface :as mi]
[metabase.models.permissions :as perms]
[metabase.search.config :as search-config]
[metabase.search.scoring :as scoring]
[metabase.search.util :as search-util]
[metabase.server.middleware.offset-paging :as mw.offset-paging]
[metabase.util :as u]
[metabase.util.honey-sql-2 :as h2x]
[metabase.util.log :as log]
[metabase.util.schema :as su]
[schema.core :as s]
[toucan2.core :as t2]
[toucan2.instance :as t2.instance]
[toucan2.realize :as t2.realize]))
(set! *warn-on-reflection* true)
(def ^:private SearchContext
"Map with the various allowed search parameters, used to construct the SQL query"
{:search-string (s/maybe su/NonBlankString)
:archived? s/Bool
:current-user-perms #{perms/PathSchema}
(s/optional-key :models) (s/maybe #{su/NonBlankString})
(s/optional-key :table-db-id) (s/maybe s/Int)
(s/optional-key :limit-int) (s/maybe s/Int)
(s/optional-key :offset-int) (s/maybe s/Int)})
(def ^:private SearchableModel
(apply s/enum search-config/all-models))
(def ^:private HoneySQLColumn
(s/cond-pre
s/Keyword
[(s/one s/Any "column or value")
(s/one s/Keyword "alias")]))
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Columns for each Entity |
;;; +----------------------------------------------------------------------------------------------------------------+
(def ^:private all-search-columns
"All columns that will appear in the search results, and the types of those columns. The generated search query is a
`UNION ALL` of the queries for each different entity; it looks something like:
SELECT 'card' AS model, id, cast(NULL AS integer) AS table_id, ...
FROM report_card
UNION ALL
SELECT 'metric' as model, id, table_id, ...
FROM metric
Columns that aren't used in any individual query are replaced with `SELECT cast(NULL AS <type>)` statements. (These
are cast to the appropriate type because Postgres will assume `SELECT NULL` is `TEXT` by default and will refuse to
`UNION` two columns of two different types.)"
(ordered-map/ordered-map
returned for all models . Important to be first for changing model for dataset
:model :text
:id :integer
:name :text
:display_name :text
:description :text
:archived :boolean
;; returned for Card, Dashboard, and Collection
:collection_id :integer
:collection_name :text
:collection_authority_level :text
;; returned for Card and Dashboard
:collection_position :integer
:bookmark :boolean
;; returned for everything except Collection
:updated_at :timestamp
;; returned for Card only
:dashboardcard_count :integer
:dataset_query :text
:moderated_status :text
;; returned for Metric and Segment
:table_id :integer
:database_id :integer
:table_schema :text
:table_name :text
:table_description :text
returned for Database and Table
:initial_sync_status :text
;; returned for Action
:model_id :integer
:model_name :text))
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Shared Query Logic |
;;; +----------------------------------------------------------------------------------------------------------------+
(def ^:private true-clause [:inline [:= 1 1]])
(def ^:private false-clause [:inline [:= 0 1]])
(s/defn ^:private model->alias :- s/Keyword
[model :- SearchableModel]
(keyword model))
(s/defn ^:private ->column-alias :- s/Keyword
"Returns the column name. If the column is aliased, i.e. [`:original_name` `:aliased_name`], return the aliased
column name"
[column-or-aliased :- HoneySQLColumn]
(if (sequential? column-or-aliased)
(second column-or-aliased)
column-or-aliased))
(s/defn ^:private canonical-columns :- [HoneySQLColumn]
"Returns a seq of canonicalized list of columns for the search query with the given `model` Will return column names
prefixed with the `model` name so that it can be used in criteria. Projects a `nil` for columns the `model` doesn't
have and doesn't modify aliases."
[model :- SearchableModel, col-alias->honeysql-clause :- {s/Keyword HoneySQLColumn}]
(for [[search-col col-type] all-search-columns
:let [maybe-aliased-col (get col-alias->honeysql-clause search-col)]]
(cond
(= search-col :model)
[(h2x/literal model) :model]
;; This is an aliased column, no need to include the table alias
(sequential? maybe-aliased-col)
maybe-aliased-col
;; This is a column reference, need to add the table alias to the column
maybe-aliased-col
(keyword (name (model->alias model)) (name maybe-aliased-col))
This entity is missing the column , project a null for that column value . For Postgres and H2 , cast it to the
;; correct type, e.g.
;;
;; SELECT cast(NULL AS integer)
;;
;; For MySQL, this is not needed.
:else
[(when-not (= (mdb/db-type) :mysql)
[:cast nil col-type])
search-col])))
(s/defn ^:private select-clause-for-model :- [HoneySQLColumn]
"The search query uses a `union-all` which requires that there be the same number of columns in each of the segments
of the query. This function will take the columns for `model` and will inject constant `nil` values for any column
missing from `entity-columns` but found in `all-search-columns`."
[model :- SearchableModel]
(let [entity-columns (search-config/columns-for-model model)
column-alias->honeysql-clause (m/index-by ->column-alias entity-columns)
cols-or-nils (canonical-columns model column-alias->honeysql-clause)]
cols-or-nils))
(s/defn ^:private from-clause-for-model :- [(s/one [(s/one s/Keyword "table name") (s/one s/Keyword "alias")]
"from clause")]
[model :- SearchableModel]
(let [db-model (get search-config/model-to-db-model model)]
[[(t2/table-name db-model) (-> db-model name u/lower-case-en keyword)]]))
(defmulti ^:private archived-where-clause
{:arglists '([model archived?])}
(fn [model _] model))
(defmethod archived-where-clause :default
[model archived?]
[:= (keyword (name (model->alias model)) "archived") archived?])
;; Databases can't be archived
(defmethod archived-where-clause "database"
[_model archived?]
(if-not archived?
true-clause
false-clause))
;; Table has an `:active` flag, but no `:archived` flag; never return inactive Tables
(defmethod archived-where-clause "table"
[model archived?]
(if archived?
false-clause ; No tables should appear in archive searches
[:and
[:= (keyword (name (model->alias model)) "active") true]
[:= (keyword (name (model->alias model)) "visibility_type") nil]]))
(defn- wildcard-match
[s]
(str "%" s "%"))
(defn- search-string-clause
[model query searchable-columns]
(when query
(into [:or]
(for [column searchable-columns
token (search-util/tokenize (search-util/normalize query))]
(if (and (= model "card") (= column (keyword (name (model->alias model)) "dataset_query")))
[:and
[:= (keyword (name (model->alias model)) "query_type") "native"]
[:like
[:lower column]
(wildcard-match token)]]
[:like
[:lower column]
(wildcard-match token)])))))
(s/defn ^:private base-where-clause-for-model :- [(s/one (s/enum :and := :inline) "type") s/Any]
[model :- SearchableModel, {:keys [search-string archived?]} :- SearchContext]
(let [archived-clause (archived-where-clause model archived?)
search-clause (search-string-clause model
search-string
(map (let [model-alias (name (model->alias model))]
(fn [column]
(keyword model-alias (name column))))
(search-config/searchable-columns-for-model model)))]
(if search-clause
[:and archived-clause search-clause]
archived-clause)))
(s/defn ^:private base-query-for-model :- {:select s/Any, :from s/Any, :where s/Any}
"Create a HoneySQL query map with `:select`, `:from`, and `:where` clauses for `model`, suitable for the `UNION ALL`
used in search."
[model :- SearchableModel, context :- SearchContext]
{:select (select-clause-for-model model)
:from (from-clause-for-model model)
:where (base-where-clause-for-model model context)})
(s/defn ^:private add-collection-join-and-where-clauses
"Add a `WHERE` clause to the query to only return Collections the Current User has access to; join against Collection
so we can return its `:name`."
[honeysql-query :- su/Map
collection-id-column :- s/Keyword
{:keys [current-user-perms]} :- SearchContext]
(let [visible-collections (collection/permissions-set->visible-collection-ids current-user-perms)
collection-filter-clause (collection/visible-collection-ids->honeysql-filter-clause
collection-id-column
visible-collections)
honeysql-query (-> honeysql-query
(sql.helpers/where collection-filter-clause)
(sql.helpers/where [:= :collection.namespace nil]))]
;; add a JOIN against Collection *unless* the source table is already Collection
(cond-> honeysql-query
(not= collection-id-column :collection.id)
(sql.helpers/left-join [:collection :collection]
[:= collection-id-column :collection.id]))))
(s/defn ^:private add-table-db-id-clause
"Add a WHERE clause to only return tables with the given DB id.
Used in data picker for joins because we can't join across DB's."
[query :- su/Map, id :- (s/maybe s/Int)]
(if (some? id)
(sql.helpers/where query [:= id :db_id])
query))
(s/defn ^:private add-card-db-id-clause
"Add a WHERE clause to only return cards with the given DB id.
Used in data picker for joins because we can't join across DB's."
[query :- su/Map, id :- (s/maybe s/Int)]
(if (some? id)
(sql.helpers/where query [:= id :database_id])
query))
;;; +----------------------------------------------------------------------------------------------------------------+
| Search Queries for each Toucan Model |
;;; +----------------------------------------------------------------------------------------------------------------+
(defmulti ^:private search-query-for-model
{:arglists '([model search-context])}
(fn [model _] model))
(s/defn ^:private shared-card-impl [dataset? :- s/Bool search-ctx :- SearchContext]
(-> (base-query-for-model "card" search-ctx)
(update :where (fn [where] [:and [:= :card.dataset dataset?] where]))
(sql.helpers/left-join [:card_bookmark :bookmark]
[:and
[:= :bookmark.card_id :card.id]
[:= :bookmark.user_id api/*current-user-id*]])
(add-collection-join-and-where-clauses :card.collection_id search-ctx)
(add-card-db-id-clause (:table-db-id search-ctx))))
(s/defmethod search-query-for-model "action"
[model search-ctx :- SearchContext]
(-> (base-query-for-model model search-ctx)
(sql.helpers/left-join [:report_card :model]
[:= :model.id :action.model_id])
(add-collection-join-and-where-clauses :model.collection_id search-ctx)))
(s/defmethod search-query-for-model "card"
[_model search-ctx :- SearchContext]
(shared-card-impl false search-ctx))
(s/defmethod search-query-for-model "dataset"
[_model search-ctx :- SearchContext]
(-> (shared-card-impl true search-ctx)
(update :select (fn [columns]
(cons [(h2x/literal "dataset") :model] (rest columns))))))
(s/defmethod search-query-for-model "collection"
[_model search-ctx :- SearchContext]
(-> (base-query-for-model "collection" search-ctx)
(sql.helpers/left-join [:collection_bookmark :bookmark]
[:and
[:= :bookmark.collection_id :collection.id]
[:= :bookmark.user_id api/*current-user-id*]])
(add-collection-join-and-where-clauses :collection.id search-ctx)))
(s/defmethod search-query-for-model "database"
[model search-ctx :- SearchContext]
(base-query-for-model model search-ctx))
(s/defmethod search-query-for-model "dashboard"
[model search-ctx :- SearchContext]
(-> (base-query-for-model model search-ctx)
(sql.helpers/left-join [:dashboard_bookmark :bookmark]
[:and
[:= :bookmark.dashboard_id :dashboard.id]
[:= :bookmark.user_id api/*current-user-id*]])
(add-collection-join-and-where-clauses :dashboard.collection_id search-ctx)))
(s/defmethod search-query-for-model "metric"
[model search-ctx :- SearchContext]
(-> (base-query-for-model model search-ctx)
(sql.helpers/left-join [:metabase_table :table] [:= :metric.table_id :table.id])))
(s/defmethod search-query-for-model "segment"
[model search-ctx :- SearchContext]
(-> (base-query-for-model model search-ctx)
(sql.helpers/left-join [:metabase_table :table] [:= :segment.table_id :table.id])))
(s/defmethod search-query-for-model "table"
[model {:keys [current-user-perms table-db-id], :as search-ctx} :- SearchContext]
(when (seq current-user-perms)
(let [base-query (base-query-for-model model search-ctx)]
(add-table-db-id-clause
(if (contains? current-user-perms "/")
base-query
(let [data-perms (filter #(re-find #"^/db/*" %) current-user-perms)]
{:select (:select base-query)
:from [[(merge
base-query
{:select [:id :schema :db_id :name :description :display_name :updated_at :initial_sync_status
[(h2x/concat (h2x/literal "/db/")
:db_id
(h2x/literal "/schema/")
[:case
[:not= :schema nil] :schema
:else (h2x/literal "")]
(h2x/literal "/table/") :id
(h2x/literal "/read/"))
:path]]})
:table]]
:where (if (seq data-perms)
(into [:or] (for [path data-perms]
[:like :path (str path "%")]))
[:inline [:= 0 1]])}))
table-db-id))))
(defn order-clause
"CASE expression that lets the results be ordered by whether they're an exact (non-fuzzy) match or not"
[query]
(let [match (wildcard-match (search-util/normalize query))
columns-to-search (->> all-search-columns
(filter (fn [[_k v]] (= v :text)))
(map first)
(remove #{:collection_authority_level :moderated_status :initial_sync_status}))
case-clauses (as-> columns-to-search <>
(map (fn [col] [:like [:lower col] match]) <>)
(interleave <> (repeat [:inline 0]))
(concat <> [:else [:inline 1]]))]
[(into [:case] case-clauses)]))
(defmulti ^:private check-permissions-for-model
{:arglists '([search-result])}
(comp keyword :model))
(defmethod check-permissions-for-model :default
[_]
;; We filter what we can (ie. everything that is in a collection) out already when querying
true)
(defmethod check-permissions-for-model :metric
[instance]
(mi/can-read? instance))
(defmethod check-permissions-for-model :segment
[instance]
(mi/can-read? instance))
(defmethod check-permissions-for-model :database
[instance]
(mi/can-read? instance))
(defn- query-model-set
"Queries all models with respect to query for one result to see if we get a result or not"
[search-ctx]
(map #(get (first %) :model)
(filter not-empty
(for [model search-config/all-models]
(let [search-query (search-query-for-model model search-ctx)
query-with-limit (sql.helpers/limit search-query 1)]
(mdb.query/query query-with-limit))))))
(defn- full-search-query
"Postgres 9 is not happy with the type munging it needs to do to make the union-all degenerate down to trivial case of
one model without errors. Therefore we degenerate it down for it"
[search-ctx]
(let [models (or (:models search-ctx)
search-config/all-models)
sql-alias :alias_is_required_by_sql_but_not_needed_here
order-clause [((fnil order-clause "") (:search-string search-ctx))]]
(if (= (count models) 1)
(search-query-for-model (first models) search-ctx)
{:select [:*]
:from [[{:union-all (vec (for [model models
:let [query (search-query-for-model model search-ctx)]
:when (seq query)]
query))} sql-alias]]
:order-by order-clause})))
(s/defn ^:private search
"Builds a search query that includes all the searchable entities and runs it"
[search-ctx :- SearchContext]
(let [search-query (full-search-query search-ctx)
_ (log/tracef "Searching with query:\n%s\n%s"
(u/pprint-to-str search-query)
(mdb.query/format-sql (first (mdb.query/compile search-query))))
to-toucan-instance (fn [row] (t2.instance/instance (search-config/model-to-db-model (:model row)) row))
reducible-results (mdb.query/reducible-query search-query :max-rows search-config/*db-max-results*)
xf (comp
(map t2.realize/realize)
(map to-toucan-instance)
(filter check-permissions-for-model)
;; MySQL returns `:bookmark` and `:archived` as `1` or `0` so convert those to boolean as
;; needed
(map #(update % :bookmark api/bit->boolean))
(map #(update % :archived api/bit->boolean))
(map (partial scoring/score-and-result (:search-string search-ctx)))
(filter #(pos? (:score %))))
total-results (scoring/top-results reducible-results search-config/max-filtered-results xf)]
;; We get to do this slicing and dicing with the result data because
;; the pagination of search is for UI improvement, not for performance.
;; We intend for the cardinality of the search results to be below the default max before this slicing occurs
{:total (count total-results)
:data (cond->> total-results
(some? (:offset-int search-ctx)) (drop (:offset-int search-ctx))
(some? (:limit-int search-ctx)) (take (:limit-int search-ctx)))
:available_models (query-model-set search-ctx)
:limit (:limit-int search-ctx)
:offset (:offset-int search-ctx)
:table_db_id (:table-db-id search-ctx)
:models (:models search-ctx)}))
;;; +----------------------------------------------------------------------------------------------------------------+
;;; | Endpoint |
;;; +----------------------------------------------------------------------------------------------------------------+
This is basically a union type . defendpoint splits the string if it only gets one
(def ^:private models-schema (s/conditional vector? [su/NonBlankString] :else su/NonBlankString))
(s/defn ^:private search-context :- SearchContext
[search-string :- (s/maybe su/NonBlankString),
archived-string :- (s/maybe su/BooleanString)
table-db-id :- (s/maybe su/IntGreaterThanZero)
models :- (s/maybe models-schema)
limit :- (s/maybe su/IntGreaterThanZero)
offset :- (s/maybe su/IntGreaterThanOrEqualToZero)]
(cond-> {:search-string search-string
:archived? (Boolean/parseBoolean archived-string)
:current-user-perms @api/*current-user-permissions-set*}
(some? table-db-id) (assoc :table-db-id table-db-id)
(some? models) (assoc :models
(apply hash-set (if (vector? models) models [models])))
(some? limit) (assoc :limit-int limit)
(some? offset) (assoc :offset-int offset)))
(api/defendpoint GET "/models"
  "Get the set of models that a search query will return"
  [q archived-string table-db-id]
  ;; models/limit/offset are irrelevant for this existence probe, hence the trailing nils
  (query-model-set (search-context q archived-string table-db-id nil nil nil)))

#_{:clj-kondo/ignore [:deprecated-var]}
(api/defendpoint-schema GET "/"
  "Search within a bunch of models for the substring `q`.
  For the list of models, check [[metabase.search.config/all-models]].

  To search in archived portions of models, pass in `archived=true`.
  To search for tables, cards, and models of a certain DB, pass in a DB id value
  to `table_db_id`.
  To specify a list of models, pass in an array to `models`.
  "
  [q archived table_db_id models]
  {q (s/maybe su/NonBlankString)
   archived (s/maybe su/BooleanString)
   table_db_id (s/maybe su/IntGreaterThanZero)
   models (s/maybe models-schema)}
  ;; reject invalid pagination params before doing any query work
  (api/check-valid-page-params mw.offset-paging/*limit* mw.offset-paging/*offset*)
  (search (search-context
           q
           archived
           table_db_id
           models
           mw.offset-paging/*limit*
           mw.offset-paging/*offset*)))

;; hook the endpoints above into the API router
(api/define-routes)
| null | https://raw.githubusercontent.com/metabase/metabase/6e17ce37d2dd739d3d5e1b493d60a8805160e516/src/metabase/api/search.clj | clojure | +----------------------------------------------------------------------------------------------------------------+
| Columns for each Entity |
+----------------------------------------------------------------------------------------------------------------+
it looks something like:
returned for Card, Dashboard, and Collection
returned for Card and Dashboard
returned for everything except Collection
returned for Card only
returned for Metric and Segment
returned for Action
+----------------------------------------------------------------------------------------------------------------+
| Shared Query Logic |
+----------------------------------------------------------------------------------------------------------------+
This is an aliased column, no need to include the table alias
This is a column reference, need to add the table alias to the column
correct type, e.g.
SELECT cast(NULL AS integer)
For MySQL, this is not needed.
Databases can't be archived
Table has an `:active` flag, but no `:archived` flag; never return inactive Tables
No tables should appear in archive searches
join against Collection
add a JOIN against Collection *unless* the source table is already Collection
+----------------------------------------------------------------------------------------------------------------+
+----------------------------------------------------------------------------------------------------------------+
We filter what we can (ie. everything that is in a collection) out already when querying
MySQL returns `:bookmark` and `:archived` as `1` or `0` so convert those to boolean as
needed
We get to do this slicing and dicing with the result data because
the pagination of search is for UI improvement, not for performance.
We intend for the cardinality of the search results to be below the default max before this slicing occurs
+----------------------------------------------------------------------------------------------------------------+
| Endpoint |
+----------------------------------------------------------------------------------------------------------------+ | (ns metabase.api.search
(:require
[compojure.core :refer [GET]]
[flatland.ordered.map :as ordered-map]
[honey.sql.helpers :as sql.helpers]
[medley.core :as m]
[metabase.api.common :as api]
[metabase.db :as mdb]
[metabase.db.query :as mdb.query]
[metabase.models.collection :as collection]
[metabase.models.interface :as mi]
[metabase.models.permissions :as perms]
[metabase.search.config :as search-config]
[metabase.search.scoring :as scoring]
[metabase.search.util :as search-util]
[metabase.server.middleware.offset-paging :as mw.offset-paging]
[metabase.util :as u]
[metabase.util.honey-sql-2 :as h2x]
[metabase.util.log :as log]
[metabase.util.schema :as su]
[schema.core :as s]
[toucan2.core :as t2]
[toucan2.instance :as t2.instance]
[toucan2.realize :as t2.realize]))
;; Surface reflection warnings so Java interop calls (e.g. `Boolean/parseBoolean`) stay type-hinted.
(set! *warn-on-reflection* true)
(def ^:private SearchContext
  "Map with the various allowed search parameters, used to construct the SQL query"
  {:search-string (s/maybe su/NonBlankString)
   :archived? s/Bool
   :current-user-perms #{perms/PathSchema}
   ;; optional keys are only present when the corresponding query param was supplied
   (s/optional-key :models) (s/maybe #{su/NonBlankString})
   (s/optional-key :table-db-id) (s/maybe s/Int)
   (s/optional-key :limit-int) (s/maybe s/Int)
   (s/optional-key :offset-int) (s/maybe s/Int)})
;; Enum of every searchable model name, taken from the search config.
(def ^:private SearchableModel
  (apply s/enum search-config/all-models))
(def ^:private HoneySQLColumn
  "A HoneySQL column: either a bare keyword, or an `[expression alias]` pair."
  (s/cond-pre
   s/Keyword
   [(s/one s/Any "column or value")
    (s/one s/Keyword "alias")]))
(def ^:private all-search-columns
  "All columns that will appear in the search results, and the types of those columns. The generated search query is a
  `UNION ALL` of the queries for each entity; it looks something like:

    SELECT 'card' AS model, id, cast(NULL AS integer) AS table_id, ...
    FROM report_card
    UNION ALL
    SELECT 'metric' as model, id, table_id, ...
    FROM metric

  Columns that aren't used in any individual query are replaced with `SELECT cast(NULL AS <type>)` statements. (These
  are cast to the appropriate type because Postgres will assume `SELECT NULL` is `TEXT` by default and will refuse to
  `UNION` two columns of two different types.)"
  (ordered-map/ordered-map
   ;; returned for all models. Important to be first for changing model for dataset
   :model               :text
   :id                  :integer
   :name                :text
   :display_name        :text
   :description         :text
   :archived            :boolean
   ;; returned for Card, Dashboard, and Collection
   :collection_id       :integer
   :collection_name     :text
   :collection_authority_level :text
   ;; returned for Card and Dashboard
   :collection_position :integer
   :bookmark            :boolean
   ;; returned for everything except Collection
   :updated_at          :timestamp
   ;; returned for Card only
   :dashboardcard_count :integer
   :dataset_query       :text
   :moderated_status    :text
   ;; returned for Metric and Segment
   :table_id            :integer
   :database_id         :integer
   :table_schema        :text
   :table_name          :text
   :table_description   :text
   ;; returned for Database and Table
   :initial_sync_status :text
   ;; returned for Action
   :model_id            :integer
   :model_name          :text))
;; Constant inlined SQL predicates: `1 = 1` (always true) and `0 = 1` (always false).
(def ^:private true-clause [:inline [:= 1 1]])
(def ^:private false-clause [:inline [:= 0 1]])
(s/defn ^:private model->alias :- s/Keyword
  "Table alias used for `model` in the generated SQL: simply the model name as a keyword."
  [model :- SearchableModel]
  (keyword model))
(s/defn ^:private ->column-alias :- s/Keyword
  "Effective name of a search column: the alias of an `[expression alias]` pair,
  otherwise the column keyword itself."
  [column-or-aliased :- HoneySQLColumn]
  (cond
    (sequential? column-or-aliased) (second column-or-aliased)
    :else                           column-or-aliased))
(s/defn ^:private canonical-columns :- [HoneySQLColumn]
  "Returns a seq of canonicalized list of columns for the search query with the given `model` Will return column names
  prefixed with the `model` name so that it can be used in criteria. Projects a `nil` for columns the `model` doesn't
  have and doesn't modify aliases."
  [model :- SearchableModel, col-alias->honeysql-clause :- {s/Keyword HoneySQLColumn}]
  (for [[search-col col-type] all-search-columns
        :let [maybe-aliased-col (get col-alias->honeysql-clause search-col)]]
    (cond
      ;; `:model` is always the literal model name, e.g. 'card' AS model
      (= search-col :model)
      [(h2x/literal model) :model]

      ;; This is an aliased column; no need to add the table alias
      (sequential? maybe-aliased-col)
      maybe-aliased-col

      ;; This is a plain column reference; qualify it with the model's table alias
      maybe-aliased-col
      (keyword (name (model->alias model)) (name maybe-aliased-col))

      ;; This entity is missing the column, project a null for that column value. For Postgres and H2, cast it to the
      ;; correct type, e.g. `SELECT cast(NULL AS integer)`; for MySQL this is not needed.
      :else
      [(when-not (= (mdb/db-type) :mysql)
         [:cast nil col-type])
       search-col])))
(s/defn ^:private select-clause-for-model :- [HoneySQLColumn]
  "SELECT list for `model`: every column in `all-search-columns`, in order, with NULL
  projections substituted for columns this model does not provide, so every arm of the
  `union-all` has the same number of columns."
  [model :- SearchableModel]
  (->> model
       search-config/columns-for-model
       (m/index-by ->column-alias)
       (canonical-columns model)))
(s/defn ^:private from-clause-for-model :- [(s/one [(s/one s/Keyword "table name") (s/one s/Keyword "alias")]
                                                   "from clause")]
  ;; FROM clause for `model`: the Toucan model's table, aliased to the lower-cased model name.
  [model :- SearchableModel]
  (let [db-model (get search-config/model-to-db-model model)]
    [[(t2/table-name db-model) (-> db-model name u/lower-case-en keyword)]]))
(defmulti ^:private archived-where-clause
  ;; WHERE clause restricting results to archived (or non-archived) rows; dispatches on model name.
  {:arglists '([model archived?])}
  (fn [model _] model))

;; Default: compare the model's own `archived` column against the requested flag.
(defmethod archived-where-clause :default
  [model archived?]
  [:= (keyword (name (model->alias model)) "archived") archived?])
;; Databases can't be archived: match every row for non-archived searches, none for archived ones.
(defmethod archived-where-clause "database"
  [_model archived?]
  (if archived?
    false-clause
    true-clause))
;; Tables have an `:active` flag but no `:archived` flag; never return inactive Tables,
;; and no Tables at all should appear in archived searches.
(defmethod archived-where-clause "table"
  [model archived?]
  (if archived?
    false-clause
    [:and
     [:= (keyword (name (model->alias model)) "active") true]
     [:= (keyword (name (model->alias model)) "visibility_type") nil]]))
(defn- wildcard-match
  "Wrap `s` in SQL LIKE wildcards, e.g. \"foo\" -> \"%foo%\"."
  [s]
  (format "%%%s%%" s))
(defn- search-string-clause
  "OR'd LIKE clauses matching each token of the normalized `query` against every column in
  `searchable-columns`, case-insensitively. Returns nil when `query` is nil."
  [model query searchable-columns]
  (when query
    (into [:or]
          (for [column searchable-columns
                token (search-util/tokenize (search-util/normalize query))]
            ;; a Card's `dataset_query` is only matched when the Card's query is native
            (if (and (= model "card") (= column (keyword (name (model->alias model)) "dataset_query")))
              [:and
               [:= (keyword (name (model->alias model)) "query_type") "native"]
               [:like
                [:lower column]
                (wildcard-match token)]]
              [:like
               [:lower column]
               (wildcard-match token)])))))
(s/defn ^:private base-where-clause-for-model :- [(s/one (s/enum :and := :inline) "type") s/Any]
  ;; Combine the model's archived filter with the (optional) search-string filter;
  ;; with no search string only the archived clause applies.
  [model :- SearchableModel, {:keys [search-string archived?]} :- SearchContext]
  (let [archived-clause (archived-where-clause model archived?)
        search-clause (search-string-clause model
                                            search-string
                                            ;; qualify each searchable column with the model's table alias
                                            (map (let [model-alias (name (model->alias model))]
                                                   (fn [column]
                                                     (keyword model-alias (name column))))
                                                 (search-config/searchable-columns-for-model model)))]
    (if search-clause
      [:and archived-clause search-clause]
      archived-clause)))
(s/defn ^:private base-query-for-model :- {:select s/Any, :from s/Any, :where s/Any}
  "Create a HoneySQL query map with `:select`, `:from`, and `:where` clauses for `model`, suitable for the `UNION ALL`
  used in search."
  [model :- SearchableModel, context :- SearchContext]
  ;; all three clauses are derived per-model so every UNION arm has identical column arity
  {:select (select-clause-for-model model)
   :from (from-clause-for-model model)
   :where (base-where-clause-for-model model context)})
(s/defn ^:private add-collection-join-and-where-clauses
  "Add a `WHERE` clause to the query to only return Collections the Current User has access to; join against Collection
  so we can return its `:name`."
  [honeysql-query :- su/Map
   collection-id-column :- s/Keyword
   {:keys [current-user-perms]} :- SearchContext]
  (let [visible-collections (collection/permissions-set->visible-collection-ids current-user-perms)
        collection-filter-clause (collection/visible-collection-ids->honeysql-filter-clause
                                  collection-id-column
                                  visible-collections)
        honeysql-query (-> honeysql-query
                           (sql.helpers/where collection-filter-clause)
                           ;; only the default namespace is searchable
                           (sql.helpers/where [:= :collection.namespace nil]))]
    ;; add a JOIN against Collection *unless* the source table is already Collection
    (cond-> honeysql-query
      (not= collection-id-column :collection.id)
      (sql.helpers/left-join [:collection :collection]
                             [:= collection-id-column :collection.id]))))
(s/defn ^:private add-table-db-id-clause
  "Restrict `query` to tables belonging to DB `id`; a no-op when `id` is nil.
  Used in the data picker for joins because we can't join across DBs."
  [query :- su/Map, id :- (s/maybe s/Int)]
  (cond-> query
    (some? id) (sql.helpers/where [:= id :db_id])))
(s/defn ^:private add-card-db-id-clause
  "Restrict `query` to cards belonging to DB `id`; a no-op when `id` is nil.
  Used in the data picker for joins because we can't join across DBs."
  [query :- su/Map, id :- (s/maybe s/Int)]
  (cond-> query
    (some? id) (sql.helpers/where [:= id :database_id])))
;;; Search Queries for each Toucan Model
(defmulti ^:private search-query-for-model
  ;; Full HoneySQL search query for one searchable model; dispatches on model name.
  {:arglists '([model search-context])}
  (fn [model _] model))
(s/defn ^:private shared-card-impl
  ;; Shared query for "card" and "dataset" (both rows in report_card, split by the `dataset`
  ;; flag): adds the current user's bookmarks, Collection perms, and the optional DB filter.
  [dataset? :- s/Bool search-ctx :- SearchContext]
  (-> (base-query-for-model "card" search-ctx)
      (update :where (fn [where] [:and [:= :card.dataset dataset?] where]))
      (sql.helpers/left-join [:card_bookmark :bookmark]
                             [:and
                              [:= :bookmark.card_id :card.id]
                              [:= :bookmark.user_id api/*current-user-id*]])
      (add-collection-join-and-where-clauses :card.collection_id search-ctx)
      (add-card-db-id-clause (:table-db-id search-ctx))))
;; Actions are searched through their backing model (a report_card row), so join it and
;; apply Collection permissions via the model's collection.
(s/defmethod search-query-for-model "action"
  [model search-ctx :- SearchContext]
  (-> (base-query-for-model model search-ctx)
      (sql.helpers/left-join [:report_card :model]
                             [:= :model.id :action.model_id])
      (add-collection-join-and-where-clauses :model.collection_id search-ctx)))

;; "card" = saved question: report_card rows with dataset = false.
(s/defmethod search-query-for-model "card"
  [_model search-ctx :- SearchContext]
  (shared-card-impl false search-ctx))

;; "dataset" = model: report_card rows with dataset = true; the first SELECT column
;; (the `:model` literal) is rewritten so results report as "dataset" rather than "card".
(s/defmethod search-query-for-model "dataset"
  [_model search-ctx :- SearchContext]
  (-> (shared-card-impl true search-ctx)
      (update :select (fn [columns]
                        (cons [(h2x/literal "dataset") :model] (rest columns))))))
;; Collections: join the user's bookmarks; the permissions filter joins Collection to itself
;; (no extra JOIN is added since the source table already *is* collection).
(s/defmethod search-query-for-model "collection"
  [_model search-ctx :- SearchContext]
  (-> (base-query-for-model "collection" search-ctx)
      (sql.helpers/left-join [:collection_bookmark :bookmark]
                             [:and
                              [:= :bookmark.collection_id :collection.id]
                              [:= :bookmark.user_id api/*current-user-id*]])
      (add-collection-join-and-where-clauses :collection.id search-ctx)))

;; Databases have no Collection or bookmarks; the base query suffices.
(s/defmethod search-query-for-model "database"
  [model search-ctx :- SearchContext]
  (base-query-for-model model search-ctx))

(s/defmethod search-query-for-model "dashboard"
  [model search-ctx :- SearchContext]
  (-> (base-query-for-model model search-ctx)
      (sql.helpers/left-join [:dashboard_bookmark :bookmark]
                             [:and
                              [:= :bookmark.dashboard_id :dashboard.id]
                              [:= :bookmark.user_id api/*current-user-id*]])
      (add-collection-join-and-where-clauses :dashboard.collection_id search-ctx)))
;; Metrics and Segments join their underlying Table so the table_* columns can be returned.
(s/defmethod search-query-for-model "metric"
  [model search-ctx :- SearchContext]
  (-> (base-query-for-model model search-ctx)
      (sql.helpers/left-join [:metabase_table :table] [:= :metric.table_id :table.id])))

(s/defmethod search-query-for-model "segment"
  [model search-ctx :- SearchContext]
  (-> (base-query-for-model model search-ctx)
      (sql.helpers/left-join [:metabase_table :table] [:= :segment.table_id :table.id])))
(s/defmethod search-query-for-model "table"
  ;; Tables are permissions-filtered in SQL: returns nil when the user has no perms at all;
  ;; skips filtering entirely for the root "/" (admin) permission; otherwise builds a perms
  ;; path string per table and LIKE-matches it against the user's /db/... permission paths.
  [model {:keys [current-user-perms table-db-id], :as search-ctx} :- SearchContext]
  (when (seq current-user-perms)
    (let [base-query (base-query-for-model model search-ctx)]
      (add-table-db-id-clause
       (if (contains? current-user-perms "/")
         base-query
         (let [data-perms (filter #(re-find #"^/db/*" %) current-user-perms)]
           {:select (:select base-query)
            :from [[(merge
                     base-query
                     ;; extend the inner SELECT with a computed `path` column like
                     ;; /db/<db_id>/schema/<schema>/table/<id>/read/
                     {:select [:id :schema :db_id :name :description :display_name :updated_at :initial_sync_status
                               [(h2x/concat (h2x/literal "/db/")
                                            :db_id
                                            (h2x/literal "/schema/")
                                            [:case
                                             [:not= :schema nil] :schema
                                             :else (h2x/literal "")]
                                            (h2x/literal "/table/") :id
                                            (h2x/literal "/read/"))
                                :path]]})
                    :table]]
            ;; no matching data perms => always-false predicate (no rows)
            :where (if (seq data-perms)
                     (into [:or] (for [path data-perms]
                                   [:like :path (str path "%")]))
                     [:inline [:= 0 1]])}))
       table-db-id))))
(defn order-clause
  "CASE expression that lets the results be ordered by whether they're an exact (non-fuzzy) match or not"
  [query]
  (let [match (wildcard-match (search-util/normalize query))
        columns-to-search (->> all-search-columns
                               ;; only text columns are meaningful to LIKE-match against
                               (filter (fn [[_k v]] (= v :text)))
                               (map first)
                               (remove #{:collection_authority_level :moderated_status :initial_sync_status}))
        ;; each column becomes a `WHEN <col> LIKE <match> THEN 0` pair; everything else sorts as 1
        case-clauses (as-> columns-to-search <>
                       (map (fn [col] [:like [:lower col] match]) <>)
                       (interleave <> (repeat [:inline 0]))
                       (concat <> [:else [:inline 1]]))]
    [(into [:case] case-clauses)]))
(defmulti ^:private check-permissions-for-model
  ;; Row-level read-permission check applied to each search result; dispatches on `:model`.
  {:arglists '([search-result])}
  (comp keyword :model))

;; Collection-scoped models are already filtered out in the SQL query, so they pass by default.
(defmethod check-permissions-for-model :default
  [_]
  true)

;; Metrics, Segments, and Databases are not Collection-scoped: check read perms per row.
(defmethod check-permissions-for-model :metric
  [instance]
  (mi/can-read? instance))

(defmethod check-permissions-for-model :segment
  [instance]
  (mi/can-read? instance))

(defmethod check-permissions-for-model :database
  [instance]
  (mi/can-read? instance))
(defn- query-model-set
  "Return the models for which searching with `search-ctx` yields at least one row.
  Each per-model query is run with LIMIT 1, so this is a cheap existence probe."
  [search-ctx]
  (for [model search-config/all-models
        :let [probe (-> (search-query-for-model model search-ctx)
                        (sql.helpers/limit 1)
                        mdb.query/query)]
        :when (seq probe)]
    (:model (first probe))))
(defn- full-search-query
  "Postgres 9 is not happy with the type munging it needs to do to make the union-all degenerate down to trivial case of
  one model without errors. Therefore we degenerate it down for it"
  [search-ctx]
  (let [models (or (:models search-ctx)
                   search-config/all-models)
        sql-alias :alias_is_required_by_sql_but_not_needed_here
        ;; NOTE: shadows the top-level `order-clause` fn; `fnil` substitutes "" when no search string was given
        order-clause [((fnil order-clause "") (:search-string search-ctx))]]
    (if (= (count models) 1)
      ;; single model: skip the UNION ALL wrapper entirely
      (search-query-for-model (first models) search-ctx)
      {:select [:*]
       :from [[{:union-all (vec (for [model models
                                      :let [query (search-query-for-model model search-ctx)]
                                      ;; e.g. the "table" query is nil when the user has no perms
                                      :when (seq query)]
                                  query))} sql-alias]]
       :order-by order-clause})))
(s/defn ^:private search
  "Builds a search query that includes all the searchable entities and runs it"
  [search-ctx :- SearchContext]
  (let [search-query (full-search-query search-ctx)
        _ (log/tracef "Searching with query:\n%s\n%s"
                      (u/pprint-to-str search-query)
                      (mdb.query/format-sql (first (mdb.query/compile search-query))))
        to-toucan-instance (fn [row] (t2.instance/instance (search-config/model-to-db-model (:model row)) row))
        reducible-results (mdb.query/reducible-query search-query :max-rows search-config/*db-max-results*)
        xf (comp
            (map t2.realize/realize)
            (map to-toucan-instance)
            ;; drop rows the current user may not read (row-level perms checked here, not in SQL)
            (filter check-permissions-for-model)
            ;; MySQL returns `:bookmark` and `:archived` as 1/0, so coerce them to booleans
            (map #(update % :bookmark api/bit->boolean))
            (map #(update % :archived api/bit->boolean))
            (map (partial scoring/score-and-result (:search-string search-ctx)))
            ;; zero-scored rows are non-matches and are discarded
            (filter #(pos? (:score %))))
        total-results (scoring/top-results reducible-results search-config/max-filtered-results xf)]
    ;; Pagination is sliced in memory: it exists for UI convenience, not performance. The result
    ;; cardinality is expected to stay below the default max before this slicing occurs.
    {:total (count total-results)
     :data (cond->> total-results
             (some? (:offset-int search-ctx)) (drop (:offset-int search-ctx))
             (some? (:limit-int search-ctx)) (take (:limit-int search-ctx)))
     :available_models (query-model-set search-ctx)
     :limit (:limit-int search-ctx)
     :offset (:offset-int search-ctx)
     :table_db_id (:table-db-id search-ctx)
     :models (:models search-ctx)}))
;; This is basically a union type. defendpoint splits the string if it only gets one
;; Schema for the `models` query param: a single model name, or a vector of them.
(def ^:private models-schema (s/conditional vector? [su/NonBlankString] :else su/NonBlankString))
(s/defn ^:private search-context :- SearchContext
  "Assemble a SearchContext map from the raw query parameters. Optional keys
  (`:table-db-id`, `:models`, `:limit-int`, `:offset-int`) are only added when
  the corresponding parameter was supplied."
  [search-string :- (s/maybe su/NonBlankString),
   archived-string :- (s/maybe su/BooleanString)
   table-db-id :- (s/maybe su/IntGreaterThanZero)
   models :- (s/maybe models-schema)
   limit :- (s/maybe su/IntGreaterThanZero)
   offset :- (s/maybe su/IntGreaterThanOrEqualToZero)]
  (let [;; normalize `models` (single string or vector) into a set, when present
        model-set (when (some? models)
                    (set (if (vector? models) models [models])))
        base      {:search-string      search-string
                   :archived?          (Boolean/parseBoolean archived-string)
                   :current-user-perms @api/*current-user-permissions-set*}
        optional  {:table-db-id table-db-id
                   :models      model-set
                   :limit-int   limit
                   :offset-int  offset}]
    ;; keep only the optional entries whose value is non-nil
    (into base (filter (comp some? val)) optional)))
(api/defendpoint GET "/models"
  "Get the set of models that a search query will return"
  [q archived-string table-db-id]
  ;; models/limit/offset are irrelevant for this existence probe, hence the trailing nils
  (query-model-set (search-context q archived-string table-db-id nil nil nil)))

#_{:clj-kondo/ignore [:deprecated-var]}
(api/defendpoint-schema GET "/"
  "Search within a bunch of models for the substring `q`.
  For the list of models, check [[metabase.search.config/all-models]].

  To search in archived portions of models, pass in `archived=true`.
  To search for tables, cards, and models of a certain DB, pass in a DB id value
  to `table_db_id`.
  To specify a list of models, pass in an array to `models`.
  "
  [q archived table_db_id models]
  {q (s/maybe su/NonBlankString)
   archived (s/maybe su/BooleanString)
   table_db_id (s/maybe su/IntGreaterThanZero)
   models (s/maybe models-schema)}
  ;; reject invalid pagination params before doing any query work
  (api/check-valid-page-params mw.offset-paging/*limit* mw.offset-paging/*offset*)
  (search (search-context
           q
           archived
           table_db_id
           models
           mw.offset-paging/*limit*
           mw.offset-paging/*offset*)))

;; hook the endpoints above into the API router
(api/define-routes)
|
b382c281dc8288fa09e1a48f9771513296cf605b409c1d9e42948d82e7425f00 | jeffshrager/biobike | ec2go.lsp |
("2.4.99.4" "0003836")
("3.4.16.5" "0004186" "0004258")
("2.3.1.139" "0004173")
("3.5.1.14" "0004046")
("4.99.1.1" "0004325")
("3.4.21.21" "0003802")
("1.3.3.4" "0004729")
("3.2.2.23" "0003907")
("2.4.2.31" "0003956")
("3.2.1.46" "0004336")
("4.2.1.46" "0008460")
("2.6.1.62" "0004015")
("3.1.2.15" "0004221")
("4.1.2.15" "0003849")
("1.1.1.38" "0004472")
("1.4.3.4" "0004041")
("3.4.21.34" "0003807")
("3.2.1.106" "0004573")
("3.5.1.54" "0004039")
("3.4.19.1" "0004254")
("1.2.1.38" "0003942")
("1.14.99.7" "0004506")
("3.4.21.68" "0004296")
("3.5.3.4" "0004037")
("2.7.1.105" "0003873")
("6.3.5.5" "0004088")
("1.8.99.4" "0004604")
("3.4.21.47" "0003812")
("2.4.2.7" "0003999")
("1.14.13.13" "0004498")
("2.3.1.38" "0004313")
("3.4.13.9" "0004251")
("3.1.6.12" "0003943")
("2.7.6.2" "0004788")
("3.4.21.22" "0003803")
("1.14.16.4" "0004510")
("3.1.3.48" "0004727" "0004725")
("1.1.1.21" "0004032")
("4.1.1.21" "0004638")
("2.4.1.38" "0003831")
("6.1.1.21" "0004821")
("3.4.21.7" "0004283")
("3.6.1.46" "0003927")
("2.7.7.14" "0004306")
("3.4.19.9" "0008464")
("1.1.2.3" "0004460")
("2.1.2.3" "0004643")
("3.1.2.3" "0004778")
("3.2.1.21" "0008422")
("1.1.1.85" "0003862")
("3.4.21.35" "0004293")
("1.6.5.5" "0003960")
("2.7.1.127" "0008440")
("3.1.1.13" "0004771")
("3.4.21.10" "0004284")
("2.3.1.21" "0004095")
("2.8.2.23" "0008467")
("2.6.1.38" "0008110")
("2.1.1.61" "0004808")
("3.4.21.69" "0003808")
("1.3.5.1" "0008177")
("5.3.1.21" "0004110")
("6.3.5.1" "0003952")
("4.1.99.3" "0003904")
("3.1.3.16" "0004722")
("3.4.11.-" "0004177")
("3.3.2.3" "0004301")
("4.2.1.13" "0003941")
("3.1.1.6" "0008126")
("3.1.3.64" "0004438")
("2.1.1.77" "0004719")
("2.7.1.38" "0004689")
("6.1.1.6" "0004824")
("2.3.1.85" "0004312")
("4.2.1.61" "0004317")
("1.13.11.42" "0004426")
("6.3.2.3" "0004363")
("1.3.1.13" "0004665")
("2.4.2.3" "0004850")
("5.4.2.3" "0004610")
("2.5.1.21" "0004310")
("1.5.5.1" "0004174")
("2.3.1.61" "0004149")
("1.3.99.3" "0003995")
("1.4.1.13" "0004355")
("3.5.2.3" "0004151")
("3.6.3.48" "0008563")
("3.1.3.56" "0004445")
("2.3.1.6" "0004102")
("3.1.1.69" "0000225")
("1.10.3.1" "0004097")
("5.3.1.6" "0004751")
("3.4.21.36" "0008125")
("1.14.13.39" "0004517")
("3.1.2.22" "0008474")
("3.1.26.-" "0004521")
("2.1.1.45" "0004799")
("3.4.23.1" "0004194")
("2.7.1.21" "0004797")
("2.7.8.15" "0003975")
("1.14.99.9" "0004508")
("1.1.1.2" "0008106")
("3.1.1.2" "0004064")
("6.1.1.2" "0004830")
("2.7.2.3" "0004618")
("2.6.1.13" "0004587")
("3.2.1.45" "0004348")
("2.5.1.6" "0004478")
("3.5.1.6" "0003837")
("2.4.2.30" "0003950")
("1.14.13.8" "0004499")
("1.6.99.3" "0003954")
("3.1.2.14" "0004320")
("2.2.1.2" "0004801")
("4.2.1.2" "0004333")
("1.1.1.37" "0004470")
("2.1.1.37" "0003886")
("4.1.1.37" "0004853")
("3.4.14.1" "0004214")
("2.4.1.69" "0008107")
("1.14.11.1" "0008336")
("3.1.4.17" "0004114")
("3.1.26.5" "0004526")
("6.3.1.2" "0004356")
("2.4.1.45" "0003851")
("2.7.1.6" "0004335")
("2.4.1.117" "0004581")
("1.4.1.2" "0004352")
("1.1.1.29" "0008465")
("3.1.1.29" "0004045")
("4.1.1.29" "0004782")
("2.3.1.37" "0003870")
("6.4.1.2" "0003989")
("2.8.1.6" "0004076")
("3.1.4.4" "0004630")
("2.4.2.14" "0004044")
("3.5.4.25" "0003935")
("1.5.1.2" "0004735")
("3.6.4.8" "0008575")
("3.5.1.2" "0004359")
("2.4.1.37" "0004381")
("6.1.1.20" "0004826")
("3.4.21.6" "0003804")
("6.5.1.2" "0003911")
("2.7.7.13" "0004475")
("1.2.4.4" "0003863")
("1.14.13.9" "0004502")
("1.11.1.9" "0004602")
("2.6.1.2" "0004021")
("2.7.4.8" "0004385")
("3.2.1.20" "0004558")
("4.2.1.20" "0004834")
("3.4.24.18" "0004238")
("2.5.1.37" "0004464")
("4.6.1.2" "0004383" "0008075")
("4.2.99.18" "0003906")
("2.7.7.6" "0003899" "0000129" "0003900" "0003901" "0003902")
("2.7.1.117" "0004687")
("1.14.11.2" "0004656")
("3.1.3.7" "0008441")
("4.1.3.7" "0004108")
("5.1.3.7" "0003974")
("2.3.1.20" "0004144")
("2.7.1.2" "0004340")
("3.7.1.2" "0004334")
("6.1.1.12" "0004815")
("1.14.11.16" "0004597")
("6.3.4.4" "0004019")
("1.1.3.15" "0003973")
("3.1.3.15" "0004401")
("3.4.24.64" "0004240")
("1.2.1.12" "0004365")
("2.7.7.53" "0003877")
("1.8.1.2" "0004783")
("2.5.1.29" "0004311")
("4.2.1.12" "0004456")
("2.7.1.37" "0004674")
("4.2.99.2" "0004795")
("3.5.4.4" "0004000")
("2.3.1.12" "0004742")
("1.5.1.20" "0004489")
("4.3.1.12" "0008473")
("3.2.1.76" "0003940")
("1.3.99.2" "0004085")
("5.3.99.2" "0004667")
("3.6.4.4" "0008574")
("3.4.22.34" "0001509")
("2.7.1.29" "0004371")
("3.2.1.52" "0004563")
("2.7.7.2" "0003919")
("2.4.1.150" "0008109")
("3.4.21.26" "0004287")
("5.4.99.2" "0004494")
("1.1.1.44" "0004616")
("3.1.6.1" "0004065")
("1.5.1.12" "0003842")
("2.7.1.20" "0004001")
("2.1.1.107" "0004851")
("2.1.3.3" "0004585")
("3.1.3.3" "0004647")
("1.16.3.1" "0004322")
("5.1.3.3" "0004034")
("3.5.3.15" "0004668")
("3.2.2.21" "0003905")
("3.4.24.65" "0004234")
("3.6.3.7" "0008554")
("2.4.1.119" "0004579")
("2.4.1.163" "0008457")
("1.6.99.2" "0003955")
("4.1.2.13" "0004332")
("4.1.1.36" "0004633")
("2.4.1.68" "0008424")
("3.5.1.52" "0000224")
("3.4.21.-" "0008132" "0008243")
("1.3.3.3" "0004109")
("3.1.26.4" "0004523" "0004524")
("3.1.2.6" "0004416")
("6.3.3.3" "0004141")
("3.4.21.48" "0004262")
("4.2.1.36" "0004409")
("2.6.1.52" "0004646" "0004648")
("3.4.21.92" "0008462")
("1.4.3.3" "0003884")
("2.3.2.13" "0003810")
("2.1.1.28" "0004603")
("3.4.21.27" "0003805")
("4.1.1.28" "0004058")
("2.4.1.151" "0003946")
("1.14.11.4" "0008475")
("2.3.1.84" "0004026")
("2.4.1.92" "0003947")
("6.3.4.16" "0004087")
("3.2.1.28" "0004555")
("3.3.2.6" "0004463")
("6.3.2.6" "0004639")
("2.6.1.44" "0008453")
("3.4.21.5" "0003809")
("2.7.7.12" "0008108")
("2.7.1.68" "0004431")
("1.1.3.22" "0004855")
("3.2.1.108" "0000016")
("1.11.1.8" "0004447")
("3.6.3.47" "0008562")
("2.7.6.1" "0004749")
("3.4.17.17" "0004189")
("2.1.2.2" "0004644")
("2.7.1.107" "0004143")
("1.14.15.4" "0004507")
("3.5.4.16" "0003934")
("1.7.3.3" "0004846")
("2.7.3.3" "0004054")
("4.1.1.11" "0004068")
("6.1.1.11" "0004828")
("3.4.22.36" "0004201")
("3.4.21.93" "0004285")
("1.2.1.11" "0004073")
("3.4.17.4" "0004226")
("4.2.1.11" "0004634")
("3.1.3.62" "0004446")
("2.7.1.36" "0004496")
("2.3.2.2" "0003840")
("4.3.2.2" "0004018")
("6.3.2.2" "0004357")
("1.1.1.153" "0004757")
("3.1.1.5" "0004622")
("2.4.1.11" "0004373")
("5.4.2.2" "0004614")
("2.1.1.67" "0008119")
("2.7.9.3" "0004756")
("3.2.1.51" "0004560")
("4.2.1.51" "0004664")
("3.4.15.1" "0004246")
("1.14.99.10" "0004509")
("6.1.1.5" "0004822")
("3.1.27.5" "0004522")
("1.2.1.5" "0004030")
("2.7.1.108" "0004168")
("1.1.1.43" "0008114")
("3.1.2.20" "0004562")
("3.4.16.2" "0004188")
("3.5.2.2" "0004157")
("2.3.1.51" "0003841")
("2.1.1.-" "0008276" "0000179" "0008650" "0008425")
("3.1.1.-" "0004302")
("6.2.1.5" "0004775")
("3.1.3.46" "0004330")
("2.3.1.5" "0004060")
("2.4.1.109" "0004169")
("3.1.1.59" "0004453")
("2.6.1.11" "0003992")
("3.6.1.11" "0004309")
("3.4.21.94" "0004286")
("6.1.1.9" "0004832")
("6.2.1.-" "0003996")
("6.3.5.4" "0004066")
("3.6.3.14" "0003936" "0003936")
("2.4.2.29" "0008479")
("1.1.1.35" "0003857")
("2.3.1.43" "0004607")
("4.4.1.5" "0004462")
("2.7.1.11" "0003872")
("4.2.1.9" "0004160")
("3.1.13.-" "0000175" "0004534")
("2.3.1.-" "0004147")
("1.1.1.1" "0004022" "0004025" "0004023" "0004024")
("2.1.1.1" "0008112")
("3.1.1.1" "0004091")
("4.1.1.1" "0004737")
("3.1.26.3" "0004525")
("6.1.1.1" "0004831")
("2.8.2.2" "0004027")
("1.5.1.5" "0004488")
("3.4.21.38" "0003806")
("3.2.1.35" "0004415")
("1.3.1.9" "0004318")
("2.3.1.9" "0003985")
("2.6.1.51" "0004760" "0004761" "0004762" "0004763")
("3.6.1.51" "0008550")
("2.4.1.-" "0003980" "0000009" "0004580")
("1.2.1.1" "0004327")
("2.2.1.1" "0004802")
("3.2.1.1" "0004556")
("2.3.2.12" "0000048")
("4.2.1.1" "0004089")
("2.4.1.141" "0004577")
("1.1.1.27" "0004459")
("2.3.1.35" "0004358")
("2.6.1.5" "0004838")
("1.14.15.6" "0008386")
("3.6.1.5" "0004050")
("5.1.99.1" "0004493")
("5.3.1.9" "0004347")
("6.2.1.1" "0003987")
("2.5.1.-" "0004662" "0004663" "0004660" "0004661" "0008495")
("1.3.1.1" "0004159")
("2.3.1.1" "0004042")
("3.4.19.12" "0008577")
("3.3.1.1" "0004013")
("1.2.1.27" "0004491")
("5.3.1.1" "0004807")
("6.3.1.1" "0004071")
("2.7.8.2" "0004142")
("6.3.4.15" "0004077")
("3.4.24.56" "0004231")
("3.4.21.4" "0004295")
("2.5.1.9" "0004746")
("4.2.1.75" "0004852")
("1.1.3.21" "0004369")
("1.3.99.1" "0000104")
("3.1.3.21" "0000121")
("4.1.3.21" "0004410")
("2.4.1.1" "0008184" "0004645")
("4.4.1.1" "0004123")
("1.11.1.7" "0004601")
("2.4.1.83" "0004582")
("3.5.1.9" "0004061")
("2.7.1.67" "0004430")
("6.1.1.19" "0004814")
("6.4.1.1" "0004736")
("2.6.1.9" "0004400")
("3.6.1.9" "0004551")
("1.14.14.1" "0005490")
("2.4.99.1" "0003835")
("2.5.1.1" "0004161")
("3.5.1.1" "0004067")
("4.2.1.19" "0004424")
("3.4.23.34" "0004193")
("3.6.4.7" "0008573")
("2.8.2.20" "0008476")
("6.1.1.10" "0004825")
("6.5.1.1" "0003910")
("3.1.3.13" "0004083")
("4.2.99.9" "0003962")
("1.5.99.1" "0008480")
("1.6.1.1" "0003957")
("2.6.1.1" "0004069")
("3.6.1.1" "0004427")
("3.4.17.3" "0004184")
("3.1.3.61" "0004444")
("2.4.1.142" "0004578")
("3.4.11.18" "0004239")
("3.2.1.10" "0004574")
("4.2.1.10" "0003855")
("2.7.7.5" "0004780")
("3.2.1.130" "0004569")
("2.7.1.35" "0008478")
("2.7.1.91" "0008481")
("4.6.1.1" "0004016" "0008294")
("3.1.3.6" "0008254")
("1.6.99.1" "0003959")
("1.3.1.10" "0004319")
("2.7.1.1" "0004396")
("6.3.4.3" "0004329")
("1.14.18.1" "0004503")
("5.3.1.10" "0004342")
("4.1.1.50" "0004014")
("3.4.24.57" "0008450")
("2.29.13.1" "0005469")
("2.7.7.-" "0008192" "0008193")
("1.13.11.24" "0008127")
("3.4.22.14" "0008129")
("2.8.1.1" "0004792")
("2.5.1.19" "0003866")
("3.4.21.71" "0004281")
("3.2.1.50" "0004561")
("3.4.22.-" "0004199")
("2.4.1.130" "0004584")
("1.18.1.2" "0004324")
("2.7.1.75" "0004136")
("1.3.3.6" "0003997")
("2.7.1.99" "0004740")
("1.4.3.13" "0004720")
("2.7.7.9" "0003983")
("1.1.1.42" "0004450")
("2.9.1.1" "0004125")
("2.6.1.19" "0003867")
("3.4.23.35" "0004191")
("2.3.1.50" "0004758")
("1.5.1.10" "0004755")
("2.5.1.10" "0004337")
("3.1.6.8" "0004098")
("1.4.3.6" "0008122")
("3.6.4.3" "0008568")
("3.4.22.27" "0004218")
("4.6.1.10" "0003874")
("2.4.1.143" "0008455")
("2.1.3.2" "0004070")
("3.1.3.2" "0003993")
("4.1.3.2" "0004474")
("2.1.2.11" "0003864")
("2.7.4.3" "0004017")
("5.1.3.2" "0003978")
("1.1.1.34" "0004420")
("3.6.3.13" "0008557")
("3.1.1.34" "0004465")
("3.2.1.58" "0004338")
("1.1.3.37" "0003885")
("3.4.14.10" "0004294")
("3.6.3.6" "0008553")
("1.13.11.34" "0004051")
("3.4.22.15" "0004217")
("3.6.1.50" "0008549")
("5.3.3.2" "0004452")
("2.4.1.131" "0004377")
("2.1.1.98" "0004164")
("2.7.1.50" "0004417")
("2.7.7.19" "0004652")
("2.4.1.90" "0003945")
("4.1.2.5" "0004793")
("2.4.2.11" "0004516")
("3.2.1.26" "0004564")
("3.4.25.1" "0004299")
("2.4.1.34" "0003843")
("6.3.4.14" "0004075")
("2.6.1.42" "0004084")
("3.6.1.42" "0004382")
("2.7.7.10" "0003982")
("1.6.6.8" "0003920")
("2.4.1.144" "0003830")
("3.2.2.5" "0003953")
("1.11.1.6" "0004096")
("2.3.1.26" "0004772")
("6.1.1.18" "0004819")
("3.6.3.45" "0008560")
("3.4.11.2" "0004179")
("1.1.1.145" "0003854")
("3.2.1.18" "0004308")
("4.3.2.5" "0004598")
("4.2.1.18" "0004490")
("2.1.2.9" "0004479")
("6.3.2.5" "0004632")
("3.2.2.-" "0004844")
("3.4.22.16" "0004215")
("3.1.3.12" "0004805")
("4.1.3.12" "0003852")
("3.4.24.34" "0008130")
("4.2.99.8" "0004124")
("2.7.7.50" "0004484")
("3.4.21.73" "0004297")
("2.7.2.11" "0004349")
("2.5.1.26" "0008609")
("3.5.1.26" "0003948")
("3.6.3.37" "0008558")
("2.4.1.132" "0004378")
("2.7.3.2" "0004111")
("2.1.2.1" "0004372")
("3.1.2.1" "0003986")
("2.7.1.82" "0004305")
("2.7.4.14" "0004127")
("2.4.1.18" "0003844")
("3.5.2.5" "0004038")
("3.2.2.1" "0008477")
("2.4.99.8" "0003828")
("4.1.2.27" "0008117")
("5.3.3.12" "0004167")
("2.4.1.145" "0008454")
("2.5.1.18" "0004364")
("2.7.1.26" "0008531")
("2.4.2.9" "0004845")
("3.4.21.61" "0004290")
("1.5.99.8" "0004657")
("2.7.1.74" "0004137")
("4.3.2.1" "0004056")
("6.3.2.1" "0004592")
("3.4.24.7" "0004232")
("1.1.1.41" "0004449")
("2.1.1.41" "0003838")
("3.4.23.25" "0004196")
("1.15.1.1" "0004785" "0008382" "0008383" "0004784")
("4.1.1.41" "0004492")
("3.4.22.38" "0004216")
("1.1.1.146" "0003845")
("3.1.3.68" "0003850")
("2.7.8.11" "0003881")
("2.4.2.1" "0004731")
("3.1.8.1" "0004063")
("3.4.22.17" "0004198")
("5.4.2.1" "0004619")
("3.4.24.35" "0004229")
("3.1.13.4" "0004535")
("1.2.1.41" "0004350")
("2.1.2.10" "0004047")
("3.2.2.19" "0003875")
("3.6.3.12" "0008556")
("2.1.1.33" "0008176")
("3.4.24.21" "0008533")
("4.1.1.33" "0004163")
("2.3.1.41" "0004315")
("1.14.13.17" "0008123")
("1.1.1.49" "0004345")
("2.7.3.12" "0008257")
("3.1.3.36" "0004439")
("4.1.1.49" "0004612")
("1.1.1.8" "0004367")
("3.1.1.8" "0004104")
("2.4.1.41" "0004653")
("3.2.1.33" "0004135")
("2.7.8.5" "0008444")
("4.2.1.33" "0003861")
("3.4.21.62" "0004291")
("1.14.99.1" "0004666")
("6.3.2.19" "0004842")
("6.3.5.3" "0004642")
("1.1.1.25" "0004764")
("2.4.2.19" "0004514")
("3.2.1.49" "0008456")
("4.1.1.25" "0004837")
("1.1.1.73" "0004552")
("3.5.1.41" "0004099")
("2.7.7.18" "0004515")
("5.2.1.8" "0004600" "0003755")
("3.-.-.-" "0003923")
("2.4.2.10" "0004588")
("2.8.2.1" "0004062")
("1.6.2.2." "0004128")
("3.2.1.25" "0004567")
("6.3.4.13" "0004637")
("4.3.1.8" "0004418")
("3.4.21.75" "0004276")
("5.3.1.8" "0004476")
("2.1.1.17" "0004608")
("3.1.1.17" "0004341")
("4.1.1.17" "0004586")
("1.1.1.249" "0004171")
("1.11.1.5" "0004130")
("3.6.3.44" "0008559")
("6.1.1.17" "0004818")
("6.3.4.6" "0004847")
("4.4.1.8" "0004121")
("4.1.1.65" "0004609")
("1.6.5.3" "0008137")
("2.3.1.97" "0004379")
("3.4.11.1" "0004178")
("3.1.4.45" "0003944")
("3.2.1.17" "0003796")
("4.2.1.17" "0004300")
("2.4.1.25" "0004134")
("2.7.8.1" "0004307")
("3.5.1.49" "0004328")
("2.5.1.8" "0004811")
("3.1.3.11" "0004331")
("3.5.4.6" "0003876")
("3.4.17.1" "0004182")
("3.1.1.4" "0004623")
("1.1.99.10" "0004344")
("3.5.1.25" "0008448")
("3.6.1.49" "0008548")
("2.7.1.33" "0004594")
("1.2.4.2" "0004591")
("6.1.1.4" "0004823")
("1.3.99.7" "0004361")
("3.1.4.37" "0004113")
("3.1.-.-" "0004519")
("3.6.4.6" "0008576")
("2.4.1.17" "0003981")
("4.4.1.17" "0004408")
("4.2.99.10" "0003961")
("6.2.1.4" "0004776")
("3.4.11.9" "0008451")
("3.4.24.37" "0004247")
("2.7.7.41" "0004605")
("6.3.4.2" "0003883")
("5.4.99.7" "0000250")
("2.7.4.6" "0004550")
("2.3.1.4" "0004343")
("1.3.99.10" "0008470")
("3.4.23.5" "0004192")
("2.3.1.57" "0004145")
("2.7.1.25" "0004020")
("1.4.4.2" "0004375")
("3.4.24.23" "0004235")
("3.1.3.5" "0008253")
("4.1.3.5" "0004421")
("1.3.-.-" "0000248")
("1.8.4.6" "0008113")
("1.14.17.1" "0004500")
("1.4.1.4" "0004354")
("3.1.3.67" "0004440")
("3.6.1.17" "0004081")
("2.4.99.10" "0004513")
("3.4.23.15" "0004195")
("1.1.1.40" "0004473")
("6.4.1.4" "0004485")
("1.1.1.204" "0004854")
("3.5.4.2" "0000034")
("1.6.99.7" "0004155")
("3.1.3.43" "0004741")
("1.14.99.3" "0004392")
("3.1.3.-" "0008579" "0004721" "0008138" "0008330")
("3.5.1.4" "0004040")
("3.4.14.5" "0004274")
("2.7.1.17" "0004856")
("5.5.1.4" "0004512")
("6.5.1.4" "0003963")
("1.6.4.2" "0004362")
("2.7.7.49" "0003964" "0003967" "0003966" "0003721")
("3.6.4.2" "0008567")
("2.7.7.8" "0004654")
("3.6.3.11" "0008555")
("3.1.3.9" "0004346")
("3.4.24.59" "0004243")
("2.1.1.32" "0004809")
("4.1.1.32" "0004613")
("3.4.24.11" "0004245")
("4.6.1.4" "0004107")
("2.7.7.25" "0004810")
("3.1.4.12" "0004767")
("2.7.4.2" "0004631")
("1.4.3.5" "0004733")
("2.7.3.11" "0008256")
("2.4.1.40" "0004380")
("1.3.99.11" "0004152")
("3.1.3.1" "0004035")
("4.1.3.1" "0004451")
("5.1.3.1" "0004750")
("2.4.2.18" "0004048")
("3.4.24.24" "0004228")
("2.3.1.32" "0004468")
("1.4.3.-" "0008131")
("3.6.3.51" "0008566")
("1.8.1.4" "0004148")
("3.8.1.4" "0004800")
("2.1.1.96" "0004790")
("1.2.3.1" "0004031")
("2.7.1.123" "0004685" "0004684")
("1.1.1.205" "0003938")
("1.2.1.24" "0004777")
("4.1.3.27" "0004049")
("3.2.1.24" "0004559")
("4.2.1.24" "0004655")
("1.3.3.1" "0004158")
("2.4.1.80" "0008120")
("4.2.1.96" "0008124")
("3.4.19.3" "0004219")
("2.1.1.16" "0004481")
("5.3.3.1" "0004769")
("1.3.1.24" "0004074")
("6.3.3.1" "0004641")
("6.1.1.16" "0004817")
("1.13.11.11" "0004833")
("5.3.1.24" "0004640")
("3.6.3.-" "0004009")
("2.7.7.4" "0004781")
("1.4.3.1" "0008445")
("2.7.1.40" "0004743")
("3.5.4.12" "0004132")
("2.1.1.125" "0008469")
("4.2.1.16" "0004794")
("2.8.3.5" "0008260")
("3.6.3.9" "0005391")
("1.1.1.100" "0004316")
("2.1.1.100" "0004671")
("1.13.11.27" "0003868")
("1.5.3.1" "0008115")
("2.3.1.88" "0004596")
("3.5.3.1" "0004053")
("1.3.99.12" "0003853")
("2.3.1.16" "0003988")
("3.4.24.-" "0008319" "0004249")
("2.1.1.56" "0004482")
("5.3.1.16" "0003949")
("2.7.1.32" "0004103")
("2.8.3.-" "0008410")
("1.14.17.3" "0004504")
("3.6.3.1" "0004012")
("2.4.1.16" "0004100")
("3.1.21.1" "0004530")
("1.14.99.5" "0004768")
("2.4.99.6" "0008118")
("4.1.2.25" "0004150")
("2.5.1.16" "0004766")
("5.99.1.3" "0003918")
("4.1.1.48" "0004425")
("2.7.1.24" "0004140")
("3.4.21.41" "0003815")
("3.2.1.113" "0004571")
("1.13.11.21" "0003834")
("1.17.4.1" "0004748")
("1.8.3.1" "0008482")
("3.5.3.19" "0004848")
("2.3.2.8" "0004057")
("3.2.1.48" "0004575")
("2.6.1.16" "0004360")
("2.7.1.112" "0004713")
("1.1.1.101" "0000140")
("1.9.3.1" "0004129")
("1.3.99.13" "0004466")
("3.4.21.54" "0004253")
("3.1.13.2" "0004533")
("2.4.2.8" "0004422")
("2.3.1.48" "0004402" "0004403" "0004404")
("1.10.2.2" "0008121")
("5.4.2.8" "0004615")
("3.4.21.1" "0004263")
("2.7.7.48" "0003968")
("6.3.2.25" "0004835")
("1.1.1.31" "0008442")
("3.6.3.10" "0005390")
("3.1.4.11" "0004435")
("2.1.1.114" "0004395")
("1.14.99.22" "0004501")
("1.1.2.4" "0004458")
("2.7.6.3" "0003848")
("3.1.2.4" "0003860")
("4.1.2.4" "0004139")
("1.2.1.31" "0004043")
("6.3.2.17" "0004326")
("3.2.1.31" "0004566")
("3.1.6.14" "0008449")
("3.4.21.42" "0003816")
("3.2.1.114" "0004572")
("1.13.11.31" "0004052")
("2.4.1.101" "0003827")
("2.4.2.17" "0003879")
("1.1.1.23" "0004399")
("4.1.1.23" "0004590")
("2.3.1.31" "0004414")
("3.6.3.50" "0008564")
("2.7.2.8" "0003991")
("3.6.1.48" "0008547")
("1.1.1.95" "0004617")
("3.4.21.9" "0004275")
("2.7.1.113" "0004138")
("6.3.5.2" "0003922")
("2.3.2.4" "0003839")
("6.3.4.11" "0004078")
("3.2.1.23" "0004565")
("3.1.1.7" "0003990")
("6.1.1.7" "0004813")
("2.7.1.48" "0004849")
("4.1.1.15" "0004351")
("6.1.1.15" "0004827")
("5.4.2.4" "0004082")
("3.5.1.31" "0008463")
("3.4.13.19" "0004237")
("1.1.99.5" "0004368")
("2.1.1.63" "0003908")
("4.1.3.18" "0003984")
("3.2.1.15" "0004650")
("2.3.1.7" "0004092")
("3.1.3.66" "0004443")
("3.6.1.31" "0004636")
("2.3.1.87" "0004059")
("2.7.8.8" "0003882")
("3.4.21.43" "0003813")
("3.6.4.11" "0008572")
("2.3.1.15" "0004366")
("1.6.2.4" "0003958")
("2.8.2.17" "0008459")
("2.4.1.102" "0003829")
("6.3.4.5" "0004055")
("1.3.99.5" "0003865")
("3.4.11.22" "0004250")
("5.3.99.5" "0004796")
("2.4.1.15" "0003825")
("1.10.3.3" "0008447")
("2.7.2.4" "0004072")
("1.5.1.7" "0004754")
("2.1.1.103" "0000234")
("4.2.1.55" "0003859")
("3.6.1.23" "0004170")
("3.4.11.7" "0004230")
("3.1.4.1" "0004528")
("3.1.27.9" "0000213")
("3.5.4.5" "0004126")
("1.1.1.3" "0004412")
("3.1.1.3" "0004806")
("1.5.1.15" "0004487")
("2.5.1.15" "0004156")
("3.1.1.47" "0003847")
("2.8.2.4" "0004304")
("3.1.22.1" "0004531")
("3.4.16.6" "0004187")
("3.6.1.7" "0003998")
("2.7.1.23" "0003951")
("3.4.24" "0008133")
("1.2.4.1" "0004739")
("2.7.8.17" "0003976")
("1.6.4.5" "0004791")
("2.7.1.71" "0004765")
("3.6.4.5" "0008569")
("1.2.1.3" "0004029")
("3.2.1.3" "0004339")
("4.2.1.3" "0003994")
("2.4.1.149" "0008532")
("5.2.1.3" "0004744")
("4.2.1.47" "0008446")
("5.4.99.5" "0004106")
("5.99.1.2" "0003917")
("6.1.1.3" "0004829")
("6.2.1.3" "0004467")
("5.3.4.1" "0003756")
("2.7.7.31" "0003912")
("6.3.4.1" "0003921")
("6.3.4.9" "0004079")
("3.1.3.41" "0003869")
("1.1.1.39" "0004471")
("3.5.4.9" "0004477")
("2.7.1.15" "0004747")
("4.3.1.3" "0004397")
("2.3.1.137" "0008458")
("3.5.1.55" "0008421")
("1.14.16.1" "0004505")
("1.1.1.30" "0003858")
("1.4.1.3" "0004353")
("3.4.21.78" "0004277")
("3.6.4.9" "0003763" "0003764" "0003765" "0003766")
("3.4.22.1" "0004213")
("6.4.1.3" "0004658")
("1.5.4.1" "0004734")
("3.1.4.10" "0004436")
("3.5.4.1" "0004131")
("2.7.7.23" "0003977")
("3.1.3.33" "0004651")
("3.4.22.40" "0008423")
("1.5.1.3" "0004146")
("2.5.1.3" "0004789")
("2.7.4.9" "0004798")
("2.3.1.39" "0004314")
("3.1.6.13" "0004423")
("6.5.1.3" "0003972")
("2.7.7.7" "0003887" "0003888" "0003889" "0003890" "0003891" "0003893" "0003895" "0003894")
("3.6.4.1" "0008570")
("1.1.1.22" "0003979")
("4.1.3.8" "0003878")
("4.1.1.22" "0004398")
("3.6.1.3" "0004003" "0008026" "0008094" "0008186" "0004002")
("4.6.1.3" "0003856")
("6.1.1.22" "0004816")
("3.6.1.47" "0003926" "0003928" "0003929" "0003930" "0008619" "0003931" "0003932" "0003925")
("2.7.7.15" "0004105")
("1.2.3.8" "0004732")
("3.1.4.50" "0004621")
("6.3.4.10" "0004080")
("3.2.1.22" "0004557")
("3.4.25.-" "0004298")
("4.2.1.22" "0004122")
("2.7.1.3" "0004454")
("1.1.1.86" "0004455")
("3.4.21.45" "0003818")
("1.1.1.184" "0004090")
("4.2.1.70" "0004730")
("2.7.1.137" "0004429")
("3.4.24.17" "0004248")
("1.13.11.5" "0004411")
("1.1.1.14" "0003939")
("2.1.1.14" "0003871")
("2.3.1.22" "0003846")
("3.4.17.22" "0008472")
("1.14.16.2" "0004511")
("1.1.1.62" "0004303")
("3.5.4.19" "0004635")
("3.4.21.79" "0004278")
("3.4.21.20" "0004261")
("5.3.3.8" "0008461" "0004165")
("6.1.1.14" "0004820")
("5.1.99.4" "0008111")
("3.5.4.10" "0003937")
("1.1.1.105" "0004745")
("3.2.1.14" "0004568")
("2.4.1.22" "0004461")
("3.1.3.65" "0004442")
("2.7.1.39" "0004413")
("2.3.1.86" "0004321")
("3.6.4.10" "0008571")
("1.3.1.14" "0004589")
("3.1.6.2" "0004773")
("3.5.1.22" "0004593")
("2.7.1.30" "0004370")
("2.7.7.3" "0004595")
("3.4.17.10" "0004183")
("3.1.3.4" "0008195")
("4.1.3.4" "0004419")
("5.3.99.4" "0008116")
("1.10.3.2" "0008471")
("3.6.3.49" "0005260")
("3.1.3.57" "0004441")
("3.6.3.8" "0005388")
("3.6.1.22" "0000210")
("3.4.21.46" "0003817") | null | https://raw.githubusercontent.com/jeffshrager/biobike/5313ec1fe8e82c21430d645e848ecc0386436f57/biolingua/ec2go.lsp | lisp |
("2.4.99.4" "0003836")
("3.4.16.5" "0004186" "0004258")
("2.3.1.139" "0004173")
("3.5.1.14" "0004046")
("4.99.1.1" "0004325")
("3.4.21.21" "0003802")
("1.3.3.4" "0004729")
("3.2.2.23" "0003907")
("2.4.2.31" "0003956")
("3.2.1.46" "0004336")
("4.2.1.46" "0008460")
("2.6.1.62" "0004015")
("3.1.2.15" "0004221")
("4.1.2.15" "0003849")
("1.1.1.38" "0004472")
("1.4.3.4" "0004041")
("3.4.21.34" "0003807")
("3.2.1.106" "0004573")
("3.5.1.54" "0004039")
("3.4.19.1" "0004254")
("1.2.1.38" "0003942")
("1.14.99.7" "0004506")
("3.4.21.68" "0004296")
("3.5.3.4" "0004037")
("2.7.1.105" "0003873")
("6.3.5.5" "0004088")
("1.8.99.4" "0004604")
("3.4.21.47" "0003812")
("2.4.2.7" "0003999")
("1.14.13.13" "0004498")
("2.3.1.38" "0004313")
("3.4.13.9" "0004251")
("3.1.6.12" "0003943")
("2.7.6.2" "0004788")
("3.4.21.22" "0003803")
("1.14.16.4" "0004510")
("3.1.3.48" "0004727" "0004725")
("1.1.1.21" "0004032")
("4.1.1.21" "0004638")
("2.4.1.38" "0003831")
("6.1.1.21" "0004821")
("3.4.21.7" "0004283")
("3.6.1.46" "0003927")
("2.7.7.14" "0004306")
("3.4.19.9" "0008464")
("1.1.2.3" "0004460")
("2.1.2.3" "0004643")
("3.1.2.3" "0004778")
("3.2.1.21" "0008422")
("1.1.1.85" "0003862")
("3.4.21.35" "0004293")
("1.6.5.5" "0003960")
("2.7.1.127" "0008440")
("3.1.1.13" "0004771")
("3.4.21.10" "0004284")
("2.3.1.21" "0004095")
("2.8.2.23" "0008467")
("2.6.1.38" "0008110")
("2.1.1.61" "0004808")
("3.4.21.69" "0003808")
("1.3.5.1" "0008177")
("5.3.1.21" "0004110")
("6.3.5.1" "0003952")
("4.1.99.3" "0003904")
("3.1.3.16" "0004722")
("3.4.11.-" "0004177")
("3.3.2.3" "0004301")
("4.2.1.13" "0003941")
("3.1.1.6" "0008126")
("3.1.3.64" "0004438")
("2.1.1.77" "0004719")
("2.7.1.38" "0004689")
("6.1.1.6" "0004824")
("2.3.1.85" "0004312")
("4.2.1.61" "0004317")
("1.13.11.42" "0004426")
("6.3.2.3" "0004363")
("1.3.1.13" "0004665")
("2.4.2.3" "0004850")
("5.4.2.3" "0004610")
("2.5.1.21" "0004310")
("1.5.5.1" "0004174")
("2.3.1.61" "0004149")
("1.3.99.3" "0003995")
("1.4.1.13" "0004355")
("3.5.2.3" "0004151")
("3.6.3.48" "0008563")
("3.1.3.56" "0004445")
("2.3.1.6" "0004102")
("3.1.1.69" "0000225")
("1.10.3.1" "0004097")
("5.3.1.6" "0004751")
("3.4.21.36" "0008125")
("1.14.13.39" "0004517")
("3.1.2.22" "0008474")
("3.1.26.-" "0004521")
("2.1.1.45" "0004799")
("3.4.23.1" "0004194")
("2.7.1.21" "0004797")
("2.7.8.15" "0003975")
("1.14.99.9" "0004508")
("1.1.1.2" "0008106")
("3.1.1.2" "0004064")
("6.1.1.2" "0004830")
("2.7.2.3" "0004618")
("2.6.1.13" "0004587")
("3.2.1.45" "0004348")
("2.5.1.6" "0004478")
("3.5.1.6" "0003837")
("2.4.2.30" "0003950")
("1.14.13.8" "0004499")
("1.6.99.3" "0003954")
("3.1.2.14" "0004320")
("2.2.1.2" "0004801")
("4.2.1.2" "0004333")
("1.1.1.37" "0004470")
("2.1.1.37" "0003886")
("4.1.1.37" "0004853")
("3.4.14.1" "0004214")
("2.4.1.69" "0008107")
("1.14.11.1" "0008336")
("3.1.4.17" "0004114")
("3.1.26.5" "0004526")
("6.3.1.2" "0004356")
("2.4.1.45" "0003851")
("2.7.1.6" "0004335")
("2.4.1.117" "0004581")
("1.4.1.2" "0004352")
("1.1.1.29" "0008465")
("3.1.1.29" "0004045")
("4.1.1.29" "0004782")
("2.3.1.37" "0003870")
("6.4.1.2" "0003989")
("2.8.1.6" "0004076")
("3.1.4.4" "0004630")
("2.4.2.14" "0004044")
("3.5.4.25" "0003935")
("1.5.1.2" "0004735")
("3.6.4.8" "0008575")
("3.5.1.2" "0004359")
("2.4.1.37" "0004381")
("6.1.1.20" "0004826")
("3.4.21.6" "0003804")
("6.5.1.2" "0003911")
("2.7.7.13" "0004475")
("1.2.4.4" "0003863")
("1.14.13.9" "0004502")
("1.11.1.9" "0004602")
("2.6.1.2" "0004021")
("2.7.4.8" "0004385")
("3.2.1.20" "0004558")
("4.2.1.20" "0004834")
("3.4.24.18" "0004238")
("2.5.1.37" "0004464")
("4.6.1.2" "0004383" "0008075")
("4.2.99.18" "0003906")
("2.7.7.6" "0003899" "0000129" "0003900" "0003901" "0003902")
("2.7.1.117" "0004687")
("1.14.11.2" "0004656")
("3.1.3.7" "0008441")
("4.1.3.7" "0004108")
("5.1.3.7" "0003974")
("2.3.1.20" "0004144")
("2.7.1.2" "0004340")
("3.7.1.2" "0004334")
("6.1.1.12" "0004815")
("1.14.11.16" "0004597")
("6.3.4.4" "0004019")
("1.1.3.15" "0003973")
("3.1.3.15" "0004401")
("3.4.24.64" "0004240")
("1.2.1.12" "0004365")
("2.7.7.53" "0003877")
("1.8.1.2" "0004783")
("2.5.1.29" "0004311")
("4.2.1.12" "0004456")
("2.7.1.37" "0004674")
("4.2.99.2" "0004795")
("3.5.4.4" "0004000")
("2.3.1.12" "0004742")
("1.5.1.20" "0004489")
("4.3.1.12" "0008473")
("3.2.1.76" "0003940")
("1.3.99.2" "0004085")
("5.3.99.2" "0004667")
("3.6.4.4" "0008574")
("3.4.22.34" "0001509")
("2.7.1.29" "0004371")
("3.2.1.52" "0004563")
("2.7.7.2" "0003919")
("2.4.1.150" "0008109")
("3.4.21.26" "0004287")
("5.4.99.2" "0004494")
("1.1.1.44" "0004616")
("3.1.6.1" "0004065")
("1.5.1.12" "0003842")
("2.7.1.20" "0004001")
("2.1.1.107" "0004851")
("2.1.3.3" "0004585")
("3.1.3.3" "0004647")
("1.16.3.1" "0004322")
("5.1.3.3" "0004034")
("3.5.3.15" "0004668")
("3.2.2.21" "0003905")
("3.4.24.65" "0004234")
("3.6.3.7" "0008554")
("2.4.1.119" "0004579")
("2.4.1.163" "0008457")
("1.6.99.2" "0003955")
("4.1.2.13" "0004332")
("4.1.1.36" "0004633")
("2.4.1.68" "0008424")
("3.5.1.52" "0000224")
("3.4.21.-" "0008132" "0008243")
("1.3.3.3" "0004109")
("3.1.26.4" "0004523" "0004524")
("3.1.2.6" "0004416")
("6.3.3.3" "0004141")
("3.4.21.48" "0004262")
("4.2.1.36" "0004409")
("2.6.1.52" "0004646" "0004648")
("3.4.21.92" "0008462")
("1.4.3.3" "0003884")
("2.3.2.13" "0003810")
("2.1.1.28" "0004603")
("3.4.21.27" "0003805")
("4.1.1.28" "0004058")
("2.4.1.151" "0003946")
("1.14.11.4" "0008475")
("2.3.1.84" "0004026")
("2.4.1.92" "0003947")
("6.3.4.16" "0004087")
("3.2.1.28" "0004555")
("3.3.2.6" "0004463")
("6.3.2.6" "0004639")
("2.6.1.44" "0008453")
("3.4.21.5" "0003809")
("2.7.7.12" "0008108")
("2.7.1.68" "0004431")
("1.1.3.22" "0004855")
("3.2.1.108" "0000016")
("1.11.1.8" "0004447")
("3.6.3.47" "0008562")
("2.7.6.1" "0004749")
("3.4.17.17" "0004189")
("2.1.2.2" "0004644")
("2.7.1.107" "0004143")
("1.14.15.4" "0004507")
("3.5.4.16" "0003934")
("1.7.3.3" "0004846")
("2.7.3.3" "0004054")
("4.1.1.11" "0004068")
("6.1.1.11" "0004828")
("3.4.22.36" "0004201")
("3.4.21.93" "0004285")
("1.2.1.11" "0004073")
("3.4.17.4" "0004226")
("4.2.1.11" "0004634")
("3.1.3.62" "0004446")
("2.7.1.36" "0004496")
("2.3.2.2" "0003840")
("4.3.2.2" "0004018")
("6.3.2.2" "0004357")
("1.1.1.153" "0004757")
("3.1.1.5" "0004622")
("2.4.1.11" "0004373")
("5.4.2.2" "0004614")
("2.1.1.67" "0008119")
("2.7.9.3" "0004756")
("3.2.1.51" "0004560")
("4.2.1.51" "0004664")
("3.4.15.1" "0004246")
("1.14.99.10" "0004509")
("6.1.1.5" "0004822")
("3.1.27.5" "0004522")
("1.2.1.5" "0004030")
("2.7.1.108" "0004168")
("1.1.1.43" "0008114")
("3.1.2.20" "0004562")
("3.4.16.2" "0004188")
("3.5.2.2" "0004157")
("2.3.1.51" "0003841")
("2.1.1.-" "0008276" "0000179" "0008650" "0008425")
("3.1.1.-" "0004302")
("6.2.1.5" "0004775")
("3.1.3.46" "0004330")
("2.3.1.5" "0004060")
("2.4.1.109" "0004169")
("3.1.1.59" "0004453")
("2.6.1.11" "0003992")
("3.6.1.11" "0004309")
("3.4.21.94" "0004286")
("6.1.1.9" "0004832")
("6.2.1.-" "0003996")
("6.3.5.4" "0004066")
("3.6.3.14" "0003936" "0003936")
("2.4.2.29" "0008479")
("1.1.1.35" "0003857")
("2.3.1.43" "0004607")
("4.4.1.5" "0004462")
("2.7.1.11" "0003872")
("4.2.1.9" "0004160")
("3.1.13.-" "0000175" "0004534")
("2.3.1.-" "0004147")
("1.1.1.1" "0004022" "0004025" "0004023" "0004024")
("2.1.1.1" "0008112")
("3.1.1.1" "0004091")
("4.1.1.1" "0004737")
("3.1.26.3" "0004525")
("6.1.1.1" "0004831")
("2.8.2.2" "0004027")
("1.5.1.5" "0004488")
("3.4.21.38" "0003806")
("3.2.1.35" "0004415")
("1.3.1.9" "0004318")
("2.3.1.9" "0003985")
("2.6.1.51" "0004760" "0004761" "0004762" "0004763")
("3.6.1.51" "0008550")
("2.4.1.-" "0003980" "0000009" "0004580")
("1.2.1.1" "0004327")
("2.2.1.1" "0004802")
("3.2.1.1" "0004556")
("2.3.2.12" "0000048")
("4.2.1.1" "0004089")
("2.4.1.141" "0004577")
("1.1.1.27" "0004459")
("2.3.1.35" "0004358")
("2.6.1.5" "0004838")
("1.14.15.6" "0008386")
("3.6.1.5" "0004050")
("5.1.99.1" "0004493")
("5.3.1.9" "0004347")
("6.2.1.1" "0003987")
("2.5.1.-" "0004662" "0004663" "0004660" "0004661" "0008495")
("1.3.1.1" "0004159")
("2.3.1.1" "0004042")
("3.4.19.12" "0008577")
("3.3.1.1" "0004013")
("1.2.1.27" "0004491")
("5.3.1.1" "0004807")
("6.3.1.1" "0004071")
("2.7.8.2" "0004142")
("6.3.4.15" "0004077")
("3.4.24.56" "0004231")
("3.4.21.4" "0004295")
("2.5.1.9" "0004746")
("4.2.1.75" "0004852")
("1.1.3.21" "0004369")
("1.3.99.1" "0000104")
("3.1.3.21" "0000121")
("4.1.3.21" "0004410")
("2.4.1.1" "0008184" "0004645")
("4.4.1.1" "0004123")
("1.11.1.7" "0004601")
("2.4.1.83" "0004582")
("3.5.1.9" "0004061")
("2.7.1.67" "0004430")
("6.1.1.19" "0004814")
("6.4.1.1" "0004736")
("2.6.1.9" "0004400")
("3.6.1.9" "0004551")
("1.14.14.1" "0005490")
("2.4.99.1" "0003835")
("2.5.1.1" "0004161")
("3.5.1.1" "0004067")
("4.2.1.19" "0004424")
("3.4.23.34" "0004193")
("3.6.4.7" "0008573")
("2.8.2.20" "0008476")
("6.1.1.10" "0004825")
("6.5.1.1" "0003910")
("3.1.3.13" "0004083")
("4.2.99.9" "0003962")
("1.5.99.1" "0008480")
("1.6.1.1" "0003957")
("2.6.1.1" "0004069")
("3.6.1.1" "0004427")
("3.4.17.3" "0004184")
("3.1.3.61" "0004444")
("2.4.1.142" "0004578")
("3.4.11.18" "0004239")
("3.2.1.10" "0004574")
("4.2.1.10" "0003855")
("2.7.7.5" "0004780")
("3.2.1.130" "0004569")
("2.7.1.35" "0008478")
("2.7.1.91" "0008481")
("4.6.1.1" "0004016" "0008294")
("3.1.3.6" "0008254")
("1.6.99.1" "0003959")
("1.3.1.10" "0004319")
("2.7.1.1" "0004396")
("6.3.4.3" "0004329")
("1.14.18.1" "0004503")
("5.3.1.10" "0004342")
("4.1.1.50" "0004014")
("3.4.24.57" "0008450")
("2.29.13.1" "0005469")
("2.7.7.-" "0008192" "0008193")
("1.13.11.24" "0008127")
("3.4.22.14" "0008129")
("2.8.1.1" "0004792")
("2.5.1.19" "0003866")
("3.4.21.71" "0004281")
("3.2.1.50" "0004561")
("3.4.22.-" "0004199")
("2.4.1.130" "0004584")
("1.18.1.2" "0004324")
("2.7.1.75" "0004136")
("1.3.3.6" "0003997")
("2.7.1.99" "0004740")
("1.4.3.13" "0004720")
("2.7.7.9" "0003983")
("1.1.1.42" "0004450")
("2.9.1.1" "0004125")
("2.6.1.19" "0003867")
("3.4.23.35" "0004191")
("2.3.1.50" "0004758")
("1.5.1.10" "0004755")
("2.5.1.10" "0004337")
("3.1.6.8" "0004098")
("1.4.3.6" "0008122")
("3.6.4.3" "0008568")
("3.4.22.27" "0004218")
("4.6.1.10" "0003874")
("2.4.1.143" "0008455")
("2.1.3.2" "0004070")
("3.1.3.2" "0003993")
("4.1.3.2" "0004474")
("2.1.2.11" "0003864")
("2.7.4.3" "0004017")
("5.1.3.2" "0003978")
("1.1.1.34" "0004420")
("3.6.3.13" "0008557")
("3.1.1.34" "0004465")
("3.2.1.58" "0004338")
("1.1.3.37" "0003885")
("3.4.14.10" "0004294")
("3.6.3.6" "0008553")
("1.13.11.34" "0004051")
("3.4.22.15" "0004217")
("3.6.1.50" "0008549")
("5.3.3.2" "0004452")
("2.4.1.131" "0004377")
("2.1.1.98" "0004164")
("2.7.1.50" "0004417")
("2.7.7.19" "0004652")
("2.4.1.90" "0003945")
("4.1.2.5" "0004793")
("2.4.2.11" "0004516")
("3.2.1.26" "0004564")
("3.4.25.1" "0004299")
("2.4.1.34" "0003843")
("6.3.4.14" "0004075")
("2.6.1.42" "0004084")
("3.6.1.42" "0004382")
("2.7.7.10" "0003982")
("1.6.6.8" "0003920")
("2.4.1.144" "0003830")
("3.2.2.5" "0003953")
("1.11.1.6" "0004096")
("2.3.1.26" "0004772")
("6.1.1.18" "0004819")
("3.6.3.45" "0008560")
("3.4.11.2" "0004179")
("1.1.1.145" "0003854")
("3.2.1.18" "0004308")
("4.3.2.5" "0004598")
("4.2.1.18" "0004490")
("2.1.2.9" "0004479")
("6.3.2.5" "0004632")
("3.2.2.-" "0004844")
("3.4.22.16" "0004215")
("3.1.3.12" "0004805")
("4.1.3.12" "0003852")
("3.4.24.34" "0008130")
("4.2.99.8" "0004124")
("2.7.7.50" "0004484")
("3.4.21.73" "0004297")
("2.7.2.11" "0004349")
("2.5.1.26" "0008609")
("3.5.1.26" "0003948")
("3.6.3.37" "0008558")
("2.4.1.132" "0004378")
("2.7.3.2" "0004111")
("2.1.2.1" "0004372")
("3.1.2.1" "0003986")
("2.7.1.82" "0004305")
("2.7.4.14" "0004127")
("2.4.1.18" "0003844")
("3.5.2.5" "0004038")
("3.2.2.1" "0008477")
("2.4.99.8" "0003828")
("4.1.2.27" "0008117")
("5.3.3.12" "0004167")
("2.4.1.145" "0008454")
("2.5.1.18" "0004364")
("2.7.1.26" "0008531")
("2.4.2.9" "0004845")
("3.4.21.61" "0004290")
("1.5.99.8" "0004657")
("2.7.1.74" "0004137")
("4.3.2.1" "0004056")
("6.3.2.1" "0004592")
("3.4.24.7" "0004232")
("1.1.1.41" "0004449")
("2.1.1.41" "0003838")
("3.4.23.25" "0004196")
("1.15.1.1" "0004785" "0008382" "0008383" "0004784")
("4.1.1.41" "0004492")
("3.4.22.38" "0004216")
("1.1.1.146" "0003845")
("3.1.3.68" "0003850")
("2.7.8.11" "0003881")
("2.4.2.1" "0004731")
("3.1.8.1" "0004063")
("3.4.22.17" "0004198")
("5.4.2.1" "0004619")
("3.4.24.35" "0004229")
("3.1.13.4" "0004535")
("1.2.1.41" "0004350")
("2.1.2.10" "0004047")
("3.2.2.19" "0003875")
("3.6.3.12" "0008556")
("2.1.1.33" "0008176")
("3.4.24.21" "0008533")
("4.1.1.33" "0004163")
("2.3.1.41" "0004315")
("1.14.13.17" "0008123")
("1.1.1.49" "0004345")
("2.7.3.12" "0008257")
("3.1.3.36" "0004439")
("4.1.1.49" "0004612")
("1.1.1.8" "0004367")
("3.1.1.8" "0004104")
("2.4.1.41" "0004653")
("3.2.1.33" "0004135")
("2.7.8.5" "0008444")
("4.2.1.33" "0003861")
("3.4.21.62" "0004291")
("1.14.99.1" "0004666")
("6.3.2.19" "0004842")
("6.3.5.3" "0004642")
("1.1.1.25" "0004764")
("2.4.2.19" "0004514")
("3.2.1.49" "0008456")
("4.1.1.25" "0004837")
("1.1.1.73" "0004552")
("3.5.1.41" "0004099")
("2.7.7.18" "0004515")
("5.2.1.8" "0004600" "0003755")
("3.-.-.-" "0003923")
("2.4.2.10" "0004588")
("2.8.2.1" "0004062")
("1.6.2.2." "0004128")
("3.2.1.25" "0004567")
("6.3.4.13" "0004637")
("4.3.1.8" "0004418")
("3.4.21.75" "0004276")
("5.3.1.8" "0004476")
("2.1.1.17" "0004608")
("3.1.1.17" "0004341")
("4.1.1.17" "0004586")
("1.1.1.249" "0004171")
("1.11.1.5" "0004130")
("3.6.3.44" "0008559")
("6.1.1.17" "0004818")
("6.3.4.6" "0004847")
("4.4.1.8" "0004121")
("4.1.1.65" "0004609")
("1.6.5.3" "0008137")
("2.3.1.97" "0004379")
("3.4.11.1" "0004178")
("3.1.4.45" "0003944")
("3.2.1.17" "0003796")
("4.2.1.17" "0004300")
("2.4.1.25" "0004134")
("2.7.8.1" "0004307")
("3.5.1.49" "0004328")
("2.5.1.8" "0004811")
("3.1.3.11" "0004331")
("3.5.4.6" "0003876")
("3.4.17.1" "0004182")
("3.1.1.4" "0004623")
("1.1.99.10" "0004344")
("3.5.1.25" "0008448")
("3.6.1.49" "0008548")
("2.7.1.33" "0004594")
("1.2.4.2" "0004591")
("6.1.1.4" "0004823")
("1.3.99.7" "0004361")
("3.1.4.37" "0004113")
("3.1.-.-" "0004519")
("3.6.4.6" "0008576")
("2.4.1.17" "0003981")
("4.4.1.17" "0004408")
("4.2.99.10" "0003961")
("6.2.1.4" "0004776")
("3.4.11.9" "0008451")
("3.4.24.37" "0004247")
("2.7.7.41" "0004605")
("6.3.4.2" "0003883")
("5.4.99.7" "0000250")
("2.7.4.6" "0004550")
("2.3.1.4" "0004343")
("1.3.99.10" "0008470")
("3.4.23.5" "0004192")
("2.3.1.57" "0004145")
("2.7.1.25" "0004020")
("1.4.4.2" "0004375")
("3.4.24.23" "0004235")
("3.1.3.5" "0008253")
("4.1.3.5" "0004421")
("1.3.-.-" "0000248")
("1.8.4.6" "0008113")
("1.14.17.1" "0004500")
("1.4.1.4" "0004354")
("3.1.3.67" "0004440")
("3.6.1.17" "0004081")
("2.4.99.10" "0004513")
("3.4.23.15" "0004195")
("1.1.1.40" "0004473")
("6.4.1.4" "0004485")
("1.1.1.204" "0004854")
("3.5.4.2" "0000034")
("1.6.99.7" "0004155")
("3.1.3.43" "0004741")
("1.14.99.3" "0004392")
("3.1.3.-" "0008579" "0004721" "0008138" "0008330")
("3.5.1.4" "0004040")
("3.4.14.5" "0004274")
("2.7.1.17" "0004856")
("5.5.1.4" "0004512")
("6.5.1.4" "0003963")
("1.6.4.2" "0004362")
("2.7.7.49" "0003964" "0003967" "0003966" "0003721")
("3.6.4.2" "0008567")
("2.7.7.8" "0004654")
("3.6.3.11" "0008555")
("3.1.3.9" "0004346")
("3.4.24.59" "0004243")
("2.1.1.32" "0004809")
("4.1.1.32" "0004613")
("3.4.24.11" "0004245")
("4.6.1.4" "0004107")
("2.7.7.25" "0004810")
("3.1.4.12" "0004767")
("2.7.4.2" "0004631")
("1.4.3.5" "0004733")
("2.7.3.11" "0008256")
("2.4.1.40" "0004380")
("1.3.99.11" "0004152")
("3.1.3.1" "0004035")
("4.1.3.1" "0004451")
("5.1.3.1" "0004750")
("2.4.2.18" "0004048")
("3.4.24.24" "0004228")
("2.3.1.32" "0004468")
("1.4.3.-" "0008131")
("3.6.3.51" "0008566")
("1.8.1.4" "0004148")
("3.8.1.4" "0004800")
("2.1.1.96" "0004790")
("1.2.3.1" "0004031")
("2.7.1.123" "0004685" "0004684")
("1.1.1.205" "0003938")
("1.2.1.24" "0004777")
("4.1.3.27" "0004049")
("3.2.1.24" "0004559")
("4.2.1.24" "0004655")
("1.3.3.1" "0004158")
("2.4.1.80" "0008120")
("4.2.1.96" "0008124")
("3.4.19.3" "0004219")
("2.1.1.16" "0004481")
("5.3.3.1" "0004769")
("1.3.1.24" "0004074")
("6.3.3.1" "0004641")
("6.1.1.16" "0004817")
("1.13.11.11" "0004833")
("5.3.1.24" "0004640")
("3.6.3.-" "0004009")
("2.7.7.4" "0004781")
("1.4.3.1" "0008445")
("2.7.1.40" "0004743")
("3.5.4.12" "0004132")
("2.1.1.125" "0008469")
("4.2.1.16" "0004794")
("2.8.3.5" "0008260")
("3.6.3.9" "0005391")
("1.1.1.100" "0004316")
("2.1.1.100" "0004671")
("1.13.11.27" "0003868")
("1.5.3.1" "0008115")
("2.3.1.88" "0004596")
("3.5.3.1" "0004053")
("1.3.99.12" "0003853")
("2.3.1.16" "0003988")
("3.4.24.-" "0008319" "0004249")
("2.1.1.56" "0004482")
("5.3.1.16" "0003949")
("2.7.1.32" "0004103")
("2.8.3.-" "0008410")
("1.14.17.3" "0004504")
("3.6.3.1" "0004012")
("2.4.1.16" "0004100")
("3.1.21.1" "0004530")
("1.14.99.5" "0004768")
("2.4.99.6" "0008118")
("4.1.2.25" "0004150")
("2.5.1.16" "0004766")
("5.99.1.3" "0003918")
("4.1.1.48" "0004425")
("2.7.1.24" "0004140")
("3.4.21.41" "0003815")
("3.2.1.113" "0004571")
("1.13.11.21" "0003834")
("1.17.4.1" "0004748")
("1.8.3.1" "0008482")
("3.5.3.19" "0004848")
("2.3.2.8" "0004057")
("3.2.1.48" "0004575")
("2.6.1.16" "0004360")
("2.7.1.112" "0004713")
("1.1.1.101" "0000140")
("1.9.3.1" "0004129")
("1.3.99.13" "0004466")
("3.4.21.54" "0004253")
("3.1.13.2" "0004533")
("2.4.2.8" "0004422")
("2.3.1.48" "0004402" "0004403" "0004404")
("1.10.2.2" "0008121")
("5.4.2.8" "0004615")
("3.4.21.1" "0004263")
("2.7.7.48" "0003968")
("6.3.2.25" "0004835")
("1.1.1.31" "0008442")
("3.6.3.10" "0005390")
("3.1.4.11" "0004435")
("2.1.1.114" "0004395")
("1.14.99.22" "0004501")
("1.1.2.4" "0004458")
("2.7.6.3" "0003848")
("3.1.2.4" "0003860")
("4.1.2.4" "0004139")
("1.2.1.31" "0004043")
("6.3.2.17" "0004326")
("3.2.1.31" "0004566")
("3.1.6.14" "0008449")
("3.4.21.42" "0003816")
("3.2.1.114" "0004572")
("1.13.11.31" "0004052")
("2.4.1.101" "0003827")
("2.4.2.17" "0003879")
("1.1.1.23" "0004399")
("4.1.1.23" "0004590")
("2.3.1.31" "0004414")
("3.6.3.50" "0008564")
("2.7.2.8" "0003991")
("3.6.1.48" "0008547")
("1.1.1.95" "0004617")
("3.4.21.9" "0004275")
("2.7.1.113" "0004138")
("6.3.5.2" "0003922")
("2.3.2.4" "0003839")
("6.3.4.11" "0004078")
("3.2.1.23" "0004565")
("3.1.1.7" "0003990")
("6.1.1.7" "0004813")
("2.7.1.48" "0004849")
("4.1.1.15" "0004351")
("6.1.1.15" "0004827")
("5.4.2.4" "0004082")
("3.5.1.31" "0008463")
("3.4.13.19" "0004237")
("1.1.99.5" "0004368")
("2.1.1.63" "0003908")
("4.1.3.18" "0003984")
("3.2.1.15" "0004650")
("2.3.1.7" "0004092")
("3.1.3.66" "0004443")
("3.6.1.31" "0004636")
("2.3.1.87" "0004059")
("2.7.8.8" "0003882")
("3.4.21.43" "0003813")
("3.6.4.11" "0008572")
("2.3.1.15" "0004366")
("1.6.2.4" "0003958")
("2.8.2.17" "0008459")
("2.4.1.102" "0003829")
("6.3.4.5" "0004055")
("1.3.99.5" "0003865")
("3.4.11.22" "0004250")
("5.3.99.5" "0004796")
("2.4.1.15" "0003825")
("1.10.3.3" "0008447")
("2.7.2.4" "0004072")
("1.5.1.7" "0004754")
("2.1.1.103" "0000234")
("4.2.1.55" "0003859")
("3.6.1.23" "0004170")
("3.4.11.7" "0004230")
("3.1.4.1" "0004528")
("3.1.27.9" "0000213")
("3.5.4.5" "0004126")
("1.1.1.3" "0004412")
("3.1.1.3" "0004806")
("1.5.1.15" "0004487")
("2.5.1.15" "0004156")
("3.1.1.47" "0003847")
("2.8.2.4" "0004304")
("3.1.22.1" "0004531")
("3.4.16.6" "0004187")
("3.6.1.7" "0003998")
("2.7.1.23" "0003951")
("3.4.24" "0008133")
("1.2.4.1" "0004739")
("2.7.8.17" "0003976")
("1.6.4.5" "0004791")
("2.7.1.71" "0004765")
("3.6.4.5" "0008569")
("1.2.1.3" "0004029")
("3.2.1.3" "0004339")
("4.2.1.3" "0003994")
("2.4.1.149" "0008532")
("5.2.1.3" "0004744")
("4.2.1.47" "0008446")
("5.4.99.5" "0004106")
("5.99.1.2" "0003917")
("6.1.1.3" "0004829")
("6.2.1.3" "0004467")
("5.3.4.1" "0003756")
("2.7.7.31" "0003912")
("6.3.4.1" "0003921")
("6.3.4.9" "0004079")
("3.1.3.41" "0003869")
("1.1.1.39" "0004471")
("3.5.4.9" "0004477")
("2.7.1.15" "0004747")
("4.3.1.3" "0004397")
("2.3.1.137" "0008458")
("3.5.1.55" "0008421")
("1.14.16.1" "0004505")
("1.1.1.30" "0003858")
("1.4.1.3" "0004353")
("3.4.21.78" "0004277")
("3.6.4.9" "0003763" "0003764" "0003765" "0003766")
("3.4.22.1" "0004213")
("6.4.1.3" "0004658")
("1.5.4.1" "0004734")
("3.1.4.10" "0004436")
("3.5.4.1" "0004131")
("2.7.7.23" "0003977")
("3.1.3.33" "0004651")
("3.4.22.40" "0008423")
("1.5.1.3" "0004146")
("2.5.1.3" "0004789")
("2.7.4.9" "0004798")
("2.3.1.39" "0004314")
("3.1.6.13" "0004423")
("6.5.1.3" "0003972")
("2.7.7.7" "0003887" "0003888" "0003889" "0003890" "0003891" "0003893" "0003895" "0003894")
("3.6.4.1" "0008570")
("1.1.1.22" "0003979")
("4.1.3.8" "0003878")
("4.1.1.22" "0004398")
("3.6.1.3" "0004003" "0008026" "0008094" "0008186" "0004002")
("4.6.1.3" "0003856")
("6.1.1.22" "0004816")
("3.6.1.47" "0003926" "0003928" "0003929" "0003930" "0008619" "0003931" "0003932" "0003925")
("2.7.7.15" "0004105")
("1.2.3.8" "0004732")
("3.1.4.50" "0004621")
("6.3.4.10" "0004080")
("3.2.1.22" "0004557")
("3.4.25.-" "0004298")
("4.2.1.22" "0004122")
("2.7.1.3" "0004454")
("1.1.1.86" "0004455")
("3.4.21.45" "0003818")
("1.1.1.184" "0004090")
("4.2.1.70" "0004730")
("2.7.1.137" "0004429")
("3.4.24.17" "0004248")
("1.13.11.5" "0004411")
("1.1.1.14" "0003939")
("2.1.1.14" "0003871")
("2.3.1.22" "0003846")
("3.4.17.22" "0008472")
("1.14.16.2" "0004511")
("1.1.1.62" "0004303")
("3.5.4.19" "0004635")
("3.4.21.79" "0004278")
("3.4.21.20" "0004261")
("5.3.3.8" "0008461" "0004165")
("6.1.1.14" "0004820")
("5.1.99.4" "0008111")
("3.5.4.10" "0003937")
("1.1.1.105" "0004745")
("3.2.1.14" "0004568")
("2.4.1.22" "0004461")
("3.1.3.65" "0004442")
("2.7.1.39" "0004413")
("2.3.1.86" "0004321")
("3.6.4.10" "0008571")
("1.3.1.14" "0004589")
("3.1.6.2" "0004773")
("3.5.1.22" "0004593")
("2.7.1.30" "0004370")
("2.7.7.3" "0004595")
("3.4.17.10" "0004183")
("3.1.3.4" "0008195")
("4.1.3.4" "0004419")
("5.3.99.4" "0008116")
("1.10.3.2" "0008471")
("3.6.3.49" "0005260")
("3.1.3.57" "0004441")
("3.6.3.8" "0005388")
("3.6.1.22" "0000210")
("3.4.21.46" "0003817") |
|
98853869c1ef9cfa58387bf74f99a7d445d918b442f2ee52e0d5e4c8873040a5 | nsg-ethz/O4 | variable.rkt | #lang racket/base
; Variable Context
; ---------------------------------------
(provide dec-variable
get-variable
set-variable
set-variables
substitute-variable)
; Implementation
; ---------------------------------------
(require o4/context/base)
; Struct
; -
; For variables, we store its string and value representation, as well as a flag denoting if the variable should be in-place replaced.
; Fields: string — the textual form of the variable; value — its value form
; (may be #f); f-replace — when non-#f, the variable is in-place replaced
; during substitution (see substitute-variable below).
(struct dec-variable (string value f-replace) #:transparent)
; Getter
; Look up `name` in the context and return its declaration only when that
; declaration is a variable; any other declare kind (or a miss) yields null.
(define (get-variable ctx name)
  (define found (get-declare ctx name))
  (cond
    [(dec-variable? found) found]
    [else null]))
; Setter
; Record a variable declaration for `name` in the context.
; f-replace defaults to #f, i.e. the variable is not in-place replaced.
(define (set-variable ctx name str val [f-replace #f])
  (let ([dec (dec-variable str val f-replace)])
    (set-declare ctx name dec)))
; Register several variables at once; each element of `vars` is an argument
; list for set-variable, i.e. (name string value [f-replace]).
(define (set-variables ctx vars)
  (for-each (lambda (var) (apply set-variable ctx var)) vars))
; Substitute Variable
; -
; This function checks if a variable with a given name is present in the given context.
; If not, it simply returns the variable.
; Otherwise, if the replace flag on the found variable is set, it returns the string and value representations of the context variable.
; Resolve `name` against the context, producing two values: a string form and
; a value form. A context variable flagged for replacement contributes its own
; string/value pair; otherwise the name itself is returned with a #f value.
(define (substitute-variable ctx name)
  (define found (get-variable ctx name))
  (cond
    [(and (dec-variable? found) (dec-variable-f-replace found))
     (values (dec-variable-string found)
             (dec-variable-value found))]
    [else
     (values (symbol->string name)
             #f)]))
| null | https://raw.githubusercontent.com/nsg-ethz/O4/870f6f78a6f7bb1e9e1502e29ff0048e5e1723c5/o4/context/variable.rkt | racket | Variable Context
---------------------------------------
Implementation
---------------------------------------
Struct
-
For variables, we store its string and value representation, as well as a flag denoting if the variable should be in-place replaced.
Getter
Check that returned declare is of type variable
Setter
Substitute Variable
-
This function checks if a variable with a given name is present in the given context.
If not, it simply returns the variable.
Otherwise, if the replace flag on the found variable is set, it returns the string and value representations of the context variable. | #lang racket/base
(provide dec-variable
get-variable
set-variable
set-variables
substitute-variable)
(require o4/context/base)
(struct dec-variable (string value f-replace) #:transparent)
(define (get-variable ctx name)
(let ([dec (get-declare ctx name)])
(if (dec-variable? dec)
dec
null)))
(define (set-variable ctx name str val [f-replace #f])
(set-declare ctx name (dec-variable str val f-replace)))
(define (set-variables ctx vars)
(for ([var (in-list vars)])
(apply set-variable ctx var)))
(define (substitute-variable ctx name)
(let ([var (get-variable ctx name)])
(if (or (null? var) (not (dec-variable-f-replace var)))
(values
(symbol->string name)
#f)
(values
(dec-variable-string var)
(dec-variable-value var)))))
|
fba8b716b0a1cb42452d14c542d5e2b6c2dc39bb246f8d29f045d916772a45c6 | anuyts/menkar | WHN.hs | module Menkar.Systems.Reldtt.WHN where
import Menkar.Basic
import Menkar.Analyzer
import Menkar.WHN
import Menkar.System.Fine
import Menkar.System.Scoper
import Menkar.System.WHN
import Menkar.Fine
import Menkar.Monad
import Menkar.Systems.Reldtt.Basic
import Menkar.Systems.Reldtt.Fine
import Menkar.Systems.Reldtt.Analyzer
import Menkar.Systems.Reldtt.Scoper
import Control.Monad.DoUntilFail
import Control.Exception.AssertFalse
import Data.Functor.Coerce
import Control.Monad.Trans.Class
import Control.Monad.Writer.Class
import Control.Monad.Trans.Writer.Strict hiding (listen, tell)
import Control.Monad.Trans.Maybe
import Control.Monad.State.Strict
import Control.Applicative
import Control.Lens
import Data.Void
import GHC.Generics
import Data.Functor.Compose
import Data.Maybe
| Precondition : Tails start at the same point and have the same neutral ( co)domain .
Precondition for correct result : The snouts are leq .
Output : bool@ if absolutely sure , @Left bool@ if tails are assumed not - blocked .
Precondition for correct result: The snouts are leq.
Output: @Right bool@ if absolutely sure, @Left bool@ if tails are assumed not-blocked. -}
relTail_ :: ModRel -> ModtySnout -> ModtySnout -> ModtyTail v -> ModtyTail v -> Either Bool Bool
relTail_ rel _ _ TailProblem _ = Right False
relTail_ rel _ _ _ TailProblem = Right False
relTail_ rel _ _ TailEmpty TailEmpty = Right True -- both empty
relTail_ rel _ _ TailEmpty _ = Right True -- both empty
relTail_ rel _ _ _ TailEmpty = Right True -- both empty
relTail_ rel _ _ (TailDisc dcod) (TailDisc dcod') = Right True -- both discrete
relTail_ rel _ _ (TailDisc dcod) (TailForget ddom') = Right True -- both empty
relTail_ rel _ _ (TailDisc dcod) (TailDiscForget ddom' dcod') = Right True -- both discrete
relTail_ rel _ _ (TailDisc dcod) (TailCont d') = Right True -- both empty
relTail_ rel _ _ (TailForget ddom) (TailDisc dcod') = Right True -- both empty
relTail_ rel _ _ (TailForget ddom) (TailForget ddom') = Right True -- both forget
relTail_ rel _ _ (TailForget ddom) (TailDiscForget ddom' dcod') = Right True -- both forget
relTail_ rel _ _ (TailForget ddom) (TailCont d') = Right True -- both empty
relTail_ rel _ _ (TailDiscForget ddom dcod) (TailDisc dcod') = Right True -- both discrete
relTail_ rel _ _ (TailDiscForget ddom dcod) (TailForget ddom') = Right True -- both forget
relTail_ rel _ _ (TailDiscForget ddom dcod) (TailDiscForget ddom' dcod') = Right True -- both forget
relTail_ rel _ _ (TailDiscForget ddom dcod) (TailCont d') = case rel of
since snouts are leq and cont is well - typed , we know that on the left , we do n't have Top .
ModLeq -> Right True -- non-Top-discreteness is less than continuity
ModEq -> Left False -- but not equal if tails are whn.
-- The only way that @ModLeq@ can be false, is when the left snout ends in Top, but then
if the snouts are leq , then so does the right one , so you ca n't have TailCont .
relTail_ rel _ _ (TailCont d) (TailDisc dcod') = Right True -- both are empty
relTail_ rel _ _ (TailCont d) (TailForget ddom') = Right True -- both are empty
relTail_ rel _ snoutR (TailCont d) (TailDiscForget ddom' dcod') = case rel of
ModEq -> Left False -- not equal if tails are whn
ModLeq -> case _modtySnout'degreesReversed snoutR of
[] -> Right False -- discreteness lists '='
discreteness is actually codiscreteness
_ -> Right False -- discreteness is less than continuity
relTail_ rel _ _ (TailCont d) (TailCont d') = Right True -- both continuity
| Precondition : Tails start at the same point and have the same neutral ( co)domain .
Precondition for correct result : The snouts are leq .
Return Nothing if presently unclear .
Precondition for correct result: The snouts are leq.
Return Nothing if presently unclear. -}
relTail :: forall whn v .
(MonadWHN Reldtt whn, DeBruijnLevel v) =>
ModRel ->
KnownModty v ->
KnownModty v ->
String ->
whn (Maybe Bool)
relTail rel (KnownModty snoutL tailL) (KnownModty snoutR tailR) reason = do
(whnTailL, metasL) <- runWriterT $ whnormalizeModtyTail tailL reason
(whnTailR, metasR) <- runWriterT $ whnormalizeModtyTail tailR reason
case relTail_ rel snoutL snoutR whnTailL whnTailR of
Right bool -> return $ Just bool
Left bool -> case (metasL, metasR) of
([], []) -> return $ Just bool
otherwise -> return $ Nothing
{-| Compare known modalities, assuming they have the same type.
Return a boolean if they compare,
or @Nothing@ in case of problems (not metavariable-related problems, but ACTUAL problems),
or @Just Nothing@ if presently unclear.
-}
relKnownModty :: forall whn v .
(MonadWHN Reldtt whn, DeBruijnLevel v) =>
ModRel ->
KnownModty v ->
KnownModty v ->
String ->
whn (Maybe (Maybe Bool))
relKnownModty rel kmu1@(KnownModty snout1 tail1) kmu2@(KnownModty snout2 tail2) reason = runMaybeT $ do
We 're now in the monad MaybeT whn _ whn ( Maybe _ )
-- If the forcing of domains and codomains causes problems, then we get `whn Nothing`, i.e. the do-block is aborted.
-- If relTail yields `Nothing`, then `lift` promotes this to `whn (Just Nothing)`
(kmu1, kmu2) <- MaybeT . return $
case compare (_modtySnout'dom $ _knownModty'snout kmu1) (_modtySnout'dom $ _knownModty'snout kmu2) of
LT -> (, kmu2) <$> forceDom snout1 tail1 (_modtySnout'dom snout2) (_modtyTail'dom tail2)
EQ -> Just (kmu1, kmu2)
GT -> (kmu1, ) <$> forceDom snout2 tail2 (_modtySnout'dom snout1) (_modtyTail'dom tail1)
(kmu1, kmu2) <- MaybeT . return $
case compare (_modtySnout'cod $ _knownModty'snout kmu1) (_modtySnout'cod $ _knownModty'snout kmu2) of
LT -> (, kmu2) <$> forceCod snout1 tail1 (_modtySnout'cod snout2) (_modtyTail'cod tail2)
EQ -> Just (kmu1, kmu2)
GT -> (kmu1, ) <$> forceCod snout2 tail2 (_modtySnout'cod snout1) (_modtyTail'cod tail1)
let opSnout = case rel of
ModEq -> (==)
ModLeq -> (<=)
let snoutsRelated = and $ getZipList $
(opSnout) <$> ZipList (_modtySnout'degreesReversed $ _knownModty'snout kmu1)
<*> ZipList (_modtySnout'degreesReversed $ _knownModty'snout kmu2)
tailsRelated <- lift $ relTail rel kmu1 kmu2 reason
return $ (snoutsRelated &&) <$> tailsRelated
----------------------------------
-- | composition
compModtySnout :: ModtySnout -> KnownModty v -> ModtySnout
compModtySnout (ModtySnout kmid kcod []) mu =
ModtySnout (_modtySnout'dom $ _knownModty'snout $ mu) kcod []
compModtySnout (ModtySnout kmid kcod krevdegs) mu =
ModtySnout (_modtySnout'dom $ _knownModty'snout $ mu) kcod $ flip knownGetDeg mu <$> krevdegs
-- | composition
compModtyTail :: ModtyTail v -> ModtyTail v -> ModtyTail v
compModtyTail (TailCont d) tail1 = tail1
compModtyTail tail2 (TailCont d) = tail2
compModtyTail TailEmpty TailEmpty = TailEmpty
compModtyTail TailEmpty (TailDisc _) = TailEmpty
( TailCodisc _ ) = TailEmpty
compModtyTail TailEmpty (TailForget ddom) = TailForget ddom
compModtyTail TailEmpty (TailDiscForget ddom _) = TailForget ddom
( TailCodiscForget ddom _ ) = TailForget ddom
compModtyTail (TailDisc dcod) TailEmpty = TailDisc dcod
compModtyTail (TailDisc dcod) (TailDisc _) = TailDisc dcod
( TailDisc dcod ) ( TailCodisc _ ) = TailDisc dcod
compModtyTail (TailDisc dcod) (TailForget ddom) = TailDiscForget ddom dcod
compModtyTail (TailDisc dcod) (TailDiscForget ddom _) = TailDiscForget ddom dcod
( TailDisc dcod ) ( TailCodiscForget ddom _ ) = dcod
( TailCodisc dcod ) TailEmpty = TailCodisc dcod
compModtyTail ( TailCodisc dcod ) ( TailDisc _ ) = TailCodisc dcod
compModtyTail ( TailCodisc dcod ) ( TailCodisc _ ) = TailCodisc dcod
compModtyTail ( TailCodisc dcod ) ( TailForget ddom ) = TailCodiscForget ( TailCodisc dcod ) ( TailDiscForget ddom _ ) = TailCodiscForget ( TailCodisc dcod ) ( TailCodiscForget ddom _ ) = TailCodiscForget ddom dcod
compModtyTail (TailCodisc dcod) (TailDisc _) = TailCodisc dcod
compModtyTail (TailCodisc dcod) (TailCodisc _) = TailCodisc dcod
compModtyTail (TailCodisc dcod) (TailForget ddom) = TailCodiscForget ddom dcod
compModtyTail (TailCodisc dcod) (TailDiscForget ddom _) = TailCodiscForget ddom dcod
compModtyTail (TailCodisc dcod) (TailCodiscForget ddom _) = TailCodiscForget ddom dcod-}
compModtyTail (TailForget _) TailEmpty = TailEmpty
compModtyTail (TailForget _) (TailDisc _) = TailEmpty
( TailForget _ ) ( TailCodisc _ ) = TailEmpty
compModtyTail (TailForget _) (TailForget ddom) = TailForget ddom
compModtyTail (TailForget _) (TailDiscForget ddom _) = TailForget ddom
( TailForget _ ) ( TailCodiscForget ddom _ ) = TailForget ddom
compModtyTail (TailDiscForget _ dcod) TailEmpty = TailDisc dcod
compModtyTail (TailDiscForget _ dcod) (TailDisc _) = TailDisc dcod
( TailDiscForget _ dcod ) ( TailCodisc _ ) = TailDisc dcod
compModtyTail (TailDiscForget _ dcod) (TailForget ddom) = TailDiscForget ddom dcod
compModtyTail (TailDiscForget _ dcod) (TailDiscForget ddom _) = TailDiscForget ddom dcod
( TailDiscForget _ dcod ) ( TailCodiscForget ddom _ ) = dcod
( TailCodiscForget _ dcod ) TailEmpty = TailCodisc dcod
( TailCodiscForget _ dcod ) ( TailDisc _ ) = TailCodisc dcod
( TailCodiscForget _ dcod ) ( TailCodisc _ ) = TailCodisc dcod
( TailCodiscForget _ dcod ) ( TailForget ddom ) = TailCodiscForget ( TailCodiscForget _ dcod ) ( TailDiscForget ddom _ ) = TailCodiscForget ( TailCodiscForget _ dcod ) ( TailCodiscForget ddom _ ) = TailCodiscForget ddom dcod
compModtyTail (TailCodiscForget _ dcod) (TailDisc _) = TailCodisc dcod
compModtyTail (TailCodiscForget _ dcod) (TailCodisc _) = TailCodisc dcod
compModtyTail (TailCodiscForget _ dcod) (TailForget ddom) = TailCodiscForget ddom dcod
compModtyTail (TailCodiscForget _ dcod) (TailDiscForget ddom _) = TailCodiscForget ddom dcod
compModtyTail (TailCodiscForget _ dcod) (TailCodiscForget ddom _) = TailCodiscForget ddom dcod-}
compModtyTail TailProblem _ = TailProblem
compModtyTail _ TailProblem = TailProblem
compKnownModty :: KnownModty v -> KnownModty v -> KnownModty v
compKnownModty mu2@(KnownModty snout2 tail2) mu1@(KnownModty snout1 tail1) =
let maybeStuff = case compare (_modtySnout'cod snout1) (_modtySnout'dom snout2) of
LT -> (, mu2) <$> forceCod snout1 tail1 (_modtySnout'dom snout2) (_modtyTail'dom tail2)
EQ -> Just (mu1, mu2)
GT -> (mu1, ) <$> forceDom snout2 tail2 (_modtySnout'cod snout1) (_modtyTail'cod tail1)
in case maybeStuff of
Nothing -> problemKnownModty
Just (mu1@(KnownModty snout1 tail1), mu2@(KnownModty snout2 tail2)) ->
let snoutComp = compModtySnout snout2 mu1
tailComp = compModtyTail tail2 tail1
in KnownModty snoutComp tailComp
compChainModty :: ChainModty v -> ChainModty v -> ChainModty v
compChainModty (ChainModtyLink kmu tnu chrho) chsigma =
ChainModtyLink kmu tnu $ compChainModty chrho chsigma
compChainModty (ChainModtyKnown kmu) (ChainModtyLink knu trho chsigma) =
ChainModtyLink (kmu `compKnownModty` knu) trho chsigma
compChainModty (ChainModtyKnown kmu) (ChainModtyKnown knu) =
ChainModtyKnown (kmu `compKnownModty` knu)
compChainModty chmu chnu =
ChainModtyTerm (_chainModty'dom chnu) (_chainModty'cod chmu) $ BareModty $ ModtyTermComp chmu chnu
whnormalizeComp : : forall whn v .
( MonadWHN Reldtt whn , MonadWriter [ Int ] whn , DeBruijnLevel v ) = >
Constraint Reldtt - >
Term
Term
Term
Type
String - >
whn ( Term v )
whnormalizeComp gamma mu2 dmid mu1 ty reason = do
whnTy < - whnormalizeType gamma ty reason
let giveUp = return $ BareModty $ ModtyTermComp mu2 dmid mu1
case unType whnTy of
Expr2 ( TermSys ( SysTypeModty ) ) - > do
whnMu1 < - whnormalize gamma mu1 ( Type $ Expr2 $ TermSys $ SysTypeModty ) reason
< - whnormalize gamma mu2 ( Type $ Expr2 $ TermSys $ SysTypeModty ) reason
case ( whnMu1 , ) of
( BareModty ( ModtyTermUnavailable ddom ' dmid ' ) , _ ) - >
return $ BareModty $ ModtyTermUnavailable ddom ' dcod -- USING THE TYPE !
( _ , BareModty ( ModtyTermUnavailable dmid ' dcod ' ) ) - >
return $ BareModty $ ModtyTermUnavailable -- USING THE TYPE !
( BareModty ( ModtyTerm ) , BareModty ( ModtyTerm ) ) - > do
let = case compare ( _ ) ( _ modtySnout'dom snout2 ) of
LT - > ( , ( snout2 , tail2 ) ) < $ > forceCod snout1 tail1 ( _ modtySnout'dom snout2 ) ( _ modtyTail'dom tail2 )
EQ - > Just ( ( snout1 , tail1 ) , ( snout2 , tail2 ) )
GT - > ( ( snout1 , tail1 ) , ) < $ > forceDom snout2 tail2 ( _ modtySnout'cod snout1 ) ( _ )
case of
Nothing - > Expr2 . TermProblem < $ > giveUp
Just ( ( snout1 , tail1 ) , ( snout2 , tail2 ) ) - > do
let snoutComp = compModtySnout snout2 snout1
let tailComp =
return $ BareModty $ ModtyTerm snoutComp tailComp
( _ , _ ) - > return $ BareModty $ ModtyTermComp whnMu1
otherwise - > giveUp
whnormalizeComp :: forall whn v .
(MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
Constraint Reldtt ->
Ctx Type Reldtt v ->
Term Reldtt v ->
Term Reldtt v ->
Term Reldtt v ->
Type Reldtt v ->
String ->
whn (Term Reldtt v)
whnormalizeComp gamma mu2 dmid mu1 ty reason = do
whnTy <- whnormalizeType gamma ty reason
let giveUp = return $ BareModty $ ModtyTermComp mu2 dmid mu1
case unType whnTy of
Expr2 (TermSys (SysTypeModty ddom dcod)) -> do
whnMu1 <- whnormalize gamma mu1 (Type $ Expr2 $ TermSys $ SysTypeModty ddom dmid) reason
whnMu2 <- whnormalize gamma mu2 (Type $ Expr2 $ TermSys $ SysTypeModty dmid dcod) reason
case (whnMu1, whnMu2) of
(BareModty (ModtyTermUnavailable ddom' dmid'), _) ->
return $ BareModty $ ModtyTermUnavailable ddom' dcod -- USING THE TYPE!
(_, BareModty (ModtyTermUnavailable dmid' dcod')) ->
return $ BareModty $ ModtyTermUnavailable ddom dcod' -- USING THE TYPE!
(BareModty (ModtyTerm snout2 tail2), BareModty (ModtyTerm snout1 tail1)) -> do
let maybeStuff = case compare (_modtySnout'cod snout1) (_modtySnout'dom snout2) of
LT -> (, (snout2, tail2)) <$> forceCod snout1 tail1 (_modtySnout'dom snout2) (_modtyTail'dom tail2)
EQ -> Just ((snout1, tail1), (snout2, tail2))
GT -> ((snout1, tail1), ) <$> forceDom snout2 tail2 (_modtySnout'cod snout1) (_modtyTail'cod tail1)
case maybeStuff of
Nothing -> Expr2 . TermProblem <$> giveUp
Just ((snout1, tail1), (snout2, tail2)) -> do
let snoutComp = compModtySnout snout2 snout1
let tailComp = compModtyTail tail2 tail1
return $ BareModty $ ModtyTerm snoutComp tailComp
(_, _) -> return $ BareModty $ ModtyTermComp whnMu2 dmid whnMu1
otherwise -> giveUp
-}
---------------------
-- | Beware that the omega - case is not really handled !
knownGetDegSnout : : KnownDeg - > ModtySnout - > KnownDeg
knownGetDegSnout KnownDegEq mu = KnownDegEq
knownGetDegSnout ( KnownDeg i ) ( ModtySnout kdom kcod ) = krevdegs ! ! ( length krevdegs - i - 1 )
knownGetDegSnout KnownDegOmega mu =
knownGetDegSnout KnownDegTop mu = KnownDegTop
knownGetDegSnout KnownDegProblem mu = KnownDegProblem
-- | Beware that the omega-case is not really handled!
knownGetDegSnout :: KnownDeg -> ModtySnout -> KnownDeg
knownGetDegSnout KnownDegEq mu = KnownDegEq
knownGetDegSnout (KnownDeg i) (ModtySnout kdom kcod krevdegs) = krevdegs !! (length krevdegs - i - 1)
knownGetDegSnout KnownDegOmega mu = KnownDegOmega
knownGetDegSnout KnownDegTop mu = KnownDegTop
knownGetDegSnout KnownDegProblem mu = KnownDegProblem
-}
knownGetDeg :: KnownDeg -> KnownModty v -> KnownDeg
knownGetDeg KnownDegEq _ = KnownDegEq
knownGetDeg KnownDegTop _ = KnownDegTop
knownGetDeg KnownDegProblem _ = KnownDegProblem
knownGetDeg (KnownDeg i) (KnownModty snout@(ModtySnout idom icod krevdegs) tail) =
if i < icod
then krevdegs !! (icod - i - 1)
else case tail of
TailEmpty -> KnownDegProblem
TailDisc dcod -> snoutMax
TailForget ddom -> KnownDegProblem
TailDiscForget ddom dcod -> snoutMax
TailCont d -> KnownDeg (i - icod + idom)
TailProblem -> KnownDegProblem
where snoutMax = _snout'max snout
knownGetDeg KnownDegOmega mu@(KnownModty snout@(ModtySnout idom icod krevdegs) tail) = case tail of
TailEmpty -> KnownDegProblem
TailDisc dcod -> snoutMax
TailForget ddom -> KnownDegProblem
TailDiscForget ddom dcod -> snoutMax
TailCont d -> KnownDegOmega
TailProblem -> KnownDegProblem
where snoutMax = _snout'max snout
---------------------
{-| Fails (returns Nothing) for modalities with a discrete tail of neutral length.
Precondition: argument has been whnormalized to the extent possible.
-}
knownApproxLeftAdjointProj :: KnownModty v -> Maybe (KnownModty v)
knownApproxLeftAdjointProj kmu@(KnownModty snout@(ModtySnout idom icod krevdegs) tail) =
Fields :
_ 1 : number of degrees popped from the input modality , minus one .
_ 2 : remaining tail of the input modality
_ 3 : already constructed part of output modality , REVERSED
_ 4 : length of _ 3
_1: number of degrees popped from the input modality, minus one.
_2: remaining tail of the input modality
_3: already constructed part of output modality, REVERSED
_4: length of _3
-}
let (_, _, krevdegs', _) = flip execState (-1, reverse krevdegs, [], 0) $
doUntilFail $ do
remainingTail <- use _2
threshold <- use _4
if threshold == idom
then return False
else True <$ case remainingTail of
nextDeg : remainingTail' -> if nextDeg > KnownDeg threshold
then do -- Write a degree, increase the length
nextDeg' <- use _1
_3 %= (nextDeg' :)
_4 += 1
else do -- Pop a degree, increase the pop-counter
_2 .= remainingTail'
_1 += 1
[] -> do -- Write a degree, increase the length
nextDeg' <- use _1
_3 %= (nextDeg' :)
_4 += 1
snout' = ModtySnout icod idom (int2deg <$> krevdegs')
snoutCohpi' = ModtySnout icod idom $ krevdegs' <&> \ i -> if i == (idom - 1) then KnownDegOmega else int2deg i
in case tail of
TailEmpty -> Just $ KnownModty snout' $ TailEmpty
TailDisc dcod -> case dcod of
ReldttMode BareModeOmega -> Just $ KnownModty snoutCohpi' $ TailForget dcod
_ -> case krevdegs of
We can read the tail as TailCodisc
KnownDegTop : _ -> Just $ KnownModty snout' $ TailForget dcod
_ -> Nothing
TailForget ddom -> Just $ KnownModty snout' $ TailDisc ddom
TailDiscForget ddom dcod -> case dcod of
ReldttMode BareModeOmega -> Just $ KnownModty snoutCohpi' $ TailDiscForget dcod ddom
_ -> case krevdegs of
We can read the tail as TailCodiscForget
KnownDegTop : _ -> Just $ KnownModty snout' $ TailDiscForget dcod ddom
_ -> Nothing
TailCont d -> Just $ KnownModty snout' $ TailCont d
TailProblem -> Just $ KnownModty snout' $ TailProblem
where int2deg :: Int -> KnownDeg
int2deg (-1) = KnownDegEq
int2deg i = KnownDeg i
---------------------
whnormalizeModtyTail :: forall whn v .
(MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
ModtyTail v ->
String ->
whn (ModtyTail v)
whnormalizeModtyTail tail reason =
case tail of
TailEmpty -> return TailEmpty
TailDisc dcod -> do
dcod <- whnormalizeMode dcod reason
case dcod of
ReldttMode (BareMode ModeTermZero) -> return TailEmpty
otherwise -> return $ TailDisc dcod
TailForget ddom -> do
ddom <- whnormalizeMode ddom reason
case ddom of
ReldttMode (BareMode ModeTermZero) -> return TailEmpty
otherwise -> return $ TailForget ddom
TailDiscForget ddom dcod -> do
ddom <- whnormalizeMode ddom reason
dcod <- whnormalizeMode dcod reason
case (ddom, dcod) of
(ReldttMode (BareMode ModeTermZero),
ReldttMode (BareMode ModeTermZero)) -> return TailEmpty
(ReldttMode (BareMode ModeTermZero), _) -> return $ TailDisc dcod
(_, ReldttMode (BareMode ModeTermZero)) -> return $ TailForget ddom
(_, _) -> return $ TailDiscForget ddom dcod
TailCont d -> do
d <- whnormalizeMode d reason
case d of
ReldttMode (BareMode ModeTermZero) -> return TailEmpty
otherwise -> return $ TailCont d
TailProblem -> return TailProblem
-- Why bother?
whnormalizeKnownModty :: forall whn v .
(MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
KnownModty v ->
String ->
whn (KnownModty v)
whnormalizeKnownModty mu@(KnownModty snout tail) reason = do
tail <- whnormalizeModtyTail tail reason
case tail of
TailEmpty -> return $ KnownModty snout TailEmpty
TailDisc dcod -> case dcod of
ReldttMode (BareMode ModeTermZero) -> return $ KnownModty snout TailEmpty
ReldttMode (BareMode (ModeTermSuc d)) ->
whnormalizeKnownModty (KnownModty (extDisc snout) $ TailDisc $ ReldttMode d) reason
_ -> return $ KnownModty snout tail
TailForget ddom -> case ddom of
ReldttMode (BareMode ModeTermZero) -> return $ KnownModty snout TailEmpty
ReldttMode (BareMode (ModeTermSuc d)) ->
whnormalizeKnownModty (KnownModty (extForget snout) $ TailForget $ ReldttMode d) reason
_ -> return $ KnownModty snout tail
TailDiscForget ddom dcod -> case dcod of
ReldttMode (BareMode ModeTermZero) ->
whnormalizeKnownModty (KnownModty snout $ TailForget ddom) reason
ReldttMode (BareMode (ModeTermSuc d)) ->
whnormalizeKnownModty (KnownModty (extDisc snout) $ TailDiscForget ddom (ReldttMode d)) reason
_ -> case ddom of
ReldttMode (BareMode ModeTermZero) ->
whnormalizeKnownModty (KnownModty snout $ TailDisc dcod) reason
ReldttMode (BareMode (ModeTermSuc d)) ->
whnormalizeKnownModty (KnownModty (extForget snout) $ TailDiscForget (ReldttMode d) dcod) reason
_ -> return $ KnownModty snout tail
TailCont d -> case d of
ReldttMode (BareMode ModeTermZero) -> return $ KnownModty snout TailEmpty
ReldttMode (BareMode (ModeTermSuc dpred)) ->
whnormalizeKnownModty (KnownModty (extCont snout) $ TailCont $ ReldttMode dpred) reason
_ -> return $ KnownModty snout tail
TailProblem -> return $ KnownModty snout TailProblem
whnormalizeChainModty :: forall whn v .
(MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
ChainModty v ->
String ->
whn (ChainModty v)
whnormalizeChainModty mu@(ChainModtyKnown knownMu) reason = return mu
's are aligned before relating them .
--ChainModtyKnown <$> whnormalizeKnownModty gamma knownMu reason
whnormalizeChainModty mu@(ChainModtyLink knownMu termNu chainRho) reason = do
termNu <- whnormalize termNu
(BareSysType $ SysTypeModty (_chainModty'cod chainRho) (_knownModty'dom knownMu)) reason
case termNu of
BareChainModty chainNu -> do
chainNu <- whnormalizeChainModty chainNu reason
case chainNu of
ChainModtyKnown knownNu -> do
chainRho <- whnormalizeChainModty chainRho reason
let composite = case chainRho of
ChainModtyKnown knownRho ->
ChainModtyKnown (knownMu `compKnownModty` knownNu `compKnownModty` knownRho)
ChainModtyLink knownSigma termTau chainUpsilon ->
mu . nu . . tau . upsilon
ChainModtyLink (knownMu `compKnownModty` knownNu `compKnownModty` knownSigma) termTau chainUpsilon
ChainModtyTerm ddom dcod trho ->
ChainModtyLink (knownMu `compKnownModty` knownNu) (BareChainModty chainRho) $
ChainModtyKnown $ idKnownModty ddom
ChainModtyMeta _ _ _ _ -> unreachable
ChainModtyAlreadyChecked _ _ _ -> unreachable
whnormalizeChainModty composite reason
ChainModtyLink knownNuA termNuB chainNuC -> do
mu . . nuB . nuC . rho
let composite = ChainModtyLink (knownMu `compKnownModty` knownNuA) termNuB $
compMod chainNuC chainRho
whnormalizeChainModty composite reason
ChainModtyTerm ddom dcod tnu -> return $ ChainModtyLink knownMu termNu chainRho
ChainModtyMeta _ _ _ _ -> unreachable
ChainModtyAlreadyChecked _ _ _ -> unreachable
otherwise -> return $ ChainModtyLink knownMu termNu chainRho
whnormalizeChainModty chmu@(ChainModtyTerm dom cod tmu) reason = do
(tmu, metasTMu) <- listen $ whnormalize tmu (BareSysType $ SysTypeModty dom cod) reason
case (tmu, metasTMu) of
(BareChainModty chmu, []) -> whnormalizeChainModty chmu reason
(_, []) -> whnormalizeChainModty
(ChainModtyLink (idKnownModty cod) tmu $ ChainModtyKnown $ idKnownModty dom)
reason
otherwise -> return $ ChainModtyTerm dom cod tmu
whnormalizeChainModty chmu@(ChainModtyMeta dom cod meta depcies) reason = do
maybeSolution <- awaitMeta reason meta depcies
case maybeSolution of
Nothing -> chmu <$ tell [meta]
Just solution -> whnormalizeChainModty solution reason
whnormalizeChainModty chmu@(ChainModtyAlreadyChecked dom cod chmuChecked) reason =
whnormalizeChainModty chmuChecked reason
whnormalizeChainModty : : forall whn v .
( MonadWHN Reldtt whn , MonadWriter [ Int ] whn , DeBruijnLevel v ) = >
ChainModty v - >
String - >
whn ( ChainModty v )
whnormalizeChainModty gamma reason = do
let cod = _ chainModty'cod chmu
whnCod < - whnormalizeMode gamma cod reason
case whnCod of
ReldttMode ( BareMode ModeTermZero ) - > return $ ChainModtyKnown $
forgetKnownModty $ _ chainModty'dom chmu
otherwise - > whnormalizeChainModtyNonzeroCod gamma chmu reason
whnormalizeChainModty :: forall whn v .
(MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
Ctx Type Reldtt v ->
ChainModty v ->
String ->
whn (ChainModty v)
whnormalizeChainModty gamma chmu reason = do
let cod = _chainModty'cod chmu
whnCod <- whnormalizeMode gamma cod reason
case whnCod of
ReldttMode (BareMode ModeTermZero) -> return $ ChainModtyKnown $
forgetKnownModty $ _chainModty'dom chmu
otherwise -> whnormalizeChainModtyNonzeroCod gamma chmu reason
-}
whnormalizeModeTerm :: forall whn v .
(MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
ModeTerm v ->
String ->
whn (ModeTerm v)
whnormalizeModeTerm d reason = case d of
ModeTermZero -> return $ ModeTermZero
--ModeTermFinite t -> BareMode . ModeTermFinite <$> whnormalize gamma t (hs2type NatType) reason
ModeTermSuc d -> do
d <- whnormalize d (BareSysType $ SysTypeMode) reason
case d of
BareMode ModeTermOmega -> return $ ModeTermOmega
_ -> return $ ModeTermSuc d
ModeTermOmega -> return $ ModeTermOmega
whnormalizeModtyTerm :: forall whn v .
(MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
ModtyTerm v ->
String ->
whn (ModtyTerm v)
whnormalizeModtyTerm mu reason = case mu of
-- ModtyTermChain is a constructor, don't normalize under it!
ModtyTermChain chmu -> return mu
only for prettyprinting
ModtyTermApproxLeftAdjointProj chrho -> do
chrho <- whnormalizeChainModty chrho reason
case chrho of
ChainModtyKnown krho -> case knownApproxLeftAdjointProj krho of
Just kmu -> return $ ModtyTermChain $ ChainModtyKnown $ kmu
Nothing -> return mu
otherwise -> return mu
ModtyTermComp chmu2 chmu1 -> do
(chmu1, metas1) <- listen $ whnormalizeChainModty chmu1 reason
(chmu2, metas2) <- listen $ whnormalizeChainModty chmu2 reason
case (metas1, metas2) of
([], []) -> return $ ModtyTermChain $ chmu2 `compChainModty` chmu1
(_, _) -> return $ ModtyTermComp chmu2 chmu1
ModtyTermUnavailable ddom dcod -> return mu
whnormalizeReldttDegree :: forall whn v .
(MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
ReldttDegree v ->
String ->
whn (ReldttDegree v)
whnormalizeReldttDegree i reason = do
case i of
DegKnown _ _ -> return i
DegGet j chmu -> do
j <- whnormalizeReldttDegree j reason
case j of
DegKnown d KnownDegEq -> return $ DegKnown (_chainModty'dom chmu) KnownDegEq
DegKnown d KnownDegTop -> return $ DegKnown (_chainModty'dom chmu) KnownDegTop
DegKnown d j' -> do
chmu <- whnormalizeChainModty chmu reason
case chmu of
ChainModtyKnown kmu -> return $ DegKnown (_chainModty'dom chmu) $ knownGetDeg j' kmu
_ -> return $ DegGet j chmu
_ -> return $ DegGet j chmu
instance SysWHN Reldtt where
whnormalizeSysTerm sysT ty reason = do
let returnSysT = return $ Expr2 $ TermSys $ sysT
let returnProblem = return $ Expr2 $ TermProblem $ Expr2 $ TermSys $ sysT
case sysT of
SysTermMode d -> BareMode <$> whnormalizeModeTerm d reason
SysTermModty mu -> BareModty <$> whnormalizeModtyTerm mu reason
-- This is a constructor, don't normalize under it!
SysTermChainModtyInDisguise chmu - > return $ Expr2 $ TermSys $ sysT
SysTermDeg i - > case i of
DegKnown _ - > return $ BareDeg i
j mu > do
j < - whnormalize gamma j ( BareSysType $ SysTypeDeg dcod ) reason
case j of
BareKnownDeg KnownDegEq - > return $ BareKnownDeg KnownDegEq
BareKnownDeg KnownDegTop - > return $ BareKnownDeg KnownDegTop
BareKnownDeg j ' - > do
mu < - whnormalize gamma mu ( BareSysType $ SysTypeModty ) reason
case mu of
BareKnownModty mu ' - > return $ BareKnownDeg $ knownGetDeg j ' mu '
_ - > return $ BareDeg $
_ - > return $ BareDeg $
DegKnown _ -> return $ BareDeg i
DegGet j mu ddom dcod -> do
j <- whnormalize gamma j (BareSysType $ SysTypeDeg dcod) reason
case j of
BareKnownDeg KnownDegEq -> return $ BareKnownDeg KnownDegEq
BareKnownDeg KnownDegTop -> return $ BareKnownDeg KnownDegTop
BareKnownDeg j' -> do
mu <- whnormalize gamma mu (BareSysType $ SysTypeModty ddom dcod) reason
case mu of
BareKnownModty mu' -> return $ BareKnownDeg $ knownGetDeg j' mu'
_ -> return $ BareDeg $ DegGet j mu ddom dcod
_ -> return $ BareDeg $ DegGet j mu ddom dcod-}
--SysTypeMode -> returnSysT
--SysTypeDeg d -> returnSysT
SysTypeModty > returnSysT
--_ -> _whnormalizeSys
whnormalizeMode gamma ( ReldttMode t ) reason = ReldttMode ! < $ > whnormalize gamma t ( BareSysType SysTypeMode ) reason
whnormalizeModality dom cod reason = whnormalizeChainModty gamma chmu reason
whnormalizeDegree gamma i d reason = do
case i of
DegKnown _ _ - > return i
j mu > do
j < - whnormalizeDegree gamma j dcod reason
case j of
DegKnown d KnownDegEq - > return $ DegKnown ddom KnownDegEq
DegKnown d KnownDegTop - > return $ DegKnown ddom KnownDegTop
DegKnown d j ' - > do
mu < - whnormalize gamma mu ( BareSysType $ SysTypeModty ) reason
case mu of
BareKnownModty mu ' - > return $ DegKnown ddom mu '
_ - > return $ DegGet j mu ddom dcod
_ - > return $ DegGet j mu ddom dcod
whnormalizeMode gamma (ReldttMode t) reason = ReldttMode !<$> whnormalize gamma t (BareSysType SysTypeMode) reason
whnormalizeModality gamma chmu dom cod reason = whnormalizeChainModty gamma chmu reason
whnormalizeDegree gamma i d reason = do
case i of
DegKnown _ _ -> return i
DegGet j mu ddom dcod -> do
j <- whnormalizeDegree gamma j dcod reason
case j of
DegKnown d KnownDegEq -> return $ DegKnown ddom KnownDegEq
DegKnown d KnownDegTop -> return $ DegKnown ddom KnownDegTop
DegKnown d j' -> do
mu <- whnormalize gamma mu (BareSysType $ SysTypeModty ddom dcod) reason
case mu of
BareKnownModty mu' -> return $ DegKnown ddom $ knownGetDeg j' mu'
_ -> return $ DegGet j mu ddom dcod
_ -> return $ DegGet j mu ddom dcod
-}
whnormalizeMultimodeOrSysAST token t extraT classifT reason = case token of
Left AnTokenMode -> ReldttMode !<$> whnormalize (getReldttMode t) (BareSysType SysTypeMode) reason
Left AnTokenModality -> whnormalizeChainModty t reason
Left AnTokenDegree -> whnormalizeReldttDegree t reason
Right AnTokenModeTerm -> whnormalizeModeTerm t reason
Right AnTokenModtyTerm -> whnormalizeModtyTerm t reason
Right AnTokenKnownModty -> whnormalizeKnownModty t reason
Right AnTokenModtySnout -> return t
Right AnTokenModtyTail -> whnormalizeModtyTail t reason
leqMod mu1 mu2 ddom dcod reason = do
-- You need to normalize: a tail might become empty!
(mu1, metasMu1) <- runWriterT $ whnormalizeChainModty mu1 reason
(mu2, metasMu2) <- runWriterT $ whnormalizeChainModty mu2 reason
case (metasMu1, metasMu2) of
-- Both are normal
([], []) -> case (mu1, mu2) of
(ChainModtyKnown kmu1, ChainModtyKnown kmu2) -> do
related <- relKnownModty ModLeq kmu1 kmu2 reason
case related of
-- Ill-typed.
Nothing -> return $ Just False
-- True, false or not yet clear
Just maybeBool -> return maybeBool
-- There are neutrals involved: don't bother. (Checking syntactic equality will yield weird behaviour.)
(_, _) -> return $ Just False
-- Either is not normal: come back later. (Checking syntactic equality will yield weird behaviour.)
(_ , _ ) -> return $ Nothing
leqDeg deg1 deg2 d reason = do
(deg1, metasDeg1) <- runWriterT $ whnormalizeDegree deg1 d reason
(deg2, metasDeg2) <- runWriterT $ whnormalizeDegree deg2 d reason
case (metasDeg1, deg1, metasDeg2, deg2) of
(_, DegKnown _ i1, _, DegKnown _ i2) -> return $ Just $ i1 <= i2
([], _, [], _) -> return $ Just False
(_ , _, _ , _) -> return Nothing
| null | https://raw.githubusercontent.com/anuyts/menkar/1f00e9febd1e9ed70c138ae8232b1c72a17d31da/menkar/src/Menkar/Systems/Reldtt/WHN.hs | haskell | both empty
both empty
both empty
both discrete
both empty
both discrete
both empty
both empty
both forget
both forget
both empty
both discrete
both forget
both forget
non-Top-discreteness is less than continuity
but not equal if tails are whn.
The only way that @ModLeq@ can be false, is when the left snout ends in Top, but then
both are empty
both are empty
not equal if tails are whn
discreteness lists '='
discreteness is less than continuity
both continuity
| Compare known modalities, assuming they have the same type.
Return a boolean if they compare,
or @Nothing@ in case of problems (not metavariable-related problems, but ACTUAL problems),
or @Just Nothing@ if presently unclear.
If the forcing of domains and codomains causes problems, then we get `whn Nothing`, i.e. the do-block is aborted.
If relTail yields `Nothing`, then `lift` promotes this to `whn (Just Nothing)`
--------------------------------
| composition
| composition
USING THE TYPE !
USING THE TYPE !
USING THE TYPE!
USING THE TYPE!
-------------------
| Beware that the omega - case is not really handled !
| Beware that the omega-case is not really handled!
-------------------
| Fails (returns Nothing) for modalities with a discrete tail of neutral length.
Precondition: argument has been whnormalized to the extent possible.
Write a degree, increase the length
Pop a degree, increase the pop-counter
Write a degree, increase the length
-------------------
Why bother?
ChainModtyKnown <$> whnormalizeKnownModty gamma knownMu reason
ModeTermFinite t -> BareMode . ModeTermFinite <$> whnormalize gamma t (hs2type NatType) reason
ModtyTermChain is a constructor, don't normalize under it!
This is a constructor, don't normalize under it!
SysTypeMode -> returnSysT
SysTypeDeg d -> returnSysT
_ -> _whnormalizeSys
You need to normalize: a tail might become empty!
Both are normal
Ill-typed.
True, false or not yet clear
There are neutrals involved: don't bother. (Checking syntactic equality will yield weird behaviour.)
Either is not normal: come back later. (Checking syntactic equality will yield weird behaviour.) | module Menkar.Systems.Reldtt.WHN where
import Menkar.Basic
import Menkar.Analyzer
import Menkar.WHN
import Menkar.System.Fine
import Menkar.System.Scoper
import Menkar.System.WHN
import Menkar.Fine
import Menkar.Monad
import Menkar.Systems.Reldtt.Basic
import Menkar.Systems.Reldtt.Fine
import Menkar.Systems.Reldtt.Analyzer
import Menkar.Systems.Reldtt.Scoper
import Control.Monad.DoUntilFail
import Control.Exception.AssertFalse
import Data.Functor.Coerce
import Control.Monad.Trans.Class
import Control.Monad.Writer.Class
import Control.Monad.Trans.Writer.Strict hiding (listen, tell)
import Control.Monad.Trans.Maybe
import Control.Monad.State.Strict
import Control.Applicative
import Control.Lens
import Data.Void
import GHC.Generics
import Data.Functor.Compose
import Data.Maybe
| Precondition : Tails start at the same point and have the same neutral ( co)domain .
Precondition for correct result : The snouts are leq .
Output : bool@ if absolutely sure , @Left bool@ if tails are assumed not - blocked .
Precondition for correct result: The snouts are leq.
Output: @Right bool@ if absolutely sure, @Left bool@ if tails are assumed not-blocked. -}
relTail_ :: ModRel -> ModtySnout -> ModtySnout -> ModtyTail v -> ModtyTail v -> Either Bool Bool
relTail_ rel _ _ TailProblem _ = Right False
relTail_ rel _ _ _ TailProblem = Right False
relTail_ rel _ _ (TailDiscForget ddom dcod) (TailCont d') = case rel of
since snouts are leq and cont is well - typed , we know that on the left , we do n't have Top .
if the snouts are leq , then so does the right one , so you ca n't have TailCont .
relTail_ rel _ snoutR (TailCont d) (TailDiscForget ddom' dcod') = case rel of
ModLeq -> case _modtySnout'degreesReversed snoutR of
discreteness is actually codiscreteness
| Precondition : Tails start at the same point and have the same neutral ( co)domain .
Precondition for correct result : The snouts are leq .
Return Nothing if presently unclear .
Precondition for correct result: The snouts are leq.
Return Nothing if presently unclear. -}
{- | Relate the tails of two known modalities under @rel@.
   Per the preceding header comment: tails start at the same point and have the
   same neutral (co)domain, and the snouts are assumed leq.
   Returns @Just bool@ when certain, @Nothing@ when blocked on metavariables. -}
relTail :: forall whn v .
  (MonadWHN Reldtt whn, DeBruijnLevel v) =>
  ModRel ->
  KnownModty v ->
  KnownModty v ->
  String ->
  whn (Maybe Bool)
relTail rel (KnownModty snoutL tailL) (KnownModty snoutR tailR) reason = do
  -- Whnormalize both tails, collecting the metavariables each is blocked on.
  (whnTailL, metasL) <- runWriterT $ whnormalizeModtyTail tailL reason
  (whnTailR, metasR) <- runWriterT $ whnormalizeModtyTail tailR reason
  case relTail_ rel snoutL snoutR whnTailL whnTailR of
    -- 'relTail_' is absolutely sure of its verdict.
    Right bool -> return $ Just bool
    -- 'relTail_' assumed the tails are not blocked: only trust the verdict
    -- if whnormalization encountered no metavariables on either side.
    Left bool -> case (metasL, metasR) of
      ([], []) -> return $ Just bool
      otherwise -> return $ Nothing
{- | Compare two known modalities, assuming they have the same type (see the
   header comment above).
   Returns @Nothing@ on actual (non-meta) problems, @Just Nothing@ when
   presently unclear, @Just (Just bool)@ otherwise. -}
relKnownModty :: forall whn v .
  (MonadWHN Reldtt whn, DeBruijnLevel v) =>
  ModRel ->
  KnownModty v ->
  KnownModty v ->
  String ->
  whn (Maybe (Maybe Bool))
relKnownModty rel kmu1@(KnownModty snout1 tail1) kmu2@(KnownModty snout2 tail2) reason = runMaybeT $ do
  -- We're now in the monad @MaybeT whn _ = whn (Maybe _)@.
  -- Align both modalities on the longer domain; a failed forcing aborts the
  -- do-block with @whn Nothing@.
  (kmu1, kmu2) <- MaybeT . return $
    case compare (_modtySnout'dom $ _knownModty'snout kmu1) (_modtySnout'dom $ _knownModty'snout kmu2) of
      LT -> (, kmu2) <$> forceDom snout1 tail1 (_modtySnout'dom snout2) (_modtyTail'dom tail2)
      EQ -> Just (kmu1, kmu2)
      GT -> (kmu1, ) <$> forceDom snout2 tail2 (_modtySnout'dom snout1) (_modtyTail'dom tail1)
  -- Likewise align on the longer codomain.
  -- NOTE(review): this step still reads the original snout1/tail1 patterns
  -- rather than the components of the dom-forced kmu1/kmu2 bound just above —
  -- confirm this is intended.
  (kmu1, kmu2) <- MaybeT . return $
    case compare (_modtySnout'cod $ _knownModty'snout kmu1) (_modtySnout'cod $ _knownModty'snout kmu2) of
      LT -> (, kmu2) <$> forceCod snout1 tail1 (_modtySnout'cod snout2) (_modtyTail'cod tail2)
      EQ -> Just (kmu1, kmu2)
      GT -> (kmu1, ) <$> forceCod snout2 tail2 (_modtySnout'cod snout1) (_modtyTail'cod tail1)
  let opSnout = case rel of
        ModEq -> (==)
        ModLeq -> (<=)
  -- Compare the snouts degreewise.
  let snoutsRelated = and $ getZipList $
        (opSnout) <$> ZipList (_modtySnout'degreesReversed $ _knownModty'snout kmu1)
                  <*> ZipList (_modtySnout'degreesReversed $ _knownModty'snout kmu2)
  -- Compare the tails; 'relTail' yields Nothing when presently unclear, which
  -- 'lift' promotes to @whn (Just Nothing)@.
  tailsRelated <- lift $ relTail rel kmu1 kmu2 reason
  return $ (snoutsRelated &&) <$> tailsRelated
-- | Composition on snouts: precompose the given snout with the known modality
--   @mu@ — every degree of the snout is looked up through @mu@, and the
--   domain is replaced by @mu@'s domain.
compModtySnout :: ModtySnout -> KnownModty v -> ModtySnout
compModtySnout (ModtySnout kmid kcod []) mu =
  ModtySnout (_modtySnout'dom $ _knownModty'snout $ mu) kcod []
compModtySnout (ModtySnout kmid kcod krevdegs) mu =
  ModtySnout (_modtySnout'dom $ _knownModty'snout $ mu) kcod $ flip knownGetDeg mu <$> krevdegs
-- | Composition on tails. @TailCont@ acts as a unit and @TailProblem@ is
--   absorbing; otherwise the result combines the codomain side of the outer
--   tail with the domain side of the inner tail.
compModtyTail :: ModtyTail v -> ModtyTail v -> ModtyTail v
compModtyTail (TailCont d) tail1 = tail1
compModtyTail tail2 (TailCont d) = tail2
compModtyTail TailEmpty TailEmpty = TailEmpty
compModtyTail TailEmpty (TailDisc _) = TailEmpty
compModtyTail TailEmpty (TailForget ddom) = TailForget ddom
compModtyTail TailEmpty (TailDiscForget ddom _) = TailForget ddom
compModtyTail (TailDisc dcod) TailEmpty = TailDisc dcod
compModtyTail (TailDisc dcod) (TailDisc _) = TailDisc dcod
compModtyTail (TailDisc dcod) (TailForget ddom) = TailDiscForget ddom dcod
compModtyTail (TailDisc dcod) (TailDiscForget ddom _) = TailDiscForget ddom dcod
compModtyTail (TailForget _) TailEmpty = TailEmpty
compModtyTail (TailForget _) (TailDisc _) = TailEmpty
compModtyTail (TailForget _) (TailForget ddom) = TailForget ddom
compModtyTail (TailForget _) (TailDiscForget ddom _) = TailForget ddom
compModtyTail (TailDiscForget _ dcod) TailEmpty = TailDisc dcod
compModtyTail (TailDiscForget _ dcod) (TailDisc _) = TailDisc dcod
compModtyTail (TailDiscForget _ dcod) (TailForget ddom) = TailDiscForget ddom dcod
compModtyTail (TailDiscForget _ dcod) (TailDiscForget ddom _) = TailDiscForget ddom dcod
compModtyTail TailProblem _ = TailProblem
compModtyTail _ TailProblem = TailProblem
-- NOTE(review): the original file additionally carried commented-out
-- equations for TailCodisc/TailCodiscForget variants (their block-comment
-- markers were stripped in this extraction); they were dead code and are
-- omitted here.
-- | Composition @mu2 . mu1@ of known modalities. First aligns the codomain of
--   @mu1@ with the domain of @mu2@ by forcing the shorter side, then composes
--   snouts and tails. Yields 'problemKnownModty' when alignment fails.
compKnownModty :: KnownModty v -> KnownModty v -> KnownModty v
compKnownModty mu2@(KnownModty snout2 tail2) mu1@(KnownModty snout1 tail1) =
  let maybeStuff = case compare (_modtySnout'cod snout1) (_modtySnout'dom snout2) of
        LT -> (, mu2) <$> forceCod snout1 tail1 (_modtySnout'dom snout2) (_modtyTail'dom tail2)
        EQ -> Just (mu1, mu2)
        GT -> (mu1, ) <$> forceDom snout2 tail2 (_modtySnout'cod snout1) (_modtyTail'cod tail1)
  in case maybeStuff of
       Nothing -> problemKnownModty
       Just (mu1@(KnownModty snout1 tail1), mu2@(KnownModty snout2 tail2)) ->
         -- Compose the aligned parts (note the shadowed, forced bindings).
         let snoutComp = compModtySnout snout2 mu1
             tailComp = compModtyTail tail2 tail1
         in KnownModty snoutComp tailComp
-- | Composition of chain modalities: recurse through the links of the left
--   chain, merge known heads via 'compKnownModty', and otherwise fall back to
--   an opaque 'ModtyTermComp' term (using the type, i.e. the recorded
--   domain/codomain modes).
compChainModty :: ChainModty v -> ChainModty v -> ChainModty v
compChainModty (ChainModtyLink kmu tnu chrho) chsigma =
  ChainModtyLink kmu tnu $ compChainModty chrho chsigma
compChainModty (ChainModtyKnown kmu) (ChainModtyLink knu trho chsigma) =
  ChainModtyLink (kmu `compKnownModty` knu) trho chsigma
compChainModty (ChainModtyKnown kmu) (ChainModtyKnown knu) =
  ChainModtyKnown (kmu `compKnownModty` knu)
compChainModty chmu chnu =
  ChainModtyTerm (_chainModty'dom chnu) (_chainModty'cod chmu) $ BareModty $ ModtyTermComp chmu chnu
whnormalizeComp : : forall whn v .
( MonadWHN Reldtt whn , MonadWriter [ Int ] whn , DeBruijnLevel v ) = >
Constraint Reldtt - >
Term
Term
Term
Type
String - >
whn ( Term v )
whnormalizeComp gamma mu2 dmid mu1 ty reason = do
whnTy < - whnormalizeType gamma ty reason
let giveUp = return $ BareModty $ ModtyTermComp mu2 dmid mu1
case unType whnTy of
Expr2 ( TermSys ( SysTypeModty ) ) - > do
whnMu1 < - whnormalize gamma mu1 ( Type $ Expr2 $ TermSys $ SysTypeModty ) reason
< - whnormalize gamma mu2 ( Type $ Expr2 $ TermSys $ SysTypeModty ) reason
case ( whnMu1 , ) of
( BareModty ( ModtyTermUnavailable ddom ' dmid ' ) , _ ) - >
( _ , BareModty ( ModtyTermUnavailable dmid ' dcod ' ) ) - >
( BareModty ( ModtyTerm ) , BareModty ( ModtyTerm ) ) - > do
let = case compare ( _ ) ( _ modtySnout'dom snout2 ) of
LT - > ( , ( snout2 , tail2 ) ) < $ > forceCod snout1 tail1 ( _ modtySnout'dom snout2 ) ( _ modtyTail'dom tail2 )
EQ - > Just ( ( snout1 , tail1 ) , ( snout2 , tail2 ) )
GT - > ( ( snout1 , tail1 ) , ) < $ > forceDom snout2 tail2 ( _ modtySnout'cod snout1 ) ( _ )
case of
Nothing - > Expr2 . TermProblem < $ > giveUp
Just ( ( snout1 , tail1 ) , ( snout2 , tail2 ) ) - > do
let snoutComp = compModtySnout snout2 snout1
let tailComp =
return $ BareModty $ ModtyTerm snoutComp tailComp
( _ , _ ) - > return $ BareModty $ ModtyTermComp whnMu1
otherwise - > giveUp
whnormalizeComp :: forall whn v .
(MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
Constraint Reldtt ->
Ctx Type Reldtt v ->
Term Reldtt v ->
Term Reldtt v ->
Term Reldtt v ->
Type Reldtt v ->
String ->
whn (Term Reldtt v)
whnormalizeComp gamma mu2 dmid mu1 ty reason = do
whnTy <- whnormalizeType gamma ty reason
let giveUp = return $ BareModty $ ModtyTermComp mu2 dmid mu1
case unType whnTy of
Expr2 (TermSys (SysTypeModty ddom dcod)) -> do
whnMu1 <- whnormalize gamma mu1 (Type $ Expr2 $ TermSys $ SysTypeModty ddom dmid) reason
whnMu2 <- whnormalize gamma mu2 (Type $ Expr2 $ TermSys $ SysTypeModty dmid dcod) reason
case (whnMu1, whnMu2) of
(BareModty (ModtyTermUnavailable ddom' dmid'), _) ->
(_, BareModty (ModtyTermUnavailable dmid' dcod')) ->
(BareModty (ModtyTerm snout2 tail2), BareModty (ModtyTerm snout1 tail1)) -> do
let maybeStuff = case compare (_modtySnout'cod snout1) (_modtySnout'dom snout2) of
LT -> (, (snout2, tail2)) <$> forceCod snout1 tail1 (_modtySnout'dom snout2) (_modtyTail'dom tail2)
EQ -> Just ((snout1, tail1), (snout2, tail2))
GT -> ((snout1, tail1), ) <$> forceDom snout2 tail2 (_modtySnout'cod snout1) (_modtyTail'cod tail1)
case maybeStuff of
Nothing -> Expr2 . TermProblem <$> giveUp
Just ((snout1, tail1), (snout2, tail2)) -> do
let snoutComp = compModtySnout snout2 snout1
let tailComp = compModtyTail tail2 tail1
return $ BareModty $ ModtyTerm snoutComp tailComp
(_, _) -> return $ BareModty $ ModtyTermComp whnMu2 dmid whnMu1
otherwise -> giveUp
-}
knownGetDegSnout : : KnownDeg - > ModtySnout - > KnownDeg
knownGetDegSnout KnownDegEq mu = KnownDegEq
knownGetDegSnout ( KnownDeg i ) ( ModtySnout kdom kcod ) = krevdegs ! ! ( length krevdegs - i - 1 )
knownGetDegSnout KnownDegOmega mu =
knownGetDegSnout KnownDegTop mu = KnownDegTop
knownGetDegSnout KnownDegProblem mu = KnownDegProblem
knownGetDegSnout :: KnownDeg -> ModtySnout -> KnownDeg
knownGetDegSnout KnownDegEq mu = KnownDegEq
knownGetDegSnout (KnownDeg i) (ModtySnout kdom kcod krevdegs) = krevdegs !! (length krevdegs - i - 1)
knownGetDegSnout KnownDegOmega mu = KnownDegOmega
knownGetDegSnout KnownDegTop mu = KnownDegTop
knownGetDegSnout KnownDegProblem mu = KnownDegProblem
-}
-- | Apply a known modality to a known degree. @Eq@, @Top@ and @Problem@ are
--   fixed; a concrete degree is looked up in the snout (stored reversed) when
--   in range, and otherwise resolved through the tail. Fails (returns
--   'KnownDegProblem') for tails that cannot answer, per the header comment.
knownGetDeg :: KnownDeg -> KnownModty v -> KnownDeg
knownGetDeg KnownDegEq _ = KnownDegEq
knownGetDeg KnownDegTop _ = KnownDegTop
knownGetDeg KnownDegProblem _ = KnownDegProblem
knownGetDeg (KnownDeg i) (KnownModty snout@(ModtySnout idom icod krevdegs) tail) =
  if i < icod
    -- Degree i lies within the snout: index the reversed degree list.
    then krevdegs !! (icod - i - 1)
    else case tail of
      TailEmpty -> KnownDegProblem
      TailDisc dcod -> snoutMax
      TailForget ddom -> KnownDegProblem
      TailDiscForget ddom dcod -> snoutMax
      -- Continuity tail: shift the degree along the snout offset.
      TailCont d -> KnownDeg (i - icod + idom)
      TailProblem -> KnownDegProblem
  where snoutMax = _snout'max snout
knownGetDeg KnownDegOmega mu@(KnownModty snout@(ModtySnout idom icod krevdegs) tail) = case tail of
  TailEmpty -> KnownDegProblem
  TailDisc dcod -> snoutMax
  TailForget ddom -> KnownDegProblem
  TailDiscForget ddom dcod -> snoutMax
  TailCont d -> KnownDegOmega
  TailProblem -> KnownDegProblem
  where snoutMax = _snout'max snout
knownApproxLeftAdjointProj :: KnownModty v -> Maybe (KnownModty v)
knownApproxLeftAdjointProj kmu@(KnownModty snout@(ModtySnout idom icod krevdegs) tail) =
Fields :
_ 1 : number of degrees popped from the input modality , minus one .
_ 2 : remaining tail of the input modality
_ 3 : already constructed part of output modality , REVERSED
_ 4 : length of _ 3
_1: number of degrees popped from the input modality, minus one.
_2: remaining tail of the input modality
_3: already constructed part of output modality, REVERSED
_4: length of _3
-}
let (_, _, krevdegs', _) = flip execState (-1, reverse krevdegs, [], 0) $
doUntilFail $ do
remainingTail <- use _2
threshold <- use _4
if threshold == idom
then return False
else True <$ case remainingTail of
nextDeg : remainingTail' -> if nextDeg > KnownDeg threshold
nextDeg' <- use _1
_3 %= (nextDeg' :)
_4 += 1
_2 .= remainingTail'
_1 += 1
nextDeg' <- use _1
_3 %= (nextDeg' :)
_4 += 1
snout' = ModtySnout icod idom (int2deg <$> krevdegs')
snoutCohpi' = ModtySnout icod idom $ krevdegs' <&> \ i -> if i == (idom - 1) then KnownDegOmega else int2deg i
in case tail of
TailEmpty -> Just $ KnownModty snout' $ TailEmpty
TailDisc dcod -> case dcod of
ReldttMode BareModeOmega -> Just $ KnownModty snoutCohpi' $ TailForget dcod
_ -> case krevdegs of
We can read the tail as TailCodisc
KnownDegTop : _ -> Just $ KnownModty snout' $ TailForget dcod
_ -> Nothing
TailForget ddom -> Just $ KnownModty snout' $ TailDisc ddom
TailDiscForget ddom dcod -> case dcod of
ReldttMode BareModeOmega -> Just $ KnownModty snoutCohpi' $ TailDiscForget dcod ddom
_ -> case krevdegs of
We can read the tail as TailCodiscForget
KnownDegTop : _ -> Just $ KnownModty snout' $ TailDiscForget dcod ddom
_ -> Nothing
TailCont d -> Just $ KnownModty snout' $ TailCont d
TailProblem -> Just $ KnownModty snout' $ TailProblem
where int2deg :: Int -> KnownDeg
int2deg (-1) = KnownDegEq
int2deg i = KnownDeg i
-- | Weak-head-normalize a modality tail: whnormalize its mode annotations and
--   collapse any side that normalizes to the zero mode (a tail over mode zero
--   is empty — "you need to normalize: a tail might become empty").
whnormalizeModtyTail :: forall whn v .
  (MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
  ModtyTail v ->
  String ->
  whn (ModtyTail v)
whnormalizeModtyTail tail reason =
  case tail of
    TailEmpty -> return TailEmpty
    TailDisc dcod -> do
      dcod <- whnormalizeMode dcod reason
      case dcod of
        ReldttMode (BareMode ModeTermZero) -> return TailEmpty
        otherwise -> return $ TailDisc dcod
    TailForget ddom -> do
      ddom <- whnormalizeMode ddom reason
      case ddom of
        ReldttMode (BareMode ModeTermZero) -> return TailEmpty
        otherwise -> return $ TailForget ddom
    TailDiscForget ddom dcod -> do
      ddom <- whnormalizeMode ddom reason
      dcod <- whnormalizeMode dcod reason
      -- Drop whichever side(s) normalized to mode zero.
      case (ddom, dcod) of
        (ReldttMode (BareMode ModeTermZero),
         ReldttMode (BareMode ModeTermZero)) -> return TailEmpty
        (ReldttMode (BareMode ModeTermZero), _) -> return $ TailDisc dcod
        (_, ReldttMode (BareMode ModeTermZero)) -> return $ TailForget ddom
        (_, _) -> return $ TailDiscForget ddom dcod
    TailCont d -> do
      d <- whnormalizeMode d reason
      case d of
        ReldttMode (BareMode ModeTermZero) -> return TailEmpty
        otherwise -> return $ TailCont d
    TailProblem -> return TailProblem
-- | Weak-head-normalize a known modality: whnormalize the tail, then, while a
--   tail mode is a successor, absorb one step into the snout (via 'extDisc' /
--   'extForget' / 'extCont') and recurse; tails over mode zero vanish.
whnormalizeKnownModty :: forall whn v .
  (MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
  KnownModty v ->
  String ->
  whn (KnownModty v)
whnormalizeKnownModty mu@(KnownModty snout tail) reason = do
  tail <- whnormalizeModtyTail tail reason
  case tail of
    TailEmpty -> return $ KnownModty snout TailEmpty
    TailDisc dcod -> case dcod of
      ReldttMode (BareMode ModeTermZero) -> return $ KnownModty snout TailEmpty
      ReldttMode (BareMode (ModeTermSuc d)) ->
        whnormalizeKnownModty (KnownModty (extDisc snout) $ TailDisc $ ReldttMode d) reason
      _ -> return $ KnownModty snout tail
    TailForget ddom -> case ddom of
      ReldttMode (BareMode ModeTermZero) -> return $ KnownModty snout TailEmpty
      ReldttMode (BareMode (ModeTermSuc d)) ->
        whnormalizeKnownModty (KnownModty (extForget snout) $ TailForget $ ReldttMode d) reason
      _ -> return $ KnownModty snout tail
    TailDiscForget ddom dcod -> case dcod of
      -- Zero codomain: the disc part vanishes.
      ReldttMode (BareMode ModeTermZero) ->
        whnormalizeKnownModty (KnownModty snout $ TailForget ddom) reason
      ReldttMode (BareMode (ModeTermSuc d)) ->
        whnormalizeKnownModty (KnownModty (extDisc snout) $ TailDiscForget ddom (ReldttMode d)) reason
      _ -> case ddom of
        -- Zero domain: the forget part vanishes.
        ReldttMode (BareMode ModeTermZero) ->
          whnormalizeKnownModty (KnownModty snout $ TailDisc dcod) reason
        ReldttMode (BareMode (ModeTermSuc d)) ->
          whnormalizeKnownModty (KnownModty (extForget snout) $ TailDiscForget (ReldttMode d) dcod) reason
        _ -> return $ KnownModty snout tail
    TailCont d -> case d of
      ReldttMode (BareMode ModeTermZero) -> return $ KnownModty snout TailEmpty
      ReldttMode (BareMode (ModeTermSuc dpred)) ->
        whnormalizeKnownModty (KnownModty (extCont snout) $ TailCont $ ReldttMode dpred) reason
      _ -> return $ KnownModty snout tail
    TailProblem -> return $ KnownModty snout TailProblem
-- | Weak-head-normalize a chain modality: known chains are normal; links are
--   collapsed by composing through known parts; metas are awaited.
whnormalizeChainModty :: forall whn v .
  (MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
  ChainModty v ->
  String ->
  whn (ChainModty v)
whnormalizeChainModty mu@(ChainModtyKnown knownMu) reason = return mu
-- [original comment truncated in extraction: "...'s are aligned before
-- relating them."]
whnormalizeChainModty mu@(ChainModtyLink knownMu termNu chainRho) reason = do
  termNu <- whnormalize termNu
    (BareSysType $ SysTypeModty (_chainModty'cod chainRho) (_knownModty'dom knownMu)) reason
  case termNu of
    BareChainModty chainNu -> do
      chainNu <- whnormalizeChainModty chainNu reason
      case chainNu of
        ChainModtyKnown knownNu -> do
          chainRho <- whnormalizeChainModty chainRho reason
          let composite = case chainRho of
                ChainModtyKnown knownRho ->
                  ChainModtyKnown (knownMu `compKnownModty` knownNu `compKnownModty` knownRho)
                ChainModtyLink knownSigma termTau chainUpsilon ->
                  -- mu . nu . sigma . tau . upsilon
                  ChainModtyLink (knownMu `compKnownModty` knownNu `compKnownModty` knownSigma) termTau chainUpsilon
                ChainModtyTerm ddom dcod trho ->
                  ChainModtyLink (knownMu `compKnownModty` knownNu) (BareChainModty chainRho) $
                    ChainModtyKnown $ idKnownModty ddom
                ChainModtyMeta _ _ _ _ -> unreachable
                ChainModtyAlreadyChecked _ _ _ -> unreachable
          whnormalizeChainModty composite reason
        ChainModtyLink knownNuA termNuB chainNuC -> do
          -- mu . nuA . nuB . nuC . rho
          let composite = ChainModtyLink (knownMu `compKnownModty` knownNuA) termNuB $
                compMod chainNuC chainRho
          whnormalizeChainModty composite reason
        ChainModtyTerm ddom dcod tnu -> return $ ChainModtyLink knownMu termNu chainRho
        ChainModtyMeta _ _ _ _ -> unreachable
        ChainModtyAlreadyChecked _ _ _ -> unreachable
    otherwise -> return $ ChainModtyLink knownMu termNu chainRho
whnormalizeChainModty chmu@(ChainModtyTerm dom cod tmu) reason = do
  (tmu, metasTMu) <- listen $ whnormalize tmu (BareSysType $ SysTypeModty dom cod) reason
  case (tmu, metasTMu) of
    (BareChainModty chmu, []) -> whnormalizeChainModty chmu reason
    -- Fully normalized but not a chain: wrap it between identity links.
    (_, []) -> whnormalizeChainModty
      (ChainModtyLink (idKnownModty cod) tmu $ ChainModtyKnown $ idKnownModty dom)
      reason
    otherwise -> return $ ChainModtyTerm dom cod tmu
whnormalizeChainModty chmu@(ChainModtyMeta dom cod meta depcies) reason = do
  maybeSolution <- awaitMeta reason meta depcies
  case maybeSolution of
    -- Still unsolved: report the blocking meta.
    Nothing -> chmu <$ tell [meta]
    Just solution -> whnormalizeChainModty solution reason
whnormalizeChainModty chmu@(ChainModtyAlreadyChecked dom cod chmuChecked) reason =
  whnormalizeChainModty chmuChecked reason
whnormalizeChainModty : : forall whn v .
( MonadWHN Reldtt whn , MonadWriter [ Int ] whn , DeBruijnLevel v ) = >
ChainModty v - >
String - >
whn ( ChainModty v )
whnormalizeChainModty gamma reason = do
let cod = _ chainModty'cod chmu
whnCod < - whnormalizeMode gamma cod reason
case whnCod of
ReldttMode ( BareMode ModeTermZero ) - > return $ ChainModtyKnown $
forgetKnownModty $ _ chainModty'dom chmu
otherwise - > whnormalizeChainModtyNonzeroCod gamma chmu reason
whnormalizeChainModty :: forall whn v .
(MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
Ctx Type Reldtt v ->
ChainModty v ->
String ->
whn (ChainModty v)
whnormalizeChainModty gamma chmu reason = do
let cod = _chainModty'cod chmu
whnCod <- whnormalizeMode gamma cod reason
case whnCod of
ReldttMode (BareMode ModeTermZero) -> return $ ChainModtyKnown $
forgetKnownModty $ _chainModty'dom chmu
otherwise -> whnormalizeChainModtyNonzeroCod gamma chmu reason
-}
-- | Weak-head-normalize a mode term. Only the successor case computes:
--   @suc omega@ normalizes to @omega@.
whnormalizeModeTerm :: forall whn v .
  (MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
  ModeTerm v ->
  String ->
  whn (ModeTerm v)
whnormalizeModeTerm d reason = case d of
  ModeTermZero -> return $ ModeTermZero
  ModeTermSuc d -> do
    d <- whnormalize d (BareSysType $ SysTypeMode) reason
    case d of
      BareMode ModeTermOmega -> return $ ModeTermOmega
      _ -> return $ ModeTermSuc d
  ModeTermOmega -> return $ ModeTermOmega
-- | Weak-head-normalize a modality term.
whnormalizeModtyTerm :: forall whn v .
  (MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
  ModtyTerm v ->
  String ->
  whn (ModtyTerm v)
whnormalizeModtyTerm mu reason = case mu of
  ModtyTermChain chmu -> return mu
  -- only for prettyprinting
  ModtyTermApproxLeftAdjointProj chrho -> do
    chrho <- whnormalizeChainModty chrho reason
    case chrho of
      ChainModtyKnown krho -> case knownApproxLeftAdjointProj krho of
        Just kmu -> return $ ModtyTermChain $ ChainModtyKnown $ kmu
        Nothing -> return mu
      otherwise -> return mu
  ModtyTermComp chmu2 chmu1 -> do
    -- Compose only if both factors whnormalized without blocking on metas.
    (chmu1, metas1) <- listen $ whnormalizeChainModty chmu1 reason
    (chmu2, metas2) <- listen $ whnormalizeChainModty chmu2 reason
    case (metas1, metas2) of
      ([], []) -> return $ ModtyTermChain $ chmu2 `compChainModty` chmu1
      (_, _) -> return $ ModtyTermComp chmu2 chmu1
  ModtyTermUnavailable ddom dcod -> return mu
-- | Weak-head-normalize a degree. 'DegKnown' is normal; for 'DegGet' the
--   degree is normalized first (Eq and Top pass through any modality), and a
--   known modality then resolves the lookup via 'knownGetDeg'.
whnormalizeReldttDegree :: forall whn v .
  (MonadWHN Reldtt whn, MonadWriter [Int] whn, DeBruijnLevel v) =>
  ReldttDegree v ->
  String ->
  whn (ReldttDegree v)
whnormalizeReldttDegree i reason = do
  case i of
    DegKnown _ _ -> return i
    DegGet j chmu -> do
      j <- whnormalizeReldttDegree j reason
      case j of
        DegKnown d KnownDegEq -> return $ DegKnown (_chainModty'dom chmu) KnownDegEq
        DegKnown d KnownDegTop -> return $ DegKnown (_chainModty'dom chmu) KnownDegTop
        DegKnown d j' -> do
          chmu <- whnormalizeChainModty chmu reason
          case chmu of
            ChainModtyKnown kmu -> return $ DegKnown (_chainModty'dom chmu) $ knownGetDeg j' kmu
            _ -> return $ DegGet j chmu
        _ -> return $ DegGet j chmu
instance SysWHN Reldtt where
whnormalizeSysTerm sysT ty reason = do
let returnSysT = return $ Expr2 $ TermSys $ sysT
let returnProblem = return $ Expr2 $ TermProblem $ Expr2 $ TermSys $ sysT
case sysT of
SysTermMode d -> BareMode <$> whnormalizeModeTerm d reason
SysTermModty mu -> BareModty <$> whnormalizeModtyTerm mu reason
SysTermChainModtyInDisguise chmu - > return $ Expr2 $ TermSys $ sysT
SysTermDeg i - > case i of
DegKnown _ - > return $ BareDeg i
j mu > do
j < - whnormalize gamma j ( BareSysType $ SysTypeDeg dcod ) reason
case j of
BareKnownDeg KnownDegEq - > return $ BareKnownDeg KnownDegEq
BareKnownDeg KnownDegTop - > return $ BareKnownDeg KnownDegTop
BareKnownDeg j ' - > do
mu < - whnormalize gamma mu ( BareSysType $ SysTypeModty ) reason
case mu of
BareKnownModty mu ' - > return $ BareKnownDeg $ knownGetDeg j ' mu '
_ - > return $ BareDeg $
_ - > return $ BareDeg $
DegKnown _ -> return $ BareDeg i
DegGet j mu ddom dcod -> do
j <- whnormalize gamma j (BareSysType $ SysTypeDeg dcod) reason
case j of
BareKnownDeg KnownDegEq -> return $ BareKnownDeg KnownDegEq
BareKnownDeg KnownDegTop -> return $ BareKnownDeg KnownDegTop
BareKnownDeg j' -> do
mu <- whnormalize gamma mu (BareSysType $ SysTypeModty ddom dcod) reason
case mu of
BareKnownModty mu' -> return $ BareKnownDeg $ knownGetDeg j' mu'
_ -> return $ BareDeg $ DegGet j mu ddom dcod
_ -> return $ BareDeg $ DegGet j mu ddom dcod-}
SysTypeModty > returnSysT
whnormalizeMode gamma ( ReldttMode t ) reason = ReldttMode ! < $ > whnormalize gamma t ( BareSysType SysTypeMode ) reason
whnormalizeModality dom cod reason = whnormalizeChainModty gamma chmu reason
whnormalizeDegree gamma i d reason = do
case i of
DegKnown _ _ - > return i
j mu > do
j < - whnormalizeDegree gamma j dcod reason
case j of
DegKnown d KnownDegEq - > return $ DegKnown ddom KnownDegEq
DegKnown d KnownDegTop - > return $ DegKnown ddom KnownDegTop
DegKnown d j ' - > do
mu < - whnormalize gamma mu ( BareSysType $ SysTypeModty ) reason
case mu of
BareKnownModty mu ' - > return $ DegKnown ddom mu '
_ - > return $ DegGet j mu ddom dcod
_ - > return $ DegGet j mu ddom dcod
whnormalizeMode gamma (ReldttMode t) reason = ReldttMode !<$> whnormalize gamma t (BareSysType SysTypeMode) reason
whnormalizeModality gamma chmu dom cod reason = whnormalizeChainModty gamma chmu reason
whnormalizeDegree gamma i d reason = do
case i of
DegKnown _ _ -> return i
DegGet j mu ddom dcod -> do
j <- whnormalizeDegree gamma j dcod reason
case j of
DegKnown d KnownDegEq -> return $ DegKnown ddom KnownDegEq
DegKnown d KnownDegTop -> return $ DegKnown ddom KnownDegTop
DegKnown d j' -> do
mu <- whnormalize gamma mu (BareSysType $ SysTypeModty ddom dcod) reason
case mu of
BareKnownModty mu' -> return $ DegKnown ddom $ knownGetDeg j' mu'
_ -> return $ DegGet j mu ddom dcod
_ -> return $ DegGet j mu ddom dcod
-}
whnormalizeMultimodeOrSysAST token t extraT classifT reason = case token of
Left AnTokenMode -> ReldttMode !<$> whnormalize (getReldttMode t) (BareSysType SysTypeMode) reason
Left AnTokenModality -> whnormalizeChainModty t reason
Left AnTokenDegree -> whnormalizeReldttDegree t reason
Right AnTokenModeTerm -> whnormalizeModeTerm t reason
Right AnTokenModtyTerm -> whnormalizeModtyTerm t reason
Right AnTokenKnownModty -> whnormalizeKnownModty t reason
Right AnTokenModtySnout -> return t
Right AnTokenModtyTail -> whnormalizeModtyTail t reason
leqMod mu1 mu2 ddom dcod reason = do
(mu1, metasMu1) <- runWriterT $ whnormalizeChainModty mu1 reason
(mu2, metasMu2) <- runWriterT $ whnormalizeChainModty mu2 reason
case (metasMu1, metasMu2) of
([], []) -> case (mu1, mu2) of
(ChainModtyKnown kmu1, ChainModtyKnown kmu2) -> do
related <- relKnownModty ModLeq kmu1 kmu2 reason
case related of
Nothing -> return $ Just False
Just maybeBool -> return maybeBool
(_, _) -> return $ Just False
(_ , _ ) -> return $ Nothing
leqDeg deg1 deg2 d reason = do
(deg1, metasDeg1) <- runWriterT $ whnormalizeDegree deg1 d reason
(deg2, metasDeg2) <- runWriterT $ whnormalizeDegree deg2 d reason
case (metasDeg1, deg1, metasDeg2, deg2) of
(_, DegKnown _ i1, _, DegKnown _ i2) -> return $ Just $ i1 <= i2
([], _, [], _) -> return $ Just False
(_ , _, _ , _) -> return Nothing
|
70fbe4369b42f257684380b8f85faa42ac9157ca21462a6371e3f80a22602a40 | andrejbauer/marshall | typecheck.ml | \section{Type checking ( module [ ] ) }
module Make = functor (D : Dyadic.DYADIC) ->
struct
module I = Interval.Make(D)
module S = Syntax.Make(D)
open S
let error = Message.typecheck_error
let check_segment i =
if not (I.forward i) then error "illegal interval"
let check_compact_segment i =
if not (I.proper i) then error "not a compact interval"
(* [type_of ctx e] computes the type of expression [e] in context [ctx]. *)
let rec type_of ctx = function
| Var x ->
(try
List.assoc x ctx
with Not_found -> error ("Unknown variable " ^ string_of_name x))
| RealVar (x, _) ->
error ("Typechecking encountered areal variable " ^ string_of_name x ^
". This should not happen")
| Dyadic _ -> Ty_Real
| Interval _ -> Ty_Real
| Cut (x, i, p1, p2) ->
check_segment i ;
check ((x, Ty_Real)::ctx) Ty_Sigma p1 ;
check ((x, Ty_Real)::ctx) Ty_Sigma p2 ;
Ty_Real
| Binary (_, e1, e2) ->
check ctx Ty_Real e1 ;
check ctx Ty_Real e2 ;
Ty_Real
| Unary (_, e) ->
check ctx Ty_Real e ;
Ty_Real
| Power (e, _) ->
check ctx Ty_Real e ;
Ty_Real
| True -> Ty_Sigma
| False -> Ty_Sigma
| And lst
| Or lst -> List.iter (check ctx Ty_Sigma) lst ; Ty_Sigma
| Less (e1, e2) ->
check ctx Ty_Real e1 ;
check ctx Ty_Real e2 ;
Ty_Sigma
| Exists (x, s, p) ->
check_segment s ;
check ((x,Ty_Real)::ctx) Ty_Sigma p ;
Ty_Sigma
| Forall (x, s, p) ->
check_compact_segment s ;
check ((x,Ty_Real)::ctx) Ty_Sigma p ;
Ty_Sigma
| Let (x, e1, e2) ->
let ty = type_of ctx e1 in
type_of ((x,ty)::ctx) e2
| Tuple lst -> Ty_Tuple (List.map (type_of ctx) lst)
| Proj (e, k) ->
(match type_of ctx e with
| Ty_Tuple lst as ty ->
(match List.nth_opt lst k with
| Some x -> x
| None ->
error ("Expected at least " ^ string_of_int k ^
" components but got " ^ string_of_type ty))
| ty -> error ("Expected a tuple but got " ^ string_of_type ty)
)
| Lambda (x, ty, e) ->
Ty_Arrow (ty, type_of ((x,ty)::ctx) e)
| App (e1, e2) ->
(match type_of ctx e1 with
| Ty_Arrow (ty1, ty2) -> check ctx ty1 e2 ; ty2
| ty -> error ("Expected a function but got " ^ string_of_type ty))
(* Does [e] have type [ty] in context [ctx]? *)
and check ctx ty e =
let ty' = type_of ctx e in
if ty <> ty' then
error (string_of_type ty ^ " expected but got " ^ string_of_type ty')
end;;
| null | https://raw.githubusercontent.com/andrejbauer/marshall/1e630b34b7ae880835aca31fd2b3eda562348b2e/src/typecheck.ml | ocaml | [type_of ctx e] computes the type of expression [e] in context [ctx].
Does [e] have type [ty] in context [ctx]? | \section{Type checking ( module [ ] ) }
(* Type checking: [type_of ctx e] synthesises the type of expression [e]
   in context [ctx]; [check ctx ty e] answers whether [e] has type [ty]. *)
module Make = functor (D : Dyadic.DYADIC) ->
struct
  module I = Interval.Make(D)
  module S = Syntax.Make(D)
  open S

  (* Report a type-checking failure through the shared message facility. *)
  let error = Message.typecheck_error

  (* A cut or quantifier domain must be a forward-oriented interval. *)
  let check_segment i =
    if not (I.forward i) then error "illegal interval"

  (* [Forall] additionally requires a proper (compact) interval. *)
  let check_compact_segment i =
    if not (I.proper i) then error "not a compact interval"

  (* [type_of ctx e] synthesises the type of [e], where [ctx] is an
     association list mapping names to types. *)
  let rec type_of ctx = function
    | Var x ->
        (try
           List.assoc x ctx
         with Not_found -> error ("Unknown variable " ^ string_of_name x))
    | RealVar (x, _) ->
        (* Fixed message text: was "areal variable" (missing space). *)
        error ("Typechecking encountered a real variable " ^ string_of_name x ^
               ". This should not happen")
    | Dyadic _ -> Ty_Real
    | Interval _ -> Ty_Real
    | Cut (x, i, p1, p2) ->
        (* The two cut predicates are Sigma-valued over an extended context. *)
        check_segment i ;
        check ((x, Ty_Real)::ctx) Ty_Sigma p1 ;
        check ((x, Ty_Real)::ctx) Ty_Sigma p2 ;
        Ty_Real
    | Binary (_, e1, e2) ->
        check ctx Ty_Real e1 ;
        check ctx Ty_Real e2 ;
        Ty_Real
    | Unary (_, e) ->
        check ctx Ty_Real e ;
        Ty_Real
    | Power (e, _) ->
        check ctx Ty_Real e ;
        Ty_Real
    | True -> Ty_Sigma
    | False -> Ty_Sigma
    | And lst
    | Or lst -> List.iter (check ctx Ty_Sigma) lst ; Ty_Sigma
    | Less (e1, e2) ->
        check ctx Ty_Real e1 ;
        check ctx Ty_Real e2 ;
        Ty_Sigma
    | Exists (x, s, p) ->
        check_segment s ;
        check ((x,Ty_Real)::ctx) Ty_Sigma p ;
        Ty_Sigma
    | Forall (x, s, p) ->
        (* Universal quantification demands a compact domain. *)
        check_compact_segment s ;
        check ((x,Ty_Real)::ctx) Ty_Sigma p ;
        Ty_Sigma
    | Let (x, e1, e2) ->
        let ty = type_of ctx e1 in
          type_of ((x,ty)::ctx) e2
    | Tuple lst -> Ty_Tuple (List.map (type_of ctx) lst)
    | Proj (e, k) ->
        (match type_of ctx e with
           | Ty_Tuple lst as ty ->
               (match List.nth_opt lst k with
                  | Some x -> x
                  | None ->
                      error ("Expected at least " ^ string_of_int k ^
                             " components but got " ^ string_of_type ty))
           | ty -> error ("Expected a tuple but got " ^ string_of_type ty)
        )
    | Lambda (x, ty, e) ->
        Ty_Arrow (ty, type_of ((x,ty)::ctx) e)
    | App (e1, e2) ->
        (* Synthesise the operator type, then check the operand. *)
        (match type_of ctx e1 with
           | Ty_Arrow (ty1, ty2) -> check ctx ty1 e2 ; ty2
           | ty -> error ("Expected a function but got " ^ string_of_type ty))

  (* [check ctx ty e] verifies that [e] has exactly type [ty]. *)
  and check ctx ty e =
    let ty' = type_of ctx e in
      if ty <> ty' then
        error (string_of_type ty ^ " expected but got " ^ string_of_type ty')
end;;
|
78deed2e8a8f20011f24ed3c9fc7dc87914ea67788cc8ab7079a825bd02c7884 | jabber-at/ejabberd-contrib | mod_post_log.erl | %%%----------------------------------------------------------------------
%%% File : mod_post_log.erl
Author : < >
%%% Purpose : POST user messages to server via HTTP
Created : 02 Aug 2014 by < >
%%%
Based on mod_service_log.erl
%%%----------------------------------------------------------------------
%% mod_post_log: forwards user chat/groupchat messages to an HTTP
%% endpoint via an asynchronous POST.
-module(mod_post_log).
-author('').

-behaviour(gen_mod).

-export([start/2,
         stop/1,
         depends/2,
         mod_opt_type/1,
         log_user_send/1,
         log_user_send/4,
         post_result/1]).

-include("xmpp.hrl").

%% gen_mod callback: ensure inets/httpc is available, then hook
%% outgoing user packets.
start(Host, _Opts) ->
    ok = case inets:start() of
             {error, {already_started, inets}} ->
                 ok;
             ok ->
                 ok
         end,
    ejabberd_hooks:add(user_send_packet, Host,
                       ?MODULE, log_user_send, 50),
    ok.

%% gen_mod callback: remove the hook on module stop.
stop(Host) ->
    ejabberd_hooks:delete(user_send_packet, Host,
                          ?MODULE, log_user_send, 50),
    ok.

depends(_Host, _Opts) ->
    [].

%% Option validators for gen_mod.
mod_opt_type(url) ->
    fun(Val) when is_binary(Val) -> binary_to_list(Val);
       (Val) -> Val
    end;
mod_opt_type(ts_header) -> fun iolist_to_binary/1;
mod_opt_type(from_header) -> fun iolist_to_binary/1;
mod_opt_type(to_header) -> fun iolist_to_binary/1;
mod_opt_type(headers) -> fun(L) when is_list(L) -> L end;
mod_opt_type(content_type) -> fun iolist_to_binary/1;
%% FIX: http_options is read by post_xml/3 but was never declared as a
%% module option, so it could not be configured; validate it like
%% req_options.
mod_opt_type(http_options) -> fun(L) when is_list(L) -> L end;
mod_opt_type(req_options) -> fun(L) when is_list(L) -> L end;
mod_opt_type(_) ->
    [url, ts_header, from_header, to_header, headers,
     content_type, http_options, req_options].

%% TODO: remove log_user_send/4 after 17.02 is released
log_user_send(Packet, C2SState, From, To) ->
    log_user_send({xmpp:set_from_to(Packet, From, To), C2SState}),
    Packet.

%% Hook callback: only chat and groupchat messages are posted; all
%% other stanzas pass through untouched.
log_user_send({#message{type = T} = Packet, _C2SState} = Acc)
  when T == chat; T == groupchat ->
    ok = log_message(Packet),
    Acc;
log_user_send(Acc) ->
    Acc.

%% Skip messages with an empty body; otherwise serialize to XML and POST.
log_message(#message{from = From, to = To, body = Body} = Msg) ->
    case xmpp:get_text(Body) of
        <<"">> ->
            ok;
        _ ->
            XML = fxml:element_to_binary(xmpp:encode(Msg)),
            post_xml(From, To, XML)
    end.

%% Build and fire an asynchronous HTTP POST carrying the message XML,
%% with timestamp/from/to carried in configurable headers.
post_xml(#jid{lserver = LServer} = From, To, Xml) ->
    Ts = to_iso_8601_date(os:timestamp()),
    Body = Xml,
    Url = get_opt(LServer, url),
    TsHeader = get_opt(LServer, ts_header, "X-Message-Timestamp"),
    FromHeader = get_opt(LServer, from_header, "X-Message-From"),
    ToHeader = get_opt(LServer, to_header, "X-Message-To"),
    Headers = [ {TsHeader, Ts},
                {FromHeader, format_jid(From)},
                {ToHeader, format_jid(To)}
                | get_opt(LServer, headers, []) ],
    ContentType = get_opt(LServer, content_type, "text/xml"),
    HttpOptions = get_opt(LServer, http_options, []),
    ReqOptions = get_opt(LServer, req_options, []),
    %% {sync, false} + receiver: the result is delivered to
    %% post_result/1 so message delivery is never blocked on the POST.
    {ok, _ReqId} = httpc:request(post,
                                 {Url, Headers, ContentType, Body},
                                 HttpOptions,
                                 [ {sync, false},
                                   {receiver, {?MODULE, post_result, []}}
                                 | ReqOptions ]),
    ok.

%% Receiver callback for httpc:request/4: report transport errors and
%% any non-2xx response.
post_result({_ReqId, {error, Reason}}) ->
    report_error([ {error, Reason } ]);
post_result({_ReqId, Result}) ->
    {StatusLine, Headers, Body} = Result,
    {_HttpVersion, StatusCode, ReasonPhrase} = StatusLine,
    if StatusCode < 200;
       StatusCode > 299 ->
            ok = report_error([ {status_code, StatusCode},
                                {reason_phrase, ReasonPhrase},
                                {headers, Headers},
                                {body, Body} ]),
            ok;
       true ->
            ok
    end.

get_opt(LServer, Opt) ->
    get_opt(LServer, Opt, undefined).

get_opt(LServer, Opt, Default) ->
    gen_mod:get_module_opt(LServer, ?MODULE, Opt, Default).

report_error(ReportArgs) ->
    ok = error_logger:error_report([ mod_post_log_cannot_post | ReportArgs ]).

format_jid(JID) ->
    binary_to_list(jid:to_string(JID)).

%% Erlang now()-style timestamps are in UTC by definition, and we are
%% assuming ISO 8601 dates should be printed in UTC as well, so no
%% conversion necessary
%%
%% Example:
%%   {1385,388790,334905}
%% -becomes-
%%   2013-11-25 14:13:10.334905Z
-spec to_iso_8601_date(erlang:timestamp()) -> string().
to_iso_8601_date(Timestamp) when is_tuple(Timestamp) ->
    {{Y, Mo, D}, {H, M, S}} = calendar:now_to_universal_time(Timestamp),
    {_, _, US} = Timestamp,
    lists:flatten(io_lib:format("~4.10.0B-~2.10.0B-~2.10.0B ~2.10.0B:~2.10.0B:~2.10.0B.~6.10.0BZ",
                                [Y, Mo, D, H, M, S, US])).
| null | https://raw.githubusercontent.com/jabber-at/ejabberd-contrib/d5eb036b786c822d9fd56f881d27e31688ec6e91/mod_post_log/src/mod_post_log.erl | erlang | ----------------------------------------------------------------------
File : mod_post_log.erl
Purpose : POST user messages to server via HTTP
----------------------------------------------------------------------
conversion necessary
Example:
{1385,388790,334905}
-becomes- | Author : < >
Created : 02 Aug 2014 by < >
Based on mod_service_log.erl
-module(mod_post_log).
-author('').
-behaviour(gen_mod).
-export([start/2,
stop/1,
depends/2,
mod_opt_type/1,
log_user_send/1,
log_user_send/4,
post_result/1]).
-include("xmpp.hrl").
start(Host, _Opts) ->
ok = case inets:start() of
{error, {already_started, inets}} ->
ok;
ok ->
ok
end,
ejabberd_hooks:add(user_send_packet, Host,
?MODULE, log_user_send, 50),
ok.
stop(Host) ->
ejabberd_hooks:delete(user_send_packet, Host,
?MODULE, log_user_send, 50),
ok.
depends(_Host, _Opts) ->
[].
mod_opt_type(url) ->
fun(Val) when is_binary(Val) -> binary_to_list(Val);
(Val) -> Val
end;
mod_opt_type(ts_header) -> fun iolist_to_binary/1;
mod_opt_type(from_header) -> fun iolist_to_binary/1;
mod_opt_type(to_header) -> fun iolist_to_binary/1;
mod_opt_type(headers) -> fun(L) when is_list(L) -> L end;
mod_opt_type(content_type) -> fun iolist_to_binary/1;
mod_opt_type(req_options) -> fun(L) when is_list(L) -> L end;
mod_opt_type(_) ->
[url, ts_header, from_header, to_header, headers,
content_type, req_options].
TODO : remove log_user_send/4 after 17.02 is released
log_user_send(Packet, C2SState, From, To) ->
log_user_send({xmpp:set_from_to(Packet, From, To), C2SState}),
Packet.
log_user_send({#message{type = T} = Packet, _C2SState} = Acc)
when T == chat; T == groupchat ->
ok = log_message(Packet),
Acc;
log_user_send(Acc) ->
Acc.
log_message(#message{from = From, to = To, body = Body} = Msg) ->
case xmpp:get_text(Body) of
<<"">> ->
ok;
_ ->
XML = fxml:element_to_binary(xmpp:encode(Msg)),
post_xml(From, To, XML)
end.
post_xml(#jid{lserver = LServer} = From, To, Xml) ->
Ts = to_iso_8601_date(os:timestamp()),
Body = Xml,
Url = get_opt(LServer, url),
TsHeader = get_opt(LServer, ts_header, "X-Message-Timestamp"),
FromHeader = get_opt(LServer, from_header, "X-Message-From"),
ToHeader = get_opt(LServer, to_header, "X-Message-To"),
Headers = [ {TsHeader, Ts},
{FromHeader, format_jid(From)},
{ToHeader, format_jid(To)}
| get_opt(LServer, headers, []) ],
ContentType = get_opt(LServer, content_type, "text/xml"),
HttpOptions = get_opt(LServer, http_options, []),
ReqOptions = get_opt(LServer, req_options, []),
{ok, _ReqId} = httpc:request(post,
{Url, Headers, ContentType, Body},
HttpOptions,
[ {sync, false},
{receiver, {?MODULE, post_result, []}}
| ReqOptions ]),
ok.
post_result({_ReqId, {error, Reason}}) ->
report_error([ {error, Reason } ]);
post_result({_ReqId, Result}) ->
{StatusLine, Headers, Body} = Result,
{_HttpVersion, StatusCode, ReasonPhrase} = StatusLine,
if StatusCode < 200;
StatusCode > 299 ->
ok = report_error([ {status_code, StatusCode},
{reason_phrase, ReasonPhrase},
{headers, Headers},
{body, Body} ]),
ok;
true ->
ok
end.
get_opt(LServer, Opt) ->
get_opt(LServer, Opt, undefined).
get_opt(LServer, Opt, Default) ->
gen_mod:get_module_opt(LServer, ?MODULE, Opt, Default).
report_error(ReportArgs) ->
ok = error_logger:error_report([ mod_post_log_cannot_post | ReportArgs ]).
format_jid(JID) ->
binary_to_list(jid:to_string(JID)).
Erlang now()-style timestamps are in UTC by definition , and we are
assuming ISO 8601 dates should be printed in UTC as well , so no
2013 - 11 - 25 14:13:10.334905Z
-spec to_iso_8601_date(erlang:timestamp()) -> string().
to_iso_8601_date(Timestamp) when is_tuple(Timestamp) ->
{{Y, Mo, D}, {H, M, S}} = calendar:now_to_universal_time(Timestamp),
{_, _, US} = Timestamp,
lists:flatten(io_lib:format("~4.10.0B-~2.10.0B-~2.10.0B ~2.10.0B:~2.10.0B:~2.10.0B.~6.10.0BZ",
[Y, Mo, D, H, M, S, US])).
|
236f5e15f2e16a88c124a2177dc3917b5f464cdbdb43bf8dbac7accf8d27480a | geremih/xcljb | ir.clj | (ns xcljb.xmlgen.ir
(:require [clojure.string :as string]))
(defn beautify
  "Convert an XCB protocol identifier `name` into the spelling used by
  the generated Clojure code, according to the context keyword `type`
  (:ns-name, :arg, :enum, :enum-item, :fn-name, :request, :reply,
  :event, :error, :type, :read-type or :->type)."
  [name type]
  (letfn [(underscores->dashes [s] (string/replace s #"_" "-"))
          (camel->dashed [s] (string/replace s #"([a-z])([A-Z])" "$1-$2"))
          (kebab [s] (-> s underscores->dashes camel->dashed string/lower-case))]
    (case type
      :ns-name   (underscores->dashes name)
      :arg       (kebab name)
      :enum      (-> name camel->dashed string/upper-case)
      :enum-item (kebab name)
      :fn-name   (-> name
                     camel->dashed
                     ;; Split trailing acronyms too,
                     ;; e.g. "GetXIDList" -> "Get-XID-List".
                     (string/replace #"([A-Z])([A-Z][a-z])" "$1-$2")
                     (string/lower-case))
      :request   (str name "Request")
      :reply     (str name "Reply")
      :event     (str name "Event")
      :error     (str name "Error")
      ;; All-caps names (acronym types) get capitalized; mixed-case
      ;; names are kept verbatim.
      :type      (if (= name (string/upper-case name))
                   (string/capitalize name)
                   name)
      :read-type (str "read-" name)
      :->type    (str "->" name))))
;; A type reference qualified by the XCB namespace (header) it comes from.
(defrecord QualifiedType [ns name])

(defn- parse-type
  "Resolve a QualifiedType to the symbol naming its definition in the
  generated <ns>-types namespace."
  [type]
  (let [{:keys [ns name]} type]
    (symbol (str "xcljb.gen." (beautify ns :ns-name) "-types") name)))

(defn- type->read-type
  "Symbol of the generated reader function (read-<Type>) for a
  QualifiedType, found in the <ns>-internal namespace."
  [type]
  (let [{:keys [ns name]} type]
    (symbol (str "xcljb.gen." (beautify ns :ns-name) "-internal")
            (-> name (beautify :type) (beautify :read-type)))))

(defn- name->type
  "Symbol naming `name` as a type in the current context's generated
  types namespace."
  [context name]
  (symbol (str "xcljb.gen." (beautify (:header context) :ns-name) "-types")
          (beautify name :type)))

(defn- name->->type
  "Symbol of the positional record factory (-><Type>) for `name` in the
  current context's generated types namespace."
  [context name]
  (symbol (str "xcljb.gen." (beautify (:header context) :ns-name) "-types")
          (-> name (beautify :type) (beautify :->type))))
(defn- gen-read-fields
  "Wrap `body` in a syntax-quoted let that binds, for each field, the
  name and reader expression supplied by its ReadableType implementation."
  [fields & body]
  `(let ~(reduce #(conj %1 (.gen-read-type-name %2) (.gen-read-type %2))
                 []
                 fields)
     ~@body))

(defn- extension-name
  "The X11 extension xname recorded in the parse context, or nil for the
  core protocol."
  [context]
  (:extension-xname context))
;; Emits the public request-sending function for a Request element.
(defprotocol RequestFn
  (gen-request-fn [this]))

;; Emits wire-size computation forms.
(defprotocol Measurable
  (gen-sizeof [this])
  (gen-read-sizeof [this]))

;; Emits the data definition (def/record form) for a protocol element.
(defprotocol Type
  (gen-type [this]))

;; Emits the runtime representation of a length/value expression.
(defprotocol Expr
  (gen-expr [this]))

;; Emits the defmethod that parses an incoming reply, event or error.
(defprotocol ReadableFn
  (gen-read-fn [this]))

;; Emits reader code (and its binding name) for a field type.
(defprotocol ReadableType
  (gen-read-type [this])
  (gen-read-type-name [this]))
;;; Expressions.

;; Binary arithmetic/bitwise operation on two sub-expressions.
(defrecord Op [op expr1 expr2]
  Expr
  (gen-expr [this]
    ;; Map the XML spelling of the operator onto the Clojure function.
    (let [op (case (:op this)
               "+" 'clojure.core/+
               "-" 'clojure.core/-
               "*" 'clojure.core/*
               "/" 'clojure.core//
               "&" 'clojure.core/bit-and
               "<<" 'clojure.core/bit-shift-left)]
      `(xcljb.gen-common/->Op ~op
                              ~(gen-expr (:expr1 this))
                              ~(gen-expr (:expr2 this))))))

;; Unary operation (currently only bitwise complement "~").
(defrecord Unop [op expr]
  Expr
  (gen-expr [this]
    (let [op (case (:op this)
               "~" 'clojure.core/bit-not)]
      `(xcljb.gen-common/->Unop ~op ~(gen-expr (:expr this))))))

;; Reference to the value of a named sibling field.
(defrecord Fieldref [ref]
  Expr
  (gen-expr [this]
    `(xcljb.gen-common/->Fieldref ~(beautify (:ref this) :arg))))

;; Population count (number of set bits) of a sub-expression.
(defrecord Popcount [expr]
  Expr
  (gen-expr [this]
    `(xcljb.gen-common/->Popcount ~(gen-expr (:expr this)))))

;; Sum of the elements of a referenced list field.
(defrecord Sumof [ref]
  Expr
  (gen-expr [this]
    `(xcljb.gen-common/->Sumof ~(beautify (:ref this) :arg))))

;; Literal constant value.
(defrecord Value [value]
  Expr
  (gen-expr [this]
    `(xcljb.gen-common/->Value ~(:value this))))
;;; Primitives.

;; A named primitive wire type (e.g. CARD8); emits a def for it.
(defrecord Primitive [name type]
  Type
  (gen-type [this]
    `(def ~(symbol (:name this))
       (xcljb.gen-common/->Primitive ~(:type this)))))

;;; Fields.

;; Alignment padding of a fixed number of bytes.
(defrecord Pad [bytes]
  Type
  (gen-type [this]
    `(xcljb.gen-common/->Pad ~(:bytes this))))

;; Boolean flag stored in `size` bytes.
(defrecord BoolField [name size]
  Type
  (gen-type [this]
    `(xcljb.gen-common/->BoolField ~(beautify (:name this) :arg)
                                   ~(:size this))))

;; Character string field; `expr` (optional) gives its length.
(defrecord StringField [name expr]
  Type
  (gen-type [this]
    `(xcljb.gen-common/->StringField ~(beautify (:name this) :arg)
                                     ~(when-let [expr (:expr this)]
                                        (gen-expr expr)))))

;; Plain typed field; the enum/altenum/mask metadata is carried but not
;; used when emitting the definition.
(defrecord Field [name type enum altenum mask]
  Type
  (gen-type [this]
    `(xcljb.gen-common/->Field ~(beautify (:name this) :arg)
                               ~(parse-type (:type this)))))

;; List of booleans, each `size` bytes, with optional length expression.
(defrecord BoolList [name size expr]
  Type
  (gen-type [this]
    `(xcljb.gen-common/->BoolList ~(beautify (:name this) :arg)
                                  ~(:size this)
                                  ~(when-let [expr (:expr this)]
                                     (gen-expr expr)))))

;; List of typed elements with optional length expression.
(defrecord List [name type enum altenum mask expr]
  Type
  (gen-type [this]
    `(xcljb.gen-common/->List ~(beautify (:name this) :arg)
                              ~(parse-type (:type this))
                              ~(when-let [expr (:expr this)]
                                 (gen-expr expr)))))

;; Valueparam.

;; Legacy value-list parameter governed by a bitmask of `mask-type`.
(defrecord Valueparam [name mask-type]
  Type
  (gen-type [this]
    `(xcljb.gen-common/->Valueparam ~(beautify (:name this) :arg)
                                    ~(parse-type (:mask-type this)))))

;;; Enums.

;; A single enum item: a named constant (value or bit position).
(defrecord Item [name value])

;; Avoid naming conflict with java.lang.Enum.
(defrecord -Enum [name content])
(defn- instance-of?
  "True when `inst` is an instance of at least one class in `classes`,
  nil otherwise (same truthiness contract as `some`)."
  [inst classes]
  (loop [cs (seq classes)]
    (when cs
      (if (instance? (first cs) inst)
        true
        (recur (next cs))))))
(defn- gen-args
  "Beautified argument names for the content elements that become
  parameters of the generated request function; pads and other
  non-parameter elements are filtered out."
  [content]
  (let [fs (filter #(instance-of? % [BoolField
                                     StringField
                                     Field
                                     List
                                     Valueparam])
                   content)]
    (map #(-> % (:name) (beautify :arg)) fs)))
;; Struct.

;; Composite structure: emits a def bound to a Struct of its members.
(defrecord Struct [name content]
  Type
  (gen-type [this]
    `(def ~(symbol (:name this))
       (xcljb.gen-common/->Struct [~@(map gen-type (:content this))]))))

;; Type alias: emits a def bound directly to the aliased definition.
(defrecord Typedef [name type]
  Type
  (gen-type [this]
    `(def ~(symbol (:name this))
       ~(parse-type (:type this)))))

;; Request, Reply, Event, Error.

;; Protocol request: emits both the public sending fn (RequestFn) and
;; the wire-format definition (Type).
(defrecord Request [context name opcode combine-adjacent content]
  RequestFn
  (gen-request-fn [this]
    (let [s-name (-> this (:name) (beautify :fn-name) (symbol))
          s-spec (name->type (:context this)
                             (beautify (:name this) :request))
          args (-> this (:content) (gen-args))
          k-args (vec (map keyword args))
          s-args (vec (map symbol args))]
      ;; Extension requests are routed through conn-ext with the
      ;; extension xname; core-protocol requests use conn-internal.
      `(defn ~s-name [~'conn ~@s-args]
         (let [~'request (zipmap ~k-args ~s-args)]
           ~(if-let [ext-name (extension-name (:context this))]
              `(xcljb.conn-ext/send ~'conn ~ext-name ~s-spec ~'request)
              `(xcljb.conn-internal/send ~'conn ~s-spec ~'request))))))
  Type
  (gen-type [this]
    `(def ~(-> this (:name) (beautify :request) (symbol))
       (xcljb.gen-common/->Request ~(extension-name (:context this))
                                   ~(:opcode this)
                                   [~@(map gen-type (:content this))]))))
;; Reply to a request: wire definition plus the read-reply defmethod
;; dispatched on [extension-xname request-opcode].
(defrecord Reply [context name request-opcode content]
  Type
  (gen-type [this]
    `(def ~(-> this (:name) (beautify :reply) (symbol))
       (xcljb.gen-common/->Reply [~@(map gen-type (:content this))])))
  ReadableFn
  (gen-read-fn [this]
    (let [ext-name (extension-name (:context this))
          opcode (:request-opcode this)
          s-reply (name->type (:context this)
                              (-> this (:name) (beautify :reply)))]
      `(defmethod xcljb.common/read-reply [~ext-name ~opcode] [~'_ ~'_ ~'reply-buf]
         (xcljb.gen-common/deserialize ~s-reply ~'reply-buf nil)))))

;; Reference to an event/error owned by another extension.
(defrecord QualifiedRef [ext-name number])

;; Asynchronous event: wire definition plus the read-event defmethod
;; dispatched on [extension-xname event-number].
(defrecord Event [context name number no-seq-number content]
  Type
  (gen-type [this]
    `(def ~(-> this (:name) (beautify :event) (symbol))
       (xcljb.gen-common/->Event ~(extension-name (:context this))
                                 ~(:name this)
                                 ~(:number this)
                                 ~(:no-seq-number this)
                                 [~@(map gen-type (:content this))])))
  ReadableFn
  (gen-read-fn [this]
    (let [ext-name (extension-name (:context this))
          number (:number this)
          s-event (name->type (:context this)
                              (-> this (:name) (beautify :event)))]
      `(defmethod xcljb.common/read-event [~ext-name ~number] [~'_ ~'_ ~'event-buf]
         (xcljb.gen-common/deserialize ~s-event ~'event-buf nil)))))

;; Event that reuses another event's wire layout under a new number.
(defrecord EventCopy [context name number ref]
  Type
  (gen-type [this]
    `(def ~(-> this (:name) (beautify :event) (symbol))
       (xcljb.gen-common/->EventCopy ~(extension-name (:context this))
                                     ~(:name this)
                                     ~(:number this)
                                     ~(:ref this))))
  ReadableFn
  (gen-read-fn [this]
    (let [ext-name (extension-name (:context this))
          number (:number this)
          s-event (name->type (:context this)
                              (-> this (:name) (beautify :event)))]
      `(defmethod xcljb.common/read-event [~ext-name ~number] [~'_ ~'_ ~'event-buf]
         (xcljb.gen-common/deserialize ~s-event ~'event-buf nil)))))

;; Avoid naming conflict with java.lang.Error.
(defrecord -Error [context name number content]
  Type
  (gen-type [this]
    `(def ~(-> this (:name) (beautify :error) (symbol))
       (xcljb.gen-common/->Error' ~(extension-name (:context this))
                                  ~(:name this)
                                  ~(:number this)
                                  [~@(map gen-type (:content this))])))
  ReadableFn
  (gen-read-fn [this]
    (let [ext-name (extension-name (:context this))
          number (:number this)
          s-error (name->type (:context this)
                              (-> this (:name) (beautify :error)))]
      `(defmethod xcljb.common/read-error [~ext-name ~number] [~'_ ~'_ ~'error-buf]
         (xcljb.gen-common/deserialize ~s-error ~'error-buf nil)))))

;; Error that reuses another error's wire layout under a new number.
(defrecord ErrorCopy [context name number ref]
  Type
  (gen-type [this]
    `(def ~(-> this (:name) (beautify :error) (symbol))
       (xcljb.gen-common/->ErrorCopy ~(extension-name (:context this))
                                     ~(:name this)
                                     ~(:number this)
                                     ~(:ref this))))
  ReadableFn
  (gen-read-fn [this]
    (let [ext-name (extension-name (:context this))
          number (:number this)
          s-error (name->type (:context this)
                              (-> this (:name) (beautify :error)))]
      `(defmethod xcljb.common/read-error [~ext-name ~number] [~'_ ~'_ ~'error-buf]
         (xcljb.gen-common/deserialize ~s-error ~'error-buf nil)))))

;; Xcb.

;; Top-level metadata parsed from an <xcb> root element.
(defrecord Xcb [header extension-name extension-xname extension-multiword major-version minor-version])
| null | https://raw.githubusercontent.com/geremih/xcljb/59e9ff795bf00595a3d46231a7bb4ec976852396/src/xcljb/xmlgen/ir.clj | clojure | Expressions.
Primitives.
Valueparam.
Struct.
Request, Reply, Event, Error.
Avoid naming conflict with java.lang.Error.
Xcb. | (ns xcljb.xmlgen.ir
(:require [clojure.string :as string]))
(defn beautify [name type]
(case type
:ns-name (string/replace name #"_" "-")
:arg (-> name
(string/replace #"_" "-")
(string/replace #"([a-z])([A-Z])" "$1-$2")
(string/lower-case))
:enum (-> name
(string/replace #"([a-z])([A-Z])" "$1-$2")
(string/upper-case))
:enum-item (-> name
(string/replace #"_" "-")
(string/replace #"([a-z])([A-Z])" "$1-$2")
(string/lower-case))
:fn-name (-> name
(string/replace #"([a-z])([A-Z])" "$1-$2")
e.g. " GetXIDList " - > " Get - XID - List " .
(string/replace #"([A-Z])([A-Z][a-z])" "$1-$2")
(string/lower-case))
:request (str name "Request")
:reply (str name "Reply")
:event (str name"Event")
:error (str name "Error")
:type (if (= name (string/upper-case name))
(string/capitalize name)
name)
:read-type (str "read-" name)
:->type (str "->" name)))
(defrecord QualifiedType [ns name])
(defn- parse-type [type]
(let [{:keys [ns name]} type]
(symbol (str "xcljb.gen." (beautify ns :ns-name) "-types") name)))
(defn- type->read-type [type]
(let [{:keys [ns name]} type]
(symbol (str "xcljb.gen." (beautify ns :ns-name) "-internal")
(-> name (beautify :type) (beautify :read-type)))))
(defn- name->type [context name]
(symbol (str "xcljb.gen." (beautify (:header context) :ns-name) "-types")
(beautify name :type)))
(defn- name->->type [context name]
(symbol (str "xcljb.gen." (beautify (:header context) :ns-name) "-types")
(-> name (beautify :type) (beautify :->type))))
(defn- gen-read-fields [fields & body]
`(let ~(reduce #(conj %1 (.gen-read-type-name %2) (.gen-read-type %2))
[]
fields)
~@body))
(defn- extension-name [context]
(:extension-xname context))
(defprotocol RequestFn
(gen-request-fn [this]))
(defprotocol Measurable
(gen-sizeof [this])
(gen-read-sizeof [this]))
(defprotocol Type
(gen-type [this]))
(defprotocol Expr
(gen-expr [this]))
(defprotocol ReadableFn
(gen-read-fn [this]))
(defprotocol ReadableType
(gen-read-type [this])
(gen-read-type-name [this]))
(defrecord Op [op expr1 expr2]
Expr
(gen-expr [this]
(let [op (case (:op this)
"+" 'clojure.core/+
"-" 'clojure.core/-
"*" 'clojure.core/*
"/" 'clojure.core//
"&" 'clojure.core/bit-and
"<<" 'clojure.core/bit-shift-left)]
`(xcljb.gen-common/->Op ~op
~(gen-expr (:expr1 this))
~(gen-expr (:expr2 this))))))
(defrecord Unop [op expr]
Expr
(gen-expr [this]
(let [op (case (:op this)
"~" 'clojure.core/bit-not)]
`(xcljb.gen-common/->Unop ~op ~(gen-expr (:expr this))))))
(defrecord Fieldref [ref]
Expr
(gen-expr [this]
`(xcljb.gen-common/->Fieldref ~(beautify (:ref this) :arg))))
(defrecord Popcount [expr]
Expr
(gen-expr [this]
`(xcljb.gen-common/->Popcount ~(gen-expr (:expr this)))))
(defrecord Sumof [ref]
Expr
(gen-expr [this]
`(xcljb.gen-common/->Sumof ~(beautify (:ref this) :arg))))
(defrecord Value [value]
Expr
(gen-expr [this]
`(xcljb.gen-common/->Value ~(:value this))))
(defrecord Primitive [name type]
Type
(gen-type [this]
`(def ~(symbol (:name this))
(xcljb.gen-common/->Primitive ~(:type this)))))
Fields .
(defrecord Pad [bytes]
Type
(gen-type [this]
`(xcljb.gen-common/->Pad ~(:bytes this))))
(defrecord BoolField [name size]
Type
(gen-type [this]
`(xcljb.gen-common/->BoolField ~(beautify (:name this) :arg)
~(:size this))))
(defrecord StringField [name expr]
Type
(gen-type [this]
`(xcljb.gen-common/->StringField ~(beautify (:name this) :arg)
~(when-let [expr (:expr this)]
(gen-expr expr)))))
(defrecord Field [name type enum altenum mask]
Type
(gen-type [this]
`(xcljb.gen-common/->Field ~(beautify (:name this) :arg)
~(parse-type (:type this)))))
(defrecord BoolList [name size expr]
Type
(gen-type [this]
`(xcljb.gen-common/->BoolList ~(beautify (:name this) :arg)
~(:size this)
~(when-let [expr (:expr this)]
(gen-expr expr)))))
(defrecord List [name type enum altenum mask expr]
Type
(gen-type [this]
`(xcljb.gen-common/->List ~(beautify (:name this) :arg)
~(parse-type (:type this))
~(when-let [expr (:expr this)]
(gen-expr expr)))))
(defrecord Valueparam [name mask-type]
Type
(gen-type [this]
`(xcljb.gen-common/->Valueparam ~(beautify (:name this) :arg)
~(parse-type (:mask-type this)))))
and .
(defrecord Item [name value])
Avoid naming conflict with java.lang . .
(defrecord -Enum [name content])
(defn- instance-of? [inst classes]
(some #(instance? % inst) classes))
(defn- gen-args [content]
(let [fs (filter #(instance-of? % [BoolField
StringField
Field
List
Valueparam])
content)]
(map #(-> % (:name) (beautify :arg)) fs)))
(defrecord Struct [name content]
Type
(gen-type [this]
`(def ~(symbol (:name this))
(xcljb.gen-common/->Struct [~@(map gen-type (:content this))]))))
(defrecord Typedef [name type]
Type
(gen-type [this]
`(def ~(symbol (:name this))
~(parse-type (:type this)))))
(defrecord Request [context name opcode combine-adjacent content]
RequestFn
(gen-request-fn [this]
(let [s-name (-> this (:name) (beautify :fn-name) (symbol))
s-spec (name->type (:context this)
(beautify (:name this) :request))
args (-> this (:content) (gen-args))
k-args (vec (map keyword args))
s-args (vec (map symbol args))]
`(defn ~s-name [~'conn ~@s-args]
(let [~'request (zipmap ~k-args ~s-args)]
~(if-let [ext-name (extension-name (:context this))]
`(xcljb.conn-ext/send ~'conn ~ext-name ~s-spec ~'request)
`(xcljb.conn-internal/send ~'conn ~s-spec ~'request))))))
Type
(gen-type [this]
`(def ~(-> this (:name) (beautify :request) (symbol))
(xcljb.gen-common/->Request ~(extension-name (:context this))
~(:opcode this)
[~@(map gen-type (:content this))]))))
(defrecord Reply [context name request-opcode content]
Type
(gen-type [this]
`(def ~(-> this (:name) (beautify :reply) (symbol))
(xcljb.gen-common/->Reply [~@(map gen-type (:content this))])))
ReadableFn
(gen-read-fn [this]
(let [ext-name (extension-name (:context this))
opcode (:request-opcode this)
s-reply (name->type (:context this)
(-> this (:name) (beautify :reply)))]
`(defmethod xcljb.common/read-reply [~ext-name ~opcode] [~'_ ~'_ ~'reply-buf]
(xcljb.gen-common/deserialize ~s-reply ~'reply-buf nil)))))
(defrecord QualifiedRef [ext-name number])
(defrecord Event [context name number no-seq-number content]
Type
(gen-type [this]
`(def ~(-> this (:name) (beautify :event) (symbol))
(xcljb.gen-common/->Event ~(extension-name (:context this))
~(:name this)
~(:number this)
~(:no-seq-number this)
[~@(map gen-type (:content this))])))
ReadableFn
(gen-read-fn [this]
(let [ext-name (extension-name (:context this))
number (:number this)
s-event (name->type (:context this)
(-> this (:name) (beautify :event)))]
`(defmethod xcljb.common/read-event [~ext-name ~number] [~'_ ~'_ ~'event-buf]
(xcljb.gen-common/deserialize ~s-event ~'event-buf nil)))))
(defrecord EventCopy [context name number ref]
Type
(gen-type [this]
`(def ~(-> this (:name) (beautify :event) (symbol))
(xcljb.gen-common/->EventCopy ~(extension-name (:context this))
~(:name this)
~(:number this)
~(:ref this))))
ReadableFn
(gen-read-fn [this]
(let [ext-name (extension-name (:context this))
number (:number this)
s-event (name->type (:context this)
(-> this (:name) (beautify :event)))]
`(defmethod xcljb.common/read-event [~ext-name ~number] [~'_ ~'_ ~'event-buf]
(xcljb.gen-common/deserialize ~s-event ~'event-buf nil)))))
(defrecord -Error [context name number content]
Type
(gen-type [this]
`(def ~(-> this (:name) (beautify :error) (symbol))
(xcljb.gen-common/->Error' ~(extension-name (:context this))
~(:name this)
~(:number this)
[~@(map gen-type (:content this))])))
ReadableFn
(gen-read-fn [this]
(let [ext-name (extension-name (:context this))
number (:number this)
s-error (name->type (:context this)
(-> this (:name) (beautify :error)))]
`(defmethod xcljb.common/read-error [~ext-name ~number] [~'_ ~'_ ~'error-buf]
(xcljb.gen-common/deserialize ~s-error ~'error-buf nil)))))
(defrecord ErrorCopy [context name number ref]
Type
(gen-type [this]
`(def ~(-> this (:name) (beautify :error) (symbol))
(xcljb.gen-common/->ErrorCopy ~(extension-name (:context this))
~(:name this)
~(:number this)
~(:ref this))))
ReadableFn
(gen-read-fn [this]
(let [ext-name (extension-name (:context this))
number (:number this)
s-error (name->type (:context this)
(-> this (:name) (beautify :error)))]
`(defmethod xcljb.common/read-error [~ext-name ~number] [~'_ ~'_ ~'error-buf]
(xcljb.gen-common/deserialize ~s-error ~'error-buf nil)))))
(defrecord Xcb [header extension-name extension-xname extension-multiword major-version minor-version])
|
12bf3156f8c65e69a58fe34b0f66ca9449be8264d03e8972e5c86c5452bf4582 | Deep-Symmetry/dysentery | vcdj.clj | (ns dysentery.vcdj
"Provides the ability to create a virtual CDJ device that can lurk
on a Pro DJ Link network and receive packets sent to players, so
details about the other players can be monitored."
{:author "James Elliott"}
(:require [clojure.math.numeric-tower :as math]
[dysentery.util :as util]
[taoensso.timbre :as timbre]
[dysentery.finder :as finder])
(:import [java.net InetAddress DatagramPacket DatagramSocket NetworkInterface]))
(def incoming-port
  "The UDP port on which player unicast packets are received."
  50002)

;; Keys used elsewhere in this namespace, beyond the three initialized
;; here: :status-sender, :packet, :data, :device-data, :device-name and
;; :destination.
(defonce ^{:private true
           :doc "Holds the persistent server socket, and the futures
  that process incoming packets and keep our presence active on the
  network."}
  state (atom {:socket nil
               :watcher nil
               :keep-alive nil}))
(defn stop-sending-status
  "Shut down the thread which is sending status packets to all players
  on the network."
  []
  ;; Cancel the sender future (when present) and clear its slot in the
  ;; state atom; cancellation failures are logged, not propagated.
  (swap! state update :status-sender #(when %
                                        (try (future-cancel %)
                                             (catch Exception e
                                               (timbre/warn e "Problem stopping DJ-Link player status sender.")))
                                        nil)))
(defn shut-down
  "Close the UDP server socket and terminate the packet processing
  thread, if they are active."
  []
  ;; Stop the status sender first, then close/cancel each resource held
  ;; in the state atom. Each cleanup failure is logged and the slot is
  ;; still cleared to nil so the teardown always completes.
  (stop-sending-status)
  (swap! state (fn [current]
                 (-> current
                     (update :socket #(when %
                                        (try (.close %)
                                             (catch Exception e
                                               (timbre/warn e "Problem closing DJ-Link player socket.")))
                                        nil))
                     (update :watcher #(when %
                                         (try (future-cancel %)
                                              (catch Exception e
                                                (timbre/warn e "Problem stopping DJ-Link player receiver.")))
                                         nil))
                     (update :keep-alive #(when %
                                            (try (future-cancel %)
                                                 (catch Exception e
                                                   (timbre/warn e "Problem stopping DJ-Link player keep-alive.")))
                                            nil)))))
  nil)
(defn- receive
  "Block until a UDP message is received on the given DatagramSocket, and
  return the payload packet."
  [^DatagramSocket socket]
  (let [buffer (byte-array 512)
        packet (DatagramPacket. buffer 512)]
    (try (.receive socket packet)
         packet
         ;; Any receive failure is treated as fatal for the virtual CDJ:
         ;; log, tear everything down, and return nil to the caller.
         (catch Exception e
           (timbre/warn e "Problem reading from DJ Link player socket, shutting down.")
           (shut-down)))))
(defonce ^{:private true
           :doc "Holds a set of functions to call whenever a packet
  has been received on the incoming status port. The function will be
  called with two arguments, the device number found in the packet,
  and the vector of unsigned byte values corresponding to the packet
  data."}
  packet-listeners (atom #{}))

(defn- process-packet
  "React to a packet that was sent to our player port."
  [packet data]
  ;; For now just stash the most recent packet and data into our state.
  (swap! state assoc :packet packet :data data)
  ;; Byte 33 of the packet carries the sending device number; index the
  ;; latest data under it and fan out to any registered listeners.
  (let [device-number (get data 33)]
    (swap! state assoc-in [:device-data device-number] data)
    (when (seq @packet-listeners)
      (doseq [listener @packet-listeners]
        (try
          (listener device-number data)
          ;; A misbehaving listener must never kill the receive loop.
          (catch Throwable t
            (timbre/warn t "Problem calling device packet listener")))))))
(defn add-packet-listener
  "Registers a function to be called whenever a packet is sent to the
  incoming status port. The function will be called with two arguments:
  the device number found in the packet, and the vector of unsigned
  byte values corresponding to the packet data."
  [listener]
  ;; Listeners live in a set, so registering the same function twice is a no-op.
  (swap! packet-listeners (fn [listeners] (conj listeners listener))))
(defn remove-packet-listener
  "Stops calling a packet listener function that was registered with
  [[add-packet-listener]]."
  [listener]
  ;; Removing a listener that was never registered is harmless (set disj).
  (swap! packet-listeners (fn [listeners] (disj listeners listener))))
(def keep-alive-interval
  "How often, in milliseconds, we should send keep-alive packets to
  maintain our presence on the network."
  1500)

(def status-interval
  "How often, in milliseconds, should we send status packets to the
  other devices on the network."
  200)

(def header-bytes
  "The constant bytes which always form the start of a packet that we
  send."
  ;; Reads as the ASCII characters "Qspt1WmJOL".
  [0x51 0x73 0x70 0x74 0x31 0x57 0x6d 0x4a 0x4f 0x4c])
(defn- send-packet
  "Create and send a packet with the specified `header-type` value at
  byte 10, and specified payload bytes following the device name."
  [header-type payload]
  ;; Layout: the 10 constant header bytes, the type byte, a zero byte, our
  ;; device name bytes, then the payload.
  (let [packet   (byte-array (concat header-bytes [header-type 00] (:device-name @state) payload))
        ;; Sent to the stored broadcast address on the announce port.
        datagram (DatagramPacket. packet (count packet) (:destination @state) finder/announce-port)]
    (.send (:socket @state) datagram)))
(defn send-direct-packet
  "Create and send a packet to port 50001 of the specified device, with
  the specified `header-type` value at byte 10, and specified payload
  bytes following our device name. `device` can either be a device
  number to be looked up, or an actual device details map. Packets can
  be sent to a different port (e.g. 50002 for status packets) by
  specifying the port number as an optional fourth argument."
  ([device header-type payload]
   (send-direct-packet device header-type payload 50001))
  ([device header-type payload port]
   ;; Resolve a numeric device argument into its details map; throw if unknown.
   (if-let [device (if (number? device) (finder/device-given-number device) device)]
     (let [data     (vec (concat header-bytes [header-type] (:device-name @state) payload))
           packet   (byte-array data)
           datagram (DatagramPacket. packet (count packet) (:address device) port)]
       (.send (:socket @state) datagram)
       ;; Status-port traffic is also fed to the view, so our own packets show up there.
       (when (= port 50002)
         (require 'dysentery.view)  ;; loaded at runtime via require/resolve rather than a top-level require
         ((resolve 'dysentery.view/handle-device-packet) port 0 data)))
     (throw (ex-info (str "No device found with number " device) {})))))
(defn set-player-sync
  "Turn the specified player's sync mode on or off."
  [device-number sync?]
  (let [us        (:player-number @state)
        sync-byte (if sync? 0x10 0x20)  ;; 0x10 requests sync on, 0x20 requests sync off
        payload   [0x01 0x00 us 0x00 0x08 0x00 0x00 0x00 us 0x00 0x00 0x00 sync-byte]]
    ;; Packet type 0x2a carries sync/master commands.
    (send-direct-packet device-number 0x2a payload)))
(defn appoint-master
  "Tell the specified player to take over the tempo master role."
  [device-number]
  (let [us      (:player-number @state)
        ;; Same packet type (0x2a) as set-player-sync, but with a trailing 1
        ;; in place of the sync flag byte.
        payload [0x01 0x00 us 0x00 0x08 0x00 0x00 0x00 us 0x00 0x00 0x00 01]]
    (send-direct-packet device-number 0x2a payload)))
(defn send-fader-start
  "Send a message which will start or stop a set of players. The
  arguments are sets of player numbers to start and stop; if you try
  to both start and stop the same player, only the stop will be sent."
  [start stop]
  (let [us       (:player-number @state)
        ;; One command byte per player 1-4: 0x01 = stop, 0x00 = start,
        ;; 0x02 = leave unaffected. Stop takes precedence over start.
        commands (for [player (map inc (range 4))]
                   (if (stop player)
                     0x01
                     (if (start player)
                       0x00
                       0x02)))
        payload  (concat [0x01 0x00 us 0x00 0x04] commands)]
    ;; We would really want to send this as a broadcast to port 50001, instead of to the affected players
    ;; individually, but there isn't infrastructure to support that in dysentery, and this works for testing.
    ;; Do it the efficient way when implementing the real support in Beat Link.
    (doseq [device-number (clojure.set/union start stop)]
      (send-direct-packet device-number 0x02 payload))))
(defn send-on-air
  "Send a message which will set the on-air status of the players. The
  argument is the set of player numbers that are currently on-air."
  [players]
  (let [us      (:player-number @state)
        ;; One flag byte per player 1-4: 1 = on the air, 0 = off.
        flags   (for [player (map inc (range 4))]
                  (if (players player)
                    0x01
                    0x00))
        payload (concat [0x01 0x00 us 0x00 0x09] flags [0x00 0x00 0x00 0x00 0x00])]
    ;; We would really want to send this as a broadcast to port 50001, instead of to the players
    ;; individually, but there isn't infrastructure to support that in dysentery, and this works for testing.
    ;; Do it the efficient way when implementing the real support in Beat Link.
    (doseq [device-number (filter #(< % 5) (map :player (finder/current-dj-link-devices)))]
      (send-direct-packet device-number 0x03 payload))))
(defn send-load-track
  "Send a message which will cause the target player to load the
  specified track. Players are identified by their device number, and
  slot and track types are byte values described in the CDJ Status
  section of the protocol analysis paper."
  [target-player rekordbox-id source-player source-slot source-type]
  (let [us      (:player-number @state)
        payload (concat [0x01
                         0x00 us 0x00 0x34 us 0x00 0x00 0x00 source-player source-slot source-type 0x00]
                        ;; The rekordbox track ID is expanded into four bytes.
                        (util/decompose-int rekordbox-id 4)
                        [0x00 0x00 0x00 0x32 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00
                         0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00
                         0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00])]
    ;; Load commands go to the status port (50002) rather than the default 50001.
    (send-direct-packet target-player 0x19 payload 50002)))
(defn- build-status-payload
"Constructs the bytes which follow the device name in a status packet
describing our current state."
[]
(swap! state update :packet-count (fnil inc 0))
(let [{:keys [player-number playing? on-air? master? master-yielding-to sync? tempo beat sync-n packet-count]} @state
tempo (math/round (* tempo 100))
a (if playing? 0x01 0x00)
b-b (util/make-byte (inc (mod (dec beat) 4)))
d-r player-number
f (+ 0x84
(if playing? 0x40 0)
(if master? 0x20 0)
(if sync? 0x10 0)
(if on-air? 0x08 0))
p-1 (if playing? 3 5)
p-2 (if playing? 0x7a 0x7e)
p-3 (if playing? 9 1)
s-r 3
t-r 0x01
m (if master? 1 0)
y (or master-yielding-to 0xff)]
(concat [0x01
0x20
d-r s-r t-r 0x00 0x00 0x00 0x00 0x0d ; 0x28
0x00 0x00 0x00 0x01 0x00 0x00 0x00 0x00 ; 0x30
0x38
0x40
0x48
0x50
0x58
0x60
0x01 0x00 0x04 0x04 0x00 0x00 0x00 0x00 ; 0x68
0x00 0x00 0x00 0x04 0x00 0x01 0x00 0x00 ; 0x70
0x00 0x00 0x00 p-1 0x31 0x2e 0x34 0x30 ; 0x78
0x80
(util/decompose-int sync-n 4) ; 0x84
[0x00 f 0xff p-2 0x00 0x10 0x00 0x00 ; 0x88
0x80 0x00] (util/decompose-int tempo 2) ; 0x90
0x94
[0x00 0x10 0x00 0x00 0x00 p-3 m y] ; 0x98
0xa0
[0x01 0xff b-b 0x00] ; 0xa4
[0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 ; 0xa8
0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 ; 0xb8
0xc0
0xc8
[0x0f 0x01 0x00 0x00 ; 0xcc
0x12 0x34 0x56 0x78 0x00 0x00 0x00 0x01 ; 0xd0 ; TODO: Can we shorten this back up?
0xd8
0xe0
0xe8
0xf0
0xf8
0x100
0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 ; 0x108
0x110
0x00 0x00 0x06 0x2f]))) ; 0x118
(defn- send-status
  "Sends a status packet reporting our current state directly to all
  players on the network."
  []
  (let [payload (build-status-payload)]
    ;; The set argument presumably excludes our own local address from the
    ;; device list, so we do not send status to ourselves — confirm in finder.
    (doseq [target (finder/current-dj-link-devices #{(.getLocalAddress (:socket @state))})]
      (try
        (send-direct-packet target 0x0a payload 50002)
        (catch Exception e
          ;; Keep iterating so one failing target does not starve the rest.
          (timbre/error e "Problem sending status packet to" target))))))
(defn- send-keep-alive
  "Send a packet which keeps us marked as present and active on the DJ
  Link network."
  []
  (let [{:keys [player-number mac-address ip-address]} @state]
    (try
      ;; Type-6 announce packet: flag bytes, our player number, then our MAC
      ;; and IP addresses, followed by a constant tail.
      (send-packet 6 (concat [0x01 0x02 0x00 0x36 player-number 0x01] mac-address ip-address
                             [0x01 0x00 0x00 0x00 0x01 0x00]))
      (catch Exception e
        ;; If we cannot announce ourselves there is no point staying up.
        (timbre/error e "Unable to send keep-alive packet to DJ-Link announcement port, shutting down.")
        (shut-down)))))
(defn start-sending-status
  "Starts the thread that sends status updates to all players on the
  network, if it is not already running."
  []
  (swap! state update :status-sender
         (fn [sender]
           ;; `or` makes this idempotent: an existing sender future is kept.
           (or sender
               (future (loop []
                         (send-status)
                         (Thread/sleep status-interval)
                         (recur)))))))
(defn start
  "Create a virtual CDJ on the specified address and interface, with
  packet reception and keep-alive threads, and assign it a name and
  player number defaulting to \"Virtual CDJ\" and 5, but configurable
  via optional keyword arguments `:device-name` and `:player-number`."
  [interface address & {:keys [device-name player-number] :or {device-name "Virtual CDJ" player-number 5}}]
  ;; Tear down any previous instance before binding the socket again.
  (shut-down)
  (try
    (let [socket (DatagramSocket. incoming-port (.getAddress address))]
      (swap! state assoc
             ;; The device name is padded with zeros / truncated to exactly 20 bytes.
             :device-name (map byte (take 20 (concat device-name (repeat 0))))
             :player-number player-number
             :socket socket
             :ip-address (vec (map util/unsign (.getAddress (.getLocalAddress socket))))
             :mac-address (vec (map util/unsign (.getHardwareAddress interface)))
             :destination (.getBroadcast address)
             ;; Receive loop: unpack each datagram into a vector of unsigned bytes
             ;; trimmed to the datagram's actual length.
             :watcher (future (loop []
                                (let [packet (receive socket)
                                      data   (vec (map util/unsign (take (.getLength packet) (.getData packet))))]
                                  (process-packet packet data))
                                (recur)))
             ;; Periodic presence announcements.
             :keep-alive (future (loop []
                                   (Thread/sleep keep-alive-interval)
                                   (send-keep-alive)
                                   (recur)))
             ;; Initial performance state.
             :tempo 120.0
             :beat 1
             :master? false
             :playing? false
             :sync? false
             :sync-n 0))
    (catch Exception e
      (timbre/error e "Failed while trying to set up virtual CDJ.")
      (shut-down))))
(defn yield-master-to
  "Called when we have been told that another player is becoming master
  so we should give up that role. Start announcing that we are
  yielding master to that player."
  [player]
  ;; Ignore the request when it names our own player number.
  (when (not= player (:player-number @state))
    (swap! state assoc :master-yielding-to player)))
(defn master-yield-response
  "Called when a player we have told to yield the master role to us has
  responded. If the answer byte is non-zero, we are becoming the
  master. The final handoff will take place using the master and yield
  bytes in the status packets of both devices."
  [answer from-player]
  (timbre/info "Received master yield response" answer "from player" from-player)
  ;; An answer other than 1 is unexpected but does not abort the handoff.
  (when (not= 1 answer)
    (timbre/warn "Strange, received master yield packet with answer of" answer "from player" from-player))
  (swap! state assoc :master-yielded-from from-player)) ; Note we expect status announcing our master state now
(defn set-sync-mode
  "Change our sync mode; we will be synced if `sync?` is truthy."
  [sync?]
  ;; Normalize any truthy value to an actual boolean before storing it.
  (swap! state assoc :sync? (boolean sync?)))
(defn handle-on-air-packet
  "If our device number is one of the standard four, update our on-air
  status appropriately in response to the packet."
  [packet]
  (let [us (:player-number @state)]
    (when (<= 1 us 4)
      ;; The flag for player N lives at byte 0x23 + N of the packet.
      (let [flag (get packet (+ 0x23 us))]
        #_(timbre/info "on-air flag" flag)
        (swap! state assoc :on-air? (boolean (= 1 flag)))))))
(defn saw-master-packet
  "Record the current notion of the master player based on the device
  number found in a packet that identifies itself as the master. If
  `yielding` is not `0xff` this master is handing off its role to the
  specified player number. If it is us, it is finally time for us to
  really become master. Assert that state, and the yielding player
  should respond by dropping their master state, and setting their
  `sync-n` value to one greater than any other seen on the network.
  Also keep track of the highest sync-n value seen for any master
  packet on the network."
  [player sync-n yielding]
  (when (not= player (:player-number @state)) ; Ignore packets we are sending
    (when sync-n
      ;; Track the largest sync-n observed, so we can exceed it when we yield.
      (swap! state update :max-sync-n #(max sync-n (or % 0))))
    (if (= yielding 0xff)
      ;; This is a normal, non-yielding master packet. Update our notion of the current master, and if we
      ;; were yielding, finish that process, and update our sync-n appropriately. If we were master and not
      ;; yielding, still give up our master state but log a warning at this unexpected situation.
      (let [{:keys [max-sync-n master-number master? master-yielding-to]} @state]
        (when master?
          (if master-yielding-to
            (if (= player master-yielding-to)
              (swap! state assoc :sync-n (inc max-sync-n))
              (timbre/warn "Expected to yield to player" master-yielding-to "but saw master asserted by player" player))
            (timbre/warn "Saw master asserted by player" player "when we were not yielding")))
        ;; In every case, record the sender as the new master and clear any
        ;; pending yield-to marker.
        (swap! state (fn [current]
                       (-> current
                           (assoc :master? false :master-number player)
                           (dissoc :master-yielding-to)))))
      ;; This is a yielding master packet. If it is us that is being yielded to, take over master if we are expecting
      ;; to, otherwise log a warning.
      (let [{:keys [player-number master-yielded-from]} @state]
        (when (= player-number yielding)
          (when (not= player master-yielded-from)
            (timbre/warn "Expected player" master-yielded-from "to yield master to us, but player" player "did"))
          ;; Assume the master role ourselves and clear the handoff marker.
          (swap! state (fn [current]
                         (-> current
                             (assoc :master? true :master-number player-number)
                             (dissoc :master-yielded-from)))))))))
(defn become-master
  "Attempt to become the tempo master by sending a command to the
  existing tempo master telling it to yield to us. We will change
  state upon receiving an proper acknowledgement message."
  []
  (let [us      (:player-number @state)
        master  (:master-number @state)
        payload [0x01 0x00 us 0x00 0x04 0x00 0x00 0x00 us]]
    ;; Only real player numbers (1-4) may hold the tempo master role.
    (when-not (<= 1 us 4)
      (throw (IllegalStateException.
              (str "Our player number " us " is not in the range 1 to 4; cannot be tempo master."))))
    (if master
      (if (= master us)
        (timbre/info "We are already master, nothing to do.")
        (do
          (timbre/info "Sending master yield packet to" master "payload:" payload)
          ;; Packet type 0x26 asks the current master to hand the role to us;
          ;; our state changes later, in master-yield-response / saw-master-packet.
          (send-direct-packet master 0x26 payload)))
      (do
        (timbre/info "No current master; simply becoming it")
        (swap! state assoc :master? true :master-number us)))))
(defn send-media-query
  "Asks the specified player to report details about the media mounted
  in the specified slot."
  [player slot]
  (let [us      (:player-number @state)
        ;; Our IP address is embedded in the payload — presumably so the player
        ;; knows where to send its reply; confirm against the protocol notes.
        payload (concat [0x01 0x00 us 0x00 0x0c] (:ip-address @state) [0x00 0x00 0x00 player 0x00 0x00 0x00 slot])]
    (send-direct-packet player 0x05 payload 50002)))
(defn send-settings
  "Tests whether the suspected packet for establishing player settings
  actually works."
  [player]
  (let [us      (:player-number @state)
        ;; Experimental payload: per the docstring this packet format is only
        ;; suspected, so the individual byte meanings are not documented here.
        payload [0x02 player us 0x00 0x50 0x12 0x34
                 0x56 0x78 0x00 0x00 0x00 0x03 0x81 0x83 0x81 0x88 0x81 0x01 0x82 0x81 0x81 0x01
                 0x01 0x01 0x81 0x81 0x81 0x81 0x80 0x81 0x80 0x00 0x00 0x81 0x00 0x00 0x81 0x81
                 0x81 0x81 0x82 0x80 0x00 0x00 0x81 0x80 0x83 0x83 0x00 0x00 0x00 0x00 0x00 0x00
                 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00
                 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00]]
    (send-direct-packet player 0x34 payload 50002)))
(defn set-tempo
  "Set our internal notion of tempo, for use when we are master."
  [tempo]
  ;; Stored under :tempo in the state atom; the status sender reads it from there.
  (swap! state (fn [current] (assoc current :tempo tempo))))
| null | https://raw.githubusercontent.com/Deep-Symmetry/dysentery/0460d68a771c68f50c1cb1dcdf184a39d033f0e5/src/dysentery/vcdj.clj | clojure | For now just stash the most recent packet and data into our state.
if you try
individually, but there isn't infrastructure to support that in dysentery, and this works for testing.
individually, but there isn't infrastructure to support that in dysentery, and this works for testing.
0x28
0x30
0x68
0x70
0x78
0x84
0x88
0x90
0x98
0xa4
0xa8
0xb8
0xcc
0xd0 ; TODO: Can we shorten this back up?
0x108
0x118
Note we expect status announcing our master state now
Ignore packets we are sending
This is a normal, non-yielding master packet. Update our notion of the current master, and if we
were yielding, finish that process, and update our sync-n appropriately. If we were master and not
yielding, still give up our master state but log a warning at this unexpected situation.
This is a yielding master packet. If it is us that is being yielded to, take over master if we are expecting
to, otherwise log a warning. | (ns dysentery.vcdj
"Provides the ability to create a virtual CDJ device that can lurk
on a Pro DJ Link network and receive packets sent to players, so
details about the other players can be monitored."
{:author "James Elliott"}
(:require [clojure.math.numeric-tower :as math]
[dysentery.util :as util]
[taoensso.timbre :as timbre]
[dysentery.finder :as finder])
(:import [java.net InetAddress DatagramPacket DatagramSocket NetworkInterface]))
(def incoming-port
"The UDP port on which player unicast packets are received."
50002)
(defonce ^{:private true
:doc "Holds the persistent server socket, and the futures
that process incoming packets and keep our presence active on the
network."}
state (atom {:socket nil
:watcher nil
:keep-alive nil}))
(defn stop-sending-status
"Shut down the thread which is sending status packets to all players
on the network."
[]
(swap! state update :status-sender #(when %
(try (future-cancel %)
(catch Exception e
(timbre/warn e "Problem stopping DJ-Link player status sender.")))
nil)))
(defn shut-down
"Close the UDP server socket and terminate the packet processing
thread, if they are active."
[]
(stop-sending-status)
(swap! state (fn [current]
(-> current
(update :socket #(when %
(try (.close %)
(catch Exception e
(timbre/warn e "Problem closing DJ-Link player socket.")))
nil))
(update :watcher #(when %
(try (future-cancel %)
(catch Exception e
(timbre/warn e "Problem stopping DJ-Link player receiver.")))
nil))
(update :keep-alive #(when %
(try (future-cancel %)
(catch Exception e
(timbre/warn e "Problem stopping DJ-Link player keep-alive.")))
nil)))))
nil)
(defn- receive
"Block until a UDP message is received on the given DatagramSocket, and
return the payload packet."
[^DatagramSocket socket]
(let [buffer (byte-array 512)
packet (DatagramPacket. buffer 512)]
(try (.receive socket packet)
packet
(catch Exception e
(timbre/warn e "Problem reading from DJ Link player socket, shutting down.")
(shut-down)))))
(defonce ^{:private true
:doc "Holds a set of functions to call whenever a packet
has been received on the incoming status port. The function will be
called with two arguments, the device number found in the packet,
and the vector of unsigned byte values corresponding to the packet
data."}
packet-listeners (atom #{}))
(defn- process-packet
"React to a packet that was sent to our player port."
[packet data]
(swap! state assoc :packet packet :data data)
(let [device-number (get data 33)]
(swap! state assoc-in [:device-data device-number] data)
(when (seq @packet-listeners)
(doseq [listener @packet-listeners]
(try
(listener device-number data)
(catch Throwable t
(timbre/warn t "Problem calling device packet listener")))))))
(defn add-packet-listener
"Registers a function to be called whenever a packet is sent to the
incoming status port. The function will be called with two
arguments, the device number found in the packet, and the vector of
unsigned byte values corresponding to the packet data."
[listener]
(swap! packet-listeners conj listener))
(defn remove-packet-listener
"Stops calling a packet listener function that was registered with
[[add-packet-listener]]."
[listener]
(swap! packet-listeners disj listener))
(def keep-alive-interval
"How often, in milliseconds, we should send keep-alive packets to
maintain our presence on the network."
1500)
(def status-interval
"How often, in milliseconds, should we send status packets to the
other devices on the network."
200)
(def header-bytes
"The constant bytes which always form the start of a packet that we
send."
[0x51 0x73 0x70 0x74 0x31 0x57 0x6d 0x4a 0x4f 0x4c])
(defn- send-packet
"Create and send a packet with the specified `header-type` value at
byte 10, and specified payload bytes following the device name."
[header-type payload]
(let [packet (byte-array (concat header-bytes [header-type 00] (:device-name @state) payload))
datagram (DatagramPacket. packet (count packet) (:destination @state) finder/announce-port)]
(.send (:socket @state) datagram)))
(defn send-direct-packet
"Create and send a packet to port 50001 of the specified device, with
the specified `header-type` value at byte 10, and specified payload
bytes following our device name. `device` can either be a device
number to be looked up, or an actual device details map. Packets can
be sent to a different port (e.g. 50002 for status packets) by
specifying the port number as an optional fourth argument."
([device header-type payload]
(send-direct-packet device header-type payload 50001))
([device header-type payload port]
(if-let [device (if (number? device) (finder/device-given-number device) device)]
(let [data (vec (concat header-bytes [header-type] (:device-name @state) payload))
packet (byte-array data)
datagram (DatagramPacket. packet (count packet) (:address device) port)]
(.send (:socket @state) datagram)
(when (= port 50002)
(require 'dysentery.view)
((resolve 'dysentery.view/handle-device-packet) port 0 data)))
(throw (ex-info (str "No device found with number " device) {})))))
(defn set-player-sync
"Turn the specified player's sync mode on or off."
[device-number sync?]
(let [us (:player-number @state)
sync-byte (if sync? 0x10 0x20)
payload [0x01 0x00 us 0x00 0x08 0x00 0x00 0x00 us 0x00 0x00 0x00 sync-byte]]
(send-direct-packet device-number 0x2a payload)))
(defn appoint-master
"Tell the specified player to take over the tempo master role."
[device-number]
(let [us (:player-number @state)
payload [0x01 0x00 us 0x00 0x08 0x00 0x00 0x00 us 0x00 0x00 0x00 01]]
(send-direct-packet device-number 0x2a payload)))
(defn send-fader-start
"Send a message which will start or stop a set of players. The
to both start and stop the same player, only the stop will be sent."
[start stop]
(let [us (:player-number @state)
commands (for [player (map inc (range 4))]
(if (stop player)
0x01
(if (start player)
0x00
0x02)))
payload (concat [0x01 0x00 us 0x00 0x04] commands)]
We would really want to send this as a broadcast to port 50001 , instead of to the affected players
Do it the efficient way when implementing the real support in Beat Link .
(doseq [device-number (clojure.set/union start stop)]
(send-direct-packet device-number 0x02 payload))))
(defn send-on-air
"Send a message which will set the on-air status of the players. The
argument is the set of player numbers that are currently on-air."
[players]
(let [us (:player-number @state)
flags (for [player (map inc (range 4))]
(if (players player)
0x01
0x00))
payload (concat [0x01 0x00 us 0x00 0x09] flags [0x00 0x00 0x00 0x00 0x00])]
We would really want to send this as a broadcast to port 50001 , instead of to the players
Do it the efficient way when implementing the real support in Beat Link .
(doseq [device-number (filter #(< % 5) (map :player (finder/current-dj-link-devices)))]
(send-direct-packet device-number 0x03 payload))))
(defn send-load-track
"Send a message which will cause the target player to load the
specified track. Players are identified by their device number, and
slot and track types are byte values described in the CDJ Status
section of the protocol analysis paper."
[target-player rekordbox-id source-player source-slot source-type]
(let [us (:player-number @state)
payload (concat [0x01
0x00 us 0x00 0x34 us 0x00 0x00 0x00 source-player source-slot source-type 0x00]
(util/decompose-int rekordbox-id 4)
[0x00 0x00 0x00 0x32 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00
0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00
0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00])]
(send-direct-packet target-player 0x19 payload 50002)))
(defn- build-status-payload
"Constructs the bytes which follow the device name in a status packet
describing our current state."
[]
(swap! state update :packet-count (fnil inc 0))
(let [{:keys [player-number playing? on-air? master? master-yielding-to sync? tempo beat sync-n packet-count]} @state
tempo (math/round (* tempo 100))
a (if playing? 0x01 0x00)
b-b (util/make-byte (inc (mod (dec beat) 4)))
d-r player-number
f (+ 0x84
(if playing? 0x40 0)
(if master? 0x20 0)
(if sync? 0x10 0)
(if on-air? 0x08 0))
p-1 (if playing? 3 5)
p-2 (if playing? 0x7a 0x7e)
p-3 (if playing? 9 1)
s-r 3
t-r 0x01
m (if master? 1 0)
y (or master-yielding-to 0xff)]
(concat [0x01
0x20
0x38
0x40
0x48
0x50
0x58
0x60
0x80
0x94
0xa0
0xc0
0xc8
0xd8
0xe0
0xe8
0xf0
0xf8
0x100
0x110
(defn- send-status
"Sends a status packet reporting our current state directly to all
players on the network."
[]
(let [payload (build-status-payload)]
(doseq [target (finder/current-dj-link-devices #{(.getLocalAddress (:socket @state))})]
(try
(send-direct-packet target 0x0a payload 50002)
(catch Exception e
(timbre/error e "Problem sending status packet to" target))))))
(defn- send-keep-alive
"Send a packet which keeps us marked as present and active on the DJ
Link network."
[]
(let [{:keys [player-number mac-address ip-address]} @state]
(try
(send-packet 6 (concat [0x01 0x02 0x00 0x36 player-number 0x01] mac-address ip-address
[0x01 0x00 0x00 0x00 0x01 0x00]))
(catch Exception e
(timbre/error e "Unable to send keep-alive packet to DJ-Link announcement port, shutting down.")
(shut-down)))))
(defn start-sending-status
"Starts the thread that sends status updates to all players on the
network, if it is not already running."
[]
(swap! state update :status-sender
(fn [sender] (or sender
(future (loop []
(send-status)
(Thread/sleep status-interval)
(recur)))))))
(defn start
"Create a virtual CDJ on the specified address and interface, with
packet reception and keep-alive threads, and assign it a name and
player number defaulting to \"Virtual CDJ\" and 5, but configurable
via optional keyword arguments `:device-name` and `:player-number`."
[interface address & {:keys [device-name player-number] :or {device-name "Virtual CDJ" player-number 5}}]
(shut-down)
(try
(let [socket (DatagramSocket. incoming-port (.getAddress address))]
(swap! state assoc
:device-name (map byte (take 20 (concat device-name (repeat 0))))
:player-number player-number
:socket socket
:ip-address (vec (map util/unsign (.getAddress (.getLocalAddress socket))))
:mac-address (vec (map util/unsign (.getHardwareAddress interface)))
:destination (.getBroadcast address)
:watcher (future (loop []
(let [packet (receive socket)
data (vec (map util/unsign (take (.getLength packet) (.getData packet))))]
(process-packet packet data))
(recur)))
:keep-alive (future (loop []
(Thread/sleep keep-alive-interval)
(send-keep-alive)
(recur)))
:tempo 120.0
:beat 1
:master? false
:playing? false
:sync? false
:sync-n 0))
(catch Exception e
(timbre/error e "Failed while trying to set up virtual CDJ.")
(shut-down))))
(defn yield-master-to
"Called when we have been told that another player is becoming master
so we should give up that role. Start announcing that we are
yielding master to that player."
[player]
(when (not= player (:player-number @state))
(swap! state assoc :master-yielding-to player)))
(defn master-yield-response
"Called when a player we have told to yield the master role to us has
responded. If the answer byte is non-zero, we are becoming the
master. The final handoff will take place using the master and yield
bytes in the status packets of both devices."
[answer from-player]
(timbre/info "Received master yield response" answer "from player" from-player)
(when (not= 1 answer)
(timbre/warn "Strange, received master yield packet with answer of" answer "from player" from-player))
(defn set-sync-mode
"Change our sync mode; we will be synced if `sync?` is truthy."
[sync?]
(swap! state assoc :sync? (boolean sync?)))
(defn handle-on-air-packet
"If our device number is one of the standard four, update our on-air
status appropriately in response to the packet."
[packet]
(let [us (:player-number @state)]
(when (<= 1 us 4)
(let [flag (get packet (+ 0x23 us))]
#_(timbre/info "on-air flag" flag)
(swap! state assoc :on-air? (boolean (= 1 flag)))))))
(defn saw-master-packet
"Record the current notion of the master player based on the device
number found in a packet that identifies itself as the master. If
`yielding` is not `0xff` this master is handing off its role to the
specified player number. If it is us, it is finally time for us to
really become master. Assert that state, and the yielding player
should respond by dropping their master state, and setting their
`sync-n` value to one greater than any other seen on the network.
Also keep track of the highest sync-n value seen for any master
packet on the network."
[player sync-n yielding]
(when sync-n
(swap! state update :max-sync-n #(max sync-n (or % 0))))
(if (= yielding 0xff)
(let [{:keys [max-sync-n master-number master? master-yielding-to]} @state]
(when master?
(if master-yielding-to
(if (= player master-yielding-to)
(swap! state assoc :sync-n (inc max-sync-n))
(timbre/warn "Expected to yield to player" master-yielding-to "but saw master asserted by player" player))
(timbre/warn "Saw master asserted by player" player "when we were not yielding")))
(swap! state (fn [current]
(-> current
(assoc :master? false :master-number player)
(dissoc :master-yielding-to)))))
(let [{:keys [player-number master-yielded-from]} @state]
(when (= player-number yielding)
(when (not= player master-yielded-from)
(timbre/warn "Expected player" master-yielded-from "to yield master to us, but player" player "did"))
(swap! state (fn [current]
(-> current
(assoc :master? true :master-number player-number)
(dissoc :master-yielded-from)))))))))
(defn become-master
"Attempt to become the tempo master by sending a command to the
existing tempo master telling it to yield to us. We will change
state upon receiving an proper acknowledgement message."
[]
(let [us (:player-number @state)
master (:master-number @state)
payload [0x01 0x00 us 0x00 0x04 0x00 0x00 0x00 us]]
(when-not (<= 1 us 4)
(throw (IllegalStateException.
(str "Our player number " us " is not in the range 1 to 4; cannot be tempo master."))))
(if master
(if (= master us)
(timbre/info "We are already master, nothing to do.")
(do
(timbre/info "Sending master yield packet to" master "payload:" payload)
(send-direct-packet master 0x26 payload)))
(do
(timbre/info "No current master; simply becoming it")
(swap! state assoc :master? true :master-number us)))))
(defn send-media-query
"Asks the specified player to report details about the media mounted
in the specified slot."
[player slot]
(let [us (:player-number @state)
payload (concat [0x01 0x00 us 0x00 0x0c] (:ip-address @state) [0x00 0x00 0x00 player 0x00 0x00 0x00 slot])]
(send-direct-packet player 0x05 payload 50002)))
(defn send-settings
"Tests whether the suspected packet for establishing player settings
actually works."
[player]
(let [us (:player-number @state)
payload [0x02 player us 0x00 0x50 0x12 0x34
0x56 0x78 0x00 0x00 0x00 0x03 0x81 0x83 0x81 0x88 0x81 0x01 0x82 0x81 0x81 0x01
0x01 0x01 0x81 0x81 0x81 0x81 0x80 0x81 0x80 0x00 0x00 0x81 0x00 0x00 0x81 0x81
0x81 0x81 0x82 0x80 0x00 0x00 0x81 0x80 0x83 0x83 0x00 0x00 0x00 0x00 0x00 0x00
0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00
0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x00]]
(send-direct-packet player 0x34 payload 50002)))
(defn set-tempo
"Set our internal notion of tempo, for use when we are master."
[tempo]
(swap! state assoc :tempo tempo))
|
e30fdeb1c791390a4fd2662d799ac2784b5fefeced41c688c01fb53ef14c8202 | haskell-compat/base-compat | Repl.hs | {-# LANGUAGE PackageImports #-}
# OPTIONS_GHC -fno - warn - dodgy - exports -fno - warn - unused - imports #
-- | Reexports "Data.Either.Compat"
-- from a globally unique namespace.
module Data.Either.Compat.Repl (
module Data.Either.Compat
) where
import "this" Data.Either.Compat
| null | https://raw.githubusercontent.com/haskell-compat/base-compat/847aa35c4142f529525ffc645cd035ddb23ce8ee/base-compat/src/Data/Either/Compat/Repl.hs | haskell | # LANGUAGE PackageImports #
| Reexports "Data.Either.Compat"
from a globally unique namespace. | # OPTIONS_GHC -fno - warn - dodgy - exports -fno - warn - unused - imports #
module Data.Either.Compat.Repl (
module Data.Either.Compat
) where
import "this" Data.Either.Compat
|
b5d3fdf3a0378beb3167d888990a7d62cfe597070ff39d07cffd3ae8d28735f3 | gfngfn/SATySFi | loadJpeg.mli |
(* Alias making it explicit that JPEG inputs are referenced by filesystem path. *)
type file_path = string

(* Builds a PDF image XObject from the JPEG file at the given path.
   NOTE(review): the roles of the [Pdf.pdfobject] argument and the two [int]
   arguments are not visible from this interface — confirm against loadJpeg.ml. *)
val make_xobject : Pdf.t -> Pdf.pdfobject -> int -> int -> file_path -> Pdf.pdfobject
| null | https://raw.githubusercontent.com/gfngfn/SATySFi/9dbd61df0ab05943b3394830c371e927df45251a/src/backend/loadJpeg.mli | ocaml |
type file_path = string
val make_xobject : Pdf.t -> Pdf.pdfobject -> int -> int -> file_path -> Pdf.pdfobject
|
|
d9a2ec0efdeec41679253e66e61dc440074da93a000c9f97fbecfc740d5b0b56 | mschuldt/ga144 | ga144.rkt | #lang racket ;; -*- lexical-binding: t -*-
(define _PORT-DEBUG? false)
(define DISPLAY_STATE? false)
(define port-debug-list '(1 2))
(define (PORT-DEBUG? coord) (and _PORT-DEBUG? (member coord port-debug-list)))
(define ga-run-sim t) ;;global variable used for halting the simulation
(define (ga-stop-sim!)
(set! ga-run-sim nil))
(define (make-ga144 name_ (interactive_ false) (source-buffer_ false))
(new ga144% name_ interactive_ source-buffer_))
(define ga144%
(class object%
(super-new)
(init-field (name false) (interactive false) (source-buffer false))
(define time 0)
(define breakpoint false) ;; set to t when a breakpoint is reached
(define breakpoint-node false) ;;node where breakpoint originated
;;set by map when it wants the node to update the map with its activity
(define display-node-activity false)
(define map-buffer false)
;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; 8x18 node matrix
(define nodes (make-vector 144 false))
(define/public (get-nodes) nodes)
builds matrix of 144 f18 nodes
(define (build-node-matrix)
(for ((i 144))
(vector-set! nodes i (new f18a% i this source-buffer)))
(vector-map (lambda (node) (send node init)) nodes))
(define (index->node index)
(vector-ref nodes index))
(define/public (coord->node coord)
(let ((index (coord->index coord)))
(if (and (>= index 0)
(< index 144))
(vector-ref nodes index)
false ;;TODO: return pseudo node
)))
(define (fn:coord->node coord)
(coord->node coord))
;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; suspension and wakeup
;;TODO: better way to clone vector
(define active-nodes false)
;;index of last active node in the 'active-nodes' array
(define last-active-index 143) ;;all nodes are initially active
(define current-node-index 0) ;;index into 'active-nodes' of the current node
(define current-node false)
(define/public (remove-from-active-list node)
(let ((last-active-node (vector-ref active-nodes last-active-index))
(index (get-field active-index node)))
;;swap self with current node in 'active-nodes'
(vector-set! active-nodes index last-active-node)
(vector-set! active-nodes last-active-index node)
;;save the new node indices
(set-field! active-index last-active-node index)
(set-field! active-index node last-active-index)
;;decrement the number of active nodes
(set! last-active-index (sub1 last-active-index)))
(when show-io-changes?
(print-active)))
(define/public (add-to-active-list node)
(set! last-active-index (add1 last-active-index))
(let ((first-inactive-node (vector-ref active-nodes last-active-index))
(index (get-field active-index node)))
swap self with first inactive node in ' active - nodes '
(vector-set! active-nodes index first-inactive-node)
(vector-set! active-nodes last-active-index node)
;;save the new node indices
(set-field! active-index first-inactive-node index)
(set-field! active-index node last-active-index))
(when show-io-changes?
(print-active)))
;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; breakpoints
(define cli-active? false) ;; if true, we are in a cli session
(define/public (break (node false))
(set! breakpoint-node node)
;; set the breakpoint flag which returns control to the interpreter
(set! breakpoint t))
(define/public (get-breakpoint-node) breakpoint-node)
;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; program loading
(define/public (load compiled)
;; Places code into each node's RAM/ROM
(reset! false)
(for ((n (compiled-nodes compiled)))
(send (coord->node (node-coord n)) load n))
;;(fetch-I)
)
(define/public (load-bootstream bs (input-node 708))
Load a bootstream through INPUT - NODE
(send (coord->node input-node) load-bootstream bs)
(set! time 0))
;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; execution control
;; step functions return true if a breakpoint has been reached, else false
(define/public (step-program! (display-update-ok true))
(set! breakpoint false)
(set! time (add1 time))
(define index 0)
;;(setq inhibit-redisplay t)
(when (>= last-active-index 0)
(while (and (<= index last-active-index)
(not breakpoint))
(begin
(set! current-node (vector-ref active-nodes index))
(unless (and (send current-node step-program!)
(not (= index last-active-index)))
;; if node gets suspended during this step it will swap itself
;; with the last active node, declrementing last-active-index.
;; if that happens we need to step the node at the same index again.
(set! index (add1 index)))
)))
;;(setq inhibit-redisplay nil)
(when (and display-node-activity
display-update-ok)
(update-activity))
;;TODO: use current-node-index to correctly resume after a breakpoint
breakpoint)
(define/public (step-program-n! n)
(set! breakpoint false)
(set! ga-run-sim true)
(while (and (> n 0)
(not (or (= last-active-index -1)
breakpoint))
ga-run-sim)
(setq breakpoint (step-program! false))
(setq n (1- n)))
(when display-node-activity
(update-activity))
breakpoint)
;;step program until all nodes are non-active
(define/public (step-program!* (max-time false))
(set! breakpoint false)
(set! ga-run-sim true)
(if max-time
(while (and (not (or (= last-active-index -1)
breakpoint))
(< time 1000000)
ga-run-sim)
(step-program!))
(while (and (not (or (= last-active-index -1)
breakpoint))
ga-run-sim)
(set! breakpoint (step-program! false))))
(when display-node-activity
(update-activity))
;; (when (= (num-active-nodes) 0)
;; (when interactive
;; (printf "[[ All nodes are suspended\n"))
;; (set! breakpoint t))
breakpoint)
(define/public (fetch-I)
(vector-map (lambda (node) (send node fetch-I)) nodes))
(define/public (reset! (fetch true))
(set! time 0)
(set! active-nodes (vector-copy nodes))
(set! last-active-index 143)
(set! current-node-index 0)
(set! current-node (vector-ref active-nodes current-node-index))
(set! breakpoint false)
(set! cli-active? false)
(set! breakpoint-node false)
(vector-map (lambda (node) (send node reset!)) nodes)
(when fetch (fetch-I))
(when display-node-activity
(update-activity)))
(define/public (show-activity state)
(set! display-node-activity state)
(when state
(update-activity)))
(define (update-activity)
(vector-map (lambda (node)
(send node update-map-display time))
nodes)
(redisplay))
;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; state display functions
(define/public (get-active-nodes)
(if (>= last-active-index 0)
(for/list ((i (add1 last-active-index)))
(vector-ref active-nodes i))
'()))
(define/public (num-active-nodes)
(add1 last-active-index))
(define/public (display-node-states (nodes false))
(let ((nodes (if nodes
(map (lambda (n) (coord->node n)) nodes)
(get-active-nodes))))
(for ((node nodes))
(send node display-state))))
(define/public (display-dstacks (nodes false))
(let ((nodes (if nodes
(map (lambda (n) (coord->node n)) nodes)
(get-active-nodes))))
(for ((node nodes))
(send node display-dstack))))
(define/public (display-memory coord (n MEM-SIZE))
(send (fn:coord->node coord) display-memory n))
(define/public (print-active)
;;print a chip diagram showing the active nodes
(define (print-node coord)
(let* ((node (coord->node coord))
(suspended? (send node suspended?))
(reading-port (send node get-current-reading-port))
(writing-port (send node get-current-writing-port)))
(printf "~a" (if suspended?
(or reading-port writing-port " ")
"*"))))
(printf "--------------------\n")
(for ((row (range 8)))
(printf "|")
(for ((column (range 18)))
(print-node (+ (* (- 7 row) 100) column)))
(printf "|\n"))
(printf "--------------------\n"))
(define/public (print-node coord)
(send (coord->node coord) display-all))
(define show-io-changes? false)
(define/public (show-io-changes show)
(set! show-io-changes? show))
(define/public (get-time) time)
(define/public (reset-time) (set! time 0))
(define/public (disassemble-memory coord (start 0) (end #xff))
;; disassemble and print a nodes memory
(send (coord->node coord) disassemble-memory start end))
(define/public (disassemble-local coord)
;; disassemble and print a nodes memory
(send (coord->node coord) disassemble-local))
(define/public (get-execution-time)
(let ((ret nil))
(for ((node nodes))
(push (send node get-execution-time) ret))
ret))
(build-node-matrix)
(reset!)
))
| null | https://raw.githubusercontent.com/mschuldt/ga144/5b327b958f5d35cf5a015044e6ee62f46446169f/src/ga144.rkt | racket | -*- lexical-binding: t -*-
global variable used for halting the simulation
set to t when a breakpoint is reached
node where breakpoint originated
set by map when it wants the node to update the map with its activity
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
8x18 node matrix
TODO: return pseudo node
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
suspension and wakeup
TODO: better way to clone vector
index of last active node in the 'active-nodes' array
all nodes are initially active
index into 'active-nodes' of the current node
swap self with current node in 'active-nodes'
save the new node indices
decrement the number of active nodes
save the new node indices
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
breakpoints
if true, we are in a cli session
set the breakpoint flag which returns control to the interpreter
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
program loading
Places code into each node's RAM/ROM
(fetch-I)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
execution control
step functions return true if a breakpoint has been reached, else false
(setq inhibit-redisplay t)
if node gets suspended during this step it will swap itself
with the last active node, declrementing last-active-index.
if that happens we need to step the node at the same index again.
(setq inhibit-redisplay nil)
TODO: use current-node-index to correctly resume after a breakpoint
step program until all nodes are non-active
(when (= (num-active-nodes) 0)
(when interactive
(printf "[[ All nodes are suspended\n"))
(set! breakpoint t))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
state display functions
print a chip diagram showing the active nodes
disassemble and print a nodes memory
disassemble and print a nodes memory |
(define _PORT-DEBUG? false)
(define DISPLAY_STATE? false)
(define port-debug-list '(1 2))
(define (PORT-DEBUG? coord) (and _PORT-DEBUG? (member coord port-debug-list)))
(define (ga-stop-sim!)
(set! ga-run-sim nil))
(define (make-ga144 name_ (interactive_ false) (source-buffer_ false))
(new ga144% name_ interactive_ source-buffer_))
(define ga144%
(class object%
(super-new)
(init-field (name false) (interactive false) (source-buffer false))
(define time 0)
(define display-node-activity false)
(define map-buffer false)
(define nodes (make-vector 144 false))
(define/public (get-nodes) nodes)
builds matrix of 144 f18 nodes
(define (build-node-matrix)
(for ((i 144))
(vector-set! nodes i (new f18a% i this source-buffer)))
(vector-map (lambda (node) (send node init)) nodes))
(define (index->node index)
(vector-ref nodes index))
(define/public (coord->node coord)
(let ((index (coord->index coord)))
(if (and (>= index 0)
(< index 144))
(vector-ref nodes index)
)))
(define (fn:coord->node coord)
(coord->node coord))
(define active-nodes false)
(define current-node false)
(define/public (remove-from-active-list node)
(let ((last-active-node (vector-ref active-nodes last-active-index))
(index (get-field active-index node)))
(vector-set! active-nodes index last-active-node)
(vector-set! active-nodes last-active-index node)
(set-field! active-index last-active-node index)
(set-field! active-index node last-active-index)
(set! last-active-index (sub1 last-active-index)))
(when show-io-changes?
(print-active)))
(define/public (add-to-active-list node)
(set! last-active-index (add1 last-active-index))
(let ((first-inactive-node (vector-ref active-nodes last-active-index))
(index (get-field active-index node)))
swap self with first inactive node in ' active - nodes '
(vector-set! active-nodes index first-inactive-node)
(vector-set! active-nodes last-active-index node)
(set-field! active-index first-inactive-node index)
(set-field! active-index node last-active-index))
(when show-io-changes?
(print-active)))
(define/public (break (node false))
(set! breakpoint-node node)
(set! breakpoint t))
(define/public (get-breakpoint-node) breakpoint-node)
(define/public (load compiled)
(reset! false)
(for ((n (compiled-nodes compiled)))
(send (coord->node (node-coord n)) load n))
)
(define/public (load-bootstream bs (input-node 708))
Load a bootstream through INPUT - NODE
(send (coord->node input-node) load-bootstream bs)
(set! time 0))
(define/public (step-program! (display-update-ok true))
(set! breakpoint false)
(set! time (add1 time))
(define index 0)
(when (>= last-active-index 0)
(while (and (<= index last-active-index)
(not breakpoint))
(begin
(set! current-node (vector-ref active-nodes index))
(unless (and (send current-node step-program!)
(not (= index last-active-index)))
(set! index (add1 index)))
)))
(when (and display-node-activity
display-update-ok)
(update-activity))
breakpoint)
(define/public (step-program-n! n)
(set! breakpoint false)
(set! ga-run-sim true)
(while (and (> n 0)
(not (or (= last-active-index -1)
breakpoint))
ga-run-sim)
(setq breakpoint (step-program! false))
(setq n (1- n)))
(when display-node-activity
(update-activity))
breakpoint)
(define/public (step-program!* (max-time false))
(set! breakpoint false)
(set! ga-run-sim true)
(if max-time
(while (and (not (or (= last-active-index -1)
breakpoint))
(< time 1000000)
ga-run-sim)
(step-program!))
(while (and (not (or (= last-active-index -1)
breakpoint))
ga-run-sim)
(set! breakpoint (step-program! false))))
(when display-node-activity
(update-activity))
breakpoint)
(define/public (fetch-I)
(vector-map (lambda (node) (send node fetch-I)) nodes))
(define/public (reset! (fetch true))
(set! time 0)
(set! active-nodes (vector-copy nodes))
(set! last-active-index 143)
(set! current-node-index 0)
(set! current-node (vector-ref active-nodes current-node-index))
(set! breakpoint false)
(set! cli-active? false)
(set! breakpoint-node false)
(vector-map (lambda (node) (send node reset!)) nodes)
(when fetch (fetch-I))
(when display-node-activity
(update-activity)))
(define/public (show-activity state)
(set! display-node-activity state)
(when state
(update-activity)))
(define (update-activity)
(vector-map (lambda (node)
(send node update-map-display time))
nodes)
(redisplay))
(define/public (get-active-nodes)
(if (>= last-active-index 0)
(for/list ((i (add1 last-active-index)))
(vector-ref active-nodes i))
'()))
(define/public (num-active-nodes)
(add1 last-active-index))
(define/public (display-node-states (nodes false))
(let ((nodes (if nodes
(map (lambda (n) (coord->node n)) nodes)
(get-active-nodes))))
(for ((node nodes))
(send node display-state))))
(define/public (display-dstacks (nodes false))
(let ((nodes (if nodes
(map (lambda (n) (coord->node n)) nodes)
(get-active-nodes))))
(for ((node nodes))
(send node display-dstack))))
(define/public (display-memory coord (n MEM-SIZE))
(send (fn:coord->node coord) display-memory n))
(define/public (print-active)
(define (print-node coord)
(let* ((node (coord->node coord))
(suspended? (send node suspended?))
(reading-port (send node get-current-reading-port))
(writing-port (send node get-current-writing-port)))
(printf "~a" (if suspended?
(or reading-port writing-port " ")
"*"))))
(printf "--------------------\n")
(for ((row (range 8)))
(printf "|")
(for ((column (range 18)))
(print-node (+ (* (- 7 row) 100) column)))
(printf "|\n"))
(printf "--------------------\n"))
(define/public (print-node coord)
(send (coord->node coord) display-all))
(define show-io-changes? false)
(define/public (show-io-changes show)
(set! show-io-changes? show))
(define/public (get-time) time)
(define/public (reset-time) (set! time 0))
(define/public (disassemble-memory coord (start 0) (end #xff))
(send (coord->node coord) disassemble-memory start end))
(define/public (disassemble-local coord)
(send (coord->node coord) disassemble-local))
(define/public (get-execution-time)
(let ((ret nil))
(for ((node nodes))
(push (send node get-execution-time) ret))
ret))
(build-node-matrix)
(reset!)
))
|
e6c7789d77d5ebc764adc7793949b17afa71a0f610b8dfdad3dc34ddf6d679b0 | zwizwa/staapl | macro.rkt | #lang racket/base
(require
"../sig.rkt"
;; "sig.rkt"
"../coma/macro.rkt"
"../control/op.rkt"
"asm.rkt")
(provide (all-defined-out))
| null | https://raw.githubusercontent.com/zwizwa/staapl/e30e6ae6ac45de7141b97ad3cebf9b5a51bcda52/arm/macro.rkt | racket | "sig.rkt" | #lang racket/base
(require
"../sig.rkt"
"../coma/macro.rkt"
"../control/op.rkt"
"asm.rkt")
(provide (all-defined-out))
|
7aa6aa3b282ee18a14b475b9f0128e6599f641923be0db2cb5b68b7fdac9ddf3 | chaoxu/mgccl-haskell | qrt.hs | import System.IO
import Data.Set (fromList, toList)
main :: IO ()
main = do list <- getLine
p <- loop []
let l = words list
putStr $ unlines $ map printQuartets $ (toList.fromList) $ concatMap (\x->quartets $ part x l ([],[])) p
where part [] _ z = z
part (x:p) (y:l) (zero,one)
| x == '0' = part p l (y:zero,one)
| x == '1' = part p l (zero,y:one)
| otherwise = part p l (zero,one)
quartets xs = [if x<y then (x,y) else (y,x)|x<-pairs (fst xs),y<-pairs (snd xs)]
printQuartets ((a,b),(c,d)) = "{"++a++", "++b++"} {"++c++", "++d++"}"
pairs xs = [(x,y)|x<-xs,y<-xs,x<y]
loop xs = do end <- isEOF
if end
then return xs
else do c <- getLine
loop (c:xs)
| null | https://raw.githubusercontent.com/chaoxu/mgccl-haskell/bb03e39ae43f410bd2a673ac2b438929ab8ef7a1/rosalind/qrt.hs | haskell | import System.IO
import Data.Set (fromList, toList)
main :: IO ()
main = do list <- getLine
p <- loop []
let l = words list
putStr $ unlines $ map printQuartets $ (toList.fromList) $ concatMap (\x->quartets $ part x l ([],[])) p
where part [] _ z = z
part (x:p) (y:l) (zero,one)
| x == '0' = part p l (y:zero,one)
| x == '1' = part p l (zero,y:one)
| otherwise = part p l (zero,one)
quartets xs = [if x<y then (x,y) else (y,x)|x<-pairs (fst xs),y<-pairs (snd xs)]
printQuartets ((a,b),(c,d)) = "{"++a++", "++b++"} {"++c++", "++d++"}"
pairs xs = [(x,y)|x<-xs,y<-xs,x<y]
loop xs = do end <- isEOF
if end
then return xs
else do c <- getLine
loop (c:xs)
|
|
6190d9cffb9d60c540607e90d490482c43677e73e4e5c171df37872c3579c70a | MalloZup/missile | core_test.clj | (ns missile.core-test
(:require [clojure.test :refer :all]
[missile.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| null | https://raw.githubusercontent.com/MalloZup/missile/1d6085e5a19747a1fe585d9fa274bd7cf438d180/test/missile/core_test.clj | clojure | (ns missile.core-test
(:require [clojure.test :refer :all]
[missile.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
|
|
38c56e77052a5e212357390acb84f42454b7ccfe18d44d6da2192113aa7d6739 | tweag/ormolu | preserve-empty-lines.hs | {- ORMOLU_DISABLE -}
bar :: Int
bar = 2
baz :: Int
baz = 3
ORMOLU_ENABLE
foo :: Int
foo = 1
| null | https://raw.githubusercontent.com/tweag/ormolu/897e6736ec17d7e359091abe3c24f4abfec015c4/data/examples/other/disabling/preserve-empty-lines.hs | haskell | ORMOLU_DISABLE | bar :: Int
bar = 2
baz :: Int
baz = 3
ORMOLU_ENABLE
foo :: Int
foo = 1
|
0c71ea23537eaa6fa647ab76dc830046fd2115e79c61cb1d402e6473d9f55c4c | jablo/cablesim | dhcp_util.erl | %%%-------------------------------------------------------------------
%%% File : dhcp_lib.erl
Author : < >
%%% Description :
%%%
Created : 17 Apr 2006 by < >
%%%-------------------------------------------------------------------
-module(dhcp_util).
%% API
-export([optsearch/2, get_client_id/1, fmt_clientid/1, fmt_gateway/1, fmt_ip/1,
fmt_hostname/1,
get_tftpserver/1, get_tftpfile/1]).
-include("dhcp.hrl").
optsearch(Option, D) when is_record(D, dhcp) ->
case lists:keysearch(Option, 1, D#dhcp.options) of
{value, {Option, Value}} ->
{value, Value};
false ->
false
end.
get_client_id(D) when is_record(D, dhcp) ->
case optsearch(?DHO_DHCP_CLIENT_IDENTIFIER, D) of
{value, ClientId} ->
ClientId;
false ->
D#dhcp.chaddr
end.
fmt_clientid(D) when is_record(D, dhcp) ->
fmt_clientid(get_client_id(D));
fmt_clientid([_T, E1, E2, E3, E4, E5, E6]) ->
fmt_clientid({E1, E2, E3, E4, E5, E6});
fmt_clientid({E1, E2, E3, E4, E5, E6}) ->
lists:flatten(
io_lib:format("~2.16.0b:~2.16.0b:~2.16.0b:~2.16.0b:~2.16.0b:~2.16.0b",
[E1, E2, E3, E4, E5, E6])).
fmt_gateway(D) when is_record(D, dhcp) ->
case D#dhcp.giaddr of
{0, 0, 0, 0} -> [];
IP -> lists:flatten(io_lib:format("via ~s", [fmt_ip(IP)]))
end.
fmt_hostname(D) when is_record(D, dhcp) ->
case optsearch(?DHO_HOST_NAME, D) of
{value, Hostname} ->
lists:flatten(io_lib:format("(~s)", [Hostname]));
false ->
[]
end.
fmt_ip({A1, A2, A3, A4}) ->
io_lib:format("~w.~w.~w.~w", [A1, A2, A3, A4]).
get_tftpserver(D = #dhcp{}) ->
case optsearch(?DHO_BOOT_SERVER, D) of
{value, BServer} ->
binary_to_list(BServer);
X ->
io:format("get_tftpserver couldn't match ~p ~n", [X]),
ok
end.
get_tftpfile(D = #dhcp{}) ->
case optsearch(?DHO_BOOT_FILE, D) of
{value, BFile} ->
binary_to_list(BFile);
false ->
ok
end.
| null | https://raw.githubusercontent.com/jablo/cablesim/da6628190f9ec5c426df73e921ff575470d1a078/src/dhcp_util.erl | erlang | -------------------------------------------------------------------
File : dhcp_lib.erl
Description :
-------------------------------------------------------------------
API | Author : < >
Created : 17 Apr 2006 by < >
-module(dhcp_util).
-export([optsearch/2, get_client_id/1, fmt_clientid/1, fmt_gateway/1, fmt_ip/1,
fmt_hostname/1,
get_tftpserver/1, get_tftpfile/1]).
-include("dhcp.hrl").
optsearch(Option, D) when is_record(D, dhcp) ->
case lists:keysearch(Option, 1, D#dhcp.options) of
{value, {Option, Value}} ->
{value, Value};
false ->
false
end.
get_client_id(D) when is_record(D, dhcp) ->
case optsearch(?DHO_DHCP_CLIENT_IDENTIFIER, D) of
{value, ClientId} ->
ClientId;
false ->
D#dhcp.chaddr
end.
fmt_clientid(D) when is_record(D, dhcp) ->
fmt_clientid(get_client_id(D));
fmt_clientid([_T, E1, E2, E3, E4, E5, E6]) ->
fmt_clientid({E1, E2, E3, E4, E5, E6});
fmt_clientid({E1, E2, E3, E4, E5, E6}) ->
lists:flatten(
io_lib:format("~2.16.0b:~2.16.0b:~2.16.0b:~2.16.0b:~2.16.0b:~2.16.0b",
[E1, E2, E3, E4, E5, E6])).
fmt_gateway(D) when is_record(D, dhcp) ->
case D#dhcp.giaddr of
{0, 0, 0, 0} -> [];
IP -> lists:flatten(io_lib:format("via ~s", [fmt_ip(IP)]))
end.
fmt_hostname(D) when is_record(D, dhcp) ->
case optsearch(?DHO_HOST_NAME, D) of
{value, Hostname} ->
lists:flatten(io_lib:format("(~s)", [Hostname]));
false ->
[]
end.
fmt_ip({A1, A2, A3, A4}) ->
io_lib:format("~w.~w.~w.~w", [A1, A2, A3, A4]).
get_tftpserver(D = #dhcp{}) ->
case optsearch(?DHO_BOOT_SERVER, D) of
{value, BServer} ->
binary_to_list(BServer);
X ->
io:format("get_tftpserver couldn't match ~p ~n", [X]),
ok
end.
get_tftpfile(D = #dhcp{}) ->
case optsearch(?DHO_BOOT_FILE, D) of
{value, BFile} ->
binary_to_list(BFile);
false ->
ok
end.
|
77efb2793c50423f2cd7599fd477544ef1c8086294ce1531d44a02442e9cb696 | commercialhaskell/stack | Foo.hs | module Foo where
import Control.Monad.STM
import Files
foo :: IO String
foo = atomically $ pure $ "foo using " ++ files
| null | https://raw.githubusercontent.com/commercialhaskell/stack/b846eae85d9f30d28d7a4fa33fea139e7d1420c7/test/integration/tests/internal-libraries/files/src-foo/Foo.hs | haskell | module Foo where
import Control.Monad.STM
import Files
foo :: IO String
foo = atomically $ pure $ "foo using " ++ files
|
|
cafbfedc938cc39f456848eb22d5715870a2eededea6d23ababf9efb73c48768 | dwango/fialyzer | main.ml | open Obeam
open Base
open Result
open Fialyzer
open Common
let extract_debug_info_buf beam_filename layout =
let {
Beam.cl_abst = opt_abst;
Beam.cl_dbgi = opt_dbgi;
} = layout in
match opt_abst with
| Some abst ->
abst.Beam.abst_buf
| None ->
begin
match opt_dbgi with
| Some dbgi ->
dbgi.Beam.dbgi_buf
| None ->
let message = "abst and dbgi chunk is not found" in
raise Known_error.(FialyzerError (InvalidBeam {beam_filename; message}))
end
let beam_to_etf beam_filename beam_buf = match Beam.parse_layout beam_buf with
| Ok (layout, _) ->
let debug_info_buf = extract_debug_info_buf beam_filename layout in
begin
match External_term_format.parse debug_info_buf with
| Ok (expr, _) ->
Ok expr
| Error (message, _rest) ->
Error Known_error.(FialyzerError (InvalidBeam {beam_filename; message}))
end
| Error (message, _rest) ->
Error Known_error.(FialyzerError (InvalidBeam {beam_filename; message}))
let read_file beam_filename =
if Caml.Sys.file_exists beam_filename then
Ok (Bitstring.bitstring_of_file beam_filename)
else
Error Known_error.(FialyzerError (NoSuchFile beam_filename))
let code_of_file beam_filename =
read_file beam_filename >>= fun beam ->
beam_to_etf beam_filename beam >>= fun etf ->
let sf = Simple_term_format.of_etf etf in
Abstract_format.of_sf sf
|> map_error ~f:(fun e ->
let message = Abstract_format.sexp_of_err_t e |> Sexp.to_string in
Known_error.(FialyzerError (InvalidBeam {beam_filename; message})))
let module_of_file beam_filename =
code_of_file beam_filename >>= fun code ->
From_erlang.code_to_module code
let read_plt plt_option=
let open Result in
match plt_option with
| None -> Ok None
| Some plt_file ->
Plt.of_file plt_file >>| Option.return
let () =
Cui.work (fun param ->
try
Log.debug [%here] "=== start fialyzer ===";
let files = param.Cui.beam_files in
Result.ok_exn begin
read_plt param.Cui.plt_file >>= fun plt ->
result_map_m ~f:module_of_file files >>= fun modules ->
Type_check.check_modules plt modules
end;
Caml.print_endline "done (passed successfully)"
with
| Known_error.FialyzerError err ->
Caml.prerr_endline (Known_error.to_message err);
Caml.exit 1
| exn ->
raise exn)
| null | https://raw.githubusercontent.com/dwango/fialyzer/3c4b4fc2dacf84008910135bfef16e4ce79f9c89/bin/main.ml | ocaml | open Obeam
open Base
open Result
open Fialyzer
open Common
let extract_debug_info_buf beam_filename layout =
let {
Beam.cl_abst = opt_abst;
Beam.cl_dbgi = opt_dbgi;
} = layout in
match opt_abst with
| Some abst ->
abst.Beam.abst_buf
| None ->
begin
match opt_dbgi with
| Some dbgi ->
dbgi.Beam.dbgi_buf
| None ->
let message = "abst and dbgi chunk is not found" in
raise Known_error.(FialyzerError (InvalidBeam {beam_filename; message}))
end
let beam_to_etf beam_filename beam_buf = match Beam.parse_layout beam_buf with
| Ok (layout, _) ->
let debug_info_buf = extract_debug_info_buf beam_filename layout in
begin
match External_term_format.parse debug_info_buf with
| Ok (expr, _) ->
Ok expr
| Error (message, _rest) ->
Error Known_error.(FialyzerError (InvalidBeam {beam_filename; message}))
end
| Error (message, _rest) ->
Error Known_error.(FialyzerError (InvalidBeam {beam_filename; message}))
let read_file beam_filename =
if Caml.Sys.file_exists beam_filename then
Ok (Bitstring.bitstring_of_file beam_filename)
else
Error Known_error.(FialyzerError (NoSuchFile beam_filename))
let code_of_file beam_filename =
read_file beam_filename >>= fun beam ->
beam_to_etf beam_filename beam >>= fun etf ->
let sf = Simple_term_format.of_etf etf in
Abstract_format.of_sf sf
|> map_error ~f:(fun e ->
let message = Abstract_format.sexp_of_err_t e |> Sexp.to_string in
Known_error.(FialyzerError (InvalidBeam {beam_filename; message})))
let module_of_file beam_filename =
code_of_file beam_filename >>= fun code ->
From_erlang.code_to_module code
let read_plt plt_option=
let open Result in
match plt_option with
| None -> Ok None
| Some plt_file ->
Plt.of_file plt_file >>| Option.return
let () =
Cui.work (fun param ->
try
Log.debug [%here] "=== start fialyzer ===";
let files = param.Cui.beam_files in
Result.ok_exn begin
read_plt param.Cui.plt_file >>= fun plt ->
result_map_m ~f:module_of_file files >>= fun modules ->
Type_check.check_modules plt modules
end;
Caml.print_endline "done (passed successfully)"
with
| Known_error.FialyzerError err ->
Caml.prerr_endline (Known_error.to_message err);
Caml.exit 1
| exn ->
raise exn)
|
|
edca003dd652818670636aea024b3b281ec9744423da3c893e1e653bf8ea3278 | TorXakis/TorXakis | CstrId.hs |
TorXakis - Model Based Testing
Copyright ( c ) 2015 - 2017 TNO and Radboud University
See LICENSE at root directory of this repository .
TorXakis - Model Based Testing
Copyright (c) 2015-2017 TNO and Radboud University
See LICENSE at root directory of this repository.
-}
# LANGUAGE FlexibleContexts #
--------------------------------------------------------------------------------
-- |
Module : TorXakis . Compiler . . CstrId
Copyright : ( c ) TNO and Radboud University
License : BSD3 ( see the file license.txt )
--
Maintainer : ( Embedded Systems Innovation by )
-- Stability : experimental
-- Portability : portable
--
Compilation functions related to ' TorXakis ' constructor i d 's .
--------------------------------------------------------------------------------
module TorXakis.Compiler.ValExpr.CstrId
(compileToCstrId)
where
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Text (Text)
import CstrId (CstrId (CstrId))
import Id (Id (Id))
import SortId (SortId)
import TorXakis.Compiler.Data (CompilerM, getNextId)
import TorXakis.Compiler.Maps (findSortIdM)
import TorXakis.Compiler.MapsTo (MapsTo)
import TorXakis.Parser.Data (ADTDecl, CstrDecl, CstrE, Loc,
adtName, constructors, cstrFields,
cstrName, fieldSort, getLoc, nodeLoc)
| Compile a list of ADT declarations into a map from the location of the
-- constructor declaration, to their corresponding constructor id's.
compileToCstrId :: (MapsTo Text SortId mm)
=> mm -> [ADTDecl] -> CompilerM (Map (Loc CstrE) CstrId)
compileToCstrId mm ds = Map.fromList . concat <$>
traverse (adtToCstrId mm) ds
adtToCstrId :: (MapsTo Text SortId mm)
=> mm
-> ADTDecl
-> CompilerM [(Loc CstrE, CstrId)]
adtToCstrId mm a = do
sId <- findSortIdM mm (adtName a, nodeLoc a)
traverse (cstrToCstrId mm sId) (constructors a)
cstrToCstrId :: (MapsTo Text SortId mm)
=> mm
^ SortId of the containing ADT .
-> CstrDecl
-> CompilerM (Loc CstrE, CstrId)
cstrToCstrId mm sId c = do
i <- getNextId
aSids <- traverse (findSortIdM mm . fieldSort) (cstrFields c)
return (getLoc c, CstrId (cstrName c) (Id i) aSids sId)
| null | https://raw.githubusercontent.com/TorXakis/TorXakis/038463824b3d358df6b6b3ff08732335b7dbdb53/sys/txs-compiler/src/TorXakis/Compiler/ValExpr/CstrId.hs | haskell | ------------------------------------------------------------------------------
|
Stability : experimental
Portability : portable
------------------------------------------------------------------------------
constructor declaration, to their corresponding constructor id's. |
TorXakis - Model Based Testing
Copyright ( c ) 2015 - 2017 TNO and Radboud University
See LICENSE at root directory of this repository .
TorXakis - Model Based Testing
Copyright (c) 2015-2017 TNO and Radboud University
See LICENSE at root directory of this repository.
-}
# LANGUAGE FlexibleContexts #
Module : TorXakis . Compiler . . CstrId
Copyright : ( c ) TNO and Radboud University
License : BSD3 ( see the file license.txt )
Maintainer : ( Embedded Systems Innovation by )
Compilation functions related to ' TorXakis ' constructor i d 's .
module TorXakis.Compiler.ValExpr.CstrId
(compileToCstrId)
where
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Text (Text)
import CstrId (CstrId (CstrId))
import Id (Id (Id))
import SortId (SortId)
import TorXakis.Compiler.Data (CompilerM, getNextId)
import TorXakis.Compiler.Maps (findSortIdM)
import TorXakis.Compiler.MapsTo (MapsTo)
import TorXakis.Parser.Data (ADTDecl, CstrDecl, CstrE, Loc,
adtName, constructors, cstrFields,
cstrName, fieldSort, getLoc, nodeLoc)
| Compile a list of ADT declarations into a map from the location of the
compileToCstrId :: (MapsTo Text SortId mm)
=> mm -> [ADTDecl] -> CompilerM (Map (Loc CstrE) CstrId)
compileToCstrId mm ds = Map.fromList . concat <$>
traverse (adtToCstrId mm) ds
adtToCstrId :: (MapsTo Text SortId mm)
=> mm
-> ADTDecl
-> CompilerM [(Loc CstrE, CstrId)]
adtToCstrId mm a = do
sId <- findSortIdM mm (adtName a, nodeLoc a)
traverse (cstrToCstrId mm sId) (constructors a)
cstrToCstrId :: (MapsTo Text SortId mm)
=> mm
^ SortId of the containing ADT .
-> CstrDecl
-> CompilerM (Loc CstrE, CstrId)
cstrToCstrId mm sId c = do
i <- getNextId
aSids <- traverse (findSortIdM mm . fieldSort) (cstrFields c)
return (getLoc c, CstrId (cstrName c) (Id i) aSids sId)
|
6f9c692dcab859a228b4c2cd0af61590033df61a7110a2b640dcfe8efe26d5b0 | gergoerdi/tandoori | tuple.hs | id x = x
idPair x = (id x, id x)
class Funktor f where
fmap :: (a -> b) -> f a -> f b
instance Funktor ((,) a) where
fmap f (x, y) = (x, f y)
fmap :: (b -> c) -> ((,) a) b -> ((,) a) c
fmap f (x, y) = (x, f y)
| null | https://raw.githubusercontent.com/gergoerdi/tandoori/515142ce76b96efa75d7044c9077d85394585556/input/tuple.hs | haskell | id x = x
idPair x = (id x, id x)
class Funktor f where
fmap :: (a -> b) -> f a -> f b
instance Funktor ((,) a) where
fmap f (x, y) = (x, f y)
fmap :: (b -> c) -> ((,) a) b -> ((,) a) c
fmap f (x, y) = (x, f y)
|
|
39ecdcf0af34ef98aea028775cb3d9e0115d00e45950cb88ccc85a21f31b10ab | slyrus/clem | matrix-classes.lisp |
(in-package :clem)
taken from KMR 's clsql package
(eval-when (:compile-toplevel :load-toplevel :execute)
(declaim (inline delistify))
(defun delistify (list)
"Some MOPs, like openmcl 0.14.2, cons attribute values in a list."
(if (and (listp list) (null (cdr list)))
(car list)
list)))
(defmacro defmatrixclass (type direct-superclasses &key
(element-type)
(accumulator-type)
(initial-element)
minval maxval
(val-format))
(unless direct-superclasses (setf direct-superclasses '(matrix)))
`(progn
(defclass ,type ,direct-superclasses
((initial-element :accessor initial-element
:initarg :initial-element :initform ,initial-element))
(:metaclass standard-matrix-class)
,@(when element-type `((:element-type ,(delistify element-type))))
,@(when accumulator-type `((:accumulator-type ,(delistify accumulator-type))))
,@(when val-format `((:val-format ,(delistify val-format))))
,@(when minval `((:minval ,(if (symbolp minval) (symbol-value minval) minval))))
,@(when maxval `((:maxval ,(if (symbolp maxval) (symbol-value minval) maxval)))))))
(defmatrixclass t-matrix ()
:element-type t
:accumulator-type t)
(defmatrixclass number-matrix (t-matrix)
:element-type number
:accumulator-type number)
(defmatrixclass real-matrix (number-matrix)
:element-type real
:accumulator-type real)
(defmatrixclass complex-matrix (number-matrix)
:element-type complex
:accumulator-type complex)
(defmatrixclass float-matrix (real-matrix)
:element-type float
:accumulator-type float
:val-format "~4,9F")
(defmatrixclass integer-matrix (real-matrix)
:element-type integer
:accumulator-type integer
:val-format "~d")
(defmatrixclass unsigned-byte-matrix (integer-matrix)
:element-type (unsigned-byte *)
:accumulator-type (unsigned-byte *)
:val-format "~d")
(defmatrixclass bit-matrix (integer-matrix) :element-type (unsigned-byte 1)
:accumulator-type (signed-byte 32)
:minval 0
:maxval 1
:val-format "~b")
(defmatrixclass ub8-matrix (unsigned-byte-matrix)
:element-type (unsigned-byte 8)
:accumulator-type (unsigned-byte 32)
:minval 0
:maxval #.(- (expt 2 8) 1))
(defmatrixclass ub16-matrix (unsigned-byte-matrix)
:element-type (unsigned-byte 16)
:accumulator-type (unsigned-byte 32)
:minval 0
:maxval #.(- (expt 2 16) 1))
(defmatrixclass ub32-matrix (unsigned-byte-matrix)
:element-type (unsigned-byte 32)
:accumulator-type (unsigned-byte 32)
:minval 0
:maxval #.(- (expt 2 32) 1))
(defmatrixclass sb8-matrix (integer-matrix)
:element-type (signed-byte 8)
:accumulator-type (signed-byte 32)
:minval #.(- (expt 2 7))
:maxval #.(- (expt 2 7) 1))
(defmatrixclass sb16-matrix (integer-matrix)
:element-type (signed-byte 16)
:accumulator-type (signed-byte 32)
:minval #.(- (expt 2 15))
:maxval #.(- (expt 2 15) 1))
(defmatrixclass sb32-matrix (integer-matrix)
:element-type (signed-byte 32)
:accumulator-type (signed-byte 32)
:minval #.(- (expt 2 31))
:maxval #.(- (expt 2 31) 1))
(defmatrixclass fixnum-matrix (integer-matrix)
:element-type fixnum
:accumulator-type (unsigned-byte 32)
:minval most-negative-fixnum
:maxval most-positive-fixnum)
(defmatrixclass single-float-matrix (float-matrix)
:element-type single-float
:accumulator-type single-float
:initial-element 0f0
:minval most-negative-single-float
:maxval most-positive-single-float)
(defmatrixclass double-float-matrix (float-matrix)
:element-type double-float
:accumulator-type double-float
:initial-element 0d0
:minval most-negative-double-float
:maxval most-positive-double-float)
| null | https://raw.githubusercontent.com/slyrus/clem/5eb055bb3f45840b24fd44825b975aa36bd6d97c/src/matrix-classes.lisp | lisp |
(in-package :clem)
taken from KMR 's clsql package
(eval-when (:compile-toplevel :load-toplevel :execute)
(declaim (inline delistify))
(defun delistify (list)
"Some MOPs, like openmcl 0.14.2, cons attribute values in a list."
(if (and (listp list) (null (cdr list)))
(car list)
list)))
(defmacro defmatrixclass (type direct-superclasses &key
(element-type)
(accumulator-type)
(initial-element)
minval maxval
(val-format))
(unless direct-superclasses (setf direct-superclasses '(matrix)))
`(progn
(defclass ,type ,direct-superclasses
((initial-element :accessor initial-element
:initarg :initial-element :initform ,initial-element))
(:metaclass standard-matrix-class)
,@(when element-type `((:element-type ,(delistify element-type))))
,@(when accumulator-type `((:accumulator-type ,(delistify accumulator-type))))
,@(when val-format `((:val-format ,(delistify val-format))))
,@(when minval `((:minval ,(if (symbolp minval) (symbol-value minval) minval))))
,@(when maxval `((:maxval ,(if (symbolp maxval) (symbol-value minval) maxval)))))))
(defmatrixclass t-matrix ()
:element-type t
:accumulator-type t)
(defmatrixclass number-matrix (t-matrix)
:element-type number
:accumulator-type number)
(defmatrixclass real-matrix (number-matrix)
:element-type real
:accumulator-type real)
(defmatrixclass complex-matrix (number-matrix)
:element-type complex
:accumulator-type complex)
(defmatrixclass float-matrix (real-matrix)
:element-type float
:accumulator-type float
:val-format "~4,9F")
(defmatrixclass integer-matrix (real-matrix)
:element-type integer
:accumulator-type integer
:val-format "~d")
(defmatrixclass unsigned-byte-matrix (integer-matrix)
:element-type (unsigned-byte *)
:accumulator-type (unsigned-byte *)
:val-format "~d")
(defmatrixclass bit-matrix (integer-matrix) :element-type (unsigned-byte 1)
:accumulator-type (signed-byte 32)
:minval 0
:maxval 1
:val-format "~b")
(defmatrixclass ub8-matrix (unsigned-byte-matrix)
:element-type (unsigned-byte 8)
:accumulator-type (unsigned-byte 32)
:minval 0
:maxval #.(- (expt 2 8) 1))
(defmatrixclass ub16-matrix (unsigned-byte-matrix)
:element-type (unsigned-byte 16)
:accumulator-type (unsigned-byte 32)
:minval 0
:maxval #.(- (expt 2 16) 1))
(defmatrixclass ub32-matrix (unsigned-byte-matrix)
:element-type (unsigned-byte 32)
:accumulator-type (unsigned-byte 32)
:minval 0
:maxval #.(- (expt 2 32) 1))
(defmatrixclass sb8-matrix (integer-matrix)
:element-type (signed-byte 8)
:accumulator-type (signed-byte 32)
:minval #.(- (expt 2 7))
:maxval #.(- (expt 2 7) 1))
(defmatrixclass sb16-matrix (integer-matrix)
:element-type (signed-byte 16)
:accumulator-type (signed-byte 32)
:minval #.(- (expt 2 15))
:maxval #.(- (expt 2 15) 1))
(defmatrixclass sb32-matrix (integer-matrix)
:element-type (signed-byte 32)
:accumulator-type (signed-byte 32)
:minval #.(- (expt 2 31))
:maxval #.(- (expt 2 31) 1))
(defmatrixclass fixnum-matrix (integer-matrix)
:element-type fixnum
:accumulator-type (unsigned-byte 32)
:minval most-negative-fixnum
:maxval most-positive-fixnum)
(defmatrixclass single-float-matrix (float-matrix)
:element-type single-float
:accumulator-type single-float
:initial-element 0f0
:minval most-negative-single-float
:maxval most-positive-single-float)
(defmatrixclass double-float-matrix (float-matrix)
:element-type double-float
:accumulator-type double-float
:initial-element 0d0
:minval most-negative-double-float
:maxval most-positive-double-float)
|
|
780bb4447a3389775e68b5126db91daa89d7aa2859ce973580d9750af3536378 | paurkedal/ocaml-prime | prime_io.mli | Copyright ( C ) 2014 - -2022 Petter A. Urkedal < >
*
* This library is free software ; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version , with the LGPL-3.0 Linking Exception .
*
* This library is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* and the LGPL-3.0 Linking Exception along with this library . If not , see
* < / > and < > , respectively .
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version, with the LGPL-3.0 Linking Exception.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* and the LGPL-3.0 Linking Exception along with this library. If not, see
* </> and <>, respectively.
*)
(** Helpers for standard library channels. *)
val with_file_in : (in_channel -> 'a) -> string -> 'a
(** [with_file_in f fp] is [f ic] where [ic] is a channel opened for input
from the file at [fp]. [ic] is closed when [f] returns or raises an
exception. *)
val with_file_out : (out_channel -> 'a) -> string -> 'a
(** [with_file_out f fp] calls [f oc] where [oc] is a channel opened for
output to [fp]. [oc] is closed when [f] returns or raises an exception.
In the latter case the file is also removed. *)
val with1_file_in : (in_channel -> 'a -> 'b) -> string -> 'a -> 'b
* [ with1_file_in f fp acc ] is [ f ic acc ] where [ ic ] is a channel opened for
reading from [ fp ] during the call . Due to the effect involved , this is
not a special case of { ! with_file_in } .
reading from [fp] during the call. Due to the effect involved, this is
not a special case of {!with_file_in}. *)
val with1_file_out : (out_channel -> 'a -> 'b) -> string -> 'a -> 'b
* [ with1_file_out f fp acc ] is [ f oc acc ] where [ oc ] is a channel opened for
writing to [ fp ] during the call . Due to the effect involved , this is not
a special case of { ! with_file_out } .
writing to [fp] during the call. Due to the effect involved, this is not
a special case of {!with_file_out}. *)
val fold_input : ?close: bool -> (in_channel -> 'a -> 'a) ->
in_channel -> 'a -> 'a
(** [fold_input f ic] forms the composition of successive calls to [f ic]
until [End_of_file] is raised.
@param close If true, close [ic] before returning, including if an
exception occurs while evaluating [f ic acc] for some [acc]. *)
val iter_input : ?close: bool -> (in_channel -> unit) -> in_channel -> unit
(** [iter_input f ic] calls [f ic] until [End_of_file] is raised.
@param close If true, close [ic] before returning, including if an
exception occurs while executing [f ic]. *)
| null | https://raw.githubusercontent.com/paurkedal/ocaml-prime/42efa85317069d726e8e3989e51c24ba03c56b47/lib/prime_io.mli | ocaml | * Helpers for standard library channels.
* [with_file_in f fp] is [f ic] where [ic] is a channel opened for input
from the file at [fp]. [ic] is closed when [f] returns or raises an
exception.
* [with_file_out f fp] calls [f oc] where [oc] is a channel opened for
output to [fp]. [oc] is closed when [f] returns or raises an exception.
In the latter case the file is also removed.
* [fold_input f ic] forms the composition of successive calls to [f ic]
until [End_of_file] is raised.
@param close If true, close [ic] before returning, including if an
exception occurs while evaluating [f ic acc] for some [acc].
* [iter_input f ic] calls [f ic] until [End_of_file] is raised.
@param close If true, close [ic] before returning, including if an
exception occurs while executing [f ic]. | Copyright ( C ) 2014 - -2022 Petter A. Urkedal < >
*
* This library is free software ; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version , with the LGPL-3.0 Linking Exception .
*
* This library is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* and the LGPL-3.0 Linking Exception along with this library . If not , see
* < / > and < > , respectively .
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version, with the LGPL-3.0 Linking Exception.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* and the LGPL-3.0 Linking Exception along with this library. If not, see
* </> and <>, respectively.
*)
val with_file_in : (in_channel -> 'a) -> string -> 'a
val with_file_out : (out_channel -> 'a) -> string -> 'a
val with1_file_in : (in_channel -> 'a -> 'b) -> string -> 'a -> 'b
* [ with1_file_in f fp acc ] is [ f ic acc ] where [ ic ] is a channel opened for
reading from [ fp ] during the call . Due to the effect involved , this is
not a special case of { ! with_file_in } .
reading from [fp] during the call. Due to the effect involved, this is
not a special case of {!with_file_in}. *)
val with1_file_out : (out_channel -> 'a -> 'b) -> string -> 'a -> 'b
* [ with1_file_out f fp acc ] is [ f oc acc ] where [ oc ] is a channel opened for
writing to [ fp ] during the call . Due to the effect involved , this is not
a special case of { ! with_file_out } .
writing to [fp] during the call. Due to the effect involved, this is not
a special case of {!with_file_out}. *)
val fold_input : ?close: bool -> (in_channel -> 'a -> 'a) ->
in_channel -> 'a -> 'a
val iter_input : ?close: bool -> (in_channel -> unit) -> in_channel -> unit
|
f6888d1ec2deea4877146cd2c4dbf1f3cd00ae2795a291a2491a958b49f1b2cc | patricoferris/try-eio | worker_rpc.mli | (* Worker_rpc *)
* Functions to facilitate RPC calls to web workers .
The assumption made in this module is that RPCs are answered in the order
they are made .
The assumption made in this module is that RPCs are answered in the order
they are made. *)
type context
(** Represents the channel used to communicate with the worker *)
exception Timeout
(** When RPC calls take too long, the Lwt promise is set to failed state with
this exception. *)
val start : Brr_webworkers.Worker.t -> int -> (unit -> unit) -> context
* [ start worker timeout timeout_fn ] initialises communications with a web
worker . [ timeout ] is the number of seconds to wait for a response from any
RPC before raising an error , and [ timeout_fn ] is called when a timeout
occurs .
worker. [timeout] is the number of seconds to wait for a response from any
RPC before raising an error, and [timeout_fn] is called when a timeout
occurs. *)
val rpc :
context -> Js_top_worker_rpc.Rpc.call -> Js_top_worker_rpc.Rpc.response Lwt.t
(** [rpc context call] returns a promise containing the result from the worker.
If we wait longer than the timeout specified in [context] for a response,
the Lwt promise will fail with exception {!Timeout}. *)
| null | https://raw.githubusercontent.com/patricoferris/try-eio/206ec0f422c44d89a1bc1c1c0114e748ad9324b0/src/worker_rpc.mli | ocaml | Worker_rpc
* Represents the channel used to communicate with the worker
* When RPC calls take too long, the Lwt promise is set to failed state with
this exception.
* [rpc context call] returns a promise containing the result from the worker.
If we wait longer than the timeout specified in [context] for a response,
the Lwt promise will fail with exception {!Timeout}. |
* Functions to facilitate RPC calls to web workers .
The assumption made in this module is that RPCs are answered in the order
they are made .
The assumption made in this module is that RPCs are answered in the order
they are made. *)
type context
exception Timeout
val start : Brr_webworkers.Worker.t -> int -> (unit -> unit) -> context
* [ start worker timeout timeout_fn ] initialises communications with a web
worker . [ timeout ] is the number of seconds to wait for a response from any
RPC before raising an error , and [ timeout_fn ] is called when a timeout
occurs .
worker. [timeout] is the number of seconds to wait for a response from any
RPC before raising an error, and [timeout_fn] is called when a timeout
occurs. *)
val rpc :
context -> Js_top_worker_rpc.Rpc.call -> Js_top_worker_rpc.Rpc.response Lwt.t
|
32fbc84aa13b3daea91629935c0d9d5040d25a68736d4cf66bfba13d689226a4 | khibino/haskell-relational-record | Aggregate.hs | # OPTIONS_GHC -fno - warn - orphans #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE TypeSynonymInstances #-}
# LANGUAGE MultiParamTypeClasses #
-- |
Module : Database . Relational . . Aggregate
Copyright : 2013 - 2019
-- License : BSD3
--
-- Maintainer :
-- Stability : experimental
-- Portability : unknown
--
-- This module contains definitions about aggregated query type.
module Database.Relational.Monad.Aggregate (
-- * Aggregated Query
QueryAggregate,
AggregatedQuery,
toSQL,
toSubQuery,
Window, over
) where
import Data.Functor.Identity (Identity (runIdentity))
import Data.Monoid ((<>))
import Language.SQL.Keyword (Keyword(..))
import qualified Language.SQL.Keyword as SQL
import Database.Relational.Internal.ContextType (Flat, Aggregated, OverWindow)
import Database.Relational.SqlSyntax
(Duplication, SubQuery, JoinProduct,
OrderingTerm, composeOrderBy, aggregatedSubQuery,
AggregateColumnRef, AggregateElem, composePartitionBy, )
import qualified Database.Relational.SqlSyntax as Syntax
import Database.Relational.Typed.Record
(Record, Predicate, untypeRecord, recordColumns, unsafeRecordFromColumns)
import Database.Relational.Projectable (PlaceHolders, SqlContext)
import Database.Relational.Monad.Class (MonadRestrict(..))
import Database.Relational.Monad.Trans.Restricting
(Restrictings, restrictings, extractRestrict)
import Database.Relational.Monad.Trans.Aggregating
(extractAggregateTerms, AggregatingSetT, PartitioningSet)
import Database.Relational.Monad.Trans.Ordering
(Orderings, extractOrderingTerms)
import Database.Relational.Monad.BaseType (ConfigureQuery, askConfig)
import Database.Relational.Monad.Type (QueryCore, extractCore, OrderedQuery)
-- | Aggregated query monad type.
type QueryAggregate = Orderings Aggregated (Restrictings Aggregated (AggregatingSetT QueryCore))
| Aggregated query type . ' AggregatedQuery ' p r = = ' QueryAggregate ' ( ' PlaceHolders ' p , ' Record ' ' Aggregated ' r ) .
type AggregatedQuery p r = OrderedQuery Aggregated (Restrictings Aggregated (AggregatingSetT QueryCore)) p r
-- | Partition monad type for partition-by clause.
type Window c = Orderings c (PartitioningSet c)
-- | Restricted 'MonadRestrict' instance.
instance MonadRestrict Flat q => MonadRestrict Flat (Restrictings Aggregated q) where
restrict = restrictings . restrict
extract :: AggregatedQuery p r
-> ConfigureQuery (((((((PlaceHolders p, Record Aggregated r), [OrderingTerm]),
[Predicate Aggregated]),
[AggregateElem]),
[Predicate Flat]),
JoinProduct), Duplication)
extract = extractCore . extractAggregateTerms . extractRestrict . extractOrderingTerms
| Run ' AggregatedQuery ' to get SQL with ' ConfigureQuery ' computation .
^ ' AggregatedQuery ' to run
-> ConfigureQuery String -- ^ Result SQL string with 'ConfigureQuery' computation
toSQL = fmap Syntax.toSQL . toSubQuery
| Run ' AggregatedQuery ' to get ' SubQuery ' with ' ConfigureQuery ' computation .
^ ' AggregatedQuery ' to run
-> ConfigureQuery SubQuery -- ^ Result 'SubQuery' with 'ConfigureQuery' computation
toSubQuery q = do
(((((((_ph, pj), ot), grs), ag), rs), pd), da) <- extract q
c <- askConfig
return $ aggregatedSubQuery c (untypeRecord pj) da pd (map untypeRecord rs) ag (map untypeRecord grs) ot
extractWindow :: Window c a -> ((a, [OrderingTerm]), [AggregateColumnRef])
extractWindow = runIdentity . extractAggregateTerms . extractOrderingTerms
-- | Operator to make record of window function result using built 'Window' monad.
over :: SqlContext c
=> Record OverWindow a
-> Window c ()
-> Record c a
wp `over` win =
unsafeRecordFromColumns
[ c <> OVER <> SQL.paren (composePartitionBy pt <> composeOrderBy ot)
| c <- recordColumns wp
] where (((), ot), pt) = extractWindow win
infix 8 `over`
| null | https://raw.githubusercontent.com/khibino/haskell-relational-record/759b3d7cea207e64d2bd1cf195125182f73d2a52/relational-query/src/Database/Relational/Monad/Aggregate.hs | haskell | # LANGUAGE TypeSynonymInstances #
|
License : BSD3
Maintainer :
Stability : experimental
Portability : unknown
This module contains definitions about aggregated query type.
* Aggregated Query
| Aggregated query monad type.
| Partition monad type for partition-by clause.
| Restricted 'MonadRestrict' instance.
^ Result SQL string with 'ConfigureQuery' computation
^ Result 'SubQuery' with 'ConfigureQuery' computation
| Operator to make record of window function result using built 'Window' monad. | # OPTIONS_GHC -fno - warn - orphans #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
Module : Database . Relational . . Aggregate
Copyright : 2013 - 2019
module Database.Relational.Monad.Aggregate (
QueryAggregate,
AggregatedQuery,
toSQL,
toSubQuery,
Window, over
) where
import Data.Functor.Identity (Identity (runIdentity))
import Data.Monoid ((<>))
import Language.SQL.Keyword (Keyword(..))
import qualified Language.SQL.Keyword as SQL
import Database.Relational.Internal.ContextType (Flat, Aggregated, OverWindow)
import Database.Relational.SqlSyntax
(Duplication, SubQuery, JoinProduct,
OrderingTerm, composeOrderBy, aggregatedSubQuery,
AggregateColumnRef, AggregateElem, composePartitionBy, )
import qualified Database.Relational.SqlSyntax as Syntax
import Database.Relational.Typed.Record
(Record, Predicate, untypeRecord, recordColumns, unsafeRecordFromColumns)
import Database.Relational.Projectable (PlaceHolders, SqlContext)
import Database.Relational.Monad.Class (MonadRestrict(..))
import Database.Relational.Monad.Trans.Restricting
(Restrictings, restrictings, extractRestrict)
import Database.Relational.Monad.Trans.Aggregating
(extractAggregateTerms, AggregatingSetT, PartitioningSet)
import Database.Relational.Monad.Trans.Ordering
(Orderings, extractOrderingTerms)
import Database.Relational.Monad.BaseType (ConfigureQuery, askConfig)
import Database.Relational.Monad.Type (QueryCore, extractCore, OrderedQuery)
type QueryAggregate = Orderings Aggregated (Restrictings Aggregated (AggregatingSetT QueryCore))
| Aggregated query type . ' AggregatedQuery ' p r = = ' QueryAggregate ' ( ' PlaceHolders ' p , ' Record ' ' Aggregated ' r ) .
type AggregatedQuery p r = OrderedQuery Aggregated (Restrictings Aggregated (AggregatingSetT QueryCore)) p r
type Window c = Orderings c (PartitioningSet c)
instance MonadRestrict Flat q => MonadRestrict Flat (Restrictings Aggregated q) where
restrict = restrictings . restrict
extract :: AggregatedQuery p r
-> ConfigureQuery (((((((PlaceHolders p, Record Aggregated r), [OrderingTerm]),
[Predicate Aggregated]),
[AggregateElem]),
[Predicate Flat]),
JoinProduct), Duplication)
extract = extractCore . extractAggregateTerms . extractRestrict . extractOrderingTerms
| Run ' AggregatedQuery ' to get SQL with ' ConfigureQuery ' computation .
^ ' AggregatedQuery ' to run
toSQL = fmap Syntax.toSQL . toSubQuery
| Run ' AggregatedQuery ' to get ' SubQuery ' with ' ConfigureQuery ' computation .
^ ' AggregatedQuery ' to run
toSubQuery q = do
(((((((_ph, pj), ot), grs), ag), rs), pd), da) <- extract q
c <- askConfig
return $ aggregatedSubQuery c (untypeRecord pj) da pd (map untypeRecord rs) ag (map untypeRecord grs) ot
extractWindow :: Window c a -> ((a, [OrderingTerm]), [AggregateColumnRef])
extractWindow = runIdentity . extractAggregateTerms . extractOrderingTerms
over :: SqlContext c
=> Record OverWindow a
-> Window c ()
-> Record c a
wp `over` win =
unsafeRecordFromColumns
[ c <> OVER <> SQL.paren (composePartitionBy pt <> composeOrderBy ot)
| c <- recordColumns wp
] where (((), ot), pt) = extractWindow win
infix 8 `over`
|
6f82e7bcb9b327c4e278cffb1a6fce75dec75ee96817a10231b7eb6e1a77a68b | pveber/bistro | console_logger.mli | val create : unit -> Bistro_engine.Logger.t
| null | https://raw.githubusercontent.com/pveber/bistro/da0ebc969c8c5ca091905366875cbf8366622280/lib/utils/console_logger.mli | ocaml | val create : unit -> Bistro_engine.Logger.t
|
|
be21c5a44011ac84730abbd1f88ebe3edfcb03f5e594f5916d65bbf36d34a391 | Clozure/ccl | packages.lisp | (defpackage "ELISP"
(:shadow "=" "DEFUN" "LET" "IF" "SETQ" "ASSOC" "COMMANDP" "AREF")
(:use "COMMON-LISP" "HEMLOCK-INTERNALS")
(:export
"%"
"="
"ABORT-RECURSIVE-EDIT"
"AREF"
"ASET"
"ASSQ"
"ASSOC"
"AUTOLOAD"
"BOBP"
"BODY"
"BOLP"
"BOOL-VECTOR-P"
"BUFFER-LOCAL-P"
"CAR-LESS-THAN-CAR"
"CAR-SAFE"
"CDR-SAFE"
"COMMANDP"
"DEFMACRO"
"DEFUN"
"DEFVAR"
"FEATURES"
"FILENAME"
"GET-BUFFER"
"GET-BUFFER-CREATE"
"GET-DEFAULT"
"GLOBAL-SET-KEY"
"IF"
"INTERACTIVE"
"KEY"
"KEYMAP"
"LET"
"LEXICAL-LET"
"LOAD-FILE"
"LOAD-LIBRARY"
"LOAD-PATH"
"LOCAL-SET-KEY"
"MAKE-BOOL-VECTOR"
"MAKE-KEYMAP"
"MAKE-VARIABLE-BUFFER-LOCAL"
"MAKE-SPARSE-KEYMAP"
"NOERROR"
"SET-DEFAULT"
"SETQ"
"USE-LOCAL-MAP"
"WHILE"
)
)
(defpackage "ELISP-INTERNALS"
(:shadow "READ-STRING")
(:use "COMMON-LISP")
(:export
"FIND-LAMBDA-LIST-VARIABLES"
"GENERATE-CL-PACKAGE"
"REQUIRE-LOAD"
"GET-USER-HOMEDIR"
"INTERACTIVE-GLUE"
"*ELISP-READTABLE*"
)
)
(defpackage "ELISP-USER"
(:use "ELISP" "ELISP-INTERNALS")
)
| null | https://raw.githubusercontent.com/Clozure/ccl/6c1a9458f7a5437b73ec227e989aa5b825f32fd3/cocoa-ide/hemlock/unused/archive/elisp/packages.lisp | lisp | (defpackage "ELISP"
(:shadow "=" "DEFUN" "LET" "IF" "SETQ" "ASSOC" "COMMANDP" "AREF")
(:use "COMMON-LISP" "HEMLOCK-INTERNALS")
(:export
"%"
"="
"ABORT-RECURSIVE-EDIT"
"AREF"
"ASET"
"ASSQ"
"ASSOC"
"AUTOLOAD"
"BOBP"
"BODY"
"BOLP"
"BOOL-VECTOR-P"
"BUFFER-LOCAL-P"
"CAR-LESS-THAN-CAR"
"CAR-SAFE"
"CDR-SAFE"
"COMMANDP"
"DEFMACRO"
"DEFUN"
"DEFVAR"
"FEATURES"
"FILENAME"
"GET-BUFFER"
"GET-BUFFER-CREATE"
"GET-DEFAULT"
"GLOBAL-SET-KEY"
"IF"
"INTERACTIVE"
"KEY"
"KEYMAP"
"LET"
"LEXICAL-LET"
"LOAD-FILE"
"LOAD-LIBRARY"
"LOAD-PATH"
"LOCAL-SET-KEY"
"MAKE-BOOL-VECTOR"
"MAKE-KEYMAP"
"MAKE-VARIABLE-BUFFER-LOCAL"
"MAKE-SPARSE-KEYMAP"
"NOERROR"
"SET-DEFAULT"
"SETQ"
"USE-LOCAL-MAP"
"WHILE"
)
)
(defpackage "ELISP-INTERNALS"
(:shadow "READ-STRING")
(:use "COMMON-LISP")
(:export
"FIND-LAMBDA-LIST-VARIABLES"
"GENERATE-CL-PACKAGE"
"REQUIRE-LOAD"
"GET-USER-HOMEDIR"
"INTERACTIVE-GLUE"
"*ELISP-READTABLE*"
)
)
(defpackage "ELISP-USER"
(:use "ELISP" "ELISP-INTERNALS")
)
|
|
352b5fa04ae8362894e213797028da12737f97cc911f66a86ace43201b6670b2 | andorp/mini-grin | Env.hs | # LANGUAGE DeriveFunctor #
module Grin.Interpreter.Env where
import Data.List (foldl')
import Data.Maybe (fromMaybe)
import Grin.Pretty
import Grin.Value
import qualified Data.Map.Strict as Map
-- * Env
-- | Environment mapping names to abstract values.
newtype Env v = Env (Map.Map Name v)
deriving (Eq, Show, Ord, Functor)
empty :: Env v
empty = Env mempty
lookup :: (Env v) -> Name -> v
lookup (Env m) n = fromMaybe (error $ "Missing:" ++ show n) $ Map.lookup n m
insert :: Name -> v -> Env v -> Env v
insert n v (Env m) = Env $ Map.insert n v m
inserts :: [(Name, v)] -> Env v -> Env v
inserts vs (Env m) = Env $ foldl' (\n (k,v) -> Map.insert k v n) m vs
-- Explicit instance!! different from default
instance (Semigroup v) => Semigroup (Env v) where
Env m1 <> Env m2 = Env (Map.unionWith (<>) m1 m2)
instance (Semigroup v) => Monoid (Env v) where
mempty = Env mempty
instance (Pretty v) => Pretty (Env v) where
pretty (Env m) = prettyKeyValue (Map.toList m)
| null | https://raw.githubusercontent.com/andorp/mini-grin/99913efa0f81cb2a76893d3e48c6d025df9c40c9/grin/src/Grin/Interpreter/Env.hs | haskell | * Env
| Environment mapping names to abstract values.
Explicit instance!! different from default | # LANGUAGE DeriveFunctor #
module Grin.Interpreter.Env where

import Data.List (foldl')
import Data.Maybe (fromMaybe)
import Grin.Pretty
import Grin.Value
import qualified Data.Map.Strict as Map

-- | An environment: a finite mapping from names to abstract values.
newtype Env v = Env (Map.Map Name v)
  deriving (Eq, Show, Ord, Functor)

-- | Environment with no bindings.
empty :: Env v
empty = Env Map.empty

-- | Partial lookup: raises an 'error' for an unbound name.
lookup :: (Env v) -> Name -> v
lookup (Env bindings) name =
  case Map.lookup name bindings of
    Just value -> value
    Nothing    -> error ("Missing:" ++ show name)

-- | Add one binding, replacing any existing binding for that name.
insert :: Name -> v -> Env v -> Env v
insert name value (Env bindings) = Env (Map.insert name value bindings)

-- | Add many bindings; later pairs override earlier ones.
inserts :: [(Name, v)] -> Env v -> Env v
inserts pairs (Env bindings) = Env (foldl' addOne bindings pairs)
  where
    addOne acc (name, value) = Map.insert name value acc

-- Names bound on both sides have their values merged with '<>'.
instance (Semigroup v) => Semigroup (Env v) where
  Env m1 <> Env m2 = Env (Map.unionWith (<>) m1 m2)

instance (Semigroup v) => Monoid (Env v) where
  mempty = Env mempty

instance (Pretty v) => Pretty (Env v) where
  pretty (Env m) = prettyKeyValue (Map.toList m)
|
9611dfffcbf9d85dafd3ee4cdfbb4e6dbc2e6828049ec0b5d547e99d14f12a5f | david-vanderson/warp | upgrade.rkt | #lang racket/base
(require racket/class
racket/draw)
(require "defs.rkt"
"utils.rkt"
"draw-utils.rkt")
(provide (all-defined-out))
;; Fixed pickup radius of an upgrade object.
(define (upgrade-radius space u)
  5)

;; An upgrade is alive while it has no lifetime set, or while its age
;; has not yet exceeded that lifetime.
(define (upgrade-alive? space u)
  (or (not (upgrade-life u))
      ((obj-age space u) . <= . (upgrade-life u))))

;; Draw the upgrade as a small colored circle sprite on the ships layer.
(define (draw-upgrade csd center scale space u fowa layer-ships)
  (obj-sprite u csd center scale layer-ships 'circle
              (/ (* 2.0 (upgrade-radius space u)) 100) fowa 0.0
              (send the-color-database find-color (upgrade-color u))))

;; Build a short-lived text annotation anchored near [ship]; offset by
;; the ship's width on the side chosen from its vertical velocity
;; (assumes the usual screen y-axis orientation — confirm in draw code).
(define (ship-msg space ship msg)
  (define w (ship-w ship 1.0))
  (define y (if ((obj-dy ship) . < . 0)
                (+ (obj-y ship) w 16)
                (- (obj-y ship) w 10)))
  (make-ann-text (obj-x ship) y #:pos 'space
                 (space-time space) 1000 2000 msg))
;; Apply one random upgrade chosen from [alltypes] to [ship], scaling
;; the chosen stat by [amount] (default 1.1, i.e. +10%).  Returns the
;; list of change records; a type the ship does not support is removed
;; and another one is tried until something applies or the list is empty.
(define (upgrade-ship-random space ship
                             [alltypes '(engine warp
                                         turning hull radar
                                         pbolt missile cannon regen)]
                             #:amount [amount 1.1])
  ;; Percentage string shown in the on-screen message, e.g. "110".
  (define amtstr (number->string (inexact->exact (round (* 100.0 amount)))))
  (define changes '())
  (let loop ((types alltypes))
    (cond
      ((null? types)
       (printf "upgrade-ship-random ran out of types ~v for ship ~v\n" alltypes ship))
      (else
       ;; Pick a candidate type uniformly at random.
       (define t (list-ref types (random (length types))))
       (case t
         ((warp)
          (define t (ship-tool ship 'warp))
          (when t
            (append! changes
                     (chstat (ob-id ship) 'toolval
                             (list 'warp (list (* amount (car (tool-val t))) ; speed
                                               (* (/ amount) (cadr (tool-val t))) ; threshold
                                               0.0)))
                     (chadd (ship-msg space ship (string-append "warp " amtstr "%")) #f))))
         ((regen)
          (define t (ship-tool ship 'regen))
          (when t
            (append! changes
                     (chstat (ob-id ship) 'toolval
                             (list 'regen (* amount (tool-val t))))
                     (chadd (ship-msg space ship (string-append "regen " amtstr "%")) #f))))
         ((cannon)
          (define t (ship-tool ship 'cannon))
          (when t
            (append! changes
                     (chstat (ob-id ship) 'toolval
                             (list 'cannon (* amount (tool-val t))))
                     (chadd (ship-msg space ship (string-append "cannon " amtstr "%")) #f))))
         ((missile)
          (define t (ship-tool ship 'missile))
          (when t
            (append! changes
                     (chstat (ob-id ship) 'toolval
                             (list 'missile (list (car (tool-val t)) (* amount (cadr (tool-val t))))))
                     (chadd (ship-msg space ship (string-append "missile " amtstr "%")) #f))))
         ((pbolt)
          (define t (ship-tool ship 'pbolt))
          (when t
            (append! changes
                     (chstat (ob-id ship) 'toolval
                             (list 'pbolt (* amount (tool-val t))))
                     (chadd (ship-msg space ship (string-append "plasma " amtstr "%")) #f))))
         ((engine)
          (define t (ship-tool ship 'engine))
          (when t
            (append! changes
                     (chstat (ob-id ship) 'toolval
                             (list 'engine (* amount (tool-val t))))
                     (chadd (ship-msg space ship (string-append "engine " amtstr "%")) #f))))
         ((turning)
          ;; Turning spans three tools; upgrade whichever are present.
          (for ((tname '(turnleft turnright steer)))
            (define t (ship-tool ship tname))
            (when t
              (append! changes
                       (chstat (ob-id ship) 'toolval (list tname (* amount (tool-val t)))))))
          (when (not (null? changes))
            (append! changes
                     (chadd (ship-msg space ship (string-append "turning " amtstr "%")) #f))))
         ((hull)
          (append! changes
                   (chstat (ob-id ship) 'hull (* amount (ship-maxcon ship)))
                   (chadd (ship-msg space ship (string-append "hull " amtstr "%")) #f)))
         ((radar)
          (append! changes
                   (chstat (ob-id ship) 'radar (* amount (ship-radar ship)))
                   (chadd (ship-msg space ship (string-append "radar " amtstr "%")) #f))))
       ;; Nothing applied: drop this type and try another one.
       (when (null? changes)
         (loop (filter (lambda (x) (not (equal? x t)))
                       types))))))
  changes)
speed
threshold | #lang racket/base
(require racket/class
racket/draw)
(require "defs.rkt"
"utils.rkt"
"draw-utils.rkt")
(provide (all-defined-out))
;; Fixed pickup radius of an upgrade object.
(define (upgrade-radius space u)
  5)

;; Alive while no lifetime is set, or while age <= lifetime.
(define (upgrade-alive? space u)
  (or (not (upgrade-life u))
      ((obj-age space u) . <= . (upgrade-life u))))

;; Draw the upgrade as a colored circle sprite on the ships layer.
(define (draw-upgrade csd center scale space u fowa layer-ships)
  (obj-sprite u csd center scale layer-ships 'circle
              (/ (* 2.0 (upgrade-radius space u)) 100) fowa 0.0
              (send the-color-database find-color (upgrade-color u))))

;; Short-lived text annotation near [ship], offset vertically by the
;; ship's width on the side chosen from its vertical velocity.
(define (ship-msg space ship msg)
  (define w (ship-w ship 1.0))
  (define y (if ((obj-dy ship) . < . 0)
                (+ (obj-y ship) w 16)
                (- (obj-y ship) w 10)))
  (make-ann-text (obj-x ship) y #:pos 'space
                 (space-time space) 1000 2000 msg))
;; Apply one random upgrade chosen from [alltypes] to [ship], scaling
;; the chosen stat by [amount] (default 1.1).  Returns the list of
;; change records; unsupported types are removed and another is tried.
(define (upgrade-ship-random space ship
                             [alltypes '(engine warp
                                         turning hull radar
                                         pbolt missile cannon regen)]
                             #:amount [amount 1.1])
  ;; Percentage string for the on-screen message, e.g. "110".
  (define amtstr (number->string (inexact->exact (round (* 100.0 amount)))))
  (define changes '())
  (let loop ((types alltypes))
    (cond
      ((null? types)
       (printf "upgrade-ship-random ran out of types ~v for ship ~v\n" alltypes ship))
      (else
       ;; Pick a candidate type uniformly at random.
       (define t (list-ref types (random (length types))))
       (case t
         ((warp)
          (define t (ship-tool ship 'warp))
          (when t
            (append! changes
                     (chstat (ob-id ship) 'toolval
                             (list 'warp (list (* amount (car (tool-val t))) ; speed
                                               (* (/ amount) (cadr (tool-val t))) ; threshold
                                               0.0)))
                     (chadd (ship-msg space ship (string-append "warp " amtstr "%")) #f))))
         ((regen)
          (define t (ship-tool ship 'regen))
          (when t
            (append! changes
                     (chstat (ob-id ship) 'toolval
                             (list 'regen (* amount (tool-val t))))
                     (chadd (ship-msg space ship (string-append "regen " amtstr "%")) #f))))
         ((cannon)
          (define t (ship-tool ship 'cannon))
          (when t
            (append! changes
                     (chstat (ob-id ship) 'toolval
                             (list 'cannon (* amount (tool-val t))))
                     (chadd (ship-msg space ship (string-append "cannon " amtstr "%")) #f))))
         ((missile)
          (define t (ship-tool ship 'missile))
          (when t
            (append! changes
                     (chstat (ob-id ship) 'toolval
                             (list 'missile (list (car (tool-val t)) (* amount (cadr (tool-val t))))))
                     (chadd (ship-msg space ship (string-append "missile " amtstr "%")) #f))))
         ((pbolt)
          (define t (ship-tool ship 'pbolt))
          (when t
            (append! changes
                     (chstat (ob-id ship) 'toolval
                             (list 'pbolt (* amount (tool-val t))))
                     (chadd (ship-msg space ship (string-append "plasma " amtstr "%")) #f))))
         ((engine)
          (define t (ship-tool ship 'engine))
          (when t
            (append! changes
                     (chstat (ob-id ship) 'toolval
                             (list 'engine (* amount (tool-val t))))
                     (chadd (ship-msg space ship (string-append "engine " amtstr "%")) #f))))
         ((turning)
          ;; Turning spans three tools; upgrade whichever are present.
          (for ((tname '(turnleft turnright steer)))
            (define t (ship-tool ship tname))
            (when t
              (append! changes
                       (chstat (ob-id ship) 'toolval (list tname (* amount (tool-val t)))))))
          (when (not (null? changes))
            (append! changes
                     (chadd (ship-msg space ship (string-append "turning " amtstr "%")) #f))))
         ((hull)
          (append! changes
                   (chstat (ob-id ship) 'hull (* amount (ship-maxcon ship)))
                   (chadd (ship-msg space ship (string-append "hull " amtstr "%")) #f)))
         ((radar)
          (append! changes
                   (chstat (ob-id ship) 'radar (* amount (ship-radar ship)))
                   (chadd (ship-msg space ship (string-append "radar " amtstr "%")) #f))))
       ;; Nothing applied: drop this type and try another one.
       (when (null? changes)
         (loop (filter (lambda (x) (not (equal? x t)))
                       types))))))
  changes)
0b5d2e077696d565da87d75d11da6fa29038faba03e3ee55a470ae92e62f6de2 | input-output-hk/rscoin-haskell | Config.hs | # LANGUAGE TemplateHaskell #
-- | Configuration for rscoin-deploy.
module Config
       ( DeployConfig (..)
       , readDeployConfig
       ) where

import qualified Data.Aeson.TH as A
import qualified Data.Yaml as Y

import RSCoin.Core (Severity)
import Serokell.Aeson.Options (defaultOptions, leaveTagOptions)

-- | Deployment parameters read from a YAML file: working directory,
-- node counts, and logging severities (the @Maybe@ fields are
-- presumably per-component overrides of 'dcGlobalSeverity' — confirm
-- in the consumer).
data DeployConfig = DeployConfig
    { dcDirectory        :: !FilePath
    , dcCreateTemp       :: !Bool
    , dcMintettes        :: !Word
    , dcExplorers        :: !Word
    , dcPeriod           :: !Word
    , dcGlobalSeverity   :: !Severity
    , dcBankSeverity     :: !(Maybe Severity)
    , dcNotarySeverity   :: !(Maybe Severity)
    , dcMintetteSeverity :: !(Maybe Severity)
    , dcExplorerSeverity :: !(Maybe Severity)
    } deriving (Show)

-- JSON instances used by the YAML decoder below.
$(A.deriveJSON leaveTagOptions ''Severity)
$(A.deriveJSON defaultOptions ''DeployConfig)

-- | Read and parse the deployment config; the returned value embeds a
-- fatal 'error' when the file cannot be parsed.
readDeployConfig :: FilePath -> IO DeployConfig
readDeployConfig fp =
    either (error . ("[FATAL] Failed to parse config: " ++) . show) id <$>
    Y.decodeFileEither fp
| null | https://raw.githubusercontent.com/input-output-hk/rscoin-haskell/109d8f6f226e9d0b360fcaac14c5a90da112a810/src/Deploy/Config.hs | haskell | | Configuration for rscoin-deploy. | # LANGUAGE TemplateHaskell #
module Config
       ( DeployConfig (..)
       , readDeployConfig
       ) where

import qualified Data.Aeson.TH as A
import qualified Data.Yaml as Y

import RSCoin.Core (Severity)
import Serokell.Aeson.Options (defaultOptions, leaveTagOptions)

-- | Deployment parameters loaded from a YAML configuration file.
data DeployConfig = DeployConfig
    { dcDirectory        :: !FilePath
    , dcCreateTemp       :: !Bool
    , dcMintettes        :: !Word
    , dcExplorers        :: !Word
    , dcPeriod           :: !Word
    , dcGlobalSeverity   :: !Severity
    , dcBankSeverity     :: !(Maybe Severity)
    , dcNotarySeverity   :: !(Maybe Severity)
    , dcMintetteSeverity :: !(Maybe Severity)
    , dcExplorerSeverity :: !(Maybe Severity)
    } deriving (Show)

-- JSON instances used by the YAML decoder below.
$(A.deriveJSON leaveTagOptions ''Severity)
$(A.deriveJSON defaultOptions ''DeployConfig)

-- | Load the deployment config; the result embeds a fatal 'error'
-- (forced on use) when the file cannot be parsed.
readDeployConfig :: FilePath -> IO DeployConfig
readDeployConfig path = do
    parsed <- Y.decodeFileEither path
    return (either (\err -> error ("[FATAL] Failed to parse config: " ++ show err))
                   id
                   parsed)
|
a46bb0afdf33756bdfbd47b04b631c5c0b2713ba383c23ba420748bd26f8f4f8 | LCBH/UKano | reduction_bipro.ml | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* , , and *
* *
* Copyright ( C ) INRIA , CNRS 2000 - 2020 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* Bruno Blanchet, Vincent Cheval, and Marc Sylvestre *
* *
* Copyright (C) INRIA, CNRS 2000-2020 *
* *
*************************************************************)
This program is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details ( in file LICENSE ) .
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details (in file LICENSE).
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
Trace reconstruction
This version of the trace reconstruction does not exploit the
order of nodes in the derivation tree .
This version of the trace reconstruction does not exploit the
order of nodes in the derivation tree.
*)
(* TO DO Test phases
Should I use evaluated terms in the "comment" field?
*)
open Types
open Pitypes
open Terms
open Reduction_helper
(* Mutable flag recording that the reconstruction made a forward step. *)
let made_forward_step = ref false
(* Number of reconstruction attempts that failed so far. *)
let failed_traces = ref 0

(* Debugging switches: verbose output for clause lookup and backtracking. *)
let debug_find_io_rule = ref false
let debug_backtracking = ref false
(* Debug printing hook: currently a no-op. *)
let debug_print s = ()
print_string s ;
( )
Display.Text.newline()*)
(* This exception is raised when the derivation prevents executing
   a step *)
exception DerivBlocks

(* This exception is used in reduction_nobacktrack.
   It is raised after a bunch of reductions to get the final state
   after these reductions, while preventing backtracking on these
   reductions. *)
exception Reduced of (term * term) reduc_state
[ Terms.auto_cleanup f ] runs [ f ( ) ] , removing all links
created by [ f ( ) ] , whether [ f ] terminates normally or
with an exception
[ auto_cleanup_red ] is a variant of this function that
treats the exception [ Reduced ] specially . Indeed , in most
cases , when an exception is raised , it is because we
backtrack , so the links we have set must be removed ,
since we undo the reductions .
However , the exception [ Reduced ] is different : for this
exception , we want to get the final state , so the links
must be kept .
created by [f()], whether [f] terminates normally or
with an exception
[auto_cleanup_red] is a variant of this function that
treats the exception [Reduced] specially. Indeed, in most
cases, when an exception is raised, it is because we
backtrack, so the links we have set must be removed,
since we undo the reductions.
However, the exception [Reduced] is different: for this
exception, we want to get the final state, so the links
must be kept.
*)
(* Run [f ()], removing the links created by [f] on normal return and
   on every exception except [Reduced]: for [Reduced] the final state
   must be kept, so the links are pushed back into [current_bound_vars]
   to be cleaned up later if needed. *)
let auto_cleanup_red f =
  let tmp_bound_vars = !current_bound_vars in
  current_bound_vars := [];
  try
    let r = f () in
    List.iter (fun v -> v.link <- NoLink) (!current_bound_vars);
    current_bound_vars := tmp_bound_vars;
    r
  with
    Reduced s ->
      (* Do not delete the links when the exception [Reduced] is raised.
         Keep them in [current_bound_vars] so that they are deleted later if needed *)
      current_bound_vars := List.rev_append tmp_bound_vars (!current_bound_vars);
      raise (Reduced s)
  | x ->
      List.iter (fun v -> v.link <- NoLink) (!current_bound_vars);
      current_bound_vars := tmp_bound_vars;
      raise x
(* Set when we should take the else branch of Get but we cannot
   because an element has already been inserted so that the in branch
   is taken. In this case, we try delaying the inserts. *)
let has_backtrack_get = ref false

(* Raised when trace reconstruction cannot produce any result. *)
exception No_result
(* We use the exception Unify for local failure *)

(* [FailOnlyOnSide i] signals that an action failed on side [i] only
   (1 = left component of choice, 2 = right component). *)
exception FailOnlyOnSide of int
(* [make_bi_choice (t1,t2)] builds the term [choice[t1,t2]]. *)
let make_bi_choice (left, right) = make_choice left right

(* Merge two facts on physically the same predicate into one fact whose
   arguments are choice terms. *)
let make_bi_choice_fact fact_pair =
  match fact_pair with
  | Pred(p1,args1), Pred(p2,args2) when p1 == p2 ->
      Pred(p1, List.map2 make_choice args1 args2)
  | _ -> Parsing_helper.internal_error "[reduction_bipro.ml >> make_bi_choice_fact] Should be the same predicate."

(* Inverse of [make_bi_choice]: split a choice term into its two sides. *)
let get_choice t =
  match t with
  | FunApp({ f_cat = Choice }, [t1;t2]) -> (t1,t2)
  | _ -> Parsing_helper.internal_error "Choice term expected"
(* Component-wise equality modulo the equational theory on bi-terms. *)
let equal_bi_terms_modulo (t1,t2) (t1',t2') =
  (equal_terms_modulo t1 t1') && (equal_terms_modulo t2 t2')

(* Test whether a bi-term equals [true] on both sides; raises
   [FailOnlyOnSide i] when exactly one side [i] differs from [true]. *)
let is_true_test (t1,t2) =
  let r1 = equal_terms_modulo t1 Terms.true_term in
  let r2 = equal_terms_modulo t2 Terms.true_term in
  if r1 && r2 then true else
  if (not r1) && (not r2) then false else
  raise (FailOnlyOnSide (if (not r1) then 1 else 2))

(* [bi_action action] executes action for both sides.
   Raises Unify when it fails.
   Raises FailOnlyOnSide 1 when left side only of the action fails.
   Raises FailOnlyOnSide 2 when right side only of the action fails.
   Raises Unify when the action fails on both sides. *)

let bi_action action =
  try
    let t1 = action 1 in
    try
      let t2 = action 2 in
      (t1,t2)
    with Unify ->
      (* Left side succeeded, right side failed *)
      raise (FailOnlyOnSide 2)
  with Unify ->
    (* Left side failed *)
    let _ = action 2 in
    (* Left side failed, right side succeeded *)
    raise (FailOnlyOnSide 1)
(* Apply [rev_name_subst] to a predicate argument list: a unary
   predicate yields a pair of identical terms, a binary one a pair of
   the two translated terms. *)
let rev_name_subst_bi args =
  match args with
  | [t] -> let r = rev_name_subst t in (r, r)
  | [t1;t2] -> (rev_name_subst t1, rev_name_subst t2)
  | _ -> Parsing_helper.internal_error "Unexpected number of arguments for this predicate"

(* Type of the terms in a predicate argument list (one or two arguments). *)
let get_term_type_bi args =
  match args with
  | [t] -> Terms.get_term_type t
  | [t1;_] -> Terms.get_term_type t1
  | _ -> Parsing_helper.internal_error "Unexpected number of arguments for this predicate"
(* Phase from which binary (choice) predicates must be used; fails if
   the analysis did not set it. *)
let get_min_choice_phase() =
  match (!Param.current_state).pi_min_choice_phase with
  | Set min_phase -> min_phase
  | Unset -> Parsing_helper.internal_error "pi_min_choice_phase not set"

(* Build a message fact for [phase]: unary [Mess] below the minimum
   choice phase, binary [MessBin] from that phase on. *)
let build_mess_fact phase (tc1,tc2) (t1,t2) =
  if phase < get_min_choice_phase() then
    Pred(Param.get_pred(Mess(phase, Terms.get_term_type t1)), [tc1;t1])
  else
    Pred(Param.get_pred(MessBin(phase, Terms.get_term_type t1)), [tc1;t1;tc2;t2])

(* Same convention for table facts: unary [Table] vs binary [TableBin]. *)
let build_table_fact phase (t1,t2) =
  if phase < get_min_choice_phase() then
    Pred(Param.get_pred(Table(phase)), [t1])
  else
    Pred(Param.get_pred(TableBin(phase)), [t1;t2])

(* Build the precise-event fact for occurrence [occ_name] and bi-term. *)
let build_precise_fact occ_name (t1,t2) =
  let ev = Param.get_precise_event (Action (get_term_type t1)) in
  Pred(Param.begin2_pred,[FunApp(ev,[occ_name;t1]);FunApp(ev,[occ_name;t2])])

(* Detect various goals *)

(* Is the fact a table goal matched by bi-term [t] in phase [cur_phase]? *)
let is_table_goal cur_phase t = function
  | Pred({p_info = [TableBin(i)]; _},[tbl_elem1;tbl_elem2]) ->
      (* If the term tbl_elem is in the table
         in phase cur_state.current_phase, it will still be in the table in any
         later phase. *)
      cur_phase <= i &&
      equal_bi_terms_modulo (tbl_elem1,tbl_elem2) t
  | Pred({p_info = [Table(i)]},[tbl_elem]) ->
      (* When the phase is less than [min_choice_phase],
         we use a unary [Table] predicate. *)
      cur_phase <= i &&
      equal_bi_terms_modulo (tbl_elem,tbl_elem) t
  | _ -> false

(* Is the fact a message goal on channel [tc] with payload [t] in the
   current phase? (unary and binary variants, as above) *)
let is_mess_goal cur_phase tc t = function
  | Pred({p_info = [MessBin(n,_)]},[tcg1;tg1;tcg2;tg2]) ->
      (n == cur_phase) &&
      equal_bi_terms_modulo (tg1,tg2) t &&
      equal_bi_terms_modulo (tcg1,tcg2) tc
  | Pred({p_info = [Mess(n,_)]},[tcg;tg]) ->
      (n == cur_phase) &&
      equal_bi_terms_modulo (tg,tg) t &&
      equal_bi_terms_modulo (tcg,tcg) tc
  | _ -> false
(* Display clauses *)

(* Print clause number [n] with its tag and the clause itself
   (hypotheses [sons] and conclusion [concl]); debugging output. *)
let display_rule (n, sons, hsl, nl, concl) =
  print_string ("Rule " ^ (string_of_int n) ^ ": ");
  display_tag hsl nl;
  print_string " ";
  Display.Text.display_rule (List.map (fun t -> copy_fact2 t) sons, copy_fact2 concl, Empty concl, Terms.true_constraints);
  Display.Text.newline()

(* Display the trace *)
(* Human-readable explanation of why an equivalence test failed.
   Fix: the [ProcessTest] message previously started with a stray
   space and lacked its subject ("The"), unlike the sibling messages. *)
let noninterftest_to_string = function
    ProcessTest _ -> "The process performs a test that may succeed on one side and not on the other"
  | InputProcessTest _ -> "The pattern-matching in the input succeeds on one side and not on the other."
  | NIFailTest _ -> "This holds on one side and not on the other."
  | ApplyTest _ -> Parsing_helper.internal_error "There should be no ApplyTest in reduction_bipro.ml"
  | CommTest _ -> "The communication succeeds on one side and not on the other."
  | NIEqTest _ -> "The result in the left-hand side is different from the result in the right-hand side."
(* Display the reconstructed trace according to [Param.trace_display]
   (none / short labeled trace / full reduction state), choosing HTML
   or plain text output from [Param.html_output]. *)
let display_trace final_state =
  match !Param.trace_display with
    Param.NoDisplay -> ()
  | Param.ShortDisplay ->
      if !Param.html_output then
        Display.Html.display_labeled_trace final_state
      else
        begin
          if !Param.display_init_state then
            begin
              print_string "A more detailed output of the traces is available with\n";
              if !Param.typed_frontend then
                print_string " set traceDisplay = long.\n"
              else
                print_string " param traceDisplay = long.\n";
              Display.Text.newline()
            end;
          Display.Text.display_labeled_trace final_state
        end
  | Param.LongDisplay ->
      if !Param.html_output then
        ignore (Display.Html.display_reduc_state Display.bi_term_to_term true final_state)
      else
        ignore (Display.Text.display_reduc_state Display.bi_term_to_term true final_state)
(* Updating the goals *)

(* Whether the goal is a (non-interference / equivalence) goal. *)
let is_equivalence_goal g =
  match g with
  | NonInterfGoal _ -> true
  | _ -> false
(* Find a clause *)

(* [find_io_rule next_f hypspeclist hyplist name_params var_list io_rules]
   searches [io_rules] for a clause whose tag suffix, name parameters,
   and hypotheses match [hypspeclist] / [name_params] / [hyplist] modulo
   the equational theory; for each new instantiation of [var_list] it
   calls [next_f]. Duplicate instantiations are skipped via
   [found_terms]; raises [Unify] when no clause matches. *)
let find_io_rule next_f hypspeclist hyplist name_params var_list io_rules =
  let name_params1 = extract_name_params_noneed name_params in
  let l = List.length hypspeclist in
  let lnp = List.length name_params1 in
  let lh = List.length hyplist in
  (* Useful for debugging *)
  if !debug_find_io_rule then
    begin
      auto_cleanup (fun () ->
        print_string "Looking for ";
        display_tag hypspeclist name_params1;
        print_string " ";
        Display.Text.display_list Display.Text.WithLinks.fact " & " hyplist;
        Display.Text.newline())
    end;
  let found_terms = ref [] in
  let rec find_io_rule_aux = function
    [] -> raise Unify
  | ((n, sons, hypspeclist2, name_params',_)::io_rules) ->
      let l2 = List.length hypspeclist2 in
      let lnp2 = List.length name_params' in
      let lh2 = List.length sons in
      (* The clause must be at least as long as the request, and its tag
         must end with [hypspeclist]. *)
      if (l2 < l) || (lnp2 < lnp) || (lh2 < lh) || (not (hypspeclist = skip (l2-l) hypspeclist2))
      then find_io_rule_aux io_rules
      else
        begin
          let sons3 = skip (lh2-lh) sons in
          try
            let name_params2 = skip (lnp2-lnp) name_params' in
            if not (Param.get_ignore_types()) &&
              (List.exists2 (fun t1 t2 -> Terms.get_term_type t1 != Terms.get_term_type t2) name_params1 name_params2) then
              raise Unify;
            auto_cleanup_red (fun () ->
              match_modulo_list (fun () ->
                match_equiv_list (fun () ->
                  let new_found = List.map copy_closed_remove_syntactic var_list in
                  (* Skip instantiations already reported. *)
                  if List.exists (fun old_found ->
                    List.for_all2 equal_terms_modulo old_found new_found) (!found_terms) then
                    raise Unify;
                  found_terms := new_found :: (!found_terms);
                  if !debug_find_io_rule then
                    begin
                      auto_cleanup (fun () ->
                        print_string "Found ";
                        Display.Text.display_list Display.Text.WithLinks.term ", " new_found;
                        Display.Text.newline())
                    end;
                  next_f new_found) sons3 hyplist
                ) name_params1 name_params2
              )
          with Unify -> find_io_rule_aux io_rules
        end
  in
  find_io_rule_aux io_rules
Evaluate a term possibly containing destructors .
It always succeeds , perhaps returning Fail .
It always succeeds, perhaps returning Fail. *)
(* Evaluate a term possibly containing destructors, for side [side]
   (1 = left, 2 = right of choice). Always succeeds, possibly
   returning the special Fail term. *)
let rec term_evaluation side = function
    Var v ->
      begin
        match v.link with
          TLink t ->
            (* I think this is useful only to split a Choice inside t *)
            term_evaluation side t
        | _ -> Parsing_helper.internal_error "Error: term should be closed in attack reconstruction";
      end
  | FunApp(f,l) ->
      (* for speed, use the initial definition of destructors, not the one enriched with the equational theory *)
      match f.f_initial_cat with
        Eq _ | Tuple ->
          (* Constructors are strict: any failing argument makes the
             whole application fail. *)
          let l' = List.map (term_evaluation side) l in
          if List.exists is_fail l' then
            Terms.get_fail_term (snd f.f_type)
          else
            FunApp(f, l')
      | Name _ | Failure -> FunApp(f,[])
      | Choice ->
          (* Project the chosen side. *)
          begin
            match l with
              [t1;t2] ->
                if side = 1 then
                  term_evaluation side t1
                else
                  term_evaluation side t2
            | _ -> Parsing_helper.internal_error "Choice should have two arguments"
          end
      | BiProj Left ->
          begin match l with [t] -> term_evaluation 1 t | _ -> assert false end
      | BiProj Right ->
          begin match l with [t] -> term_evaluation 2 t | _ -> assert false end
      | Red redl ->
          (* Destructor: evaluate arguments, then try its rewrite rules
             in order until one matches with satisfiable side conditions. *)
          let l' = List.map (term_evaluation side) l in
          let rec try_red_list = function
              [] ->
                Parsing_helper.internal_error "Term evaluation should always succeeds (perhaps returning Fail)"
            | (red1::redl) ->
                let (left, right, side_c) = auto_cleanup (fun () -> Terms.copy_red red1) in
                try
                  auto_cleanup (fun () ->
                    match_modulo_list (fun () ->
                      close_destr_constraints side_c;
                      if TermsEq.check_closed_constraints side_c then
                        begin
                          (* TO DO (for speed) should I remove_syntactic, or keep it,
                             but take it into account elsewhere (when doing
                             function symbol comparisons, accept functions that
                             differ by their syntactic status) *)
                          close_term right;
                          TermsEq.remove_syntactic_term right
                        end
                      else
                        raise Unify
                        ) left l')
                with Unify -> try_red_list redl
          in
          try_red_list redl
      | _ ->
        Printf.printf "\nName of the function:";
        Display.Text.display_function_name f;
        Parsing_helper.internal_error "unexpected function symbol in term_evaluation (reduction_bipro.ml)"
(* Evaluates t1 and tests if it is equal to t2. *)

let equal_terms_modulo_eval t1 t2 =
  let t1_l = term_evaluation 1 t1 in
  let t1_r = term_evaluation 2 t1 in
  if (is_fail t1_l) || (is_fail t1_r) then false else
  equal_bi_terms_modulo (t1_l, t1_r) t2

(* Evaluates a term. Raises Unify when the result is fail. *)

let term_evaluation_fail t side =
  let r = term_evaluation side t in
  if is_fail r then
    raise Unify
  else
    r

(* Evaluate every argument of a fact on side [side]; raises [Unify] if
   any argument evaluates to fail. *)
let fact_evaluation_fail fact side = match fact with
  | Pred(p,args) ->
      let args' =
        List.map (fun t ->
          let r = term_evaluation side t in
          if is_fail r
          then raise Unify
          else r
        ) args
      in
      Pred(p,args')

(* Evaluate two terms on the same side; raises [Unify] on failure. *)
let term_evaluation_fail2 t1 t2 side =
  (term_evaluation_fail t1 side, term_evaluation_fail t2 side)

(* Evaluate [t] on both sides, and extend [name_params] with the result
   when the occurrence may correspond to several patterns. *)
let term_evaluation_name_params occ t name_params =
  let may_have_several_patterns = reduction_check_several_patterns occ in
  let t' = bi_action (term_evaluation_fail t) in
  if may_have_several_patterns then
    t', ((MUnknown,make_bi_choice t',Always) :: name_params)
  else
    t', name_params

(* Evaluate [t] on side [side]; raises [Unify] unless the result is
   [true] (modulo the equational theory). *)
let term_evaluation_to_true t side =
  let r = term_evaluation side t in
  if (is_fail r) || (not (equal_terms_modulo r Terms.true_term)) then
    raise Unify
  else
    r

(* Like [term_evaluation_name_params], but requires the result to be
   [true] on both sides. *)
let term_evaluation_name_params_true occ t name_params =
  let may_have_several_patterns = reduction_check_several_patterns occ in
  let t' = bi_action (term_evaluation_to_true t) in
  if may_have_several_patterns then
    ((MUnknown,make_bi_choice t',Always) :: name_params)
  else
    name_params
(* Match a pattern
   Raises Unify when the matching fails *)

(* Match pattern [p] against term [t] on side [side], linking pattern
   variables to choice terms so both sides accumulate in the same link. *)
let rec match_pattern p side t =
  if not (Terms.equal_types (Terms.get_pat_type p) (Terms.get_term_type t)) then
    raise Unify;
  match p with
    PatVar b ->
      begin
        if side = 1 then
          Terms.link b (TLink (make_choice t t))
        else
          match b.link with
            TLink (FunApp({ f_cat = Choice }, [t1;t2])) ->
              (* Keep the left component set by side 1, update the right. *)
              Terms.link b (TLink (make_choice t1 t))
          | _ ->
              (* When the evaluation or pattern matching failed on the left side,
                 some variables may be unbounded when we try the pattern matching
                 on the right side *)
              Terms.link b (TLink (make_choice t t))
      end
  | PatTuple(f,l) ->
      (* Decompose [t] as an application of [f] modulo the theory,
         then match the sub-patterns against the components. *)
      let vl = Terms.var_gen (fst f.f_type) in
      let tl =
        match_modulo (fun () ->
          List.map copy_closed_remove_syntactic vl) (FunApp(f, vl)) t
      in
      List.iter2 (fun p t -> match_pattern p side t) l tl
  | PatEqual t' ->
      let t'' = term_evaluation_fail t' side in
      match_modulo (fun () -> ()) t'' t

(* Match [p] against the component of the bi-term selected by [side]. *)
let bi_match_pattern p (t1,t2) side =
  if side = 1 then
    match_pattern p side t1
  else
    match_pattern p side t2

(* Match the pattern, then require condition [t] to evaluate to [true]
   on [side]; raises [Unify] otherwise. *)
let bi_match_pattern_and_test p (t1,t2) t side =
  bi_match_pattern p (t1,t2) side;
  let t' = term_evaluation_fail t side in
  if not (equal_terms_modulo t' Terms.true_term) then
    raise Unify

(* Evaluate [t] and match [pat] on both sides; extends [name_params]
   when the occurrence may correspond to several patterns. *)
let term_evaluation_name_params_and_match pat occ t name_params =
  let may_have_several_patterns = reduction_check_several_patterns occ in
  let t'' = bi_action (fun side ->
    let t' = term_evaluation_fail t side in
    match_pattern pat side t';
    t')
  in
  if may_have_several_patterns then
    t'', ((MUnknown,make_bi_choice t'',Always) :: name_params)
  else
    t'', name_params
Terms come with a recipe that explains how to compute them .
Recipes may contain variables ( especially in prepared_attacker_rules )
which are later instantiated by putting links in these variables .
Copies of the recipes are not made immediately after creating the links ,
so these links remain when the trace progresses ; they are removed
in case of backtrack ( by auto_cleanup_red ) .
Not making too many copies is important for speed in complex
examples such as ffgg .
Copies of recipes are made before adding a term to public ,
so that recipes in public do not contain links .
They are also made before using a term in an input .
Terms come with a recipe that explains how to compute them.
Recipes may contain variables (especially in prepared_attacker_rules)
which are later instantiated by putting links in these variables.
Copies of the recipes are not made immediately after creating the links,
so these links remain when the trace progresses; they are removed
in case of backtrack (by auto_cleanup_red).
Not making too many copies is important for speed in complex
examples such as ffgg.
Copies of recipes are made before adding a term to public,
so that recipes in public do not contain links.
They are also made before using a term in an input.
*)
Decompose tuples
(* Decompose tuples: flatten a (recipe, bi-term) pair into the pairs
   for its tuple components, applying projections to the recipe, so
   that only non-tuple terms remain. *)
let rec decompose_term ((recipe, t) as pair:Types.term * (Types.term * Types.term)) =
  match t with
    (FunApp({f_cat = Tuple } as f,l), FunApp({f_cat = Tuple} as f',l')) when f == f' ->
      let projs = Terms.get_all_projection_fun f in
      decompose_list (List.map2 (fun fi ti -> (FunApp(fi,[recipe]),ti))
                        projs (List.combine l l'))
  | _ -> [pair]

and decompose_list = function
    [] -> []
  | (a::l) -> (decompose_term a) @ (decompose_list l)

(* Reverse direction: decompose a (recipe variable, bi-term) pair,
   linking [binder] to the tuple of fresh recipe variables created for
   the components. *)
let rec decompose_term_rev (binder, t) =
  match t with
    (FunApp({f_cat = Tuple } as f,l), FunApp({f_cat = Tuple} as f',l')) when f == f' ->
      let new_list = List.map (fun (x, x') -> ((Terms.new_var ~orig:false "~M" (Terms.get_term_type x)), (x, x')))
          (List.combine l l')
      in
      Terms.link binder (TLink (FunApp(f, (List.map (fun (x, y) -> Var x) new_list))));
      decompose_list_rev new_list
  | t -> [(binder, t)]

and decompose_list_rev = function
    [] -> []
  | (a::l) -> (decompose_term_rev a) @ (decompose_list_rev l)
(* Test if a term is public *)

(* Returns [Some recipe] when the bi-term is in [public] (tuples are
   checked component-wise and rebuilt), [None] otherwise. *)
let rec is_in_public public = function
  | (FunApp({f_cat = Tuple} as f, l), FunApp(f',l')) when f == f' ->
      (match (is_in_public_list public) (List.combine l l') with
      | None -> None
      | Some lst -> Some(FunApp(f, lst)))
  | t ->
      try
        let (ca, _) = List.find (fun (_, t') -> equal_bi_terms_modulo t t') public in
        Some ca
      with Not_found ->
        None

and is_in_public_list public = function
    [] -> Some []
  | hd::tail ->
      match is_in_public public hd with
        None -> None
      | Some ca ->
          match is_in_public_list public tail with
            None -> None
          | Some catail -> Some (ca::catail)

(* Drop the longest prefix of (recipe var, bi-term) pairs already in
   [public], linking each dropped recipe variable to the public recipe. *)
let rec remove_first_in_public public = function
    [] -> []
  | (((c, a)::l) as l') ->
      try
        let (ca, _) = List.find (fun (_, t) -> equal_bi_terms_modulo a t) public in
        Terms.link c (TLink ca);
        remove_first_in_public public l
      with Not_found ->
        l'

(* If the head of [tc_list] became public between [oldpub] and [public],
   continue stripping public elements; otherwise return [tc_list]
   unchanged. *)
let update_term_list oldpub public tc_list =
  match tc_list with
    [] -> []
  | ((c0, t0)::l0) ->
      let rec is_in_until = function
          [] -> false
        | (((ca, a)::l) as public) ->
            if public == oldpub then false else
            if equal_bi_terms_modulo a t0
            then
              begin
                Terms.link c0 (TLink ca);
                true
              end
            else
              is_in_until l
      in
      if is_in_until public then
        remove_first_in_public public l0
      else
        tc_list
(* We maintain the following invariants in public and prepared_attacker_rule:
1/ All rules in prepared_attacker_rule are for a phase later or equal to the current one.
Rules for a previous phase are removed.
2/ All rules in prepared_attacker_rule for the current phase have non-empty assumptions.
Rules with empty assumptions are removed after adding their conclusion to public.
3/ All assumptions of rules in prepared_attacker_rule are not in public.
When an assumption is in public, we remove it, and possibly apply 2/.
[add_public_and_close state l] guarantees that these invariants are preserved after
addition of the terms in [l] to public.
It removes assumptions of rules in prepared_attacker_rule that are in [l].
When a rule then has no assumptions and is for the current phase, it adds the
conclusion to public and continues closing recursively.
*)
(* [add_public_and_close state l] adds the (recipe, bi-term) pairs of
   [l] to [state.public], removing satisfied hypotheses from
   [prepared_attacker_rule] and enqueueing conclusions of rules of the
   current phase whose hypotheses become empty, until a fixpoint;
   maintains the invariants stated above. *)
let add_public_and_close state l =
  let queue = ref l in
  let rec remove_from_att_rules public ((recipe, t) as pair) = function
      [] -> []
    | (p, hyp_terms, (recipe_concl, concl_bi_term))::attacker_rules ->
        let attacker_rules' = remove_from_att_rules public pair attacker_rules in
        let phase_p = getphase p in
        assert (phase_p >= state.current_phase);
        (* Only the head hypothesis can match the newly public term;
           once it matches, strip any further already-public hypotheses. *)
        let hyp_terms' = match hyp_terms with
          [] -> []
        | ((c0, t0)::l0) ->
            if equal_bi_terms_modulo t0 t then
              begin
                link c0 (TLink recipe);
                remove_first_in_public public l0
              end
            else
              hyp_terms
        in
        if (hyp_terms' = []) && (phase_p = state.current_phase) then
          begin
            queue := (decompose_term (Terms.copy_term4 recipe_concl, concl_bi_term)) @ (!queue);
            attacker_rules'
          end
        else
          (* Keep the rule, removing hypotheses that are already in public *)
          (p, hyp_terms', (recipe_concl, concl_bi_term)) :: attacker_rules'
  in
  let rec do_close state =
    match !queue with
      [] -> state
    | ((c, t)::l) ->
        queue := l;
        if List.exists (fun (_, t') -> equal_bi_terms_modulo t t') state.public then
          do_close state
        else
          let public' = (c, t) :: state.public in
          do_close { state with
                     public = public';
                     prepared_attacker_rule = remove_from_att_rules public' (c, t) state.prepared_attacker_rule }
  in
  do_close state

(* Add a term to public, first decomposing data constructors (tuples)
   so that only non-tuple terms are added. *)
let rec add_public_with_recipe state (recipe, t) =
  match t with
    (FunApp({ f_cat = Tuple } as f, l), FunApp({f_cat = Tuple} as f',l')) when f == f' ->
      let projs = Terms.get_all_projection_fun f in
      add_public_list state (List.map2 (fun fi ti -> (FunApp(fi, [recipe]), ti)) projs (List.combine l l'))
  | t -> add_public_and_close state [(recipe, t)]

and add_public_list state = function
    [] -> state
  | (a::l) -> add_public_list (add_public_with_recipe state a) l
(* [close_public_after_phase_increment state] guarantees that the invariants on
public and prepared_attacker_rule mentioned above are preserved after a phase increment.
It removes rules for previous phases, adds to public the conclusions
of rules with no assumptions in the new phase, and closes using
[add_public_and_close]. *)
let close_public_after_phase_increment state =
  (* Conclusions of rules that become applicable in the new phase *)
  let queue = ref [] in
  (* Drop rules for phases strictly before the (incremented) current phase;
     queue the conclusions of assumption-free rules for the current phase *)
  let rec remove_from_att_rules public = function
      [] -> []
    | ((p, hyp_terms, (recipe_concl, concl_bi_term)) as rule)::attacker_rules ->
        let attacker_rules' = remove_from_att_rules public attacker_rules in
        let phase_p = getphase p in
        if phase_p < state.current_phase then attacker_rules' else
        if (hyp_terms = []) && (phase_p = state.current_phase) then
          begin
            queue := (decompose_term (Terms.copy_term4 recipe_concl, concl_bi_term)) @ (!queue);
            attacker_rules'
          end
        else
          (* Keep the rule *)
          rule :: attacker_rules'
  in
  let state' =
    { state with
      prepared_attacker_rule = remove_from_att_rules state.public state.prepared_attacker_rule }
  in
  (* Close public with the newly derivable terms *)
  add_public_and_close state' (!queue)
(* [close_public_phase_change state n] changes the current phase to [n]
   and closes public, by incrementing the phase from [state.current_phase] to [n]
   and closing by [close_public_after_phase_increment] at each increment. *)
(* Advance the current phase to [n], closing public at every increment. *)
let rec close_public_phase_change state n =
  if n < state.current_phase then
    Parsing_helper.internal_error "Phases should be in increasing order.";
  if n = state.current_phase then
    state
  else
    (* Bump the phase by one, re-close public, then continue towards [n] *)
    let bumped = { state with current_phase = state.current_phase + 1 } in
    close_public_phase_change (close_public_after_phase_increment bumped) n
(* [close_public_initial state] guarantees that the invariants on
public and prepared_attacker_rule mentioned above are true initially.
It applies rules with empty assumptions in phase 0 by
[close_public_after_phase_increment] and
closes with terms initially known to be public by
[add_public_list]. *)
(* Establish the invariants on public and prepared_attacker_rule for the
   initial state: apply assumption-free phase-0 rules on a state whose
   public is emptied, then re-add the terms initially known to be public. *)
let close_public_initial state =
  let emptied = { state with public = [] } in
  add_public_list (close_public_after_phase_increment emptied) state.public
(* Add the bi-term [t] to public under a fresh recipe variable.
   Returns the (instantiated) recipe together with the updated state. *)
let add_public state t =
  let recipe_var = new_var ~orig:false "~M" (get_term_type (fst t)) in
  (* Decompose tuples, turning each recipe variable into a term *)
  let decomposed =
    List.map (fun (b, t') -> (Var b, t')) (decompose_term_rev (recipe_var, t))
  in
  let state' = add_public_and_close state decomposed in
  (Terms.copy_term4 (Var recipe_var), state')
(* [optional_eavesdrop state public_channel mess_term] returns the recipe
   (if any) with which the passive adversary learns [mess_term]. *)
let optional_eavesdrop state public_channel mess_term =
  if public_channel then
    (* The adversary is passive and the channel is public;
       the adversary eavesdrops the message sent by RIO / RIO_PatRemove *)
    let (new_recipe, state') = add_public state mess_term in
    (Some new_recipe, state')
  else
    (None, state)
(* [get_occurrence_name_for_precise occ name_params] builds the name
   associated with occurrence [occ] for precise actions, applied to the
   session identifiers found in [name_params]. *)
let get_occurrence_name_for_precise occ name_params =
  (* Keep only the session identifiers among the name parameters *)
  let (np,npm) =
    List.fold_right (fun (m,t,_) (acc_np,acc_npm) -> match m with
      | MSid _ -> (t::acc_np,m::acc_npm)
      | _ -> (acc_np,acc_npm)
    ) name_params ([],[])
  in
  let n = Reduction_helper.get_occ_name occ in
  match n.f_cat with
    | Name r ->
        let n' = FunApp(n,np) in
        FunApp(add_name_for_pat n',[])
    | _ -> Parsing_helper.internal_error "[reduction_bipro.ml >> get_occurrence_name_for_precise] Unexpected case."
(* Do reductions that do not involve interactions.
   f takes as input
   - a boolean indicating whether the attacker knowledge has changed
   - the new state
   When the goal is reached, do_red_nointeract returns the final state.
   Otherwise, raises an exception No_result. *)
(* [do_red_nointeract f prev_state n] reduces the [n]-th subprocess of
   [prev_state] as long as no interaction (I/O) is needed, then calls [f]
   with a flag telling whether the attacker knowledge changed and the new
   state. NOTE(review): the bare "SUCCESS" comments below mark the points
   where an equivalence goal is reached and the final state is returned. *)
let rec do_red_nointeract f prev_state n =
  let (proc, name_params, occs, facts, cache_info) =
    List.nth prev_state.subprocess n in
  match proc with
    Nil -> debug_print "Doing Nil";
      made_forward_step := true;
      f false (do_rnil prev_state n)
  | Par(p,q) ->
      debug_print "Doing Par";
      made_forward_step := true;
      (* Reduce both branches; the attacker knowledge changed if it changed
         in either branch *)
      do_red_nointeract (fun new_att_know cur_state2 ->
        do_red_nointeract (fun new_att_know2 cur_state3 ->
          f (new_att_know || new_att_know2) cur_state3)
          cur_state2 n
        ) { prev_state with
            subprocess = add_at n (p, name_params, occs, facts, Nothing)
                           (replace_at n (q, name_params, occs, facts, Nothing)
                              prev_state.subprocess);
            comment = RPar(n);
            previous_state = Some prev_state } (n+1)
  | Restr(na,(args,env),p,occ) ->
      debug_print "Doing Restr";
      made_forward_step := true;
      let need_list = get_need_vars (!Param.current_state) na in
      let include_info = prepare_include_info env args need_list in
      let l = extract_name_params na include_info name_params in
      let n' = FunApp(add_name_for_pat (FunApp(na, l)),[]) in
      let p' = process_subst p na n' in
      begin
        do_red_nointeract f { prev_state with
            subprocess = replace_at n (p', name_params, occs, facts, Nothing) prev_state.subprocess;
            comment = RRestr(n, na, n');
            previous_state = Some prev_state } n
      end
  | Let(pat,t,p,q,occ) ->
      debug_print "Doing Let";
      made_forward_step := true;
      let new_occs = (LetTag occ) :: occs in
      begin
        try
          auto_cleanup_red (fun () ->
            let t', name_params' = term_evaluation_name_params_and_match pat (OLet(occ)) t name_params in
            let p' = copy_process p in
            let name_params'' = update_name_params IfQueryNeedsIt name_params' pat in
            do_red_nointeract f { prev_state with
                subprocess = replace_at n (p', name_params'', new_occs, facts, Nothing) prev_state.subprocess;
                comment = RLet_In(n, pat, make_bi_choice t');
                previous_state = Some prev_state } n
          )
        with Unify ->
          (* Evaluation or matching failed on both sides: else branch *)
          do_red_nointeract f { prev_state with
              subprocess = replace_at n (q, name_params, new_occs, facts, Nothing) prev_state.subprocess;
              comment = RLet_Else(n, pat, t);
              previous_state = Some prev_state } n
        | FailOnlyOnSide _ ->
            if is_equivalence_goal prev_state.goal
            then
              (* SUCCESS *)
              { prev_state with
                goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
            else
              f false { prev_state with
                  subprocess = remove_at n prev_state.subprocess;
                  comment = RLet_Remove(n,pat, t);
                  previous_state = Some prev_state }
      end
  | Test(t,p,q,occ) ->
      debug_print "Doing Test";
      made_forward_step := true;
      if q == Nil then
        (* Optimize the case q == Nil: in this case, the adversary
           cannot distinguish whether a destructor fails in t or
           t is false. *)
        begin
          try
            auto_cleanup_red (fun () ->
              let new_occs = (TestTag occ) :: occs in
              let name_params' = term_evaluation_name_params_true (OTest(occ)) t name_params in
              do_red_nointeract f
                { prev_state with
                  subprocess = replace_at n (p, name_params', new_occs, facts, Nothing) prev_state.subprocess;
                  comment = RTest_Then(n, t);
                  previous_state = Some prev_state } n
            )
          with Unify ->
            f false { prev_state with
                subprocess = remove_at n prev_state.subprocess;
                comment = RTest_Remove(n, t, TestFails);
                previous_state = Some prev_state }
          | FailOnlyOnSide _ ->
              if is_equivalence_goal prev_state.goal
              then
                (* SUCCESS *)
                { prev_state with
                  goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
              else
                f false { prev_state with
                    subprocess = remove_at n prev_state.subprocess;
                    comment = RTest_Remove(n, t, Blocks);
                    previous_state = Some prev_state }
        end
      else
        begin
          try
            auto_cleanup_red (fun () ->
              let new_occs = (TestTag occ) :: occs in
              let (t', name_params') = term_evaluation_name_params (OTest(occ)) t name_params in
              if is_true_test t' then
                do_red_nointeract f
                  { prev_state with
                    subprocess = replace_at n (p, name_params', new_occs, facts, Nothing) prev_state.subprocess;
                    comment = RTest_Then(n, t);
                    previous_state = Some prev_state } n
              else
                do_red_nointeract f
                  { prev_state with
                    subprocess = replace_at n (q, name_params', new_occs, facts, Nothing) prev_state.subprocess;
                    comment = RTest_Else(n, t);
                    previous_state = Some prev_state } n
            )
          with Unify ->
            f false { prev_state with
                subprocess = remove_at n prev_state.subprocess;
                comment = RTest_Remove(n, t, DestrFails);
                previous_state = Some prev_state }
          | FailOnlyOnSide _ ->
              if is_equivalence_goal prev_state.goal
              then
                (* SUCCESS *)
                { prev_state with
                  goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
              else
                f false { prev_state with
                    subprocess = remove_at n prev_state.subprocess;
                    comment = RTest_Remove(n, t, Blocks);
                    previous_state = Some prev_state }
        end
  | Output(tc,t,p,occ) ->
      (* First test whether this output makes a CommTest goal succeed *)
      let new_goal_opt =
        if cache_info != Nothing then
          None (* Was already tested and failed before; will still fail if tested again *)
        else
          match prev_state.goal with
            NonInterfGoal(CommTest(tin,tout,loc)) ->
              if equal_terms_modulo_eval tc tout then
                begin
                  (match is_in_public prev_state.public tin with
                    Some (recipe) ->
                      begin
                        let new_loc = Some (LocAttacker (recipe), LocProcess(n, List.nth prev_state.subprocess n)) in
                        Some (NonInterfGoal(CommTest(tin,tout,new_loc)))
                      end
                  | None -> (* find a process that does some input on tin *)
                      try
                        let (n',p') =
                          findi (function
                              (Input(tc,_,_,_),_,_,_,_) -> equal_terms_modulo_eval tc tin
                            | _ -> false
                          ) prev_state.subprocess
                        in
                        let new_loc = Some (LocProcess(n',p'), LocProcess(n, List.nth prev_state.subprocess n)) in
                        Some (NonInterfGoal(CommTest(tin,tout,new_loc)))
                      with Not_found ->
                        None)
                end
              else None
          | _ -> None
      in
      begin
        match new_goal_opt with
          Some new_goal -> { prev_state with goal = new_goal }
        | None ->
            debug_print "Doing Output";
            (* For passive attackers, do red I/O only,
               but still evaluate the arguments of the output *)
            if not (!Param.active_attacker) then
              match cache_info with
                InputInfo _ | GetInfo _ -> Parsing_helper.internal_error "Should not have input/get info for an output!"
              | OutputInfo _ -> f false prev_state (* Arguments already evaluated *)
              | Nothing ->
                  try
                    auto_cleanup_red (fun () ->
                      let ((tc1,t1),(tc2,t2)) = bi_action (term_evaluation_fail2 tc t) in
                      let tc' = (tc1, tc2) in
                      let t' = (t1, t2) in
                      let tclist = decompose_term_rev (Terms.new_var ~orig:false "Useless" (Terms.get_term_type tc1), tc') in
                      f false { prev_state with
                          subprocess = replace_at n (Output(make_bi_choice tc', make_bi_choice t',p,occ),
                                                     name_params, occs, facts,
                                                     (OutputInfo(tclist, prev_state.public)))
                                         prev_state.subprocess }
                    )
                  with Unify ->
                    f false { prev_state with
                        subprocess = remove_at n prev_state.subprocess;
                        comment = ROutput_Remove(n, tc, t, DestrFails);
                        previous_state = Some prev_state }
                  | FailOnlyOnSide _ ->
                      if is_equivalence_goal prev_state.goal
                      then
                        (* SUCCESS *)
                        { prev_state with
                          goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
                      else
                        f false { prev_state with
                            subprocess = remove_at n prev_state.subprocess;
                            comment = ROutput_Remove(n, tc, t, Blocks);
                            previous_state = Some prev_state }
            else
              (* For active attackers, one can output on public channels *)
              begin
                let new_occs = (OutputTag occ) :: occs in
                match cache_info with
                  InputInfo _ | GetInfo _ -> Parsing_helper.internal_error "Should not have input/get info for an output!"
                | OutputInfo(tclist, oldpub) ->
                    (* Channel already evaluated; re-check publicness against
                       the possibly-extended public *)
                    let tclist' = update_term_list oldpub prev_state.public tclist in
                    if tclist' = [] then
                      begin
                        made_forward_step := true;
                        let (new_recipe, prev_state') = add_public prev_state (get_choice t) in
                        do_red_nointeract (if prev_state.public == prev_state'.public then f else
                                           (fun mod_public cur_state -> f true cur_state))
                          { prev_state' with
                            subprocess = replace_at n (p, name_params, new_occs, facts, Nothing) prev_state.subprocess;
                            comment = ROutput_Success(n, tc, new_recipe, t);
                            previous_state = Some prev_state } n
                      end
                    else
                      f false { prev_state with
                          subprocess = replace_at n (proc, name_params, occs, facts,
                                                     (OutputInfo(tclist', prev_state.public)))
                                         prev_state.subprocess }
                | Nothing ->
                    try
                      auto_cleanup_red (fun () ->
                        let ((tc1,t1),(tc2,t2)) = bi_action (term_evaluation_fail2 tc t) in
                        let tc' = (tc1, tc2) in
                        let t' = (t1, t2) in
                        let tclist = decompose_term_rev (Terms.new_var ~orig:false "Useless" (Terms.get_term_type tc1), tc') in
                        let tclist' = remove_first_in_public prev_state.public tclist in
                        if tclist' = [] then
                          begin
                            made_forward_step := true;
                            let (new_recipe, prev_state') = add_public prev_state t' in
                            do_red_nointeract (if prev_state.public == prev_state'.public then f else
                                               (fun mod_public cur_state -> f true cur_state))
                              { prev_state' with
                                subprocess = replace_at n (p, name_params, new_occs, facts, Nothing) prev_state.subprocess;
                                comment = ROutput_Success(n, make_bi_choice tc', new_recipe, make_bi_choice t');
                                previous_state = Some prev_state } n
                          end
                        else
                          (* When one side is a channel and the other side is not,
                             we keep the Output process; the failure of the equivalence
                             will be detected (or has already been detected) by CommTest *)
                          f false { prev_state with
                              subprocess = replace_at n (Output(make_bi_choice tc', make_bi_choice t',p,occ), name_params, occs, facts,
                                                         (OutputInfo(tclist', prev_state.public)))
                                             prev_state.subprocess }
                      )
                    with Unify ->
                      f false { prev_state with
                          subprocess = remove_at n prev_state.subprocess;
                          comment = ROutput_Remove(n, tc, t, DestrFails);
                          previous_state = Some prev_state }
                    | FailOnlyOnSide _ ->
                        if is_equivalence_goal prev_state.goal
                        then
                          (* SUCCESS *)
                          { prev_state with
                            goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
                        else
                          f false { prev_state with
                              subprocess = remove_at n prev_state.subprocess;
                              comment = ROutput_Remove(n, tc, t, Blocks);
                              previous_state = Some prev_state }
              end
      end
  | Event(FunApp(fs,l) as t,_,p,occ) ->
      debug_print "Doing Event";
      made_forward_step := true;
      let fstatus = Pievent.get_event_status (!Param.current_state) fs in
      (* [do_end] records the executed event and continues reducing,
         stopping when the goal is reached *)
      let do_end prev_state new_occs new_facts t' =
        let n_subprocess = replace_at n (p,name_params,new_occs,new_facts,Nothing) prev_state.subprocess in
        let (new_goal, event_in_goal, success) =
          (* We do not store the list here since they were only used for injective queries.
             This cannot happen when proving lemmas on biprocesses. *)
          update_corresp_goal prev_state.goal (Some (occ,[]))
            (function
              | Pred(pr,[t1';t2']) -> pr == Param.end2_pred && equal_bi_terms_modulo t' (t1',t2')
              | _ -> false
            )
        in
        let bi_t = make_bi_choice t' in
        let new_state =
          { prev_state with
            subprocess = n_subprocess;
            comment = REvent_Success(n,bi_t,event_in_goal);
            events = bi_t::prev_state.events;
            goal = new_goal;
            previous_state = Some prev_state
          }
        in
        if success
        then new_state
        else do_red_nointeract f new_state n
      in
      begin
        (* Check that the argument of the event can be evaluated but otherwise ignore it *)
        try
          begin match fstatus.begin_status with
            | No ->
                auto_cleanup_red (fun () ->
                  let t' = bi_action (term_evaluation_fail t) in
                  let new_occs = (BeginEvent(occ)) :: occs in
                  do_end prev_state new_occs facts t'
                )
            | NonInj ->
                auto_cleanup_red (fun () ->
                  let (t1,t2) = bi_action (term_evaluation_fail t) in
                  let new_occs' = (BeginEvent (occ)) :: occs in
                  let new_occs = BeginFact :: new_occs' in
                  let new_facts = Pred(Param.begin2_pred,[t1;t2]) :: facts in
                  try
                    find_io_rule (fun _ ->
                      do_end prev_state new_occs new_facts (t1,t2)
                    ) new_occs' facts name_params [] prev_state.io_rule
                  with Unify -> raise DerivBlocks
                )
            | Inj -> Parsing_helper.internal_error "[Reduction_bipro.ml] Unexpected injective event."
          end
        with Unify ->
          f false { prev_state with
              subprocess = remove_at n prev_state.subprocess;
              comment = REvent_Remove(n, t, DestrFails);
              previous_state = Some prev_state }
        | DerivBlocks ->
            f false { prev_state with
                subprocess = remove_at n prev_state.subprocess;
                comment = REvent_Remove(n, t, Blocks);
                previous_state = Some prev_state }
        | FailOnlyOnSide _ ->
            if is_equivalence_goal prev_state.goal
            then
              (* SUCCESS *)
              { prev_state with
                goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
            else
              f false { prev_state with
                  subprocess = remove_at n prev_state.subprocess;
                  comment = REvent_Remove(n, t, Blocks);
                  previous_state = Some prev_state }
      end
  | LetFilter _ -> Parsing_helper.user_error "Predicates in 'let suchthat in else' processes are currently incompatible with non-interference.";
  | Repl(p,occ) ->
      debug_print "Doing Repl";
      made_forward_step := true;
      let sid = Terms.new_var ~orig:false "sid" Param.sid_type in
      let new_occs = (ReplTag (occ,count_name_params name_params))::occs in
      let copy_number = ref 0 in
      let new_state = ref { prev_state with
          subprocess = remove_at n prev_state.subprocess;
          comment = RRepl(n,0);
          previous_state = Some prev_state }
      in
      begin
        try
          (* Enumerate the session identifiers allowed by the I/O rules;
             each one yields a fresh copy of the replicated process.
             [raise Unify] forces [find_io_rule] to backtrack and produce
             the next session identifier. *)
          auto_cleanup (fun () ->
            find_io_rule (function
                [sid_pat] ->
                  let p' = auto_cleanup (fun () -> copy_process p) in
                  incr copy_number;
                  new_state := { !new_state with
                      subprocess = add_at n (p', (MSid 0,sid_pat,Always)::name_params, new_occs, facts, Nothing) !new_state.subprocess
                    };
                  raise Unify
              | _ -> Parsing_helper.internal_error "Repl case, reduction.ml"
            ) new_occs facts ((MSid 0,Var sid,Always)::name_params) [Var sid] prev_state.io_rule
          )
        with Unify ->
          debug_print ("Repl: " ^ (string_of_int (!copy_number)) ^ " copies");
          (* Reduce each created copy in turn *)
          let rec do_red_copies b ncopies state =
            if ncopies < 0 then
              f b state
            else
              do_red_nointeract (fun b' s -> do_red_copies (b||b') (ncopies-1) s) state (n+ncopies)
          in
          do_red_copies false ((!copy_number)-1)
            { !new_state with
              comment = RRepl(n,!copy_number)
            }
      end
  | Input(tc,_,_,_) ->
      (* An input alone cannot be reduced without interaction; only check
         whether it makes a CommTest goal succeed *)
      begin
        match prev_state.goal with
          NonInterfGoal(CommTest(tin,tout,loc)) ->
            if equal_terms_modulo_eval tc tin then
              begin
                (match is_in_public prev_state.public tout with
                  | Some recipe ->
                      begin
                        let new_loc = Some (LocProcess(n, List.nth prev_state.subprocess n), LocAttacker recipe) in
                        let new_goal = NonInterfGoal(CommTest(tin,tout,new_loc)) in
                        { prev_state with goal = new_goal }
                      end
                  | None -> (* find a process that does some output on tout *)
                      try
                        let (n',p') =
                          findi (function
                              (Output(tc,_,_,_),_,_,_,_) -> equal_terms_modulo_eval tc tout
                            | _ -> false
                          ) prev_state.subprocess
                        in
                        let new_loc = Some (LocProcess(n, List.nth prev_state.subprocess n), LocProcess(n',p')) in
                        let new_goal = NonInterfGoal(CommTest(tin,tout,new_loc)) in
                        { prev_state with goal = new_goal }
                      with Not_found ->
                        f false prev_state)
              end
            else f false prev_state
        | _ -> f false prev_state
      end
  | Insert(t,p,occ) ->
      debug_print "Doing Insert";
      begin
        let new_occs = (InsertTag occ) :: occs in
        let new_element_inserted = ref false in
        try
          auto_cleanup_red (fun () ->
            let t' = bi_action (term_evaluation_fail t) in
            let already_in = List.exists (equal_bi_terms_modulo t') prev_state.tables in
            new_element_inserted := not already_in;
            made_forward_step := true;
            let (new_goal,insert_in_goal,success) =
              update_corresp_goal prev_state.goal None
                (is_table_goal prev_state.current_phase t')
            in
            let bi_t = make_bi_choice t' in
            let new_state =
              { prev_state with
                subprocess = replace_at n (p, name_params, new_occs, facts, Nothing) prev_state.subprocess;
                tables = if already_in then prev_state.tables else t'::prev_state.tables;
                comment = RInsert_Success(n, bi_t, insert_in_goal);
                goal = new_goal;
                previous_state = Some prev_state
              }
            in
            if success
            then new_state
            else do_red_nointeract f new_state n
          )
        with Unify ->
          f false { prev_state with
              subprocess = remove_at n prev_state.subprocess;
              comment = RInsert_Remove(n, t, DestrFails);
              previous_state = Some prev_state }
        | FailOnlyOnSide _ ->
            if is_equivalence_goal prev_state.goal
            then
              (* SUCCESS *)
              { prev_state with
                goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
            else
              f false { prev_state with
                  subprocess = remove_at n prev_state.subprocess;
                  comment = RInsert_Remove(n, t, Blocks);
                  previous_state = Some prev_state }
        | No_result ->
            (* The attack reconstruction failed after doing the insert.
               Try not doing it, in case that allows executing the else branch of a Get. *)
            if (!has_backtrack_get) && (!new_element_inserted) then
              f false prev_state
            else
              raise No_result
      end
  | NamedProcess(name, tl, p) ->
      debug_print "Doing NamedProcess";
      do_red_nointeract f { prev_state with
          subprocess = replace_at n (p, name_params, occs, facts, Nothing) prev_state.subprocess;
          comment = RNamedProcess(n, name, tl);
          previous_state = Some prev_state } n
  | _ -> f false prev_state
(* Test success when the knowledge of the attacker has changed *)
(* [test_success cur_state'] tests whether the goal is now reached, after a
   change of the attacker knowledge. Returns (success, updated state); the
   updated state records the recipes that witness the success. *)
let test_success cur_state' =
  try
    match cur_state'.goal with
    | CorrespGoal(l) ->
        let new_goal =
          CorrespGoal (List.map (fun goal -> match goal with
            | Fact(Pred({p_info = [AttackerBin(i,_)]},[t1;t2]) as fact, _, false) when cur_state'.current_phase <= i ->
                (* compute the new recipe_lst:
                   if t is known by the attacker in phase cur_state'.current_phase,
                   it will still be known in phase i *)
                begin match is_in_public cur_state'.public (t1,t2) with
                | Some recipe -> Fact(fact,Some [recipe],true)
                | _ -> goal
                end
            | Fact(Pred({p_info = [Attacker(i,_)]},[t]) as fact, _, false) when cur_state'.current_phase <= i ->
                (* compute the new recipe_lst:
                   if t is known by the attacker in phase cur_state'.current_phase,
                   it will still be known in phase i *)
                begin match is_in_public cur_state'.public (t,t) with
                | Some recipe -> Fact(fact,Some [recipe],true)
                | _ -> goal
                end
            | Fact(Pred({p_info = [MessBin(i,_)]},[tc1;t1;tc2;t2]) as fact, _, false) when cur_state'.current_phase <= i ->
                (* compute the new recipe_lst
                   if tc and t are known by the attacker in phase cur_state'.current_phase,
                   they will still be known in phase i,
                   so the attacker will be able to send t on tc in phase i *)
                begin match is_in_public cur_state'.public (t1,t2), is_in_public cur_state'.public (tc1,tc2) with
                | Some recipe1, Some recipe2 -> Fact(fact, Some [recipe1; recipe2], true)
                | _ -> goal
                end
            | Fact(Pred({p_info = [Mess(i,_)]},[tc;t]) as fact, _, false) when cur_state'.current_phase <= i ->
                (* compute the new recipe_lst
                   if tc and t are known by the attacker in phase cur_state'.current_phase,
                   they will still be known in phase i,
                   so the attacker will be able to send t on tc in phase i *)
                begin match is_in_public cur_state'.public (t,t), is_in_public cur_state'.public (tc,tc) with
                | Some recipe1, Some recipe2 -> Fact(fact, Some [recipe1; recipe2], true)
                | _ -> goal
                end
            | _ -> goal
          ) l)
        in
        (is_success_corresp_goal new_goal, {cur_state' with goal = new_goal})
    | NonInterfGoal(NIEqTest((t1, _),(t2, _))) ->
        (* Success when both compared terms are derivable by the attacker *)
        (match is_in_public cur_state'.public t1, is_in_public cur_state'.public t2 with
        | Some recipe1, Some recipe2 ->
            let new_goal = NonInterfGoal(NIEqTest((t1, Some recipe1),(t2, Some recipe2))) in
            (true, { cur_state' with goal = new_goal })
        | _ -> (false, cur_state'))
    | NonInterfGoal(NIFailTest (t, _)) ->
        (match is_in_public cur_state'.public t with
        | Some recipe ->
            let new_goal = NonInterfGoal(NIFailTest (t, Some recipe)) in
            (true, { cur_state' with goal = new_goal })
        | None -> (false, cur_state'))
    | NonInterfGoal(CommTest(tin,tout,loc)) ->
        (* Success when both ends of the communication are available,
           either to the attacker or in some subprocess *)
        let rin =
          (match is_in_public cur_state'.public tin with
          | Some recipe -> Some (LocAttacker recipe)
          | None ->
              try
                let (n,p) =
                  findi (function
                      (Input(tc,_,_,_),_,_,_,_) -> equal_terms_modulo_eval tc tin
                    | _ -> false
                  ) cur_state'.subprocess
                in
                Some (LocProcess(n,p))
              with Not_found ->
                None)
        in
        let rout =
          (match is_in_public cur_state'.public tout with
          | Some recipe -> Some (LocAttacker recipe)
          | None ->
              try
                let (n,p) =
                  findi (function
                      (Output(tc,_,_,_),_,_,_,_) -> equal_terms_modulo_eval tc tout
                    | _ -> false
                  ) cur_state'.subprocess
                in
                Some (LocProcess(n,p))
              with Not_found ->
                None)
        in
        begin
          match rin,rout with
            Some lin, Some lout ->
              let new_goal = NonInterfGoal(CommTest(tin,tout,Some(lin,lout))) in
              (true, { cur_state' with goal = new_goal })
          | _ -> (false, cur_state')
        end
    | _ -> (false, cur_state')
  with Unify ->
    (false, cur_state')
(* let test_success = Profile.f1 "test_success" test_success *)
(* Stop with the final state when the goal is now reached;
   otherwise continue with [next_f] on the updated state. *)
let end_if_success next_f cur_state =
  match test_success cur_state with
  | (true, reached_state) -> reached_state
  | (false, updated_state) -> next_f updated_state
(* Normalize the state after a reduction *)
(* [find_possible_outputs f cur_state n seen_list subprocs] scans the
   subprocesses, executing every output whose channel is public;
   [seen_list] accumulates (in reverse) the processes already scanned. *)
let rec find_possible_outputs f cur_state n seen_list = function
    [] -> f cur_state
  | (Output(tc,t,p,out_occ) as proc, name_params, occs, facts, cache_info)::rest_subprocess when (!Param.active_attacker) ->
      (* Decompose the channel and drop the parts already in public *)
      let tclist' =
        match cache_info with
          InputInfo _ | GetInfo _ -> Parsing_helper.internal_error "Should not have input/get info for an output!"
        | OutputInfo(tclist, oldpub) ->
            update_term_list oldpub cur_state.public tclist
        | Nothing ->
            let tclist = decompose_term_rev ((Terms.new_var ~orig:false "Useless" (Terms.get_term_type tc)), (tc, tc)) in
            remove_first_in_public cur_state.public tclist
      in
      let seen_list' = (proc, name_params, occs, facts, OutputInfo(tclist', cur_state.public)) :: seen_list in
      if tclist' = [] then
        (* The channel is public: execute the output *)
        do_red_nointeract (fun change_pub cur_state2 ->
          if change_pub then
            end_if_success (find_possible_outputs_rec f) cur_state2
          else
            find_possible_outputs f cur_state2 0 [] cur_state2.subprocess
        ) { cur_state with subprocess = List.rev_append seen_list' rest_subprocess } n
      else
        find_possible_outputs f cur_state (n+1) seen_list' rest_subprocess
  | sub_proc::rest_subprocess -> find_possible_outputs f cur_state (n+1) (sub_proc::seen_list) rest_subprocess

(* Restart the scan from the beginning of the subprocess list *)
and find_possible_outputs_rec f cur_state3 =
  find_possible_outputs f cur_state3 0 [] cur_state3.subprocess
(* When the process number n has been changed *)
(* Normalize the state after the process numbered [n] has been changed. *)
let normal_state f change_pub cur_state n =
  let continue knowledge_changed next_state =
    if change_pub || knowledge_changed then
      end_if_success (find_possible_outputs_rec f) next_state
    else
      f next_state
  in
  do_red_nointeract continue cur_state n
(* When two processes have been changed, numbers n1 and n2 *)
(* Normalize the state after the two processes numbered [n1] and [n2] have
   been changed. The larger index is reduced first so that the smaller
   index stays valid afterwards. *)
let normal_state2 f change_pub cur_state n1 n2 =
  let n_min = min n1 n2 and n_max = max n1 n2 in
  do_red_nointeract (fun changed2 state2 ->
    do_red_nointeract (fun changed3 state3 ->
      if change_pub || changed2 || changed3 then
        end_if_success (find_possible_outputs_rec f) state3
      else f state3
    ) state2 n_min
  ) cur_state n_max
(* When all processes have been changed *)
(* Normalize the state after all processes have been changed,
   reducing each subprocess from the last index down to 0. *)
let normal_state_all f change_pub cur_state =
  let rec reduce_from idx pub_changed st =
    if idx < 0 then
      if pub_changed then end_if_success (find_possible_outputs_rec f) st
      else f st
    else
      do_red_nointeract
        (fun changed st' -> reduce_from (idx - 1) (pub_changed || changed) st')
        st idx
  in
  reduce_from (List.length cur_state.subprocess - 1) change_pub cur_state
(* Initial attacker knowledge *)
(* Initial attacker knowledge: one term f() per non-private free name. *)
let rec public_build = function
  | [] -> []
  | h::t when not h.f_private -> FunApp(h,[]) :: public_build t
  | _::t -> public_build t
(* Initialize the rule lists *)
(* [init_rule state tree] initializes the rule lists of [state] (public
   knowledge, prepared attacker rules, I/O rules, unmatched hypotheses)
   from the derivation tree [tree]. *)
let rec init_rule state tree =
  match tree.desc with
    FHAny | FEmpty | FRemovedWithMaxHyp ->
      begin
        match tree.thefact with
        | Pred(p,_) when p == Param.begin2_pred -> state
        | Pred(p, [t]) when p.p_prop land Param.pred_ATTACKER != 0 ->
            begin
              let t' = rev_name_subst t in
              match t' with
                FunApp({ f_cat = Name _; f_private = false },[]) ->
                  (* A free name is its own recipe *)
                  { state with public = (t',(t',t')) :: state.public }
              | _ ->
                  (* Public contains terms, not patterns
                     -> translate the pattern into a term.
                     If the translation fails because a name is not in the table, we have to stop. *)
                  if (not (is_in_public state.public (t',t') = None)) then
                    state
                  else
                    (* I introduce a variable for the recipe here,
                       and use it when displaying hyp_not_matched.
                       Note: it is important that the term t' is never a tuple.
                       Otherwise, it would be decomposed later, and the link
                       between the recipe in public and the one in hyp_not_matched
                       would be lost. *)
                    let recipe = Var (new_var ~orig:false "~M" (Terms.get_term_type t')) in
                    { state with
                      public = (recipe,(t',t')) :: state.public;
                      hyp_not_matched = (Some recipe, Pred(p,[t']))::state.hyp_not_matched }
            end
        | Pred(p, [t1;t2]) when p.p_prop land Param.pred_ATTACKER != 0 ->
            begin
              let t1' = rev_name_subst t1 in
              let t2' = rev_name_subst t2 in
              match t1', t2' with
                (FunApp({ f_cat = Name _; f_private = false },[]),
                 FunApp({ f_cat = Name _; f_private = false },[])) when
                equal_terms_modulo t1' t2' ->
                  { state with public = (t1',(t1', t2')) :: state.public }
              | _ ->
                  (* Public contains terms, not patterns
                     -> translate the pattern into a term.
                     If the translation fails because a name is not in the table, we have to stop. *)
                  if (not (is_in_public state.public (t1',t2') = None)) then
                    state
                  else
                    (* I introduce a variable for the recipe here,
                       and use it when displaying hyp_not_matched.
                       Note: it is important that the term t' is never a tuple.
                       Otherwise, it would be decomposed later, and the link
                       between the recipe in public and the one in hyp_not_matched
                       would be lost. *)
                    let recipe = Var (new_var ~orig:false "~M" (Terms.get_term_type t1')) in
                    { state with
                      public = (recipe,(t1',t2')) :: state.public;
                      hyp_not_matched = (Some recipe, Pred(p,[t1';t2']))::state.hyp_not_matched }
            end
        | _ ->
            let fact = rev_name_subst_fact tree.thefact in
            if List.exists (fun (_, fact') -> Terms.equal_facts fact fact') state.hyp_not_matched then
              (* Do not add [fact] in [state.hyp_not_matched] if it is already present *)
              state
            else
              { state with
                hyp_not_matched = (None, fact)::state.hyp_not_matched }
      end
  | FRemovedWithProof _ -> state
  | FRule (n, tags, constra, sons,_,_) ->
      (* Handle all subtrees first, then record the rule itself *)
      let rec init_sons_rule state1 = function
        | [] ->
            begin
              match tags with
                ProcessRule (hsl,nl) ->
                  {state1 with io_rule = (n, List.map (fun t -> rev_name_subst_fact t.thefact) sons,
                                          hsl, rev_name_subst_list nl,
                                          rev_name_subst_fact tree.thefact)::state1.io_rule}
              | Apply (f,_) when f.f_cat != Tuple ->
                  begin
                    let (p,c) =
                      match tree.thefact with
                        Pred(p,l) -> (p,rev_name_subst_bi l)
                    in
                    let h = List.map (function
                        { thefact = Pred(_,l) } -> (Terms.new_var ~orig:false "~X" (get_term_type_bi l), rev_name_subst_bi l)) sons
                    in
                    let h' = decompose_list_rev h in
                    (* concl_copy is the recipe used to compute the conclusion from the hypotheses *)
                    let recipe_concl = FunApp(f, (List.map (fun (x, y) -> Var x) h)) in
                    {state1 with prepared_attacker_rule = (p, h',(recipe_concl, c))::state1.prepared_attacker_rule}
                  end
              | Rn _ ->
                  begin
                    match tree.thefact with
                      Pred(p, l) ->
                        let t1',t2' = rev_name_subst_bi l in
                        if not (equal_terms_modulo t1' t2') then
                          Parsing_helper.internal_error "Rule Rn should conclude p(name,name) with the same name";
                        { state1 with prepared_attacker_rule = (p, [], (t1',(t1',t2')))::state1.prepared_attacker_rule }
                  end
              | _ -> state1
            end
        | h::t ->
            let state1' = init_rule state1 h in
            init_sons_rule state1' t
      in
      init_sons_rule state sons
  | FEquation son -> init_rule state son
(* Handle reductions i/o and in *)
(* Perform an input on a public channel (Res In) *)
let do_res_in cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts tc pat p tc' mess_term public_status next_f =
  (* The real list of processes is (List.rev_append seen_list (InputProcess :: rest_subprocess)) *)
  let (recipe, mess_list, oldpub) =
    match public_status with
      Some (recipe, m,o) -> (recipe, m,o)
    | None ->
        let new_recipe = Terms.new_var ~orig:false "~M" (Terms.get_term_type (fst mess_term)) in
        (Var new_recipe, decompose_term_rev (new_recipe, mess_term), [])
  in
  (* Remove the elements of mess_list' that are already in cur_state.public *)
  let mess_list' = update_term_list oldpub cur_state.public mess_list in
  let recipe' = Terms.copy_term4 recipe in
  (* When mess_list' is not empty, its first element is not in cur_state.public.
     Remember that point to avoid testing again that part of public *)
  current_cache_list := (mess_term, Some (recipe', mess_list', cur_state.public)) :: (!current_cache_list);
  if mess_list' != [] then raise Unify; (* The message is not public *)
  try
    made_forward_step := true;
    auto_cleanup_red (fun () ->
      let _ = bi_action (bi_match_pattern pat mess_term) in
      let name_params'' = update_name_params Always name_params' pat in
      let p' = auto_cleanup (fun () -> copy_process p) in
      let fact' = build_mess_fact cur_state.current_phase tc' mess_term in
      (* Add a precise fact when the input carries a PreciseTag *)
      let fact_list = match new_occs with
        | (InputTag _)::(PreciseTag(occ))::_ ->
            let occ_n = get_occurrence_name_for_precise occ name_params' in
            fact' :: (build_precise_fact occ_n mess_term) :: facts
        | (InputTag _) :: _ -> fact' :: facts
        | _ -> Parsing_helper.internal_error "[reduction_bipro.ml >> do_res_in] First element of new_occs should be an input tag."
      in
      normal_state next_f false
        { cur_state with
          subprocess = List.rev_append seen_list ((p', name_params'', new_occs, fact_list, Nothing) :: rest_subprocess);
          comment = RInput_Success(n, make_bi_choice tc', pat, recipe', make_bi_choice mess_term);
          previous_state = Some cur_state } n
    )
  with No_result ->
    (* Inputting the message mess_term on this input will always fail,
       even in the following of the trace *)
    current_cache_list := List.tl (!current_cache_list);
    raise Unify
  | FailOnlyOnSide _ ->
      if is_equivalence_goal cur_state.goal
      then
        (* SUCCESS: the pattern matching fails on one side only *)
        { cur_state with
          goal = NonInterfGoal(InputProcessTest([],[],make_bi_choice mess_term, (Some(n, List.nth cur_state.subprocess n, LocAttacker recipe')))) }
      else
        begin
          (* I can remove this message from the cache, since retrying this input
             with the same message will always fail on one side, and I do not want
             to consider that. *)
          current_cache_list := List.tl (!current_cache_list);
          raise Unify
        end
(* Perform a (Red I/O) reduction between an input and an asynchronous output *)
(* [do_async_res_io] performs a (Red I/O) reduction between the input at index
   [n] and an asynchronous output (an [Output(_,_,Nil,_)] process) on the same
   private channel carrying [mess_term].
   Raises [Unify] when no matching asynchronous output exists or when this
   communication cannot lead to a result. *)
let do_async_res_io cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts tc pat p tc' mess_term public_channel next_f =
  (* The real list of processes is (List.rev_append seen_list (InputProcess::rest_subprocess)).
     It differs from cur_state.subprocess only by the cache of input processes, so when
     looking for an output process, we can use cur_state.subprocess instead. *)
  current_cache_list := (mess_term, None) :: (!current_cache_list);
  (* Find the corresponding asynchronous output *)
  let rec find_asynchronous_output noutput = function
      [] -> raise Unify (* not found *)
    | ((Output(tc2, t2, Nil,out_occ), name_params2,occs2, facts2, cache_info2)::_) when
        (equal_bi_terms_modulo (get_choice tc2) tc') && (equal_bi_terms_modulo (get_choice t2) mess_term) -> noutput
    | _::rest_subprocess2 -> find_asynchronous_output (noutput+1) rest_subprocess2
  in
  let noutput = find_asynchronous_output 0 cur_state.subprocess in
  begin
    try
      made_forward_step := true;
      let fail_case input_fails =
        (* The pattern does not match *)
        let noutput' = if n>noutput then noutput else noutput-1 in
        let (_, name_params2,occs2, facts2, _) = List.nth cur_state.subprocess noutput in
        let (new_goal,comm_in_goal,attack_found) =
          update_corresp_goal cur_state.goal None
            (is_mess_goal cur_state.current_phase tc' mess_term)
        in
        let tc'' = make_bi_choice tc' in
        (* When the adversary is passive and the channel is public,
           the adversary eavesdrops the message sent by RIO_PatRemove *)
        let (opt_recipe, cur_state1) = optional_eavesdrop cur_state public_channel mess_term in
        let cur_state2 =
          { cur_state1 with
            subprocess = replace_at noutput' (Nil, name_params2,occs2, facts2, Nothing) (List.rev_append seen_list rest_subprocess);
            comment = RIO_PatRemove(n, tc'', pat, noutput, tc'', opt_recipe, make_bi_choice mess_term, comm_in_goal, input_fails);
            goal = new_goal;
            previous_state = Some cur_state
          }
        in
        let cur_state3 = do_rnil cur_state2 noutput' in
        if attack_found
        then cur_state3
        else next_f cur_state3
      in
      try
        auto_cleanup_red (fun () ->
          let _ = bi_action (bi_match_pattern pat mess_term) in
          let name_params'' = update_name_params Always name_params' pat in
          let p' = auto_cleanup (fun () -> copy_process p) in
          let fact' = build_mess_fact cur_state.current_phase tc' mess_term in
          let facts' = match new_occs with
            | (InputTag _)::(PreciseTag(occ))::_ ->
                let occ_n = get_occurrence_name_for_precise occ name_params' in
                fact' :: (build_precise_fact occ_n mess_term) :: facts
            | (InputTag _) :: _ -> fact' :: facts
            | _ -> Parsing_helper.internal_error "[reduction.ml >> do_async_res_io] First element of new_occs should be an input tag."
          in
          let tc'' = make_bi_choice tc' in
          let (_, name_params2,occs2, facts2, _) = List.nth cur_state.subprocess noutput in
          let (new_goal,comm_in_goal,attack_found) =
            update_corresp_goal cur_state.goal None
              (is_mess_goal cur_state.current_phase tc' mess_term)
          in
          (* When the adversary is passive and the channel is public,
             the adversary eavesdrops the message sent by RIO *)
          let (opt_recipe, cur_state1) = optional_eavesdrop cur_state public_channel mess_term in
          let cur_state' =
            { cur_state1 with
              subprocess = replace_at noutput (Nil, name_params2,occs2, facts2, Nothing)
                (List.rev_append seen_list ((p', name_params'', new_occs, facts', Nothing) :: rest_subprocess));
              comment = RIO(n, tc'', pat, noutput, tc'', opt_recipe, make_bi_choice mess_term, comm_in_goal);
              goal = new_goal;
              previous_state = Some cur_state
            }
          in
          (* Then do RNil on the Nil process that follows the output *)
          let cur_state3 = do_rnil cur_state' noutput in
          let ninput = if n > noutput then n-1 else n in
          if attack_found
          then cur_state3
          else normal_state next_f (cur_state3.public != cur_state.public) cur_state3 ninput
        )
      with
      | Unify -> fail_case true
      | FailOnlyOnSide _ ->
          if is_equivalence_goal cur_state.goal
          then
            (* SUCCESS: the pattern matching fails on one side only *)
            { cur_state with
              goal = NonInterfGoal(InputProcessTest([],[],make_bi_choice mess_term, Some(n, List.nth cur_state.subprocess n, LocProcess(noutput, List.nth cur_state.subprocess noutput)))) }
          else fail_case false
    with No_result ->
      current_cache_list := List.tl (!current_cache_list);
      raise Unify
  end
(* Perform a (Res I/O) reduction with a synchronous output *)
(* [do_sync_res_io] performs a (Res I/O) reduction between the input at index
   [n] and a synchronous output (with a non-Nil continuation, or whose message
   matters for the goal / a passive attacker) on the same channel.
   Raises [No_result] when no suitable synchronous output exists. *)
let do_sync_res_io cur_state seen_list rest_subprocess n name_params' new_occs facts tc pat p tc' public_channel next_f =
  (* The real list of processes is (List.rev_append seen_list (InputProcess::rest_subprocess)).
     It differs from cur_state.subprocess only by the cache of input processes, so when
     looking for an output process, we can use cur_state.subprocess instead. *)
  let rec find_synchronous_output noutput = function
      [] -> raise No_result (* Not found *)
    | ((Output(tc2,t2,p2,out_occ),name_params2,occs2, facts2, cache_info2)::rest_subprocess2) ->
        begin
          (* NOTE(review): a disabled guard appears here in mangled form in the
             original text, apparently
             "when (p2 != Nil) || public_channel || not (is_equivalence_goal cur_state.goal)";
             the same condition is tested explicitly below — confirm against upstream. *)
          try
            let tc2' = get_choice tc2 in
            let t2' = get_choice t2 in
            if equal_bi_terms_modulo tc2' tc' then
              begin
                let (new_goal,comm_in_goal,attack_found) =
                  update_corresp_goal cur_state.goal None
                    (is_mess_goal cur_state.current_phase tc2' t2')
                in
                (* When p2 is Nil and the Horn clause derivation does not justify the
                   input, the output is useless (because it does not allow to execute
                   more instructions), except in two situations:
                   - when the attacker is passive and the channel is public;
                     in this case, it allows the attacker to obtain the message
                     (public_channel is true in this case)
                   - when the communication itself is what makes the attack succeed,
                     that is, the goal is that communication.
                     (comm_in_goal is true in this case) *)
                if not ((p2 != Nil) || public_channel || comm_in_goal)
                then raise Unify;
                made_forward_step := true;
                (* The i/o reduction is possible, compute the reduced state *)
                let fact = build_mess_fact cur_state.current_phase tc' t2' in
                let facts' = match new_occs with
                  | (InputTag _)::(PreciseTag(occ))::_ ->
                      let occ_n = get_occurrence_name_for_precise occ name_params' in
                      fact :: (build_precise_fact occ_n t2') :: facts
                  | (InputTag _) :: _ -> fact :: facts
                  | _ -> Parsing_helper.internal_error "[reduction.ml >> do_sync_res_io] First element of new_occs should be an input tag."
                in
                let fail_case input_fails =
                  (* The pattern does not match *)
                  let noutput' = if n > noutput then noutput else noutput-1 in
                  (* When the adversary is passive and the channel is public,
                     the adversary eavesdrops the message sent by RIO_PatRemove *)
                  let (opt_recipe, cur_state1) = optional_eavesdrop cur_state public_channel t2' in
                  let cur_state' =
                    { cur_state1 with
                      subprocess = replace_at noutput' (p2, name_params2, occs2, facts2, Nothing)
                        (List.rev_append seen_list rest_subprocess);
                      comment = RIO_PatRemove(n, make_bi_choice tc', pat, noutput, tc2, opt_recipe, t2, comm_in_goal, input_fails);
                      goal = new_goal;
                      previous_state = Some cur_state }
                  in
                  if attack_found
                  then cur_state'
                  else normal_state next_f (cur_state'.public != cur_state.public) cur_state' noutput'
                in
                try
                  auto_cleanup_red (fun () ->
                    let _ = bi_action (bi_match_pattern pat t2') in
                    let name_params'' = update_name_params Always name_params' pat in
                    let p' = auto_cleanup (fun () -> copy_process p) in
                    (* When the adversary is passive and the channel is public,
                       the adversary eavesdrops the message sent by RIO *)
                    let (opt_recipe, cur_state1) = optional_eavesdrop cur_state public_channel t2' in
                    let cur_state' =
                      { cur_state1 with
                        subprocess = replace_at noutput (p2, name_params2, (OutputTag out_occ)::occs2, facts2, Nothing)
                          (List.rev_append seen_list ((p', name_params'', new_occs, facts', Nothing) :: rest_subprocess));
                        comment = RIO(n, make_bi_choice tc', pat, noutput, tc2, opt_recipe, t2, comm_in_goal);
                        goal = new_goal;
                        previous_state = Some cur_state }
                    in
                    if attack_found
                    then cur_state'
                    else normal_state2 next_f (cur_state'.public != cur_state.public) cur_state' noutput n
                  )
                with
                | Unify -> fail_case true
                | FailOnlyOnSide _ ->
                    if is_equivalence_goal cur_state.goal
                    then
                      (* SUCCESS: the pattern matching fails on one side only *)
                      { cur_state with
                        goal = NonInterfGoal(InputProcessTest([],[],t2,Some(n, List.nth cur_state.subprocess n, LocProcess(noutput, List.nth cur_state.subprocess noutput)))) }
                    else fail_case false
              end
            else raise Unify
          with Unify | No_result ->
            find_synchronous_output (noutput+1) rest_subprocess2
        end
    | _::rest_subprocess2 -> find_synchronous_output (noutput+1) rest_subprocess2
  in
  find_synchronous_output 0 cur_state.subprocess
(* Perform a get (Res Get) *)
(* [find_term stop_l t l] tests whether the bi-term [t] occurs, modulo the
   equational theory, in the prefix of [l] that precedes the sublist [stop_l].
   [stop_l] is compared by physical equality, so it should be a tail of [l]
   (or [] / a list from an earlier snapshot to scan all of [l]). *)
let rec find_term stop_l t l =
  match l with
  | _ when l == stop_l -> false
  | [] -> false
  | hd :: tl -> equal_bi_terms_modulo t hd || find_term stop_l t tl
(* [do_res_get] performs a get (rule Res Get) on the process at index [n],
   reading the entry [mess_term] from the tables. [old_tables] is the part of
   the tables already tried (skipped via physical equality in [find_term]).
   Raises [Unify] when the entry is absent or cannot lead to a result. *)
let do_res_get cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts pat t p mess_term old_tables next_f =
  (* The real list of processes is (List.rev_append seen_list (InputProcess::rest_subprocess)) *)
  current_cache_list := mess_term :: (!current_cache_list);
  debug_print "Get";
  if not (find_term old_tables mess_term cur_state.tables) then raise Unify; (* The entry is not found *)
  debug_print "Ok, the entry is present";
  try
    made_forward_step := true;
    auto_cleanup_red (fun () ->
      (* we check that the pattern pat matches and t evaluates to true *)
      let _ = bi_action (bi_match_pattern_and_test pat mess_term t) in
      let name_params'' = update_name_params Always name_params' pat in
      let p' = auto_cleanup (fun () -> copy_process p) in
      let fact' = build_table_fact cur_state.current_phase mess_term in
      let facts' = match new_occs with
        | (GetTag _)::(PreciseTag(occ))::_ ->
            let occ_n = get_occurrence_name_for_precise occ name_params' in
            fact' :: (build_precise_fact occ_n mess_term) :: facts
        | (GetTag _) :: _ -> fact' :: facts
        | _ -> Parsing_helper.internal_error "[reduction.ml >> do_res_get] First element of new_occs should be a Get tag."
      in
      normal_state next_f false
        { cur_state with
          subprocess = List.rev_append seen_list ((p', name_params'', new_occs, facts', Nothing) :: rest_subprocess);
          comment = RGet_In(n, pat, t, make_bi_choice mess_term);
          previous_state = Some cur_state } n
    )
  with No_result ->
    (* Using the entry mess_term on this input will always fail,
       even in the following of the trace *)
    current_cache_list := List.tl (!current_cache_list);
    raise Unify
  | FailOnlyOnSide _ ->
      if is_equivalence_goal cur_state.goal then
        (* SUCCESS: the pattern matching fails on one side only *)
        { cur_state with
          goal = NonInterfGoal(ProcessTest([],[],Some(n, List.nth cur_state.subprocess n))) }
      else
        next_f
          { cur_state with
            subprocess = remove_at n cur_state.subprocess;
            comment = RGet_Remove(n, pat, t);
            previous_state = Some cur_state }
(* Dispatch between (Res In), asynchronous (Res I/O), synchronous (Res I/O), and (Res Get).
   May also execute (Insert) in case an insert has been delayed because it prevented executing the
   else branch of Get. *)
(* This exception is used only when I should take the
   else of Get and I cannot because an element that
   makes Get succeed already occurs. *)
exception Backtrack_get
(* [find_in_out next_f cur_state n seen_list remaining] scans the subprocess
   list for an input, get, or insert to reduce. [n] is the index in the full
   subprocess list of the head of [remaining]; [seen_list] holds the processes
   already examined, in reverse order (possibly with updated caches).
   Tries (Res In), asynchronous/synchronous (Res I/O), (Res Get) and (Insert),
   backtracking over messages/entries; raises [No_result] when nothing applies. *)
let rec find_in_out next_f cur_state n seen_list = function
    [] -> raise No_result
  | ((Input(tc,pat,p,occ) as proc ,name_params,occs, facts, cache_info)::rest_subprocess) ->
      debug_print ("Trying Input on process " ^ (string_of_int n));
      begin
        match cache_info with
          OutputInfo _ | GetInfo _ -> Parsing_helper.internal_error "Should not have output/get info for an input!"
        | InputInfo(tc_list, oldpub, tc', name_params', new_occs, l) ->
            (* Cached case: the channel has already been evaluated *)
            let tc_list' = update_term_list oldpub cur_state.public tc_list in
            if (!Param.active_attacker) && (tc_list' = []) then
              begin
                (* The channel is public and the attacker is active, try (Res In) *)
                let current_cache_list = ref [] in
                let rec do_l = function
                    [] ->
                      let seen_list' = (proc ,name_params,occs, facts,
                                        InputInfo(tc_list', cur_state.public, tc', name_params', new_occs, !current_cache_list)) :: seen_list in
                      find_in_out next_f cur_state (n+1) seen_list' rest_subprocess
                  | (mess_term, public_status)::l ->
                      try
                        do_res_in cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts tc pat p tc' mess_term public_status next_f
                      with Unify ->
                        do_l l
                in
                do_l l
              end
            else
              begin
                (* The channel is private or the attacker is passive, try (Res I/O) *)
                let current_cache_list = ref [] in
                let public_channel = (not (!Param.active_attacker)) && (tc_list' = []) in
                let rec do_l = function
                    [] ->
                      let seen_list' = (proc ,name_params,occs, facts,
                                        InputInfo(tc_list', cur_state.public, tc', name_params', new_occs, !current_cache_list)) :: seen_list in
                      begin
                        try
                          do_sync_res_io cur_state seen_list rest_subprocess n name_params' new_occs facts tc pat p tc' public_channel next_f
                        with Unify | No_result ->
                          find_in_out next_f cur_state (n+1) seen_list' rest_subprocess
                      end
                  | (mess_term,_)::l ->
                      try
                        do_async_res_io cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts tc pat p tc' mess_term public_channel next_f
                      with Unify ->
                        do_l l
                in
                do_l l
              end
        | Nothing ->
            (* No cache: evaluate the channel, then dispatch as above *)
            let seen_list' = ref ((proc, name_params, occs, facts, cache_info) :: seen_list) in
            try
              auto_cleanup_red (fun () ->
                let (tc', name_params') = term_evaluation_name_params (OInChannel(occ)) tc name_params in
                (* Before the first phase with choice, both sides share one variable *)
                let m =
                  if cur_state.current_phase < get_min_choice_phase() then
                    let v = Reduction_helper.new_var_pat pat in
                    (v,v)
                  else
                    (Reduction_helper.new_var_pat pat, Reduction_helper.new_var_pat pat)
                in
                let fact = build_mess_fact cur_state.current_phase tc' m in
                let (new_occs,new_facts) =
                  let ty = get_term_type (fst m) in
                  if Reduction_helper.exists_specific_precise_events_of_occ occ (Action ty)
                  then
                    let occ_n = get_occurrence_name_for_precise occ name_params' in
                    ((InputTag occ) :: (PreciseTag(occ)) :: occs, (fact :: (build_precise_fact occ_n m) :: facts))
                  else ((InputTag occ) :: occs, (fact :: facts))
                in
                let new_recipe = Terms.new_var ~orig:false "Useless" (Terms.get_term_type (fst tc')) in
                let tc_list = decompose_term_rev (new_recipe, tc') in
                let tc_list' = remove_first_in_public cur_state.public tc_list in
                if (!Param.active_attacker) && (tc_list' = []) then
                  begin
                    (* Input on a public channel, and the attacker is active: apply (Res In) *)
                    let current_cache_list = ref [] in
                    try
                      find_io_rule (function
                          [mess_term1;mess_term2] ->
                            do_res_in cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts tc pat p tc' (mess_term1,mess_term2) None next_f
                        | _ -> Parsing_helper.internal_error "input case; reduction_bipro.ml"
                      ) new_occs new_facts name_params' [fst m; snd m] cur_state.io_rule
                    with Unify ->
                      seen_list' := (proc, name_params, occs, facts,
                                     InputInfo([], [], tc', name_params', new_occs, !current_cache_list)) :: seen_list;
                      raise No_result
                  end
                else
                  begin
                    (* Input on a private channel or the attacker is passive: apply (Res I/O)
                       First try an asynchronous output, with a corresponding clause in the tree *)
                    let current_cache_list = ref [] in
                    let public_channel = (not (!Param.active_attacker)) && (tc_list' = []) in
                    try
                      find_io_rule (function
                          [mess_term1;mess_term2] ->
                            do_async_res_io cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts tc pat p tc' (mess_term1,mess_term2) public_channel next_f
                        | _ -> Parsing_helper.internal_error "input case; reduction_bipro.ml"
                      ) new_occs new_facts name_params' [fst m; snd m] cur_state.io_rule
                    with Unify ->
                      seen_list' := (proc, name_params,occs, facts,
                                     InputInfo(tc_list', cur_state.public, tc', name_params', new_occs, !current_cache_list)) :: seen_list;
                      (* Try a synchronous output *)
                      do_sync_res_io cur_state seen_list rest_subprocess n name_params' new_occs facts tc pat p tc' public_channel next_f
                  end
              )
            with Unify | No_result ->
              find_in_out next_f cur_state (n+1) (!seen_list') rest_subprocess
            | FailOnlyOnSide _ ->
                if is_equivalence_goal cur_state.goal then
                  (* SUCCESS: the evaluation of the channel name fails on one side only *)
                  { cur_state with
                    goal = NonInterfGoal(ProcessTest([],[],Some(n, List.nth cur_state.subprocess n))) }
                else
                  find_in_out next_f
                    { cur_state with
                      subprocess = remove_at n cur_state.subprocess;
                      comment = RInput_Remove(n, tc, pat, Blocks);
                      previous_state = Some cur_state } n seen_list rest_subprocess
      end
  | ((Get(pat,t,p,p_else,occ) as proc ,name_params,occs, facts, cache_info)::rest_subprocess) ->
      debug_print ("Trying Get on process " ^ (string_of_int n));
      begin
        match cache_info with
          OutputInfo _ | InputInfo _ -> Parsing_helper.internal_error "Should not have input/output info for a get!"
        | GetInfo(old_tables, l) ->
            (* Cached case: retry the remaining candidate entries [l] *)
            let new_occs =
              if Reduction_helper.exists_specific_precise_events_of_occ occ (Action Param.table_type) then
                (GetTag occ) :: (PreciseTag occ) :: occs
              else
                (GetTag occ) :: occs
            in
            let current_cache_list = ref [] in
            let rec do_l = function
                [] ->
                  let seen_list' = (proc ,name_params,occs, facts,
                                    GetInfo(cur_state.tables, !current_cache_list)) :: seen_list in
                  find_in_out next_f cur_state (n+1) seen_list' rest_subprocess
              | mess_term::l ->
                  try
                    do_res_get cur_state seen_list rest_subprocess n current_cache_list name_params new_occs facts pat t p mess_term old_tables next_f
                  with Unify ->
                    do_l l
            in
            do_l l
        | Nothing ->
            let seen_list' = ref ((proc, name_params, occs, facts, cache_info) :: seen_list) in
            try
              auto_cleanup_red (fun () ->
                let m =
                  if cur_state.current_phase < get_min_choice_phase() then
                    let v = Reduction_helper.new_var_pat pat in
                    (v,v)
                  else
                    (Reduction_helper.new_var_pat pat, Reduction_helper.new_var_pat pat)
                in
                let fact = build_table_fact cur_state.current_phase m in
                let (new_occs,new_facts) =
                  if Reduction_helper.exists_specific_precise_events_of_occ occ (Action Param.table_type)
                  then
                    let occ_n = get_occurrence_name_for_precise occ name_params in
                    ((GetTag occ) :: (PreciseTag occ) :: occs, fact :: (build_precise_fact occ_n m) :: facts)
                  else ((GetTag occ) :: occs, fact :: facts)
                in
                begin
                  let current_cache_list = ref [] in
                  try
                    find_io_rule (function
                        [mess_term1;mess_term2] ->
                          do_res_get cur_state seen_list rest_subprocess n current_cache_list name_params new_occs facts pat t p (mess_term1,mess_term2) [] next_f
                      | _ -> Parsing_helper.internal_error "get case; reduction_bipro.ml"
                    ) new_occs new_facts name_params [fst m; snd m] cur_state.io_rule
                  with Unify ->
                    if p_else != Nil then
                      (* See if we should take the else branch if present *)
                      begin
                        try
                          let new_occs = (GetTagElse occ) :: occs in
                          find_io_rule (function
                              [] ->
                                (* We should take the else branch, since a clause uses that branch *)
                                debug_print "Get: else branch should be taken";
                                if List.exists (fun mess_term ->
                                  try
                                    auto_cleanup (fun () ->
                                      (* we check that the pattern pat matches and t evaluates to true *)
                                      let _ = bi_action (bi_match_pattern_and_test pat mess_term t) in
                                      true)
                                  with Unify -> false
                                  (* When FailOnlyOnSide _ is raised, it will be catched above and
                                     the trace reconstruction succeeds. *)) cur_state.tables
                                then
                                  begin
                                    debug_print "Get: an element of the table matches, cannot take the else branch, backtracking";
                                    (* The Get process is blocked forever: the else branch should be taken,
                                       but the table contains an element that prevents it. Since elements
                                       are only added to tables, this situation will not change.
                                       So I backtrack. *)
                                    has_backtrack_get := true;
                                    raise Backtrack_get
                                  end
                                else
                                  begin
                                    debug_print "Get: taking the else branch";
                                    normal_state next_f false
                                      { cur_state with
                                        subprocess = List.rev_append seen_list ((p_else, name_params, new_occs, facts, Nothing) :: rest_subprocess);
                                        comment = RGet_Else(n, pat, t);
                                        previous_state = Some cur_state } n
                                  end
                            | _ -> Parsing_helper.internal_error "get else case; reduction_bipro.ml"
                          ) new_occs facts name_params [] cur_state.io_rule
                        with Unify ->
                          seen_list' := (proc, name_params, occs, facts,
                                         GetInfo(cur_state.tables, !current_cache_list)) :: seen_list;
                          raise No_result
                      end
                    else
                      begin
                        seen_list' := (proc, name_params, occs, facts,
                                       GetInfo(cur_state.tables, !current_cache_list)) :: seen_list;
                        raise No_result
                      end
                end
              )
            with Unify | No_result ->
              find_in_out next_f cur_state (n+1) (!seen_list') rest_subprocess
            | FailOnlyOnSide _ ->
                if is_equivalence_goal cur_state.goal then
                  (* SUCCESS: an element of the table matches on one side and not on the other *)
                  { cur_state with
                    goal = NonInterfGoal(ProcessTest([],[],Some(n, List.nth cur_state.subprocess n))) }
                else
                  find_in_out next_f
                    { cur_state with
                      subprocess = remove_at n cur_state.subprocess;
                      comment = RGet_Remove(n, pat, t);
                      previous_state = Some cur_state } n seen_list rest_subprocess
            | Backtrack_get -> raise No_result
      end
  | ((Insert(t,p,occ), name_params, occs, facts, cache_info) as sub_proc)::rest_subprocess ->
      debug_print "Doing Insert";
      begin
        let new_occs = (InsertTag occ) :: occs in
        let new_element_inserted = ref false in
        try
          auto_cleanup_red (fun () ->
            let t' = bi_action (term_evaluation_fail t) in
            let already_in = List.exists (equal_bi_terms_modulo t') cur_state.tables in
            new_element_inserted := not already_in;
            (* This test will probably never succeed, because in
               case it would succeed, it would have been detected earlier
               in the [Insert] case of [do_red_nointeract] *)
            let (new_goal,insert_in_goal,success) =
              update_corresp_goal cur_state.goal None
                (is_table_goal cur_state.current_phase t')
            in
            let new_state =
              { cur_state with
                subprocess = List.rev_append seen_list ((p, name_params, new_occs, facts, Nothing) :: rest_subprocess);
                tables = if already_in then cur_state.tables else t'::cur_state.tables;
                comment = RInsert_Success(n, make_bi_choice t', insert_in_goal);
                previous_state = Some cur_state;
                goal = new_goal
              }
            in
            if success
            then new_state
            else normal_state next_f false new_state n
          )
        with Unify | FailOnlyOnSide _ ->
          Parsing_helper.internal_error "Insert: Unify/FailOnlyOnSide _ should have been detected on the first try of that insert"
        | No_result ->
            (* The attack reconstruction failed after doing the insert.
               Try not doing it, in case that allows executing the else branch of a Get. *)
            if (!has_backtrack_get) && (!new_element_inserted) then
              find_in_out next_f cur_state (n+1) (sub_proc :: seen_list) rest_subprocess
            else
              raise No_result
      end
  | sub_proc::rest_subprocess ->
      (* No input/get/insert at the head: move on to the next process *)
      find_in_out next_f cur_state (n+1) (sub_proc :: seen_list) rest_subprocess
(* Handle phases *)
(* [extract_phase n processes] modifies the processes for a [phase n] transition:
removes processes with no phase prefix or a phase prefix less than [n];
removes the phase prefix [phase n], leaving the rest of the process;
leaves processes with phase prefix greater than [n] unchanged. *)
(* Build the subprocess list after a [phase n] transition: processes without a
   phase prefix (or with one below [n]) are dropped, a [phase n] prefix is
   stripped, and later phases are kept unchanged (with a reset cache). *)
let rec extract_phase n = function
  | [] -> []
  | (Phase(n', p, occ), name_params, occs, facts, cache_info) :: rest ->
      let rest' = extract_phase n rest in
      if n' = n then (p, name_params, occs, facts, Nothing) :: rest'
      else if n' > n then (Phase(n', p, occ), name_params, occs, facts, Nothing) :: rest'
      else rest'
  | _ :: rest -> extract_phase n rest
(* [find_phase current_phase None processes] returns either
[None] when no process in [processes] starts with a phase, or
[Some n] when a process in [processes] starts with phase [n] and this is the lowest such phase.
It is an error if a process in [processes] starts with a phase less or equal to [current_phase]. *)
(* [find_phase current_phase found_phase processes] returns [Some m] where [m]
   is the smallest phase some process starts with (folding from the accumulator
   [found_phase]), or [found_phase] itself when none does. It is a user error
   if a process starts with a phase less than or equal to [current_phase]. *)
let rec find_phase current_phase found_phase = function
  | [] -> found_phase
  | (Phase(n, _, _), _, _, _, _) :: rest ->
      if n <= current_phase then
        Parsing_helper.user_error "Phases should be in increasing order.";
      (* Keep the minimum phase seen so far *)
      let best =
        match found_phase with
        | Some m when m <= n -> found_phase
        | _ -> Some n
      in
      find_phase current_phase best rest
  | _ :: rest -> find_phase current_phase found_phase rest
(* [do_phase next_f cur_state] performs the phase transition to the smallest
   available phase, or raises [No_result] when no process starts with a phase
   (counting the failed trace for statistics / printing debug info). *)
let do_phase next_f cur_state =
  match find_phase cur_state.current_phase None cur_state.subprocess with
    None ->
      if !made_forward_step then
        begin
          incr failed_traces;
          made_forward_step := false
        end;
      (* Useful for debugging *)
      if !debug_backtracking then
        begin
          ignore (Display.Text.display_reduc_state Display.bi_term_to_term true cur_state);
          print_string "Blocked. Backtracking...\n"
        end
      else
        debug_print "Backtracking";
      raise No_result
  | Some n ->
      debug_print "Doing Phase";
      made_forward_step := true;
      (* Reclose public, since new function symbols may become applicable *)
      let cur_state' = close_public_phase_change cur_state n in
      (* Do transition to phase n *)
      let cur_state'' =
        { cur_state' with
          subprocess = extract_phase n cur_state'.subprocess;
          previous_state = Some cur_state;
          current_phase = n;
          comment = RPhase(n) }
      in
      normal_state_all next_f false cur_state''
(* Put all reductions together *)
(* One reduction step: try input/get/insert reductions first; when none
   applies, try a phase transition. *)
let reduction_step next_f state =
  try
    find_in_out next_f state 0 [] state.subprocess
  with No_result ->
    do_phase next_f state
(* Trace reconstruction with backtracking: keep taking reduction steps,
   using this function itself as the continuation. *)
let rec reduction_backtrack state =
  reduction_step reduction_backtrack state
(* Trace reconstruction without backtracking: commit to the first reduction
   found (escaping via [Reduced]), display it, drop the history, and continue
   from the reduced state. *)
let rec reduction_nobacktrack state =
  try
    reduction_step (fun state -> raise (Reduced state)) state
  with Reduced one_red_state ->
    display_trace one_red_state;
    Param.display_init_state := false;
    reduction_nobacktrack { one_red_state with previous_state = None }
(* Entry point for trace reconstruction; the [Param.trace_backtracking]
   setting selects between the backtracking and the committing strategy. *)
let reduction state =
  match !Param.trace_backtracking with
  | true -> reduction_backtrack state
  | false -> reduction_nobacktrack state
(* Build the goal *)
(* [analyze_tree tree] turns the root clause of an equivalence derivation into
   the corresponding non-interference goal (process test, destructor failure
   test, communication test, or equality test).
   Internal error on any other derivation shape. *)
let analyze_tree tree =
  match tree.desc with
    FRule(_, lbl, _, hyp,_,_) ->
      begin
        match lbl, hyp with
          ProcessRule(hyp_tags, name_params), hyp ->
            ProcessTest([], [], None)
        | Rfail(p), hyp ->
            NIFailTest((match hyp with
                [{ thefact = Pred(_, l) }] -> rev_name_subst_bi l
              | _ -> Parsing_helper.internal_error "Unexpected derivation for choice"), None)
        | TestComm(pi,po), [{thefact = Pred(_,lin)}; {thefact = Pred(_,lout)}] ->
            CommTest(rev_name_subst_bi lin, rev_name_subst_bi lout, None)
        | TestEq(p), [{thefact = Pred(_,l1)};{thefact = Pred(_,l2)}] ->
            NIEqTest((rev_name_subst_bi l1, None), (rev_name_subst_bi l2, None))
        | _ -> Parsing_helper.internal_error "Unexpected clause concluding the derivation for choice"
      end
  | _ -> Parsing_helper.internal_error "Unexpected derivation for choice"
(* [build_goal tree query] builds the goal of the trace from the derivation
   [tree]: a list of correspondence goals for an (encoded) correspondence
   query, and a non-interference goal otherwise. *)
let build_goal tree = function
  | CorrespQEnc _ ->
      let (fact_list,_) = get_corresp_goals tree in
      let goal_list =
        List.map (function Pred(p,_) as pred_goal ->
          (* end2 predicates are event goals; everything else is a plain fact *)
          if p == Param.end2_pred
          then EventGoal(rev_name_subst_fact pred_goal,None)
          else Fact(rev_name_subst_fact pred_goal,None,false)
        ) fact_list
      in
      CorrespGoal goal_list
  | _ -> NonInterfGoal (analyze_tree tree)
(* Verify correspondence query *)
(* Raised internally when the reconstructed trace is shown to falsify the query. *)
exception FalseQuery
(* [extract_conclusion_query restwork concl] decomposes the conclusion of a
   query into (bi-events, facts, constraints, left equalities, right
   equalities) and calls [restwork] on that tuple; disjunctions are explored
   by backtracking through [Unify]. *)
let rec extract_conclusion_query restwork = function
  | QTrue -> restwork ([],[],Terms.true_constraints,[],[])
  | QFalse -> raise Unify
  | QEvent (QSEvent _) -> Parsing_helper.internal_error "[reduction_bipro.ml >> extract_conclusion_query] QSEvent should only occur in query for processes."
  | QEvent(QFact(p,_,l)) -> restwork ([],[Pred(p,l)],Terms.true_constraints,[],[])
  | QEvent (QNeq (t1,t2)) -> restwork ([], [], Terms.constraints_of_neq t1 t2, [], [])
  | QEvent (QGeq (t1,t2)) -> restwork ([], [], Terms.constraints_of_geq t1 t2, [], [])
  | QEvent (QIsNat t) -> restwork ([],[],Terms.constraints_of_is_nat t,[],[])
  | QEvent (QEq (t1,t2)) -> restwork ([], [], Terms.true_constraints, [t1], [t2])
  | QEvent((QSEvent2(t1,t2))) -> restwork ([t1,t2],[],Terms.true_constraints,[],[])
  | NestedQuery _ -> Parsing_helper.internal_error "[reduction_bipro.ml >> extract_conclusion_query] There should not be any nested query in correspondance queries for biprocess."
  | QAnd(concl1,concl2) ->
      (* Conjunction: combine the decompositions of both sides *)
      extract_conclusion_query (fun (ev1, facts1, constra1, eq_left1, eq_right1) ->
        extract_conclusion_query (fun (ev2, facts2, constra2, eq_left2, eq_right2) ->
          restwork (ev1@ev2, facts1@facts2, Terms.wedge_constraints constra1 constra2, eq_left1@eq_left2, eq_right1@eq_right2)
        ) concl2
      ) concl1
  | QOr(concl1,concl2) ->
      (* Disjunction: try the left disjunct, fall back to the right on Unify *)
      try
        extract_conclusion_query restwork concl1
      with Unify ->
        extract_conclusion_query restwork concl2
(* [find_in_event_table restwork (t1,t2) table] looks for an executed event
   whose two sides unify (modulo the equational theory) with [t1] and [t2],
   then calls [restwork ()]; raises [Unify] when no table entry matches. *)
let rec find_in_event_table restwork ((t1,t2) as ev0) = function
    [] -> raise Unify
  | ev::rest ->
      try
        let ev1 = choice_in_term 1 ev
        and ev2 = choice_in_term 2 ev in
        TermsEq.unify_modulo_list (fun () ->
          restwork ()
        ) [t1;t2] [ev1;ev2]
      with Unify -> find_in_event_table restwork ev0 rest
(* [find_event_list restwork event_table evlist] checks, in CPS style, that
   every bi-event of [evlist] occurs in [event_table] (modulo the equational
   theory), then calls [restwork ()]. Raises [Unify] when some event is
   missing; backtracks over table entries via [find_in_event_table]. *)
let rec find_event_list restwork event_table evlist =
  match evlist with
  | [] -> restwork ()
  | ev :: remaining ->
      let continue () = find_event_list restwork event_table remaining in
      find_in_event_table continue ev event_table
(* The fact bad() with no arguments, concluding attack clauses. *)
let bad_fact = Pred(Param.bad_pred, [])
(* [check_conclusion_query restwork event_table concl_q] checks that one
   disjunct of the query conclusion [concl_q] is satisfied by the executed
   events in [event_table], then calls [restwork ()]; raises [Unify] when no
   disjunct can be satisfied. *)
let check_conclusion_query restwork event_table concl_q =
  extract_conclusion_query (fun (evlist, facts, constra, eq_left, eq_right) ->
    find_event_list (fun () ->
      TermsEq.unify_modulo_list (fun () ->
        (* We first look at the natural number predicates *)
        TermsEq.close_constraints_eq_synt (fun constra' ->
          (* [facts] should always be empty: lemmas and axioms never
             use attacker, mess, table in conclusion, and user-defined
             predicates are not used with biprocesses.
             If facts were not empty, not checking them means that
             I approximate: a query may be considered true when
             it is in fact false. This approximation is fine:
             ProVerif will consider that the found trace does not falsify
             the query and will answer "cannot be proved". *)
          let constra'' = TermsEq.remove_syntactic_constra constra' in
          begin
            try
              TermsEq.check_constraints constra''
            with TermsEq.FalseConstraint -> raise Unify
          end;
          (* The current hypothesis has been satisfied *)
          restwork ()
        ) constra
      ) eq_left eq_right
    ) event_table evlist
  ) concl_q
(* [check_query_falsified_rec restwork event_table concl_q evl goall] matches
   the query premises [evl] against the trace goals [goall] (unifying modulo
   the equational theory), then checks the conclusion [concl_q] against the
   executed events. Raises [Unify] when the trace falsifies the query under
   the current matching (so the caller concludes falsification); internal
   error when the goals do not have the shape the query requires. *)
let rec check_query_falsified_rec restwork event_table concl_q evl goall =
  match (evl, goall) with
    [], [] ->
      (* The query does not contain any injective event. *)
      let concl_q' =
        Terms.auto_cleanup (fun () ->
          Terms.copy_conclusion_query2 concl_q
        )
      in
      check_conclusion_query restwork event_table concl_q'
  | ev::rest_evl, (Fact(goal,_,_) | EventGoal(goal,_))::rest_goall ->
      (* Pair up the next premise with the next goal of the trace *)
      let (l,l') =
        match ev, goal with
          QFact(p,_,l), Pred(p',l') when p == p' -> l,l'
        | QSEvent2(t1,t2), Pred(pr,[t1';t2']) when pr == Param.end2_pred -> [t1;t2],[t1';t2']
        | _ ->
            print_string "Query: "; Display.Text.display_event ev; print_newline();
            print_string "Goal: "; Display.Text.display_fact goal; print_newline();
            Parsing_helper.internal_error "The goal of the trace does not match the query (1)"
      in
      begin
        try
          TermsEq.unify_modulo_list (fun () ->
            try
              check_query_falsified_rec restwork event_table concl_q rest_evl rest_goall
            with Unify -> raise FalseQuery
          ) l l'
        with
        | Unify ->
            print_string "Query: "; Display.Text.WithLinks.term_list l; print_newline();
            print_string "Goal: "; Display.Text.WithLinks.term_list l'; print_newline();
            Parsing_helper.internal_error "The goal of the trace does not match the query (2)"
        | FalseQuery -> raise Unify
      end
  | _ ->
      Parsing_helper.internal_error "The goal of the trace does not match the query (3)"
(* [check_query_falsified q final_state] returns [true] when the
   reconstructed trace [final_state] falsifies the query [q],
   and [false] when the trace may not falsify it. *)
let check_query_falsified q final_state =
  (* Include in [event_table] the executed events *)
  let event_table = List.rev final_state.events in
  (*
  List.iter (fun (t,_) ->
    print_string "Event found "; Display.Text.display_term t; print_newline()) event_table;
  *)
  let Before(evl, hyp) = q in
  match final_state.goal with
    CorrespGoal(goall) ->
      begin
        (* The trace corresponds to a standard clause *)
        try
          check_query_falsified_rec (fun () ->
            (* The trace may not falsify the query *)
            Display.Def.print_line "I could not confirm that the previous trace falsifies the query.";
            false
              ) event_table hyp (List.rev evl) (List.rev goall)
        with Unify -> true
      end
  | _ -> Parsing_helper.internal_error "The goal of the trace does not match the query (4)"
(* Main trace reconstruction function *)
(* [do_reduction opt_query axioms tree] attempts to reconstruct an attack
   trace from the derivation [tree]. It initializes the global reconstruction
   state, builds the initial reduction state, runs the reduction, displays
   the trace, and returns [true] when a trace contradicting the property was
   found, [false] otherwise (including when reconstruction is blocked). *)
let do_reduction opt_query axioms tree =
  (* Profile.start(); *)
  debug_print "Initializing";
  has_backtrack_get := false;
  made_forward_step := true;
  failed_traces := 0;
  let freenames = (!Param.current_state).pi_freenames in
  let public_init = public_build freenames in
  public_free := public_init;
  Param.display_init_state := true;
  init_name_mapping freenames;
  try
    Reduction_helper.instantiate_natural_predicates (fun () ->
      close_tree tree;
      let ({ proc = main_process }, query) = Param.get_process_query (!Param.current_state) in
      (* Initial state: main process alone, initial public knowledge,
         phase 0, no events/tables/barriers yet. *)
      let init_state =
        { goal = (build_goal tree query);
          subprocess = [(main_process, [],[],[],Nothing)];
          public = List.map (fun t -> (t,(t, t))) public_init;
          pub_vars = public_init;
          tables = [];
          io_rule = [];
          prepared_attacker_rule = [];
          previous_state = None;
          hyp_not_matched = [];
          assumed_false = [];
          current_phase = 0;
          comment = RInit;
          events = [];
          barriers = []
        }
      in
      let res =
        begin
          try
            let state = init_rule init_state tree in
            (* Close initially the set public *)
            let state = close_public_initial state in
            if !debug_find_io_rule then
              begin
                auto_cleanup (fun () ->
                  print_string "Available rules:";
                  Display.Text.newline();
                  List.iter display_rule state.io_rule)
              end;
            debug_print "Initialization done";
            if !Param.html_output then
              begin
                let qs = string_of_int (!Param.derivation_number) in
                Display.LangHtml.openfile ((!Param.html_dir) ^ "/trace" ^ qs ^ ".html") ("ProVerif: trace for query " ^ qs);
                Display.Html.print_string "<H1>Trace</H1>\n"
              end;
            (* Run the actual reduction to completion *)
            let final_state = normal_state reduction true state 0 in
            display_trace final_state;
            let dot_err = Reduction_helper.create_pdf_trace Display.bi_term_to_term noninterftest_to_string "" final_state in
            if !Param.html_output then
              begin
                Display.Html.display_goal Display.bi_term_to_term noninterftest_to_string final_state true;
                Display.LangHtml.close();
                let qs = string_of_int (!Param.derivation_number) in
                Display.Html.print_string ("<A HREF=\"trace" ^ qs ^ ".html\">Trace</A><br>\n");
                if (not !Param.command_line_graph_set) && (!Param.trace_backtracking && (dot_err = 0)) then
                  Display.Html.print_string ("<A HREF=\"trace" ^ qs ^ ".pdf\">Trace graph</A><br>\n")
              end
            else
              Display.Text.display_goal Display.bi_term_to_term noninterftest_to_string final_state true;
            (* Check the validity of the trace w.r.t. axioms *)
            Lemma.check_axioms final_state axioms;
            if final_state.hyp_not_matched = []
            then
              match opt_query with
              | Some q -> check_query_falsified q final_state
              | _ -> true
            else false
          with No_result ->
            if not (!Param.trace_backtracking) then
              Display.Def.print_line "Blocked!";
            if !Param.html_output then
              begin
                Display.LangHtml.close();
                if not (!Param.trace_backtracking) then
                  begin
                    let qs = string_of_int (!Param.derivation_number) in
                    Display.Html.print_string ("<A HREF=\"trace" ^ qs ^ ".html\">Unfinished trace</A><br>\n")
                  end;
                Display.Html.print_line "Could not find a trace corresponding to this derivation."
              end;
            Display.Text.print_line "Could not find a trace corresponding to this derivation.";
            false
        end
      in
      (* print_endline ("Failed " ^ (string_of_int (!failed_traces)) ^ " traces."); *)
      (* Profile.stop(); *)
      res
        ) tree
  with TermsEq.FalseConstraint -> false
(* Entry point wrapping the trace reconstruction: simplifies the derivation
   with [History.unify_derivation], runs the reconstruction inside display
   cleanup scopes, and clears the term links afterwards. *)
let do_reduction recheck opt_query lemmas tree =
  debug_print "Starting reduction";
  let reconstruct tree' =
    Display.auto_cleanup_display (fun () ->
      do_reduction opt_query lemmas tree')
  in
  let result =
    Display.auto_cleanup_display (fun () ->
      History.unify_derivation reconstruct recheck tree)
  in
  Terms.cleanup ();
  result
| null | https://raw.githubusercontent.com/LCBH/UKano/13c046ddaca48b45d3652c3ea08e21599e051527/proverif2.01/src/reduction_bipro.ml | ocaml | TO DO Test phases
Should I use evaluated terms in the "comment" field?
This exception is raise when the derivation prevents executing
a step
This exception is used in reduction_nobacktrack
It is raised after a bunch of reductions to
to get the final state after these reductions,
while preventing backtracking on these reductions.
Do not delete the links when the exception [Reduced] is raised.
Keep them in [current_bound_vars] so that they are deleted later if needed
Set when we should take the else branch of Get but we cannot
because an element has already been inserted so that the in branch
is taken. In this case, we try delaying the inserts.
We use the exception Unify for local failure
[bi_action action] executes action for both sides.
Raises Unify when it fails.
Raises FailOnlyOnSide 1 when left side only of the action fails.
Raises FailOnlyOnSide 2 when right side only of the action fails.
Raises Unify when the action fails on both sides.
Left side succeeded, right side failed
Left side failed
Left side failed, right side succeeded
Detect various goals
If the term tbl_elem is in the table
in phase cur_state.current_phase, it will still be in the table in any
later phase.
When the phase is less than [min_choice_phase],
we use a unary [Table] predicate.
Display clauses
Display the trace
Updating the goals
Find a clause
Useful for debugging
I think this is useful only to split a Choice inside t
for speed, use the initial definition of destructors, not the one enriched with the equational theory
TO DO (for speed) should I remove_syntactic, or keep it,
but take it into account elsewhere (when doing
function symbol comparisons, accept functions that
differ by their syntactic status)
Evaluates t1 and tests if it is equal to t2.
Evaluates a term. Raises Unify when the result is fail.
Match a pattern
Raises Unify when the matching fails
When the evaluation or pattern matching failed on the left side,
some variables may be unbounded when we try the pattern matching
on the right side
Test if a term is public
We maintain the following invariants in public and prepared_attacker_rule:
1/ All rules in prepared_attacker_rule are for a phase later or equal to the current one.
Rules for a previous phase are removed.
2/ All rules in prepared_attacker_rule for the current phase have non-empty assumptions.
Rules with empty assumptions are removed after adding their conclusion to public.
3/ All assumptions of rules in prepared_attacker_rule are not in public.
When an assumption is in public, we remove it, and possibly apply 2/.
[add_public_and_close state l] guarantees that these invariants are preserved after
addition of the terms in [l] to public.
It removes assumptions of rules in prepared_attacker_rule that are in [l].
When a rule then has no assumptions and is for the current phase, it adds the
conclusion to public and continues closing recursively.
Keep the rule, removing hypotheses that are already in public
[close_public_after_phase_increment state] guarantees that the invariants on
public and prepared_attacker_rule mentioned above are preserved after a phase increment.
It removes rules for previous phases, adds to public the conclusions
of rules with no assumptions in the new phase, and closes using
[add_public_and_close].
Keep the rule
[close_public_initial state] guarantees that the invariants on
public and prepared_attacker_rule mentioned above are true initially.
It applies rules with empty assumptions in phase 0 by
[close_public_after_phase_increment] and
closes with terms initially known to be public by
[add_public_list].
Optimize the case q == Nil: in this case, the adversary
cannot distinguish whether a destructor fails in t or
t is false.
Was already tested and failed before; will still fail if tested again
find a process that does some input on tin
For passive attackers, do red I/O only,
but still evaluate the arguments of the output
Arguments already evaluated
For active attackers, one can output on public channels
Check that the argument of the event can be evaluated but otherwise ignore it
find a process that does some output on tout
The attack reconstruction failed after doing the insert.
Try not doing it, in case that allows executing the else branch of a Get.
Test success when the knowledge of the attacker has changed
compute the new recipe_lst
if tc and t are known by the attacker in phase cur_state'.current_phase,
they will still be known in phase i,
so the attacker will be able to send t on tc in phase i
compute the new recipe_lst
if tc and t are known by the attacker in phase cur_state'.current_phase,
they will still be known in phase i,
so the attacker will be able to send t on tc in phase i
let test_success = Profile.f1 "test_success" test_success
Normalize the state after a reduction
When the process number n has been changed
When all processes have been changed
Initial attacker knowledge
Public contains terms, not patterns
-> translate the pattern into a term.
If the translation fails because a name is not in the table, we have to stop.
I introduce a variable for the recipe here,
and use it when displaying hyp_not_matched.
Note: it is important that the term t' is never a tuple.
Otherwise, it would be decomposed later, and the link
between the recipe in public and the one in hyp_not_matched
would be lost.
Public contains terms, not patterns
-> translate the pattern into a term.
If the translation fails because a name is not in the table, we have to stop.
I introduce a variable for the recipe here,
and use it when displaying hyp_not_matched.
Note: it is important that the term t' is never a tuple.
Otherwise, it would be decomposed later, and the link
between the recipe in public and the one in hyp_not_matched
would be lost.
Do not add [fact] in [state.hyp_not_matched] if it is already present
concl_copy is the recipe used to compute the conclusion from the hypotheses
Handle reductions i/o and in
Perform an input on a public channel (Res In)
Remove the elements of mess_list' that are already in cur_state.public
The message is not public
Inputting the message mess_term on this input will always fail,
even in the following of the trace
Perform a (Red I/O) reduction between an input and an asynchronous output
Find the corresponding asynchronous output
not found
The pattern does not match
When the adversary is passive and the channel is public,
the adversary eavesdrops the message sent by RIO_PatRemove
Perform a (Res I/O) reduction with a synchronous output
Not found
The i/o reduction is possible, compute the reduced state
The pattern does not match
When the adversary is passive and the channel is public,
the adversary eavesdrops the message sent by RIO_PatRemove
The entry is not found
we check that the pattern pat matches and t evaluates to true
Using the entry mess_term on this input will always fail,
even in the following of the trace
This exception is used only when I should take the
else of Get and I cannot because an element that
makes Get succeed already occurs.
The channel is public and the attacker is active, try (Res In)
The channel is private or the attacker is passive, try (Res I/O)
Input on a public channel, and the attacker is active: apply (Res In)
Input on a private channel or the attacker is passive: apply (Res I/O)
First try an asynchronous output, with a corresponding clause in the tree
Try a synchronous output
See if we should take the else branch if present
We should take the else branch, since a clause uses that branch
we check that the pattern pat matches and t evaluates to true
When FailOnlyOnSide _ is raised, it will be catched above and
the trace reconstruction succeeds.
The Get process is blocked forever: the else branch should be taken,
but the table contains an element that prevents it. Since elements
are only added to tables, this situation will not change.
So I backtrack.
This test will probably never succeed, because in
case it would succeed, it would have been detected earlier
in the [Insert] case of [do_red_nointeract]
The attack reconstruction failed after doing the insert.
Try not doing it, in case that allows executing the else branch of a Get.
Handle phases
[extract_phase n processes] modifies the processes for a [phase n] transition:
removes processes with no phase prefix or a phase prefix less than [n];
removes the phase prefix [phase n], leaving the rest of the process;
leaves processes with phase prefix greater than [n] unchanged.
[find_phase current_phase None processes] returns either
[None] when no process in [processes] starts with a phase, or
[Some n] when a process in [processes] starts with phase [n] and this is the lowest such phase.
It is an error if a process in [processes] starts with a phase less or equal to [current_phase].
Useful for debugging
Do transition to phase n
Put all reductions together
Build the goal
Verify correspondence query
The current hypothesis has been satisfied
The query does not contain any injective event.
Include in [event_table] the executed events
List.iter (fun (t,_) ->
print_string "Event found "; Display.Text.display_term t; print_newline()) event_table;
The trace corresponds to a standard clause
The trace may not falsify the query
Main trace reconstruction function
Profile.start();
Close initially the set public
Check the validity of the trace w.r.t. axioms
print_endline ("Failed " ^ (string_of_int (!failed_traces)) ^ " traces.");
Profile.stop(); | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* , , and *
* *
* Copyright ( C ) INRIA , CNRS 2000 - 2020 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* Bruno Blanchet, Vincent Cheval, and Marc Sylvestre *
* *
* Copyright (C) INRIA, CNRS 2000-2020 *
* *
*************************************************************)
(*
    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details (in file LICENSE).

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
(* Trace reconstruction
   This version of the trace reconstruction does not exploit the
   order of nodes in the derivation tree.
*)
open Types
open Pitypes
open Terms
open Reduction_helper
(* Global flags of the trace reconstruction.
   [made_forward_step] records that a forward step was executed;
   [failed_traces] counts failed reconstruction attempts
   (both are reset in [do_reduction]); the two [debug_*] refs
   enable extra debugging output. *)
let made_forward_step = ref false
let failed_traces = ref 0

let debug_find_io_rule = ref false
let debug_backtracking = ref false
(* Debugging hook: a no-op by default; uncomment the body below to print
   debug messages. *)
let debug_print s = ()
  (* print_string s;
     Display.Text.newline() *)
(* Raised when the derivation prevents executing a step. *)
exception DerivBlocks

(* Used in reduction_nobacktrack: raised after a bunch of reductions to
   return the final state while preventing backtracking on them. *)
exception Reduced of (term * term) reduc_state
(* [Terms.auto_cleanup f] runs [f ()], removing all links
   created by [f ()], whether [f] terminates normally or
   with an exception.
   [auto_cleanup_red] is a variant of this function that
   treats the exception [Reduced] specially. Indeed, in most
   cases, when an exception is raised, it is because we
   backtrack, so the links we have set must be removed,
   since we undo the reductions.
   However, the exception [Reduced] is different: for this
   exception, we want to get the final state, so the links
   must be kept.
*)
(* Variant of [Terms.auto_cleanup] that keeps the links created by [f] when
   [f] raises [Reduced] (the final state must retain them); on normal
   termination or any other exception, the links are removed as usual. *)
let auto_cleanup_red f =
  let tmp_bound_vars = !current_bound_vars in
  current_bound_vars := [];
  try
    let r = f () in
    List.iter (fun v -> v.link <- NoLink) (!current_bound_vars);
    current_bound_vars := tmp_bound_vars;
    r
  with
    Reduced s ->
      (* Do not delete the links; keep them in [current_bound_vars]
         so that they are deleted later if needed *)
      current_bound_vars := List.rev_append tmp_bound_vars (!current_bound_vars);
      raise (Reduced s)
  | x ->
      List.iter (fun v -> v.link <- NoLink) (!current_bound_vars);
      current_bound_vars := tmp_bound_vars;
      raise x
(* Set when we should take the else branch of Get but cannot, because an
   element has already been inserted so that the in branch is taken;
   in that case we try delaying the inserts. *)
let has_backtrack_get = ref false

(* Raised when no trace can be reconstructed. *)
exception No_result
(* [FailOnlyOnSide i]: an action failed on side [i] only (1 = left,
   2 = right); [Unify] is used for local failure on both sides. *)
exception FailOnlyOnSide of int
let make_bi_choice (t1, t2) = make_choice t1 t2
(* Combine two facts on the same predicate into one fact whose arguments
   are choice terms; internal error if the predicates differ. *)
let make_bi_choice_fact = function
  | Pred(p1,args1), Pred(p2,args2) when p1 == p2 ->
      Pred(p1,List.map2 make_choice args1 args2)
  | _ -> Parsing_helper.internal_error "[reduction_bipro.ml >> make_bi_choice_fact] Should be the same predicate."
(* Destruct a choice term into its (left, right) components. *)
let get_choice = function
    FunApp({ f_cat = Choice }, [t1;t2]) -> (t1,t2)
  | _ -> Parsing_helper.internal_error "Choice term expected"
(* Componentwise equality of bi-terms, modulo the equational theory. *)
let equal_bi_terms_modulo (left1, right1) (left2, right2) =
  equal_terms_modulo left1 left2 && equal_terms_modulo right1 right2
(* Test whether the bi-term [(t1,t2)] equals [true] on both sides.
   Returns [true]/[false] when both sides agree; raises
   [FailOnlyOnSide i] when only side [i] differs from [true]. *)
let is_true_test (t1,t2) =
  let r1 = equal_terms_modulo t1 Terms.true_term in
  let r2 = equal_terms_modulo t2 Terms.true_term in
  if r1 && r2 then true else
  if (not r1) && (not r2) then false else
    raise (FailOnlyOnSide (if (not r1) then 1 else 2))
(* [bi_action action] executes [action] on side 1 then side 2.
   Raises [FailOnlyOnSide 1] (resp. 2) when only the left (resp. right)
   side fails with [Unify]; raises [Unify] when both sides fail. *)
let bi_action action =
  try
    let t1 = action 1 in
    try
      let t2 = action 2 in
      (t1,t2)
    with Unify ->
      (* Left side succeeded, right side failed *)
      raise (FailOnlyOnSide 2)
  with Unify ->
    (* Left side failed; if side 2 also fails, its [Unify] propagates *)
    let _ = action 2 in
    (* Left side failed, right side succeeded *)
    raise (FailOnlyOnSide 1)
(* Apply [rev_name_subst] to a unary or binary argument list, returning a
   (left, right) pair; a single argument is duplicated on both sides. *)
let rev_name_subst_bi = function
    [t] -> let r = rev_name_subst t in (r,r)
  | [t1;t2] -> (rev_name_subst t1, rev_name_subst t2)
  | _ -> Parsing_helper.internal_error "Unexpected number of arguments for this predicate"
(* Type of a unary or binary argument list; for a binary list the first
   argument's type is used (both sides share it). *)
let get_term_type_bi = function
    [t] -> Terms.get_term_type t
  | [t1;t2] -> Terms.get_term_type t1
  | _ -> Parsing_helper.internal_error "Unexpected number of arguments for this predicate"
(* First phase in which choice occurs; must have been computed earlier,
   otherwise it is an internal error. *)
let get_min_choice_phase() =
  match (!Param.current_state).pi_min_choice_phase with
  | Set min_phase -> min_phase
  | Unset -> Parsing_helper.internal_error "pi_min_choice_phase not set"
(* Build a message fact for [phase]. Before [min_choice_phase] the two
   sides coincide, so the unary [Mess] predicate is used; from that phase
   on, the binary [MessBin] predicate records both sides. *)
let build_mess_fact phase (tc1,tc2) (t1,t2) =
  if phase < get_min_choice_phase() then
    Pred(Param.get_pred(Mess(phase, Terms.get_term_type t1)), [tc1;t1])
  else
    Pred(Param.get_pred(MessBin(phase, Terms.get_term_type t1)), [tc1;t1;tc2;t2])
(* Build a table fact for [phase]; unary [Table] before [min_choice_phase],
   binary [TableBin] afterwards (same convention as [build_mess_fact]). *)
let build_table_fact phase (t1,t2) =
  if phase < get_min_choice_phase() then
    Pred(Param.get_pred(Table(phase)), [t1])
  else
    Pred(Param.get_pred(TableBin(phase)), [t1;t2])
(* Build the [begin2] fact recording a precise action at occurrence
   [occ_name] for both sides of the bi-term. *)
let build_precise_fact occ_name (t1,t2) =
  let ev = Param.get_precise_event (Action (get_term_type t1)) in
  Pred(Param.begin2_pred,[FunApp(ev,[occ_name;t1]);FunApp(ev,[occ_name;t2])])
(* Does inserting the bi-term [t] in the table in phase [cur_phase] satisfy
   the given goal fact? If [t] is in the table in [cur_phase], it will
   still be there in any later phase, hence the [<=] comparison. *)
let is_table_goal cur_phase t = function
  | Pred({p_info = [TableBin(i)]; _},[tbl_elem1;tbl_elem2]) ->
      cur_phase <= i &&
      equal_bi_terms_modulo (tbl_elem1,tbl_elem2) t
  | Pred({p_info = [Table(i)]},[tbl_elem]) ->
      (* Unary predicate (phase below min_choice_phase):
         both sides must equal the same element *)
      cur_phase <= i &&
      equal_bi_terms_modulo (tbl_elem,tbl_elem) t
  | _ -> false
(* Does sending bi-term [t] on channel [tc] in phase [cur_phase] satisfy
   the given goal fact? Unlike tables, the phase must match exactly. *)
let is_mess_goal cur_phase tc t = function
  | Pred({p_info = [MessBin(n,_)]},[tcg1;tg1;tcg2;tg2]) ->
      (n == cur_phase) &&
      equal_bi_terms_modulo (tg1,tg2) t &&
      equal_bi_terms_modulo (tcg1,tcg2) tc
  | Pred({p_info = [Mess(n,_)]},[tcg;tg]) ->
      (n == cur_phase) &&
      equal_bi_terms_modulo (tg,tg) t &&
      equal_bi_terms_modulo (tcg,tcg) tc
  | _ -> false
(* Display a numbered io rule, for debugging ([debug_find_io_rule]). *)
let display_rule (n, sons, hsl, nl, concl) =
  print_string ("Rule " ^ (string_of_int n) ^ ": ");
  display_tag hsl nl;
  print_string " ";
  Display.Text.display_rule (List.map (fun t -> copy_fact2 t) sons, copy_fact2 concl, Empty concl, Terms.true_constraints);
  Display.Text.newline()
(* Human-readable explanation of why the two sides of a biprocess diverge,
   used when displaying the goal of a trace. [ApplyTest] cannot occur for
   biprocesses, hence the internal error.
   Fix: the [ProcessTest] message previously began with a stray space and
   lacked its subject (" process performs..."). *)
let noninterftest_to_string = function
    ProcessTest _ -> "A process performs a test that may succeed on one side and not on the other"
  | InputProcessTest _ -> "The pattern-matching in the input succeeds on one side and not on the other."
  | NIFailTest _ -> "This holds on one side and not on the other."
  | ApplyTest _ -> Parsing_helper.internal_error "There should be no ApplyTest in reduction_bipro.ml"
  | CommTest _ -> "The communication succeeds on one side and not on the other."
  | NIEqTest _ -> "The result in the left-hand side is different from the result in the right-hand side."
(* Display the reconstructed trace according to [Param.trace_display]:
   nothing, a short labeled trace (with a hint on how to get the long
   form), or the full reduction sequence; HTML or text output depending
   on [Param.html_output]. *)
let display_trace final_state =
  match !Param.trace_display with
    Param.NoDisplay -> ()
  | Param.ShortDisplay ->
      if !Param.html_output then
        Display.Html.display_labeled_trace final_state
      else
        begin
          if !Param.display_init_state then
            begin
              print_string "A more detailed output of the traces is available with\n";
              if !Param.typed_frontend then
                print_string " set traceDisplay = long.\n"
              else
                print_string " param traceDisplay = long.\n";
              Display.Text.newline()
            end;
          Display.Text.display_labeled_trace final_state
        end
  | Param.LongDisplay ->
      if !Param.html_output then
        ignore (Display.Html.display_reduc_state Display.bi_term_to_term true final_state)
      else
        ignore (Display.Text.display_reduc_state Display.bi_term_to_term true final_state)
(* Is the goal an observational-equivalence (non-interference) goal? *)
let is_equivalence_goal goal =
  match goal with
  | NonInterfGoal _ -> true
  | _ -> false
(* [find_io_rule next_f hypspeclist hyplist name_params var_list io_rules]
   searches [io_rules] for a rule whose tag, name parameters and hypotheses
   match the given occurrence list [hypspeclist], [name_params] and
   [hyplist] (matching modulo the equational theory), then calls [next_f]
   with the resulting instantiation of [var_list]. Each distinct
   instantiation is tried at most once; raises [Unify] when no rule allows
   [next_f] to succeed. *)
let find_io_rule next_f hypspeclist hyplist name_params var_list io_rules =
  let name_params1 = extract_name_params_noneed name_params in
  let l = List.length hypspeclist in
  let lnp = List.length name_params1 in
  let lh = List.length hyplist in
  if !debug_find_io_rule then
    begin
      auto_cleanup (fun () ->
        print_string "Looking for ";
        display_tag hypspeclist name_params1;
        print_string " ";
        Display.Text.display_list Display.Text.WithLinks.fact " & " hyplist;
        Display.Text.newline())
    end;
  (* Instantiations already returned, to avoid duplicates *)
  let found_terms = ref [] in
  let rec find_io_rule_aux = function
      [] -> raise Unify
    | ((n, sons, hypspeclist2, name_params',_)::io_rules) ->
        let l2 = List.length hypspeclist2 in
        let lnp2 = List.length name_params' in
        let lh2 = List.length sons in
        (* Quick filter: the rule must extend the requested tag *)
        if (l2 < l) || (lnp2 < lnp) || (lh2 < lh) || (not (hypspeclist = skip (l2-l) hypspeclist2))
        then find_io_rule_aux io_rules
        else
          begin
            let sons3 = skip (lh2-lh) sons in
            try
              let name_params2 = skip (lnp2-lnp) name_params' in
              if not (Param.get_ignore_types()) &&
                (List.exists2 (fun t1 t2 -> Terms.get_term_type t1 != Terms.get_term_type t2) name_params1 name_params2) then
                raise Unify;
              auto_cleanup_red (fun () ->
                match_modulo_list (fun () ->
                  match_equiv_list (fun () ->
                    let new_found = List.map copy_closed_remove_syntactic var_list in
                    if List.exists (fun old_found ->
                      List.for_all2 equal_terms_modulo old_found new_found) (!found_terms) then
                      raise Unify;
                    found_terms := new_found :: (!found_terms);
                    if !debug_find_io_rule then
                      begin
                        auto_cleanup (fun () ->
                          print_string "Found ";
                          Display.Text.display_list Display.Text.WithLinks.term ", " new_found;
                          Display.Text.newline())
                      end;
                    next_f new_found) sons3 hyplist
                      ) name_params1 name_params2
                    )
            with Unify -> find_io_rule_aux io_rules
          end
  in
  find_io_rule_aux io_rules
(* Evaluate a term possibly containing destructors.
   It always succeeds, perhaps returning Fail. *)
(* [term_evaluation side t] evaluates [t] on the given side
   (1 = left component of choice, 2 = right). Always succeeds,
   possibly returning a fail term. *)
let rec term_evaluation side = function
    Var v ->
      begin
        match v.link with
          TLink t ->
            (* Re-evaluating the linked term splits a Choice inside [t] *)
            term_evaluation side t
        | _ -> Parsing_helper.internal_error "Error: term should be closed in attack reconstruction";
      end
  | FunApp(f,l) ->
      (* for speed, use the initial definition of destructors,
         not the one enriched with the equational theory *)
      match f.f_initial_cat with
        Eq _ | Tuple ->
          let l' = List.map (term_evaluation side) l in
          if List.exists is_fail l' then
            Terms.get_fail_term (snd f.f_type)
          else
            FunApp(f, l')
      | Name _ | Failure -> FunApp(f,[])
      | Choice ->
          (* Keep only the component of the requested side *)
          begin
            match l with
              [t1;t2] ->
                if side = 1 then
                  term_evaluation side t1
                else
                  term_evaluation side t2
            | _ -> Parsing_helper.internal_error "Choice should have two arguments"
          end
      | BiProj Left ->
          begin match l with [t] -> term_evaluation 1 t | _ -> assert false end
      | BiProj Right ->
          begin match l with [t] -> term_evaluation 2 t | _ -> assert false end
      | Red redl ->
          (* Destructor: try each rewrite rule in order *)
          let l' = List.map (term_evaluation side) l in
          let rec try_red_list = function
              [] ->
                Parsing_helper.internal_error "Term evaluation should always succeeds (perhaps returning Fail)"
            | (red1::redl) ->
                let (left, right, side_c) = auto_cleanup (fun () -> Terms.copy_red red1) in
                try
                  auto_cleanup (fun () ->
                    match_modulo_list (fun () ->
                      close_destr_constraints side_c;
                      if TermsEq.check_closed_constraints side_c then
                        begin
                          close_term right;
                          TermsEq.remove_syntactic_term right
                        end
                      else
                        raise Unify
                          ) left l')
                with Unify -> try_red_list redl
          in
          try_red_list redl
      | _ ->
          Printf.printf "\nName of the function:";
          Display.Text.display_function_name f;
          Parsing_helper.internal_error "unexpected function symbol in term_evaluation (reduction_bipro.ml)"
(* Evaluate [t1] on both sides and test equality with the bi-term [t2];
   returns [false] when the evaluation of [t1] fails on either side. *)
let equal_terms_modulo_eval t1 t2 =
  let t1_l = term_evaluation 1 t1 in
  let t1_r = term_evaluation 2 t1 in
  if (is_fail t1_l) || (is_fail t1_r) then false else
    equal_bi_terms_modulo (t1_l, t1_r) t2
(* Evaluate [t] on the given side; raise [Unify] when the result is fail. *)
let term_evaluation_fail t side =
  match term_evaluation side t with
  | result when is_fail result -> raise Unify
  | result -> result
(* Evaluate every argument of a fact on the given side.
   Raises [Unify] when any argument evaluates to fail. *)
let fact_evaluation_fail fact side = match fact with
  | Pred(p,args) ->
      let args' =
        List.map (fun t ->
          let r = term_evaluation side t in
          if is_fail r
          then raise Unify
          else r
            ) args
      in
      Pred(p,args')
(* Evaluate two terms on the same side, raising [Unify] on failure.
   [t2] is evaluated before [t1], matching OCaml's right-to-left
   evaluation of the original tuple expression. *)
let term_evaluation_fail2 t1 t2 side =
  let second = term_evaluation_fail t2 side in
  let first = term_evaluation_fail t1 side in
  (first, second)
(* Evaluate [t] on both sides; when the occurrence [occ] may have several
   patterns, also record the resulting bi-term in [name_params]. *)
let term_evaluation_name_params occ t name_params =
  let may_have_several_patterns = reduction_check_several_patterns occ in
  let t' = bi_action (term_evaluation_fail t) in
  if may_have_several_patterns then
    t', ((MUnknown,make_bi_choice t',Always) :: name_params)
  else
    t', name_params
(* Evaluate [t] on the given side; raise [Unify] unless the result is
   the constant [true]. *)
let term_evaluation_to_true t side =
  let r = term_evaluation side t in
  if (is_fail r) || (not (equal_terms_modulo r Terms.true_term)) then
    raise Unify
  else
    r
(* Evaluate [t] to [true] on both sides; when the occurrence [occ] may have
   several patterns, record the resulting bi-term in [name_params]. *)
let term_evaluation_name_params_true occ t name_params =
  let may_have_several_patterns = reduction_check_several_patterns occ in
  let t' = bi_action (term_evaluation_to_true t) in
  if may_have_several_patterns then
    ((MUnknown,make_bi_choice t',Always) :: name_params)
  else
    name_params
(* [match_pattern p side t] matches pattern [p] against term [t] on the
   given side (1 = left, 2 = right), linking pattern variables to choice
   terms. Raises [Unify] when the matching fails. *)
let rec match_pattern p side t =
  if not (Terms.equal_types (Terms.get_pat_type p) (Terms.get_term_type t)) then
    raise Unify;
  match p with
    PatVar b ->
      begin
        if side = 1 then
          Terms.link b (TLink (make_choice t t))
        else
          (* When evaluation or pattern matching failed on the left side,
             some variables may be unbound when we try the matching on
             the right side; in that case both components are set to [t] *)
          match b.link with
            TLink (FunApp({ f_cat = Choice }, [t1;t2])) ->
              Terms.link b (TLink (make_choice t1 t))
          | _ ->
              Terms.link b (TLink (make_choice t t))
      end
  | PatTuple(f,l) ->
      (* Decompose [t] as an [f]-tuple, then match componentwise *)
      let vl = Terms.var_gen (fst f.f_type) in
      let tl =
        match_modulo (fun () ->
          List.map copy_closed_remove_syntactic vl) (FunApp(f, vl)) t
      in
      List.iter2 (fun p t -> match_pattern p side t) l tl
  | PatEqual t' ->
      let t'' = term_evaluation_fail t' side in
      match_modulo (fun () -> ()) t'' t
(* Match pattern [p] against the component of the bi-term selected by
   [side] (1 = left, 2 = right). Raises [Unify] on failure. *)
let bi_match_pattern p (t1,t2) side =
  let component = if side = 1 then t1 else t2 in
  match_pattern p side component
(* Match pattern [p] on the given side, then evaluate [t]; raise [Unify]
   unless [t] evaluates to [true]. *)
let bi_match_pattern_and_test p (t1,t2) t side =
  bi_match_pattern p (t1,t2) side;
  let t' = term_evaluation_fail t side in
  if not (equal_terms_modulo t' Terms.true_term) then
    raise Unify
(* Evaluate [t] on both sides, match [pat] against the result, and record
   the bi-term in [name_params] when the occurrence [occ] may have several
   patterns. *)
let term_evaluation_name_params_and_match pat occ t name_params =
  let may_have_several_patterns = reduction_check_several_patterns occ in
  let t'' = bi_action (fun side ->
    let t' = term_evaluation_fail t side in
    match_pattern pat side t';
    t')
  in
  if may_have_several_patterns then
    t'', ((MUnknown,make_bi_choice t'',Always) :: name_params)
  else
    t'', name_params
(* Terms come with a recipe that explains how to compute them.
   Recipes may contain variables (especially in prepared_attacker_rules)
   which are later instantiated by putting links in these variables.
   Copies of the recipes are not made immediately after creating the links,
   so these links remain when the trace progresses; they are removed
   in case of backtrack (by auto_cleanup_red).
   Not making too many copies is important for speed in complex
   examples such as ffgg.
   Copies of recipes are made before adding a term to public,
   so that recipes in public do not contain links.
   They are also made before using a term in an input.
*)
(* Decompose tuples *)
(* Decompose tuples in a (recipe, bi-term) pair: a tuple is split into its
   components, each paired with the corresponding projection applied to
   the recipe. Non-tuples are returned unchanged. *)
let rec decompose_term ((recipe, t) as pair:Types.term * (Types.term * Types.term)) =
  match t with
    (FunApp({f_cat = Tuple } as f,l), FunApp({f_cat = Tuple} as f',l')) when f == f' ->
      let projs = Terms.get_all_projection_fun f in
      decompose_list (List.map2 (fun fi ti -> (FunApp(fi,[recipe]),ti))
                        projs (List.combine l l'))
  | _ -> [pair]

(* Decompose every pair of the list. *)
and decompose_list = function
    [] -> []
  | (a::l) -> (decompose_term a) @ (decompose_list l)
(* Reverse decomposition: introduce a fresh recipe variable for each tuple
   component and link [binder] to the tuple of these fresh variables, so
   the original recipe is rebuilt from the components' recipes. *)
let rec decompose_term_rev (binder, t) =
  match t with
    (FunApp({f_cat = Tuple } as f,l), FunApp({f_cat = Tuple} as f',l')) when f == f' ->
      let new_list = List.map (fun (x, x') -> ((Terms.new_var ~orig:false "~M" (Terms.get_term_type x)), (x, x')))
          (List.combine l l')
      in
      Terms.link binder (TLink (FunApp(f, (List.map (fun (x, y) -> Var x) new_list))));
      decompose_list_rev new_list
  | t -> [(binder, t)]

(* Reverse-decompose every pair of the list. *)
and decompose_list_rev = function
    [] -> []
  | (a::l) -> (decompose_term_rev a) @ (decompose_list_rev l)
(* [is_in_public public t] returns [Some recipe] when the bi-term [t] can
   be computed from [public] (decomposing tuples as needed), [None]
   otherwise. *)
let rec is_in_public public = function
  | (FunApp({f_cat = Tuple} as f, l), FunApp(f',l')) when f == f' ->
      (* A tuple is public when all its components are; rebuild the
         recipe by reapplying the tuple constructor *)
      (match (is_in_public_list public) (List.combine l l') with
      | None -> None
      | Some lst -> Some(FunApp(f, lst)))
  | t ->
      try
        let (ca, _) = List.find (fun (_, t') -> equal_bi_terms_modulo t t') public in
        Some ca
      with Not_found ->
        None

(* All-or-nothing lookup of a list of bi-terms in [public]. *)
and is_in_public_list public = function
    [] -> Some []
  | hd::tail ->
      match is_in_public public hd with
        None -> None
      | Some ca ->
          match is_in_public_list public tail with
            None -> None
          | Some catail -> Some (ca::catail)
(* [remove_first_in_public public l] drops from the head of [l] the
   longest prefix of (recipe-variable, bi-term) pairs whose bi-term is
   already in [public], linking each dropped variable to the recipe
   found there; stops at the first bi-term not in public. *)
let rec remove_first_in_public public = function
    [] -> []
  | (((c, a)::l) as l') ->
      try
        let (ca, _) = List.find (fun (_, t) -> equal_bi_terms_modulo a t) public in
        Terms.link c (TLink ca);
        remove_first_in_public public l
      with Not_found ->
        l'
(* [update_term_list oldpub public tc_list] refreshes a decomposed term
   list previously computed against [oldpub]: only the entries of
   [public] added since [oldpub] need to be re-checked. When the head of
   [tc_list] is among the new entries, its recipe variable is linked and
   the rest of the list is filtered with [remove_first_in_public];
   otherwise [tc_list] is returned unchanged. *)
let update_term_list oldpub public tc_list =
  match tc_list with
    [] -> []
  | ((c0, t0)::l0) ->
      (* Scan [public] down to [oldpub] (physical equality): entries at
         or past that point were already checked previously. *)
      let rec is_in_until = function
          [] -> false
        | (((ca, a)::l) as public) ->
            if public == oldpub then false else
            if equal_bi_terms_modulo a t0
            then
              begin
                Terms.link c0 (TLink ca);
                true
              end
            else
              is_in_until l
      in
      if is_in_until public then
        remove_first_in_public public l0
      else
        tc_list
(* [add_public_and_close state l] adds the (recipe, bi-term) pairs of
   [l] to public and saturates: each new public term simplifies the
   hypotheses of the prepared attacker rules; a rule of the current
   phase whose hypotheses are exhausted fires, and its conclusion is
   queued to be added to public in turn. *)
let add_public_and_close state l =
  let queue = ref l in
  (* Remove [pair] (just made public) from the hypotheses of the
     attacker rules; fired rules are removed from the list and their
     conclusions pushed on [queue]. *)
  let rec remove_from_att_rules public ((recipe, t) as pair) = function
      [] -> []
    | (p, hyp_terms, (recipe_concl, concl_bi_term))::attacker_rules ->
        let attacker_rules' = remove_from_att_rules public pair attacker_rules in
        let phase_p = getphase p in
        assert (phase_p >= state.current_phase);
        let hyp_terms' = match hyp_terms with
            [] -> []
          | ((c0, t0)::l0) ->
              if equal_bi_terms_modulo t0 t then
                begin
                  link c0 (TLink recipe);
                  remove_first_in_public public l0
                end
              else
                hyp_terms
        in
        if (hyp_terms' = []) && (phase_p = state.current_phase) then
          begin
            (* Rule fires: copy_term4 resolves links in the recipe. *)
            queue := (decompose_term (Terms.copy_term4 recipe_concl, concl_bi_term)) @ (!queue);
            attacker_rules'
          end
        else
          (p, hyp_terms', (recipe_concl, concl_bi_term)) :: attacker_rules'
  in
  (* Drain the queue, skipping terms already public. *)
  let rec do_close state =
    match !queue with
      [] -> state
    | ((c, t)::l) ->
        queue := l;
        if List.exists (fun (_, t') -> equal_bi_terms_modulo t t') state.public then
          do_close state
        else
          let public' = (c, t) :: state.public in
          do_close { state with
                     public = public';
                     prepared_attacker_rule = remove_from_att_rules public' (c, t) state.prepared_attacker_rule }
  in
  do_close state
(* [add_public_with_recipe state (recipe, t)] adds the bi-term [t],
   known through [recipe], to public; identical tuples on both sides are
   first decomposed into their projections. *)
let rec add_public_with_recipe state (recipe, t) =
  match t with
    (FunApp({ f_cat = Tuple } as f, l), FunApp({f_cat = Tuple} as f',l')) when f == f' ->
      let projs = Terms.get_all_projection_fun f in
      add_public_list state (List.map2 (fun fi ti -> (FunApp(fi, [recipe]), ti)) projs (List.combine l l'))
  | t -> add_public_and_close state [(recipe, t)]

(* Fold [add_public_with_recipe] over a list of pairs, left to right. *)
and add_public_list state = function
    [] -> state
  | (a::l) -> add_public_list (add_public_with_recipe state a) l
(* [close_public_after_phase_increment state] closes public after the
   phase was just incremented: prepared attacker rules of earlier phases
   are dropped; rules of the current phase with no remaining hypotheses
   fire, and their conclusions are added to public (saturating via
   [add_public_and_close]). *)
let close_public_after_phase_increment state =
  let queue = ref [] in
  let rec remove_from_att_rules public = function
      [] -> []
    | ((p, hyp_terms, (recipe_concl, concl_bi_term)) as rule)::attacker_rules ->
        let attacker_rules' = remove_from_att_rules public attacker_rules in
        let phase_p = getphase p in
        (* Rules of past phases can never fire again: drop them. *)
        if phase_p < state.current_phase then attacker_rules' else
        if (hyp_terms = []) && (phase_p = state.current_phase) then
          begin
            queue := (decompose_term (Terms.copy_term4 recipe_concl, concl_bi_term)) @ (!queue);
            attacker_rules'
          end
        else
          rule :: attacker_rules'
  in
  let state' =
    { state with
      prepared_attacker_rule = remove_from_att_rules state.public state.prepared_attacker_rule }
  in
  add_public_and_close state' (!queue)
(* [close_public_phase_change state n] changes the current phase to [n]
   and closes public accordingly, by incrementing the phase one step at
   a time from [state.current_phase] to [n] and closing with
   [close_public_after_phase_increment] at each increment. *)
(* Raise the current phase to [n], one increment at a time, closing the
   public knowledge after each increment. [n] must be at least the
   current phase, otherwise this is an internal error. *)
let rec close_public_phase_change state n =
  if n < state.current_phase then
    Parsing_helper.internal_error "Phases should be in increasing order.";
  if n = state.current_phase then state
  else
    let incremented = { state with current_phase = state.current_phase + 1 } in
    close_public_phase_change (close_public_after_phase_increment incremented) n
(* [close_public_initial state] recomputes the closure of the initial
   public knowledge: start from an empty public, close at the initial
   phase, then re-add (and close under) every term of the original
   public set. *)
let close_public_initial state =
  let state0 = { state with public = [] } in
  let state1 = close_public_after_phase_increment state0 in
  add_public_list state1 state.public
(* [add_public state t] adds the bi-term [t] to public, decomposing
   tuples; returns the recipe for [t] (rebuilt from the links set by
   [decompose_term_rev]) together with the new state. *)
let add_public state t =
  let new_recipe = new_var ~orig:false "~M" (get_term_type (fst t)) in
  let l = decompose_term_rev (new_recipe, t) in
  let l' = List.map (fun (b,t) -> (Var b, t)) l in
  let state' = add_public_and_close state l' in
  (* copy_term4 follows the links, yielding the concrete recipe. *)
  (Terms.copy_term4 (Var new_recipe), state')
(* [optional_eavesdrop state public_channel mess_term] adds [mess_term]
   to public when [public_channel] is true, returning the recipe of the
   eavesdropped message (if any) and the updated state. *)
let optional_eavesdrop state public_channel mess_term =
  if public_channel then
    (* The adversary is passive and the channel is public;
       the adversary eavesdrops the message sent by RIO / RIO_PatRemove *)
    let (new_recipe, state') = add_public state mess_term in
    (Some new_recipe, state')
  else
    (None, state)
(* [get_occurrence_name_for_precise occ name_params] builds the name
   recorded for a precise action at occurrence [occ]: the occurrence
   name applied to the session identifiers (MSid) of [name_params]. *)
let get_occurrence_name_for_precise occ name_params =
  let (np,npm) =
    (* Keep only session ids, both as terms and as meanings.
       [npm] is computed but not used below. *)
    List.fold_right (fun (m,t,_) (acc_np,acc_npm) -> match m with
      | MSid _ -> (t::acc_np,m::acc_npm)
      | _ -> (acc_np,acc_npm)
          ) name_params ([],[])
  in
  let n = Reduction_helper.get_occ_name occ in
  match n.f_cat with
    (* [r] is unused; the pattern only checks that [n] is a name. *)
    | Name r ->
        let n' = FunApp(n,np) in
        FunApp(add_name_for_pat n',[])
    | _ -> Parsing_helper.internal_error "[reduction_bipro.ml >> get_occurrence_name_for_precise] Unexpected case."
(* Do reductions that do not involve interactions.
   [f] takes as input
   - a boolean indicating whether the attacker knowledge has changed
   - the new state
   When the goal is reached, do_red_nointeract returns the final state.
   Otherwise, raises an exception No_result. *)
(* [do_red_nointeract f prev_state n] performs, on the [n]-th subprocess
   of [prev_state], every reduction that needs no interaction with
   another process: Nil, Par, Restr, Let, Test, Output (to the
   attacker), Event, Repl, Insert, NamedProcess. The continuation [f]
   receives a boolean telling whether the attacker knowledge (public)
   changed, and the new state. When the goal is reached, the final state
   is returned directly; otherwise the continuation's result is
   returned (it may raise No_result). Backtracking is handled through
   [auto_cleanup_red], which undoes term links. *)
let rec do_red_nointeract f prev_state n =
  let (proc, name_params, occs, facts, cache_info) =
    List.nth prev_state.subprocess n in
  match proc with
    Nil -> debug_print "Doing Nil";
      made_forward_step := true;
      f false (do_rnil prev_state n)
  | Par(p,q) ->
      debug_print "Doing Par";
      made_forward_step := true;
      (* Reduce both components; public changed if it changed for either. *)
      do_red_nointeract (fun new_att_know cur_state2 ->
        do_red_nointeract (fun new_att_know2 cur_state3 ->
          f (new_att_know || new_att_know2) cur_state3)
          cur_state2 n
        ) { prev_state with
            subprocess = add_at n (p, name_params, occs, facts, Nothing)
              (replace_at n (q, name_params, occs, facts, Nothing)
                 prev_state.subprocess);
            comment = RPar(n);
            previous_state = Some prev_state } (n+1)
  | Restr(na,(args,env),p,occ) ->
      debug_print "Doing Restr";
      made_forward_step := true;
      let need_list = get_need_vars (!Param.current_state) na in
      let include_info = prepare_include_info env args need_list in
      let l = extract_name_params na include_info name_params in
      (* The fresh name is a pattern name applied to the kept parameters. *)
      let n' = FunApp(add_name_for_pat (FunApp(na, l)),[]) in
      let p' = process_subst p na n' in
      begin
        do_red_nointeract f { prev_state with
            subprocess = replace_at n (p', name_params, occs, facts, Nothing) prev_state.subprocess;
            comment = RRestr(n, na, n');
            previous_state = Some prev_state } n
      end
  | Let(pat,t,p,q,occ) ->
      debug_print "Doing Let";
      made_forward_step := true;
      let new_occs = (LetTag occ) :: occs in
      begin
        try
          auto_cleanup_red (fun () ->
            let t', name_params' = term_evaluation_name_params_and_match pat (OLet(occ)) t name_params in
            let p' = copy_process p in
            let name_params'' = update_name_params IfQueryNeedsIt name_params' pat in
            do_red_nointeract f { prev_state with
                subprocess = replace_at n (p', name_params'', new_occs, facts, Nothing) prev_state.subprocess;
                comment = RLet_In(n, pat, make_bi_choice t');
                previous_state = Some prev_state } n
              )
        with Unify ->
          (* Evaluation or matching failed (on both sides): else branch. *)
          do_red_nointeract f { prev_state with
              subprocess = replace_at n (q, name_params, new_occs, facts, Nothing) prev_state.subprocess;
              comment = RLet_Else(n, pat, t);
              previous_state = Some prev_state } n
        | FailOnlyOnSide _ ->
            (* Failure on one side only: equivalence attack candidate. *)
            if is_equivalence_goal prev_state.goal
            then
              (* SUCCESS *)
              { prev_state with
                goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
            else
              f false { prev_state with
                  subprocess = remove_at n prev_state.subprocess;
                  comment = RLet_Remove(n,pat, t);
                  previous_state = Some prev_state }
      end
  | Test(t,p,q,occ) ->
      debug_print "Doing Test";
      made_forward_step := true;
      if q == Nil then
        (* No else branch: the test must succeed, otherwise remove. *)
        begin
          try
            auto_cleanup_red (fun () ->
              let new_occs = (TestTag occ) :: occs in
              let name_params' = term_evaluation_name_params_true (OTest(occ)) t name_params in
              do_red_nointeract f
                { prev_state with
                  subprocess = replace_at n (p, name_params', new_occs, facts, Nothing) prev_state.subprocess;
                  comment = RTest_Then(n, t);
                  previous_state = Some prev_state } n
                )
          with Unify ->
            f false { prev_state with
                subprocess = remove_at n prev_state.subprocess;
                comment = RTest_Remove(n, t, TestFails);
                previous_state = Some prev_state }
          | FailOnlyOnSide _ ->
              if is_equivalence_goal prev_state.goal
              then
                (* SUCCESS *)
                { prev_state with
                  goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
              else
                f false { prev_state with
                    subprocess = remove_at n prev_state.subprocess;
                    comment = RTest_Remove(n, t, Blocks);
                    previous_state = Some prev_state }
        end
      else
        begin
          try
            auto_cleanup_red (fun () ->
              let new_occs = (TestTag occ) :: occs in
              let (t', name_params') = term_evaluation_name_params (OTest(occ)) t name_params in
              if is_true_test t' then
                do_red_nointeract f
                  { prev_state with
                    subprocess = replace_at n (p, name_params', new_occs, facts, Nothing) prev_state.subprocess;
                    comment = RTest_Then(n, t);
                    previous_state = Some prev_state } n
              else
                do_red_nointeract f
                  { prev_state with
                    subprocess = replace_at n (q, name_params', new_occs, facts, Nothing) prev_state.subprocess;
                    comment = RTest_Else(n, t);
                    previous_state = Some prev_state } n
                )
          with Unify ->
            f false { prev_state with
                subprocess = remove_at n prev_state.subprocess;
                comment = RTest_Remove(n, t, DestrFails);
                previous_state = Some prev_state }
          | FailOnlyOnSide _ ->
              if is_equivalence_goal prev_state.goal
              then
                (* SUCCESS *)
                { prev_state with
                  goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
              else
                f false { prev_state with
                    subprocess = remove_at n prev_state.subprocess;
                    comment = RTest_Remove(n, t, Blocks);
                    previous_state = Some prev_state }
        end
  | Output(tc,t,p,occ) ->
      (* First check whether this output resolves a CommTest goal. *)
      let new_goal_opt =
        if cache_info != Nothing then
        (* NOTE(review): the then-branch expression is missing in this
           copy of the file; in the upstream sources this presumably is
           [None] (the test was already done when the cache was set) —
           confirm against the original. *)
        else
          match prev_state.goal with
            NonInterfGoal(CommTest(tin,tout,loc)) ->
              if equal_terms_modulo_eval tc tout then
                begin
                  (match is_in_public prev_state.public tin with
                    Some (recipe) ->
                      begin
                        let new_loc = Some (LocAttacker (recipe), LocProcess(n, List.nth prev_state.subprocess n)) in
                        Some (NonInterfGoal(CommTest(tin,tout,new_loc)))
                      end
                  (* NOTE(review): a [| None ->] arm appears to have been
                     lost here in this copy — confirm against upstream. *)
                  try
                    let (n',p') =
                      findi (function
                          (Input(tc,_,_,_),_,_,_,_) -> equal_terms_modulo_eval tc tin
                        | _ -> false
                              ) prev_state.subprocess
                    in
                    let new_loc = Some (LocProcess(n',p'), LocProcess(n, List.nth prev_state.subprocess n)) in
                    Some (NonInterfGoal(CommTest(tin,tout,new_loc)))
                  with Not_found ->
                    None)
                end
              else None
          | _ -> None
      in
      begin
        match new_goal_opt with
          Some new_goal -> { prev_state with goal = new_goal }
        | None ->
            debug_print "Doing Output";
            (* For passive attackers, only record the output in the
               cache; the message itself is sent by RIO. *)
            if not (!Param.active_attacker) then
              match cache_info with
                InputInfo _ | GetInfo _ -> Parsing_helper.internal_error "Should not have input/get info for an output!"
              (* NOTE(review): an [OutputInfo] arm seems to be missing in
                 this copy — confirm against upstream. *)
              | Nothing ->
                  try
                    auto_cleanup_red (fun () ->
                      let ((tc1,t1),(tc2,t2)) = bi_action (term_evaluation_fail2 tc t) in
                      let tc' = (tc1, tc2) in
                      let t' = (t1, t2) in
                      let tclist = decompose_term_rev (Terms.new_var ~orig:false "Useless" (Terms.get_term_type tc1), tc') in
                      f false { prev_state with
                          subprocess = replace_at n (Output(make_bi_choice tc', make_bi_choice t',p,occ),
                                                     name_params, occs, facts,
                                                     (OutputInfo(tclist, prev_state.public)))
                                         prev_state.subprocess }
                        )
                  with Unify ->
                    f false { prev_state with
                        subprocess = remove_at n prev_state.subprocess;
                        comment = ROutput_Remove(n, tc, t, DestrFails);
                        previous_state = Some prev_state }
                  | FailOnlyOnSide _ ->
                      if is_equivalence_goal prev_state.goal
                      then
                        (* SUCCESS *)
                        { prev_state with
                          goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
                      else
                        f false { prev_state with
                            subprocess = remove_at n prev_state.subprocess;
                            comment = ROutput_Remove(n, tc, t, Blocks);
                            previous_state = Some prev_state }
            else
              (* Active attacker: output to public when the channel is. *)
              begin
                let new_occs = (OutputTag occ) :: occs in
                match cache_info with
                  InputInfo _ | GetInfo _ -> Parsing_helper.internal_error "Should not have input/get info for an output!"
                | OutputInfo(tclist, oldpub) ->
                    (* Channel decomposition cached: only re-check the
                       part of public added since [oldpub]. *)
                    let tclist' = update_term_list oldpub prev_state.public tclist in
                    if tclist' = [] then
                      begin
                        made_forward_step := true;
                        let (new_recipe, prev_state') = add_public prev_state (get_choice t) in
                        do_red_nointeract (if prev_state.public == prev_state'.public then f else
                                           (fun mod_public cur_state -> f true cur_state))
                          { prev_state' with
                            subprocess = replace_at n (p, name_params, new_occs, facts, Nothing) prev_state.subprocess;
                            comment = ROutput_Success(n, tc, new_recipe, t);
                            previous_state = Some prev_state } n
                      end
                    else
                      f false { prev_state with
                          subprocess = replace_at n (proc, name_params, occs, facts,
                                                     (OutputInfo(tclist', prev_state.public)))
                                         prev_state.subprocess }
                | Nothing ->
                    try
                      auto_cleanup_red (fun () ->
                        let ((tc1,t1),(tc2,t2)) = bi_action (term_evaluation_fail2 tc t) in
                        let tc' = (tc1, tc2) in
                        let t' = (t1, t2) in
                        let tclist = decompose_term_rev (Terms.new_var ~orig:false "Useless" (Terms.get_term_type tc1), tc') in
                        let tclist' = remove_first_in_public prev_state.public tclist in
                        if tclist' = [] then
                          begin
                            made_forward_step := true;
                            let (new_recipe, prev_state') = add_public prev_state t' in
                            do_red_nointeract (if prev_state.public == prev_state'.public then f else
                                               (fun mod_public cur_state -> f true cur_state))
                              { prev_state' with
                                subprocess = replace_at n (p, name_params, new_occs, facts, Nothing) prev_state.subprocess;
                                comment = ROutput_Success(n, make_bi_choice tc', new_recipe, make_bi_choice t');
                                previous_state = Some prev_state } n
                          end
                        else
                          (* When one side is a channel and the other side is not,
                             we keep the Output process; the failure of the equivalence
                             will be detected (or has already been detected) by CommTest *)
                          f false { prev_state with
                              subprocess = replace_at n (Output(make_bi_choice tc', make_bi_choice t',p,occ), name_params, occs, facts,
                                                         (OutputInfo(tclist', prev_state.public)))
                                             prev_state.subprocess }
                          )
                    with Unify ->
                      f false { prev_state with
                          subprocess = remove_at n prev_state.subprocess;
                          comment = ROutput_Remove(n, tc, t, DestrFails);
                          previous_state = Some prev_state }
                    | FailOnlyOnSide _ ->
                        if is_equivalence_goal prev_state.goal
                        then
                          (* SUCCESS *)
                          { prev_state with
                            goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
                        else
                          f false { prev_state with
                              subprocess = remove_at n prev_state.subprocess;
                              comment = ROutput_Remove(n, tc, t, Blocks);
                              previous_state = Some prev_state }
              end
      end
  | Event(FunApp(fs,l) as t,_,p,occ) ->
      debug_print "Doing Event";
      made_forward_step := true;
      let fstatus = Pievent.get_event_status (!Param.current_state) fs in
      (* Record the executed event, update the goal, and continue (or
         stop when the goal is now reached). *)
      let do_end prev_state new_occs new_facts t' =
        let n_subprocess = replace_at n (p,name_params,new_occs,new_facts,Nothing) prev_state.subprocess in
        let (new_goal, event_in_goal, success) =
          (* We do not store the list here since they were only used for
             injective queries. This cannot happen when proving lemmas
             on biprocesses. *)
          update_corresp_goal prev_state.goal (Some (occ,[]))
            (function
              | Pred(pr,[t1';t2']) -> pr == Param.end2_pred && equal_bi_terms_modulo t' (t1',t2')
              | _ -> false
                  )
        in
        let bi_t = make_bi_choice t' in
        let new_state =
          { prev_state with
            subprocess = n_subprocess;
            comment = REvent_Success(n,bi_t,event_in_goal);
            events = bi_t::prev_state.events;
            goal = new_goal;
            previous_state = Some prev_state
          }
        in
        if success
        then new_state
        else do_red_nointeract f new_state n
      in
      begin
        try
          begin match fstatus.begin_status with
          | No ->
              auto_cleanup_red (fun () ->
                let t' = bi_action (term_evaluation_fail t) in
                let new_occs = (BeginEvent(occ)) :: occs in
                do_end prev_state new_occs facts t'
                  )
          | NonInj ->
              (* The event appears as a begin fact: it must additionally
                 be derivable according to the io rules. *)
              auto_cleanup_red (fun () ->
                let (t1,t2) = bi_action (term_evaluation_fail t) in
                let new_occs' = (BeginEvent (occ)) :: occs in
                let new_occs = BeginFact :: new_occs' in
                let new_facts = Pred(Param.begin2_pred,[t1;t2]) :: facts in
                try
                  find_io_rule (fun _ ->
                    do_end prev_state new_occs new_facts (t1,t2)
                      ) new_occs' facts name_params [] prev_state.io_rule
                with Unify -> raise DerivBlocks
                  )
          | Inj -> Parsing_helper.internal_error "[Reduction_bipro.ml] Unexpected injective event."
          end
        with Unify ->
          f false { prev_state with
              subprocess = remove_at n prev_state.subprocess;
              comment = REvent_Remove(n, t, DestrFails);
              previous_state = Some prev_state }
        | DerivBlocks ->
            f false { prev_state with
                subprocess = remove_at n prev_state.subprocess;
                comment = REvent_Remove(n, t, Blocks);
                previous_state = Some prev_state }
        | FailOnlyOnSide _ ->
            if is_equivalence_goal prev_state.goal
            then
              (* SUCCESS *)
              { prev_state with
                goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
            else
              f false { prev_state with
                  subprocess = remove_at n prev_state.subprocess;
                  comment = REvent_Remove(n, t, Blocks);
                  previous_state = Some prev_state }
      end
  | LetFilter _ -> Parsing_helper.user_error "Predicates in 'let suchthat in else' processes are currently incompatible with non-interference.";
  | Repl(p,occ) ->
      debug_print "Doing Repl";
      made_forward_step := true;
      let sid = Terms.new_var ~orig:false "sid" Param.sid_type in
      let new_occs = (ReplTag (occ,count_name_params name_params))::occs in
      let copy_number = ref 0 in
      let new_state = ref { prev_state with
                            subprocess = remove_at n prev_state.subprocess;
                            comment = RRepl(n,0);
                            previous_state = Some prev_state }
      in
      begin
        try
          (* Create one copy per session id matched by the io rules;
             [raise Unify] forces enumeration of all matches. *)
          auto_cleanup (fun () ->
            find_io_rule (function
                [sid_pat] ->
                  let p' = auto_cleanup (fun () -> copy_process p) in
                  incr copy_number;
                  new_state := { !new_state with
                                 subprocess = add_at n (p', (MSid 0,sid_pat,Always)::name_params, new_occs, facts, Nothing) !new_state.subprocess
                               };
                  raise Unify
              | _ -> Parsing_helper.internal_error "Repl case, reduction.ml"
                    ) new_occs facts ((MSid 0,Var sid,Always)::name_params) [Var sid] prev_state.io_rule
              )
        with Unify ->
          debug_print ("Repl: " ^ (string_of_int (!copy_number)) ^ " copies");
          (* Reduce each created copy in turn. *)
          let rec do_red_copies b ncopies state =
            if ncopies < 0 then
              f b state
            else
              do_red_nointeract (fun b' s -> do_red_copies (b||b') (ncopies-1) s) state (n+ncopies)
          in
          do_red_copies false ((!copy_number)-1)
            { !new_state with
              comment = RRepl(n,!copy_number)
            }
      end
  | Input(tc,_,_,_) ->
      (* Inputs only matter here for resolving a CommTest goal. *)
      begin
        match prev_state.goal with
          NonInterfGoal(CommTest(tin,tout,loc)) ->
            if equal_terms_modulo_eval tc tin then
              begin
                (match is_in_public prev_state.public tout with
                | Some recipe ->
                    begin
                      let new_loc = Some (LocProcess(n, List.nth prev_state.subprocess n), LocAttacker recipe) in
                      let new_goal = NonInterfGoal(CommTest(tin,tout,new_loc)) in
                      { prev_state with goal = new_goal }
                    end
                (* NOTE(review): a [| None ->] arm appears to have been
                   lost here in this copy — confirm against upstream. *)
                try
                  let (n',p') =
                    findi (function
                        (Output(tc,_,_,_),_,_,_,_) -> equal_terms_modulo_eval tc tout
                      | _ -> false
                            ) prev_state.subprocess
                  in
                  let new_loc = Some (LocProcess(n, List.nth prev_state.subprocess n), LocProcess(n',p')) in
                  let new_goal = NonInterfGoal(CommTest(tin,tout,new_loc)) in
                  { prev_state with goal = new_goal }
                with Not_found ->
                  f false prev_state)
              end
            else f false prev_state
        | _ -> f false prev_state
      end
  | Insert(t,p,occ) ->
      debug_print "Doing Insert";
      begin
        let new_occs = (InsertTag occ) :: occs in
        let new_element_inserted = ref false in
        try
          auto_cleanup_red (fun () ->
            let t' = bi_action (term_evaluation_fail t) in
            let already_in = List.exists (equal_bi_terms_modulo t') prev_state.tables in
            new_element_inserted := not already_in;
            made_forward_step := true;
            let (new_goal,insert_in_goal,success) =
              update_corresp_goal prev_state.goal None
                (is_table_goal prev_state.current_phase t')
            in
            let bi_t = make_bi_choice t' in
            let new_state =
              { prev_state with
                subprocess = replace_at n (p, name_params, new_occs, facts, Nothing) prev_state.subprocess;
                tables = if already_in then prev_state.tables else t'::prev_state.tables;
                comment = RInsert_Success(n, bi_t, insert_in_goal);
                goal = new_goal;
                previous_state = Some prev_state
              }
            in
            if success
            then new_state
            else do_red_nointeract f new_state n
              )
        with Unify ->
          f false { prev_state with
              subprocess = remove_at n prev_state.subprocess;
              comment = RInsert_Remove(n, t, DestrFails);
              previous_state = Some prev_state }
        | FailOnlyOnSide _ ->
            if is_equivalence_goal prev_state.goal
            then
              (* SUCCESS *)
              { prev_state with
                goal = NonInterfGoal(ProcessTest([],[],(Some(n, List.nth prev_state.subprocess n)))) }
            else
              f false { prev_state with
                  subprocess = remove_at n prev_state.subprocess;
                  comment = RInsert_Remove(n, t, Blocks);
                  previous_state = Some prev_state }
        | No_result ->
            (* Retry is worthwhile only when a new element was actually
               inserted and some get may backtrack on it. *)
            if (!has_backtrack_get) && (!new_element_inserted) then
              f false prev_state
            else
              raise No_result
      end
  | NamedProcess(name, tl, p) ->
      debug_print "Doing NamedProcess";
      do_red_nointeract f { prev_state with
          subprocess = replace_at n (p, name_params, occs, facts, Nothing) prev_state.subprocess;
          comment = RNamedProcess(n, name, tl);
          previous_state = Some prev_state } n
  | _ -> f false prev_state
(* [test_success cur_state'] tests whether the goal of [cur_state'] is
   reached given the current public knowledge and subprocesses; returns
   the success boolean together with a state whose goal is enriched with
   the recipes/locations that witness success. [Unify] during the
   modulo-equality tests counts as failure. *)
let test_success cur_state' =
  try
    match cur_state'.goal with
    | CorrespGoal(l) ->
        let new_goal =
          CorrespGoal (List.map (fun goal -> match goal with
            | Fact(Pred({p_info = [AttackerBin(i,_)]},[t1;t2]) as fact, _, false) when cur_state'.current_phase <= i ->
                (* compute the new recipe_lst
                   if t is known by the attacker in phase cur_state'.current_phase,
                   it will still be known in phase i *)
                begin match is_in_public cur_state'.public (t1,t2) with
                | Some recipe -> Fact(fact,Some [recipe],true)
                | _ -> goal
                end
            | Fact(Pred({p_info = [Attacker(i,_)]},[t]) as fact, _, false) when cur_state'.current_phase <= i ->
                (* compute the new recipe_lst
                   if t is known by the attacker in phase cur_state'.current_phase,
                   it will still be known in phase i *)
                begin match is_in_public cur_state'.public (t,t) with
                | Some recipe -> Fact(fact,Some [recipe],true)
                | _ -> goal
                end
            | Fact(Pred({p_info = [MessBin(i,_)]},[tc1;t1;tc2;t2]) as fact, _, false) when cur_state'.current_phase <= i ->
                (* mess is reached when both the message and the channel
                   are known by the attacker *)
                begin match is_in_public cur_state'.public (t1,t2), is_in_public cur_state'.public (tc1,tc2) with
                | Some recipe1, Some recipe2 -> Fact(fact, Some [recipe1; recipe2], true)
                | _ -> goal
                end
            | Fact(Pred({p_info = [Mess(i,_)]},[tc;t]) as fact, _, false) when cur_state'.current_phase <= i ->
                begin match is_in_public cur_state'.public (t,t), is_in_public cur_state'.public (tc,tc) with
                | Some recipe1, Some recipe2 -> Fact(fact, Some [recipe1; recipe2], true)
                | _ -> goal
                end
            | _ -> goal
                ) l)
        in
        (is_success_corresp_goal new_goal, {cur_state' with goal = new_goal})
    | NonInterfGoal(NIEqTest((t1, _),(t2, _))) ->
        (match is_in_public cur_state'.public t1, is_in_public cur_state'.public t2 with
        | Some recipe1, Some recipe2 ->
            let new_goal = NonInterfGoal(NIEqTest((t1, Some recipe1),(t2, Some recipe2))) in
            (true, { cur_state' with goal = new_goal })
        | _ -> (false, cur_state'))
    | NonInterfGoal(NIFailTest (t, _)) ->
        (match is_in_public cur_state'.public t with
        | Some recipe ->
            let new_goal = NonInterfGoal(NIFailTest (t, Some recipe)) in
            (true, { cur_state' with goal = new_goal })
        | None -> (false, cur_state'))
    | NonInterfGoal(CommTest(tin,tout,loc)) ->
        (* The input side may be provided by the attacker or by some
           Input subprocess on a matching channel; similarly for the
           output side. The test succeeds when both are found. *)
        let rin =
          (match is_in_public cur_state'.public tin with
          | Some recipe -> Some (LocAttacker recipe)
          | None ->
              try
                let (n,p) =
                  findi (function
                      (Input(tc,_,_,_),_,_,_,_) -> equal_terms_modulo_eval tc tin
                    | _ -> false
                          ) cur_state'.subprocess
                in
                Some (LocProcess(n,p))
              with Not_found ->
                None)
        in
        let rout =
          (match is_in_public cur_state'.public tout with
          | Some recipe -> Some (LocAttacker recipe)
          | None ->
              try
                let (n,p) =
                  findi (function
                      (Output(tc,_,_,_),_,_,_,_) -> equal_terms_modulo_eval tc tout
                    | _ -> false
                          ) cur_state'.subprocess
                in
                Some (LocProcess(n,p))
              with Not_found ->
                None)
        in
        begin
          match rin,rout with
            Some lin, Some lout ->
              let new_goal = NonInterfGoal(CommTest(tin,tout,Some(lin,lout))) in
              (true, { cur_state' with goal = new_goal })
          | _ -> (false, cur_state')
        end
    | _ -> (false, cur_state')
  with Unify ->
    (false, cur_state')
(* Stop with the (goal-enriched) state when the goal is reached;
   otherwise continue with [next_f]. *)
let end_if_success next_f cur_state =
  match test_success cur_state with
  | (true, cur_state') -> cur_state'
  | (false, cur_state') -> next_f cur_state'
(* [find_possible_outputs f cur_state n seen_list sub] scans the
   subprocess list looking for an Output whose channel is public (only
   with an active attacker); the first such output is executed via
   [do_red_nointeract]. The channel-decomposition result is cached as
   [OutputInfo] on each inspected output. [seen_list] accumulates the
   already-scanned subprocesses in reverse; [n] is the index of the head
   of [sub] in the full list. *)
let rec find_possible_outputs f cur_state n seen_list = function
    [] -> f cur_state
  | (Output(tc,t,p,out_occ) as proc, name_params, occs, facts, cache_info)::rest_subprocess when (!Param.active_attacker) ->
      let tclist' =
        match cache_info with
          InputInfo _ | GetInfo _ -> Parsing_helper.internal_error "Should not have input/get info for an output!"
        | OutputInfo(tclist, oldpub) ->
            (* Only re-check the part of public added since [oldpub]. *)
            update_term_list oldpub cur_state.public tclist
        | Nothing ->
            let tclist = decompose_term_rev ((Terms.new_var ~orig:false "Useless" (Terms.get_term_type tc)), (tc, tc)) in
            remove_first_in_public cur_state.public tclist
      in
      let seen_list' = (proc, name_params, occs, facts, OutputInfo(tclist', cur_state.public)) :: seen_list in
      if tclist' = [] then
        (* The channel is public: execute this output. *)
        do_red_nointeract (fun change_pub cur_state2 ->
          if change_pub then
            end_if_success (find_possible_outputs_rec f) cur_state2
          else
            find_possible_outputs f cur_state2 0 [] cur_state2.subprocess
              ) { cur_state with subprocess = List.rev_append seen_list' rest_subprocess } n
      else
        find_possible_outputs f cur_state (n+1) seen_list' rest_subprocess
  | sub_proc::rest_subprocess -> find_possible_outputs f cur_state (n+1) (sub_proc::seen_list) rest_subprocess

(* Restart the output scan from the beginning of the subprocess list. *)
and find_possible_outputs_rec f cur_state3 =
  find_possible_outputs f cur_state3 0 [] cur_state3.subprocess
(* Reduce subprocess [n] with [do_red_nointeract]; when the attacker
   knowledge changed (before or during the reduction), test for success
   and look for newly enabled outputs before continuing with [f]. *)
let normal_state f change_pub cur_state n =
  do_red_nointeract (fun change_pub2 cur_state2 ->
    if not (change_pub || change_pub2) then f cur_state2
    else end_if_success (find_possible_outputs_rec f) cur_state2
  ) cur_state n
(* When two processes have been changed, at positions n1 and n2 *)
(* Like [normal_state], but after two subprocesses (at positions [n1]
   and [n2]) have been changed: reduce the larger index first so the
   smaller index stays valid. *)
let normal_state2 f change_pub cur_state n1 n2 =
  let (smaller, larger) = if n1 < n2 then (n1, n2) else (n2, n1) in
  do_red_nointeract (fun change_pub2 cur_state2 ->
    do_red_nointeract (fun change_pub3 cur_state3 ->
      if change_pub || change_pub2 || change_pub3 then
        end_if_success (find_possible_outputs_rec f) cur_state3
      else f cur_state3
    ) cur_state2 smaller
  ) cur_state larger
(* Like [normal_state], but reduces every subprocess, from the last one
   down to the first; success and newly enabled outputs are checked once
   at the end, when public changed. *)
let normal_state_all f change_pub cur_state =
  let rec reduce_all changed state n =
    if n >= 0 then
      do_red_nointeract (fun changed' state' ->
        reduce_all (changed || changed') state' (n-1)
      ) state n
    else if changed then
      end_if_success (find_possible_outputs_rec f) state
    else
      f state
  in
  reduce_all change_pub cur_state (List.length cur_state.subprocess - 1)
(* Build the initial attacker knowledge: one closed term [FunApp(h,[])]
   for each non-private free name of [l], in order. *)
let rec public_build l =
  match l with
  | [] -> []
  | h :: rest ->
      if h.f_private then public_build rest
      else (FunApp(h,[])) :: public_build rest
(* Initialize the rule lists *)
(* [init_rule state tree] walks the derivation [tree] and initializes
   the simulation state: attacker facts at the leaves become public
   terms (or unmatched hypotheses), process rules populate [io_rule],
   and attacker function applications / fresh-name rules populate
   [prepared_attacker_rule]. *)
let rec init_rule state tree =
  match tree.desc with
    FHAny | FEmpty | FRemovedWithMaxHyp ->
      begin
        match tree.thefact with
        | Pred(p,_) when p == Param.begin2_pred -> state
        | Pred(p, [t]) when p.p_prop land Param.pred_ATTACKER != 0 ->
            (* Unary attacker fact: both sides of the bi-term are [t']. *)
            begin
              let t' = rev_name_subst t in
              match t' with
                FunApp({ f_cat = Name _; f_private = false },[]) ->
                  { state with public = (t',(t',t')) :: state.public }
              | _ ->
                  (* Note: double negation — true when already public. *)
                  if (not (is_in_public state.public (t',t') = None)) then
                    state
                  else
                    (* The term is not public yet: assume it known under
                       a fresh recipe and record the open hypothesis. *)
                    let recipe = Var (new_var ~orig:false "~M" (Terms.get_term_type t')) in
                    { state with
                      public = (recipe,(t',t')) :: state.public;
                      hyp_not_matched = (Some recipe, Pred(p,[t']))::state.hyp_not_matched }
            end
        | Pred(p, [t1;t2]) when p.p_prop land Param.pred_ATTACKER != 0 ->
            (* Binary attacker fact: one term per side of the biprocess. *)
            begin
              let t1' = rev_name_subst t1 in
              let t2' = rev_name_subst t2 in
              match t1', t2' with
                (FunApp({ f_cat = Name _; f_private = false },[]),
                 FunApp({ f_cat = Name _; f_private = false },[])) when
                equal_terms_modulo t1' t2' ->
                  { state with public = (t1',(t1', t2')) :: state.public }
              | _ ->
                  if (not (is_in_public state.public (t1',t2') = None)) then
                    state
                  else
                    let recipe = Var (new_var ~orig:false "~M" (Terms.get_term_type t1')) in
                    { state with
                      public = (recipe,(t1',t2')) :: state.public;
                      hyp_not_matched = (Some recipe, Pred(p,[t1';t2']))::state.hyp_not_matched }
            end
        | _ ->
            (* Not an attacker fact: keep it as an unmatched hypothesis
               (once). *)
            let fact = rev_name_subst_fact tree.thefact in
            if List.exists (fun (_, fact') -> Terms.equal_facts fact fact') state.hyp_not_matched then
              state
            else
              { state with
                hyp_not_matched = (None, fact)::state.hyp_not_matched }
      end
  | FRemovedWithProof _ -> state
  | FRule (n, tags, constra, sons,_,_) ->
      (* First recurse into the sons, then record the rule itself. *)
      let rec init_sons_rule state1 = function
        | [] ->
            begin
              match tags with
                ProcessRule (hsl,nl) ->
                  {state1 with io_rule = (n, List.map (fun t -> rev_name_subst_fact t.thefact) sons,
                                          hsl, rev_name_subst_list nl,
                                          rev_name_subst_fact tree.thefact)::state1.io_rule}
              | Apply (f,_) when f.f_cat != Tuple ->
                  (* Attacker applies [f]: prepare a rule whose
                     hypotheses are the decomposed arguments. *)
                  begin
                    let (p,c) =
                      match tree.thefact with
                        Pred(p,l) -> (p,rev_name_subst_bi l)
                    in
                    let h = List.map (function
                        { thefact = Pred(_,l) } -> (Terms.new_var ~orig:false "~X" (get_term_type_bi l), rev_name_subst_bi l)) sons
                    in
                    let h' = decompose_list_rev h in
                    (* recipe_concl is built before decomposition links
                       the variables of [h]. *)
                    let recipe_concl = FunApp(f, (List.map (fun (x, y) -> Var x) h)) in
                    {state1 with prepared_attacker_rule = (p, h',(recipe_concl, c))::state1.prepared_attacker_rule}
                  end
              | Rn _ ->
                  (* Fresh attacker name: conclusion must be identical on
                     both sides. *)
                  begin
                    match tree.thefact with
                      Pred(p, l) ->
                        let t1',t2' = rev_name_subst_bi l in
                        if not (equal_terms_modulo t1' t2') then
                          Parsing_helper.internal_error "Rule Rn should conclude p(name,name) with the same name";
                        { state1 with prepared_attacker_rule = (p, [], (t1',(t1',t2')))::state1.prepared_attacker_rule }
                  end
              | _ -> state1
            end
        | h::t ->
            let state1' = init_rule state1 h in
            init_sons_rule state1' t
      in
      init_sons_rule state sons
  | FEquation son -> init_rule state son
(* [do_res_in ...] executes an input where the message [mess_term] comes
   from the attacker. [current_cache_list] caches the messages already
   tried for this input (with the partially checked recipe
   decomposition). Raises [Unify] when this message cannot be used so
   the caller tries the next one. *)
let do_res_in cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts tc pat p tc' mess_term public_status next_f =
  (* The real list of processes is
     (List.rev_append seen_list (InputProcess :: rest_subprocess)). *)
  let (recipe, mess_list, oldpub) =
    match public_status with
      Some (recipe, m,o) -> (recipe, m,o)
    | None ->
        let new_recipe = Terms.new_var ~orig:false "~M" (Terms.get_term_type (fst mess_term)) in
        (Var new_recipe, decompose_term_rev (new_recipe, mess_term), [])
  in
  (* Check that the message is in public. *)
  let mess_list' = update_term_list oldpub cur_state.public mess_list in
  let recipe' = Terms.copy_term4 recipe in
  (* When mess_list' is not empty, its first element is not in cur_state.public
     Remember that point to avoid testing again that part of public *)
  current_cache_list := (mess_term, Some (recipe', mess_list', cur_state.public)) :: (!current_cache_list);
  try
    made_forward_step := true;
    auto_cleanup_red (fun () ->
      let _ = bi_action (bi_match_pattern pat mess_term) in
      let name_params'' = update_name_params Always name_params' pat in
      let p' = auto_cleanup (fun () -> copy_process p) in
      let fact' = build_mess_fact cur_state.current_phase tc' mess_term in
      (* Add a precise fact when the input is tagged precise. *)
      let fact_list = match new_occs with
        | (InputTag _)::(PreciseTag(occ))::_ ->
            let occ_n = get_occurrence_name_for_precise occ name_params' in
            fact' :: (build_precise_fact occ_n mess_term) :: facts
        | (InputTag _) :: _ -> fact' :: facts
        | _ -> Parsing_helper.internal_error "[reduction_bipro.ml >> do_res_in] First element of new_occs should be an input tag."
      in
      normal_state next_f false
        { cur_state with
          subprocess = List.rev_append seen_list ((p', name_params'', new_occs, fact_list, Nothing) :: rest_subprocess);
          comment = RInput_Success(n, make_bi_choice tc', pat, recipe', make_bi_choice mess_term);
          previous_state = Some cur_state } n
        )
  with No_result ->
    (* Roll back the cache entry added above and try the next message. *)
    current_cache_list := List.tl (!current_cache_list);
    raise Unify
  | FailOnlyOnSide _ ->
      if is_equivalence_goal cur_state.goal
      then
        (* SUCCESS: the pattern matching fails on one side only *)
        { cur_state with
          goal = NonInterfGoal(InputProcessTest([],[],make_bi_choice mess_term, (Some(n, List.nth cur_state.subprocess n, LocAttacker recipe')))) }
      else
        begin
          (* I can remove this message from the cache, since retrying this input
             with the same message will always fail on one side, and I do not want
             to consider that. *)
          current_cache_list := List.tl (!current_cache_list);
          raise Unify
        end
let do_async_res_io cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts tc pat p tc' mess_term public_channel next_f =
  (* The real list of processes is (List.rev_append seen_list (InputProcess :: rest_subprocess))
     It differs from cur_state.subprocess only by the cache of input processes, so when
     looking for an output process, we can use cur_state.subprocess instead. *)
current_cache_list := (mess_term, None) :: (!current_cache_list);
let rec find_asynchronous_output noutput = function
| ((Output(tc2, t2, Nil,out_occ), name_params2,occs2, facts2, cache_info2)::_) when
(equal_bi_terms_modulo (get_choice tc2) tc') && (equal_bi_terms_modulo (get_choice t2) mess_term) -> noutput
| _::rest_subprocess2 -> find_asynchronous_output (noutput+1) rest_subprocess2
in
let noutput = find_asynchronous_output 0 cur_state.subprocess in
begin
try
made_forward_step := true;
let fail_case input_fails =
let noutput' = if n>noutput then noutput else noutput-1 in
let (_, name_params2,occs2, facts2, _) = List.nth cur_state.subprocess noutput in
let (new_goal,comm_in_goal,attack_found) =
update_corresp_goal cur_state.goal None
(is_mess_goal cur_state.current_phase tc' mess_term)
in
let tc'' = make_bi_choice tc' in
let (opt_recipe, cur_state1) = optional_eavesdrop cur_state public_channel mess_term in
let cur_state2 =
{ cur_state1 with
subprocess = replace_at noutput' (Nil, name_params2,occs2, facts2, Nothing) (List.rev_append seen_list rest_subprocess);
comment = RIO_PatRemove(n, tc'', pat, noutput, tc'', opt_recipe, make_bi_choice mess_term, comm_in_goal, input_fails);
goal = new_goal;
previous_state = Some cur_state
}
in
let cur_state3 = do_rnil cur_state2 noutput' in
if attack_found
then cur_state3
else next_f cur_state3
in
try
auto_cleanup_red (fun () ->
let _ = bi_action (bi_match_pattern pat mess_term) in
let name_params'' = update_name_params Always name_params' pat in
let p' = auto_cleanup (fun () -> copy_process p) in
let fact' = build_mess_fact cur_state.current_phase tc' mess_term in
let facts' = match new_occs with
| (InputTag _)::(PreciseTag(occ))::_ ->
let occ_n = get_occurrence_name_for_precise occ name_params' in
fact' :: (build_precise_fact occ_n mess_term) :: facts
| (InputTag _) :: _ -> fact' :: facts
| _ -> Parsing_helper.internal_error "[reduction.ml >> do_async_res_io] First element of new_occs should be an input tag."
in
let tc'' = make_bi_choice tc' in
let (_, name_params2,occs2, facts2, _) = List.nth cur_state.subprocess noutput in
let (new_goal,comm_in_goal,attack_found) =
update_corresp_goal cur_state.goal None
(is_mess_goal cur_state.current_phase tc' mess_term)
in
          (* When the adversary is passive and the channel is public,
             the adversary eavesdrops the message sent by RIO *)
let (opt_recipe, cur_state1) = optional_eavesdrop cur_state public_channel mess_term in
let cur_state' =
{ cur_state1 with
subprocess = replace_at noutput (Nil, name_params2,occs2, facts2, Nothing)
(List.rev_append seen_list ((p', name_params'', new_occs, facts', Nothing) :: rest_subprocess));
comment = RIO(n, tc'', pat, noutput, tc'', opt_recipe, make_bi_choice mess_term, comm_in_goal);
goal = new_goal;
previous_state = Some cur_state
}
in
          (* Then do RNil on the Nil process that follows the output *)
let cur_state3 = do_rnil cur_state' noutput in
let ninput = if n > noutput then n-1 else n in
if attack_found
then cur_state3
else normal_state next_f (cur_state3.public != cur_state.public) cur_state3 ninput
)
with
| Unify -> fail_case true
| FailOnlyOnSide _ ->
if is_equivalence_goal cur_state.goal
then
            (* SUCCESS the pattern matching fails on one side only *)
{ cur_state with
goal = NonInterfGoal(InputProcessTest([],[],make_bi_choice mess_term, Some(n, List.nth cur_state.subprocess n, LocProcess(noutput, List.nth cur_state.subprocess noutput)))) }
else fail_case false
with No_result ->
current_cache_list := List.tl (!current_cache_list);
raise Unify
end
let do_sync_res_io cur_state seen_list rest_subprocess n name_params' new_occs facts tc pat p tc' public_channel next_f =
  (* The real list of processes is (List.rev_append seen_list (InputProcess :: rest_subprocess))
     It differs from cur_state.subprocess only by the cache of input processes, so when
     looking for an output process, we can use cur_state.subprocess instead. *)
let rec find_synchronous_output noutput = function
| ((Output(tc2,t2,p2,out_occ),name_params2,occs2, facts2, cache_info2)::rest_subprocess2) ->
begin
      (* when (p2 != Nil) || public_channel || not (is_equivalence_goal cur_state.goal) *)
try
let tc2' = get_choice tc2 in
let t2' = get_choice t2 in
if equal_bi_terms_modulo tc2' tc' then
begin
let (new_goal,comm_in_goal,attack_found) =
update_corresp_goal cur_state.goal None
(is_mess_goal cur_state.current_phase tc2' t2')
in
          (* When p2 is Nil and the Horn clause derivation does not justify the
             input, the output is useless (because it does not allow to execute
             more instructions), except in two situations:
             - when the attacker is passive and the channel is public;
               in this case, it allows the attacker to obtain the message
               (public_channel is true in this case)
             - when the communication itself is what makes the attack succeed,
               that is, the goal is that communication.
               (comm_in_goal is true in this case) *)
if not ((p2 != Nil) || public_channel || comm_in_goal)
then raise Unify;
made_forward_step := true;
let fact = build_mess_fact cur_state.current_phase tc' t2' in
let facts' = match new_occs with
| (InputTag _)::(PreciseTag(occ))::_ ->
let occ_n = get_occurrence_name_for_precise occ name_params' in
fact :: (build_precise_fact occ_n t2') :: facts
| (InputTag _) :: _ -> fact :: facts
| _ -> Parsing_helper.internal_error "[reduction.ml >> do_sync_res_io] First element of new_occs should be an input tag."
in
let fail_case input_fails =
let noutput' = if n > noutput then noutput else noutput-1 in
let (opt_recipe, cur_state1) = optional_eavesdrop cur_state public_channel t2' in
let cur_state' =
{ cur_state1 with
subprocess = replace_at noutput' (p2, name_params2, occs2, facts2, Nothing)
(List.rev_append seen_list rest_subprocess);
comment = RIO_PatRemove(n, make_bi_choice tc', pat, noutput, tc2, opt_recipe, t2, comm_in_goal, input_fails);
goal = new_goal;
previous_state = Some cur_state }
in
if attack_found
then cur_state'
else normal_state next_f (cur_state'.public != cur_state.public) cur_state' noutput'
in
try
auto_cleanup_red (fun () ->
let _ = bi_action (bi_match_pattern pat t2') in
let name_params'' = update_name_params Always name_params' pat in
let p' = auto_cleanup (fun () -> copy_process p) in
              (* When the adversary is passive and the channel is public,
                 the adversary eavesdrops the message sent by RIO *)
let (opt_recipe, cur_state1) = optional_eavesdrop cur_state public_channel t2' in
let cur_state' =
{ cur_state1 with
subprocess = replace_at noutput (p2, name_params2, (OutputTag out_occ)::occs2, facts2, Nothing)
(List.rev_append seen_list ((p', name_params'', new_occs, facts', Nothing) :: rest_subprocess));
comment = RIO(n, make_bi_choice tc', pat, noutput, tc2, opt_recipe, t2, comm_in_goal);
goal = new_goal;
previous_state = Some cur_state }
in
if attack_found
then cur_state'
else normal_state2 next_f (cur_state'.public != cur_state.public) cur_state' noutput n
)
with
| Unify -> fail_case true
| FailOnlyOnSide _ ->
if is_equivalence_goal cur_state.goal
then
              (* SUCCESS the pattern matching fails on one side only *)
{ cur_state with
goal = NonInterfGoal(InputProcessTest([],[],t2,Some(n, List.nth cur_state.subprocess n, LocProcess(noutput, List.nth cur_state.subprocess noutput)))) }
else fail_case false
end
else raise Unify
with Unify | No_result ->
find_synchronous_output (noutput+1) rest_subprocess2
end
| _::rest_subprocess2 -> find_synchronous_output (noutput+1) rest_subprocess2
in
find_synchronous_output 0 cur_state.subprocess
(* Perform a get (Res Get) *)
(* [find_term stop_l t l] tests whether the term [t] occurs, modulo the
   equational theory, in the prefix of [l] that precedes the physical
   sublist [stop_l]. Scanning stops as soon as [stop_l] is reached. *)
let rec find_term stop_l t l =
  if l == stop_l then
    false
  else
    match l with
    | [] -> false
    | hd :: tl -> equal_bi_terms_modulo t hd || find_term stop_l t tl
(* Perform a get on a table entry (Res Get): try to match the entry
   [mess_term] against the pattern [pat] under the condition [t]; on
   success, continue the trace with the process [p]. The entry is pushed
   on [current_cache_list] so it is not retried on backtracking.
   [old_tables] is unused in this function but kept for a uniform
   interface with the cached variant of the call.
   Defect fixed: the comment delimiters had been stripped during
   extraction, leaving bare prose lines that broke the syntax. *)
let do_res_get cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts pat t p mess_term old_tables next_f =
  (* The real list of processes is (List.rev_append seen_list (InputProcess :: rest_subprocess)) *)
  current_cache_list := mess_term :: (!current_cache_list);
  debug_print "Get";
  debug_print "Ok, the entry is present";
  try
    made_forward_step := true;
    auto_cleanup_red (fun () ->
      let _ = bi_action (bi_match_pattern_and_test pat mess_term t) in
      let name_params'' = update_name_params Always name_params' pat in
      let p' = auto_cleanup (fun () -> copy_process p) in
      let fact' = build_table_fact cur_state.current_phase mess_term in
      let facts' = match new_occs with
        | (GetTag _)::(PreciseTag(occ))::_ ->
            let occ_n = get_occurrence_name_for_precise occ name_params' in
            fact' :: (build_precise_fact occ_n mess_term) :: facts
        | (GetTag _) :: _ -> fact' :: facts
        | _ -> Parsing_helper.internal_error "[reduction.ml >> do_res_get] First element of new_occs should be a Get tag."
      in
      normal_state next_f false
        { cur_state with
          subprocess = List.rev_append seen_list ((p', name_params'', new_occs, facts', Nothing) :: rest_subprocess);
          comment = RGet_In(n, pat, t, make_bi_choice mess_term);
          previous_state = Some cur_state } n
        )
  with No_result ->
    current_cache_list := List.tl (!current_cache_list);
    raise Unify
  | FailOnlyOnSide _ ->
      if is_equivalence_goal cur_state.goal then
        (* SUCCESS the pattern matching fails on one side only *)
        { cur_state with
          goal = NonInterfGoal(ProcessTest([],[],Some(n, List.nth cur_state.subprocess n))) }
      else
        next_f
          { cur_state with
            subprocess = remove_at n cur_state.subprocess;
            comment = RGet_Remove(n, pat, t);
            previous_state = Some cur_state }
(* Dispatch between (Res In), asynchronous (Res I/O), and synchronous (Res I/O), and (Res Get).
   May also execute (Insert) in case an insert has been delayed because it prevented executing the
   else branch of Get. *)
exception Backtrack_get
let rec find_in_out next_f cur_state n seen_list = function
[] -> raise No_result
| ((Input(tc,pat,p,occ) as proc ,name_params,occs, facts, cache_info)::rest_subprocess) ->
debug_print ("Trying Input on process " ^ (string_of_int n));
begin
match cache_info with
OutputInfo _ | GetInfo _ -> Parsing_helper.internal_error "Should not have output/get info for an input!"
| InputInfo(tc_list, oldpub, tc', name_params', new_occs, l) ->
let tc_list' = update_term_list oldpub cur_state.public tc_list in
if (!Param.active_attacker) && (tc_list' = []) then
begin
let current_cache_list = ref [] in
let rec do_l = function
[] ->
let seen_list' = (proc ,name_params,occs, facts,
InputInfo(tc_list', cur_state.public, tc', name_params', new_occs, !current_cache_list)) :: seen_list in
find_in_out next_f cur_state (n+1) seen_list' rest_subprocess
| (mess_term, public_status)::l ->
try
do_res_in cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts tc pat p tc' mess_term public_status next_f
with Unify ->
do_l l
in
do_l l
end
else
begin
let current_cache_list = ref [] in
let public_channel = (not (!Param.active_attacker)) && (tc_list' = []) in
let rec do_l = function
[] ->
let seen_list' = (proc ,name_params,occs, facts,
InputInfo(tc_list', cur_state.public, tc', name_params', new_occs, !current_cache_list)) :: seen_list in
begin
try
do_sync_res_io cur_state seen_list rest_subprocess n name_params' new_occs facts tc pat p tc' public_channel next_f
with Unify | No_result ->
find_in_out next_f cur_state (n+1) seen_list' rest_subprocess
end
| (mess_term,_)::l ->
try
do_async_res_io cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts tc pat p tc' mess_term public_channel next_f
with Unify ->
do_l l
in
do_l l
end
| Nothing ->
let seen_list' = ref ((proc, name_params, occs, facts, cache_info) :: seen_list) in
try
auto_cleanup_red (fun () ->
let (tc', name_params') = term_evaluation_name_params (OInChannel(occ)) tc name_params in
let m =
if cur_state.current_phase < get_min_choice_phase() then
let v = Reduction_helper.new_var_pat pat in
(v,v)
else
(Reduction_helper.new_var_pat pat, Reduction_helper.new_var_pat pat)
in
let fact = build_mess_fact cur_state.current_phase tc' m in
let (new_occs,new_facts) =
let ty = get_term_type (fst m) in
if Reduction_helper.exists_specific_precise_events_of_occ occ (Action ty)
then
let occ_n = get_occurrence_name_for_precise occ name_params' in
((InputTag occ) :: (PreciseTag(occ)) :: occs, (fact :: (build_precise_fact occ_n m) :: facts))
else ((InputTag occ) :: occs, (fact :: facts))
in
let new_recipe = Terms.new_var ~orig:false "Useless" (Terms.get_term_type (fst tc')) in
let tc_list = decompose_term_rev (new_recipe, tc') in
let tc_list' = remove_first_in_public cur_state.public tc_list in
if (!Param.active_attacker) && (tc_list' = []) then
begin
let current_cache_list = ref [] in
try
find_io_rule (function
[mess_term1;mess_term2] ->
do_res_in cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts tc pat p tc' (mess_term1,mess_term2) None next_f
| _ -> Parsing_helper.internal_error "input case; reduction_bipro.ml"
) new_occs new_facts name_params' [fst m; snd m] cur_state.io_rule
with Unify ->
seen_list' := (proc, name_params, occs, facts,
InputInfo([], [], tc', name_params', new_occs, !current_cache_list)) :: seen_list;
raise No_result
end
else
begin
let current_cache_list = ref [] in
let public_channel = (not (!Param.active_attacker)) && (tc_list' = []) in
try
find_io_rule (function
[mess_term1;mess_term2] ->
do_async_res_io cur_state seen_list rest_subprocess n current_cache_list name_params' new_occs facts tc pat p tc' (mess_term1,mess_term2) public_channel next_f
| _ -> Parsing_helper.internal_error "input case; reduction_bipro.ml"
) new_occs new_facts name_params' [fst m; snd m] cur_state.io_rule
with Unify ->
seen_list' := (proc, name_params,occs, facts,
InputInfo(tc_list', cur_state.public, tc', name_params', new_occs, !current_cache_list)) :: seen_list;
do_sync_res_io cur_state seen_list rest_subprocess n name_params' new_occs facts tc pat p tc' public_channel next_f
end
)
with Unify | No_result ->
find_in_out next_f cur_state (n+1) (!seen_list') rest_subprocess
| FailOnlyOnSide _ ->
if is_equivalence_goal cur_state.goal then
              (* SUCCESS the evaluation of the channel name fails on one side only *)
{ cur_state with
goal = NonInterfGoal(ProcessTest([],[],Some(n, List.nth cur_state.subprocess n))) }
else
find_in_out next_f
{ cur_state with
subprocess = remove_at n cur_state.subprocess;
comment = RInput_Remove(n, tc, pat, Blocks);
previous_state = Some cur_state } n seen_list rest_subprocess
end
| ((Get(pat,t,p,p_else,occ) as proc ,name_params,occs, facts, cache_info)::rest_subprocess) ->
debug_print ("Trying Get on process " ^ (string_of_int n));
begin
match cache_info with
OutputInfo _ | InputInfo _ -> Parsing_helper.internal_error "Should not have input/output info for a get!"
| GetInfo(old_tables, l) ->
let new_occs =
if Reduction_helper.exists_specific_precise_events_of_occ occ (Action Param.table_type) then
(GetTag occ) :: (PreciseTag occ) :: occs
else
(GetTag occ) :: occs
in
let current_cache_list = ref [] in
let rec do_l = function
[] ->
let seen_list' = (proc ,name_params,occs, facts,
GetInfo(cur_state.tables, !current_cache_list)) :: seen_list in
find_in_out next_f cur_state (n+1) seen_list' rest_subprocess
| mess_term::l ->
try
do_res_get cur_state seen_list rest_subprocess n current_cache_list name_params new_occs facts pat t p mess_term old_tables next_f
with Unify ->
do_l l
in
do_l l
| Nothing ->
let seen_list' = ref ((proc, name_params, occs, facts, cache_info) :: seen_list) in
try
auto_cleanup_red (fun () ->
let m =
if cur_state.current_phase < get_min_choice_phase() then
let v = Reduction_helper.new_var_pat pat in
(v,v)
else
(Reduction_helper.new_var_pat pat, Reduction_helper.new_var_pat pat)
in
let fact = build_table_fact cur_state.current_phase m in
let (new_occs,new_facts) =
if Reduction_helper.exists_specific_precise_events_of_occ occ (Action Param.table_type)
then
let occ_n = get_occurrence_name_for_precise occ name_params in
((GetTag occ) :: (PreciseTag occ) :: occs, fact :: (build_precise_fact occ_n m) :: facts)
else ((GetTag occ) :: occs, fact :: facts)
in
begin
let current_cache_list = ref [] in
try
find_io_rule (function
[mess_term1;mess_term2] ->
do_res_get cur_state seen_list rest_subprocess n current_cache_list name_params new_occs facts pat t p (mess_term1,mess_term2) [] next_f
| _ -> Parsing_helper.internal_error "get case; reduction_bipro.ml"
) new_occs new_facts name_params [fst m; snd m] cur_state.io_rule
with Unify ->
if p_else != Nil then
begin
try
let new_occs = (GetTagElse occ) :: occs in
find_io_rule (function
[] ->
debug_print "Get: else branch should be taken";
if List.exists (fun mess_term ->
try
auto_cleanup (fun () ->
let _ = bi_action (bi_match_pattern_and_test pat mess_term t) in
true)
with Unify -> false
then
begin
debug_print "Get: an element of the table matches, cannot take the else branch, backtracking";
has_backtrack_get := true;
raise Backtrack_get
end
else
begin
debug_print "Get: taking the else branch";
normal_state next_f false
{ cur_state with
subprocess = List.rev_append seen_list ((p_else, name_params, new_occs, facts, Nothing) :: rest_subprocess);
comment = RGet_Else(n, pat, t);
previous_state = Some cur_state } n
end
| _ -> Parsing_helper.internal_error "get else case; reduction_bipro.ml"
) new_occs facts name_params [] cur_state.io_rule
with Unify ->
seen_list' := (proc, name_params, occs, facts,
GetInfo(cur_state.tables, !current_cache_list)) :: seen_list;
raise No_result
end
else
begin
seen_list' := (proc, name_params, occs, facts,
GetInfo(cur_state.tables, !current_cache_list)) :: seen_list;
raise No_result
end
end
)
with Unify | No_result ->
find_in_out next_f cur_state (n+1) (!seen_list') rest_subprocess
| FailOnlyOnSide _ ->
if is_equivalence_goal cur_state.goal then
            (* SUCCESS an element of the table matches on one side and not on the other *)
{ cur_state with
goal = NonInterfGoal(ProcessTest([],[],Some(n, List.nth cur_state.subprocess n))) }
else
find_in_out next_f
{ cur_state with
subprocess = remove_at n cur_state.subprocess;
comment = RGet_Remove(n, pat, t);
previous_state = Some cur_state } n seen_list rest_subprocess
| Backtrack_get -> raise No_result
end
| ((Insert(t,p,occ), name_params, occs, facts, cache_info) as sub_proc)::rest_subprocess ->
debug_print "Doing Insert";
begin
let new_occs = (InsertTag occ) :: occs in
let new_element_inserted = ref false in
try
auto_cleanup_red (fun () ->
let t' = bi_action (term_evaluation_fail t) in
let already_in = List.exists (equal_bi_terms_modulo t') cur_state.tables in
new_element_inserted := not already_in;
let (new_goal,insert_in_goal,success) =
update_corresp_goal cur_state.goal None
(is_table_goal cur_state.current_phase t')
in
let new_state =
{ cur_state with
subprocess = List.rev_append seen_list ((p, name_params, new_occs, facts, Nothing) :: rest_subprocess);
tables = if already_in then cur_state.tables else t'::cur_state.tables;
comment = RInsert_Success(n, make_bi_choice t', insert_in_goal);
previous_state = Some cur_state;
goal = new_goal
}
in
if success
then new_state
else normal_state next_f false new_state n
)
with Unify | FailOnlyOnSide _ ->
Parsing_helper.internal_error "Insert: Unify/FailOnlyOnSide _ should have been detected on the first try of that insert"
| No_result ->
if (!has_backtrack_get) && (!new_element_inserted) then
find_in_out next_f cur_state (n+1) (sub_proc :: seen_list) rest_subprocess
else
raise No_result
end
| sub_proc::rest_subprocess ->
find_in_out next_f cur_state (n+1) (sub_proc :: seen_list) rest_subprocess
(* [extract_phase n subprocess_list] builds the subprocess list in force
   once phase [n] starts: a [Phase(n',...)] process is unwrapped when
   n' = n, kept wrapped when n' > n, and dropped when n' < n; processes
   that are not phase prefixes are removed. Cache info is reset. *)
let rec extract_phase n = function
  | [] -> []
  | (Phase (n', p, occ), name_params, occs, facts, _) :: rest ->
      let tail = extract_phase n rest in
      if n = n' then
        (p, name_params, occs, facts, Nothing) :: tail
      else if n < n' then
        (Phase (n', p, occ), name_params, occs, facts, Nothing) :: tail
      else
        tail
  | _ :: rest -> extract_phase n rest
(* [find_phase current_phase found_phase subprocess_list] returns
   [Some n] where [n] is the smallest phase number occurring in a
   [Phase] process of [subprocess_list], or [found_phase] when there is
   none. Fails with a user error when a remaining phase is
   <= [current_phase], since phases must be executed in increasing
   order. *)
let rec find_phase current_phase found_phase = function
    [] -> found_phase
  | (Phase(n,p,_),name_params,occs, facts, cache_info)::rest_subprocess ->
      if n <= current_phase then
        Parsing_helper.user_error "Phases should be in increasing order.";
      (* Keep the minimum phase number seen so far *)
      let found_phase' =
        match found_phase with
          None -> Some n
        | Some n_found -> if n_found <= n then found_phase else Some n
      in
      find_phase current_phase found_phase' rest_subprocess
  | _::rest_subprocess ->
      find_phase current_phase found_phase rest_subprocess
(* Execute a phase transition: find the smallest phase number greater
   than the current one among the subprocesses, move to it, and continue
   with [next_f]. Raises [No_result] when no later phase exists, i.e.
   the trace is blocked.
   Defect fixed: the comment before [close_public_phase_change] had lost
   its delimiters during extraction, breaking the syntax. *)
let do_phase next_f cur_state =
  match find_phase cur_state.current_phase None cur_state.subprocess with
    None ->
      (* No further phase: the trace cannot progress *)
      if !made_forward_step then
        begin
          incr failed_traces;
          made_forward_step := false
        end;
      if !debug_backtracking then
        begin
          ignore (Display.Text.display_reduc_state Display.bi_term_to_term true cur_state);
          print_string "Blocked. Backtracking...\n"
        end
      else
        debug_print "Backtracking";
      raise No_result
  | Some n ->
      debug_print "Doing Phase";
      made_forward_step := true;
      (* Reclose public, since new function symbols may become applicable *)
      let cur_state' = close_public_phase_change cur_state n in
      let cur_state'' =
        { cur_state' with
          subprocess = extract_phase n cur_state'.subprocess;
          previous_state = Some cur_state;
          current_phase = n;
          comment = RPhase(n) }
      in
      normal_state_all next_f false cur_state''
(* One reduction step: try all input/output/get/insert reductions on the
   current subprocesses; when none applies, try a phase transition. *)
let reduction_step next_f state =
  try
    find_in_out next_f state 0 [] state.subprocess
  with No_result ->
    do_phase next_f state
(* Reduce with backtracking: each step recursively continues with this
   same function, so earlier choices can be reconsidered on failure. *)
let rec reduction_backtrack state =
  reduction_step reduction_backtrack state
(* Reduce without backtracking: perform a single step (signalled via the
   [Reduced] exception), display it, then drop the previous state so no
   earlier choice can be reconsidered, and iterate. *)
let rec reduction_nobacktrack state =
  try
    reduction_step (fun state -> raise (Reduced state)) state
  with Reduced one_red_state ->
    display_trace one_red_state;
    Param.display_init_state := false;
    reduction_nobacktrack { one_red_state with previous_state = None }
(* Run the trace reconstruction, choosing the backtracking or the
   non-backtracking strategy according to [Param.trace_backtracking]. *)
let reduction state =
  let strategy =
    if !Param.trace_backtracking
    then reduction_backtrack
    else reduction_nobacktrack
  in
  strategy state
(* [analyze_tree tree] inspects the clause at the root of the derivation
   [tree] and builds the corresponding non-interference goal to
   reconstruct: a process test, a destructor-failure test, a
   communication test, or an equality test. *)
let analyze_tree tree =
  match tree.desc with
    FRule(_, lbl, _, hyp,_,_) ->
      begin
        match lbl, hyp with
          ProcessRule(hyp_tags, name_params), hyp ->
            ProcessTest([], [], None)
        | Rfail(p), hyp ->
            (* A destructor application failed on one side only *)
            NIFailTest((match hyp with
              [{ thefact = Pred(_, l) }] -> rev_name_subst_bi l
            | _ -> Parsing_helper.internal_error "Unexpected derivation for choice"), None)
        | TestComm(pi,po), [{thefact = Pred(_,lin)}; {thefact = Pred(_,lout)}] ->
            (* A communication succeeds on one side only *)
            CommTest(rev_name_subst_bi lin, rev_name_subst_bi lout, None)
        | TestEq(p), [{thefact = Pred(_,l1)};{thefact = Pred(_,l2)}] ->
            (* An equality test succeeds on one side only *)
            NIEqTest((rev_name_subst_bi l1, None), (rev_name_subst_bi l2, None))
        | _ -> Parsing_helper.internal_error "Unexpected clause concluding the derivation for choice"
      end
  | _ -> Parsing_helper.internal_error "Unexpected derivation for choice"
(* [build_goal tree query] computes the goal of the trace
   reconstruction: for correspondence queries, the list of facts to
   derive (events use [EventGoal]); otherwise a non-interference goal
   obtained by analyzing the derivation. *)
let build_goal tree = function
  | CorrespQEnc _ ->
      let (fact_list,_) = get_corresp_goals tree in
      let goal_list =
        List.map (function Pred(p,_) as pred_goal ->
          if p == Param.end2_pred
          then EventGoal(rev_name_subst_fact pred_goal,None)
          else Fact(rev_name_subst_fact pred_goal,None,false)
        ) fact_list
      in
      CorrespGoal goal_list
  | _ -> NonInterfGoal (analyze_tree tree)
exception FalseQuery
(* [extract_conclusion_query restwork concl] decomposes the conclusion
   [concl] of a query into (events, facts, constraints, left equalities,
   right equalities) and passes each possible decomposition to
   [restwork]; disjunctions are explored by backtracking on [Unify]. *)
let rec extract_conclusion_query restwork = function
  | QTrue -> restwork ([],[],Terms.true_constraints,[],[])
  | QFalse -> raise Unify
  | QEvent (QSEvent _) -> Parsing_helper.internal_error "[reduction_bipro.ml >> extract_conclusion_query] QSEvent should only occur in query for processes."
  | QEvent(QFact(p,_,l)) -> restwork ([],[Pred(p,l)],Terms.true_constraints,[],[])
  | QEvent (QNeq (t1,t2)) -> restwork ([], [], Terms.constraints_of_neq t1 t2, [], [])
  | QEvent (QGeq (t1,t2)) -> restwork ([], [], Terms.constraints_of_geq t1 t2, [], [])
  | QEvent (QIsNat t) -> restwork ([],[],Terms.constraints_of_is_nat t,[],[])
  | QEvent (QEq (t1,t2)) -> restwork ([], [], Terms.true_constraints, [t1], [t2])
  | QEvent((QSEvent2(t1,t2))) -> restwork ([t1,t2],[],Terms.true_constraints,[],[])
  | NestedQuery _ -> Parsing_helper.internal_error "[reduction_bipro.ml >> extract_conclusion_query] There should not be any nested query in correspondance queries for biprocess."
  | QAnd(concl1,concl2) ->
      (* Conjunction: combine every decomposition of both sides *)
      extract_conclusion_query (fun (ev1, facts1, constra1, eq_left1, eq_right1) ->
        extract_conclusion_query (fun (ev2, facts2, constra2, eq_left2, eq_right2) ->
          restwork (ev1@ev2, facts1@facts2, Terms.wedge_constraints constra1 constra2, eq_left1@eq_left2, eq_right1@eq_right2)
        ) concl2
      ) concl1
  | QOr(concl1,concl2) ->
      (* Disjunction: try the first branch, fall back to the second *)
      try
        extract_conclusion_query restwork concl1
      with Unify ->
        extract_conclusion_query restwork concl2
(* [find_in_event_table restwork (t1,t2) event_table] looks for an event
   of [event_table] whose left/right projections unify, modulo the
   equational theory, with [t1] and [t2] respectively; calls [restwork]
   on success and raises [Unify] when no event matches. *)
let rec find_in_event_table restwork ((t1,t2) as ev0) = function
    [] -> raise Unify
  | ev::rest ->
      try
        let ev1 = choice_in_term 1 ev
        and ev2 = choice_in_term 2 ev in
        TermsEq.unify_modulo_list (fun () ->
          restwork ()
        ) [t1;t2] [ev1;ev2]
      with Unify -> find_in_event_table restwork ev0 rest
(* Match every event of the list against the event table, in order;
   calls [restwork] once all of them have been found, and raises [Unify]
   (from [find_in_event_table]) otherwise. *)
let rec find_event_list restwork event_table events =
  match events with
  | [] -> restwork ()
  | first :: remaining ->
      let continue () = find_event_list restwork event_table remaining in
      find_in_event_table continue first event_table
let bad_fact = Pred(Param.bad_pred, [])
(* [check_conclusion_query restwork event_table concl_q] checks whether
   the conclusion [concl_q] can be satisfied by the events recorded in
   [event_table]; calls [restwork] on success and raises [Unify] on
   failure.
   Defect fixed: two comments had lost their delimiters during
   extraction (one of them duplicated), breaking the syntax. *)
let check_conclusion_query restwork event_table concl_q =
  extract_conclusion_query (fun (evlist, facts, constra, eq_left, eq_right) ->
    find_event_list (fun () ->
      TermsEq.unify_modulo_list (fun () ->
        (* We first look at the natural number predicates *)
        TermsEq.close_constraints_eq_synt (fun constra' ->
          (* [facts] should always be empty: lemmas and axioms never
             use attacker, mess, table in conclusion, and user-defined
             predicates are not used with biprocesses.
             If facts were not empty, not checking them means that
             I approximate: a query may be considered true when
             it is in fact false. This approximation is fine:
             ProVerif will consider that the found trace does not falsify
             the query and will answer "cannot be proved". *)
          let constra'' = TermsEq.remove_syntactic_constra constra' in
          begin
            try
              TermsEq.check_constraints constra''
            with TermsEq.FalseConstraint -> raise Unify
          end;
          restwork ()
            ) constra
          ) eq_left eq_right
        ) event_table evlist
      ) concl_q
(* [check_query_falsified_rec restwork event_table concl_q evl goall]
   unifies the premise events [evl] of the query with the goals [goall]
   reached by the trace, then checks the conclusion [concl_q]; calls
   [restwork] when the query could not be confirmed falsified, and
   raises [Unify] when the trace falsifies it. *)
let rec check_query_falsified_rec restwork event_table concl_q evl goall =
  match (evl, goall) with
    [], [] ->
      (* All premises matched: check the conclusion on a fresh copy,
         so that the links set during unification are taken into account *)
      let concl_q' =
        Terms.auto_cleanup (fun () ->
          Terms.copy_conclusion_query2 concl_q
        )
      in
      check_conclusion_query restwork event_table concl_q'
  | ev::rest_evl, (Fact(goal,_,_) | EventGoal(goal,_))::rest_goall ->
      (* Pair up the query event with the corresponding trace goal *)
      let (l,l') =
        match ev, goal with
          QFact(p,_,l), Pred(p',l') when p == p' -> l,l'
        | QSEvent2(t1,t2), Pred(pr,[t1';t2']) when pr == Param.end2_pred -> [t1;t2],[t1';t2']
        | _ ->
            print_string "Query: "; Display.Text.display_event ev; print_newline();
            print_string "Goal: "; Display.Text.display_fact goal; print_newline();
            Parsing_helper.internal_error "The goal of the trace does not match the query (1)"
      in
      begin
        try
          TermsEq.unify_modulo_list (fun () ->
            try
              check_query_falsified_rec restwork event_table concl_q rest_evl rest_goall
            with Unify -> raise FalseQuery
          ) l l'
        with
        | Unify ->
            print_string "Query: "; Display.Text.WithLinks.term_list l; print_newline();
            print_string "Goal: "; Display.Text.WithLinks.term_list l'; print_newline();
            Parsing_helper.internal_error "The goal of the trace does not match the query (2)"
        | FalseQuery -> raise Unify
      end
  | _ ->
      Parsing_helper.internal_error "The goal of the trace does not match the query (3)"
(* [check_query_falsified q final_state] returns [true] when the trace
   ending in [final_state] falsifies the query [q], and [false] (with a
   message) when falsification could not be confirmed. *)
let check_query_falsified q final_state =
  (* Events are recorded most-recent first; restore execution order *)
  let event_table = List.rev final_state.events in
  let Before(evl, hyp) = q in
  match final_state.goal with
    CorrespGoal(goall) ->
      begin
        try
          check_query_falsified_rec (fun () ->
            (* The trace reaches the premise of the query but the
               conclusion could not be shown unsatisfiable *)
            Display.Def.print_line "I could not confirm that the previous trace falsifies the query.";
            false
          ) event_table hyp (List.rev evl) (List.rev goall)
        with Unify -> true
      end
  | _ -> Parsing_helper.internal_error "The goal of the trace does not match the query (4)"
(* [do_reduction opt_query axioms tree] attempts to reconstruct an
   attack trace from the derivation [tree]. Returns [true] when a full
   trace is found and (when [opt_query] is [Some q]) it falsifies the
   query; returns [false] otherwise. Displays the trace (text or HTML
   depending on [Param.html_output]) as a side effect. *)
let do_reduction opt_query axioms tree =
  debug_print "Initializing";
  has_backtrack_get := false;
  made_forward_step := true;
  failed_traces := 0;
  let freenames = (!Param.current_state).pi_freenames in
  let public_init = public_build freenames in
  public_free := public_init;
  Param.display_init_state := true;
  init_name_mapping freenames;
  try
    Reduction_helper.instantiate_natural_predicates (fun () ->
      close_tree tree;
      let ({ proc = main_process }, query) = Param.get_process_query (!Param.current_state) in
      (* Initial state: the whole biprocess, attacker knowledge reduced
         to the free names, phase 0 *)
      let init_state =
        { goal = (build_goal tree query);
          subprocess = [(main_process, [],[],[],Nothing)];
          public = List.map (fun t -> (t,(t, t))) public_init;
          pub_vars = public_init;
          tables = [];
          io_rule = [];
          prepared_attacker_rule = [];
          previous_state = None;
          hyp_not_matched = [];
          assumed_false = [];
          current_phase = 0;
          comment = RInit;
          events = [];
          barriers = []
        }
      in
      let res =
        begin
          try
            let state = init_rule init_state tree in
            let state = close_public_initial state in
            if !debug_find_io_rule then
              begin
                auto_cleanup (fun () ->
                  print_string "Available rules:";
                  Display.Text.newline();
                  List.iter display_rule state.io_rule)
              end;
            debug_print "Initialization done";
            if !Param.html_output then
              begin
                let qs = string_of_int (!Param.derivation_number) in
                Display.LangHtml.openfile ((!Param.html_dir) ^ "/trace" ^ qs ^ ".html") ("ProVerif: trace for query " ^ qs);
                Display.Html.print_string "<H1>Trace</H1>\n"
              end;
            (* Run the trace reconstruction itself *)
            let final_state = normal_state reduction true state 0 in
            display_trace final_state;
            let dot_err = Reduction_helper.create_pdf_trace Display.bi_term_to_term noninterftest_to_string "" final_state in
            if !Param.html_output then
              begin
                Display.Html.display_goal Display.bi_term_to_term noninterftest_to_string final_state true;
                Display.LangHtml.close();
                let qs = string_of_int (!Param.derivation_number) in
                Display.Html.print_string ("<A HREF=\"trace" ^ qs ^ ".html\">Trace</A><br>\n");
                if (not !Param.command_line_graph_set) && (!Param.trace_backtracking && (dot_err = 0)) then
                  Display.Html.print_string ("<A HREF=\"trace" ^ qs ^ ".pdf\">Trace graph</A><br>\n")
              end
            else
              Display.Text.display_goal Display.bi_term_to_term noninterftest_to_string final_state true;
            Lemma.check_axioms final_state axioms;
            (* The attack is real only when every hypothesis of the
               derivation was matched in the trace *)
            if final_state.hyp_not_matched = []
            then
              match opt_query with
              | Some q -> check_query_falsified q final_state
              | _ -> true
            else false
          with No_result ->
            if not (!Param.trace_backtracking) then
              Display.Def.print_line "Blocked!";
            if !Param.html_output then
              begin
                Display.LangHtml.close();
                if not (!Param.trace_backtracking) then
                  begin
                    let qs = string_of_int (!Param.derivation_number) in
                    Display.Html.print_string ("<A HREF=\"trace" ^ qs ^ ".html\">Unfinished trace</A><br>\n")
                  end;
                Display.Html.print_line "Could not find a trace corresponding to this derivation."
              end;
            Display.Text.print_line "Could not find a trace corresponding to this derivation.";
            false
        end
      in
      res
        ) tree
  with TermsEq.FalseConstraint -> false
(* Entry point: first simplify the derivation [tree] via
   [History.unify_derivation] (which may call [recheck]), then attempt
   the trace reconstruction on the result. Shadows the previous
   [do_reduction]. *)
let do_reduction recheck opt_query lemmas tree =
  debug_print "Starting reduction";
  let res =
    Display.auto_cleanup_display (fun () ->
      History.unify_derivation (fun tree ->
        Display.auto_cleanup_display (fun () ->
          do_reduction opt_query lemmas tree
        )
      ) recheck tree
    )
  in
  Terms.cleanup ();
  res
|
5a3243bd5613d32be5b8d72edaa25f7e8c5798ef119acf3e7932fadd036c9c6c | patricoferris/ocaml-multicore-monorepo | unsafe_pre407.ml | external get_int32_ne : bytes -> int -> int32 = "%caml_string_get32"
| null | https://raw.githubusercontent.com/patricoferris/ocaml-multicore-monorepo/22b441e6727bc303950b3b37c8fbc024c748fe55/duniverse/eqaf/check/unsafe_pre407.ml | ocaml | external get_int32_ne : bytes -> int -> int32 = "%caml_string_get32"
|
|
16b629482025df0f244a872875d771baee575485e7311507a16219f2272f299a | Plutonomicon/plutarch-plutus | Test.hs | | Common functions for testing Plutarch code
module Plutarch.Test (
-- * Plutarch specific `Expectation` operators
passert,
passertNot,
pfails,
psucceeds,
ptraces,
pshouldBe,
(#@?=),
-- * Budget expectation
psatisfyWithinBenchmark,
-- * Golden testing
(@|),
(@\),
(@->),
(@:->),
(@==),
pgoldenSpec,
pgoldenSpec',
PlutarchGoldens,
GoldenConf (..),
GoldenTest (..),
-- * Benchmark type for use in `(@:->)`
Benchmark (Benchmark, exBudgetCPU, exBudgetMemory, scriptSizeBytes),
ScriptSizeBytes,
-- * Test runner related utilities
noUnusedGoldens,
noUnusedGoldens',
hspecAndReturnForest,
) where
import Data.Text (Text)
import Data.Text qualified as T
import Plutarch (ClosedTerm, Config (Config, tracingMode), compile, pcon, printScript, pattern DetTracing)
import Plutarch.Bool (PBool (PFalse, PTrue))
import Plutarch.Evaluate (evalScript)
import Plutarch.Script qualified as Scripts
import Plutarch.Test.Benchmark (
Benchmark (Benchmark, exBudgetCPU, exBudgetMemory, scriptSizeBytes),
ScriptSizeBytes,
)
import Plutarch.Test.Golden (
GoldenConf (GoldenConf, chosenTests, goldenBasePath),
GoldenTest (GoldenT'Bench, GoldenT'UPLCPostEval, GoldenT'UPLCPreEval),
PlutarchGoldens,
TermExpectation,
evalScriptAlwaysWithBenchmark,
pgoldenSpec,
pgoldenSpec',
(@->),
(@:->),
(@\),
(@|),
)
import Plutarch.Test.Run (hspecAndReturnForest, noUnusedGoldens, noUnusedGoldens')
import Test.Hspec (Expectation, expectationFailure, shouldBe, shouldSatisfy)
import Test.Tasty.HUnit (assertFailure)
comp :: ClosedTerm a -> Scripts.Script
comp t = either (error . T.unpack) id $ compile (Config {tracingMode = DetTracing}) t
|
Like ` shouldBe ` but but for Plutarch terms
Like `shouldBe` but but for Plutarch terms
-}
pshouldBe :: ClosedTerm a -> ClosedTerm b -> Expectation
pshouldBe x y = do
p1 <- eval $ comp x
p2 <- eval $ comp y
pscriptShouldBe p1 p2
where
eval :: Scripts.Script -> IO Scripts.Script
eval s = case evalScript s of
(Left e, _, _) -> assertFailure $ "Script evaluation failed: " <> show e
(Right x', _, _) -> pure x'
{- |
Like `pshouldBe` but on `Script`
-}
pscriptShouldBe :: Scripts.Script -> Scripts.Script -> Expectation
pscriptShouldBe x y =
printScript x `shouldBe` printScript y
| Like ` @?= ` but for Plutarch terms
(#@?=) :: ClosedTerm a -> ClosedTerm b -> Expectation
(#@?=) = pshouldBe
-- | Asserts the term to be true
passert :: ClosedTerm a -> Expectation
passert p = p #@?= pcon PTrue
-- | Asserts the term to be false
passertNot :: ClosedTerm a -> Expectation
passertNot p = p #@?= pcon PFalse
-- | Asserts the term evaluates successfully without failing
psucceeds :: ClosedTerm a -> Expectation
psucceeds p =
case evalScript $ comp p of
(Left _, _, _) -> expectationFailure "Term failed to evaluate"
(Right _, _, _) -> pure ()
-- | Asserts the term evaluates without success
pfails :: ClosedTerm a -> Expectation
pfails p = do
case evalScript $ comp p of
(Left _, _, _) -> pure ()
(Right _, _, _) -> expectationFailure "Term succeeded"
| Check that the given benchmark is within certain maximum values .
Use this to ensure that a program 's benchmark does n't exceed expected values
( such as script size or memory budget ) . You will need this because ,
- ` Plutarch . Test ` 's golden testing uses maximum possible ExBudget for evaluating
programs
- You may want to check that the script size is within a certain value
Use this to ensure that a program's benchmark doesn't exceed expected values
(such as script size or memory budget). You will need this because,
- `Plutarch.Test`'s golden testing uses maximum possible ExBudget for evaluating
programs
- You may want to check that the script size is within a certain value
-}
psatisfyWithinBenchmark :: Benchmark -> Benchmark -> Expectation
psatisfyWithinBenchmark bench maxBudget = do
shouldSatisfy bench $ \_ ->
exBudgetCPU bench <= exBudgetCPU maxBudget
shouldSatisfy bench $ \_ ->
exBudgetMemory bench <= exBudgetMemory maxBudget
shouldSatisfy bench $ \_ ->
scriptSizeBytes bench <= scriptSizeBytes maxBudget
-- | Asserts that the term evaluates successfully with the given trace sequence
ptraces :: ClosedTerm a -> [Text] -> Expectation
ptraces p develTraces =
case evalScript $ comp p of
(Left _, _, _) -> expectationFailure "Term failed to evaluate"
(Right _, _, traceLog) -> do
traceLog `shouldBe` develTraces
| Test that the Plutarch program evaluates to the given term
(@==) :: ClosedTerm a -> ClosedTerm b -> TermExpectation a
(@==) p x = p @:-> \(_, script, _) -> script `pscriptShouldBe` xScript
where
xScript = fst . evalScriptAlwaysWithBenchmark $ comp x
infixr 1 @==
| null | https://raw.githubusercontent.com/Plutonomicon/plutarch-plutus/9b83892057f2aaaed76e3af6193ad1ae242244cc/plutarch-test/Plutarch/Test.hs | haskell | * Plutarch specific `Expectation` operators
* Budget expectation
* Golden testing
* Benchmark type for use in `(@:->)`
* Test runner related utilities
|
Like `pshouldBe` but on `Script`
| Asserts the term to be true
| Asserts the term to be false
| Asserts the term evaluates successfully without failing
| Asserts the term evaluates without success
| Asserts that the term evaluates successfully with the given trace sequence | | Common functions for testing Plutarch code
module Plutarch.Test (
passert,
passertNot,
pfails,
psucceeds,
ptraces,
pshouldBe,
(#@?=),
psatisfyWithinBenchmark,
(@|),
(@\),
(@->),
(@:->),
(@==),
pgoldenSpec,
pgoldenSpec',
PlutarchGoldens,
GoldenConf (..),
GoldenTest (..),
Benchmark (Benchmark, exBudgetCPU, exBudgetMemory, scriptSizeBytes),
ScriptSizeBytes,
noUnusedGoldens,
noUnusedGoldens',
hspecAndReturnForest,
) where
import Data.Text (Text)
import Data.Text qualified as T
import Plutarch (ClosedTerm, Config (Config, tracingMode), compile, pcon, printScript, pattern DetTracing)
import Plutarch.Bool (PBool (PFalse, PTrue))
import Plutarch.Evaluate (evalScript)
import Plutarch.Script qualified as Scripts
import Plutarch.Test.Benchmark (
Benchmark (Benchmark, exBudgetCPU, exBudgetMemory, scriptSizeBytes),
ScriptSizeBytes,
)
import Plutarch.Test.Golden (
GoldenConf (GoldenConf, chosenTests, goldenBasePath),
GoldenTest (GoldenT'Bench, GoldenT'UPLCPostEval, GoldenT'UPLCPreEval),
PlutarchGoldens,
TermExpectation,
evalScriptAlwaysWithBenchmark,
pgoldenSpec,
pgoldenSpec',
(@->),
(@:->),
(@\),
(@|),
)
import Plutarch.Test.Run (hspecAndReturnForest, noUnusedGoldens, noUnusedGoldens')
import Test.Hspec (Expectation, expectationFailure, shouldBe, shouldSatisfy)
import Test.Tasty.HUnit (assertFailure)
comp :: ClosedTerm a -> Scripts.Script
comp t = either (error . T.unpack) id $ compile (Config {tracingMode = DetTracing}) t
|
Like ` shouldBe ` but but for Plutarch terms
Like `shouldBe` but but for Plutarch terms
-}
pshouldBe :: ClosedTerm a -> ClosedTerm b -> Expectation
pshouldBe x y = do
p1 <- eval $ comp x
p2 <- eval $ comp y
pscriptShouldBe p1 p2
where
eval :: Scripts.Script -> IO Scripts.Script
eval s = case evalScript s of
(Left e, _, _) -> assertFailure $ "Script evaluation failed: " <> show e
(Right x', _, _) -> pure x'
pscriptShouldBe :: Scripts.Script -> Scripts.Script -> Expectation
pscriptShouldBe x y =
printScript x `shouldBe` printScript y
| Like ` @?= ` but for Plutarch terms
(#@?=) :: ClosedTerm a -> ClosedTerm b -> Expectation
(#@?=) = pshouldBe
passert :: ClosedTerm a -> Expectation
passert p = p #@?= pcon PTrue
passertNot :: ClosedTerm a -> Expectation
passertNot p = p #@?= pcon PFalse
psucceeds :: ClosedTerm a -> Expectation
psucceeds p =
case evalScript $ comp p of
(Left _, _, _) -> expectationFailure "Term failed to evaluate"
(Right _, _, _) -> pure ()
pfails :: ClosedTerm a -> Expectation
pfails p = do
case evalScript $ comp p of
(Left _, _, _) -> pure ()
(Right _, _, _) -> expectationFailure "Term succeeded"
| Check that the given benchmark is within certain maximum values .
Use this to ensure that a program 's benchmark does n't exceed expected values
( such as script size or memory budget ) . You will need this because ,
- ` Plutarch . Test ` 's golden testing uses maximum possible ExBudget for evaluating
programs
- You may want to check that the script size is within a certain value
Use this to ensure that a program's benchmark doesn't exceed expected values
(such as script size or memory budget). You will need this because,
- `Plutarch.Test`'s golden testing uses maximum possible ExBudget for evaluating
programs
- You may want to check that the script size is within a certain value
-}
psatisfyWithinBenchmark :: Benchmark -> Benchmark -> Expectation
psatisfyWithinBenchmark bench maxBudget = do
shouldSatisfy bench $ \_ ->
exBudgetCPU bench <= exBudgetCPU maxBudget
shouldSatisfy bench $ \_ ->
exBudgetMemory bench <= exBudgetMemory maxBudget
shouldSatisfy bench $ \_ ->
scriptSizeBytes bench <= scriptSizeBytes maxBudget
ptraces :: ClosedTerm a -> [Text] -> Expectation
ptraces p develTraces =
case evalScript $ comp p of
(Left _, _, _) -> expectationFailure "Term failed to evaluate"
(Right _, _, traceLog) -> do
traceLog `shouldBe` develTraces
| Test that the Plutarch program evaluates to the given term
(@==) :: ClosedTerm a -> ClosedTerm b -> TermExpectation a
(@==) p x = p @:-> \(_, script, _) -> script `pscriptShouldBe` xScript
where
xScript = fst . evalScriptAlwaysWithBenchmark $ comp x
infixr 1 @==
|
39df2c73465311e8a4355c07254fef31c6daa275dab147d5d70a04ea28df2f52 | disteph/cdsat | literals.mli | open Format
open Top
open Interfaces_basic
open Basic
open Specs
module LitF : sig
type t [@@deriving eq,hash,ord,show]
val id : t -> int
val print_in_fmt : ?print_atom:(formatter -> int -> unit)
-> formatter -> t -> unit
val pp : formatter -> t -> unit
val clear : unit -> unit
type revealed = bool*int
val reveal : t -> revealed
val build : revealed -> t
val clear : unit -> unit
val negation : t -> t
end
module LitB : sig
include PHCons
type revealed = bool*Terms.TermB.t
val reveal : t -> revealed
val build : revealed -> t
val clear : unit -> unit
val negation : t -> t
end
module TS : Specs.DataType with type t = LitF.t
| null | https://raw.githubusercontent.com/disteph/cdsat/1b569f3eae59802148f4274186746a9ed3e667ed/src/kernel/kernel.mld/termstructures.mld/literals/literals.mli | ocaml | open Format
open Top
open Interfaces_basic
open Basic
open Specs
module LitF : sig
type t [@@deriving eq,hash,ord,show]
val id : t -> int
val print_in_fmt : ?print_atom:(formatter -> int -> unit)
-> formatter -> t -> unit
val pp : formatter -> t -> unit
val clear : unit -> unit
type revealed = bool*int
val reveal : t -> revealed
val build : revealed -> t
val clear : unit -> unit
val negation : t -> t
end
module LitB : sig
include PHCons
type revealed = bool*Terms.TermB.t
val reveal : t -> revealed
val build : revealed -> t
val clear : unit -> unit
val negation : t -> t
end
module TS : Specs.DataType with type t = LitF.t
|
|
9bf5dab6a1408eb915dd021e9551bf07a8f50594e8c9aa93b332bd20ce86f9ba | esl/MongooseIM | mod_http_upload_s3.erl | %%==============================================================================
Copyright 2016 Erlang Solutions Ltd.
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%==============================================================================
-module(mod_http_upload_s3).
-author('').
-behaviour(mod_http_upload_backend).
-export([create_slot/6]).
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
-spec create_slot(UTCDateTime :: calendar:datetime(), Token :: binary(),
Filename :: unicode:unicode_binary(), ContentType :: binary() | undefined,
Size :: pos_integer(), Opts :: gen_mod:module_opts()) ->
{PUTURL :: binary(), GETURL :: binary(),
Headers :: #{binary() => binary()}}.
create_slot(UTCDateTime, Token, Filename, ContentType, Size, Opts) ->
#{s3 := #{add_acl := AddACL, region := Region, access_key_id := AccessKeyId,
secret_access_key := SecretAccessKey, bucket_url := BucketURL},
expiration_time := ExpirationTime} = Opts,
{Scheme, Host, Port, Path} = extract_uri_params(BucketURL, Token, Filename),
ExpectedHeaders = get_expected_headers(Scheme, Host, Port, Size,
ContentType, AddACL),
UnsignedQueries = create_queries(UTCDateTime, AccessKeyId, Region,
ExpirationTime, ExpectedHeaders),
Signature = aws_signature_v4:sign(<<"PUT">>, Path, UnsignedQueries, ExpectedHeaders,
UTCDateTime, Region, <<"s3">>, SecretAccessKey),
Queries = maps:put(<<"X-Amz-Signature">>, Signature, UnsignedQueries),
{
compose_url(Scheme, Host, Port, Path, Queries),
compose_url(Scheme, Host, Port, Path, #{}),
#{}
}.
%%--------------------------------------------------------------------
%% Helpers
%%--------------------------------------------------------------------
-spec create_queries(UTCDateTime :: calendar:datetime(), AccessKeyId :: binary(),
Region :: binary(), ExpirationTime :: pos_integer(),
ExpectedHeaders :: #{binary() => binary()}) ->
Queries :: #{binary() => binary()}.
create_queries(UTCDateTime, AccessKeyId, Region, ExpirationTime, ExpectedHeaders) ->
Scope = aws_signature_v4:compose_scope(UTCDateTime, Region, <<"s3">>),
SignedHeadersSemi = << <<H/binary, ";">> || H <- maps:keys(ExpectedHeaders) >>,
SignedHeaders = binary_part(SignedHeadersSemi, 0, byte_size(SignedHeadersSemi) - 1),
#{
<<"X-Amz-Algorithm">> => <<"AWS4-HMAC-SHA256">>,
<<"X-Amz-Credential">> => <<AccessKeyId/binary, "/", Scope/binary>>,
<<"X-Amz-Date">> => aws_signature_v4:datetime_iso8601(UTCDateTime),
<<"X-Amz-Expires">> => integer_to_binary(ExpirationTime),
<<"X-Amz-SignedHeaders">> => SignedHeaders
}.
-spec get_expected_headers(Scheme :: http | https | atom(),
Host :: unicode:unicode_binary(),
Port :: inet:port_number(),
Size :: pos_integer(),
ContentType :: binary() | undefined,
AddACL :: boolean()) ->
Headers :: #{binary() => binary()}.
get_expected_headers(Scheme, Host, Port, Size, ContentType, AddACL) ->
Headers = #{<<"host">> => with_port_component(Scheme, Host, Port),
<<"content-length">> => integer_to_binary(Size)},
WithContentType = maybe_add_content_type(ContentType, Headers),
maybe_add_acl(AddACL, WithContentType).
maybe_add_content_type(undefined, Headers) ->
Headers;
maybe_add_content_type(ContentType, Headers) ->
maps:put(<<"content-type">>, ContentType, Headers).
maybe_add_acl(false, Headers) ->
Headers;
maybe_add_acl(true, Headers) ->
maps:put(<<"x-amz-acl">>, <<"public-read">>, Headers).
-spec extract_uri_params(BucketURL :: unicode:unicode_binary(), Token :: binary(),
Filename :: unicode:unicode_binary()) ->
{Scheme :: http | https | atom(), Host :: unicode:unicode_binary(),
Port :: inet:port_number(), Path :: unicode:unicode_binary()}.
extract_uri_params(BucketURL, Token, Filename) ->
#{host := Host, scheme := Scheme, path := Path0} = Parsed =
uri_string_parse(BucketURL),
SchemeAtom = binary_to_existing_atom(Scheme, latin1),
Port = case maps:get(port, Parsed, undefined) of
undefined ->
scheme_to_port(SchemeAtom, 80);
P ->
P
end,
KeylessPath = trim_slash(Path0),
EscapedFilename = aws_signature_v4:uri_encode(Filename),
Path = <<KeylessPath/binary, "/", Token/binary, "/", EscapedFilename/binary>>,
{SchemeAtom, Host, Port, Path}.
is utf-8 encoded binary
uri_string_parse(Uri) when is_binary(Uri) ->
case uri_string:parse(Uri) of
Map when is_map(Map) ->
Map;
Other ->
error(#{what => failed_to_parse_uri, uri_string => Uri,
reason => Other})
end.
-spec compose_url(Scheme :: http | https | atom(), Host :: unicode:unicode_binary(),
Port :: inet:port_number(), Path :: unicode:unicode_binary(),
Queries :: #{binary() => binary()}) ->
URL :: unicode:unicode_binary().
compose_url(Scheme, Host, Port, Path, Queries) ->
SchemeBin = atom_to_binary(Scheme, latin1),
<<SchemeBin/binary, "://", (with_port_component(Scheme, Host, Port))/binary,
Path/binary, (query_string(Queries))/binary>>.
-spec query_string(Queries :: #{binary() => binary()}) -> QueryString :: binary().
query_string(Queries) ->
query_string(maps:to_list(Queries), []).
-spec query_string(Queries :: [binary()], Acc :: [binary()]) -> binary().
query_string([], Acc) ->
iolist_to_binary(lists:reverse(Acc));
query_string([Query | Queries], []) ->
query_string(Queries, [<<"?", (query_encode(Query))/binary>>]);
query_string([Query | Queries], Acc) ->
query_string(Queries, [<<"&", (query_encode(Query))/binary>> | Acc]).
-spec query_encode({Key :: binary(), Value :: binary()}) -> QueryComponent :: binary().
query_encode({Key, Value}) ->
<<(aws_signature_v4:uri_encode(Key))/binary, "=",
(aws_signature_v4:uri_encode(Value))/binary>>.
-spec with_port_component(Scheme :: http | https | atom(),
Host :: unicode:unicode_binary(),
Port :: inet:port_number()) -> binary().
with_port_component(Scheme, Host, Port) ->
case scheme_to_port(Scheme, undefined) of
Port -> Host;
_ -> <<Host/binary, ":", (integer_to_binary(Port))/binary>>
end.
scheme_to_port(http, _Default) -> 80;
scheme_to_port(https, _Default) -> 443;
scheme_to_port(_Scheme, Default) -> Default.
-spec trim_slash(binary()) -> binary().
trim_slash(<<>>) ->
<<>>;
trim_slash(Data) ->
case binary:last(Data) of
$/ -> erlang:binary_part(Data, 0, byte_size(Data) - 1);
_ -> Data
end.
| null | https://raw.githubusercontent.com/esl/MongooseIM/40b349deae239d02b76a32a650dbbb7adbc11d67/src/http_upload/mod_http_upload_s3.erl | erlang | ==============================================================================
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================
--------------------------------------------------------------------
API
--------------------------------------------------------------------
--------------------------------------------------------------------
Helpers
-------------------------------------------------------------------- | Copyright 2016 Erlang Solutions Ltd.
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(mod_http_upload_s3).
-author('').
-behaviour(mod_http_upload_backend).
-export([create_slot/6]).
-spec create_slot(UTCDateTime :: calendar:datetime(), Token :: binary(),
Filename :: unicode:unicode_binary(), ContentType :: binary() | undefined,
Size :: pos_integer(), Opts :: gen_mod:module_opts()) ->
{PUTURL :: binary(), GETURL :: binary(),
Headers :: #{binary() => binary()}}.
create_slot(UTCDateTime, Token, Filename, ContentType, Size, Opts) ->
#{s3 := #{add_acl := AddACL, region := Region, access_key_id := AccessKeyId,
secret_access_key := SecretAccessKey, bucket_url := BucketURL},
expiration_time := ExpirationTime} = Opts,
{Scheme, Host, Port, Path} = extract_uri_params(BucketURL, Token, Filename),
ExpectedHeaders = get_expected_headers(Scheme, Host, Port, Size,
ContentType, AddACL),
UnsignedQueries = create_queries(UTCDateTime, AccessKeyId, Region,
ExpirationTime, ExpectedHeaders),
Signature = aws_signature_v4:sign(<<"PUT">>, Path, UnsignedQueries, ExpectedHeaders,
UTCDateTime, Region, <<"s3">>, SecretAccessKey),
Queries = maps:put(<<"X-Amz-Signature">>, Signature, UnsignedQueries),
{
compose_url(Scheme, Host, Port, Path, Queries),
compose_url(Scheme, Host, Port, Path, #{}),
#{}
}.
-spec create_queries(UTCDateTime :: calendar:datetime(), AccessKeyId :: binary(),
Region :: binary(), ExpirationTime :: pos_integer(),
ExpectedHeaders :: #{binary() => binary()}) ->
Queries :: #{binary() => binary()}.
create_queries(UTCDateTime, AccessKeyId, Region, ExpirationTime, ExpectedHeaders) ->
Scope = aws_signature_v4:compose_scope(UTCDateTime, Region, <<"s3">>),
SignedHeadersSemi = << <<H/binary, ";">> || H <- maps:keys(ExpectedHeaders) >>,
SignedHeaders = binary_part(SignedHeadersSemi, 0, byte_size(SignedHeadersSemi) - 1),
#{
<<"X-Amz-Algorithm">> => <<"AWS4-HMAC-SHA256">>,
<<"X-Amz-Credential">> => <<AccessKeyId/binary, "/", Scope/binary>>,
<<"X-Amz-Date">> => aws_signature_v4:datetime_iso8601(UTCDateTime),
<<"X-Amz-Expires">> => integer_to_binary(ExpirationTime),
<<"X-Amz-SignedHeaders">> => SignedHeaders
}.
-spec get_expected_headers(Scheme :: http | https | atom(),
Host :: unicode:unicode_binary(),
Port :: inet:port_number(),
Size :: pos_integer(),
ContentType :: binary() | undefined,
AddACL :: boolean()) ->
Headers :: #{binary() => binary()}.
get_expected_headers(Scheme, Host, Port, Size, ContentType, AddACL) ->
Headers = #{<<"host">> => with_port_component(Scheme, Host, Port),
<<"content-length">> => integer_to_binary(Size)},
WithContentType = maybe_add_content_type(ContentType, Headers),
maybe_add_acl(AddACL, WithContentType).
maybe_add_content_type(undefined, Headers) ->
Headers;
maybe_add_content_type(ContentType, Headers) ->
maps:put(<<"content-type">>, ContentType, Headers).
maybe_add_acl(false, Headers) ->
Headers;
maybe_add_acl(true, Headers) ->
maps:put(<<"x-amz-acl">>, <<"public-read">>, Headers).
-spec extract_uri_params(BucketURL :: unicode:unicode_binary(), Token :: binary(),
Filename :: unicode:unicode_binary()) ->
{Scheme :: http | https | atom(), Host :: unicode:unicode_binary(),
Port :: inet:port_number(), Path :: unicode:unicode_binary()}.
extract_uri_params(BucketURL, Token, Filename) ->
#{host := Host, scheme := Scheme, path := Path0} = Parsed =
uri_string_parse(BucketURL),
SchemeAtom = binary_to_existing_atom(Scheme, latin1),
Port = case maps:get(port, Parsed, undefined) of
undefined ->
scheme_to_port(SchemeAtom, 80);
P ->
P
end,
KeylessPath = trim_slash(Path0),
EscapedFilename = aws_signature_v4:uri_encode(Filename),
Path = <<KeylessPath/binary, "/", Token/binary, "/", EscapedFilename/binary>>,
{SchemeAtom, Host, Port, Path}.
is utf-8 encoded binary
uri_string_parse(Uri) when is_binary(Uri) ->
case uri_string:parse(Uri) of
Map when is_map(Map) ->
Map;
Other ->
error(#{what => failed_to_parse_uri, uri_string => Uri,
reason => Other})
end.
-spec compose_url(Scheme :: http | https | atom(), Host :: unicode:unicode_binary(),
Port :: inet:port_number(), Path :: unicode:unicode_binary(),
Queries :: #{binary() => binary()}) ->
URL :: unicode:unicode_binary().
compose_url(Scheme, Host, Port, Path, Queries) ->
SchemeBin = atom_to_binary(Scheme, latin1),
<<SchemeBin/binary, "://", (with_port_component(Scheme, Host, Port))/binary,
Path/binary, (query_string(Queries))/binary>>.
-spec query_string(Queries :: #{binary() => binary()}) -> QueryString :: binary().
query_string(Queries) ->
query_string(maps:to_list(Queries), []).
-spec query_string(Queries :: [binary()], Acc :: [binary()]) -> binary().
query_string([], Acc) ->
iolist_to_binary(lists:reverse(Acc));
query_string([Query | Queries], []) ->
query_string(Queries, [<<"?", (query_encode(Query))/binary>>]);
query_string([Query | Queries], Acc) ->
query_string(Queries, [<<"&", (query_encode(Query))/binary>> | Acc]).
-spec query_encode({Key :: binary(), Value :: binary()}) -> QueryComponent :: binary().
query_encode({Key, Value}) ->
<<(aws_signature_v4:uri_encode(Key))/binary, "=",
(aws_signature_v4:uri_encode(Value))/binary>>.
-spec with_port_component(Scheme :: http | https | atom(),
Host :: unicode:unicode_binary(),
Port :: inet:port_number()) -> binary().
with_port_component(Scheme, Host, Port) ->
case scheme_to_port(Scheme, undefined) of
Port -> Host;
_ -> <<Host/binary, ":", (integer_to_binary(Port))/binary>>
end.
scheme_to_port(http, _Default) -> 80;
scheme_to_port(https, _Default) -> 443;
scheme_to_port(_Scheme, Default) -> Default.
-spec trim_slash(binary()) -> binary().
trim_slash(<<>>) ->
<<>>;
trim_slash(Data) ->
case binary:last(Data) of
$/ -> erlang:binary_part(Data, 0, byte_size(Data) - 1);
_ -> Data
end.
|
547d1fcf090adc9129d06c90ebf9ad88b411da6062fe28d21776a91b0daa403a | CloudI/CloudI | cloudi_service_test_http_req.erl | -*-Mode : erlang;coding : utf-8;tab - width:4;c - basic - offset:4;indent - tabs - mode:()-*-
ex : set utf-8 sts=4 ts=4 sw=4 et nomod :
%%%
%%%------------------------------------------------------------------------
%%% @doc
= = CloudI Service for the http_req Test==
%%% @end
%%%
MIT License
%%%
Copyright ( c ) 2011 - 2022 < mjtruog at protonmail dot com >
%%%
%%% Permission is hereby granted, free of charge, to any person obtaining a
%%% copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction , including without limitation
%%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software , and to permit persons to whom the
%%% Software is furnished to do so, subject to the following conditions:
%%%
%%% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
%%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
%%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
%%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
%%% DEALINGS IN THE SOFTWARE.
%%%
@author < mjtruog at protonmail dot com >
2011 - 2022
%%% @version 2.0.5 {@date} {@time}
%%%------------------------------------------------------------------------
-module(cloudi_service_test_http_req).
-author('mjtruog at protonmail dot com').
-behaviour(cloudi_service).
cloudi_service callbacks
-export([cloudi_service_init/4,
cloudi_service_handle_request/11,
cloudi_service_terminate/3]).
-include_lib("cloudi_core/include/cloudi_logger.hrl").
-record(state, {}).
%%%------------------------------------------------------------------------
%%% External interface functions
%%%------------------------------------------------------------------------
%%%------------------------------------------------------------------------
Callback functions from cloudi_service
%%%------------------------------------------------------------------------
cloudi_service_init(_Args, _Prefix, _Timeout, Dispatcher) ->
cloudi_service:subscribe(Dispatcher, "erlang.xml/get"),
{ok, #state{}}.
cloudi_service_handle_request(_RequestType, _Name, _Pattern,
_RequestInfo, Request,
_Timeout, _Priority, _TransId, _Source,
State, _Dispatcher) ->
HttpQS = cloudi_request_info:key_value_parse(Request),
Response = case cloudi_key_value:find(<<"value">>, HttpQS) of
{ok, RawValue} ->
Value = case RawValue of
[V | _] ->
erlang:binary_to_integer(V);
V ->
erlang:binary_to_integer(V)
end,
cloudi_string:format_to_binary(
"<http_test><value>~w</value></http_test>", [Value]
);
error ->
<<"<http_test><error>no value specified</error></http_test>">>
end,
HttpResponseHeaders = #{"content-type" => "text/xml; charset=utf-8"},
ResponseInfo = cloudi_response_info:key_value_new(HttpResponseHeaders),
{reply, ResponseInfo, Response, State}.
cloudi_service_terminate(_Reason, _Timeout, _State) ->
?LOG_INFO("terminate http_req erlang", []),
ok.
%%%------------------------------------------------------------------------
%%% Private functions
%%%------------------------------------------------------------------------
| null | https://raw.githubusercontent.com/CloudI/CloudI/ec951deffbedcce823b16f82cef89e768f2ac07c/src/tests/http_req/erlang/src/cloudi_service_test_http_req.erl | erlang |
------------------------------------------------------------------------
@doc
@end
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
@version 2.0.5 {@date} {@time}
------------------------------------------------------------------------
------------------------------------------------------------------------
External interface functions
------------------------------------------------------------------------
------------------------------------------------------------------------
------------------------------------------------------------------------
------------------------------------------------------------------------
Private functions
------------------------------------------------------------------------ | -*-Mode : erlang;coding : utf-8;tab - width:4;c - basic - offset:4;indent - tabs - mode:()-*-
ex : set utf-8 sts=4 ts=4 sw=4 et nomod :
= = CloudI Service for the http_req Test==
MIT License
Copyright ( c ) 2011 - 2022 < mjtruog at protonmail dot com >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
@author < mjtruog at protonmail dot com >
2011 - 2022
-module(cloudi_service_test_http_req).
-author('mjtruog at protonmail dot com').
-behaviour(cloudi_service).
cloudi_service callbacks
-export([cloudi_service_init/4,
cloudi_service_handle_request/11,
cloudi_service_terminate/3]).
-include_lib("cloudi_core/include/cloudi_logger.hrl").
-record(state, {}).
Callback functions from cloudi_service
cloudi_service_init(_Args, _Prefix, _Timeout, Dispatcher) ->
cloudi_service:subscribe(Dispatcher, "erlang.xml/get"),
{ok, #state{}}.
cloudi_service_handle_request(_RequestType, _Name, _Pattern,
_RequestInfo, Request,
_Timeout, _Priority, _TransId, _Source,
State, _Dispatcher) ->
HttpQS = cloudi_request_info:key_value_parse(Request),
Response = case cloudi_key_value:find(<<"value">>, HttpQS) of
{ok, RawValue} ->
Value = case RawValue of
[V | _] ->
erlang:binary_to_integer(V);
V ->
erlang:binary_to_integer(V)
end,
cloudi_string:format_to_binary(
"<http_test><value>~w</value></http_test>", [Value]
);
error ->
<<"<http_test><error>no value specified</error></http_test>">>
end,
HttpResponseHeaders = #{"content-type" => "text/xml; charset=utf-8"},
ResponseInfo = cloudi_response_info:key_value_new(HttpResponseHeaders),
{reply, ResponseInfo, Response, State}.
cloudi_service_terminate(_Reason, _Timeout, _State) ->
?LOG_INFO("terminate http_req erlang", []),
ok.
|
1907739b3fac6c93a93e835746da3abdc5335d2b079edcc00dc7091b7386f21d | manutter51/woolybear | catalog.cljs | (ns woolybear.ad.catalog
(:require [re-frame.core :as re-frame]
[woolybear.ad.buttons :as buttons]
[woolybear.ad.layout :as layout]
[woolybear.packs.flex-panel :as flex]
[woolybear.packs.tab-panel :as tab-panel]
[woolybear.ad.catalog.layouts :as layout-demo]
[woolybear.ad.catalog.containers :as containers-demo]
[woolybear.ad.catalog.icons :as icons-demo]
[woolybear.ad.catalog.buttons :as buttons-demo]
[woolybear.ad.catalog.forms :as forms-demo]))
(def data-path [:ad-catalog :tab-panel])
(def init-db
{:tab-panel (tab-panel/mk-tab-panel-data data-path :demo/layouts)})
(re-frame/reg-sub
:db/ad-catalog
(fn [db _]
(:ad-catalog db)))
(re-frame/reg-sub
:ad-catalog/tab-panel
:<- [:db/ad-catalog]
(fn [ad-catalog]
(:tab-panel ad-catalog)))
(re-frame/reg-sub
:tab-panel/selected-tab
:<- [:ad-catalog/tab-panel]
(fn [tab-panel]
(:value tab-panel)))
(defn page
"Top-level AD Catalog page"
[]
[layout/page {:extra-classes :ad-catalog}
[flex/flex-panel {:height "calc(100vh - 2rem)"}
[flex/flex-top
[layout/page-header {:extra-classes :ad-catalog}
[layout/page-title "AD Catalog"]]
[tab-panel/tab-bar {:extra-classes :ad-catalog
:subscribe-to-component-data [:ad-catalog/tab-panel]}
[buttons/tab-button {:panel-id :demo/layouts} "Layout"]
[buttons/tab-button {:panel-id :demo/containers} "Containers"]
[buttons/tab-button {:panel-id :demo/icons} "Icons / Images"]
[buttons/tab-button {:panel-id :demo/buttons} "Buttons"]
[buttons/tab-button {:panel-id :demo/forms} "Forms"]
]]
[layout/page-body {:extra-classes :ad-catalog}
[tab-panel/tab-panel {:extra-classes :ad-catalog
:subscribe-to-selected-tab [:tab-panel/selected-tab]}
[tab-panel/sub-panel {:panel-id :demo/layouts}
[layout-demo/catalog]]
[tab-panel/sub-panel {:panel-id :demo/containers}
[containers-demo/catalog]]
[tab-panel/sub-panel {:panel-id :demo/icons}
[icons-demo/catalog]]
[tab-panel/sub-panel {:panel-id :demo/buttons}
[buttons-demo/catalog]]
[tab-panel/sub-panel {:panel-id :demo/forms}
[forms-demo/catalog]]
]]]])
| null | https://raw.githubusercontent.com/manutter51/woolybear/a7f820dfb2f51636122d56d1500baefe5733eb25/src/cljs/woolybear/ad/catalog.cljs | clojure | (ns woolybear.ad.catalog
(:require [re-frame.core :as re-frame]
[woolybear.ad.buttons :as buttons]
[woolybear.ad.layout :as layout]
[woolybear.packs.flex-panel :as flex]
[woolybear.packs.tab-panel :as tab-panel]
[woolybear.ad.catalog.layouts :as layout-demo]
[woolybear.ad.catalog.containers :as containers-demo]
[woolybear.ad.catalog.icons :as icons-demo]
[woolybear.ad.catalog.buttons :as buttons-demo]
[woolybear.ad.catalog.forms :as forms-demo]))
(def data-path [:ad-catalog :tab-panel])
(def init-db
{:tab-panel (tab-panel/mk-tab-panel-data data-path :demo/layouts)})
(re-frame/reg-sub
:db/ad-catalog
(fn [db _]
(:ad-catalog db)))
(re-frame/reg-sub
:ad-catalog/tab-panel
:<- [:db/ad-catalog]
(fn [ad-catalog]
(:tab-panel ad-catalog)))
(re-frame/reg-sub
:tab-panel/selected-tab
:<- [:ad-catalog/tab-panel]
(fn [tab-panel]
(:value tab-panel)))
(defn page
"Top-level AD Catalog page"
[]
[layout/page {:extra-classes :ad-catalog}
[flex/flex-panel {:height "calc(100vh - 2rem)"}
[flex/flex-top
[layout/page-header {:extra-classes :ad-catalog}
[layout/page-title "AD Catalog"]]
[tab-panel/tab-bar {:extra-classes :ad-catalog
:subscribe-to-component-data [:ad-catalog/tab-panel]}
[buttons/tab-button {:panel-id :demo/layouts} "Layout"]
[buttons/tab-button {:panel-id :demo/containers} "Containers"]
[buttons/tab-button {:panel-id :demo/icons} "Icons / Images"]
[buttons/tab-button {:panel-id :demo/buttons} "Buttons"]
[buttons/tab-button {:panel-id :demo/forms} "Forms"]
]]
[layout/page-body {:extra-classes :ad-catalog}
[tab-panel/tab-panel {:extra-classes :ad-catalog
:subscribe-to-selected-tab [:tab-panel/selected-tab]}
[tab-panel/sub-panel {:panel-id :demo/layouts}
[layout-demo/catalog]]
[tab-panel/sub-panel {:panel-id :demo/containers}
[containers-demo/catalog]]
[tab-panel/sub-panel {:panel-id :demo/icons}
[icons-demo/catalog]]
[tab-panel/sub-panel {:panel-id :demo/buttons}
[buttons-demo/catalog]]
[tab-panel/sub-panel {:panel-id :demo/forms}
[forms-demo/catalog]]
]]]])
|
|
8b610df85a203632c7f002a794ef09670359891721a8173a9cccd81e0c9c34e7 | mightybyte/zeus | Backend.hs | # LANGUAGE EmptyCase #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
module Backend where
------------------------------------------------------------------------------
import Control.Concurrent
import Control.Error
import qualified Control.Exception as E
import Control.Lens
import qualified Control.Monad.Fail as Fail
import qualified Data.Map as M
import Control.Monad
import Control.Monad.Trans
import qualified Data.Aeson as A
import Data.Dependent.Sum (DSum ((:=>)))
import Data.IORef
import Data.Int
import Data.RNG
import qualified Data.Set as S
import Data.String.Conv
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Text.Encoding
import Data.Time
import Data.Maybe (fromMaybe)
import Database.Beam
import Database.Beam.Sqlite
import Database.Beam.Sqlite.Migrate
import Database.Beam.Migrate.Backend
import Database.Beam.Migrate.Simple hiding (migrateScript)
import Database.SQLite.Simple
import GitHub.Auth
import GitHub.Data.Name
import GitHub.Data.Id
import GitHub.Data.Webhooks
import GitHub.Endpoints.Repos.Webhooks
import GitHub.Request
import qualified Network.WebSockets as WS
import Obelisk.Backend
import Obelisk.ExecutableConfig.Lookup
import Obelisk.Route
import Scrub
import Snap.Core
import Snap.Util.FileServe
import System.Directory
import System.Exit
import System.FilePath
import System.Mem.Weak
import System.Process (rawSystem)
import Text.Printf
------------------------------------------------------------------------------
import Backend.Build
import Backend.Cache
import Backend.CacheServer
import Backend.Common
import Backend.Db
import Backend.DbLib
import Backend.ExecutablePaths
import Backend.Github
import Backend.Gitlab
import Backend.Types.BackendSettings
import Backend.Types.ConnRepo
import Backend.Types.NixCacheKeyPair
import Backend.Types.ServerEnv
import Backend.WsCmds
import Backend.WsUtils
import Common.Api
import Common.Route
import Common.Types.BinaryCache
import Common.Types.BuildJob
import Common.Types.CiSettings
import Common.Types.ConnectedAccount
import Common.Types.JobStatus
import Common.Types.NixCacheKeyPair
import Common.Types.Repo
------------------------------------------------------------------------------
getSecretToken :: IO Text
getSecretToken = do
let secretFile = "zeus-access-token"
secretExists <- doesFileExist secretFile
if secretExists
then T.strip <$> T.readFile secretFile
else do
rng <- mkRNG
tok <- toS <$> randomToken 32 rng
T.writeFile secretFile tok
return tok
dbConnectInfo :: String
dbConnectInfo = "zeus.db"
getSigningKey :: IO NixCacheKeyPair
getSigningKey = do
Right secret <- readKeyFile signingKeySecretFile
Right public <- readKeyFile signingKeyPublicFile
return $ NixCacheKeyPair secret public
doesSigningKeyExist :: IO Bool
doesSigningKeyExist = do
secretExists <- doesFileExist signingKeySecretFile
publicExists <- doesFileExist signingKeyPublicFile
return $ secretExists && publicExists
------------------------------------------------------------------------------
-- | Generates a signing key for the Zeus nix cache. If there is no key, this
-- function generates a new one. The key name parameter is recommended to be
-- the domain name followed by "-1" (or other number) to allow for key
-- rotation.
getOrCreateSigningKey :: String -> IO NixCacheKeyPair
getOrCreateSigningKey keyName = do
keyExists <- doesSigningKeyExist
when (not keyExists) $ do
let secretFile = signingKeyBaseName <> ".sec"
publicFile = signingKeyBaseName <> ".pub"
args =
[ "--generate-binary-cache-key"
, keyName
, secretFile
, publicFile
]
putStrLn "Generating cache signing key"
putStrLn $ unwords (nixStore : args)
ec <- rawSystem nixStore args
case ec of
ExitFailure c -> error $ printf "Error %d: Could not generate nix cache key" c
ExitSuccess -> return ()
renameFile secretFile signingKeySecretFile
renameFile publicFile signingKeyPublicFile
getSigningKey
getAppCacheKey :: Text -> IO NixCacheKeyPair
getAppCacheKey appRoute = do
let appDomain = T.takeWhile (\c -> c /= ':' && c /= '/') $ T.drop 3 $ snd $ T.breakOn "://" appRoute
getOrCreateSigningKey $ T.unpack $ appDomain <> "-1"
verboseMigrate
:: (Database Sqlite db, Fail.MonadFail m)
=> BeamMigrationBackend Sqlite m
-> CheckedDatabaseSettings Sqlite db
-> m ()
verboseMigrate BeamMigrationBackend { backendActionProvider = actions
, backendGetDbConstraints = getCs }
db =
do actual <- getCs
let expected = collectChecks db
case finalSolution (heuristicSolver actions actual expected) of
Candidates {} -> Fail.fail "autoMigrate: Could not determine migration"
Solved (cmds) ->
-- Check if any of the commands are irreversible
case foldMap migrationCommandDataLossPossible cmds of
MigrationKeepsData -> mapM_ (runNoReturn . migrationCommand) cmds
_ -> do
let msg = unlines $
"autoMigrate: Not performing automatic migration due to data loss" :
"Here is a migration script that may or may not be helpful:" : "" :
map (toS . sqliteRenderSyntaxScript . fromSqliteCommand . migrationCommand) cmds
Fail.fail msg
backend :: Backend BackendRoute FrontendRoute
backend = Backend
{ _backend_run = \serve -> do
-- TODO Probably switch to a connection pool a some point, but we don't
-- expect a large volume of requests for awhile so this is probably a
-- very low priority.
dbConn <- open dbConnectInfo
runBeamSqliteDebug putStrLn dbConn $ verboseMigrate migrationBackend ciDbChecked
mcs <- getCiSettings dbConn
case mcs of
Nothing -> initCiSettings dbConn defCiSettings
Just _ -> return ()
secretToken <- getSecretToken
connRepo <- newConnRepo
buildThreads <- newIORef mempty
let settingsFile = "backend/settings.json" :: String
allConfigs <- getConfigs
settings <- case M.lookup (toS settingsFile) allConfigs of
Nothing -> return $ BackendSettings Nothing [] Nothing
Just bs -> do
case A.decode $ toS bs of
Nothing -> error ("Error parsing " <> settingsFile)
Just s -> return s
putStrLn $ "read settings: " <> show settings
let appRoute = fromMaybe (error "You must make this server reachable from the outside world, and put that url path in config/common/route")
$ T.strip . decodeUtf8 <$> M.lookup "common/route" allConfigs
listeners <- newIORef mempty
keyPair <- getAppCacheKey appRoute
let env = ServerEnv appRoute settings secretToken dbConn
connRepo buildThreads listeners keyPair
_ <- forkIO $ buildManagerThread env
_ <- forkIO $ cacheManagerThread env
putStrLn "Worker threads forked, starting server..."
serve $ serveBackendRoute env
, _backend_routeEncoder = backendRouteEncoder
}
enforceIpWhitelist :: [Cidr] -> Snap ()
enforceIpWhitelist [] = return ()
enforceIpWhitelist whitelist = do
addr <- getsRequest rqClientAddr
case parseIp (toS addr) of
Left e -> do
serverError $ "Couldn't parse IP returned by Snap: " <> toS e
Right ip ->
when (not $ any (matchesCidr ip) whitelist) $ do
liftIO $ putStrLn $ "Rejecting connection from " <> toS addr
notFound "Not found"
| Serve our file .
serveBackendRoute :: ServerEnv -> R BackendRoute -> Snap ()
serveBackendRoute env = \case
BackendRoute_Cache :=> Identity ps -> do
enforceIpWhitelist (_beSettings_ipWhitelist $ _serverEnv_settings env)
mcs <- liftIO $ getCiSettings (_serverEnv_db env)
case mcs of
Nothing -> liftIO $ putStrLn "Unexpected error: Couldn't get CiSettings"
Just cs ->
if _ciSettings_serveLocalCache cs
then nixCacheRoutes env ps
else notFound "Not found"
BackendRoute_Hook :=> Identity hr -> case hr of
Hook_GitHub :=> _ -> githubHandler env
Hook_GitLab :=> _ -> gitlabHandler env
BackendRoute_Ping :=> _ -> do
addr <- getsRequest rqClientAddr
writeText $ "CLIENT ADDR: " <> toS addr <> "\n"
writeText "PONG\nPONG\nPONG\n"
BackendRoute_RawBuildOut :=> Identity outputFile -> do
modifyResponse (addHeader "Content-Disposition" "inline")
serveFileAs "text/plain" (T.unpack $ "log/builds/" <> outputFile)
BackendRoute_Websocket :=> _ -> do
enforceIpWhitelist (_beSettings_ipWhitelist $ _serverEnv_settings env)
wsHandler $ \conn -> do
cid <- addConnection conn (_serverEnv_connRepo env)
putStrLn $ "Established websocket connection with connId " <> show cid
listJobs env conn
listAccounts env conn
listRepos env conn
listCaches env conn
sendCiInfo env conn
sendCiSettings env conn
talkClient env cid conn
BackendRoute_Missing :=> _ -> do
liftIO $ putStrLn "Unknown backend route"
writeText "Unknown backend route"
talkClient :: ServerEnv -> ConnId -> WS.Connection -> IO ()
talkClient env cid conn = do
E.handle cleanup $ forever $ do
clientCmd <- wsReceive conn
putStrLn "================================="
putStrLn $ "Got Up_ message " <> show clientCmd
case clientCmd of
Left e -> do
putStrLn $ "************************************************"
putStrLn $ "ERROR: websocketHandler couldn't decode message:"
putStrLn e
Right Up_ListAccounts -> listAccounts env conn
Right (Up_ConnectAccount cas) -> mapM_ (connectAccount env) cas
Right (Up_DelAccounts cas) -> delAccounts env conn cas
Right Up_ListRepos -> listRepos env conn
Right (Up_AddRepo rs) -> mapM_ (addRepo env conn) rs
Right (Up_DelRepos rs) -> mapM_ (deleteRepo env) rs
Right Up_GetJobs -> listJobs env conn
Right (Up_SubscribeJobOutput jids) -> mapM_ (subscribeJob env cid) jids
Right (Up_UnsubscribeJobOutput jids) -> mapM_ (unsubscribeJob env cid) jids
Right (Up_CancelJobs jids) -> do
mapM_ (cancelJobAndRemove env) jids
Right (Up_RerunJobs jids) -> do
mapM_ (rerunJob env) jids
Right (Up_GetCiSettings) -> sendCiSettings env conn
Right (Up_UpdateCiSettings cs) -> do
setCiSettings (_serverEnv_db env) cs
wsSend conn (Down_CiSettings $ scrub cs)
Right Up_GetCiInfo -> sendCiInfo env conn
Right Up_ListCaches -> listCaches env conn
Right (Up_AddCache cs) -> mapM_ (addCache env) cs
Right (Up_DelCaches cs) -> delCaches env conn cs
where
cRepo = _serverEnv_connRepo env
cleanup :: E.SomeException -> IO ()
cleanup _ = do
removeConnection cid cRepo
sendCiSettings :: ServerEnv -> WS.Connection -> IO ()
sendCiSettings se conn = do
Just cs <- getCiSettings (_serverEnv_db se)
wsSend conn (Down_CiSettings $ scrub cs)
sendCiInfo :: ServerEnv -> WS.Connection -> IO ()
sendCiInfo se conn = do
let k = nckToText $ _nixCacheKey_public $ _serverEnv_cacheKey se
wsSend conn (Down_CiInfo k)
subscribeJob :: ServerEnv -> ConnId -> BuildJobId -> IO ()
subscribeJob env connId jid@(BuildJobId jidInt) = do
output <- liftIO $ T.readFile (buildOutputDir </> show jidInt <> ".txt")
sendToConnId (_serverEnv_connRepo env) connId $ Down_JobOutput (jid, output)
atomicModifyIORef' (_serverEnv_buildListeners env) $ \m ->
(M.adjust (S.insert connId) jidInt m, ())
unsubscribeJob :: ServerEnv -> ConnId -> BuildJobId -> IO ()
unsubscribeJob env connId (BuildJobId jidInt) = do
atomicModifyIORef' (_serverEnv_buildListeners env) $ \m ->
(M.adjust (S.delete connId) jidInt m, ())
rerunJob :: ServerEnv -> BuildJobId -> IO ()
rerunJob se (BuildJobId jid) = do
let dbConn = _serverEnv_db se
mjob <- beamQueryConn dbConn $
runSelectReturningOne $ select $ do
job <- all_ (_ciDb_buildJobs ciDb)
guard_ (job ^. buildJob_id ==. (val_ jid))
return job
case mjob of
Nothing -> printf "Job ID %d does not exist\n" jid
Just _ -> do
printf "Re-running job %d\n" jid
t <- getCurrentTime
runBeamSqlite dbConn $ do
runUpdate $
update (_ciDb_buildJobs ciDb)
(\j -> mconcat
[ j ^. buildJob_receivedAt <-. val_ t
, j ^. buildJob_startedAt <-. val_ Nothing
, j ^. buildJob_endedAt <-. val_ Nothing
, j ^. buildJob_status <-. val_ JobPending ])
(\j -> _buildJob_id j ==. val_ jid)
return ()
cancelJobAndRemove :: ServerEnv -> BuildJobId -> IO ()
cancelJobAndRemove env (BuildJobId jid) = do
mwtid <- atomicModifyIORef (_serverEnv_buildThreads env) $ \m ->
let (v,m2) = M.updateLookupWithKey (\_ _ -> Nothing) jid m
in (m2,v)
-- This could be golfed, but probably not worth it
case mwtid of
Nothing -> updateJobStatus env jid JobVanished
Just wtid -> do
mtid <- deRefWeak wtid
maybe (return ()) killThread mtid
updateJobStatus env jid JobCanceled
broadcastJobs (_serverEnv_db env) (_serverEnv_connRepo env)
updateJobStatus :: ServerEnv -> Int32 -> JobStatus -> IO ()
updateJobStatus env jid status =
runBeamSqlite (_serverEnv_db env) $ do
runUpdate $
update (_ciDb_buildJobs ciDb)
(\job -> job ^. buildJob_status <-. val_ status)
(\job -> _buildJob_id job ==. val_ jid)
return ()
connectAccount
:: ServerEnv
-> ConnectedAccountT Maybe
-> IO ()
connectAccount env (ConnectedAccount _ n a pr) = do
beamQuery env $ do
runInsert $ insert (_ciDb_connectedAccounts ciDb) $ insertExpressions
$ maybeToList $ ConnectedAccount default_
<$> (val_ <$> n)
<*> (val_ <$> a)
<*> (val_ <$> pr)
as <- beamQuery env $ do
runSelectReturningList $ select $ all_ (_ciDb_connectedAccounts ciDb)
broadcast (_serverEnv_connRepo env) $ Down_ConnectedAccounts $ map (getScrubbed . scrub) as
listAccounts :: ServerEnv -> WS.Connection -> IO ()
listAccounts env wsConn = do
accounts <- beamQuery env $
runSelectReturningList $ select $ do
all_ (_ciDb_connectedAccounts ciDb)
wsSend wsConn $ Down_ConnectedAccounts $ map (getScrubbed . scrub) accounts
queryAllRepos :: ServerEnv -> IO [Repo]
queryAllRepos env =
beamQuery env $
runSelectReturningList $ select $ do
all_ (_ciDb_repos ciDb)
listRepos :: ServerEnv -> WS.Connection -> IO ()
listRepos env wsConn = do
repos <- queryAllRepos env
wsSend wsConn $ Down_Repos repos
listJobs :: ServerEnv -> WS.Connection -> IO ()
listJobs env conn = do
jobs <- getJobsFromDb (_serverEnv_db env) 20 0
wsSend conn $ Down_Jobs jobs
addRepo
:: ServerEnv
-> WS.Connection
-> RepoT Maybe
-> IO ()
addRepo env wsConn
(Repo _ (ConnectedAccountId (Just o)) (Just n) (Just ns)
(Just nf) (Just attrs) (Just t) (BinaryCacheId c) _) = do
mca <- beamQuery env $ do
runSelectReturningOne $ select $ do
account <- all_ (ciDb ^. ciDb_connectedAccounts)
guard_ (account ^. connectedAccount_id ==. (val_ o))
return account
let insertRepo hid = do
putStrLn "Repository hook setup successful"
beamQuery env $ do
runInsert $ insert (_ciDb_repos ciDb) $ insertExpressions
[Repo default_
(ConnectedAccountId $ val_ o)
(val_ n)
(val_ ns)
(val_ nf)
(val_ attrs)
(val_ t)
(val_ $ BinaryCacheId $ join c)
(val_ hid)
]
case mca of
Nothing -> return ()
Just ca -> do
putStrLn $ "Setting up new webhook for " <> show ca
let wbu = fromMaybe (_serverEnv_publicUrl env)
(_beSettings_webhookBaseUrl $ _serverEnv_settings env)
case _connectedAccount_provider ca of
GitHub -> do
erw <- setupGithubWebhook
wbu
(OAuth $ toS $ _connectedAccount_accessToken ca)
ns n (_serverEnv_secretToken env) AllowInsecure
case erw of
Left e -> wsSend wsConn $ Down_Alert $ "Error setting up webhook: " <> (T.pack $ show e)
Right rw -> do
let Id hid = repoWebhookId rw
insertRepo $ fromIntegral hid
GitLab -> do
mhid <- setupGitlabWebhook
wbu
ns
n
(_connectedAccount_accessToken ca)
(_serverEnv_secretToken env)
case mhid of
Nothing -> putStrLn "Didn't get a hook ID"
Just hid -> insertRepo $ fromIntegral hid
as <- beamQuery env $ do
runSelectReturningList $ select $ all_ (_ciDb_repos ciDb)
broadcast (_serverEnv_connRepo env) $ Down_Repos as
addRepo _ _ _ = putStrLn "AddRepo got bad argument"
deleteRepo :: ServerEnv -> RepoId -> IO ()
deleteRepo env rid = do
mrepo <- beamQuery env $
runSelectReturningOne $ select $ do
repo <- all_ (_ciDb_repos ciDb)
accessAccount <- all_ (_ciDb_connectedAccounts ciDb)
guard_ (repo ^. repo_id ==. (val_ $ repoKeyToInt rid))
guard_ (_repo_accessAccount repo `references_` accessAccount)
return (repo, accessAccount)
case mrepo of
Nothing -> return ()
Just (repo,accessAccount) -> do
case _connectedAccount_provider accessAccount of
GitHub -> do
_ <- executeRequest (OAuth $ toS $ _connectedAccount_accessToken accessAccount) $
deleteRepoWebhookR
(N $ _repo_namespace repo)
(N $ _repo_name repo)
(Id $ fromIntegral $ _repo_hookId repo)
return ()
GitLab -> do
deleteGitlabWebhook
(_repo_namespace repo)
(_repo_name repo)
(_connectedAccount_accessToken accessAccount)
(fromIntegral $ _repo_hookId repo)
beamQuery env $
runDelete $ delete (_ciDb_repos ciDb) $
(\r -> r ^. repo_id ==. val_ (repoKeyToInt rid))
as <- beamQuery env $
runSelectReturningList $ select $
all_ (_ciDb_repos ciDb)
broadcast (_serverEnv_connRepo env) $ Down_Repos as
return ()
delAccounts :: ServerEnv -> WS.Connection -> [ConnectedAccountId] -> IO ()
delAccounts env wsConn cas = do
beamQuery env $
runDelete $ delete (_ciDb_connectedAccounts ciDb) $
(\ca -> ca ^. connectedAccount_id `in_` map (val_ . caKeyToInt) cas)
listAccounts env wsConn
------------------------------------------------------------------------------
listCaches :: ServerEnv -> WS.Connection -> IO ()
listCaches env wsConn = do
accounts <- beamQuery env $
runSelectReturningList $ select $ do
all_ (_ciDb_binaryCaches
ciDb)
wsSend wsConn $ Down_Caches $ map (getScrubbed . scrub) accounts
addCache
:: ServerEnv
-> BinaryCacheT Maybe
-> IO ()
addCache env (BinaryCache _ c) = do
beamQuery env $ do
runInsert $ insert (_ciDb_binaryCaches ciDb) $ insertExpressions
$ maybeToList $ BinaryCache default_
<$> (val_ <$> c)
as <- beamQuery env $ do
runSelectReturningList $ select $ all_ (_ciDb_binaryCaches ciDb)
broadcast (_serverEnv_connRepo env) $ Down_Caches $ map (getScrubbed . scrub) as
delCaches :: ServerEnv -> WS.Connection -> [BinaryCacheId] -> IO ()
delCaches env wsConn cs = do
beamQuery env $
runDelete $ delete (_ciDb_binaryCaches ciDb) $
(\ca -> ca ^. binaryCache_id `in_` map (val_ . binaryCacheKeyToInt) cs)
listCaches env wsConn
| null | https://raw.githubusercontent.com/mightybyte/zeus/6a69c6145560be20ba684630ab531272205206be/backend/src/Backend.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| Generates a signing key for the Zeus nix cache. If there is no key, this
function generates a new one. The key name parameter is recommended to be
the domain name followed by "-1" (or other number) to allow for key
rotation.
Check if any of the commands are irreversible
TODO Probably switch to a connection pool a some point, but we don't
expect a large volume of requests for awhile so this is probably a
very low priority.
This could be golfed, but probably not worth it
---------------------------------------------------------------------------- | # LANGUAGE EmptyCase #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
module Backend where
import Control.Concurrent
import Control.Error
import qualified Control.Exception as E
import Control.Lens
import qualified Control.Monad.Fail as Fail
import qualified Data.Map as M
import Control.Monad
import Control.Monad.Trans
import qualified Data.Aeson as A
import Data.Dependent.Sum (DSum ((:=>)))
import Data.IORef
import Data.Int
import Data.RNG
import qualified Data.Set as S
import Data.String.Conv
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Text.Encoding
import Data.Time
import Data.Maybe (fromMaybe)
import Database.Beam
import Database.Beam.Sqlite
import Database.Beam.Sqlite.Migrate
import Database.Beam.Migrate.Backend
import Database.Beam.Migrate.Simple hiding (migrateScript)
import Database.SQLite.Simple
import GitHub.Auth
import GitHub.Data.Name
import GitHub.Data.Id
import GitHub.Data.Webhooks
import GitHub.Endpoints.Repos.Webhooks
import GitHub.Request
import qualified Network.WebSockets as WS
import Obelisk.Backend
import Obelisk.ExecutableConfig.Lookup
import Obelisk.Route
import Scrub
import Snap.Core
import Snap.Util.FileServe
import System.Directory
import System.Exit
import System.FilePath
import System.Mem.Weak
import System.Process (rawSystem)
import Text.Printf
import Backend.Build
import Backend.Cache
import Backend.CacheServer
import Backend.Common
import Backend.Db
import Backend.DbLib
import Backend.ExecutablePaths
import Backend.Github
import Backend.Gitlab
import Backend.Types.BackendSettings
import Backend.Types.ConnRepo
import Backend.Types.NixCacheKeyPair
import Backend.Types.ServerEnv
import Backend.WsCmds
import Backend.WsUtils
import Common.Api
import Common.Route
import Common.Types.BinaryCache
import Common.Types.BuildJob
import Common.Types.CiSettings
import Common.Types.ConnectedAccount
import Common.Types.JobStatus
import Common.Types.NixCacheKeyPair
import Common.Types.Repo
getSecretToken :: IO Text
getSecretToken = do
let secretFile = "zeus-access-token"
secretExists <- doesFileExist secretFile
if secretExists
then T.strip <$> T.readFile secretFile
else do
rng <- mkRNG
tok <- toS <$> randomToken 32 rng
T.writeFile secretFile tok
return tok
dbConnectInfo :: String
dbConnectInfo = "zeus.db"
getSigningKey :: IO NixCacheKeyPair
getSigningKey = do
Right secret <- readKeyFile signingKeySecretFile
Right public <- readKeyFile signingKeyPublicFile
return $ NixCacheKeyPair secret public
doesSigningKeyExist :: IO Bool
doesSigningKeyExist = do
secretExists <- doesFileExist signingKeySecretFile
publicExists <- doesFileExist signingKeyPublicFile
return $ secretExists && publicExists
getOrCreateSigningKey :: String -> IO NixCacheKeyPair
getOrCreateSigningKey keyName = do
keyExists <- doesSigningKeyExist
when (not keyExists) $ do
let secretFile = signingKeyBaseName <> ".sec"
publicFile = signingKeyBaseName <> ".pub"
args =
[ "--generate-binary-cache-key"
, keyName
, secretFile
, publicFile
]
putStrLn "Generating cache signing key"
putStrLn $ unwords (nixStore : args)
ec <- rawSystem nixStore args
case ec of
ExitFailure c -> error $ printf "Error %d: Could not generate nix cache key" c
ExitSuccess -> return ()
renameFile secretFile signingKeySecretFile
renameFile publicFile signingKeyPublicFile
getSigningKey
getAppCacheKey :: Text -> IO NixCacheKeyPair
getAppCacheKey appRoute = do
let appDomain = T.takeWhile (\c -> c /= ':' && c /= '/') $ T.drop 3 $ snd $ T.breakOn "://" appRoute
getOrCreateSigningKey $ T.unpack $ appDomain <> "-1"
verboseMigrate
:: (Database Sqlite db, Fail.MonadFail m)
=> BeamMigrationBackend Sqlite m
-> CheckedDatabaseSettings Sqlite db
-> m ()
verboseMigrate BeamMigrationBackend { backendActionProvider = actions
, backendGetDbConstraints = getCs }
db =
do actual <- getCs
let expected = collectChecks db
case finalSolution (heuristicSolver actions actual expected) of
Candidates {} -> Fail.fail "autoMigrate: Could not determine migration"
Solved (cmds) ->
case foldMap migrationCommandDataLossPossible cmds of
MigrationKeepsData -> mapM_ (runNoReturn . migrationCommand) cmds
_ -> do
let msg = unlines $
"autoMigrate: Not performing automatic migration due to data loss" :
"Here is a migration script that may or may not be helpful:" : "" :
map (toS . sqliteRenderSyntaxScript . fromSqliteCommand . migrationCommand) cmds
Fail.fail msg
backend :: Backend BackendRoute FrontendRoute
backend = Backend
{ _backend_run = \serve -> do
dbConn <- open dbConnectInfo
runBeamSqliteDebug putStrLn dbConn $ verboseMigrate migrationBackend ciDbChecked
mcs <- getCiSettings dbConn
case mcs of
Nothing -> initCiSettings dbConn defCiSettings
Just _ -> return ()
secretToken <- getSecretToken
connRepo <- newConnRepo
buildThreads <- newIORef mempty
let settingsFile = "backend/settings.json" :: String
allConfigs <- getConfigs
settings <- case M.lookup (toS settingsFile) allConfigs of
Nothing -> return $ BackendSettings Nothing [] Nothing
Just bs -> do
case A.decode $ toS bs of
Nothing -> error ("Error parsing " <> settingsFile)
Just s -> return s
putStrLn $ "read settings: " <> show settings
let appRoute = fromMaybe (error "You must make this server reachable from the outside world, and put that url path in config/common/route")
$ T.strip . decodeUtf8 <$> M.lookup "common/route" allConfigs
listeners <- newIORef mempty
keyPair <- getAppCacheKey appRoute
let env = ServerEnv appRoute settings secretToken dbConn
connRepo buildThreads listeners keyPair
_ <- forkIO $ buildManagerThread env
_ <- forkIO $ cacheManagerThread env
putStrLn "Worker threads forked, starting server..."
serve $ serveBackendRoute env
, _backend_routeEncoder = backendRouteEncoder
}
enforceIpWhitelist :: [Cidr] -> Snap ()
enforceIpWhitelist [] = return ()
enforceIpWhitelist whitelist = do
addr <- getsRequest rqClientAddr
case parseIp (toS addr) of
Left e -> do
serverError $ "Couldn't parse IP returned by Snap: " <> toS e
Right ip ->
when (not $ any (matchesCidr ip) whitelist) $ do
liftIO $ putStrLn $ "Rejecting connection from " <> toS addr
notFound "Not found"
| Serve our file .
serveBackendRoute :: ServerEnv -> R BackendRoute -> Snap ()
serveBackendRoute env = \case
BackendRoute_Cache :=> Identity ps -> do
enforceIpWhitelist (_beSettings_ipWhitelist $ _serverEnv_settings env)
mcs <- liftIO $ getCiSettings (_serverEnv_db env)
case mcs of
Nothing -> liftIO $ putStrLn "Unexpected error: Couldn't get CiSettings"
Just cs ->
if _ciSettings_serveLocalCache cs
then nixCacheRoutes env ps
else notFound "Not found"
BackendRoute_Hook :=> Identity hr -> case hr of
Hook_GitHub :=> _ -> githubHandler env
Hook_GitLab :=> _ -> gitlabHandler env
BackendRoute_Ping :=> _ -> do
addr <- getsRequest rqClientAddr
writeText $ "CLIENT ADDR: " <> toS addr <> "\n"
writeText "PONG\nPONG\nPONG\n"
BackendRoute_RawBuildOut :=> Identity outputFile -> do
modifyResponse (addHeader "Content-Disposition" "inline")
serveFileAs "text/plain" (T.unpack $ "log/builds/" <> outputFile)
BackendRoute_Websocket :=> _ -> do
enforceIpWhitelist (_beSettings_ipWhitelist $ _serverEnv_settings env)
wsHandler $ \conn -> do
cid <- addConnection conn (_serverEnv_connRepo env)
putStrLn $ "Established websocket connection with connId " <> show cid
listJobs env conn
listAccounts env conn
listRepos env conn
listCaches env conn
sendCiInfo env conn
sendCiSettings env conn
talkClient env cid conn
BackendRoute_Missing :=> _ -> do
liftIO $ putStrLn "Unknown backend route"
writeText "Unknown backend route"
talkClient :: ServerEnv -> ConnId -> WS.Connection -> IO ()
talkClient env cid conn = do
E.handle cleanup $ forever $ do
clientCmd <- wsReceive conn
putStrLn "================================="
putStrLn $ "Got Up_ message " <> show clientCmd
case clientCmd of
Left e -> do
putStrLn $ "************************************************"
putStrLn $ "ERROR: websocketHandler couldn't decode message:"
putStrLn e
Right Up_ListAccounts -> listAccounts env conn
Right (Up_ConnectAccount cas) -> mapM_ (connectAccount env) cas
Right (Up_DelAccounts cas) -> delAccounts env conn cas
Right Up_ListRepos -> listRepos env conn
Right (Up_AddRepo rs) -> mapM_ (addRepo env conn) rs
Right (Up_DelRepos rs) -> mapM_ (deleteRepo env) rs
Right Up_GetJobs -> listJobs env conn
Right (Up_SubscribeJobOutput jids) -> mapM_ (subscribeJob env cid) jids
Right (Up_UnsubscribeJobOutput jids) -> mapM_ (unsubscribeJob env cid) jids
Right (Up_CancelJobs jids) -> do
mapM_ (cancelJobAndRemove env) jids
Right (Up_RerunJobs jids) -> do
mapM_ (rerunJob env) jids
Right (Up_GetCiSettings) -> sendCiSettings env conn
Right (Up_UpdateCiSettings cs) -> do
setCiSettings (_serverEnv_db env) cs
wsSend conn (Down_CiSettings $ scrub cs)
Right Up_GetCiInfo -> sendCiInfo env conn
Right Up_ListCaches -> listCaches env conn
Right (Up_AddCache cs) -> mapM_ (addCache env) cs
Right (Up_DelCaches cs) -> delCaches env conn cs
where
cRepo = _serverEnv_connRepo env
cleanup :: E.SomeException -> IO ()
cleanup _ = do
removeConnection cid cRepo
sendCiSettings :: ServerEnv -> WS.Connection -> IO ()
sendCiSettings se conn = do
Just cs <- getCiSettings (_serverEnv_db se)
wsSend conn (Down_CiSettings $ scrub cs)
-- | Send the server's public nix cache key (rendered as text) to the
-- client.
sendCiInfo :: ServerEnv -> WS.Connection -> IO ()
sendCiInfo se conn = do
  let k = nckToText $ _nixCacheKey_public $ _serverEnv_cacheKey se
  wsSend conn (Down_CiInfo k)
-- | Subscribe a connection to a job's live output: first replay the
-- output accumulated on disk so far, then register the connection in
-- the listener map so future lines are forwarded.
-- NOTE(review): 'T.readFile' throws if the output file does not exist
-- yet -- confirm the file is always created before subscription is
-- possible.
subscribeJob :: ServerEnv -> ConnId -> BuildJobId -> IO ()
subscribeJob env connId jid@(BuildJobId jidInt) = do
  output <- liftIO $ T.readFile (buildOutputDir </> show jidInt <> ".txt")
  sendToConnId (_serverEnv_connRepo env) connId $ Down_JobOutput (jid, output)
  atomicModifyIORef' (_serverEnv_buildListeners env) $ \m ->
    (M.adjust (S.insert connId) jidInt m, ())
-- | Remove a connection from a job's output-listener set.  A no-op when
-- the job has no listener entry ('M.adjust' ignores missing keys).
unsubscribeJob :: ServerEnv -> ConnId -> BuildJobId -> IO ()
unsubscribeJob env connId (BuildJobId jidInt) = do
  atomicModifyIORef' (_serverEnv_buildListeners env) $ \m ->
    (M.adjust (S.delete connId) jidInt m, ())
-- | Reset a job's row back to 'JobPending' (refreshing receivedAt and
-- clearing the started/ended timestamps) so the scheduler will pick it
-- up again.  Logs and does nothing when the job ID does not exist.
rerunJob :: ServerEnv -> BuildJobId -> IO ()
rerunJob se (BuildJobId jid) = do
  let dbConn = _serverEnv_db se
  mjob <- beamQueryConn dbConn $
    runSelectReturningOne $ select $ do
      job <- all_ (_ciDb_buildJobs ciDb)
      guard_ (job ^. buildJob_id ==. (val_ jid))
      return job
  case mjob of
    Nothing -> printf "Job ID %d does not exist\n" jid
    Just _ -> do
      printf "Re-running job %d\n" jid
      t <- getCurrentTime
      runBeamSqlite dbConn $ do
        -- receivedAt becomes "now"; started/ended are cleared so the
        -- job looks freshly queued.
        runUpdate $
          update (_ciDb_buildJobs ciDb)
            (\j -> mconcat
                     [ j ^. buildJob_receivedAt <-. val_ t
                     , j ^. buildJob_startedAt <-. val_ Nothing
                     , j ^. buildJob_endedAt <-. val_ Nothing
                     , j ^. buildJob_status <-. val_ JobPending ])
            (\j -> _buildJob_id j ==. val_ jid)
        return ()
-- | Cancel a job: atomically remove its thread entry from the
-- build-thread map, kill the thread if the weak pointer still resolves,
-- and record the final status ('JobVanished' when no thread entry
-- existed, 'JobCanceled' otherwise), then rebroadcast the job list.
-- NOTE(review): original indentation was lost in this dump; the
-- placement of 'broadcastJobs' after the case (both branches) is the
-- plausible reading -- confirm against upstream.
cancelJobAndRemove :: ServerEnv -> BuildJobId -> IO ()
cancelJobAndRemove env (BuildJobId jid) = do
  mwtid <- atomicModifyIORef (_serverEnv_buildThreads env) $ \m ->
    let (v,m2) = M.updateLookupWithKey (\_ _ -> Nothing) jid m
     in (m2,v)
  case mwtid of
    Nothing -> updateJobStatus env jid JobVanished
    Just wtid -> do
      mtid <- deRefWeak wtid
      maybe (return ()) killThread mtid
      updateJobStatus env jid JobCanceled
  broadcastJobs (_serverEnv_db env) (_serverEnv_connRepo env)
-- | Overwrite a single job's status column in the database.
updateJobStatus :: ServerEnv -> Int32 -> JobStatus -> IO ()
updateJobStatus env jid status =
  runBeamSqlite (_serverEnv_db env) $ do
    runUpdate $
      update (_ciDb_buildJobs ciDb)
        (\job -> job ^. buildJob_status <-. val_ status)
        (\job -> _buildJob_id job ==. val_ jid)
    return ()
-- | Insert a newly connected provider account and broadcast the
-- scrubbed account list to every client.  The record arrives as a
-- 'ConnectedAccountT Maybe'; if any field is 'Nothing' the applicative
-- chain collapses to 'Nothing' and 'maybeToList' inserts no rows.
connectAccount
  :: ServerEnv
  -> ConnectedAccountT Maybe
  -> IO ()
connectAccount env (ConnectedAccount _ n a pr) = do
  beamQuery env $ do
    runInsert $ insert (_ciDb_connectedAccounts ciDb) $ insertExpressions
      $ maybeToList $ ConnectedAccount default_
        <$> (val_ <$> n)
        <*> (val_ <$> a)
        <*> (val_ <$> pr)
  as <- beamQuery env $ do
    runSelectReturningList $ select $ all_ (_ciDb_connectedAccounts ciDb)
  broadcast (_serverEnv_connRepo env) $ Down_ConnectedAccounts $ map (getScrubbed . scrub) as
-- | Send the full connected-account list (secrets scrubbed) to one
-- client.
listAccounts :: ServerEnv -> WS.Connection -> IO ()
listAccounts env wsConn = do
  accounts <- beamQuery env $
    runSelectReturningList $ select $ do
      all_ (_ciDb_connectedAccounts ciDb)
  wsSend wsConn $ Down_ConnectedAccounts $ map (getScrubbed . scrub) accounts
-- | Fetch every repo row from the database.
queryAllRepos :: ServerEnv -> IO [Repo]
queryAllRepos env =
  beamQuery env $
    runSelectReturningList $ select $ do
      all_ (_ciDb_repos ciDb)
-- | Send the full repo list to one client.
listRepos :: ServerEnv -> WS.Connection -> IO ()
listRepos env wsConn = do
  repos <- queryAllRepos env
  wsSend wsConn $ Down_Repos repos
-- | Send the most recent jobs to one client.  The constants are a page
-- size of 20 starting at offset 0.
listJobs :: ServerEnv -> WS.Connection -> IO ()
listJobs env conn = do
  jobs <- getJobsFromDb (_serverEnv_db env) 20 0
  wsSend conn $ Down_Jobs jobs
-- | Create a repo: look up its access account, install the provider
-- webhook (GitHub or GitLab), insert the repo row only once a hook ID
-- was obtained, then broadcast the updated repo list.  The first
-- equation requires every field of the partial 'RepoT Maybe' to be
-- present; anything else falls through to the error equation.
addRepo
  :: ServerEnv
  -> WS.Connection
  -> RepoT Maybe
  -> IO ()
addRepo env wsConn
        (Repo _ (ConnectedAccountId (Just o)) (Just n) (Just ns)
              (Just nf) (Just attrs) (Just t) (BinaryCacheId c) _) = do
  mca <- beamQuery env $ do
    runSelectReturningOne $ select $ do
      account <- all_ (ciDb ^. ciDb_connectedAccounts)
      guard_ (account ^. connectedAccount_id ==. (val_ o))
      return account
  -- Shared insert continuation, parameterized by the webhook ID the
  -- provider handed back.
  let insertRepo hid = do
        putStrLn "Repository hook setup successful"
        beamQuery env $ do
          runInsert $ insert (_ciDb_repos ciDb) $ insertExpressions
            [Repo default_
                  (ConnectedAccountId $ val_ o)
                  (val_ n)
                  (val_ ns)
                  (val_ nf)
                  (val_ attrs)
                  (val_ t)
                  (val_ $ BinaryCacheId $ join c)
                  (val_ hid)
            ]
  case mca of
    Nothing -> return ()
    Just ca -> do
      putStrLn $ "Setting up new webhook for " <> show ca
      -- Prefer an explicitly configured webhook base URL, falling back
      -- to the server's public URL.
      let wbu = fromMaybe (_serverEnv_publicUrl env)
                          (_beSettings_webhookBaseUrl $ _serverEnv_settings env)
      case _connectedAccount_provider ca of
        GitHub -> do
          erw <- setupGithubWebhook
            wbu
            (OAuth $ toS $ _connectedAccount_accessToken ca)
            ns n (_serverEnv_secretToken env) AllowInsecure
          case erw of
            -- Hook setup failed: tell the client, insert nothing.
            Left e -> wsSend wsConn $ Down_Alert $ "Error setting up webhook: " <> (T.pack $ show e)
            Right rw -> do
              let Id hid = repoWebhookId rw
              insertRepo $ fromIntegral hid
        GitLab -> do
          mhid <- setupGitlabWebhook
            wbu
            ns
            n
            (_connectedAccount_accessToken ca)
            (_serverEnv_secretToken env)
          case mhid of
            Nothing -> putStrLn "Didn't get a hook ID"
            Just hid -> insertRepo $ fromIntegral hid
  as <- beamQuery env $ do
    runSelectReturningList $ select $ all_ (_ciDb_repos ciDb)
  broadcast (_serverEnv_connRepo env) $ Down_Repos as
addRepo _ _ _ = putStrLn "AddRepo got bad argument"
-- | Delete a repo: remove its provider webhook first (using the stored
-- hook ID and the owning account's token), then delete the row and
-- broadcast the remaining repos.  Unknown IDs are silently ignored.
deleteRepo :: ServerEnv -> RepoId -> IO ()
deleteRepo env rid = do
  -- Join the repo with its access account so the provider token is
  -- available for the webhook-deletion call.
  mrepo <- beamQuery env $
    runSelectReturningOne $ select $ do
      repo <- all_ (_ciDb_repos ciDb)
      accessAccount <- all_ (_ciDb_connectedAccounts ciDb)
      guard_ (repo ^. repo_id ==. (val_ $ repoKeyToInt rid))
      guard_ (_repo_accessAccount repo `references_` accessAccount)
      return (repo, accessAccount)
  case mrepo of
    Nothing -> return ()
    Just (repo,accessAccount) -> do
      case _connectedAccount_provider accessAccount of
        GitHub -> do
          _ <- executeRequest (OAuth $ toS $ _connectedAccount_accessToken accessAccount) $
            deleteRepoWebhookR
              (N $ _repo_namespace repo)
              (N $ _repo_name repo)
              (Id $ fromIntegral $ _repo_hookId repo)
          return ()
        GitLab -> do
          deleteGitlabWebhook
            (_repo_namespace repo)
            (_repo_name repo)
            (_connectedAccount_accessToken accessAccount)
            (fromIntegral $ _repo_hookId repo)
      beamQuery env $
        runDelete $ delete (_ciDb_repos ciDb) $
          (\r -> r ^. repo_id ==. val_ (repoKeyToInt rid))
      as <- beamQuery env $
        runSelectReturningList $ select $
          all_ (_ciDb_repos ciDb)
      broadcast (_serverEnv_connRepo env) $ Down_Repos as
      return ()
-- | Delete the given connected accounts, then resend the remaining
-- list to the requesting client.
delAccounts :: ServerEnv -> WS.Connection -> [ConnectedAccountId] -> IO ()
delAccounts env wsConn cas = do
  beamQuery env $
    runDelete $ delete (_ciDb_connectedAccounts ciDb) $
      (\ca -> ca ^. connectedAccount_id `in_` map (val_ . caKeyToInt) cas)
  listAccounts env wsConn
-- | Send the full binary-cache list (secrets scrubbed) to one client.
listCaches :: ServerEnv -> WS.Connection -> IO ()
listCaches env wsConn = do
  accounts <- beamQuery env $
    runSelectReturningList $ select $ do
      all_ (_ciDb_binaryCaches
             ciDb)
  wsSend wsConn $ Down_Caches $ map (getScrubbed . scrub) accounts
-- | Insert a new binary cache and broadcast the scrubbed cache list to
-- all clients.  As with 'connectAccount', a 'Nothing' field makes
-- 'maybeToList' insert no rows.
addCache
  :: ServerEnv
  -> BinaryCacheT Maybe
  -> IO ()
addCache env (BinaryCache _ c) = do
  beamQuery env $ do
    runInsert $ insert (_ciDb_binaryCaches ciDb) $ insertExpressions
      $ maybeToList $ BinaryCache default_
        <$> (val_ <$> c)
  as <- beamQuery env $ do
    runSelectReturningList $ select $ all_ (_ciDb_binaryCaches ciDb)
  broadcast (_serverEnv_connRepo env) $ Down_Caches $ map (getScrubbed . scrub) as
-- | Delete the given binary caches, then resend the remaining list to
-- the requesting client.
delCaches :: ServerEnv -> WS.Connection -> [BinaryCacheId] -> IO ()
delCaches env wsConn cs = do
  beamQuery env $
    runDelete $ delete (_ciDb_binaryCaches ciDb) $
      (\ca -> ca ^. binaryCache_id `in_` map (val_ . binaryCacheKeyToInt) cs)
  listCaches env wsConn
|
6204e652b3f47d85a14f7d7b4e343ed092c32d709abd6056fc39c0e97267f596 | mokus0/junkbox | gadt_dependent.hs |
- ` ` gadt_dependent ''
- ( c ) 2009 , , Inc.
-
- dependent types using rank - N quantification , GADTs , and
- continuation - passing style ( to avoid escaping types )
- ``gadt_dependent''
- (c) 2009 Cook, J. MR SSD, Inc.
-
- dependent types using rank-N quantification, GADTs, and
- continuation-passing style (to avoid escaping types)
-}
# LANGUAGE
GADTs , EmptyDataDecls , RankNTypes ,
DeriveDataTypeable , ScopedTypeVariables , PatternSignatures ,
TypeFamilies , FlexibleContexts , MultiParamTypeClasses , FlexibleInstances ,
FunctionalDependencies , UndecidableInstances
#
GADTs, EmptyDataDecls, RankNTypes,
DeriveDataTypeable, ScopedTypeVariables, PatternSignatures,
TypeFamilies, FlexibleContexts, MultiParamTypeClasses, FlexibleInstances,
FunctionalDependencies, UndecidableInstances
#-}
module TypeExperiments.Gadt_dependent where
import Data.Typeable
import Prelude hiding (EQ, head, tail, take, reverse, drop, (++), zip, zipWith, splitAt, iterate, min)
data Zero
deriving Typeable
data Succ t
deriving Typeable
type One = Succ Zero
type Two = Succ One
type Three = Succ Two
type Four = Succ Three
type Five = Succ Four
type Six = Succ Five
type Seven = Succ Six
type Eight = Succ Seven
type Nine = Succ Eight
type Ten = Succ Nine
-- | Singleton natural numbers: a value of type @Nat n@ is the unique
-- runtime witness of the type-level natural @n@.  The 'Succ'
-- constructor packages the 'Typeable'/'UniqueNat' evidence of its
-- predecessor so it can be recovered by pattern matching.
data Nat n where
  Zero :: Nat Zero
  Succ :: (Typeable n, UniqueNat n) => Nat n -> Nat (Succ n)
  deriving (Typeable)

instance Show (Nat n) where
  showsPrec p Zero = showString "Zero"
  showsPrec p (Succ n) = showParen (p > 10) (showString "Succ " . showsPrec 11 n)
-- | CPS-encode an 'Integer' as a singleton 'Nat': the continuation is
-- applied to a @Nat n@ whose value equals the input.
--
-- The original second equation used an n+k pattern
-- (@withNum (n+1) f = ...@), a feature removed from the language in
-- Haskell 2010; this version uses a guard and explicit subtraction.
-- Negative input, which previously died with a pattern-match failure,
-- now fails with an explicit message.
withNum :: Integer -> (forall n. Nat n -> t) -> t
withNum 0 f = f Zero
withNum k f
  | k < 0     = error "withNum: negative argument"
  | otherwise = withNum (k - 1) (\m -> case m of
      -- Both branches do the same thing; the case split exists only to
      -- bring the GADT's 'Typeable'/'UniqueNat' constraints into scope
      -- so 'Succ' can be applied.
      m'@Zero     -> f (Succ m')
      m'@(Succ _) -> f (Succ m'))
some handy sample values :
zero = Zero
one = Succ zero
two = Succ one
three = Succ two
four = Succ three
five = Succ four
six = Succ five
seven = Succ six
eight = Succ seven
nine = Succ eight
ten = Succ nine
numValue :: Num t => Nat n -> t
numValue Zero = 0
numValue (Succ n) = (1 +) $! numValue n
data EQ n m
where
EQ :: EQ a a
zeroEQ :: EQ Zero Zero
zeroEQ = EQ
succEQ :: EQ n m -> EQ (Succ n) (Succ m)
succEQ (EQ :: EQ n m) = (EQ :: EQ (Succ n) (Succ m))
data LT n m
where
ZeroLT :: UniqueNat n => LT Zero (Succ n)
SuccLT :: (UniqueNat n, UniqueNat m) => LT n m -> LT (Succ n) (Succ m)
instance Show (LT n m) where
showsPrec p ZeroLT = showString "ZeroLT"
showsPrec p (SuccLT proof) = showParen (p > 10) (showString "SuccLT " . showsPrec 11 proof)
type GT n m = LT m n
class Typeable n => UniqueNat n where
uniqueNat :: Nat n
instance UniqueNat Zero where
uniqueNat = Zero
instance UniqueNat n => UniqueNat (Succ n) where
uniqueNat = Succ uniqueNat
class UniqueNat ( Succ n ) = > UniquePos n
uniquePred :: UniqueNat (Succ n) => Nat n
uniquePred = pred uniqueNat
where
pred :: Nat (Succ n) -> Nat n
pred (Succ n) = n
-- | Decidable equality on singleton naturals in CPS style: call the
-- first continuation with an 'EQ' proof when the two numbers match,
-- otherwise return the fallback value @ne@.
decidable_equality :: (EQ n m -> t) -> t -> Nat n -> Nat m -> t
decidable_equality eq ne Zero Zero = eq (EQ :: EQ Zero Zero)
decidable_equality eq ne Zero _ = ne
decidable_equality eq ne _ Zero = ne
decidable_equality eq ne (Succ n) (Succ m) = decidable_equality eq' ne n m
  -- Lift an equality proof of the predecessors to one of the successors.
  where eq' eqWitness = eq (succEQ eqWitness)
-- | Trichotomy: any two naturals are related by exactly one of
-- (<), (=), (>); dispatch to the matching continuation, passing a
-- proof of the relation.
trichotomy :: (LT n m -> t) -> (EQ n m -> t) -> (GT n m -> t) -> Nat n -> Nat m -> t
trichotomy lt eq gt Zero Zero = eq eqProof
trichotomy lt eq gt Zero (Succ _ :: Nat m) = lt ltProof
trichotomy lt eq gt (Succ _ :: Nat n) Zero = gt ltProof
trichotomy lt eq gt (Succ (n :: Nat n)) (Succ (m :: Nat m)) = trichotomy lt' eq' gt' n m
  where
    -- Weaken each sub-proof by one 'SuccLT' / 'succEQ' step before
    -- handing it to the caller's continuation.
    lt' ltWitness = lt (SuccLT ltWitness)
    eq' eqWitness = eq (succEQ eqWitness)
    gt' ltWitness = gt (SuccLT ltWitness)
type LTE n m = Either (EQ n m) (LT n m)
class ProvablyEQ n m where
eqProof :: EQ n m
instance ProvablyEQ n n where eqProof = EQ
instance ProvablyEQ n m => ProvablyEQ (f n) (f m) where
eqProof = case eqProof :: EQ n m of
EQ -> EQ
class ProvablyLT n m where
ltProof :: LT n m
instance UniqueNat n => ProvablyLT Zero (Succ n) where
ltProof = ZeroLT
instance (UniqueNat n, UniqueNat m, ProvablyLT n m) => ProvablyLT (Succ n) (Succ m) where
ltProof = SuccLT ltProof
withLT :: LT n m -> (forall x. LT x m -> t) -> t
withLT ltProof lt = lt ltProof
a non - zero finite ordinal consists of one of :
-- the next smaller ordinal itself
-- an element of the next smaller ordinal
data Ordinal n where
OrdZero :: UniqueNat n => Ordinal (Succ n)
OrdSucc :: UniqueNat n => Ordinal n -> Ordinal (Succ n)
deriving Typeable
instance Show (Ordinal n) where
showsPrec p (OrdZero) = showParen (p > 10) (showString "OrdZero ")
showsPrec p (OrdSucc n) = showParen (p > 10) (showString "OrdSucc " . showsPrec 11 n)
ordValue :: Ordinal n -> Integer
ordValue OrdZero = 0
ordValue (OrdSucc n) = (1 +) $! ordValue n
cardinality :: Ordinal n -> Nat n
cardinality OrdZero = uniqueNat
cardinality (OrdSucc n) = Succ (cardinality n)
mkOrdinal :: ProvablyLT n m => Nat n -> Ordinal m
mkOrdinal n = mkOrdinalProof ltProof n
where
mkOrdinalProof :: LT n m -> Nat n -> Ordinal m
mkOrdinalProof ZeroLT Zero = OrdZero
mkOrdinalProof (SuccLT ltProof) (Succ n) = OrdSucc (mkOrdinalProof ltProof n)
data Vec n a where
Nil :: Vec Zero a
Cons :: a -> Vec n a -> Vec (Succ n) a
deriving Typeable
instance Functor (Vec n) where
fmap f Nil = Nil
fmap f (Cons a v) = Cons (f a) (fmap f v)
instance Show a => Show (Vec n a) where
showsPrec p = showsPrec p . vecToList
v :: a -> Vec One a
v x = Cons x Nil
vecToList :: Vec n a -> [a]
vecToList Nil = []
vecToList (Cons a v) = a : vecToList v
(!) :: Vec n a -> Ordinal n -> a
Cons a v ! OrdZero = a
Cons a v ! OrdSucc n = v ! n
head :: Vec (Succ n) a -> a
head (Cons a v) = a
tail :: Vec (Succ n) a -> Vec n a
tail (Cons a v) = v
take :: ProvablyLT n (Succ m) => Nat n -> Vec m a -> Vec n a
take = takeWithProof ltProof
where
takeWithProof :: LT n (Succ m) -> Nat n -> Vec m a -> Vec n a
takeWithProof ZeroLT Zero v = Nil
takeWithProof (SuccLT ltProof) (Succ n) (Cons a v) = Cons a (takeWithProof ltProof n v)
snoc :: Vec n a -> a -> Vec (Succ n) a
snoc Nil a = Cons a Nil
snoc (Cons x v) a = Cons x (snoc v a)
reverse :: Vec n a -> Vec n a
reverse Nil = Nil
reverse (Cons a v) = snoc (reverse v) a
data Sum n m s where
ZeroSum :: Sum Zero m m
SuccSum :: Sum n m s -> Sum (Succ n) m (Succ s)
deriving Typeable
class Add n m s | m n -> s, n s -> m
where
addProof :: Sum n m s
instance Add Zero n n
where addProof = ZeroSum
instance Add n m s => Add (Succ n) m (Succ s)
where addProof = SuccSum addProof
drop :: Add n d m => Nat n -> Vec m a -> Vec d a
drop = dropWithProof addProof
where
dropWithProof :: Sum n d m -> Nat n -> Vec m a -> Vec d a
dropWithProof ZeroSum Zero v = v
dropWithProof (SuccSum addProof) (Succ n) (Cons _ v) = dropWithProof addProof n v
splitAt :: Add n d m => Nat n -> Vec m a -> (Vec n a, Vec d a)
splitAt = splitWithProof addProof
where
splitWithProof :: Sum n d m -> Nat n -> Vec m a -> (Vec n a, Vec d a)
splitWithProof ZeroSum Zero v = (Nil, v)
splitWithProof (SuccSum addProof) (Succ n) (Cons a v) = case splitWithProof addProof n v of
(pre, post) -> (Cons a pre, post)
(++) :: Add n m s => Vec n a -> Vec m a -> Vec s a
(++) = appendWithProof addProof
where
appendWithProof :: Sum n m s -> Vec n a -> Vec m a -> Vec s a
appendWithProof ZeroSum Nil v = v
appendWithProof (SuccSum addProof) (Cons a v1) v2 = Cons a (appendWithProof addProof v1 v2)
zip :: Vec n a -> Vec n b -> Vec n (a,b)
zip = zipWith (,)
zipWith :: (a -> b -> c) -> Vec n a -> Vec n b -> Vec n c
zipWith (*) Nil Nil = Nil
zipWith (*) (Cons a v1) (Cons b v2) = Cons (a*b) (zipWith (*) v1 v2)
-- can this be written it a way that it will actually execute?
-- I suspect not...
-- | Intended to build an ever-growing 'Vec' of iterates of @f@ in CPS.
-- As the author's comment above notes, this never returns: each
-- recursive call only grows the continuation, so evaluation diverges.
iterate :: (a -> a) -> a -> (forall n. Vec n a -> t) -> t
iterate f x g = iterate f (f x) (\v -> g (Cons x v))
foldVec :: t -> (a -> t -> t) -> Vec n a -> t
foldVec nil cons Nil = nil
foldVec nil cons (Cons x v) = cons x (foldVec nil cons v)
data Base b where
BNil :: UniqueNat b => Base b
BCons :: UniqueNat b => Ordinal b -> Base b -> Base b
deriving Typeable
instance Show (Base b) where
showsPrec p b@BNil = showString "{base " . shows (numValue (base b)) . showString "}"
showsPrec p (BCons d b) = showDigit d . shows b
showDigit d = showParen (numValue (cardinality d) > 10) (shows (ordValue d))
base :: Base b -> Nat b
base BNil = uniqueNat
base (BCons _ _) = uniqueNat
data Void
type Not a = a -> Void
data If p t f where
T :: p -> t -> If p t f
F :: (Not p) -> f -> If p t f
instance (Show p, Show t, Show f) => Show (If p t f) where
showsPrec p (T proof t) = showParen (p > 10) (showString "T " . showsPrec 11 proof . showChar ' ' . showsPrec 11 t)
showsPrec p (F proof f) = showParen (p > 10) (showString "F _ " . showsPrec 11 f)
-- | Compare two singleton naturals, returning the smaller together
-- with a proof ('T' carrying @LT a b@) or disproof ('F') of @a < b@.
-- The 'Not' witnesses in the 'F' branches call 'error'; they are only
-- acceptable because the corresponding 'LT' values are unconstructible,
-- so the error can never be forced by a well-typed caller.
min :: Nat a -> Nat b -> If (LT a b) (Nat a) (Nat b)
min Zero Zero = F (\_ -> error "impossible proof") Zero
min Zero (Succ _) = T ltProof Zero
min (Succ _) Zero = F (\_ -> error "impossible proof") Zero
min (Succ a) (Succ b) = case min a b of
  T ltProof x -> T (SuccLT ltProof) (Succ x)
  F nltProof x -> F (\(SuccLT pr) -> nltProof pr) (Succ x)
(?) :: If p t f -> (t -> a, f -> a) -> a
T p x ? (f,g) = f x
F p x ? (f,g) = g x
(??) :: If p t f -> (p -> t -> a, Not p -> f -> a) -> a
T p x ?? (f,g) = f p x
F p x ?? (f,g) = g p x
| null | https://raw.githubusercontent.com/mokus0/junkbox/151014bbef9db2b9205209df66c418d6d58b0d9e/Haskell/TypeExperiments/gadt_dependent.hs | haskell | the next smaller ordinal itself
an element of the next smaller ordinal
can this be written it a way that it will actually execute?
I suspect not... |
- ` ` gadt_dependent ''
- ( c ) 2009 , , Inc.
-
- dependent types using rank - N quantification , GADTs , and
- continuation - passing style ( to avoid escaping types )
- ``gadt_dependent''
- (c) 2009 Cook, J. MR SSD, Inc.
-
- dependent types using rank-N quantification, GADTs, and
- continuation-passing style (to avoid escaping types)
-}
# LANGUAGE
GADTs , EmptyDataDecls , RankNTypes ,
DeriveDataTypeable , ScopedTypeVariables , PatternSignatures ,
TypeFamilies , FlexibleContexts , MultiParamTypeClasses , FlexibleInstances ,
FunctionalDependencies , UndecidableInstances
#
GADTs, EmptyDataDecls, RankNTypes,
DeriveDataTypeable, ScopedTypeVariables, PatternSignatures,
TypeFamilies, FlexibleContexts, MultiParamTypeClasses, FlexibleInstances,
FunctionalDependencies, UndecidableInstances
#-}
module TypeExperiments.Gadt_dependent where
import Data.Typeable
import Prelude hiding (EQ, head, tail, take, reverse, drop, (++), zip, zipWith, splitAt, iterate, min)
data Zero
deriving Typeable
data Succ t
deriving Typeable
type One = Succ Zero
type Two = Succ One
type Three = Succ Two
type Four = Succ Three
type Five = Succ Four
type Six = Succ Five
type Seven = Succ Six
type Eight = Succ Seven
type Nine = Succ Eight
type Ten = Succ Nine
data Nat n where
Zero :: Nat Zero
Succ :: (Typeable n, UniqueNat n) => Nat n -> Nat (Succ n)
deriving (Typeable)
instance Show (Nat n) where
showsPrec p Zero = showString "Zero"
showsPrec p (Succ n) = showParen (p > 10) (showString "Succ " . showsPrec 11 n)
withNum :: Integer -> (forall n. Nat n -> t) -> t
withNum 0 f = f Zero
withNum (n+1) f = withNum n (\m -> case m of
m@Zero -> f (Succ m)
m@(Succ n) -> f (Succ m)
)
some handy sample values :
zero = Zero
one = Succ zero
two = Succ one
three = Succ two
four = Succ three
five = Succ four
six = Succ five
seven = Succ six
eight = Succ seven
nine = Succ eight
ten = Succ nine
numValue :: Num t => Nat n -> t
numValue Zero = 0
numValue (Succ n) = (1 +) $! numValue n
data EQ n m
where
EQ :: EQ a a
zeroEQ :: EQ Zero Zero
zeroEQ = EQ
succEQ :: EQ n m -> EQ (Succ n) (Succ m)
succEQ (EQ :: EQ n m) = (EQ :: EQ (Succ n) (Succ m))
data LT n m
where
ZeroLT :: UniqueNat n => LT Zero (Succ n)
SuccLT :: (UniqueNat n, UniqueNat m) => LT n m -> LT (Succ n) (Succ m)
instance Show (LT n m) where
showsPrec p ZeroLT = showString "ZeroLT"
showsPrec p (SuccLT proof) = showParen (p > 10) (showString "SuccLT " . showsPrec 11 proof)
type GT n m = LT m n
class Typeable n => UniqueNat n where
uniqueNat :: Nat n
instance UniqueNat Zero where
uniqueNat = Zero
instance UniqueNat n => UniqueNat (Succ n) where
uniqueNat = Succ uniqueNat
class UniqueNat ( Succ n ) = > UniquePos n
uniquePred :: UniqueNat (Succ n) => Nat n
uniquePred = pred uniqueNat
where
pred :: Nat (Succ n) -> Nat n
pred (Succ n) = n
decidable_equality :: (EQ n m -> t) -> t -> Nat n -> Nat m -> t
decidable_equality eq ne Zero Zero = eq (EQ :: EQ Zero Zero)
decidable_equality eq ne Zero _ = ne
decidable_equality eq ne _ Zero = ne
decidable_equality eq ne (Succ n) (Succ m) = decidable_equality eq' ne n m
where eq' eqWitness = eq (succEQ eqWitness)
trichotomy :: (LT n m -> t) -> (EQ n m -> t) -> (GT n m -> t) -> Nat n -> Nat m -> t
trichotomy lt eq gt Zero Zero = eq eqProof
trichotomy lt eq gt Zero (Succ _ :: Nat m) = lt ltProof
trichotomy lt eq gt (Succ _ :: Nat n) Zero = gt ltProof
trichotomy lt eq gt (Succ (n :: Nat n)) (Succ (m :: Nat m)) = trichotomy lt' eq' gt' n m
where
lt' ltWitness = lt (SuccLT ltWitness)
eq' eqWitness = eq (succEQ eqWitness)
gt' ltWitness = gt (SuccLT ltWitness)
type LTE n m = Either (EQ n m) (LT n m)
class ProvablyEQ n m where
eqProof :: EQ n m
instance ProvablyEQ n n where eqProof = EQ
instance ProvablyEQ n m => ProvablyEQ (f n) (f m) where
eqProof = case eqProof :: EQ n m of
EQ -> EQ
class ProvablyLT n m where
ltProof :: LT n m
instance UniqueNat n => ProvablyLT Zero (Succ n) where
ltProof = ZeroLT
instance (UniqueNat n, UniqueNat m, ProvablyLT n m) => ProvablyLT (Succ n) (Succ m) where
ltProof = SuccLT ltProof
withLT :: LT n m -> (forall x. LT x m -> t) -> t
withLT ltProof lt = lt ltProof
a non - zero finite ordinal consists of one of :
data Ordinal n where
OrdZero :: UniqueNat n => Ordinal (Succ n)
OrdSucc :: UniqueNat n => Ordinal n -> Ordinal (Succ n)
deriving Typeable
instance Show (Ordinal n) where
showsPrec p (OrdZero) = showParen (p > 10) (showString "OrdZero ")
showsPrec p (OrdSucc n) = showParen (p > 10) (showString "OrdSucc " . showsPrec 11 n)
ordValue :: Ordinal n -> Integer
ordValue OrdZero = 0
ordValue (OrdSucc n) = (1 +) $! ordValue n
cardinality :: Ordinal n -> Nat n
cardinality OrdZero = uniqueNat
cardinality (OrdSucc n) = Succ (cardinality n)
mkOrdinal :: ProvablyLT n m => Nat n -> Ordinal m
mkOrdinal n = mkOrdinalProof ltProof n
where
mkOrdinalProof :: LT n m -> Nat n -> Ordinal m
mkOrdinalProof ZeroLT Zero = OrdZero
mkOrdinalProof (SuccLT ltProof) (Succ n) = OrdSucc (mkOrdinalProof ltProof n)
data Vec n a where
Nil :: Vec Zero a
Cons :: a -> Vec n a -> Vec (Succ n) a
deriving Typeable
instance Functor (Vec n) where
fmap f Nil = Nil
fmap f (Cons a v) = Cons (f a) (fmap f v)
instance Show a => Show (Vec n a) where
showsPrec p = showsPrec p . vecToList
v :: a -> Vec One a
v x = Cons x Nil
vecToList :: Vec n a -> [a]
vecToList Nil = []
vecToList (Cons a v) = a : vecToList v
(!) :: Vec n a -> Ordinal n -> a
Cons a v ! OrdZero = a
Cons a v ! OrdSucc n = v ! n
head :: Vec (Succ n) a -> a
head (Cons a v) = a
tail :: Vec (Succ n) a -> Vec n a
tail (Cons a v) = v
take :: ProvablyLT n (Succ m) => Nat n -> Vec m a -> Vec n a
take = takeWithProof ltProof
where
takeWithProof :: LT n (Succ m) -> Nat n -> Vec m a -> Vec n a
takeWithProof ZeroLT Zero v = Nil
takeWithProof (SuccLT ltProof) (Succ n) (Cons a v) = Cons a (takeWithProof ltProof n v)
snoc :: Vec n a -> a -> Vec (Succ n) a
snoc Nil a = Cons a Nil
snoc (Cons x v) a = Cons x (snoc v a)
reverse :: Vec n a -> Vec n a
reverse Nil = Nil
reverse (Cons a v) = snoc (reverse v) a
data Sum n m s where
ZeroSum :: Sum Zero m m
SuccSum :: Sum n m s -> Sum (Succ n) m (Succ s)
deriving Typeable
class Add n m s | m n -> s, n s -> m
where
addProof :: Sum n m s
instance Add Zero n n
where addProof = ZeroSum
instance Add n m s => Add (Succ n) m (Succ s)
where addProof = SuccSum addProof
drop :: Add n d m => Nat n -> Vec m a -> Vec d a
drop = dropWithProof addProof
where
dropWithProof :: Sum n d m -> Nat n -> Vec m a -> Vec d a
dropWithProof ZeroSum Zero v = v
dropWithProof (SuccSum addProof) (Succ n) (Cons _ v) = dropWithProof addProof n v
splitAt :: Add n d m => Nat n -> Vec m a -> (Vec n a, Vec d a)
splitAt = splitWithProof addProof
where
splitWithProof :: Sum n d m -> Nat n -> Vec m a -> (Vec n a, Vec d a)
splitWithProof ZeroSum Zero v = (Nil, v)
splitWithProof (SuccSum addProof) (Succ n) (Cons a v) = case splitWithProof addProof n v of
(pre, post) -> (Cons a pre, post)
(++) :: Add n m s => Vec n a -> Vec m a -> Vec s a
(++) = appendWithProof addProof
where
appendWithProof :: Sum n m s -> Vec n a -> Vec m a -> Vec s a
appendWithProof ZeroSum Nil v = v
appendWithProof (SuccSum addProof) (Cons a v1) v2 = Cons a (appendWithProof addProof v1 v2)
zip :: Vec n a -> Vec n b -> Vec n (a,b)
zip = zipWith (,)
zipWith :: (a -> b -> c) -> Vec n a -> Vec n b -> Vec n c
zipWith (*) Nil Nil = Nil
zipWith (*) (Cons a v1) (Cons b v2) = Cons (a*b) (zipWith (*) v1 v2)
iterate :: (a -> a) -> a -> (forall n. Vec n a -> t) -> t
iterate f x g = iterate f (f x) (\v -> g (Cons x v))
foldVec :: t -> (a -> t -> t) -> Vec n a -> t
foldVec nil cons Nil = nil
foldVec nil cons (Cons x v) = cons x (foldVec nil cons v)
data Base b where
BNil :: UniqueNat b => Base b
BCons :: UniqueNat b => Ordinal b -> Base b -> Base b
deriving Typeable
instance Show (Base b) where
showsPrec p b@BNil = showString "{base " . shows (numValue (base b)) . showString "}"
showsPrec p (BCons d b) = showDigit d . shows b
showDigit d = showParen (numValue (cardinality d) > 10) (shows (ordValue d))
base :: Base b -> Nat b
base BNil = uniqueNat
base (BCons _ _) = uniqueNat
data Void
type Not a = a -> Void
data If p t f where
T :: p -> t -> If p t f
F :: (Not p) -> f -> If p t f
instance (Show p, Show t, Show f) => Show (If p t f) where
showsPrec p (T proof t) = showParen (p > 10) (showString "T " . showsPrec 11 proof . showChar ' ' . showsPrec 11 t)
showsPrec p (F proof f) = showParen (p > 10) (showString "F _ " . showsPrec 11 f)
min :: Nat a -> Nat b -> If (LT a b) (Nat a) (Nat b)
min Zero Zero = F (\_ -> error "impossible proof") Zero
min Zero (Succ _) = T ltProof Zero
min (Succ _) Zero = F (\_ -> error "impossible proof") Zero
min (Succ a) (Succ b) = case min a b of
T ltProof x -> T (SuccLT ltProof) (Succ x)
F nltProof x -> F (\(SuccLT pr) -> nltProof pr) (Succ x)
(?) :: If p t f -> (t -> a, f -> a) -> a
T p x ? (f,g) = f x
F p x ? (f,g) = g x
(??) :: If p t f -> (p -> t -> a, Not p -> f -> a) -> a
T p x ?? (f,g) = f p x
F p x ?? (f,g) = g p x
|
aca7bdab2886a5a093f694843272b44e62f57ff8637549aade8d7c9fcf53c70a | kazu-yamamoto/quic | Parameters.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
# LANGUAGE PatternSynonyms #
module Network.QUIC.Parameters (
Parameters(..)
, defaultParameters
only for Connection
, encodeParameters
, decodeParameters
, AuthCIDs(..)
, defaultAuthCIDs
, setCIDsToParameters
, getCIDsToParameters
) where
import qualified Data.ByteString as BS
import qualified Data.ByteString.Short as Short
import System.IO.Unsafe (unsafeDupablePerformIO)
import Network.QUIC.Imports
import Network.QUIC.Types
-- | Serialize transport parameters to the wire format carried in the
-- TLS quic_transport_parameters extension.
encodeParameters :: Parameters -> ByteString
encodeParameters = encodeParameterList . toParameterList

-- | Parse wire-format transport parameters; 'Nothing' when the
-- underlying key/value list cannot be decoded.
decodeParameters :: ByteString -> Maybe Parameters
decodeParameters bs = fromParameterList <$> decodeParameterList bs
-- Wire representation of a transport parameter: a variable-length
-- integer key and an opaque value.
newtype Key = Key Word32 deriving (Eq, Show)
type Value = ByteString

type ParameterList = [(Key,Value)]

-- Transport parameter IDs.  0x00-0x10 are the registered parameters;
-- VersionInformation, Grease and GreaseQuicBit are extensions.
pattern OriginalDestinationConnectionId :: Key
pattern OriginalDestinationConnectionId = Key 0x00

pattern MaxIdleTimeout :: Key
pattern MaxIdleTimeout = Key 0x01

pattern StateLessResetToken :: Key
pattern StateLessResetToken = Key 0x02

pattern MaxUdpPayloadSize :: Key
pattern MaxUdpPayloadSize = Key 0x03

pattern InitialMaxData :: Key
pattern InitialMaxData = Key 0x04

pattern InitialMaxStreamDataBidiLocal :: Key
pattern InitialMaxStreamDataBidiLocal = Key 0x05

pattern InitialMaxStreamDataBidiRemote :: Key
pattern InitialMaxStreamDataBidiRemote = Key 0x06

pattern InitialMaxStreamDataUni :: Key
pattern InitialMaxStreamDataUni = Key 0x07

pattern InitialMaxStreamsBidi :: Key
pattern InitialMaxStreamsBidi = Key 0x08

pattern InitialMaxStreamsUni :: Key
pattern InitialMaxStreamsUni = Key 0x09

pattern AckDelayExponent :: Key
pattern AckDelayExponent = Key 0x0a

pattern MaxAckDelay :: Key
pattern MaxAckDelay = Key 0x0b

pattern DisableActiveMigration :: Key
pattern DisableActiveMigration = Key 0x0c

pattern PreferredAddress :: Key
pattern PreferredAddress = Key 0x0d

pattern ActiveConnectionIdLimit :: Key
pattern ActiveConnectionIdLimit = Key 0x0e

pattern InitialSourceConnectionId :: Key
pattern InitialSourceConnectionId = Key 0x0f

pattern RetrySourceConnectionId :: Key
pattern RetrySourceConnectionId = Key 0x10

pattern VersionInformation :: Key
pattern VersionInformation = Key 0x11

pattern Grease :: Key
pattern Grease = Key 0xff

pattern GreaseQuicBit :: Key
pattern GreaseQuicBit = Key 0x2ab2
-- | QUIC transport parameters.
data Parameters = Parameters {
    originalDestinationConnectionId :: Maybe CID
  , maxIdleTimeout :: Milliseconds
    -- Restored: this field's declaration was garbled to the bare
    -- remnant "16 bytes" in this copy, but it is assigned by
    -- 'baseParameters' and the StateLessResetToken case of
    -- 'fromParameterList', so the record must declare it.
  , statelessResetToken :: Maybe StatelessResetToken -- 16 bytes
  , maxUdpPayloadSize :: Int
  , initialMaxData :: Int
  , initialMaxStreamDataBidiLocal :: Int
  , initialMaxStreamDataBidiRemote :: Int
  , initialMaxStreamDataUni :: Int
  , initialMaxStreamsBidi :: Int
  , initialMaxStreamsUni :: Int
  , ackDelayExponent :: Int
  , maxAckDelay :: Milliseconds
  , disableActiveMigration :: Bool
  , preferredAddress :: Maybe ByteString -- fixme
  , activeConnectionIdLimit :: Int
  , initialSourceConnectionId :: Maybe CID
  , retrySourceConnectionId :: Maybe CID
  , grease :: Maybe ByteString
  , greaseQuicBit :: Bool
  , versionInformation :: Maybe VersionInfo
  } deriving (Eq,Show)
-- | The default value for QUIC transport parameters.  These are the
-- defaults a parameter takes when it is absent from the peer's
-- encoding; 'fromParameterList' starts from this record.
baseParameters :: Parameters
baseParameters = Parameters {
    originalDestinationConnectionId = Nothing
  , maxIdleTimeout = Milliseconds 0 -- disabled
  , statelessResetToken = Nothing
  , maxUdpPayloadSize = 65527
  , initialMaxData = 0
  , initialMaxStreamDataBidiLocal = 0
  , initialMaxStreamDataBidiRemote = 0
  , initialMaxStreamDataUni = 0
  , initialMaxStreamsBidi = 0
  , initialMaxStreamsUni = 0
  , ackDelayExponent = 3
  , maxAckDelay = Milliseconds 25
  , disableActiveMigration = False
  , preferredAddress = Nothing
  , activeConnectionIdLimit = 2
  , initialSourceConnectionId = Nothing
  , retrySourceConnectionId = Nothing
  , grease = Nothing
  , greaseQuicBit = False
  , versionInformation = Nothing
  }
-- Small codec helpers: transport parameter values are QUIC
-- variable-length integers ('decodeInt' / 'encodeInt').
decInt :: ByteString -> Int
decInt = fromIntegral . decodeInt

encInt :: Int -> ByteString
encInt = encodeInt . fromIntegral

decMilliseconds :: ByteString -> Milliseconds
decMilliseconds = Milliseconds . fromIntegral . decodeInt

encMilliseconds :: Milliseconds -> ByteString
encMilliseconds (Milliseconds n) = encodeInt $ fromIntegral n
-- | Encode a 'VersionInfo' as the version_information parameter value:
-- the chosen version followed by the other versions, each written as a
-- 32-bit word.  'unsafeDupablePerformIO' is confined to a locally
-- allocated write buffer whose contents are fully determined by the
-- argument, so the result is pure.
fromVersionInfo :: Maybe VersionInfo -> Value
fromVersionInfo Nothing = "" -- never reach
fromVersionInfo (Just VersionInfo{..}) = unsafeDupablePerformIO $
    withWriteBuffer len $ \wbuf -> do
      let putVersion (Version ver) = write32 wbuf ver
      putVersion chosenVersion
      mapM_ putVersion otherVersions
  where
    -- 4 bytes per version, including the chosen one.
    len = 4 * (length otherVersions + 1)
-- | Decode the version_information parameter value: a chosen version
-- followed by zero or more other versions, each 4 bytes.  A length too
-- short to hold one version, or not a multiple of 4, decodes to
-- 'brokenVersionInfo' rather than failing outright.
toVersionInfo :: Value -> Maybe VersionInfo
toVersionInfo bs
  | len < 3 || remainder /= 0 = Just brokenVersionInfo
  | otherwise = Just $ unsafeDupablePerformIO $
      withReadBuffer bs $ \rbuf -> do
        let getVersion = Version <$> read32 rbuf
        -- The first word is the chosen version; the remaining
        -- (cnt - 1) words are the other offered versions.
        VersionInfo <$> getVersion <*> replicateM (cnt - 1) getVersion
  where
    len = BS.length bs
    (cnt,remainder) = len `divMod` 4
fromParameterList :: ParameterList -> Parameters
fromParameterList kvs = foldl' update params kvs
where
params = baseParameters
update x (OriginalDestinationConnectionId,v)
= x { originalDestinationConnectionId = Just (toCID v) }
update x (MaxIdleTimeout,v)
= x { maxIdleTimeout = decMilliseconds v }
update x (StateLessResetToken,v)
= x { statelessResetToken = Just (StatelessResetToken $ Short.toShort v) }
update x (MaxUdpPayloadSize,v)
= x { maxUdpPayloadSize = decInt v }
update x (InitialMaxData,v)
= x { initialMaxData = decInt v }
update x (InitialMaxStreamDataBidiLocal,v)
= x { initialMaxStreamDataBidiLocal = decInt v }
update x (InitialMaxStreamDataBidiRemote,v)
= x { initialMaxStreamDataBidiRemote = decInt v }
update x (InitialMaxStreamDataUni,v)
= x { initialMaxStreamDataUni = decInt v }
update x (InitialMaxStreamsBidi,v)
= x { initialMaxStreamsBidi = decInt v }
update x (InitialMaxStreamsUni,v)
= x { initialMaxStreamsUni = decInt v }
update x (AckDelayExponent,v)
= x { ackDelayExponent = decInt v }
update x (MaxAckDelay,v)
= x { maxAckDelay = decMilliseconds v }
update x (DisableActiveMigration,_)
= x { disableActiveMigration = True }
update x (PreferredAddress,v)
= x { preferredAddress = Just v }
update x (ActiveConnectionIdLimit,v)
= x { activeConnectionIdLimit = decInt v }
update x (InitialSourceConnectionId,v)
= x { initialSourceConnectionId = Just (toCID v) }
update x (RetrySourceConnectionId,v)
= x { retrySourceConnectionId = Just (toCID v) }
update x (Grease,v)
= x { grease = Just v }
update x (GreaseQuicBit,_)
= x { greaseQuicBit = True }
update x (VersionInformation,v)
= x { versionInformation = toVersionInfo v }
update x _ = x
diff :: Eq a => Parameters -> (Parameters -> a) -> Key -> (a -> Value) -> Maybe (Key,Value)
diff params label key enc
| val == val0 = Nothing
| otherwise = Just (key, enc val)
where
val = label params
val0 = label baseParameters
toParameterList :: Parameters -> ParameterList
toParameterList p = catMaybes [
diff p originalDestinationConnectionId
OriginalDestinationConnectionId (fromCID . fromJust)
, diff p maxIdleTimeout MaxIdleTimeout encMilliseconds
, diff p statelessResetToken StateLessResetToken encSRT
, diff p maxUdpPayloadSize MaxUdpPayloadSize encInt
, diff p initialMaxData InitialMaxData encInt
, diff p initialMaxStreamDataBidiLocal InitialMaxStreamDataBidiLocal encInt
, diff p initialMaxStreamDataBidiRemote InitialMaxStreamDataBidiRemote encInt
, diff p initialMaxStreamDataUni InitialMaxStreamDataUni encInt
, diff p initialMaxStreamsBidi InitialMaxStreamsBidi encInt
, diff p initialMaxStreamsUni InitialMaxStreamsUni encInt
, diff p ackDelayExponent AckDelayExponent encInt
, diff p maxAckDelay MaxAckDelay encMilliseconds
, diff p disableActiveMigration DisableActiveMigration (const "")
, diff p preferredAddress PreferredAddress fromJust
, diff p activeConnectionIdLimit ActiveConnectionIdLimit encInt
, diff p initialSourceConnectionId
InitialSourceConnectionId (fromCID . fromJust)
, diff p retrySourceConnectionId
RetrySourceConnectionId (fromCID . fromJust)
, diff p greaseQuicBit GreaseQuicBit (const "")
, diff p grease Grease fromJust
, diff p versionInformation VersionInformation fromVersionInfo
]
encSRT :: Maybe StatelessResetToken -> ByteString
encSRT (Just (StatelessResetToken srt)) = Short.fromShort srt
encSRT _ = error "encSRT"
encodeParameterList :: ParameterList -> ByteString
encodeParameterList kvs = unsafeDupablePerformIO $
withWriteBuffer 4096 $ \wbuf -> do -- for grease
mapM_ (put wbuf) kvs
where
put wbuf (Key k,v) = do
encodeInt' wbuf $ fromIntegral k
encodeInt' wbuf $ fromIntegral $ BS.length v
copyByteString wbuf v
decodeParameterList :: ByteString -> Maybe ParameterList
decodeParameterList bs = unsafeDupablePerformIO $ withReadBuffer bs (`go` id)
where
go rbuf build = do
rest1 <- remainingSize rbuf
if rest1 == 0 then
return $ Just (build [])
else do
key <- fromIntegral <$> decodeInt' rbuf
len <- fromIntegral <$> decodeInt' rbuf
val <- extractByteString rbuf len
go rbuf (build . ((Key key,val):))
-- | An example parameters obsoleted in the near future.
defaultParameters :: Parameters
defaultParameters = baseParameters {
30000
2048
, initialMaxData = 1048576
, initialMaxStreamDataBidiLocal = 262144
, initialMaxStreamDataBidiRemote = 262144
, initialMaxStreamDataUni = 262144
, initialMaxStreamsBidi = 100
, initialMaxStreamsUni = 3
, activeConnectionIdLimit = 3
, greaseQuicBit = True
}
data AuthCIDs = AuthCIDs {
initSrcCID :: Maybe CID
, origDstCID :: Maybe CID
, retrySrcCID :: Maybe CID
} deriving (Eq, Show)
defaultAuthCIDs :: AuthCIDs
defaultAuthCIDs = AuthCIDs Nothing Nothing Nothing
setCIDsToParameters :: AuthCIDs -> Parameters -> Parameters
setCIDsToParameters AuthCIDs{..} params = params {
originalDestinationConnectionId = origDstCID
, initialSourceConnectionId = initSrcCID
, retrySourceConnectionId = retrySrcCID
}
getCIDsToParameters :: Parameters -> AuthCIDs
getCIDsToParameters Parameters{..} = AuthCIDs {
origDstCID = originalDestinationConnectionId
, initSrcCID = initialSourceConnectionId
, retrySrcCID = retrySourceConnectionId
}
| null | https://raw.githubusercontent.com/kazu-yamamoto/quic/9dee8dd6f77d636c347ce755e01257d058fdd572/Network/QUIC/Parameters.hs | haskell | # LANGUAGE OverloadedStrings #
| QUIC transport parameters.
fixme
| The default value for QUIC transport parameters.
disabled
never reach
for grease
| An example parameters obsoleted in the near future. | # LANGUAGE RecordWildCards #
# LANGUAGE PatternSynonyms #
module Network.QUIC.Parameters (
Parameters(..)
, defaultParameters
only for Connection
, encodeParameters
, decodeParameters
, AuthCIDs(..)
, defaultAuthCIDs
, setCIDsToParameters
, getCIDsToParameters
) where
import qualified Data.ByteString as BS
import qualified Data.ByteString.Short as Short
import System.IO.Unsafe (unsafeDupablePerformIO)
import Network.QUIC.Imports
import Network.QUIC.Types
encodeParameters :: Parameters -> ByteString
encodeParameters = encodeParameterList . toParameterList
decodeParameters :: ByteString -> Maybe Parameters
decodeParameters bs = fromParameterList <$> decodeParameterList bs
newtype Key = Key Word32 deriving (Eq, Show)
type Value = ByteString
type ParameterList = [(Key,Value)]
pattern OriginalDestinationConnectionId :: Key
pattern OriginalDestinationConnectionId = Key 0x00
pattern MaxIdleTimeout :: Key
pattern MaxIdleTimeout = Key 0x01
pattern StateLessResetToken :: Key
pattern StateLessResetToken = Key 0x02
pattern MaxUdpPayloadSize :: Key
pattern MaxUdpPayloadSize = Key 0x03
pattern InitialMaxData :: Key
pattern InitialMaxData = Key 0x04
pattern InitialMaxStreamDataBidiLocal :: Key
pattern InitialMaxStreamDataBidiLocal = Key 0x05
pattern InitialMaxStreamDataBidiRemote :: Key
pattern InitialMaxStreamDataBidiRemote = Key 0x06
pattern InitialMaxStreamDataUni :: Key
pattern InitialMaxStreamDataUni = Key 0x07
pattern InitialMaxStreamsBidi :: Key
pattern InitialMaxStreamsBidi = Key 0x08
pattern InitialMaxStreamsUni :: Key
pattern InitialMaxStreamsUni = Key 0x09
pattern AckDelayExponent :: Key
pattern AckDelayExponent = Key 0x0a
pattern MaxAckDelay :: Key
pattern MaxAckDelay = Key 0x0b
pattern DisableActiveMigration :: Key
pattern DisableActiveMigration = Key 0x0c
pattern PreferredAddress :: Key
pattern PreferredAddress = Key 0x0d
pattern ActiveConnectionIdLimit :: Key
pattern ActiveConnectionIdLimit = Key 0x0e
pattern InitialSourceConnectionId :: Key
pattern InitialSourceConnectionId = Key 0x0f
pattern RetrySourceConnectionId :: Key
pattern RetrySourceConnectionId = Key 0x10
pattern VersionInformation :: Key
pattern VersionInformation = Key 0x11
pattern Grease :: Key
pattern Grease = Key 0xff
pattern GreaseQuicBit :: Key
pattern GreaseQuicBit = Key 0x2ab2
data Parameters = Parameters {
originalDestinationConnectionId :: Maybe CID
, maxIdleTimeout :: Milliseconds
16 bytes
, maxUdpPayloadSize :: Int
, initialMaxData :: Int
, initialMaxStreamDataBidiLocal :: Int
, initialMaxStreamDataBidiRemote :: Int
, initialMaxStreamDataUni :: Int
, initialMaxStreamsBidi :: Int
, initialMaxStreamsUni :: Int
, ackDelayExponent :: Int
, maxAckDelay :: Milliseconds
, disableActiveMigration :: Bool
, activeConnectionIdLimit :: Int
, initialSourceConnectionId :: Maybe CID
, retrySourceConnectionId :: Maybe CID
, grease :: Maybe ByteString
, greaseQuicBit :: Bool
, versionInformation :: Maybe VersionInfo
} deriving (Eq,Show)
baseParameters :: Parameters
baseParameters = Parameters {
originalDestinationConnectionId = Nothing
, statelessResetToken = Nothing
, maxUdpPayloadSize = 65527
, initialMaxData = 0
, initialMaxStreamDataBidiLocal = 0
, initialMaxStreamDataBidiRemote = 0
, initialMaxStreamDataUni = 0
, initialMaxStreamsBidi = 0
, initialMaxStreamsUni = 0
, ackDelayExponent = 3
, maxAckDelay = Milliseconds 25
, disableActiveMigration = False
, preferredAddress = Nothing
, activeConnectionIdLimit = 2
, initialSourceConnectionId = Nothing
, retrySourceConnectionId = Nothing
, grease = Nothing
, greaseQuicBit = False
, versionInformation = Nothing
}
decInt :: ByteString -> Int
decInt = fromIntegral . decodeInt
encInt :: Int -> ByteString
encInt = encodeInt . fromIntegral
decMilliseconds :: ByteString -> Milliseconds
decMilliseconds = Milliseconds . fromIntegral . decodeInt
encMilliseconds :: Milliseconds -> ByteString
encMilliseconds (Milliseconds n) = encodeInt $ fromIntegral n
fromVersionInfo :: Maybe VersionInfo -> Value
fromVersionInfo (Just VersionInfo{..}) = unsafeDupablePerformIO $
withWriteBuffer len $ \wbuf -> do
let putVersion (Version ver) = write32 wbuf ver
putVersion chosenVersion
mapM_ putVersion otherVersions
where
len = 4 * (length otherVersions + 1)
toVersionInfo :: Value -> Maybe VersionInfo
toVersionInfo bs
| len < 3 || remainder /= 0 = Just brokenVersionInfo
| otherwise = Just $ unsafeDupablePerformIO $
withReadBuffer bs $ \rbuf -> do
let getVersion = Version <$> read32 rbuf
VersionInfo <$> getVersion <*> replicateM (cnt - 1) getVersion
where
len = BS.length bs
(cnt,remainder) = len `divMod` 4
fromParameterList :: ParameterList -> Parameters
fromParameterList kvs = foldl' update params kvs
where
params = baseParameters
update x (OriginalDestinationConnectionId,v)
= x { originalDestinationConnectionId = Just (toCID v) }
update x (MaxIdleTimeout,v)
= x { maxIdleTimeout = decMilliseconds v }
update x (StateLessResetToken,v)
= x { statelessResetToken = Just (StatelessResetToken $ Short.toShort v) }
update x (MaxUdpPayloadSize,v)
= x { maxUdpPayloadSize = decInt v }
update x (InitialMaxData,v)
= x { initialMaxData = decInt v }
update x (InitialMaxStreamDataBidiLocal,v)
= x { initialMaxStreamDataBidiLocal = decInt v }
update x (InitialMaxStreamDataBidiRemote,v)
= x { initialMaxStreamDataBidiRemote = decInt v }
update x (InitialMaxStreamDataUni,v)
= x { initialMaxStreamDataUni = decInt v }
update x (InitialMaxStreamsBidi,v)
= x { initialMaxStreamsBidi = decInt v }
update x (InitialMaxStreamsUni,v)
= x { initialMaxStreamsUni = decInt v }
update x (AckDelayExponent,v)
= x { ackDelayExponent = decInt v }
update x (MaxAckDelay,v)
= x { maxAckDelay = decMilliseconds v }
update x (DisableActiveMigration,_)
= x { disableActiveMigration = True }
update x (PreferredAddress,v)
= x { preferredAddress = Just v }
update x (ActiveConnectionIdLimit,v)
= x { activeConnectionIdLimit = decInt v }
update x (InitialSourceConnectionId,v)
= x { initialSourceConnectionId = Just (toCID v) }
update x (RetrySourceConnectionId,v)
= x { retrySourceConnectionId = Just (toCID v) }
update x (Grease,v)
= x { grease = Just v }
update x (GreaseQuicBit,_)
= x { greaseQuicBit = True }
update x (VersionInformation,v)
= x { versionInformation = toVersionInfo v }
update x _ = x
diff :: Eq a => Parameters -> (Parameters -> a) -> Key -> (a -> Value) -> Maybe (Key,Value)
diff params label key enc
| val == val0 = Nothing
| otherwise = Just (key, enc val)
where
val = label params
val0 = label baseParameters
toParameterList :: Parameters -> ParameterList
toParameterList p = catMaybes [
diff p originalDestinationConnectionId
OriginalDestinationConnectionId (fromCID . fromJust)
, diff p maxIdleTimeout MaxIdleTimeout encMilliseconds
, diff p statelessResetToken StateLessResetToken encSRT
, diff p maxUdpPayloadSize MaxUdpPayloadSize encInt
, diff p initialMaxData InitialMaxData encInt
, diff p initialMaxStreamDataBidiLocal InitialMaxStreamDataBidiLocal encInt
, diff p initialMaxStreamDataBidiRemote InitialMaxStreamDataBidiRemote encInt
, diff p initialMaxStreamDataUni InitialMaxStreamDataUni encInt
, diff p initialMaxStreamsBidi InitialMaxStreamsBidi encInt
, diff p initialMaxStreamsUni InitialMaxStreamsUni encInt
, diff p ackDelayExponent AckDelayExponent encInt
, diff p maxAckDelay MaxAckDelay encMilliseconds
, diff p disableActiveMigration DisableActiveMigration (const "")
, diff p preferredAddress PreferredAddress fromJust
, diff p activeConnectionIdLimit ActiveConnectionIdLimit encInt
, diff p initialSourceConnectionId
InitialSourceConnectionId (fromCID . fromJust)
, diff p retrySourceConnectionId
RetrySourceConnectionId (fromCID . fromJust)
, diff p greaseQuicBit GreaseQuicBit (const "")
, diff p grease Grease fromJust
, diff p versionInformation VersionInformation fromVersionInfo
]
encSRT :: Maybe StatelessResetToken -> ByteString
encSRT (Just (StatelessResetToken srt)) = Short.fromShort srt
encSRT _ = error "encSRT"
encodeParameterList :: ParameterList -> ByteString
encodeParameterList kvs = unsafeDupablePerformIO $
mapM_ (put wbuf) kvs
where
put wbuf (Key k,v) = do
encodeInt' wbuf $ fromIntegral k
encodeInt' wbuf $ fromIntegral $ BS.length v
copyByteString wbuf v
decodeParameterList :: ByteString -> Maybe ParameterList
decodeParameterList bs = unsafeDupablePerformIO $ withReadBuffer bs (`go` id)
where
go rbuf build = do
rest1 <- remainingSize rbuf
if rest1 == 0 then
return $ Just (build [])
else do
key <- fromIntegral <$> decodeInt' rbuf
len <- fromIntegral <$> decodeInt' rbuf
val <- extractByteString rbuf len
go rbuf (build . ((Key key,val):))
defaultParameters :: Parameters
defaultParameters = baseParameters {
30000
2048
, initialMaxData = 1048576
, initialMaxStreamDataBidiLocal = 262144
, initialMaxStreamDataBidiRemote = 262144
, initialMaxStreamDataUni = 262144
, initialMaxStreamsBidi = 100
, initialMaxStreamsUni = 3
, activeConnectionIdLimit = 3
, greaseQuicBit = True
}
data AuthCIDs = AuthCIDs {
initSrcCID :: Maybe CID
, origDstCID :: Maybe CID
, retrySrcCID :: Maybe CID
} deriving (Eq, Show)
defaultAuthCIDs :: AuthCIDs
defaultAuthCIDs = AuthCIDs Nothing Nothing Nothing
setCIDsToParameters :: AuthCIDs -> Parameters -> Parameters
setCIDsToParameters AuthCIDs{..} params = params {
originalDestinationConnectionId = origDstCID
, initialSourceConnectionId = initSrcCID
, retrySourceConnectionId = retrySrcCID
}
getCIDsToParameters :: Parameters -> AuthCIDs
getCIDsToParameters Parameters{..} = AuthCIDs {
origDstCID = originalDestinationConnectionId
, initSrcCID = initialSourceConnectionId
, retrySrcCID = retrySourceConnectionId
}
|
9f3e5f199f321fc519dfab211b3f7b5851428295e07e5ffcf8a253f8253d2f43 | jeroanan/rkt-coreutils | stat.rkt | #lang typed/racket/base
(provide Stat%)
(define-type Stat%
(Class
[get-owner-has-rwx? (-> Boolean)]
[get-owner-has-r? (-> Boolean)]
[get-owner-has-w? (-> Boolean)]
[get-owner-has-x? (-> Boolean)]
[get-group-has-rwx? (-> Boolean)]
[get-group-has-r? (-> Boolean)]
[get-group-has-w? (-> Boolean)]
[get-group-has-x? (-> Boolean)]
[get-other-has-rwx? (-> Boolean)]
[get-other-has-r? (-> Boolean)]
[get-other-has-w? (-> Boolean)]
[get-other-has-x? (-> Boolean)]
[get-is-directory? (-> Boolean)]
[get-is-regular-file? (-> Boolean)]
[get-is-character-device? (-> Boolean)]
[get-is-symbolic-link? (-> Boolean)]
[get-is-fifo? (-> Boolean)]
[get-dev (-> Number)]
[get-uid (-> Number)]
[get-gid (-> Integer)]
[get-accessed-time (-> Integer)]
[get-modified-time (-> Integer)]
[get-created-time (-> Integer)]
[get-size (-> Number)]
[get-blocks (-> Number)]
[get-block-size (-> Number)]
[get-inode (-> Number)]
[get-number-of-hardlinks (-> Number)])) | null | https://raw.githubusercontent.com/jeroanan/rkt-coreutils/571629d1e2562c557ba258b31ce454add2e93dd9/src/repl/typedef/stat.rkt | racket | #lang typed/racket/base
(provide Stat%)
(define-type Stat%
(Class
[get-owner-has-rwx? (-> Boolean)]
[get-owner-has-r? (-> Boolean)]
[get-owner-has-w? (-> Boolean)]
[get-owner-has-x? (-> Boolean)]
[get-group-has-rwx? (-> Boolean)]
[get-group-has-r? (-> Boolean)]
[get-group-has-w? (-> Boolean)]
[get-group-has-x? (-> Boolean)]
[get-other-has-rwx? (-> Boolean)]
[get-other-has-r? (-> Boolean)]
[get-other-has-w? (-> Boolean)]
[get-other-has-x? (-> Boolean)]
[get-is-directory? (-> Boolean)]
[get-is-regular-file? (-> Boolean)]
[get-is-character-device? (-> Boolean)]
[get-is-symbolic-link? (-> Boolean)]
[get-is-fifo? (-> Boolean)]
[get-dev (-> Number)]
[get-uid (-> Number)]
[get-gid (-> Integer)]
[get-accessed-time (-> Integer)]
[get-modified-time (-> Integer)]
[get-created-time (-> Integer)]
[get-size (-> Number)]
[get-blocks (-> Number)]
[get-block-size (-> Number)]
[get-inode (-> Number)]
[get-number-of-hardlinks (-> Number)])) |
|
9729248ea91a53f2a9a4139cc18486571138bfcd280b92e14f96b3f8e41e7eab | conal/lambda-ccc | Bitonic.hs | {-# LANGUAGE GADTs #-}
# OPTIONS_GHC -Wall #
{ - # OPTIONS_GHC -fno - warn - unused - imports # - } -- TEMP
{ - # OPTIONS_GHC -fno - warn - unused - binds # - } -- TEMP
----------------------------------------------------------------------
-- |
-- Module : LambdaCCC.Bitonic
Copyright : ( c ) 2014 Tabula , Inc.
--
-- Maintainer :
-- Stability : experimental
--
Bitonic sort
----------------------------------------------------------------------
module LambdaCCC.Bitonic where
-- TODO: explicit exports
import Prelude hiding (reverse)
import Data.Functor ((<$>))
import Data.Foldable (toList)
import TypeUnary.TyNat (N1,N2,N3,N4)
import TypeUnary.Nat (IsNat(..),Nat(..))
import Circat.Pair
import Circat.RTree
import Circat.Misc (Unop,Reversible(..))
bsort :: (IsNat n, Ord a) => Unop (RTree n a)
bsort = bsort' nat
# INLINE bsort #
bsort' :: Ord a => Nat n -> Unop (RTree n a)
bsort' Zero = id
bsort' (Succ m) = \ (B ts) ->
merge (Succ m) (B (secondP reverse (bsort' m <$> ts)))
# INLINE bsort ' #
-- Equivalently,
bsort ' ( Succ m ) = \ ( B ( u : # v ) ) - >
merge ( Succ m ) ( B ( bsort ' m u : # reverse ( bsort ' m v ) ) )
Bitonic merge
merge :: Ord a => Nat n -> Unop (RTree n a)
merge n = butterfly' n sortP
# INLINE merge #
{--------------------------------------------------------------------
Tests
--------------------------------------------------------------------}
test :: (IsNat n, Ord a) => RTree n a -> [a]
test = toList . bsort
_t1 :: RTree N1 Int
_t1 = tree1 4 3
_t2 :: RTree N2 Int
_t2 = tree2 4 3 1 5
_t3 :: RTree N3 Int
_t3 = tree3 4 3 7 1 9 5 2 6
_t4 :: RTree N4 Int
_t4 = tree4 4 12 3 16 8 11 15 7 1 10 9 14 5 13 2 6
| null | https://raw.githubusercontent.com/conal/lambda-ccc/141a713456d447d27dbe440fa27a9372cd44dc7f/src/LambdaCCC/Bitonic.hs | haskell | # LANGUAGE GADTs #
TEMP
TEMP
--------------------------------------------------------------------
|
Module : LambdaCCC.Bitonic
Maintainer :
Stability : experimental
--------------------------------------------------------------------
TODO: explicit exports
Equivalently,
-------------------------------------------------------------------
Tests
------------------------------------------------------------------- | # OPTIONS_GHC -Wall #
Copyright : ( c ) 2014 Tabula , Inc.
Bitonic sort
module LambdaCCC.Bitonic where
import Prelude hiding (reverse)
import Data.Functor ((<$>))
import Data.Foldable (toList)
import TypeUnary.TyNat (N1,N2,N3,N4)
import TypeUnary.Nat (IsNat(..),Nat(..))
import Circat.Pair
import Circat.RTree
import Circat.Misc (Unop,Reversible(..))
bsort :: (IsNat n, Ord a) => Unop (RTree n a)
bsort = bsort' nat
# INLINE bsort #
bsort' :: Ord a => Nat n -> Unop (RTree n a)
bsort' Zero = id
bsort' (Succ m) = \ (B ts) ->
merge (Succ m) (B (secondP reverse (bsort' m <$> ts)))
# INLINE bsort ' #
bsort ' ( Succ m ) = \ ( B ( u : # v ) ) - >
merge ( Succ m ) ( B ( bsort ' m u : # reverse ( bsort ' m v ) ) )
Bitonic merge
merge :: Ord a => Nat n -> Unop (RTree n a)
merge n = butterfly' n sortP
# INLINE merge #
test :: (IsNat n, Ord a) => RTree n a -> [a]
test = toList . bsort
_t1 :: RTree N1 Int
_t1 = tree1 4 3
_t2 :: RTree N2 Int
_t2 = tree2 4 3 1 5
_t3 :: RTree N3 Int
_t3 = tree3 4 3 7 1 9 5 2 6
_t4 :: RTree N4 Int
_t4 = tree4 4 12 3 16 8 11 15 7 1 10 9 14 5 13 2 6
|
4204d51c53797fbeb83ceb10d7b94a6a3678cc1f105f082a1281d46d60cb9b9b | keera-studios/haskell-titan | ModelEvents.hs | -- |
--
Copyright : ( C ) Keera Studios Ltd , 2018
-- License : GPL-3
Maintainer :
module Model.ReactiveModel.ModelEvents where
import qualified Hails.MVC.Model.ReactiveModel as GRM
import Hails.MVC.Model.ReactiveModel.Events
-- Implement this interface if you want automatic update notification
-- import Hails.MVC.Model.ProtectedModel.UpdatableModel
data ModelEvent = UncapturedEvent
| Initialised
| SelectedFrameChanged
| SelectedFrameInputChanged
| CurSimFrameChanged
| FramesChanged
deriving (Eq,Ord)
instance GRM.Event ModelEvent where
undoStackChangedEvent = UncapturedEvent
instance UpdateNotifiableEvent ModelEvent where
-- updateNotificationEvent = MaxVersionAvailable
instance InitialisedEvent ModelEvent where
initialisedEvent = Initialised
| null | https://raw.githubusercontent.com/keera-studios/haskell-titan/958ddd2b468af00db46004a683c1c7aebe81526c/titan/src/Model/ReactiveModel/ModelEvents.hs | haskell | |
License : GPL-3
Implement this interface if you want automatic update notification
import Hails.MVC.Model.ProtectedModel.UpdatableModel
updateNotificationEvent = MaxVersionAvailable | Copyright : ( C ) Keera Studios Ltd , 2018
Maintainer :
module Model.ReactiveModel.ModelEvents where
import qualified Hails.MVC.Model.ReactiveModel as GRM
import Hails.MVC.Model.ReactiveModel.Events
data ModelEvent = UncapturedEvent
| Initialised
| SelectedFrameChanged
| SelectedFrameInputChanged
| CurSimFrameChanged
| FramesChanged
deriving (Eq,Ord)
instance GRM.Event ModelEvent where
undoStackChangedEvent = UncapturedEvent
instance UpdateNotifiableEvent ModelEvent where
instance InitialisedEvent ModelEvent where
initialisedEvent = Initialised
|
32726346b7e6a0dfcd1d545ae97d94ae632dfd52e597f4365bd84bea40bcdce1 | xnning/haskell-programming-from-first-principles | Main.hs | module Main where
import Control.Monad (forever)
import Data.Char (toLower)
import Data.Maybe (isJust)
import Data.List (intersperse)
import System.Exit (exitSuccess)
import System.Random (randomRIO)
import System.IO
newtype WordList = WordList [String]
allWords :: IO WordList
allWords = do
dict <- readFile "../data/dict.txt"
return $ WordList (lines dict)
minWordLength :: Int
minWordLength = 5
maxWordLength :: Int
maxWordLength = 9
gameWords :: IO WordList
gameWords = do
WordList aw <- allWords
return $ WordList (filter gameLength aw)
where gameLength w =
let l = length w
in l > minWordLength && l < maxWordLength
randomWord :: WordList -> IO String
randomWord (WordList wl) = do
randomIndex <- randomRIO (0, length wl - 1)
return $ wl !! randomIndex
randomWord' :: IO String
randomWord' = gameWords >>= randomWord
data Puzzle = Puzzle String [Maybe Char] [Char] [Char]
instance Show Puzzle where
show (Puzzle _ discovered guessed _) =
(intersperse ' ' $ fmap renderPuzzleChar discovered)
++ " Guessed so far: " ++ guessed
where renderPuzzleChar :: Maybe Char -> Char
renderPuzzleChar Nothing = '_'
renderPuzzleChar (Just c) = c
freshPuzzle :: String -> Puzzle
freshPuzzle wd = Puzzle wd (map (const Nothing) wd) [] []
charInWord :: Puzzle -> Char -> Bool
charInWord (Puzzle wd _ _ _) c = elem c wd
alreadyGuessed :: Puzzle -> Char -> Bool
alreadyGuessed (Puzzle _ _ gs _) c = elem c gs
fillInCharacter :: Puzzle -> Char -> Bool -> Puzzle
fillInCharacter (Puzzle word filledInSofar s incorrect) c correct =
if correct
then Puzzle word newFilledInSofar (c : s) incorrect
else Puzzle word newFilledInSofar (c : s) (c: incorrect)
where zipper guessed wordChar guessChar =
if wordChar == guessed
then Just wordChar
else guessChar
newFilledInSofar = zipWith (zipper c) word filledInSofar
handleGuess :: Puzzle -> Char -> IO Puzzle
handleGuess puzzle guess = do
putStrLn $ "Your guess was: " ++ [guess]
case (charInWord puzzle guess , alreadyGuessed puzzle guess) of
(_, True) -> do
putStrLn "You already guessed that character, pick something else!"
return puzzle
(True, _) -> do
putStrLn "This character was in the word, filling in the word accordingly"
return (fillInCharacter puzzle guess True)
(False, _) -> do
putStrLn "This character wasn't in the word, try again."
return (fillInCharacter puzzle guess False)
gameOver :: Puzzle -> IO ()
gameOver (Puzzle wordToGuess _ guessed incorrect) =
if (length incorrect) > 7 then
do putStrLn "You Lose!"
putStrLn $ "The word was: " ++ wordToGuess
exitSuccess
else
return ()
gameWin :: Puzzle -> IO ()
gameWin (Puzzle _ filledInSofar _ _) =
if all isJust filledInSofar then
do putStrLn "You win!"
exitSuccess
else return ()
runGame :: Puzzle -> IO ()
runGame puzzle = forever $ do
gameWin puzzle
gameOver puzzle
hSetBuffering stdout NoBuffering
putStrLn $ "Current puzzle is: " ++ show puzzle
putStr "Guess a letter: "
guess <- getLine
case guess of
[c] -> handleGuess puzzle c >>= runGame
_ -> putStrLn "Your guess must be a single charactr"
main :: IO ()
main = do
word <- randomWord'
let puzzle = freshPuzzle (fmap toLower word)
runGame puzzle
| null | https://raw.githubusercontent.com/xnning/haskell-programming-from-first-principles/0c49f799cfb6bf2dc05fa1265af3887b795dc5a0/projs/hangman/src/Main.hs | haskell | module Main where
import Control.Monad (forever)
import Data.Char (toLower)
import Data.Maybe (isJust)
import Data.List (intersperse)
import System.Exit (exitSuccess)
import System.Random (randomRIO)
import System.IO
newtype WordList = WordList [String]
allWords :: IO WordList
allWords = do
dict <- readFile "../data/dict.txt"
return $ WordList (lines dict)
minWordLength :: Int
minWordLength = 5
maxWordLength :: Int
maxWordLength = 9
gameWords :: IO WordList
gameWords = do
WordList aw <- allWords
return $ WordList (filter gameLength aw)
where gameLength w =
let l = length w
in l > minWordLength && l < maxWordLength
randomWord :: WordList -> IO String
randomWord (WordList wl) = do
randomIndex <- randomRIO (0, length wl - 1)
return $ wl !! randomIndex
randomWord' :: IO String
randomWord' = gameWords >>= randomWord
data Puzzle = Puzzle String [Maybe Char] [Char] [Char]
instance Show Puzzle where
show (Puzzle _ discovered guessed _) =
(intersperse ' ' $ fmap renderPuzzleChar discovered)
++ " Guessed so far: " ++ guessed
where renderPuzzleChar :: Maybe Char -> Char
renderPuzzleChar Nothing = '_'
renderPuzzleChar (Just c) = c
freshPuzzle :: String -> Puzzle
freshPuzzle wd = Puzzle wd (map (const Nothing) wd) [] []
charInWord :: Puzzle -> Char -> Bool
charInWord (Puzzle wd _ _ _) c = elem c wd
alreadyGuessed :: Puzzle -> Char -> Bool
alreadyGuessed (Puzzle _ _ gs _) c = elem c gs
fillInCharacter :: Puzzle -> Char -> Bool -> Puzzle
fillInCharacter (Puzzle word filledInSofar s incorrect) c correct =
if correct
then Puzzle word newFilledInSofar (c : s) incorrect
else Puzzle word newFilledInSofar (c : s) (c: incorrect)
where zipper guessed wordChar guessChar =
if wordChar == guessed
then Just wordChar
else guessChar
newFilledInSofar = zipWith (zipper c) word filledInSofar
handleGuess :: Puzzle -> Char -> IO Puzzle
handleGuess puzzle guess = do
putStrLn $ "Your guess was: " ++ [guess]
case (charInWord puzzle guess , alreadyGuessed puzzle guess) of
(_, True) -> do
putStrLn "You already guessed that character, pick something else!"
return puzzle
(True, _) -> do
putStrLn "This character was in the word, filling in the word accordingly"
return (fillInCharacter puzzle guess True)
(False, _) -> do
putStrLn "This character wasn't in the word, try again."
return (fillInCharacter puzzle guess False)
gameOver :: Puzzle -> IO ()
gameOver (Puzzle wordToGuess _ guessed incorrect) =
if (length incorrect) > 7 then
do putStrLn "You Lose!"
putStrLn $ "The word was: " ++ wordToGuess
exitSuccess
else
return ()
gameWin :: Puzzle -> IO ()
gameWin (Puzzle _ filledInSofar _ _) =
if all isJust filledInSofar then
do putStrLn "You win!"
exitSuccess
else return ()
runGame :: Puzzle -> IO ()
runGame puzzle = forever $ do
gameWin puzzle
gameOver puzzle
hSetBuffering stdout NoBuffering
putStrLn $ "Current puzzle is: " ++ show puzzle
putStr "Guess a letter: "
guess <- getLine
case guess of
[c] -> handleGuess puzzle c >>= runGame
_ -> putStrLn "Your guess must be a single charactr"
main :: IO ()
main = do
word <- randomWord'
let puzzle = freshPuzzle (fmap toLower word)
runGame puzzle
|
|
50f7854c90128ac82a30d03e5ee67e43106bff92e203870db5a22dc6d126ce35 | goldfirere/glambda | Type.hs | # LANGUAGE DataKinds , TypeOperators , PolyKinds ,
GADTs , RankNTypes , FlexibleInstances #
GADTs, RankNTypes, FlexibleInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Glambda.Type
Copyright : ( C ) 2015
-- License : BSD-style (see LICENSE)
Maintainer : ( )
-- Stability : experimental
--
-- Defines types
--
----------------------------------------------------------------------------
module Language.Glambda.Type (
* Glambda types to be used in Haskell terms
Ty(..), readTyCon,
* Glambda types to be used in Haskell types
STy(..), SCtx(..), ITy(..),
emptyContext, refineTy, unrefineTy, eqSTy,
) where
import Language.Glambda.Util
import Text.PrettyPrint.ANSI.Leijen
-- | Representation of a glambda type
data Ty
= Arr Ty Ty -- ^ A function type
| IntTy
| BoolTy
deriving Eq
infixr 1 `Arr`
-- | Perhaps convert a string representation of a base type into a 'Ty'
readTyCon :: String -> Maybe Ty
readTyCon "Int" = Just IntTy
readTyCon "Bool" = Just BoolTy
readTyCon _ = Nothing
-- | Singleton for a glambda type
data STy :: * -> * where
SArr :: STy arg -> STy res -> STy (arg -> res)
SIntTy :: STy Int
SBoolTy :: STy Bool
infixr 1 `SArr`
| An implicit ' ' , wrapped up in a class constraint
class ITy ty where
sty :: STy ty
instance (ITy arg, ITy res) => ITy (arg -> res) where
sty = sty `SArr` sty
instance ITy Int where
sty = SIntTy
instance ITy Bool where
sty = SBoolTy
-- | Singleton for a typing context
data SCtx :: [*] -> * where
SNil :: SCtx '[]
SCons :: STy h -> SCtx t -> SCtx (h ': t)
infixr 5 `SCons`
-- | The singleton for the empty context
emptyContext :: SCtx '[]
emptyContext = SNil
-- | Convert a 'Ty' into an 'STy'.
refineTy :: Ty -> (forall ty. STy ty -> r) -> r
refineTy (ty1 `Arr` ty2) k
= refineTy ty1 $ \sty1 ->
refineTy ty2 $ \sty2 ->
k (sty1 `SArr` sty2)
refineTy IntTy k = k SIntTy
refineTy BoolTy k = k SBoolTy
-- | Convert an 'STy' into a 'Ty'
unrefineTy :: STy ty -> Ty
unrefineTy (arg `SArr` res) = unrefineTy arg `Arr` unrefineTy res
unrefineTy SIntTy = IntTy
unrefineTy SBoolTy = BoolTy
| Compare two ' 's for equality .
eqSTy :: STy ty1 -> STy ty2 -> Maybe (ty1 :~: ty2)
eqSTy (s1 `SArr` t1) (s2 `SArr` t2)
| Just Refl <- s1 `eqSTy` s2
, Just Refl <- t1 `eqSTy` t2
= Just Refl
eqSTy SIntTy SIntTy = Just Refl
eqSTy SBoolTy SBoolTy = Just Refl
eqSTy _ _ = Nothing
-----------------------------------------
-- Pretty-printing
instance Pretty Ty where
pretty = pretty_ty topPrec
instance Show Ty where
show = render . pretty
instance Pretty (STy ty) where
pretty = pretty . unrefineTy
arrowLeftPrec, arrowRightPrec, arrowPrec :: Prec
arrowLeftPrec = 5
arrowRightPrec = 4.9
arrowPrec = 5
pretty_ty :: Prec -> Ty -> Doc
pretty_ty prec (Arr arg res) = maybeParens (prec >= arrowPrec) $
hsep [ pretty_ty arrowLeftPrec arg
, text "->"
, pretty_ty arrowRightPrec res ]
pretty_ty _ IntTy = text "Int"
pretty_ty _ BoolTy = text "Bool"
| null | https://raw.githubusercontent.com/goldfirere/glambda/b1bd8306ccbb2aef865e8b092aed0b26aa5ba0d6/src/Language/Glambda/Type.hs | haskell | ---------------------------------------------------------------------------
|
Module : Language.Glambda.Type
License : BSD-style (see LICENSE)
Stability : experimental
Defines types
--------------------------------------------------------------------------
| Representation of a glambda type
^ A function type
| Perhaps convert a string representation of a base type into a 'Ty'
| Singleton for a glambda type
| Singleton for a typing context
| The singleton for the empty context
| Convert a 'Ty' into an 'STy'.
| Convert an 'STy' into a 'Ty'
---------------------------------------
Pretty-printing | # LANGUAGE DataKinds , TypeOperators , PolyKinds ,
GADTs , RankNTypes , FlexibleInstances #
GADTs, RankNTypes, FlexibleInstances #-}
Copyright : ( C ) 2015
Maintainer : ( )
module Language.Glambda.Type (
* Glambda types to be used in Haskell terms
Ty(..), readTyCon,
* Glambda types to be used in Haskell types
STy(..), SCtx(..), ITy(..),
emptyContext, refineTy, unrefineTy, eqSTy,
) where
import Language.Glambda.Util
import Text.PrettyPrint.ANSI.Leijen
data Ty
| IntTy
| BoolTy
deriving Eq
infixr 1 `Arr`
readTyCon :: String -> Maybe Ty
readTyCon "Int" = Just IntTy
readTyCon "Bool" = Just BoolTy
readTyCon _ = Nothing
data STy :: * -> * where
SArr :: STy arg -> STy res -> STy (arg -> res)
SIntTy :: STy Int
SBoolTy :: STy Bool
infixr 1 `SArr`
| An implicit ' ' , wrapped up in a class constraint
class ITy ty where
sty :: STy ty
instance (ITy arg, ITy res) => ITy (arg -> res) where
sty = sty `SArr` sty
instance ITy Int where
sty = SIntTy
instance ITy Bool where
sty = SBoolTy
data SCtx :: [*] -> * where
SNil :: SCtx '[]
SCons :: STy h -> SCtx t -> SCtx (h ': t)
infixr 5 `SCons`
emptyContext :: SCtx '[]
emptyContext = SNil
refineTy :: Ty -> (forall ty. STy ty -> r) -> r
refineTy (ty1 `Arr` ty2) k
= refineTy ty1 $ \sty1 ->
refineTy ty2 $ \sty2 ->
k (sty1 `SArr` sty2)
refineTy IntTy k = k SIntTy
refineTy BoolTy k = k SBoolTy
unrefineTy :: STy ty -> Ty
unrefineTy (arg `SArr` res) = unrefineTy arg `Arr` unrefineTy res
unrefineTy SIntTy = IntTy
unrefineTy SBoolTy = BoolTy
| Compare two ' 's for equality .
eqSTy :: STy ty1 -> STy ty2 -> Maybe (ty1 :~: ty2)
eqSTy (s1 `SArr` t1) (s2 `SArr` t2)
| Just Refl <- s1 `eqSTy` s2
, Just Refl <- t1 `eqSTy` t2
= Just Refl
eqSTy SIntTy SIntTy = Just Refl
eqSTy SBoolTy SBoolTy = Just Refl
eqSTy _ _ = Nothing
instance Pretty Ty where
pretty = pretty_ty topPrec
instance Show Ty where
show = render . pretty
instance Pretty (STy ty) where
pretty = pretty . unrefineTy
arrowLeftPrec, arrowRightPrec, arrowPrec :: Prec
arrowLeftPrec = 5
arrowRightPrec = 4.9
arrowPrec = 5
pretty_ty :: Prec -> Ty -> Doc
pretty_ty prec (Arr arg res) = maybeParens (prec >= arrowPrec) $
hsep [ pretty_ty arrowLeftPrec arg
, text "->"
, pretty_ty arrowRightPrec res ]
pretty_ty _ IntTy = text "Int"
pretty_ty _ BoolTy = text "Bool"
|
945568b7b4a2ce691c4f985a4438e28196dba0403632bf639e7313870cf09c02 | uxbox/uxbox-backend | users.clj | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
;;
Copyright ( c ) 2016 < >
(ns uxbox.frontend.users
(:require [clojure.spec :as s]
[promesa.core :as p]
[catacumba.http :as http]
[storages.core :as st]
[storages.util :as path]
[uxbox.media :as media]
[uxbox.images :as images]
[uxbox.util.spec :as us]
[uxbox.services :as sv]
[uxbox.services.users :as svu]
[uxbox.util.response :refer (rsp)]
[uxbox.util.uuid :as uuid]))
;; --- Helpers
(defn- resolve-thumbnail
[user]
(let [opts {:src :photo
:dst :photo
:size [100 100]
:quality 90
:format "jpg"}]
(images/populate-thumbnails user opts)))
;; --- Retrieve Profile
(defn retrieve-profile
[{user :identity}]
(let [message {:user user
:type :retrieve-profile}]
(->> (sv/query message)
(p/map resolve-thumbnail)
(p/map #(http/ok (rsp %))))))
;; --- Update Profile
(s/def ::fullname string?)
(s/def ::metadata any?)
(s/def ::update-profile
(s/keys :req-un [::us/id ::us/username ::us/email
::fullname ::metadata]))
(defn update-profile
[{user :identity data :data}]
(let [data (us/conform ::update-profile data)
message (assoc data
:type :update-profile
:user user)]
(->> (sv/novelty message)
(p/map resolve-thumbnail)
(p/map #(http/ok (rsp %))))))
;; --- Update Password
(s/def ::old-password ::us/password)
(s/def ::update-password
(s/keys :req-un [::us/password ::old-password]))
(defn update-password
[{user :identity data :data}]
(let [data (us/conform ::update-password data)
message (assoc data
:type :update-profile-password
:user user)]
(-> (sv/novelty message)
(p/then #(http/ok (rsp %))))))
;; --- Update Profile Photo
(s/def ::file ::us/uploaded-file)
(s/def ::update-photo (s/keys :req-un [::file]))
(defn update-photo
[{user :identity data :data}]
(letfn [(store-photo [file]
(let [filename (path/base-name file)
storage media/images-storage]
(st/save storage filename file)))
(assign-photo [path]
(sv/novelty {:user user
:path (str path)
:type :update-profile-photo}))
(create-response [_]
(http/no-content))]
(let [{:keys [file]} (us/conform ::update-photo data)]
(->> (store-photo file)
(p/mapcat assign-photo)
(p/map create-response)))))
;; --- Register User
(s/def ::register
(s/keys :req-un [::us/username ::us/email ::us/password ::fullname]))
(defn register-user
[{data :data}]
(let [data (us/conform ::register data)
message (assoc data :type :register-profile)]
(->> (sv/novelty message)
(p/map #(http/ok (rsp %))))))
;; --- Request Password Recovery
;; FIXME: rename for consistency
(s/def ::request-recovery
(s/keys :req-un [::us/username]))
(defn request-recovery
[{data :data}]
(let [data (us/conform ::request-recovery data)
message (assoc data :type :request-profile-password-recovery)]
(->> (sv/novelty message)
(p/map (fn [_] (http/no-content))))))
;; --- Password Recovery
;; FIXME: rename for consistency
(s/def ::token string?)
(s/def ::password-recovery
(s/keys :req-un [::token ::us/password]))
(defn recover-password
[{data :data}]
(let [data (us/conform ::password-recovery data)
message (assoc data :type :recover-profile-password)]
(->> (sv/novelty message)
(p/map (fn [_] (http/no-content))))))
;; --- Valiadate Recovery Token
(defn validate-recovery-token
[{params :route-params}]
(let [message {:type :validate-profile-password-recovery-token
:token (:token params)}]
(->> (sv/query message)
(p/map (fn [v]
(if v
(http/no-content)
(http/not-found "")))))))
| null | https://raw.githubusercontent.com/uxbox/uxbox-backend/036c42db8424be3ac34c38be80577ee279141681/src/uxbox/frontend/users.clj | clojure |
--- Helpers
--- Retrieve Profile
--- Update Profile
--- Update Password
--- Update Profile Photo
--- Register User
--- Request Password Recovery
FIXME: rename for consistency
--- Password Recovery
FIXME: rename for consistency
--- Valiadate Recovery Token | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
Copyright ( c ) 2016 < >
(ns uxbox.frontend.users
(:require [clojure.spec :as s]
[promesa.core :as p]
[catacumba.http :as http]
[storages.core :as st]
[storages.util :as path]
[uxbox.media :as media]
[uxbox.images :as images]
[uxbox.util.spec :as us]
[uxbox.services :as sv]
[uxbox.services.users :as svu]
[uxbox.util.response :refer (rsp)]
[uxbox.util.uuid :as uuid]))
(defn- resolve-thumbnail
[user]
(let [opts {:src :photo
:dst :photo
:size [100 100]
:quality 90
:format "jpg"}]
(images/populate-thumbnails user opts)))
(defn retrieve-profile
[{user :identity}]
(let [message {:user user
:type :retrieve-profile}]
(->> (sv/query message)
(p/map resolve-thumbnail)
(p/map #(http/ok (rsp %))))))
(s/def ::fullname string?)
(s/def ::metadata any?)
(s/def ::update-profile
(s/keys :req-un [::us/id ::us/username ::us/email
::fullname ::metadata]))
(defn update-profile
[{user :identity data :data}]
(let [data (us/conform ::update-profile data)
message (assoc data
:type :update-profile
:user user)]
(->> (sv/novelty message)
(p/map resolve-thumbnail)
(p/map #(http/ok (rsp %))))))
(s/def ::old-password ::us/password)
(s/def ::update-password
(s/keys :req-un [::us/password ::old-password]))
(defn update-password
[{user :identity data :data}]
(let [data (us/conform ::update-password data)
message (assoc data
:type :update-profile-password
:user user)]
(-> (sv/novelty message)
(p/then #(http/ok (rsp %))))))
(s/def ::file ::us/uploaded-file)
(s/def ::update-photo (s/keys :req-un [::file]))
(defn update-photo
[{user :identity data :data}]
(letfn [(store-photo [file]
(let [filename (path/base-name file)
storage media/images-storage]
(st/save storage filename file)))
(assign-photo [path]
(sv/novelty {:user user
:path (str path)
:type :update-profile-photo}))
(create-response [_]
(http/no-content))]
(let [{:keys [file]} (us/conform ::update-photo data)]
(->> (store-photo file)
(p/mapcat assign-photo)
(p/map create-response)))))
(s/def ::register
(s/keys :req-un [::us/username ::us/email ::us/password ::fullname]))
(defn register-user
[{data :data}]
(let [data (us/conform ::register data)
message (assoc data :type :register-profile)]
(->> (sv/novelty message)
(p/map #(http/ok (rsp %))))))
(s/def ::request-recovery
(s/keys :req-un [::us/username]))
(defn request-recovery
[{data :data}]
(let [data (us/conform ::request-recovery data)
message (assoc data :type :request-profile-password-recovery)]
(->> (sv/novelty message)
(p/map (fn [_] (http/no-content))))))
(s/def ::token string?)
(s/def ::password-recovery
(s/keys :req-un [::token ::us/password]))
(defn recover-password
[{data :data}]
(let [data (us/conform ::password-recovery data)
message (assoc data :type :recover-profile-password)]
(->> (sv/novelty message)
(p/map (fn [_] (http/no-content))))))
(defn validate-recovery-token
[{params :route-params}]
(let [message {:type :validate-profile-password-recovery-token
:token (:token params)}]
(->> (sv/query message)
(p/map (fn [v]
(if v
(http/no-content)
(http/not-found "")))))))
|
9ffa48398734f28dacfda0f2e3662aacc6ff83123f2da8b536b8252b058f9fe6 | input-output-hk/project-icarus-importer | OutboundQueueSpec.hs | # LANGUAGE ScopedTypeVariables #
module Test.Network.Broadcast.OutboundQueueSpec
( spec
TODO define elsewhere .
, arbitraryNodeType
, arbitraryRoutes
, arbitraryPeers
) where
import Control.Monad
import Data.List (delete)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Semigroup ((<>))
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Network.Broadcast.OutboundQueue as OutQ
import Network.Broadcast.OutboundQueue.Demo
import Network.Broadcast.OutboundQueue.Types hiding (simplePeers)
import System.Wlog
import Test.Hspec (Spec, describe, it)
import Test.Hspec.QuickCheck (modifyMaxSuccess)
import Test.QuickCheck (Gen, Property, choose, forAll, ioProperty, property,
suchThat, (===))
import qualified Test.QuickCheck as QC
arbitraryNodeType :: Gen NodeType
arbitraryNodeType = QC.elements [minBound .. maxBound]
-- | An arbitrary 'Routes nid' must respect the invariant that an 'nid' does
-- not appear in multiple classifications.
-- You also get the map classifying each node in the routes.
arbitraryRoutes :: Ord nid => Gen nid -> Gen (Routes nid, Map nid NodeType)
arbitraryRoutes genNid = do
First , generate arbitrary sets of cores , relays , and edges .
coreSet <- Set.fromList <$> QC.listOf genNid
let usedNids = coreSet
relaySet <- Set.fromList <$> QC.listOf (genNid `suchThat` (not . flip Set.member usedNids))
let usedNids' = usedNids `Set.union` relaySet
edgeSet <- Set.fromList <$> QC.listOf (genNid `suchThat` (not . flip Set.member usedNids'))
-- Now randomly spread them into conjunctinos of disjunctions.
cores <- spread coreSet
relays <- spread relaySet
edges <- spread edgeSet
let coreMap = M.fromList (flip (,) NodeCore <$> Set.toList coreSet)
relayMap = M.fromList (flip (,) NodeRelay <$> Set.toList relaySet)
edgeMap = M.fromList (flip (,) NodeEdge <$> Set.toList edgeSet)
classification = coreMap <> relayMap <> edgeMap
pure (Routes cores relays edges, classification)
where
-- None of the lists will be empty.
spread :: Set t -> Gen [[t]]
spread s = go [] (Set.size s) (Set.toList s)
where
go :: [[t]] -> Int -> [t] -> Gen [[t]]
go acc 0 [] = pure acc
go acc n ts = do
toTake <- choose (1, n)
let (ac, ts') = splitAt toTake ts
go (ac : acc) (n - toTake) ts'
-- | There are invariants of 'Peers nid' that must be respected (see docs on
that type from ' Network . Broadcast . OutboundQueue . Types ' ) so we ca n't use
-- 'Peers <$> arbitrary <*> arbitrary'.
--
-- Uses 'arbitraryRoutes' then throws in arbitrary-many extra, unclassified
-- peers.
arbitraryPeers :: Ord nid => Gen nid -> Gen NodeType -> Gen (Peers nid)
arbitraryPeers genNid genNodeType = do
(routes, classification) <- arbitraryRoutes genNid
extras <- QC.listOf ((,) <$> (genNid `suchThat` flip M.notMember classification) <*> genNodeType)
pure $ Peers routes (classification <> M.fromList extras)
-- | FIXME should study this test to find out what exactly is does. Also, why
-- it's so slow.
--
-- Potentital confusion: in the text of this definition "node" really means
-- "outbound queue".
testInFlight :: IO Bool
testInFlight = do
removeAllHandlers
-- Set up some test nodes
allNodes <- do
ns <- forM [1..4] $ \nodeIdx -> newNode (C nodeIdx) NodeCore (CommsDelay 0)
forM_ ns $ \theNode -> setPeers theNode (delete theNode ns)
return ns
runEnqueue $ do
-- Send messages asynchronously
forM_ [1..1000] $ \n -> do
send Asynchronous (allNodes !! 0) (MsgTransaction OriginSender) (MsgId n)
-- Abruptly unsubscribe whilst messages are getting delivered
forM_ allNodes $ \theNode -> setPeers theNode []
-- Verify the invariants
let queues = map nodeOutQ allNodes
forM_ queues OutQ.flush
allInFlights <- mapM OutQ.currentlyInFlight queues
return $ all allGreaterThanZero allInFlights
allGreaterThanZero :: M.Map NodeId (M.Map OutQ.Precedence Int) -> Bool
allGreaterThanZero imap = all (>= 0) $ (concatMap M.elems (M.elems imap))
spec :: Spec
spec = describe "OutBoundQ" $ do
We test that ` removePeer ` will never yield something like
` [ [ ] ] ` . See :
it "removePeer doesn't yield empty singletons" $ property prop_removePeer
it "removePeer does preserve order" $ property prop_removePeer_ordering
This test takes quite a long time so we 'll drop the successes .
modifyMaxSuccess (const 10) $ do
-- Simulate a multi-peer conversation and then check
-- that after that we never have a negative count for
the ` qInFlight ` field of a ` OutBoundQ ` .
it "inflight conversations" $ ioProperty $ testInFlight
arbitraryFiniteInt :: Gen Int
arbitraryFiniteInt = choose (0, 1024)
prop_removePeer :: Property
prop_removePeer = forAll (arbitraryPeers arbitraryFiniteInt arbitraryNodeType) $
\(peers :: Peers Int) ->
let ints = Set.toList (peersRouteSet peers)
-- For every key in the route set, we check the property.
in forAll (QC.choose (0, Set.size (peersRouteSet peers) - 1)) $ \idx ->
let toRemove = ints !! idx
Peers{..} = removePeer toRemove peers
in and $ map checkProp [_routesCore peersRoutes, _routesEdge peersRoutes , _routesRelay peersRoutes]
where
checkProp = all (not . null)
-- We purposefully try to remove something which is not there, to make sure
-- removePeer doesn't alter the ordering of the forwading sets.
prop_removePeer_ordering :: Property
prop_removePeer_ordering = forAll (arbitraryPeers arbitraryFiniteInt arbitraryNodeType) $
\(peers :: Peers Int) ->
let stripped = filterEmptySingletons peers
peers' = removePeer (2000 :: Int) stripped
in peers' === stripped
where
filterEmptySingletons p =
let newRoutes = Routes (filter (not . null) (_routesCore . peersRoutes $ p))
(filter (not . null) (_routesRelay . peersRoutes $ p))
(filter (not . null) (_routesEdge . peersRoutes $ p))
in p { peersRoutes = newRoutes }
| null | https://raw.githubusercontent.com/input-output-hk/project-icarus-importer/36342f277bcb7f1902e677a02d1ce93e4cf224f0/networking/test/Test/Network/Broadcast/OutboundQueueSpec.hs | haskell | | An arbitrary 'Routes nid' must respect the invariant that an 'nid' does
not appear in multiple classifications.
You also get the map classifying each node in the routes.
Now randomly spread them into conjunctinos of disjunctions.
None of the lists will be empty.
| There are invariants of 'Peers nid' that must be respected (see docs on
'Peers <$> arbitrary <*> arbitrary'.
Uses 'arbitraryRoutes' then throws in arbitrary-many extra, unclassified
peers.
| FIXME should study this test to find out what exactly is does. Also, why
it's so slow.
Potentital confusion: in the text of this definition "node" really means
"outbound queue".
Set up some test nodes
Send messages asynchronously
Abruptly unsubscribe whilst messages are getting delivered
Verify the invariants
Simulate a multi-peer conversation and then check
that after that we never have a negative count for
For every key in the route set, we check the property.
We purposefully try to remove something which is not there, to make sure
removePeer doesn't alter the ordering of the forwading sets. | # LANGUAGE ScopedTypeVariables #
module Test.Network.Broadcast.OutboundQueueSpec
( spec
TODO define elsewhere .
, arbitraryNodeType
, arbitraryRoutes
, arbitraryPeers
) where
import Control.Monad
import Data.List (delete)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Semigroup ((<>))
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Network.Broadcast.OutboundQueue as OutQ
import Network.Broadcast.OutboundQueue.Demo
import Network.Broadcast.OutboundQueue.Types hiding (simplePeers)
import System.Wlog
import Test.Hspec (Spec, describe, it)
import Test.Hspec.QuickCheck (modifyMaxSuccess)
import Test.QuickCheck (Gen, Property, choose, forAll, ioProperty, property,
suchThat, (===))
import qualified Test.QuickCheck as QC
arbitraryNodeType :: Gen NodeType
arbitraryNodeType = QC.elements [minBound .. maxBound]
arbitraryRoutes :: Ord nid => Gen nid -> Gen (Routes nid, Map nid NodeType)
arbitraryRoutes genNid = do
First , generate arbitrary sets of cores , relays , and edges .
coreSet <- Set.fromList <$> QC.listOf genNid
let usedNids = coreSet
relaySet <- Set.fromList <$> QC.listOf (genNid `suchThat` (not . flip Set.member usedNids))
let usedNids' = usedNids `Set.union` relaySet
edgeSet <- Set.fromList <$> QC.listOf (genNid `suchThat` (not . flip Set.member usedNids'))
cores <- spread coreSet
relays <- spread relaySet
edges <- spread edgeSet
let coreMap = M.fromList (flip (,) NodeCore <$> Set.toList coreSet)
relayMap = M.fromList (flip (,) NodeRelay <$> Set.toList relaySet)
edgeMap = M.fromList (flip (,) NodeEdge <$> Set.toList edgeSet)
classification = coreMap <> relayMap <> edgeMap
pure (Routes cores relays edges, classification)
where
spread :: Set t -> Gen [[t]]
spread s = go [] (Set.size s) (Set.toList s)
where
go :: [[t]] -> Int -> [t] -> Gen [[t]]
go acc 0 [] = pure acc
go acc n ts = do
toTake <- choose (1, n)
let (ac, ts') = splitAt toTake ts
go (ac : acc) (n - toTake) ts'
that type from ' Network . Broadcast . OutboundQueue . Types ' ) so we ca n't use
arbitraryPeers :: Ord nid => Gen nid -> Gen NodeType -> Gen (Peers nid)
arbitraryPeers genNid genNodeType = do
(routes, classification) <- arbitraryRoutes genNid
extras <- QC.listOf ((,) <$> (genNid `suchThat` flip M.notMember classification) <*> genNodeType)
pure $ Peers routes (classification <> M.fromList extras)
testInFlight :: IO Bool
testInFlight = do
removeAllHandlers
allNodes <- do
ns <- forM [1..4] $ \nodeIdx -> newNode (C nodeIdx) NodeCore (CommsDelay 0)
forM_ ns $ \theNode -> setPeers theNode (delete theNode ns)
return ns
runEnqueue $ do
forM_ [1..1000] $ \n -> do
send Asynchronous (allNodes !! 0) (MsgTransaction OriginSender) (MsgId n)
forM_ allNodes $ \theNode -> setPeers theNode []
let queues = map nodeOutQ allNodes
forM_ queues OutQ.flush
allInFlights <- mapM OutQ.currentlyInFlight queues
return $ all allGreaterThanZero allInFlights
allGreaterThanZero :: M.Map NodeId (M.Map OutQ.Precedence Int) -> Bool
allGreaterThanZero imap = all (>= 0) $ (concatMap M.elems (M.elems imap))
spec :: Spec
spec = describe "OutBoundQ" $ do
We test that ` removePeer ` will never yield something like
` [ [ ] ] ` . See :
it "removePeer doesn't yield empty singletons" $ property prop_removePeer
it "removePeer does preserve order" $ property prop_removePeer_ordering
This test takes quite a long time so we 'll drop the successes .
modifyMaxSuccess (const 10) $ do
the ` qInFlight ` field of a ` OutBoundQ ` .
it "inflight conversations" $ ioProperty $ testInFlight
arbitraryFiniteInt :: Gen Int
arbitraryFiniteInt = choose (0, 1024)
prop_removePeer :: Property
prop_removePeer = forAll (arbitraryPeers arbitraryFiniteInt arbitraryNodeType) $
\(peers :: Peers Int) ->
let ints = Set.toList (peersRouteSet peers)
in forAll (QC.choose (0, Set.size (peersRouteSet peers) - 1)) $ \idx ->
let toRemove = ints !! idx
Peers{..} = removePeer toRemove peers
in and $ map checkProp [_routesCore peersRoutes, _routesEdge peersRoutes , _routesRelay peersRoutes]
where
checkProp = all (not . null)
prop_removePeer_ordering :: Property
prop_removePeer_ordering = forAll (arbitraryPeers arbitraryFiniteInt arbitraryNodeType) $
\(peers :: Peers Int) ->
let stripped = filterEmptySingletons peers
peers' = removePeer (2000 :: Int) stripped
in peers' === stripped
where
filterEmptySingletons p =
let newRoutes = Routes (filter (not . null) (_routesCore . peersRoutes $ p))
(filter (not . null) (_routesRelay . peersRoutes $ p))
(filter (not . null) (_routesEdge . peersRoutes $ p))
in p { peersRoutes = newRoutes }
|
c7a35e6c4752c6bdb110e14d134f0ae4798d3126321c99228c8df646c7aa3317 | roman01la/advent-of-code-2018 | day3.clj | (ns aoc2018.day3)
(defn read-id [s]
(->> (re-matches #"#(\d+) @ (\d+),(\d+): (\d+)x(\d+)" s)
rest
(map read-string)
(zipmap [:id :x :y :w :h])))
(def input
(->> (slurp "resources/aoc2018/day3.txt")
clojure.string/split-lines
(map read-id)))
(defn rect->area [{:keys [x y w h]}]
(for [x (range x (+ x w))
y (range y (+ y h))]
[x y]))
(defn add-rect [canvas rect]
(->> (rect->area rect)
(reduce
(fn [canvas point]
(update canvas point conj (:id rect)))
canvas)))
(defn overlapping? [ids]
(> (count ids) 1))
(defn overlapping-areas [input]
(->> (reduce add-rect {} input)
vals
(filter overlapping?)))
(defn part-1 []
(count (overlapping-areas input)))
(defn part-2 []
(let [overlapping-ids (reduce into #{} (overlapping-areas input))
all-ids (map :id input)]
(first (remove overlapping-ids all-ids))))
| null | https://raw.githubusercontent.com/roman01la/advent-of-code-2018/be377fa2fe25653760e20d6872e598695f7f3de7/src/aoc2018/day3.clj | clojure | (ns aoc2018.day3)
(defn read-id [s]
(->> (re-matches #"#(\d+) @ (\d+),(\d+): (\d+)x(\d+)" s)
rest
(map read-string)
(zipmap [:id :x :y :w :h])))
(def input
(->> (slurp "resources/aoc2018/day3.txt")
clojure.string/split-lines
(map read-id)))
(defn rect->area [{:keys [x y w h]}]
(for [x (range x (+ x w))
y (range y (+ y h))]
[x y]))
(defn add-rect [canvas rect]
(->> (rect->area rect)
(reduce
(fn [canvas point]
(update canvas point conj (:id rect)))
canvas)))
(defn overlapping? [ids]
(> (count ids) 1))
(defn overlapping-areas [input]
(->> (reduce add-rect {} input)
vals
(filter overlapping?)))
(defn part-1 []
(count (overlapping-areas input)))
(defn part-2 []
(let [overlapping-ids (reduce into #{} (overlapping-areas input))
all-ids (map :id input)]
(first (remove overlapping-ids all-ids))))
|
|
f5230e1a61a5f69485fb336c1e316da75577916bfca89a5eeac2f085bfbc6a89 | dsheets/ocaml-unix-unistd | unix_unistd_bindings.ml |
* Copyright ( c ) 2016
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* Copyright (c) 2016 Jeremy Yallop
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
open Ctypes
open Posix_types
let fd = Unix_representations
.(view int ~read:file_descr_of_int ~write:int_of_file_descr)
module C(F: Cstubs.FOREIGN) = struct
open F
let lseek = foreign "lseek"
(fd @-> off_t @-> int @-> returning off_t)
let unlink = foreign "unlink"
(string @-> returning int)
let rmdir = foreign "rmdir"
(string @-> returning int)
let write = foreign "write"
(fd @-> ptr void @-> size_t @-> returning ssize_t)
let pwrite = foreign "pwrite"
(fd @-> ptr void @-> size_t @-> off_t @-> returning ssize_t)
let read = foreign "read"
(fd @-> ptr void @-> size_t @-> returning ssize_t)
let pread = foreign "pread"
(fd @-> ptr void @-> size_t @-> off_t @-> returning ssize_t)
let close = foreign "close"
(fd @-> returning int)
let access = foreign "access"
(string @-> int @-> returning int)
let readlink = foreign "readlink"
(string @-> ptr char @-> size_t @-> returning ssize_t)
let symlink = foreign "symlink"
(string @-> string @-> returning int)
let truncate = foreign "truncate"
(string @-> off_t @-> returning int)
let ftruncate = foreign "ftruncate"
(fd @-> off_t @-> returning int)
let chown = foreign "chown"
(string @-> uid_t @-> gid_t @-> returning int)
let fchown = foreign "fchown"
(fd @-> uid_t @-> gid_t @-> returning int)
let seteuid = foreign "seteuid"
(uid_t @-> returning int)
let setegid = foreign "setegid"
(gid_t @-> returning int)
end
| null | https://raw.githubusercontent.com/dsheets/ocaml-unix-unistd/20187eb4d160703bdd872d2b5df87c4da9de81c7/lib_gen/unix_unistd_bindings.ml | ocaml |
* Copyright ( c ) 2016
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* Copyright (c) 2016 Jeremy Yallop
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
open Ctypes
open Posix_types
let fd = Unix_representations
.(view int ~read:file_descr_of_int ~write:int_of_file_descr)
module C(F: Cstubs.FOREIGN) = struct
open F
let lseek = foreign "lseek"
(fd @-> off_t @-> int @-> returning off_t)
let unlink = foreign "unlink"
(string @-> returning int)
let rmdir = foreign "rmdir"
(string @-> returning int)
let write = foreign "write"
(fd @-> ptr void @-> size_t @-> returning ssize_t)
let pwrite = foreign "pwrite"
(fd @-> ptr void @-> size_t @-> off_t @-> returning ssize_t)
let read = foreign "read"
(fd @-> ptr void @-> size_t @-> returning ssize_t)
let pread = foreign "pread"
(fd @-> ptr void @-> size_t @-> off_t @-> returning ssize_t)
let close = foreign "close"
(fd @-> returning int)
let access = foreign "access"
(string @-> int @-> returning int)
let readlink = foreign "readlink"
(string @-> ptr char @-> size_t @-> returning ssize_t)
let symlink = foreign "symlink"
(string @-> string @-> returning int)
let truncate = foreign "truncate"
(string @-> off_t @-> returning int)
let ftruncate = foreign "ftruncate"
(fd @-> off_t @-> returning int)
let chown = foreign "chown"
(string @-> uid_t @-> gid_t @-> returning int)
let fchown = foreign "fchown"
(fd @-> uid_t @-> gid_t @-> returning int)
let seteuid = foreign "seteuid"
(uid_t @-> returning int)
let setegid = foreign "setegid"
(gid_t @-> returning int)
end
|
|
69516afe67b971c59fbe962bde2b135ad7f3966c8bbe120bda7622fd0639204d | yetibot/core | profiles.sample.clj | Sample profiles.clj for Yetibot configuration .
equivalent to config.sample.edn
;;
;; It defines a dev profile, but you may want to share much of the configuration
between dev and prod , using Composite Profiles , optionally overriding
;; specific differences between dev and prod:
;; #composite-profiles
;;
Config is loaded using ` environ : `
;; And exploded into nested maps using `dec`:
{:dev
{:env
{:yetibot-log-level "debug"
:yetibot-log-path "/var/log/yetibot/yetibot.log"
:yetibot-log-rolling-enabled "true"
By default uses the ! prefix to match commands .
You can use this configuration to customize the prefix used by
:yetibot-command-prefix ","
;; Whether or not embedded commands should be globally available (enabled by
;; default)
:yetibot-command-embedded-enabled "false"
;; Whether to enable having a fallback command. Default is true.
:yetibot-command-fallback-enabled "true"
;; Override the default fallback help text. Default is empty.
:yetibot-command-fallback-help-text "Welcome to Yetibot 👋"
;; the default command to fall back to if no other commands match
:yetibot-default-command "giphy"
Whitelists and blackists : these can be used to enable / disable specific
;; commands. Only one of these must be specified. If both are specified, it
is considered an error and will crash on startup . By default there
;; is no whitelist or blacklist.
;;
Whitelist : when whitelist is specified , all commands are disabled except
;; those present in the `whitelist` collection. Example:
;;
;; :yetibot-command-whitelist-0 "echo"
;; :yetibot-command-whitelist-1 "list"
;;
;; Blacklist: when blacklist is specified, all commands are enabled except
;; those present in the `blacklist` collection. Example:
;;
;; :yetibot-command-blacklist-0 "echo"
;; :yetibot-command-blacklist-1 "list"
Yetibot needs a Postgres instance to run against .
:yetibot-db-url "postgresql:5432/yetibot"
:yetibot-db-table-prefix "yetibot_"
;; Storing of channel history in the history table is on by default
:yetibot-history-disabled "false"
;; ADAPTERS
;; Yetibot can listen on multiple instances of each adapters type. Current
adapter types are Slack and IRC .
;;
;; Each config map must have:
;; - a unique key (i.e. uuid)"
;; - a :type key with value "slack" or "irc"
;;
Example configuring 3 adapters : 2 Slacks and 1 IRC :
:yetibot-adapters-myteam-type "slack"
:yetibot-adapters-myteam-token "xoxb-111111111111111111111111111111111111"
:yetibot-adapters-k8s-type "slack"
:yetibot-adapters-k8s-token "xoxb-k8s-slack-9999999999999999"
:yetibot-adapters-freenode-type "irc"
:yetibot-adapters-freenode-host "chat.freenode.net"
:yetibot-adapters-freenode-port "7070"
:yetibot-adapters-freenode-ssl "true"
:yetibot-adapters-freenode-username "yetibot"
:yetibot-adapters-mymattermost-type "mattermost"
:yetibot-adapters-mymattermost-host "yetibot-mattermost.herokuapp.com"
:yetibot-adapters-mymattermost-token "h1111111111111111111111111"
:yetibot-adapters-mymattermost-secure "true" ;; true by default
Listens on port 3000 but this may be different for you if you ( e.g. if you
use a load balancer or map ports in Docker ) .
:yetibot-url ":3000"
;;
;; WORK
;;
:yetibot-github-token ""
:yetibot-github-org-0 ""
:yetibot-github-org-1 ""
: endpoint is optional : only specify if using GitHub Enterprise .
:yetibot-github-endpoint ""
;; `jira`
:yetibot-jira-domain ""
:yetibot-jira-user ""
:yetibot-jira-password ""
:yetibot-jira-projects-0-key "FOO"
:yetibot-jira-projects-0-default-version-id "42"
:yetibot-jira-default-issue-type-id "3"
:yetibot-jira-subtask-issue-type-id "27"
:yetibot-jira-default-project-key "Optional"
:yetibot-jira-cloud "true"
;; s3
:yetibot-s3-access-key ""
:yetibot-s3-secret-key ""
;; send and receive emails with `mail`
:yetibot-mail-host ""
:yetibot-mail-user ""
:yetibot-mail-pass ""
:yetibot-mail-from ""
:yetibot-mail-bcc ""
;;
;; FUN
;;
;; `giphy`
:yetibot-giphy-key ""
;; `meme`
:yetibot-imgflip-username ""
:yetibot-imgflip-password ""
;;
;; INFOs
;;
Alpha Vantage ( stock data )
:yetibot-alphavantage-key ""
` google `
:yetibot-google-api-key ""
:yetibot-google-custom-search-engine-id ""
:yetibot-google-options-safe "high"
;; `ebay`
:yetibot-ebay-appid ""
;; `twitter`: stream tweets from followers and followed topics directly into
;; chat, and post tweets
:yetibot-twitter-consumer-key ""
:yetibot-twitter-consumer-secret ""
:yetibot-twitter-token ""
:yetibot-twitter-secret ""
ISO 639 - 1 code : -of-ISO-639-1-codes
:yetibot-twitter-search-lang "en"
;; `jen` - Jenkins
instances config are mutable , and are therefore not defined in
this config . Instead , add them at runtime . See ` ! help for more info .
How long to cache jobs from each instance before refreshing
:yetibot-jenkins-cache-ttl "3600000"
Default job across all instances , used by ` ! build `
:yetibot-jenkins-default-job ""
:yetibot-jenkins-instances-0-name "yetibot"
:yetibot-jenkins-instances-0-uri "/"
:yetibot-jenkins-instances-0-default-job "default-job-name"
If your does n't require auth , set user and api - key to some
;; non-blank value in order to pass the configuration check.
:yetibot-jenkins-instances-0-user "jenkins-user"
:yetibot-jenkins-instances-0-apikey "abc"
;; additional instances can be configured by bumping the index
:yetibot-jenkins-instances-1-name "yetibot.core"
:yetibot-jenkins-instances-1-uri "/"
;; Admin section controls which users have admin privileges and which
;; commands are locked down to admin use only.
;;
Set of Strings : Slack IDs or IRC users ( which have ~ prefixes ) of users
;; who can use the yetibot `eval` command.
:yetibot-admin-users-0 "U123123"
:yetibot-admin-users-1 "~awesomeperson"
;; The set of commands to restrict to admins only (note `eval` is *always*
;; admin only regardless of config):
:yetibot-admin-commands-0 "observer"
:yetibot-admin-commands-1 "obs"
Configure GitHub if you have your own fork of the yetibot repo . This will
;; allow opening feature requests on your fork.
:yetibot-features-github-token ""
:yetibot-features-github-user ""
SSH servers are specified in groups so that multiple servers which share
;; usernames and keys don't need to each specify duplicate config. Fill in
;; your own key-names below instead of `:server-a-host`. This is the short
;; name that the ssh command will refer to, e.g.: `ssh server-a-host ls -al`.
:yetibot-ssh-groups-0-key "path-to-key"
:yetibot-ssh-groups-0-user ""
:yetibot-ssh-groups-0-servers-0-name ""
:yetibot-ssh-groups-0-servers-0-host ""
:yetibot-ssh-groups-0-servers-1-name ""
:yetibot-ssh-groups-0-servers-1-host ""
;; `weather`
:yetibot-weather-wunderground-key ""
:yetibot-weather-wunderground-default-zip ""
;; `wolfram`
:yetibot-wolfram-appid ""
;; `wordnik` dictionary
:yetibot-wordnik-key ""
;; nrepl configuration
:yetibot-nrepl-port ""
;; `karma`
:yetibot-karma-emoji-positive ":taco:"
:yetibot-karma-emoji-negative ":poop:"
}}}
| null | https://raw.githubusercontent.com/yetibot/core/254da5c7a5c62353da5e7eaeb6ab7066a62dbb25/config/profiles.sample.clj | clojure |
It defines a dev profile, but you may want to share much of the configuration
specific differences between dev and prod:
#composite-profiles
And exploded into nested maps using `dec`:
Whether or not embedded commands should be globally available (enabled by
default)
Whether to enable having a fallback command. Default is true.
Override the default fallback help text. Default is empty.
the default command to fall back to if no other commands match
commands. Only one of these must be specified. If both are specified, it
is no whitelist or blacklist.
those present in the `whitelist` collection. Example:
:yetibot-command-whitelist-0 "echo"
:yetibot-command-whitelist-1 "list"
Blacklist: when blacklist is specified, all commands are enabled except
those present in the `blacklist` collection. Example:
:yetibot-command-blacklist-0 "echo"
:yetibot-command-blacklist-1 "list"
Storing of channel history in the history table is on by default
ADAPTERS
Yetibot can listen on multiple instances of each adapters type. Current
Each config map must have:
- a unique key (i.e. uuid)"
- a :type key with value "slack" or "irc"
true by default
WORK
`jira`
s3
send and receive emails with `mail`
FUN
`giphy`
`meme`
INFOs
`ebay`
`twitter`: stream tweets from followers and followed topics directly into
chat, and post tweets
`jen` - Jenkins
non-blank value in order to pass the configuration check.
additional instances can be configured by bumping the index
Admin section controls which users have admin privileges and which
commands are locked down to admin use only.
who can use the yetibot `eval` command.
The set of commands to restrict to admins only (note `eval` is *always*
admin only regardless of config):
allow opening feature requests on your fork.
usernames and keys don't need to each specify duplicate config. Fill in
your own key-names below instead of `:server-a-host`. This is the short
name that the ssh command will refer to, e.g.: `ssh server-a-host ls -al`.
`weather`
`wolfram`
`wordnik` dictionary
nrepl configuration
`karma` | Sample profiles.clj for Yetibot configuration .
equivalent to config.sample.edn
between dev and prod , using Composite Profiles , optionally overriding
Config is loaded using ` environ : `
{:dev
{:env
{:yetibot-log-level "debug"
:yetibot-log-path "/var/log/yetibot/yetibot.log"
:yetibot-log-rolling-enabled "true"
By default uses the ! prefix to match commands .
You can use this configuration to customize the prefix used by
:yetibot-command-prefix ","
:yetibot-command-embedded-enabled "false"
:yetibot-command-fallback-enabled "true"
:yetibot-command-fallback-help-text "Welcome to Yetibot 👋"
:yetibot-default-command "giphy"
Whitelists and blacklists : these can be used to enable / disable specific
is considered an error and will crash on startup . By default there
Whitelist : when whitelist is specified , all commands are disabled except
Yetibot needs a Postgres instance to run against .
:yetibot-db-url "postgresql:5432/yetibot"
:yetibot-db-table-prefix "yetibot_"
:yetibot-history-disabled "false"
adapter types are Slack and IRC .
Example configuring 3 adapters : 2 Slacks and 1 IRC :
:yetibot-adapters-myteam-type "slack"
:yetibot-adapters-myteam-token "xoxb-111111111111111111111111111111111111"
:yetibot-adapters-k8s-type "slack"
:yetibot-adapters-k8s-token "xoxb-k8s-slack-9999999999999999"
:yetibot-adapters-freenode-type "irc"
:yetibot-adapters-freenode-host "chat.freenode.net"
:yetibot-adapters-freenode-port "7070"
:yetibot-adapters-freenode-ssl "true"
:yetibot-adapters-freenode-username "yetibot"
:yetibot-adapters-mymattermost-type "mattermost"
:yetibot-adapters-mymattermost-host "yetibot-mattermost.herokuapp.com"
:yetibot-adapters-mymattermost-token "h1111111111111111111111111"
Listens on port 3000 but this may be different for you if you ( e.g. if you
use a load balancer or map ports in Docker ) .
:yetibot-url ":3000"
:yetibot-github-token ""
:yetibot-github-org-0 ""
:yetibot-github-org-1 ""
: endpoint is optional : only specify if using GitHub Enterprise .
:yetibot-github-endpoint ""
:yetibot-jira-domain ""
:yetibot-jira-user ""
:yetibot-jira-password ""
:yetibot-jira-projects-0-key "FOO"
:yetibot-jira-projects-0-default-version-id "42"
:yetibot-jira-default-issue-type-id "3"
:yetibot-jira-subtask-issue-type-id "27"
:yetibot-jira-default-project-key "Optional"
:yetibot-jira-cloud "true"
:yetibot-s3-access-key ""
:yetibot-s3-secret-key ""
:yetibot-mail-host ""
:yetibot-mail-user ""
:yetibot-mail-pass ""
:yetibot-mail-from ""
:yetibot-mail-bcc ""
:yetibot-giphy-key ""
:yetibot-imgflip-username ""
:yetibot-imgflip-password ""
Alpha Vantage ( stock data )
:yetibot-alphavantage-key ""
` google `
:yetibot-google-api-key ""
:yetibot-google-custom-search-engine-id ""
:yetibot-google-options-safe "high"
:yetibot-ebay-appid ""
:yetibot-twitter-consumer-key ""
:yetibot-twitter-consumer-secret ""
:yetibot-twitter-token ""
:yetibot-twitter-secret ""
ISO 639 - 1 code : -of-ISO-639-1-codes
:yetibot-twitter-search-lang "en"
instances config are mutable , and are therefore not defined in
this config . Instead , add them at runtime . See ` ! help for more info .
How long to cache jobs from each instance before refreshing
:yetibot-jenkins-cache-ttl "3600000"
Default job across all instances , used by ` ! build `
:yetibot-jenkins-default-job ""
:yetibot-jenkins-instances-0-name "yetibot"
:yetibot-jenkins-instances-0-uri "/"
:yetibot-jenkins-instances-0-default-job "default-job-name"
If your does n't require auth , set user and api - key to some
:yetibot-jenkins-instances-0-user "jenkins-user"
:yetibot-jenkins-instances-0-apikey "abc"
:yetibot-jenkins-instances-1-name "yetibot.core"
:yetibot-jenkins-instances-1-uri "/"
Set of Strings : Slack IDs or IRC users ( which have ~ prefixes ) of users
:yetibot-admin-users-0 "U123123"
:yetibot-admin-users-1 "~awesomeperson"
:yetibot-admin-commands-0 "observer"
:yetibot-admin-commands-1 "obs"
Configure GitHub if you have your own fork of the yetibot repo . This will
:yetibot-features-github-token ""
:yetibot-features-github-user ""
SSH servers are specified in groups so that multiple servers which share
:yetibot-ssh-groups-0-key "path-to-key"
:yetibot-ssh-groups-0-user ""
:yetibot-ssh-groups-0-servers-0-name ""
:yetibot-ssh-groups-0-servers-0-host ""
:yetibot-ssh-groups-0-servers-1-name ""
:yetibot-ssh-groups-0-servers-1-host ""
:yetibot-weather-wunderground-key ""
:yetibot-weather-wunderground-default-zip ""
:yetibot-wolfram-appid ""
:yetibot-wordnik-key ""
:yetibot-nrepl-port ""
:yetibot-karma-emoji-positive ":taco:"
:yetibot-karma-emoji-negative ":poop:"
}}}
|
e3a22f132ff1b6e6c9398bf91d00a5ac13e5284c5efd3182f487231a387d4529 | WhatsApp/erlt | map_ffi.erl | -file("elm_core/src/map_ffi.erlt", 1).
-module(map_ffi).

%% FFI bridge exposing Erlang's `maps` module with an Elm-style Dict API.
%% Keys and values are opaque here; `get/2` wraps lookups in the maybe type
%% from the `maybe` module so absence is explicit.

-eqwalizer_unchecked([{empty, 0},
                      {filter, 2},
                      {fold, 3},
                      {from_list, 1},
                      {get, 2},
                      {insert, 3},
                      {keys, 1},
                      {map, 2},
                      {remove, 2},
                      {size, 1},
                      {to_list, 1},
                      {union, 2},
                      {values, 1}]).
-export_type([map_/2]).
-export([empty/0,
         get/2,
         size/1,
         insert/3,
         remove/2,
         fold/3,
         union/2,
         filter/2,
         map/2]).
-export([keys/1, values/1, to_list/1, from_list/1]).
-import_type({maybe, [{maybe, 1}]}).

%% Opaque alias for the underlying Erlang map.
-type map_(_K, _V) :: term().

%% Return an empty map.
-spec empty() -> map_(_K, _V).
empty() -> maps:new().

%% Look up Key; returns just/nothing instead of raising on absence.
-spec get(K, map_(K, V)) -> maybe:maybe(V).
get(Key, Map) ->
    case maps:find(Key, Map) of
        {ok, Value} -> {'$#maybe:maybe.just', Value};
        error -> {'$#maybe:maybe.nothing'}
    end.

%% Number of entries in the map.
-spec size(map_(_K, _V)) -> integer().
size(Map) -> maps:size(Map).

%% Insert (or overwrite) Key with Value.
-spec insert(K, V, map_(K, V)) -> map_(K, V).
insert(Key, Value, Map) -> maps:put(Key, Value, Map).

%% Remove Key if present; no-op otherwise.
-spec remove(K, map_(K, V)) -> map_(K, V).
remove(Key, Map) -> maps:remove(Key, Map).

%% Fold F over every key/value pair, threading the accumulator.
-spec fold(fun((K, V, R) -> R), R, map_(K, V)) -> R.
fold(F, Acc, Map) -> maps:fold(F, Acc, Map).

%% Merge two maps; on key collision the entry from Map2 wins
%% (maps:merge/2 semantics).
-spec union(map_(K, V), map_(K, V)) -> map_(K, V).
union(Map1, Map2) -> maps:merge(Map1, Map2).

%% Keep only the entries for which F returns true.
-spec filter(fun((K, V) -> boolean()),
             map_(K, V)) -> map_(K, V).
filter(F, Map) -> maps:filter(F, Map).

%% Transform every value with F, keeping keys unchanged.
%% BUG FIX: previously called the nonexistent maps:map_/2, which would
%% raise undef at runtime; the stdlib function is maps:map/2.
-spec map(fun((K, A) -> B), map_(K, A)) -> map_(K, B).
map(F, Map) -> maps:map(F, Map).

%% All keys (order unspecified, per maps:keys/1).
-spec keys(map_(K, _V)) -> [K].
keys(Map) -> maps:keys(Map).

%% All values (order unspecified, per maps:values/1).
-spec values(map_(_K, V)) -> [V].
values(Map) -> maps:values(Map).

%% Convert to a list of {Key, Value} pairs (order unspecified).
-spec to_list(map_(K, V)) -> [{K, V}].
to_list(Map) -> maps:to_list(Map).

%% Build a map from {Key, Value} pairs; for duplicate keys the
%% latest pair wins (maps:from_list/1 semantics).
-spec from_list([{K, V}]) -> map_(K, V).
from_list(List) -> maps:from_list(List).
| null | https://raw.githubusercontent.com/WhatsApp/erlt/616a4adc628ca8754112e659701e57f1cd7fecd1/tests/elm_core/ir-spec/map_ffi.erl | erlang | -file("elm_core/src/map_ffi.erlt", 1).
-module(map_ffi).

%% FFI bridge exposing Erlang's `maps` module with an Elm-style Dict API.
%% Keys and values are opaque here; `get/2` wraps lookups in the maybe type
%% from the `maybe` module so absence is explicit.

-eqwalizer_unchecked([{empty, 0},
                      {filter, 2},
                      {fold, 3},
                      {from_list, 1},
                      {get, 2},
                      {insert, 3},
                      {keys, 1},
                      {map, 2},
                      {remove, 2},
                      {size, 1},
                      {to_list, 1},
                      {union, 2},
                      {values, 1}]).
-export_type([map_/2]).
-export([empty/0,
         get/2,
         size/1,
         insert/3,
         remove/2,
         fold/3,
         union/2,
         filter/2,
         map/2]).
-export([keys/1, values/1, to_list/1, from_list/1]).
-import_type({maybe, [{maybe, 1}]}).

%% Opaque alias for the underlying Erlang map.
-type map_(_K, _V) :: term().

%% Return an empty map.
-spec empty() -> map_(_K, _V).
empty() -> maps:new().

%% Look up Key; returns just/nothing instead of raising on absence.
-spec get(K, map_(K, V)) -> maybe:maybe(V).
get(Key, Map) ->
    case maps:find(Key, Map) of
        {ok, Value} -> {'$#maybe:maybe.just', Value};
        error -> {'$#maybe:maybe.nothing'}
    end.

%% Number of entries in the map.
-spec size(map_(_K, _V)) -> integer().
size(Map) -> maps:size(Map).

%% Insert (or overwrite) Key with Value.
-spec insert(K, V, map_(K, V)) -> map_(K, V).
insert(Key, Value, Map) -> maps:put(Key, Value, Map).

%% Remove Key if present; no-op otherwise.
-spec remove(K, map_(K, V)) -> map_(K, V).
remove(Key, Map) -> maps:remove(Key, Map).

%% Fold F over every key/value pair, threading the accumulator.
-spec fold(fun((K, V, R) -> R), R, map_(K, V)) -> R.
fold(F, Acc, Map) -> maps:fold(F, Acc, Map).

%% Merge two maps; on key collision the entry from Map2 wins
%% (maps:merge/2 semantics).
-spec union(map_(K, V), map_(K, V)) -> map_(K, V).
union(Map1, Map2) -> maps:merge(Map1, Map2).

%% Keep only the entries for which F returns true.
-spec filter(fun((K, V) -> boolean()),
             map_(K, V)) -> map_(K, V).
filter(F, Map) -> maps:filter(F, Map).

%% Transform every value with F, keeping keys unchanged.
%% BUG FIX: previously called the nonexistent maps:map_/2, which would
%% raise undef at runtime; the stdlib function is maps:map/2.
-spec map(fun((K, A) -> B), map_(K, A)) -> map_(K, B).
map(F, Map) -> maps:map(F, Map).

%% All keys (order unspecified, per maps:keys/1).
-spec keys(map_(K, _V)) -> [K].
keys(Map) -> maps:keys(Map).

%% All values (order unspecified, per maps:values/1).
-spec values(map_(_K, V)) -> [V].
values(Map) -> maps:values(Map).

%% Convert to a list of {Key, Value} pairs (order unspecified).
-spec to_list(map_(K, V)) -> [{K, V}].
to_list(Map) -> maps:to_list(Map).

%% Build a map from {Key, Value} pairs; for duplicate keys the
%% latest pair wins (maps:from_list/1 semantics).
-spec from_list([{K, V}]) -> map_(K, V).
from_list(List) -> maps:from_list(List).
|
5435233d0eb1583527f42672a3e97b41ea052cbce34e7872021ee194a26ee201 | phronmophobic/membrane | basic_components.cljc | (ns membrane.basic-components
#?(:cljs
(:require-macros [membrane.ui :refer [maybe-key-event]]
[membrane.component :refer [defui defeffect]]))
(:require [membrane.component :refer [#?(:clj defui)
#?(:clj defeffect)]
:as component]
[membrane.ui :as ui
:refer [vertical-layout
translate
horizontal-layout
label
with-color
with-style
image
on-click
on-mouse-up
bounds
spacer
filled-rectangle
rectangle
IBounds
IKeyPress
origin
origin-x
origin-y
on-key-press
bordered
children
maybe-key-press
on
IHandleEvent
index-for-position]]))
;; NOTE: `$hover?` below is a state reference generated by the `defui` macro;
;; the returned vectors ([:set ...]) are intents interpreted by the runtime.
(defui on-hover
  "Component for adding a hover? state."
  [{:keys [hover? body]}]
  (if hover?
    ;; Already hovering: listen to global mouse moves so we can detect the
    ;; pointer leaving the body's bounding box.
    (ui/wrap-on
     :mouse-move-global
     (fn [handler [x y :as pos]]
       (let [[w h] (bounds body)
             child-intents (handler pos)]
         (if (or (neg? x)
                 (> x w)
                 (neg? y)
                 (> y h))
           ;; Pointer is outside the body's bounds: clear the hover flag
           ;; in addition to whatever intents the child produced.
           (conj child-intents
                 [:set $hover? false])
           child-intents)))
     body)
    ;; Not hovering yet: any local mouse-move means the pointer is over us.
    (ui/on-mouse-move
     (fn [[x y]]
       [[:set $hover? true]])
     body)))
;; Like `on-hover`, but additionally calls the `mouse-out` thunk to produce
;; extra intents at the moment the pointer leaves `body`'s bounds.
(defui on-mouse-out [{:keys [mouse-out body hover?]}]
  (if hover?
    ;; Hovering: watch global mouse moves to catch the pointer exiting.
    (ui/wrap-on
     :mouse-move-global
     (fn [handler [x y :as pos]]
       (let [[w h] (ui/bounds body)
             intents (handler pos)]
         (if (or (neg? x)
                 (> x w)
                 (neg? y)
                 (> y h))
           ;; Exited: clear hover and append the caller's mouse-out intents.
           (into
            (conj intents
                  [:set $hover? false])
            (mouse-out))
           intents)))
     body)
    ;; Not hovering: a local mouse-move marks us as hovered, and the child's
    ;; own mouse-move intents are preserved.
    (ui/wrap-on
     :mouse-move
     (fn [handler [x y :as pos]]
       (into [[:set $hover? true]]
             (handler pos)))
     body)))
(defui button
  "Button component with hover state."
  ;; Delegates rendering to ui/button; `hover?` feeds both the wrapper's
  ;; state tracking and the button's hover appearance.
  [{:keys [hover? text on-click]}]
  (on-hover {:hover? hover?
             :body (ui/button text on-click hover?)}))
;; Move the cursor to the previous newline (start-of-line-ish motion) and
;; drop any active selection.
(defeffect ::previous-line [$cursor $select-cursor text]
  (run! #(apply dispatch! %)
        [[:set $select-cursor nil]
         [:update $cursor
          (fn [cursor]
            ;; Search backwards from just before the cursor; fall back to 0
            ;; when there is no earlier newline.
            (let [prev-newline (.lastIndexOf ^String text "\n" (int (dec cursor)))]
              (if (not= -1 prev-newline)
                prev-newline
                0)))]]))
;; Move the cursor to just after the next newline (down-a-line motion) and
;; drop any active selection; clamps to end-of-text when on the last line.
(defeffect ::next-line [$cursor $select-cursor text]
  (run! #(apply dispatch! %)
        [[:set $select-cursor nil]
         [:update $cursor
          (fn [cursor]
            (let [next-newline (.indexOf ^String text "\n" (int cursor))]
              (if (not= -1 next-newline)
                (inc next-newline)
                (count text))))]]))
;; Advance the cursor one character, clamped to the end of `text`;
;; clears the selection.
(defeffect ::forward-char [$cursor $select-cursor text]
  (run! #(apply dispatch! %)
        [[:set $select-cursor nil]
         [:update $cursor
          (fn [cursor]
            (min (count text)
                 (inc cursor)))]]))
;; Move the cursor one character left, clamped to 0; clears the selection.
;; The inner (min (count text) cursor) guards against a cursor that is past
;; the end of the current text.
(defeffect ::backward-char [$cursor $select-cursor text]
  (run! #(apply dispatch! %)
        [[:set $select-cursor nil]
         [:update $cursor
          (fn [cursor]
            (max 0
                 (dec (min (count text) cursor))))]]))
;; Insert a newline at the cursor/selection by delegating to ::insert-text.
(defeffect ::insert-newline [$cursor $select-cursor $text]
  (dispatch! ::insert-text $cursor $select-cursor $text "\n"))
;; Insert string `s` at the cursor, replacing the active selection if one
;; exists; afterwards the cursor sits just past the inserted text and the
;; selection is cleared. The (list 'collect-one ...) entries make the
;; current cursor/selection values available as extra args to the update fn.
(defeffect ::insert-text [$cursor $select-cursor $text s]
  (run! #(apply dispatch! %)
        [
         [:update [(list 'collect-one $cursor)
                   (list 'collect-one $select-cursor)
                   $text]
          (fn [cursor select-cursor text]
            (let [
                  ;; Range to splice out: the selection when present,
                  ;; otherwise the zero-width range at the cursor.
                  start-clip-index (min
                                    (count text)
                                    (if select-cursor
                                      (min cursor select-cursor)
                                      cursor))
                  end-clip-index (min
                                  (count text)
                                  (if select-cursor
                                    (max cursor select-cursor)
                                    cursor))]
              ;; nil text means there was nothing yet; the result is just s.
              (if text
                (str (subs text 0 start-clip-index) s (subs text end-clip-index))
                s)))]
         [:update [(list 'collect-one $select-cursor)
                   $cursor]
          (fn [select-cursor cursor]
            ;; New cursor = insertion point + length of inserted text.
            (let [cursor (or cursor 0)
                  index (if select-cursor
                          (min select-cursor cursor)
                          cursor)]
              (+ (count s) index)))]
         [:set $select-cursor nil]
         ]))
;; Set the cursor to the character index under pixel position `pos`,
;; as computed by index-for-position for the given font.
(defeffect ::move-cursor-to-pos [$cursor text font pos]
  (run! #(apply dispatch! %)
        [[:update $cursor (fn [cursor]
                            (let [[mx my] pos
                                  new-cursor (index-for-position font
                                                                 text mx my)]
                              new-cursor))]]))
;; Begin a mouse drag: record both the current mouse position and the
;; position where the drag started.
(defeffect ::start-drag [$mpos $down-pos pos]
  (run! #(apply dispatch! %)
        [[:set $mpos pos]
         [:set $down-pos pos]]))
;; Update the tracked mouse position while a drag is in progress.
(defeffect ::drag [$mpos pos]
  (run! #(apply dispatch! %)
        [[:set $mpos pos]]))
;; End a mouse drag over text: turn the drag's start/end pixel positions into
;; a selection range (select-cursor..cursor) and clear the down position.
;; When start and end resolve to the same character index, no selection is
;; made (select-cursor stays nil).
(defeffect ::finish-drag [$select-cursor $cursor $down-pos pos text font]
  (let [[mx my] pos
        end-index (index-for-position font
                                      text mx my)]
    (run! #(apply dispatch! %)
          [
           [:update [(list 'collect-one $down-pos)
                     $select-cursor]
            (fn [down-pos select-cursor]
              (when-let [[dx dy] down-pos]
                (let [idx (index-for-position font
                                              text dx dy)]
                  (when (not= idx end-index)
                    ;; Dragging right-to-left selects one char past the
                    ;; anchor, clamped to the text length.
                    (if (> idx end-index)
                      (min (count text) (inc idx))
                      idx))))
              )]
           [:set $down-pos nil]
           [:update [(list 'collect-one $select-cursor)
                     $cursor]
            (fn [select-cursor cursor]
              ;; Cursor lands at the far end of the selection.
              (if (and select-cursor (> end-index select-cursor))
                (min (count text) (inc end-index))
                end-index))]])))
(def double-click-threshold 500)
(let [getTimeMillis #?(:clj (fn [] (.getTime ^java.util.Date (java.util.Date.)))
:cljs (fn [] (.getTime (js/Date.))))
pow #?(:clj (fn [n x] (Math/pow n x))
:cljs (fn [n x] (js/Math.pow n x)))
find-white-space #?(:clj (fn [text start]
(let [matcher (doto (re-matcher #"\s" text)
(.region start (count text)))]
(when (.find matcher)
(.start matcher))))
:cljs (fn [text start]
(let [regexp (js/RegExp. "\\s" "g")]
(set! (.-lastIndex regexp) start)
(let [result (.exec regexp text)]
(when result
(.-index result))))))]
(defeffect ::text-double-click [$last-click $select-cursor $cursor pos text font]
(let [now (getTimeMillis)
[mx my] pos]
(run! #(apply dispatch! %)
[
[:update [(list 'collect-one $last-click)
$select-cursor]
(fn [[last-click [dx dy]] select-cursor]
(if last-click
(let [diff (- now last-click)]
(if (and (< diff double-click-threshold)
(< (+ (pow (- mx dx) 2)
(pow (- my dy) 2))
100))
(let [index (index-for-position font
text mx my)]
(if-let [start (find-white-space text index)]
start
(count text)))
select-cursor))
select-cursor))]
[:update [(list 'collect-one $last-click)
$cursor]
(fn [[last-click [dx dy]] cursor]
(if last-click
(let [diff (- now last-click)]
(if (and (< diff double-click-threshold)
(< (+ (pow (- mx dx) 2)
(pow (- my dy) 2))
100))
(let [index (index-for-position font
text mx my)
text-backwards (clojure.string/reverse text)]
(if-let [start (find-white-space text-backwards
(- (count text) index))]
(- (count text) start)
0)
)
cursor))
cursor))]
[:set $last-click [now pos]]]))
))
;; Backspace: delete the active selection when one exists, otherwise the
;; single character before the cursor; then place the cursor at the start of
;; the deleted range and clear the selection.
(defeffect ::delete-backward [$cursor $select-cursor $text]
  (run!
   #(apply dispatch! %)
   [
    [:update [(list 'collect-one $cursor)
              (list 'collect-one $select-cursor)
              $text]
     (fn [cursor select-cursor text]
       (let [cursor (min (count text) cursor)
             ;; [clip-start clip-end) is the range removed from the text.
             [clip-start clip-end] (if select-cursor
                                     (let [select-cursor (min (count text) select-cursor)]
                                       (if (< cursor select-cursor)
                                         [cursor select-cursor]
                                         [select-cursor cursor]))
                                     [(max 0 (dec cursor)) cursor])]
         (str (subs text 0 clip-start)
              (subs text clip-end))))]
    [:update [(list 'collect-one [$select-cursor])
              $cursor]
     (fn [select-cursor cursor]
       (max 0 (if select-cursor
                (min select-cursor cursor)
                (dec cursor))))]
    [:set $select-cursor nil]
    ]))
;; Read-only text view with mouse-driven selection, copy/cut support, and a
;; visual drag rectangle while the mouse button is held down.
(defui selectable-text [{:keys [text down-pos mpos last-click cursor select-cursor font]}]
  (ui/on
   ;; Copy: only meaningful when a selection exists.
   :clipboard-copy
   (fn []
     (when select-cursor
       [[:clipboard-copy (subs text
                               (min cursor select-cursor)
                               (max cursor select-cursor))]]))
   ;; Cut: remove the selected range from the text, collapse the cursor to
   ;; the start of the range, and notify listeners via ::new-text.
   :clipboard-cut
   (fn []
     (when select-cursor
       (let [new-text (when text
                        (str (subs text 0 (min cursor select-cursor))
                             (subs text (max cursor select-cursor))))]
         [[:set $cursor (min cursor select-cursor)]
          [:set $select-cursor nil]
          [:set $text new-text]
          [:clipboard-cut (subs text
                               (min cursor select-cursor)
                               (max cursor select-cursor))]
          [::new-text new-text]])))
   ;; Mouse-up finishes the drag selection and feeds the double-click
   ;; (select-word) detector.
   :mouse-up
   (fn [[mx my :as pos]]
     [[::finish-drag $select-cursor $cursor $down-pos pos text font]
      [::text-double-click $last-click $select-cursor $cursor pos text font]])
   ;; Mouse-down moves the cursor, starts a potential drag, clears selection.
   :mouse-down
   (fn [[mx my :as pos]]
     [[::move-cursor-to-pos $cursor text font pos]
      [::start-drag $mpos $down-pos pos]
      [:set $select-cursor nil]])
   ;; Track the pointer only while dragging.
   :mouse-move
   (fn [[mx my :as pos]]
     (when down-pos
       [[::drag $mpos pos]]))
   [(spacer 100 10)
    ;; Highlight for the current selection range.
    (when select-cursor
      (ui/with-color
        [0.6980392156862745
         0.8431372549019608
         1]
        (ui/text-selection text
                           [(min select-cursor cursor)
                            (max select-cursor cursor)]
                           font)))
    ;; Light-gray rubber-band rectangle between drag start and the pointer.
    (when-let [[dx dy] down-pos]
      (when-let [[mx my] mpos]
        (translate (min mx dx)
                   (min my dy)
                   (filled-rectangle
                    [0.9 0.9 0.9]
                    (Math/abs
                     (double (- mx dx)))
                    (Math/abs
                     (double (- my dy)))))))
    (label text font)]))
(defui textarea-view
"Raw component for a basic textarea. textarea should be preferred."
[{:keys [cursor
focus?
text
;; down-pos
;; mpos
select-cursor
;; last-click
font
border?]
:or {cursor 0
text ""
border? true}}]
(let [text (or text "")
padding-x (if border? 5 0)
padding-y (if border? 2 0)]
(maybe-key-press
focus?
(ui/wrap-on
:mouse-down
(fn [handler pos]
(let [intents (handler pos)]
(when (seq intents)
(cons [::request-focus]
intents))))
(on
:key-press
(fn [s]
(when focus?
(case s
:up
[[::previous-line $cursor $select-cursor text]]
:enter
[[::insert-newline $cursor $select-cursor $text]]
:down
[[::next-line $cursor $select-cursor text]]
:left
[[::backward-char $cursor $select-cursor text]]
:right
[[::forward-char $cursor $select-cursor text]]
:backspace
[[::delete-backward $cursor $select-cursor $text]]
;; else
(when (string? s)
[[::insert-text $cursor $select-cursor $text s]]))))
:clipboard-copy
(fn []
(when (and focus? select-cursor)
[[:clipboard-copy (subs text
(min cursor select-cursor)
(max cursor select-cursor))]]))
:clipboard-cut
(fn []
(when (and focus? select-cursor)
(let [new-text (when text
(str (subs text 0 (min cursor select-cursor))
(subs text (max cursor select-cursor))))]
[[:set $cursor (min cursor select-cursor)]
[:set $select-cursor nil]
[:set $text new-text]
[:clipboard-cut (subs text
(min cursor select-cursor)
(max cursor select-cursor))]
[::new-text new-text]])
)
)
:clipboard-paste
(fn [s]
(when focus?
[[::insert-text $cursor $select-cursor $text s]]))
(let [body [(when focus?
(ui/with-color
[0.5725490196078431
0.5725490196078431
0.5725490196078431
0.4]
(ui/text-cursor text cursor font)))
(selectable-text {:text text
:font font
:select-cursor select-cursor
:cursor cursor})]]
(if border?
(let [gray 0.65
[w h] (ui/bounds body)]
[(with-color [gray gray gray]
(with-style :membrane.ui/style-stroke
(rectangle (+ w (* 2 padding-x))
(+ (max h (+ padding-y (or (:size font)
(:size ui/default-font)))) (* 2 padding-y)))))
(translate padding-x
padding-y
body)])
body)))))))
(defui textarea
  "Textarea component."
  ;; Wraps textarea-view and wires focus: the contextual `focus` value holds
  ;; the $ref of whichever textarea currently has keyboard focus.
  [{:keys [text
           border?
           font
           ^:membrane.component/contextual focus
           textarea-state]
    :or {border? true}}]
  (on
   ::request-focus
   (fn []
     [[:set $focus $text]])
   (textarea-view {:text text
                   :cursor (get textarea-state :cursor 0)
                   ;; Focused iff the shared focus ref points at our text.
                   :focus? (= focus $text)
                   :font font
                   :down-pos (:down-pos textarea-state)
                   :mpos (:mpos textarea-state)
                   ;; nil border? (key omitted) also means "draw a border".
                   :border? (or border?
                                (nil? border?))
                   :select-cursor (:select-cursor textarea-state)}))
  )
(defui textarea-light
  "Alternate look for textarea component."
  ;; Borderless textarea-view drawn on a light-gray filled background.
  ;; NOTE(review): focus is set here as [:set [$focus] $text] while `textarea`
  ;; uses [:set $focus $text] — confirm whether the extra vector wrapping is
  ;; intentional.
  [{:keys [text
           font
           ^:membrane.component/contextual focus
           textarea-state]}]
  (on
   ::request-focus
   (fn []
     [[:set [$focus] $text]])
   (let [focus? (= focus $text)]
     (let [textarea
           (textarea-view {:text text
                           :cursor (get textarea-state :cursor 0)
                           :focus? focus?
                           :font font
                           :down-pos (:down-pos textarea-state)
                           :mpos (:mpos textarea-state)
                           :select-cursor (:select-cursor textarea-state)
                           :border? false})]
       (ui/fill-bordered [0.97 0.97 0.97] [0 0]
                         textarea)))))
(defui scrollview
"Basic scrollview.
scroll-bounds should be a two element vector of [width height] of the scrollview
body should be an element.
"
[{:keys [offset mdownx? mdowny? scroll-bounds body]
:or {offset [0 0]}}]
(let [offset-x (nth offset 0)
offset-y (nth offset 1)
[width height] scroll-bounds
scroll-button-size 7
[total-width total-height] (bounds body)
max-offset-x (max 0
(- total-width width))
max-offset-y (max 0
(- total-height height))
clampx (fn [old-offset]
(max 0
(min max-offset-x
old-offset)))
clampy (fn [old-offset]
(max 0
(min max-offset-y
old-offset)))
scroll-elem (ui/scrollview
scroll-bounds [(- (clampx offset-x))
(- (clampy offset-y))]
body)
div0 (fn [a b]
(if (zero? b)
b
(/ a b)))
on-mouse-move
(if mdowny?
(fn [body]
(ui/on-mouse-move
(fn [[mx my]]
[[:set $offset-y (clampy (* (div0 (float my) height)
max-offset-y))]])
body))
(if mdownx?
(fn [body]
(ui/on-mouse-move
(fn [[mx my]]
[[:set $offset-x (clampx (* (div0 (float mx) width)
max-offset-x))]])
body))
identity))]
(on-mouse-out
{:hover (get extra [:mdown :hover])
:mouse-out
(fn []
[[:set $mdowny? nil]
[:set $mdownx? nil]])
:body
(ui/wrap-on
:scroll
(fn [handler [ox oy :as offset] pos]
(let [intents (handler offset pos)]
(if (seq intents)
intents
(when (or (not= offset-x
(clampx (+ ox offset-x)))
(not= offset-y
(clampy (+ oy offset-y))))
[[:update $offset-x (fn [old-offset]
(clampx (+ ox offset-x)))]
[:update $offset-y (fn [old-offset]
(clampy (+ oy offset-y)))]]))))
(on-mouse-move
(ui/on-mouse-event
(fn [[mx my :as mpos] button mouse-down? mods]
(if mouse-down?
(let [new-mdownx? (and (> my height)
(> total-width width))
new-mdowny? (and (> mx width)
(> total-height height))
intents (remove
nil?
(into
[(if (not= new-mdownx? mdownx?)
[:set $mdownx? new-mdownx?]
(if (not= new-mdowny? mdowny?)
[:set $mdowny? new-mdowny?]))]
(if new-mdowny?
[[:set $offset-y (clampy (* (div0 (float my) height)
max-offset-y))]]
(if new-mdownx?
[[:set $offset-x (clampx (* (div0 (float mx) width)
max-offset-x))]]
(ui/mouse-event scroll-elem mpos button mouse-down? mods)))))]
intents)
;; mouse up
(into
[[:set $mdownx? false]
[:set $mdowny? false]]
(ui/mouse-event scroll-elem mpos button mouse-down? mods)))
)
[
scroll-elem
(when (> total-height height)
(translate width 0
[(filled-rectangle [0.941 0.941 0.941]
scroll-button-size height)
(let [top (/ offset-y total-height)
bottom (/ (+ offset-y height)
total-height)]
(translate 0 (* height top)
(with-color
[0.73 0.73 0.73]
(ui/rounded-rectangle scroll-button-size (* height (- bottom top)) (/ scroll-button-size 2)))
))
(with-color [0.89 0.89 0.89]
(with-style :membrane.ui/style-stroke
(rectangle scroll-button-size height)))]))
(when (> total-width width)
(translate 0 height
[(filled-rectangle [0.941 0.941 0.941]
width scroll-button-size)
(let [left (/ offset-x total-width)
right (/ (+ offset-x width)
total-width)]
(translate (* width left) 0
(with-color
[0.73 0.73 0.73]
(ui/rounded-rectangle (* width (- right left)) scroll-button-size (/ scroll-button-size 2)))
)
)
(with-color [0.89 0.89 0.89]
(with-style :membrane.ui/style-stroke
(rectangle width scroll-button-size )))]))
])))})))
(defui test-scrollview [{:keys [state]}]
(scrollview {:scroll-bounds [200 200]
:body
(apply
vertical-layout
(for [i (range 100)]
(label (str "The quick brown fox"
" jumped over the lazy dog"
))))}))
(defui workspace
"Basic workspace.
scroll-bounds should be a two element vector of [width height] of the scrollview
body should be an element.
Acts similar to a scrollview, but no scroll bars are shown and the scroll offset isn't clamped.
"
[{:keys [offset scroll-bounds body]
:or {offset [0 0]}}]
(let [offset-x (nth offset 0)
offset-y (nth offset 1)
[width height] scroll-bounds
scroll-elem (ui/scrollview
scroll-bounds [(- offset-x)
(- offset-y)]
body)]
(ui/wrap-on
:scroll
(fn [handler [ox oy :as offset] pos]
(let [intents (handler offset pos)]
(if (seq intents)
intents
[[:update $offset-x (fn [old-offset]
(+ offset-x ox))]
[:update $offset-y (fn [old-offset]
(+ offset-y oy))]])))
scroll-elem)))
(comment
(let [view
(ui/->Cached
(let [n 100
maxx 500
maxy 500]
(ui/with-style :membrane.ui/style-stroke
(vec
(for [i (range n)]
(ui/with-stroke-width (inc (rand-int 10))
(ui/with-color [(rand) (rand) (rand)]
(ui/path [(rand-int maxx) (rand-int maxy)]
[(rand-int maxx) (rand-int maxy)]))))))))]
(defui test-workspace [{:keys []}]
(workspace {:scroll-bounds [300 300]
:body view})))
(require '[membrane.skia :as skia])
(skia/run (component/make-app #'test-workspace {}))
,)
;; Flip a boolean state value.
(defeffect ::toggle [$bool]
  (dispatch! :update $bool not))
(defui checkbox
  "Checkbox component."
  ;; Renders ui/checkbox and toggles `checked?` on mouse-down.
  [{:keys [checked?]}]
  (on
   :mouse-down
   (fn [_]
     [[::toggle $checked?]])
   (ui/checkbox checked?)))
(defui dropdown-list
[{:keys [options selected]}]
(let [
labels (for [option (map second options)]
(ui/label option))
max-width (reduce max 0 (map ui/width labels))
padding-y 8
padding-x 12
rows
(apply
vertical-layout
(for [[value option] options]
(let [hover? (get extra [:hover? value])
selected? (= selected value)
label (if selected?
(ui/with-color [1 1 1]
(ui/label option))
(ui/label option))
[_ h] (bounds label)
row-height (+ h 4)
row-width (+ max-width (* 2 padding-x))]
(on-hover
{:hover? hover?
:body
(on
:mouse-down
(fn [_]
[[::select $selected value]])
[(spacer row-width row-height)
(cond
selected?
(ui/filled-rectangle [0 0.48 1]
row-width row-height)
hover?
(ui/filled-rectangle [0.976 0.976 0.976]
row-width row-height))
(translate padding-x 2
label)])}))))
[rows-width rows-height] (bounds rows)
]
[(ui/with-style
::ui/style-stroke
(ui/with-color [0.831
0.831
0.831]
(ui/rounded-rectangle rows-width
(+ rows-height (* 2 padding-y))
4)))
(ui/with-style
::ui/style-fill
(ui/with-color [1 1 1]
(ui/rounded-rectangle rows-width
(+ rows-height (* 2 padding-y))
4)))
(translate 0 (- padding-y 2)
rows)])
)
;; Dropdown: a bordered header showing the current selection (or a gray
;; "no selection" placeholder) that toggles an option list below it.
;; Selecting an option re-emits ::select upward and closes the list.
(defui dropdown [{:keys [options selected open?]}]
  (vertical-layout
   (on
    :mouse-down
    (fn [_]
      [[:update $open? not]])
    (ui/bordered [10 10]
                 (if selected
                   ;; Look up the label paired with the selected value.
                   (ui/label (first (keep (fn [[value option]]
                                            (when (= value selected)
                                              option))
                                          options)))
                   (with-color [0.7 0.7 0.7]
                     (ui/label "no selection")))))
   (when open?
     (on
      ::select
      (fn [$selected value]
        [[::select $selected value]
         [:set $open? false]])
      (dropdown-list {:options options :selected selected})))
   ))
;; Commit a dropdown selection into the referenced state.
(defeffect ::select [$selected value]
  (dispatch! :set $selected value))
(comment
(skia/run (component/make-app #'dropdown {:options [[:this "This"]
[:that "That "]
[:the-other "The Other"]]})))
;; Decrement the counter, clamped at `min` when a minimum is provided.
(defeffect ::counter-dec [$num min]
  (if min
    (dispatch! :update $num #(max min (dec %)))
    (dispatch! :update $num dec)))
;; Increment the counter, clamped at `max` when a maximum is provided.
(defeffect ::counter-inc [$num max]
  (if max
    (dispatch! :update $num #(min max (inc %)))
    (dispatch! :update $num inc)))
;; "-" / value / "+" counter row. `min`/`max` shadow clojure.core here, hence
;; the qualified clojure.core/max below; the padding centers the number label
;; within a minimum 20px-wide slot so the buttons don't jitter.
(defui counter [{:keys [num min max]
                :or {num 0}}]
  (horizontal-layout
   (button {:text "-"
            :on-click
            (fn []
              [[::counter-dec $num min]])})
   (ui/spacer 5 0)
   (let [lbl (ui/label num)
         w (ui/width lbl)
         padding (/ (clojure.core/max 0 (- 20 w)) 2)]
     (horizontal-layout
      (spacer padding 0)
      lbl
      (spacer padding 0)))
   (ui/spacer 5 0)
   (button {:text "+"
            :on-click
            (fn []
              [[::counter-inc $num max]])})))
(comment
(skia/run (component/make-app #'counter {:num 3})))
(defeffect ::update-slider [$num min max max-width integer? x]
(let [ratio (/ x max-width)
num (+ min (* ratio (- max min)))
num (if integer?
(int num)
(double num))]
(dispatch! :set $num
(clojure.core/max
min
(clojure.core/min num
max)))))
(defui number-slider [{:keys [num max-width min max integer? mdown?]
:or {max-width 100}}]
(let [ratio (/ (- num min)
(- max min))
width (* max-width (double ratio))
tint 0.85
gray [tint tint tint]]
(on
:mouse-down
(fn [[x y]]
[[:set $mdown? true]
[::update-slider $num min max max-width integer? x]])
:mouse-up
(fn [[x y]]
[[:set $mdown? false]
[::update-slider $num min max max-width integer? x]])
:mouse-move
(fn [[x y]]
(when mdown?
[[::update-slider $num min max max-width integer? x]]))
(ui/translate 1 1
(let [height 20
lbl (ui/label (if integer?
num
#?(:clj (format "%.2f" (double num))
:cljs (.toFixed (double num) 2))))]
[(ui/with-style :membrane.ui/style-fill
(ui/with-color gray
(rectangle width height)))
lbl
(ui/with-style :membrane.ui/style-stroke
(rectangle max-width height))
]))))
)
(comment
(skia/run (component/make-app #'number-slider {:num 3
:min 0
:max 20}))
(skia/run (component/make-app #'number-slider {:num 3
:min 5
:max 20
:max-width 300
:integer? true})))
| null | https://raw.githubusercontent.com/phronmophobic/membrane/507a212a2ac855b886d82bb839f197cb012b26d8/src/membrane/basic_components.cljc | clojure | down-pos
mpos
last-click
else
mouse up | (ns membrane.basic-components
#?(:cljs
(:require-macros [membrane.ui :refer [maybe-key-event]]
[membrane.component :refer [defui defeffect]]))
(:require [membrane.component :refer [#?(:clj defui)
#?(:clj defeffect)]
:as component]
[membrane.ui :as ui
:refer [vertical-layout
translate
horizontal-layout
label
with-color
with-style
image
on-click
on-mouse-up
bounds
spacer
filled-rectangle
rectangle
IBounds
IKeyPress
origin
origin-x
origin-y
on-key-press
bordered
children
maybe-key-press
on
IHandleEvent
index-for-position]]))
(defui on-hover
"Component for adding a hover? state."
[{:keys [hover? body]}]
(if hover?
(ui/wrap-on
:mouse-move-global
(fn [handler [x y :as pos]]
(let [[w h] (bounds body)
child-intents (handler pos)]
(if (or (neg? x)
(> x w)
(neg? y)
(> y h))
(conj child-intents
[:set $hover? false])
child-intents)))
body)
(ui/on-mouse-move
(fn [[x y]]
[[:set $hover? true]])
body)))
(defui on-mouse-out [{:keys [mouse-out body hover?]}]
(if hover?
(ui/wrap-on
:mouse-move-global
(fn [handler [x y :as pos]]
(let [[w h] (ui/bounds body)
intents (handler pos)]
(if (or (neg? x)
(> x w)
(neg? y)
(> y h))
(into
(conj intents
[:set $hover? false])
(mouse-out))
intents)))
body)
(ui/wrap-on
:mouse-move
(fn [handler [x y :as pos]]
(into [[:set $hover? true]]
(handler pos)))
body)))
(defui button
"Button component with hover state."
[{:keys [hover? text on-click]}]
(on-hover {:hover? hover?
:body (ui/button text on-click hover?)}))
(defeffect ::previous-line [$cursor $select-cursor text]
(run! #(apply dispatch! %)
[[:set $select-cursor nil]
[:update $cursor
(fn [cursor]
(let [prev-newline (.lastIndexOf ^String text "\n" (int (dec cursor)))]
(if (not= -1 prev-newline)
prev-newline
0)))]]))
(defeffect ::next-line [$cursor $select-cursor text]
(run! #(apply dispatch! %)
[[:set $select-cursor nil]
[:update $cursor
(fn [cursor]
(let [next-newline (.indexOf ^String text "\n" (int cursor))]
(if (not= -1 next-newline)
(inc next-newline)
(count text))))]]))
(defeffect ::forward-char [$cursor $select-cursor text]
(run! #(apply dispatch! %)
[[:set $select-cursor nil]
[:update $cursor
(fn [cursor]
(min (count text)
(inc cursor)))]]))
(defeffect ::backward-char [$cursor $select-cursor text]
(run! #(apply dispatch! %)
[[:set $select-cursor nil]
[:update $cursor
(fn [cursor]
(max 0
(dec (min (count text) cursor))))]]))
(defeffect ::insert-newline [$cursor $select-cursor $text]
(dispatch! ::insert-text $cursor $select-cursor $text "\n"))
(defeffect ::insert-text [$cursor $select-cursor $text s]
(run! #(apply dispatch! %)
[
[:update [(list 'collect-one $cursor)
(list 'collect-one $select-cursor)
$text]
(fn [cursor select-cursor text]
(let [
start-clip-index (min
(count text)
(if select-cursor
(min cursor select-cursor)
cursor))
end-clip-index (min
(count text)
(if select-cursor
(max cursor select-cursor)
cursor))]
(if text
(str (subs text 0 start-clip-index) s (subs text end-clip-index))
s)))]
[:update [(list 'collect-one $select-cursor)
$cursor]
(fn [select-cursor cursor]
(let [cursor (or cursor 0)
index (if select-cursor
(min select-cursor cursor)
cursor)]
(+ (count s) index)))]
[:set $select-cursor nil]
]))
(defeffect ::move-cursor-to-pos [$cursor text font pos]
(run! #(apply dispatch! %)
[[:update $cursor (fn [cursor]
(let [[mx my] pos
new-cursor (index-for-position font
text mx my)]
new-cursor))]]))
(defeffect ::start-drag [$mpos $down-pos pos]
(run! #(apply dispatch! %)
[[:set $mpos pos]
[:set $down-pos pos]]))
(defeffect ::drag [$mpos pos]
(run! #(apply dispatch! %)
[[:set $mpos pos]]))
(defeffect ::finish-drag [$select-cursor $cursor $down-pos pos text font]
(let [[mx my] pos
end-index (index-for-position font
text mx my)]
(run! #(apply dispatch! %)
[
[:update [(list 'collect-one $down-pos)
$select-cursor]
(fn [down-pos select-cursor]
(when-let [[dx dy] down-pos]
(let [idx (index-for-position font
text dx dy)]
(when (not= idx end-index)
(if (> idx end-index)
(min (count text) (inc idx))
idx))))
)]
[:set $down-pos nil]
[:update [(list 'collect-one $select-cursor)
$cursor]
(fn [select-cursor cursor]
(if (and select-cursor (> end-index select-cursor))
(min (count text) (inc end-index))
end-index))]])))
(def double-click-threshold 500)
(let [getTimeMillis #?(:clj (fn [] (.getTime ^java.util.Date (java.util.Date.)))
:cljs (fn [] (.getTime (js/Date.))))
pow #?(:clj (fn [n x] (Math/pow n x))
:cljs (fn [n x] (js/Math.pow n x)))
find-white-space #?(:clj (fn [text start]
(let [matcher (doto (re-matcher #"\s" text)
(.region start (count text)))]
(when (.find matcher)
(.start matcher))))
:cljs (fn [text start]
(let [regexp (js/RegExp. "\\s" "g")]
(set! (.-lastIndex regexp) start)
(let [result (.exec regexp text)]
(when result
(.-index result))))))]
(defeffect ::text-double-click [$last-click $select-cursor $cursor pos text font]
(let [now (getTimeMillis)
[mx my] pos]
(run! #(apply dispatch! %)
[
[:update [(list 'collect-one $last-click)
$select-cursor]
(fn [[last-click [dx dy]] select-cursor]
(if last-click
(let [diff (- now last-click)]
(if (and (< diff double-click-threshold)
(< (+ (pow (- mx dx) 2)
(pow (- my dy) 2))
100))
(let [index (index-for-position font
text mx my)]
(if-let [start (find-white-space text index)]
start
(count text)))
select-cursor))
select-cursor))]
[:update [(list 'collect-one $last-click)
$cursor]
(fn [[last-click [dx dy]] cursor]
(if last-click
(let [diff (- now last-click)]
(if (and (< diff double-click-threshold)
(< (+ (pow (- mx dx) 2)
(pow (- my dy) 2))
100))
(let [index (index-for-position font
text mx my)
text-backwards (clojure.string/reverse text)]
(if-let [start (find-white-space text-backwards
(- (count text) index))]
(- (count text) start)
0)
)
cursor))
cursor))]
[:set $last-click [now pos]]]))
))
(defeffect ::delete-backward [$cursor $select-cursor $text]
(run!
#(apply dispatch! %)
[
[:update [(list 'collect-one $cursor)
(list 'collect-one $select-cursor)
$text]
(fn [cursor select-cursor text]
(let [cursor (min (count text) cursor)
[clip-start clip-end] (if select-cursor
(let [select-cursor (min (count text) select-cursor)]
(if (< cursor select-cursor)
[cursor select-cursor]
[select-cursor cursor]))
[(max 0 (dec cursor)) cursor])]
(str (subs text 0 clip-start)
(subs text clip-end))))]
[:update [(list 'collect-one [$select-cursor])
$cursor]
(fn [select-cursor cursor]
(max 0 (if select-cursor
(min select-cursor cursor)
(dec cursor))))]
[:set $select-cursor nil]
]))
(defui selectable-text [{:keys [text down-pos mpos last-click cursor select-cursor font]}]
(ui/on
:clipboard-copy
(fn []
(when select-cursor
[[:clipboard-copy (subs text
(min cursor select-cursor)
(max cursor select-cursor))]]))
:clipboard-cut
(fn []
(when select-cursor
(let [new-text (when text
(str (subs text 0 (min cursor select-cursor))
(subs text (max cursor select-cursor))))]
[[:set $cursor (min cursor select-cursor)]
[:set $select-cursor nil]
[:set $text new-text]
[:clipboard-cut (subs text
(min cursor select-cursor)
(max cursor select-cursor))]
[::new-text new-text]])))
:mouse-up
(fn [[mx my :as pos]]
[[::finish-drag $select-cursor $cursor $down-pos pos text font]
[::text-double-click $last-click $select-cursor $cursor pos text font]])
:mouse-down
(fn [[mx my :as pos]]
[[::move-cursor-to-pos $cursor text font pos]
[::start-drag $mpos $down-pos pos]
[:set $select-cursor nil]])
:mouse-move
(fn [[mx my :as pos]]
(when down-pos
[[::drag $mpos pos]]))
[(spacer 100 10)
(when select-cursor
(ui/with-color
[0.6980392156862745
0.8431372549019608
1]
(ui/text-selection text
[(min select-cursor cursor)
(max select-cursor cursor)]
font)))
(when-let [[dx dy] down-pos]
(when-let [[mx my] mpos]
(translate (min mx dx)
(min my dy)
(filled-rectangle
[0.9 0.9 0.9]
(Math/abs
(double (- mx dx)))
(Math/abs
(double (- my dy)))))))
(label text font)]))
(defui textarea-view
"Raw component for a basic textarea. textarea should be preferred."
[{:keys [cursor
focus?
text
select-cursor
font
border?]
:or {cursor 0
text ""
border? true}}]
(let [text (or text "")
padding-x (if border? 5 0)
padding-y (if border? 2 0)]
(maybe-key-press
focus?
(ui/wrap-on
:mouse-down
(fn [handler pos]
(let [intents (handler pos)]
(when (seq intents)
(cons [::request-focus]
intents))))
(on
:key-press
(fn [s]
(when focus?
(case s
:up
[[::previous-line $cursor $select-cursor text]]
:enter
[[::insert-newline $cursor $select-cursor $text]]
:down
[[::next-line $cursor $select-cursor text]]
:left
[[::backward-char $cursor $select-cursor text]]
:right
[[::forward-char $cursor $select-cursor text]]
:backspace
[[::delete-backward $cursor $select-cursor $text]]
(when (string? s)
[[::insert-text $cursor $select-cursor $text s]]))))
:clipboard-copy
(fn []
(when (and focus? select-cursor)
[[:clipboard-copy (subs text
(min cursor select-cursor)
(max cursor select-cursor))]]))
:clipboard-cut
(fn []
(when (and focus? select-cursor)
(let [new-text (when text
(str (subs text 0 (min cursor select-cursor))
(subs text (max cursor select-cursor))))]
[[:set $cursor (min cursor select-cursor)]
[:set $select-cursor nil]
[:set $text new-text]
[:clipboard-cut (subs text
(min cursor select-cursor)
(max cursor select-cursor))]
[::new-text new-text]])
)
)
:clipboard-paste
(fn [s]
(when focus?
[[::insert-text $cursor $select-cursor $text s]]))
(let [body [(when focus?
(ui/with-color
[0.5725490196078431
0.5725490196078431
0.5725490196078431
0.4]
(ui/text-cursor text cursor font)))
(selectable-text {:text text
:font font
:select-cursor select-cursor
:cursor cursor})]]
(if border?
(let [gray 0.65
[w h] (ui/bounds body)]
[(with-color [gray gray gray]
(with-style :membrane.ui/style-stroke
(rectangle (+ w (* 2 padding-x))
(+ (max h (+ padding-y (or (:size font)
(:size ui/default-font)))) (* 2 padding-y)))))
(translate padding-x
padding-y
body)])
body)))))))
(defui textarea
"Textarea component."
[{:keys [text
border?
font
^:membrane.component/contextual focus
textarea-state]
:or {border? true}}]
(on
::request-focus
(fn []
[[:set $focus $text]])
(textarea-view {:text text
:cursor (get textarea-state :cursor 0)
:focus? (= focus $text)
:font font
:down-pos (:down-pos textarea-state)
:mpos (:mpos textarea-state)
:border? (or border?
(nil? border?))
:select-cursor (:select-cursor textarea-state)}))
)
(defui textarea-light
"Alternate look for textarea component."
[{:keys [text
font
^:membrane.component/contextual focus
textarea-state]}]
(on
::request-focus
(fn []
[[:set [$focus] $text]])
(let [focus? (= focus $text)]
(let [textarea
(textarea-view {:text text
:cursor (get textarea-state :cursor 0)
:focus? focus?
:font font
:down-pos (:down-pos textarea-state)
:mpos (:mpos textarea-state)
:select-cursor (:select-cursor textarea-state)
:border? false})]
(ui/fill-bordered [0.97 0.97 0.97] [0 0]
textarea)))))
(defui scrollview
"Basic scrollview.
scroll-bounds should be a two element vector of [width height] of the scrollview
body should be an element.
"
[{:keys [offset mdownx? mdowny? scroll-bounds body]
:or {offset [0 0]}}]
(let [offset-x (nth offset 0)
offset-y (nth offset 1)
[width height] scroll-bounds
scroll-button-size 7
[total-width total-height] (bounds body)
max-offset-x (max 0
(- total-width width))
max-offset-y (max 0
(- total-height height))
clampx (fn [old-offset]
(max 0
(min max-offset-x
old-offset)))
clampy (fn [old-offset]
(max 0
(min max-offset-y
old-offset)))
scroll-elem (ui/scrollview
scroll-bounds [(- (clampx offset-x))
(- (clampy offset-y))]
body)
div0 (fn [a b]
(if (zero? b)
b
(/ a b)))
on-mouse-move
(if mdowny?
(fn [body]
(ui/on-mouse-move
(fn [[mx my]]
[[:set $offset-y (clampy (* (div0 (float my) height)
max-offset-y))]])
body))
(if mdownx?
(fn [body]
(ui/on-mouse-move
(fn [[mx my]]
[[:set $offset-x (clampx (* (div0 (float mx) width)
max-offset-x))]])
body))
identity))]
(on-mouse-out
{:hover (get extra [:mdown :hover])
:mouse-out
(fn []
[[:set $mdowny? nil]
[:set $mdownx? nil]])
:body
(ui/wrap-on
:scroll
(fn [handler [ox oy :as offset] pos]
(let [intents (handler offset pos)]
(if (seq intents)
intents
(when (or (not= offset-x
(clampx (+ ox offset-x)))
(not= offset-y
(clampy (+ oy offset-y))))
[[:update $offset-x (fn [old-offset]
(clampx (+ ox offset-x)))]
[:update $offset-y (fn [old-offset]
(clampy (+ oy offset-y)))]]))))
(on-mouse-move
(ui/on-mouse-event
(fn [[mx my :as mpos] button mouse-down? mods]
(if mouse-down?
(let [new-mdownx? (and (> my height)
(> total-width width))
new-mdowny? (and (> mx width)
(> total-height height))
intents (remove
nil?
(into
[(if (not= new-mdownx? mdownx?)
[:set $mdownx? new-mdownx?]
(if (not= new-mdowny? mdowny?)
[:set $mdowny? new-mdowny?]))]
(if new-mdowny?
[[:set $offset-y (clampy (* (div0 (float my) height)
max-offset-y))]]
(if new-mdownx?
[[:set $offset-x (clampx (* (div0 (float mx) width)
max-offset-x))]]
(ui/mouse-event scroll-elem mpos button mouse-down? mods)))))]
intents)
(into
[[:set $mdownx? false]
[:set $mdowny? false]]
(ui/mouse-event scroll-elem mpos button mouse-down? mods)))
)
[
scroll-elem
(when (> total-height height)
(translate width 0
[(filled-rectangle [0.941 0.941 0.941]
scroll-button-size height)
(let [top (/ offset-y total-height)
bottom (/ (+ offset-y height)
total-height)]
(translate 0 (* height top)
(with-color
[0.73 0.73 0.73]
(ui/rounded-rectangle scroll-button-size (* height (- bottom top)) (/ scroll-button-size 2)))
))
(with-color [0.89 0.89 0.89]
(with-style :membrane.ui/style-stroke
(rectangle scroll-button-size height)))]))
(when (> total-width width)
(translate 0 height
[(filled-rectangle [0.941 0.941 0.941]
width scroll-button-size)
(let [left (/ offset-x total-width)
right (/ (+ offset-x width)
total-width)]
(translate (* width left) 0
(with-color
[0.73 0.73 0.73]
(ui/rounded-rectangle (* width (- right left)) scroll-button-size (/ scroll-button-size 2)))
)
)
(with-color [0.89 0.89 0.89]
(with-style :membrane.ui/style-stroke
(rectangle width scroll-button-size )))]))
])))})))
(defui test-scrollview [{:keys [state]}]
(scrollview {:scroll-bounds [200 200]
:body
(apply
vertical-layout
(for [i (range 100)]
(label (str "The quick brown fox"
" jumped over the lazy dog"
))))}))
(defui workspace
"Basic workspace.
scroll-bounds should be a two element vector of [width height] of the scrollview
body should be an element.
Acts similar to a scrollview, but no scroll bars are shown and the scroll offset isn't clamped.
"
[{:keys [offset scroll-bounds body]
:or {offset [0 0]}}]
(let [offset-x (nth offset 0)
offset-y (nth offset 1)
[width height] scroll-bounds
scroll-elem (ui/scrollview
scroll-bounds [(- offset-x)
(- offset-y)]
body)]
(ui/wrap-on
:scroll
(fn [handler [ox oy :as offset] pos]
(let [intents (handler offset pos)]
(if (seq intents)
intents
[[:update $offset-x (fn [old-offset]
(+ offset-x ox))]
[:update $offset-y (fn [old-offset]
(+ offset-y oy))]])))
scroll-elem)))
(comment
(let [view
(ui/->Cached
(let [n 100
maxx 500
maxy 500]
(ui/with-style :membrane.ui/style-stroke
(vec
(for [i (range n)]
(ui/with-stroke-width (inc (rand-int 10))
(ui/with-color [(rand) (rand) (rand)]
(ui/path [(rand-int maxx) (rand-int maxy)]
[(rand-int maxx) (rand-int maxy)]))))))))]
(defui test-workspace [{:keys []}]
(workspace {:scroll-bounds [300 300]
:body view})))
(require '[membrane.skia :as skia])
(skia/run (component/make-app #'test-workspace {}))
,)
(defeffect ::toggle [$bool]
(dispatch! :update $bool not))
(defui checkbox
"Checkbox component."
[{:keys [checked?]}]
(on
:mouse-down
(fn [_]
[[::toggle $checked?]])
(ui/checkbox checked?)))
(defui dropdown-list
[{:keys [options selected]}]
(let [
labels (for [option (map second options)]
(ui/label option))
max-width (reduce max 0 (map ui/width labels))
padding-y 8
padding-x 12
rows
(apply
vertical-layout
(for [[value option] options]
(let [hover? (get extra [:hover? value])
selected? (= selected value)
label (if selected?
(ui/with-color [1 1 1]
(ui/label option))
(ui/label option))
[_ h] (bounds label)
row-height (+ h 4)
row-width (+ max-width (* 2 padding-x))]
(on-hover
{:hover? hover?
:body
(on
:mouse-down
(fn [_]
[[::select $selected value]])
[(spacer row-width row-height)
(cond
selected?
(ui/filled-rectangle [0 0.48 1]
row-width row-height)
hover?
(ui/filled-rectangle [0.976 0.976 0.976]
row-width row-height))
(translate padding-x 2
label)])}))))
[rows-width rows-height] (bounds rows)
]
[(ui/with-style
::ui/style-stroke
(ui/with-color [0.831
0.831
0.831]
(ui/rounded-rectangle rows-width
(+ rows-height (* 2 padding-y))
4)))
(ui/with-style
::ui/style-fill
(ui/with-color [1 1 1]
(ui/rounded-rectangle rows-width
(+ rows-height (* 2 padding-y))
4)))
(translate 0 (- padding-y 2)
rows)])
)
(defui dropdown [{:keys [options selected open?]}]
(vertical-layout
(on
:mouse-down
(fn [_]
[[:update $open? not]])
(ui/bordered [10 10]
(if selected
(ui/label (first (keep (fn [[value option]]
(when (= value selected)
option))
options)))
(with-color [0.7 0.7 0.7]
(ui/label "no selection")))))
(when open?
(on
::select
(fn [$selected value]
[[::select $selected value]
[:set $open? false]])
(dropdown-list {:options options :selected selected})))
))
(defeffect ::select [$selected value]
(dispatch! :set $selected value))
(comment
(skia/run (component/make-app #'dropdown {:options [[:this "This"]
[:that "That "]
[:the-other "The Other"]]})))
(defeffect ::counter-dec [$num min]
(if min
(dispatch! :update $num #(max min (dec %)))
(dispatch! :update $num dec)))
(defeffect ::counter-inc [$num max]
(if max
(dispatch! :update $num #(min max (inc %)))
(dispatch! :update $num inc)))
(defui counter [{:keys [num min max]
:or {num 0}}]
(horizontal-layout
(button {:text "-"
:on-click
(fn []
[[::counter-dec $num min]])})
(ui/spacer 5 0)
(let [lbl (ui/label num)
w (ui/width lbl)
padding (/ (clojure.core/max 0 (- 20 w)) 2)]
(horizontal-layout
(spacer padding 0)
lbl
(spacer padding 0)))
(ui/spacer 5 0)
(button {:text "+"
:on-click
(fn []
[[::counter-inc $num max]])})))
(comment
(skia/run (component/make-app #'counter {:num 3})))
(defeffect ::update-slider [$num min max max-width integer? x]
(let [ratio (/ x max-width)
num (+ min (* ratio (- max min)))
num (if integer?
(int num)
(double num))]
(dispatch! :set $num
(clojure.core/max
min
(clojure.core/min num
max)))))
(defui number-slider [{:keys [num max-width min max integer? mdown?]
:or {max-width 100}}]
(let [ratio (/ (- num min)
(- max min))
width (* max-width (double ratio))
tint 0.85
gray [tint tint tint]]
(on
:mouse-down
(fn [[x y]]
[[:set $mdown? true]
[::update-slider $num min max max-width integer? x]])
:mouse-up
(fn [[x y]]
[[:set $mdown? false]
[::update-slider $num min max max-width integer? x]])
:mouse-move
(fn [[x y]]
(when mdown?
[[::update-slider $num min max max-width integer? x]]))
(ui/translate 1 1
(let [height 20
lbl (ui/label (if integer?
num
#?(:clj (format "%.2f" (double num))
:cljs (.toFixed (double num) 2))))]
[(ui/with-style :membrane.ui/style-fill
(ui/with-color gray
(rectangle width height)))
lbl
(ui/with-style :membrane.ui/style-stroke
(rectangle max-width height))
]))))
)
(comment
(skia/run (component/make-app #'number-slider {:num 3
:min 0
:max 20}))
(skia/run (component/make-app #'number-slider {:num 3
:min 5
:max 20
:max-width 300
:integer? true})))
|
c9d172436bfdfa056b19172d0135cb56c705cd866799b088a7d9df37e12615a4 | bittide/bittide-hardware | Domain.hs | SPDX - FileCopyrightText : 2022 Google LLC
--
SPDX - License - Identifier : Apache-2.0
# OPTIONS_GHC -fno - warn - orphans #
module Bittide.Domain where
import Clash.Explicit.Prelude
import Bittide.ClockControl (ClockControlConfig, defClockConfig)
createDomain vSystem{
vName="Bittide"
, vPeriod=hzToPeriod 200e6
, vResetKind=Synchronous
}
defBittideClockConfig :: ClockControlConfig Bittide 12
defBittideClockConfig = defClockConfig
| null | https://raw.githubusercontent.com/bittide/bittide-hardware/c0d45b7c64451b4897a9a91dddd75add99340a28/elastic-buffer-sim/src/Bittide/Domain.hs | haskell | SPDX - FileCopyrightText : 2022 Google LLC
SPDX - License - Identifier : Apache-2.0
# OPTIONS_GHC -fno - warn - orphans #
module Bittide.Domain where
import Clash.Explicit.Prelude
import Bittide.ClockControl (ClockControlConfig, defClockConfig)
createDomain vSystem{
vName="Bittide"
, vPeriod=hzToPeriod 200e6
, vResetKind=Synchronous
}
defBittideClockConfig :: ClockControlConfig Bittide 12
defBittideClockConfig = defClockConfig
|
|
d9d7fcb1160de7232f8ff20ceb78f495bb70de4eef214a0a1ce2810f828db932 | B-Lang-org/bsc | ANoInline.hs | module ANoInline (aNoInline) where
import Util(itos)
import Position(noPosition)
import Flags(Flags)
import Id(mkId, getIdBaseString)
import FStringCompat(mkFString)
import Control.Monad.State
import qualified Data.Map as M
import ASyntax
import ASyntaxUtil(mapMAExprs)
import SignalNaming
-- ===============
-- Naming conventions
new defs generated in this stage will be named " < aNoInlinePref > < # > "
aNoInlinePref :: String
aNoInlinePref = "__f"
-- instances of noinline functions will be named with this prefix
instPrefix :: String
instPrefix = "instance_"
-- ===============
State for the Monad
data NIState = NIState {
-- unique name generator
nis_uniqueId :: Integer,
-- definitions processed so far
nis_defs :: [ADef],
reverse lookup of defs processed or added so far ,
-- to avoid creating new ids for the exprs which already have ids
nis_rlookup :: M.Map (AExpr,AType) AId
}
Monad type
type NIStateMonad a = State NIState a
-- Monad Util
-- Adds a processed def
addDef :: ADef -> NIStateMonad ()
addDef newdef@(ADef i t e _) = do
state <- get
olddefs <- gets nis_defs
rlm <- gets nis_rlookup
let rlm1 = M.insert (e,t) i rlm
put (state { nis_defs = (newdef:olddefs), nis_rlookup = rlm1 })
-- Generates a new Id from the expression to give to it
-- (This is only ever used for ANoInlineFunCall, so it need not be so general.)
genIdFromAExpr :: AExpr -> NIStateMonad AId
genIdFromAExpr expr = do
state <- get
uniqueNum <- gets nis_uniqueId
put (state { nis_uniqueId = uniqueNum + 1 })
let newIdStr = signalNameFromAExpr expr ++ aNoInlinePref ++ itos uniqueNum
return $ mkId
XXX aexpr should have an instance of HasPosition
(mkFString newIdStr)
-- Add the expression -- really the definition to the monad
addExpr :: AType -> AExpr -> NIStateMonad AId
addExpr t e = do
ds <- gets nis_defs
rlm <- gets nis_rlookup
case ( M.lookup (e,t) rlm ) of
Nothing ->
do
nid <- genIdFromAExpr e
addDef (ADef nid t e [])
return nid
-- don't create a new id for an expression that already has an id
Just fid -> return fid
-- ===============
-- aNoInline
Make sure all no - inline functions are top - level defs ,
and give each call an instance name ( recording it in the )
-- so that all backends use the same instance name
aNoInline :: Flags -> APackage -> APackage
aNoInline flags apkg =
let
-- initial state
initState = NIState {
nis_uniqueId = 1,
nis_defs = [],
nis_rlookup = M.empty
}
-- fields of the package
ifc = apkg_interface apkg
rs = apkg_rules apkg
insts = apkg_state_instances apkg
defs = apkg_local_defs apkg
-- monadic action
action = do
we ca n't use mapAExprs in one go over the whole package
because we do n't want to lift exprs at the top level of defs .
-- instead, by parts:
map over the defs
( this does n't return defs , it adds them all to the state ,
-- to be retrieved at the end)
mapM_ liftADef defs
-- map over ifcs
ifc' <- mapMAExprs (liftAExpr False) ifc
-- map over rules
rs' <- mapMAExprs (liftAExpr False) rs
-- map over state
insts' <- mapMAExprs (liftAExpr False) insts
get back the final list of defs
-- (original defs with lifting, plus any new defs)
defs' <- gets nis_defs
now that all ANoInlineFunCall are top - level defs ,
assign instance names to each one
let defs'' = updateNoInlineDefs defs'
-- return the new package
return (apkg { apkg_interface = ifc',
apkg_rules = rs',
apkg_state_instances = insts',
apkg_local_defs = defs'' })
in
evalState action initState
-- ===============
This does n't return the defs , because they are returned via the monad
liftADef :: ADef -> NIStateMonad ()
liftADef (ADef i t e p) = do
-- Top level case does not need to be pulled out
e' <- liftAExpr True e
addDef (ADef i t e' p)
-- "top" is whether the expression is the top of an ADef
-- (and therefore should not be lifted)
liftAExpr :: Bool -> AExpr -> NIStateMonad AExpr
liftAExpr False (ANoInlineFunCall t i f es) = do
es' <- mapM (liftAExpr False) es
i <- addExpr t (ANoInlineFunCall t i f es')
return (ASDef t i)
-- anything else, just recurse
liftAExpr True (ANoInlineFunCall t i f es) = do
es' <- mapM (liftAExpr False) es
return $ ANoInlineFunCall t i f es'
liftAExpr _ (APrim aid ty op es) = do
es' <- mapM (liftAExpr False) es
return $ APrim aid ty op es'
liftAExpr _ (AMethCall ty aid mid es) = do
es' <- mapM (liftAExpr False) es
return $ AMethCall ty aid mid es'
liftAExpr _ (AFunCall ty aid fun isC es) = do
es' <- mapM (liftAExpr False) es
return $ AFunCall ty aid fun isC es'
liftAExpr _ expr = return expr
-- ===============
updateNoInlineDefs :: [ADef] -> [ADef]
updateNoInlineDefs defs =
let
updateDef :: ADef -> (Integer, [ADef]) -> (Integer, [ADef])
updateDef (ADef di dt (ANoInlineFunCall ft fi f es) props) (n, ds) =
let (ANoInlineFun m ts ps _) = f
inst_name = instPrefix ++ getIdBaseString fi ++ "_" ++ itos n
f' = (ANoInlineFun m ts ps (Just inst_name))
d' = (ADef di dt (ANoInlineFunCall ft fi f' es) props)
in (n+1, d':ds)
updateDef d (n, ds) = (n, d:ds)
in
snd $ foldr updateDef (0,[]) defs
-- ===============
| null | https://raw.githubusercontent.com/B-Lang-org/bsc/bd141b505394edc5a4bdd3db442a9b0a8c101f0f/src/comp/ANoInline.hs | haskell | ===============
Naming conventions
instances of noinline functions will be named with this prefix
===============
unique name generator
definitions processed so far
to avoid creating new ids for the exprs which already have ids
Monad Util
Adds a processed def
Generates a new Id from the expression to give to it
(This is only ever used for ANoInlineFunCall, so it need not be so general.)
Add the expression -- really the definition to the monad
don't create a new id for an expression that already has an id
===============
aNoInline
so that all backends use the same instance name
initial state
fields of the package
monadic action
instead, by parts:
to be retrieved at the end)
map over ifcs
map over rules
map over state
(original defs with lifting, plus any new defs)
return the new package
===============
Top level case does not need to be pulled out
"top" is whether the expression is the top of an ADef
(and therefore should not be lifted)
anything else, just recurse
===============
=============== | module ANoInline (aNoInline) where
import Util(itos)
import Position(noPosition)
import Flags(Flags)
import Id(mkId, getIdBaseString)
import FStringCompat(mkFString)
import Control.Monad.State
import qualified Data.Map as M
import ASyntax
import ASyntaxUtil(mapMAExprs)
import SignalNaming
new defs generated in this stage will be named " < aNoInlinePref > < # > "
aNoInlinePref :: String
aNoInlinePref = "__f"
instPrefix :: String
instPrefix = "instance_"
State for the Monad
data NIState = NIState {
nis_uniqueId :: Integer,
nis_defs :: [ADef],
reverse lookup of defs processed or added so far ,
nis_rlookup :: M.Map (AExpr,AType) AId
}
Monad type
type NIStateMonad a = State NIState a
addDef :: ADef -> NIStateMonad ()
addDef newdef@(ADef i t e _) = do
state <- get
olddefs <- gets nis_defs
rlm <- gets nis_rlookup
let rlm1 = M.insert (e,t) i rlm
put (state { nis_defs = (newdef:olddefs), nis_rlookup = rlm1 })
genIdFromAExpr :: AExpr -> NIStateMonad AId
genIdFromAExpr expr = do
state <- get
uniqueNum <- gets nis_uniqueId
put (state { nis_uniqueId = uniqueNum + 1 })
let newIdStr = signalNameFromAExpr expr ++ aNoInlinePref ++ itos uniqueNum
return $ mkId
XXX aexpr should have an instance of HasPosition
(mkFString newIdStr)
addExpr :: AType -> AExpr -> NIStateMonad AId
addExpr t e = do
ds <- gets nis_defs
rlm <- gets nis_rlookup
case ( M.lookup (e,t) rlm ) of
Nothing ->
do
nid <- genIdFromAExpr e
addDef (ADef nid t e [])
return nid
Just fid -> return fid
Make sure all no - inline functions are top - level defs ,
and give each call an instance name ( recording it in the )
aNoInline :: Flags -> APackage -> APackage
aNoInline flags apkg =
let
initState = NIState {
nis_uniqueId = 1,
nis_defs = [],
nis_rlookup = M.empty
}
ifc = apkg_interface apkg
rs = apkg_rules apkg
insts = apkg_state_instances apkg
defs = apkg_local_defs apkg
action = do
we ca n't use mapAExprs in one go over the whole package
because we do n't want to lift exprs at the top level of defs .
map over the defs
( this does n't return defs , it adds them all to the state ,
mapM_ liftADef defs
ifc' <- mapMAExprs (liftAExpr False) ifc
rs' <- mapMAExprs (liftAExpr False) rs
insts' <- mapMAExprs (liftAExpr False) insts
get back the final list of defs
defs' <- gets nis_defs
now that all ANoInlineFunCall are top - level defs ,
assign instance names to each one
let defs'' = updateNoInlineDefs defs'
return (apkg { apkg_interface = ifc',
apkg_rules = rs',
apkg_state_instances = insts',
apkg_local_defs = defs'' })
in
evalState action initState
This does n't return the defs , because they are returned via the monad
liftADef :: ADef -> NIStateMonad ()
liftADef (ADef i t e p) = do
e' <- liftAExpr True e
addDef (ADef i t e' p)
liftAExpr :: Bool -> AExpr -> NIStateMonad AExpr
liftAExpr False (ANoInlineFunCall t i f es) = do
es' <- mapM (liftAExpr False) es
i <- addExpr t (ANoInlineFunCall t i f es')
return (ASDef t i)
liftAExpr True (ANoInlineFunCall t i f es) = do
es' <- mapM (liftAExpr False) es
return $ ANoInlineFunCall t i f es'
liftAExpr _ (APrim aid ty op es) = do
es' <- mapM (liftAExpr False) es
return $ APrim aid ty op es'
liftAExpr _ (AMethCall ty aid mid es) = do
es' <- mapM (liftAExpr False) es
return $ AMethCall ty aid mid es'
liftAExpr _ (AFunCall ty aid fun isC es) = do
es' <- mapM (liftAExpr False) es
return $ AFunCall ty aid fun isC es'
liftAExpr _ expr = return expr
updateNoInlineDefs :: [ADef] -> [ADef]
updateNoInlineDefs defs =
let
updateDef :: ADef -> (Integer, [ADef]) -> (Integer, [ADef])
updateDef (ADef di dt (ANoInlineFunCall ft fi f es) props) (n, ds) =
let (ANoInlineFun m ts ps _) = f
inst_name = instPrefix ++ getIdBaseString fi ++ "_" ++ itos n
f' = (ANoInlineFun m ts ps (Just inst_name))
d' = (ADef di dt (ANoInlineFunCall ft fi f' es) props)
in (n+1, d':ds)
updateDef d (n, ds) = (n, d:ds)
in
snd $ foldr updateDef (0,[]) defs
|
a7b9d2da2fec00307bd10a178b8719c3f447a55a6f44bd93949e43756067a8b5 | TrustInSoft/tis-interpreter | gui_parameters.mli | Modified by TrustInSoft
(**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
(** GUI as a plug-in. *)
include Plugin.S
module Project_name: Parameter_sig.String
(** Option -gui-project. *)
module Undo: Parameter_sig.Bool
(** Option -undo. *)
module Theme: Parameter_sig.String
(** Option -gui-theme. *)
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/TrustInSoft/tis-interpreter/33132ce4a825494ea48bf2dd6fd03a56b62cc5c3/src/plugins/gui/gui_parameters.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* GUI as a plug-in.
* Option -gui-project.
* Option -undo.
* Option -gui-theme.
Local Variables:
compile-command: "make -C ../../.."
End:
| Modified by TrustInSoft
This file is part of Frama - C.
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
include Plugin.S
module Project_name: Parameter_sig.String
module Undo: Parameter_sig.Bool
module Theme: Parameter_sig.String
|
9e6740528dc562836403dcdb9fba0259e82fe0c8c9ff5168aaba994312a9fc30 | acl2/acl2 | conjuncts-of-uterm.lisp | ; Getting the conjuncts of an untranslated term
;
Copyright ( C ) 2022 - 2023 Kestrel Institute
;
License : A 3 - clause BSD license . See the file books/3BSD - mod.txt .
;
Author : ( )
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(in-package "ACL2")
(include-book "kestrel/utilities/forms" :dir :system)
(include-book "kestrel/lists-light/union-equal-alt" :dir :system)
(include-book "tools/flag" :dir :system)
(mutual-recursion
;; TODO: Consider treating a negated disjunction as a conjunction
(defun conjuncts-of-uterm (uterm ;; untranslated
)
(declare (xargs :guard t
:verify-guards nil ; done below
))
(if (not (consp uterm))
(list uterm)
(if (eq 'and (ffn-symb uterm))
(conjuncts-of-uterms (fargs uterm))
(if (and (eq 'if (ffn-symb uterm)) ; (if <x> <y> nil) is (and <x> <y>)
(= 3 (len (fargs uterm))) ; for guards
(or (equal nil (farg3 uterm))
(equal *nil* (farg3 uterm))))
(union-equal-alt (conjuncts-of-uterm (farg1 uterm))
(conjuncts-of-uterm (farg2 uterm)))
;; todo: Handle (if <x> nil <y>)?
(list uterm)))))
(defun conjuncts-of-uterms (uterms ;; untranslated
)
(declare (xargs :guard t))
(if (atom uterms)
nil
(union-equal-alt (conjuncts-of-uterm (first uterms))
(conjuncts-of-uterms (rest uterms))))))
(make-flag conjuncts-of-uterm)
(defthm-flag-conjuncts-of-uterm)
(defthm-flag-conjuncts-of-uterm
(defthm true-listp-of-conjuncts-of-uterm
(true-listp (conjuncts-of-uterm uterm))
:flag conjuncts-of-uterm)
(defthm true-listp-of-conjuncts-of-uterms
(true-listp (conjuncts-of-uterms uterms))
:flag conjuncts-of-uterms))
(verify-guards conjuncts-of-uterm)
| null | https://raw.githubusercontent.com/acl2/acl2/e90629124ac135ff1b2be79af4f77b0ecf636f85/books/kestrel/untranslated-terms/conjuncts-of-uterm.lisp | lisp | Getting the conjuncts of an untranslated term
TODO: Consider treating a negated disjunction as a conjunction
untranslated
done below
(if <x> <y> nil) is (and <x> <y>)
for guards
todo: Handle (if <x> nil <y>)?
untranslated | Copyright ( C ) 2022 - 2023 Kestrel Institute
License : A 3 - clause BSD license . See the file books/3BSD - mod.txt .
Author : ( )
(in-package "ACL2")
(include-book "kestrel/utilities/forms" :dir :system)
(include-book "kestrel/lists-light/union-equal-alt" :dir :system)
(include-book "tools/flag" :dir :system)
(mutual-recursion
)
(declare (xargs :guard t
))
(if (not (consp uterm))
(list uterm)
(if (eq 'and (ffn-symb uterm))
(conjuncts-of-uterms (fargs uterm))
(or (equal nil (farg3 uterm))
(equal *nil* (farg3 uterm))))
(union-equal-alt (conjuncts-of-uterm (farg1 uterm))
(conjuncts-of-uterm (farg2 uterm)))
(list uterm)))))
)
(declare (xargs :guard t))
(if (atom uterms)
nil
(union-equal-alt (conjuncts-of-uterm (first uterms))
(conjuncts-of-uterms (rest uterms))))))
(make-flag conjuncts-of-uterm)
(defthm-flag-conjuncts-of-uterm)
(defthm-flag-conjuncts-of-uterm
(defthm true-listp-of-conjuncts-of-uterm
(true-listp (conjuncts-of-uterm uterm))
:flag conjuncts-of-uterm)
(defthm true-listp-of-conjuncts-of-uterms
(true-listp (conjuncts-of-uterms uterms))
:flag conjuncts-of-uterms))
(verify-guards conjuncts-of-uterm)
|
359453343172f6d6aa0fd0fbaaa98cec0642e3cfd352ec7bed71682afc94ff9e | ghc/testsuite | tcfail189.hs | -- Checks that the correct type is used checking the using clause of
-- the group when a by clause is present
{-# OPTIONS_GHC -XTransformListComp #-}
module ShouldFail where
foo = [ length x
| x <- [1..10]
, then group by x using take 2
]
| null | https://raw.githubusercontent.com/ghc/testsuite/998a816ae89c4fd573f4abd7c6abb346cf7ee9af/tests/typecheck/should_fail/tcfail189.hs | haskell | Checks that the correct type is used checking the using clause of
the group when a by clause is present
# OPTIONS_GHC -XTransformListComp # |
module ShouldFail where
foo = [ length x
| x <- [1..10]
, then group by x using take 2
]
|
c36872254fbd9e92b13301adc4383febe20f4072c4b8e05f960f7a39a25ad7ac | mokus0/shapefile | ByParts.hs | # LANGUAGE RecordWildCards #
module Database.Shapefile.Shp.ByParts where
import Database.Shapefile.ShapeTypes (ESRIShapeType, getShapeType32le)
import Database.Shapefile.Shapes.ByParts
(ESRIShape, getShape, putShape, shapeType, contentLengthWords)
import Database.Shapefile.Shp (ShpFileHeader (..), ShpRecHeader (..),
ShpRecord (..), shpFileLengthBytes, shpRecSizeBytes,
getShpRecHeader, putShpRecHeader, getShpFileHeader,
putShpFileHeader)
import Database.Shapefile.Shp.Handle (ShpHandle (..), readShpBlock)
import Database.Shapefile.Shx (shxLengthBytes, shxOffsetBytes)
import Database.Shapefile.Shx.Handle (getShxRecord)
import Data.Binary.Get (Get, getLazyByteString,
getRemainingLazyByteString, runGet)
import Data.Binary.Put (Put, runPut)
import Data.Word (Word32)
import Database.XBase.Dbf.Handle (DbfRecHandle, dbfGetRecord)
data ShpRec = ShpRec
{ shpRecHdr :: ShpRecHeader
, shpRecShape :: ESRIShape
} deriving (Eq, Show)
instance ShpRecord ShpRec where
shpRecHeader = shpRecHdr
|Total size of the shape record in bytes , including the header
shpRecTotalSizeBytes :: ShpRec -> Integer
shpRecTotalSizeBytes = (8 +) . shpRecSizeBytes . shpRecHdr
shpRecShapeType :: ShpRec -> ESRIShapeType
shpRecShapeType ShpRec{shpRecShape=shape} = shapeType shape
|Pack several shapes into ' ShpRec 's , setting proper record numbers and
-- sizes.
mkShpRecs :: [ESRIShape] -> [ShpRec]
mkShpRecs shapes = zipWith mkShpRec [1..] shapes
|Pack the data for a shape into a ' ShpRec ' with the specified record number
mkShpRec :: Word32 -> ESRIShape -> ShpRec
mkShpRec n shape =
ShpRec (ShpRecHeader n (fromIntegral $ contentLengthWords shape)) shape
putShpRec :: ShpRec -> Put
putShpRec ShpRec {..} = do
{- 0 : Record Header -} putShpRecHeader shpRecHdr
8 : Record content
getShpRec :: Get ShpRec
getShpRec = do
{- 0 : Record Header -} shpRecHdr <- getShpRecHeader
8 : Record content
( 8 + len ) bytes total
{ shpRecHdr = shpRecHdr
, shpRecShape = shpShape
}
putShpFile :: ShpFileHeader -> [ShpRec] -> Put
putShpFile shpHdr shpRecs = do
putShpFileHeader shpHdr
mapM_ putShpRec shpRecs
getShpFile :: Get (ShpFileHeader, [ShpRec])
getShpFile = do
hdr <- getShpFileHeader
rest <- getLazyByteString (fromInteger (shpFileLengthBytes hdr) - 100)
let n = shpFileLengthBytes hdr - 100
return (hdr, slurp n rest)
where
slurp 0 rest = []
slurp n rest = flip runGet rest $ do
rec <- getShpRec
rest <- getRemainingLazyByteString
let n' = n - shpRecTotalSizeBytes rec
return (rec : slurp n' rest)
getShpRecord :: ShpHandle -> Int -> IO (ShpRec, Maybe DbfRecHandle)
getShpRecord shp n = do
shxRec <- getShxRecord (shxHandle shp) n
rec <- readShpBlock shp (shxOffsetBytes shxRec) (8 + fromInteger (shxLengthBytes shxRec))
dbfRec <- dbfGetRecord (dbfHandle shp) (toInteger n)
return (runGet getShpRec rec, dbfRec)
getShpShape :: ShpHandle -> Int -> IO (ESRIShape, Maybe DbfRecHandle)
getShpShape shp n = do
shxRec <- getShxRecord (shxHandle shp) n
blk <- readShpBlock shp (shxOffsetBytes shxRec) (8 + fromInteger (shxLengthBytes shxRec))
dbfRec <- dbfGetRecord (dbfHandle shp) (toInteger n)
let shpRec@ShpRec{ shpRecShape=shape } = runGet getShpRec blk
return (shape, dbfRec)
| null | https://raw.githubusercontent.com/mokus0/shapefile/9a3821e1c6aef08b9304ed3c79cccbba09970d7d/src/Database/Shapefile/Shp/ByParts.hs | haskell | sizes.
0 : Record Header
0 : Record Header | # LANGUAGE RecordWildCards #
module Database.Shapefile.Shp.ByParts where
import Database.Shapefile.ShapeTypes (ESRIShapeType, getShapeType32le)
import Database.Shapefile.Shapes.ByParts
(ESRIShape, getShape, putShape, shapeType, contentLengthWords)
import Database.Shapefile.Shp (ShpFileHeader (..), ShpRecHeader (..),
ShpRecord (..), shpFileLengthBytes, shpRecSizeBytes,
getShpRecHeader, putShpRecHeader, getShpFileHeader,
putShpFileHeader)
import Database.Shapefile.Shp.Handle (ShpHandle (..), readShpBlock)
import Database.Shapefile.Shx (shxLengthBytes, shxOffsetBytes)
import Database.Shapefile.Shx.Handle (getShxRecord)
import Data.Binary.Get (Get, getLazyByteString,
getRemainingLazyByteString, runGet)
import Data.Binary.Put (Put, runPut)
import Data.Word (Word32)
import Database.XBase.Dbf.Handle (DbfRecHandle, dbfGetRecord)
data ShpRec = ShpRec
{ shpRecHdr :: ShpRecHeader
, shpRecShape :: ESRIShape
} deriving (Eq, Show)
instance ShpRecord ShpRec where
shpRecHeader = shpRecHdr
|Total size of the shape record in bytes , including the header
shpRecTotalSizeBytes :: ShpRec -> Integer
shpRecTotalSizeBytes = (8 +) . shpRecSizeBytes . shpRecHdr
shpRecShapeType :: ShpRec -> ESRIShapeType
shpRecShapeType ShpRec{shpRecShape=shape} = shapeType shape
|Pack several shapes into ' ShpRec 's , setting proper record numbers and
mkShpRecs :: [ESRIShape] -> [ShpRec]
mkShpRecs shapes = zipWith mkShpRec [1..] shapes
|Pack the data for a shape into a ' ShpRec ' with the specified record number
mkShpRec :: Word32 -> ESRIShape -> ShpRec
mkShpRec n shape =
ShpRec (ShpRecHeader n (fromIntegral $ contentLengthWords shape)) shape
putShpRec :: ShpRec -> Put
putShpRec ShpRec {..} = do
8 : Record content
getShpRec :: Get ShpRec
getShpRec = do
8 : Record content
( 8 + len ) bytes total
{ shpRecHdr = shpRecHdr
, shpRecShape = shpShape
}
putShpFile :: ShpFileHeader -> [ShpRec] -> Put
putShpFile shpHdr shpRecs = do
putShpFileHeader shpHdr
mapM_ putShpRec shpRecs
getShpFile :: Get (ShpFileHeader, [ShpRec])
getShpFile = do
hdr <- getShpFileHeader
rest <- getLazyByteString (fromInteger (shpFileLengthBytes hdr) - 100)
let n = shpFileLengthBytes hdr - 100
return (hdr, slurp n rest)
where
slurp 0 rest = []
slurp n rest = flip runGet rest $ do
rec <- getShpRec
rest <- getRemainingLazyByteString
let n' = n - shpRecTotalSizeBytes rec
return (rec : slurp n' rest)
getShpRecord :: ShpHandle -> Int -> IO (ShpRec, Maybe DbfRecHandle)
getShpRecord shp n = do
shxRec <- getShxRecord (shxHandle shp) n
rec <- readShpBlock shp (shxOffsetBytes shxRec) (8 + fromInteger (shxLengthBytes shxRec))
dbfRec <- dbfGetRecord (dbfHandle shp) (toInteger n)
return (runGet getShpRec rec, dbfRec)
getShpShape :: ShpHandle -> Int -> IO (ESRIShape, Maybe DbfRecHandle)
getShpShape shp n = do
shxRec <- getShxRecord (shxHandle shp) n
blk <- readShpBlock shp (shxOffsetBytes shxRec) (8 + fromInteger (shxLengthBytes shxRec))
dbfRec <- dbfGetRecord (dbfHandle shp) (toInteger n)
let shpRec@ShpRec{ shpRecShape=shape } = runGet getShpRec blk
return (shape, dbfRec)
|
173064d416b1773c19e5c9e6b0fbeeeb3244f0c4812afc0fa7f9e2248296a746 | target/theta-idl | Name.hs | {-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
{-# LANGUAGE DeriveLift #-}
{-# LANGUAGE DerivingVia #-}
{-# LANGUAGE NamedFieldPuns #-}
# LANGUAGE OverloadedLists #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
| A module for working with names and namespaces in Theta .
module Theta.Name where
import qualified Data.Char as Char
import Data.Hashable (Hashable)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Tree (Tree (..))
import GHC.Exts (IsList (..), IsString (..))
import GHC.Generics (Generic)
import Language.Haskell.TH.Syntax (Lift)
import Test.QuickCheck (Arbitrary (..))
import qualified Test.QuickCheck as QuickCheck
import Text.Printf (printf)
import Theta.Hash (Hash, hashList, hashText)
import Theta.Pretty (Pretty (..), ShowPretty (..),
showPretty)
-- * Definitions
-- | An identifier which can refer to types, or constructors.
--
-- All identifiers have a base name as well as the name of the module
-- where they were defined, which acts as a namespace to disambiguate
-- definitions with the same base name.
--
-- A 'Name' is uniquely determined by 'name' and 'namespace'—'Name'
-- values are always fully qualified. We might have some kind of
namespace inference at the language level ( similar to Avro ) , but
-- all namespaces will be fully resolved by the time they are parsed
-- into 'Name' values. This is important because it lets us compare
-- 'Name's for equality directly and use them in sets and maps without
-- worrying.
data Name = Name { moduleName :: ModuleName
, name :: Text
}
deriving stock (Eq, Ord, Generic, Lift)
deriving anyclass (Hashable)
instance Show Name where show = printf "\"%s\"" . showPretty
-- | Parses string literals as dot-sperated names.
--
-- Will throw an exception if the name is not valid.
--
-- @
-- > "com.target.foo" :: Name
-- Name (ModuleName ["com"] "target") "foo"
-- @
instance IsString Name where
fromString string = fromMaybe invalid $ parse $ Text.pack string
where invalid = error $ "Name " <> show string <> " is not valid."
instance Arbitrary Name where
arbitrary = Name <$> arbitrary <*> randomPart
-- TODO: Generate valid non-ASCII names as well
| A QuickCheck generator that outputs a valid /part/ of a Theta
-- name.
--
In the fully - qualified name @com.example . Foo@ , @"com"@ , @"example"@
and @"Foo"@ are the parts .
randomPart :: QuickCheck.Gen Text
randomPart = do
first <- QuickCheck.elements $ '_' : letters
rest <- randomList $ '_' : (letters <> digits)
pure $ Text.pack $ first : rest
where letters = ['a'..'z'] <> ['A'..'Z']
digits = ['0'..'9']
randomList =
QuickCheck.resize 20 . QuickCheck.listOf . QuickCheck.elements
-- | Calculate the hash for a fully qualified name. This considers
-- both the base name *and* the namespace.
hashName :: Name -> Hash
hashName = hashList . map hashText . parts
-- | Return the parts of a name (ie module name and base name) as a
-- list.
--
In the fully - qualified name @com.example . Foo@ , @"com"@ , @"example"@
and @"Foo"@ are the parts .
parts :: Name -> [Text]
parts Name { moduleName, name } =
namespace moduleName <> [baseName moduleName, name]
-- | The canonical string represenation of a name. This includes the
-- module name with each component separated by "." followed by the
-- name itself.
--
-- @
-- > pretty $ Name ["com"] "Foo"
" com . "
-- > pretty $ Name ["com", "target"] "Foo"
" com.target . "
-- @
instance Pretty Name where
pretty Name { moduleName, name } = pretty moduleName <> "." <> name
-- | Why a name doesn't parse.
data Reason = Unqualified
-- ^ The name needs to specify a namespace.
| Invalid
^ The name is not syntactically valid in Theta .
deriving (Show, Eq)
-- | Parse a text representation of a 'Name', returning an error with
-- a 'Reason' if the name doesn't parse.
parse' :: Text -> Either Reason Name
parse' (Text.splitOn "." -> components)
| not (all valid components) = Left Invalid
| otherwise = case components of
[] -> Left Invalid
[_] -> Left Unqualified
parts -> Right $ Name
{ name = last parts
, moduleName = ModuleName { namespace, baseName }
}
where baseName = last $ init components
namespace = init $ init components
valid "" = False
valid part = first (Text.head part) && rest (Text.tail part)
first x = Char.isLetter x || x == '_'
rest = Text.all (\ x -> Char.isAlphaNum x || x == '_')
-- | Parse a text representation of a 'Name'.
--
-- Will return 'Nothing' if the name does not have an explicit module
-- name supplied.
parse :: Text -> Maybe Name
parse text = case parse' text of
Right name -> Just name
Left _ -> Nothing
-- | Render a name to a fully-qualified 'Text' representation.
render :: Name -> Text
render = pretty
-- * Modules
-- | An identifier which refers to a module.
--
-- Modules have a base name and an /optional/ namespace.
--
-- A module name is uniquely determined by its base name and
-- namespace. In the future, we might have some kind of "namespace
inference " similar to Avro , but that should happen /before/ we
produce values of this type . Once you have a ' ModuleName ' , it
-- should be fully qualified and you can compare it for equality
-- directly.
data ModuleName = ModuleName
{ namespace :: [Text]
, baseName :: Text
}
deriving stock (Eq, Ord, Generic, Lift)
deriving anyclass (Hashable)
deriving Show via ShowPretty ModuleName
instance Pretty ModuleName where
pretty = renderModuleName
instance Arbitrary ModuleName where
arbitrary = ModuleName <$> ns <*> randomPart
where ns = QuickCheck.resize 3 $ QuickCheck.listOf randomPart
-- | Render a module name as a string with dots between namespace
-- components.
renderModuleName :: ModuleName -> Text
renderModuleName = Text.intercalate "." . toList
-- | Parse a module name as a series of names separated by dots (.).
--
-- @
> " foo " : : ModuleName
-- ModuleName [] "foo"
> " com.example.foo " : : ModuleName
-- ModuleName ["com", "example"] "foo"
-- @
instance IsString ModuleName where
fromString = fromList . Text.splitOn "." . Text.pack
instance IsList ModuleName where
type Item ModuleName = Text
fromList [] = error "Cannot have an empty module name."
fromList components = ModuleName
{ namespace = init components
, baseName = last components
}
toList ModuleName { baseName, namespace } = namespace <> [baseName]
-- | Return a list of the module name's parts.
--
-- @
-- > moduleParts "foo"
-- ["foo"]
-- > moduleParts "com.example.foo"
-- ["com", "example", "foo"]
-- @
moduleParts :: ModuleName -> [Text]
moduleParts = toList
-- | Build a module name from its parts.
fromModuleParts :: [Text] -> ModuleName
fromModuleParts = fromList
-- | Parses a module name, which is any set of identifiers separated
-- by dots (.).
--
-- Will error out if the string is empty.
parseModuleName :: Text -> ModuleName
parseModuleName (Text.splitOn "." -> components) = fromList components
-- | Extract the root part of the module name. If the module has a
-- namespace, the root is the /first/ component of the namespace;
-- otherwise, the root is the module's base name.
--
-- @
-- > moduleRoot "foo"
-- "foo"
-- > moduleRoot "example.foo"
-- "example"
-- > moduleRoot "com.example.foo"
-- "com"
-- @
moduleRoot :: ModuleName -> Text
moduleRoot ModuleName { namespace, baseName } = case namespace of
[] -> baseName
(root : _) -> root
newtype ModuleTree = ModuleTree (Map.Map Text ModuleTree)
-- | Given a list of modules, consolidate them into a hierarchy
-- according to their namespaces.
--
Given , and @"com.bar"@ , the
-- hierarchy would be:
--
-- @
-- com
-- ↳ example
-- ↳ foo
-- ↳ bar
-- @
moduleHierarchy :: [ModuleName] -> [Tree Text]
moduleHierarchy names = toTree $ foldr insert empty (moduleParts <$> names)
where insert [] tree = tree
insert (p : ps) (ModuleTree map) =
ModuleTree $ Map.insertWith (\ _ -> insert ps) p (expand ps) map
expand [] = empty
expand (p : ps) = ModuleTree [(p, expand ps)]
empty = ModuleTree Map.empty
toTree (ModuleTree map) =
[Node part (toTree tree) | (part, tree) <- Map.toList map]
| null | https://raw.githubusercontent.com/target/theta-idl/0fed73dc7f3963fd2c63cb408edcda0547a6d453/theta/src/Theta/Name.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE DeriveLift #
# LANGUAGE DerivingVia #
# LANGUAGE NamedFieldPuns #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
* Definitions
| An identifier which can refer to types, or constructors.
All identifiers have a base name as well as the name of the module
where they were defined, which acts as a namespace to disambiguate
definitions with the same base name.
A 'Name' is uniquely determined by 'name' and 'namespace'—'Name'
values are always fully qualified. We might have some kind of
all namespaces will be fully resolved by the time they are parsed
into 'Name' values. This is important because it lets us compare
'Name's for equality directly and use them in sets and maps without
worrying.
| Parses string literals as dot-sperated names.
Will throw an exception if the name is not valid.
@
> "com.target.foo" :: Name
Name (ModuleName ["com"] "target") "foo"
@
TODO: Generate valid non-ASCII names as well
name.
| Calculate the hash for a fully qualified name. This considers
both the base name *and* the namespace.
| Return the parts of a name (ie module name and base name) as a
list.
| The canonical string represenation of a name. This includes the
module name with each component separated by "." followed by the
name itself.
@
> pretty $ Name ["com"] "Foo"
> pretty $ Name ["com", "target"] "Foo"
@
| Why a name doesn't parse.
^ The name needs to specify a namespace.
| Parse a text representation of a 'Name', returning an error with
a 'Reason' if the name doesn't parse.
| Parse a text representation of a 'Name'.
Will return 'Nothing' if the name does not have an explicit module
name supplied.
| Render a name to a fully-qualified 'Text' representation.
* Modules
| An identifier which refers to a module.
Modules have a base name and an /optional/ namespace.
A module name is uniquely determined by its base name and
namespace. In the future, we might have some kind of "namespace
should be fully qualified and you can compare it for equality
directly.
| Render a module name as a string with dots between namespace
components.
| Parse a module name as a series of names separated by dots (.).
@
ModuleName [] "foo"
ModuleName ["com", "example"] "foo"
@
| Return a list of the module name's parts.
@
> moduleParts "foo"
["foo"]
> moduleParts "com.example.foo"
["com", "example", "foo"]
@
| Build a module name from its parts.
| Parses a module name, which is any set of identifiers separated
by dots (.).
Will error out if the string is empty.
| Extract the root part of the module name. If the module has a
namespace, the root is the /first/ component of the namespace;
otherwise, the root is the module's base name.
@
> moduleRoot "foo"
"foo"
> moduleRoot "example.foo"
"example"
> moduleRoot "com.example.foo"
"com"
@
| Given a list of modules, consolidate them into a hierarchy
according to their namespaces.
hierarchy would be:
@
com
↳ example
↳ foo
↳ bar
@ | # LANGUAGE DeriveGeneric #
# LANGUAGE OverloadedLists #
| A module for working with names and namespaces in Theta .
module Theta.Name where
import qualified Data.Char as Char
import Data.Hashable (Hashable)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Tree (Tree (..))
import GHC.Exts (IsList (..), IsString (..))
import GHC.Generics (Generic)
import Language.Haskell.TH.Syntax (Lift)
import Test.QuickCheck (Arbitrary (..))
import qualified Test.QuickCheck as QuickCheck
import Text.Printf (printf)
import Theta.Hash (Hash, hashList, hashText)
import Theta.Pretty (Pretty (..), ShowPretty (..),
showPretty)
namespace inference at the language level ( similar to Avro ) , but
data Name = Name { moduleName :: ModuleName
, name :: Text
}
deriving stock (Eq, Ord, Generic, Lift)
deriving anyclass (Hashable)
instance Show Name where show = printf "\"%s\"" . showPretty
instance IsString Name where
fromString string = fromMaybe invalid $ parse $ Text.pack string
where invalid = error $ "Name " <> show string <> " is not valid."
instance Arbitrary Name where
arbitrary = Name <$> arbitrary <*> randomPart
| A QuickCheck generator that outputs a valid /part/ of a Theta
In the fully - qualified name @com.example . Foo@ , @"com"@ , @"example"@
and @"Foo"@ are the parts .
randomPart :: QuickCheck.Gen Text
randomPart = do
first <- QuickCheck.elements $ '_' : letters
rest <- randomList $ '_' : (letters <> digits)
pure $ Text.pack $ first : rest
where letters = ['a'..'z'] <> ['A'..'Z']
digits = ['0'..'9']
randomList =
QuickCheck.resize 20 . QuickCheck.listOf . QuickCheck.elements
hashName :: Name -> Hash
hashName = hashList . map hashText . parts
In the fully - qualified name @com.example . Foo@ , @"com"@ , @"example"@
and @"Foo"@ are the parts .
parts :: Name -> [Text]
parts Name { moduleName, name } =
namespace moduleName <> [baseName moduleName, name]
" com . "
" com.target . "
instance Pretty Name where
pretty Name { moduleName, name } = pretty moduleName <> "." <> name
data Reason = Unqualified
| Invalid
^ The name is not syntactically valid in Theta .
deriving (Show, Eq)
parse' :: Text -> Either Reason Name
parse' (Text.splitOn "." -> components)
| not (all valid components) = Left Invalid
| otherwise = case components of
[] -> Left Invalid
[_] -> Left Unqualified
parts -> Right $ Name
{ name = last parts
, moduleName = ModuleName { namespace, baseName }
}
where baseName = last $ init components
namespace = init $ init components
valid "" = False
valid part = first (Text.head part) && rest (Text.tail part)
first x = Char.isLetter x || x == '_'
rest = Text.all (\ x -> Char.isAlphaNum x || x == '_')
parse :: Text -> Maybe Name
parse text = case parse' text of
Right name -> Just name
Left _ -> Nothing
render :: Name -> Text
render = pretty
inference " similar to Avro , but that should happen /before/ we
produce values of this type . Once you have a ' ModuleName ' , it
data ModuleName = ModuleName
{ namespace :: [Text]
, baseName :: Text
}
deriving stock (Eq, Ord, Generic, Lift)
deriving anyclass (Hashable)
deriving Show via ShowPretty ModuleName
instance Pretty ModuleName where
pretty = renderModuleName
instance Arbitrary ModuleName where
arbitrary = ModuleName <$> ns <*> randomPart
where ns = QuickCheck.resize 3 $ QuickCheck.listOf randomPart
renderModuleName :: ModuleName -> Text
renderModuleName = Text.intercalate "." . toList
> " foo " : : ModuleName
> " com.example.foo " : : ModuleName
instance IsString ModuleName where
fromString = fromList . Text.splitOn "." . Text.pack
instance IsList ModuleName where
type Item ModuleName = Text
fromList [] = error "Cannot have an empty module name."
fromList components = ModuleName
{ namespace = init components
, baseName = last components
}
toList ModuleName { baseName, namespace } = namespace <> [baseName]
moduleParts :: ModuleName -> [Text]
moduleParts = toList
fromModuleParts :: [Text] -> ModuleName
fromModuleParts = fromList
parseModuleName :: Text -> ModuleName
parseModuleName (Text.splitOn "." -> components) = fromList components
moduleRoot :: ModuleName -> Text
moduleRoot ModuleName { namespace, baseName } = case namespace of
[] -> baseName
(root : _) -> root
newtype ModuleTree = ModuleTree (Map.Map Text ModuleTree)
Given , and @"com.bar"@ , the
moduleHierarchy :: [ModuleName] -> [Tree Text]
moduleHierarchy names = toTree $ foldr insert empty (moduleParts <$> names)
where insert [] tree = tree
insert (p : ps) (ModuleTree map) =
ModuleTree $ Map.insertWith (\ _ -> insert ps) p (expand ps) map
expand [] = empty
expand (p : ps) = ModuleTree [(p, expand ps)]
empty = ModuleTree Map.empty
toTree (ModuleTree map) =
[Node part (toTree tree) | (part, tree) <- Map.toList map]
|
ea2cdeaa85bb67d41b010dd4e441cee8631f43d3aa7758714d75b5682fa23484 | rwmjones/guestfs-tools | perl_edit.ml | virt - builder
* Copyright ( C ) 2013 Red Hat Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License along
* with this program ; if not , write to the Free Software Foundation , Inc. ,
* 51 Franklin Street , Fifth Floor , Boston , USA .
* Copyright (C) 2013 Red Hat Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*)
open Std_utils
open Tools_utils
external c_edit_file : verbose:bool -> Guestfs.t -> int64 -> string -> string
-> unit
= "virt_customize_edit_file_perl"
let edit_file g file expr =
  (* The C stub only consumes the raw handle pointer, yet we still pass
   * the original 'g'.  Keeping 'g' as an argument makes it a GC root on
   * the stack, so the handle cannot be garbage collected while the
   * c_edit_file call is in progress.
   *)
  let handle_ptr = Guestfs.c_pointer g in
  c_edit_file ~verbose:(verbose ()) g handle_ptr file expr
| null | https://raw.githubusercontent.com/rwmjones/guestfs-tools/57423d907270526ea664ff15601cce956353820e/customize/perl_edit.ml | ocaml | Note we pass original 'g' even though it is not used by the
* callee. This is so that 'g' is kept as a root on the stack, and
* so cannot be garbage collected while we are in the c_edit_file
* function.
| virt - builder
* Copyright ( C ) 2013 Red Hat Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License along
* with this program ; if not , write to the Free Software Foundation , Inc. ,
* 51 Franklin Street , Fifth Floor , Boston , USA .
* Copyright (C) 2013 Red Hat Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*)
open Std_utils
open Tools_utils
external c_edit_file : verbose:bool -> Guestfs.t -> int64 -> string -> string
-> unit
= "virt_customize_edit_file_perl"
let edit_file g file expr =
c_edit_file (verbose ()) g (Guestfs.c_pointer g) file expr
|
c6b8f612e6ead4df8da8521029495a6accc92be226e6691eed95d3b651be5d0b | gsakkas/rite | 0017.ml | LamG VarPatG (AppG [EmptyG])
fun fn -> x (fun a -> a)
fun c -> x c
fun x -> h (acc x)
fun a -> x a
fun el -> x (a q)
fun y -> x (a y)
fun q -> x (a q)
fun c -> x (a c)
fun i -> x (a i)
fun l -> x (a l)
fun z -> x (a z)
fun x -> a x
fun z -> a (x z)
fun p -> x (a p)
fun y -> a (x y)
fun y -> x y
fun b -> x (a b)
fun w -> x (a w)
| null | https://raw.githubusercontent.com/gsakkas/rite/958a0ad2460e15734447bc07bd181f5d35956d3b/data/sp14/clusters/0017.ml | ocaml | LamG VarPatG (AppG [EmptyG])
fun fn -> x (fun a -> a)
fun c -> x c
fun x -> h (acc x)
fun a -> x a
fun el -> x (a q)
fun y -> x (a y)
fun q -> x (a q)
fun c -> x (a c)
fun i -> x (a i)
fun l -> x (a l)
fun z -> x (a z)
fun x -> a x
fun z -> a (x z)
fun p -> x (a p)
fun y -> a (x y)
fun y -> x y
fun b -> x (a b)
fun w -> x (a w)
|
|
89ea5076e3e18d3314b905985998370f11341765fbac88a07afe2339c6575763 | incoherentsoftware/defect-process | Types.hs | module Enemy.DeathEffectData.Types
( EnemyDeathEffectData(..)
) where
import Data.Aeson.Types (FromJSON, genericParseJSON, parseJSON)
import GHC.Generics (Generic)
import Util
import Window.Graphics.Util
-- | Configuration for the effect shown when an enemy dies, as read from
-- enemy JSON config files.
data EnemyDeathEffectData = EnemyDeathEffectData
  { _drawScale :: DrawScale  -- ^ scale applied when drawing the effect
  , _offset :: Maybe Pos2    -- ^ optional position offset for the effect
  }
  deriving Generic

-- | JSON field names are mapped via 'aesonFieldDropUnderscore'
-- (presumably stripping the leading underscore from record fields --
-- see "Util" for the exact mapping).
instance FromJSON EnemyDeathEffectData where
  parseJSON = genericParseJSON aesonFieldDropUnderscore
| null | https://raw.githubusercontent.com/incoherentsoftware/defect-process/14ec46dec2c48135bc4e5965b7b75532ef19268e/src/Enemy/DeathEffectData/Types.hs | haskell | module Enemy.DeathEffectData.Types
( EnemyDeathEffectData(..)
) where
import Data.Aeson.Types (FromJSON, genericParseJSON, parseJSON)
import GHC.Generics (Generic)
import Util
import Window.Graphics.Util
data EnemyDeathEffectData = EnemyDeathEffectData
{ _drawScale :: DrawScale
, _offset :: Maybe Pos2
}
deriving Generic
instance FromJSON EnemyDeathEffectData where
parseJSON = genericParseJSON aesonFieldDropUnderscore
|
|
f270311f5e79808125f66a4eaf07fa6fd9d874306db9ec825606f6957e2000a6 | coccinelle/coccinelle | token_annot.mli | type annot_key =
Exclude_start
| Exclude_end
type annot_val =
Unit
type annots
val empty : annots
val get_annot : annots -> annot_key -> annot_val option
val put_annot : annot_key -> annot_val -> annots -> annots
val append_annots : annots -> annots -> annots
| null | https://raw.githubusercontent.com/coccinelle/coccinelle/b2a4b9b77157ef83a1bbf01bfa16ea9498f7c7ea/parsing_c/token_annot.mli | ocaml | type annot_key =
Exclude_start
| Exclude_end
type annot_val =
Unit
type annots
val empty : annots
val get_annot : annots -> annot_key -> annot_val option
val put_annot : annot_key -> annot_val -> annots -> annots
val append_annots : annots -> annots -> annots
|
|
529efe79b5408a2afb5e77f72ff5a4cb7f198183369670baf82cf2cc305f126c | sirherrbatka/statistical-learning | variables.lisp | (cl:in-package #:statistical-learning.optimization)
;; Shared instance of the squared-error loss function used as the
;; default error function in the optimization code.
(def <squared-error> (make 'squared-error-function))
;; Direction markers: RIGHT is truthy (T) and LEFT is NIL, so a
;; direction value can be used directly in boolean tests.  Presumably
;; these label the two sides of a split -- confirm at call sites.
(defconstant right t)
(defconstant left nil)
| null | https://raw.githubusercontent.com/sirherrbatka/statistical-learning/491a9c749f0bb09194793bc26487a10fae69dae0/source/optimization/variables.lisp | lisp | (cl:in-package #:statistical-learning.optimization)
(def <squared-error> (make 'squared-error-function))
(defconstant right t)
(defconstant left nil)
|
|
bbc0560863c7161a9418aaa33398e427dbe15a3baad61be68961aa149d51ca28 | adobe/Chronikis | Translate.hs |
Copyright 2019 Adobe . All rights reserved . This file is licensed to you under
the Apache License , Version 2.0 ( the " License " ) ; you may not use this file
except in compliance with the License . You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software distributed
under the License is distributed on an " AS IS " BASIS , WITHOUT WARRANTIES OR
REPRESENTATIONS OF ANY KIND , either express or implied . See the License for the
specific language governing permissions and limitations under the License .
Copyright 2019 Adobe. All rights reserved. This file is licensed to you under
the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
REPRESENTATIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
-}
-- | Public facade of the translation stage: re-exports the model types
-- and the entry points ('translate', 'unrollModel') implemented in
-- "TranslateImpl".
module Translate
  ( DeclS, DeclSB, Model(..), ModelBody(..), translate, unrollModel
  )
where

import TranslateImpl
Copyright 2019 Adobe . All rights reserved . This file is licensed to you under
the Apache License , Version 2.0 ( the " License " ) ; you may not use this file
except in compliance with the License . You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software distributed
under the License is distributed on an " AS IS " BASIS , WITHOUT WARRANTIES OR
REPRESENTATIONS OF ANY KIND , either express or implied . See the License for the
specific language governing permissions and limitations under the License .
Copyright 2019 Adobe. All rights reserved. This file is licensed to you under
the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
REPRESENTATIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
-}
module Translate
( DeclS, DeclSB, Model(..), ModelBody(..), translate, unrollModel
)
where
import TranslateImpl
|
|
ac364dd20c85af3b561fcac94742ed3d43238f47102c3c204b9fc46ab71374f8 | marcoheisig/Typo | type-checks.lisp | (in-package #:typo.vm)
;; DEFINE-TYPE-CHECK expands, for a type named TYPE, into:
;;   1. an inline identity function THE-<TYPE> that CHECK-TYPEs its
;;      argument and returns it unchanged, and
;;   2. a matching fnrecord so the VM can constant-fold/move calls and
;;      specialize on the argument's ntype.
(defmacro define-type-check (type)
  (check-type type symbol)
  ;; Build the function name at macroexpansion time,
  ;; e.g. INTEGER -> THE-INTEGER, interned in this package.
  (let ((name (intern (format nil "~@:(the-~A~)" type) #.*package*)))
    `(progn
       (eval-when (:compile-toplevel :load-toplevel :execute)
         (declaim (inline ,name))
         (defun ,name (object)
           (check-type object ,type)
           object))
       (define-fnrecord ,name (object)
         ;; Of course type checks are only pure in their respective
         ;; domains. But this is fine for us, since we do not consider
         ;; signaled conditions when it comes to types, only returned
         ;; values.
         (:properties :foldable :movable)
         ;; Numeric checks act as the identity on their domain, so their
         ;; derivative is the constant 1, pre-coerced to TYPE at
         ;; expansion time.
         ,@(when (subtypep type 'number)
             `((:differentiator _ (declare (ignore object)) ,(coerce 1 type))))
         ;; Specialization: reject wrappers whose ntype is disjoint from
         ;; TYPE, pass through wrappers already known to be of TYPE, and
         ;; otherwise fall back to a default wrapper of TYPE.
         (:specializer
          (ntype-subtypecase (wrapper-ntype object)
            ((not ,type) (abort-specialization))
            (,type object)
            (t (wrap-default (type-specifier-ntype ',type)))))))))
;; Instantiate THE-<TYPE> checks for the numeric tower (including the
;; per-precision float and complex types) plus a few non-numeric types.
(define-type-check number)
(define-type-check real)
(define-type-check rational)
(define-type-check integer)
(define-type-check float)
(define-type-check short-float)
(define-type-check single-float)
(define-type-check double-float)
(define-type-check long-float)
(define-type-check complex)
(define-type-check complex-short-float)
(define-type-check complex-single-float)
(define-type-check complex-double-float)
(define-type-check complex-long-float)
(define-type-check function)
(define-type-check character)
(define-type-check symbol)
|
4f158374a4414791148bf97eb3bbe8cfc775803506d6d5c931d6b3f6370035b5 | tezos/tezos-mirror | michelson_commands.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2021 Nomadic Labs , < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
open Michelson_generation
(* Clic command group under which both Michelson-generation commands
   below are registered. *)
let group =
  {
    Tezos_clic.name = "Michelson generation";
    title = "Command for generating random Michelson code and data";
  }
(* [<protocol> michelson concat files F1 and F2 into F3]: concatenate
   two previously saved Michelson term traces into a third file. *)
module Michelson_concat_cmd = struct
  (* Load both traces, report their sizes on stderr, and save the
     concatenation into [file3]. *)
  let handler () file1 file2 file3 () =
    let trace1 = Michelson_mcmc_samplers.load ~filename:file1 in
    let trace2 = Michelson_mcmc_samplers.load ~filename:file2 in
    let terms = trace1 @ trace2 in
    let l1 = List.length trace1 in
    let l2 = List.length trace2 in
    Format.eprintf
      "Loaded %d terms from %s, %d terms from %s, total %d@."
      l1
      file1
      l2
      file2
      (l1 + l2) ;
    Michelson_mcmc_samplers.save ~filename:file3 ~terms ;
    return_unit

  (* CLI shape: <protocol> michelson concat files FILENAME and FILENAME
     into FILENAME. *)
  let params =
    Tezos_clic.(
      prefixes [Protocol.name; "michelson"; "concat"; "files"]
      @@ string ~name:"FILENAME" ~desc:"First file"
      @@ prefixes ["and"]
      @@ string ~name:"FILENAME" ~desc:"Second file"
      @@ prefixes ["into"]
      @@ string ~name:"FILENAME" ~desc:"Target file"
      @@ stop)

  let command =
    Tezos_clic.command
      ~group
      ~desc:"Michelson generation"
      Tezos_clic.no_options
      params
      handler
end

let () = Registration.add_command Michelson_concat_cmd.command
(* [<protocol> michelson generate N terms of kind {data|code} in FILE]:
   sample random Michelson terms via MCMC and save them to [FILE]. *)
module Michelson_gen_cmd = struct
  (* Apply [f arg] to [state] when [opt_arg] is [Some arg], otherwise
     return [state] unchanged.  (Not used below; kept for interface
     stability.) *)
  let lift_opt f opt_arg state =
    match opt_arg with None -> state | Some arg -> f arg state

  (* Command handler.  [min_size]/[max_size]/[burn_in] fall back to
     [Michelson_generation.default_generator_config]; an absent [seed]
     means the PRNG self-initializes. *)
  let handler (min_size, max_size, burn_in, seed) terms_count terms_kind
      filename () =
    let default = Michelson_generation.default_generator_config in
    let min = Option.value ~default:default.target_size.min min_size in
    let max = Option.value ~default:default.target_size.max max_size in
    let burn_in_multiplier =
      Option.value ~default:default.burn_in_multiplier burn_in
    in
    let rng_state =
      match seed with
      | None ->
          Format.eprintf "Self-initialization of PRNG@." ;
          let state = Random.State.make_self_init () in
          Format.(eprintf "PRNG state hash: %d@." (Hashtbl.hash state)) ;
          state
      | Some seed ->
          Format.eprintf "PRNG initialized with seed %d@." seed ;
          Random.State.make [|seed|]
    in
    let cfg =
      {Michelson_generation.target_size = {min; max}; burn_in_multiplier}
    in
    (* TERMS-COUNT arrives as a raw string: validate it here. *)
    let terms_count =
      match int_of_string terms_count with
      | exception Failure _ ->
          Format.eprintf "TERMS-COUNT must be an integer, exiting@." ;
          exit 1
      | terms_count ->
          if terms_count <= 0 then (
            Format.eprintf "TERMS-COUNT must be strictly positive, exiting@." ;
            exit 1)
          else terms_count
    in
    let progress =
      Benchmark_helpers.make_progress_printer
        Format.err_formatter
        terms_count
        "Generating term"
    in
    let terms =
      match terms_kind with
      | "data" ->
          Stdlib.List.init terms_count (fun _i ->
              progress () ;
              Michelson_mcmc_samplers.Data
                (Michelson_generation.make_data_sampler rng_state cfg))
      | "code" ->
          Stdlib.List.init terms_count (fun _i ->
              progress () ;
              Michelson_mcmc_samplers.Code
                (Michelson_generation.make_code_sampler rng_state cfg))
      | _ ->
          Format.eprintf "Term kind must be either \"data\" or \"code\"@." ;
          exit 1
    in
    Michelson_mcmc_samplers.save ~filename ~terms ;
    return_unit

  (* Shared integer parser for the optional CLI arguments below;
     [flag] is only used in the error message (e.g. "--min-size").
     Previously this parser was duplicated four times. *)
  let int_parameter flag =
    Tezos_clic.parameter (fun (_ : unit) parsed ->
        try return (int_of_string parsed)
        with _ ->
          Format.eprintf "Error while parsing %s argument.@." flag ;
          exit 1)

  let min_size_arg =
    Tezos_clic.arg
      ~doc:"Lower bound for target size of terms"
      ~long:"min-size"
      ~placeholder:"int"
      (int_parameter "--min-size")

  let max_size_arg =
    Tezos_clic.arg
      (* Fixed copy-paste bug: this help text used to say "Lower bound". *)
      ~doc:"Upper bound for target size of terms"
      ~long:"max-size"
      ~placeholder:"int"
      (int_parameter "--max-size")

  let burn_in_arg =
    Tezos_clic.arg
      ~doc:"Burn-in multiplier"
      ~long:"burn-in"
      ~placeholder:"int"
      (int_parameter "--burn-in")

  let seed_arg =
    Tezos_clic.arg
      ~doc:"RNG seed"
      ~long:"seed"
      ~placeholder:"int"
      (int_parameter "--seed")

  let options = Tezos_clic.args4 min_size_arg max_size_arg burn_in_arg seed_arg

  (* CLI shape: <protocol> michelson generate TERMS-COUNT terms of kind
     {data|code} in FILENAME. *)
  let params =
    Tezos_clic.(
      prefixes [Protocol.name; "michelson"; "generate"]
      @@ string ~name:"TERMS-COUNT" ~desc:"Number of terms to generate"
      @@ prefixes ["terms"; "of"; "kind"]
      @@ string ~name:"{data|code}" ~desc:"Kind of term to generate"
      @@ prefixes ["in"]
      @@ string ~name:"FILENAME" ~desc:"File where to save Michelson terms"
      @@ stop)

  let command =
    Tezos_clic.command
      ~group
      ~desc:"Michelson generation"
      options
      params
      handler
end

let () = Registration.add_command Michelson_gen_cmd.command
| null | https://raw.githubusercontent.com/tezos/tezos-mirror/b5f1d8664f1f91f7be11b15817805a26f4e2417d/src/proto_alpha/lib_benchmarks_proto/michelson_commands.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*************************************************************************** | Copyright ( c ) 2021 Nomadic Labs , < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Michelson_generation
let group =
{
Tezos_clic.name = "Michelson generation";
title = "Command for generating random Michelson code and data";
}
module Michelson_concat_cmd = struct
let handler () file1 file2 file3 () =
let trace1 = Michelson_mcmc_samplers.load ~filename:file1 in
let trace2 = Michelson_mcmc_samplers.load ~filename:file2 in
let terms = trace1 @ trace2 in
let l1 = List.length trace1 in
let l2 = List.length trace2 in
Format.eprintf
"Loaded %d terms from %s, %d terms from %s, total %d@."
l1
file1
l2
file2
(l1 + l2) ;
Michelson_mcmc_samplers.save ~filename:file3 ~terms ;
return_unit
let params =
Tezos_clic.(
prefixes [Protocol.name; "michelson"; "concat"; "files"]
@@ string ~name:"FILENAME" ~desc:"First file"
@@ prefixes ["and"]
@@ string ~name:"FILENAME" ~desc:"Second file"
@@ prefixes ["into"]
@@ string ~name:"FILENAME" ~desc:"Target file"
@@ stop)
let command =
Tezos_clic.command
~group
~desc:"Michelson generation"
Tezos_clic.no_options
params
handler
end
let () = Registration.add_command Michelson_concat_cmd.command
module Michelson_gen_cmd = struct
let lift_opt f opt_arg state =
match opt_arg with None -> state | Some arg -> f arg state
let handler (min_size, max_size, burn_in, seed) terms_count terms_kind
filename () =
let default = Michelson_generation.default_generator_config in
let min = Option.value ~default:default.target_size.min min_size in
let max = Option.value ~default:default.target_size.max max_size in
let burn_in_multiplier =
Option.value ~default:default.burn_in_multiplier burn_in
in
let rng_state =
match seed with
| None ->
Format.eprintf "Self-initialization of PRNG@." ;
let state = Random.State.make_self_init () in
Format.(eprintf "PRNG state hash: %d@." (Hashtbl.hash state)) ;
state
| Some seed ->
Format.eprintf "PRNG initialized with seed %d@." seed ;
Random.State.make [|seed|]
in
let cfg =
{Michelson_generation.target_size = {min; max}; burn_in_multiplier}
in
let terms_count =
match int_of_string terms_count with
| exception Failure _ ->
Format.eprintf "TERMS-COUNT must be an integer, exiting@." ;
exit 1
| terms_count ->
if terms_count <= 0 then (
Format.eprintf "TERMS-COUNT must be strictly positive, exiting@." ;
exit 1)
else terms_count
in
let progress =
Benchmark_helpers.make_progress_printer
Format.err_formatter
terms_count
"Generating term"
in
let terms =
match terms_kind with
| "data" ->
Stdlib.List.init terms_count (fun _i ->
progress () ;
Michelson_mcmc_samplers.Data
(Michelson_generation.make_data_sampler rng_state cfg))
| "code" ->
Stdlib.List.init terms_count (fun _i ->
progress () ;
Michelson_mcmc_samplers.Code
(Michelson_generation.make_code_sampler rng_state cfg))
| _ ->
Format.eprintf "Term kind must be either \"data\" or \"code\"@." ;
exit 1
in
Michelson_mcmc_samplers.save ~filename ~terms ;
return_unit
let min_size_arg =
let min_size =
Tezos_clic.parameter (fun (_ : unit) parsed ->
try return (int_of_string parsed)
with _ ->
Format.eprintf "Error while parsing --min-size argument.@." ;
exit 1)
in
Tezos_clic.arg
~doc:"Lower bound for target size of terms"
~long:"min-size"
~placeholder:"int"
min_size
let max_size_arg =
let max_size =
Tezos_clic.parameter (fun (_ : unit) parsed ->
try return (int_of_string parsed)
with _ ->
Format.eprintf "Error while parsing --max-size argument.@." ;
exit 1)
in
Tezos_clic.arg
~doc:"Lower bound for target size of terms"
~long:"max-size"
~placeholder:"int"
max_size
let burn_in_arg =
let target_size =
Tezos_clic.parameter (fun (_ : unit) parsed ->
try return (int_of_string parsed)
with _ ->
Format.eprintf "Error while parsing --burn-in argument.@." ;
exit 1)
in
Tezos_clic.arg
~doc:"Burn-in multiplier"
~long:"burn-in"
~placeholder:"int"
target_size
let seed_arg =
let seed =
Tezos_clic.parameter (fun (_ : unit) parsed ->
try return (int_of_string parsed)
with _ ->
Format.eprintf "Error while parsing --seed argument.@." ;
exit 1)
in
Tezos_clic.arg ~doc:"RNG seed" ~long:"seed" ~placeholder:"int" seed
let options = Tezos_clic.args4 min_size_arg max_size_arg burn_in_arg seed_arg
let params =
Tezos_clic.(
prefixes [Protocol.name; "michelson"; "generate"]
@@ string ~name:"TERMS-COUNT" ~desc:"Number of terms to generate"
@@ prefixes ["terms"; "of"; "kind"]
@@ string ~name:"{data|code}" ~desc:"Kind of term to generate"
@@ prefixes ["in"]
@@ string ~name:"FILENAME" ~desc:"File where to save Michelson terms"
@@ stop)
let command =
Tezos_clic.command
~group
~desc:"Michelson generation"
options
params
handler
end
let () = Registration.add_command Michelson_gen_cmd.command
|
356409d36a36425407bcf3de250ba95a94c98e9989f49054eda932467ca9438a | jeroanan/rkt-coreutils | tty.rkt | #lang s-exp "util/frontend-program.rkt"
(require "repl/tty.rkt")
(tty)
| null | https://raw.githubusercontent.com/jeroanan/rkt-coreutils/571629d1e2562c557ba258b31ce454add2e93dd9/src/tty.rkt | racket | #lang s-exp "util/frontend-program.rkt"
(require "repl/tty.rkt")
(tty)
|
|
e79eca5ab7d19bda4f532056f66763a744eaa4d636455bc963f1310ff467c8fc | bennn/dissertation | quad-main.rkt | #lang typed/racket/base
(provide
typeset
)
;; ----------------------------------------------------------------------------
(require
require-typed-check
"../base/quad-types.rkt"
racket/class
(only-in racket/list append* split-at drop-right)
(only-in racket/sequence sequence->list)
(only-in math/flonum fl+ fl fl>))
(require/typed/check "quads.rkt"
(make-quadattrs (-> (Listof Any) QuadAttrs))
(quad-car (-> Quad (U String Quad)))
(line (->* ((Listof Any)) #:rest USQ Quad))
(quads->column (-> (Listof Quad) Quad))
(quads->page (-> (Listof Quad) Quad))
(quads->block (-> (Listof Quad) Quad))
(quad-has-attr? (-> Quad Symbol Boolean))
(quad-name (-> Quad Symbol))
(quad-attr-ref (((U Quad QuadAttrs) Symbol) (Any) . ->* . Any))
(quad-list (-> Quad (Listof USQ)))
(quad-attrs (-> Quad (Listof Any)))
(quads->doc (-> (Listof Quad) Quad))
(page (->* ((Listof Any)) #:rest USQ Quad))
(column (->* ((Listof Any)) #:rest USQ Quad))
)
(require/typed/check "wrap.rkt"
(insert-spacers-in-line ((Quad) ((Option Symbol)) . ->* . Quad))
( wrap - adaptive ( - > * ( ( ) ) ( Float ) ( ) ) )
(wrap-best (->* ((Listof Quad)) (Float) (Listof Quad)))
( wrap - first ( - > * ( ( ) ) ( Float ) ( ) ) )
(fill (->* (Quad) ((Option Float)) Quad))
(add-horiz-positions (-> Quad Quad))
)
(require/typed/check "world.rkt"
[world:line-looseness-key Symbol]
[world:allow-hyphenated-last-word-in-paragraph Boolean]
[world:line-looseness-tolerance Float]
[world:line-index-key Symbol]
[world:measure-key Symbol]
[world:use-hyphenation? Boolean]
[world:max-quality Index]
[world:total-lines-key Symbol]
[world:draft-quality Index]
[world:quality-key Symbol]
[world:quality-key-default (Parameterof Integer)]
[world:paper-width-default (Parameterof Float)]
[world:column-count-key Symbol]
[world:column-count-key-default (Parameterof Integer)]
[world:column-gutter-key Symbol]
[world:column-gutter-key-default (Parameterof Float)]
[world:column-index-key Symbol]
[world:min-first-lines Index]
[world:min-last-lines Index]
[world:minimum-lines-per-column Index]
[world:default-lines-per-column Index]
)
(require/typed/check "measure.rkt"
[round-float (-> Float Float)]
[load-text-cache-file (-> Void)]
[update-text-cache-file (-> Void)]
)
(require/typed/check "utils.rkt"
(add-vert-positions (-> Quad Quad))
(attr-change (-> QuadAttrs (Listof Any) QuadAttrs))
(compute-line-height (-> Quad Quad))
(hyphenate-quad (USQ -> USQ))
(join-quads ((Listof Quad) -> (Listof Quad)))
(merge-attrs (QuadAttrs * -> QuadAttrs))
(quad-attr-set* (Quad (Listof Any) -> Quad))
(split-last (All (A) ((Listof A) -> (values (Listof A) A))))
(split-quad (-> Quad (Listof Quad)))
)
(require/typed/check "sugar-list.rkt"
(slice-at (All (A) (case-> ((Listof A) Positive-Integer -> (Listof (Listof A)))
((Listof A) Positive-Integer Boolean -> (Listof (Listof A))))))
)
;; bg: should maybe import this
(require/typed/check "../base/csp/csp.rkt"
[problem% (Class (init-field [solver Any])
(field [_solver Any])
(field [_variable-domains Any])
(field [_constraints Any])
[reset (-> Void)]
[custom-print (Output-Port Integer -> Void)]
[custom-display (Output-Port -> Void)]
[custom-write (Output-Port -> Void)]
[add-variable (Any (Listof Any) . -> . Void)]
[add-variables ((Listof Any) Any . -> . Void)]
[add-constraint ((Index . -> . Boolean) (Listof Any) . -> . Void)]
[get-solution (-> HashTableTop)]
[get-solutions (-> (Listof (HashTable String Integer)))]
[get-solution-iter (-> HashTableTop)]
[set-solver (Any . -> . Void)]
[get-solver (-> Any)])])
;; Occurrence-typed predicate: #t iff QS is a list whose elements all
;; satisfy quad?, letting Typed Racket narrow QS to (Listof Quad) at
;; assert/filter sites.
(: listof-quad? (-> Any Boolean : (Listof Quad)))
(define (listof-quad? qs)
  (and (list? qs) (andmap quad? qs)))
;; =============================================================================
;; Nesting levels produced by input->nested-blocks:
;; a block is a run of quads; a multicolumn is the blocks between column
;; breaks; a multipage is the multicolumns between page breaks.
(define-type Block-Type (Listof Quad))
(define-type Multicolumn-Type (Listof Block-Type))
(define-type Multipage-Type (Listof Multicolumn-Type))
(: typeset (-> Quad Quad))
(define (typeset x)
(load-text-cache-file)
(define pages (append*
(for/list : (Listof (Listof Quad))
([multipage (in-list (input->nested-blocks x))])
(columns->pages (append*
(for/list : (Listof (Listof Quad))
([multicolumn (in-list multipage)])
(lines->columns (append*
(for/list : (Listof (Listof Quad))
([block-quads (in-list multicolumn)])
(block-quads->lines block-quads))))))))))
(define doc (pages->doc pages))
(update-text-cache-file)
doc)
;; -----------------------------------------------------------------------------
(: cons-reverse (All (A B) ((Listof A) (Listof B) -> (Pairof (Listof A) (Listof B)))))
(define (cons-reverse xs ys)
  ;; Push the reversal of XS onto YS.  Used by the fold in
  ;; input->nested-blocks to close off an accumulator (built backwards)
  ;; into a forward-ordered sublist.
  (let ([restored (reverse xs)])
    (cons restored ys)))
(: input->nested-blocks (Quad . -> . (Listof Multipage-Type)))
(define (input->nested-blocks i)
(define-values (mps mcs bs b)
(for/fold ([multipages : (Listof Multipage-Type) null]
[multicolumns : (Listof Multicolumn-Type) null]
[blocks : (Listof Block-Type) null]
[block-acc : Block-Type null])
([q (in-list (split-quad i))])
(case (quad-name q)
[(page-break) (values (cons-reverse (cons-reverse (cons-reverse block-acc blocks) multicolumns) multipages) null null null)]
[(column-break) (values multipages (cons-reverse (cons-reverse block-acc blocks) multicolumns) null null)]
[(block-break) (values multipages multicolumns (cons-reverse block-acc blocks) null)]
[else (values multipages multicolumns blocks (cons q block-acc))])))
(reverse (cons-reverse (cons-reverse (cons-reverse b bs) mcs) mps)))
;; Rebuild quad Q with the same name and attrs, but with its children
;; (asserted to be all quads) passed through join-quads, which joins
;; adjacent child quads.
(: merge-adjacent-within (Quad . -> . Quad))
(define (merge-adjacent-within q)
  (quad (quad-name q)
        (make-quadattrs (quad-attrs q))
        (join-quads (assert (quad-list q) listof-quad?))))
;; Hyphenate every child of Q except the last one, so that a paragraph
;; never ends in a hyphenated word.  (Used when
;; world:allow-hyphenated-last-word-in-paragraph is off; see
;; block->lines.)
(: hyphenate-quad-except-last-word (Quad . -> . Quad))
(define (hyphenate-quad-except-last-word q)
  (define-values (first-quads last-quad) (split-last (quad-list q)))
  (quad (quad-name q) (make-quadattrs (quad-attrs q))
        (append (for/list : (Listof USQ) ([q (in-list first-quads)])
                  (hyphenate-quad q))
                (list last-quad))))
;; Mean of the per-line looseness attribute (world:line-looseness-key,
;; defaulting to 0.0) over LINES, excluding the last line from the
;; measurement.  Returns 0.0 when there are zero or one lines.
;; Note the divisor is (length lines) - 1, i.e. the number of measured
;; lines.
(: average-looseness ((Listof Quad) . -> . Float))
(define (average-looseness lines)
  (if (<= (length lines) 1)
      (ann 0.0 Float)
      (let ([lines-to-measure : (Listof Quad) (drop-right lines 1)]) ; exclude last line from looseness calculation
        (round-float (/ (foldl fl+ 0.0 (map (λ([line : Quad]) (assert (quad-attr-ref line world:line-looseness-key 0.0) flonum?)) lines-to-measure)) (- (fl (length lines)) 1.0))))))
;;; todo: introduce a Quad subtype where quad-list is guaranteed to be all Quads (no strings)
;; Wrap the quads of block B into a list of line quads.
;; Strategy: wrap once without hyphenation; if the average looseness of
;; those lines exceeds world:line-looseness-tolerance and hyphenation is
;; enabled, hyphenate the block (optionally sparing the paragraph's last
;; word) and wrap again.  Each resulting line then gets its line-idx /
;; total-lines attributes and has spacers inserted.
(: block->lines (Quad . -> . (Listof Quad)))
(define (block->lines b)
  (: wrap-quads ((Listof Quad) . -> . (Listof Quad)))
  (define (wrap-quads qs)
    ;; Always uses the best-quality wrapper; the quality-based dispatch
    ;; below was disabled.
    (define wrap-proc wrap-best)
    ;; (cond
    ;;   [(>= quality world:max-quality) wrap-best]
    ;;   [(<= quality world:draft-quality) wrap-first]
    ;;   [else wrap-adaptive]))
    (wrap-proc qs))
  (define wrapped-lines-without-hyphens (wrap-quads (assert (quad-list b) listof-quad?))) ; 100/150
  (define avg-looseness (average-looseness wrapped-lines-without-hyphens))
  (define gets-hyphenation? (and world:use-hyphenation?
                                 (fl> avg-looseness world:line-looseness-tolerance)))
  (define wrapped-lines (if gets-hyphenation?
                            (wrap-quads (split-quad ((if world:allow-hyphenated-last-word-in-paragraph
                                                         (lambda ([x : USQ]) (assert (hyphenate-quad x) quad?))
                                                         hyphenate-quad-except-last-word) (merge-adjacent-within b))))
                            wrapped-lines-without-hyphens))
  ;; Stamp each line with its index and the total line count, then
  ;; insert spacers.
  (map insert-spacers-in-line
       (for/list : (Listof Quad)
                 ([line-idx (in-naturals)][the-line-any : USQ (in-list wrapped-lines)])
         (define the-line (assert the-line-any quad?))
         (apply line (attr-change (make-quadattrs (quad-attrs the-line)) (list 'line-idx line-idx 'lines (length wrapped-lines))) (quad-list the-line)))))
(: number-pages ((Listof Quad) . -> . (Listof Quad)))
(define (number-pages ps)
  ;; Stamp each page quad with its zero-based index under the 'page
  ;; attribute, leaving children untouched.
  (for/list : (Listof Quad)
            ([page-idx (in-naturals)] [pg (in-list ps)])
    (apply page
           (merge-attrs (make-quadattrs (quad-attrs pg)) `((page . ,page-idx)))
           (quad-list pg))))
;; Assemble finished page quads into the final doc quad: number the
;; pages, then for every column of every page fill each line to width,
;; compute horizontal positions and line heights, and add vertical
;; positions before wrapping everything in a doc.
;; todo: resolve xrefs and other last-minute tasks
;; todo: generalize computation of widths and heights, recursively
(: pages->doc ((Listof Quad) . -> . Quad))
(define (pages->doc ps)
  ;; Per-page pass: rebuild each column with fully positioned lines.
  (: columns-mapper (Quad . -> . Quad))
  (define (columns-mapper page-in)
    (apply page (make-quadattrs (quad-attrs page-in))
           (map add-vert-positions
                (for/list : (Listof Quad) ([col-any (in-list (quad-list page-in))])
                  (define col (assert col-any quad?))
                  (apply column (make-quadattrs (quad-attrs col)) (map (λ([ln : Any]) (compute-line-height (add-horiz-positions (fill (assert ln quad?))))) (quad-list col)))))))
  (define mapped-pages (map columns-mapper (number-pages ps)))
  (define doc (quads->doc mapped-pages))
  doc)
(: lines->columns ((Listof Quad) . -> . (Listof Quad)))
(define (lines->columns lines)
(define prob (new problem% [solver #f]))
(define max-column-lines world:default-lines-per-column)
(define-values (columns ignored-return-value)
(for/fold ([columns : (Listof Quad) null]
[lines-remaining : (Listof Quad) lines])
([col-idx : Natural (stop-before (in-naturals) (λ(x) (null? lines-remaining)))])
;; domain constraint is best way to simplify csp, because it limits the search space.
;; search from largest possible value to smallest.
largest possible is the minimum of the column lines , or
;; the number of lines left (modulo minimum page lines) ...
(define viable-column-range
(sequence->list (in-range (min max-column-lines (max
(length lines-remaining)
(- (length lines-remaining) world:minimum-lines-per-column)))
... and the smallest possible is 1 , or the current minimum lines .
;; (sub1 insures that range is inclusive of last value.)
(sub1 (min 1 world:minimum-lines-per-column)) -1)))
(send prob add-variable "column-lines" viable-column-range)
;; greediness constraint: leave enough lines for next page, or take all
(: greediness-constraint (Index . -> . Boolean))
(define (greediness-constraint pl)
(define leftover (- (length lines-remaining) pl))
(or (= leftover 0) (>= leftover world:minimum-lines-per-column)))
(send prob add-constraint greediness-constraint '("column-lines"))
;; last lines constraint: don't take page that will end with too few lines of last paragraph.
(: last-lines-constraint (-> Index Boolean))
(define (last-lines-constraint pl)
(define last-line-of-page (list-ref lines-remaining (sub1 pl)))
(define lines-in-this-paragraph (assert (quad-attr-ref last-line-of-page world:total-lines-key) integer?))
(define line-index-of-last-line (assert (quad-attr-ref last-line-of-page world:line-index-key) integer?))
(define (paragraph-too-short-to-meet-constraint?)
(< lines-in-this-paragraph world:min-last-lines))
(or (paragraph-too-short-to-meet-constraint?)
(>= (add1 line-index-of-last-line) world:min-last-lines)))
(send prob add-constraint last-lines-constraint '("column-lines"))
;; first lines constraint: don't take page that will leave too few lines at top of next page
(: first-lines-constraint (Index (Listof Quad) . -> . Boolean))
(define (first-lines-constraint pl lines-remaining)
(define last-line-of-page (list-ref lines-remaining (sub1 pl)))
(define lines-in-this-paragraph (assert (quad-attr-ref last-line-of-page world:total-lines-key) integer?))
(define line-index-of-last-line (assert (quad-attr-ref last-line-of-page world:line-index-key) integer?))
(define lines-that-will-remain (- lines-in-this-paragraph (add1 line-index-of-last-line)))
(define (paragraph-too-short-to-meet-constraint?)
(< lines-in-this-paragraph world:min-first-lines))
(or (paragraph-too-short-to-meet-constraint?)
(= 0 lines-that-will-remain) ; ok to use all lines ...
(>= lines-that-will-remain world:min-first-lines))) ; but if any remain, must be minimum number.
(send prob add-constraint (λ(x) (first-lines-constraint (assert x index?) lines-remaining)) '("column-lines"))
(define s (send prob get-solution))
(define how-many-lines-to-take (assert (hash-ref s "column-lines") exact-nonnegative-integer?))
(define-values (lines-to-take lines-to-leave) (split-at lines-remaining how-many-lines-to-take))
(send prob reset)
(define new-column (quads->column lines-to-take))
(values (cons (apply column (attr-change (make-quadattrs (quad-attrs new-column)) (list world:column-index-key col-idx)) (quad-list new-column)) columns) lines-to-leave)))
(reverse columns))
(: columns->pages ((Listof Quad) . -> . (Listof Quad)))
(define (columns->pages cols)
(define columns-per-page (assert (quad-attr-ref (car cols) world:column-count-key (world:column-count-key-default)) exact-positive-integer?))
(define column-gutter (assert (quad-attr-ref (car cols) world:column-gutter-key (world:column-gutter-key-default)) flonum?))
;; don't use default value here. If the col doesn't have a measure key,
;; it deserves to be an error, because that means the line was composed incorrectly.
(when (not (quad-has-attr? (car cols) world:measure-key))
(error 'columns->pages "column attrs contain no measure key: ~a ~a" (quad-attrs (car cols)) (quad-car (car cols))))
(define column-width (assert (quad-attr-ref (car cols) world:measure-key) flonum?))
(define width-of-printed-area (+ (* columns-per-page column-width) (* (sub1 columns-per-page) column-gutter)))
(define result-pages
((inst map Quad (Listof Quad)) (λ(cols) (quads->page cols))
(for/list : (Listof (Listof Quad)) ([page-cols (in-list (slice-at cols columns-per-page))])
(define-values (last-x cols)
(for/fold ([current-x : Float (/ (- (world:paper-width-default) width-of-printed-area) 2.0)]
[cols : (Listof Quad) null])
([col (in-list page-cols)][idx (in-naturals)])
(values (foldl fl+ 0.0 (list current-x column-width column-gutter)) (cons (quad-attr-set* col (list 'x current-x 'y 40.0 world:column-index-key idx)) cols))))
(reverse cols))))
result-pages)
(: block-quads->lines ((Listof Quad) . -> . (Listof Quad)))
(define (block-quads->lines qs)
(block->lines (quads->block qs)))
| null | https://raw.githubusercontent.com/bennn/dissertation/779bfe6f8fee19092849b7e2cfc476df33e9357b/dissertation/scrbl/jfp-2019/benchmarks/quadU/typed/quad-main.rkt | racket | ----------------------------------------------------------------------------
bg: should maybe import this
=============================================================================
-----------------------------------------------------------------------------
exclude last line from looseness calculation
todo: introduce a Quad subtype where quad-list is guaranteed to be all Quads (no strings)
(cond
[(>= quality world:max-quality) wrap-best]
[(<= quality world:draft-quality) wrap-first]
[else wrap-adaptive]))
100/150
todo: generalize computation of widths and heights, recursively
domain constraint is best way to simplify csp, because it limits the search space.
search from largest possible value to smallest.
the number of lines left (modulo minimum page lines) ...
(sub1 insures that range is inclusive of last value.)
greediness constraint: leave enough lines for next page, or take all
last lines constraint: don't take page that will end with too few lines of last paragraph.
first lines constraint: don't take page that will leave too few lines at top of next page
ok to use all lines ...
but if any remain, must be minimum number.
don't use default value here. If the col doesn't have a measure key,
it deserves to be an error, because that means the line was composed incorrectly. | #lang typed/racket/base
(provide
typeset
)
(require
require-typed-check
"../base/quad-types.rkt"
racket/class
(only-in racket/list append* split-at drop-right)
(only-in racket/sequence sequence->list)
(only-in math/flonum fl+ fl fl>))
(require/typed/check "quads.rkt"
(make-quadattrs (-> (Listof Any) QuadAttrs))
(quad-car (-> Quad (U String Quad)))
(line (->* ((Listof Any)) #:rest USQ Quad))
(quads->column (-> (Listof Quad) Quad))
(quads->page (-> (Listof Quad) Quad))
(quads->block (-> (Listof Quad) Quad))
(quad-has-attr? (-> Quad Symbol Boolean))
(quad-name (-> Quad Symbol))
(quad-attr-ref (((U Quad QuadAttrs) Symbol) (Any) . ->* . Any))
(quad-list (-> Quad (Listof USQ)))
(quad-attrs (-> Quad (Listof Any)))
(quads->doc (-> (Listof Quad) Quad))
(page (->* ((Listof Any)) #:rest USQ Quad))
(column (->* ((Listof Any)) #:rest USQ Quad))
)
(require/typed/check "wrap.rkt"
(insert-spacers-in-line ((Quad) ((Option Symbol)) . ->* . Quad))
( wrap - adaptive ( - > * ( ( ) ) ( Float ) ( ) ) )
(wrap-best (->* ((Listof Quad)) (Float) (Listof Quad)))
( wrap - first ( - > * ( ( ) ) ( Float ) ( ) ) )
(fill (->* (Quad) ((Option Float)) Quad))
(add-horiz-positions (-> Quad Quad))
)
(require/typed/check "world.rkt"
[world:line-looseness-key Symbol]
[world:allow-hyphenated-last-word-in-paragraph Boolean]
[world:line-looseness-tolerance Float]
[world:line-index-key Symbol]
[world:measure-key Symbol]
[world:use-hyphenation? Boolean]
[world:max-quality Index]
[world:total-lines-key Symbol]
[world:draft-quality Index]
[world:quality-key Symbol]
[world:quality-key-default (Parameterof Integer)]
[world:paper-width-default (Parameterof Float)]
[world:column-count-key Symbol]
[world:column-count-key-default (Parameterof Integer)]
[world:column-gutter-key Symbol]
[world:column-gutter-key-default (Parameterof Float)]
[world:column-index-key Symbol]
[world:min-first-lines Index]
[world:min-last-lines Index]
[world:minimum-lines-per-column Index]
[world:default-lines-per-column Index]
)
(require/typed/check "measure.rkt"
[round-float (-> Float Float)]
[load-text-cache-file (-> Void)]
[update-text-cache-file (-> Void)]
)
(require/typed/check "utils.rkt"
(add-vert-positions (-> Quad Quad))
(attr-change (-> QuadAttrs (Listof Any) QuadAttrs))
(compute-line-height (-> Quad Quad))
(hyphenate-quad (USQ -> USQ))
(join-quads ((Listof Quad) -> (Listof Quad)))
(merge-attrs (QuadAttrs * -> QuadAttrs))
(quad-attr-set* (Quad (Listof Any) -> Quad))
(split-last (All (A) ((Listof A) -> (values (Listof A) A))))
(split-quad (-> Quad (Listof Quad)))
)
(require/typed/check "sugar-list.rkt"
(slice-at (All (A) (case-> ((Listof A) Positive-Integer -> (Listof (Listof A)))
((Listof A) Positive-Integer Boolean -> (Listof (Listof A))))))
)
(require/typed/check "../base/csp/csp.rkt"
[problem% (Class (init-field [solver Any])
(field [_solver Any])
(field [_variable-domains Any])
(field [_constraints Any])
[reset (-> Void)]
[custom-print (Output-Port Integer -> Void)]
[custom-display (Output-Port -> Void)]
[custom-write (Output-Port -> Void)]
[add-variable (Any (Listof Any) . -> . Void)]
[add-variables ((Listof Any) Any . -> . Void)]
[add-constraint ((Index . -> . Boolean) (Listof Any) . -> . Void)]
[get-solution (-> HashTableTop)]
[get-solutions (-> (Listof (HashTable String Integer)))]
[get-solution-iter (-> HashTableTop)]
[set-solver (Any . -> . Void)]
[get-solver (-> Any)])])
(: listof-quad? (-> Any Boolean : (Listof Quad)))
(define (listof-quad? qs)
(and (list? qs) (andmap quad? qs)))
(define-type Block-Type (Listof Quad))
(define-type Multicolumn-Type (Listof Block-Type))
(define-type Multipage-Type (Listof Multicolumn-Type))
(: typeset (-> Quad Quad))
(define (typeset x)
(load-text-cache-file)
(define pages (append*
(for/list : (Listof (Listof Quad))
([multipage (in-list (input->nested-blocks x))])
(columns->pages (append*
(for/list : (Listof (Listof Quad))
([multicolumn (in-list multipage)])
(lines->columns (append*
(for/list : (Listof (Listof Quad))
([block-quads (in-list multicolumn)])
(block-quads->lines block-quads))))))))))
(define doc (pages->doc pages))
(update-text-cache-file)
doc)
(: cons-reverse (All (A B) ((Listof A) (Listof B) -> (Pairof (Listof A) (Listof B)))))
(define (cons-reverse xs ys)
(cons (reverse xs) ys))
(: input->nested-blocks (Quad . -> . (Listof Multipage-Type)))
(define (input->nested-blocks i)
(define-values (mps mcs bs b)
(for/fold ([multipages : (Listof Multipage-Type) null]
[multicolumns : (Listof Multicolumn-Type) null]
[blocks : (Listof Block-Type) null]
[block-acc : Block-Type null])
([q (in-list (split-quad i))])
(case (quad-name q)
[(page-break) (values (cons-reverse (cons-reverse (cons-reverse block-acc blocks) multicolumns) multipages) null null null)]
[(column-break) (values multipages (cons-reverse (cons-reverse block-acc blocks) multicolumns) null null)]
[(block-break) (values multipages multicolumns (cons-reverse block-acc blocks) null)]
[else (values multipages multicolumns blocks (cons q block-acc))])))
(reverse (cons-reverse (cons-reverse (cons-reverse b bs) mcs) mps)))
(: merge-adjacent-within (Quad . -> . Quad))
(define (merge-adjacent-within q)
(quad (quad-name q)
(make-quadattrs (quad-attrs q))
(join-quads (assert (quad-list q) listof-quad?))))
(: hyphenate-quad-except-last-word (Quad . -> . Quad))
(define (hyphenate-quad-except-last-word q)
(define-values (first-quads last-quad) (split-last (quad-list q)))
(quad (quad-name q) (make-quadattrs (quad-attrs q))
(append (for/list : (Listof USQ) ([q (in-list first-quads)])
(hyphenate-quad q))
(list last-quad))))
(: average-looseness ((Listof Quad) . -> . Float))
(define (average-looseness lines)
(if (<= (length lines) 1)
(ann 0.0 Float)
(round-float (/ (foldl fl+ 0.0 (map (λ([line : Quad]) (assert (quad-attr-ref line world:line-looseness-key 0.0) flonum?)) lines-to-measure)) (- (fl (length lines)) 1.0))))))
(: block->lines (Quad . -> . (Listof Quad)))
(define (block->lines b)
(: wrap-quads ((Listof Quad) . -> . (Listof Quad)))
(define (wrap-quads qs)
(define wrap-proc wrap-best)
(wrap-proc qs))
(define avg-looseness (average-looseness wrapped-lines-without-hyphens))
(define gets-hyphenation? (and world:use-hyphenation?
(fl> avg-looseness world:line-looseness-tolerance)))
(define wrapped-lines (if gets-hyphenation?
(wrap-quads (split-quad ((if world:allow-hyphenated-last-word-in-paragraph
(lambda ([x : USQ]) (assert (hyphenate-quad x) quad?))
hyphenate-quad-except-last-word) (merge-adjacent-within b))))
wrapped-lines-without-hyphens))
(map insert-spacers-in-line
(for/list : (Listof Quad)
([line-idx (in-naturals)][the-line-any : USQ (in-list wrapped-lines)])
(define the-line (assert the-line-any quad?))
(apply line (attr-change (make-quadattrs (quad-attrs the-line)) (list 'line-idx line-idx 'lines (length wrapped-lines))) (quad-list the-line)))))
(: number-pages ((Listof Quad) . -> . (Listof Quad)))
(define (number-pages ps)
(for/list ([i (in-naturals)][p (in-list ps)])
(apply page (merge-attrs (make-quadattrs (quad-attrs p)) `((page . ,i))) (quad-list p))))
(: pages->doc ((Listof Quad) . -> . Quad))
(define (pages->doc ps)
todo : resolve and other last - minute tasks
(: columns-mapper (Quad . -> . Quad))
(define (columns-mapper page-in)
(apply page (make-quadattrs (quad-attrs page-in))
(map add-vert-positions (for/list : (Listof Quad) ([col-any (in-list (quad-list page-in))])
(define col (assert col-any quad?))
(apply column (make-quadattrs (quad-attrs col)) (map (λ([ln : Any]) (compute-line-height (add-horiz-positions (fill (assert ln quad?))))) (quad-list col)))))))
(define mapped-pages (map columns-mapper (number-pages ps)))
(define doc (quads->doc mapped-pages))
doc)
(: lines->columns ((Listof Quad) . -> . (Listof Quad)))
(define (lines->columns lines)
(define prob (new problem% [solver #f]))
(define max-column-lines world:default-lines-per-column)
(define-values (columns ignored-return-value)
(for/fold ([columns : (Listof Quad) null]
[lines-remaining : (Listof Quad) lines])
([col-idx : Natural (stop-before (in-naturals) (λ(x) (null? lines-remaining)))])
largest possible is the minimum of the column lines , or
(define viable-column-range
(sequence->list (in-range (min max-column-lines (max
(length lines-remaining)
(- (length lines-remaining) world:minimum-lines-per-column)))
... and the smallest possible is 1 , or the current minimum lines .
(sub1 (min 1 world:minimum-lines-per-column)) -1)))
(send prob add-variable "column-lines" viable-column-range)
(: greediness-constraint (Index . -> . Boolean))
(define (greediness-constraint pl)
(define leftover (- (length lines-remaining) pl))
(or (= leftover 0) (>= leftover world:minimum-lines-per-column)))
(send prob add-constraint greediness-constraint '("column-lines"))
(: last-lines-constraint (-> Index Boolean))
(define (last-lines-constraint pl)
(define last-line-of-page (list-ref lines-remaining (sub1 pl)))
(define lines-in-this-paragraph (assert (quad-attr-ref last-line-of-page world:total-lines-key) integer?))
(define line-index-of-last-line (assert (quad-attr-ref last-line-of-page world:line-index-key) integer?))
(define (paragraph-too-short-to-meet-constraint?)
(< lines-in-this-paragraph world:min-last-lines))
(or (paragraph-too-short-to-meet-constraint?)
(>= (add1 line-index-of-last-line) world:min-last-lines)))
(send prob add-constraint last-lines-constraint '("column-lines"))
(: first-lines-constraint (Index (Listof Quad) . -> . Boolean))
(define (first-lines-constraint pl lines-remaining)
(define last-line-of-page (list-ref lines-remaining (sub1 pl)))
(define lines-in-this-paragraph (assert (quad-attr-ref last-line-of-page world:total-lines-key) integer?))
(define line-index-of-last-line (assert (quad-attr-ref last-line-of-page world:line-index-key) integer?))
(define lines-that-will-remain (- lines-in-this-paragraph (add1 line-index-of-last-line)))
(define (paragraph-too-short-to-meet-constraint?)
(< lines-in-this-paragraph world:min-first-lines))
(or (paragraph-too-short-to-meet-constraint?)
(send prob add-constraint (λ(x) (first-lines-constraint (assert x index?) lines-remaining)) '("column-lines"))
(define s (send prob get-solution))
(define how-many-lines-to-take (assert (hash-ref s "column-lines") exact-nonnegative-integer?))
(define-values (lines-to-take lines-to-leave) (split-at lines-remaining how-many-lines-to-take))
(send prob reset)
(define new-column (quads->column lines-to-take))
(values (cons (apply column (attr-change (make-quadattrs (quad-attrs new-column)) (list world:column-index-key col-idx)) (quad-list new-column)) columns) lines-to-leave)))
(reverse columns))
(: columns->pages ((Listof Quad) . -> . (Listof Quad)))
(define (columns->pages cols)
(define columns-per-page (assert (quad-attr-ref (car cols) world:column-count-key (world:column-count-key-default)) exact-positive-integer?))
(define column-gutter (assert (quad-attr-ref (car cols) world:column-gutter-key (world:column-gutter-key-default)) flonum?))
(when (not (quad-has-attr? (car cols) world:measure-key))
(error 'columns->pages "column attrs contain no measure key: ~a ~a" (quad-attrs (car cols)) (quad-car (car cols))))
(define column-width (assert (quad-attr-ref (car cols) world:measure-key) flonum?))
(define width-of-printed-area (+ (* columns-per-page column-width) (* (sub1 columns-per-page) column-gutter)))
(define result-pages
((inst map Quad (Listof Quad)) (λ(cols) (quads->page cols))
(for/list : (Listof (Listof Quad)) ([page-cols (in-list (slice-at cols columns-per-page))])
(define-values (last-x cols)
(for/fold ([current-x : Float (/ (- (world:paper-width-default) width-of-printed-area) 2.0)]
[cols : (Listof Quad) null])
([col (in-list page-cols)][idx (in-naturals)])
(values (foldl fl+ 0.0 (list current-x column-width column-gutter)) (cons (quad-attr-set* col (list 'x current-x 'y 40.0 world:column-index-key idx)) cols))))
(reverse cols))))
result-pages)
(: block-quads->lines ((Listof Quad) . -> . (Listof Quad)))
(define (block-quads->lines qs)
(block->lines (quads->block qs)))
|
ef4f51a805a2eaacb110f41a038448cec62b75bce01b9f8cd204280f1e776f1b | kupl/LearnML | patch.ml | let rec max (l : int list) : int =
match l with hd :: tl -> if hd > max tl then hd else max tl | [] -> min_int
| null | https://raw.githubusercontent.com/kupl/LearnML/c98ef2b95ef67e657b8158a2c504330e9cfb7700/result/cafe2/max/sub10/patch.ml | ocaml | let rec max (l : int list) : int =
match l with hd :: tl -> if hd > max tl then hd else max tl | [] -> min_int
|
|
a13a00f322a6afb2ce0c728cb4b2707c2f3c0d29492e4897df7ba0e6b1fe583e | coingaming/lnd-client | Lightning_Fields.hs | {- This file was auto-generated from lightning.proto by the proto-lens-protoc program. -}
# LANGUAGE ScopedTypeVariables , DataKinds , TypeFamilies , UndecidableInstances , GeneralizedNewtypeDeriving , MultiParamTypeClasses , FlexibleContexts , FlexibleInstances , PatternSynonyms , MagicHash , NoImplicitPrelude , BangPatterns , TypeApplications , OverloadedStrings , DerivingStrategies , DeriveGeneric #
{-# OPTIONS_GHC -Wno-unused-imports#-}
{-# OPTIONS_GHC -Wno-duplicate-exports#-}
# OPTIONS_GHC -Wno - dodgy - exports #
module Proto.Lightning_Fields where
import qualified Data.ProtoLens.Runtime.Prelude as Prelude
import qualified Data.ProtoLens.Runtime.Data.Int as Data.Int
import qualified Data.ProtoLens.Runtime.Data.Monoid as Data.Monoid
import qualified Data.ProtoLens.Runtime.Data.Word as Data.Word
import qualified Data.ProtoLens.Runtime.Data.ProtoLens as Data.ProtoLens
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Bytes as Data.ProtoLens.Encoding.Bytes
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Growing as Data.ProtoLens.Encoding.Growing
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Parser.Unsafe as Data.ProtoLens.Encoding.Parser.Unsafe
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Wire as Data.ProtoLens.Encoding.Wire
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Field as Data.ProtoLens.Field
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Message.Enum as Data.ProtoLens.Message.Enum
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Service.Types as Data.ProtoLens.Service.Types
import qualified Data.ProtoLens.Runtime.Lens.Family2 as Lens.Family2
import qualified Data.ProtoLens.Runtime.Lens.Family2.Unchecked as Lens.Family2.Unchecked
import qualified Data.ProtoLens.Runtime.Data.Text as Data.Text
import qualified Data.ProtoLens.Runtime.Data.Map as Data.Map
import qualified Data.ProtoLens.Runtime.Data.ByteString as Data.ByteString
import qualified Data.ProtoLens.Runtime.Data.ByteString.Char8 as Data.ByteString.Char8
import qualified Data.ProtoLens.Runtime.Data.Text.Encoding as Data.Text.Encoding
import qualified Data.ProtoLens.Runtime.Data.Vector as Data.Vector
import qualified Data.ProtoLens.Runtime.Data.Vector.Generic as Data.Vector.Generic
import qualified Data.ProtoLens.Runtime.Data.Vector.Unboxed as Data.Vector.Unboxed
import qualified Data.ProtoLens.Runtime.Text.Read as Text.Read
import qualified Proto.Lnrpc.Ln0
import qualified Proto.Lnrpc.Ln1
abandoned ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "abandoned" a) =>
Lens.Family2.LensLike' f s a
abandoned = Data.ProtoLens.Field.field @"abandoned"
accept ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "accept" a) =>
Lens.Family2.LensLike' f s a
accept = Data.ProtoLens.Field.field @"accept"
account ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "account" a) =>
Lens.Family2.LensLike' f s a
account = Data.ProtoLens.Field.field @"account"
activeOnly ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "activeOnly" a) =>
Lens.Family2.LensLike' f s a
activeOnly = Data.ProtoLens.Field.field @"activeOnly"
addr ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "addr" a) =>
Lens.Family2.LensLike' f s a
addr = Data.ProtoLens.Field.field @"addr"
addrToAmount ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "addrToAmount" a) =>
Lens.Family2.LensLike' f s a
addrToAmount = Data.ProtoLens.Field.field @"addrToAmount"
address ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "address" a) =>
Lens.Family2.LensLike' f s a
address = Data.ProtoLens.Field.field @"address"
addressType ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "addressType" a) =>
Lens.Family2.LensLike' f s a
addressType = Data.ProtoLens.Field.field @"addressType"
alias ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "alias" a) =>
Lens.Family2.LensLike' f s a
alias = Data.ProtoLens.Field.field @"alias"
allowSelfPayment ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "allowSelfPayment" a) =>
Lens.Family2.LensLike' f s a
allowSelfPayment = Data.ProtoLens.Field.field @"allowSelfPayment"
amount ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "amount" a) =>
Lens.Family2.LensLike' f s a
amount = Data.ProtoLens.Field.field @"amount"
amountSat ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "amountSat" a) =>
Lens.Family2.LensLike' f s a
amountSat = Data.ProtoLens.Field.field @"amountSat"
amt ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "amt" a) =>
Lens.Family2.LensLike' f s a
amt = Data.ProtoLens.Field.field @"amt"
amtMsat ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "amtMsat" a) =>
Lens.Family2.LensLike' f s a
amtMsat = Data.ProtoLens.Field.field @"amtMsat"
bestHeaderTimestamp ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "bestHeaderTimestamp" a) =>
Lens.Family2.LensLike' f s a
bestHeaderTimestamp
= Data.ProtoLens.Field.field @"bestHeaderTimestamp"
blockHash ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "blockHash" a) =>
Lens.Family2.LensLike' f s a
blockHash = Data.ProtoLens.Field.field @"blockHash"
blockHeight ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "blockHeight" a) =>
Lens.Family2.LensLike' f s a
blockHeight = Data.ProtoLens.Field.field @"blockHeight"
blockSha ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "blockSha" a) =>
Lens.Family2.LensLike' f s a
blockSha = Data.ProtoLens.Field.field @"blockSha"
breach ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "breach" a) =>
Lens.Family2.LensLike' f s a
breach = Data.ProtoLens.Field.field @"breach"
bytesRecv ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "bytesRecv" a) =>
Lens.Family2.LensLike' f s a
bytesRecv = Data.ProtoLens.Field.field @"bytesRecv"
bytesSent ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "bytesSent" a) =>
Lens.Family2.LensLike' f s a
bytesSent = Data.ProtoLens.Field.field @"bytesSent"
chain ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "chain" a) =>
Lens.Family2.LensLike' f s a
chain = Data.ProtoLens.Field.field @"chain"
chainHash ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "chainHash" a) =>
Lens.Family2.LensLike' f s a
chainHash = Data.ProtoLens.Field.field @"chainHash"
chains ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "chains" a) =>
Lens.Family2.LensLike' f s a
chains = Data.ProtoLens.Field.field @"chains"
chanClose ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "chanClose" a) =>
Lens.Family2.LensLike' f s a
chanClose = Data.ProtoLens.Field.field @"chanClose"
chanOpen ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "chanOpen" a) =>
Lens.Family2.LensLike' f s a
chanOpen = Data.ProtoLens.Field.field @"chanOpen"
chanPending ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "chanPending" a) =>
Lens.Family2.LensLike' f s a
chanPending = Data.ProtoLens.Field.field @"chanPending"
channelFlags ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "channelFlags" a) =>
Lens.Family2.LensLike' f s a
channelFlags = Data.ProtoLens.Field.field @"channelFlags"
channelPoint ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "channelPoint" a) =>
Lens.Family2.LensLike' f s a
channelPoint = Data.ProtoLens.Field.field @"channelPoint"
channelReserve ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "channelReserve" a) =>
Lens.Family2.LensLike' f s a
channelReserve = Data.ProtoLens.Field.field @"channelReserve"
channels ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "channels" a) =>
Lens.Family2.LensLike' f s a
channels = Data.ProtoLens.Field.field @"channels"
closeAddress ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "closeAddress" a) =>
Lens.Family2.LensLike' f s a
closeAddress = Data.ProtoLens.Field.field @"closeAddress"
closePending ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "closePending" a) =>
Lens.Family2.LensLike' f s a
closePending = Data.ProtoLens.Field.field @"closePending"
closingTxid ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "closingTxid" a) =>
Lens.Family2.LensLike' f s a
closingTxid = Data.ProtoLens.Field.field @"closingTxid"
cltvLimit ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "cltvLimit" a) =>
Lens.Family2.LensLike' f s a
cltvLimit = Data.ProtoLens.Field.field @"cltvLimit"
color ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "color" a) =>
Lens.Family2.LensLike' f s a
color = Data.ProtoLens.Field.field @"color"
commitHash ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "commitHash" a) =>
Lens.Family2.LensLike' f s a
commitHash = Data.ProtoLens.Field.field @"commitHash"
commitmentType ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "commitmentType" a) =>
Lens.Family2.LensLike' f s a
commitmentType = Data.ProtoLens.Field.field @"commitmentType"
confirmations ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "confirmations" a) =>
Lens.Family2.LensLike' f s a
confirmations = Data.ProtoLens.Field.field @"confirmations"
cooperative ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "cooperative" a) =>
Lens.Family2.LensLike' f s a
cooperative = Data.ProtoLens.Field.field @"cooperative"
csvDelay ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "csvDelay" a) =>
Lens.Family2.LensLike' f s a
csvDelay = Data.ProtoLens.Field.field @"csvDelay"
data' ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "data'" a) =>
Lens.Family2.LensLike' f s a
data' = Data.ProtoLens.Field.field @"data'"
deliveryAddress ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "deliveryAddress" a) =>
Lens.Family2.LensLike' f s a
deliveryAddress = Data.ProtoLens.Field.field @"deliveryAddress"
dest ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "dest" a) =>
Lens.Family2.LensLike' f s a
dest = Data.ProtoLens.Field.field @"dest"
destAddresses ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "destAddresses" a) =>
Lens.Family2.LensLike' f s a
destAddresses = Data.ProtoLens.Field.field @"destAddresses"
destCustomRecords ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "destCustomRecords" a) =>
Lens.Family2.LensLike' f s a
destCustomRecords = Data.ProtoLens.Field.field @"destCustomRecords"
-- NOTE(review): auto-generated proto-lens accessors (proto-lens-protoc
-- output); do not edit by hand — regenerate from lightning.proto.
-- Every binding below follows one pattern: the accessor @x@ is
-- 'Data.ProtoLens.Field.field' instantiated at the type-level label
-- @"x"@, giving a van Laarhoven-style lens ('Lens.Family2.LensLike'')
-- usable with any message type @s@ that has a
-- @Data.ProtoLens.Field.HasField s "x" a@ instance.  Accessors
-- prefixed with @maybe'@ target the underlying optional field
-- representation (presumably a 'Prelude.Maybe'-valued view of an
-- optional/oneof field — confirm against the generated message types).
destFeatures ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "destFeatures" a) =>
  Lens.Family2.LensLike' f s a
destFeatures = Data.ProtoLens.Field.field @"destFeatures"
destString ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "destString" a) =>
  Lens.Family2.LensLike' f s a
destString = Data.ProtoLens.Field.field @"destString"
dustLimit ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "dustLimit" a) =>
  Lens.Family2.LensLike' f s a
dustLimit = Data.ProtoLens.Field.field @"dustLimit"
endHeight ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "endHeight" a) =>
  Lens.Family2.LensLike' f s a
endHeight = Data.ProtoLens.Field.field @"endHeight"
error ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "error" a) =>
  Lens.Family2.LensLike' f s a
error = Data.ProtoLens.Field.field @"error"
errors ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "errors" a) =>
  Lens.Family2.LensLike' f s a
errors = Data.ProtoLens.Field.field @"errors"
features ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "features" a) =>
  Lens.Family2.LensLike' f s a
features = Data.ProtoLens.Field.field @"features"
feeLimit ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "feeLimit" a) =>
  Lens.Family2.LensLike' f s a
feeLimit = Data.ProtoLens.Field.field @"feeLimit"
feePerKw ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "feePerKw" a) =>
  Lens.Family2.LensLike' f s a
feePerKw = Data.ProtoLens.Field.field @"feePerKw"
feeSat ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "feeSat" a) =>
  Lens.Family2.LensLike' f s a
feeSat = Data.ProtoLens.Field.field @"feeSat"
feerateSatPerByte ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "feerateSatPerByte" a) =>
  Lens.Family2.LensLike' f s a
feerateSatPerByte = Data.ProtoLens.Field.field @"feerateSatPerByte"
finalCltvDelta ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "finalCltvDelta" a) =>
  Lens.Family2.LensLike' f s a
finalCltvDelta = Data.ProtoLens.Field.field @"finalCltvDelta"
flapCount ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "flapCount" a) =>
  Lens.Family2.LensLike' f s a
flapCount = Data.ProtoLens.Field.field @"flapCount"
force ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "force" a) =>
  Lens.Family2.LensLike' f s a
force = Data.ProtoLens.Field.field @"force"
fundingAddress ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "fundingAddress" a) =>
  Lens.Family2.LensLike' f s a
fundingAddress = Data.ProtoLens.Field.field @"fundingAddress"
fundingAmount ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "fundingAmount" a) =>
  Lens.Family2.LensLike' f s a
fundingAmount = Data.ProtoLens.Field.field @"fundingAmount"
fundingAmt ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "fundingAmt" a) =>
  Lens.Family2.LensLike' f s a
fundingAmt = Data.ProtoLens.Field.field @"fundingAmt"
fundingCanceled ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "fundingCanceled" a) =>
  Lens.Family2.LensLike' f s a
fundingCanceled = Data.ProtoLens.Field.field @"fundingCanceled"
fundingShim ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "fundingShim" a) =>
  Lens.Family2.LensLike' f s a
fundingShim = Data.ProtoLens.Field.field @"fundingShim"
host ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "host" a) =>
  Lens.Family2.LensLike' f s a
host = Data.ProtoLens.Field.field @"host"
identityPubkey ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "identityPubkey" a) =>
  Lens.Family2.LensLike' f s a
identityPubkey = Data.ProtoLens.Field.field @"identityPubkey"
inFlightMaxMsat ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "inFlightMaxMsat" a) =>
  Lens.Family2.LensLike' f s a
inFlightMaxMsat = Data.ProtoLens.Field.field @"inFlightMaxMsat"
inactiveOnly ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "inactiveOnly" a) =>
  Lens.Family2.LensLike' f s a
inactiveOnly = Data.ProtoLens.Field.field @"inactiveOnly"
inbound ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "inbound" a) =>
  Lens.Family2.LensLike' f s a
inbound = Data.ProtoLens.Field.field @"inbound"
key ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "key" a) =>
  Lens.Family2.LensLike' f s a
key = Data.ProtoLens.Field.field @"key"
label ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "label" a) =>
  Lens.Family2.LensLike' f s a
label = Data.ProtoLens.Field.field @"label"
lastFlapNs ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "lastFlapNs" a) =>
  Lens.Family2.LensLike' f s a
lastFlapNs = Data.ProtoLens.Field.field @"lastFlapNs"
lastHopPubkey ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "lastHopPubkey" a) =>
  Lens.Family2.LensLike' f s a
lastHopPubkey = Data.ProtoLens.Field.field @"lastHopPubkey"
lastPingPayload ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "lastPingPayload" a) =>
  Lens.Family2.LensLike' f s a
lastPingPayload = Data.ProtoLens.Field.field @"lastPingPayload"
latestError ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "latestError" a) =>
  Lens.Family2.LensLike' f s a
latestError = Data.ProtoLens.Field.field @"latestError"
localForce ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "localForce" a) =>
  Lens.Family2.LensLike' f s a
localForce = Data.ProtoLens.Field.field @"localForce"
localFundingAmount ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "localFundingAmount" a) =>
  Lens.Family2.LensLike' f s a
localFundingAmount
  = Data.ProtoLens.Field.field @"localFundingAmount"
maxAcceptedHtlcs ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maxAcceptedHtlcs" a) =>
  Lens.Family2.LensLike' f s a
maxAcceptedHtlcs = Data.ProtoLens.Field.field @"maxAcceptedHtlcs"
maxConfs ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maxConfs" a) =>
  Lens.Family2.LensLike' f s a
maxConfs = Data.ProtoLens.Field.field @"maxConfs"
maxHtlcCount ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maxHtlcCount" a) =>
  Lens.Family2.LensLike' f s a
maxHtlcCount = Data.ProtoLens.Field.field @"maxHtlcCount"
maxLocalCsv ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maxLocalCsv" a) =>
  Lens.Family2.LensLike' f s a
maxLocalCsv = Data.ProtoLens.Field.field @"maxLocalCsv"
maxValueInFlight ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maxValueInFlight" a) =>
  Lens.Family2.LensLike' f s a
maxValueInFlight = Data.ProtoLens.Field.field @"maxValueInFlight"
maybe'addr ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'addr" a) =>
  Lens.Family2.LensLike' f s a
maybe'addr = Data.ProtoLens.Field.field @"maybe'addr"
maybe'chanClose ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'chanClose" a) =>
  Lens.Family2.LensLike' f s a
maybe'chanClose = Data.ProtoLens.Field.field @"maybe'chanClose"
maybe'chanOpen ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'chanOpen" a) =>
  Lens.Family2.LensLike' f s a
maybe'chanOpen = Data.ProtoLens.Field.field @"maybe'chanOpen"
maybe'chanPending ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'chanPending" a) =>
  Lens.Family2.LensLike' f s a
maybe'chanPending = Data.ProtoLens.Field.field @"maybe'chanPending"
maybe'channelPoint ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'channelPoint" a) =>
  Lens.Family2.LensLike' f s a
maybe'channelPoint
  = Data.ProtoLens.Field.field @"maybe'channelPoint"
maybe'closePending ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'closePending" a) =>
  Lens.Family2.LensLike' f s a
maybe'closePending
  = Data.ProtoLens.Field.field @"maybe'closePending"
maybe'feeLimit ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'feeLimit" a) =>
  Lens.Family2.LensLike' f s a
maybe'feeLimit = Data.ProtoLens.Field.field @"maybe'feeLimit"
maybe'fundingShim ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'fundingShim" a) =>
  Lens.Family2.LensLike' f s a
maybe'fundingShim = Data.ProtoLens.Field.field @"maybe'fundingShim"
maybe'outpoint ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'outpoint" a) =>
  Lens.Family2.LensLike' f s a
maybe'outpoint = Data.ProtoLens.Field.field @"maybe'outpoint"
maybe'paymentRoute ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'paymentRoute" a) =>
  Lens.Family2.LensLike' f s a
maybe'paymentRoute
  = Data.ProtoLens.Field.field @"maybe'paymentRoute"
maybe'psbtFund ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'psbtFund" a) =>
  Lens.Family2.LensLike' f s a
maybe'psbtFund = Data.ProtoLens.Field.field @"maybe'psbtFund"
maybe'route ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'route" a) =>
  Lens.Family2.LensLike' f s a
maybe'route = Data.ProtoLens.Field.field @"maybe'route"
maybe'update ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'update" a) =>
  Lens.Family2.LensLike' f s a
maybe'update = Data.ProtoLens.Field.field @"maybe'update"
maybe'value ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'value" a) =>
  Lens.Family2.LensLike' f s a
maybe'value = Data.ProtoLens.Field.field @"maybe'value"
-- NOTE(review): auto-generated proto-lens accessors (proto-lens-protoc
-- output); do not edit by hand — regenerate from lightning.proto.
-- Each binding @x@ below is 'Data.ProtoLens.Field.field' applied at
-- the type-level label @"x"@, yielding a polymorphic
-- 'Lens.Family2.LensLike'' that focuses field @x@ of any message
-- type @s@ carrying a @Data.ProtoLens.Field.HasField s "x" a@
-- instance.  The accessor name matches the lowerCamelCase form of the
-- protobuf field name.
minAcceptDepth ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "minAcceptDepth" a) =>
  Lens.Family2.LensLike' f s a
minAcceptDepth = Data.ProtoLens.Field.field @"minAcceptDepth"
minConfs ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "minConfs" a) =>
  Lens.Family2.LensLike' f s a
minConfs = Data.ProtoLens.Field.field @"minConfs"
minHtlc ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "minHtlc" a) =>
  Lens.Family2.LensLike' f s a
minHtlc = Data.ProtoLens.Field.field @"minHtlc"
minHtlcIn ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "minHtlcIn" a) =>
  Lens.Family2.LensLike' f s a
minHtlcIn = Data.ProtoLens.Field.field @"minHtlcIn"
minHtlcMsat ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "minHtlcMsat" a) =>
  Lens.Family2.LensLike' f s a
minHtlcMsat = Data.ProtoLens.Field.field @"minHtlcMsat"
msg ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "msg" a) =>
  Lens.Family2.LensLike' f s a
msg = Data.ProtoLens.Field.field @"msg"
network ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "network" a) =>
  Lens.Family2.LensLike' f s a
network = Data.ProtoLens.Field.field @"network"
nodePubkey ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "nodePubkey" a) =>
  Lens.Family2.LensLike' f s a
nodePubkey = Data.ProtoLens.Field.field @"nodePubkey"
nodePubkeyString ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "nodePubkeyString" a) =>
  Lens.Family2.LensLike' f s a
nodePubkeyString = Data.ProtoLens.Field.field @"nodePubkeyString"
numActiveChannels ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "numActiveChannels" a) =>
  Lens.Family2.LensLike' f s a
numActiveChannels = Data.ProtoLens.Field.field @"numActiveChannels"
numConfirmations ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "numConfirmations" a) =>
  Lens.Family2.LensLike' f s a
numConfirmations = Data.ProtoLens.Field.field @"numConfirmations"
numConfsLeft ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "numConfsLeft" a) =>
  Lens.Family2.LensLike' f s a
numConfsLeft = Data.ProtoLens.Field.field @"numConfsLeft"
numInactiveChannels ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "numInactiveChannels" a) =>
  Lens.Family2.LensLike' f s a
numInactiveChannels
  = Data.ProtoLens.Field.field @"numInactiveChannels"
numPeers ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "numPeers" a) =>
  Lens.Family2.LensLike' f s a
numPeers = Data.ProtoLens.Field.field @"numPeers"
numPendingChannels ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "numPendingChannels" a) =>
  Lens.Family2.LensLike' f s a
numPendingChannels
  = Data.ProtoLens.Field.field @"numPendingChannels"
outgoingChanId ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "outgoingChanId" a) =>
  Lens.Family2.LensLike' f s a
outgoingChanId = Data.ProtoLens.Field.field @"outgoingChanId"
outpoint ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "outpoint" a) =>
  Lens.Family2.LensLike' f s a
outpoint = Data.ProtoLens.Field.field @"outpoint"
paymentAddr ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "paymentAddr" a) =>
  Lens.Family2.LensLike' f s a
paymentAddr = Data.ProtoLens.Field.field @"paymentAddr"
paymentError ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "paymentError" a) =>
  Lens.Family2.LensLike' f s a
paymentError = Data.ProtoLens.Field.field @"paymentError"
paymentHash ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "paymentHash" a) =>
  Lens.Family2.LensLike' f s a
paymentHash = Data.ProtoLens.Field.field @"paymentHash"
paymentHashString ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "paymentHashString" a) =>
  Lens.Family2.LensLike' f s a
paymentHashString = Data.ProtoLens.Field.field @"paymentHashString"
paymentPreimage ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "paymentPreimage" a) =>
  Lens.Family2.LensLike' f s a
paymentPreimage = Data.ProtoLens.Field.field @"paymentPreimage"
paymentRequest ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "paymentRequest" a) =>
  Lens.Family2.LensLike' f s a
paymentRequest = Data.ProtoLens.Field.field @"paymentRequest"
paymentRoute ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "paymentRoute" a) =>
  Lens.Family2.LensLike' f s a
paymentRoute = Data.ProtoLens.Field.field @"paymentRoute"
peer ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "peer" a) =>
  Lens.Family2.LensLike' f s a
peer = Data.ProtoLens.Field.field @"peer"
peers ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "peers" a) =>
  Lens.Family2.LensLike' f s a
peers = Data.ProtoLens.Field.field @"peers"
pendingChanId ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "pendingChanId" a) =>
  Lens.Family2.LensLike' f s a
pendingChanId = Data.ProtoLens.Field.field @"pendingChanId"
pendingChannels ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "pendingChannels" a) =>
  Lens.Family2.LensLike' f s a
pendingChannels = Data.ProtoLens.Field.field @"pendingChannels"
perm ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "perm" a) =>
  Lens.Family2.LensLike' f s a
perm = Data.ProtoLens.Field.field @"perm"
pingTime ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "pingTime" a) =>
  Lens.Family2.LensLike' f s a
pingTime = Data.ProtoLens.Field.field @"pingTime"
pkScript ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "pkScript" a) =>
  Lens.Family2.LensLike' f s a
pkScript = Data.ProtoLens.Field.field @"pkScript"
private ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "private" a) =>
  Lens.Family2.LensLike' f s a
private = Data.ProtoLens.Field.field @"private"
privateOnly ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "privateOnly" a) =>
  Lens.Family2.LensLike' f s a
privateOnly = Data.ProtoLens.Field.field @"privateOnly"
progress ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "progress" a) =>
  Lens.Family2.LensLike' f s a
progress = Data.ProtoLens.Field.field @"progress"
psbt ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "psbt" a) =>
  Lens.Family2.LensLike' f s a
psbt = Data.ProtoLens.Field.field @"psbt"
psbtFund ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "psbtFund" a) =>
  Lens.Family2.LensLike' f s a
psbtFund = Data.ProtoLens.Field.field @"psbtFund"
pubKey ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "pubKey" a) =>
  Lens.Family2.LensLike' f s a
pubKey = Data.ProtoLens.Field.field @"pubKey"
pubkey ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "pubkey" a) =>
  Lens.Family2.LensLike' f s a
pubkey = Data.ProtoLens.Field.field @"pubkey"
publicOnly ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "publicOnly" a) =>
  Lens.Family2.LensLike' f s a
publicOnly = Data.ProtoLens.Field.field @"publicOnly"
pushAmt ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "pushAmt" a) =>
  Lens.Family2.LensLike' f s a
pushAmt = Data.ProtoLens.Field.field @"pushAmt"
pushSat ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "pushSat" a) =>
  Lens.Family2.LensLike' f s a
pushSat = Data.ProtoLens.Field.field @"pushSat"
rawTxHex ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "rawTxHex" a) =>
  Lens.Family2.LensLike' f s a
rawTxHex = Data.ProtoLens.Field.field @"rawTxHex"
recoveryFinished ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "recoveryFinished" a) =>
  Lens.Family2.LensLike' f s a
recoveryFinished = Data.ProtoLens.Field.field @"recoveryFinished"
recoveryMode ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "recoveryMode" a) =>
  Lens.Family2.LensLike' f s a
recoveryMode = Data.ProtoLens.Field.field @"recoveryMode"
remoteCsvDelay ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "remoteCsvDelay" a) =>
  Lens.Family2.LensLike' f s a
remoteCsvDelay = Data.ProtoLens.Field.field @"remoteCsvDelay"
remoteForce ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "remoteForce" a) =>
  Lens.Family2.LensLike' f s a
remoteForce = Data.ProtoLens.Field.field @"remoteForce"
remoteMaxHtlcs ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "remoteMaxHtlcs" a) =>
  Lens.Family2.LensLike' f s a
remoteMaxHtlcs = Data.ProtoLens.Field.field @"remoteMaxHtlcs"
remoteMaxValueInFlightMsat ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "remoteMaxValueInFlightMsat" a) =>
  Lens.Family2.LensLike' f s a
remoteMaxValueInFlightMsat
  = Data.ProtoLens.Field.field @"remoteMaxValueInFlightMsat"
reserveSat ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "reserveSat" a) =>
  Lens.Family2.LensLike' f s a
reserveSat = Data.ProtoLens.Field.field @"reserveSat"
-- NOTE(review): auto-generated proto-lens accessors (proto-lens-protoc
-- output); do not edit by hand — regenerate from lightning.proto.
-- Each binding @x@ below is 'Data.ProtoLens.Field.field' applied at
-- the type-level label @"x"@ — a polymorphic 'Lens.Family2.LensLike''
-- over any message @s@ with a @Data.ProtoLens.Field.HasField s "x" a@
-- instance.  Accessors prefixed with @vec'@ target the underlying
-- vector representation of repeated fields (presumably
-- 'Data.Vector.Vector'-valued — confirm against the generated message
-- types).  @type'@ carries a trailing prime because @type@ is a
-- Haskell keyword.
route ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "route" a) =>
  Lens.Family2.LensLike' f s a
route = Data.ProtoLens.Field.field @"route"
satPerByte ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "satPerByte" a) =>
  Lens.Family2.LensLike' f s a
satPerByte = Data.ProtoLens.Field.field @"satPerByte"
satPerVbyte ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "satPerVbyte" a) =>
  Lens.Family2.LensLike' f s a
satPerVbyte = Data.ProtoLens.Field.field @"satPerVbyte"
satRecv ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "satRecv" a) =>
  Lens.Family2.LensLike' f s a
satRecv = Data.ProtoLens.Field.field @"satRecv"
satSent ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "satSent" a) =>
  Lens.Family2.LensLike' f s a
satSent = Data.ProtoLens.Field.field @"satSent"
sendAll ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "sendAll" a) =>
  Lens.Family2.LensLike' f s a
sendAll = Data.ProtoLens.Field.field @"sendAll"
signature ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "signature" a) =>
  Lens.Family2.LensLike' f s a
signature = Data.ProtoLens.Field.field @"signature"
singleHash ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "singleHash" a) =>
  Lens.Family2.LensLike' f s a
singleHash = Data.ProtoLens.Field.field @"singleHash"
spendUnconfirmed ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "spendUnconfirmed" a) =>
  Lens.Family2.LensLike' f s a
spendUnconfirmed = Data.ProtoLens.Field.field @"spendUnconfirmed"
startHeight ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "startHeight" a) =>
  Lens.Family2.LensLike' f s a
startHeight = Data.ProtoLens.Field.field @"startHeight"
success ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "success" a) =>
  Lens.Family2.LensLike' f s a
success = Data.ProtoLens.Field.field @"success"
syncType ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "syncType" a) =>
  Lens.Family2.LensLike' f s a
syncType = Data.ProtoLens.Field.field @"syncType"
syncedToChain ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "syncedToChain" a) =>
  Lens.Family2.LensLike' f s a
syncedToChain = Data.ProtoLens.Field.field @"syncedToChain"
syncedToGraph ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "syncedToGraph" a) =>
  Lens.Family2.LensLike' f s a
syncedToGraph = Data.ProtoLens.Field.field @"syncedToGraph"
targetConf ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "targetConf" a) =>
  Lens.Family2.LensLike' f s a
targetConf = Data.ProtoLens.Field.field @"targetConf"
testnet ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "testnet" a) =>
  Lens.Family2.LensLike' f s a
testnet = Data.ProtoLens.Field.field @"testnet"
timeStamp ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "timeStamp" a) =>
  Lens.Family2.LensLike' f s a
timeStamp = Data.ProtoLens.Field.field @"timeStamp"
timeout ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "timeout" a) =>
  Lens.Family2.LensLike' f s a
timeout = Data.ProtoLens.Field.field @"timeout"
timestamp ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "timestamp" a) =>
  Lens.Family2.LensLike' f s a
timestamp = Data.ProtoLens.Field.field @"timestamp"
totalFees ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "totalFees" a) =>
  Lens.Family2.LensLike' f s a
totalFees = Data.ProtoLens.Field.field @"totalFees"
transactions ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "transactions" a) =>
  Lens.Family2.LensLike' f s a
transactions = Data.ProtoLens.Field.field @"transactions"
txHash ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "txHash" a) =>
  Lens.Family2.LensLike' f s a
txHash = Data.ProtoLens.Field.field @"txHash"
txid ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "txid" a) =>
  Lens.Family2.LensLike' f s a
txid = Data.ProtoLens.Field.field @"txid"
type' ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "type'" a) =>
  Lens.Family2.LensLike' f s a
type' = Data.ProtoLens.Field.field @"type'"
upfrontShutdown ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "upfrontShutdown" a) =>
  Lens.Family2.LensLike' f s a
upfrontShutdown = Data.ProtoLens.Field.field @"upfrontShutdown"
uris ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "uris" a) =>
  Lens.Family2.LensLike' f s a
uris = Data.ProtoLens.Field.field @"uris"
utxos ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "utxos" a) =>
  Lens.Family2.LensLike' f s a
utxos = Data.ProtoLens.Field.field @"utxos"
valid ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "valid" a) =>
  Lens.Family2.LensLike' f s a
valid = Data.ProtoLens.Field.field @"valid"
value ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "value" a) =>
  Lens.Family2.LensLike' f s a
value = Data.ProtoLens.Field.field @"value"
vec'chains ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "vec'chains" a) =>
  Lens.Family2.LensLike' f s a
vec'chains = Data.ProtoLens.Field.field @"vec'chains"
vec'channels ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "vec'channels" a) =>
  Lens.Family2.LensLike' f s a
vec'channels = Data.ProtoLens.Field.field @"vec'channels"
vec'destAddresses ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "vec'destAddresses" a) =>
  Lens.Family2.LensLike' f s a
vec'destAddresses = Data.ProtoLens.Field.field @"vec'destAddresses"
vec'destFeatures ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "vec'destFeatures" a) =>
  Lens.Family2.LensLike' f s a
vec'destFeatures = Data.ProtoLens.Field.field @"vec'destFeatures"
vec'errors ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "vec'errors" a) =>
  Lens.Family2.LensLike' f s a
vec'errors = Data.ProtoLens.Field.field @"vec'errors"
vec'peers ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "vec'peers" a) =>
  Lens.Family2.LensLike' f s a
vec'peers = Data.ProtoLens.Field.field @"vec'peers"
vec'pendingChannels ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "vec'pendingChannels" a) =>
  Lens.Family2.LensLike' f s a
vec'pendingChannels
  = Data.ProtoLens.Field.field @"vec'pendingChannels"
vec'transactions ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "vec'transactions" a) =>
  Lens.Family2.LensLike' f s a
vec'transactions = Data.ProtoLens.Field.field @"vec'transactions"
vec'uris ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "vec'uris" a) =>
  Lens.Family2.LensLike' f s a
vec'uris = Data.ProtoLens.Field.field @"vec'uris"
vec'utxos ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "vec'utxos" a) =>
  Lens.Family2.LensLike' f s a
vec'utxos = Data.ProtoLens.Field.field @"vec'utxos"
version ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "version" a) =>
  Lens.Family2.LensLike' f s a
version = Data.ProtoLens.Field.field @"version"
# OPTIONS_GHC -Wno-unused-imports#
# OPTIONS_GHC -Wno-duplicate-exports# | # LANGUAGE ScopedTypeVariables , DataKinds , TypeFamilies , UndecidableInstances , GeneralizedNewtypeDeriving , MultiParamTypeClasses , FlexibleContexts , FlexibleInstances , PatternSynonyms , MagicHash , NoImplicitPrelude , BangPatterns , TypeApplications , OverloadedStrings , DerivingStrategies , DeriveGeneric #
# OPTIONS_GHC -Wno - dodgy - exports #
module Proto.Lightning_Fields where
import qualified Data.ProtoLens.Runtime.Prelude as Prelude
import qualified Data.ProtoLens.Runtime.Data.Int as Data.Int
import qualified Data.ProtoLens.Runtime.Data.Monoid as Data.Monoid
import qualified Data.ProtoLens.Runtime.Data.Word as Data.Word
import qualified Data.ProtoLens.Runtime.Data.ProtoLens as Data.ProtoLens
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Bytes as Data.ProtoLens.Encoding.Bytes
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Growing as Data.ProtoLens.Encoding.Growing
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Parser.Unsafe as Data.ProtoLens.Encoding.Parser.Unsafe
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Encoding.Wire as Data.ProtoLens.Encoding.Wire
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Field as Data.ProtoLens.Field
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Message.Enum as Data.ProtoLens.Message.Enum
import qualified Data.ProtoLens.Runtime.Data.ProtoLens.Service.Types as Data.ProtoLens.Service.Types
import qualified Data.ProtoLens.Runtime.Lens.Family2 as Lens.Family2
import qualified Data.ProtoLens.Runtime.Lens.Family2.Unchecked as Lens.Family2.Unchecked
import qualified Data.ProtoLens.Runtime.Data.Text as Data.Text
import qualified Data.ProtoLens.Runtime.Data.Map as Data.Map
import qualified Data.ProtoLens.Runtime.Data.ByteString as Data.ByteString
import qualified Data.ProtoLens.Runtime.Data.ByteString.Char8 as Data.ByteString.Char8
import qualified Data.ProtoLens.Runtime.Data.Text.Encoding as Data.Text.Encoding
import qualified Data.ProtoLens.Runtime.Data.Vector as Data.Vector
import qualified Data.ProtoLens.Runtime.Data.Vector.Generic as Data.Vector.Generic
import qualified Data.ProtoLens.Runtime.Data.Vector.Unboxed as Data.Vector.Unboxed
import qualified Data.ProtoLens.Runtime.Text.Read as Text.Read
import qualified Proto.Lnrpc.Ln0
import qualified Proto.Lnrpc.Ln1
-- | Lens for the @abandoned@ field of any message type providing it,
-- resolved through 'Data.ProtoLens.Field.HasField'.  Generated by
-- proto-lens-protoc; do not hand-edit.
abandoned ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "abandoned" a) =>
  Lens.Family2.LensLike' f s a
abandoned = Data.ProtoLens.Field.field @"abandoned"
accept ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "accept" a) =>
Lens.Family2.LensLike' f s a
accept = Data.ProtoLens.Field.field @"accept"
account ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "account" a) =>
Lens.Family2.LensLike' f s a
account = Data.ProtoLens.Field.field @"account"
activeOnly ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "activeOnly" a) =>
Lens.Family2.LensLike' f s a
activeOnly = Data.ProtoLens.Field.field @"activeOnly"
addr ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "addr" a) =>
Lens.Family2.LensLike' f s a
addr = Data.ProtoLens.Field.field @"addr"
addrToAmount ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "addrToAmount" a) =>
Lens.Family2.LensLike' f s a
addrToAmount = Data.ProtoLens.Field.field @"addrToAmount"
address ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "address" a) =>
Lens.Family2.LensLike' f s a
address = Data.ProtoLens.Field.field @"address"
addressType ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "addressType" a) =>
Lens.Family2.LensLike' f s a
addressType = Data.ProtoLens.Field.field @"addressType"
alias ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "alias" a) =>
Lens.Family2.LensLike' f s a
alias = Data.ProtoLens.Field.field @"alias"
allowSelfPayment ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "allowSelfPayment" a) =>
Lens.Family2.LensLike' f s a
allowSelfPayment = Data.ProtoLens.Field.field @"allowSelfPayment"
amount ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "amount" a) =>
Lens.Family2.LensLike' f s a
amount = Data.ProtoLens.Field.field @"amount"
amountSat ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "amountSat" a) =>
Lens.Family2.LensLike' f s a
amountSat = Data.ProtoLens.Field.field @"amountSat"
amt ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "amt" a) =>
Lens.Family2.LensLike' f s a
amt = Data.ProtoLens.Field.field @"amt"
amtMsat ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "amtMsat" a) =>
Lens.Family2.LensLike' f s a
amtMsat = Data.ProtoLens.Field.field @"amtMsat"
bestHeaderTimestamp ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "bestHeaderTimestamp" a) =>
Lens.Family2.LensLike' f s a
bestHeaderTimestamp
= Data.ProtoLens.Field.field @"bestHeaderTimestamp"
blockHash ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "blockHash" a) =>
Lens.Family2.LensLike' f s a
blockHash = Data.ProtoLens.Field.field @"blockHash"
blockHeight ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "blockHeight" a) =>
Lens.Family2.LensLike' f s a
blockHeight = Data.ProtoLens.Field.field @"blockHeight"
blockSha ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "blockSha" a) =>
Lens.Family2.LensLike' f s a
blockSha = Data.ProtoLens.Field.field @"blockSha"
breach ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "breach" a) =>
Lens.Family2.LensLike' f s a
breach = Data.ProtoLens.Field.field @"breach"
bytesRecv ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "bytesRecv" a) =>
Lens.Family2.LensLike' f s a
bytesRecv = Data.ProtoLens.Field.field @"bytesRecv"
bytesSent ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "bytesSent" a) =>
Lens.Family2.LensLike' f s a
bytesSent = Data.ProtoLens.Field.field @"bytesSent"
chain ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "chain" a) =>
Lens.Family2.LensLike' f s a
chain = Data.ProtoLens.Field.field @"chain"
chainHash ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "chainHash" a) =>
Lens.Family2.LensLike' f s a
chainHash = Data.ProtoLens.Field.field @"chainHash"
chains ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "chains" a) =>
Lens.Family2.LensLike' f s a
chains = Data.ProtoLens.Field.field @"chains"
chanClose ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "chanClose" a) =>
Lens.Family2.LensLike' f s a
chanClose = Data.ProtoLens.Field.field @"chanClose"
chanOpen ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "chanOpen" a) =>
Lens.Family2.LensLike' f s a
chanOpen = Data.ProtoLens.Field.field @"chanOpen"
chanPending ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "chanPending" a) =>
Lens.Family2.LensLike' f s a
chanPending = Data.ProtoLens.Field.field @"chanPending"
channelFlags ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "channelFlags" a) =>
Lens.Family2.LensLike' f s a
channelFlags = Data.ProtoLens.Field.field @"channelFlags"
channelPoint ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "channelPoint" a) =>
Lens.Family2.LensLike' f s a
channelPoint = Data.ProtoLens.Field.field @"channelPoint"
channelReserve ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "channelReserve" a) =>
Lens.Family2.LensLike' f s a
channelReserve = Data.ProtoLens.Field.field @"channelReserve"
channels ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "channels" a) =>
Lens.Family2.LensLike' f s a
channels = Data.ProtoLens.Field.field @"channels"
closeAddress ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "closeAddress" a) =>
Lens.Family2.LensLike' f s a
closeAddress = Data.ProtoLens.Field.field @"closeAddress"
closePending ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "closePending" a) =>
Lens.Family2.LensLike' f s a
closePending = Data.ProtoLens.Field.field @"closePending"
closingTxid ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "closingTxid" a) =>
Lens.Family2.LensLike' f s a
closingTxid = Data.ProtoLens.Field.field @"closingTxid"
cltvLimit ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "cltvLimit" a) =>
Lens.Family2.LensLike' f s a
cltvLimit = Data.ProtoLens.Field.field @"cltvLimit"
color ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "color" a) =>
Lens.Family2.LensLike' f s a
color = Data.ProtoLens.Field.field @"color"
commitHash ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "commitHash" a) =>
Lens.Family2.LensLike' f s a
commitHash = Data.ProtoLens.Field.field @"commitHash"
commitmentType ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "commitmentType" a) =>
Lens.Family2.LensLike' f s a
commitmentType = Data.ProtoLens.Field.field @"commitmentType"
confirmations ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "confirmations" a) =>
Lens.Family2.LensLike' f s a
confirmations = Data.ProtoLens.Field.field @"confirmations"
cooperative ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "cooperative" a) =>
Lens.Family2.LensLike' f s a
cooperative = Data.ProtoLens.Field.field @"cooperative"
csvDelay ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "csvDelay" a) =>
Lens.Family2.LensLike' f s a
csvDelay = Data.ProtoLens.Field.field @"csvDelay"
-- | Lens for the proto field named @data@; the trailing prime avoids a
-- clash with the Haskell @data@ keyword (the 'HasField' symbol keeps the
-- primed spelling, @\"data'\"@).  Generated by proto-lens-protoc.
data' ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "data'" a) =>
  Lens.Family2.LensLike' f s a
data' = Data.ProtoLens.Field.field @"data'"
deliveryAddress ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "deliveryAddress" a) =>
Lens.Family2.LensLike' f s a
deliveryAddress = Data.ProtoLens.Field.field @"deliveryAddress"
dest ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "dest" a) =>
Lens.Family2.LensLike' f s a
dest = Data.ProtoLens.Field.field @"dest"
destAddresses ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "destAddresses" a) =>
Lens.Family2.LensLike' f s a
destAddresses = Data.ProtoLens.Field.field @"destAddresses"
destCustomRecords ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "destCustomRecords" a) =>
Lens.Family2.LensLike' f s a
destCustomRecords = Data.ProtoLens.Field.field @"destCustomRecords"
destFeatures ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "destFeatures" a) =>
Lens.Family2.LensLike' f s a
destFeatures = Data.ProtoLens.Field.field @"destFeatures"
destString ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "destString" a) =>
Lens.Family2.LensLike' f s a
destString = Data.ProtoLens.Field.field @"destString"
dustLimit ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "dustLimit" a) =>
Lens.Family2.LensLike' f s a
dustLimit = Data.ProtoLens.Field.field @"dustLimit"
endHeight ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "endHeight" a) =>
Lens.Family2.LensLike' f s a
endHeight = Data.ProtoLens.Field.field @"endHeight"
-- | Lens for the @error@ field.  NOTE(review): this binding shadows
-- 'Prelude.error' for any module importing this one unqualified — import
-- this module qualified or with an explicit list.  Generated by
-- proto-lens-protoc.
error ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "error" a) =>
  Lens.Family2.LensLike' f s a
error = Data.ProtoLens.Field.field @"error"
errors ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "errors" a) =>
Lens.Family2.LensLike' f s a
errors = Data.ProtoLens.Field.field @"errors"
features ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "features" a) =>
Lens.Family2.LensLike' f s a
features = Data.ProtoLens.Field.field @"features"
feeLimit ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "feeLimit" a) =>
Lens.Family2.LensLike' f s a
feeLimit = Data.ProtoLens.Field.field @"feeLimit"
feePerKw ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "feePerKw" a) =>
Lens.Family2.LensLike' f s a
feePerKw = Data.ProtoLens.Field.field @"feePerKw"
feeSat ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "feeSat" a) =>
Lens.Family2.LensLike' f s a
feeSat = Data.ProtoLens.Field.field @"feeSat"
feerateSatPerByte ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "feerateSatPerByte" a) =>
Lens.Family2.LensLike' f s a
feerateSatPerByte = Data.ProtoLens.Field.field @"feerateSatPerByte"
finalCltvDelta ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "finalCltvDelta" a) =>
Lens.Family2.LensLike' f s a
finalCltvDelta = Data.ProtoLens.Field.field @"finalCltvDelta"
flapCount ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "flapCount" a) =>
Lens.Family2.LensLike' f s a
flapCount = Data.ProtoLens.Field.field @"flapCount"
force ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "force" a) =>
Lens.Family2.LensLike' f s a
force = Data.ProtoLens.Field.field @"force"
fundingAddress ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "fundingAddress" a) =>
Lens.Family2.LensLike' f s a
fundingAddress = Data.ProtoLens.Field.field @"fundingAddress"
fundingAmount ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "fundingAmount" a) =>
Lens.Family2.LensLike' f s a
fundingAmount = Data.ProtoLens.Field.field @"fundingAmount"
fundingAmt ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "fundingAmt" a) =>
Lens.Family2.LensLike' f s a
fundingAmt = Data.ProtoLens.Field.field @"fundingAmt"
fundingCanceled ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "fundingCanceled" a) =>
Lens.Family2.LensLike' f s a
fundingCanceled = Data.ProtoLens.Field.field @"fundingCanceled"
fundingShim ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "fundingShim" a) =>
Lens.Family2.LensLike' f s a
fundingShim = Data.ProtoLens.Field.field @"fundingShim"
host ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "host" a) =>
Lens.Family2.LensLike' f s a
host = Data.ProtoLens.Field.field @"host"
identityPubkey ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "identityPubkey" a) =>
Lens.Family2.LensLike' f s a
identityPubkey = Data.ProtoLens.Field.field @"identityPubkey"
inFlightMaxMsat ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "inFlightMaxMsat" a) =>
Lens.Family2.LensLike' f s a
inFlightMaxMsat = Data.ProtoLens.Field.field @"inFlightMaxMsat"
inactiveOnly ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "inactiveOnly" a) =>
Lens.Family2.LensLike' f s a
inactiveOnly = Data.ProtoLens.Field.field @"inactiveOnly"
inbound ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "inbound" a) =>
Lens.Family2.LensLike' f s a
inbound = Data.ProtoLens.Field.field @"inbound"
key ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "key" a) =>
Lens.Family2.LensLike' f s a
key = Data.ProtoLens.Field.field @"key"
label ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "label" a) =>
Lens.Family2.LensLike' f s a
label = Data.ProtoLens.Field.field @"label"
lastFlapNs ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "lastFlapNs" a) =>
Lens.Family2.LensLike' f s a
lastFlapNs = Data.ProtoLens.Field.field @"lastFlapNs"
lastHopPubkey ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "lastHopPubkey" a) =>
Lens.Family2.LensLike' f s a
lastHopPubkey = Data.ProtoLens.Field.field @"lastHopPubkey"
lastPingPayload ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "lastPingPayload" a) =>
Lens.Family2.LensLike' f s a
lastPingPayload = Data.ProtoLens.Field.field @"lastPingPayload"
latestError ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "latestError" a) =>
Lens.Family2.LensLike' f s a
latestError = Data.ProtoLens.Field.field @"latestError"
localForce ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "localForce" a) =>
Lens.Family2.LensLike' f s a
localForce = Data.ProtoLens.Field.field @"localForce"
localFundingAmount ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "localFundingAmount" a) =>
Lens.Family2.LensLike' f s a
localFundingAmount
= Data.ProtoLens.Field.field @"localFundingAmount"
maxAcceptedHtlcs ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maxAcceptedHtlcs" a) =>
Lens.Family2.LensLike' f s a
maxAcceptedHtlcs = Data.ProtoLens.Field.field @"maxAcceptedHtlcs"
maxConfs ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maxConfs" a) =>
Lens.Family2.LensLike' f s a
maxConfs = Data.ProtoLens.Field.field @"maxConfs"
maxHtlcCount ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maxHtlcCount" a) =>
Lens.Family2.LensLike' f s a
maxHtlcCount = Data.ProtoLens.Field.field @"maxHtlcCount"
maxLocalCsv ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maxLocalCsv" a) =>
Lens.Family2.LensLike' f s a
maxLocalCsv = Data.ProtoLens.Field.field @"maxLocalCsv"
maxValueInFlight ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maxValueInFlight" a) =>
Lens.Family2.LensLike' f s a
maxValueInFlight = Data.ProtoLens.Field.field @"maxValueInFlight"
-- | Lens for the @maybe'addr@ field — presumably the 'Prelude.Maybe'
-- view of an optional\/oneof @addr@ field, per proto-lens convention
-- (confirm against the generator's output for the message).  Generated
-- by proto-lens-protoc.
maybe'addr ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "maybe'addr" a) =>
  Lens.Family2.LensLike' f s a
maybe'addr = Data.ProtoLens.Field.field @"maybe'addr"
maybe'chanClose ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maybe'chanClose" a) =>
Lens.Family2.LensLike' f s a
maybe'chanClose = Data.ProtoLens.Field.field @"maybe'chanClose"
maybe'chanOpen ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maybe'chanOpen" a) =>
Lens.Family2.LensLike' f s a
maybe'chanOpen = Data.ProtoLens.Field.field @"maybe'chanOpen"
maybe'chanPending ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maybe'chanPending" a) =>
Lens.Family2.LensLike' f s a
maybe'chanPending = Data.ProtoLens.Field.field @"maybe'chanPending"
maybe'channelPoint ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maybe'channelPoint" a) =>
Lens.Family2.LensLike' f s a
maybe'channelPoint
= Data.ProtoLens.Field.field @"maybe'channelPoint"
maybe'closePending ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maybe'closePending" a) =>
Lens.Family2.LensLike' f s a
maybe'closePending
= Data.ProtoLens.Field.field @"maybe'closePending"
maybe'feeLimit ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maybe'feeLimit" a) =>
Lens.Family2.LensLike' f s a
maybe'feeLimit = Data.ProtoLens.Field.field @"maybe'feeLimit"
maybe'fundingShim ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maybe'fundingShim" a) =>
Lens.Family2.LensLike' f s a
maybe'fundingShim = Data.ProtoLens.Field.field @"maybe'fundingShim"
maybe'outpoint ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maybe'outpoint" a) =>
Lens.Family2.LensLike' f s a
maybe'outpoint = Data.ProtoLens.Field.field @"maybe'outpoint"
maybe'paymentRoute ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maybe'paymentRoute" a) =>
Lens.Family2.LensLike' f s a
maybe'paymentRoute
= Data.ProtoLens.Field.field @"maybe'paymentRoute"
maybe'psbtFund ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maybe'psbtFund" a) =>
Lens.Family2.LensLike' f s a
maybe'psbtFund = Data.ProtoLens.Field.field @"maybe'psbtFund"
maybe'route ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maybe'route" a) =>
Lens.Family2.LensLike' f s a
maybe'route = Data.ProtoLens.Field.field @"maybe'route"
maybe'update ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maybe'update" a) =>
Lens.Family2.LensLike' f s a
maybe'update = Data.ProtoLens.Field.field @"maybe'update"
maybe'value ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "maybe'value" a) =>
Lens.Family2.LensLike' f s a
maybe'value = Data.ProtoLens.Field.field @"maybe'value"
minAcceptDepth ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "minAcceptDepth" a) =>
Lens.Family2.LensLike' f s a
minAcceptDepth = Data.ProtoLens.Field.field @"minAcceptDepth"
minConfs ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "minConfs" a) =>
Lens.Family2.LensLike' f s a
minConfs = Data.ProtoLens.Field.field @"minConfs"
minHtlc ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "minHtlc" a) =>
Lens.Family2.LensLike' f s a
minHtlc = Data.ProtoLens.Field.field @"minHtlc"
minHtlcIn ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "minHtlcIn" a) =>
Lens.Family2.LensLike' f s a
minHtlcIn = Data.ProtoLens.Field.field @"minHtlcIn"
minHtlcMsat ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "minHtlcMsat" a) =>
Lens.Family2.LensLike' f s a
minHtlcMsat = Data.ProtoLens.Field.field @"minHtlcMsat"
msg ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "msg" a) =>
Lens.Family2.LensLike' f s a
msg = Data.ProtoLens.Field.field @"msg"
network ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "network" a) =>
Lens.Family2.LensLike' f s a
network = Data.ProtoLens.Field.field @"network"
nodePubkey ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "nodePubkey" a) =>
Lens.Family2.LensLike' f s a
nodePubkey = Data.ProtoLens.Field.field @"nodePubkey"
nodePubkeyString ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "nodePubkeyString" a) =>
Lens.Family2.LensLike' f s a
nodePubkeyString = Data.ProtoLens.Field.field @"nodePubkeyString"
numActiveChannels ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "numActiveChannels" a) =>
Lens.Family2.LensLike' f s a
numActiveChannels = Data.ProtoLens.Field.field @"numActiveChannels"
numConfirmations ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "numConfirmations" a) =>
Lens.Family2.LensLike' f s a
numConfirmations = Data.ProtoLens.Field.field @"numConfirmations"
numConfsLeft ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "numConfsLeft" a) =>
Lens.Family2.LensLike' f s a
numConfsLeft = Data.ProtoLens.Field.field @"numConfsLeft"
numInactiveChannels ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "numInactiveChannels" a) =>
Lens.Family2.LensLike' f s a
numInactiveChannels
= Data.ProtoLens.Field.field @"numInactiveChannels"
numPeers ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "numPeers" a) =>
Lens.Family2.LensLike' f s a
numPeers = Data.ProtoLens.Field.field @"numPeers"
numPendingChannels ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "numPendingChannels" a) =>
Lens.Family2.LensLike' f s a
numPendingChannels
= Data.ProtoLens.Field.field @"numPendingChannels"
outgoingChanId ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "outgoingChanId" a) =>
Lens.Family2.LensLike' f s a
outgoingChanId = Data.ProtoLens.Field.field @"outgoingChanId"
outpoint ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "outpoint" a) =>
Lens.Family2.LensLike' f s a
outpoint = Data.ProtoLens.Field.field @"outpoint"
paymentAddr ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "paymentAddr" a) =>
Lens.Family2.LensLike' f s a
paymentAddr = Data.ProtoLens.Field.field @"paymentAddr"
paymentError ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "paymentError" a) =>
Lens.Family2.LensLike' f s a
paymentError = Data.ProtoLens.Field.field @"paymentError"
paymentHash ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "paymentHash" a) =>
Lens.Family2.LensLike' f s a
paymentHash = Data.ProtoLens.Field.field @"paymentHash"
paymentHashString ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "paymentHashString" a) =>
Lens.Family2.LensLike' f s a
paymentHashString = Data.ProtoLens.Field.field @"paymentHashString"
paymentPreimage ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "paymentPreimage" a) =>
Lens.Family2.LensLike' f s a
paymentPreimage = Data.ProtoLens.Field.field @"paymentPreimage"
paymentRequest ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "paymentRequest" a) =>
Lens.Family2.LensLike' f s a
paymentRequest = Data.ProtoLens.Field.field @"paymentRequest"
paymentRoute ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "paymentRoute" a) =>
Lens.Family2.LensLike' f s a
paymentRoute = Data.ProtoLens.Field.field @"paymentRoute"
peer ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "peer" a) =>
Lens.Family2.LensLike' f s a
peer = Data.ProtoLens.Field.field @"peer"
peers ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "peers" a) =>
Lens.Family2.LensLike' f s a
peers = Data.ProtoLens.Field.field @"peers"
pendingChanId ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "pendingChanId" a) =>
Lens.Family2.LensLike' f s a
pendingChanId = Data.ProtoLens.Field.field @"pendingChanId"
pendingChannels ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "pendingChannels" a) =>
Lens.Family2.LensLike' f s a
pendingChannels = Data.ProtoLens.Field.field @"pendingChannels"
perm ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "perm" a) =>
Lens.Family2.LensLike' f s a
perm = Data.ProtoLens.Field.field @"perm"
pingTime ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "pingTime" a) =>
Lens.Family2.LensLike' f s a
pingTime = Data.ProtoLens.Field.field @"pingTime"
pkScript ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "pkScript" a) =>
Lens.Family2.LensLike' f s a
pkScript = Data.ProtoLens.Field.field @"pkScript"
private ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "private" a) =>
Lens.Family2.LensLike' f s a
private = Data.ProtoLens.Field.field @"private"
privateOnly ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "privateOnly" a) =>
Lens.Family2.LensLike' f s a
privateOnly = Data.ProtoLens.Field.field @"privateOnly"
progress ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "progress" a) =>
Lens.Family2.LensLike' f s a
progress = Data.ProtoLens.Field.field @"progress"
psbt ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "psbt" a) =>
Lens.Family2.LensLike' f s a
psbt = Data.ProtoLens.Field.field @"psbt"
psbtFund ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "psbtFund" a) =>
Lens.Family2.LensLike' f s a
psbtFund = Data.ProtoLens.Field.field @"psbtFund"
pubKey ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "pubKey" a) =>
Lens.Family2.LensLike' f s a
pubKey = Data.ProtoLens.Field.field @"pubKey"
pubkey ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "pubkey" a) =>
Lens.Family2.LensLike' f s a
pubkey = Data.ProtoLens.Field.field @"pubkey"
publicOnly ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "publicOnly" a) =>
Lens.Family2.LensLike' f s a
publicOnly = Data.ProtoLens.Field.field @"publicOnly"
pushAmt ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "pushAmt" a) =>
Lens.Family2.LensLike' f s a
pushAmt = Data.ProtoLens.Field.field @"pushAmt"
pushSat ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "pushSat" a) =>
Lens.Family2.LensLike' f s a
pushSat = Data.ProtoLens.Field.field @"pushSat"
rawTxHex ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "rawTxHex" a) =>
Lens.Family2.LensLike' f s a
rawTxHex = Data.ProtoLens.Field.field @"rawTxHex"
recoveryFinished ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "recoveryFinished" a) =>
Lens.Family2.LensLike' f s a
recoveryFinished = Data.ProtoLens.Field.field @"recoveryFinished"
recoveryMode ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "recoveryMode" a) =>
Lens.Family2.LensLike' f s a
recoveryMode = Data.ProtoLens.Field.field @"recoveryMode"
remoteCsvDelay ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "remoteCsvDelay" a) =>
Lens.Family2.LensLike' f s a
remoteCsvDelay = Data.ProtoLens.Field.field @"remoteCsvDelay"
remoteForce ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "remoteForce" a) =>
Lens.Family2.LensLike' f s a
remoteForce = Data.ProtoLens.Field.field @"remoteForce"
remoteMaxHtlcs ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "remoteMaxHtlcs" a) =>
Lens.Family2.LensLike' f s a
remoteMaxHtlcs = Data.ProtoLens.Field.field @"remoteMaxHtlcs"
remoteMaxValueInFlightMsat ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "remoteMaxValueInFlightMsat" a) =>
Lens.Family2.LensLike' f s a
remoteMaxValueInFlightMsat
= Data.ProtoLens.Field.field @"remoteMaxValueInFlightMsat"
reserveSat ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "reserveSat" a) =>
Lens.Family2.LensLike' f s a
reserveSat = Data.ProtoLens.Field.field @"reserveSat"
route ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "route" a) =>
Lens.Family2.LensLike' f s a
route = Data.ProtoLens.Field.field @"route"
satPerByte ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "satPerByte" a) =>
Lens.Family2.LensLike' f s a
satPerByte = Data.ProtoLens.Field.field @"satPerByte"
satPerVbyte ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "satPerVbyte" a) =>
Lens.Family2.LensLike' f s a
satPerVbyte = Data.ProtoLens.Field.field @"satPerVbyte"
satRecv ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "satRecv" a) =>
Lens.Family2.LensLike' f s a
satRecv = Data.ProtoLens.Field.field @"satRecv"
satSent ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "satSent" a) =>
Lens.Family2.LensLike' f s a
satSent = Data.ProtoLens.Field.field @"satSent"
sendAll ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "sendAll" a) =>
Lens.Family2.LensLike' f s a
sendAll = Data.ProtoLens.Field.field @"sendAll"
signature ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "signature" a) =>
Lens.Family2.LensLike' f s a
signature = Data.ProtoLens.Field.field @"signature"
singleHash ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "singleHash" a) =>
Lens.Family2.LensLike' f s a
singleHash = Data.ProtoLens.Field.field @"singleHash"
spendUnconfirmed ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "spendUnconfirmed" a) =>
Lens.Family2.LensLike' f s a
spendUnconfirmed = Data.ProtoLens.Field.field @"spendUnconfirmed"
startHeight ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "startHeight" a) =>
Lens.Family2.LensLike' f s a
startHeight = Data.ProtoLens.Field.field @"startHeight"
success ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "success" a) =>
Lens.Family2.LensLike' f s a
success = Data.ProtoLens.Field.field @"success"
syncType ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "syncType" a) =>
Lens.Family2.LensLike' f s a
syncType = Data.ProtoLens.Field.field @"syncType"
syncedToChain ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "syncedToChain" a) =>
Lens.Family2.LensLike' f s a
syncedToChain = Data.ProtoLens.Field.field @"syncedToChain"
syncedToGraph ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "syncedToGraph" a) =>
Lens.Family2.LensLike' f s a
syncedToGraph = Data.ProtoLens.Field.field @"syncedToGraph"
targetConf ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "targetConf" a) =>
Lens.Family2.LensLike' f s a
targetConf = Data.ProtoLens.Field.field @"targetConf"
testnet ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "testnet" a) =>
Lens.Family2.LensLike' f s a
testnet = Data.ProtoLens.Field.field @"testnet"
timeStamp ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "timeStamp" a) =>
Lens.Family2.LensLike' f s a
timeStamp = Data.ProtoLens.Field.field @"timeStamp"
timeout ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "timeout" a) =>
Lens.Family2.LensLike' f s a
timeout = Data.ProtoLens.Field.field @"timeout"
timestamp ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "timestamp" a) =>
Lens.Family2.LensLike' f s a
timestamp = Data.ProtoLens.Field.field @"timestamp"
totalFees ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "totalFees" a) =>
Lens.Family2.LensLike' f s a
totalFees = Data.ProtoLens.Field.field @"totalFees"
transactions ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "transactions" a) =>
Lens.Family2.LensLike' f s a
transactions = Data.ProtoLens.Field.field @"transactions"
txHash ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "txHash" a) =>
Lens.Family2.LensLike' f s a
txHash = Data.ProtoLens.Field.field @"txHash"
txid ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "txid" a) =>
Lens.Family2.LensLike' f s a
txid = Data.ProtoLens.Field.field @"txid"
-- | Lens for the proto field named @type@; the trailing prime avoids a
-- clash with the Haskell @type@ keyword (the 'HasField' symbol keeps the
-- primed spelling, @\"type'\"@).  Generated by proto-lens-protoc.
type' ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "type'" a) =>
  Lens.Family2.LensLike' f s a
type' = Data.ProtoLens.Field.field @"type'"
upfrontShutdown ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "upfrontShutdown" a) =>
Lens.Family2.LensLike' f s a
upfrontShutdown = Data.ProtoLens.Field.field @"upfrontShutdown"
uris ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "uris" a) =>
Lens.Family2.LensLike' f s a
uris = Data.ProtoLens.Field.field @"uris"
utxos ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "utxos" a) =>
Lens.Family2.LensLike' f s a
utxos = Data.ProtoLens.Field.field @"utxos"
valid ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "valid" a) =>
Lens.Family2.LensLike' f s a
valid = Data.ProtoLens.Field.field @"valid"
value ::
forall f s a.
(Prelude.Functor f, Data.ProtoLens.Field.HasField s "value" a) =>
Lens.Family2.LensLike' f s a
value = Data.ProtoLens.Field.field @"value"
-- | Lens for the @vec'chains@ field — presumably the
-- 'Data.Vector.Vector' view of the repeated @chains@ field, per
-- proto-lens convention (confirm against the generator's output).
-- Generated by proto-lens-protoc.
vec'chains ::
  forall f s a.
  (Prelude.Functor f,
   Data.ProtoLens.Field.HasField s "vec'chains" a) =>
  Lens.Family2.LensLike' f s a
vec'chains = Data.ProtoLens.Field.field @"vec'chains"
vec'channels ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "vec'channels" a) =>
Lens.Family2.LensLike' f s a
vec'channels = Data.ProtoLens.Field.field @"vec'channels"
vec'destAddresses ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "vec'destAddresses" a) =>
Lens.Family2.LensLike' f s a
vec'destAddresses = Data.ProtoLens.Field.field @"vec'destAddresses"
vec'destFeatures ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "vec'destFeatures" a) =>
Lens.Family2.LensLike' f s a
vec'destFeatures = Data.ProtoLens.Field.field @"vec'destFeatures"
vec'errors ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "vec'errors" a) =>
Lens.Family2.LensLike' f s a
vec'errors = Data.ProtoLens.Field.field @"vec'errors"
vec'peers ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "vec'peers" a) =>
Lens.Family2.LensLike' f s a
vec'peers = Data.ProtoLens.Field.field @"vec'peers"
vec'pendingChannels ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "vec'pendingChannels" a) =>
Lens.Family2.LensLike' f s a
vec'pendingChannels
= Data.ProtoLens.Field.field @"vec'pendingChannels"
vec'transactions ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "vec'transactions" a) =>
Lens.Family2.LensLike' f s a
vec'transactions = Data.ProtoLens.Field.field @"vec'transactions"
vec'uris ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "vec'uris" a) =>
Lens.Family2.LensLike' f s a
vec'uris = Data.ProtoLens.Field.field @"vec'uris"
vec'utxos ::
forall f s a.
(Prelude.Functor f,
Data.ProtoLens.Field.HasField s "vec'utxos" a) =>
Lens.Family2.LensLike' f s a
vec'utxos = Data.ProtoLens.Field.field @"vec'utxos"
-- | Lens for the @version@ field of any message type providing it,
-- resolved through 'Data.ProtoLens.Field.HasField'.  Generated by
-- proto-lens-protoc.  (Fix: removed a stray trailing @|@ artifact that
-- made the final line a parse error.)
version ::
  forall f s a.
  (Prelude.Functor f, Data.ProtoLens.Field.HasField s "version" a) =>
  Lens.Family2.LensLike' f s a
version = Data.ProtoLens.Field.field @"version"
module Stratosphere.Scheduler.Schedule.AwsVpcConfigurationProperty (
AwsVpcConfigurationProperty(..), mkAwsVpcConfigurationProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import Stratosphere.ResourceProperties
import Stratosphere.Value
data AwsVpcConfigurationProperty
= AwsVpcConfigurationProperty {assignPublicIp :: (Prelude.Maybe (Value Prelude.Text)),
securityGroups :: (Prelude.Maybe (ValueList Prelude.Text)),
subnets :: (ValueList Prelude.Text)}
mkAwsVpcConfigurationProperty ::
ValueList Prelude.Text -> AwsVpcConfigurationProperty
mkAwsVpcConfigurationProperty subnets
= AwsVpcConfigurationProperty
{subnets = subnets, assignPublicIp = Prelude.Nothing,
securityGroups = Prelude.Nothing}
instance ToResourceProperties AwsVpcConfigurationProperty where
toResourceProperties AwsVpcConfigurationProperty {..}
= ResourceProperties
{awsType = "AWS::Scheduler::Schedule.AwsVpcConfiguration",
supportsTags = Prelude.False,
properties = Prelude.fromList
((Prelude.<>)
["Subnets" JSON..= subnets]
(Prelude.catMaybes
[(JSON..=) "AssignPublicIp" Prelude.<$> assignPublicIp,
(JSON..=) "SecurityGroups" Prelude.<$> securityGroups]))}
instance JSON.ToJSON AwsVpcConfigurationProperty where
toJSON AwsVpcConfigurationProperty {..}
= JSON.object
(Prelude.fromList
((Prelude.<>)
["Subnets" JSON..= subnets]
(Prelude.catMaybes
[(JSON..=) "AssignPublicIp" Prelude.<$> assignPublicIp,
(JSON..=) "SecurityGroups" Prelude.<$> securityGroups])))
instance Property "AssignPublicIp" AwsVpcConfigurationProperty where
type PropertyType "AssignPublicIp" AwsVpcConfigurationProperty = Value Prelude.Text
set newValue AwsVpcConfigurationProperty {..}
= AwsVpcConfigurationProperty
{assignPublicIp = Prelude.pure newValue, ..}
instance Property "SecurityGroups" AwsVpcConfigurationProperty where
type PropertyType "SecurityGroups" AwsVpcConfigurationProperty = ValueList Prelude.Text
set newValue AwsVpcConfigurationProperty {..}
= AwsVpcConfigurationProperty
{securityGroups = Prelude.pure newValue, ..}
instance Property "Subnets" AwsVpcConfigurationProperty where
type PropertyType "Subnets" AwsVpcConfigurationProperty = ValueList Prelude.Text
set newValue AwsVpcConfigurationProperty {..}
= AwsVpcConfigurationProperty {subnets = newValue, ..} | null | https://raw.githubusercontent.com/mbj/stratosphere/c70f301715425247efcda29af4f3fcf7ec04aa2f/services/scheduler/gen/Stratosphere/Scheduler/Schedule/AwsVpcConfigurationProperty.hs | haskell | module Stratosphere.Scheduler.Schedule.AwsVpcConfigurationProperty (
AwsVpcConfigurationProperty(..), mkAwsVpcConfigurationProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import Stratosphere.ResourceProperties
import Stratosphere.Value
data AwsVpcConfigurationProperty
= AwsVpcConfigurationProperty {assignPublicIp :: (Prelude.Maybe (Value Prelude.Text)),
securityGroups :: (Prelude.Maybe (ValueList Prelude.Text)),
subnets :: (ValueList Prelude.Text)}
mkAwsVpcConfigurationProperty ::
ValueList Prelude.Text -> AwsVpcConfigurationProperty
mkAwsVpcConfigurationProperty subnets
= AwsVpcConfigurationProperty
{subnets = subnets, assignPublicIp = Prelude.Nothing,
securityGroups = Prelude.Nothing}
instance ToResourceProperties AwsVpcConfigurationProperty where
toResourceProperties AwsVpcConfigurationProperty {..}
= ResourceProperties
{awsType = "AWS::Scheduler::Schedule.AwsVpcConfiguration",
supportsTags = Prelude.False,
properties = Prelude.fromList
((Prelude.<>)
["Subnets" JSON..= subnets]
(Prelude.catMaybes
[(JSON..=) "AssignPublicIp" Prelude.<$> assignPublicIp,
(JSON..=) "SecurityGroups" Prelude.<$> securityGroups]))}
instance JSON.ToJSON AwsVpcConfigurationProperty where
toJSON AwsVpcConfigurationProperty {..}
= JSON.object
(Prelude.fromList
((Prelude.<>)
["Subnets" JSON..= subnets]
(Prelude.catMaybes
[(JSON..=) "AssignPublicIp" Prelude.<$> assignPublicIp,
(JSON..=) "SecurityGroups" Prelude.<$> securityGroups])))
instance Property "AssignPublicIp" AwsVpcConfigurationProperty where
type PropertyType "AssignPublicIp" AwsVpcConfigurationProperty = Value Prelude.Text
set newValue AwsVpcConfigurationProperty {..}
= AwsVpcConfigurationProperty
{assignPublicIp = Prelude.pure newValue, ..}
instance Property "SecurityGroups" AwsVpcConfigurationProperty where
type PropertyType "SecurityGroups" AwsVpcConfigurationProperty = ValueList Prelude.Text
set newValue AwsVpcConfigurationProperty {..}
= AwsVpcConfigurationProperty
{securityGroups = Prelude.pure newValue, ..}
instance Property "Subnets" AwsVpcConfigurationProperty where
type PropertyType "Subnets" AwsVpcConfigurationProperty = ValueList Prelude.Text
set newValue AwsVpcConfigurationProperty {..}
= AwsVpcConfigurationProperty {subnets = newValue, ..} |
|
0cd903949c66df0c1762eb53ade846c9fb0430100ca27b04df767882ef097815 | ekmett/ekmett.github.com | Char8.hs | # LANGUAGE CPP #
{-# OPTIONS_HADDOCK prune #-}
-- |
-- Module : Data.Buffer.Lazy.Char8
Copyright : ( c ) 2006
-- License : BSD-style
--
Maintainer :
-- Stability : experimental
Portability : non - portable ( imports Data . Buffer . Lazy )
--
Manipulate /lazy/ ' 's using ' ' operations . All will
be truncated to 8 bits . It can be expected that these functions will
-- run at identical speeds to their 'Data.Word.Word8' equivalents in
-- "Data.Buffer.Lazy".
--
-- This module is intended to be imported @qualified@, to avoid name
clashes with " Prelude " functions . eg .
--
-- > import qualified Data.Buffer.Lazy.Char8 as C
--
module Data.Buffer.Lazy.Char8 (
-- * The @Buffer@ type
instances : , Ord , Show , Read , Data , Typeable
* Introducing and eliminating ' 's
empty, -- :: Buffer
: : Buffer
pack, -- :: String -> Buffer
unpack, -- :: Buffer -> String
fromChunks, -- :: [Strict.Buffer] -> Buffer
toChunks, -- :: Buffer -> [Strict.Buffer]
-- * Basic interface
: : Buffer - > Buffer
: : Buffer - > Buffer
snoc, -- :: Buffer -> Char -> Buffer
append, -- :: Buffer -> Buffer -> Buffer
: : Buffer - >
: : Buffer - > Maybe ( , Buffer )
: : Buffer - >
tail, -- :: Buffer -> Buffer
init, -- :: Buffer -> Buffer
null, -- :: Buffer -> Bool
: : Buffer - > Int64
-- * Transforming Buffers
: : ( ) - > Buffer - > Buffer
reverse, -- :: Buffer -> Buffer
: : Buffer - > Buffer
intercalate, -- :: Buffer -> [Buffer] -> Buffer
transpose, -- :: [Buffer] -> [Buffer]
-- * Reducing 'Buffer's (folds)
foldl, -- :: (a -> Char -> a) -> a -> Buffer -> a
foldl', -- :: (a -> Char -> a) -> a -> Buffer -> a
: : ( Char - > ) - > Buffer - >
: : ( Char - > ) - > Buffer - >
foldr, -- :: (Char -> a -> a) -> a -> Buffer -> a
: : ( Char - > ) - > Buffer - >
-- ** Special folds
concat, -- :: [Buffer] -> Buffer
concatMap, -- :: (Char -> Buffer) -> Buffer -> Buffer
: : ( Bool ) - > Buffer - > Bool
: : ( Bool ) - > Buffer - > Bool
: : Buffer - >
: : Buffer - >
-- * Building Buffers
-- ** Scans
: : ( Char - > ) - > Char - > Buffer - > Buffer
scanl1 , -- : : ( Char - > ) - > Buffer - > Buffer
scanr , -- : : ( Char - > ) - > Char - > Buffer - > Buffer
scanr1 , -- : : ( Char - > ) - > Buffer - > Buffer
-- ** Accumulating maps
: : ( acc - > Char - > ( acc , ) ) - > acc - > Buffer - > ( acc , Buffer )
: : ( acc - > Char - > ( acc , ) ) - > acc - > Buffer - > ( acc , Buffer )
-- ** Infinite Buffers
: :
replicate, -- :: Int64 -> Char -> Buffer
cycle, -- :: Buffer -> Buffer
: : ( ) - > Char - > Buffer
-- ** Unfolding Buffers
: : ( a - > Maybe ( , a ) ) - > a - > Buffer
* Substrings
-- ** Breaking strings
take, -- :: Int64 -> Buffer -> Buffer
drop, -- :: Int64 -> Buffer -> Buffer
splitAt, -- :: Int64 -> Buffer -> (Buffer, Buffer)
: : ( Bool ) - > Buffer - > Buffer
: : ( Bool ) - > Buffer - > Buffer
: : ( Bool ) - > Buffer - > ( Buffer , Buffer )
: : ( Bool ) - > Buffer - > ( Buffer , Buffer )
group, -- :: Buffer -> [Buffer]
: : ( Char - > Bool ) - > Buffer - > [ Buffer ]
inits, -- :: Buffer -> [Buffer]
tails, -- :: Buffer -> [Buffer]
-- ** Breaking into many substrings
: : Buffer - > [ Buffer ]
: : ( Bool ) - > Buffer - > [ Buffer ]
-- ** Breaking into lines and words
lines, -- :: Buffer -> [Buffer]
words, -- :: Buffer -> [Buffer]
unlines, -- :: [Buffer] -> Buffer
unwords, -- :: Buffer -> [Buffer]
-- * Predicates
isPrefixOf, -- :: Buffer -> Buffer -> Bool
-- isSuffixOf, -- :: Buffer -> Buffer -> Bool
-- * Searching Buffers
-- ** Searching by equality
: : Bool
: : Bool
-- ** Searching with a predicate
: : ( Bool ) - > Buffer - > Maybe
: : ( Bool ) - > Buffer - > Buffer
partition -- : : ( Bool ) - > Buffer - > ( Buffer , Buffer )
-- * Indexing Buffers
: : Buffer - > Int64 - >
: : Buffer - > Maybe Int64
: : Buffer - > [ Int64 ]
: : ( Bool ) - > Buffer - > Maybe Int64
: : ( Bool ) - > Buffer - > [ Int64 ]
: : Buffer - > Int64
-- * Zipping and unzipping Buffers
: : Buffer - > Buffer - > [ ( , ) ]
: : ( Char - > c ) - > Buffer - > Buffer - > [ c ]
unzip , -- : : [ ( , ) ] - > ( Buffer , Buffer )
-- * Ordered Buffers
-- sort, -- :: Buffer -> Buffer
-- * Low level conversions
-- ** Copying Buffers
copy, -- :: Buffer -> Buffer
-- * Reading from Buffers
readInt,
readInteger,
* I\/O with ' 's
-- ** Standard input and output
getContents, -- :: IO Buffer
putStr, -- :: Buffer -> IO ()
putStrLn, -- :: Buffer -> IO ()
interact, -- :: (Buffer -> Buffer) -> IO ()
-- ** Files
readFile, -- :: FilePath -> IO Buffer
writeFile, -- :: FilePath -> Buffer -> IO ()
appendFile, -- :: FilePath -> Buffer -> IO ()
-- ** I\/O with Handles
hGetContents, -- :: Handle -> IO Buffer
hGet, -- :: Handle -> Int64 -> IO Buffer
hGetNonBlocking, -- :: Handle -> Int64 -> IO Buffer
hPut, -- :: Handle -> Buffer -> IO ()
) where
-- Functions transparently exported
import Data.Buffer.Lazy.Word8
(fromChunks, toChunks
,empty,null,length,tail,init,append,reverse,transpose,cycle
,concat,take,drop,splitAt,intercalate,isPrefixOf,group,inits,tails,copy
,hGetContents, hGet, hPut, getContents
,hGetNonBlocking
,putStr, putStrLn, interact)
-- Functions we need to wrap.
import qualified Data.Buffer.Lazy.Word8 as L
import qualified Data.Buffer as S (Buffer) -- typename only
import qualified Data.Buffer as B
import qualified Data.Buffer.Unsafe as B
import Data.Buffer.Lazy.Internal
import Data.Buffer.Internal (w2c, c2w, isSpaceWord8)
import Data.Int (Int64)
import qualified Data.List as List
import Prelude hiding
(reverse,head,tail,last,init,null,length,map,lines,foldl,foldr,unlines
,concat,any,take,drop,splitAt,takeWhile,dropWhile,span,break,elem,filter
,unwords,words,maximum,minimum,all,concatMap,scanl,scanl1,foldl1,foldr1
,readFile,writeFile,appendFile,replicate,getContents,getLine,putStr,putStrLn
,zip,zipWith,unzip,notElem,repeat,iterate,interact,cycle)
import System.IO (hClose,openFile,IOMode(..))
#ifndef __NHC__
import Control.Exception (bracket)
#else
import IO (bracket)
#endif
#if __GLASGOW_HASKELL__ >= 608
import Data.String
#endif
#define STRICT1(f) f a | a `seq` False = undefined
#define STRICT2(f) f a b | a `seq` b `seq` False = undefined
#define STRICT3(f) f a b c | a `seq` b `seq` c `seq` False = undefined
#define STRICT4(f) f a b c d | a `seq` b `seq` c `seq` d `seq` False = undefined
#define STRICT5(f) f a b c d e | a `seq` b `seq` c `seq` d `seq` e `seq` False = undefined
#define STRICT5_(f) f a b c d _ | a `seq` b `seq` c `seq` d `seq` False = undefined
------------------------------------------------------------------------
| /O(1)/ Convert a ' ' into a ' Buffer '
singleton :: Char -> Buffer
singleton = L.singleton . c2w
# INLINE singleton #
#if __GLASGOW_HASKELL__ >= 608
instance IsString Buffer where
fromString = pack
{-# INLINE fromString #-}
#endif
-- | /O(n)/ Convert a 'String' into a 'Buffer'.
pack :: [Char] -> Buffer
pack = L.pack. List.map c2w
-- | /O(n)/ Converts a 'Buffer' to a 'String'.
unpack :: Buffer -> [Char]
unpack = List.map w2c . L.unpack
# INLINE unpack #
-- | /O(1)/ 'cons' is analogous to '(:)' for lists.
cons :: Char -> Buffer -> Buffer
cons = L.cons . c2w
# INLINE cons #
| /O(1)/ Unlike ' cons ' , ' '' is
-- strict in the Buffer that we are consing onto. More precisely, it forces
the head and the first chunk . It does this because , for space efficiency , it
may coalesce the new byte onto the first \'chunk\ ' rather than starting a
-- new \'chunk\'.
--
-- So that means you can't use a lazy recursive contruction like this:
--
-- > let xs = cons\' c xs in xs
--
-- You can however use 'cons', as well as 'repeat' and 'cycle', to build
-- infinite lazy Buffers.
--
cons' :: Char -> Buffer -> Buffer
cons' = L.cons' . c2w
{-# INLINE cons' #-}
| /O(n)/ Append a to the end of a ' Buffer ' . Similar to
-- 'cons', this function performs a memcpy.
snoc :: Buffer -> Char -> Buffer
snoc p = L.snoc p . c2w
# INLINE snoc #
| /O(1)/ Extract the first element of a Buffer , which must be non - empty .
head :: Buffer -> Char
head = w2c . L.head
# INLINE head #
-- | /O(1)/ Extract the head and tail of a Buffer, returning Nothing
-- if it is empty.
uncons :: Buffer -> Maybe (Char, Buffer)
uncons bs = case L.uncons bs of
Nothing -> Nothing
Just (w, bs') -> Just (w2c w, bs')
# INLINE uncons #
-- | /O(1)/ Extract the last element of a packed string, which must be non-empty.
last :: Buffer -> Char
last = w2c . L.last
# INLINE last #
| /O(n)/ ' map ' is the Buffer obtained by applying @f@ to each element of @xs@
map :: (Char -> Char) -> Buffer -> Buffer
map f = L.map (c2w . f . w2c)
# INLINE map #
| /O(n)/ The ' intersperse ' function takes a and a ' Buffer '
and ' that between the elements of the
-- 'Buffer'. It is analogous to the intersperse function on Lists.
intersperse :: Char -> Buffer -> Buffer
intersperse = L.intersperse . c2w
# INLINE intersperse #
-- | 'foldl', applied to a binary operator, a starting value (typically
-- the left-identity of the operator), and a Buffer, reduces the
-- Buffer using the binary operator, from left to right.
foldl :: (a -> Char -> a) -> a -> Buffer -> a
foldl f = L.foldl (\a c -> f a (w2c c))
{-# INLINE foldl #-}
-- | 'foldl\'' is like foldl, but strict in the accumulator.
foldl' :: (a -> Char -> a) -> a -> Buffer -> a
foldl' f = L.foldl' (\a c -> f a (w2c c))
{-# INLINE foldl' #-}
-- | 'foldr', applied to a binary operator, a starting value
-- (typically the right-identity of the operator), and a packed string,
-- reduces the packed string using the binary operator, from right to left.
foldr :: (Char -> a -> a) -> a -> Buffer -> a
foldr f = L.foldr (\c a -> f (w2c c) a)
# INLINE foldr #
| ' foldl1 ' is a variant of ' foldl ' that has no starting value
-- argument, and thus must be applied to non-empty 'Buffers'.
foldl1 :: (Char -> Char -> Char) -> Buffer -> Char
foldl1 f ps = w2c (L.foldl1 (\x y -> c2w (f (w2c x) (w2c y))) ps)
# INLINE foldl1 #
| ' foldl1\ '' is like ' foldl1 ' , but strict in the accumulator .
foldl1' :: (Char -> Char -> Char) -> Buffer -> Char
foldl1' f ps = w2c (L.foldl1' (\x y -> c2w (f (w2c x) (w2c y))) ps)
| ' ' is a variant of ' foldr ' that has no starting value argument ,
-- and thus must be applied to non-empty 'Buffer's
foldr1 :: (Char -> Char -> Char) -> Buffer -> Char
foldr1 f ps = w2c (L.foldr1 (\x y -> c2w (f (w2c x) (w2c y))) ps)
# INLINE foldr1 #
-- | Map a function over a 'Buffer' and concatenate the results
concatMap :: (Char -> Buffer) -> Buffer -> Buffer
concatMap f = L.concatMap (f . w2c)
# INLINE concatMap #
-- | Applied to a predicate and a Buffer, 'any' determines if
-- any element of the 'Buffer' satisfies the predicate.
any :: (Char -> Bool) -> Buffer -> Bool
any f = L.any (f . w2c)
# INLINE any #
-- | Applied to a predicate and a 'Buffer', 'all' determines if
-- all elements of the 'Buffer' satisfy the predicate.
all :: (Char -> Bool) -> Buffer -> Bool
all f = L.all (f . w2c)
# INLINE all #
-- | 'maximum' returns the maximum value from a 'Buffer'
maximum :: Buffer -> Char
maximum = w2c . L.maximum
# INLINE maximum #
-- | 'minimum' returns the minimum value from a 'Buffer'
minimum :: Buffer -> Char
minimum = w2c . L.minimum
# INLINE minimum #
-- ---------------------------------------------------------------------
-- Building Buffers
| ' ' is similar to ' foldl ' , but returns a list of successive
-- reduced values from the left. This function will fuse.
--
> f z [ x1 , x2 , ... ] = = [ z , z ` f ` x1 , ( z ` f ` x1 ) ` f ` x2 , ... ]
--
-- Note that
--
> last ( f z xs ) = = foldl f z xs .
scanl :: (Char -> Char -> Char) -> Char -> Buffer -> Buffer
scanl f z = L.scanl (\a b -> c2w (f (w2c a) (w2c b))) (c2w z)
-- | The 'mapAccumL' function behaves like a combination of 'map' and
-- 'foldl'; it applies a function to each element of a Buffer,
-- passing an accumulating parameter from left to right, and returning a
final value of this accumulator together with the new Buffer .
mapAccumL :: (acc -> Char -> (acc, Char)) -> acc -> Buffer -> (acc, Buffer)
mapAccumL f = L.mapAccumL (\a w -> case f a (w2c w) of (a',c) -> (a', c2w c))
-- | The 'mapAccumR' function behaves like a combination of 'map' and
-- 'foldr'; it applies a function to each element of a Buffer,
-- passing an accumulating parameter from right to left, and returning a
final value of this accumulator together with the new Buffer .
mapAccumR :: (acc -> Char -> (acc, Char)) -> acc -> Buffer -> (acc, Buffer)
mapAccumR f = L.mapAccumR (\acc w -> case f acc (w2c w) of (acc', c) -> (acc', c2w c))
------------------------------------------------------------------------
-- Generating and unfolding Buffers
| @'iterate ' f returns an infinite Buffer of repeated applications
of to @x@ :
--
-- > iterate f x == [x, f x, f (f x), ...]
--
iterate :: (Char -> Char) -> Char -> Buffer
iterate f = L.iterate (c2w . f . w2c) . c2w
| @'repeat ' is an infinite Buffer , with @x@ the value of every
-- element.
--
repeat :: Char -> Buffer
repeat = L.repeat . c2w
| /O(n)/ @'replicate ' n is a Buffer of length @n@ with
-- the value of every element.
--
replicate :: Int64 -> Char -> Buffer
replicate w c = L.replicate w (c2w c)
-- | /O(n)/ The 'unfoldr' function is analogous to the List \'unfoldr\'.
-- 'unfoldr' builds a Buffer from a seed value. The function takes
-- the element and returns 'Nothing' if it is done producing the
-- Buffer or returns 'Just' @(a,b)@, in which case, @a@ is a
prepending to the and @b@ is used as the next element in a
-- recursive call.
unfoldr :: (a -> Maybe (Char, a)) -> a -> Buffer
unfoldr f = L.unfoldr $ \a -> case f a of
Nothing -> Nothing
Just (c, a') -> Just (c2w c, a')
------------------------------------------------------------------------
-- | 'takeWhile', applied to a predicate @p@ and a Buffer @xs@,
-- returns the longest prefix (possibly empty) of @xs@ of elements that
satisfy @p@.
takeWhile :: (Char -> Bool) -> Buffer -> Buffer
takeWhile f = L.takeWhile (f . w2c)
# INLINE takeWhile #
| ' dropWhile ' @p xs@ returns the suffix remaining after ' takeWhile ' @p xs@.
dropWhile :: (Char -> Bool) -> Buffer -> Buffer
dropWhile f = L.dropWhile (f . w2c)
# INLINE dropWhile #
| ' break ' @p@ is equivalent to @'span ' ( ' not ' . p)@.
break :: (Char -> Bool) -> Buffer -> (Buffer, Buffer)
break f = L.break (f . w2c)
# INLINE break #
| ' span ' @p xs@ breaks the Buffer into two segments . It is
equivalent to @('takeWhile ' p xs , ' dropWhile ' p xs)@
span :: (Char -> Bool) -> Buffer -> (Buffer, Buffer)
span f = L.span (f . w2c)
# INLINE span #
-- | ' breakChar ' breaks its Buffer argument at the first occurence
-- of the specified . It is more efficient than ' break ' as it is
-- implemented with @memchr(3)@. I.e.
--
-- > break (= = ' c ' ) " abcd " = = breakChar ' c ' " abcd "
--
breakChar : : Buffer - > ( Buffer , Buffer )
breakChar = L.breakByte . c2w
{ - # INLINE breakChar #
-- | 'breakChar' breaks its Buffer argument at the first occurence
-- of the specified Char. It is more efficient than 'break' as it is
-- implemented with @memchr(3)@. I.e.
--
-- > break (=='c') "abcd" == breakChar 'c' "abcd"
--
breakChar :: Char -> Buffer -> (Buffer, Buffer)
breakChar = L.breakByte . c2w
{-# INLINE breakChar #-}
| ' spanChar ' breaks its Buffer argument at the first
occurence of a other than its argument . It is more efficient
-- than 'span (==)'
--
-- > span (=='c') "abcd" == spanByte 'c' "abcd"
--
spanChar :: Char -> Buffer -> (Buffer, Buffer)
spanChar = L.spanByte . c2w
# INLINE spanChar #
-}
--
TODO , more rules for breakChar *
--
-- | /O(n)/ Break a 'Buffer' into pieces separated by the byte
-- argument, consuming the delimiter. I.e.
--
-- > split '\n' "a\nb\nd\ne" == ["a","b","d","e"]
> split ' a ' " aXaXaXa " = = [ " " , " X","X","X " ]
-- > split 'x' "x" == ["",""]
--
-- and
--
-- > intercalate [c] . split c == id
> split = = splitWith . (=
--
-- As for all splitting functions in this library, this function does
-- not copy the substrings, it just constructs new 'Buffers' that
-- are slices of the original.
--
split :: Char -> Buffer -> [Buffer]
split = L.split . c2w
# INLINE split #
-- | /O(n)/ Splits a 'Buffer' into components delimited by
-- separators, where the predicate returns True for a separator element.
The resulting components do not contain the separators . Two adjacent
-- separators result in an empty component in the output. eg.
--
-- > splitWith (=='a') "aabbaca" == ["","","bb","c",""]
--
splitWith :: (Char -> Bool) -> Buffer -> [Buffer]
splitWith f = L.splitWith (f . w2c)
# INLINE splitWith #
-- | The 'groupBy' function is the non-overloaded version of 'group'.
groupBy :: (Char -> Char -> Bool) -> Buffer -> [Buffer]
groupBy k = L.groupBy (\a b -> k (w2c a) (w2c b))
-- | /O(1)/ 'Buffer' index (subscript) operator, starting from 0.
index :: Buffer -> Int64 -> Char
index = (w2c .) . L.index
# INLINE index #
| /O(n)/ The ' elemIndex ' function returns the index of the first
element in the given ' Buffer ' which is equal ( by memchr ) to the
-- query element, or 'Nothing' if there is no such element.
elemIndex :: Char -> Buffer -> Maybe Int64
elemIndex = L.elemIndex . c2w
{-# INLINE elemIndex #-}
-- | /O(n)/ The 'elemIndices' function extends 'elemIndex', by returning
-- the indices of all elements equal to the query element, in ascending order.
elemIndices :: Char -> Buffer -> [Int64]
elemIndices = L.elemIndices . c2w
# INLINE elemIndices #
-- | The 'findIndex' function takes a predicate and a 'Buffer' and
returns the index of the first element in the Buffer satisfying the predicate .
findIndex :: (Char -> Bool) -> Buffer -> Maybe Int64
findIndex f = L.findIndex (f . w2c)
# INLINE findIndex #
-- | The 'findIndices' function extends 'findIndex', by returning the
-- indices of all elements satisfying the predicate, in ascending order.
findIndices :: (Char -> Bool) -> Buffer -> [Int64]
findIndices f = L.findIndices (f . w2c)
| count returns the number of times its argument appears in the
--
-- > count == length . elemIndices
-- > count '\n' == length . lines
--
-- But more efficiently than using length on the intermediate list.
count :: Char -> Buffer -> Int64
count c = L.count (c2w c)
-- | /O(n)/ 'elem' is the 'Buffer' membership predicate. This
implementation uses @memchr(3)@.
elem :: Char -> Buffer -> Bool
elem c = L.elem (c2w c)
# INLINE elem #
-- | /O(n)/ 'notElem' is the inverse of 'elem'
notElem :: Char -> Buffer -> Bool
notElem c = L.notElem (c2w c)
# INLINE notElem #
-- | /O(n)/ 'filter', applied to a predicate and a Buffer,
-- returns a Buffer containing those characters that satisfy the
-- predicate.
filter :: (Char -> Bool) -> Buffer -> Buffer
filter f = L.filter (f . w2c)
# INLINE filter #
-- | /O(n)/ and /O(n\/c ) space/ A first order equivalent of /filter .
-- (= =) / , for the common case of filtering a single . It is more
-- efficient to use /filterChar/ in this case .
--
-- > filterChar = = filter . (= -- filterChar is around 10x faster , and uses much less space , than its
-- filter equivalent
--
filterChar : : Buffer - > Buffer
filterChar c ps = replicate ( count c ps ) c
{ - # INLINE filterChar #
-- | /O(n)/ and /O(n\/c) space/ A first order equivalent of /filter .
-- (==)/, for the common case of filtering a single Char. It is more
-- efficient to use /filterChar/ in this case.
--
-- > filterChar == filter . (==)
--
-- filterChar is around 10x faster, and uses much less space, than its
-- filter equivalent
--
filterChar :: Char -> Buffer -> Buffer
filterChar c ps = replicate (count c ps) c
{-# INLINE filterChar #-}
# RULES
" Buffer specialise filter (= = x ) " forall x.
filter ( (= ) =
#
"Buffer specialise filter (== x)" forall x.
filter ((==) x) = filterChar x
#-}
# RULES
" Buffer specialise filter (= = x ) " forall x.
filter (= = x ) =
#
"Buffer specialise filter (== x)" forall x.
filter (== x) = filterChar x
#-}
-}
-- | /O(n)/ The 'find' function takes a predicate and a Buffer,
and returns the first element in matching the predicate , or ' Nothing '
-- if there is no such element.
find :: (Char -> Bool) -> Buffer -> Maybe Char
find f ps = w2c `fmap` L.find (f . w2c) ps
{-# INLINE find #-}
-- | /O(n)/ A first order equivalent of /filter . (= =) / , for the common
-- case of filtering a single . It is more efficient to use
-- filterChar in this case .
--
-- > filterChar = = filter . (= -- filterChar is around 10x faster , and uses much less space , than its
-- filter equivalent
--
filterChar : : Buffer - > Buffer
filterChar c = L.filterByte ( c2w c )
{ - # INLINE filterChar #
-- | /O(n)/ A first order equivalent of /filter . (==)/, for the common
-- case of filtering a single Char. It is more efficient to use
-- filterChar in this case.
--
-- > filterChar == filter . (==)
--
-- filterChar is around 10x faster, and uses much less space, than its
-- filter equivalent
--
filterChar :: Char -> Buffer -> Buffer
filterChar c = L.filterByte (c2w c)
{-# INLINE filterChar #-}
| /O(n)/ A first order equivalent of /filter . ( \/=)/ , for the common
case of filtering a single out of a list . It is more efficient
-- to use /filterNotChar/ in this case.
--
-- > filterNotChar == filter . (/=)
--
-- filterNotChar is around 3x faster, and uses much less space, than its
-- filter equivalent
--
filterNotChar :: Char -> Buffer -> Buffer
filterNotChar c = L.filterNotByte (c2w c)
# INLINE filterNotChar #
-}
| /O(n)/ ' zip ' takes two Buffers and returns a list of
corresponding pairs of . If one input is short ,
-- excess elements of the longer Buffer are discarded. This is
-- equivalent to a pair of 'unpack' operations, and so space
-- usage may be large for multi-megabyte Buffers
zip :: Buffer -> Buffer -> [(Char,Char)]
zip ps qs
| L.null ps || L.null qs = []
| otherwise = (head ps, head qs) : zip (L.tail ps) (L.tail qs)
-- | 'zipWith' generalises 'zip' by zipping with the function given as
the first argument , instead of a tupling function . For example ,
@'zipWith ' ( + ) @ is applied to two Buffers to produce the list
-- of corresponding sums.
zipWith :: (Char -> Char -> a) -> Buffer -> Buffer -> [a]
zipWith f = L.zipWith ((. w2c) . f . w2c)
-- | 'lines' breaks a Buffer up into a list of Buffers at
-- newline Chars. The resulting strings do not contain newlines.
--
As of bytestring 0.9.0.3 , this function is stricter than its
-- list cousin.
--
lines :: Buffer -> [Buffer]
lines Empty = []
lines (Chunk c0 cs0) = loop0 c0 cs0
where
-- this is a really performance sensitive function but the
-- chunked representation makes the general case a bit expensive
-- however assuming a large chunk size and normalish line lengths
-- we will find line endings much more frequently than chunk
-- endings so it makes sense to optimise for that common case.
So we partition into two special cases depending on whether we
-- are keeping back a list of chunks that will eventually be output
-- once we get to the end of the current line.
-- the common special case where we have no existing chunks of
-- the current line
loop0 :: S.Buffer -> Buffer -> [Buffer]
loop0 c cs =
case B.elemIndex (c2w '\n') c of
Nothing -> case cs of
Empty | B.null c -> []
| otherwise -> Chunk c Empty : []
(Chunk c' cs')
| B.null c -> loop0 c' cs'
| otherwise -> loop c' [c] cs'
Just n | n /= 0 -> Chunk (B.unsafeTake n c) Empty
: loop0 (B.unsafeDrop (n+1) c) cs
| otherwise -> Empty
: loop0 (B.unsafeTail c) cs
-- the general case when we are building a list of chunks that are
-- part of the same line
loop :: S.Buffer -> [S.Buffer] -> Buffer -> [Buffer]
loop c line cs =
case B.elemIndex (c2w '\n') c of
Nothing ->
case cs of
Empty -> let c' = revChunks (c : line)
in c' `seq` (c' : [])
(Chunk c' cs') -> loop c' (c : line) cs'
Just n ->
let c' = revChunks (B.unsafeTake n c : line)
in c' `seq` (c' : loop0 (B.unsafeDrop (n+1) c) cs)
{-
This function is too strict!  Consider,

> prop_lazy =
    (L.unpack . head . lazylines $ L.append (L.pack "a\nb\n") (error "failed"))
  ==
    "a"

fails.  Here's a properly lazy version of 'lines' for lazy bytestrings

lazylines :: L.Buffer -> [L.Buffer]
lazylines s
    | L.null s  = []
    | otherwise =
        let (l,s') = L.break ((==) '\n') s
        in l : if L.null s' then []
                            else lazylines (L.tail s')

we need a similarly lazy, but efficient version.
-}
-- | 'unlines' is an inverse operation to 'lines'.  It joins lines,
-- after appending a terminating newline to each.
unlines :: [Buffer] -> Buffer
unlines []  = empty
unlines lns = intercalate nl lns `append` nl -- half as much space
    where nl = singleton '\n'
-- | 'words' breaks a Buffer up into a list of words, which
-- were delimited by Chars representing white space.  And
--
-- > tokens isSpace = words
--
words :: Buffer -> [Buffer]
words = List.filter (not . L.null) . L.splitWith isSpaceWord8
{-# INLINE words #-}
-- | The 'unwords' function is analogous to the 'unlines' function, on words.
unwords :: [Buffer] -> Buffer
unwords = intercalate (singleton ' ')
{-# INLINE unwords #-}
-- | readInt reads an Int from the beginning of the Buffer.  If
-- there is no integer at the beginning of the string, it returns
-- Nothing, otherwise it just returns the int read, and the rest of the
-- string.
{-
-- Faster:
data MaybeS = NothingS
            | JustS {-# UNPACK #-} !Int {-# UNPACK #-} !Buffer
-}
readInt :: Buffer -> Maybe (Int, Buffer)
{-# INLINE readInt #-}
readInt Empty        = Nothing
readInt (Chunk x xs) =
    -- An optional leading sign is consumed first; the Bool passed to
    -- 'loop' records whether it was '-'.
    case w2c (B.unsafeHead x) of
        '-' -> loop True  0 0 (B.unsafeTail x) xs
        '+' -> loop False 0 0 (B.unsafeTail x) xs
        _   -> loop False 0 0 x xs
  where
    -- loop neg i n c cs: 'i' counts digits consumed so far (so 'end' can
    -- tell "no digits" apart from a parsed 0), 'n' is the magnitude
    -- accumulated so far, 'c' is the current strict chunk and 'cs' the
    -- remaining lazy chunks.
    loop :: Bool -> Int -> Int
         -> S.Buffer -> Buffer -> Maybe (Int, Buffer)
    STRICT5_(loop)
    loop neg i n c cs
        | B.null c = case cs of
                       Empty          -> end neg i n c cs
                       (Chunk c' cs') -> loop neg i n c' cs'
        | otherwise =
            case B.unsafeHead c of
              w | w >= 0x30 && w <= 0x39 ->
                    loop neg (i+1)
                         (n * 10 + (fromIntegral w - 0x30))
                         (B.unsafeTail c) cs
                | otherwise -> end neg i n c cs

    -- Package the result: zero digits means parse failure; otherwise
    -- apply the sign and rebuild the unconsumed rest of the Buffer.
    {-# INLINE end #-}
    end _   0 _ _ _  = Nothing
    end neg _ n c cs = e `seq` e
        where n' = if neg then negate n else n
              c' = chunk c cs
              e  = n' `seq` c' `seq` Just $! (n',c')
--            in n' `seq` c' `seq` JustS n' c'
-- | readInteger reads an Integer from the beginning of the Buffer.  If
-- there is no integer at the beginning of the string, it returns Nothing,
-- otherwise it just returns the int read, and the rest of the string.
readInteger :: Buffer -> Maybe (Integer, Buffer)
readInteger Empty = Nothing
readInteger (Chunk c0 cs0) =
        -- Consume an optional sign, then require at least one digit.
        case w2c (B.unsafeHead c0) of
            '-' -> first (B.unsafeTail c0) cs0 >>= \(n, cs') -> return (-n, cs')
            '+' -> first (B.unsafeTail c0) cs0
            _   -> first c0 cs0
  where
    -- 'first' skips empty chunks, then fails unless the next byte is a
    -- decimal digit; on success it seeds 'loop' with that digit.
    first c cs
        | B.null c = case cs of
            Empty          -> Nothing
            (Chunk c' cs') -> first' c' cs'
        | otherwise = first' c cs

    first' c cs = case B.unsafeHead c of
        w | w >= 0x30 && w <= 0x39 -> Just $
            loop 1 (fromIntegral w - 0x30) [] (B.unsafeTail c) cs
          | otherwise              -> Nothing

    -- loop d acc ns c cs: digits are accumulated in the Int 'acc'; 'd'
    -- counts how many digits 'acc' currently holds.  After 9 digits the
    -- limb is pushed onto 'ns' (base 10^9 limbs, least significant
    -- first) so the Int accumulator never overflows.
    loop :: Int -> Int -> [Integer]
         -> S.Buffer -> Buffer -> (Integer, Buffer)
    STRICT5_(loop)
    loop d acc ns c cs
        | B.null c = case cs of
            Empty          -> combine d acc ns c cs
            (Chunk c' cs') -> loop d acc ns c' cs'
        | otherwise =
            case B.unsafeHead c of
              w | w >= 0x30 && w <= 0x39 ->
                    if d < 9 then loop (d+1)
                                       (10*acc + (fromIntegral w - 0x30))
                                       ns (B.unsafeTail c) cs
                             else loop 1 (fromIntegral w - 0x30)
                                       (fromIntegral acc : ns)
                                       (B.unsafeTail c) cs
                | otherwise -> combine d acc ns c cs

    -- Fold the base-10^9 limbs back into one Integer; the current
    -- partial limb 'acc' holds the d least significant digits.
    combine _ acc [] c cs = end (fromIntegral acc) c cs
    combine d acc ns c cs =
        end (10^d * combine1 1000000000 ns + fromIntegral acc) c cs

    -- Pairwise combination: squares the base each round, halving the
    -- list, for sub-quadratic big-Integer construction.
    combine1 _ [n] = n
    combine1 b ns  = combine1 (b*b) $ combine2 b ns

    combine2 b (n:m:ns) = let t = n+m*b in t `seq` (t : combine2 b ns)
    combine2 _ ns       = ns

    end n c cs = let c' = chunk c cs
                 in c' `seq` (n, c')
-- | Read an entire file /lazily/ into a 'Buffer'.  Use 'text mode'
-- on Windows to interpret newlines
readFile :: FilePath -> IO Buffer
readFile f = openFile f ReadMode >>= hGetContents
-- | Write a 'Buffer' to a file, truncating any previous contents.
-- The handle is closed even if the write throws.
writeFile :: FilePath -> Buffer -> IO ()
writeFile path contents =
    bracket (openFile path WriteMode) hClose (`hPut` contents)
-- | Append a 'Buffer' to the end of a file.
-- The handle is closed even if the write throws.
appendFile :: FilePath -> Buffer -> IO ()
appendFile path contents =
    bracket (openFile path AppendMode) hClose (`hPut` contents)
-- ---------------------------------------------------------------------
-- Internal utilities

-- Rebuild a lazy Buffer from a reversed list of (possibly empty) strict
-- chunks; the 'chunk' smart constructor drops the empty ones.
revChunks :: [S.Buffer] -> Buffer
revChunks = List.foldl' (flip chunk) Empty
| null | https://raw.githubusercontent.com/ekmett/ekmett.github.com/8d3abab5b66db631e148e1d046d18909bece5893/haskell/buffer/Data/Buffer/Lazy/Char8.hs | haskell | # OPTIONS_HADDOCK prune #
|
Module : Data.Buffer.Lazy.Char8
License : BSD-style
Stability : experimental
run at identical speeds to their 'Data.Word.Word8' equivalents in
"Data.Buffer.Lazy".
This module is intended to be imported @qualified@, to avoid name
> import qualified Data.Buffer.Lazy.Char8 as C
* The @Buffer@ type
:: Buffer
:: String -> Buffer
:: Buffer -> String
:: [Strict.Buffer] -> Buffer
:: Buffer -> [Strict.Buffer]
* Basic interface
:: Buffer -> Char -> Buffer
:: Buffer -> Buffer -> Buffer
:: Buffer -> Buffer
:: Buffer -> Buffer
:: Buffer -> Bool
* Transforming Buffers
:: Buffer -> Buffer
:: Buffer -> [Buffer] -> Buffer
:: [Buffer] -> [Buffer]
* Reducing 'Buffer's (folds)
:: (a -> Char -> a) -> a -> Buffer -> a
:: (a -> Char -> a) -> a -> Buffer -> a
:: (Char -> a -> a) -> a -> Buffer -> a
** Special folds
:: [Buffer] -> Buffer
:: (Char -> Buffer) -> Buffer -> Buffer
* Building Buffers
** Scans
: : ( Char - > ) - > Buffer - > Buffer
: : ( Char - > ) - > Char - > Buffer - > Buffer
: : ( Char - > ) - > Buffer - > Buffer
** Accumulating maps
** Infinite Buffers
:: Int64 -> Char -> Buffer
:: Buffer -> Buffer
** Unfolding Buffers
** Breaking strings
:: Int64 -> Buffer -> Buffer
:: Int64 -> Buffer -> Buffer
:: Int64 -> Buffer -> (Buffer, Buffer)
:: Buffer -> [Buffer]
:: Buffer -> [Buffer]
:: Buffer -> [Buffer]
** Breaking into many substrings
** Breaking into lines and words
:: Buffer -> [Buffer]
:: Buffer -> [Buffer]
:: [Buffer] -> Buffer
:: Buffer -> [Buffer]
* Predicates
:: Buffer -> Buffer -> Bool
isSuffixOf, -- :: Buffer -> Buffer -> Bool
* Searching Buffers
** Searching by equality
** Searching with a predicate
: : ( Bool ) - > Buffer - > ( Buffer , Buffer )
* Indexing Buffers
* Zipping and unzipping Buffers
: : [ ( , ) ] - > ( Buffer , Buffer )
* Ordered Buffers
sort, -- :: Buffer -> Buffer
* Low level conversions
** Copying Buffers
:: Buffer -> Buffer
* Reading from Buffers
** Standard input and output
:: IO Buffer
:: Buffer -> IO ()
:: Buffer -> IO ()
:: (Buffer -> Buffer) -> IO ()
** Files
:: FilePath -> IO Buffer
:: FilePath -> Buffer -> IO ()
:: FilePath -> Buffer -> IO ()
** I\/O with Handles
:: Handle -> IO Buffer
:: Handle -> Int64 -> IO Buffer
:: Handle -> Int64 -> IO Buffer
:: Handle -> Buffer -> IO ()
Functions transparently exported
Functions we need to wrap.
typename only
----------------------------------------------------------------------
# INLINE fromString #
| /O(n)/ Convert a 'String' into a 'Buffer'.
| /O(n)/ Converts a 'Buffer' to a 'String'.
| /O(1)/ 'cons' is analogous to '(:)' for lists.
strict in the Buffer that we are consing onto. More precisely, it forces
new \'chunk\'.
So that means you can't use a lazy recursive contruction like this:
> let xs = cons\' c xs in xs
You can however use 'cons', as well as 'repeat' and 'cycle', to build
infinite lazy Buffers.
# INLINE cons' #
'cons', this function performs a memcpy.
| /O(1)/ Extract the head and tail of a Buffer, returning Nothing
if it is empty.
| /O(1)/ Extract the last element of a packed string, which must be non-empty.
'Buffer'. It is analogous to the intersperse function on Lists.
| 'foldl', applied to a binary operator, a starting value (typically
the left-identity of the operator), and a Buffer, reduces the
Buffer using the binary operator, from left to right.
# INLINE foldl #
| 'foldl\'' is like foldl, but strict in the accumulator.
# INLINE foldl' #
| 'foldr', applied to a binary operator, a starting value
(typically the right-identity of the operator), and a packed string,
reduces the packed string using the binary operator, from right to left.
argument, and thus must be applied to non-empty 'Buffers'.
and thus must be applied to non-empty 'Buffer's
| Map a function over a 'Buffer' and concatenate the results
| Applied to a predicate and a Buffer, 'any' determines if
any element of the 'Buffer' satisfies the predicate.
| Applied to a predicate and a 'Buffer', 'all' determines if
all elements of the 'Buffer' satisfy the predicate.
| 'maximum' returns the maximum value from a 'Buffer'
| 'minimum' returns the minimum value from a 'Buffer'
---------------------------------------------------------------------
Building Buffers
reduced values from the left. This function will fuse.
Note that
| The 'mapAccumL' function behaves like a combination of 'map' and
'foldl'; it applies a function to each element of a Buffer,
passing an accumulating parameter from left to right, and returning a
| The 'mapAccumR' function behaves like a combination of 'map' and
'foldr'; it applies a function to each element of a Buffer,
passing an accumulating parameter from right to left, and returning a
----------------------------------------------------------------------
Generating and unfolding Buffers
> iterate f x == [x, f x, f (f x), ...]
element.
the value of every element.
| /O(n)/ The 'unfoldr' function is analogous to the List \'unfoldr\'.
'unfoldr' builds a Buffer from a seed value. The function takes
the element and returns 'Nothing' if it is done producing the
Buffer or returns 'Just' @(a,b)@, in which case, @a@ is a
recursive call.
----------------------------------------------------------------------
| 'takeWhile', applied to a predicate @p@ and a Buffer @xs@,
returns the longest prefix (possibly empty) of @xs@ of elements that
| ' breakChar ' breaks its Buffer argument at the first occurence
of the specified . It is more efficient than ' break ' as it is
implemented with @memchr(3)@. I.e.
> break (= = ' c ' ) " abcd " = = breakChar ' c ' " abcd "
| 'breakChar' breaks its Buffer argument at the first occurence
of the specified Char. It is more efficient than 'break' as it is
implemented with @memchr(3)@. I.e.
> break (=='c') "abcd" == breakChar 'c' "abcd"
# INLINE breakChar #
than 'span (==)'
> span (=='c') "abcd" == spanByte 'c' "abcd"
| /O(n)/ Break a 'Buffer' into pieces separated by the byte
argument, consuming the delimiter. I.e.
> split '\n' "a\nb\nd\ne" == ["a","b","d","e"]
> split 'x' "x" == ["",""]
and
> intercalate [c] . split c == id
As for all splitting functions in this library, this function does
not copy the substrings, it just constructs new 'Buffers' that
are slices of the original.
| /O(n)/ Splits a 'Buffer' into components delimited by
separators, where the predicate returns True for a separator element.
separators result in an empty component in the output. eg.
> splitWith (=='a') "aabbaca" == ["","","bb","c",""]
| The 'groupBy' function is the non-overloaded version of 'group'.
| /O(1)/ 'Buffer' index (subscript) operator, starting from 0.
query element, or 'Nothing' if there is no such element.
# INLINE elemIndex #
| /O(n)/ The 'elemIndices' function extends 'elemIndex', by returning
the indices of all elements equal to the query element, in ascending order.
| The 'findIndex' function takes a predicate and a 'Buffer' and
| The 'findIndices' function extends 'findIndex', by returning the
indices of all elements satisfying the predicate, in ascending order.
> count == length . elemIndices
> count '\n' == length . lines
But more efficiently than using length on the intermediate list.
| /O(n)/ 'elem' is the 'Buffer' membership predicate. This
| /O(n)/ 'notElem' is the inverse of 'elem'
| /O(n)/ 'filter', applied to a predicate and a Buffer,
returns a Buffer containing those characters that satisfy the
predicate.
| /O(n)/ and /O(n\/c ) space/ A first order equivalent of /filter .
(= =) / , for the common case of filtering a single . It is more
efficient to use /filterChar/ in this case .
> filterChar = = filter . (= -- filterChar is around 10x faster , and uses much less space , than its
filter equivalent
| /O(n)/ and /O(n\/c) space/ A first order equivalent of /filter .
(==)/, for the common case of filtering a single Char. It is more
efficient to use /filterChar/ in this case.
> filterChar == filter . (==)
filterChar is around 10x faster, and uses much less space, than its
filter equivalent
# INLINE filterChar #
| /O(n)/ The 'find' function takes a predicate and a Buffer,
if there is no such element.
# INLINE find #
| /O(n)/ A first order equivalent of /filter . (= =) / , for the common
case of filtering a single . It is more efficient to use
filterChar in this case .
> filterChar = = filter . (= -- filterChar is around 10x faster , and uses much less space , than its
filter equivalent
| /O(n)/ A first order equivalent of /filter . (==)/, for the common
case of filtering a single Char. It is more efficient to use
filterChar in this case.
> filterChar == filter . (==)
filterChar is around 10x faster, and uses much less space, than its
filter equivalent
# INLINE filterChar #
to use /filterNotChar/ in this case.
> filterNotChar == filter . (/=)
filterNotChar is around 3x faster, and uses much less space, than its
filter equivalent
excess elements of the longer Buffer are discarded. This is
equivalent to a pair of 'unpack' operations, and so space
usage may be large for multi-megabyte Buffers
| 'zipWith' generalises 'zip' by zipping with the function given as
of corresponding sums.
| 'lines' breaks a Buffer up into a list of Buffers at
newline Chars. The resulting strings do not contain newlines.
list cousin.
this is a really performance sensitive function but the
chunked representation makes the general case a bit expensive
however assuming a large chunk size and normalish line lengths
we will find line endings much more frequently than chunk
endings so it makes sense to optimise for that common case.
are keeping back a list of chunks that will eventually be output
once we get to the end of the current line.
the common special case where we have no existing chunks of
the current line
the general case when we are building a list of chunks that are
part of the same line
| 'unlines' is an inverse operation to 'lines'. It joins lines,
after appending a terminating newline to each.
half as much space
| 'words' breaks a Buffer up into a list of words, which
> tokens isSpace = words
| The 'unwords' function is analogous to the 'unlines' function, on words.
| readInt reads an Int from the beginning of the Buffer. If
there is no integer at the beginning of the string, it returns
Nothing, otherwise it just returns the int read, and the rest of the
string.
-- Faster:
data MaybeS = NothingS
| JustS {-# UNPACK #
# UNPACK #
in n' `seq` c' `seq` JustS n' c'
there is no integer at the beginning of the string, it returns Nothing,
otherwise it just returns the int read, and the rest of the string.
| Read an entire file /lazily/ into a 'Buffer'. Use 'text mode'
| Write a 'Buffer' to a file.
| Append a 'Buffer' to a file.
---------------------------------------------------------------------
Internal utilities
reverse a list of possibly-empty chunks into a lazy Buffer
| # LANGUAGE CPP #
Copyright : ( c ) 2006
Maintainer :
Portability : non - portable ( imports Data . Buffer . Lazy )
Manipulate /lazy/ ' 's using ' ' operations . All will
be truncated to 8 bits . It can be expected that these functions will
clashes with " Prelude " functions . eg .
module Data.Buffer.Lazy.Char8 (
instances : , Ord , Show , Read , Data , Typeable
* Introducing and eliminating ' 's
: : Buffer
: : Buffer - > Buffer
: : Buffer - > Buffer
: : Buffer - >
: : Buffer - > Maybe ( , Buffer )
: : Buffer - >
: : Buffer - > Int64
: : ( ) - > Buffer - > Buffer
: : Buffer - > Buffer
: : ( Char - > ) - > Buffer - >
: : ( Char - > ) - > Buffer - >
: : ( Char - > ) - > Buffer - >
: : ( Bool ) - > Buffer - > Bool
: : ( Bool ) - > Buffer - > Bool
: : Buffer - >
: : Buffer - >
: : ( Char - > ) - > Char - > Buffer - > Buffer
: : ( acc - > Char - > ( acc , ) ) - > acc - > Buffer - > ( acc , Buffer )
: : ( acc - > Char - > ( acc , ) ) - > acc - > Buffer - > ( acc , Buffer )
: :
: : ( ) - > Char - > Buffer
: : ( a - > Maybe ( , a ) ) - > a - > Buffer
* Substrings
: : ( Bool ) - > Buffer - > Buffer
: : ( Bool ) - > Buffer - > Buffer
: : ( Bool ) - > Buffer - > ( Buffer , Buffer )
: : ( Bool ) - > Buffer - > ( Buffer , Buffer )
: : ( Char - > Bool ) - > Buffer - > [ Buffer ]
: : Buffer - > [ Buffer ]
: : ( Bool ) - > Buffer - > [ Buffer ]
: : Bool
: : Bool
: : ( Bool ) - > Buffer - > Maybe
: : ( Bool ) - > Buffer - > Buffer
: : Buffer - > Int64 - >
: : Buffer - > Maybe Int64
: : Buffer - > [ Int64 ]
: : ( Bool ) - > Buffer - > Maybe Int64
: : ( Bool ) - > Buffer - > [ Int64 ]
: : Buffer - > Int64
: : Buffer - > Buffer - > [ ( , ) ]
: : ( Char - > c ) - > Buffer - > Buffer - > [ c ]
readInt,
readInteger,
* I\/O with ' 's
) where
import Data.Buffer.Lazy.Word8
(fromChunks, toChunks
,empty,null,length,tail,init,append,reverse,transpose,cycle
,concat,take,drop,splitAt,intercalate,isPrefixOf,group,inits,tails,copy
,hGetContents, hGet, hPut, getContents
,hGetNonBlocking
,putStr, putStrLn, interact)
import qualified Data.Buffer.Lazy.Word8 as L
import qualified Data.Buffer as B
import qualified Data.Buffer.Unsafe as B
import Data.Buffer.Lazy.Internal
import Data.Buffer.Internal (w2c, c2w, isSpaceWord8)
import Data.Int (Int64)
import qualified Data.List as List
import Prelude hiding
(reverse,head,tail,last,init,null,length,map,lines,foldl,foldr,unlines
,concat,any,take,drop,splitAt,takeWhile,dropWhile,span,break,elem,filter
,unwords,words,maximum,minimum,all,concatMap,scanl,scanl1,foldl1,foldr1
,readFile,writeFile,appendFile,replicate,getContents,getLine,putStr,putStrLn
,zip,zipWith,unzip,notElem,repeat,iterate,interact,cycle)
import System.IO (hClose,openFile,IOMode(..))
#ifndef __NHC__
import Control.Exception (bracket)
#else
import IO (bracket)
#endif
#if __GLASGOW_HASKELL__ >= 608
import Data.String
#endif
#define STRICT1(f) f a | a `seq` False = undefined
#define STRICT2(f) f a b | a `seq` b `seq` False = undefined
#define STRICT3(f) f a b c | a `seq` b `seq` c `seq` False = undefined
#define STRICT4(f) f a b c d | a `seq` b `seq` c `seq` d `seq` False = undefined
#define STRICT5(f) f a b c d e | a `seq` b `seq` c `seq` d `seq` e `seq` False = undefined
#define STRICT5_(f) f a b c d _ | a `seq` b `seq` c `seq` d `seq` False = undefined
| /O(1)/ Convert a ' ' into a ' Buffer '
singleton :: Char -> Buffer
singleton = L.singleton . c2w
# INLINE singleton #
#if __GLASGOW_HASKELL__ >= 608
instance IsString Buffer where
fromString = pack
#endif
pack :: [Char] -> Buffer
pack = L.pack. List.map c2w
unpack :: Buffer -> [Char]
unpack = List.map w2c . L.unpack
# INLINE unpack #
cons :: Char -> Buffer -> Buffer
cons = L.cons . c2w
# INLINE cons #
| /O(1)/ Unlike ' cons ' , ' '' is
the head and the first chunk . It does this because , for space efficiency , it
may coalesce the new byte onto the first \'chunk\ ' rather than starting a
cons' :: Char -> Buffer -> Buffer
cons' = L.cons' . c2w
| /O(n)/ Append a to the end of a ' Buffer ' . Similar to
snoc :: Buffer -> Char -> Buffer
snoc p = L.snoc p . c2w
# INLINE snoc #
| /O(1)/ Extract the first element of a Buffer , which must be non - empty .
head :: Buffer -> Char
head = w2c . L.head
# INLINE head #
uncons :: Buffer -> Maybe (Char, Buffer)
uncons bs = case L.uncons bs of
Nothing -> Nothing
Just (w, bs') -> Just (w2c w, bs')
# INLINE uncons #
last :: Buffer -> Char
last = w2c . L.last
# INLINE last #
| /O(n)/ ' map ' is the Buffer obtained by applying @f@ to each element of @xs@
map :: (Char -> Char) -> Buffer -> Buffer
map f = L.map (c2w . f . w2c)
# INLINE map #
| /O(n)/ The ' intersperse ' function takes a and a ' Buffer '
and ' that between the elements of the
intersperse :: Char -> Buffer -> Buffer
intersperse = L.intersperse . c2w
# INLINE intersperse #
foldl :: (a -> Char -> a) -> a -> Buffer -> a
foldl f = L.foldl (\a c -> f a (w2c c))
foldl' :: (a -> Char -> a) -> a -> Buffer -> a
foldl' f = L.foldl' (\a c -> f a (w2c c))
foldr :: (Char -> a -> a) -> a -> Buffer -> a
foldr f = L.foldr (\c a -> f (w2c c) a)
# INLINE foldr #
| ' foldl1 ' is a variant of ' foldl ' that has no starting value
foldl1 :: (Char -> Char -> Char) -> Buffer -> Char
foldl1 f ps = w2c (L.foldl1 (\x y -> c2w (f (w2c x) (w2c y))) ps)
# INLINE foldl1 #
| ' foldl1\ '' is like ' foldl1 ' , but strict in the accumulator .
foldl1' :: (Char -> Char -> Char) -> Buffer -> Char
foldl1' f ps = w2c (L.foldl1' (\x y -> c2w (f (w2c x) (w2c y))) ps)
| ' ' is a variant of ' foldr ' that has no starting value argument ,
foldr1 :: (Char -> Char -> Char) -> Buffer -> Char
foldr1 f ps = w2c (L.foldr1 (\x y -> c2w (f (w2c x) (w2c y))) ps)
# INLINE foldr1 #
concatMap :: (Char -> Buffer) -> Buffer -> Buffer
concatMap f = L.concatMap (f . w2c)
# INLINE concatMap #
any :: (Char -> Bool) -> Buffer -> Bool
any f = L.any (f . w2c)
# INLINE any #
all :: (Char -> Bool) -> Buffer -> Bool
all f = L.all (f . w2c)
# INLINE all #
maximum :: Buffer -> Char
maximum = w2c . L.maximum
# INLINE maximum #
minimum :: Buffer -> Char
minimum = w2c . L.minimum
# INLINE minimum #
| ' ' is similar to ' foldl ' , but returns a list of successive
> f z [ x1 , x2 , ... ] = = [ z , z ` f ` x1 , ( z ` f ` x1 ) ` f ` x2 , ... ]
> last ( f z xs ) = = foldl f z xs .
scanl :: (Char -> Char -> Char) -> Char -> Buffer -> Buffer
scanl f z = L.scanl (\a b -> c2w (f (w2c a) (w2c b))) (c2w z)
final value of this accumulator together with the new Buffer .
mapAccumL :: (acc -> Char -> (acc, Char)) -> acc -> Buffer -> (acc, Buffer)
mapAccumL f = L.mapAccumL (\a w -> case f a (w2c w) of (a',c) -> (a', c2w c))
final value of this accumulator together with the new Buffer .
mapAccumR :: (acc -> Char -> (acc, Char)) -> acc -> Buffer -> (acc, Buffer)
mapAccumR f = L.mapAccumR (\acc w -> case f acc (w2c w) of (acc', c) -> (acc', c2w c))
| @'iterate ' f returns an infinite Buffer of repeated applications
of to @x@ :
iterate :: (Char -> Char) -> Char -> Buffer
iterate f = L.iterate (c2w . f . w2c) . c2w
| @'repeat ' is an infinite Buffer , with @x@ the value of every
repeat :: Char -> Buffer
repeat = L.repeat . c2w
| /O(n)/ @'replicate ' n is a Buffer of length @n@ with
replicate :: Int64 -> Char -> Buffer
replicate w c = L.replicate w (c2w c)
prepending to the and @b@ is used as the next element in a
unfoldr :: (a -> Maybe (Char, a)) -> a -> Buffer
unfoldr f = L.unfoldr $ \a -> case f a of
Nothing -> Nothing
Just (c, a') -> Just (c2w c, a')
satisfy @p@.
takeWhile :: (Char -> Bool) -> Buffer -> Buffer
takeWhile f = L.takeWhile (f . w2c)
# INLINE takeWhile #
| ' dropWhile ' @p xs@ returns the suffix remaining after ' takeWhile ' @p xs@.
dropWhile :: (Char -> Bool) -> Buffer -> Buffer
dropWhile f = L.dropWhile (f . w2c)
# INLINE dropWhile #
| ' break ' @p@ is equivalent to @'span ' ( ' not ' . p)@.
break :: (Char -> Bool) -> Buffer -> (Buffer, Buffer)
break f = L.break (f . w2c)
# INLINE break #
| ' span ' @p xs@ breaks the Buffer into two segments . It is
equivalent to @('takeWhile ' p xs , ' dropWhile ' p xs)@
span :: (Char -> Bool) -> Buffer -> (Buffer, Buffer)
span f = L.span (f . w2c)
# INLINE span #
breakChar : : Buffer - > ( Buffer , Buffer )
breakChar = L.breakByte . c2w
{ - # INLINE breakChar #
breakChar :: Char -> Buffer -> (Buffer, Buffer)
breakChar = L.breakByte . c2w
| ' spanChar ' breaks its Buffer argument at the first
occurence of a other than its argument . It is more efficient
spanChar :: Char -> Buffer -> (Buffer, Buffer)
spanChar = L.spanByte . c2w
# INLINE spanChar #
-}
TODO , more rules for breakChar *
> split ' a ' " aXaXaXa " = = [ " " , " X","X","X " ]
> split = = splitWith . (=
split :: Char -> Buffer -> [Buffer]
split = L.split . c2w
# INLINE split #
The resulting components do not contain the separators . Two adjacent
splitWith :: (Char -> Bool) -> Buffer -> [Buffer]
splitWith f = L.splitWith (f . w2c)
# INLINE splitWith #
groupBy :: (Char -> Char -> Bool) -> Buffer -> [Buffer]
groupBy k = L.groupBy (\a b -> k (w2c a) (w2c b))
index :: Buffer -> Int64 -> Char
index = (w2c .) . L.index
# INLINE index #
| /O(n)/ The ' elemIndex ' function returns the index of the first
element in the given ' Buffer ' which is equal ( by memchr ) to the
elemIndex :: Char -> Buffer -> Maybe Int64
elemIndex = L.elemIndex . c2w
elemIndices :: Char -> Buffer -> [Int64]
elemIndices = L.elemIndices . c2w
# INLINE elemIndices #
returns the index of the first element in the Buffer satisfying the predicate .
findIndex :: (Char -> Bool) -> Buffer -> Maybe Int64
findIndex f = L.findIndex (f . w2c)
# INLINE findIndex #
findIndices :: (Char -> Bool) -> Buffer -> [Int64]
findIndices f = L.findIndices (f . w2c)
| count returns the number of times its argument appears in the
count :: Char -> Buffer -> Int64
count c = L.count (c2w c)
implementation uses @memchr(3)@.
elem :: Char -> Buffer -> Bool
elem c = L.elem (c2w c)
# INLINE elem #
notElem :: Char -> Buffer -> Bool
notElem c = L.notElem (c2w c)
# INLINE notElem #
filter :: (Char -> Bool) -> Buffer -> Buffer
filter f = L.filter (f . w2c)
# INLINE filter #
filterChar : : Buffer - > Buffer
filterChar c ps = replicate ( count c ps ) c
{ - # INLINE filterChar #
filterChar :: Char -> Buffer -> Buffer
filterChar c ps = replicate (count c ps) c
# RULES
" Buffer specialise filter (= = x ) " forall x.
filter ( (= ) =
#
"Buffer specialise filter (== x)" forall x.
filter ((==) x) = filterChar x
#-}
# RULES
" Buffer specialise filter (= = x ) " forall x.
filter (= = x ) =
#
"Buffer specialise filter (== x)" forall x.
filter (== x) = filterChar x
#-}
-}
and returns the first element in matching the predicate , or ' Nothing '
find :: (Char -> Bool) -> Buffer -> Maybe Char
find f ps = w2c `fmap` L.find (f . w2c) ps
filterChar : : Buffer - > Buffer
filterChar c = L.filterByte ( c2w c )
{ - # INLINE filterChar #
filterChar :: Char -> Buffer -> Buffer
filterChar c = L.filterByte (c2w c)
| /O(n)/ A first order equivalent of /filter . ( \/=)/ , for the common
case of filtering a single out of a list . It is more efficient
filterNotChar :: Char -> Buffer -> Buffer
filterNotChar c = L.filterNotByte (c2w c)
# INLINE filterNotChar #
-}
| /O(n)/ ' zip ' takes two Buffers and returns a list of
corresponding pairs of . If one input is short ,
zip :: Buffer -> Buffer -> [(Char,Char)]
zip ps qs
| L.null ps || L.null qs = []
| otherwise = (head ps, head qs) : zip (L.tail ps) (L.tail qs)
the first argument , instead of a tupling function . For example ,
@'zipWith ' ( + ) @ is applied to two Buffers to produce the list
zipWith :: (Char -> Char -> a) -> Buffer -> Buffer -> [a]
zipWith f = L.zipWith ((. w2c) . f . w2c)
As of bytestring 0.9.0.3 , this function is stricter than its
lines :: Buffer -> [Buffer]
lines Empty = []
lines (Chunk c0 cs0) = loop0 c0 cs0
where
So we partition into two special cases depending on whether we
loop0 :: S.Buffer -> Buffer -> [Buffer]
loop0 c cs =
case B.elemIndex (c2w '\n') c of
Nothing -> case cs of
Empty | B.null c -> []
| otherwise -> Chunk c Empty : []
(Chunk c' cs')
| B.null c -> loop0 c' cs'
| otherwise -> loop c' [c] cs'
Just n | n /= 0 -> Chunk (B.unsafeTake n c) Empty
: loop0 (B.unsafeDrop (n+1) c) cs
| otherwise -> Empty
: loop0 (B.unsafeTail c) cs
loop :: S.Buffer -> [S.Buffer] -> Buffer -> [Buffer]
loop c line cs =
case B.elemIndex (c2w '\n') c of
Nothing ->
case cs of
Empty -> let c' = revChunks (c : line)
in c' `seq` (c' : [])
(Chunk c' cs') -> loop c' (c : line) cs'
Just n ->
let c' = revChunks (B.unsafeTake n c : line)
in c' `seq` (c' : loop0 (B.unsafeDrop (n+1) c) cs)
This function is too strict ! Consider ,
> prop_lazy =
( L.unpack . head . lazylines $ L.append ( L.pack " a\nb\n " ) ( error " failed " ) )
= =
" a "
fails . Here 's a properly lazy version of ' lines ' for lazy bytestrings
lazylines : : L.Buffer - > [ L.Buffer ]
lazylines s
| L.null s = [ ]
| otherwise =
let ( l , s ' ) = L.break ( (= =) ' \n ' ) s
in l : if L.null s ' then [ ]
else lazylines ( L.tail s ' )
we need a similarly lazy , but efficient version .
This function is too strict! Consider,
> prop_lazy =
(L.unpack . head . lazylines $ L.append (L.pack "a\nb\n") (error "failed"))
==
"a"
fails. Here's a properly lazy version of 'lines' for lazy bytestrings
lazylines :: L.Buffer -> [L.Buffer]
lazylines s
| L.null s = []
| otherwise =
let (l,s') = L.break ((==) '\n') s
in l : if L.null s' then []
else lazylines (L.tail s')
we need a similarly lazy, but efficient version.
-}
unlines :: [Buffer] -> Buffer
unlines [] = empty
where nl = singleton '\n'
were delimited by representing white space . And
words :: Buffer -> [Buffer]
words = List.filter (not . L.null) . L.splitWith isSpaceWord8
# INLINE words #
unwords :: [Buffer] -> Buffer
unwords = intercalate (singleton ' ')
# INLINE unwords #
-}
readInt :: Buffer -> Maybe (Int, Buffer)
# INLINE readInt #
readInt Empty = Nothing
readInt (Chunk x xs) = case w2c (B.unsafeHead x) of
'-' -> loop True 0 0 (B.unsafeTail x) xs
'+' -> loop False 0 0 (B.unsafeTail x) xs
_ -> loop False 0 0 x xs
where loop :: Bool -> Int -> Int
-> S.Buffer -> Buffer -> Maybe (Int, Buffer)
STRICT5_(loop)
loop neg i n c cs
| B.null c = case cs of
Empty -> end neg i n c cs
(Chunk c' cs') -> loop neg i n c' cs'
| otherwise =
case B.unsafeHead c of
w | w >= 0x30
&& w <= 0x39 -> loop neg (i+1)
(n * 10 + (fromIntegral w - 0x30))
(B.unsafeTail c) cs
| otherwise -> end neg i n c cs
# INLINE end #
end _ 0 _ _ _ = Nothing
end neg _ n c cs = e `seq` e
where n' = if neg then negate n else n
c' = chunk c cs
e = n' `seq` c' `seq` Just $! (n',c')
| readInteger reads an Integer from the beginning of the Buffer . If
readInteger :: Buffer -> Maybe (Integer, Buffer)
readInteger Empty = Nothing
readInteger (Chunk c0 cs0) =
case w2c (B.unsafeHead c0) of
'-' -> first (B.unsafeTail c0) cs0 >>= \(n, cs') -> return (-n, cs')
'+' -> first (B.unsafeTail c0) cs0
_ -> first c0 cs0
where first c cs
| B.null c = case cs of
Empty -> Nothing
(Chunk c' cs') -> first' c' cs'
| otherwise = first' c cs
first' c cs = case B.unsafeHead c of
w | w >= 0x30 && w <= 0x39 -> Just $
loop 1 (fromIntegral w - 0x30) [] (B.unsafeTail c) cs
| otherwise -> Nothing
loop :: Int -> Int -> [Integer]
-> S.Buffer -> Buffer -> (Integer, Buffer)
STRICT5_(loop)
loop d acc ns c cs
| B.null c = case cs of
Empty -> combine d acc ns c cs
(Chunk c' cs') -> loop d acc ns c' cs'
| otherwise =
case B.unsafeHead c of
w | w >= 0x30 && w <= 0x39 ->
if d < 9 then loop (d+1)
(10*acc + (fromIntegral w - 0x30))
ns (B.unsafeTail c) cs
else loop 1 (fromIntegral w - 0x30)
(fromIntegral acc : ns)
(B.unsafeTail c) cs
| otherwise -> combine d acc ns c cs
combine _ acc [] c cs = end (fromIntegral acc) c cs
combine d acc ns c cs =
end (10^d * combine1 1000000000 ns + fromIntegral acc) c cs
combine1 _ [n] = n
combine1 b ns = combine1 (b*b) $ combine2 b ns
combine2 b (n:m:ns) = let t = n+m*b in t `seq` (t : combine2 b ns)
combine2 _ ns = ns
end n c cs = let c' = chunk c cs
in c' `seq` (n, c')
on Windows to interpret newlines
readFile :: FilePath -> IO Buffer
readFile f = openFile f ReadMode >>= hGetContents
writeFile :: FilePath -> Buffer -> IO ()
writeFile f txt = bracket (openFile f WriteMode) hClose
(\hdl -> hPut hdl txt)
appendFile :: FilePath -> Buffer -> IO ()
appendFile f txt = bracket (openFile f AppendMode) hClose
(\hdl -> hPut hdl txt)
revChunks :: [S.Buffer] -> Buffer
revChunks cs = List.foldl' (flip chunk) Empty cs
|
aa1bd1fc534ba6368c033f8e26c8cdbe3d2592ecfd3b7ff44933dcc85661d269 | sondresl/AdventOfCode | Day16.hs | module Day16 where
import Text.Regex.TDFA
import Data.List.Extra (splitOn)
-- | The MFCSAM readout from the puzzle text: each compound paired with
-- the amount detected for the real Aunt Sue.
constraints :: [(String, Int)]
constraints = [("children", 3),
               ("cats", 7),
               ("samoyeds", 2),
               ("pomeranians", 3),
               ("akitas", 0),
               ("vizslas", 0),
               ("goldfish", 5),
               ("trees", 3),
               ("cars", 2),
               ("perfumes", 1)]
type Sue = (Int, [(String, Int)])
parse :: String -> [Sue]
parse = map go . lines
where
go str = (read (str =~ "[0-9]+"), rest str)
rest :: String -> [(String, Int)]
rest str = let ms = getAllTextMatches (str =~ "[a-z]+: [0-9]+")
in map ((\[a,b] -> (a, read b)) . splitOn ": ") ms
findSue :: Sue -> Bool
findSue (i, cs) = all go constraints
where go (x, y) = comp x (== y)
comp x f = maybe True f (lookup x cs)
findSue2 :: Sue -> Bool
findSue2 (i, cs) = all go constraints
where go ("cats", y) = comp "cats" (> y)
go ("trees", y) = comp "trees" (> y)
go ("pomeranians", y) = comp "pomeranians" (< y)
go ("goldfish", y) = comp "goldfish" (< y)
go (x, y) = comp x (== y)
comp x f = maybe True f (lookup x cs)
main = do
input <- parse <$> readFile "../data/16.in"
print $ filter findSue input
print $ filter findSue2 input
| null | https://raw.githubusercontent.com/sondresl/AdventOfCode/224cf59354c7c1c31821f36884fe8909c5fdf9a6/2015/Haskell/src/Day16.hs | haskell | module Day16 where
import Text.Regex.TDFA
import Data.List.Extra (splitOn)
constraints = [("children", 3),
("cats", 7),
("samoyeds", 2),
("pomeranians", 3),
("akitas", 0),
("vizslas", 0),
("goldfish", 5),
("trees", 3),
("cars", 2),
("perfumes", 1)]
type Sue = (Int, [(String, Int)])
parse :: String -> [Sue]
parse = map go . lines
where
go str = (read (str =~ "[0-9]+"), rest str)
rest :: String -> [(String, Int)]
rest str = let ms = getAllTextMatches (str =~ "[a-z]+: [0-9]+")
in map ((\[a,b] -> (a, read b)) . splitOn ": ") ms
findSue :: Sue -> Bool
findSue (i, cs) = all go constraints
where go (x, y) = comp x (== y)
comp x f = maybe True f (lookup x cs)
findSue2 :: Sue -> Bool
findSue2 (i, cs) = all go constraints
where go ("cats", y) = comp "cats" (> y)
go ("trees", y) = comp "trees" (> y)
go ("pomeranians", y) = comp "pomeranians" (< y)
go ("goldfish", y) = comp "goldfish" (< y)
go (x, y) = comp x (== y)
comp x f = maybe True f (lookup x cs)
main = do
input <- parse <$> readFile "../data/16.in"
print $ filter findSue input
print $ filter findSue2 input
|
|
c5fe35487cdda69c9887fb3d6a46520af645114bc6ef12f11b63965a59df310f | ghc/ghc | Lexeme.hs | -- (c) The GHC Team
--
-- Functions to evaluate whether or not a string is a valid identifier.
-- There is considerable overlap between the logic here and the logic
in GHC.Parser . , but sadly there seems to be no way to merge them .
module GHC.Utils.Lexeme (
* Lexical characteristics of names
| Use these functions to figure what kind of name a ' FastString '
represents ; these functions do /not/ check that the identifier
-- is valid.
isLexCon, isLexVar, isLexId, isLexSym,
isLexConId, isLexConSym, isLexVarId, isLexVarSym,
startsVarSym, startsVarId, startsConSym, startsConId,
-- * Validating identifiers
| These functions ( working over plain old ' 's ) check
-- to make sure that the identifier is valid.
okVarOcc, okConOcc, okTcOcc,
okVarIdOcc, okVarSymOcc, okConIdOcc, okConSymOcc
Some of the exports above are not used within GHC , but may
be of value to GHC API users .
) where
import GHC.Prelude
import GHC.Data.FastString
import Data.Char
import qualified Data.Set as Set
import GHC.Lexeme
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
Lexical categories
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
These functions test strings to see if they fit the lexical categories
defined in the report .
Note [ Classification of generated names ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some names generated for internal use can show up in debugging output ,
e.g. when using -ddump - simpl . These generated names start with a $
but should still be pretty - printed using prefix notation . We make sure
this is the case in isLexVarSym by only classifying a name as a symbol
if all its characters are symbols , not just its first one .
************************************************************************
* *
Lexical categories
* *
************************************************************************
These functions test strings to see if they fit the lexical categories
defined in the Haskell report.
Note [Classification of generated names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some names generated for internal use can show up in debugging output,
e.g. when using -ddump-simpl. These generated names start with a $
but should still be pretty-printed using prefix notation. We make sure
this is the case in isLexVarSym by only classifying a name as a symbol
if all its characters are symbols, not just its first one.
-}
isLexCon, isLexVar, isLexId, isLexSym :: FastString -> Bool
isLexConId, isLexConSym, isLexVarId, isLexVarSym :: FastString -> Bool
isLexCon cs = isLexConId cs || isLexConSym cs
isLexVar cs = isLexVarId cs || isLexVarSym cs
isLexId cs = isLexConId cs || isLexVarId cs
isLexSym cs = isLexConSym cs || isLexVarSym cs
-------------
isLexConId cs = case unpackFS cs of -- Prefix type or data constructors
e.g. " " , " [ ] " , " ( , ) "
c:_ -> cs == fsLit "[]" || startsConId c
isLexVarId cs = case unpackFS cs of -- Ordinary prefix identifiers
[] -> False -- e.g. "x", "_x"
c:_ -> startsVarId c
isLexConSym cs = case unpackFS cs of -- Infix type or data constructors
[] -> False -- e.g. ":-:", ":", "->"
c:_ -> cs == fsLit "->" || startsConSym c
isLexVarSym fs -- Infix identifiers e.g. "+"
| fs == (fsLit "~R#") = True
| otherwise
= case (if nullFS fs then [] else unpackFS fs) of
[] -> False
(c:cs) -> startsVarSym c && all isVarSymChar cs
-- See Note [Classification of generated names]
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
Detecting valid names for Template Haskell
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
************************************************************************
* *
Detecting valid names for Template Haskell
* *
************************************************************************
-}
----------------------
-- External interface
----------------------
-- | Is this an acceptable variable name?
okVarOcc :: String -> Bool
okVarOcc str@(c:_)
| startsVarId c
= okVarIdOcc str
| startsVarSym c
= okVarSymOcc str
okVarOcc _ = False
-- | Is this an acceptable constructor name?
okConOcc :: String -> Bool
okConOcc str@(c:_)
| startsConId c
= okConIdOcc str
| startsConSym c
= okConSymOcc str
| str == "[]"
= True
okConOcc _ = False
-- | Is this an acceptable type name?
okTcOcc :: String -> Bool
okTcOcc "[]" = True
okTcOcc "->" = True
okTcOcc "~" = True
okTcOcc str@(c:_)
| startsConId c
= okConIdOcc str
| startsConSym c
= okConSymOcc str
| startsVarSym c
= okVarSymOcc str
okTcOcc _ = False
-- | Is this an acceptable alphanumeric variable name, assuming it starts
-- with an acceptable letter?
okVarIdOcc :: String -> Bool
okVarIdOcc str = okIdOcc str &&
-- admit "_" as a valid identifier. Required to support typed
holes in Template Haskell . See # 10267
(str == "_" || not (str `Set.member` reservedIds))
-- | Is this an acceptable symbolic variable name, assuming it starts
-- with an acceptable character?
okVarSymOcc :: String -> Bool
okVarSymOcc str = all okSymChar str &&
not (str `Set.member` reservedOps) &&
not (isDashes str)
-- | Is this an acceptable alphanumeric constructor name, assuming it
-- starts with an acceptable letter?
okConIdOcc :: String -> Bool
okConIdOcc str = okIdOcc str ||
is_tuple_name1 True str ||
-- Is it a boxed tuple...
is_tuple_name1 False str ||
... or an unboxed tuple ( # 12407 ) ...
is_sum_name1 str
... or an unboxed sum ( # 12514 ) ?
where
-- check for tuple name, starting at the beginning
is_tuple_name1 True ('(' : rest) = is_tuple_name2 True rest
is_tuple_name1 False ('(' : '#' : rest) = is_tuple_name2 False rest
is_tuple_name1 _ _ = False
-- check for tuple tail
is_tuple_name2 True ")" = True
is_tuple_name2 False "#)" = True
is_tuple_name2 boxed (',' : rest) = is_tuple_name2 boxed rest
is_tuple_name2 boxed (ws : rest)
| isSpace ws = is_tuple_name2 boxed rest
is_tuple_name2 _ _ = False
-- check for sum name, starting at the beginning
is_sum_name1 ('(' : '#' : rest) = is_sum_name2 False rest
is_sum_name1 _ = False
-- check for sum tail, only allowing at most one underscore
is_sum_name2 _ "#)" = True
is_sum_name2 underscore ('|' : rest) = is_sum_name2 underscore rest
is_sum_name2 False ('_' : rest) = is_sum_name2 True rest
is_sum_name2 underscore (ws : rest)
| isSpace ws = is_sum_name2 underscore rest
is_sum_name2 _ _ = False
-- | Is this an acceptable symbolic constructor name, assuming it
-- starts with an acceptable character?
okConSymOcc :: String -> Bool
okConSymOcc ":" = True
okConSymOcc str = all okSymChar str &&
not (str `Set.member` reservedOps)
----------------------
Internal functions
----------------------
-- | Is this string an acceptable id, possibly with a suffix of hashes,
-- but not worrying about case or clashing with reserved words?
okIdOcc :: String -> Bool
okIdOcc str
= let hashes = dropWhile okIdChar str in
all (== '#') hashes -- -XMagicHash allows a suffix of hashes
-- of course, `all` says "True" to an empty list
| Is this character acceptable in an identifier ( after the first letter ) ?
See alexGetByte in GHC.Parser .
okIdChar :: Char -> Bool
okIdChar c = case generalCategory c of
UppercaseLetter -> True
LowercaseLetter -> True
TitlecaseLetter -> True
See # 10196
OtherLetter -> True -- See #1103
See # 7650
DecimalNumber -> True
OtherNumber -> True -- See #4373
_ -> c == '\'' || c == '_'
| All reserved identifiers . Taken from section 2.4 of the 2010 Report .
reservedIds :: Set.Set String
reservedIds = Set.fromList [ "case", "class", "data", "default", "deriving"
, "do", "else", "foreign", "if", "import", "in"
, "infix", "infixl", "infixr", "instance", "let"
, "module", "newtype", "of", "then", "type", "where"
, "_" ]
| All reserved operators . Taken from section 2.4 of the 2010 Report .
reservedOps :: Set.Set String
reservedOps = Set.fromList [ "..", ":", "::", "=", "\\", "|", "<-", "->"
, "@", "~", "=>" ]
| Does this string contain only dashes and has at least 2 of them ?
isDashes :: String -> Bool
isDashes ('-' : '-' : rest) = all (== '-') rest
isDashes _ = False
| null | https://raw.githubusercontent.com/ghc/ghc/37cfe3c0f4fb16189bbe3bb735f758cd6e3d9157/compiler/GHC/Utils/Lexeme.hs | haskell | (c) The GHC Team
Functions to evaluate whether or not a string is a valid identifier.
There is considerable overlap between the logic here and the logic
is valid.
* Validating identifiers
to make sure that the identifier is valid.
-----------
Prefix type or data constructors
Ordinary prefix identifiers
e.g. "x", "_x"
Infix type or data constructors
e.g. ":-:", ":", "->"
Infix identifiers e.g. "+"
See Note [Classification of generated names]
--------------------
External interface
--------------------
| Is this an acceptable variable name?
| Is this an acceptable constructor name?
| Is this an acceptable type name?
| Is this an acceptable alphanumeric variable name, assuming it starts
with an acceptable letter?
admit "_" as a valid identifier. Required to support typed
| Is this an acceptable symbolic variable name, assuming it starts
with an acceptable character?
| Is this an acceptable alphanumeric constructor name, assuming it
starts with an acceptable letter?
Is it a boxed tuple...
check for tuple name, starting at the beginning
check for tuple tail
check for sum name, starting at the beginning
check for sum tail, only allowing at most one underscore
| Is this an acceptable symbolic constructor name, assuming it
starts with an acceptable character?
--------------------
--------------------
| Is this string an acceptable id, possibly with a suffix of hashes,
but not worrying about case or clashing with reserved words?
-XMagicHash allows a suffix of hashes
of course, `all` says "True" to an empty list
See #1103
See #4373 | in GHC.Parser . , but sadly there seems to be no way to merge them .
module GHC.Utils.Lexeme (
* Lexical characteristics of names
| Use these functions to figure what kind of name a ' FastString '
represents ; these functions do /not/ check that the identifier
isLexCon, isLexVar, isLexId, isLexSym,
isLexConId, isLexConSym, isLexVarId, isLexVarSym,
startsVarSym, startsVarId, startsConSym, startsConId,
| These functions ( working over plain old ' 's ) check
okVarOcc, okConOcc, okTcOcc,
okVarIdOcc, okVarSymOcc, okConIdOcc, okConSymOcc
Some of the exports above are not used within GHC , but may
be of value to GHC API users .
) where
import GHC.Prelude
import GHC.Data.FastString
import Data.Char
import qualified Data.Set as Set
import GHC.Lexeme
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
Lexical categories
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
These functions test strings to see if they fit the lexical categories
defined in the report .
Note [ Classification of generated names ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some names generated for internal use can show up in debugging output ,
e.g. when using -ddump - simpl . These generated names start with a $
but should still be pretty - printed using prefix notation . We make sure
this is the case in isLexVarSym by only classifying a name as a symbol
if all its characters are symbols , not just its first one .
************************************************************************
* *
Lexical categories
* *
************************************************************************
These functions test strings to see if they fit the lexical categories
defined in the Haskell report.
Note [Classification of generated names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some names generated for internal use can show up in debugging output,
e.g. when using -ddump-simpl. These generated names start with a $
but should still be pretty-printed using prefix notation. We make sure
this is the case in isLexVarSym by only classifying a name as a symbol
if all its characters are symbols, not just its first one.
-}
isLexCon, isLexVar, isLexId, isLexSym :: FastString -> Bool
isLexConId, isLexConSym, isLexVarId, isLexVarSym :: FastString -> Bool
isLexCon cs = isLexConId cs || isLexConSym cs
isLexVar cs = isLexVarId cs || isLexVarSym cs
isLexId cs = isLexConId cs || isLexVarId cs
isLexSym cs = isLexConSym cs || isLexVarSym cs
e.g. " " , " [ ] " , " ( , ) "
c:_ -> cs == fsLit "[]" || startsConId c
c:_ -> startsVarId c
c:_ -> cs == fsLit "->" || startsConSym c
| fs == (fsLit "~R#") = True
| otherwise
= case (if nullFS fs then [] else unpackFS fs) of
[] -> False
(c:cs) -> startsVarSym c && all isVarSymChar cs
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
Detecting valid names for Template Haskell
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
************************************************************************
* *
Detecting valid names for Template Haskell
* *
************************************************************************
-}
okVarOcc :: String -> Bool
okVarOcc str@(c:_)
| startsVarId c
= okVarIdOcc str
| startsVarSym c
= okVarSymOcc str
okVarOcc _ = False
okConOcc :: String -> Bool
okConOcc str@(c:_)
| startsConId c
= okConIdOcc str
| startsConSym c
= okConSymOcc str
| str == "[]"
= True
okConOcc _ = False
okTcOcc :: String -> Bool
okTcOcc "[]" = True
okTcOcc "->" = True
okTcOcc "~" = True
okTcOcc str@(c:_)
| startsConId c
= okConIdOcc str
| startsConSym c
= okConSymOcc str
| startsVarSym c
= okVarSymOcc str
okTcOcc _ = False
okVarIdOcc :: String -> Bool
okVarIdOcc str = okIdOcc str &&
holes in Template Haskell . See # 10267
(str == "_" || not (str `Set.member` reservedIds))
okVarSymOcc :: String -> Bool
okVarSymOcc str = all okSymChar str &&
not (str `Set.member` reservedOps) &&
not (isDashes str)
okConIdOcc :: String -> Bool
okConIdOcc str = okIdOcc str ||
is_tuple_name1 True str ||
is_tuple_name1 False str ||
... or an unboxed tuple ( # 12407 ) ...
is_sum_name1 str
... or an unboxed sum ( # 12514 ) ?
where
is_tuple_name1 True ('(' : rest) = is_tuple_name2 True rest
is_tuple_name1 False ('(' : '#' : rest) = is_tuple_name2 False rest
is_tuple_name1 _ _ = False
is_tuple_name2 True ")" = True
is_tuple_name2 False "#)" = True
is_tuple_name2 boxed (',' : rest) = is_tuple_name2 boxed rest
is_tuple_name2 boxed (ws : rest)
| isSpace ws = is_tuple_name2 boxed rest
is_tuple_name2 _ _ = False
is_sum_name1 ('(' : '#' : rest) = is_sum_name2 False rest
is_sum_name1 _ = False
is_sum_name2 _ "#)" = True
is_sum_name2 underscore ('|' : rest) = is_sum_name2 underscore rest
is_sum_name2 False ('_' : rest) = is_sum_name2 True rest
is_sum_name2 underscore (ws : rest)
| isSpace ws = is_sum_name2 underscore rest
is_sum_name2 _ _ = False
okConSymOcc :: String -> Bool
okConSymOcc ":" = True
okConSymOcc str = all okSymChar str &&
not (str `Set.member` reservedOps)
Internal functions
okIdOcc :: String -> Bool
okIdOcc str
= let hashes = dropWhile okIdChar str in
| Is this character acceptable in an identifier ( after the first letter ) ?
See alexGetByte in GHC.Parser .
okIdChar :: Char -> Bool
okIdChar c = case generalCategory c of
UppercaseLetter -> True
LowercaseLetter -> True
TitlecaseLetter -> True
See # 10196
See # 7650
DecimalNumber -> True
_ -> c == '\'' || c == '_'
| All reserved identifiers . Taken from section 2.4 of the 2010 Report .
reservedIds :: Set.Set String
reservedIds = Set.fromList [ "case", "class", "data", "default", "deriving"
, "do", "else", "foreign", "if", "import", "in"
, "infix", "infixl", "infixr", "instance", "let"
, "module", "newtype", "of", "then", "type", "where"
, "_" ]
| All reserved operators . Taken from section 2.4 of the 2010 Report .
reservedOps :: Set.Set String
reservedOps = Set.fromList [ "..", ":", "::", "=", "\\", "|", "<-", "->"
, "@", "~", "=>" ]
| Does this string contain only dashes and has at least 2 of them ?
isDashes :: String -> Bool
isDashes ('-' : '-' : rest) = all (== '-') rest
isDashes _ = False
|
cc2c0507f9414514086ede102fe6d9f1c4dd9911a22f441a4ca8a22e0bf34db5 | racket/typed-racket | check-within.rkt |
#lang typed/racket/optional
(require scheme/math typed/test-engine/scheme-tests)
(define-struct: circle ({radius : Number}))
(: circle-area (circle -> Number))
(check-within (+ 1 2.14) pi .1)
(check-range 2 1 3)
(check-member-of 'a 'b 'c 'd 'a 'z)
(check-error (error "fail") "fail")
(define (circle-area c)
(* pi (circle-radius c) (circle-radius c)))
(test)
| null | https://raw.githubusercontent.com/racket/typed-racket/1dde78d165472d67ae682b68622d2b7ee3e15e1e/typed-racket-test/succeed/optional/check-within.rkt | racket |
#lang typed/racket/optional
(require scheme/math typed/test-engine/scheme-tests)
(define-struct: circle ({radius : Number}))
(: circle-area (circle -> Number))
(check-within (+ 1 2.14) pi .1)
(check-range 2 1 3)
(check-member-of 'a 'b 'c 'd 'a 'z)
(check-error (error "fail") "fail")
(define (circle-area c)
(* pi (circle-radius c) (circle-radius c)))
(test)
|
|
c201cb0acc0811af709359403af009bcfa70d75a19fc03a7a2571ece5adbf70a | jrslepak/Remora | info.rkt | #lang setup/infotab
(define scribblings '(("scribblings/remora.scrbl")))
| null | https://raw.githubusercontent.com/jrslepak/Remora/1a831dec554df9a7ef3eeb10f0d22036f1f86dbd/remora/info.rkt | racket | #lang setup/infotab
(define scribblings '(("scribblings/remora.scrbl")))
|
|
6c3441bb13582d2d3ed4264589320e6dbe865ab2cd405ef1bd39af344fb60f86 | zotonic/zotonic | action_wires_alert.erl | @author < >
2009
%%
Based on code copyright ( c ) 2008 - 2009
Copyright 2009
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(action_wires_alert).
-include_lib("zotonic_core/include/zotonic.hrl").
-export([
render_action/4,
event/2
]).
render_action(TriggerId, TargetId, Args, Context) ->
{PostbackMsgJS, _PickledPostback} = z_render:make_postback({alert, Args}, click, TriggerId, TargetId, ?MODULE, Context),
{PostbackMsgJS, Context}.
%% @doc Fill the dialog with the delete confirmation template. The next step will ask to delete the resource
@spec event(Event , ) - > Context2
event(#postback{message={alert, Args}}, Context) ->
Title = proplists:get_value(title, Args, ?__(<<"Alert">>, Context)),
Vars = [
{title, Title},
{action, proplists:get_all_values(action, Args)}
| Args
],
z_render:dialog(Title, "_action_dialog_alert.tpl", Vars, Context).
| null | https://raw.githubusercontent.com/zotonic/zotonic/852f627c28adf6e5212e8ad5383d4af3a2f25e3f/apps/zotonic_mod_wires/src/actions/action_wires_alert.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc Fill the dialog with the delete confirmation template. The next step will ask to delete the resource | @author < >
2009
Based on code copyright ( c ) 2008 - 2009
Copyright 2009
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(action_wires_alert).
-include_lib("zotonic_core/include/zotonic.hrl").
-export([
render_action/4,
event/2
]).
render_action(TriggerId, TargetId, Args, Context) ->
{PostbackMsgJS, _PickledPostback} = z_render:make_postback({alert, Args}, click, TriggerId, TargetId, ?MODULE, Context),
{PostbackMsgJS, Context}.
@spec event(Event , ) - > Context2
event(#postback{message={alert, Args}}, Context) ->
Title = proplists:get_value(title, Args, ?__(<<"Alert">>, Context)),
Vars = [
{title, Title},
{action, proplists:get_all_values(action, Args)}
| Args
],
z_render:dialog(Title, "_action_dialog_alert.tpl", Vars, Context).
|
9d3abeb86c6481f158e9505b70728a8502bf6ec41057b478383759a3de85fa59 | borkdude/jet | test_utils.clj | (ns jet.test-utils
(:require
[jet.main :as main]
[me.raynes.conch :refer [let-programs] :as sh]))
(set! *warn-on-reflection* true)
(defn jet-jvm [input & args]
(with-out-str
(with-in-str input
(apply main/-main args))))
(defn jet-native [input & args]
(let-programs [jet "./jet"]
(binding [sh/*throw* false]
(apply jet (conj (vec args)
{:in input})))))
(def jet
(case (System/getenv "JET_TEST_ENV")
"jvm" #'jet-jvm
"native" #'jet-native
#'jet-jvm))
(if (= jet #'jet-jvm)
(println "==== Testing JVM version")
(println "==== Testing native version"))
| null | https://raw.githubusercontent.com/borkdude/jet/9b49ab4c3e2f84c862c76212e0a56bbc035e241c/test/jet/test_utils.clj | clojure | (ns jet.test-utils
(:require
[jet.main :as main]
[me.raynes.conch :refer [let-programs] :as sh]))
(set! *warn-on-reflection* true)
(defn jet-jvm [input & args]
(with-out-str
(with-in-str input
(apply main/-main args))))
(defn jet-native [input & args]
(let-programs [jet "./jet"]
(binding [sh/*throw* false]
(apply jet (conj (vec args)
{:in input})))))
(def jet
(case (System/getenv "JET_TEST_ENV")
"jvm" #'jet-jvm
"native" #'jet-native
#'jet-jvm))
(if (= jet #'jet-jvm)
(println "==== Testing JVM version")
(println "==== Testing native version"))
|
|
de1d76370f362278b21ebc6ede9ad5b1165b731081b16f960665aaf52dba0d35 | tcsprojects/pgsolver | switch_internal.ml | open Basics;;
open Stratimpralgs;;
open Paritygame;;
open Tcsset;;
open Tcsbasedata;;
open Univsolve;;
open Tcslist;;
open Tcsarray;;
let list_upfront l i =
let rec tile f t =
let j = List.hd t in
if j = i then (f, t) else tile (j::f) (List.tl t)
in
let (f, t) = tile [] l in
t @ (List.rev f)
let list_max a less = ListUtils.max_elt (fun x y -> if less x y then -1 else 1) a
let improvement_policy_learn_strategies game node_total_ordering strategy_set old_strategy valu =
Step 1 : Update strategy_set
let strategy_set = ref strategy_set in
let add_to_strategy_set strat =
strategy_set := TreeSet.add strat !strategy_set
in
add_to_strategy_set old_strategy;
let n = pg_size game in
for i = 0 to n - 1 do
let pl = pg_get_owner game i in
let tr = pg_get_successors game i in
if pl = plr_Even then (
ns_iter (fun j ->
if node_valuation_ordering game node_total_ordering valu.(j) valu.(old_strategy.(i)) > 0 then (
let s = Array.copy old_strategy in
s.(i) <- j;
add_to_strategy_set s
)
) tr;
)
done;
Step 2 : Build improvement set
let improvement_set = ref (TreeSet.empty (fun x y -> compare (Array.to_list x) (Array.to_list y))) in
let add_to_improvement_set strat =
improvement_set := TreeSet.add strat !improvement_set
in
let morph base target node =
let base_counter = compute_counter_strategy game base in
let rec helper set v =
if TreeSet.mem v set then None
else if pg_get_owner game v = plr_Odd then helper (TreeSet.add v set) base_counter.(v)
else if base.(v) = target.(v) then helper (TreeSet.add v set) base.(v)
else let a = Array.copy base in
a.(v) <- target.(v);
Some a
in
helper TreeSet.empty_def node
in
TreeSet.iter (fun strategy ->
let n = pg_size game in
for i = 0 to n - 1 do
let current = ref (morph old_strategy strategy i) in
while !current != None do
match !current with
Some cur -> (
add_to_improvement_set cur;
current := morph cur strategy i
)
| None -> ()
done;
done;
) !strategy_set;
(* Step3: Build combination strategy *)
let improvement_set_array = Array.make (TreeSet.cardinal !improvement_set) ([||], valu) in
let i = ref 0 in
TreeSet.iter (fun st ->
improvement_set_array.(!i) <- (st, evaluate_strategy game node_total_ordering st);
incr i
) !improvement_set;
let best_strategies = Array.make (pg_size game) 0 in
Array.iteri (fun i (st, va) ->
let m = pg_size game in
for v = 0 to m - 1 do
if node_valuation_ordering game node_total_ordering va.(v) (snd improvement_set_array.(best_strategies.(v))).(v) > 0
then best_strategies.(v) <- i;
done;
) improvement_set_array;
let strategy = Array.init (pg_size game) (fun v ->
if pg_get_owner game v = plr_Odd then -1
else (fst improvement_set_array.(best_strategies.(v))).(v)
) in
(strategy, !strategy_set)
let improvement_policy_learn_cycles sub_policy game node_total_ordering (cycles, u) old_strategy valu =
let (strategy, u') = sub_policy game node_total_ordering u old_strategy valu cycles in
let combined_strategy =
Array.init (Array.length valu) (fun i ->
if pg_get_owner game i = plr_Even
then strategy.(i)
else best_decision_by_valuation_ordering game node_total_ordering valu i
)
in
let game' = subgame_by_edge_pred game (fun i j -> combined_strategy.(i) = j) in
let (sccs, sccindex, topology, roots) = strongly_connected_components game' in
let cycles = ref cycles in
let normalize l =
ns_make (list_upfront (ns_nodes l) (list_max (ns_nodes l) (fun x y -> pg_get_priority game x < pg_get_priority game y)))
in
Array.iteri (fun i scc ->
if (ns_size scc > 1) && (topology.(i) = []) then (
let c = normalize scc in
if (pg_get_priority game (ns_first c) mod 2 = 0) && (not (TreeSet.mem c !cycles)) then (
cycles := TreeSet.add c !cycles;
let fmt k =
match (pg_get_desc game k) with
None -> string_of_int k
| Some t -> t
in
message 2 (fun _ -> "\nLearned cycle #" ^ string_of_int (TreeSet.cardinal !cycles) ^ " : " ^ ListUtils.format fmt (ns_nodes c) ^ "\n")
)
)
) sccs;
(strategy, (!cycles, u'))
let improvement_policy_level game node_total_ordering data old_strategy valu =
let (level) = data in
let n = Array.length valu in
if (level == 0) then (
(improvement_policy_optimize_all_locally game node_total_ordering old_strategy valu, 1)
) else (
(* Contains nodes that have been identified as non final;
We don't use cycles with potential escape edges leading to non-final-nodes *)
let non_final_nodes = ref TreeSet.empty_def in
(* Contains edges that we have used as escape edges;
We don't use cycles with potential escape edges included in the set *)
let used_escape_edges = ref TreeSet.empty_def in
let counter_strategy = compute_counter_strategy game old_strategy in
let next_counter = ref (Array.copy counter_strategy) in
let new_strategy = Array.copy old_strategy in
let running = ref true in
let changed = ref false in
while !running do
let graph = subgame_by_edge_pred game (fun v w ->
let pl = pg_get_owner game v in
(pl = plr_Even) || (counter_strategy.(v) = w && !next_counter.(v) = w)
) in
let m = pg_size game in
for i = 0 to m - 1 do
let pl = pg_get_owner game i in
let tr = pg_get_successors game i in
if (pl = plr_Odd) && ns_exists (fun j -> counter_strategy.(i) != j && (TreeSet.mem j !non_final_nodes || TreeSet.mem (i,j) !used_escape_edges)) tr
then ns_iter (fun w -> pg_del_edge graph i w) (pg_get_successors graph i)
done;
let cycle = ref None in
let i = ref 0 in
while (!cycle = None && !i < n) do
let pr = pg_get_priority game !i in
if (pr mod 2 == 0) then (
let s = ref TreeSet.empty_def in
let rec build j cyc =
if (j = !i && cyc != []) then (
cycle := Some cyc
) else if not (TreeSet.mem j !s) && (pg_get_priority game j <= pr) then (
s := TreeSet.add j !s;
let tr = pg_get_successors graph j in
ns_iter (fun k ->
if (!cycle = None)
then build k ((j,k)::cyc);
) tr;
)
in
build !i [];
);
incr i;
done;
match !cycle with
| None -> running := false
| Some cycle -> (
changed := true;
List.iter (fun (i,j) ->
if (pg_get_owner game i = plr_Even)
then new_strategy.(i) <- j
) cycle;
List.iter (fun (i,_) ->
non_final_nodes := TreeSet.add i !non_final_nodes;
) cycle;
next_counter := compute_counter_strategy game new_strategy;
List.iter (fun (i,j) ->
if (pg_get_owner game i = plr_Odd && !next_counter.(i) != j)
then used_escape_edges := TreeSet.add (i, !next_counter.(i)) !used_escape_edges
) cycle;
);
done;
if !changed
then (new_strategy, 0)
else (improvement_policy_optimize_all_locally game node_total_ordering old_strategy valu, 1)
)
let improvement_policy_smart game node_total_ordering todo old_strategy valu cycles =
let combined_strategy =
Array.init (Array.length valu) (fun i ->
if pg_get_owner game i = plr_Even
then old_strategy.(i)
else best_decision_by_valuation_ordering game node_total_ordering valu i
)
in
let improv_edge x y =
node_valuation_ordering game node_total_ordering valu.(old_strategy.(x)) valu.(y) <= 0
in
let cycle_applies cycle =
let x = ref (List.hd cycle) in
let cycle' = ref ((List.tl cycle) @ [!x]) in
let applies1 = ref true in
let applies0 = ref true in
while !applies1 && (not (!cycle' = [])) do
let z = List.hd !cycle' in
cycle' := List.tl !cycle';
if pg_get_owner game !x = plr_Odd
then applies1 := combined_strategy.(!x) = z
else applies0 := !applies0 && ((combined_strategy.(!x) = z) || (not (improv_edge !x z)));
x := z
done;
!applies1 && (not !applies0)
in
let todo = TreeSet.filter cycle_applies (if TreeSet.is_empty todo then cycles else todo) in
if TreeSet.is_empty todo then (
let strat = improvement_policy_optimize_all_locally game node_total_ordering old_strategy valu in
(strat, todo)
)
else (
let strategy = Array.copy old_strategy in
TreeSet.iter (fun cycle ->
let fmt k =
match (pg_get_desc game k) with
None -> string_of_int k
| Some t -> t
in
message 2 (fun _ -> "\nApply cycle : " ^ ListUtils.format fmt cycle ^ "\n");
let x = ref (List.hd cycle) in
let cycle' = ref ((List.tl cycle) @ [!x]) in
while (not (!cycle' = [])) do
let z = List.hd !cycle' in
cycle' := List.tl !cycle';
if (pg_get_owner game !x = plr_Even) && (improv_edge !x z) then strategy.(!x) <- z;
x := z
done
) todo;
(strategy, todo)
)
let improvement_policy_cycle_avoid game node_total_ordering old_strategy valu =
let new_strategy = Array.copy old_strategy in
let counter_strategy =
Array.init (Array.length valu) (fun i ->
if pg_get_owner game i = plr_Even
then -1
else best_decision_by_valuation_ordering game node_total_ordering valu i
)
in
let allowed v w =
let s = ref TreeSet.empty_def in
let current = ref w in
let finished = ref false in
while not !finished do
s := TreeSet.add !current !s;
if pg_get_owner game !current = plr_Even
then current := new_strategy.(!current)
else current := counter_strategy.(!current);
finished := TreeSet.mem !current !s;
done;
not (TreeSet.mem v !s)
in
let deford x y = node_valuation_total_ordering game node_total_ordering valu x y in
let ordering base x y =
let ax = allowed base x in
let ay = allowed base y in
if ax = ay then deford x y
else if ax then 1 else -1
in
let changed = ref false in
let n = Array.length old_strategy in
for i = 0 to n - 1 do
if (pg_get_owner game i = plr_Even) then (
let w = best_decision_by_ordering game (ordering i) i in
if (w != new_strategy.(i)) && (deford new_strategy.(i) w < 0) then (
new_strategy.(i) <- w;
changed := true;
)
);
done;
if !changed
then new_strategy
else improvement_policy_optimize_all_locally game node_total_ordering old_strategy valu
type ('a, 'b) ab = A of 'a | B of 'b
let cycle_enforce_cycles_compare (node0, node1, edge0, edge1) (node0', node1', edge0', edge1') =
let c0 = ns_compare node0 node0' in
let c1 = ns_compare node1 node1' in
let c2 = TreeMap.compare compare edge0 edge0' in
let c3 = TreeMap.compare compare edge1 edge1' in
if c0 != 0 then c0
else if c1 != 0 then c1
else if c2 != 0 then c2
else c3
let improvement_policy_cycle_enforce game node_total_ordering (cycles, idx) old_strategy valu =
let n = Array.length valu in
let get_cycles strategy =
let cycles = ref (TreeSet.empty cycle_enforce_cycles_compare) in
let combined_strategy =
Array.init (Array.length valu) (fun i ->
if pg_get_owner game i = plr_Even
then strategy.(i)
else best_decision_by_valuation_ordering game node_total_ordering valu i
)
in
let game' = subgame_by_edge_pred game (fun i j -> combined_strategy.(i) = j) in
let (sccs, sccindex, topology, roots) = strongly_connected_components game' in
Array.iteri (fun i scc ->
if (ns_size scc > 1) && (topology.(i) = []) then (
let node0 = ref ns_empty in
let node1 = ref ns_empty in
let edge0 = ref TreeMap.empty_def in
let edge1 = ref TreeMap.empty_def in
ns_iter (fun v ->
if pg_get_owner game v = plr_Even then (
node0 := ns_add v !node0;
edge0 := TreeMap.add v combined_strategy.(v) !edge0;
)
else (
node1 := ns_add v !node1;
edge1 := TreeMap.add v combined_strategy.(v) !edge1;
)
) scc;
cycles := TreeSet.add (!node0, !node1, !edge0, !edge1) !cycles;
)
) sccs;
!cycles
in
let cyc_value v (node0, node1, edge0, edge1) =
let valworst = ref None in
let valcur = ref (empty_descending_relevance_ordered_set game node_total_ordering) in
let m = ref (ns_size node1) in
let nodecur = ref v in
while !m > 0 do
while (pg_get_owner game !nodecur = plr_Even) do
valcur := TreeSet.add !nodecur !valcur;
nodecur := TreeMap.find !nodecur edge0
done;
ns_iter (fun w ->
if not (TreeMap.find !nodecur edge1 = w) then (
let (e, pth, f) = valu.(w) in
let valw = (e, TreeSet.union pth !valcur, f) in
match !valworst with
None -> valworst := Some valw;
| Some valw' -> if node_valuation_ordering game node_total_ordering valw valw' < 0
then valworst := Some valw;
);
) (pg_get_successors game !nodecur);
valcur := TreeSet.add !nodecur !valcur;
nodecur := TreeMap.find !nodecur edge1;
decr m;
done;
OptionUtils.get_some !valworst
in
let c = ref 0 in
let i = ref idx in
let finished = ref false in
let new_strategy = Array.copy old_strategy in
while (not !finished) && (!c <= n) do
if (pg_get_owner game !i = plr_Even) then (
let pots = ref [] in
ns_iter (fun w ->
new_strategy.(!i) <- w;
if TreeSet.subset (get_cycles new_strategy) cycles
then pots := (A w, valu.(w))::!pots;
new_strategy.(!i) <- old_strategy.(!i);
) (pg_get_successors game !i);
TreeSet.iter (fun ((node0, node1, edge0, edge1) as cyc) ->
if (ns_elem !i node0) then (
TreeMap.iter (fun v w ->
new_strategy.(v) <- w;
) edge0;
if TreeSet.subset (get_cycles new_strategy) cycles
then pots := (B cyc, cyc_value !i cyc)::!pots;
ns_iter (fun v ->
new_strategy.(v) <- old_strategy.(v);
) node0;
);
) cycles;
let best = ref None in
List.iter (fun (q, valw) ->
match !best with
None -> if node_valuation_ordering game node_total_ordering valw valu.(old_strategy.(!i)) > 0
then best := Some (q, valw)
| Some (q', valw') -> if node_valuation_ordering game node_total_ordering valw valw' > 0
then best := Some (q, valw);
) !pots;
match !best with
None -> ()
| Some (A w, _) -> (
new_strategy.(!i) <- w;
finished := true;
)
| Some (B (_, _, edge0, _), _) -> (
TreeMap.iter (fun v w ->
new_strategy.(v) <- w;
) edge0;
finished := true;
);
);
incr c;
i := (!i + 1) mod n;
done;
if !finished
then (new_strategy, (cycles, !i))
else (
let new_strategy = improvement_policy_optimize_all_locally game node_total_ordering old_strategy valu in
(new_strategy, (TreeSet.union cycles (get_cycles new_strategy), idx))
)
let strategy_improvement_cycle_avoid game =
strategy_improvement game initial_strategy_by_best_reward node_total_ordering_by_position (improvement_policy_no_user_data improvement_policy_cycle_avoid) () false "STRIMPR_INTONE";;
let strategy_improvement_cycle_enforce game =
strategy_improvement game initial_strategy_by_best_reward node_total_ordering_by_position improvement_policy_cycle_enforce ((TreeSet.empty cycle_enforce_cycles_compare), 0) false "STRIMPR_INTTWO";;
let strategy_improvement_learn_strategies game =
strategy_improvement game initial_strategy_by_best_reward node_total_ordering_by_position improvement_policy_learn_strategies (TreeSet.empty (fun x y -> compare (Array.to_list x) (Array.to_list y))) true "STRIMPR_STRLEA";;
let strategy_improvement_smart_policy game =
(*strategy_improvement game initial_strategy_by_best_reward node_total_ordering_by_position (improvement_policy_learn_cycles improvement_policy_smart) (TreeSet.empty compare, TreeSet.empty compare) true "STRIMPR_SMART";; *)
strategy_improvement game initial_strategy_by_best_reward node_total_ordering_by_position (improvement_policy_level) (0) true "STRIMPR_SMART";;
let strategy_improvement_justlearn_policy game =
strategy_improvement game initial_strategy_by_best_reward node_total_ordering_by_position (improvement_policy_learn_cycles (fun a b c d e f -> (improvement_policy_optimize_all_locally a b d e, c))) (TreeSet.empty compare, TreeSet.empty compare) true "STRIMPR_JULE";;
register_sub_solver
(fun g -> universal_solve (universal_solve_init_options_verbose !universal_solve_global_options) strategy_improvement_smart_policy g)
"smartstratimpr" "ssi" "use smart strategy improvement";;
register_sub_solver
(fun g -> universal_solve (universal_solve_init_options_verbose !universal_solve_global_options) strategy_improvement_learn_strategies g)
"learnstratimpr" "lsi" "use strategy-learning strategy improvement";;
register_sub_solver
( fun g - > universal_solve ( universal_solve_init_options_verbose ! universal_solve_global_options ) )
" julestratimpr " " siju " " use just learn strategy improvement " ; ;
register_sub_solver
(fun g -> universal_solve (universal_solve_init_options_verbose !universal_solve_global_options) strategy_improvement_justlearn_policy g)
"julestratimpr" "siju" "use just learn strategy improvement";;
*)
register_sub_solver
( fun g - > universal_solve ( universal_solve_init_options_verbose ! universal_solve_global_options ) )
" strimprbyco " " sibc " " use strategy improvement by counterstrategy " ; ;
register_sub_solver
(fun g -> universal_solve (universal_solve_init_options_verbose !universal_solve_global_options) strategy_improvement_by_counterstrategy_policy g)
"strimprbyco" "sibc" "use strategy improvement by counterstrategy";;
*)
let register _ =
register_sub_solver
(fun g -> universal_solve (universal_solve_init_options_verbose !universal_solve_global_options) strategy_improvement_cycle_avoid g)
"switchint" "swint" "switch internal #1";
register_sub_solver
(fun g -> universal_solve (universal_solve_init_options_verbose !universal_solve_global_options) strategy_improvement_cycle_enforce g)
"switchintx" "swintx" "switch internal #2";;
| null | https://raw.githubusercontent.com/tcsprojects/pgsolver/b0c31a8b367c405baed961385ad645d52f648325/src/solvers/stratimpralgs/switch_internal.ml | ocaml | Step3: Build combination strategy
Contains nodes that have been identified as non final;
We don't use cycles with potential escape edges leading to non-final-nodes
Contains edges that we have used as escape edges;
We don't use cycles with potential escape edges included in the set
strategy_improvement game initial_strategy_by_best_reward node_total_ordering_by_position (improvement_policy_learn_cycles improvement_policy_smart) (TreeSet.empty compare, TreeSet.empty compare) true "STRIMPR_SMART";; | open Basics;;
open Stratimpralgs;;
open Paritygame;;
open Tcsset;;
open Tcsbasedata;;
open Univsolve;;
open Tcslist;;
open Tcsarray;;
let list_upfront l i =
let rec tile f t =
let j = List.hd t in
if j = i then (f, t) else tile (j::f) (List.tl t)
in
let (f, t) = tile [] l in
t @ (List.rev f)
let list_max a less = ListUtils.max_elt (fun x y -> if less x y then -1 else 1) a
let improvement_policy_learn_strategies game node_total_ordering strategy_set old_strategy valu =
Step 1 : Update strategy_set
let strategy_set = ref strategy_set in
let add_to_strategy_set strat =
strategy_set := TreeSet.add strat !strategy_set
in
add_to_strategy_set old_strategy;
let n = pg_size game in
for i = 0 to n - 1 do
let pl = pg_get_owner game i in
let tr = pg_get_successors game i in
if pl = plr_Even then (
ns_iter (fun j ->
if node_valuation_ordering game node_total_ordering valu.(j) valu.(old_strategy.(i)) > 0 then (
let s = Array.copy old_strategy in
s.(i) <- j;
add_to_strategy_set s
)
) tr;
)
done;
Step 2 : Build improvement set
let improvement_set = ref (TreeSet.empty (fun x y -> compare (Array.to_list x) (Array.to_list y))) in
let add_to_improvement_set strat =
improvement_set := TreeSet.add strat !improvement_set
in
let morph base target node =
let base_counter = compute_counter_strategy game base in
let rec helper set v =
if TreeSet.mem v set then None
else if pg_get_owner game v = plr_Odd then helper (TreeSet.add v set) base_counter.(v)
else if base.(v) = target.(v) then helper (TreeSet.add v set) base.(v)
else let a = Array.copy base in
a.(v) <- target.(v);
Some a
in
helper TreeSet.empty_def node
in
TreeSet.iter (fun strategy ->
let n = pg_size game in
for i = 0 to n - 1 do
let current = ref (morph old_strategy strategy i) in
while !current != None do
match !current with
Some cur -> (
add_to_improvement_set cur;
current := morph cur strategy i
)
| None -> ()
done;
done;
) !strategy_set;
let improvement_set_array = Array.make (TreeSet.cardinal !improvement_set) ([||], valu) in
let i = ref 0 in
TreeSet.iter (fun st ->
improvement_set_array.(!i) <- (st, evaluate_strategy game node_total_ordering st);
incr i
) !improvement_set;
let best_strategies = Array.make (pg_size game) 0 in
Array.iteri (fun i (st, va) ->
let m = pg_size game in
for v = 0 to m - 1 do
if node_valuation_ordering game node_total_ordering va.(v) (snd improvement_set_array.(best_strategies.(v))).(v) > 0
then best_strategies.(v) <- i;
done;
) improvement_set_array;
let strategy = Array.init (pg_size game) (fun v ->
if pg_get_owner game v = plr_Odd then -1
else (fst improvement_set_array.(best_strategies.(v))).(v)
) in
(strategy, !strategy_set)
let improvement_policy_learn_cycles sub_policy game node_total_ordering (cycles, u) old_strategy valu =
let (strategy, u') = sub_policy game node_total_ordering u old_strategy valu cycles in
let combined_strategy =
Array.init (Array.length valu) (fun i ->
if pg_get_owner game i = plr_Even
then strategy.(i)
else best_decision_by_valuation_ordering game node_total_ordering valu i
)
in
let game' = subgame_by_edge_pred game (fun i j -> combined_strategy.(i) = j) in
let (sccs, sccindex, topology, roots) = strongly_connected_components game' in
let cycles = ref cycles in
let normalize l =
ns_make (list_upfront (ns_nodes l) (list_max (ns_nodes l) (fun x y -> pg_get_priority game x < pg_get_priority game y)))
in
Array.iteri (fun i scc ->
if (ns_size scc > 1) && (topology.(i) = []) then (
let c = normalize scc in
if (pg_get_priority game (ns_first c) mod 2 = 0) && (not (TreeSet.mem c !cycles)) then (
cycles := TreeSet.add c !cycles;
let fmt k =
match (pg_get_desc game k) with
None -> string_of_int k
| Some t -> t
in
message 2 (fun _ -> "\nLearned cycle #" ^ string_of_int (TreeSet.cardinal !cycles) ^ " : " ^ ListUtils.format fmt (ns_nodes c) ^ "\n")
)
)
) sccs;
(strategy, (!cycles, u'))
let improvement_policy_level game node_total_ordering data old_strategy valu =
let (level) = data in
let n = Array.length valu in
if (level == 0) then (
(improvement_policy_optimize_all_locally game node_total_ordering old_strategy valu, 1)
) else (
let non_final_nodes = ref TreeSet.empty_def in
let used_escape_edges = ref TreeSet.empty_def in
let counter_strategy = compute_counter_strategy game old_strategy in
let next_counter = ref (Array.copy counter_strategy) in
let new_strategy = Array.copy old_strategy in
let running = ref true in
let changed = ref false in
while !running do
let graph = subgame_by_edge_pred game (fun v w ->
let pl = pg_get_owner game v in
(pl = plr_Even) || (counter_strategy.(v) = w && !next_counter.(v) = w)
) in
let m = pg_size game in
for i = 0 to m - 1 do
let pl = pg_get_owner game i in
let tr = pg_get_successors game i in
if (pl = plr_Odd) && ns_exists (fun j -> counter_strategy.(i) != j && (TreeSet.mem j !non_final_nodes || TreeSet.mem (i,j) !used_escape_edges)) tr
then ns_iter (fun w -> pg_del_edge graph i w) (pg_get_successors graph i)
done;
let cycle = ref None in
let i = ref 0 in
while (!cycle = None && !i < n) do
let pr = pg_get_priority game !i in
if (pr mod 2 == 0) then (
let s = ref TreeSet.empty_def in
let rec build j cyc =
if (j = !i && cyc != []) then (
cycle := Some cyc
) else if not (TreeSet.mem j !s) && (pg_get_priority game j <= pr) then (
s := TreeSet.add j !s;
let tr = pg_get_successors graph j in
ns_iter (fun k ->
if (!cycle = None)
then build k ((j,k)::cyc);
) tr;
)
in
build !i [];
);
incr i;
done;
match !cycle with
| None -> running := false
| Some cycle -> (
changed := true;
List.iter (fun (i,j) ->
if (pg_get_owner game i = plr_Even)
then new_strategy.(i) <- j
) cycle;
List.iter (fun (i,_) ->
non_final_nodes := TreeSet.add i !non_final_nodes;
) cycle;
next_counter := compute_counter_strategy game new_strategy;
List.iter (fun (i,j) ->
if (pg_get_owner game i = plr_Odd && !next_counter.(i) != j)
then used_escape_edges := TreeSet.add (i, !next_counter.(i)) !used_escape_edges
) cycle;
);
done;
if !changed
then (new_strategy, 0)
else (improvement_policy_optimize_all_locally game node_total_ordering old_strategy valu, 1)
)
let improvement_policy_smart game node_total_ordering todo old_strategy valu cycles =
let combined_strategy =
Array.init (Array.length valu) (fun i ->
if pg_get_owner game i = plr_Even
then old_strategy.(i)
else best_decision_by_valuation_ordering game node_total_ordering valu i
)
in
let improv_edge x y =
node_valuation_ordering game node_total_ordering valu.(old_strategy.(x)) valu.(y) <= 0
in
let cycle_applies cycle =
let x = ref (List.hd cycle) in
let cycle' = ref ((List.tl cycle) @ [!x]) in
let applies1 = ref true in
let applies0 = ref true in
while !applies1 && (not (!cycle' = [])) do
let z = List.hd !cycle' in
cycle' := List.tl !cycle';
if pg_get_owner game !x = plr_Odd
then applies1 := combined_strategy.(!x) = z
else applies0 := !applies0 && ((combined_strategy.(!x) = z) || (not (improv_edge !x z)));
x := z
done;
!applies1 && (not !applies0)
in
let todo = TreeSet.filter cycle_applies (if TreeSet.is_empty todo then cycles else todo) in
if TreeSet.is_empty todo then (
let strat = improvement_policy_optimize_all_locally game node_total_ordering old_strategy valu in
(strat, todo)
)
else (
let strategy = Array.copy old_strategy in
TreeSet.iter (fun cycle ->
let fmt k =
match (pg_get_desc game k) with
None -> string_of_int k
| Some t -> t
in
message 2 (fun _ -> "\nApply cycle : " ^ ListUtils.format fmt cycle ^ "\n");
let x = ref (List.hd cycle) in
let cycle' = ref ((List.tl cycle) @ [!x]) in
while (not (!cycle' = [])) do
let z = List.hd !cycle' in
cycle' := List.tl !cycle';
if (pg_get_owner game !x = plr_Even) && (improv_edge !x z) then strategy.(!x) <- z;
x := z
done
) todo;
(strategy, todo)
)
let improvement_policy_cycle_avoid game node_total_ordering old_strategy valu =
let new_strategy = Array.copy old_strategy in
let counter_strategy =
Array.init (Array.length valu) (fun i ->
if pg_get_owner game i = plr_Even
then -1
else best_decision_by_valuation_ordering game node_total_ordering valu i
)
in
let allowed v w =
let s = ref TreeSet.empty_def in
let current = ref w in
let finished = ref false in
while not !finished do
s := TreeSet.add !current !s;
if pg_get_owner game !current = plr_Even
then current := new_strategy.(!current)
else current := counter_strategy.(!current);
finished := TreeSet.mem !current !s;
done;
not (TreeSet.mem v !s)
in
let deford x y = node_valuation_total_ordering game node_total_ordering valu x y in
let ordering base x y =
let ax = allowed base x in
let ay = allowed base y in
if ax = ay then deford x y
else if ax then 1 else -1
in
let changed = ref false in
let n = Array.length old_strategy in
for i = 0 to n - 1 do
if (pg_get_owner game i = plr_Even) then (
let w = best_decision_by_ordering game (ordering i) i in
if (w != new_strategy.(i)) && (deford new_strategy.(i) w < 0) then (
new_strategy.(i) <- w;
changed := true;
)
);
done;
if !changed
then new_strategy
else improvement_policy_optimize_all_locally game node_total_ordering old_strategy valu
type ('a, 'b) ab = A of 'a | B of 'b
let cycle_enforce_cycles_compare (node0, node1, edge0, edge1) (node0', node1', edge0', edge1') =
let c0 = ns_compare node0 node0' in
let c1 = ns_compare node1 node1' in
let c2 = TreeMap.compare compare edge0 edge0' in
let c3 = TreeMap.compare compare edge1 edge1' in
if c0 != 0 then c0
else if c1 != 0 then c1
else if c2 != 0 then c2
else c3
let improvement_policy_cycle_enforce game node_total_ordering (cycles, idx) old_strategy valu =
let n = Array.length valu in
let get_cycles strategy =
let cycles = ref (TreeSet.empty cycle_enforce_cycles_compare) in
let combined_strategy =
Array.init (Array.length valu) (fun i ->
if pg_get_owner game i = plr_Even
then strategy.(i)
else best_decision_by_valuation_ordering game node_total_ordering valu i
)
in
let game' = subgame_by_edge_pred game (fun i j -> combined_strategy.(i) = j) in
let (sccs, sccindex, topology, roots) = strongly_connected_components game' in
Array.iteri (fun i scc ->
if (ns_size scc > 1) && (topology.(i) = []) then (
let node0 = ref ns_empty in
let node1 = ref ns_empty in
let edge0 = ref TreeMap.empty_def in
let edge1 = ref TreeMap.empty_def in
ns_iter (fun v ->
if pg_get_owner game v = plr_Even then (
node0 := ns_add v !node0;
edge0 := TreeMap.add v combined_strategy.(v) !edge0;
)
else (
node1 := ns_add v !node1;
edge1 := TreeMap.add v combined_strategy.(v) !edge1;
)
) scc;
cycles := TreeSet.add (!node0, !node1, !edge0, !edge1) !cycles;
)
) sccs;
!cycles
in
let cyc_value v (node0, node1, edge0, edge1) =
let valworst = ref None in
let valcur = ref (empty_descending_relevance_ordered_set game node_total_ordering) in
let m = ref (ns_size node1) in
let nodecur = ref v in
while !m > 0 do
while (pg_get_owner game !nodecur = plr_Even) do
valcur := TreeSet.add !nodecur !valcur;
nodecur := TreeMap.find !nodecur edge0
done;
ns_iter (fun w ->
if not (TreeMap.find !nodecur edge1 = w) then (
let (e, pth, f) = valu.(w) in
let valw = (e, TreeSet.union pth !valcur, f) in
match !valworst with
None -> valworst := Some valw;
| Some valw' -> if node_valuation_ordering game node_total_ordering valw valw' < 0
then valworst := Some valw;
);
) (pg_get_successors game !nodecur);
valcur := TreeSet.add !nodecur !valcur;
nodecur := TreeMap.find !nodecur edge1;
decr m;
done;
OptionUtils.get_some !valworst
in
let c = ref 0 in
let i = ref idx in
let finished = ref false in
let new_strategy = Array.copy old_strategy in
while (not !finished) && (!c <= n) do
if (pg_get_owner game !i = plr_Even) then (
let pots = ref [] in
ns_iter (fun w ->
new_strategy.(!i) <- w;
if TreeSet.subset (get_cycles new_strategy) cycles
then pots := (A w, valu.(w))::!pots;
new_strategy.(!i) <- old_strategy.(!i);
) (pg_get_successors game !i);
TreeSet.iter (fun ((node0, node1, edge0, edge1) as cyc) ->
if (ns_elem !i node0) then (
TreeMap.iter (fun v w ->
new_strategy.(v) <- w;
) edge0;
if TreeSet.subset (get_cycles new_strategy) cycles
then pots := (B cyc, cyc_value !i cyc)::!pots;
ns_iter (fun v ->
new_strategy.(v) <- old_strategy.(v);
) node0;
);
) cycles;
let best = ref None in
List.iter (fun (q, valw) ->
match !best with
None -> if node_valuation_ordering game node_total_ordering valw valu.(old_strategy.(!i)) > 0
then best := Some (q, valw)
| Some (q', valw') -> if node_valuation_ordering game node_total_ordering valw valw' > 0
then best := Some (q, valw);
) !pots;
match !best with
None -> ()
| Some (A w, _) -> (
new_strategy.(!i) <- w;
finished := true;
)
| Some (B (_, _, edge0, _), _) -> (
TreeMap.iter (fun v w ->
new_strategy.(v) <- w;
) edge0;
finished := true;
);
);
incr c;
i := (!i + 1) mod n;
done;
if !finished
then (new_strategy, (cycles, !i))
else (
let new_strategy = improvement_policy_optimize_all_locally game node_total_ordering old_strategy valu in
(new_strategy, (TreeSet.union cycles (get_cycles new_strategy), idx))
)
let strategy_improvement_cycle_avoid game =
strategy_improvement game initial_strategy_by_best_reward node_total_ordering_by_position (improvement_policy_no_user_data improvement_policy_cycle_avoid) () false "STRIMPR_INTONE";;
let strategy_improvement_cycle_enforce game =
strategy_improvement game initial_strategy_by_best_reward node_total_ordering_by_position improvement_policy_cycle_enforce ((TreeSet.empty cycle_enforce_cycles_compare), 0) false "STRIMPR_INTTWO";;
let strategy_improvement_learn_strategies game =
strategy_improvement game initial_strategy_by_best_reward node_total_ordering_by_position improvement_policy_learn_strategies (TreeSet.empty (fun x y -> compare (Array.to_list x) (Array.to_list y))) true "STRIMPR_STRLEA";;
let strategy_improvement_smart_policy game =
strategy_improvement game initial_strategy_by_best_reward node_total_ordering_by_position (improvement_policy_level) (0) true "STRIMPR_SMART";;
let strategy_improvement_justlearn_policy game =
strategy_improvement game initial_strategy_by_best_reward node_total_ordering_by_position (improvement_policy_learn_cycles (fun a b c d e f -> (improvement_policy_optimize_all_locally a b d e, c))) (TreeSet.empty compare, TreeSet.empty compare) true "STRIMPR_JULE";;
register_sub_solver
(fun g -> universal_solve (universal_solve_init_options_verbose !universal_solve_global_options) strategy_improvement_smart_policy g)
"smartstratimpr" "ssi" "use smart strategy improvement";;
register_sub_solver
(fun g -> universal_solve (universal_solve_init_options_verbose !universal_solve_global_options) strategy_improvement_learn_strategies g)
"learnstratimpr" "lsi" "use strategy-learning strategy improvement";;
register_sub_solver
( fun g - > universal_solve ( universal_solve_init_options_verbose ! universal_solve_global_options ) )
" julestratimpr " " siju " " use just learn strategy improvement " ; ;
register_sub_solver
(fun g -> universal_solve (universal_solve_init_options_verbose !universal_solve_global_options) strategy_improvement_justlearn_policy g)
"julestratimpr" "siju" "use just learn strategy improvement";;
*)
register_sub_solver
( fun g - > universal_solve ( universal_solve_init_options_verbose ! universal_solve_global_options ) )
" strimprbyco " " sibc " " use strategy improvement by counterstrategy " ; ;
register_sub_solver
(fun g -> universal_solve (universal_solve_init_options_verbose !universal_solve_global_options) strategy_improvement_by_counterstrategy_policy g)
"strimprbyco" "sibc" "use strategy improvement by counterstrategy";;
*)
let register _ =
register_sub_solver
(fun g -> universal_solve (universal_solve_init_options_verbose !universal_solve_global_options) strategy_improvement_cycle_avoid g)
"switchint" "swint" "switch internal #1";
register_sub_solver
(fun g -> universal_solve (universal_solve_init_options_verbose !universal_solve_global_options) strategy_improvement_cycle_enforce g)
"switchintx" "swintx" "switch internal #2";;
|
7a3f5232c5b598da13afdfa088d40ba676cfa0060e70321224d0b9912893e0c5 | ocaml-multicore/multicoretests | lin_tests_dsl.ml | (* ************************************************************ *)
Tests of thread - safe [ Ephemeron ]
(* *)
(* Note that while the API is immutable and does not have *)
(* any toplevel state, the test fails. *)
(* *)
The present guess is that it is because of the GC .
The linearazibilty check fails because we do n't have
control over the GC in order to reproduce its
(* behaviour. *)
(* ************************************************************ *)
module EConf =
struct
module E = Ephemeron.K1.Make(struct
type t = Int.t
let equal = Int.equal
let hash = Fun.id
end)
type t = string E.t
let init () = E.create 42
let cleanup _ = ()
open Lin
let int,string = nat_small, string_small_printable
let api =
[ val_ "Ephemeron.clear" E.clear (t @-> returning unit);
val_ "Ephemeron.add" E.add (t @-> int @-> string @-> returning unit);
val_ "Ephemeron.remove" E.remove (t @-> int @-> returning unit);
val_ "Ephemeron.find" E.find (t @-> int @-> returning_or_exc string);
val_ "Ephemeron.find_opt" E.find_opt (t @-> int @-> returning (option string));
val_ "Ephemeron.find_all" E.find_all (t @-> int @-> returning (list string));
val_ "Ephemeron.replace" E.replace (t @-> int @-> string @-> returning unit);
val_ "Ephemeron.mem" E.mem (t @-> int @-> returning bool);
val_ "Ephemeron.length" E.length (t @-> returning int);
val_ "Ephemeron.clean" E.clean (t @-> returning unit);
]
end
module ET_domain = Lin_domain.Make(EConf)
module ET_thread = Lin_thread.Make(EConf) [@alert "-experimental"]
;;
QCheck_base_runner.run_tests_main [
ET_domain.neg_lin_test ~count:1000 ~name:"Lin DSL Ephemeron test with Domain";
ET_thread.lin_test ~count:250 ~name:"Lin DSL Ephemeron test with Thread";
]
| null | https://raw.githubusercontent.com/ocaml-multicore/multicoretests/3e0f2ceb72eaf334e97252140ae5d40bf6461b96/src/ephemeron/lin_tests_dsl.ml | ocaml | ************************************************************
Note that while the API is immutable and does not have
any toplevel state, the test fails.
behaviour.
************************************************************ | Tests of thread - safe [ Ephemeron ]
The present guess is that it is because of the GC .
The linearazibilty check fails because we do n't have
control over the GC in order to reproduce its
module EConf =
struct
module E = Ephemeron.K1.Make(struct
type t = Int.t
let equal = Int.equal
let hash = Fun.id
end)
type t = string E.t
let init () = E.create 42
let cleanup _ = ()
open Lin
let int,string = nat_small, string_small_printable
let api =
[ val_ "Ephemeron.clear" E.clear (t @-> returning unit);
val_ "Ephemeron.add" E.add (t @-> int @-> string @-> returning unit);
val_ "Ephemeron.remove" E.remove (t @-> int @-> returning unit);
val_ "Ephemeron.find" E.find (t @-> int @-> returning_or_exc string);
val_ "Ephemeron.find_opt" E.find_opt (t @-> int @-> returning (option string));
val_ "Ephemeron.find_all" E.find_all (t @-> int @-> returning (list string));
val_ "Ephemeron.replace" E.replace (t @-> int @-> string @-> returning unit);
val_ "Ephemeron.mem" E.mem (t @-> int @-> returning bool);
val_ "Ephemeron.length" E.length (t @-> returning int);
val_ "Ephemeron.clean" E.clean (t @-> returning unit);
]
end
module ET_domain = Lin_domain.Make(EConf)
module ET_thread = Lin_thread.Make(EConf) [@alert "-experimental"]
;;
QCheck_base_runner.run_tests_main [
ET_domain.neg_lin_test ~count:1000 ~name:"Lin DSL Ephemeron test with Domain";
ET_thread.lin_test ~count:250 ~name:"Lin DSL Ephemeron test with Thread";
]
|
0bb07b7e4fe6c10c18603dc0e7620b2205454bbabcfbfde995bd067b7da8bf05 | fab13n/lamtez | typecheck_ctx.ml | open Utils
module A = Ast
module P = String_of_ast
let _DEBUG_ = ref false
module StringMap = Map.Make(String)
module ExprMap = Map.Make(struct type t = A.expr let compare=compare end)
type composite = {type_params: A.tvar list; cases: (A.tag*A.etype) list}
type substitutions = (A.tvar * A.etype) list
type t = {
sums: composite StringMap.t; (* type_params, (case, type)* *)
products: composite StringMap.t;
product_tags: string StringMap.t;
sum_tags: string StringMap.t;
aliases: A.scheme StringMap.t;
primitives: A.tvar list StringMap.t;
evars: A.scheme StringMap.t;
types_assoc: A.etype ExprMap.t;
}
let empty = {
sums = StringMap.empty;
products = StringMap.empty;
sum_tags = StringMap.empty;
product_tags = StringMap.empty;
aliases = StringMap.empty;
evars = StringMap.empty;
primitives = StringMap.empty;
types_assoc = ExprMap.empty;
}
let string_of_t ctx =
let sot = P.string_of_type in
let sos = P.string_of_scheme in
let list_of_map m = StringMap.fold (fun name x acc -> (name, x)::acc) m [] in
let string_of_comp sep (name, c) =
"type "^name ^" "^ sep_list " " (fun x->x) c.type_params ^ " = " ^
sep_list sep (fun (tag, t) -> tag^" "^sot t) c.cases
in
let string_of_alias (name, (p, t)) = "type "^sep_list " " (fun x->x) (name::p)^" = "^sot t in
let string_of_evar (name, s) = "val "^name^": "^sos s in
let is_not_dummy_alias = function name, ([], A.TApp(_, name', [])) -> false | _ -> true in
sep_list "\n" (fun x->x) (List.flatten [
List.map (string_of_comp " + ") (list_of_map ctx.sums);
List.map (string_of_comp " * ") (list_of_map ctx.products) ;
List.map string_of_alias (List.filter is_not_dummy_alias (list_of_map ctx.aliases));
List.map string_of_evar (list_of_map ctx.evars);
["# sum tags: "^sep_list ", " (fun (a,b) -> a^"->"^b) (list_of_map ctx.sum_tags)];
["# product tags: "^sep_list ", " (fun (a,b) -> a^"->"^b) (list_of_map ctx.product_tags)];
])
let product_of_name, sum_of_name =
let composite_of_name cmap name =
let c = StringMap.find name cmap in
c.type_params, c.cases
in
(fun ctx -> composite_of_name ctx.products),
(fun ctx -> composite_of_name ctx.sums)
let name_of_sum_tag ctx tag = StringMap.find tag ctx.sum_tags
let name_of_product_tag ctx tag = StringMap.find tag ctx.product_tags
let decl_of_name ctx name =
try
let c = StringMap.find name ctx.sums
in A.DSum(A.noloc, name, c.type_params, c.cases)
with Not_found -> try
let c = StringMap.find name ctx.products
in A.DProduct(A.noloc, name, c.type_params, c.cases)
with Not_found -> try
let params = StringMap.find name ctx.primitives
in A.DPrim(A.noloc, name, params)
with Not_found -> try
let (params, t) = StringMap.find name ctx.aliases
in A.DAlias(A.noloc, name, params, t)
with Not_found ->
raise Not_found
let check_fresh_name ctx name =
if StringMap.mem name ctx.sums
|| StringMap.mem name ctx.products
|| StringMap.mem name ctx.aliases
|| StringMap.mem name ctx.primitives
then type_error A.noloc ("duplicate type name "^name)
let check_fresh_tag map cases =
List.iter (fun (tag, _) -> if StringMap.mem tag map then type_error A.noloc ("duplicate tag "^tag)) cases
let add_alias name scheme ctx =
check_fresh_name ctx name;
{ctx with aliases = StringMap.add name scheme ctx.aliases}
let add_composite names_map tags_map aliases_map name type_params cases ctx =
check_fresh_name ctx name;
check_fresh_tag ctx.sum_tags cases;
check_fresh_tag ctx.product_tags cases;
let aliases = if type_params<>[] then aliases_map else StringMap.add name ([], A.tapp name []) aliases_map in
let names_map = StringMap.add name {type_params; cases} names_map in
let tags_map = List.fold_left (fun tags_map (tag, _) -> StringMap.add tag name tags_map) tags_map cases in
aliases, names_map, tags_map
let add_sum name type_params cases ctx =
let aliases, sums, sum_tags =
add_composite ctx.sums ctx.sum_tags ctx.aliases name type_params cases ctx in
{ctx with aliases; sums; sum_tags}
let add_product name type_params cases ctx =
let aliases, products, product_tags =
add_composite ctx.products ctx.product_tags ctx.aliases name type_params cases ctx in
{ctx with aliases; products; product_tags}
let add_prim name type_params ctx =
check_fresh_name ctx name;
let ctx = if type_params<>[] then ctx else
add_alias name ([], A.TApp(A.noloc, name, [])) ctx in
let p = StringMap.add name type_params ctx.primitives in
{ctx with primitives = p}
let add_evar name t ctx =
{ctx with evars=StringMap.add name t ctx.evars}
let forget_evar name ctx =
{ctx with evars=StringMap.remove name ctx.evars}
type bookmark_item = A.evar * A.scheme option
type bookmark = bookmark_item list
let bookmark_empty = []
let push_evar name t ctx =
let prev_content = try Some(StringMap.find name ctx.evars) with Not_found -> None in
add_evar name t ctx, (name, prev_content)
let pop_evar (name, prev_t) ctx =
match prev_t with
| None -> forget_evar name ctx
| Some t -> add_evar name t ctx
let push_evars list ctx =
let fold (ctx, bookmark) (name, scheme) =
let ctx, prev = push_evar name scheme ctx in
ctx, prev :: bookmark
in
List.fold_left fold (ctx, []) list
let pop_evars bookmark ctx =
List.fold_right pop_evar bookmark ctx
let instantiate_scheme (params, t) =
let x = List.fold_left (fun t p -> A.replace_tvar p (A.fresh_tvar()) t) t params in
print_endline ( " Instanciate " ^P.string_of_scheme ( params , t)^ " : : = " ^P.string_of_type x ) ;
x
let instantiate_composite name (params, d_pairs) =
let subst = List.map (fun v -> (v, A.fresh_tvar())) params in
let r (tag, t) = tag, List.fold_left (fun t (v, v') -> A.replace_tvar v v' t) t subst in
A.TApp(A.noloc, name, List.map snd subst), List.map r d_pairs
(* Replace tvars with their values as much as possible, deep into a typeT *)
let rec expand_type ctx t =
let r = expand_type ctx in
match t with
| A.TFail -> t
| A.TLambda(_, t0, t1, cmb) -> A.TLambda(A.noloc, r t0, r t1, cmb)
| A.TApp(_, name, args) -> A.TApp(A.noloc, name, List.map r args)
| A.TTuple(_, types) -> A.TTuple(A.noloc, List.map r types)
| A.TId(_, id) -> (try r (instantiate_scheme (StringMap.find id ctx.aliases)) with Not_found -> t)
and expand_scheme ctx (v, t) =
failwith "Check expand_scheme!"
[ ] , expand_type ctx t
and scheme_of_evar ctx name =
try StringMap.find name ctx.evars
with Not_found -> type_error A.noloc ("Unbound variable "^name)
let save_type e t ctx =
{ctx with types_assoc = ExprMap.add e t ctx.types_assoc}
let retrieve_type ctx e =
try ExprMap.find e ctx.types_assoc
with Not_found -> failwith ("This expression was never typechecked: "^String_of_ast.string_of_expr e)
Combo of a with a map2 : the function f returns both
* an accumulator and a transformed list element .
* ( ' acc - > ' a - > ' b - > ( ' acc*'c ) ) - > ' acc - >
* ' a list - > ' b list - > ( ' acc * ' c list )
* an accumulator and a transformed list element.
* ('acc -> 'a -> 'b -> ('acc*'c)) -> 'acc ->
* 'a list -> 'b list -> ('acc * 'c list)
*)
let list_fold_map2 f acc a_list b_list =
let acc, rev_c_list = List.fold_left2
(fun (acc, c_list) a b -> let acc, c = f acc a b in acc, c::c_list)
(acc, []) a_list b_list
in
acc, List.rev rev_c_list
let get_evars ctx =
List.map fst @@ StringMap.bindings ctx.evars
let rec unify ctx t0 t1 =
TODO : have a direction , to choose a prefered model for the result and report loc
* and differenciate < : int from int < :
* and differenciate nat <: int from int <: nat *)
TODO : add awareness of < : int
let t0 = expand_type ctx t0 in
let t1 = expand_type ctx t1 in
(* print_endline("<"^String_of_ast.string_of_type t0^" U "^String_of_ast.string_of_type t1^">"); *)
match t0, t1 with
| A.TFail, t | t, A.TFail -> ctx, t
| A.TId(_, id0), A.TId(_, id1) ->
if id0=id1 then ctx, t0 else
TODO use better ordering , which favors manually created vars .
let id0, id1 = min id0 id1, max id0 id1 in
if !_DEBUG_ then print_endline ("Constraint: var "^id1^" => var "^id0);
add_alias id1 ([], A.TId(A.noloc, id0)) ctx, A.TId(A.noloc, id0)
| A.TId(_, id), t | t, A.TId(_, id) ->
if !_DEBUG_ then print_endline ("Constraint: var "^id^" => type "^P.string_of_type t);
add_alias id ([], t) ctx, t
TODO closure / combinator unification ? would require a more careful ordering .
| A.TLambda(_, t00, t01, cmb0), A.TLambda(_, t10, t11, cmb1) ->
let ctx, t0 = unify ctx t00 t10 in
let ctx, t1 = unify ctx t01 t11 in
ctx, A.TLambda(A.noloc, t0, t1, cmb0 && cmb1)
| A.TApp ( _ , " " , [ ] ) , A.TApp ( _ , " int " , [ ] ) | A.TApp ( _ , " int " , [ ] ) , A.TApp ( _ , " " , [ ] ) - >
ctx , A.TApp(A.noloc , " " , [ ] )
ctx, A.TApp(A.noloc, "nat", []) *)
| A.TApp(_, name0, args0), A.TApp(_, name1, args1)
when name0=name1 && List.length args0 = List.length args1 ->
let ctx, args_u = list_fold_map2 unify ctx args0 args1 in
ctx, A.TApp(A.noloc, name0, args_u)
| A.TTuple(_, a), A.TTuple(_, b) when List.length a = List.length b ->
let ctx, c = list_fold_map2 unify ctx a b in
ctx, A.TTuple(A.noloc, c)
| _ -> type_error A.noloc ("Not unifiable: "^P.string_of_type t0^" and "^P.string_of_type t1)
TODO add locations to msg . They must come from exprs , not types . | null | https://raw.githubusercontent.com/fab13n/lamtez/ec0aab3093ca8380a4cd364f21cf763d729de25f/typecheck_ctx.ml | ocaml | type_params, (case, type)*
Replace tvars with their values as much as possible, deep into a typeT
print_endline("<"^String_of_ast.string_of_type t0^" U "^String_of_ast.string_of_type t1^">"); | open Utils
module A = Ast
module P = String_of_ast
let _DEBUG_ = ref false
module StringMap = Map.Make(String)
module ExprMap = Map.Make(struct type t = A.expr let compare=compare end)
type composite = {type_params: A.tvar list; cases: (A.tag*A.etype) list}
type substitutions = (A.tvar * A.etype) list
type t = {
products: composite StringMap.t;
product_tags: string StringMap.t;
sum_tags: string StringMap.t;
aliases: A.scheme StringMap.t;
primitives: A.tvar list StringMap.t;
evars: A.scheme StringMap.t;
types_assoc: A.etype ExprMap.t;
}
let empty = {
sums = StringMap.empty;
products = StringMap.empty;
sum_tags = StringMap.empty;
product_tags = StringMap.empty;
aliases = StringMap.empty;
evars = StringMap.empty;
primitives = StringMap.empty;
types_assoc = ExprMap.empty;
}
let string_of_t ctx =
let sot = P.string_of_type in
let sos = P.string_of_scheme in
let list_of_map m = StringMap.fold (fun name x acc -> (name, x)::acc) m [] in
let string_of_comp sep (name, c) =
"type "^name ^" "^ sep_list " " (fun x->x) c.type_params ^ " = " ^
sep_list sep (fun (tag, t) -> tag^" "^sot t) c.cases
in
let string_of_alias (name, (p, t)) = "type "^sep_list " " (fun x->x) (name::p)^" = "^sot t in
let string_of_evar (name, s) = "val "^name^": "^sos s in
let is_not_dummy_alias = function name, ([], A.TApp(_, name', [])) -> false | _ -> true in
sep_list "\n" (fun x->x) (List.flatten [
List.map (string_of_comp " + ") (list_of_map ctx.sums);
List.map (string_of_comp " * ") (list_of_map ctx.products) ;
List.map string_of_alias (List.filter is_not_dummy_alias (list_of_map ctx.aliases));
List.map string_of_evar (list_of_map ctx.evars);
["# sum tags: "^sep_list ", " (fun (a,b) -> a^"->"^b) (list_of_map ctx.sum_tags)];
["# product tags: "^sep_list ", " (fun (a,b) -> a^"->"^b) (list_of_map ctx.product_tags)];
])
let product_of_name, sum_of_name =
let composite_of_name cmap name =
let c = StringMap.find name cmap in
c.type_params, c.cases
in
(fun ctx -> composite_of_name ctx.products),
(fun ctx -> composite_of_name ctx.sums)
let name_of_sum_tag ctx tag = StringMap.find tag ctx.sum_tags
let name_of_product_tag ctx tag = StringMap.find tag ctx.product_tags
let decl_of_name ctx name =
try
let c = StringMap.find name ctx.sums
in A.DSum(A.noloc, name, c.type_params, c.cases)
with Not_found -> try
let c = StringMap.find name ctx.products
in A.DProduct(A.noloc, name, c.type_params, c.cases)
with Not_found -> try
let params = StringMap.find name ctx.primitives
in A.DPrim(A.noloc, name, params)
with Not_found -> try
let (params, t) = StringMap.find name ctx.aliases
in A.DAlias(A.noloc, name, params, t)
with Not_found ->
raise Not_found
let check_fresh_name ctx name =
if StringMap.mem name ctx.sums
|| StringMap.mem name ctx.products
|| StringMap.mem name ctx.aliases
|| StringMap.mem name ctx.primitives
then type_error A.noloc ("duplicate type name "^name)
let check_fresh_tag map cases =
List.iter (fun (tag, _) -> if StringMap.mem tag map then type_error A.noloc ("duplicate tag "^tag)) cases
let add_alias name scheme ctx =
check_fresh_name ctx name;
{ctx with aliases = StringMap.add name scheme ctx.aliases}
let add_composite names_map tags_map aliases_map name type_params cases ctx =
check_fresh_name ctx name;
check_fresh_tag ctx.sum_tags cases;
check_fresh_tag ctx.product_tags cases;
let aliases = if type_params<>[] then aliases_map else StringMap.add name ([], A.tapp name []) aliases_map in
let names_map = StringMap.add name {type_params; cases} names_map in
let tags_map = List.fold_left (fun tags_map (tag, _) -> StringMap.add tag name tags_map) tags_map cases in
aliases, names_map, tags_map
let add_sum name type_params cases ctx =
let aliases, sums, sum_tags =
add_composite ctx.sums ctx.sum_tags ctx.aliases name type_params cases ctx in
{ctx with aliases; sums; sum_tags}
let add_product name type_params cases ctx =
let aliases, products, product_tags =
add_composite ctx.products ctx.product_tags ctx.aliases name type_params cases ctx in
{ctx with aliases; products; product_tags}
let add_prim name type_params ctx =
check_fresh_name ctx name;
let ctx = if type_params<>[] then ctx else
add_alias name ([], A.TApp(A.noloc, name, [])) ctx in
let p = StringMap.add name type_params ctx.primitives in
{ctx with primitives = p}
let add_evar name t ctx =
{ctx with evars=StringMap.add name t ctx.evars}
let forget_evar name ctx =
{ctx with evars=StringMap.remove name ctx.evars}
type bookmark_item = A.evar * A.scheme option
type bookmark = bookmark_item list
let bookmark_empty = []
let push_evar name t ctx =
let prev_content = try Some(StringMap.find name ctx.evars) with Not_found -> None in
add_evar name t ctx, (name, prev_content)
let pop_evar (name, prev_t) ctx =
match prev_t with
| None -> forget_evar name ctx
| Some t -> add_evar name t ctx
let push_evars list ctx =
let fold (ctx, bookmark) (name, scheme) =
let ctx, prev = push_evar name scheme ctx in
ctx, prev :: bookmark
in
List.fold_left fold (ctx, []) list
let pop_evars bookmark ctx =
List.fold_right pop_evar bookmark ctx
let instantiate_scheme (params, t) =
let x = List.fold_left (fun t p -> A.replace_tvar p (A.fresh_tvar()) t) t params in
print_endline ( " Instanciate " ^P.string_of_scheme ( params , t)^ " : : = " ^P.string_of_type x ) ;
x
let instantiate_composite name (params, d_pairs) =
let subst = List.map (fun v -> (v, A.fresh_tvar())) params in
let r (tag, t) = tag, List.fold_left (fun t (v, v') -> A.replace_tvar v v' t) t subst in
A.TApp(A.noloc, name, List.map snd subst), List.map r d_pairs
let rec expand_type ctx t =
let r = expand_type ctx in
match t with
| A.TFail -> t
| A.TLambda(_, t0, t1, cmb) -> A.TLambda(A.noloc, r t0, r t1, cmb)
| A.TApp(_, name, args) -> A.TApp(A.noloc, name, List.map r args)
| A.TTuple(_, types) -> A.TTuple(A.noloc, List.map r types)
| A.TId(_, id) -> (try r (instantiate_scheme (StringMap.find id ctx.aliases)) with Not_found -> t)
and expand_scheme ctx (v, t) =
failwith "Check expand_scheme!"
[ ] , expand_type ctx t
and scheme_of_evar ctx name =
try StringMap.find name ctx.evars
with Not_found -> type_error A.noloc ("Unbound variable "^name)
let save_type e t ctx =
{ctx with types_assoc = ExprMap.add e t ctx.types_assoc}
let retrieve_type ctx e =
try ExprMap.find e ctx.types_assoc
with Not_found -> failwith ("This expression was never typechecked: "^String_of_ast.string_of_expr e)
Combo of a with a map2 : the function f returns both
* an accumulator and a transformed list element .
* ( ' acc - > ' a - > ' b - > ( ' acc*'c ) ) - > ' acc - >
* ' a list - > ' b list - > ( ' acc * ' c list )
* an accumulator and a transformed list element.
* ('acc -> 'a -> 'b -> ('acc*'c)) -> 'acc ->
* 'a list -> 'b list -> ('acc * 'c list)
*)
let list_fold_map2 f acc a_list b_list =
let acc, rev_c_list = List.fold_left2
(fun (acc, c_list) a b -> let acc, c = f acc a b in acc, c::c_list)
(acc, []) a_list b_list
in
acc, List.rev rev_c_list
let get_evars ctx =
List.map fst @@ StringMap.bindings ctx.evars
let rec unify ctx t0 t1 =
TODO : have a direction , to choose a prefered model for the result and report loc
* and differenciate < : int from int < :
* and differenciate nat <: int from int <: nat *)
TODO : add awareness of < : int
let t0 = expand_type ctx t0 in
let t1 = expand_type ctx t1 in
match t0, t1 with
| A.TFail, t | t, A.TFail -> ctx, t
| A.TId(_, id0), A.TId(_, id1) ->
if id0=id1 then ctx, t0 else
TODO use better ordering , which favors manually created vars .
let id0, id1 = min id0 id1, max id0 id1 in
if !_DEBUG_ then print_endline ("Constraint: var "^id1^" => var "^id0);
add_alias id1 ([], A.TId(A.noloc, id0)) ctx, A.TId(A.noloc, id0)
| A.TId(_, id), t | t, A.TId(_, id) ->
if !_DEBUG_ then print_endline ("Constraint: var "^id^" => type "^P.string_of_type t);
add_alias id ([], t) ctx, t
TODO closure / combinator unification ? would require a more careful ordering .
| A.TLambda(_, t00, t01, cmb0), A.TLambda(_, t10, t11, cmb1) ->
let ctx, t0 = unify ctx t00 t10 in
let ctx, t1 = unify ctx t01 t11 in
ctx, A.TLambda(A.noloc, t0, t1, cmb0 && cmb1)
| A.TApp ( _ , " " , [ ] ) , A.TApp ( _ , " int " , [ ] ) | A.TApp ( _ , " int " , [ ] ) , A.TApp ( _ , " " , [ ] ) - >
ctx , A.TApp(A.noloc , " " , [ ] )
ctx, A.TApp(A.noloc, "nat", []) *)
| A.TApp(_, name0, args0), A.TApp(_, name1, args1)
when name0=name1 && List.length args0 = List.length args1 ->
let ctx, args_u = list_fold_map2 unify ctx args0 args1 in
ctx, A.TApp(A.noloc, name0, args_u)
| A.TTuple(_, a), A.TTuple(_, b) when List.length a = List.length b ->
let ctx, c = list_fold_map2 unify ctx a b in
ctx, A.TTuple(A.noloc, c)
| _ -> type_error A.noloc ("Not unifiable: "^P.string_of_type t0^" and "^P.string_of_type t1)
TODO add locations to msg . They must come from exprs , not types . |
fe206d5e0e24ce4ca6b4414802522df5876fbcf199358759a343dc7ff8387e0d | active-group/sqlosure | time_test.clj | (ns sqlosure.time-test
(:require [sqlosure.time :refer :all]
[clojure.test :refer :all]))
(def d1 (java.time.LocalDate/of 1989 10 31))
(def t1 (java.time.LocalDateTime/of 1989 10 31 0 0))
(deftest make-date-test
(is (sqlosure.type/date? (make-date)))
(is (= (make-date 1989 10 31) d1))
(is (thrown? Exception (make-date :not-a-valid-date))))
(deftest make-timestamp-test
(is (sqlosure.type/timestamp? (make-timestamp)))
(is (= (make-timestamp 1989 10 31 0 0)
(java.time.LocalDateTime/of 1989 10 31 0 0)))
(is (= (make-timestamp 1989 10 31 0 0 0)
(java.time.LocalDateTime/of 1989 10 31 0 0 0)))
(is (= (make-timestamp 1989 10 31 0 0 0 0)
(java.time.LocalDateTime/of 1989 10 31 0 0 0 0)))
(is (thrown? Exception (make-timestamp :not-a-valid-timestamp))))
(deftest date-identity-test
(is (= d1
(-> d1
to-sql-date
from-sql-date))))
(deftest timestamp-identity-test
(is (= t1
(-> t1
to-sql-timestamp
from-sql-timestamp))))
(deftest coerce-time-values-test
(is (= (list (to-sql-date d1) (to-sql-timestamp t1) :foo :bar 42)
(coerce-time-values [d1 t1 :foo :bar 42]))))
| null | https://raw.githubusercontent.com/active-group/sqlosure/3cd74e90df4f3c49c841a1a75941acc7444c4bfb/test/sqlosure/time_test.clj | clojure | (ns sqlosure.time-test
(:require [sqlosure.time :refer :all]
[clojure.test :refer :all]))
(def d1 (java.time.LocalDate/of 1989 10 31))
(def t1 (java.time.LocalDateTime/of 1989 10 31 0 0))
(deftest make-date-test
(is (sqlosure.type/date? (make-date)))
(is (= (make-date 1989 10 31) d1))
(is (thrown? Exception (make-date :not-a-valid-date))))
(deftest make-timestamp-test
(is (sqlosure.type/timestamp? (make-timestamp)))
(is (= (make-timestamp 1989 10 31 0 0)
(java.time.LocalDateTime/of 1989 10 31 0 0)))
(is (= (make-timestamp 1989 10 31 0 0 0)
(java.time.LocalDateTime/of 1989 10 31 0 0 0)))
(is (= (make-timestamp 1989 10 31 0 0 0 0)
(java.time.LocalDateTime/of 1989 10 31 0 0 0 0)))
(is (thrown? Exception (make-timestamp :not-a-valid-timestamp))))
(deftest date-identity-test
(is (= d1
(-> d1
to-sql-date
from-sql-date))))
(deftest timestamp-identity-test
(is (= t1
(-> t1
to-sql-timestamp
from-sql-timestamp))))
(deftest coerce-time-values-test
(is (= (list (to-sql-date d1) (to-sql-timestamp t1) :foo :bar 42)
(coerce-time-values [d1 t1 :foo :bar 42]))))
|
Subsets and Splits